Training Model by Knowledge Distillation for Image-text Matching
Uses the knowledge distillation method to compress pre-trained models for image-text matching tasks. Designs lightweight models and uses knowledge distillation so that previously ineffective models achieve better results after training.
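For orientation, below is a minimal sketch of how knowledge distillation is commonly applied to image-text matching: a lightweight student is trained to reproduce the softened similarity distribution of a large frozen teacher while also fitting the ground-truth image-caption pairs. This is not the authors' implementation; the loss form, temperature, and alpha weighting are illustrative assumptions.

    # Sketch: response-based distillation loss for image-text matching (assumed setup).
    import torch
    import torch.nn.functional as F

    def distillation_loss(student_logits, teacher_logits, temperature=4.0, alpha=0.5):
        """student_logits, teacher_logits: [batch, batch] image-to-text similarity matrices.
        Returns a weighted sum of soft-target KL loss and hard-target contrastive loss."""
        # Soft targets: match the teacher's temperature-softened similarity distribution.
        soft_targets = F.softmax(teacher_logits / temperature, dim=-1)
        log_student = F.log_softmax(student_logits / temperature, dim=-1)
        kd_loss = F.kl_div(log_student, soft_targets, reduction="batchmean") * temperature ** 2

        # Hard targets: the i-th image matches the i-th caption (InfoNCE-style).
        labels = torch.arange(student_logits.size(0), device=student_logits.device)
        ce_loss = F.cross_entropy(student_logits, labels)

        return alpha * kd_loss + (1.0 - alpha) * ce_loss

    # Usage with random similarity matrices standing in for model outputs.
    teacher_sim = torch.randn(8, 8)                              # from a large frozen teacher
    student_sim = torch.randn(8, 8, requires_grad=True)          # from the lightweight student
    loss = distillation_loss(student_sim, teacher_sim)
    loss.backward()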
%0 Conference Paper
%1 conf/icaice/LiuYK23
%A Liu, Hai
%A Yao, Xingxing
%A Kong, Xiangyu
%B ICAICE
%D 2023
%I ACM
%K dblp
%P 476-481
%T Training Model by Knowledge Distillation for Image-text Matching
%X Uses the knowledge distillation method to compress pre-trained models for image-text matching tasks. Designs lightweight models and uses knowledge distillation so that previously ineffective models achieve better results after training.
%U http://dblp.uni-trier.de/db/conf/icaice/icaice2023.html#LiuYK23
@inproceedings{conf/icaice/LiuYK23,
added-at = {2024-05-31T00:00:00.000+0200},
author = {Liu, Hai and Yao, Xingxing and Kong, Xiangyu},
biburl = {https://www.bibsonomy.org/bibtex/2218f83e0fd05b72ef117d2b92701c69d/dblp},
booktitle = {ICAICE},
crossref = {conf/icaice/2023},
ee = {https://doi.org/10.1145/3652628.3652707},
interhash = {a6b5facdbae166549822dc7eda66ce54},
intrahash = {218f83e0fd05b72ef117d2b92701c69d},
keywords = {dblp},
pages = {476--481},
publisher = {ACM},
timestamp = {2024-06-03T07:19:26.000+0200},
title = {Training Model by Knowledge Distillation for Image-text Matching},
  abstract = {Uses the knowledge distillation method to compress pre-trained models for image-text matching tasks. Designs lightweight models and uses knowledge distillation so that previously ineffective models achieve better results after training.},
url = {http://dblp.uni-trier.de/db/conf/icaice/icaice2023.html#LiuYK23},
year = 2023
}