To learn a new visual category from few examples, prior knowledge from unlabeled data as well as previous related categories may be useful. We develop a new method for transfer learning which exploits available unlabeled data and an arbitrary kernel function; we form a representation based on kernel distances to a large set of unlabeled data points. To transfer knowledge from previous related problems we observe that a category might be learnable using only a small subset of reference prototypes. Related problems may share a significant number of relevant prototypes; we find such a concise representation by performing a joint loss minimization over the training sets of related problems with a shared regularization penalty that minimizes the total number of prototypes involved in the approximation. This optimization problem can be formulated as a linear program that can be solved efficiently. We conduct experiments on a news-topic prediction task where the goal is to predict whether an image belongs to a particular news topic. Our results show that when only few examples are available for training a target topic, leveraging knowledge learnt from other topics can significantly improve performance.
%0 Conference Paper
%1 Quattoni2008Transfer
%A Quattoni, Ariadna
%A Collins, Michael
%A Darrell, Trevor
%B Computer Vision and Pattern Recognition, 2008. CVPR 2008. IEEE Conference on
%D 2008
%K active image labeling learning phd unread
%P 1-8
%R 10.1109/CVPR.2008.4587637
%T Transfer learning for image classification with sparse prototype representations
%X To learn a new visual category from few examples, prior knowledge from unlabeled data as well as previous related categories may be useful. We develop a new method for transfer learning which exploits available unlabeled data and an arbitrary kernel function; we form a representation based on kernel distances to a large set of unlabeled data points. To transfer knowledge from previous related problems we observe that a category might be learnable using only a small subset of reference prototypes. Related problems may share a significant number of relevant prototypes; we find such a concise representation by performing a joint loss minimization over the training sets of related problems with a shared regularization penalty that minimizes the total number of prototypes involved in the approximation. This optimization problem can be formulated as a linear program that can be solved efficiently. We conduct experiments on a news-topic prediction task where the goal is to predict whether an image belongs to a particular news topic. Our results show that when only few examples are available for training a target topic, leveraging knowledge learnt from other topics can significantly improve performance.
@inproceedings{Quattoni2008Transfer,
  abstract    = {To learn a new visual category from few examples, prior knowledge from unlabeled data as well as previous related categories may be useful. We develop a new method for transfer learning which exploits available unlabeled data and an arbitrary kernel function; we form a representation based on kernel distances to a large set of unlabeled data points. To transfer knowledge from previous related problems we observe that a category might be learnable using only a small subset of reference prototypes. Related problems may share a significant number of relevant prototypes; we find such a concise representation by performing a joint loss minimization over the training sets of related problems with a shared regularization penalty that minimizes the total number of prototypes involved in the approximation. This optimization problem can be formulated as a linear program that can be solved efficiently. We conduct experiments on a news-topic prediction task where the goal is to predict whether an image belongs to a particular news topic. Our results show that when only few examples are available for training a target topic, leveraging knowledge learnt from other topics can significantly improve performance.},
  added-at    = {2009-07-13T09:30:06.000+0200},
  author      = {Quattoni, Ariadna and Collins, Michael and Darrell, Trevor},
  biburl      = {https://www.bibsonomy.org/bibtex/234a82960b4389690f692326f80e2689f/casi},
  booktitle   = {2008 {IEEE} Conference on Computer Vision and Pattern Recognition ({CVPR})},
  description = {Training with unlabelled data.},
  doi         = {10.1109/CVPR.2008.4587637},
  interhash   = {76a3a4e933d1e585d03b717b8e2acae1},
  intrahash   = {34a82960b4389690f692326f80e2689f},
  issn        = {1063-6919},
  keywords    = {active image labeling learning phd unread},
  month       = jun,
  pages       = {1--8},
  timestamp   = {2010-01-29T16:03:41.000+0100},
  title       = {Transfer learning for image classification with sparse prototype representations},
  year        = {2008}
}