This work presents a new strategy for multi-class classification that requires no class-specific labels, but instead leverages pairwise similarity between examples, which is a weaker form of annotation. The proposed method, meta classification learning, optimizes a binary classifier for pairwise similarity prediction and through this process learns a multi-class classifier as a submodule. We formulate this approach, present a probabilistic graphical model for it, and derive a surprisingly simple loss function that can be used to learn neural network-based models. We then demonstrate that this same framework generalizes to the supervised, unsupervised cross-task, and semi-supervised settings. Our method is evaluated against state of the art in all three learning paradigms and shows a superior or comparable accuracy, providing evidence that learning multi-class classification without multi-class labels is a viable learning option.
%0 Conference Paper
%1 conf/iclr/HsuLSOK19
%A Hsu, Yen-Chang
%A Lv, Zhaoyang
%A Schlosser, Joel
%A Odom, Phillip
%A Kira, Zsolt
%B ICLR (Poster)
%D 2019
%I OpenReview.net
%K clustering pairwise-constraints semi-supervised
%T Multi-class classification without multi-class labels.
%U https://arxiv.org/pdf/1901.00544
%X This work presents a new strategy for multi-class classification that requires no class-specific labels, but instead leverages pairwise similarity between examples, which is a weaker form of annotation. The proposed method, meta classification learning, optimizes a binary classifier for pairwise similarity prediction and through this process learns a multi-class classifier as a submodule. We formulate this approach, present a probabilistic graphical model for it, and derive a surprisingly simple loss function that can be used to learn neural network-based models. We then demonstrate that this same framework generalizes to the supervised, unsupervised cross-task, and semi-supervised settings. Our method is evaluated against state of the art in all three learning paradigms and shows a superior or comparable accuracy, providing evidence that learning multi-class classification without multi-class labels is a viable learning option.
@inproceedings{conf/iclr/HsuLSOK19,
  abstract   = {This work presents a new strategy for multi-class classification that requires no class-specific labels, but instead leverages pairwise similarity between examples, which is a weaker form of annotation. The proposed method, meta classification learning, optimizes a binary classifier for pairwise similarity prediction and through this process learns a multi-class classifier as a submodule. We formulate this approach, present a probabilistic graphical model for it, and derive a surprisingly simple loss function that can be used to learn neural network-based models. We then demonstrate that this same framework generalizes to the supervised, unsupervised cross-task, and semi-supervised settings. Our method is evaluated against state of the art in all three learning paradigms and shows a superior or comparable accuracy, providing evidence that learning multi-class classification without multi-class labels is a viable learning option.},
  added-at   = {2020-04-15T13:11:12.000+0200},
  author     = {Hsu, Yen-Chang and Lv, Zhaoyang and Schlosser, Joel and Odom, Phillip and Kira, Zsolt},
  biburl     = {https://www.bibsonomy.org/bibtex/200acebf2afd4ef2421f05d619ab609b6/ghagerer},
  booktitle  = {ICLR (Poster)},
  crossref   = {conf/iclr/2019},
  ee         = {https://openreview.net/forum?id=SJzR2iRcK7},
  eprint     = {1901.00544},
  eprinttype = {arXiv},
  interhash  = {d15710a6859ba2a191acb140ca72c416},
  intrahash  = {00acebf2afd4ef2421f05d619ab609b6},
  keywords   = {clustering pairwise-constraints semi-supervised},
  publisher  = {OpenReview.net},
  timestamp  = {2020-10-20T17:32:35.000+0200},
  title      = {Multi-Class Classification Without Multi-Class Labels},
  url        = {https://openreview.net/forum?id=SJzR2iRcK7},
  year       = {2019},
}