Can we detect common objects in a variety of image domains without
instance-level annotations? In this paper, we present a framework for a novel
task, cross-domain weakly supervised object detection, which addresses this
question. For this paper, we have access to images with instance-level
annotations in a source domain (e.g., natural image) and images with
image-level annotations in a target domain (e.g., watercolor). In addition, the
classes to be detected in the target domain are all or a subset of those in the
source domain. Starting from a fully supervised object detector, which is
pre-trained on the source domain, we propose a two-step progressive domain
adaptation technique by fine-tuning the detector on two types of artificially
and automatically generated samples. We test our methods on our newly collected
datasets containing three image domains, and achieve an improvement of
approximately 5 to 20 percentage points in terms of mean average precision
(mAP) compared to the best-performing baselines.
%0 Generic
%1 citeulike:14581109
%A Inoue, Naoto
%A Furuta, Ryosuke
%A Yamasaki, Toshihiko
%A Aizawa, Kiyoharu
%D 2018
%K augmentation detection domain_adapt semisup
%T Cross-Domain Weakly-Supervised Object Detection through Progressive Domain Adaptation
%U http://arxiv.org/abs/1803.11365
%X Can we detect common objects in a variety of image domains without
instance-level annotations? In this paper, we present a framework for a novel
task, cross-domain weakly supervised object detection, which addresses this
question. For this paper, we have access to images with instance-level
annotations in a source domain (e.g., natural image) and images with
image-level annotations in a target domain (e.g., watercolor). In addition, the
classes to be detected in the target domain are all or a subset of those in the
source domain. Starting from a fully supervised object detector, which is
pre-trained on the source domain, we propose a two-step progressive domain
adaptation technique by fine-tuning the detector on two types of artificially
and automatically generated samples. We test our methods on our newly collected
datasets containing three image domains, and achieve an improvement of
approximately 5 to 20 percentage points in terms of mean average precision
(mAP) compared to the best-performing baselines.
@misc{citeulike:14581109,
  abstract             = {Can we detect common objects in a variety of image domains without
instance-level annotations? In this paper, we present a framework for a novel
task, cross-domain weakly supervised object detection, which addresses this
question. For this paper, we have access to images with instance-level
annotations in a source domain (e.g., natural image) and images with
image-level annotations in a target domain (e.g., watercolor). In addition, the
classes to be detected in the target domain are all or a subset of those in the
source domain. Starting from a fully supervised object detector, which is
pre-trained on the source domain, we propose a two-step progressive domain
adaptation technique by fine-tuning the detector on two types of artificially
and automatically generated samples. We test our methods on our newly collected
datasets containing three image domains, and achieve an improvement of
approximately 5 to 20 percentage points in terms of mean average precision
(mAP) compared to the best-performing baselines.},
  added-at             = {2019-02-27T22:23:29.000+0100},
  archiveprefix        = {arXiv},
  author               = {Inoue, Naoto and Furuta, Ryosuke and Yamasaki, Toshihiko and Aizawa, Kiyoharu},
  biburl               = {https://www.bibsonomy.org/bibtex/22dc88c1bd2e632b90c8c1dd3ea3f77f4/nmatsuk},
  citeulike-article-id = {14581109},
  citeulike-linkout-0  = {http://arxiv.org/abs/1803.11365},
  citeulike-linkout-1  = {http://arxiv.org/pdf/1803.11365},
  day                  = 30,
  eprint               = {1803.11365},
  interhash            = {f387d50007ee8e2943e83905bba1464b},
  intrahash            = {2dc88c1bd2e632b90c8c1dd3ea3f77f4},
  keywords             = {augmentation detection domain\_adapt semisup},
  month                = mar,
  posted-at            = {2018-05-05 16:19:40},
  primaryclass         = {cs.CV},
  priority             = {0},
  timestamp            = {2019-02-27T22:23:29.000+0100},
  title                = {Cross-Domain Weakly-Supervised Object Detection through {Progressive Domain Adaptation}},
  url                  = {http://arxiv.org/abs/1803.11365},
  year                 = 2018
}