Learning and analyzing 3D point clouds with deep networks is challenging due
to the sparseness and irregularity of the data. In this paper, we present a
data-driven point cloud upsampling technique. The key idea is to learn
multi-level features per point and expand the point set via a multi-branch
convolution unit implicitly in feature space. The expanded feature is then
split into a multitude of features, which are then reconstructed into an upsampled
point set. Our network is applied at a patch-level, with a joint loss function
that encourages the upsampled points to remain on the underlying surface with a
uniform distribution. We conduct various experiments using synthetic and scanned
data to evaluate our method and demonstrate its superiority over some baseline
methods and an optimization-based method. Results show that our upsampled
points have better uniformity and are located closer to the underlying
surfaces.
Description
[1801.06761] PU-Net: Point Cloud Upsampling Network
%0 Generic
%1 yu2018punet
%A Yu, Lequan
%A Li, Xianzhi
%A Fu, Chi-Wing
%A Cohen-Or, Daniel
%A Heng, Pheng-Ann
%D 2018
%K 2018 arxiv china cvpr geometry graphics israel point-cloud sampling
%T PU-Net: Point Cloud Upsampling Network
%U http://arxiv.org/abs/1801.06761
%X Learning and analyzing 3D point clouds with deep networks is challenging due
to the sparseness and irregularity of the data. In this paper, we present a
data-driven point cloud upsampling technique. The key idea is to learn
multi-level features per point and expand the point set via a multi-branch
convolution unit implicitly in feature space. The expanded feature is then
split into a multitude of features, which are then reconstructed into an upsampled
point set. Our network is applied at a patch-level, with a joint loss function
that encourages the upsampled points to remain on the underlying surface with a
uniform distribution. We conduct various experiments using synthetic and scanned
data to evaluate our method and demonstrate its superiority over some baseline
methods and an optimization-based method. Results show that our upsampled
points have better uniformity and are located closer to the underlying
surfaces.
@misc{yu2018punet,
  abstract      = {Learning and analyzing 3D point clouds with deep networks is challenging due
to the sparseness and irregularity of the data. In this paper, we present a
data-driven point cloud upsampling technique. The key idea is to learn
multi-level features per point and expand the point set via a multi-branch
convolution unit implicitly in feature space. The expanded feature is then
split to a multitude of features, which are then reconstructed to an upsampled
point set. Our network is applied at a patch-level, with a joint loss function
that encourages the upsampled points to remain on the underlying surface with a
uniform distribution. We conduct various experiments using synthesis and scan
data to evaluate our method and demonstrate its superiority over some baseline
methods and an optimization-based method. Results show that our upsampled
points have better uniformity and are located closer to the underlying
surfaces.},
  added-at      = {2018-06-10T20:16:22.000+0200},
  author        = {Yu, Lequan and Li, Xianzhi and Fu, Chi-Wing and Cohen-Or, Daniel and Heng, Pheng-Ann},
  biburl        = {https://www.bibsonomy.org/bibtex/21ac7cebfb729323901f2054197ce96cd/achakraborty},
  description   = {[1801.06761] PU-Net: Point Cloud Upsampling Network},
  eprint        = {1801.06761},
  archiveprefix = {arXiv},
  interhash     = {44fdc9e66e1f95eb0e11c8dfa10b1875},
  intrahash     = {1ac7cebfb729323901f2054197ce96cd},
  keywords      = {2018 arxiv china cvpr geometry graphics israel point-cloud sampling},
  note          = {Accepted by {CVPR} 2018},
  timestamp     = {2018-06-10T20:16:22.000+0200},
  title         = {{PU-Net}: Point Cloud Upsampling Network},
  url           = {http://arxiv.org/abs/1801.06761},
  year          = {2018},
}