We develop a framework for learning sparse nonparametric directed acyclic
graphs (DAGs) from data. Our approach is based on a recent algebraic
characterization of DAGs that led to a fully continuous program for score-based
learning of DAG models parametrized by a linear structural equation model
(SEM). We extend this algebraic characterization to nonparametric SEM by
leveraging nonparametric sparsity based on partial derivatives, resulting in a
continuous optimization problem that can be applied to a variety of
nonparametric and semiparametric models including GLMs, additive noise models,
and index models as special cases. Unlike existing approaches that require
specific modeling choices, loss functions, or algorithms, we present a
completely general framework that can be applied to general nonlinear models
(e.g. without additive noise), general differentiable loss functions, and
generic black-box optimization routines. The code is available at
https://github.com/xunzheng/notears.
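
For context, the "algebraic characterization" the abstract refers to is the trace-of-matrix-exponential acyclicity function from the earlier NOTEARS work (Zheng et al., 2018), which this paper extends by replacing linear edge weights with norms of partial derivatives. The sketch below is a minimal illustration of that idea, not code from the linked repository; the function name `acyclicity` and the toy matrices are ours.

```python
import numpy as np
from scipy.linalg import expm  # matrix exponential

def acyclicity(W):
    """Smooth acyclicity measure h(W) = tr(exp(W ∘ W)) - d.

    h(W) = 0 exactly when the weighted adjacency matrix W corresponds
    to a DAG, so acyclicity can be imposed as a smooth equality
    constraint in a continuous program rather than by combinatorial
    search. In the nonparametric extension, W[k, j] stands in for a
    norm of the partial derivative of f_j with respect to x_k, so
    W[k, j] = 0 means variable k is not a parent of variable j.
    """
    d = W.shape[0]
    return np.trace(expm(W * W)) - d  # W * W is the elementwise (Hadamard) square

W_dag = np.array([[0.0, 1.5], [0.0, 0.0]])  # edge 1 -> 2 only: acyclic
W_cyc = np.array([[0.0, 1.5], [0.7, 0.0]])  # edges 1 -> 2 and 2 -> 1: cyclic
print(acyclicity(W_dag))  # approximately 0
print(acyclicity(W_cyc))  # strictly positive
```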
@article{zheng2019learning,
  author = {Zheng, Xun and Dan, Chen and Aragam, Bryon and Ravikumar, Pradeep and Xing, Eric P.},
  title = {Learning Sparse Nonparametric DAGs},
  year = {2019},
  url = {http://arxiv.org/abs/1909.13189},
  keywords = {causal-analysis graphs sparsity},
  note = {arXiv:1909.13189. To appear in AISTATS 2020}
}