The translation equivariance of convolutional layers enables convolutional
neural networks to generalize well on image problems. While translation
equivariance provides a powerful inductive bias for images, we often
additionally desire equivariance to other transformations, such as rotations,
especially for non-image data. We propose a general method to construct a
convolutional layer that is equivariant to transformations from any specified
Lie group with a surjective exponential map. Incorporating equivariance to a
new group requires implementing only the group exponential and logarithm maps,
enabling rapid prototyping. Showcasing the simplicity and generality of our
method, we apply the same model architecture to images, ball-and-stick
molecular data, and Hamiltonian dynamical systems. For Hamiltonian systems, the
equivariance of our models is especially impactful, leading to exact
conservation of linear and angular momentum.
Description
[2002.12880] Generalizing Convolutional Neural Networks for Equivariance to Lie Groups on Arbitrary Continuous Data
%0 Journal Article
%1 finzi2020generalizing
%A Finzi, Marc
%A Stanton, Samuel
%A Izmailov, Pavel
%A Wilson, Andrew Gordon
%D 2020
%K deep-learning equivariance readings
%T Generalizing Convolutional Neural Networks for Equivariance to Lie
Groups on Arbitrary Continuous Data
%U http://arxiv.org/abs/2002.12880
%X The translation equivariance of convolutional layers enables convolutional
neural networks to generalize well on image problems. While translation
equivariance provides a powerful inductive bias for images, we often
additionally desire equivariance to other transformations, such as rotations,
especially for non-image data. We propose a general method to construct a
convolutional layer that is equivariant to transformations from any specified
Lie group with a surjective exponential map. Incorporating equivariance to a
new group requires implementing only the group exponential and logarithm maps,
enabling rapid prototyping. Showcasing the simplicity and generality of our
method, we apply the same model architecture to images, ball-and-stick
molecular data, and Hamiltonian dynamical systems. For Hamiltonian systems, the
equivariance of our models is especially impactful, leading to exact
conservation of linear and angular momentum.
@misc{finzi2020generalizing,
  author        = {Finzi, Marc and Stanton, Samuel and Izmailov, Pavel and Wilson, Andrew Gordon},
  title         = {Generalizing Convolutional Neural Networks for Equivariance to
                   {Lie} Groups on Arbitrary Continuous Data},
  year          = {2020},
  eprint        = {2002.12880},
  archiveprefix = {arXiv},
  url           = {https://arxiv.org/abs/2002.12880},
  abstract      = {The translation equivariance of convolutional layers enables convolutional
neural networks to generalize well on image problems. While translation
equivariance provides a powerful inductive bias for images, we often
additionally desire equivariance to other transformations, such as rotations,
especially for non-image data. We propose a general method to construct a
convolutional layer that is equivariant to transformations from any specified
Lie group with a surjective exponential map. Incorporating equivariance to a
new group requires implementing only the group exponential and logarithm maps,
enabling rapid prototyping. Showcasing the simplicity and generality of our
method, we apply the same model architecture to images, ball-and-stick
molecular data, and Hamiltonian dynamical systems. For Hamiltonian systems, the
equivariance of our models is especially impactful, leading to exact
conservation of linear and angular momentum.},
  keywords      = {deep-learning equivariance readings},
  description   = {[2002.12880] Generalizing Convolutional Neural Networks for Equivariance to Lie Groups on Arbitrary Continuous Data},
  added-at      = {2020-03-03T20:55:34.000+0100},
  timestamp     = {2020-03-03T20:55:34.000+0100},
  biburl        = {https://www.bibsonomy.org/bibtex/2c921173ba103fa7643a30beb95b8a3dc/kirk86},
  interhash     = {edfef2e155cfa4f14c14e1bb1c7acaf9},
  intrahash     = {c921173ba103fa7643a30beb95b8a3dc},
}