General circulation models (GCMs) typically have a grid size of 25–200 km. Parametrizations are used to represent diabatic processes such as radiative transfer and cloud microphysics and account for subgrid-scale motions and variability. Unlike traditional approaches, neural networks (NNs) can readily exploit recent observational data sets and global cloud-system resolving model (CRM) simulations to learn subgrid variability. This article describes an NN parametrization trained by coarse-graining a near-global CRM simulation with a 4-km horizontal grid spacing. The NN predicts the residual heating and moistening averaged over (160 km)² grid boxes as a function of the coarse-resolution fields within the same atmospheric column. This NN is coupled to the dynamical core of a GCM with the same 160-km resolution. A recent study described how to train such an NN to be stable when coupled to specified time-evolving advective forcings in a single-column model, but feedbacks between NN and GCM components cause spatially extended simulations to crash within a few days. Analyzing the linearized response of such an NN reveals that it learns to exploit a strong synchrony between precipitation and the atmospheric state above 10 km. Removing these variables from the NN's inputs stabilizes the coupled simulations, which predict the future state more accurately than a coarse-resolution simulation without any parametrizations of subgrid-scale variability, although the mean state slowly drifts.
%0 Journal Article
%1 https://doi.org/10.1029/2019MS001711
%A Brenowitz, Noah D.
%A Bretherton, Christopher S.
%D 2019
%J Journal of Advances in Modeling Earth Systems
%K climate, cloud-system resolving model, global, machine learning, parameterization
%N 8
%P 2728-2744
%R 10.1029/2019MS001711
%T Spatially Extended Tests of a Neural Network Parametrization Trained by Coarse-Graining
%U https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2019MS001711
%V 11
%X General circulation models (GCMs) typically have a grid size of 25–200 km. Parametrizations are used to represent diabatic processes such as radiative transfer and cloud microphysics and account for subgrid-scale motions and variability. Unlike traditional approaches, neural networks (NNs) can readily exploit recent observational data sets and global cloud-system resolving model (CRM) simulations to learn subgrid variability. This article describes an NN parametrization trained by coarse-graining a near-global CRM simulation with a 4-km horizontal grid spacing. The NN predicts the residual heating and moistening averaged over (160 km)² grid boxes as a function of the coarse-resolution fields within the same atmospheric column. This NN is coupled to the dynamical core of a GCM with the same 160-km resolution. A recent study described how to train such an NN to be stable when coupled to specified time-evolving advective forcings in a single-column model, but feedbacks between NN and GCM components cause spatially extended simulations to crash within a few days. Analyzing the linearized response of such an NN reveals that it learns to exploit a strong synchrony between precipitation and the atmospheric state above 10 km. Removing these variables from the NN's inputs stabilizes the coupled simulations, which predict the future state more accurately than a coarse-resolution simulation without any parametrizations of subgrid-scale variability, although the mean state slowly drifts.
@article{https://doi.org/10.1029/2019MS001711,
  abstract   = {General circulation models (GCMs) typically have a grid size of 25--200 km. Parametrizations are used to represent diabatic processes such as radiative transfer and cloud microphysics and account for subgrid-scale motions and variability. Unlike traditional approaches, neural networks (NNs) can readily exploit recent observational data sets and global cloud-system resolving model (CRM) simulations to learn subgrid variability. This article describes an NN parametrization trained by coarse-graining a near-global CRM simulation with a 4-km horizontal grid spacing. The NN predicts the residual heating and moistening averaged over (160 km)$^2$ grid boxes as a function of the coarse-resolution fields within the same atmospheric column. This NN is coupled to the dynamical core of a GCM with the same 160-km resolution. A recent study described how to train such an NN to be stable when coupled to specified time-evolving advective forcings in a single-column model, but feedbacks between NN and GCM components cause spatially extended simulations to crash within a few days. Analyzing the linearized response of such an NN reveals that it learns to exploit a strong synchrony between precipitation and the atmospheric state above 10 km. Removing these variables from the NN's inputs stabilizes the coupled simulations, which predict the future state more accurately than a coarse-resolution simulation without any parametrizations of subgrid-scale variability, although the mean state slowly drifts.},
  added-at   = {2021-01-19T11:21:16.000+0100},
  author     = {Brenowitz, Noah D. and Bretherton, Christopher S.},
  biburl     = {https://www.bibsonomy.org/bibtex/269c6ad35db67a186bb57045d1472ed79/annakrause},
  doi        = {10.1029/2019MS001711},
  eprint     = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/2019MS001711},
  interhash  = {a59ed4c115ec3d53980b8d297288ca19},
  intrahash  = {69c6ad35db67a186bb57045d1472ed79},
  journal    = {Journal of Advances in Modeling Earth Systems},
  keywords   = {climate, cloud-system resolving model, global, machine learning, parameterization},
  number     = {8},
  pages      = {2728--2744},
  timestamp  = {2021-01-19T11:21:16.000+0100},
  title      = {Spatially Extended Tests of a Neural Network Parametrization Trained by Coarse-Graining},
  url        = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2019MS001711},
  volume     = {11},
  year       = {2019},
}