Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Paper: PyDDA: A New Pythonic Multiple Doppler Retrieval Package #474

Merged
merged 20 commits into from Jul 3, 2019
Merged
Changes from 17 commits
Commits
File filter...
Filter file types
Jump to…
Jump to file or symbol
Failed to load files and symbols.

Always

Just for now

@@ -4,4 +4,5 @@ output
_build
*.pyc
*.swp
*~
*
.idea~
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,202 @@
@article{HelmusandCollis2016,
  title    = {The {Python} {ARM} {Radar} {Toolkit} ({Py-ART}), a {Library} for {Working} with {Weather} {Radar} {Data} in the {Python} {Programming} {Language}},
  author   = {Helmus, Jonathan and Collis, Scott},
  journal  = {Journal of Open Research Software},
  volume   = {4},
  number   = {1},
  month    = jul,
  year     = {2016},
  doi      = {10.5334/jors.119},
  issn     = {2049-9647},
  language = {en},
  keywords = {radar, Doppler velocity, Python, weather, weather radar},
}

@manual{Dask2016,
  author = {{Dask Development Team}},
  title  = {Dask: Library for dynamic task scheduling},
  year   = {2016},
  url    = {http://dask.pydata.org},
}

@techreport{CEDRIC,
  author      = {Miller, L. Jay and Fredrick, Sherrie M.},
  title       = {Custom Editing and Display of Reduced Information in {Cartesian} space ({CEDRIC}) manual},
  institution = {National Center for Atmospheric Research, Mesoscale and Microscale Meteorology Division},
  address     = {Boulder, CO},
  year        = {1998},
  pages       = {1--130},
}

@article{NumPy,
  author  = {van der Walt, St{\'e}fan and Colbert, S. Chris and Varoquaux, Ga{\"e}l},
  title   = {The {NumPy} Array: A Structure for Efficient Numerical Computation},
  journal = {Computing in Science \& Engineering},
  volume  = {13},
  number  = {2},
  pages   = {22--30},
  month   = mar,
  year    = {2011},
  doi     = {10.1109/MCSE.2011.37},
  issn    = {1521-9615},
}

@manual{Cartopy,
  author  = {{Met Office}},
  title   = {Cartopy: a cartographic python library with a matplotlib interface},
  year    = {2010--2015},
  address = {Exeter, Devon},
  url     = {http://scitools.org.uk/cartopy},
}

@misc{SciPy,
  author = {Jones, Eric and Oliphant, Travis and Peterson, Pearu and others},
  title  = {{SciPy}: Open source scientific tools for {Python}},
  year   = {2001},
  url    = {http://www.scipy.org/},
  note   = {[Online; accessed 2019-07-03]},
}

@article{Byrdetal1995,
  author    = {Byrd, Richard H. and Lu, Peihuang and Nocedal, Jorge and Zhu, Ciyou},
  title     = {A Limited Memory Algorithm for Bound Constrained Optimization},
  journal   = {SIAM Journal on Scientific Computing},
  volume    = {16},
  number    = {5},
  month     = sep,
  year      = {1995},
  pages     = {1190--1208},
  doi       = {10.1137/0916069},
  issn      = {1064-8275},
  publisher = {Society for Industrial and Applied Mathematics},
  address   = {Philadelphia, PA, USA},
  keywords  = {bound constrained optimization, large-scale optimization, limited memory method, nonlinear optimization, quasi-Newton method},
}


@misc{Langetal2017,
  author = {Lang, Timothy and Souto, Mario and Khobahi, Shahin and Jackson, Bobby},
  title  = {nasa/{MultiDop}: {MultiDop} v0.3},
  month  = oct,
  year   = {2017},
  doi    = {10.5281/zenodo.1035904},
}

@article{Shapiroetal2009,
  author  = {Shapiro, Alan and Potvin, Corey K. and Gao, Jidong},
  title   = {Use of a Vertical Vorticity Equation in Variational Dual-{Doppler} Wind Analysis},
  journal = {Journal of Atmospheric and Oceanic Technology},
  volume  = {26},
  number  = {10},
  pages   = {2089--2106},
  year    = {2009},
  doi     = {10.1175/2009JTECHA1256.1},
}

@article{Blaylocketal2017,
  author   = {Blaylock, Brian K. and Horel, John D. and Liston, Samuel T.},
  title    = {Cloud archiving and data mining of {High-Resolution} {Rapid} {Refresh} forecast model output},
  journal  = {Computers \& Geosciences},
  volume   = {109},
  pages    = {43--50},
  year     = {2017},
  doi      = {10.1016/j.cageo.2017.08.005},
  url      = {http://www.sciencedirect.com/science/article/pii/S0098300417305083},
  issn     = {0098-3004},
  keywords = {Object data storage, Data stewardship, Atmospheric modeling, Cloud computing},
}

@article{Potvinetal2012,
  author  = {Potvin, Corey K. and Shapiro, Alan and Xue, Ming},
  title   = {Impact of a Vertical Vorticity Constraint in Variational Dual-{Doppler} Wind Analysis: Tests with Real and Simulated Supercell Data},
  journal = {Journal of Atmospheric and Oceanic Technology},
  volume  = {29},
  number  = {1},
  pages   = {32--49},
  year    = {2012},
  doi     = {10.1175/JTECH-D-11-00019.1},
}
ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.