diff --git a/README.md b/README.md
index 9ab459aea..287651ae4 100644
--- a/README.md
+++ b/README.md
@@ -18,7 +18,7 @@ This analysis repository presumes that the following python packages are availab
  * bottleneck
  * basemap
  * lxml
- * nco >= 4.5.4
+ * nco >= 4.6.8
  * pyproj

 You can easily install them via the conda command:
diff --git a/config.default b/config.default
index 5005b627d..53a138699 100644
--- a/config.default
+++ b/config.default
@@ -41,13 +41,18 @@ parallelTaskCount = 1
 # Prefix on the command line before a parallel task (e.g. 'srun -n 1 python')
 # Default is no prefix (run_analysis.py is executed directly)
-commandPrefix = 
+commandPrefix =
+
+# the parallelism mode in ncclimo ("serial" or "bck")
+# Set this to "bck" (background parallelism) if running on a machine that can
+# handle 12 simultaneous processes, one for each monthly climatology.
+ncclimoParallelMode = serial

 [input]
 ## options related to reading in the results to be analyzed

 # directory containing model results
-baseDirectory = /dir/to/model/output
+baseDirectory = /dir/for/model/output

 # Note: an absolute path can be supplied for any of these subdirectories.
 # A relative path is assumed to be relative to baseDirectory.
@@ -86,22 +91,27 @@ autocloseFileLimitFraction = 0.5
 # with a single time slice.
 maxChunkSize = 10000

+# Directory for mapping files (if they have been generated already). If mapping
+# files needed by the analysis are not found here, they will be generated and
+# placed in the output mappingSubdirectory
+# mappingDirectory = /dir/for/mapping/files
+
 [output]
 ## options related to writing out plots, intermediate cached data sets, logs,
 ## etc.

 # directory where analysis should be written
 # NOTE: This directory path must be specific to each test case.
-baseDirectory = /dir/to/analysis/output
+baseDirectory = /dir/for/analysis/output

 # subdirectories within baseDirectory for analysis output
 scratchSubdirectory = scratch
 plotsSubdirectory = plots
 logsSubdirectory = logs
 mpasClimatologySubdirectory = clim/mpas
-mpasRegriddedClimSubdirectory = clim/mpas/regridded
 mappingSubdirectory = mapping
 timeSeriesSubdirectory = timeseries
+timeCacheSubdirectory = timecache

 # a list of analyses to generate. Valid names are:
 # 'timeSeriesOHC', 'timeSeriesSST', 'climatologyMapSST',
@@ -142,19 +152,13 @@ startYear = 11
 # the last year over which to average climatologies
 endYear = 20

-# The comparison grid resolution in degrees
+# The comparison lat/lon grid resolution in degrees
 comparisonLatResolution = 0.5
 comparisonLonResolution = 0.5

-# The names of the mapping file used for interpolation. If a mapping file has
-# already been generated, supplying the absolute path can save the time of
-# generating a new one. If nothing is supplied, the file name is automatically
-# generated based on the MPAS mesh name, the comparison grid resolution, and
-# the interpolation method
-# mpasMappingFile = /path/to/mapping/file
-
-# overwrite files when building climatologies?
-overwriteMpasClimatology = False
+# The comparison Antarctic polar stereographic grid size and resolution in km
+comparisonAntarcticStereoWidth = 6000.
+comparisonAntarcticStereoResolution = 10.

 # interpolation method for model and observation results. Likely values are
 # 'bilinear', 'neareststod' (nearest neighbor) or 'conserve'
@@ -203,6 +207,8 @@ sssSubdirectory = SSS
 mldSubdirectory = MLD
 ninoSubdirectory = Nino
 mhtSubdirectory = MHT
+meltSubdirectory = Melt
+soseSubdirectory = SOSE

 # first and last year of SST observational climatology (preferably one of the
 # two ranges given below)
@@ -213,16 +219,6 @@ sstClimatologyEndYear = 1900
 #sstClimatologyStartYear = 1990
 #sstClimatologyEndYear = 2011

-# The name of mapping files used for interpolating observations to the
-# comparison grid. Interpolation is only performed if the observation grid has
-# a different resolution from the comparison grid. If nothing is supplied, the
-# file name is automatically generated based on the MPAS mesh name, the
-# comparison grid resolution, and the interpolation method
-# sstClimatologyMappingFile = /path/to/mapping/file
-# sssClimatologyMappingFile = /path/to/mapping/file
-# mldClimatologyMappingFile = /path/to/mapping/file
-
-
 # interpolation method for observations. Likely values are
 # 'bilinear', 'neareststod' (nearest neighbor) or 'conserve'
 interpolationMethod = bilinear
@@ -230,14 +226,11 @@ interpolationMethod = bilinear

 # The directories where observation climatologies will be stored if they need
 # to be computed. If a relative path is supplied, it is relative to the output
 # base directory. If an absolute path is supplied, this should point to
-# cached climatology files on the desired comparison grid, in which case
-# overwriteObsClimatology should be False. If cached regridded files are
-# supplied, there is no need to provide cached files before regridding.
+# cached climatology files on the desired comparison grid. If cached remapped
+# files are supplied, there is no need to provide cached files before
+# remapping.
 climatologySubdirectory = clim/obs
-regriddedClimSubdirectory = clim/obs/regridded
-
-# overwrite files when building climatologies?
-overwriteObsClimatology = False
+remappedClimSubdirectory = clim/obs/remapped

 [oceanReference]
 ## options related to ocean reference run with which the results will be
@@ -252,11 +245,11 @@ baseDirectory = /dir/to/ocean/reference

 # directory where ocean reference simulation results are stored
 baseDirectory = /dir/to/ocean/reference
-
+
 [seaIceObservations]
 ## options related to sea ice observations with which the results will be
 ## compared
-
+
 # directory where sea ice observations are stored
 baseDirectory = /dir/to/seaice/observations
 areaNH = IceArea_timeseries/iceAreaNH_climo.nc
@@ -276,13 +269,6 @@ thicknessNH_FM = ICESat/ICESat_gridded_mean_thickness_NH_fm.interp0.5x0.5.nc
 thicknessSH_ON = ICESat/ICESat_gridded_mean_thickness_SH_on.interp0.5x0.5.nc
 thicknessSH_FM = ICESat/ICESat_gridded_mean_thickness_SH_fm.interp0.5x0.5.nc

-# The name of mapping files used for interpolating observations to the
-# comparison grid. Interpolation is only performed if the observation grid has
-# a different resolution from the comparison grid. If nothing is supplied, the
-# file name is automatically generated based on the MPAS mesh name, the
-# comparison grid resolution, and the interpolation method
-# seaIceClimatologyMappingFile = /path/to/mapping/file
-
 # interpolation method for observations. Likely values are
 # 'bilinear', 'neareststod' (nearest neighbor) or 'conserve'
 interpolationMethod = bilinear
@@ -290,14 +276,11 @@ interpolationMethod = bilinear

 # The directories where observation climatologies will be stored if they need
 # to be computed. If a relative path is supplied, it is relative to the output
 # base directory. If an absolute path is supplied, this should point to
-# cached climatology files on the desired comparison grid, in which case
-# overwriteObsClimatology should be False. If cached regridded files are
-# supplied, there is no need to provide cached files before regridding.
+# cached climatology files on the desired comparison grid. If cached remapped
+# files are supplied, there is no need to provide cached files before
+# remapping.
 climatologySubdirectory = clim/obs
-regriddedClimSubdirectory = clim/obs/regridded
-
-# overwrite files when building climatologies?
-overwriteObsClimatology = False
+remappedClimSubdirectory = clim/obs/remapped

 [seaIceReference]
 ## options related to sea ice reference run with which the results will be
@@ -388,9 +371,6 @@ compareWithObservations = True
 # Supported options are Atlantic and IndoPacific
 regionNames = ['Atlantic']

-# Mask file for post-processing regional MOC computation
-regionMaskFiles = /path/to/MOCregional/mapping/file
-
 # xarray (with dask) divides data sets into "chunks", allowing computations
 # to be made on data that is larger than the available memory. MPAS-Analysis
 # supports setting a maximum chunk size for data sets generally, and a
@@ -399,7 +379,7 @@ regionMaskFiles = /path/to/MOCregional/mapping/file
 # is handled automatically. If the MOC calculation encounters memory problems,
 # consider setting maxChunkSize to a number significantly lower than nEdges
 # in your MPAS mesh so that the calculation will be divided into smaller
-# pieces. 
+# pieces.
+# Note: maxChunkSize is needed for the RRS18to6 mesh
 # maxChunkSize = 1000

@@ -450,7 +430,7 @@ titleFontSize = 18
 polarPlot = False

 [climatologyMapSST]
-## options related to plotting horizontally regridded climatologies of
+## options related to plotting horizontally remapped climatologies of
 ## sea surface temperature (SST) against reference model results and
 ## observations

@@ -470,10 +450,36 @@ colorbarLevelsDifference = [-5, -3, -2, -1, 0, 1, 2, 3, 5]

 # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
 # Nov, Dec, JFM, AMJ, JAS, OND, ANN)
-comparisonTimes = ['JFM', 'JAS', 'ANN']
+seasons = ['JFM', 'JAS', 'ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+[climatologyMapSSTAntarctic]
+## options related to color maps for horizontally remapped climatologies of
+## sea surface temperature (SST) against reference model results and
+## observations on an Antarctic comparison grid
+
+# colormap for model/observations
+colormapNameResult = RdBu_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -2.5, 'vmax': 2.5}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2.5, 2.5, 9)
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2., 2., 9)

 [climatologyMapSSS]
-## options related to plotting horizontally regridded climatologies of
+## options related to plotting horizontally remapped climatologies of
 ## sea surface salinity (SSS) against reference model results and observations

 # colormap for model/observations
@@ -492,10 +498,36 @@ colorbarLevelsDifference = [-3, -2, -1, -0.5, 0, 0.5, 1, 2, 3]
 # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
 # Nov, Dec, JFM, AMJ, JAS, OND, ANN)
-comparisonTimes = ['JFM', 'JAS', 'ANN']
+seasons = ['JFM', 'JAS', 'ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+[climatologyMapSSSAntarctic]
+## options related to color maps for horizontally remapped climatologies of
+## sea surface salinity (SSS) against reference model results and observations
+## on an Antarctic comparison grid
+
+# colormap for model/observations
+colormapNameResult = BuOr
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 32., 'vmax': 35.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(32., 35., 9)
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2, 'vmax': 2}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2, 2, 9)

 [climatologyMapMLD]
-## options related to plotting horizontally regridded climatologies of
+## options related to plotting horizontally remapped climatologies of
 ## mixed layer depth (MLD) against reference model results and observations

 # colormap for model/observations
@@ -514,55 +546,276 @@ colorbarLevelsDifference = [-150, -80, -30, -10, 0, 10, 30, 80, 150]

 # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
 # Nov, Dec, JFM, AMJ, JAS, OND, ANN)
-comparisonTimes = ['JFM', 'JAS', 'ANN']
+seasons = ['JFM', 'JAS', 'ANN']

-[climatologyMapSeaIceConcThick]
-## options related to plotting horizontally regridded climatologies of
-## sea ice concentration and thickness against reference model results and
-## observations
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+[climatologyMapMLDAntarctic]
+## options related to color maps for horizontally remapped climatologies of
+## mixed layer depth (MLD) against reference model results and observations
+## on an Antarctic comparison grid
+
+# colormap for model/observations
+colormapNameResult = viridis
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0, 20, 40, 60, 80, 100, 150, 200, 400, 800]
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-150, -80, -30, -10, 0, 10, 30, 80, 150]
+
+[climatologyMapMeltAntarctic]
+## options related to plotting horizontally remapped maps of Antarctic
+## sub-ice-shelf melt rates against reference model results and observations
+
+# comparison grid(s)
+# only the Antarctic grid really makes sense, but lat-lon could technically work.
+comparisonGrids = ['antarctic']
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons = ['JFM', 'JAS', 'ANN']
+
+# colormap for model/observations
+colormapNameResult = erdc_iceFire_H
+# the type of norm used in the colormap
+normTypeResult = symLog
+# A dictionary with keywords for the SymLogNorm
+normArgsResult = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100.,
+                  'vmax': 100.}
+colorbarTicksResult = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2., 5.,
+                       10., 20., 50., 100.]
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# the type of norm used in the colormap
+normTypeDifference = symLog
+# A dictionary with keywords for the SymLogNorm
+normArgsDifference = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100.,
+                      'vmax': 100.}
+colorbarTicksDifference = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2.,
+                           5., 10., 20., 50., 100.]
+
+[timeSeriesAntarcticMelt]
+## options related to plotting time series of melt below Antarctic ice shelves
+
+# list of ice shelves to plot or ['all'] for all 106 ice shelves and regions.
+# See "regionNames" in the ice shelf masks file in regionMaskDirectory for
+# details.
+iceShelvesToPlot = ['Antarctica']
+
+# Number of months over which to compute moving average
+movingAverageMonths = 1
+
+[climatologyMapSoseTemperature]
+## options related to plotting climatology maps of Antarctic
+## potential temperature at various levels, including the sea floor, against
+## reference model results and SOSE reanalysis data
+
+# comparison grid(s)
+# only the Antarctic grid really makes sense, but lat-lon could technically work.
+comparisonGrids = ['antarctic']
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons = ['ANN']
+
+# list of depths in meters (positive up) at which to analyze, 'bot' for the
+# bottom of the ocean
+# depths = [0, -200, -400, -600, -800, 'bot']
+depths = ['bot']
+
+# colormap for model/observations
+colormapNameResult = RdBu_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2., 2., 9)
+
+[climatologyMapSoseSalinity]
+## options related to plotting climatology maps of Antarctic
+## salinity at various levels, including the sea floor, against
+## reference model results and SOSE reanalysis data
+
+# comparison grid(s)
+# only the Antarctic grid really makes sense, but lat-lon could technically work.
+comparisonGrids = ['antarctic']
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons = ['ANN']
+
+# list of depths in meters (positive up) at which to analyze, 'bot' for the
+# bottom of the ocean
+# depths = [0, -200, -400, -600, -800, 'bot']
+depths = ['bot']
+
+# colormap for model/observations
+colormapNameResult = BuOr
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 34.2, 'vmax': 35.2}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(34.2, 35.2, 9)
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.5, 'vmax': 0.5}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9)
+
+[climatologyMapSeaIceConcNH]
+## options related to plotting horizontally remapped climatologies of
+## sea ice concentration against reference model results and observations
+## in the northern hemisphere (NH)
+
+# colormap for model/observations
+colormapNameResult = inferno
+# color indices into colormapName for filled contours
+colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1]
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 40, 80, 127, 127, 170, 210, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8]
+
+# Times for comparison times (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons = ['JFM', 'JAS']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the northern hemisphere
+minimumLatitude = 50
+referenceLongitude = 0
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['NASATeam', 'Bootstrap']
+
+[climatologyMapSeaIceConcSH]
+## options related to plotting horizontally remapped climatologies of
+## sea ice concentration against reference model results and observations
+## in the southern hemisphere (SH)
+
+# colormap for model/observations
+colormapNameResult = inferno
+# color indices into colormapName for filled contours
+colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1]
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 40, 80, 127, 127, 170, 210, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8]
+
+# Times for comparison times (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons = ['DJF', 'JJA']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the southern hemisphere
+minimumLatitude = -50
+referenceLongitude = 180
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['NASATeam', 'Bootstrap']
+
+[climatologyMapSeaIceThickNH]
+## options related to plotting horizontally remapped climatologies of
+## sea ice thickness against reference model results and observations
+## in the northern hemisphere (NH)

 # colormap for model/observations
-colormapNameConcResultWinter = inferno
-colormapNameConcResultSummer = inferno
-colormapNameThickResultNH = inferno
-colormapNameThickResultSH = inferno
+colormapNameResult = inferno
 # color indices into colormapName for filled contours
-colormapIndicesConcResultWinter = [20, 80, 110, 140, 170, 200, 230, 255]
-colormapIndicesConcResultSummer = [20, 80, 110, 140, 170, 200, 230, 255]
-colormapIndicesThickResultNH = [20, 80, 110, 140, 170, 200, 230, 255]
-colormapIndicesThickResultSH = [20, 80, 110, 140, 170, 200, 230, 255]
-# colormap levels/values for contour boundaries for:
-# concentration in winter and summer
-colorbarLevelsConcResultWinter = [0.15, 0.4, 0.7, 0.9, 0.94, 0.96, 0.98, 0.99, 1]
-colorbarLevelsConcResultSummer = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1]
-# thickness in the northern and southern hemispheres
-colorbarLevelsThickResultNH = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5]
-colorbarLevelsThickResultSH = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5]
+colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5]

 # colormap for differences
-colormapNameConcDifferenceWinter = RdBu_r
-colormapNameConcDifferenceSummer = RdBu_r
-colormapNameThickDifferenceNH = RdBu_r
-colormapNameThickDifferenceSH = RdBu_r
+colormapNameDifference = RdBu_r
 # color indices into colormapName for filled contours
-colormapIndicesConcDifferenceWinter = [0, 40, 80, 127, 127, 170, 210, 255]
-colormapIndicesConcDifferenceSummer = [0, 40, 80, 127, 127, 170, 210, 255]
-colormapIndicesThickDifferenceNH = [0, 40, 80, 127, 127, 170, 210, 255]
-colormapIndicesThickDifferenceSH = [0, 40, 80, 127, 127, 170, 210, 255]
-# colormap levels/values for contour boundaries for:
-# concentration in winter and summer
-colorbarLevelsConcDifferenceWinter = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8]
-colorbarLevelsConcDifferenceSummer = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8]
-# thickness in the northern and southern hemispheres
-colorbarLevelsThickDifferenceNH = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5]
-colorbarLevelsThickDifferenceSH = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5]
+colormapIndicesDifference = [0, 40, 80, 127, 127, 170, 210, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5]
+
+# Times for comparison times (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons = ['FM', 'ON']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']

 # reference lat/lon for sea ice plots in the northern hemisphere
-minimumLatitudeNH = 50
-referenceLongitudeNH = 0
-# reference lat/lon for sea ice plots in the southern hemisphere
-minimumLatitudeSH = -50
-referenceLongitudeSH = 180
+minimumLatitude = 50
+referenceLongitude = 0
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['']
+
+[climatologyMapSeaIceThickSH]
+## options related to plotting horizontally remapped climatologies of
+## sea ice thickness against reference model results and observations
+## in the southern hemisphere (SH)
+
+# colormap for model/observations
+colormapNameResult = inferno
+# color indices into colormapName for filled contours
+colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5]
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 40, 80, 127, 127, 170, 210, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5]
+
+# Times for comparison times (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons = ['FM', 'ON']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the southern hemisphere
+minimumLatitude = -50
+referenceLongitude = 180
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['']

 [regions]
 ## options related to ocean regions used in several analysis modules
@@ -575,6 +828,9 @@ regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global']
 plotTitles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3',
               'Nino 4', 'Nino 3.4', 'Global Ocean']

+# Directory for region mask files
+regionMaskDirectory = /path/to/masks/
+
 [plot]
 ## options related to plotting that are the defaults across all analysis
 ## modules
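The normType and normArgs options introduced above correspond to matplotlib color normalizations ('linear' to Normalize, 'symLog' to SymLogNorm). A minimal sketch of that mapping, assuming only these two norm types (the build_norm helper is illustrative, not a function in MPAS-Analysis):

import matplotlib.colors as cols

def build_norm(normType, normArgs):
    # map a config normType/normArgs pair onto a matplotlib norm
    if normType == 'linear':
        return cols.Normalize(**normArgs)
    elif normType == 'symLog':
        return cols.SymLogNorm(**normArgs)
    raise ValueError('Unsupported norm type {}'.format(normType))

# values from the climatologyMapMeltAntarctic section above
norm = build_norm('symLog', {'linthresh': 1., 'linscale': 0.5,
                             'vmin': -100., 'vmax': 100.})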
diff --git a/configs/cori/config.20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison b/configs/cori/config.20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison
new file mode 120000
index 000000000..0e53b2031
--- /dev/null
+++ b/configs/cori/config.20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison
@@ -0,0 +1 @@
+../edison/config.20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison
\ No newline at end of file
diff --git a/configs/cori/config.20170625.beta1.g60to30_SSSrestore b/configs/cori/config.20170625.beta1.g60to30_SSSrestore
new file mode 120000
index 000000000..d098b8f2d
--- /dev/null
+++ b/configs/cori/config.20170625.beta1.g60to30_SSSrestore
@@ -0,0 +1 @@
+../edison/config.20170625.beta1.g60to30_SSSrestore
\ No newline at end of file
diff --git a/configs/cori/config.20170807.beta1.G_oQU240.edison b/configs/cori/config.20170807.beta1.G_oQU240.edison
new file mode 120000
index 000000000..cc30d102d
--- /dev/null
+++ b/configs/cori/config.20170807.beta1.G_oQU240.edison
@@ -0,0 +1 @@
+../edison/config.20170807.beta1.G_oQU240.edison
\ No newline at end of file
diff --git a/configs/cori/config.20170817.beta1.B_low_res_ice_shelves b/configs/cori/config.20170817.beta1.B_low_res_ice_shelves
new file mode 120000
index 000000000..60e84d6fe
--- /dev/null
+++ b/configs/cori/config.20170817.beta1.B_low_res_ice_shelves
@@ -0,0 +1 @@
+../edison/config.20170817.beta1.B_low_res_ice_shelves
\ No newline at end of file
diff --git a/configs/cori/job_script.cori-haswell.bash b/configs/cori/job_script.cori-haswell.bash
new file mode 100644
index 000000000..f40bd3d59
--- /dev/null
+++ b/configs/cori/job_script.cori-haswell.bash
@@ -0,0 +1,42 @@
+#!/bin/bash -l
+
+# comment out if using debug queue
+#SBATCH --partition=regular
+# comment in to get premium queue
+##SBATCH --qos=premium
+# comment in to get the debug queue
+##SBATCH --partition=debug
+# comment in when running on cori haswell or knl
+#SBATCH -C haswell
+#SBATCH --nodes=1
+#SBATCH --time=1:00:00
+#SBATCH --account=acme
+#SBATCH --job-name=mpas_analysis
+#SBATCH --output=mpas_analysis.o%j
+#SBATCH --error=mpas_analysis.e%j
+#SBATCH -L cscratch1,SCRATCH,project
+
+cd $SLURM_SUBMIT_DIR  # optional, since this is the default behavior
+
+export OMP_NUM_THREADS=1
+
+module unload python python/base
+module use /global/project/projectdirs/acme/software/modulefiles/all
+module load python/anaconda-2.7-acme
+export PATH=/global/homes/z/zender/bin_cori:${PATH}
+
+# MPAS/ACME job to be analyzed, including paths to simulation data and
+# observations. Change this name and path as needed
+run_config_file="config.run_name_here"
+
+if [ ! -f $run_config_file ]; then
+    echo "File $run_config_file not found!"
+    exit 1
+fi
+if [ ! -f ./run_analysis.py ]; then
+    echo "run_analysis.py not found in current directory!"
+    exit 1
+fi
+
+srun -N 1 -n 1 ./run_analysis.py $run_config_file
+
diff --git a/configs/cori/job_script.cori-knl.bash b/configs/cori/job_script.cori-knl.bash
new file mode 100644
index 000000000..c21ef7b61
--- /dev/null
+++ b/configs/cori/job_script.cori-knl.bash
@@ -0,0 +1,42 @@
+#!/bin/bash -l
+
+# comment out if using debug queue
+#SBATCH --partition=regular
+# comment in to get premium queue
+##SBATCH --qos=premium
+# comment in to get the debug queue
+##SBATCH --partition=debug
+# comment in when running on cori haswell or knl
+#SBATCH -C knl
+#SBATCH --nodes=1
+#SBATCH --time=1:00:00
+#SBATCH --account=acme
+#SBATCH --job-name=mpas_analysis
+#SBATCH --output=mpas_analysis.o%j
+#SBATCH --error=mpas_analysis.e%j
+#SBATCH -L cscratch1,SCRATCH,project
+
+cd $SLURM_SUBMIT_DIR  # optional, since this is the default behavior
+
+export OMP_NUM_THREADS=1
+
+module unload python python/base
+module use /global/project/projectdirs/acme/software/modulefiles/all
+module load python/anaconda-2.7-acme
+export PATH=/global/homes/z/zender/bin_cori:${PATH}
+
+# MPAS/ACME job to be analyzed, including paths to simulation data and
+# observations. Change this name and path as needed
+run_config_file="config.run_name_here"
+
+if [ ! -f $run_config_file ]; then
+    echo "File $run_config_file not found!"
+    exit 1
+fi
+if [ ! -f ./run_analysis.py ]; then
+    echo "run_analysis.py not found in current directory!"
+    exit 1
+fi
+
+srun -N 1 -n 1 ./run_analysis.py $run_config_file
+
diff --git a/configs/edison/config.20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison b/configs/edison/config.20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison
index 73650a03a..ac264b791 100644
--- a/configs/edison/config.20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison
+++ b/configs/edison/config.20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison
@@ -18,7 +18,7 @@ preprocessedReferenceRunName = B1850C5_ne30_v0.4
 baseDirectory = /scratch2/scratchdirs/golaz/ACME_simulations/20170313.beta1.A_WCYCL1850S.ne30_oECv3_ICG.edison/run

 # names of ocean and sea ice meshes (e.g. EC60to30, QU240, RRS30to10, etc.)
-mpasMeshName = EC60to30v3
+mpasMeshName = oEC60to30v3

 [output]
 ## options related to writing out plots, intermediate cached data sets, logs,
diff --git a/configs/edison/config.20170625.beta1.g60to30_SSSrestore b/configs/edison/config.20170625.beta1.g60to30_SSSrestore
new file mode 100644
index 000000000..c22075223
--- /dev/null
+++ b/configs/edison/config.20170625.beta1.g60to30_SSSrestore
@@ -0,0 +1,179 @@
+[runs]
+## options related to the run to be analyzed and reference runs to be
+## compared against
+
+# mainRunName is a name that identifies the simulation being analyzed.
+mainRunName = 20170625.beta1.g60to30_SSSrestore
+# preprocessedReferenceRunName is the name of a reference run that has been
+# preprocessed to compare against (or None to turn off comparison). Reference
+# runs of this type would have preprocessed results because they were not
+# performed with MPAS components (so they cannot be easily ingested by
+# MPAS-Analysis)
+preprocessedReferenceRunName = B1850C5_ne30_v0.4
+
+[execute]
+## options related to executing parallel tasks
+
+# the number of parallel tasks (1 means tasks run in serial, the default)
+#parallelTaskCount = 1
+parallelTaskCount = 6
+
+# the parallelism mode in ncclimo ("serial" or "bck")
+# Set this to "bck" (background parallelism) if running on a machine that can
+# handle 12 simultaneous processes, one for each monthly climatology.
+#ncclimoParallelMode = serial
+ncclimoParallelMode = bck
+
+[input]
+## options related to reading in the results to be analyzed
+
+# directory containing model results
+baseDirectory = /global/cscratch1/sd/sprice/acme_scratch/edison/g60to30_SSSrestore/run
+
+# names of ocean and sea ice meshes (e.g. EC60to30, QU240, RRS30to10, etc.)
+mpasMeshName = oEC60to30v3wLI
+
+[output]
+## options related to writing out plots, intermediate cached data sets, logs,
+## etc.
+
+# directory where analysis should be written
+baseDirectory = /dir/to/analysis/output
+
+# a list of analyses to generate. Valid names are:
+# 'timeSeriesOHC', 'timeSeriesSST', 'climatologyMapSST',
+# 'climatologyMapSSS', 'climatologyMapMLD', 'streamfunctionMOC',
+# 'indexNino34', 'meridionalHeatTransport',
+# 'timeSeriesSeaIceAreaVol', 'climatologyMapSeaIceConcThick'
+# the following shortcuts exist:
+# 'all' -- all analyses will be run
+# 'all_timeSeries' -- all time-series analyses will be run
+# 'all_climatology' -- all analyses involving climatologies
+# 'all_horizontalMap' -- all analyses involving horizontal climatology maps
+# 'all_ocean' -- all ocean analyses will be run
+# 'all_seaIce' -- all sea-ice analyses will be run
+# 'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the
+# other analyses).
+# 'no_ocean', 'no_timeSeries', etc. -- in analogy to 'all_*', skip the
+# given category of analysis
+# an equivalent syntax can be used on the command line to override this
+# option:
+# ./run_analysis.py config.analysis --generate \
+# all,no_ocean,all_timeSeries
+generate = ['all']
+
+# alternative examples that would perform all analysis except
+# 'timeSeriesOHC'
+#generate = ['all', 'no_timeSeriesOHC']
+# Each subsequent list entry can be used to alter previous list entries. For
+# example, the following would run all tasks that aren't ocean analyses,
+# except that it would also run ocean time series tasks:
+#generate = ['all', 'no_ocean', 'all_timeSeries']
+
+[climatology]
+## options related to producing climatologies, typically to compare against
+## observations and previous runs
+
+# the first year over which to average climatologies
+startYear = 13
+# the last year over which to average climatologies
+endYear = 22
+
+# Directory for mapping files (if they have been generated already). If mapping
+# files needed by the analysis are not found here, they will be generated and
+# placed in the output mappingSubdirectory
+mappingDirectory = /global/project/projectdirs/acme/mapping/maps/
+
+[timeSeries]
+## options related to producing time series plots, often to compare against
+## observations and previous runs
+
+# start and end years for timeseries analysis. Using out-of-bounds values
+# like start_year = 1 and end_year = 9999 will be clipped to the valid range
+# of years, and is a good way of ensuring that all values are used.
+startYear = 1
+endYear = 22
+
+[index]
+## options related to producing nino index.
+
+# start and end years for the nino 3.4 analysis. Using out-of-bounds values
+# like start_year = 1 and end_year = 9999 will be clipped to the valid range
+# of years, and is a good way of ensuring that all values are used.
+# For valid statistics, index times should include at least 30 years
+startYear = 1
+endYear = 9999
+
+[oceanObservations]
+## options related to ocean observations with which the results will be compared
+
+# directory where ocean observations are stored
+baseDirectory = /global/project/projectdirs/acme/observations/Ocean/
+sstSubdirectory = SST
+sssSubdirectory = SSS
+mldSubdirectory = MLD
+ninoSubdirectory = Nino
+mhtSubdirectory = MHT
+meltSubdirectory = Melt
+soseSubdirectory = SOSE
+
+[oceanPreprocessedReference]
+## options related to preprocessed ocean reference run with which the results
+## will be compared (e.g. a POP, CESM or ACME v0 run)
+
+# directory where ocean reference simulation results are stored
+baseDirectory = /global/project/projectdirs/acme/ACMEv0_lowres/B1850C5_ne30_v0.4/ocn/postprocessing
+
+[seaIceObservations]
+## options related to sea ice observations with which the results will be
+## compared
+
+# directory where sea ice observations are stored
+baseDirectory = /global/project/projectdirs/acme/observations/SeaIce
+
+[seaIcePreprocessedReference]
+## options related to preprocessed sea ice reference run with which the results
+## will be compared (e.g. a CICE, CESM or ACME v0 run)
+
+# directory where sea ice reference simulation results are stored
+baseDirectory = /global/project/projectdirs/acme/ACMEv0_lowres/B1850C5_ne30_v0.4/ice/postprocessing
+
+[timeSeriesSeaIceAreaVol]
+## options related to plotting time series of sea ice area and volume
+
+# plot on polar plot
+polarPlot = False
+
+[streamfunctionMOC]
+## options related to plotting the streamfunction of the meridional overturning
+## circulation (MOC)
+maxChunkSize = 1000
+
+[regions]
+# Directory containing mask files for ocean basins and ice shelves
+regionMaskDirectory = /global/project/projectdirs/acme/mpas_analysis/region_masks
+
+[climatologyMapSoseTemperature]
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons = ['JFM', 'JAS', 'ANN']
+
+# list of depths in meters (positive up) at which to analyze, 'bot' for the
+# bottom of the ocean
+depths = [0, -200, -400, -600, -800, 'bot']
+
+[climatologyMapSoseSalinity]
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons = ['JFM', 'JAS', 'ANN']
+
+# list of depths in meters (positive up) at which to analyze, 'bot' for the
+# bottom of the ocean
+depths = [0, -200, -400, -600, -800, 'bot']
+
+[timeSeriesAntarcticMelt]
+# a list of ice shelves to plot
+iceShelvesToPlot = ['Antarctica', 'Peninsula', 'West Antarctica', 'East Antarctica', 'Larsen_C', 'Filchner-Ronne', 'Brunt_Stancomb', 'Fimbul', 'Amery', 'Totten', 'Ross', 'Getz', 'Thwaites', 'Pine_Island', 'Abbot', 'George_VI']
+
diff --git a/configs/edison/config.20161117.beta0.A_WCYCL1850.ne30_oEC.edison b/configs/edison/config.20170807.beta1.G_oQU240.edison
similarity index 91%
rename from configs/edison/config.20161117.beta0.A_WCYCL1850.ne30_oEC.edison
rename to configs/edison/config.20170807.beta1.G_oQU240.edison
index 4acc52abc..75e13a720 100644
--- a/configs/edison/config.20161117.beta0.A_WCYCL1850.ne30_oEC.edison
+++ b/configs/edison/config.20170807.beta1.G_oQU240.edison
@@ -3,7 +3,7 @@
 ## compared against

 # mainRunName is a name that identifies the simulation being analyzed.
-mainRunName = 20161117.beta0.A_WCYCL1850.ne30_oEC.edison
+mainRunName = 20170807.beta1.G_oQU240.edison
 # preprocessedReferenceRunName is the name of a reference run that has been
 # preprocessed to compare against (or None to turn off comparison). Reference
 # runs of this type would have preprocessed results because they were not
@@ -15,10 +15,10 @@ preprocessedReferenceRunName = B1850C5_ne30_v0.4
 ## options related to reading in the results to be analyzed

 # directory containing model results
-baseDirectory = /scratch2/scratchdirs/golaz/ACME_simulations/20161117.beta0.A_WCYCL1850.ne30_oEC.edison/run
+baseDirectory = /scratch1/scratchdirs/xylar/acme_scratch/edison/G-QU240-master-intel/run

 # names of ocean and sea ice meshes (e.g. EC60to30, QU240, RRS30to10, etc.)
-mpasMeshName = EC60to30
+mpasMeshName = oQU240v3

 [output]
 ## options related to writing out plots, intermediate cached data sets, logs,
@@ -62,16 +62,16 @@ generate = ['all']
 ## observations and previous runs

 # the first year over which to average climatologies
-startYear = 41
+startYear = 2
 # the last year over which to average climatologies
-endYear = 50
+endYear = 5

 # The names of the mapping file used for interpolation. If a mapping file has
 # already been generated, supplying the absolute path can save the time of
 # generating a new one. If nothing is supplied, the file name is automatically
 # generated based on the MPAS mesh name, the comparison grid resolution, and
 # the interpolation method
-mpasMappingFile = /global/project/projectdirs/acme/mapping/maps/map_oEC60to30_TO_0.5x0.5degree_blin.160412.nc
+# mpasMappingFile =

 [timeSeries]
 ## options related to producing time series plots, often to compare against
@@ -81,7 +81,7 @@ mpasMappingFile = /global/project/projectdirs/acme/mapping/maps/map_oEC60to30_TO
 # like start_year = 1 and end_year = 9999 will be clipped to the valid range
 # of years, and is a good way of ensuring that all values are used.
 startYear = 1
-endYear = 51
+endYear = 9999

 [index]
 ## options related to producing nino index.
@@ -104,6 +104,7 @@ mldSubdirectory = MLD
 ninoSubdirectory = Nino
 mhtSubdirectory = MHT

+
 [oceanPreprocessedReference]
 ## options related to preprocessed ocean reference run with which the results
 ## will be compared (e.g. a POP, CESM or ACME v0 run)
@@ -137,4 +138,4 @@ polarPlot = False
 maxChunkSize = 1000

 # Mask file for ocean basin regional computation
-regionMaskFiles = /global/project/projectdirs/acme/mapping/grids/EC60to30v1_SingleRegionAtlanticWTransportTransects_masks.nc
+regionMaskFiles = /global/project/projectdirs/acme/mpas_analysis/region_masks/oQU240v3_SingleRegionAtlanticWTransportTransects_masks.nc
diff --git a/configs/edison/config.20161006bugfix.alpha8.A_WCYCL1850S.ne30_oEC_ICG.edison b/configs/edison/config.20170817.beta1.B_low_res_ice_shelves
similarity index 72%
rename from configs/edison/config.20161006bugfix.alpha8.A_WCYCL1850S.ne30_oEC_ICG.edison
rename to configs/edison/config.20170817.beta1.B_low_res_ice_shelves
index 1f9dc5ecc..7cff09196 100644
--- a/configs/edison/config.20161006bugfix.alpha8.A_WCYCL1850S.ne30_oEC_ICG.edison
+++ b/configs/edison/config.20170817.beta1.B_low_res_ice_shelves
@@ -3,7 +3,7 @@
 ## compared against

 # mainRunName is a name that identifies the simulation being analyzed.
-mainRunName = 20161006bugfix.alpha8.A_WCYCL1850S.ne30_oEC_ICG.edison
+mainRunName = 20170817.beta1.B_low_res_ice_shelves
 # preprocessedReferenceRunName is the name of a reference run that has been
 # preprocessed to compare against (or None to turn off comparison). Reference
 # runs of this type would have preprocessed results because they were not
@@ -11,14 +11,27 @@ mainRunName = 20161006bugfix.alpha8.A_WCYCL1850S.ne30_oEC_ICG.edison
 # MPAS-Analysis)
 preprocessedReferenceRunName = B1850C5_ne30_v0.4

+[execute]
+## options related to executing parallel tasks
+
+# the number of parallel tasks (1 means tasks run in serial, the default)
+#parallelTaskCount = 24
+parallelTaskCount = 6
+
+# the parallelism mode in ncclimo ("serial" or "bck")
+# Set this to "bck" (background parallelism) if running on a machine that can
+# handle 12 simultaneous processes, one for each monthly climatology.
+#ncclimoParallelMode = bck
+ncclimoParallelMode = bck
+
 [input]
 ## options related to reading in the results to be analyzed

 # directory containing model results
-baseDirectory = /global/cscratch1/sd/jonbob/ACME_simulations/20161006bugfix.alpha8.A_WCYCL1850S.ne30_oEC_ICG.edison/run
+baseDirectory = /global/cscratch1/sd/fyke/ACME_simulations/B_low_res_ice_shelves/run

 # names of ocean and sea ice meshes (e.g. EC60to30, QU240, RRS30to10, etc.)
-mpasMeshName = EC60to30
+mpasMeshName = oEC60to30v3wLI

 [output]
 ## options related to writing out plots, intermediate cached data sets, logs,
@@ -62,15 +75,16 @@ generate = ['all']
 ## observations and previous runs

 # the first year over which to average climatologies
-startYear = 6
+startYear = 1
 # the last year over which to average climatologies
-endYear = 10
+endYear = 1

+# Directory for mapping files (if they have been generated already). If mapping
+# files needed by the analysis are not found here, they will be generated and
+# placed in the output mappingSubdirectory
-# The names of the mapping file used for interpolation. If a mapping file has
-# already been generated, supplying the absolute path can save the time of
-# generating a new one. If nothing is supplied, the file name is automatically
-# generated based on the MPAS mesh name, the comparison grid resolution, and
-# the interpolation method
-mpasMappingFile = /global/project/projectdirs/acme/mapping/maps/map_oEC60to30_TO_0.5x0.5degree_blin.160412.nc
+mappingDirectory = /global/project/projectdirs/acme/mapping/maps/

 [timeSeries]
 ## options related to producing time series plots, often to compare against
@@ -80,7 +94,7 @@ mpasMappingFile = /global/project/projectdirs/acme/mapping/maps/map_oEC60to30_TO
 # like start_year = 1 and end_year = 9999 will be clipped to the valid range
 # of years, and is a good way of ensuring that all values are used.
 startYear = 1
-endYear = 51
+endYear = 9999

 [index]
 ## options related to producing nino index.
@@ -102,6 +116,8 @@ sssSubdirectory = SSS
 mldSubdirectory = MLD
 ninoSubdirectory = Nino
 mhtSubdirectory = MHT
+meltSubdirectory = Melt
+soseSubdirectory = SOSE

 [oceanPreprocessedReference]
 ## options related to preprocessed ocean reference run with which the results
@@ -135,5 +151,29 @@ polarPlot = False
 ## circulation (MOC)
 maxChunkSize = 1000

-# Mask file for ocean basin regional computation
-regionMaskFiles = /global/project/projectdirs/acme/mapping/grids/EC60to30v1_SingleRegionAtlanticWTransportTransects_masks.nc
+[regions]
+# Directory containing mask files for ocean basins and ice shelves
+regionMaskDirectory = /global/project/projectdirs/acme/mpas_analysis/region_masks
+
+[climatologyMapSoseTemperature]
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons = ['JFM', 'JAS', 'ANN']
+
+# list of depths in meters (positive up) at which to analyze, 'bot' for the
+# bottom of the ocean
+depths = [0, -200, -400, -600, -800, 'bot']
+
+[climatologyMapSoseSalinity]
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons = ['JFM', 'JAS', 'ANN']
+
+# list of depths in meters (positive up) at which to analyze, 'bot' for the
+# bottom of the ocean
+depths = [0, -200, -400, -600, -800, 'bot']
+
+[timeSeriesAntarcticMelt]
+# a list of ice shelves to plot
+iceShelvesToPlot = ['Antarctica', 'Peninsula', 'West Antarctica', 'East Antarctica', 'Larsen_C', 'Filchner-Ronne', 'Brunt_Stancomb', 'Fimbul', 'Amery', 'Totten', 'Ross', 'Getz', 'Thwaites', 'Pine_Island', 'Abbot', 'George_VI']
+
diff --git a/configs/edison/job_script.edison.bash b/configs/edison/job_script.edison.bash
index 37e2324b2..02d4ac6dd 100644
--- a/configs/edison/job_script.edison.bash
+++ b/configs/edison/job_script.edison.bash
@@ -6,9 +6,8 @@
 ##SBATCH --qos=premium
 # comment in to get the debug queue
 ##SBATCH --partition=debug
-# change number of nodes to change the number of parallel tasks
-# (anything between 1 and the total number of tasks to run)
-#SBATCH --nodes=10
+# run the analysis on a single node
+#SBATCH --nodes=1
 #SBATCH --time=1:00:00
 #SBATCH --account=acme
 #SBATCH --job-name=mpas_analysis
@@ -23,46 +22,20 @@ export OMP_NUM_THREADS=1
 module unload python python/base
 module use /global/project/projectdirs/acme/software/modulefiles/all
 module load python/anaconda-2.7-acme
+export PATH=/global/homes/z/zender/bin_edison:${PATH}

 # MPAS/ACME job to be analyzed, including paths to simulation data and
 # observations. Change this name and path as needed
 run_config_file="config.run_name_here"
-# prefix to run a serial job on a single node on edison
-command_prefix="srun -N 1 -n 1"
-# change this if not submitting this script from the directory
-# containing run_analysis.py
-mpas_analysis_dir="."
-# one parallel task per node by default
-parallel_task_count=$SLURM_JOB_NUM_NODES

 if [ ! -f $run_config_file ]; then
     echo "File $run_config_file not found!"
     exit 1
 fi
-if [ ! -f $mpas_analysis_dir/run_analysis.py ]; then
-    echo "run_analysis.py not found in $mpas_analysis_dir!"
+if [ ! -f ./run_analysis.py ]; then
+    echo "run_analysis.py not found in current directory!"
     exit 1
 fi
-
-# This is a config file generated just for this job with the output directory,
-# command prefix and parallel task count from above.
-job_config_file=config.output.$SLURM_JOB_ID
-
-# write out the config file specific to this job
-cat <<EOF > $job_config_file
-[execute]
-## options related to executing parallel tasks
-
-# the number of parallel tasks (1 means tasks run in serial, the default)
-parallelTaskCount = $parallel_task_count
-
-# Prefix on the command line before a parallel task (e.g. 'srun -n 1 python')
-# Default is no prefix (run_analysis.py is executed directly)
-commandPrefix = $command_prefix
-
-EOF
-
-$mpas_analysis_dir/run_analysis.py $run_config_file \
-    $job_config_file
+srun -N 1 -n 1 ./run_analysis.py $run_config_file
diff --git a/mpas_analysis/configuration/MpasAnalysisConfigParser.py b/mpas_analysis/configuration/MpasAnalysisConfigParser.py
index e2c1b6dfb..5cb3e84c8 100644
--- a/mpas_analysis/configuration/MpasAnalysisConfigParser.py
+++ b/mpas_analysis/configuration/MpasAnalysisConfigParser.py
@@ -5,7 +5,6 @@ that are lists, tuples, dicts, etc (`getExpression(section, option)`).

 Author: Xylar Asay-Davis, Phillip J. Wolfram
-Last Modified: 02/27/2017
 """

 import numbers
@@ -29,7 +28,6 @@ def getWithDefault(self, section, option, default):
     is present in the config file.

     Author: Xylar Asay-Davis
-    Last Modified: 02/27/2017
     """
     if self.has_section(section):
         if self.has_option(section, option):
@@ -66,7 +64,6 @@ def getExpression(self, section, option, elementType=None,
     of having selected numpy and / or np functionality available.

     Author: Xylar Asay-Davis, Phillip J. Wolfram
-    Last Modified: 04/10/2017
     """
     if self.has_section(section):
         if self.has_option(section, option):
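As the docstrings above describe, list- and dict-valued config options (seasons, comparisonGrids, depths, normArgs, and so on) are read back into Python objects with getExpression, while getWithDefault supplies a fallback for options absent from the config file. A short usage sketch (the config file name here is hypothetical):

from mpas_analysis.configuration.MpasAnalysisConfigParser \
    import MpasAnalysisConfigParser

config = MpasAnalysisConfigParser()
config.read('config.myrun')  # hypothetical config file

# the string "['JFM', 'JAS', 'ANN']" is evaluated back into a Python list
seasons = config.getExpression('climatologyMapSST', 'seasons')

# fall back to 'serial' if the option is absent from the config file
mode = config.getWithDefault('execute', 'ncclimoParallelMode', 'serial')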
diff --git a/mpas_analysis/ocean/__init__.py b/mpas_analysis/ocean/__init__.py
index af0c690ed..42f63dbac 100644
--- a/mpas_analysis/ocean/__init__.py
+++ b/mpas_analysis/ocean/__init__.py
@@ -1,7 +1,10 @@
 from climatology_map import ClimatologyMapSST, ClimatologyMapMLD, \
     ClimatologyMapSSS
+from antarctic_climatology_map import ClimatologyMapMeltAntarctic, \
+    ClimatologyMapSoseTemperature, ClimatologyMapSoseSalinity
 from time_series_ohc import TimeSeriesOHC
 from time_series_sst import TimeSeriesSST
 from index_nino34 import IndexNino34
 from streamfunction_moc import StreamfunctionMOC
-from meridional_heat_transport import MeridionalHeatTransport
\ No newline at end of file
+from meridional_heat_transport import MeridionalHeatTransport
+from time_series_antarctic_melt import TimeSeriesAntarcticMelt
diff --git a/mpas_analysis/ocean/antarctic_climatology_map.py b/mpas_analysis/ocean/antarctic_climatology_map.py
new file mode 100644
index 000000000..3874495b8
--- /dev/null
+++ b/mpas_analysis/ocean/antarctic_climatology_map.py
@@ -0,0 +1,796 @@
+'''
+Analysis tasks for comparing Antarctic climatology maps against observations
+and reanalysis data.
+
+Authors
+-------
+Xylar Asay-Davis
+'''
+
+import xarray as xr
+import numpy
+
+from .climatology_map import ClimatologyMapOcean
+
+from ..shared.constants import constants
+
+from ..shared.io.utility import build_config_full_path
+
+from ..shared.grid import ProjectionGridDescriptor
+from ..shared.climatology import get_antarctic_stereographic_projection, \
+    MpasClimatology
+
+from ..shared.mpas_xarray import mpas_xarray
+
+
+class MpasClimatologyMeltAntarctic(MpasClimatology):  # {{{
+    """
+    A task for creating and remapping climatologies of Antarctic melt rates
+
+    Attributes
+    ----------
+    landIceMask : xarray.DataArray
+        A mask indicating where there is land ice on the ocean grid (thus,
+        where melt rates are valid)
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        # first, call setup_and_check from the base class
+        # (MpasClimatology), which will perform some common setup
+        super(MpasClimatologyMeltAntarctic, self).setup_and_check()
+
+        landIceFluxMode = self.namelist.get('config_land_ice_flux_mode')
+        if landIceFluxMode not in ['standalone', 'coupled']:
+            raise ValueError('*** climatologyMapMeltAntarctic requires '
+                             'config_land_ice_flux_mode \n'
+                             '    to be standalone or coupled. Otherwise, no '
+                             'melt rates are available \n'
+                             '    for plotting.')
+
+        # }}}
+
+    def run(self):  # {{{
+        """
+        Compute climatologies of melt rates from ACME/MPAS output
+
+        This function has been overridden to load ``landIceMask`` from a
+        restart file for later use in masking the melt rate. It then simply
+        calls the run function from MpasClimatology
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        # first, load the land-ice mask from the restart file
+        dsLandIceMask = xr.open_dataset(self.restartFileName)
+        dsLandIceMask = mpas_xarray.subset_variables(dsLandIceMask,
+                                                     ['landIceMask'])
+        dsLandIceMask = dsLandIceMask.isel(Time=0)
+        self.landIceMask = dsLandIceMask.landIceMask > 0.
+
+        # then, call run from the base class (MpasClimatology), which will
+        # perform the main function of the task
+        super(MpasClimatologyMeltAntarctic, self).run()
+
+        # }}}
+
+    def customize_climatology(self, climatology):  # {{{
+        """
+        First, calls the parent version of ``customize_climatology`` to set
+        up the ``validMask``. Then, masks the melt rate with the
+        ``landIceMask`` and scales the field to be in m/yr instead of
+        kg/m^2/s
+
+        Parameters
+        ----------
+        climatology : ``xarray.Dataset`` object
+            the climatology data set
+
+        Returns
+        -------
+        climatology : ``xarray.Dataset`` object
+            the modified climatology data set
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        # perform the customization in MpasClimatology (adding a 'validMask')
+        climatology = \
+            super(MpasClimatologyMeltAntarctic, self).customize_climatology(
+                climatology)
+
+        fieldName = self.variableList[0]
+
+        # scale the field to m/yr from kg/m^2/s and mask out non-land-ice areas
+        climatology[fieldName] = \
+            constants.sec_per_year/constants.rho_fw * \
+            climatology[fieldName].where(self.landIceMask)
+
+        return climatology  # }}}
+
+    # }}}
+
+
+class ClimatologyMapMeltAntarctic(ClimatologyMapOcean):  # {{{
+    """
+    An analysis task for comparison of antarctic melt rates against
+    observations
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    def create_mpas_climatology_task(self, comparisonGridNames, seasons):
+        # {{{
+        """
+        Create an MpasClimatologyMeltAntarctic task to use as a prerequisite
+        of this task
+
+        comparisonGridNames : list of {'latlon', 'antarctic'}, optional
+            The name(s) of the comparison grid to use for remapping.
+
+        seasons : list of str, optional
+            A list of seasons (keys in ``shared.constants.monthDictionary``)
+            to be computed.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        climatologyTask = MpasClimatologyMeltAntarctic(
+            config=self.config,
+            variableList=[self.mpasFieldName],
+            taskSuffix='AntarcticMelt',
+            componentName=self.componentName,
+            comparisonGridNames=comparisonGridNames,
+            seasons=seasons,
+            tags=['climatology', 'landIceCavities'],
+            iselValues=self.iselValues)
+
+        return climatologyTask  # }}}
+
+    def __init__(self, config, season, comparisonGridName):  # {{{
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        season : str
+            A season (keys in ``shared.constants.monthDictionary``) to be
+            plotted
+
+        comparisonGridName : {'latlon', 'antarctic'}
+            The name of the comparison grid on which model data and
+            observations are compared
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        self.fieldName = 'melt'
+        self.fieldNameInTitle = 'Melt Rate'
+        self.season = season
+        self.comparisonGridName = comparisonGridName
+
+        self.mpasFieldName = 'timeMonthly_avg_landIceFreshwaterFlux'
+        self.iselValues = None
+
+        taskName, tags = self.get_task_name_and_tags()
+
+        tags.extend([self.fieldName, 'landIceCavities'])
+
+        # call the constructor from the base class (ClimatologyMapOcean)
+        super(ClimatologyMapMeltAntarctic, self).__init__(
+            config=config,
+            taskName=taskName,
+            componentName='ocean',
+            tags=tags)
+        # }}}
+
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        # first, call setup_and_check from the base class
+        # (ClimatologyMapOcean), which will perform some common setup
+        super(ClimatologyMapMeltAntarctic, self).setup_and_check()
+
+        landIceFluxMode = self.namelist.get('config_land_ice_flux_mode')
+        if landIceFluxMode not in ['standalone', 'coupled']:
+            raise ValueError('*** climatologyMapMeltAntarctic requires '
+                             'config_land_ice_flux_mode \n'
+                             '    to be standalone or coupled. Otherwise, no '
+                             'melt rates are available \n'
+                             '    for plotting.')
+
+        observationsDirectory = build_config_full_path(
+            self.config, 'oceanObservations',
+            '{}Subdirectory'.format(self.fieldName))
+
+        self.obsFileName = \
+            '{}/Rignot_2013_melt_rates_6000.0x6000.0km_10.0km_' \
+            'Antarctic_stereo.nc'.format(observationsDirectory)
+
+        self.obsFieldName = 'meltRate'
+
+        # Set appropriate figure labels for melt rates
+        self.observationTitleLabel = 'Observations (Rignot et al, 2013)'
+        self.outFileLabel = 'meltRignot'
+        self.unitsLabel = r'm a$^{-1}$'
+        self.diffTitleLabel = 'Model - Observations'
+        # }}}
+
+    def build_observational_dataset(self):  # {{{
+        '''
+        read in the data sets for observations, and possibly rename some
+        variables and dimensions
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        dsObs = xr.open_mfdataset(self.obsFileName)
+
+        # create a descriptor of the observation grid using the x/y
+        # coordinates
+        projection = get_antarctic_stereographic_projection()
+        obsDescriptor = ProjectionGridDescriptor.read(
+            projection, fileName=self.obsFileName, xVarName='x', yVarName='y')
+
+        return dsObs, obsDescriptor  # }}}
+
+    # }}}
+
+
+class MpasClimatologyDepthSlices(MpasClimatology):  # {{{
+    """
+    A task for creating and remapping climatologies of MPAS fields sliced
+    at a given set of depths
+
+    Attributes
+    ----------
+    depths : list of {float, 'bot'}
+        A list of depths at which the climatology will be sliced in the
+        vertical.
+
+    maxLevelCell : xarray.DataArray
+        The vertical index of the bottom cell in MPAS results
+
+    verticalIndices : xarray.DataArray
+        The vertical indices of the slices to be plotted
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    def run(self):  # {{{
+        """
+        Compute climatologies of T or S from ACME/MPAS output
+
+        This function has been overridden to load ``maxLevelCell`` from a
+        restart file for later use in indexing bottom T and S.
+        ``verticalIndex`` is also computed for later indexing of
+        the model level. It then simply calls the run function from
+        MpasClimatology.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        # first, load the vertical mesh variables from the restart file
+        ds = xr.open_dataset(self.restartFileName)
+        ds = mpas_xarray.subset_variables(ds, ['maxLevelCell',
+                                               'bottomDepth',
+                                               'layerThickness'])
+
+        self.maxLevelCell = ds.maxLevelCell - 1
+
+        verticalIndices = numpy.zeros((len(self.depths), ds.dims['nCells']),
+                                      int)
+
+        for depthIndex in range(len(self.depths)):
+            depth = self.depths[depthIndex]
+            if depth == 'bot':
+                # switch to zero-based index
+                verticalIndices[depthIndex, :] = ds.maxLevelCell.values - 1
+            else:
+                nCells = ds.dims['nCells']
+                nVertLevels = ds.dims['nVertLevels']
+                zInterface = numpy.zeros((nCells, nVertLevels+1))
+                zInterface[:, nVertLevels] = -ds.bottomDepth.values
+                for zIndex in range(nVertLevels-1, -1, -1):
+                    zInterface[:, zIndex] = zInterface[:, zIndex+1] + \
+                        ds.layerThickness.values[0, :, zIndex]
+                zMid = 0.5*(zInterface[:, 0:-1] + zInterface[:, 1:])
+
+                verticalIndex = numpy.argmin(numpy.abs(zMid-depth), axis=1)
+                verticalIndices[depthIndex, :] = verticalIndex
+
+        self.verticalIndices = \
+            xr.DataArray.from_dict({'dims': ('depthSlice', 'nCells'),
+                                    'data': verticalIndices})
+
+        # then, call run from the base class (MpasClimatology), which will
+        # perform the main function of the task
+        super(MpasClimatologyDepthSlices, self).run()
+
+        # }}}
+
+    def customize_climatology(self, climatology):  # {{{
+        """
+        Uses ``verticalIndex`` to slice the 3D climatology field at each
+        requested depth.  The resulting field has a new ``depthSlice``
+        dimension with one entry for each requested depth.
+
+        Parameters
+        ----------
+        climatology : ``xarray.Dataset`` object
+            the climatology data set
+
+        Returns
+        -------
+        climatology : ``xarray.Dataset`` object
+            the modified climatology data set
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        # perform the customization in MpasClimatology (adding a 'validMask')
+        climatology = \
+            super(MpasClimatologyDepthSlices, self).customize_climatology(
+                climatology)
+
+        climatology.coords['verticalIndex'] = \
+            ('nVertLevels',
+             numpy.arange(climatology.dims['nVertLevels']))
+
+        depthNames = [str(depth) for depth in self.depths]
+
+        climatology.coords['depthSlice'] = ('depthSlice', depthNames)
+
+        for variableName in self.variableList:
+            if 'nVertLevels' not in climatology[variableName].dims:
+                continue
+
+            # mask only the values with the right vertical index
+            da = climatology[variableName].where(
+                climatology.verticalIndex == self.verticalIndices)
+
+            # Each vertical layer has at most one non-NaN value so the "sum"
+            # over the vertical is used to collapse the array in the vertical
+            # dimension
+            climatology[variableName] = \
+                da.sum(dim='nVertLevels').where(
+                    self.verticalIndices <= self.maxLevelCell)
+
+        climatology = climatology.drop('verticalIndex')
+
+        return climatology  # }}}
+
+    # }}}
+
+
+class ClimatologyMapSose(ClimatologyMapOcean):  # {{{
+    """
+    An analysis task for comparison of Antarctic temperature, salinity, etc.
+    against SOSE fields
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    @classmethod
+    def create_tasks(cls, config):  # {{{
+        """
+        For each comparison grid, construct one task for computing the
+        climatologies and one plotting task for each season.  The climatology
+        task is a prerequisite of the plotting tasks, but the plotting tasks
+        can run in parallel with one another.
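The where/sum idiom in `customize_climatology` above is a common xarray trick for gathering one value per column when the desired vertical index varies from cell to cell. A self-contained sketch:

```python
import numpy
import xarray as xr

# a tiny 3-cell, 4-level field and a per-cell vertical index to extract
field = xr.DataArray(numpy.arange(12.0).reshape(3, 4),
                     dims=('nCells', 'nVertLevels'))
verticalIndices = xr.DataArray(numpy.array([0, 2, 3]), dims='nCells')

# the index of each vertical level, for comparison against verticalIndices
verticalIndex = xr.DataArray(numpy.arange(4), dims='nVertLevels')

# keep only the entry at the requested level in each cell; everything else
# becomes NaN, so summing over the vertical collapses to that single value
sliced = field.where(verticalIndex == verticalIndices).sum(dim='nVertLevels')
print(sliced.values)  # [0., 6., 11.]
```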
+
+        Parameters
+        ----------
+        config : MpasAnalysisConfigParser object
+            Contains configuration options
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        # get the name of the class (something like ClimatologyMapSST)
+        className = cls.__name__
+        # convert the first letter to lowercase to get the corresponding
+        # section name in the config file
+        sectionName = className[0].lower() + className[1:]
+
+        # read in what seasons we want to plot
+        seasons = config.getExpression(sectionName, 'seasons')
+
+        if len(seasons) == 0:
+            raise ValueError('config section {} does not contain valid list '
+                             'of seasons'.format(sectionName))
+
+        comparisonGridNames = config.getExpression(sectionName,
+                                                   'comparisonGrids')
+
+        if len(comparisonGridNames) == 0:
+            raise ValueError('config section {} does not contain valid list '
+                             'of comparison grids'.format(sectionName))
+
+        # read in at what depths we want to plot
+        depths = config.getExpression(sectionName, 'depths')
+
+        if len(depths) == 0:
+            raise ValueError('config section {} does not contain valid list '
+                             'of depths'.format(sectionName))
+
+        first = True
+        tasks = []
+        for comparisonGridName in comparisonGridNames:
+            for season in seasons:
+                for depth in depths:
+                    # one plotting task for each season, depth and comparison
+                    # grid
+                    climatologyMapTask = cls(
+                        config=config,
+                        depth=depth,
+                        season=season,
+                        comparisonGridName=comparisonGridName)
+
+                    if first:
+                        climatologyTask = \
+                            climatologyMapTask.create_mpas_climatology_task(
+                                comparisonGridNames=comparisonGridNames,
+                                seasons=seasons,
+                                depths=depths)
+                        tasks.append(climatologyTask)
+
+                    climatologyMapTask.add_prerequisite_tasks(
+                        [climatologyTask.taskName])
+
+                    climatologyMapTask.climatologyTask = climatologyTask
+
+                    tasks.append(climatologyMapTask)
+
+                    first = False
+
+        return tasks  # }}}
+
+    def __init__(self, config, depth, season, comparisonGridName):
+        # {{{
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        depth : {float, 'bot'}
+            Depth at which to perform the comparison, 'bot' for the ocean floor
+
+        season : str
+            A season (keys in ``shared.constants.monthDictionary``) to be
+            plotted
+
+        comparisonGridName : {'latlon', 'antarctic'}
+            The comparison grid on which to plot the analysis
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        self.season = season
+        self.comparisonGridName = comparisonGridName
+        self.depth = depth
+
+        upperField = self.fieldName[0].upper() + self.fieldName[1:]
+        if depth == 'bot':
+            self.fieldNameInTitle = 'Sea Floor {}'.format(upperField)
+        else:
+            self.fieldNameInTitle = '{} z={} m'.format(upperField, depth)
+
+        taskName, tags = self.get_task_name_and_tags()
+        tags.extend(['SOSE', self.fieldName])
+
+        # append the depth on to the task name
+        taskName = '{}_depth_{}'.format(taskName, depth)
+
+        # call the constructor from the base class (ClimatologyMapOcean)
+        super(ClimatologyMapSose, self).__init__(config=config,
+                                                 taskName=taskName,
+                                                 componentName='ocean',
+                                                 tags=tags)
+        # }}}
+
+    def create_mpas_climatology_task(self, comparisonGridNames, seasons,
+                                     depths):
+        # {{{
+        """
+        Create an MpasClimatologyDepthSlices task to use as a prerequisite
+        of this task
+
+        Parameters
+        ----------
+        comparisonGridNames : list of {'latlon', 'antarctic'}
+            The name(s) of the comparison grid to use for remapping.
+
+        seasons : list of str
+            A list of seasons (keys in ``shared.constants.monthDictionary``)
+            to be computed.
+
+        depths : list of {float, 'bot'}
+            A list of depths where the field is to be sliced
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        upperField = self.fieldName[0].upper() + self.fieldName[1:]
+        climatologyTask = MpasClimatologyDepthSlices(
+            config=self.config,
+            variableList=[self.mpasFieldName],
+            taskSuffix='{}DepthSlices'.format(upperField),
+            componentName=self.componentName,
+            comparisonGridNames=comparisonGridNames,
+            seasons=seasons,
+            tags=['climatology'],
+            iselValues=self.iselValues)
+
+        climatologyTask.depths = depths
+
+        return climatologyTask  # }}}
+
+    def post_process_remapped_mpas_climatology(self, remappedClimatology):
+        # {{{
+        '''
+        Extract the appropriate slice of the remapped climatology for this
+        depth.
+
+        Parameters
+        ----------
+        remappedClimatology : ``xarray.Dataset`` object
+            The remapped climatology to be processed
+
+        Returns
+        -------
+        remappedClimatology : ``xarray.Dataset`` object
+            The remapped climatology after processing
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        remappedClimatology = remappedClimatology.sel(
+            depthSlice=str(self.depth), drop=True)
+
+        return remappedClimatology
+
+        # }}}
+
+    def build_observational_dataset(self):  # {{{
+        '''
+        read in the data sets for observations, and possibly rename some
+        variables and dimensions
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        dsObs = xr.open_dataset(self.obsFileName)
+        dsObs = mpas_xarray.subset_variables(dsObs, [self.obsFieldName,
+                                                     'month', 'year'])
+        if self.depth != 'bot':
+            dsObs = dsObs.sel(method='nearest', depth=self.depth)
+
+        # create a descriptor of the observation grid using the x/y
+        # coordinates
+        projection = get_antarctic_stereographic_projection()
+        obsDescriptor = ProjectionGridDescriptor.read(
+            projection, fileName=self.obsFileName, xVarName='x', yVarName='y')
+
+        return dsObs, obsDescriptor  # }}}
+
+    # }}}
+
+
+class ClimatologyMapSoseTemperature(ClimatologyMapSose):  # {{{
+    """
+    An analysis task for comparison of Antarctic temperature against SOSE
+    fields
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    def __init__(self, config, depth, season, comparisonGridName):
+        # {{{
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        depth : {float, 'bot'}
+            Depth at which to perform the comparison, 'bot' for the ocean floor
+
+        season : str
+            A season (keys in ``shared.constants.monthDictionary``) to be
+            plotted
+
+        comparisonGridName : {'latlon', 'antarctic'}
+            The comparison grid on which to plot the analysis
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        self.fieldName = 'temperature'
+        self.mpasFieldName = 'timeMonthly_avg_activeTracers_temperature'
+        self.iselValues = None
+
+        # call the constructor from the base class (ClimatologyMapSose)
+        super(ClimatologyMapSoseTemperature, self).__init__(
+            config=config,
+            depth=depth,
+            season=season,
+            comparisonGridName=comparisonGridName)
+        # }}}
+
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        # first, call setup_and_check from the base class
+        # (ClimatologyMapSose), which will perform some common setup
+        super(ClimatologyMapSoseTemperature, self).setup_and_check()
+
+        observationsDirectory = build_config_full_path(
+            self.config, 'oceanObservations', 'soseSubdirectory')
+
+        self.obsFileName = '{}/SOSE_2005-2010_monthly_pot_temp_6000.0x' \
+                           '6000.0km_10.0km_Antarctic_stereo.nc'.format(
+                               observationsDirectory)
+        if self.depth == 'bot':
+            self.obsFieldName = 'botTheta'
+            self.outFileLabel = 'botTemp_SOSE'
+        else:
+            self.obsFieldName = 'theta'
+            self.outFileLabel = 'temp_depth_{}_SOSE'.format(self.depth)
+        self.unitsLabel = r'$^\circ$C'
+
+        # Set appropriate figure labels for temperature
+        self.observationTitleLabel = 'State Estimate (SOSE)'
+        self.diffTitleLabel = 'Model - State Estimate'
+
+        # }}}
+
+    # }}}
+
+
+class ClimatologyMapSoseSalinity(ClimatologyMapSose):  # {{{
+    """
+    An analysis task for comparison of Antarctic salinity against SOSE
+    fields
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    def __init__(self, config, depth, season, comparisonGridName):
+        # {{{
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        depth : {float, 'bot'}
+            Depth at which to perform the comparison, 'bot' for the ocean floor
+
+        season : str
+            A season (keys in ``shared.constants.monthDictionary``) to be
+            plotted
+
+        comparisonGridName : {'latlon', 'antarctic'}
+            The comparison grid on which to plot the analysis
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        self.fieldName = 'salinity'
+        self.mpasFieldName = 'timeMonthly_avg_activeTracers_salinity'
+        self.iselValues = None
+
+        # call the constructor from the base class (ClimatologyMapSose)
+        super(ClimatologyMapSoseSalinity, self).__init__(
+            config=config,
+            depth=depth,
+            season=season,
+            comparisonGridName=comparisonGridName)
+        # }}}
+
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        # first, call setup_and_check from the base class
+        # (ClimatologyMapSose), which will perform some common setup
+        super(ClimatologyMapSoseSalinity, self).setup_and_check()
+
+        observationsDirectory = build_config_full_path(
+            self.config, 'oceanObservations', 'soseSubdirectory')
+
+        self.obsFileName = '{}/SOSE_2005-2010_monthly_salinity_6000.0x' \
+                           '6000.0km_10.0km_Antarctic_stereo.nc'.format(
+                               observationsDirectory)
+        if self.depth == 'bot':
+            self.obsFieldName = 'botSalinity'
+            self.outFileLabel = 'botSalin_SOSE'
+        else:
+            self.obsFieldName = 'salinity'
+            self.outFileLabel = 'salin_depth_{}_SOSE'.format(self.depth)
+        self.unitsLabel = 'PSU'
+
+        # Set appropriate figure labels for salinity
+        self.observationTitleLabel = 'State Estimate (SOSE)'
+        self.diffTitleLabel = 'Model - State Estimate'
+
+        # }}}
+
+    # }}}
+
+# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
diff --git a/mpas_analysis/ocean/climatology_map.py b/mpas_analysis/ocean/climatology_map.py
index 728ebd12f..ff72a6c8c 100644
--- a/mpas_analysis/ocean/climatology_map.py
+++ b/mpas_analysis/ocean/climatology_map.py
@@ -12,28 +12,21 @@
 import datetime
 import numpy as np
 import os
-import warnings
 
 from ..shared.analysis_task import AnalysisTask
 
 from ..shared.plot.plotting import plot_global_comparison, \
-    setup_colormap
+    setup_colormap, plot_polar_projection_comparison
 
 from ..shared.constants import constants
 
 from ..shared.io.utility import build_config_full_path
+from ..shared.io import write_netcdf
 
-from ..shared.generalized_reader.generalized_reader \
-    import open_multifile_dataset
+from ..shared.climatology import get_comparison_descriptor, \
+    get_remapper, get_observation_climatology_file_names, \
+    compute_climatology, remap_and_write_climatology, MpasClimatology
 
-from ..shared.timekeeping.utility import get_simulation_start_time
-
-from ..shared.climatology import get_lat_lon_comparison_descriptor, \
-    get_remapper, get_mpas_climatology_file_names, \
-    get_observation_climatology_file_names, \
-    compute_climatology, cache_climatologies, update_start_end_year, \
-    remap_and_write_climatology
-
-from ..shared.grid import MpasMeshDescriptor, LatLonGridDescriptor
+from ..shared.grid import LatLonGridDescriptor, interp_extrap_corner
 
 from ..shared.mpas_xarray import mpas_xarray
 
@@ -44,257 +37,427 @@ class ClimatologyMapOcean(AnalysisTask):  # {{{
     """
     An analysis task for comparison of 2D model fields against observations.
 
+    Attributes
+    ----------
+    climatologyTask : MpasClimatology object
+        The climatology task that has already finished by the time the ``run``
+        method gets called in this class.  It is used to figure out the
+        names of the climatology files that it created.
+
+    season : str
+        A season (keys in ``shared.constants.monthDictionary``) to be plotted
+
+    comparisonGridName : {'latlon', 'antarctic'}
+        The name of the comparison grid on which model data and observations
+        are compared
+
+    fieldName : str
+        A short name of the field being analyzed
+
+    fieldNameInTitle : str
+        An equivalent name of the field appropriate for figure titles
+
+    mpasFieldName : str
+        The name of the MPAS timeSeriesStatsMonthly variable to be analyzed
+
+    iselValues : dict
+        A dictionary of dimensions and indices (or ``None``) used to extract
+        a slice of the MPAS field.
+
+    obsFileName : str
+        A file containing observations from which to construct seasonal
+        climatologies.
+ + obsFieldName : str + A variable in the observations file to plot + + observationTitleLabel : str + A label on the subplot showing observations + + outFileLabel : str + A prefix for the resulting image file. + + unitsLabel : str + The units for the field being plotted. + Authors ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def setup_and_check(self): # {{{ + @classmethod + def create_tasks(cls, config): # {{{ """ - Perform steps to set up the analysis and check for errors in the setup. + For each comparison grid, construct one task for computing the + climatologies and one plotting task for each season. The climatology + task is a prerequisite of the plotting tasks, but the plotting tasks + can run in parallel with one another. + + Parameters + ---------- + config : MpasAnalysisConfigParser object + Contains configuration options Authors ------- Xylar Asay-Davis """ - # first, call setup_and_check from the base class (AnalysisTask), - # which will perform some common setup, including storing: - # self.runDirectory , self.historyDirectory, self.plotsDirectory, - # self.namelist, self.runStreams, self.historyStreams, - # self.calendar, self.namelistMap, self.streamMap, self.variableMap - super(ClimatologyMapOcean, self).setup_and_check() - - self.check_analysis_enabled( - analysisOptionName='config_am_timeseriesstatsmonthly_enable', - raiseException=True) - - # get a list of timeSeriesStats output files from the streams file, - # reading only those that are between the start and end dates - self.startDate = self.config.get('climatology', 'startDate') - self.endDate = self.config.get('climatology', 'endDate') - streamName = \ - self.historyStreams.find_stream(self.streamMap['timeSeriesStats']) - self.inputFiles = self.historyStreams.readpath( - streamName, startDate=self.startDate, endDate=self.endDate, - calendar=self.calendar) - - if len(self.inputFiles) == 0: - raise IOError('No files were found in stream {} between {} and ' - '{}.'.format(streamName, self.startDate, - self.endDate)) - # }}} + # get the name of the class (something like ClimatologyMapSST) + className = cls.__name__ + # convert the first letter to lowercase to get the corresponding + # seciton name in the config file + sectionName = className[0].lower() + className[1:] + + # read in what seasons we want to plot + seasons = config.getExpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getExpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + first = True + tasks = [] + for comparisonGridName in comparisonGridNames: + for season in seasons: + # one plotting task for each season and comparison grid + climatologyMapTask = cls( + config=config, season=season, + comparisonGridName=comparisonGridName) + + if first: + climatologyTask = \ + climatologyMapTask.create_mpas_climatology_task( + comparisonGridNames=comparisonGridNames, + seasons=seasons) + tasks.append(climatologyTask) + + climatologyMapTask.add_prerequisite_tasks( + [climatologyTask.taskName]) + + climatologyMapTask.climatologyTask = climatologyTask + + tasks.append(climatologyMapTask) + + first = False + + return tasks # }}} + + def create_mpas_climatology_task(self, comparisonGridNames, seasons): + # {{{ + """ + Create an MpasClimatology task to use as a 
prerequisite of this task + + comparisonGridNames : list of {'latlon', 'antarctic'}, optional + The name(s) of the comparison grid to use for remapping. + + seasons : list of str, optional + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed. + + Authors + ------- + Xylar Asay-Davis + """ + + taskSuffix = self.fieldName[0].upper() + self.fieldName[1:] + + climatologyTask = MpasClimatology( + config=self.config, + variableList=[self.mpasFieldName], + taskSuffix=taskSuffix, + componentName=self.componentName, + comparisonGridNames=comparisonGridNames, + seasons=seasons, + tags=['climatology'], + iselValues=self.iselValues) + + return climatologyTask # }}} def run(self): # {{{ """ - Plots a comparison of ACME/MPAS output to SST, MLD or SSS observations + Plots 2D map comparing ACME/MPAS ocean output to observations Authors ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani - - Last Modified - ------------- - 03/16/2017 """ - print "\nPlotting 2-d maps of {} climatologies...".format( - self.fieldNameInTitle) + print "\nPlotting 2-d maps of {} climatologies for season {}" \ + "...".format(self.fieldNameInTitle, self.season) # get local versions of member variables for convenience config = self.config - calendar = self.calendar fieldName = self.fieldName + climatologyTask = self.climatologyTask - simulationStartTime = get_simulation_start_time(self.runStreams) + # call setup_and_check() on the climatology task because it will make + # sure the start and end year are set and correct. (In parallel mode, + # this copy of the climatologyTask is different from the one where + # setup_and_check was already called, and run completed successfully.) + climatologyTask.setup_and_check() - print '\n Reading files:\n' \ - ' {} through\n {}'.format( - os.path.basename(self.inputFiles[0]), - os.path.basename(self.inputFiles[-1])) + # we don't have any way to know the observation lat/lon and grid + # without this, so we have to read the observational data set even + # if it's already been remapped. 
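The `create_tasks` pattern above, where one shared climatology task is created once and registered as a prerequisite of every plotting task, is the core of the task parallelism here. A minimal, framework-free sketch of the same dependency idea; the class and attribute names are hypothetical illustrations, not the repository's API:

```python
# a stand-in for an analysis task with a prerequisite list
class Task(object):
    def __init__(self, name):
        self.name = name
        self.prerequisiteTasks = []

def create_tasks(seasons, grids):
    # one shared climatology task, plus one plot task per (grid, season)
    climatologyTask = Task('climatology')
    tasks = [climatologyTask]
    for grid in grids:
        for season in seasons:
            plotTask = Task('plot_{}_{}'.format(grid, season))
            # the plot task may not start until the climatology exists
            plotTask.prerequisiteTasks.append(climatologyTask.name)
            plotTask.climatologyTask = climatologyTask
            tasks.append(plotTask)
    return tasks

print([t.name for t in create_tasks(['JFM', 'ANN'], ['latlon'])])
```

The plotting tasks share no state other than the finished climatology files, so a scheduler can run them concurrently once the single prerequisite completes.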
+ dsObs, obsDescriptor = self.build_observational_dataset() - mainRunName = config.get('runs', 'mainRunName') + # set the landMask to None so we know we need to compute it later + # (if this is an antarctic projection) + self.landMask = None + + comparisonDescriptor = \ + get_comparison_descriptor(config, self.comparisonGridName) - overwriteMpasClimatology = config.getWithDefault( - 'climatology', 'overwriteMpasClimatology', False) + origObsRemapper = Remapper(comparisonDescriptor, obsDescriptor) - overwriteObsClimatology = config.getWithDefault( - 'oceanObservations', 'overwriteObsClimatology', False) + # Interpolate and compute biases + monthValues = constants.monthDictionary[self.season] - try: - restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one ' - 'restart file for ocn_modelvsobs calculation') + remappedFileName = climatologyTask.get_ncclimo_file_name( + self.season, 'remapped', comparisonDescriptor) - outputTimes = config.getExpression(self.taskName, 'comparisonTimes') + remappedModelClimatology = xr.open_dataset(remappedFileName) - comparisonDescriptor = get_lat_lon_comparison_descriptor(config) + remappedModelClimatology = \ + self.post_process_remapped_mpas_climatology( + remappedModelClimatology) - varList = [fieldName] + # now the observations + (climatologyFileName, remappedFileName) = \ + get_observation_climatology_file_names( + config=config, fieldName=fieldName, monthNames=self.season, + componentName='ocean', remapper=origObsRemapper) - ds = open_multifile_dataset(fileNames=self.inputFiles, - calendar=calendar, - config=config, - simulationStartTime=simulationStartTime, - timeVariableName='Time', - variableList=varList, - iselValues=self.iselValues, - variableMap=self.variableMap, - startDate=self.startDate, - endDate=self.endDate) + if not os.path.exists(remappedFileName): - changed, startYear, endYear = update_start_end_year(ds, config, - calendar) + if 'month' in dsObs.coords: + # we should make a seasonal climatology + seasonalClimatology = compute_climatology( + dsObs, monthValues, maskVaries=True) - mpasDescriptor = MpasMeshDescriptor( - restartFileName, meshName=config.get('input', 'mpasMeshName')) + seasonalClimatology.load() + seasonalClimatology.close() + write_netcdf(seasonalClimatology, climatologyFileName) + else: + # we assume dsObs is already a seasonal climatology + seasonalClimatology = dsObs + + # make the remapper for the climatology + obsRemapper = get_remapper( + config=config, sourceDescriptor=obsDescriptor, + comparisonDescriptor=comparisonDescriptor, + mappingFilePrefix='map_obs_{}'.format(fieldName), + method=config.get('oceanObservations', + 'interpolationMethod')) + + if obsRemapper is None: + # no need to remap because the observations are on the + # comparison grid already + remappedObsClimatology = seasonalClimatology + else: + remappedObsClimatology = \ + remap_and_write_climatology( + config, seasonalClimatology, climatologyFileName, + remappedFileName, obsRemapper) - parallel = self.config.getint('execute', 'parallelTaskCount') > 1 - if parallel: - # avoid writing the same mapping file from multiple processes - mappingFilePrefix = 'map_{}'.format(self.taskName) else: - mappingFilePrefix = 'map' - mpasRemapper = get_remapper( - config=config, sourceDescriptor=mpasDescriptor, - comparisonDescriptor=comparisonDescriptor, - mappingFileSection='climatology', - mappingFileOption='mpasMappingFile', - mappingFilePrefix=mappingFilePrefix, - 
method=config.get('climatology', 'mpasInterpolationMethod')) + remappedObsClimatology = xr.open_dataset(remappedFileName) - obsDescriptor = LatLonGridDescriptor() - obsDescriptor.read(fileName=self.obsFileName, latVarName='lat', - lonVarName='lon') + if self.comparisonGridName == 'latlon': + self._plot_latlon(remappedModelClimatology, remappedObsClimatology) + elif self.comparisonGridName == 'antarctic': + self._plot_antarctic(remappedModelClimatology, + remappedObsClimatology) - origObsRemapper = Remapper(comparisonDescriptor, obsDescriptor) + # }}} + + def post_process_remapped_mpas_climatology(self, remappedClimatology): + # {{{ + ''' + A function for post-processing the remapped MPAS climatology before + plotting. For example, this might include taking slices of a 3D field + that cannot simply be hyperslabbed. + + By default, simply returns remappedClimatology unchanged. + + Parameters + ---------- + remappedClimatology : ``xarray.Dataset`` object + The remapped climatology to be processed + + Returns + ------- + remappedClimatology : ``xarray.Dataset`` object + The remapped climatology after processing + + Authors + ------- + XylarAsay-Davis + ''' + return remappedClimatology + + # }}} + + def get_task_name_and_tags(self): + className = type(self).__name__ + # convert the first letter to lowercase to get the corresponding + # seciton name in the config file + taskPrefix = className[0].lower() + className[1:] + + upperComparison = self.comparisonGridName[0].upper() + \ + self.comparisonGridName[1:] + + taskName = '{}{}_{}'.format(taskPrefix, upperComparison, self.season) + + tags = ['climatology', 'horizontalMap', self.comparisonGridName, + taskPrefix] + + return taskName, tags + + def get_section_name(self): + className = type(self).__name__ + # convert the first letter to lowercase to get the corresponding + # seciton name in the config file + sectionName = className[0].lower() + className[1:] + + upperComparison = self.comparisonGridName[0].upper() + \ + self.comparisonGridName[1:] + + comparisonSectionName = '{}{}'.format(sectionName, upperComparison) + + if self.config.has_section(comparisonSectionName): + sectionName = comparisonSectionName + + return sectionName + + def _plot_latlon(self, remappedModelClimatology, remappedObsClimatology): + # {{{ + """ plotting a global lat-lon data set """ + config = self.config + climatologyTask = self.climatologyTask + startYear = climatologyTask.startYear + endYear = climatologyTask.endYear + + sectionName = self.get_section_name() (colormapResult, colorbarLevelsResult) = setup_colormap( - config, self.taskName, suffix='Result') + config, sectionName, suffix='Result') (colormapDifference, colorbarLevelsDifference) = setup_colormap( - config, self.taskName, suffix='Difference') + config, sectionName, suffix='Difference') - dsObs = None - obsRemapperBuilt = False + modelOutput = \ + remappedModelClimatology[self.mpasFieldName].values - # Interpolate and compute biases - for months in outputTimes: - monthValues = constants.monthDictionary[months] - - (climatologyFileName, climatologyPrefix, regriddedFileName) = \ - get_mpas_climatology_file_names( - config=config, - fieldName=fieldName, - monthNames=months, - mpasMeshName=mpasDescriptor.meshName, - comparisonGridName=comparisonDescriptor.meshName) - - if (overwriteMpasClimatology or - not os.path.exists(regriddedFileName)): - seasonalClimatology = cache_climatologies( - ds, monthValues, config, climatologyPrefix, calendar, - printProgress=True) - - if seasonalClimatology is None: - # 
apparently, there was no data available to create the - # climatology - warnings.warn('no data to create {} climatology for ' - '{}'.format(fieldName, months)) - continue - - remappedClimatology = remap_and_write_climatology( - config, seasonalClimatology, climatologyFileName, - regriddedFileName, mpasRemapper) + observations = remappedObsClimatology[self.obsFieldName].values - else: + bias = modelOutput - observations + + lon = remappedModelClimatology['lon'].values + lat = remappedModelClimatology['lat'].values + + lonTarg, latTarg = np.meshgrid(lon, lat) - remappedClimatology = xr.open_dataset(regriddedFileName) + mainRunName = config.get('runs', 'mainRunName') + title = '{} ({}, years {:04d}-{:04d})'.format( + self.fieldNameInTitle, self.season, startYear, endYear) + + outFileName = '{}/{}_{}_{}_years{:04d}-{:04d}.png'.format( + self.plotsDirectory, self.outFileLabel, mainRunName, + self.season, startYear, endYear) + + plot_global_comparison(config, + lonTarg, + latTarg, + modelOutput, + observations, + bias, + colormapResult, + colorbarLevelsResult, + colormapDifference, + colorbarLevelsDifference, + fileout=outFileName, + title=title, + modelTitle='{}'.format(mainRunName), + obsTitle=self.observationTitleLabel, + diffTitle=self.diffTitleLabel, + cbarlabel=self.unitsLabel) + # }}} - modelOutput = remappedClimatology[fieldName].values - lon = remappedClimatology['lon'].values - lat = remappedClimatology['lat'].values + def _plot_antarctic(self, remappedModelClimatology, + remappedObsClimatology): # {{{ + """ plotting an Antarctic data set """ + + config = self.config + climatologyTask = self.climatologyTask - lonTarg, latTarg = np.meshgrid(lon, lat) + startYear = climatologyTask.startYear + endYear = climatologyTask.endYear - # now the observations - (climatologyFileName, regriddedFileName) = \ - get_observation_climatology_file_names( - config=config, fieldName=fieldName, monthNames=months, - componentName='ocean', remapper=origObsRemapper) + sectionName = self.get_section_name() - if (overwriteObsClimatology or - not os.path.exists(regriddedFileName)): + mainRunName = config.get('runs', 'mainRunName') + title = '{} ({}, years {:04d}-{:04d})'.format( + self.fieldNameInTitle, self.season, startYear, endYear) - if dsObs is None: - # load the observations the first time - dsObs = self._build_observational_dataset() + if self.landMask is None: + oceanMask = remappedModelClimatology['validMask'].values + self.landMask = np.ma.masked_array( + np.ones(oceanMask.shape), + mask=np.logical_not(np.isnan(oceanMask))) - seasonalClimatology = compute_climatology( - dsObs, monthValues, maskVaries=True) + modelOutput = \ + remappedModelClimatology[self.mpasFieldName].values - if not obsRemapperBuilt: - seasonalClimatology.load() - seasonalClimatology.close() - seasonalClimatology.to_netcdf(climatologyFileName) - # make the remapper for the climatology - obsDescriptor = LatLonGridDescriptor() - obsDescriptor.read(fileName=climatologyFileName, - latVarName='lat', - lonVarName='lon') - - obsRemapper = get_remapper( - config=config, sourceDescriptor=obsDescriptor, - comparisonDescriptor=comparisonDescriptor, - mappingFileSection='oceanObservations', - mappingFileOption='{}ClimatologyMappingFile'.format( - fieldName), - mappingFilePrefix='map_obs_{}'.format(fieldName), - method=config.get('oceanObservations', - 'interpolationMethod')) - - obsRemapperBuilt = True - - if obsRemapper is None: - # no need to remap because the observations are on the - # comparison grid already - remappedClimatology = 
seasonalClimatology - else: - remappedClimatology = \ - remap_and_write_climatology( - config, seasonalClimatology, climatologyFileName, - regriddedFileName, obsRemapper) + modelOutput = np.ma.masked_where(np.isnan(modelOutput), modelOutput) - else: + observations = remappedObsClimatology[self.obsFieldName].values - remappedClimatology = xr.open_dataset(regriddedFileName) - observations = remappedClimatology[self.obsFieldName].values - - bias = modelOutput - observations - - outFileName = '{}/{}_{}_{}_years{:04d}-{:04d}.png'.format( - self.plotsDirectory, self.outFileLabel, mainRunName, - months, startYear, endYear) - title = '{} ({}, years {:04d}-{:04d})'.format( - self.fieldNameInTitle, months, startYear, endYear) - plot_global_comparison(config, - lonTarg, - latTarg, - modelOutput, - observations, - bias, - colormapResult, - colorbarLevelsResult, - colormapDifference, - colorbarLevelsDifference, - fileout=outFileName, - title=title, - modelTitle='{}'.format(mainRunName), - obsTitle=self.observationTitleLabel, - diffTitle='Model-Observations', - cbarlabel=self.unitsLabel) + observations = np.ma.masked_where(np.isnan(observations), observations) + bias = modelOutput - observations + + x = interp_extrap_corner(remappedModelClimatology['x'].values) + y = interp_extrap_corner(remappedModelClimatology['y'].values) + + outFileName = '{}/{}_{}_{}_{}_years{:04d}-{:04d}.png'.format( + self.plotsDirectory, self.comparisonGridName, + self.outFileLabel, mainRunName, self.season, startYear, + endYear) + + if config.has_option(sectionName, 'colormapIndicesResult'): + colorMapType = 'indexed' + elif config.has_option(sectionName, 'normTypeResult'): + colorMapType = 'norm' + else: + raise ValueError('config section {} contains neither the info' + 'for an indexed color map nor for computing a ' + 'norm'.format(sectionName)) + + plot_polar_projection_comparison( + config, + x, + y, + self.landMask, + modelOutput, + observations, + bias, + fileout=outFileName, + colorMapSectionName=sectionName, + colorMapType=colorMapType, + title=title, + modelTitle='{}'.format(mainRunName), + obsTitle=self.observationTitleLabel, + diffTitle=self.diffTitleLabel, + cbarlabel=self.unitsLabel) # }}} # }}} @@ -309,7 +472,7 @@ class ClimatologyMapSST(ClimatologyMapOcean): # {{{ ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config): # {{{ + def __init__(self, config, season, comparisonGridName): # {{{ """ Construct the analysis task. 
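A recurring idiom in the observation handling above is compute-once/cache-to-NetCDF: if the remapped climatology file already exists it is simply reopened, otherwise it is computed and written out for later runs. A stripped-down sketch of that guard; the file and variable names are illustrative only:

```python
import os
import numpy
import xarray as xr

def get_seasonal_climatology(cacheFileName):
    # reuse the cached climatology if an earlier run already wrote it
    if os.path.exists(cacheFileName):
        return xr.open_dataset(cacheFileName)

    # otherwise compute it (here, a trivial stand-in for the real work)
    ds = xr.Dataset({'sst': ('nCells', numpy.zeros(10))})
    ds.to_netcdf(cacheFileName)
    return ds

climatology = get_seasonal_climatology('sst_JFM_climatology.nc')
```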
@@ -318,19 +481,35 @@ def __init__(self, config): # {{{ config : instance of MpasAnalysisConfigParser Contains configuration options + season : str + A season (keys in ``shared.constants.monthDictionary``) to be + plotted + + comparisonGridName : {'latlon', 'antarctic'} + The name of the comparison grid that where model data an + observations are compared + Authors ------- Xylar Asay-Davis """ self.fieldName = 'sst' self.fieldNameInTitle = 'SST' + self.season = season + self.comparisonGridName = comparisonGridName + + self.mpasFieldName = 'timeMonthly_avg_activeTracers_temperature' + self.iselValues = {'nVertLevels': 0} + + taskName, tags = self.get_task_name_and_tags() + + tags.append(self.fieldName) # call the constructor from the base class (ClimatologyMapOcean) - super(ClimatologyMapSST, self).__init__( - config=config, - taskName='climatologyMapSST', - componentName='ocean', - tags=['climatology', 'horizontalMap', self.fieldName]) + super(ClimatologyMapSST, self).__init__(config=config, + taskName=taskName, + componentName='ocean', + tags=tags) # }}} @@ -357,8 +536,6 @@ def setup_and_check(self): # {{{ "{}/MODEL.SST.HAD187001-198110.OI198111-201203.nc".format( observationsDirectory) - self.iselValues = {'nVertLevels': 0} - self.obsFieldName = 'SST' climStartYear = self.config.getint('oceanObservations', @@ -379,9 +556,11 @@ def setup_and_check(self): # {{{ self.outFileLabel = 'sstHADOI' self.unitsLabel = r'$^o$C' + self.diffTitleLabel = 'Model - Observations' + # }}} - def _build_observational_dataset(self): # {{{ + def build_observational_dataset(self): # {{{ ''' read in the data sets for observations, and possibly rename some variables and dimensions @@ -404,7 +583,11 @@ def _build_observational_dataset(self): # {{{ dsObs.coords['month'] = dsObs['Time.month'] dsObs.coords['year'] = dsObs['Time.year'] - return dsObs # }}} + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(ds=dsObs) + + return dsObs, obsDescriptor # }}} # }}} @@ -418,7 +601,7 @@ class ClimatologyMapSSS(ClimatologyMapOcean): # {{{ ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config): # {{{ + def __init__(self, config, season, comparisonGridName): # {{{ """ Construct the analysis task. 
@@ -427,19 +610,35 @@ def __init__(self, config): # {{{ config : instance of MpasAnalysisConfigParser Contains configuration options + season : str + A season (keys in ``shared.constants.monthDictionary``) to be + plotted + + comparisonGridName : {'latlon', 'antarctic'} + The name of the comparison grid that where model data an + observations are compared + Authors ------- Xylar Asay-Davis """ self.fieldName = 'sss' self.fieldNameInTitle = 'SSS' + self.season = season + self.comparisonGridName = comparisonGridName + + self.mpasFieldName = 'timeMonthly_avg_activeTracers_salinity' + self.iselValues = {'nVertLevels': 0} + + taskName, tags = self.get_task_name_and_tags() + + tags.append(self.fieldName) # call the constructor from the base class (ClimatologyMapOcean) - super(ClimatologyMapSSS, self).__init__( - config=config, - taskName='climatologyMapSSS', - componentName='ocean', - tags=['climatology', 'horizontalMap', self.fieldName]) + super(ClimatologyMapSSS, self).__init__(config=config, + taskName=taskName, + componentName='ocean', + tags=tags) # }}} @@ -466,17 +665,16 @@ def setup_and_check(self): # {{{ '{}/Aquarius_V3_SSS_Monthly.nc'.format( observationsDirectory) - self.iselValues = {'nVertLevels': 0} - self.obsFieldName = 'SSS' self.observationTitleLabel = 'Observations (Aquarius, 2011-2014)' self.outFileLabel = 'sssAquarius' self.unitsLabel = 'PSU' + self.diffTitleLabel = 'Model - Observations' # }}} - def _build_observational_dataset(self): # {{{ + def build_observational_dataset(self): # {{{ ''' read in the data sets for observations, and possibly rename some variables and dimensions @@ -495,7 +693,11 @@ def _build_observational_dataset(self): # {{{ dsObs.coords['month'] = dsObs['Time.month'] dsObs.coords['year'] = dsObs['Time.year'] - return dsObs # }}} + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(ds=dsObs) + + return dsObs, obsDescriptor # }}} # }}} @@ -509,7 +711,7 @@ class ClimatologyMapMLD(ClimatologyMapOcean): # {{{ ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config): # {{{ + def __init__(self, config, season, comparisonGridName): # {{{ """ Construct the analysis task. 
@@ -518,20 +720,35 @@  def __init__(self, config):  # {{{
         config : instance of MpasAnalysisConfigParser
             Contains configuration options
 
+        season : str
+            A season (keys in ``shared.constants.monthDictionary``) to be
+            plotted
+
+        comparisonGridName : {'latlon', 'antarctic'}
+            The name of the comparison grid on which model data and
+            observations are compared
+
         Authors
         -------
         Xylar Asay-Davis
         """
         self.fieldName = 'mld'
         self.fieldNameInTitle = 'MLD'
+        self.season = season
+        self.comparisonGridName = comparisonGridName
+
+        self.mpasFieldName = 'timeMonthly_avg_dThreshMLD'
+        self.iselValues = None
+
+        taskName, tags = self.get_task_name_and_tags()
+
+        tags.append(self.fieldName)
 
         # call the constructor from the base class (ClimatologyMapOcean)
-        super(ClimatologyMapMLD, self).__init__(
-            config=config,
-            taskName='climatologyMapMLD',
-            componentName='ocean',
-            tags=['climatology', 'horizontalMap', self.fieldName])
+        super(ClimatologyMapMLD, self).__init__(config=config,
+                                                taskName=taskName,
+                                                componentName='ocean',
+                                                tags=tags)
 
         # }}}
 
@@ -558,8 +775,6 @@  def setup_and_check(self):  # {{{
             '{}/holtetalley_mld_climatology.nc'.format(
                 observationsDirectory)
 
-        self.iselValues = None
-
         self.obsFieldName = 'mld_dt_mean'
 
         # Set appropriate MLD figure labels
@@ -567,10 +782,11 @@  def setup_and_check(self):  # {{{
             'Observations (HolteTalley density threshold MLD)'
         self.outFileLabel = 'mldHolteTalleyARGO'
         self.unitsLabel = 'm'
+        self.diffTitleLabel = 'Model - Observations'
 
         # }}}
 
-    def _build_observational_dataset(self):  # {{{
+    def build_observational_dataset(self):  # {{{
         '''
         read in the data sets for observations, and possibly rename some
         variables and dimensions
@@ -603,6 +819,10 @@  def _build_observational_dataset(self):  # {{{
         dsObs = mpas_xarray.subset_variables(dsObs, [self.obsFieldName,
                                                      'month'])
 
-        return dsObs  # }}}
+        # create a descriptor of the observation grid using the lat/lon
+        # coordinates
+        obsDescriptor = LatLonGridDescriptor.read(ds=dsObs)
+
+        return dsObs, obsDescriptor  # }}}
 
 # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
diff --git a/mpas_analysis/ocean/meridional_heat_transport.py b/mpas_analysis/ocean/meridional_heat_transport.py
index d4174dbf8..84fbb289c 100644
--- a/mpas_analysis/ocean/meridional_heat_transport.py
+++ b/mpas_analysis/ocean/meridional_heat_transport.py
@@ -4,19 +4,12 @@
 import os
 import warnings
 
-from ..shared.constants.constants import monthDictionary
 from ..shared.plot.plotting import plot_vertical_section,\
     setup_colormap, plot_1D
 
-from ..shared.io.utility import build_config_full_path, make_directories
+from ..shared.io.utility import build_config_full_path
 
-from ..shared.generalized_reader.generalized_reader \
-    import open_multifile_dataset
-
-from ..shared.timekeeping.utility import get_simulation_start_time
-
-from ..shared.climatology.climatology import update_start_end_year, \
-    cache_climatologies
+from ..shared.climatology import MpasClimatology
 
 from ..shared.analysis_task import AnalysisTask
 
@@ -30,6 +23,55 @@ class MeridionalHeatTransport(AnalysisTask):  # {{{
     Mark Petersen, Milena Veneziani, Xylar Asay-Davis
     '''
 
+    @classmethod
+    def create_tasks(cls, config):  # {{{
+        """
+        Construct one task for computing the annual climatology of meridional
+        heat transport and one task for plotting it.  The climatology task is
+        a prerequisite of the plotting task.
+
+        Parameters
+        ----------
+        config : MpasAnalysisConfigParser object
+            Contains configuration options
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        mhtTask = cls(config=config)
+
+        taskSuffix = 'MHT'
+        seasons = ['ANN']
+
+        variableList = ['timeMonthly_avg_meridionalHeatTransportLat',
+                        'timeMonthly_avg_meridionalHeatTransportLatZ']
+
+        climatologyTask = \
+            MpasClimatology(config=config,
+                            variableList=variableList,
+                            taskSuffix=taskSuffix,
+                            componentName='ocean',
+                            seasons=seasons,
+                            tags=['climatology'])
+
+        # add climatologyTask as a prerequisite of the MHT task so
+        # plotting won't happen until we have the required
+        # climatologies
+        if mhtTask.prerequisiteTasks is None:
+            mhtTask.prerequisiteTasks = [climatologyTask.taskName]
+        else:
+            mhtTask.prerequisiteTasks.append(climatologyTask.taskName)
+        # We want to have access to some information from the
+        # climatologyTask (namely, we need a way to find out what the
+        # names of the climatology files are that it created), so we'll
+        # keep a reference to it handy.
+        mhtTask.climatologyTask = climatologyTask
+
+        tasks = [climatologyTask, mhtTask]
+        return tasks  # }}}
+
     def __init__(self, config):  # {{{
         '''
         Construct the analysis task.
@@ -50,6 +92,7 @@  def __init__(self, config):  # {{{
                 taskName='meridionalHeatTransport',
                 componentName='ocean',
                 tags=['climatology'])
+
         # }}}
 
     def setup_and_check(self):  # {{{
@@ -73,31 +116,22 @@  def setup_and_check(self):  # {{{
         super(MeridionalHeatTransport, self).setup_and_check()
 
         config = self.config
+        climatologyTask = self.climatologyTask
 
-        self.check_analysis_enabled(
-            analysisOptionName='config_am_timeseriesstatsmonthly_enable',
-            raiseException=True)
         self.check_analysis_enabled(
             analysisOptionName='config_am_meridionalheattransport_enable',
             raiseException=True)
 
-        # Get a list of timeSeriesStats output files from the streams file,
-        # reading only those that are between the start and end dates
-        # First a list necessary for the MHT climatology
-        streamName = self.historyStreams.find_stream(
-            self.streamMap['timeSeriesStats'])
-        self.startDate = config.get('climatology', 'startDate')
-        self.endDate = config.get('climatology', 'endDate')
-        self.inputFiles = \
-            self.historyStreams.readpath(streamName,
-                                         startDate=self.startDate,
-                                         endDate=self.endDate,
-                                         calendar=self.calendar)
-
-        if len(self.inputFiles) == 0:
-            raise IOError('No files were found in stream {} between {} and '
-                          '{}.'.format(streamName, self.startDate,
-                                       self.endDate))
+        # call setup_and_check() on the climatology task because it will make
+        # sure the start and end year are set and correct.  (In parallel mode,
+        # this copy of the climatologyTask is different from the one that will
+        # actually have run to completion before this task gets run.)
+ climatologyTask.setup_and_check() + + self.startDate = climatologyTask.startDate + self.endDate = climatologyTask.endDate + self.startYear = climatologyTask.startYear + self.endYear = climatologyTask.endYear # Later, we will read in depth and MHT latitude points # from mpaso.hist.am.meridionalHeatTransport.*.nc @@ -109,11 +143,6 @@ def setup_and_check(self): # {{{ self.mhtFile = mhtFiles[0] - self.simulationStartTime = get_simulation_start_time(self.runStreams) - - self.startYear = config.getint('climatology', 'startYear') - self.endYear = config.getint('climatology', 'endYear') - self.sectionName = 'meridionalHeatTransport' # Read in obs file information @@ -179,42 +208,17 @@ def run(self): # {{{ # Then we will need to add another section for regions with a loop # over number of regions. ###################################################################### - variableList = ['avgMeridionalHeatTransportLat', - 'avgMeridionalHeatTransportLatZ'] print '\n Compute and plot global meridional heat transport' - outputDirectory = build_config_full_path(config, 'output', - 'mpasClimatologySubdirectory') - - print '\n List of files for climatologies:\n' \ - ' {} through\n {}'.format( - os.path.basename(self.inputFiles[0]), - os.path.basename(self.inputFiles[-1])) + print ' Load data...' - make_directories(outputDirectory) + # use the climatologyTask to get the right file name for the + # computed climatology + climatologyFileName = self.climatologyTask.get_ncclimo_file_name( + season='ANN', stage='unmasked') - print ' Load data...' - ds = open_multifile_dataset( - fileNames=self.inputFiles, - calendar=self.calendar, - config=config, - simulationStartTime=self.simulationStartTime, - timeVariableName='Time', - variableList=variableList, - variableMap=self.variableMap, - startDate=self.startDate, - endDate=self.endDate) - - changed, startYear, endYear = update_start_end_year(ds, config, - self.calendar) - - # Compute annual climatology - cachePrefix = '{}/meridionalHeatTransport'.format(outputDirectory) - annualClimatology = cache_climatologies(ds, monthDictionary['ANN'], - config, cachePrefix, - self.calendar, - printProgress=True) + annualClimatology = xr.open_dataset(climatologyFileName) # **** Plot MHT **** # Define plotting variables @@ -226,14 +230,15 @@ def run(self): # {{{ print ' Plot global MHT...' 
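One wrinkle of the switch to ncclimo, handled just below with `values[0, :]` indexing, is that each climatology field keeps a singleton time dimension. A small sketch; the dimension names here are illustrative assumptions:

```python
import numpy
import xarray as xr

# ncclimo-style output keeps a singleton time dimension on each field
da = xr.DataArray(numpy.arange(4.0).reshape(1, 4),
                  dims=('Time', 'nMerHeatTransBinsUsed'))

# either plain indexing, as in the code below ...
y = da.values[0, :]
# ... or the equivalent xarray squeeze
ySqueezed = da.squeeze('Time').values
print(numpy.array_equal(y, ySqueezed))  # True
```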
# Plot 1D MHT (zonally averaged, depth integrated) x = binBoundaryMerHeatTrans - y = annualClimatology.avgMeridionalHeatTransportLat + daY = annualClimatology.timeMonthly_avg_meridionalHeatTransportLat + y = daY.values[0, :] xLabel = 'latitude [deg]' yLabel = 'meridional heat transport [PW]' title = 'Global MHT (ANN, years {:04d}-{:04d})\n {}'.format( - startYear, endYear, mainRunName) + self.startYear, self.endYear, mainRunName) figureName = '{}/mht_{}_years{:04d}-{:04d}.png'.format( self.plotsDirectory, mainRunName, - startYear, endYear) + self.startYear, self.endYear) if self.observationsFile is not None: # Load in observations dsObs = xr.open_dataset(self.observationsFile) @@ -264,8 +269,9 @@ def run(self): # {{{ # Plot 2D MHT (zonally integrated) # normalize 2D MHT by layer thickness - MHTLatZ = \ - annualClimatology.avgMeridionalHeatTransportLatZ.values.T[:, :] + daMHTLatZ = \ + annualClimatology.timeMonthly_avg_meridionalHeatTransportLatZ + MHTLatZ = daMHTLatZ.values[0, :, :].T for k in range(nVertLevels): MHTLatZ[k, :] = MHTLatZ[k, :]/refLayerThickness[k] @@ -275,10 +281,10 @@ def run(self): # {{{ xLabel = 'latitude [deg]' yLabel = 'depth [m]' title = 'Global MHT (ANN, years {:04d}-{:04d})\n {}'.format( - startYear, endYear, mainRunName) + self.startYear, self.endYear, mainRunName) figureName = '{}/mhtZ_{}_years{:04d}-{:04d}.png'.format( self.plotsDirectory, mainRunName, - startYear, endYear) + self.startYear, self.endYear) colorbarLabel = '[PW/m]' contourLevels = config.getExpression(self.sectionName, 'contourLevelsGlobal', diff --git a/mpas_analysis/ocean/streamfunction_moc.py b/mpas_analysis/ocean/streamfunction_moc.py index 8c1608e07..15910bd00 100644 --- a/mpas_analysis/ocean/streamfunction_moc.py +++ b/mpas_analysis/ocean/streamfunction_moc.py @@ -4,8 +4,7 @@ import os from functools import partial -from ..shared.constants.constants import m3ps_to_Sv, \ - monthDictionary +from ..shared.constants.constants import m3ps_to_Sv from ..shared.plot.plotting import plot_vertical_section,\ timeseries_analysis_plot, setup_colormap @@ -17,8 +16,7 @@ from ..shared.timekeeping.utility import get_simulation_start_time, \ days_to_datetime -from ..shared.climatology.climatology import update_start_end_year, \ - cache_climatologies +from ..shared.climatology import MpasClimatology from ..shared.analysis_task import AnalysisTask @@ -39,6 +37,55 @@ class StreamfunctionMOC(AnalysisTask): # {{{ Milena Veneziani, Mark Petersen, Phillip Wolfram, Xylar Asay-Davis ''' + @classmethod + def create_tasks(cls, config): # {{{ + """ + For each comparison grid, construct one task for computing the + climatologies and one plotting task for each season. The climatology + task is a prerequisite of the plotting tasks, but the plotting tasks + can run in parallel with one another. 
+ + Parameters + ---------- + config : MpasAnalysisConfigParser object + Contains configuration options + + Authors + ------- + Xylar Asay-Davis + """ + mocTask = cls(config=config) + + taskSuffix = 'VelMOC' + seasons = ['ANN'] + + variableList = ['timeMonthly_avg_normalVelocity', + 'timeMonthly_avg_vertVelocityTop'] + + climatologyTask = \ + MpasClimatology(config=config, + variableList=variableList, + taskSuffix=taskSuffix, + componentName='ocean', + seasons=seasons, + tags=['climatology']) + + # add climatologyTask as a prerequisite of the MOC task so + # plotting won't happen until we have the required + # climatologies + if mocTask.prerequisiteTasks is None: + mocTask.prerequisiteTasks = [climatologyTask.taskName] + else: + mocTask.prerequisiteTasks.append(climatologyTask.taskName) + # We want to have access to some information from the + # climatologyTask (namely, we need a way to find out what the + # names of the climatology files are that it created), so we'll + # keep a reference to it handy. + mocTask.climatologyTask = climatologyTask + + tasks = [climatologyTask, mocTask] + return tasks # }}} + def __init__(self, config): # {{{ ''' Construct the analysis task. @@ -84,36 +131,28 @@ def setup_and_check(self): # {{{ super(StreamfunctionMOC, self).setup_and_check() config = self.config - - self.check_analysis_enabled( - analysisOptionName='config_am_timeseriesstatsmonthly_enable', - raiseException=True) + climatologyTask = self.climatologyTask self.mocAnalysisMemberEnabled = self.check_analysis_enabled( analysisOptionName='config_am_mocstreamfunction_enable', raiseException=False) + # call setup_and_check() on the climatology task because it will make + # sure the start and end year are set and correct. (In parallel mode, + # this copy of the climatologyTask is different from the one that will + # actually have run to completion before this task gets run.) + climatologyTask.setup_and_check() + + self.startDateClimo = climatologyTask.startDate + self.endDateClimo = climatologyTask.endDate + self.startYearClimo = climatologyTask.startYear + self.endYearClimo = climatologyTask.endYear + # Get a list of timeSeriesStats output files from the streams file, # reading only those that are between the start and end dates # First a list necessary for the streamfunctionMOC climatology streamName = self.historyStreams.find_stream( self.streamMap['timeSeriesStats']) - self.startDateClimo = config.get('climatology', 'startDate') - self.endDateClimo = config.get('climatology', 'endDate') - self.inputFilesClimo = \ - self.historyStreams.readpath(streamName, - startDate=self.startDateClimo, - endDate=self.endDateClimo, - calendar=self.calendar) - if len(self.inputFilesClimo) == 0: - raise IOError('No files were found in stream {} between {} and ' - '{}.'.format(streamName, self.startDateClimo, - self.endDateClimo)) - - self.simulationStartTime = get_simulation_start_time(self.runStreams) - - self.startYearClimo = config.getint('climatology', 'startYear') - self.endYearClimo = config.getint('climatology', 'endYear') # Then a list necessary for the streamfunctionMOC Atlantic timeseries self.startDateTseries = config.get('timeSeries', 'startDate') @@ -150,11 +189,6 @@ def run(self): # {{{ print "\nPlotting streamfunction of Meridional Overturning " \ "Circulation (MOC)..." 
- print '\n List of files for climatologies:\n' \ - ' {} through\n {}'.format( - os.path.basename(self.inputFilesClimo[0]), - os.path.basename(self.inputFilesClimo[-1])) - print '\n List of files for time series:\n' \ ' {} through\n {}'.format( os.path.basename(self.inputFilesTseries[0]), @@ -172,7 +206,6 @@ def run(self): # {{{ # sectionName, dictClimo, # dictTseries) else: - self._cache_velocity_climatologies() self._compute_moc_climo_postprocess() dsMOCTimeSeries = self._compute_moc_time_series_postprocess() @@ -250,46 +283,6 @@ def _load_mesh(self): # {{{ refTopDepth, refLayerThickness # }}} - def _cache_velocity_climatologies(self): # {{{ - '''compute yearly velocity climatologies and cache them''' - - variableList = ['avgNormalVelocity', - 'avgVertVelocityTop'] - - config = self.config - - outputDirectory = build_config_full_path(config, 'output', - 'mpasClimatologySubdirectory') - - make_directories(outputDirectory) - - chunking = config.getExpression(self.sectionName, 'maxChunkSize') - ds = open_multifile_dataset( - fileNames=self.inputFilesClimo, - calendar=self.calendar, - config=config, - simulationStartTime=self.simulationStartTime, - timeVariableName='Time', - variableList=variableList, - variableMap=self.variableMap, - startDate=self.startDateClimo, - endDate=self.endDateClimo, - chunking=chunking) - - # update the start and end year in config based on the real extend of - # ds - update_start_end_year(ds, config, self.calendar) - self.startYearClimo = config.getint('climatology', 'startYear') - self.endYearClimo = config.getint('climatology', 'endYear') - - cachePrefix = '{}/meanVelocity'.format(outputDirectory) - - # compute and cache the velocity climatology - cache_climatologies(ds, monthDictionary['ANN'], - config, cachePrefix, self.calendar, - printProgress=True) - # }}} - def _compute_moc_climo_postprocess(self): # {{{ '''compute mean MOC streamfunction as a post-process''' @@ -303,17 +296,21 @@ def _compute_moc_climo_postprocess(self): # {{{ 'regionNames') # Load basin region related variables and save them to dictionary - # NB: The following will need to change with new regional mapping files - regionMaskFiles = config.get(self.sectionName, 'regionMaskFiles') - if not os.path.exists(regionMaskFiles): - raise IOError('Regional masking file for MOC calculation ' - 'does not exist') + mpasMeshName = config.get('input', 'mpasMeshName') + regionMaskDirectory = config.get('regions', 'regionMaskDirectory') + + regionMaskFile = '{}/{}_SingleRegionAtlanticWTransportTransects_' \ + 'masks.nc'.format(regionMaskDirectory, mpasMeshName) + + if not os.path.exists(regionMaskFile): + raise IOError('Regional masking file {} for MOC calculation ' + 'does not exist'.format(regionMaskFile)) iRegion = 0 self.dictRegion = {} for region in self.regionNames: print '\n Reading region and transect mask for ' \ '{}...'.format(region) - ncFileRegional = netCDF4.Dataset(regionMaskFiles, mode='r') + ncFileRegional = netCDF4.Dataset(regionMaskFile, mode='r') maxEdgesInTransect = \ ncFileRegional.dimensions['maxEdgesInTransect'].size transectEdgeMaskSigns = \ @@ -353,22 +350,22 @@ def _compute_moc_climo_postprocess(self): # {{{ if not os.path.exists(outputFileClimo): print ' Load data...' 
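The region and transect masks above are read straight from a NetCDF mask file with `netCDF4`. A runnable sketch of that access pattern; a tiny stand-in file is created first so the read actually works, and the file and variable names mirror, but are not identical to, the `<mesh>_..._masks.nc` convention used in this patch:

```python
import netCDF4
import numpy

# build a tiny stand-in mask file so the read below actually runs
ncFile = netCDF4.Dataset('example_masks.nc', mode='w')
ncFile.createDimension('nCells', 5)
ncFile.createDimension('nRegions', 1)
var = ncFile.createVariable('regionCellMasks', 'i4', ('nCells', 'nRegions'))
var[:, 0] = numpy.array([1, 1, 0, 0, 1])
ncFile.close()

# read it back the way _compute_moc_climo_postprocess does
ncFile = netCDF4.Dataset('example_masks.nc', mode='r')
regionCellMask = ncFile.variables['regionCellMasks'][:, 0]
ncFile.close()
print(regionCellMask)  # [1 1 0 0 1]
```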
-            cachePrefix = '{}/meanVelocity'.format(outputDirectory)
-
-            if self.startYearClimo == self.endYearClimo:
-                yearString = '{:04d}'.format(self.startYearClimo)
-                velClimoFile = '{}_year{}.nc'.format(cachePrefix, yearString)
-            else:
-                yearString = '{:04d}-{:04d}'.format(self.startYearClimo,
-                                                    self.endYearClimo)
-                velClimoFile = '{}_years{}.nc'.format(cachePrefix, yearString)
+            # use the climatologyTask to get the right file name for the
+            # computed climatology
+            velClimoFile = self.climatologyTask.get_ncclimo_file_name(
+                season='ANN', stage='unmasked')
 
             annualClimatology = xr.open_dataset(velClimoFile)
+            # rename some variables for convenience (what the variableMap used
+            # to do before we switched to ncclimo)
+            annualClimatology = annualClimatology.rename(
+                {'timeMonthly_avg_normalVelocity': 'avgNormalVelocity',
+                 'timeMonthly_avg_vertVelocityTop': 'avgVertVelocityTop'})
 
             # Convert to numpy arrays
             # (can result in a memory error for large array size)
-            horizontalVel = annualClimatology.avgNormalVelocity.values
-            verticalVel = annualClimatology.avgVertVelocityTop.values
+            horizontalVel = annualClimatology.avgNormalVelocity.values[0, :, :]
+            verticalVel = annualClimatology.avgVertVelocityTop.values[0, :, :]
             velArea = verticalVel * areaCell[:, np.newaxis]
 
             # Create dictionary for MOC climatology (NB: need this form
diff --git a/mpas_analysis/ocean/time_series_antarctic_melt.py b/mpas_analysis/ocean/time_series_antarctic_melt.py
new file mode 100644
index 000000000..72e854561
--- /dev/null
+++ b/mpas_analysis/ocean/time_series_antarctic_melt.py
@@ -0,0 +1,238 @@
+import os
+import xarray
+
+from ..shared.analysis_task import AnalysisTask
+
+from ..shared.constants import constants
+
+from ..shared.plot.plotting import timeseries_analysis_plot
+
+from ..shared.generalized_reader.generalized_reader \
+    import open_multifile_dataset
+
+from ..shared.io.utility import build_config_full_path, make_directories
+
+
+class TimeSeriesAntarcticMelt(AnalysisTask):
+    """
+    Performs analysis of the time-series output of Antarctic sub-ice-shelf
+    melt rates.
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    def __init__(self, config):  # {{{
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        # first, call the constructor from the base class (AnalysisTask)
+        super(TimeSeriesAntarcticMelt, self).__init__(
+            config=config,
+            taskName='timeSeriesAntarcticMelt',
+            componentName='ocean',
+            tags=['timeSeries', 'melt', 'landIceCavities'])
+
+        # }}}
+
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
+
+        Raises
+        ------
+        IOError
+            If files are not present
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        # first, call setup_and_check from the base class (AnalysisTask),
+        # which will perform some common setup, including storing:
+        # self.inDirectory, self.plotsDirectory, self.namelist, self.streams
+        # self.calendar
+        super(TimeSeriesAntarcticMelt, self).setup_and_check()
+
+        self.check_analysis_enabled(
+            analysisOptionName='config_am_timeseriesstatsmonthly_enable',
+            raiseException=True)
+
+        config = self.config
+
+        landIceFluxMode = self.namelist.get('config_land_ice_flux_mode')
+        if landIceFluxMode not in ['standalone', 'coupled']:
+            raise ValueError('*** timeSeriesAntarcticMelt requires '
+                             'config_land_ice_flux_mode \n'
+                             '    to be standalone or coupled.  
Otherwise, no ' + 'melt rates are available \n' + ' for plotting.') + + mpasMeshName = config.get('input', 'mpasMeshName') + regionMaskDirectory = config.get('regions', 'regionMaskDirectory') + + self.regionMaskFileName = '{}/{}_iceShelfMasks.nc'.format( + regionMaskDirectory, mpasMeshName) + + if not os.path.exists(self.regionMaskFileName): + raise IOError('Regional masking file {} for Antarctica melt-rate ' + 'calculation does not exist'.format( + self.regionMaskFileName)) + + # Load mesh related variables + try: + self.restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for Antarctic melt calculations') + + # get a list of timeSeriesStats output files from the streams file, + # reading only those that are between the start and end dates + streamName = self.historyStreams.find_stream( + self.streamMap['timeSeriesStats']) + self.startDate = config.get('timeSeries', 'startDate') + self.endDate = config.get('timeSeries', 'endDate') + self.inputFiles = \ + self.historyStreams.readpath(streamName, + startDate=self.startDate, + endDate=self.endDate, + calendar=self.calendar) + + if len(self.inputFiles) == 0: + raise IOError('No files were found in stream {} between {} and ' + '{}.'.format(streamName, self.startDate, + self.endDate)) + + return # }}} + + def run(self): # {{{ + """ + Performs analysis of the time-series output of Antarctic sub-ice-shelf + melt rates. + + Authors + ------- + Xylar Asay-Davis + """ + + print "\nPlotting Antarctic melt rate time series..." + + print ' Load melt rate data...' + + config = self.config + calendar = self.calendar + + print '\n Reading files:\n' \ + ' {} through\n {}'.format( + os.path.basename(self.inputFiles[0]), + os.path.basename(self.inputFiles[-1])) + + # Load data: + variableList = ['timeMonthly_avg_landIceFreshwaterFlux'] + timeVariableName = ['xtime_startMonthly', 'xtime_endMonthly'] + ds = open_multifile_dataset(fileNames=self.inputFiles, + calendar=calendar, + config=config, + timeVariableName=timeVariableName, + variableList=variableList, + startDate=self.startDate, + endDate=self.endDate) + + freshwaterFlux = ds.timeMonthly_avg_landIceFreshwaterFlux + + movingAverageMonths = config.getint('timeSeriesAntarcticMelt', + 'movingAverageMonths') + + iceShelvesToPlot = config.getExpression('timeSeriesAntarcticMelt', + 'iceShelvesToPlot') + + dsRestart = xarray.open_dataset(self.restartFileName) + areaCell = dsRestart.landIceFraction.isel(Time=0)*dsRestart.areaCell + + dsRegionMask = xarray.open_dataset(self.regionMaskFileName) + regionNames = list(dsRegionMask.regionNames.values) + nRegions = dsRegionMask.dims['nRegions'] + + if 'all' in iceShelvesToPlot: + iceShelvesToPlot = regionNames + regionIndices = [iRegion for iRegion in range(nRegions)] + else: + regionIndices = [] + for regionName in iceShelvesToPlot: + if regionName not in regionNames: + raise ValueError('Unknown ice shelf name {}'.format( + regionName)) + + iRegion = regionNames.index(regionName) + regionIndices.append(iRegion) + + # select only those regions we want to plot + dsRegionMask = dsRegionMask.isel(nRegions=regionIndices) + cellMasks = dsRegionMask.regionCellMasks + nRegions = dsRegionMask.dims['nRegions'] + + # convert from kg/s to kg/yr + totalMeltFlux = constants.sec_per_year * \ + (cellMasks*areaCell*freshwaterFlux).sum(dim='nCells') + + totalArea = (cellMasks*areaCell).sum(dim='nCells') + + # from kg/m^2/yr to m/yr + meltRates = (1./constants.rho_fw) * 
(totalMeltFlux/totalArea) + + # convert from kg/yr to GT/yr + totalMeltFlux /= constants.kg_per_GT + + outputDirectory = build_config_full_path(config, 'output', + 'timeseriesSubdirectory') + + make_directories(outputDirectory) + + print ' Make plots...' + for iRegion in range(nRegions): + regionName = iceShelvesToPlot[iRegion] + + title = regionName.replace('_', ' ') + + regionName = regionName.replace(' ', '_') + + xLabel = 'Time (yr)' + yLabel = 'Melt Flux (GT/yr)' + + timeSeries = totalMeltFlux.isel(nRegions=iRegion) + + figureName = '{}/melt_flux_{}.png'.format(self.plotsDirectory, + regionName) + + timeseries_analysis_plot(config, [timeSeries], movingAverageMonths, + title, xLabel, yLabel, figureName, + lineStyles=['b-'], lineWidths=[1.2], + calendar=calendar) + + xLabel = 'Time (yr)' + yLabel = 'Melt Rate (m/yr)' + + timeSeries = meltRates.isel(nRegions=iRegion) + + figureName = '{}/melt_rate_{}.png'.format(self.plotsDirectory, + regionName) + + timeseries_analysis_plot(config, [timeSeries], movingAverageMonths, + title, xLabel, yLabel, figureName, + lineStyles=['b-'], lineWidths=[1.2], + calendar=calendar) + # }}} + +# }}} + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/sea_ice/__init__.py b/mpas_analysis/sea_ice/__init__.py index a0e7d0823..94c72455c 100644 --- a/mpas_analysis/sea_ice/__init__.py +++ b/mpas_analysis/sea_ice/__init__.py @@ -1,2 +1,2 @@ -from climatology_map import ClimatologyMapSeaIce +from climatology_map import ClimatologyMapSeaIceConc, ClimatologyMapSeaIceThick from time_series import TimeSeriesSeaIce \ No newline at end of file diff --git a/mpas_analysis/sea_ice/climatology_map.py b/mpas_analysis/sea_ice/climatology_map.py index c91ee6c23..d0a51f53b 100644 --- a/mpas_analysis/sea_ice/climatology_map.py +++ b/mpas_analysis/sea_ice/climatology_map.py @@ -4,25 +4,19 @@ import numpy.ma as ma import numpy as np -import warnings import xarray as xr -from ..shared.constants import constants - -from ..shared.climatology import get_lat_lon_comparison_descriptor, \ - get_remapper, get_mpas_climatology_file_names, \ +from ..shared.climatology import get_remapper, \ get_observation_climatology_file_names, \ - cache_climatologies, update_start_end_year, \ - remap_and_write_climatology -from ..shared.grid import MpasMeshDescriptor, LatLonGridDescriptor + remap_and_write_climatology, \ + get_comparison_descriptor, MpasClimatology +from ..shared.grid import LatLonGridDescriptor from ..shared.plot.plotting import plot_polar_comparison, \ setup_colormap from ..shared.io.utility import build_config_full_path - -from ..shared.generalized_reader.generalized_reader \ - import open_multifile_dataset +from ..shared.io import write_netcdf from .sea_ice_analysis_task import SeaIceAnalysisTask @@ -37,62 +31,107 @@ class ClimatologyMapSeaIce(SeaIceAnalysisTask): Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config): # {{{ + @classmethod + def create_tasks(cls, config): # {{{ """ - Construct the analysis task. + For each comparison grid, construct one task for computing the + climatologies and one plotting task for each season. The climatology + task is a prerequisite of the plotting tasks, but the plotting tasks + can run in parallel with one another. 
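Stepping back from the diff for a moment: the unit handling in `time_series_antarctic_melt.py` above is easy to get wrong, so here is the same chain of conversions on scalar values. The numeric constants are assumptions standing in for `constants.sec_per_year`, `constants.rho_fw` and `constants.kg_per_GT`, and the flux and area values are invented:

```python
# assumed stand-ins for mpas_analysis.shared.constants
sec_per_year = 365. * 24. * 3600.   # no-leap calendar, s/yr
rho_fw = 1000.                      # freshwater density, kg/m^3
kg_per_GT = 1e12                    # kg per gigatonne

# suppose the masked, area-weighted sum of the freshwater flux
# (kg/m^2/s times m^2) over an ice shelf is 3.2e4 kg/s over an
# area of 5.0e10 m^2 (hypothetical numbers)
fluxSum = 3.2e4      # kg/s
totalArea = 5.0e10   # m^2

totalMeltFlux = sec_per_year * fluxSum            # kg/yr
meltRate = totalMeltFlux / (rho_fw * totalArea)   # m/yr of freshwater
totalMeltFluxGT = totalMeltFlux / kg_per_GT       # Gt/yr

print('melt flux: {:.3f} Gt/yr, melt rate: {:.3f} m/yr'.format(
    totalMeltFluxGT, meltRate))
# melt flux: 1.009 Gt/yr, melt rate: 0.020 m/yr
```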
         Parameters
         ----------
-        config : instance of MpasAnalysisConfigParser
+        config : MpasAnalysisConfigParser object
             Contains configuration options

         Authors
         -------
         Xylar Asay-Davis
         """
-        # first, call the constructor from the base class (SeaIceAnalysisTask)
-        super(ClimatologyMapSeaIce, self).__init__(
-            config=config,
-            taskName='climatologyMapSeaIceConcThick',
-            componentName='seaIce',
-            tags=['climatology', 'horizontalMap'])
-        # }}}
+        # get the name of the class (something like ClimatologyMapSST)
+        className = cls.__name__
+        sectionPrefix = className[0].lower() + className[1:]
+
+        tasks = []
+
+        # separate climatology tasks for each hemisphere
+        for hemisphere in ['NH', 'SH']:
+            # convert the first letter to lowercase to get the corresponding
+            # section name in the config file
+            sectionName = '{}{}'.format(sectionPrefix, hemisphere)
+
+            # read in what seasons we want to plot
+            seasons = config.getExpression(sectionName, 'seasons')
+
+            if len(seasons) == 0:
+                raise ValueError('config section {} does not contain valid '
+                                 'list of seasons'.format(sectionName))
+
+            comparisonGridNames = config.getExpression(sectionName,
+                                                       'comparisonGrids')
+
+            if len(comparisonGridNames) == 0:
+                raise ValueError('config section {} does not contain valid '
+                                 'list of comparison grids'.format(
+                                     sectionName))
+
+            observationPrefixes = \
+                config.getExpression(sectionName, 'observationPrefixes')
+
+            climatologyMapTasks = []
+            for comparisonGridName in comparisonGridNames:
+                for season in seasons:
+                    for observationPrefix in observationPrefixes:
+                        # one plotting task for each season, comparison grid,
+                        # and type of observations
+                        climatologyMapTasks.append(cls(
+                            config=config,
+                            hemisphere=hemisphere,
+                            season=season,
+                            comparisonGridName=comparisonGridName,
+                            observationPrefix=observationPrefix))
+
+            # we'll use the first task (corresponding to the first season in
+            # the list) as a way to get some information we need to build the
+            # MpasClimatology task for computing climatologies:
+            # mpasFieldName, componentName, fieldName
+            firstTask = climatologyMapTasks[0]
+
+            taskSuffix = firstTask.fieldName[0].upper() + \
+                firstTask.fieldName[1:] + hemisphere
+
+            climatologyTask = \
+                MpasClimatology(config=config,
+                                variableList=[firstTask.mpasFieldName],
+                                taskSuffix=taskSuffix,
+                                componentName=firstTask.componentName,
+                                comparisonGridNames=comparisonGridNames,
+                                seasons=seasons,
+                                tags=['climatology'],
+                                iselValues=firstTask.iselValues)
+
+            for index in range(len(climatologyMapTasks)):
+                climatologyMapTask = climatologyMapTasks[index]
+                # add climatologyTask as a prerequisite of each task so
+                # plotting won't happen until we have the required
+                # climatologies
+                if climatologyMapTask.prerequisiteTasks is None:
+                    climatologyMapTask.prerequisiteTasks = \
+                        [climatologyTask.taskName]
+                else:
+                    climatologyMapTask.prerequisiteTasks.append(
+                        climatologyTask.taskName)
+                # We want to have access to some information from the
+                # climatologyTask (namely, we need a way to find out what the
+                # names of the climatology files are that it created), so we'll
+                # keep a reference to it handy.
+                climatologyMapTask.climatologyTask = climatologyTask

-    def setup_and_check(self):  # {{{
-        """
-        Perform steps to set up the analysis and check for errors in the setup.
+ tasks.append(climatologyTask) + tasks.extend(climatologyMapTasks) - Authors - ------- - Xylar Asay-Davis - """ - # first, call setup_and_check from the base class (SeaIceAnalysisTask), - # which will perform some common setup - super(ClimatologyMapSeaIce, self).setup_and_check() - - self.check_analysis_enabled( - analysisOptionName='config_am_timeseriesstatsmonthly_enable', - raiseException=True) - - # get a list of timeSeriesStatsMonthly output files from the streams - # file, reading only those that are between the start and end dates - streamName = self.historyStreams.find_stream( - self.streamMap['timeSeriesStats']) - self.startDate = self.config.get('climatology', 'startDate') - self.endDate = self.config.get('climatology', 'endDate') - self.inputFiles = \ - self.historyStreams.readpath(streamName, - startDate=self.startDate, - endDate=self.endDate, - calendar=self.calendar) - - if len(self.inputFiles) == 0: - raise IOError('No files were found in stream {} between {} and ' - '{}.'.format(streamName, self.startDate, - self.endDate)) - - return # }}} + return tasks # }}} def run(self): # {{{ """ @@ -104,438 +143,387 @@ def run(self): # {{{ Xylar Asay-Davis, Milena Veneziani """ - print "\nPlotting 2-d maps of sea-ice concentration and thickness " \ - "climatologies..." - - print '\n Reading files:\n' \ - ' {} through\n {}'.format( - os.path.basename(self.inputFiles[0]), - os.path.basename(self.inputFiles[-1])) - # Load data - print ' Load sea-ice data...' - self.ds = open_multifile_dataset( - fileNames=self.inputFiles, - calendar=self.calendar, - config=self.config, - simulationStartTime=self.simulationStartTime, - timeVariableName='Time', - variableList=['iceAreaCell', 'iceVolumeCell'], - variableMap=self.variableMap, - startDate=self.startDate, - endDate=self.endDate) - - # Compute climatologies (first motnhly and then seasonally) - print ' Compute seasonal climatologies...' - - changed, startYear, endYear = update_start_end_year(self.ds, - self.config, - self.calendar) - - mpasDescriptor = MpasMeshDescriptor( - self.restartFileName, - meshName=self.config.get('input', 'mpasMeshName')) - - comparisonDescriptor = get_lat_lon_comparison_descriptor(self.config) - - parallel = self.config.getint('execute', 'parallelTaskCount') > 1 - if parallel: - # avoid writing the same mapping file from multiple processes - mappingFilePrefix = 'map_{}'.format(self.taskName) - else: - mappingFilePrefix = 'map' + print "\nPlotting 2-d maps of {} climatologies for season {}" \ + "...".format(self.fieldNameInTitle, self.season) + + climatologyTask = self.climatologyTask + + # call setup_and_check() on the climatology task because it will make + # sure the start and end year are set and correct. (In parallel mode, + # this copy of the climatologyTask is different from the one where + # setup_and_check was already called, and run completed successfully.) 
+        climatologyTask.setup_and_check()

-        self.mpasRemapper = get_remapper(
-            config=self.config, sourceDescriptor=mpasDescriptor,
-            comparisonDescriptor=comparisonDescriptor,
-            mappingFileSection='climatology',
-            mappingFileOption='mpasMappingFile',
-            mappingFilePrefix=mappingFilePrefix,
-            method=self.config.get('climatology', 'mpasInterpolationMethod'))
+        self._do_plotting()  # }}}

-        self._compute_and_plot_concentration()
+    def get_task_name_and_tags(self):
+        className = type(self).__name__
+        # convert the first letter to lowercase to get the corresponding
+        # section name in the config file
+        taskPrefix = className[0].lower() + className[1:]

-        self._compute_and_plot_thickness()  # }}}
+        upperComparison = self.comparisonGridName[0].upper() + \
+            self.comparisonGridName[1:]

-    def _compute_and_plot_concentration(self):
+        taskName = '{}{}{}{}_{}'.format(taskPrefix, self.hemisphere,
+                                        self.observationPrefix,
+                                        upperComparison, self.season)
+
+        tags = ['climatology', 'horizontalMap', self.comparisonGridName,
+                taskPrefix]
+
+        return taskName, tags
+
+    def get_section_name(self):
+        className = type(self).__name__
+        # convert the first letter to lowercase to get the corresponding
+        # section name in the config file
+        sectionName = className[0].lower() + className[1:]
+
+        sectionName = '{}{}'.format(sectionName, self.hemisphere)
+
+        return sectionName
+
+    def _do_plotting(self):  # {{{
         '''
-        computes seasonal climatologies and plots model results, observations
-        and biases in sea-ice concentration.
+        plots model results, observations and biases.

         Authors
         -------
         Xylar Asay-Davis, Milena Veneziani
         '''
-
-        print '  Make ice concentration plots...'
-
         config = self.config
-        calendar = self.calendar
-        ds = self.ds

         mainRunName = config.get('runs', 'mainRunName')
-        startYear = config.getint('climatology', 'startYear')
-        endYear = config.getint('climatology', 'endYear')
-
-        overwriteMpasClimatology = config.getWithDefault(
-            'climatology', 'overwriteMpasClimatology', False)
-
-        overwriteObsClimatology = config.getWithDefault(
-            'seaIceObservations', 'overwriteObsClimatology', False)
-
-        subtitle = 'Ice concentration'
-
-        hemisphereSeasons = {'JFM': ('NH', 'Winter'),
-                             'JAS': ('NH', 'Summer'),
-                             'DJF': ('SH', 'Winter'),
-                             'JJA': ('SH', 'Summer')}
-
-        obsFileNames = {}
-        regriddedObsFileNames = {}
-        obsRemappers = {}
-
-        comparisonDescriptor = self.mpasRemapper.destinationDescriptor
-
-        buildObsClimatologies = overwriteObsClimatology
-        for months in hemisphereSeasons:
-            hemisphere, season = hemisphereSeasons[months]
-            climFieldName = 'iceConcentration'
-            for obsName in ['NASATeam', 'Bootstrap']:
-                key = (months, obsName)
-                obsFileName = build_config_full_path(
-                    config, 'seaIceObservations',
-                    'concentration{}{}_{}'.format(obsName, hemisphere, months))
-                obsFieldName = '{}_{}_{}'.format(climFieldName, hemisphere,
-                                                 obsName)
-
-                obsDescriptor = LatLonGridDescriptor()
-                obsDescriptor.read(fileName=obsFileName, latVarName='t_lat',
-                                   lonVarName='t_lon')
-                obsRemapper = get_remapper(
-                    config=config, sourceDescriptor=obsDescriptor,
-                    comparisonDescriptor=comparisonDescriptor,
-                    mappingFileSection='seaIceObservations',
-                    mappingFileOption='seaIceClimatologyMappingFile',
-                    mappingFilePrefix='map_obs_seaIce',
-                    method=config.get('seaIceObservations',
-                                      'interpolationMethod'))
-                obsRemappers[key] = obsRemapper
-
-                if not os.path.isfile(obsFileName):
-                    raise OSError('Obs file {} not found.'.format(
-                        obsFileName))
-
-                (climatologyFileName, regriddedFileName) = \
-                    get_observation_climatology_file_names(
-                        config=config, 
fieldName=obsFieldName, - monthNames=months, componentName=self.componentName, - remapper=obsRemapper) - - obsFileNames[key] = obsFileName - regriddedObsFileNames[key] = regriddedFileName - - if not os.path.exists(regriddedFileName): - buildObsClimatologies = True - - for months in hemisphereSeasons: - hemisphere, season = hemisphereSeasons[months] - monthValues = constants.monthDictionary[months] - field = 'iceAreaCell' - climFieldName = 'iceConcentration' - - # interpolate the model results - mpasMeshName = self.mpasRemapper.sourceDescriptor.meshName - comparisonGridName = \ - self.mpasRemapper.destinationDescriptor.meshName - (climatologyFileName, climatologyPrefix, regriddedFileName) = \ - get_mpas_climatology_file_names( - config=config, - fieldName=climFieldName, - monthNames=months, - mpasMeshName=mpasMeshName, - comparisonGridName=comparisonGridName) - - if (overwriteMpasClimatology or - not os.path.exists(regriddedFileName)): - seasonalClimatology = cache_climatologies( - ds, monthValues, config, climatologyPrefix, calendar, - printProgress=True) - if seasonalClimatology is None: - # apparently, there was no data available to create the - # climatology - warnings.warn('no data to create sea ice concentration ' - 'climatology for {}'.format(months)) - continue - - remappedClimatology = remap_and_write_climatology( - config, seasonalClimatology, climatologyFileName, - regriddedFileName, self.mpasRemapper) - else: + comparisonDescriptor = \ + get_comparison_descriptor(config, self.comparisonGridName) + + remappedFileName = self.climatologyTask.get_ncclimo_file_name( + self.season, 'remapped', comparisonDescriptor) - remappedClimatology = xr.open_dataset(regriddedFileName) + obsDescriptor = LatLonGridDescriptor.read(fileName=self.obsFileName, + latVarName='t_lat', + lonVarName='t_lon') - iceConcentration = remappedClimatology[field].values - lon = remappedClimatology['lon'].values - lat = remappedClimatology['lat'].values + remappedClimatology = xr.open_dataset(remappedFileName) - lonTarg, latTarg = np.meshgrid(lon, lat) + modelOutput = remappedClimatology[self.mpasFieldName].values + if self.maskValue is not None: + modelOutput = ma.masked_values(modelOutput, self.maskValue) + lon = remappedClimatology['lon'].values + lat = remappedClimatology['lat'].values - if hemisphere == 'NH': - plotProjection = 'npstere' + lonTarg, latTarg = np.meshgrid(lon, lat) + + if self.hemisphere == 'NH': + plotProjection = 'npstere' + else: + plotProjection = 'spstere' + + sectionName = self.get_section_name() + + (colormapResult, colorbarLevelsResult) = setup_colormap( + config, sectionName, suffix='Result') + (colormapDifference, colorbarLevelsDifference) = setup_colormap( + config, sectionName, suffix='Difference') + + referenceLongitude = config.getfloat(sectionName, 'referenceLongitude') + minimumLatitude = config.getfloat(sectionName, 'minimumLatitude') + + if not os.path.isfile(self.obsFileName): + raise OSError('Obs file {} not found.'.format( + self.obsFileName)) + + obsRemapper = get_remapper( + config=config, + sourceDescriptor=obsDescriptor, + comparisonDescriptor=comparisonDescriptor, + mappingFilePrefix='map_obs_{}'.format(self.fieldName), + method=config.get('seaIceObservations', + 'interpolationMethod')) + + (obsClimatologyFileName, obsRemappedFileName) = \ + get_observation_climatology_file_names( + config=config, fieldName=self.obsFieldName, + monthNames=self.season, componentName=self.componentName, + remapper=obsRemapper) + + if not os.path.exists(obsRemappedFileName): + + # load the 
observations the first time
+            seasonalClimatology = self._build_observational_dataset()
+            write_netcdf(seasonalClimatology, obsClimatologyFileName)
+
+            if obsRemapper.mappingFileName is None:
+                # no need to remap because the observations are on the
+                # comparison grid already
+                remappedClimatology = seasonalClimatology
             else:
-                plotProjection = 'spstere'
-
-            (colormapResult, colorbarLevelsResult) = setup_colormap(
-                config,
-                'climatologyMapSeaIceConcThick',
-                suffix='ConcResult{}'.format(season))
-            (colormapDifference, colorbarLevelsDifference) = setup_colormap(
-                config,
-                'climatologyMapSeaIceConcThick',
-                suffix='ConcDifference{}'.format(season))
-
-            referenceLongitude = config.getfloat(
-                'climatologyMapSeaIceConcThick',
-                'referenceLongitude{}'.format(hemisphere))
-            minimumLatitude = config.getfloat(
-                'climatologyMapSeaIceConcThick',
-                'minimumLatitude{}'.format(hemisphere))
-
-            # ice concentrations from NASATeam (or Bootstrap) algorithm
-            for obsName in ['NASATeam', 'Bootstrap']:
-                obsFieldName = 'AICE'
-
-                key = (months, obsName)
-                regriddedFileName = regriddedObsFileNames[key]
-
-                if buildObsClimatologies:
-                    obsFileName = obsFileNames[key]
-
-                    seasonalClimatology = xr.open_dataset(obsFileName)
-
-                    remappedClimatology = remap_and_write_climatology(
-                        config, seasonalClimatology, climatologyFileName,
-                        regriddedFileName, obsRemappers[key])
-
-                obsIceConcentration = remappedClimatology[obsFieldName].values
-
-                difference = iceConcentration - obsIceConcentration
-
-                title = '{} ({}, years {:04d}-{:04d})'.format(
-                    subtitle, months, startYear, endYear)
-                fileout = '{}/iceconc{}{}_{}_{}_years{:04d}-{:04d}.png'.format(
-                    self.plotsDirectory, obsName, hemisphere, mainRunName,
-                    months, startYear, endYear)
-                plot_polar_comparison(
-                    config,
-                    lonTarg,
-                    latTarg,
-                    iceConcentration,
-                    obsIceConcentration,
-                    difference,
-                    colormapResult,
-                    colorbarLevelsResult,
-                    colormapDifference,
-                    colorbarLevelsDifference,
-                    title=title,
-                    fileout=fileout,
-                    plotProjection=plotProjection,
-                    latmin=minimumLatitude,
-                    lon0=referenceLongitude,
-                    modelTitle=mainRunName,
-                    obsTitle='Observations (SSM/I {})'.format(obsName),
-                    diffTitle='Model-Observations',
-                    cbarlabel='fraction')
-
-    def _compute_and_plot_thickness(self):
+                remappedClimatology = \
+                    remap_and_write_climatology(
+                        config, seasonalClimatology, obsClimatologyFileName,
+                        obsRemappedFileName, obsRemapper)
+
+        else:
+
+            remappedClimatology = xr.open_dataset(obsRemappedFileName)
+
+        observations = remappedClimatology[self.obsFieldName].values
+        if self.maskValue is not None:
+            observations = ma.masked_values(observations, self.maskValue)
+
+        difference = modelOutput - observations
+
+        startYear = self.climatologyTask.startYear
+        endYear = self.climatologyTask.endYear
+
+        title = '{} ({}, years {:04d}-{:04d})'.format(
+            self.fieldNameInTitle, self.season, startYear, endYear)
+        fileout = '{}/{}_{}_{}_years{:04d}-{:04d}.png'.format(
+            self.plotsDirectory, self.outFileLabel, mainRunName,
+            self.season, startYear, endYear)
+        plot_polar_comparison(
+            config,
+            lonTarg,
+            latTarg,
+            modelOutput,
+            observations,
+            difference,
+            colormapResult,
+            colorbarLevelsResult,
+            colormapDifference,
+            colorbarLevelsDifference,
+            title=title,
+            fileout=fileout,
+            plotProjection=plotProjection,
+            latmin=minimumLatitude,
+            lon0=referenceLongitude,
+            modelTitle=mainRunName,
+            obsTitle=self.observationTitleLabel,
+            diffTitle='Model-Observations',
+            cbarlabel=self.unitsLabel)
+        # }}}
+
+
+class ClimatologyMapSeaIceConc(ClimatologyMapSeaIce):  # {{{
+    """
+    An 
analysis task for comparison of sea ice concentration against
+    observations
+
+    Authors
+    -------
+    Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani
+    """
+    def __init__(self, config, hemisphere, season, comparisonGridName,
+                 observationPrefix):
+        # {{{
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        hemisphere : {'NH', 'SH'}
+            The hemisphere to plot
+
+        season : str
+            A season (keys in ``shared.constants.monthDictionary``) to be
+            plotted
+
+        comparisonGridName : {'latlon', 'antarctic'}
+            The name of the comparison grid where model data and
+            observations are compared
+
+        observationPrefix : {'NASATeam', 'Bootstrap'}
+            A prefix describing the set of observations to use
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        self.fieldName = 'seaIceConc'
+        self.fieldNameInTitle = 'Sea ice concentration'
+        self.hemisphere = hemisphere
+        self.season = season
+        self.comparisonGridName = comparisonGridName
+        self.observationPrefix = observationPrefix
+
+        self.mpasFieldName = 'timeMonthly_avg_iceAreaCell'
+        self.iselValues = None
+
+        taskName, tags = self.get_task_name_and_tags()
+
+        tags.append(self.fieldName)
+
+        # call the constructor from the base class (AnalysisTask)
+        super(ClimatologyMapSeaIceConc, self).__init__(config=config,
+                                                       taskName=taskName,
+                                                       componentName='seaIce',
+                                                       tags=tags)
+
+        # }}}
+
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        # first, call setup_and_check from the base class
+        # (ClimatologyMapSeaIce), which will perform some common setup,
+        # including storing:
+        # self.runDirectory, self.historyDirectory, self.plotsDirectory,
+        # self.namelist, self.runStreams, self.historyStreams,
+        # self.calendar, self.namelistMap, self.streamMap, self.variableMap
+        super(ClimatologyMapSeaIceConc, self).setup_and_check()
+
+        self.obsFileName = build_config_full_path(
+            self.config, 'seaIceObservations',
+            'concentration{}{}_{}'.format(self.observationPrefix,
+                                          self.hemisphere,
+                                          self.season))
+
+        self.obsFieldName = 'AICE'
+
+        self.observationTitleLabel = \
+            'Observations (SSM/I {})'.format(self.observationPrefix)
+
+        self.outFileLabel = 'iceconc{}{}'.format(self.observationPrefix,
+                                                 self.hemisphere)
+        self.unitsLabel = 'fraction'
+
+        self.maskValue = None
+
+        # }}}
+
+    def _build_observational_dataset(self):  # {{{
         '''
-        computes seasonal climatologies and plots model results, observations
-        and biases in sea-ice thickness.
+        read in the data sets for observations, and possibly rename some
+        variables and dimensions

         Authors
         -------
-        Xylar Asay-Davis, Milena Veneziani
+        Xylar Asay-Davis
         '''
-        print '  Make ice thickness plots...'
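The two subclasses in this file define a small attribute contract that the shared `_do_plotting` method relies on. A hypothetical third subclass makes that contract explicit; the snow-depth field and the MPAS variable name below are invented for illustration, only the required attribute names come from this diff:

```python
from mpas_analysis.sea_ice.climatology_map import ClimatologyMapSeaIce


class ClimatologyMapSeaIceSnowDepth(ClimatologyMapSeaIce):
    # a sketch only: the attributes are what the base class expects,
    # but 'seaIceSnowDepth' and the MPAS variable name are made up
    def __init__(self, config, hemisphere, season, comparisonGridName,
                 observationPrefix=''):
        self.fieldName = 'seaIceSnowDepth'
        self.fieldNameInTitle = 'Snow depth on sea ice'
        self.hemisphere = hemisphere
        self.season = season
        self.comparisonGridName = comparisonGridName
        self.observationPrefix = observationPrefix

        self.mpasFieldName = 'timeMonthly_avg_snowVolumeCell'  # assumed name
        self.iselValues = None

        taskName, tags = self.get_task_name_and_tags()
        tags.append(self.fieldName)

        super(ClimatologyMapSeaIceSnowDepth, self).__init__(
            config=config, taskName=taskName, componentName='seaIce',
            tags=tags)
```

`setup_and_check` would additionally need to set `obsFileName`, `obsFieldName`, `observationTitleLabel`, `outFileLabel`, `unitsLabel` and `maskValue` before `_do_plotting` runs, and `_build_observational_dataset` must return the observational climatology as an xarray dataset.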
+ dsObs = xr.open_mfdataset(self.obsFileName) - config = self.config - calendar = self.calendar - ds = self.ds + return dsObs # }}} - subtitle = 'Ice thickness' + # }}} - plotsDirectory = build_config_full_path(config, 'output', - 'plotsSubdirectory') - mainRunName = config.get('runs', 'mainRunName') - startYear = config.getint('climatology', 'startYear') - endYear = config.getint('climatology', 'endYear') - overwriteMpasClimatology = config.getWithDefault( - 'climatology', 'overwriteMpasClimatology', False) - - overwriteObsClimatology = config.getWithDefault( - 'seaIceObservations', 'overwriteObsClimatology', False) - - obsFileNames = {} - regriddedObsFileNames = {} - obsRemappers = {} - - comparisonDescriptor = self.mpasRemapper.destinationDescriptor - - # build a list of regridded observations files - buildObsClimatologies = overwriteObsClimatology - for months in ['FM', 'ON']: - climFieldName = 'iceThickness' - for hemisphere in ['NH', 'SH']: - key = (months, hemisphere) - obsFileName = build_config_full_path( - config, 'seaIceObservations', - 'thickness{}_{}'.format(hemisphere, months)) - if not os.path.isfile(obsFileName): - raise OSError('Obs file {} not found.'.format( - obsFileName)) - - obsFieldName = '{}_{}'.format(climFieldName, hemisphere) - obsDescriptor = LatLonGridDescriptor() - obsDescriptor.read(fileName=obsFileName, latVarName='t_lat', - lonVarName='t_lon') - obsRemapper = get_remapper( - config=config, sourceDescriptor=obsDescriptor, - comparisonDescriptor=comparisonDescriptor, - mappingFileSection='seaIceObservations', - mappingFileOption='seaIceClimatologyMappingFile', - mappingFilePrefix='map_obs_seaIce', - method=config.get('seaIceObservations', - 'interpolationMethod')) - obsRemappers[key] = obsRemapper - - (climatologyFileName, regriddedFileName) = \ - get_observation_climatology_file_names( - config=config, fieldName=obsFieldName, - monthNames=months, componentName=self.componentName, - remapper=obsRemapper) - - obsFileNames[key] = obsFileName - regriddedObsFileNames[key] = regriddedFileName - - if not os.path.exists(regriddedFileName): - buildObsClimatologies = True - - for months in ['FM', 'ON']: - monthValues = constants.monthDictionary[months] - field = 'iceVolumeCell' - climFieldName = 'iceThickness' - - # interpolate the model results - mpasMeshName = self.mpasRemapper.sourceDescriptor.meshName - comparisonGridName = \ - self.mpasRemapper.destinationDescriptor.meshName - (climatologyFileName, climatologyPrefix, regriddedFileName) = \ - get_mpas_climatology_file_names( - config=config, - fieldName=climFieldName, - monthNames=months, - mpasMeshName=mpasMeshName, - comparisonGridName=comparisonGridName) - - if (overwriteMpasClimatology or - not os.path.exists(climatologyFileName)): - seasonalClimatology = cache_climatologies( - ds, monthValues, config, climatologyPrefix, calendar, - printProgress=True) - if seasonalClimatology is None: - # apparently, there was no data available to create the - # climatology - warnings.warn('no data to create sea ice thickness ' - 'climatology for {}'.format(months)) - continue - - remappedClimatology = remap_and_write_climatology( - config, seasonalClimatology, climatologyFileName, - regriddedFileName, self.mpasRemapper) - else: +class ClimatologyMapSeaIceThick(ClimatologyMapSeaIce): # {{{ + """ + An analysis task for comparison of sea ice thickness against observations + + Authors + ------- + Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + """ + def __init__(self, config, hemisphere, season, comparisonGridName, + 
observationPrefix=''):
+        # {{{
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : instance of MpasAnalysisConfigParser
+            Contains configuration options

-            remappedClimatology = xr.open_dataset(regriddedFileName)
+        hemisphere : {'NH', 'SH'}
+            The hemisphere to plot

-            iceThickness = remappedClimatology[field].values
-            iceThickness = ma.masked_values(iceThickness, 0)
-            lon = remappedClimatology['lon'].values
-            lat = remappedClimatology['lat'].values
+        season : str
+            A season (keys in ``shared.constants.monthDictionary``) to be
+            plotted

-            lonTarg, latTarg = np.meshgrid(lon, lat)
+        comparisonGridName : {'latlon', 'antarctic'}
+            The name of the comparison grid where model data and
+            observations are compared

-            for hemisphere in ['NH', 'SH']:
-                obsFieldName = 'HI'
+        observationPrefix : {''}
+            A prefix describing the set of observations to use (empty for
+            this task)
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+        self.fieldName = 'seaIceThick'
+        self.fieldNameInTitle = 'Sea ice thickness'
+        self.hemisphere = hemisphere
+        self.season = season
+        self.comparisonGridName = comparisonGridName
+        self.observationPrefix = observationPrefix

-                (colormapResult, colorbarLevelsResult) = setup_colormap(
-                    config,
-                    'climatologyMapSeaIceConcThick',
-                    suffix='ThickResult{}'.format(hemisphere))
-                (colormapDifference, colorbarLevelsDifference) = \
-                    setup_colormap(config, 'climatologyMapSeaIceConcThick',
-                                   suffix='ThickDifference{}'.format(
-                                       hemisphere))
+        self.mpasFieldName = 'timeMonthly_avg_iceVolumeCell'
+        self.iselValues = None

-                referenceLongitude = config.getfloat(
-                    'climatologyMapSeaIceConcThick',
-                    'referenceLongitude{}'.format(hemisphere))
+        taskName, tags = self.get_task_name_and_tags()

-                minimumLatitude = config.getfloat(
-                    'climatologyMapSeaIceConcThick',
-                    'minimumLatitude{}'.format(hemisphere))
+        tags.append(self.fieldName)

-                # now the observations
-                key = (months, hemisphere)
-                regriddedFileName = regriddedObsFileNames[key]
+        # call the constructor from the base class (AnalysisTask)
+        super(ClimatologyMapSeaIceThick, self).__init__(config=config,
+                                                        taskName=taskName,
+                                                        componentName='seaIce',
+                                                        tags=tags)

-                if buildObsClimatologies:
-                    obsFileName = obsFileNames[key]
+        # }}}

-                    seasonalClimatology = xr.open_dataset(obsFileName)
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
- obsIceThickness = remappedClimatology[obsFieldName].values + Authors + ------- + Xylar Asay-Davis + """ + # first, call setup_and_check from the base class + # (ClimatologyMapSeaIce), which will perform some common setup, + # including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar, self.namelistMap, self.streamMap, self.variableMap + super(ClimatologyMapSeaIceThick, self).setup_and_check() - # Mask thickness fields - obsIceThickness = ma.masked_values(obsIceThickness, 0) - if hemisphere == 'NH': - # Obs thickness should be nan above 86 (ICESat data) - obsIceThickness[latTarg > 86] = ma.masked - plotProjection = 'npstere' - else: - plotProjection = 'spstere' - - difference = iceThickness - obsIceThickness - - title = '{} ({}, years {:04d}-{:04d})'.format(subtitle, months, - startYear, - endYear) - fileout = '{}/icethick{}_{}_{}_years{:04d}-{:04d}.png'.format( - plotsDirectory, hemisphere, mainRunName, months, startYear, - endYear) - plot_polar_comparison( - config, - lonTarg, - latTarg, - iceThickness, - obsIceThickness, - difference, - colormapResult, - colorbarLevelsResult, - colormapDifference, - colorbarLevelsDifference, - title=title, - fileout=fileout, - plotProjection=plotProjection, - latmin=minimumLatitude, - lon0=referenceLongitude, - modelTitle=mainRunName, - obsTitle='Observations (ICESat)', - diffTitle='Model-Observations', - cbarlabel='m') + self.obsFileName = build_config_full_path( + self.config, 'seaIceObservations', + 'thickness{}_{}'.format(self.hemisphere, self.season)) + + self.obsFieldName = 'HI' + + self.observationTitleLabel = 'Observations (ICESat)' + + self.outFileLabel = 'icethick{}'.format(self.hemisphere) + + self.unitsLabel = 'm' + + self.maskValue = 0 # }}} -# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python \ No newline at end of file + def _build_observational_dataset(self): # {{{ + ''' + read in the data sets for observations, and possibly rename some + variables and dimensions + + Authors + ------- + Xylar Asay-Davis + ''' + + dsObs = xr.open_mfdataset(self.obsFileName) + + return dsObs # }}} + + # }}} + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/sea_ice/sea_ice_analysis_task.py b/mpas_analysis/sea_ice/sea_ice_analysis_task.py index 53897bb6b..4a5aed172 100644 --- a/mpas_analysis/sea_ice/sea_ice_analysis_task.py +++ b/mpas_analysis/sea_ice/sea_ice_analysis_task.py @@ -10,6 +10,15 @@ class SeaIceAnalysisTask(AnalysisTask): # {{{ ''' A base class for sea-ice analysis tasks that implements common setup + Attributes + ---------- + simulationStartTime : str + The start date of the simulation parsed from a restart file identified + by the contents of ``streams``. 
+ + restartFileName : str + Name of a restart file used for mesh information + Authors ------- Xylar Asay-Davis @@ -65,7 +74,6 @@ def setup_and_check(self): # {{{ # }}} - # }}} # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index 866988962..c14e84a8c 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -4,13 +4,15 @@ Authors ------- Xylar Asay-Davis - ''' import warnings +import numpy + +from .timekeeping.utility import days_to_datetime, string_to_days_since_date -from .io import NameList, StreamsFile -from .io.utility import build_config_full_path, make_directories +from .io import NameList, StreamsFile, build_config_full_path, \ + make_directories from .variable_namelist_stream_maps.ocean_maps import oceanNamelistMap, \ oceanStreamMap, oceanVariableMap @@ -23,12 +25,94 @@ class AnalysisTask(object): # {{{ ''' The base class for analysis tasks. + Attributes + ---------- + config : instance of MpasAnalysisConfigParser + Contains configuration options + + taskName : str + The name of the task, typically the same as the class name except + starting with lowercase (e.g. 'myTask' for class 'MyTask') + + componentName : {'ocean', 'seaIce'} + The name of the component (same as the folder where the task + resides) + + tags : list of str + Tags used to describe the task (e.g. 'timeSeries', 'climatology', + horizontalMap', 'index', 'transect'). These are used to determine + which tasks are generated (e.g. 'all_transect' or 'no_climatology' + in the 'generate' flags) + + prerequisiteTasks : list of str, optional + Names of tasks that must complete before this task can run. + Typically, this will include one or more tasks of the form + ``cacheTimes``, e.g. + ``cacheOceanTimeSeriesStatsTimes`` + + runDirectory : str + the base input directory for namelists, streams files and restart files + + historyDirectory : str + the base input directory for history files + + plotsDirectory : str + the directory for writing plots (which is also created if it doesn't + exist) + + namelist : ``NameList`` object + the namelist reader + + runStreams : ``StreamsFile`` object + the streams file reader for streams in the run directory (e.g. restart + files) + + historyStreams : ``StreamsFile`` object + the streams file reader for streams in the history directory (most + streams other than restart files) + + calendar : {'gregorian', 'gregorian_noleap'} + the name of the calendar + + namelistMap : dict + A map between names of namelist options used by MPAS-Analysis and + those in various MPAS versions + + streamMap : dict + a map between names of streams used by MPAS-Analysis and those in + various MPAS versions + + variableMap : dict + a map between names of variables within streams used by MPAS-Analysis + and those in various MPAS versions + Authors ------- Xylar Asay-Davis ''' - def __init__(self, config, taskName, componentName, tags=[]): # {{{ + @classmethod + def create_tasks(cls, config): # {{{ + """ + Construct a single analysis task. This function can be replaced in + child classes so that a list of 2 or more tasks is returned, allowing + a task to also provide its own prerequisite tasks. 
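To make the mechanism above concrete, here is a hedged sketch of how a child class might override `create_tasks` to bundle a prerequisite with itself. All class and task names below are invented; `add_prerequisite_tasks` is the helper added later in this diff:

```python
from mpas_analysis.shared.analysis_task import AnalysisTask


class MyCacheTask(AnalysisTask):
    # hypothetical prerequisite that caches something expensive
    def __init__(self, config):
        super(MyCacheTask, self).__init__(
            config=config, taskName='myCacheTask', componentName='ocean',
            tags=['timeSeries'])


class MyPlotTask(AnalysisTask):
    def __init__(self, config):
        super(MyPlotTask, self).__init__(
            config=config, taskName='myPlotTask', componentName='ocean',
            tags=['timeSeries'])

    @classmethod
    def create_tasks(cls, config):
        # return the prerequisite together with the plotting task; the
        # framework then runs myCacheTask to completion before myPlotTask
        cacheTask = MyCacheTask(config)
        plotTask = cls(config)
        plotTask.add_prerequisite_tasks([cacheTask.taskName])
        return [cacheTask, plotTask]
```

This is the same wiring that `ClimatologyMapSeaIce.create_tasks` performs earlier in the diff, just reduced to one prerequisite and one dependent task.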
+
+        Parameters
+        ----------
+        config : MpasAnalysisConfigParser object
+            Contains configuration options
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        tasks = [cls(config=config)]
+        return tasks  # }}}
+
+    def __init__(self, config, taskName, componentName, tags=[],
+                 prerequisiteTasks=None):  # {{{
         '''
         Construct the analysis task.

@@ -55,6 +139,12 @@ def __init__(self, config, taskName, componentName, tags=[]):  # {{{
             which tasks are generated (e.g. 'all_transect' or 'no_climatology'
             in the 'generate' flags)

+        prerequisiteTasks : list of str, optional
+            Names of tasks that must complete before this task can run.
+            Typically, this will include one or more tasks of the form
+            ``cacheTimes``, e.g.
+            ``cacheOceanTimeSeriesStatsTimes``
+
         Authors
         -------
         Xylar Asay-Davis
@@ -62,33 +152,14 @@
         self.config = config
         self.taskName = taskName
         self.componentName = componentName
-        self.tags = tags  # }}}
+        self.tags = tags
+        self.prerequisiteTasks = prerequisiteTasks  # }}}

     def setup_and_check(self):  # {{{
         '''
         Perform steps to set up the analysis (e.g. reading namelists and
         streams files).

-        After this call, the following member variables are set:
-            self.runDirectory : the base input directory for namelists, streams
-                files and restart files
-            self.historyDirectory : the base input directory for history files
-            self.plotsDirectory : the directory for writing plots (which is
-                also created if it doesn't exist)
-            self.namelist : the namelist reader
-            self.runStreams : the streams file reader for streams in the run
-                directory (e.g. restart files)
-            self.historyStreams : the streams file reader for streams in the
-                history directory (most streams other than restart files)
-            self.calendar : the name of the calendar ('gregorian' or
-                'gregoraian_noleap')
-            self.namelistMap : a map between names of namelist options used by
-                MPAS-Analysis and those in various MPAS versions
-            self.streamMap : a map between names of streams used by
-                MPAS-Analysis and those in various MPAS versions
-            self.variableMap : a map between names of variables within streams
-                used by MPAS-Analysis and those in various MPAS versions
-
         Individual tasks (children classes of this base class) should first
         call this method to perform basic setup, then, check whether the
         configuration is correct for a given analysis and perform additional,
@@ -96,6 +167,51 @@
         necessary observations and other data files are found, then,
         determine the list of files to be read when the analysis is run.

+        If the task includes ``climatology``, ``timeSeries`` or ``index`` tags,
+        ``startDate`` and ``endDate`` config options are computed from
+        ``startYear`` and ``endYear`` config options.
+
+        After this call, the following attributes are set.
+
+        Attributes
+        ----------
+        runDirectory : str
+            the base input directory for namelists, streams files and restart
+            files
+
+        historyDirectory : str
+            the base input directory for history files
+
+        plotsDirectory : str
+            the directory for writing plots (which is also created if it
+            doesn't exist)
+
+        namelist : ``NameList`` object
+            the namelist reader
+
+        runStreams : ``StreamsFile`` object
+            the streams file reader for streams in the run directory (e.g. 
restart files)
+
+        historyStreams : ``StreamsFile`` object
+            the streams file reader for streams in the history directory (most
+            streams other than restart files)
+
+        calendar : {'gregorian', 'gregorian_noleap'}
+            the name of the calendar
+
+        namelistMap : dict
+            A map between names of namelist options used by MPAS-Analysis and
+            those in various MPAS versions
+
+        streamMap : dict
+            a map between names of streams used by MPAS-Analysis and those in
+            various MPAS versions
+
+        variableMap : dict
+            a map between names of variables within streams used by
+            MPAS-Analysis and those in various MPAS versions
+
         Authors
         -------
         Xylar Asay-Davis
@@ -312,6 +428,161 @@ def set_start_end_date(self, section):  # {{{
                                  self.config.getint(section, 'endYear'))
         self.config.set(section, 'endDate', endDate)  # }}}

+    def update_start_end_date(self, section, streamName):  # {{{
+        '''
+        Update the start and end dates (and years) based on the times found
+        in the given stream.  Cache the times if they are not already cached.
+
+        Parameters
+        ----------
+        section : str
+            The name of a section in the config file containing ``startYear``
+            and ``endYear`` options.  ``section`` is typically one of
+            ``climatology``, ``timeSeries`` or ``index``
+
+        streamName : str
+            The name of a stream from which to read (and cache) the times
+
+        Returns
+        -------
+        changed : bool
+            Whether the start and end dates were updated.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        startDate = self.config.get(section, 'startDate')
+        endDate = self.config.get(section, 'endDate')
+        startDate = string_to_days_since_date(dateString=startDate,
+                                              calendar=self.calendar)
+        endDate = string_to_days_since_date(dateString=endDate,
+                                            calendar=self.calendar)
+
+        inFileNames = self.get_input_file_names(
+            streamName, startAndEndDateSection=section)
+
+        fullTimeCache = self.cache_multifile_dataset_times(
+            inFileNames, streamName, timeVariableName='Time')
+
+        # find only those cached times between startDate and endDate
+        times = []
+        for fileName in fullTimeCache:
+            localTimes = fullTimeCache[fileName]['times']
+            mask = numpy.logical_and(localTimes >= startDate,
+                                     localTimes < endDate)
+            if numpy.count_nonzero(mask) == 0:
+                continue
+
+            times.extend(list(localTimes[mask]))
+
+        requestedStartYear = self.config.getint(section, 'startYear')
+        requestedEndYear = self.config.getint(section, 'endYear')
+
+        startYear = days_to_datetime(numpy.amin(times),
+                                     calendar=self.calendar).year
+        endYear = days_to_datetime(numpy.amax(times),
+                                   calendar=self.calendar).year
+        changed = False
+        if startYear != requestedStartYear or endYear != requestedEndYear:
+            message = "{} start and/or end year different from " \
+                      "requested\n" \
+                      "requested: {:04d}-{:04d}\n" \
+                      "actual:    {:04d}-{:04d}\n".format(section,
+                                                          requestedStartYear,
+                                                          requestedEndYear,
+                                                          startYear,
+                                                          endYear)
+            warnings.warn(message)
+            self.config.set(section, 'startYear', str(startYear))
+            self.config.set(section, 'endYear', str(endYear))
+
+            startDate = '{:04d}-01-01_00:00:00'.format(startYear)
+            self.config.set(section, 'startDate', startDate)
+            endDate = '{:04d}-12-31_23:59:59'.format(endYear)
+            self.config.set(section, 'endDate', endDate)
+
+            changed = True
+
+        return changed  # }}}
+
+    def get_input_file_names(self, streamName,
+                             startDate=None, endDate=None,
+                             startAndEndDateSection=None):  # {{{
+        '''
+        Get a list of input files corresponding to the given stream and
+        optionally bounded by the start and end dates found in the given
+        section of the config file. 
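The clamping behavior of `update_start_end_date` above, reduced to plain values: if the requested years are not fully covered by the files on disk, the config options are overwritten and the method reports the change. A sketch with hypothetical numbers:

```python
# hypothetical: the config requests years 11-20 but output only covers 11-15
requestedStartYear, requestedEndYear = 11, 20
startYear, endYear = 11, 15   # derived from the cached stream times

changed = (startYear != requestedStartYear or endYear != requestedEndYear)
if changed:
    # in the real method these go back into the config via config.set()
    startDate = '{:04d}-01-01_00:00:00'.format(startYear)
    endDate = '{:04d}-12-31_23:59:59'.format(endYear)

print(changed)     # True
print(startDate)   # 0011-01-01_00:00:00
print(endDate)     # 0015-12-31_23:59:59
```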
+
+        Parameters
+        ----------
+        streamName : str
+            The name of a stream to check.  If ``self.streamMap`` is defined,
+            the streamName will be mapped to the corresponding name in the
+            streams file
+
+        startDate, endDate : str, optional
+            start and end date to use in determining which files to include in
+            the list
+
+        startAndEndDateSection : str, optional
+            If ``startDate`` and ``endDate`` arguments are not supplied, the
+            name of a section in the config file containing ``startDate`` and
+            ``endDate`` options to use instead.  ``startAndEndDateSection`` is
+            typically one of ``climatology``, ``timeSeries`` or ``index``.
+
+        Raises
+        ------
+        RuntimeError
+            If no files are found in the desired date range.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        if startDate is None and endDate is None and \
+                startAndEndDateSection is not None:
+            startDate = self.config.get(startAndEndDateSection, 'startDate')
+            endDate = self.config.get(startAndEndDateSection, 'endDate')
+
+        if self.streamMap is not None:
+            streamName = \
+                self.historyStreams.find_stream(self.streamMap[streamName])
+        inputFileNames = self.historyStreams.readpath(streamName,
+                                                      startDate=startDate,
+                                                      endDate=endDate,
+                                                      calendar=self.calendar)
+
+        if len(inputFileNames) == 0:
+            raise RuntimeError('No input files could be found in stream {} '
+                               'between {} and {}'.format(streamName,
                                                          startDate, endDate))
+        return inputFileNames  # }}}
+
+    def add_prerequisite_tasks(self, prerequisiteTasks):  # {{{
+        '''
+        Add one or more task names to the list of prerequisite tasks
+
+        Parameters
+        ----------
+        prerequisiteTasks : list of str
+            The names of the tasks to add as prerequisites
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        if self.prerequisiteTasks is None:
+            self.prerequisiteTasks = prerequisiteTasks
+        else:
+            for task in prerequisiteTasks:
+                if task not in self.prerequisiteTasks:
+                    self.prerequisiteTasks.append(task)
+
+        # }}}

 # }}}
diff --git a/mpas_analysis/shared/climatology/__init__.py b/mpas_analysis/shared/climatology/__init__.py
index 2442345da..7fe92abb5 100644
--- a/mpas_analysis/shared/climatology/__init__.py
+++ b/mpas_analysis/shared/climatology/__init__.py
@@ -1,5 +1,10 @@
-from .climatology import get_lat_lon_comparison_descriptor, get_remapper, \
-    get_mpas_climatology_file_names, get_observation_climatology_file_names, \
-    compute_monthly_climatology, compute_climatology, cache_climatologies, \
-    update_start_end_year, add_years_months_days_in_month, \
+from .climatology import get_remapper, \
+    get_observation_climatology_file_names, \
+    compute_monthly_climatology, compute_climatology, \
+    add_years_months_days_in_month, \
     remap_and_write_climatology
+
+from .comparison_descriptors import get_comparison_descriptor, \
+    get_antarctic_stereographic_projection
+
+from .mpas_climatology_task import MpasClimatology
diff --git a/mpas_analysis/shared/climatology/climatology.py b/mpas_analysis/shared/climatology/climatology.py
index c15364722..c846940a5 100644
--- a/mpas_analysis/shared/climatology/climatology.py
+++ b/mpas_analysis/shared/climatology/climatology.py
@@ -4,10 +4,6 @@
 Authors
 -------
 Xylar Asay-Davis
-
-Last Modified
--------------
-04/13/2017
 """

 import xarray as xr
@@ -19,58 +15,16 @@
 from ..timekeeping.utility import days_to_datetime

-from ..io.utility import build_config_full_path, make_directories, fingerprint_generator
+from ..io.utility import build_config_full_path, make_directories, \
+    fingerprint_generator
+from ..io import write_netcdf

 from ..interpolation import Remapper
 from ..grid import 
LatLonGridDescriptor, ProjectionGridDescriptor -def get_lat_lon_comparison_descriptor(config): # {{{ - """ - Get a descriptor of the lat/lon comparison grid, used for remapping and - determining the grid name - - Parameters - ---------- - config : instance of ``MpasAnalysisConfigParser`` - Contains configuration options - - Returns - ------- - descriptor : ``LatLonGridDescriptor`` object - A descriptor of the lat/lon grid - - Authors - ------- - Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 - """ - climSection = 'climatology' - - comparisonLatRes = config.getWithDefault(climSection, - 'comparisonLatResolution', - constants.dLatitude) - comparisonLonRes = config.getWithDefault(climSection, - 'comparisonLatResolution', - constants.dLongitude) - - nLat = int((constants.latmax-constants.latmin)/comparisonLatRes)+1 - nLon = int((constants.lonmax-constants.lonmin)/comparisonLonRes)+1 - lat = numpy.linspace(constants.latmin, constants.latmax, nLat) - lon = numpy.linspace(constants.lonmin, constants.lonmax, nLon) - - descriptor = LatLonGridDescriptor() - descriptor.create(lat, lon, units='degrees') - - return descriptor # }}} - - def get_remapper(config, sourceDescriptor, comparisonDescriptor, - mappingFileSection, mappingFileOption, mappingFilePrefix, - method): # {{{ + mappingFilePrefix, method): # {{{ """ Given config options and descriptions of the source and comparison grids, returns a ``Remapper`` object that can be used to remap from source files @@ -90,11 +44,6 @@ def get_remapper(config, sourceDescriptor, comparisonDescriptor, comparisonDescriptor : ``MeshDescriptor`` subclass object A description of the comparison grid - mappingFileSection, mappingFileOption : str - Section and option in ``config`` where the name of the mapping file - may be given, or where it will be stored if a new mapping file is - created - mappingFilePrefix : str A prefix to be prepended to the mapping file name @@ -110,35 +59,38 @@ def get_remapper(config, sourceDescriptor, comparisonDescriptor, Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 """ - if config.has_option(mappingFileSection, mappingFileOption): - # a mapping file was supplied, so we'll use that name - mappingFileName = config.get(mappingFileSection, mappingFileOption) - else: - if _matches_comparison(sourceDescriptor, comparisonDescriptor): - # no need to remap - mappingFileName = None + mappingFileName = None - else: - # we need to build the path to the mapping file and an appropriate - # file name - mappingSubdirectory = build_config_full_path(config, 'output', - 'mappingSubdirectory') + if not _matches_comparison(sourceDescriptor, comparisonDescriptor): + # we need to remap because the grids don't match - make_directories(mappingSubdirectory) - - mappingFileName = '{}/{}_{}_to_{}_{}.nc'.format( - mappingSubdirectory, mappingFilePrefix, - sourceDescriptor.meshName, comparisonDescriptor.meshName, + mappingBaseName = '{}_{}_to_{}_{}.nc'.format( + mappingFilePrefix, + sourceDescriptor.meshName, + comparisonDescriptor.meshName, method) - config.set(mappingFileSection, mappingFileOption, - mappingFileName) + if config.has_option('input', 'mappingDirectory'): + # a mapping directory was supplied, so we'll see if there's + # a mapping file there that we can use + mappingSubdirectory = config.get('input', 'mappingDirectory') + mappingFileName = '{}/{}'.format(mappingSubdirectory, + mappingBaseName) + if not os.path.exists(mappingFileName): + # nope, looks like we still need to make a mapping file + 
mappingFileName = None + + if mappingFileName is None: + # we don't have a mapping file yet, so get ready to create one + # in the output subfolder if needed + mappingSubdirectory = \ + build_config_full_path(config, 'output', + 'mappingSubdirectory') + make_directories(mappingSubdirectory) + mappingFileName = '{}/{}'.format(mappingSubdirectory, + mappingBaseName) remapper = Remapper(sourceDescriptor, comparisonDescriptor, mappingFileName) @@ -148,94 +100,12 @@ def get_remapper(config, sourceDescriptor, comparisonDescriptor, return remapper # }}} -def get_mpas_climatology_file_names(config, fieldName, monthNames, - mpasMeshName, - comparisonGridName=None): # {{{ - """ - Given config options, the name of a field and a string identifying the - months in a seasonal climatology, returns the full path for MPAS - climatology files before and after regridding. - - Parameters - ---------- - config : instance of MpasAnalysisConfigParser - Contains configuration options - - fieldName : str - Name of the field being mapped, used as a prefix for the climatology - file name. - - monthNames : str - A string identifying the months in a seasonal climatology (e.g. 'JFM') - - mpasMeshName : str - The name of the MPAS mesh - - comparisonGridName : str, optional - The name of the comparison grid (if any) - - Returns - ------- - climatologyFileName : str - The absolute path to a file where the climatology should be stored - before regridding. - - climatologyPrefix : str - The prfix including absolute path for climatology cache files before - regridding. - - regriddedFileName : str - The absolute path to a file where the climatology should be stored - after regridding if ``comparisonGridName`` is supplied - - Authors - ------- - Xylar Asay-Davis - - Last Modified - ------------- - 05/05/2017 - """ - - climSection = 'climatology' - startYear = config.getint(climSection, 'startYear') - endYear = config.getint(climSection, 'endYear') - - climatologyDirectory = build_config_full_path( - config, 'output', 'mpasClimatologySubdirectory') - - make_directories(climatologyDirectory) - - climatologyPrefix = '{}/{}_{}_{}'.format(climatologyDirectory, fieldName, - mpasMeshName, monthNames) - - yearString, fileSuffix = _get_year_string(startYear, endYear) - climatologyFileName = '{}_{}.nc'.format(climatologyPrefix, fileSuffix) - - if comparisonGridName is None: - return (climatologyFileName, climatologyPrefix) - else: - regriddedDirectory = build_config_full_path( - config, 'output', 'mpasRegriddedClimSubdirectory') - - make_directories(regriddedDirectory) - - regriddedFileName = '{}/{}_{}_to_{}_{}_{}.nc'.format( - regriddedDirectory, fieldName, mpasMeshName, - comparisonGridName, monthNames, fileSuffix) - - return (climatologyFileName, climatologyPrefix, - regriddedFileName) - - # }}} - - def get_observation_climatology_file_names(config, fieldName, monthNames, componentName, remapper): # {{{ """ Given config options, the name of a field and a string identifying the months in a seasonal climatology, returns the full path for observation - climatology files before and after regridding. + climatology files before and after remapping. Parameters ---------- @@ -257,19 +127,15 @@ def get_observation_climatology_file_names(config, fieldName, monthNames, ------- climatologyFileName : str The absolute path to a file where the climatology should be stored - before regridding. + before remapping. - regriddedFileName : str + remappedFileName : str The absolute path to a file where the climatology should be stored - after regridding. 
+ after remapping. Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 03/03/2017 """ obsSection = '{}Observations'.format(componentName) @@ -279,9 +145,9 @@ def get_observation_climatology_file_names(config, fieldName, monthNames, relativePathOption='climatologySubdirectory', relativePathSection=obsSection) - regriddedDirectory = build_config_full_path( + remappedDirectory = build_config_full_path( config=config, section='output', - relativePathOption='regriddedClimSubdirectory', + relativePathOption='remappedClimSubdirectory', relativePathSection=obsSection) obsGridName = remapper.sourceDescriptor.meshName @@ -289,17 +155,17 @@ def get_observation_climatology_file_names(config, fieldName, monthNames, climatologyFileName = '{}/{}_{}_{}.nc'.format( climatologyDirectory, fieldName, obsGridName, monthNames) - regriddedFileName = '{}/{}_{}_to_{}_{}.nc'.format( - regriddedDirectory, fieldName, obsGridName, comparisonGridName, + remappedFileName = '{}/{}_{}_to_{}_{}.nc'.format( + remappedDirectory, fieldName, obsGridName, comparisonGridName, monthNames) make_directories(climatologyDirectory) if not _matches_comparison(remapper.sourceDescriptor, remapper.destinationDescriptor): - make_directories(regriddedDirectory) + make_directories(remappedDirectory) - return (climatologyFileName, regriddedFileName) # }}} + return (climatologyFileName, remappedFileName) # }}} def compute_monthly_climatology(ds, calendar=None, maskVaries=True): # {{{ @@ -337,10 +203,6 @@ def compute_monthly_climatology(ds, calendar=None, maskVaries=True): # {{{ Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/08/2017 """ def compute_one_month_climatology(ds): @@ -395,10 +257,6 @@ def compute_climatology(ds, monthValues, calendar=None, Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/08/2017 """ ds = add_years_months_days_in_month(ds, calendar) @@ -415,143 +273,6 @@ def compute_climatology(ds, monthValues, calendar=None, return climatology # }}} -def cache_climatologies(ds, monthValues, config, cachePrefix, calendar, - printProgress=False): # {{{ - ''' - Cache NetCDF files for each year of an annual climatology, and then use - the cached files to compute a climatology for the full range of years. - The start and end years of the climatology are taken from ``config``, and - are updated in ``config`` if the data set ``ds`` doesn't contain this - full range. - - Note: only works with climatologies where the mask (locations of ``NaN`` - values) doesn't vary with time. - - Parameters - ---------- - ds : ``xarray.Dataset`` or ``xarray.DataArray`` object - A data set with a ``Time`` coordinate expressed as days since - 0001-01-01 - - monthValues : int or array-like of ints - A single month or an array of months to be averaged together - - config : instance of MpasAnalysisConfigParser - Contains configuration options - - cachePrefix : str - The file prefix (including path) to which the year (or years) will be - appended as cache files are stored - - calendar : ``{'gregorian', 'gregorian_noleap'}`` - The name of one of the calendars supported by MPAS cores, used to - determine ``year`` and ``month`` from ``Time`` coordinate - - printProgress: bool, optional - Whether progress messages should be printed as the climatology is - computed - - Returns - ------- - climatology : object of same type as ``ds`` - A data set without the ``'Time'`` coordinate containing the mean - of ds over all months in monthValues, weighted by the number of days - in each month. 
- - Authors - ------- - Xylar Asay-Davis - - Last Modified - ------------- - 04/11/2017 - ''' - startYearClimo = config.getint('climatology', 'startYear') - endYearClimo = config.getint('climatology', 'endYear') - yearsPerCacheFile = config.getint('climatology', 'yearsPerCacheFile') - - if printProgress: - print ' Computing and caching climatologies covering {}-year ' \ - 'spans...'.format(yearsPerCacheFile) - - ds = add_years_months_days_in_month(ds, calendar) - - cacheInfo, cacheIndices = _setup_climatology_caching(ds, startYearClimo, - endYearClimo, - yearsPerCacheFile, - cachePrefix, - monthValues) - - ds = ds.copy() - ds.coords['cacheIndices'] = ('Time', cacheIndices) - - # compute and store each cache file with interval yearsPerCacheFile - _cache_individual_climatologies(ds, cacheInfo, printProgress, - yearsPerCacheFile, monthValues, - calendar) - - # compute the aggregate climatology - climatology = _cache_aggregated_climatology(startYearClimo, endYearClimo, - cachePrefix, printProgress, - monthValues, cacheInfo) - - return climatology # }}} - - -def update_start_end_year(ds, config, calendar): # {{{ - """ - Given a monthly climatology, compute a seasonal climatology weighted by - the number of days in each month (on the no-leap-year calendar). - - Parameters - ---------- - ds : instance of xarray.Dataset - A data set from which start and end years will be determined - - config : instance of MpasAnalysisConfigParser - Contains configuration options - - calendar : {'gregorian', 'gregorian_noleap'} - The name of one of the calendars supported by MPAS cores - - Returns - ------- - changed : bool - Whether the start and end years were changed - - startYear, endYear : int - The start and end years of the data set - - Authors - ------- - Xylar Asay-Davis - - Last Modified - ------------- - 03/25/2017 - """ - requestedStartYear = config.getint('climatology', 'startYear') - requestedEndYear = config.getint('climatology', 'endYear') - - startYear = days_to_datetime(ds.Time.min().values, calendar=calendar).year - endYear = days_to_datetime(ds.Time.max().values, calendar=calendar).year - changed = False - if startYear != requestedStartYear or endYear != requestedEndYear: - message = "climatology start and/or end year different from " \ - "requested\n" \ - "requestd: {:04d}-{:04d}\n" \ - "actual: {:04d}-{:04d}\n".format(requestedStartYear, - requestedEndYear, - startYear, - endYear) - warnings.warn(message) - config.set('climatology', 'startYear', str(startYear)) - config.set('climatology', 'endYear', str(endYear)) - changed = True - - return changed, startYear, endYear # }}} - - def add_years_months_days_in_month(ds, calendar=None): # {{{ ''' Add ``year``, ``month`` and ``daysInMonth`` as data arrays in ``ds``. @@ -579,10 +300,6 @@ def add_years_months_days_in_month(ds, calendar=None): # {{{ Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/08/2017 ''' if ('year' in ds.coords and 'month' in ds.coords and @@ -623,17 +340,17 @@ def add_years_months_days_in_month(ds, calendar=None): # {{{ def remap_and_write_climatology(config, climatologyDataSet, - climatologyFileName, regriddedFileName, + climatologyFileName, remappedFileName, remapper): # {{{ """ Given a field in a climatology data set, use the ``remapper`` to regrid horizontal dimensions of all fields, write the results to an output file, - and return the regridded data set. + and return the remapped data set. 
- Note that ``climatologyFileName`` and ``regriddedFileName`` will be + Note that ``climatologyFileName`` and ``remappedFileName`` will be overwritten if they exist, so if this behavior is not desired, the calling code should skip this call if the files exist and simply load the contents - of ``regriddedFileName``. + of ``remappedFileName``. Parameters ---------- @@ -648,10 +365,10 @@ def remap_and_write_climatology(config, climatologyDataSet, climatologyFileName : str The name of the output file to which the data set should be written - before regridding (if using ncremap). + before remapping (if using ncremap). - regriddedFileName : str - The name of the output file to which the regridded data set should + remappedFileName : str + The name of the output file to which the remapped data set should be written. remapper : ``Remapper`` object @@ -666,31 +383,33 @@ def remap_and_write_climatology(config, climatologyDataSet, Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 """ useNcremap = config.getboolean('climatology', 'useNcremap') + if (isinstance(remapper.sourceDescriptor, ProjectionGridDescriptor) or + isinstance(remapper.destinationDescriptor, + ProjectionGridDescriptor)): + # ncremap doesn't support projection grids + useNcremap = False + if remapper.mappingFileName is None: # no remapping is needed remappedClimatology = climatologyDataSet else: if useNcremap: if not os.path.exists(climatologyFileName): - climatologyDataSet.to_netcdf(climatologyFileName) + write_netcdf(climatologyDataSet, climatologyFileName) remapper.remap_file(inFileName=climatologyFileName, - outFileName=regriddedFileName, + outFileName=remappedFileName, overwrite=True) - remappedClimatology = xr.open_dataset(regriddedFileName) + remappedClimatology = xr.open_dataset(remappedFileName) else: renormalizationThreshold = config.getfloat( 'climatology', 'renormalizationThreshold') remappedClimatology = remapper.remap(climatologyDataSet, renormalizationThreshold) - remappedClimatology.to_netcdf(regriddedFileName) + write_netcdf(remappedClimatology, remappedFileName) return remappedClimatology # }}} @@ -703,10 +422,6 @@ def _compute_masked_mean(ds, maskVaries): # {{{ Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/08/2017 ''' def ds_to_weights(ds): # make an identical data set to ds but replacing all data arrays with @@ -789,10 +504,6 @@ def _setup_climatology_caching(ds, startYearClimo, endYearClimo, Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/08/2017 ''' cacheInfo = [] @@ -856,10 +567,6 @@ def _cache_individual_climatologies(ds, cacheInfo, printProgress, Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/19/2017 ''' for cacheIndex, info in enumerate(cacheInfo): @@ -882,7 +589,7 @@ def _cache_individual_climatologies(ds, cacheInfo, printProgress, climatology.attrs['totalMonths'] = monthCount climatology.attrs['fingerprintClimo'] = fingerprint_generator() - climatology.to_netcdf(outputFileClimo) + write_netcdf(climatology, outputFileClimo) climatology.close() # }}} @@ -897,10 +604,6 @@ def _cache_aggregated_climatology(startYearClimo, endYearClimo, cachePrefix, Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/19/2017 ''' yearString, fileSuffix = _get_year_string(startYearClimo, endYearClimo) @@ -965,7 +668,7 @@ def _cache_aggregated_climatology(startYearClimo, endYearClimo, cachePrefix, climatology.attrs['totalMonths'] = totalMonths climatology.attrs['fingerprintClimo'] = fingerprint_generator() - 
climatology.to_netcdf(outputFileClimo)
+    write_netcdf(climatology, outputFileClimo)
 
     return climatology  # }}}
 
diff --git a/mpas_analysis/shared/climatology/comparison_descriptors.py b/mpas_analysis/shared/climatology/comparison_descriptors.py
new file mode 100644
index 000000000..a1e993088
--- /dev/null
+++ b/mpas_analysis/shared/climatology/comparison_descriptors.py
@@ -0,0 +1,144 @@
+"""
+Functions for getting descriptors of the comparison grids used for remapping
+climatologies
+
+Authors
+-------
+Xylar Asay-Davis
+"""
+
+import numpy
+import pyproj
+
+from ..constants import constants
+
+from ..grid import LatLonGridDescriptor, ProjectionGridDescriptor
+
+
+def get_comparison_descriptor(config, comparisonGridName):  # {{{
+    """
+    Get the comparison grid descriptor from the comparisonGridName.
+
+    Parameters
+    ----------
+    config : MpasAnalysisConfigParser object
+        Contains configuration options
+
+    comparisonGridName : {'latlon', 'antarctic'}
+        The name of the comparison grid to use for remapping.
+
+    Raises
+    ------
+    ValueError
+        If comparisonGridName does not describe a known comparison grid
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+    if comparisonGridName == 'latlon':
+        comparisonDescriptor = \
+            _get_lat_lon_comparison_descriptor(config)
+    elif comparisonGridName == 'antarctic':
+        comparisonDescriptor = \
+            _get_antarctic_stereographic_comparison_descriptor(config)
+    else:
+        raise ValueError('Unknown comparison grid type {}'.format(
+            comparisonGridName))
+    return comparisonDescriptor  # }}}
+
+
+def get_antarctic_stereographic_projection():  # {{{
+    """
+    Get a projection for an Antarctic stereographic comparison grid
+
+    Returns
+    -------
+    projection : ``pyproj.Proj`` object
+        The projection
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+    projection = pyproj.Proj('+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 '
+                             '+k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84')
+
+    return projection  # }}}
+
+
+def _get_lat_lon_comparison_descriptor(config):  # {{{
+    """
+    Get a descriptor of the lat/lon comparison grid, used for remapping and
+    determining the grid name
+
+    Parameters
+    ----------
+    config : instance of ``MpasAnalysisConfigParser``
+        Contains configuration options
+
+    Returns
+    -------
+    descriptor : ``LatLonGridDescriptor`` object
+        A descriptor of the lat/lon grid
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+    climSection = 'climatology'
+
+    comparisonLatRes = config.getWithDefault(climSection,
+                                             'comparisonLatResolution',
+                                             constants.dLatitude)
+    comparisonLonRes = config.getWithDefault(climSection,
+                                             'comparisonLonResolution',
+                                             constants.dLongitude)
+
+    nLat = int((constants.latmax-constants.latmin)/comparisonLatRes)+1
+    nLon = int((constants.lonmax-constants.lonmin)/comparisonLonRes)+1
+    lat = numpy.linspace(constants.latmin, constants.latmax, nLat)
+    lon = numpy.linspace(constants.lonmin, constants.lonmax, nLon)
+
+    descriptor = LatLonGridDescriptor.create(lat, lon, units='degrees')
+
+    return descriptor  # }}}
+
+
+def _get_antarctic_stereographic_comparison_descriptor(config):  # {{{
+    """
+    Get a descriptor of an Antarctic stereographic comparison grid, used for
+    remapping and determining the grid name
+
+    Parameters
+    ----------
+    config : instance of ``MpasAnalysisConfigParser``
+        Contains configuration options
+
+    Returns
+    -------
+    descriptor : ``ProjectionGridDescriptor`` object
+        A descriptor of the Antarctic comparison grid
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+    climSection = 'climatology'
+
+    comparisonStereoWidth = config.getfloat(climSection,
'comparisonAntarcticStereoWidth')
+    comparisonStereoResolution = config.getfloat(
+        climSection, 'comparisonAntarcticStereoResolution')
+
+    projection = get_antarctic_stereographic_projection()
+
+    xMax = 0.5*comparisonStereoWidth*1e3
+    nx = int(comparisonStereoWidth/comparisonStereoResolution)+1
+    x = numpy.linspace(-xMax, xMax, nx)
+
+    meshName = '{}x{}km_{}km_Antarctic_stereo'.format(
+        comparisonStereoWidth, comparisonStereoWidth,
+        comparisonStereoResolution)
+    descriptor = ProjectionGridDescriptor.create(projection, x, x, meshName)
+
+    return descriptor  # }}}
diff --git a/mpas_analysis/shared/climatology/mpas_climatology_task.py b/mpas_analysis/shared/climatology/mpas_climatology_task.py
new file mode 100644
index 000000000..799f718ac
--- /dev/null
+++ b/mpas_analysis/shared/climatology/mpas_climatology_task.py
@@ -0,0 +1,637 @@
+import numpy
+import xarray as xr
+import os
+import warnings
+import sys
+import subprocess
+from distutils.spawn import find_executable
+
+from ..analysis_task import AnalysisTask
+
+from ..constants import constants
+
+from ..io.utility import build_config_full_path, make_directories
+from ..io import write_netcdf
+
+from .climatology import get_remapper
+from .comparison_descriptors import get_comparison_descriptor
+
+from ..grid import MpasMeshDescriptor
+
+from ..mpas_xarray import mpas_xarray
+
+
+class MpasClimatology(AnalysisTask):  # {{{
+    '''
+    An analysis task for computing climatologies from output from the
+    ``timeSeriesStatsMonthly`` analysis member.
+
+    Attributes
+    ----------
+
+    taskSuffix : str
+        The suffix to append to the task name, typically a short name for
+        the field(s) being analyzed.  For clarity, the taskSuffix should
+        start with a capital letter.
+
+    variableList : list of str
+        A list of variable names in ``timeSeriesStatsMonthly`` to be
+        included in the climatologies
+
+    iselValues : dict
+        A dictionary of dimensions and indices (or ``None``) used to extract
+        a slice of the MPAS field.
+
+    seasons : list of str
+        A list of seasons (keys in ``shared.constants.monthDictionary``)
+        over which the climatology should be computed or ['none'] if only
+        monthly climatologies are needed.
+
+    inputFiles : list of str
+        A list of input files used to compute the climatologies.
+
+    comparisonGridNames : list of {``None``, 'latlon', 'antarctic'}
+        The name(s) of the comparison grid to use for remapping.
+
+    restartFileName : str
+        If ``comparisonGridNames`` is not ``None``, the name of a restart
+        file from which the MPAS mesh can be read.
+
+    ncclimoModel : {'mpaso', 'mpascice'}
+        The name of the component expected by ``ncclimo``
+
+    startDate, endDate : str
+        The start and end dates of the climatology as strings
+
+    startYear, endYear : int
+        The start and end years of the climatology
+
+    fillValue : float
+        The fill value used in MPAS output (but currently not written to the
+        ``_FillValue`` attribute)
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    '''
+
+    def __init__(self, config, variableList, taskSuffix,
+                 componentName, comparisonGridNames=None,
+                 seasons=['none'], tags=None, iselValues=None):  # {{{
+        '''
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        variableList : list of str
+            A list of variable names in ``timeSeriesStatsMonthly`` to be
+            included in the climatologies
+
+        taskSuffix : str
+            The suffix to append to the task name, typically a short name for
+            the field(s) being analyzed.
For clarity, the taskSuffix should
+            start with a capital letter.
+
+        componentName :  {'ocean', 'seaIce'}
+            The name of the component (same as the folder where the task
+            resides)
+
+        comparisonGridNames : list of {'latlon', 'antarctic'}, optional
+            The name(s) of the comparison grid to use for remapping.
+
+        seasons : list of str, optional
+            A list of seasons (keys in ``shared.constants.monthDictionary``)
+            to be computed or ['none'] (not ``None``) if only monthly
+            climatologies are needed.
+
+        tags : list of str, optional
+            Tags used to describe the task (e.g. 'timeSeries', 'climatology',
+            'horizontalMap', 'index', 'transect').  These are used to determine
+            which tasks are generated (e.g. 'all_transect' or 'no_climatology'
+            in the 'generate' flags)
+
+        iselValues : dict, optional
+            A dictionary of dimensions and indices (or ``None``) used to
+            extract a slice of the MPAS field(s).
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+        self.variableList = variableList
+        self.seasons = seasons
+        self.taskSuffix = taskSuffix
+        self.comparisonGridNames = comparisonGridNames
+        self.iselValues = iselValues
+
+        # this is a stopgap until MPAS implements the _FillValue attribute
+        # correctly
+        self.fillValue = -9.99999979021476795361e+33
+
+        # tags is optional, so guard against the default None
+        if tags is None:
+            tags = []
+
+        if 'climatology' not in tags:
+            tags.append('climatology')
+
+        if componentName == 'ocean':
+            self.ncclimoModel = 'mpaso'
+        elif componentName == 'seaIce':
+            self.ncclimoModel = 'mpascice'
+        else:
+            raise ValueError('component {} is not supported by ncclimo.\n'
+                             'Check with Charlie Zender and Xylar Asay-Davis\n'
+                             'about getting it added'.format(componentName))
+
+        # call the constructor from the base class (AnalysisTask)
+        super(MpasClimatology, self).__init__(
+            config=config,
+            taskName='mpasClimatology{}'.format(taskSuffix),
+            componentName=componentName,
+            tags=tags)
+
+        # }}}
+
+    def setup_and_check(self):  # {{{
+        '''
+        Perform steps to set up the analysis and check for errors in the setup.
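# A hypothetical instantiation of the task (the variable name, seasons and
# isel indices are illustrative; the keyword arguments match __init__ above):
sstClimatologyTask = MpasClimatology(
    config=config,
    variableList=['timeMonthly_avg_activeTracers_temperature'],
    taskSuffix='SST',
    componentName='ocean',
    comparisonGridNames=['latlon'],
    seasons=['JFM', 'JAS', 'ANN'],
    tags=['climatology', 'horizontalMap'],
    iselValues={'nVertLevels': 0})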
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+        # first, call setup_and_check from the base class (AnalysisTask),
+        # which will perform some common setup, including storing:
+        #     self.runDirectory , self.historyDirectory, self.plotsDirectory,
+        #     self.namelist, self.runStreams, self.historyStreams,
+        #     self.calendar, self.namelistMap, self.streamMap, self.variableMap
+        super(MpasClimatology, self).setup_and_check()
+
+        self.check_analysis_enabled(
+            analysisOptionName='config_am_timeseriesstatsmonthly_enable',
+            raiseException=True)
+
+        self.restartFileName = None
+        if self.comparisonGridNames is not None:
+            try:
+                self.restartFileName = self.runStreams.readpath('restart')[0]
+            except ValueError:
+                raise IOError('No MPAS restart file found: need at least one '
+                              'restart file to perform remapping of '
+                              'climatologies.')
+
+        # get a list of timeSeriesStats output files from the streams file,
+        # reading only those that are between the start and end dates
+        startDate = self.config.get('climatology', 'startDate')
+        endDate = self.config.get('climatology', 'endDate')
+        streamName = \
+            self.historyStreams.find_stream(self.streamMap['timeSeriesStats'])
+        self.inputFiles = self.historyStreams.readpath(
+            streamName, startDate=startDate, endDate=endDate,
+            calendar=self.calendar)
+
+        if len(self.inputFiles) == 0:
+            raise IOError('No files were found in stream {} between {} and '
+                          '{}.'.format(streamName, startDate, endDate))
+
+        self._update_climatology_bounds_from_file_names()
+
+        # }}}
+
+    def run(self):  # {{{
+        '''
+        Compute the requested climatologies
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        print '\nComputing {} climatologies from files:\n' \
+            '    {} through\n    {}'.format(
+                self.taskSuffix,
+                os.path.basename(self.inputFiles[0]),
+                os.path.basename(self.inputFiles[-1]))
+
+        config = self.config
+
+        mpasMeshName = config.get('input', 'mpasMeshName')
+
+        if self.seasons[0] == 'none':
+            seasonsToCheck = constants.abrevMonthNames
+        else:
+            seasonsToCheck = self.seasons
+
+        allExist = True
+        for season in seasonsToCheck:
+
+            climatologyFileName, climatologyDirectory = \
+                self.get_ncclimo_file_name(season, 'unmasked',
+                                           returnDir=True)
+
+            if not os.path.exists(climatologyFileName):
+                allExist = False
+                break
+
+        if not allExist:
+            self._compute_climatologies_with_ncclimo(
+                inDirectory=self.historyDirectory,
+                outDirectory=climatologyDirectory)
+
+        if self.comparisonGridNames is not None:
+
+            parallel = self.config.getint('execute', 'parallelTaskCount') > 1
+            if parallel:
+                # avoid writing the same mapping file from multiple processes
+                mappingFilePrefix = 'map_{}'.format(self.taskName)
+            else:
+                mappingFilePrefix = 'map'
+
+            mpasDescriptor = MpasMeshDescriptor(
+                self.restartFileName,
+                meshName=mpasMeshName)
+
+            dsMask = xr.open_dataset(self.inputFiles[0])
+            dsMask = mpas_xarray.subset_variables(dsMask, self.variableList)
+            iselValues = {'Time': 0}
+            if self.iselValues is not None:
+                for dim in self.iselValues:
+                    # we've already hyperslabbed this dimension in ncclimo
+                    iselValues[dim] = 0
+            # select only Time=0 and possibly only the desired vertical
+            # slice
+            dsMask = dsMask.isel(**iselValues)
+
+            firstGrid = True
+            for comparisonGridName in self.comparisonGridNames:
+                comparisonDescriptor = \
+                    get_comparison_descriptor(config, comparisonGridName)
+
+                mpasRemapper = get_remapper(
+                    config=config, sourceDescriptor=mpasDescriptor,
+                    comparisonDescriptor=comparisonDescriptor,
+                    mappingFilePrefix=mappingFilePrefix,
+                    method=config.get('climatology',
+                                      'mpasInterpolationMethod'))
+
for season in self.seasons:
+                    if firstGrid:
+                        self._mask_climatologies(season, dsMask,
+                                                 comparisonDescriptor)
+
+                    maskedClimatologyFileName = self.get_ncclimo_file_name(
+                        season, 'masked', comparisonDescriptor)
+
+                    remappedFileName = self.get_ncclimo_file_name(
+                        season, 'remapped', comparisonDescriptor)
+
+                    if not os.path.exists(remappedFileName):
+                        self._remap(inFileName=maskedClimatologyFileName,
+                                    outFileName=remappedFileName,
+                                    remapper=mpasRemapper,
+                                    comparisonGridName=comparisonGridName)
+
+                firstGrid = False
+        # }}}
+
+    def get_ncclimo_file_name(self, season, stage, comparisonDescriptor=None,
+                              returnDir=False):  # {{{
+        """
+        Given a season and a stage of processing ('unmasked', 'masked' or
+        'remapped'), returns the full path of the corresponding MPAS
+        climatology file.
+
+        Parameters
+        ----------
+        season : str
+            One of the seasons in ``constants.monthDictionary``
+
+        stage : {'unmasked', 'masked', 'remapped'}
+            The stage of the masking and remapping process
+
+        comparisonDescriptor : MeshDescriptor, optional
+            The comparison mesh descriptor, used to get the mesh name
+
+        returnDir : bool, optional
+            Return the directory as well
+
+        Returns
+        -------
+        fileName : str
+            The path to the climatology file for the specified season.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        climatologyBaseDirectory = build_config_full_path(
+            self.config, 'output', 'mpasClimatologySubdirectory')
+
+        mpasMeshName = self.config.get('input', 'mpasMeshName')
+
+        climatologyBaseDirectory = '{}/{}'.format(climatologyBaseDirectory,
+                                                  stage)
+
+        if stage in ['unmasked', 'masked']:
+            directory = '{}/{}_{}'.format(
+                climatologyBaseDirectory, self.taskSuffix, mpasMeshName)
+        elif stage == 'remapped':
+            directory = '{}/{}_{}_to_{}'.format(
+                climatologyBaseDirectory, self.taskSuffix, mpasMeshName,
+                comparisonDescriptor.meshName)
+        else:
+            raise ValueError('Unsupported stage {}'.format(stage))
+
+        make_directories(directory)
+
+        monthValues = sorted(constants.monthDictionary[season])
+        startMonth = monthValues[0]
+        endMonth = monthValues[-1]
+
+        suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format(
+            self.startYear, startMonth, self.endYear, endMonth)
+
+        if season in constants.abrevMonthNames:
+            season = '{:02d}'.format(monthValues[0])
+        fileName = '{}/{}_{}_{}.nc'.format(directory, self.ncclimoModel,
+                                           season, suffix)
+
+        if returnDir:
+            return fileName, directory
+        else:
+            return fileName  # }}}
+
+    def customize_climatology(self, climatology):  # {{{
+        """
+        A function that can be overridden by child classes for purposes of
+        making custom changes to the climatology data set after slicing and
+        masking and before remapping.  By default, a field 'validMask' is added
+        to the climatology.  After remapping, this field indicates which cells
+        on the remapped grid came from valid cells on the MPAS grid, useful for
+        plotting a land mask (for example).
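# A sketch of a child class overriding customize_climatology(); the class
# and variable names are hypothetical:
class MpasClimatologySST(MpasClimatology):
    def customize_climatology(self, climatology):
        # keep the default behavior, which adds 'validMask'
        climatology = super(MpasClimatologySST,
                            self).customize_climatology(climatology)
        # then make a custom change, e.g. renaming the field
        climatology = climatology.rename(
            {'timeMonthly_avg_activeTracers_temperature': 'sst'})
        return climatology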
+
+        Parameters
+        ----------
+        climatology : ``xarray.Dataset`` object
+            the climatology data set
+
+        Returns
+        -------
+        climatology : ``xarray.Dataset`` object
+            the modified climatology data set
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        # add valid mask as a variable, useful for remapping later
+        climatology['validMask'] = \
+            xr.DataArray(numpy.ones(climatology.dims['nCells']),
+                         dims=['nCells'])
+        return climatology  # }}}
+
+    def _update_climatology_bounds_from_file_names(self):  # {{{
+        """
+        Update the start and end years and dates for climatologies based on the
+        years actually available in the list of files.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        """
+
+        config = self.config
+
+        requestedStartYear = config.getint('climatology', 'startYear')
+        requestedEndYear = config.getint('climatology', 'endYear')
+
+        dates = sorted([fileName[-13:-6] for fileName in self.inputFiles])
+        years = [int(date[0:4]) for date in dates]
+        months = [int(date[5:7]) for date in dates]
+
+        # search for the start of the first full year
+        firstIndex = 0
+        while(firstIndex < len(years) and months[firstIndex] != 1):
+            firstIndex += 1
+        startYear = years[firstIndex]
+
+        # search for the end of the last full year
+        lastIndex = len(years)-1
+        while(lastIndex >= 0 and months[lastIndex] != 12):
+            lastIndex -= 1
+        endYear = years[lastIndex]
+
+        if startYear != requestedStartYear or endYear != requestedEndYear:
+            message = "climatology start and/or end year different from " \
+                      "requested\n" \
+                      "requested: {:04d}-{:04d}\n" \
+                      "actual:    {:04d}-{:04d}\n".format(requestedStartYear,
+                                                          requestedEndYear,
+                                                          startYear,
+                                                          endYear)
+            warnings.warn(message)
+            config.set('climatology', 'startYear', str(startYear))
+            config.set('climatology', 'endYear', str(endYear))
+
+            startDate = '{:04d}-01-01_00:00:00'.format(startYear)
+            config.set('climatology', 'startDate', startDate)
+            endDate = '{:04d}-12-31_23:59:59'.format(endYear)
+            config.set('climatology', 'endDate', endDate)
+
+        else:
+            startDate = config.get('climatology', 'startDate')
+            endDate = config.get('climatology', 'endDate')
+
+        self.startDate = startDate
+        self.endDate = endDate
+        self.startYear = startYear
+        self.endYear = endYear
+
+        # }}}
+
+    def _compute_climatologies_with_ncclimo(self, inDirectory, outDirectory,
+                                            remapper=None,
+                                            remappedDirectory=None):  # {{{
+        '''
+        Uses ncclimo to compute monthly, seasonal and/or annual climatologies.
+
+        Parameters
+        ----------
+        inDirectory : str
+            The run directory containing timeSeriesStatsMonthly output
+
+        outDirectory : str
+            The output directory where climatologies will be written
+
+        remapper : ``shared.interpolation.Remapper`` object, optional
+            If present, a remapper that defines the source and destination
+            grids for remapping the climatologies.
+
+        remappedDirectory : str, optional
+            If present, the path where remapped climatologies should be
+            written.  By default, remapped files are stored in the same
+            directory as the climatologies on the source grid.  Has no effect
+            if ``remapper`` is ``None``.
+
+        Raises
+        ------
+        OSError
+            If ``ncclimo`` is not in the system path.
+
+        Author
+        ------
+        Xylar Asay-Davis
+        '''
+
+        if find_executable('ncclimo') is None:
+            raise OSError('ncclimo not found.
Make sure the latest nco '
+                          'package is installed: \n'
+                          'conda install nco\n'
+                          'Note: this presumes use of the conda-forge '
+                          'channel.')
+
+        parallelMode = self.config.get('execute', 'ncclimoParallelMode')
+
+        args = ['ncclimo',
+                '--clm_md=mth',
+                '-a', 'sdd',
+                '-m', self.ncclimoModel,
+                '-p', parallelMode,
+                '-v', ','.join(self.variableList),
+                '--seasons={}'.format(','.join(self.seasons)),
+                '-s', '{:04d}'.format(self.startYear),
+                '-e', '{:04d}'.format(self.endYear),
+                '-i', inDirectory,
+                '-o', outDirectory]
+
+        if remapper is not None:
+            args.extend(['-r', remapper.mappingFileName])
+            if remappedDirectory is not None:
+                args.extend(['-O', remappedDirectory])
+
+        if self.iselValues is not None:
+            ncksOptions = ['-O', '--no_tmp_fl']
+
+            for dim in self.iselValues:
+                val = self.iselValues[dim]
+                ncksOptions.extend(['-d', '{},{},{}'.format(dim, val, val)])
+
+            args.extend(['-n', ' '.join(ncksOptions)])
+
+        # make sure any output is flushed before we add output from the
+        # subprocess
+        sys.stdout.flush()
+        sys.stderr.flush()
+
+        subprocess.check_call(args)  # }}}
+
+    def _mask_climatologies(self, season, dsMask, comparisonDescriptor):  # {{{
+        '''
+        For each season, creates a masked version of the climatology
+
+        Parameters
+        ----------
+        season : str
+            The name of the season to be masked
+
+        dsMask : ``xarray.Dataset`` object
+            A data set (from the first input file) that can be used to
+            determine the mask in MPAS output files.
+
+        comparisonDescriptor : MeshDescriptor, optional
+            The comparison mesh descriptor, used to get the mesh name
+
+        Author
+        ------
+        Xylar Asay-Davis
+        '''
+
+        climatologyFileName = self.get_ncclimo_file_name(
+            season, 'unmasked', comparisonDescriptor)
+
+        maskedClimatologyFileName = self.get_ncclimo_file_name(
+            season, 'masked', comparisonDescriptor)
+
+        if not os.path.exists(maskedClimatologyFileName):
+            # slice and mask the data set
+            climatology = xr.open_dataset(climatologyFileName)
+            iselValues = {'Time': 0}
+            if self.iselValues is not None:
+                iselValues.update(self.iselValues)
+            # select only Time=0 and possibly only the desired vertical
+            # slice
+            climatology = climatology.isel(**iselValues)
+
+            # mask the data set
+            for variableName in self.variableList:
+                climatology[variableName] = \
+                    climatology[variableName].where(
+                        dsMask[variableName] != self.fillValue)
+
+            climatology = self.customize_climatology(climatology)
+
+            write_netcdf(climatology, maskedClimatologyFileName)
+        # }}}
+
+    def _remap(self, inFileName, outFileName, remapper, comparisonGridName):
+        # {{{
+        """
+        Performs remapping either using ``ncremap`` or the native python code,
+        depending on the requested setting and the comparison grid
+
+        Parameters
+        ----------
+        inFileName : str
+            The name of the input file to be remapped.
+
+        outFileName : str
+            The name of the output file to which the remapped data set should
+            be written.
+
+        remapper : ``Remapper`` object
+            A remapper that can be used to remap files or data sets to a
+            comparison grid.
+
+        comparisonGridName : {'latlon', 'antarctic'}
+            The name of the comparison grid to use for remapping.
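# For reference, the subprocess call assembled in
# _compute_climatologies_with_ncclimo() above is roughly equivalent to a
# command line like the following (the variable, seasons, years and paths
# are placeholders):
#
#   ncclimo --clm_md=mth -a sdd -m mpaso -p bck \
#       -v timeMonthly_avg_activeTracers_temperature \
#       --seasons=JFM,JAS,ANN -s 0011 -e 0020 \
#       -i /dir/for/model/output -o /dir/for/clim/mpas/unmasked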
+ + Authors + ------- + Xylar Asay-Davis + """ + if remapper.mappingFileName is None: + # no remapping is needed + return + + useNcremap = self.config.getboolean('climatology', 'useNcremap') + + if comparisonGridName != 'latlon': + # ncremap doesn't support polar stereographic grids + useNcremap = False + + renormalizationThreshold = self.config.getfloat( + 'climatology', 'renormalizationThreshold') + + if useNcremap: + remapper.remap_file(inFileName=inFileName, + outFileName=outFileName, + overwrite=True, + renormalize=renormalizationThreshold) + else: + + climatologyDataSet = xr.open_dataset(inFileName) + + remappedClimatology = remapper.remap(climatologyDataSet, + renormalizationThreshold) + write_netcdf(remappedClimatology, outFileName) + # }}} + + # }}} + + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/shared/constants/constants.py b/mpas_analysis/shared/constants/constants.py index 7df2b1bba..5d81503c3 100644 --- a/mpas_analysis/shared/constants/constants.py +++ b/mpas_analysis/shared/constants/constants.py @@ -22,13 +22,12 @@ monthsInYear = 12 -monthDictionary = {'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6, - 'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, - 'Dec': 12, 'JFM': np.array([1, 2, 3]), - 'AMJ': np.array([4, 5, 6]), 'JAS': np.array([7, 8, 9]), - 'OND': np.array([10, 11, 12]), 'ANN': np.arange(1, 13), - 'ON': np.array([10, 11]), 'FM': np.array([2, 3]), - 'DJF': np.array([12, 1, 2]), 'JJA': np.array([6, 7, 8])} +monthDictionary = {'Jan': [1], 'Feb': [2], 'Mar': [3], 'Apr': [4], 'May': [5], + 'Jun': [6], 'Jul': [7], 'Aug': [8], 'Sep': [9], 'Oct': [10], + 'Nov': [11], 'Dec': [12], 'JFM': [1, 2, 3], + 'AMJ': [4, 5, 6], 'JAS': [7, 8, 9], 'OND': [10, 11, 12], + 'ANN': list(np.arange(1, 13)), 'ON': [10, 11], 'FM': [2, 3], + 'DJF': [12, 1, 2], 'JJA': [6, 7, 8]} daysInMonth = np.array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]) @@ -56,4 +55,10 @@ # small value to prevent division by zero eps = 1.E-10 +# density of freshwater (kg/m^3) +rho_fw = 1000. 
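# With monthDictionary entries now stored as plain lists (see the constants
# change above), season handling reduces to ordinary list operations, as in
# get_ncclimo_file_name(); the years here are placeholders:
from mpas_analysis.shared.constants import constants
monthValues = sorted(constants.monthDictionary['DJF'])  # [1, 2, 12]
startMonth, endMonth = monthValues[0], monthValues[-1]
suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format(11, startMonth,
                                                  20, endMonth)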
+ +# kilograms per gigatonne +kg_per_GT = 1e12 + # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/shared/generalized_reader/__init__.py b/mpas_analysis/shared/generalized_reader/__init__.py index e69de29bb..da147bbd5 100644 --- a/mpas_analysis/shared/generalized_reader/__init__.py +++ b/mpas_analysis/shared/generalized_reader/__init__.py @@ -0,0 +1 @@ +from generalized_reader import open_multifile_dataset diff --git a/mpas_analysis/shared/grid/__init__.py b/mpas_analysis/shared/grid/__init__.py index 7fae57fb3..beffc841f 100644 --- a/mpas_analysis/shared/grid/__init__.py +++ b/mpas_analysis/shared/grid/__init__.py @@ -1,2 +1,2 @@ from .grid import MpasMeshDescriptor, LatLonGridDescriptor, \ - ProjectionGridDescriptor + ProjectionGridDescriptor, interp_extrap_corner diff --git a/mpas_analysis/shared/grid/grid.py b/mpas_analysis/shared/grid/grid.py index 399c6c19e..1f54b1efd 100644 --- a/mpas_analysis/shared/grid/grid.py +++ b/mpas_analysis/shared/grid/grid.py @@ -15,9 +15,6 @@ ------ Xylar Asay-Davis -Last Modified -------------- -04/16/2017 ''' import netCDF4 @@ -34,10 +31,6 @@ class MeshDescriptor(object): # {{{ Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 ''' def __init__(self): # {{{ @@ -50,9 +43,6 @@ def __init__(self): # {{{ ------ Xylar Asay-Davis - Last Modified - ------------- - 04/13/2017 ''' self.meshName = None # }}} @@ -70,10 +60,6 @@ def to_scrip(self, scripFileName): # {{{ Authors ------ Xylar Asay-Davis - - Last Modified - ------------- - 03/17/2017 ''' return # }}} @@ -88,10 +74,6 @@ class MpasMeshDescriptor(MeshDescriptor): # {{{ Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/16/2017 ''' def __init__(self, fileName, meshName=None): # {{{ @@ -112,10 +94,6 @@ def __init__(self, fileName, meshName=None): # {{{ Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/16/2017 ''' ds = xarray.open_dataset(fileName) @@ -153,10 +131,6 @@ def to_scrip(self, scripFileName): # {{{ Authors ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/16/2017 ''' self.scripFileName = scripFileName @@ -212,7 +186,7 @@ def to_scrip(self, scripFileName): # {{{ inFile.close() outFile.close() # }}} -# }}} + # }}} class LatLonGridDescriptor(MeshDescriptor): # {{{ @@ -222,10 +196,6 @@ class LatLonGridDescriptor(MeshDescriptor): # {{{ Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/16/2017 ''' def __init__(self): # {{{ ''' @@ -239,22 +209,25 @@ def __init__(self): # {{{ Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/05/2017 ''' self.regional = False self.meshName = None # }}} - def read(self, fileName, latVarName='lat', lonVarName='lon'): # {{{ + @classmethod + def read(cls, fileName=None, ds=None, latVarName='lat', + lonVarName='lon'): # {{{ ''' Read the lat-lon grid from a file with the given lat/lon var names. 
Parameters
         ----------
-        fileName : str
-            The path of the file containing the lat-lon grid
+        fileName : str, optional
+            The path of the file containing the lat-lon grid (if ``ds`` is not
+            supplied directly)
+
+        ds : ``xarray.Dataset`` object, optional
+            A data set containing the lat-lon grid (if supplied,
+            ``fileName`` will be ignored)
 
         latVarName, lonVarName : str, optional
             The name of the latitude and longitude variables in the grid file
@@ -262,38 +235,39 @@ def read(self, fileName, latVarName='lat', lonVarName='lon'):  # {{{
         Author
         ------
         Xylar Asay-Davis
-
-        Last Modified
-        -------------
-        03/17/2017
         '''
-        ds = xarray.open_dataset(fileName)
+        if ds is None:
+            ds = xarray.open_dataset(fileName)
 
-        if self.meshName is None and 'meshName' in ds.attrs:
-            self.meshName = ds.attrs['meshName']
+        descriptor = cls()
+
+        if descriptor.meshName is None and 'meshName' in ds.attrs:
+            descriptor.meshName = ds.attrs['meshName']
 
         # Get info from input file
-        self.lat = numpy.array(ds[latVarName].values, float)
-        self.lon = numpy.array(ds[lonVarName].values, float)
+        descriptor.lat = numpy.array(ds[latVarName].values, float)
+        descriptor.lon = numpy.array(ds[lonVarName].values, float)
         if 'degree' in ds[latVarName].units:
-            self.units = 'degrees'
+            descriptor.units = 'degrees'
         else:
-            self.units = 'radians'
-
-        self._set_coords(latVarName, lonVarName, ds[latVarName].dims[0],
-                         ds[lonVarName].dims[0])
+            descriptor.units = 'radians'
 
         # interp/extrap corners
-        self.lonCorner = _interp_extrap_corner(self.lon)
-        self.latCorner = _interp_extrap_corner(self.lat)
+        descriptor.lonCorner = interp_extrap_corner(descriptor.lon)
+        descriptor.latCorner = interp_extrap_corner(descriptor.lat)
+
+        descriptor._set_coords(latVarName, lonVarName, ds[latVarName].dims[0],
+                               ds[lonVarName].dims[0])
 
         if 'history' in ds.attrs:
-            self.history = '\n'.join([ds.attrs['history'],
-                                      ' '.join(sys.argv[:])])
+            descriptor.history = '\n'.join([ds.attrs['history'],
+                                            ' '.join(sys.argv[:])])
         else:
-            self.history = sys.argv[:]  # }}}
+            descriptor.history = sys.argv[:]
+        return descriptor  # }}}
 
-    def create(self, latCorner, lonCorner, units='degrees'):  # {{{
+    @classmethod
+    def create(cls, latCorner, lonCorner, units='degrees'):  # {{{
         '''
         Create the lat-lon grid with the given arrays and units.
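# A usage sketch for the new classmethod constructors (the file name and
# corner arrays are placeholders):
import numpy
descriptor = LatLonGridDescriptor.read(fileName='obs_grid.nc',
                                       latVarName='lat', lonVarName='lon')
# or build a descriptor directly from cell-corner arrays:
latCorner = numpy.linspace(-90., 90., 361)
lonCorner = numpy.linspace(-180., 180., 721)
descriptor = LatLonGridDescriptor.create(latCorner, lonCorner,
                                         units='degrees')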
@@ -309,19 +283,17 @@ def create(self, latCorner, lonCorner, units='degrees'):  # {{{
         Author
         ------
         Xylar Asay-Davis
-
-        Last Modified
-        -------------
-        03/17/2017
         '''
+        descriptor = cls()
 
-        self.latCorner = latCorner
-        self.lonCorner = lonCorner
-        self.lon = 0.5*(lonCorner[0:-1] + lonCorner[1:])
-        self.lat = 0.5*(latCorner[0:-1] + latCorner[1:])
-        self.units = units
-        self.history = sys.argv[:]
-        self._set_coords('lat', 'lon', 'lat', 'lon')  # }}}
+        descriptor.latCorner = latCorner
+        descriptor.lonCorner = lonCorner
+        descriptor.lon = 0.5*(lonCorner[0:-1] + lonCorner[1:])
+        descriptor.lat = 0.5*(latCorner[0:-1] + latCorner[1:])
+        descriptor.units = units
+        descriptor.history = sys.argv[:]
+        descriptor._set_coords('lat', 'lon', 'lat', 'lon')
+        return descriptor  # }}}
 
     def to_scrip(self, scripFileName):  # {{{
         '''
@@ -335,10 +307,6 @@ def to_scrip(self, scripFileName):  # {{{
         Authors
         ------
         Xylar Asay-Davis
-
-        Last Modified
-        -------------
-        04/16/2017
         '''
         self.scripFileName = scripFileName
 
@@ -396,7 +364,17 @@ def _set_coords(self, latVarName, lonVarName, latDimName,
                                              self.units))
         if self.meshName is None:
             self.meshName = '{}x{}{}'.format(abs(dLat), abs(dLon), units)
+
+        # determine if the grid is regional or global
+        totalLon = numpy.abs(self.lonCorner[-1] - self.lonCorner[0])
+        if units == 'radian':
+            totalLon = numpy.degrees(totalLon)
+        # regardless of the extent in latitude, it seems like a grid should
+        # be considered "global" if it covers 360 degrees longitude.
+        # Otherwise, you end up with a seam at the prime- or antemeridian.
+        self.regional = not numpy.isclose(totalLon, 360.)
     # }}}
+    # }}}
 
 
 class ProjectionGridDescriptor(MeshDescriptor):  # {{{
@@ -407,10 +385,6 @@ class ProjectionGridDescriptor(MeshDescriptor):  # {{{
     Author
     ------
     Xylar Asay-Davis
-
-    Last Modified
-    -------------
-    04/16/2017
     '''
 
     def __init__(self, projection):  # {{{
@@ -426,16 +400,14 @@ def __init__(self, projection):  # {{{
         Author
         ------
         Xylar Asay-Davis
-
-        Last Modified
-        -------------
-        04/05/2017
         '''
         self.projection = projection
         self.latLonProjection = pyproj.Proj(proj='latlong', datum='WGS84')
         self.regional = True
 
-    def read(self, fileName, meshName=None, xVarName='x', yVarName='y'):  # {{{
+    @classmethod
+    def read(cls, projection, fileName, meshName=None, xVarName='x',
+             yVarName='y'):  # {{{
         '''
         Given a grid file with x and y coordinates defining the axes of the
         logically rectangular grid, read in the x and y coordinates and
@@ -443,6 +415,10 @@ def read(self, fileName, meshName=None, xVarName='x', yVarName='y'):  # {{{
 
         Parameters
         ----------
+        projection : pyproj.Proj object
+            The projection used to map from grid x-y space to latitude and
+            longitude
+
         fileName : str
             The path of the file containing the grid data
 
@@ -457,40 +433,39 @@ def read(self, fileName, meshName=None, xVarName='x', yVarName='y'):  # {{{
         Authors
         ------
         Xylar Asay-Davis
-
-        Last Modified
-        -------------
-        04/16/2017
         '''
+        descriptor = cls(projection)
         ds = xarray.open_dataset(fileName)
 
         if meshName is None:
             if 'meshName' not in ds.attrs:
                 raise ValueError('No meshName provided or found in file.')
-            self.meshName = ds.attrs['meshName']
+            descriptor.meshName = ds.attrs['meshName']
         else:
-            self.meshName = meshName
+            descriptor.meshName = meshName
 
         # Get info from input file
-        self.x = numpy.array(ds[xVarName].values, float)
-        self.y = numpy.array(ds[yVarName].values, float)
+        descriptor.x = numpy.array(ds[xVarName].values, float)
+        descriptor.y = numpy.array(ds[yVarName].values, float)
 
-        self._set_coords(xVarName, yVarName, ds[xVarName].dims[0],
-
ds[yVarName].dims[0]) + descriptor._set_coords(xVarName, yVarName, ds[xVarName].dims[0], + ds[yVarName].dims[0]) # interp/extrap corners - self.xCorner = _interp_extrap_corner(self.x) - self.yCorner = _interp_extrap_corner(self.y) + descriptor.xCorner = interp_extrap_corner(descriptor.x) + descriptor.yCorner = interp_extrap_corner(descriptor.y) # Update history attribute of netCDF file if 'history' in ds.attrs: - self.history = '\n'.join([ds.attrs['history'], - ' '.join(sys.argv[:])]) + descriptor.history = '\n'.join([ds.attrs['history'], + ' '.join(sys.argv[:])]) else: - self.history = sys.argv[:] # }}} + descriptor.history = sys.argv[:] + return descriptor # }}} - def create(self, x, y, meshName): # {{{ + @classmethod + def create(cls, projection, x, y, meshName): # {{{ ''' Given x and y coordinates defining the axes of the logically rectangular grid, save the coordinates interpolate/extrapolate to @@ -508,23 +483,21 @@ def create(self, x, y, meshName): # {{{ Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 03/20/2017 ''' + descriptor = cls(projection) - self.meshName = meshName + descriptor.meshName = meshName - self.x = x - self.y = y + descriptor.x = x + descriptor.y = y - self._set_coords('x', 'y', 'x', 'y') + descriptor._set_coords('x', 'y', 'x', 'y') # interp/extrap corners - self.xCorner = _interp_extrap_corner(self.x) - self.yCorner = _interp_extrap_corner(self.y) - self.history = sys.argv[:] # }}} + descriptor.xCorner = interp_extrap_corner(descriptor.x) + descriptor.yCorner = interp_extrap_corner(descriptor.y) + descriptor.history = sys.argv[:] + return descriptor # }}} def to_scrip(self, scripFileName): # {{{ ''' @@ -538,10 +511,6 @@ def to_scrip(self, scripFileName): # {{{ Authors ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/16/2017 ''' self.scripFileName = scripFileName @@ -594,10 +563,6 @@ def project_to_lat_lon(self, X, Y): # {{{ Authors ------ Xylar Asay-Davis - - Last Modified - ------------- - 03/20/2017 ''' Lon, Lat = pyproj.transform(self.projection, self.latLonProjection, @@ -629,6 +594,19 @@ def _set_coords(self, xVarName, yVarName, xDimName, yDimName): # {{{ self.dimSize = [len(self.x), len(self.y)] # }}} + # }}} + + +def interp_extrap_corner(inField): # {{{ + '''Interpolate/extrapolate a 1D field from grid centers to grid corners''' + + outField = numpy.zeros(len(inField)+1) + outField[1:-1] = 0.5*(inField[0:-1] + inField[1:]) + # extrapolate the ends + outField[0] = 1.5*inField[0] - 0.5*inField[1] + outField[-1] = 1.5*inField[-1] - 0.5*inField[-2] + return outField # }}} + def _create_scrip(outFile, grid_size, grid_corners, grid_rank, units, meshName): # {{{ @@ -659,10 +637,6 @@ def _create_scrip(outFile, grid_size, grid_corners, grid_rank, units, Authors ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/16/2017 ''' # Write to output file # Dimensions @@ -691,17 +665,6 @@ def _create_scrip(outFile, grid_size, grid_corners, grid_rank, units, # }}} -def _interp_extrap_corner(inField): # {{{ - '''Interpolate/extrapolate a 1D field from grid centers to grid corners''' - - outField = numpy.zeros(len(inField)+1) - outField[1:-1] = 0.5*(inField[0:-1] + inField[1:]) - # extrapolate the ends - outField[0] = 1.5*inField[0] - 0.5*inField[1] - outField[-1] = 1.5*inField[-1] - 0.5*inField[-2] - return outField # }}} - - def _unwrap_corners(inField): '''Turn a 2D array of corners into an array of rectangular mesh elements''' outField = numpy.zeros(((inField.shape[0]-1)*(inField.shape[1]-1), 4)) diff --git 
a/mpas_analysis/shared/interpolation/remapper.py b/mpas_analysis/shared/interpolation/remapper.py index 8a4c9c979..d01008fb2 100644 --- a/mpas_analysis/shared/interpolation/remapper.py +++ b/mpas_analysis/shared/interpolation/remapper.py @@ -11,10 +11,6 @@ Author ------ Xylar Asay-Davis - -Last Modified -------------- -04/13/2017 ''' import subprocess @@ -38,10 +34,6 @@ class Remapper(object): Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 ''' def __init__(self, sourceDescriptor, destinationDescriptor, @@ -75,10 +67,6 @@ def __init__(self, sourceDescriptor, destinationDescriptor, Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 ''' if not isinstance(sourceDescriptor, @@ -128,10 +116,6 @@ def build_mapping_file(self, method='bilinear', Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 ''' if self.mappingFileName is None or \ @@ -176,7 +160,11 @@ def build_mapping_file(self, method='bilinear', # subprocess sys.stdout.flush() sys.stderr.flush() - subprocess.check_call(args) + + # throw out the standard output from ESMF_RegridWeightGen, as it's + # rather verbose but keep stderr + DEVNULL = open(os.devnull, 'wb') + subprocess.check_call(args, stdout=DEVNULL) # remove the temporary SCRIP files os.remove(self.sourceDescriptor.scripFileName) @@ -184,8 +172,8 @@ def build_mapping_file(self, method='bilinear', # }}} - def remap_file(self, inFileName, outFileName, - variableList=None, overwrite=False): # {{{ + def remap_file(self, inFileName, outFileName, variableList=None, + overwrite=False, renormalize=None): # {{{ ''' Given a source file defining either an MPAS mesh or a lat-lon grid and a destination file or set of arrays defining a lat-lon grid, constructs @@ -209,6 +197,9 @@ def remap_file(self, inFileName, outFileName, exists. 
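# A usage sketch for the extended remap_file() (the file names are
# placeholders; renormalize is the new optional argument documented below):
remapper.remap_file(inFileName='clim_on_source_grid.nc',
                    outFileName='clim_on_comparison_grid.nc',
                    overwrite=True,
                    renormalize=0.01)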
If `False`, and the destination file is already present, the function does nothing and returns immediately + renormalize : float, optional + A threshold to use to renormalize the data + Raises ------ OSError @@ -221,10 +212,6 @@ def remap_file(self, inFileName, outFileName, Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 ''' if self.mappingFileName is None: @@ -256,12 +243,22 @@ def remap_file(self, inFileName, outFileName, args = ['ncremap', '-i', inFileName, '-m', self.mappingFileName, + '--vrb=1', '-o', outFileName] + regridArgs = [] + + if renormalize is not None: + regridArgs.append('--renormalize={}'.format(renormalize)) + if isinstance(self.sourceDescriptor, LatLonGridDescriptor): - args.extend(['-R', '--rgr lat_nm={} --rgr lon_nm={}'.format( - self.sourceDescriptor.latVarName, - self.sourceDescriptor.lonVarName)]) + regridArgs.extend(['--rgr lat_nm={}'.format( + self.sourceDescriptor.latVarName), + '--rgr lon_nm={}'.format( + self.sourceDescriptor.lonVarName)]) + + if len(regridArgs) > 0: + args.extend(['-R', ' '.join(regridArgs)]) if isinstance(self.sourceDescriptor, MpasMeshDescriptor): # Note: using the -C (climatology) flag for now because otherwise @@ -272,6 +269,7 @@ def remap_file(self, inFileName, outFileName, if variableList is not None: args.extend(['-v', ','.join(variableList)]) + # make sure any output is flushed before we add output from the # subprocess sys.stdout.flush() @@ -315,10 +313,6 @@ def remap(self, ds, renormalizationThreshold=None): # {{{ Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/13/2017 ''' if self.mappingFileName is None: @@ -368,10 +362,6 @@ def _load_mapping(self): # {{{ Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/06/2017 ''' if self.mappingLoaded: @@ -438,15 +428,11 @@ def _check_drop(self, dataArray): # {{{ def _remap_data_array(self, dataArray, renormalizationThreshold): # {{{ ''' - Regrids a single xarray data array + Remap a single xarray data array Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/05/2017 ''' sourceDims = self.sourceDescriptor.dims @@ -513,15 +499,11 @@ def _remap_data_array(self, dataArray, renormalizationThreshold): # {{{ def _remap_numpy_array(self, inField, remapAxes, renormalizationThreshold): # {{{ ''' - Regrids a single numpy array + Remap a single numpy array Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 04/05/2017 ''' # permute the dimensions of inField so the axes to remap are first, @@ -581,16 +563,6 @@ def _remap_numpy_array(self, inField, remapAxes, return outField # }}} -def _get_lock_path(fileName): # {{{ - '''Returns the name of a temporary lock file unique to a given file name''' - directory = '{}/.locks/'.format(os.path.dirname(fileName)) - try: - os.makedirs(directory) - except OSError: - pass - return '{}/{}.lock'.format(directory, os.path.basename(fileName)) # }}} - - def _get_temp_path(): # {{{ '''Returns the name of a temporary NetCDF file''' return '{}/{}.nc'.format(tempfile._get_default_tempdir(), diff --git a/mpas_analysis/shared/io/__init__.py b/mpas_analysis/shared/io/__init__.py index 0571c93b9..e2ad76440 100644 --- a/mpas_analysis/shared/io/__init__.py +++ b/mpas_analysis/shared/io/__init__.py @@ -1,2 +1,4 @@ from .namelist_streams_interface import NameList, StreamsFile -from .utility import paths +from .utility import paths, make_directories, build_config_full_path, \ + check_path_exists +from .write_netcdf import write_netcdf diff --git a/mpas_analysis/shared/io/utility.py 
b/mpas_analysis/shared/io/utility.py index c36be1d90..6bb7cf4fe 100644 --- a/mpas_analysis/shared/io/utility.py +++ b/mpas_analysis/shared/io/utility.py @@ -2,8 +2,6 @@ IO utility functions Phillip J. Wolfram, Xylar Asay-Davis - -Last Modified: 03/23/2017 """ import glob @@ -12,32 +10,30 @@ import string -def paths(*args): # {{{ +def paths(*args): # {{{ """ Returns glob'd paths in list for arbitrary number of function arguments. Note, each expanded set of paths is sorted. Phillip J. Wolfram - 10/25/2016 """ paths = [] for aargs in args: paths += sorted(glob.glob(aargs)) - return paths # }}} + return paths # }}} def fingerprint_generator(size=12, - chars=string.ascii_uppercase + string.digits): # {{{ + chars=string.ascii_uppercase + string.digits): # {{{ """ - Returns a random string that can be used as a unique fingerprint + Returns a random string that can be used as a unique fingerprint Reference: http://stackoverflow.com/questions/2257441/random-string-generation-with-upper-case-letters-and-digits-in-python - + Phillip J. Wolfram - 04/27/2017 """ - return ''.join(random.choice(chars) for _ in range(size)) # }}} + return ''.join(random.choice(chars) for _ in range(size)) # }}} def make_directories(path): # {{{ @@ -47,7 +43,6 @@ def make_directories(path): # {{{ Returns the path unchanged. Author: Xylar Asay-Davis - Last Modified: 02/02/2017 """ try: @@ -59,7 +54,7 @@ def make_directories(path): # {{{ def build_config_full_path(config, section, relativePathOption, relativePathSection=None, - defaultPath=None): # {{{ + defaultPath=None): # {{{ """ Returns a full path from a base directory and a relative path @@ -85,10 +80,6 @@ def build_config_full_path(config, section, relativePathOption, Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 03/23/2017 """ if relativePathSection is None: relativePathSection = section @@ -102,7 +93,7 @@ def build_config_full_path(config, section, relativePathOption, if defaultPath is not None and not os.path.exists(fullPath): fullPath = defaultPath - return fullPath # }}} + return fullPath # }}} def check_path_exists(path): # {{{ diff --git a/mpas_analysis/shared/io/write_netcdf.py b/mpas_analysis/shared/io/write_netcdf.py new file mode 100644 index 000000000..d56401615 --- /dev/null +++ b/mpas_analysis/shared/io/write_netcdf.py @@ -0,0 +1,54 @@ +''' +Functions for writing data sets + +Functions +--------- +write_netcdf - write an xarray data set to a NetCDF file using finite fill + values + +Author +------ +Xylar Asay-Davis + +''' + +import netCDF4 +import numpy + + +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): # {{{ + ''' + Write an xarray data set to a NetCDF file using finite fill values + + Parameters + ---------- + ds : xarray.Dataset object + The xarray data set to be written to a file + + fileName : str + The fileName to write the data set to + + fillValues : dict + A dictionary of fill values for each supported data type. By default, + this is the dictionary used by the netCDF4 package. Key entries should + be of the form 'f8' (for float64), 'i4' (for int32), etc. 
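# A usage sketch for write_netcdf(): NaNs in each variable are written with
# the default fill value matching its dtype (the file name and data are
# placeholders):
import numpy
import xarray
ds = xarray.Dataset({'sst': ('nCells',
                             numpy.array([12.3, numpy.nan, 8.9]))})
write_netcdf(ds, 'sst_example.nc')  # 'sst' gets the _FillValue for 'f8'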
+
+    Author
+    ------
+    Xylar Asay-Davis
+
+    '''
+    encodingDict = {}
+    for variableName in ds:
+        dtype = ds[variableName].dtype
+        for fillType in fillValues:
+            if dtype == numpy.dtype(fillType):
+                encodingDict[variableName] = \
+                    {'_FillValue': fillValues[fillType]}
+                break
+
+    ds.to_netcdf(fileName, encoding=encodingDict)
+
+    # }}}
+
+# vim: ai ts=4 sts=4 et sw=4 ft=python
diff --git a/mpas_analysis/shared/plot/plotting.py b/mpas_analysis/shared/plot/plotting.py
index 5914348a1..241bc75da 100644
--- a/mpas_analysis/shared/plot/plotting.py
+++ b/mpas_analysis/shared/plot/plotting.py
@@ -1,17 +1,13 @@
 """
 Plotting utilities, including routines for plotting:
     * time series (and comparing with reference data sets)
-    * regridded horizontal fields (and comparing with reference data sets)
+    * remapped horizontal fields (and comparing with reference data sets)
     * vertical sections on native grid
     * NINO34 time series and spectra
 
 Authors
 -------
 Xylar Asay-Davis, Milena Veneziani, Luke Van Roekel
-
-Last Modified
--------------
-04/07/2017
 """
 
 import matplotlib.pyplot as plt
@@ -107,10 +103,6 @@ def nino34_spectra_plot(config, f, ninoSpectra,
     Author
     ------
     Luke Van Roekel
-
-    Last Modified
-    -------------
-    04/07/2017
     """
 
     fig = plt.figure(figsize=figsize, dpi=dpi)
@@ -255,10 +247,6 @@ def nino34_timeseries_plot(config, nino34Index, nino34Obs, nino3430, title,
     Author
     ------
     Luke Van Roekel
-
-    Last Modified
-    -------------
-    04/07/2017
     """
 
     fig = plt.figure(figsize=figsize, dpi=dpi)
@@ -336,10 +324,6 @@ def _plot_nino_timeseries(plt, ninoIndex, time, xlabel, ylabel,
     Author
     ------
     Luke Van Roekel
-
-    Last Modified
-    -------------
-    04/07/2017
     '''
     plt.title(panelTitle, y=1.06, **axis_font)
     y1 = ninoIndex
@@ -412,10 +396,6 @@ def timeseries_analysis_plot(config, dsvalues, N, title, xlabel, ylabel,
     Authors
     -------
     Xylar Asay-Davis, Milena Veneziani
-
-    Last Modified
-    -------------
-    03/14/2017
     """
 
     plt.figure(figsize=figsize, dpi=dpi)
@@ -504,10 +484,6 @@ def timeseries_analysis_plot_polar(config, dsvalues, N, title,
     Authors
     -------
     Adrian K. Turner
-
-    Last Modified
-    -------------
-    03/15/2017
     """
 
     plt.figure(figsize=figsize, dpi=dpi)
@@ -649,10 +625,6 @@ def plot_polar_comparison(
     Authors
    -------
     Xylar Asay-Davis, Milena Veneziani
-
-    Last Modified
-    -------------
-    03/17/2017
     """
 
     # set up figure
@@ -800,10 +772,6 @@ def plot_global_comparison(
     Authors
     -------
     Xylar Asay-Davis, Milena Veneziani
-
-    Last Modified
-    -------------
-    04/20/2017
     """
 
     # set up figure
@@ -873,13 +841,175 @@ def plot_global_comparison(
     plt.close()
 
 
-def _date_tick(days, pos, calendar='gregorian', includeMonth=True):
-    days = np.maximum(days, 0.)
-    date = days_to_datetime(days, calendar)
-    if includeMonth:
-        return '{:04d}-{:02d}'.format(date.year, date.month)
+def plot_polar_projection_comparison(
+        config,
+        x,
+        y,
+        landMask,
+        modelArray,
+        obsArray,
+        diffArray,
+        fileout,
+        colorMapSectionName,
+        colorMapType='norm',
+        title=None,
+        modelTitle='Model',
+        obsTitle='Observations',
+        diffTitle='Model-Observations',
+        cbarlabel='units',
+        titleFontSize=None,
+        figsize=(8, 22),
+        dpi=300):
+
+    """
+    Plots a model data set, an observational data set and their difference
+    as three panels on a polar projection grid.
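# A hedged example of the config options read when colorMapType='norm' (see
# the parameter description below); the section name and all values are
# illustrative:
#
#   [climatologyMapMeltAntarctic]
#   colormapNameResult = erdc_iceFire_H
#   normTypeResult = symLog
#   normArgsResult = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100.,
#                     'vmax': 100.}
#   colorbarTicksResult = [-100., -10., -1., 0., 1., 10., 100.]
#   colormapNameDifference = RdBu_r
#   normTypeDifference = linear
#   normArgsDifference = {'vmin': -20., 'vmax': 20.}
#   colorbarTicksDifference = [-20., -10., 0., 10., 20.]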
+
+    Parameters
+    ----------
+    config : instance of ConfigParser
+        the configuration, containing a [plot] section with options that
+        control plotting
+
+    x, y : numpy ndarrays
+        1D x and y arrays defining the projection grid
+
+    landMask : numpy masked array
+        a field that is masked out everywhere except over land, used to plot
+        the land and the land/ocean boundary
+
+    modelArray, obsArray : numpy ndarrays
+        model and observational data sets
+
+    diffArray : float array
+        difference between modelArray and obsArray
+
+    fileout : str
+        the file name to be written
+
+    colorMapSectionName : str
+        section name in ``config`` where color map info can be found.
+
+    colorMapType : {'norm', 'indexed'}, optional
+        The type of color map, either a matplotlib norm or indices into a color
+        map.
+
+        If 'norm', the following options must be defined for suffixes
+        ``Result`` and ``Difference``:
+            ``colormapName``, ``normType``,
+            ``normArgs``, ``colorbarTicks``
+
+        If 'indexed', these options are required:
+            ``colormapName``, ``colormapIndices``,
+            ``colorbarLevels``
+
+        The colorbar for each panel will be constructed from these options
+
+
+    title : str, optional
+        the subtitle of the plot
+
+    modelTitle : str, optional
+        title of the model panel
+
+    obsTitle : str, optional
+        title of the observations panel
+
+    diffTitle : str, optional
+        title of the difference (bias) panel
+
+    cbarlabel : str, optional
+        label on the colorbar
+
+    titleFontSize : int, optional
+        size of the title font
+
+    figsize : tuple of float, optional
+        the size of the figure in inches
+
+    dpi : int, optional
+        the number of dots per inch of the figure
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    def plot_panel(ax, title, array, cmap, norm, ticks):
+        plt.title(title, y=1.06, **axis_font)
+
+        plt.pcolormesh(x, y, array, cmap=cmap, norm=norm)
+        cbar = plt.colorbar()
+        cbar.set_label(cbarlabel)
+        if ticks is not None:
+            cbar.set_ticks(ticks)
+            cbar.set_ticklabels(['{}'.format(tick) for tick in ticks])
+        plt.pcolormesh(x, y, landMask, cmap=landColorMap)
+        plt.contour(xCenter, yCenter, landMask.mask, (0.5,), colors='k',
+                    linewidths=0.5)
+        ax.axis('off')
+        ax.set_aspect('equal')
+        ax.autoscale(tight=True)
+
+    if colorMapType == 'norm':
+        (cmapModelObs, normModelObs) = _setup_colormap_and_norm(
+            config, colorMapSectionName, suffix='Result')
+        (cmapDiff, normDiff) = _setup_colormap_and_norm(
+            config, colorMapSectionName, suffix='Difference')
+
+        colorbarTicksResult = config.getExpression(colorMapSectionName,
+                                                   'colorbarTicksResult')
+        colorbarTicksDifference = config.getExpression(
+            colorMapSectionName, 'colorbarTicksDifference')
+    elif colorMapType == 'indexed':
+
+        (cmapModelObs, colorbarTicksResult) = setup_colormap(
+            config, colorMapSectionName, suffix='Result')
+        (cmapDiff, colorbarTicksDifference) = setup_colormap(
+            config, colorMapSectionName, suffix='Difference')
+
+        normModelObs = cols.BoundaryNorm(colorbarTicksResult, cmapModelObs.N)
+        normDiff = cols.BoundaryNorm(colorbarTicksDifference, cmapDiff.N)
     else:
-        return '{:04d}'.format(date.year)
+        raise ValueError('colorMapType must be one of {norm, indexed}')
+
+    # set up figure
+    fig = plt.figure(figsize=figsize, dpi=dpi)
+    if (title is not None):
+        if titleFontSize is None:
+            titleFontSize = config.get('plot', 'titleFontSize')
+        title_font = {'size': titleFontSize,
+                      'color': config.get('plot', 'titleFontColor'),
+                      'weight': config.get('plot', 'titleFontWeight')}
+        fig.suptitle(title, y=0.95, **title_font)
+    axis_font = {'size': config.get('plot', 'axisFontSize')}
+
+    # set up
+    # set up land colormap
+    colorList = [(0.8, 0.8, 0.8), (0.8, 0.8, 0.8)]
+    landColorMap = cols.LinearSegmentedColormap.from_list('land', colorList)
+
+    # locations of centers for land contour
+    xCenter = 0.5*(x[1:] + x[0:-1])
+    yCenter = 0.5*(y[1:] + y[0:-1])
+
+    ax = plt.subplot(3, 1, 1)
+    plot_panel(ax, modelTitle, modelArray, cmapModelObs, normModelObs,
+               colorbarTicksResult)
+
+    ax = plt.subplot(3, 1, 2)
+    plot_panel(ax, obsTitle, obsArray, cmapModelObs, normModelObs,
+               colorbarTicksResult)
+
+    ax = plt.subplot(3, 1, 3)
+    plot_panel(ax, diffTitle, diffArray, cmapDiff, normDiff,
+               colorbarTicksDifference)
+
+    if fileout is not None:
+        plt.savefig(fileout, dpi=dpi, bbox_inches='tight', pad_inches=0.1)
+
+    if not config.getboolean('plot', 'displayToScreen'):
+        plt.close()

 def plot_1D(config, xArrays, fieldArrays, errArrays,
@@ -942,10 +1072,6 @@ def plot_1D(config, xArrays, fieldArrays, errArrays,
     Authors
     -------
     Mark Petersen, Milena Veneziani
-
-    Last Modified
-    -------------
-    04/20/2017
     """

     # set up figure
@@ -1081,10 +1207,6 @@ def plot_vertical_section(
     Authors
     -------
     Milena Veneziani, Mark Petersen
-
-    Last Modified
-    -------------
-    03/13/2017
     """

     # set up figure
@@ -1159,12 +1281,10 @@ def setup_colormap(config, configSectionName, suffix=''):
     Authors
     -------
     Xylar Asay-Davis, Milena Veneziani
-
-    Last modified
-    -------------
-    03/17/2017
     '''

+    _register_custom_colormaps()
+
     colormap = plt.get_cmap(config.get(configSectionName,
                                        'colormapName{}'.format(suffix)))
     indices = config.getExpression(configSectionName,
@@ -1186,6 +1306,185 @@ def setup_colormap(config, configSectionName, suffix=''):

     return (colormap, colorbarLevels)


+def _setup_colormap_and_norm(config, configSectionName, suffix=''):
+    '''
+    Set up a colormap and norm from the registry and config options
+
+    Parameters
+    ----------
+    config : instance of ConfigParser
+        the configuration, containing a [plot] section with options that
+        control plotting
+
+    configSectionName : str
+        name of config section
+
+    suffix : str, optional
+        suffix of colormap related options
+
+    Returns
+    -------
+    colormap : matplotlib colormap
+        the colormap loaded from the registry
+
+    norm : instance of ``matplotlib.colors.Normalize``
+        the norm used to normalize the colormap (``SymLogNorm`` or
+        ``Normalize``, depending on ``normType``)
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    '''
+
+    _register_custom_colormaps()
+
+    colormap = plt.get_cmap(config.get(configSectionName,
+                                       'colormapName{}'.format(suffix)))
+
+    normType = config.get(configSectionName, 'normType{}'.format(suffix))
+
+    kwargs = config.getExpression(configSectionName,
+                                  'normArgs{}'.format(suffix))
+
+    if normType == 'symLog':
+        norm = cols.SymLogNorm(**kwargs)
+    elif normType == 'linear':
+        norm = cols.Normalize(**kwargs)
+    else:
+        raise ValueError('Unsupported norm type {} in section {}'.format(
+            normType, configSectionName))
+
+    return (colormap, norm)
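``_date_tick``, defined just below, follows the ``(value, position)`` call signature that ``matplotlib.ticker.FuncFormatter`` expects once its remaining arguments are frozen. A minimal sketch of how it might be attached to a time axis, assuming it runs in this module (where ``_date_tick`` and its imports are available) and that the x axis holds days since the reference date; the ``functools.partial`` wiring is illustrative, not taken from this patch:

```python
from functools import partial

import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter

fig, ax = plt.subplots()
ax.plot([0., 3652.], [0., 1.])  # x values in days since the reference date

# freeze calendar and includeMonth so only (days, pos) remain, matching the
# callback signature FuncFormatter requires
formatter = FuncFormatter(partial(_date_tick, calendar='gregorian_noleap',
                                  includeMonth=False))
ax.xaxis.set_major_formatter(formatter)
```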
+
+
+def _date_tick(days, pos, calendar='gregorian', includeMonth=True):
+    days = np.maximum(days, 0.)
+    date = days_to_datetime(days, calendar)
+    if includeMonth:
+        return '{:04d}-{:02d}'.format(date.year, date.month)
+    else:
+        return '{:04d}'.format(date.year)
+
+
+def _register_custom_colormaps():
+    name = 'ferret'
+    backgroundColor = (0.9, 0.9, 0.9)
+
+    red = np.array([[0, 0.6],
+                    [0.15, 1],
+                    [0.35, 1],
+                    [0.65, 0],
+                    [0.8, 0],
+                    [1, 0.75]])
+
+    green = np.array([[0, 0],
+                      [0.1, 0],
+                      [0.35, 1],
+                      [1, 0]])
+
+    blue = np.array([[0, 0],
+                     [0.5, 0],
+                     [0.9, 0.9],
+                     [1, 0.9]])
+
+    colorCount = 21
+    colorList = np.ones((colorCount, 4), float)
+    colorList[:, 0] = np.interp(np.linspace(0, 1, colorCount),
+                                red[:, 0], red[:, 1])
+    colorList[:, 1] = np.interp(np.linspace(0, 1, colorCount),
+                                green[:, 0], green[:, 1])
+    colorList[:, 2] = np.interp(np.linspace(0, 1, colorCount),
+                                blue[:, 0], blue[:, 1])
+    colorList = colorList[::-1, :]
+
+    colorMap = cols.LinearSegmentedColormap.from_list(
+        name, colorList, N=255)
+
+    colorMap.set_bad(backgroundColor)
+    plt.register_cmap(name, colorMap)
+
+    name = 'erdc_iceFire_H'
+
+    colorArray = np.array([
+        [-1, 4.05432e-07, 0, 5.90122e-06],
+        [-0.87451, 0, 0.120401, 0.302675],
+        [-0.74902, 0, 0.216583, 0.524574],
+        [-0.623529, 0.0552475, 0.345025, 0.6595],
+        [-0.498039, 0.128047, 0.492588, 0.720288],
+        [-0.372549, 0.188955, 0.641309, 0.792092],
+        [-0.247059, 0.327673, 0.784935, 0.873434],
+        [-0.121569, 0.60824, 0.892164, 0.935547],
+        [0.00392157, 0.881371, 0.912178, 0.818099],
+        [0.129412, 0.951407, 0.835621, 0.449279],
+        [0.254902, 0.904481, 0.690489, 0],
+        [0.380392, 0.85407, 0.510864, 0],
+        [0.505882, 0.777093, 0.33018, 0.00088199],
+        [0.631373, 0.672862, 0.139087, 0.00269398],
+        [0.756863, 0.508815, 0, 0],
+        [0.882353, 0.299417, 0.000366289, 0.000547829],
+        [1, 0.0157519, 0.00332021, 4.55569e-08]], float)
+
+    colorCount = 255
+    colorList = np.ones((colorCount, 4), float)
+    x = colorArray[:, 0]
+    for cIndex in range(3):
+        colorList[:, cIndex] = np.interp(
+            np.linspace(-1., 1., colorCount),
+            x, colorArray[:, cIndex+1])
+
+    colorMap = cols.LinearSegmentedColormap.from_list(
+        name, colorList, N=255)
+
+    plt.register_cmap(name, colorMap)
+
+    name = 'erdc_iceFire_L'
+
+    colorArray = np.array([
+        [-1, 0.870485, 0.913768, 0.832905],
+        [-0.87451, 0.586919, 0.887865, 0.934003],
+        [-0.74902, 0.31583, 0.776442, 0.867858],
+        [-0.623529, 0.18302, 0.632034, 0.787722],
+        [-0.498039, 0.117909, 0.484134, 0.713825],
+        [-0.372549, 0.0507239, 0.335979, 0.654741],
+        [-0.247059, 0, 0.209874, 0.511832],
+        [-0.121569, 0, 0.114689, 0.28935],
+        [0.00392157, 0.0157519, 0.00332021, 4.55569e-08],
+        [0.129412, 0.312914, 0, 0],
+        [0.254902, 0.520865, 0, 0],
+        [0.380392, 0.680105, 0.15255, 0.0025996],
+        [0.505882, 0.785109, 0.339479, 0.000797922],
+        [0.631373, 0.857354, 0.522494, 0],
+        [0.756863, 0.910974, 0.699774, 0],
+        [0.882353, 0.951921, 0.842817, 0.478545],
+        [1, 0.881371, 0.912178, 0.818099]], float)
+
+    colorCount = 255
+    colorList = np.ones((colorCount, 4), float)
+    x = colorArray[:, 0]
+    for cIndex in range(3):
+        colorList[:, cIndex] = np.interp(
+            np.linspace(-1., 1., colorCount),
+            x, colorArray[:, cIndex+1])
+
+    colorMap = cols.LinearSegmentedColormap.from_list(
+        name, colorList, N=255)
+
+    plt.register_cmap(name, colorMap)
+
+    name = 'BuOr'
+    colors1 = plt.cm.PuOr(np.linspace(0., 1, 256))
+    colors2 = plt.cm.RdBu(np.linspace(0, 1, 256))
+
+    # combine them and build a new colormap, just the orange from the first
+    # and the blue from the second
+    colorList = np.vstack((colors1[0:128, :], colors2[128:256, :]))
+    # reverse the order
+    colorList = colorList[::-1, :]
+
colorMap = cols.LinearSegmentedColormap.from_list(name, colorList) + + plt.register_cmap(name, colorMap) + + def _plot_size_y_axis(plt, xaxisValues, **data): ''' Resize the y-axis limit based on the curves being plotted @@ -1203,10 +1502,6 @@ def _plot_size_y_axis(plt, xaxisValues, **data): Author ------ Luke Van Roekel - - Last modified - ------------- - 04/07/2017 ''' ax = plt.gca() @@ -1244,7 +1539,6 @@ def _plot_xtick_format(plt, calendar, minDays, maxDays, maxXTicks): Author ------ Xylar Asay-Davis - ''' ax = plt.gca() diff --git a/mpas_analysis/shared/time_series/time_series.py b/mpas_analysis/shared/time_series/time_series.py index a132419ad..3163abc6e 100644 --- a/mpas_analysis/shared/time_series/time_series.py +++ b/mpas_analysis/shared/time_series/time_series.py @@ -4,10 +4,6 @@ Authors ------- Xylar Asay-Davis - -Last Modified -------------- -04/08/2017 """ import xarray as xr @@ -70,11 +66,6 @@ def cache_time_series(timesInDataSet, timeSeriesCalcFunction, cacheFileName, Authors ------- Xylar Asay-Davis - - Last Modified - ------------- - 04/08/2017 - ''' timesProcessed = numpy.zeros(len(timesInDataSet), bool) diff --git a/mpas_analysis/shared/timekeeping/MpasRelativeDelta.py b/mpas_analysis/shared/timekeeping/MpasRelativeDelta.py index 295b8e50e..ff1f1eb30 100644 --- a/mpas_analysis/shared/timekeeping/MpasRelativeDelta.py +++ b/mpas_analysis/shared/timekeeping/MpasRelativeDelta.py @@ -16,10 +16,6 @@ class MpasRelativeDelta(relativedelta): Author ------ Xylar Asay-Davis - - Last Modified - ------------- - 02/09/2017 """ def __init__(self, dt1=None, dt2=None, years=0, months=0, days=0, diff --git a/mpas_analysis/shared/timekeeping/utility.py b/mpas_analysis/shared/timekeeping/utility.py index 2f3c4d08c..b5af1f5df 100644 --- a/mpas_analysis/shared/timekeeping/utility.py +++ b/mpas_analysis/shared/timekeeping/utility.py @@ -4,10 +4,6 @@ Author ------ Xylar Asay-Davis - -Last Modified -------------- -02/11/2017 """ import datetime @@ -29,9 +25,9 @@ def get_simulation_start_time(streams): Returns ------- - simulation_start_time : string + simulationStartTime : str The start date of the simulation parsed from a restart file identified - by the contents of `streams`. + by the contents of ``streams``. 
Raises ------ @@ -41,10 +37,6 @@ def get_simulation_start_time(streams): Author ------ Xylar Asay-Davis - - Last modified - ------------- - 02/11/2017 """ try: @@ -99,10 +91,6 @@ def string_to_datetime(dateString): # {{{ Author ------ Xylar Asay-Davis - - Last modified - ------------- - 02/04/2017 """ (year, month, day, hour, minute, second) = \ @@ -151,10 +139,6 @@ def string_to_relative_delta(dateString, calendar='gregorian'): # {{{ Author ------ Xylar Asay-Davis - - Last modified - ------------- - 02/04/2017 """ (years, months, days, hours, minutes, seconds) = \ @@ -214,10 +198,6 @@ def string_to_days_since_date(dateString, calendar='gregorian', Author ------ Xylar Asay-Davis - - Last modified - ------------- - 02/04/2017 """ isSingleString = isinstance(dateString, str) @@ -267,10 +247,6 @@ def days_to_datetime(days, calendar='gregorian', referenceDate='0001-01-01'): Author ------ Xylar Asay-Davis - - Last modified - ------------- - 02/04/2017 """ datetimes = netCDF4.num2date(days, @@ -324,10 +300,6 @@ def datetime_to_days(dates, calendar='gregorian', referenceDate='0001-01-01'): Author ------ Xylar Asay-Davis - - Last modified - ------------- - 02/11/2017 """ isSingleDate = False @@ -377,10 +349,6 @@ def date_to_days(year=1, month=1, day=1, hour=0, minute=0, second=0, Author ------ Xylar Asay-Davis - - Last modified - ------------- - 02/11/2017 """ calendar = _mpas_to_netcdf_calendar(calendar) @@ -432,10 +400,6 @@ def _parse_date_string(dateString, isInterval=False): # {{{ Author ------ Xylar Asay-Davis - - Last modified - ------------- - 02/04/2017 """ if isInterval: offset = 0 diff --git a/mpas_analysis/shared/variable_namelist_stream_maps/ocean_maps.py b/mpas_analysis/shared/variable_namelist_stream_maps/ocean_maps.py index 22b7738a4..7092b0b73 100644 --- a/mpas_analysis/shared/variable_namelist_stream_maps/ocean_maps.py +++ b/mpas_analysis/shared/variable_namelist_stream_maps/ocean_maps.py @@ -5,10 +5,6 @@ Authors ------- Xylar Asay-Davis - -Last Modified -------------- -03/29/2017 ''' oceanNamelistMap = { diff --git a/mpas_analysis/test/__init__.py b/mpas_analysis/test/__init__.py index 81d0f563d..e22c29e23 100644 --- a/mpas_analysis/test/__init__.py +++ b/mpas_analysis/test/__init__.py @@ -59,6 +59,7 @@ def loaddatadir(request, tmpdir): class TestCase(unittest.TestCase): + def assertEqual(self, a1, a2): assert a1 == a2 or (a1 != a1 and a2 != a2) diff --git a/mpas_analysis/test/test_analysis_task.py b/mpas_analysis/test/test_analysis_task.py index 2a823329a..71524b1cd 100644 --- a/mpas_analysis/test/test_analysis_task.py +++ b/mpas_analysis/test/test_analysis_task.py @@ -31,13 +31,13 @@ def doTest(generate, expectedResults): # Comments from config.template about how generate works: # # a list of analyses to generate. 
Valid names are: - # 'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST', - # 'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol', - # 'regriddedSeaIceConcThick' + # 'timeSeriesOHC', 'timeSeriesSST', 'climatologyMapSST', + # 'climatologyMapSSS', 'climatologyMapMLD', 'timeSeriesSeaIceAreaVol', + # 'climatologyMapSeaIceConcThick' # the following shortcuts exist: # 'all' -- all analyses will be run # 'all_timeSeries' -- all time-series analyses will be run - # 'all_regriddedHorizontal' -- all analyses involving regridded + # 'all_horizontalMap' -- all analyses involving remapped # horizontal fields will be run # 'all_ocean' -- all ocean analyses will be run # 'all_seaIce' -- all sea-ice analyses will be run diff --git a/mpas_analysis/test/test_climatology.py b/mpas_analysis/test/test_climatology.py index 55754d567..6cf50584c 100644 --- a/mpas_analysis/test/test_climatology.py +++ b/mpas_analysis/test/test_climatology.py @@ -18,10 +18,10 @@ from mpas_analysis.configuration.MpasAnalysisConfigParser \ import MpasAnalysisConfigParser from mpas_analysis.shared.climatology import \ - get_lat_lon_comparison_descriptor, get_remapper, \ - get_mpas_climatology_file_names, get_observation_climatology_file_names, \ + get_comparison_descriptor, get_remapper, \ + get_observation_climatology_file_names, \ add_years_months_days_in_month, compute_climatology, \ - compute_monthly_climatology, update_start_end_year, cache_climatologies + compute_monthly_climatology from mpas_analysis.shared.grid import MpasMeshDescriptor, LatLonGridDescriptor from mpas_analysis.shared.constants import constants @@ -50,8 +50,8 @@ def setup_config(self, autocloseFileLimitFraction=0.5, config.set('output', 'baseDirectory', self.test_dir) config.set('output', 'mappingSubdirectory', '.') config.set('output', 'mpasClimatologySubdirectory', 'clim/mpas') - config.set('output', 'mpasRegriddedClimSubdirectory', - 'clim/mpas/regrid') + config.set('output', 'mpasRemappedClimSubdirectory', + 'clim/mpas/remap') config.add_section('climatology') config.set('climatology', 'startYear', '2') @@ -59,15 +59,13 @@ def setup_config(self, autocloseFileLimitFraction=0.5, config.set('climatology', 'comparisonLatResolution', '0.5') config.set('climatology', 'comparisonLonResolution', '0.5') - config.set('climatology', 'overwriteMapping', 'False') - config.set('climatology', 'overwriteMpasClimatology', 'False') config.set('climatology', 'mpasInterpolationMethod', 'bilinear') config.add_section('oceanObservations') config.set('oceanObservations', 'interpolationMethod', 'bilinear') config.set('oceanObservations', 'climatologySubdirectory', 'clim/obs') - config.set('oceanObservations', 'regriddedClimSubdirectory', - 'clim/obs/regrid') + config.set('oceanObservations', 'remappedClimSubdirectory', + 'clim/obs/remap') return config @@ -75,7 +73,7 @@ def setup_mpas_remapper(self, config): mpasMeshFileName = '{}/mpasMesh.nc'.format(self.datadir) comparisonDescriptor = \ - get_lat_lon_comparison_descriptor(config) + get_comparison_descriptor(config, 'latlon') mpasDescriptor = MpasMeshDescriptor( mpasMeshFileName, meshName=config.get('input', 'mpasMeshName')) @@ -83,8 +81,6 @@ def setup_mpas_remapper(self, config): remapper = get_remapper( config=config, sourceDescriptor=mpasDescriptor, comparisonDescriptor=comparisonDescriptor, - mappingFileSection='climatology', - mappingFileOption='mpasMappingFile', mappingFilePrefix='map', method=config.get( 'climatology', 'mpasInterpolationMethod')) @@ -94,18 +90,16 @@ def setup_obs_remapper(self, config, fieldName): 
gridFileName = '{}/obsGrid.nc'.format(self.datadir) comparisonDescriptor = \ - get_lat_lon_comparison_descriptor(config) + get_comparison_descriptor(config, 'latlon') - obsDescriptor = LatLonGridDescriptor() - obsDescriptor.read(fileName=gridFileName, latVarName='lat', - lonVarName='lon') + obsDescriptor = LatLonGridDescriptor.read(fileName=gridFileName, + latVarName='lat', + lonVarName='lon') remapper = \ get_remapper( config=config, sourceDescriptor=obsDescriptor, comparisonDescriptor=comparisonDescriptor, - mappingFileSection='oceanObservations', - mappingFileOption='sstClimatologyMappingFile', mappingFilePrefix='map_obs_{}'.format(fieldName), method=config.get('oceanObservations', 'interpolationMethod')) @@ -135,15 +129,18 @@ def open_test_ds(self, config, calendar): def test_get_mpas_remapper(self): config = self.setup_config() - defaultMappingFileName = '{}/map_QU240_to_0.5x0.5degree_' \ - 'bilinear.nc'.format(self.test_dir) + explicitMappingPath = '{}/maps'.format(self.test_dir) + os.makedirs(explicitMappingPath) - explicitMappingFileName = '{}/mapping.nc'.format(self.test_dir) + fileBase = 'map_QU240_to_0.5x0.5degree_bilinear.nc' + + defaultMappingFileName = '{}/{}'.format(self.test_dir, fileBase) + explicitMappingFileName = '{}/{}'.format(explicitMappingPath, fileBase) for mappingFileName, setName in [(defaultMappingFileName, False), (explicitMappingFileName, True)]: if setName: - config.set('climatology', 'mpasMappingFile', mappingFileName) + config.set('input', 'mappingDirectory', explicitMappingPath) remapper = self.setup_mpas_remapper(config) @@ -156,22 +153,28 @@ def test_get_mpas_remapper(self): assert isinstance(remapper.destinationDescriptor, LatLonGridDescriptor) + if not setName: + # copy the mapping file so it exists in the 'maps' dir + shutil.copyfile(defaultMappingFileName, + explicitMappingFileName) + def test_get_observations_remapper(self): config = self.setup_config() fieldName = 'sst' - defaultMappingFileName = '{}/map_obs_{}_1.0x1.0degree_to_' \ - '0.5x0.5degree_bilinear.nc'.format( - self.test_dir, fieldName) + explicitMappingPath = '{}/maps'.format(self.test_dir) + os.makedirs(explicitMappingPath) - explicitMappingFileName = '{}/mapping.nc'.format(self.test_dir) + fileBase = 'map_obs_sst_1.0x1.0degree_to_0.5x0.5degree_bilinear.nc' + + defaultMappingFileName = '{}/{}'.format(self.test_dir, fileBase) + explicitMappingFileName = '{}/{}'.format(explicitMappingPath, fileBase) for mappingFileName, setName in [(defaultMappingFileName, False), (explicitMappingFileName, True)]: if setName: - config.set('oceanObservations', 'sstClimatologyMappingFile', - mappingFileName) + config.set('input', 'mappingDirectory', explicitMappingPath) remapper = self.setup_obs_remapper(config, fieldName) @@ -184,30 +187,10 @@ def test_get_observations_remapper(self): assert isinstance(remapper.destinationDescriptor, LatLonGridDescriptor) - def test_get_mpas_climatology_file_names(self): - config = self.setup_config() - fieldName = 'sst' - monthNames = 'JFM' - - remapper = self.setup_mpas_remapper(config) - - (climatologyFileName, climatologyPrefix, regriddedFileName) = \ - get_mpas_climatology_file_names( - config, fieldName, monthNames, - remapper.sourceDescriptor.meshName, - remapper.destinationDescriptor.meshName) - expectedClimatologyFileName = '{}/clim/mpas/sst_QU240_JFM_' \ - 'year0002.nc'.format(self.test_dir) - self.assertEqual(climatologyFileName, expectedClimatologyFileName) - - expectedClimatologyPrefix = '{}/clim/mpas/sst_QU240_' \ - 'JFM'.format(self.test_dir) - 
self.assertEqual(climatologyPrefix, expectedClimatologyPrefix) - - expectedRegriddedFileName = '{}/clim/mpas/regrid/sst_QU240_to_' \ - '0.5x0.5degree_JFM_' \ - 'year0002.nc'.format(self.test_dir) - self.assertEqual(regriddedFileName, expectedRegriddedFileName) + if not setName: + # copy the mapping file so it exists in the 'maps' dir + shutil.copyfile(defaultMappingFileName, + explicitMappingFileName) def test_get_observation_climatology_file_names(self): config = self.setup_config() @@ -217,17 +200,17 @@ def test_get_observation_climatology_file_names(self): remapper = self.setup_obs_remapper(config, fieldName) - (climatologyFileName, regriddedFileName) = \ + (climatologyFileName, remappedFileName) = \ get_observation_climatology_file_names( config, fieldName, monthNames, componentName, remapper) expectedClimatologyFileName = '{}/clim/obs/sst_1.0x1.0degree_' \ 'JFM.nc'.format(self.test_dir) self.assertEqual(climatologyFileName, expectedClimatologyFileName) - expectedRegriddedFileName = '{}/clim/obs/regrid/sst_1.0x1.0degree_' \ - 'to_0.5x0.5degree_' \ - 'JFM.nc'.format(self.test_dir) - self.assertEqual(regriddedFileName, expectedRegriddedFileName) + expectedRemappedFileName = '{}/clim/obs/remap/sst_1.0x1.0degree_' \ + 'to_0.5x0.5degree_' \ + 'JFM.nc'.format(self.test_dir) + self.assertEqual(remappedFileName, expectedRemappedFileName) def test_compute_climatology(self): config = self.setup_config() @@ -289,184 +272,5 @@ def test_compute_monthly_climatology(self): self.assertArrayApproxEqual(monthlyClimatology.month.values, refClimatology.month.values) - def test_update_start_end_year(self): - config = self.setup_config() - calendar = 'gregorian_noleap' - ds = self.open_test_ds(config, calendar) - - changed, startYear, endYear = \ - update_start_end_year(ds, config, calendar) - - assert(not changed) - assert(startYear == 2) - assert(endYear == 2) - - config.set('climatology', 'endYear', '50') - ds = self.open_test_ds(config, calendar) - - with self.assertWarns('climatology start and/or end year different ' - 'from requested'): - changed, startYear, endYear = \ - update_start_end_year(ds, config, calendar) - - assert(changed) - assert(startYear == 2) - assert(endYear == 2) - - def cache_climatologies_setup(self): - config = self.setup_config() - calendar = 'gregorian_noleap' - ds = self.open_test_ds(config, calendar) - fieldName = 'mld' - climFileName = '{}/refSeasonalClim.nc'.format(self.datadir) - refClimatology = xarray.open_dataset(climFileName) - - remapper = self.setup_mpas_remapper(config) - - return {'config': config, 'calendar': calendar, 'ds': ds, - 'fieldName': fieldName, 'climFileName': climFileName, - 'refClimatology': refClimatology, 'remapper': remapper} - - def test_jan_1yr_climo_test1(self): - setup = self.cache_climatologies_setup() - # test1: Just January, 1-year climatologies are cached; only one file - # is produced with suffix year0002; a second run of - # cache_climatologies doesn't modify any files - test1 = {'monthNames': 'Jan', - 'monthValues': [1], - 'yearsPerCacheFile': 1, - 'expectedSuffixes': ['year0002'], - 'expectedModified': [False], - # weird value because first time step of Jan. 
missing in ds - 'expectedDays': 30.958333, - 'expectedMonths': 1, - 'refClimatology': None} - self.cache_climatologies_driver(test1, **setup) - - def test_jfm_1yr_climo_test2(self): - setup = self.cache_climatologies_setup() - # same as test1 but with JFM - test2 = {'monthNames': 'JFM', - 'monthValues': constants.monthDictionary['JFM'], - 'yearsPerCacheFile': 1, - 'expectedSuffixes': ['year0002'], - 'expectedModified': [False], - # weird value because first time step of Jan. missing in ds - 'expectedDays': 89.958333, - 'expectedMonths': 3, - 'refClimatology': setup['refClimatology']} - self.cache_climatologies_driver(test2, **setup) - - def test_jan_2yr_climo_test3(self): - setup = self.cache_climatologies_setup() - # test3: 2-year climatologies are cached; 2 files are produced - # with suffix years0002-0003 (the "individual" climatology - # file) and year0002 (the "aggregated" climatology file); - # a second tries to update the "individual" cache file - # because it appears to be incomplete but does not attempt - # to update the aggregated climatology file because no - # additional years were processed and the file was already - # complete for the span of years present - test3 = {'monthNames': 'Jan', - 'monthValues': [1], - 'yearsPerCacheFile': 2, - 'expectedSuffixes': ['years0002-0003', 'year0002'], - 'expectedModified': [True, False], - # weird value because first time step of Jan. missing in ds - 'expectedDays': 30.958333, - 'expectedMonths': 1, - 'refClimatology': None} - self.cache_climatologies_driver(test3, **setup) - - def test_jfm_2yr_climo_test4(self): - setup = self.cache_climatologies_setup() - # test4: same as test3 but with JFM - test4 = {'monthNames': 'JFM', - 'monthValues': constants.monthDictionary['JFM'], - 'yearsPerCacheFile': 2, - 'expectedSuffixes': ['years0002-0003', 'year0002'], - 'expectedModified': [True, False], - # weird value because first time step of Jan. 
missing in ds - 'expectedDays': 89.958333, - 'expectedMonths': 3, - 'refClimatology': setup['refClimatology']} - self.cache_climatologies_driver(test4, **setup) - - def cache_climatologies_driver(self, test, config, fieldName, - ds, remapper, calendar, **kwargs): - monthNames = test['monthNames'] - monthValues = test['monthValues'] - yearsPerCacheFile = test['yearsPerCacheFile'] - expectedSuffixes = test['expectedSuffixes'] - expectedModified = test['expectedModified'] - expectedDays = test['expectedDays'] - expectedMonths = test['expectedMonths'] - refClimatology = test['refClimatology'] - - (climatologyFileName, climatologyPrefix) = \ - get_mpas_climatology_file_names( - config, fieldName, monthNames, - remapper.sourceDescriptor.meshName) - - config.set('climatology', 'yearsPerCacheFile', - str(yearsPerCacheFile)) - # once without cache files - dsClimatology = cache_climatologies( - ds, monthValues, config, climatologyPrefix, calendar, - printProgress=True) - - if refClimatology is not None: - self.assertArrayApproxEqual(dsClimatology.mld.values, - refClimatology.mld.values) - - self.assertEqual(dsClimatology.attrs['totalMonths'], - expectedMonths) - self.assertApproxEqual(dsClimatology.attrs['totalDays'], - expectedDays) - dsClimatology.close() - - fingerprints = [] - for suffix in expectedSuffixes: - expectedClimatologyFileName = '{}/clim/mpas/mld_QU240_' \ - '{}_{}.nc'.format( - self.test_dir, monthNames, - suffix) - assert os.path.exists(expectedClimatologyFileName) - - dsClimatology = xarray.open_dataset(expectedClimatologyFileName) - fingerprints.append(dsClimatology.fingerprintClimo) - - # try it again with cache files saved - dsClimatology = cache_climatologies( - ds, monthValues, config, climatologyPrefix, calendar, - printProgress=True) - - if refClimatology is not None: - self.assertArrayApproxEqual(dsClimatology.mld.values, - refClimatology.mld.values) - - self.assertEqual(dsClimatology.attrs['totalMonths'], - expectedMonths) - self.assertApproxEqual(dsClimatology.attrs['totalDays'], - expectedDays) - dsClimatology.close() - - for index, suffix in enumerate(expectedSuffixes): - expectedClimatologyFileName = '{}/clim/mpas/mld_QU240_' \ - '{}_{}.nc'.format( - self.test_dir, monthNames, - suffix) - - dsClimatology = xarray.open_dataset(expectedClimatologyFileName) - fingerprintCheck = dsClimatology.fingerprintClimo - - # Check whether the given file was modified, and whether - # this was the expected result - fileWasModified = fingerprints[index] != fingerprintCheck - assert fileWasModified == expectedModified[index] - - # remove the cache file for the next try - os.remove(expectedClimatologyFileName) - # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/test/test_interpolate.py b/mpas_analysis/test/test_interpolate.py index 0008459c4..62a35233d 100644 --- a/mpas_analysis/test/test_interpolate.py +++ b/mpas_analysis/test/test_interpolate.py @@ -42,8 +42,9 @@ def get_mpas_descriptor(self): def get_latlon_file_descriptor(self): latLonGridFileName = str(self.datadir.join('SST_annual_1870-1900.nc')) - descriptor = LatLonGridDescriptor() - descriptor.read(latLonGridFileName, latVarName='lat', lonVarName='lon') + descriptor = LatLonGridDescriptor.read(latLonGridFileName, + latVarName='lat', + lonVarName='lon') return (descriptor, latLonGridFileName) @@ -57,8 +58,7 @@ def get_latlon_array_descriptor(self): lon = numpy.array(config.getExpression('interpolate', 'lon', usenumpyfunc=True)) - descriptor = LatLonGridDescriptor() - descriptor.create(lat, lon, 
units='degrees')
+        descriptor = LatLonGridDescriptor.create(lat, lon, units='degrees')
         return descriptor

     def get_stereographic_array_descriptor(self):
@@ -73,9 +73,9 @@ def get_stereographic_array_descriptor(self):
         res = 100e3
         nx = 2*int(xMax/res)+1
         x = numpy.linspace(-xMax, xMax, nx)
-        descriptor = ProjectionGridDescriptor(projection)
         meshName = '{}km_Antarctic_stereo'.format(int(res*1e-3))
-        descriptor.create(x, x, meshName)
+        descriptor = \
+            ProjectionGridDescriptor.create(projection, x, x, meshName)
         return descriptor

     def get_file_names(self, suffix):
diff --git a/mpas_analysis/test/test_mpas_climatology.py b/mpas_analysis/test/test_mpas_climatology.py
new file mode 100644
index 000000000..53ce0e2e3
--- /dev/null
+++ b/mpas_analysis/test/test_mpas_climatology.py
@@ -0,0 +1,100 @@
+"""
+Unit test infrastructure for climatologies.
+
+Xylar Asay-Davis
+"""
+
+import pytest
+import tempfile
+import shutil
+import os
+from mpas_analysis.test import TestCase, loaddatadir
+from mpas_analysis.configuration.MpasAnalysisConfigParser \
+    import MpasAnalysisConfigParser
+from mpas_analysis.shared.climatology import MpasClimatology, \
+    get_comparison_descriptor
+from mpas_analysis.shared.constants import constants
+
+
+@pytest.mark.usefixtures("loaddatadir")
+class TestMpasClimatology(TestCase):
+
+    def setUp(self):
+        # Create a temporary directory
+        self.test_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        # Remove the directory after the test
+        shutil.rmtree(self.test_dir)
+
+    def setup_config(self):
+        config = MpasAnalysisConfigParser()
+        config.read('{}/config.GQU240'.format(self.datadir))
+        config.set('input', 'baseDirectory', str(self.datadir))
+        config.set('output', 'baseDirectory', str(self.test_dir))
+        return config
+
+    def setup_climatology(self, config, seasons, comparisonGridNames=None):
+        variableList = ['timeMonthly_avg_ssh', 'timeMonthly_avg_tThreshMLD']
+        climatologyTask = \
+            MpasClimatology(config=config,
+                            variableList=variableList,
+                            taskSuffix='SSH_MLD',
+                            componentName='ocean',
+                            comparisonGridNames=comparisonGridNames,
+                            seasons=seasons,
+                            tags=['climatology', 'ssh', 'mld'])
+
+        climatologyTask.setup_and_check()
+
+        climatologyTask.run()
+
+        return climatologyTask
+
+    def test_seasons_none(self):
+        config = self.setup_config()
+        climatologyTask = \
+            self.setup_climatology(config=config,
+                                   seasons=['none'])
+
+        for month in constants.abrevMonthNames:
+            fileName = climatologyTask.get_ncclimo_file_name(
+                season=month, stage='unmasked')
+            assert os.path.exists(fileName)
+
+    def test_seasons(self):
+        config = self.setup_config()
+        seasons = ['JFM', 'JAS', 'FM', 'ON', 'ANN']
+        climatologyTask = \
+            self.setup_climatology(config=config,
+                                   seasons=seasons)
+
+        for season in seasons:
+            fileName = climatologyTask.get_ncclimo_file_name(
+                season=season, stage='unmasked')
+            assert os.path.exists(fileName)
+
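The two tests above and ``test_remap`` just below exercise the new ``MpasClimatology`` task end to end: construct it, call ``setup_and_check()`` and ``run()``, then look up the ncclimo output for each stage ('unmasked', 'masked' and, per comparison grid, 'remapped'). A condensed sketch of that workflow, assuming a ``config`` populated like the one from ``setup_config``; the variable list and task suffix here are illustrative:

```python
from mpas_analysis.shared.climatology import MpasClimatology, \
    get_comparison_descriptor

# build, check and run a climatology task for one ocean variable
task = MpasClimatology(config=config,
                       variableList=['timeMonthly_avg_ssh'],
                       taskSuffix='SSH',
                       componentName='ocean',
                       comparisonGridNames=['antarctic'],
                       seasons=['ANN'],
                       tags=['climatology', 'ssh'])
task.setup_and_check()
task.run()

# file names can be queried for each stage of the ncclimo pipeline
unmasked = task.get_ncclimo_file_name(season='ANN', stage='unmasked')
masked = task.get_ncclimo_file_name(season='ANN', stage='masked')
descriptor = get_comparison_descriptor(config, 'antarctic')
remapped = task.get_ncclimo_file_name(season='ANN', stage='remapped',
                                      comparisonDescriptor=descriptor)
```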
+    def test_remap(self):
+        config = self.setup_config()
+        seasons = ['JFM', 'JAS', 'FM', 'ON', 'ANN']
+        comparisonGridNames = ['latlon', 'antarctic']
+        climatologyTask = \
+            self.setup_climatology(config=config,
+                                   seasons=seasons,
+                                   comparisonGridNames=comparisonGridNames)
+
+        for season in seasons:
+            fileName = climatologyTask.get_ncclimo_file_name(
+                season=season, stage='masked')
+            assert os.path.exists(fileName)
+
+            for comparisonGridName in comparisonGridNames:
+                comparisonDescriptor = \
+                    get_comparison_descriptor(config, comparisonGridName)
+                fileName = climatologyTask.get_ncclimo_file_name(
+                    season=season, stage='remapped',
+                    comparisonDescriptor=comparisonDescriptor)
+                assert os.path.exists(fileName)
+
+
+# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
diff --git a/mpas_analysis/test/test_mpas_climatology/config.GQU240 b/mpas_analysis/test/test_mpas_climatology/config.GQU240
new file mode 100644
index 000000000..35028a656
--- /dev/null
+++ b/mpas_analysis/test/test_mpas_climatology/config.GQU240
@@ -0,0 +1,61 @@
+[execute]
+parallelTaskCount = 1
+
+# the parallelism mode in ncclimo ("serial" or "bck")
+# Set this to "bck" (background parallelism) if running on a machine that can
+# handle 12 simultaneous processes, one for each monthly climatology.
+ncclimoParallelMode = serial
+
+[input]
+# subdirectory containing restart files
+runSubdirectory = .
+# subdirectory for ocean history files
+oceanHistorySubdirectory = .
+# subdirectory for sea ice history files
+seaIceHistorySubdirectory = .
+
+# names of namelist and streams files, either a path relative to baseDirectory
+# or an absolute path.
+oceanNamelistFileName = mpas-o_in
+oceanStreamsFileName = streams.ocean
+seaIceNamelistFileName = mpas-cice_in
+seaIceStreamsFileName = streams.cice
+
+# names of ocean and sea ice meshes (e.g. EC60to30, QU240, RRS30to10, etc.)
+mpasMeshName = oQU240
+
+
+[output]
+# subdirectories within baseDirectory for analysis output
+plotsSubdirectory = plots
+logsSubdirectory = logs
+mpasClimatologySubdirectory = clim/mpas
+mappingSubdirectory = mapping
+
+
+[climatology]
+# the first year over which to average climatologies
+startYear = 1
+# the last year over which to average climatologies
+endYear = 1
+
+# The comparison lat/lon grid resolution in degrees
+comparisonLatResolution = 2
+comparisonLonResolution = 2
+
+# The comparison Antarctic polar stereographic grid size and resolution in km
+comparisonAntarcticStereoWidth = 6000.
+comparisonAntarcticStereoResolution = 100.
+
+# interpolation order for model and observation results. Likely values are
+# 'bilinear', 'neareststod' (nearest neighbor) or 'conserve'
+mpasInterpolationMethod = bilinear
+
+# should remapping be performed with ncremap or with the Remapper class
+# directly in MPAS-Analysis
+useNcremap = True
+
+# The minimum weight of a destination cell after remapping. Any cell with
+# weights lower than this threshold will therefore be masked out.
+renormalizationThreshold = 0.01
+
diff --git a/mpas_analysis/test/test_mpas_climatology/mpas-o_in b/mpas_analysis/test/test_mpas_climatology/mpas-o_in
new file mode 100644
index 000000000..0d8ba89a3
--- /dev/null
+++ b/mpas_analysis/test/test_mpas_climatology/mpas-o_in
@@ -0,0 +1,1092 @@
+&run_modes
+    config_ocean_run_mode = 'forward'
+/
+&time_management
+    config_calendar_type = 'gregorian_noleap'
+    config_do_restart = .true.
+    config_restart_timestamp_name = 'rpointer.ocn'
+    config_start_time = 'file'
+/
+&io
+    config_pio_num_iotasks = 0
+    config_pio_stride = 1
+    config_write_output_on_startup = .true.
+/
+&decomposition
+    config_block_decomp_file_prefix = '/project/projectdirs/acme/inputdata/ocn/mpas-o/oQU240/mpas-o.graph.info.151209.part.'
+    config_explicit_proc_decomp = .false.
+    config_num_halos = 3
+    config_number_of_blocks = 0
+    config_proc_decomp_file_prefix = 'graph.info.part.'
+/ +&init_setup + config_expand_sphere = .false. + config_init_configuration = 'none' + config_realistic_coriolis_parameter = .false. + config_vert_levels = -1 + config_vertical_grid = 'uniform' + config_write_cull_cell_mask = .true. +/ +&cvtgenerator + config_1dcvtgenerator_dzseed = 1.2 + config_1dcvtgenerator_stretch1 = 1.0770 + config_1dcvtgenerator_stretch2 = 1.0275 +/ +&init_ssh_and_landicepressure + config_iterative_init_variable = 'landIcePressure' +/ +&time_integration + config_dt = '01:00:00' + config_time_integrator = 'split_explicit' +/ +&ale_vertical_grid + config_dzdk_positive = .false. + config_max_thickness_factor = 6.0 + config_min_thickness = 1.0 + config_use_min_max_thickness = .false. + config_vert_coord_movement = 'uniform_stretching' +/ +&ale_frequency_filtered_thickness + config_highfreqthick_del2 = 100.0 + config_highfreqthick_restore_time = 30.0 + config_thickness_filter_timescale = 5.0 + config_use_freq_filtered_thickness = .false. + config_use_highfreqthick_del2 = .false. + config_use_highfreqthick_restore = .false. +/ +&partial_bottom_cells + config_alter_ics_for_pbcs = .false. + config_min_pbc_fraction = 0.10 + config_pbc_alteration_type = 'full_cell' +/ +&hmix + config_apvm_scale_factor = 0.0 + config_hmix_scalewithmesh = .false. + config_maxmeshdensity = -1.0 +/ +&hmix_del2 + config_mom_del2 = 10.0 + config_tracer_del2 = 10.0 + config_use_mom_del2 = .false. + config_use_tracer_del2 = .false. +/ +&hmix_del4 + config_mom_del4 = 2.0e14 + config_mom_del4_div_factor = 1.0 + config_tracer_del4 = 0.0 + config_use_mom_del4 = .true. + config_use_tracer_del4 = .false. +/ +&hmix_leith + config_leith_dx = 15000.0 + config_leith_parameter = 1.0 + config_leith_visc2_max = 2.5e3 + config_use_leith_del2 = .false. +/ +&mesoscale_eddy_parameterization + config_gravwavespeed_trunc = 0.3 + config_max_relative_slope = 0.01 + config_redi_bottom_layer_tapering_depth = 0.0 + config_redi_kappa = 0.0 + config_redi_surface_layer_tapering_extent = 0.0 + config_standardgm_tracer_kappa = 600.0 + config_use_redi_bottom_layer_tapering = .false. + config_use_redi_surface_layer_tapering = .false. + config_use_standardgm = .true. +/ +&hmix_del2_tensor + config_mom_del2_tensor = 10.0 + config_use_mom_del2_tensor = .false. +/ +&hmix_del4_tensor + config_mom_del4_tensor = 5.0e13 + config_use_mom_del4_tensor = .false. +/ +&rayleigh_damping + config_rayleigh_damping_coeff = 0.0 + config_rayleigh_friction = .false. +/ +&vmix + config_convective_diff = 1.0 + config_convective_visc = 1.0 +/ +&vmix_const + config_use_const_diff = .false. + config_use_const_visc = .false. + config_vert_diff = 1.0e-5 + config_vert_visc = 1.0e-4 +/ +&vmix_rich + config_bkrd_vert_diff = 1.0e-5 + config_bkrd_vert_visc = 1.0e-4 + config_rich_mix = 0.005 + config_use_rich_diff = .false. + config_use_rich_visc = .false. +/ +&vmix_tanh + config_max_diff_tanh = 2.5e-2 + config_max_visc_tanh = 2.5e-1 + config_min_diff_tanh = 1.0e-5 + config_min_visc_tanh = 1.0e-4 + config_use_tanh_diff = .false. + config_use_tanh_visc = .false. + config_zmid_tanh = -100 + config_zwidth_tanh = 100 +/ +&cvmix + config_cvmix_background_diffusion = 1.0e-5 + config_cvmix_background_viscosity = 1.0e-4 + config_cvmix_convective_basedonbvf = .true. + config_cvmix_convective_diffusion = 1.0 + config_cvmix_convective_triggerbvf = 0.0 + config_cvmix_convective_viscosity = 1.0 + config_cvmix_kpp_boundary_layer_depth = 30.0 + config_cvmix_kpp_criticalbulkrichardsonnumber = 0.25 + config_cvmix_kpp_ekmanobl = .false. 
+ config_cvmix_kpp_interpolationomltype = 'quadratic' + config_cvmix_kpp_matching = 'SimpleShapes' + config_cvmix_kpp_monobobl = .false. + config_cvmix_kpp_stop_obl_search = 100.0 + config_cvmix_kpp_surface_layer_averaging = 5.0 + config_cvmix_kpp_surface_layer_extent = 0.1 + config_cvmix_kpp_use_enhanced_diff = .true. + config_cvmix_num_ri_smooth_loops = 2 + config_cvmix_prandtl_number = 1.0 + config_cvmix_shear_kpp_exp = 3 + config_cvmix_shear_kpp_nu_zero = 0.005 + config_cvmix_shear_kpp_ri_zero = 0.7 + config_cvmix_shear_mixing_scheme = 'KPP' + config_cvmix_shear_pp_alpha = 5.0 + config_cvmix_shear_pp_exp = 2.0 + config_cvmix_shear_pp_nu_zero = 0.005 + config_use_cvmix = .true. + config_use_cvmix_background = .true. + config_use_cvmix_convection = .true. + config_use_cvmix_double_diffusion = .false. + config_use_cvmix_fixed_boundary_layer = .false. + config_use_cvmix_kpp = .true. + config_use_cvmix_shear = .true. + config_use_cvmix_tidal_mixing = .false. + configure_cvmix_kpp_minimum_obl_under_sea_ice = 10.0 +/ +&forcing + config_flux_attenuation_coefficient = 0.001 + config_flux_attenuation_coefficient_runoff = 10.0 + config_use_bulk_thickness_flux = .true. + config_use_bulk_wind_stress = .true. +/ +&coupling + config_ssh_grad_relax_timescale = 86400.0 +/ +&shortwaveradiation + config_forcing_restart_file = 'Restart_forcing_time_stamp' + config_jerlov_water_type = 3 + config_surface_buoyancy_depth = 1 + config_sw_absorption_type = 'jerlov' +/ +&frazil_ice + config_frazil_fractional_thickness_limit = 0.1 + config_frazil_heat_of_fusion = 3.337e5 + config_frazil_ice_density = 1000.0 + config_frazil_in_open_ocean = .true. + config_frazil_land_ice_reference_salinity = 0.0 + config_frazil_maximum_depth = 100.0 + config_frazil_maximum_freezing_temperature = 0.0 + config_frazil_sea_ice_reference_salinity = 4.0 + config_frazil_under_land_ice = .true. + config_frazil_use_surface_pressure = .false. + config_specific_heat_sea_water = 3.996e3 + config_use_frazil_ice_formation = .true. +/ +&land_ice_fluxes + config_land_ice_flux_attenuation_coefficient = 10.0 + config_land_ice_flux_boundarylayerneighborweight = 0.0 + config_land_ice_flux_boundarylayerthickness = 10.0 + config_land_ice_flux_cp_ice = 2.009e3 + config_land_ice_flux_formulation = 'Jenkins' + config_land_ice_flux_isomip_gammat = 1e-4 + config_land_ice_flux_jenkins_heat_transfer_coefficient = 0.011 + config_land_ice_flux_jenkins_salt_transfer_coefficient = 3.1e-4 + config_land_ice_flux_mode = 'off' + config_land_ice_flux_rho_ice = 918 + config_land_ice_flux_rms_tidal_velocity = 5e-2 + config_land_ice_flux_topdragcoeff = 2.5e-3 + config_land_ice_flux_usehollandjenkinsadvdiff = .false. +/ +&advection + config_coef_3rd_order = 0.25 + config_horiz_tracer_adv_order = 3 + config_monotonic = .true. 
+ config_vert_tracer_adv = 'stencil' + config_vert_tracer_adv_order = 3 +/ +&bottom_drag + config_bottom_drag_coeff = 1.0e-3 +/ +&ocean_constants + config_density0 = 1026.0 +/ +&pressure_gradient + config_common_level_weight = 0.5 + config_pressure_gradient_type = 'Jacobian_from_TS' +/ +&eos + config_eos_type = 'jm' + config_land_ice_cavity_freezing_temperature_coeff_0 = 6.22e-2 + config_land_ice_cavity_freezing_temperature_coeff_p = -7.43e-8 + config_land_ice_cavity_freezing_temperature_coeff_ps = -1.74e-10 + config_land_ice_cavity_freezing_temperature_coeff_s = -5.63e-2 + config_land_ice_cavity_freezing_temperature_reference_pressure = 0.0 + config_open_ocean_freezing_temperature_coeff_0 = -1.8 + config_open_ocean_freezing_temperature_coeff_p = 0.0 + config_open_ocean_freezing_temperature_coeff_ps = 0.0 + config_open_ocean_freezing_temperature_coeff_s = 0.0 + config_open_ocean_freezing_temperature_reference_pressure = 0.0 +/ +&eos_linear + config_eos_linear_alpha = 0.2 + config_eos_linear_beta = 0.8 + config_eos_linear_densityref = 1000.0 + config_eos_linear_sref = 35.0 + config_eos_linear_tref = 5.0 +/ +&split_explicit_ts + config_btr_dt = '0000_00:03:00' + config_btr_gam1_velwt1 = 0.5 + config_btr_gam2_sshwt1 = 1.0 + config_btr_gam3_velwt2 = 1.0 + config_btr_solve_ssh2 = .false. + config_btr_subcycle_loop_factor = 2 + config_n_bcl_iter_beg = 1 + config_n_bcl_iter_end = 2 + config_n_bcl_iter_mid = 2 + config_n_btr_cor_iter = 2 + config_n_ts_iter = 2 + config_vel_correction = .true. +/ +&testing + config_conduct_tests = .false. + config_tensor_test_function = 'sph_uCosCos' + config_test_tensors = .false. +/ +&debug + config_check_ssh_consistency = .true. + config_check_tracer_monotonicity = .false. + config_check_zlevel_consistency = .false. + config_disable_redi_horizontal_term1 = .false. + config_disable_redi_horizontal_term2 = .false. + config_disable_redi_horizontal_term3 = .false. + config_disable_redi_k33 = .false. + config_disable_thick_all_tend = .false. + config_disable_thick_hadv = .false. + config_disable_thick_sflux = .false. + config_disable_thick_vadv = .false. + config_disable_tr_adv = .false. + config_disable_tr_all_tend = .false. + config_disable_tr_hmix = .false. + config_disable_tr_nonlocalflux = .false. + config_disable_tr_sflux = .false. + config_disable_tr_vmix = .false. + config_disable_vel_all_tend = .false. + config_disable_vel_coriolis = .false. + config_disable_vel_hmix = .false. + config_disable_vel_pgrad = .false. + config_disable_vel_surface_stress = .false. + config_disable_vel_vadv = .false. + config_disable_vel_vmix = .false. + config_filter_btr_mode = .false. + config_include_ke_vertex = .false. + config_prescribe_thickness = .false. + config_prescribe_velocity = .false. + config_read_nearest_restart = .false. +/ +&constrain_haney_number + config_rx1_horiz_smooth_open_ocean_cells = 20 + config_rx1_horiz_smooth_weight = 1.0 + config_rx1_init_inner_weight = 0.1 + config_rx1_inner_iter_count = 10 + config_rx1_max = 5.0 + config_rx1_min_layer_thickness = 1.0 + config_rx1_min_levels = 3 + config_rx1_outer_iter_count = 20 + config_rx1_slope_weight = 1e-1 + config_rx1_vert_smooth_weight = 1.0 + config_rx1_zstar_weight = 1.0 + config_use_rx1_constraint = .false. 
+/ +&baroclinic_channel + config_baroclinic_channel_bottom_depth = 1000.0 + config_baroclinic_channel_bottom_temperature = 10.1 + config_baroclinic_channel_coriolis_parameter = -1.2e-4 + config_baroclinic_channel_gradient_width_dist = 40e3 + config_baroclinic_channel_gradient_width_frac = 0.08 + config_baroclinic_channel_salinity = 35.0 + config_baroclinic_channel_surface_temperature = 13.1 + config_baroclinic_channel_temperature_difference = 1.2 + config_baroclinic_channel_use_distances = .false. + config_baroclinic_channel_vert_levels = 20 +/ +&lock_exchange + config_lock_exchange_bottom_depth = 20.0 + config_lock_exchange_cold_temperature = 5.0 + config_lock_exchange_direction = 'y' + config_lock_exchange_isopycnal_min_thickness = 0.01 + config_lock_exchange_layer_type = 'z-level' + config_lock_exchange_salinity = 35.0 + config_lock_exchange_vert_levels = 20 + config_lock_exchange_warm_temperature = 30.0 +/ +&internal_waves + config_internal_waves_amplitude_width_dist = 50e3 + config_internal_waves_amplitude_width_frac = 0.33 + config_internal_waves_bottom_depth = 500.0 + config_internal_waves_bottom_temperature = 10.1 + config_internal_waves_isopycnal_displacement = 125.0 + config_internal_waves_layer_type = 'z-level' + config_internal_waves_salinity = 35.0 + config_internal_waves_surface_temperature = 20.1 + config_internal_waves_temperature_difference = 2.0 + config_internal_waves_use_distances = false + config_internal_waves_vert_levels = 20 +/ +&overflow + config_overflow_bottom_depth = 2000.0 + config_overflow_domain_temperature = 20.0 + config_overflow_isopycnal_min_thickness = 0.01 + config_overflow_layer_type = 'z-level' + config_overflow_plug_temperature = 10.0 + config_overflow_plug_width_dist = 20e3 + config_overflow_plug_width_frac = 0.10 + config_overflow_ridge_depth = 500.0 + config_overflow_salinity = 35.0 + config_overflow_slope_center_dist = 40e3 + config_overflow_slope_center_frac = 0.20 + config_overflow_slope_width_dist = 7e3 + config_overflow_slope_width_frac = 0.05 + config_overflow_use_distances = false + config_overflow_vert_levels = 100 +/ +&global_ocean + config_global_ocean_chlorophyll_varname = 'none' + config_global_ocean_clearsky_varname = 'none' + config_global_ocean_cull_inland_seas = .true. + config_global_ocean_deepen_critical_passages = .true. + config_global_ocean_depress_by_land_ice = .false. + config_global_ocean_depth_conversion_factor = 1.0 + config_global_ocean_depth_dimname = 'none' + config_global_ocean_depth_file = 'none' + config_global_ocean_depth_varname = 'none' + config_global_ocean_ecosys_depth_conversion_factor = 1.0 + config_global_ocean_ecosys_depth_varname = 'none' + config_global_ocean_ecosys_file = 'unknown' + config_global_ocean_ecosys_forcing_file = 'unknown' + config_global_ocean_ecosys_forcing_time_dimname = 'none' + config_global_ocean_ecosys_lat_varname = 'none' + config_global_ocean_ecosys_latlon_degrees = .true. 
+ config_global_ocean_ecosys_lon_varname = 'none' + config_global_ocean_ecosys_method = 'bilinear_interpolation' + config_global_ocean_ecosys_ndepth_dimname = 'none' + config_global_ocean_ecosys_nlat_dimname = 'none' + config_global_ocean_ecosys_nlon_dimname = 'none' + config_global_ocean_ecosys_vert_levels = -1 + config_global_ocean_interior_restore_rate = 1.0e-7 + config_global_ocean_land_ice_topo_draft_varname = 'none' + config_global_ocean_land_ice_topo_file = 'none' + config_global_ocean_land_ice_topo_grounded_frac_varname = 'none' + config_global_ocean_land_ice_topo_ice_frac_varname = 'none' + config_global_ocean_land_ice_topo_lat_varname = 'none' + config_global_ocean_land_ice_topo_latlon_degrees = .true. + config_global_ocean_land_ice_topo_lon_varname = 'none' + config_global_ocean_land_ice_topo_nlat_dimname = 'none' + config_global_ocean_land_ice_topo_nlon_dimname = 'none' + config_global_ocean_land_ice_topo_thickness_varname = 'none' + config_global_ocean_minimum_depth = 15 + config_global_ocean_piston_velocity = 5.0e-5 + config_global_ocean_salinity_file = 'none' + config_global_ocean_salinity_varname = 'none' + config_global_ocean_smooth_ecosys_iterations = 0 + config_global_ocean_smooth_topography = .true. + config_global_ocean_smooth_ts_iterations = 0 + config_global_ocean_swdata_file = 'none' + config_global_ocean_swdata_lat_varname = 'none' + config_global_ocean_swdata_latlon_degrees = .true. + config_global_ocean_swdata_lon_varname = 'none' + config_global_ocean_swdata_method = 'bilinear_interpolation' + config_global_ocean_swdata_nlat_dimname = 'none' + config_global_ocean_swdata_nlon_dimname = 'none' + config_global_ocean_temperature_file = 'none' + config_global_ocean_temperature_varname = 'none' + config_global_ocean_topography_file = 'none' + config_global_ocean_topography_has_ocean_frac = .false. + config_global_ocean_topography_lat_varname = 'none' + config_global_ocean_topography_latlon_degrees = .true. + config_global_ocean_topography_lon_varname = 'none' + config_global_ocean_topography_method = 'bilinear_interpolation' + config_global_ocean_topography_nlat_dimname = 'none' + config_global_ocean_topography_nlon_dimname = 'none' + config_global_ocean_topography_ocean_frac_varname = 'none' + config_global_ocean_topography_varname = 'none' + config_global_ocean_tracer_depth_conversion_factor = 1.0 + config_global_ocean_tracer_depth_varname = 'none' + config_global_ocean_tracer_lat_varname = 'none' + config_global_ocean_tracer_latlon_degrees = .true. + config_global_ocean_tracer_lon_varname = 'none' + config_global_ocean_tracer_method = 'bilinear_interpolation' + config_global_ocean_tracer_ndepth_dimname = 'none' + config_global_ocean_tracer_nlat_dimname = 'none' + config_global_ocean_tracer_nlon_dimname = 'none' + config_global_ocean_tracer_vert_levels = -1 + config_global_ocean_windstress_conversion_factor = 1 + config_global_ocean_windstress_file = 'none' + config_global_ocean_windstress_lat_varname = 'none' + config_global_ocean_windstress_latlon_degrees = .true. 
+ config_global_ocean_windstress_lon_varname = 'none' + config_global_ocean_windstress_meridional_varname = 'none' + config_global_ocean_windstress_method = 'bilinear_interpolation' + config_global_ocean_windstress_nlat_dimname = 'none' + config_global_ocean_windstress_nlon_dimname = 'none' + config_global_ocean_windstress_zonal_varname = 'none' + config_global_ocean_zenithangle_varname = 'none' +/ +&cvmix_wswsbf + config_cvmix_wswsbf_bottom_depth = 400.0 + config_cvmix_wswsbf_coriolis_parameter = 1.0e-4 + config_cvmix_wswsbf_evaporation_flux = 0.0 + config_cvmix_wswsbf_interior_salinity_restoring_rate = 1.0e-6 + config_cvmix_wswsbf_interior_temperature_restoring_rate = 1.0e-6 + config_cvmix_wswsbf_latent_heat_flux = 0.0 + config_cvmix_wswsbf_max_windstress = 0.10 + config_cvmix_wswsbf_mixed_layer_depth_salinity = 0.0 + config_cvmix_wswsbf_mixed_layer_depth_temperature = 0.0 + config_cvmix_wswsbf_mixed_layer_salinity_change = 0.0 + config_cvmix_wswsbf_mixed_layer_temperature_change = 0.0 + config_cvmix_wswsbf_rain_flux = 0.0 + config_cvmix_wswsbf_salinity_gradient = 0.0 + config_cvmix_wswsbf_salinity_gradient_mixed_layer = 0.0 + config_cvmix_wswsbf_salinity_piston_velocity = 4.0e-6 + config_cvmix_wswsbf_sensible_heat_flux = 0.0 + config_cvmix_wswsbf_shortwave_heat_flux = 0.0 + config_cvmix_wswsbf_surface_restoring_salinity = 35.0 + config_cvmix_wswsbf_surface_restoring_temperature = 15.0 + config_cvmix_wswsbf_surface_salinity = 35.0 + config_cvmix_wswsbf_surface_temperature = 15.0 + config_cvmix_wswsbf_temperature_gradient = 0.01 + config_cvmix_wswsbf_temperature_gradient_mixed_layer = 0.0 + config_cvmix_wswsbf_temperature_piston_velocity = 4.0e-6 + config_cvmix_wswsbf_vert_levels = 100 + config_cvmix_wswsbf_vertical_grid = 'uniform' +/ +&iso + config_iso_acc_wind = 0.2 + config_iso_asf_wind = -0.05 + config_iso_cont_slope_flag = .true. + config_iso_depression_center_lon = 60 + config_iso_depression_depth = 800 + config_iso_depression_flag = .true. + config_iso_depression_north_lat = -65 + config_iso_depression_south_lat = -72 + config_iso_depression_width = 480000 + config_iso_embayment_center_lat = -71 + config_iso_embayment_center_lon = 60 + config_iso_embayment_depth = 2000 + config_iso_embayment_flag = .true. + config_iso_embayment_radius = 500000 + config_iso_heat_flux_lat_mn = -53 + config_iso_heat_flux_lat_sm = -65 + config_iso_heat_flux_lat_ss = -70 + config_iso_heat_flux_middle = 10 + config_iso_heat_flux_north = -5 + config_iso_heat_flux_region1 = -5 + config_iso_heat_flux_region1_flag = false + config_iso_heat_flux_region1_radius = 300000 + config_iso_heat_flux_region2 = -5 + config_iso_heat_flux_region2_flag = false + config_iso_heat_flux_region2_radius = 240000 + config_iso_heat_flux_south = -5 + config_iso_initial_temp_h0 = 1200 + config_iso_initial_temp_h1 = 500 + config_iso_initial_temp_latn = -50 + config_iso_initial_temp_lats = -75 + config_iso_initial_temp_mt = 0.000075 + config_iso_initial_temp_t1 = 3.5 + config_iso_initial_temp_t2 = 4.0 + config_iso_main_channel_depth = 4000.0 + config_iso_max_cont_slope = 0.01 + config_iso_north_wall_lat = -50 + config_iso_plateau_center_lat = -58 + config_iso_plateau_center_lon = 300 + config_iso_plateau_flag = .true. 
+ config_iso_plateau_height = 2000 + config_iso_plateau_radius = 200000 + config_iso_plateau_slope_width = 1000000 + config_iso_region1_center_lat = -75 + config_iso_region1_center_lon = 60 + config_iso_region2_center_lat = -71 + config_iso_region2_center_lon = 150 + config_iso_region3_center_lat = -71 + config_iso_region3_center_lon = 240 + config_iso_region4_center_lat = -71 + config_iso_region4_center_lon = 330 + config_iso_ridge_center_lon = 180 + config_iso_ridge_flag = .true. + config_iso_ridge_height = 2000.0 + config_iso_ridge_width = 2000000 + config_iso_salinity = 35.0 + config_iso_shelf_depth = 500 + config_iso_shelf_flag = .true. + config_iso_shelf_width = 120000 + config_iso_south_wall_lat = -70 + config_iso_surface_temperature_piston_velocity = 5.787e-5 + config_iso_temperature_restore_lcx1 = 600000 + config_iso_temperature_restore_lcx2 = 600000 + config_iso_temperature_restore_lcx3 = 600000 + config_iso_temperature_restore_lcx4 = 600000 + config_iso_temperature_restore_lcy1 = 600000 + config_iso_temperature_restore_lcy2 = 250000 + config_iso_temperature_restore_lcy3 = 250000 + config_iso_temperature_restore_lcy4 = 250000 + config_iso_temperature_restore_region1_flag = .true. + config_iso_temperature_restore_region2_flag = .true. + config_iso_temperature_restore_region3_flag = .true. + config_iso_temperature_restore_region4_flag = .true. + config_iso_temperature_restore_t1 = -1 + config_iso_temperature_restore_t2 = -1 + config_iso_temperature_restore_t3 = -1 + config_iso_temperature_restore_t4 = -1 + config_iso_temperature_sponge_h1 = 1000 + config_iso_temperature_sponge_l1 = 120000 + config_iso_temperature_sponge_t1 = 10 + config_iso_temperature_sponge_tau1 = 10.0 + config_iso_vert_levels = 100 + config_iso_wind_stress_max = 0.01 + config_iso_wind_trans = -65 +/ +&soma + config_soma_bottom_depth = 2500.0 + config_soma_center_latitude = 35.0 + config_soma_center_longitude = 0.0 + config_soma_density_difference = 4.0 + config_soma_density_difference_linear = 0.05 + config_soma_domain_width = 1.25e6 + config_soma_phi = 0.1 + config_soma_ref_density = 1000.0 + config_soma_restoring_temp_piston_vel = 1.0e-5 + config_soma_shelf_depth = 100.0 + config_soma_shelf_width = -0.4 + config_soma_surface_salinity = 33.0 + config_soma_surface_temp_restoring_at_center_latitude = 7.5 + config_soma_surface_temp_restoring_latitude_gradient = 0.5 + config_soma_surface_temperature = 20.0 + config_soma_thermocline_depth = 300.0 + config_soma_use_surface_temp_restoring = false + config_soma_vert_levels = 100 +/ +&ziso + config_ziso_add_easterly_wind_stress_asf = false + config_ziso_antarctic_shelf_front_width = 600000 + config_ziso_bottom_depth = 2500.0 + config_ziso_coriolis_gradient = 1e-11 + config_ziso_frazil_enable = false + config_ziso_frazil_temperature_anomaly = -3.0 + config_ziso_initial_temp_h1 = 300.0 + config_ziso_initial_temp_mt = 7.5e-5 + config_ziso_initial_temp_t1 = 6.0 + config_ziso_initial_temp_t2 = 3.6 + config_ziso_mean_restoring_temp = 3.0 + config_ziso_meridional_extent = 2.0e6 + config_ziso_reference_coriolis = -1e-4 + config_ziso_restoring_sponge_l = 8.0e4 + config_ziso_restoring_temp_dev_ta = 2.0 + config_ziso_restoring_temp_dev_tb = 2.0 + config_ziso_restoring_temp_piston_vel = 1.93e-5 + config_ziso_restoring_temp_tau = 30.0 + config_ziso_restoring_temp_ze = 1250.0 + config_ziso_shelf_depth = 500.0 + config_ziso_slope_center_position = 5.0e5 + config_ziso_slope_half_width = 1.0e5 + config_ziso_use_slopping_bathymetry = false + config_ziso_vert_levels = 100 + 
config_ziso_wind_stress_max = 0.2 + config_ziso_wind_stress_shelf_front_max = -0.05 + config_ziso_wind_transition_position = 800000.0 + config_ziso_zonal_extent = 1.0e6 +/ +&sub_ice_shelf_2d + config_sub_ice_shelf_2d_bottom_depth = 2000.0 + config_sub_ice_shelf_2d_bottom_salinity = 34.7 + config_sub_ice_shelf_2d_cavity_thickness = 25.0 + config_sub_ice_shelf_2d_edge_width = 15.0e3 + config_sub_ice_shelf_2d_slope_height = 500.0 + config_sub_ice_shelf_2d_surface_salinity = 34.5 + config_sub_ice_shelf_2d_temperature = 1.0 + config_sub_ice_shelf_2d_vert_levels = 20 + config_sub_ice_shelf_2d_y1 = 30.0e3 + config_sub_ice_shelf_2d_y2 = 60.0e3 +/ +&periodic_planar + config_periodic_planar_bottom_depth = 2500.0 + config_periodic_planar_velocity_strength = 1.0 + config_periodic_planar_vert_levels = 100 +/ +&ecosys_column + config_ecosys_column_bottom_depth = 6000.0 + config_ecosys_column_ecosys_filename = 'unknown' + config_ecosys_column_ts_filename = 'unknown' + config_ecosys_column_vert_levels = 100 + config_ecosys_column_vertical_grid = '100layerACMEv1' +/ +&sea_mount + config_sea_mount_bottom_depth = 5000.0 + config_sea_mount_coriolis_parameter = -1.0e-4 + config_sea_mount_density_alpha = 0.2 + config_sea_mount_density_coef_exp = 1028 + config_sea_mount_density_coef_linear = 1024 + config_sea_mount_density_depth_exp = 500 + config_sea_mount_density_depth_linear = 4500 + config_sea_mount_density_gradient_exp = 3.0 + config_sea_mount_density_gradient_linear = 0.1 + config_sea_mount_density_ref = 1028 + config_sea_mount_density_tref = 5.0 + config_sea_mount_height = 4500.0 + config_sea_mount_layer_type = 'sigma' + config_sea_mount_radius = 10.0e3 + config_sea_mount_salinity = 35.0 + config_sea_mount_stratification_type = 'exponential' + config_sea_mount_vert_levels = 10 + config_sea_mount_width = 40.0e3 +/ +&isomip + config_isomip_bottom_depth = -900.0 + config_isomip_coriolis_parameter = -1.4e-4 + config_isomip_eastern_boundary = 500e3 + config_isomip_ice_fraction1 = 1.0 + config_isomip_ice_fraction2 = 1.0 + config_isomip_ice_fraction3 = 1.0 + config_isomip_northern_boundary = 1000e3 + config_isomip_restoring_salinity = 34.4 + config_isomip_restoring_temperature = -1.9 + config_isomip_salinity = 34.4 + config_isomip_salinity_piston_velocity = 1.157e-5 + config_isomip_southern_boundary = 0.0 + config_isomip_temperature = -1.9 + config_isomip_temperature_piston_velocity = 1.157e-5 + config_isomip_vert_levels = 30 + config_isomip_vertical_level_distribution = 'constant' + config_isomip_western_boundary = 0.0 + config_isomip_y1 = 0.0 + config_isomip_y2 = 400e3 + config_isomip_y3 = 1000e3 + config_isomip_z1 = -700.0 + config_isomip_z2 = -200.0 + config_isomip_z3 = -200.0 +/ +&isomip_plus + config_isomip_plus_coriolis_parameter = -1.409e-4 + config_isomip_plus_effective_density = 1026. 
+ config_isomip_plus_init_bot_sal = 34.5 + config_isomip_plus_init_bot_temp = -1.9 + config_isomip_plus_init_top_sal = 33.8 + config_isomip_plus_init_top_temp = -1.9 + config_isomip_plus_max_bottom_depth = -720.0 + config_isomip_plus_min_column_thickness = 10.0 + config_isomip_plus_min_ocean_fraction = 0.5 + config_isomip_plus_minimum_levels = 3 + config_isomip_plus_restore_bot_sal = 34.7 + config_isomip_plus_restore_bot_temp = 1.0 + config_isomip_plus_restore_evap_rate = 200 + config_isomip_plus_restore_rate = 10.0 + config_isomip_plus_restore_top_sal = 33.8 + config_isomip_plus_restore_top_temp = -1.9 + config_isomip_plus_restore_xmax = 800.0e3 + config_isomip_plus_restore_xmin = 790.0e3 + config_isomip_plus_topography_file = 'input_geometry_processed.nc' + config_isomip_plus_vert_levels = 36 + config_isomip_plus_vertical_level_distribution = 'constant' +/ +&tracer_forcing_activetracers + config_salinity_restoring_constant_piston_velocity = 0.0 + config_salinity_restoring_max_difference = 0.5 + config_use_activetracers = .true. + config_use_activetracers_exponential_decay = .false. + config_use_activetracers_idealage_forcing = .false. + config_use_activetracers_interior_restoring = .false. + config_use_activetracers_surface_bulk_forcing = .true. + config_use_activetracers_surface_restoring = .false. + config_use_activetracers_ttd_forcing = .false. + config_use_surface_salinity_monthly_restoring = .false. +/ +&tracer_forcing_debugtracers + config_use_debugtracers = .false. + config_use_debugtracers_exponential_decay = .false. + config_use_debugtracers_idealage_forcing = .false. + config_use_debugtracers_interior_restoring = .false. + config_use_debugtracers_surface_bulk_forcing = .false. + config_use_debugtracers_surface_restoring = .false. + config_use_debugtracers_ttd_forcing = .false. +/ +&tracer_forcing_ecosystracers + config_use_ecosystracers = .false. + config_use_ecosystracers_exponential_decay = .false. + config_use_ecosystracers_idealage_forcing = .false. + config_use_ecosystracers_interior_restoring = .false. + config_use_ecosystracers_sea_ice_coupling = .false. + config_use_ecosystracers_surface_bulk_forcing = .false. + config_use_ecosystracers_surface_restoring = .false. + config_use_ecosystracers_surface_value = .false. + config_use_ecosystracers_ttd_forcing = .false. +/ +&tracer_forcing_dmstracers + config_use_dmstracers = .false. + config_use_dmstracers_exponential_decay = .false. + config_use_dmstracers_idealage_forcing = .false. + config_use_dmstracers_interior_restoring = .false. + config_use_dmstracers_sea_ice_coupling = .false. + config_use_dmstracers_surface_bulk_forcing = .false. + config_use_dmstracers_surface_restoring = .false. + config_use_dmstracers_surface_value = .false. + config_use_dmstracers_ttd_forcing = .false. +/ +&tracer_forcing_macromoleculestracers + config_use_macromoleculestracers = .false. + config_use_macromoleculestracers_exponential_decay = .false. + config_use_macromoleculestracers_idealage_forcing = .false. + config_use_macromoleculestracers_interior_restoring = .false. + config_use_macromoleculestracers_sea_ice_coupling = .false. + config_use_macromoleculestracers_surface_bulk_forcing = .false. + config_use_macromoleculestracers_surface_restoring = .false. + config_use_macromoleculestracers_surface_value = .false. + config_use_macromoleculestracers_ttd_forcing = .false. +/ +&am_globalstats + config_am_globalstats_compute_interval = 'output_interval' + config_am_globalstats_compute_on_startup = .true. 
+ config_am_globalstats_directory = 'analysis_members' + config_am_globalstats_enable = .true. + config_am_globalstats_output_stream = 'globalStatsOutput' + config_am_globalstats_text_file = .false. + config_am_globalstats_write_on_startup = .true. +/ +&am_surfaceareaweightedaverages + config_am_surfaceareaweightedaverages_compute_interval = '0000-00-00_01:00:00' + config_am_surfaceareaweightedaverages_compute_on_startup = .true. + config_am_surfaceareaweightedaverages_enable = .true. + config_am_surfaceareaweightedaverages_output_stream = 'surfaceAreaWeightedAveragesOutput' + config_am_surfaceareaweightedaverages_write_on_startup = .true. +/ +&am_watermasscensus + config_am_watermasscensus_compute_interval = '0000-00-00_01:00:00' + config_am_watermasscensus_compute_on_startup = .true. + config_am_watermasscensus_enable = .false. + config_am_watermasscensus_maxsalinity = 37.0 + config_am_watermasscensus_maxtemperature = 30.0 + config_am_watermasscensus_minsalinity = 32.0 + config_am_watermasscensus_mintemperature = -2.0 + config_am_watermasscensus_output_stream = 'waterMassCensusOutput' + config_am_watermasscensus_write_on_startup = .true. +/ +&am_layervolumeweightedaverage + config_am_layervolumeweightedaverage_compute_interval = '0000-00-00_01:00:00' + config_am_layervolumeweightedaverage_compute_on_startup = .true. + config_am_layervolumeweightedaverage_enable = .true. + config_am_layervolumeweightedaverage_output_stream = 'layerVolumeWeightedAverageOutput' + config_am_layervolumeweightedaverage_write_on_startup = .true. +/ +&am_zonalmean + config_am_zonalmean_compute_interval = '0000-00-00_01:00:00' + config_am_zonalmean_compute_on_startup = .true. + config_am_zonalmean_enable = .false. + config_am_zonalmean_max_bin = -1.0e34 + config_am_zonalmean_min_bin = -1.0e34 + config_am_zonalmean_num_bins = 180 + config_am_zonalmean_output_stream = 'zonalMeanOutput' + config_am_zonalmean_write_on_startup = .true. +/ +&am_okuboweiss + config_am_okuboweiss_compute_eddy_census = .true. + config_am_okuboweiss_compute_interval = '0000-00-00_01:00:00' + config_am_okuboweiss_compute_on_startup = .true. + config_am_okuboweiss_directory = 'analysis_members' + config_am_okuboweiss_eddy_min_cells = 20 + config_am_okuboweiss_enable = .false. + config_am_okuboweiss_lambda2_normalization = 1e-10 + config_am_okuboweiss_normalization = 1e-10 + config_am_okuboweiss_output_stream = 'okuboWeissOutput' + config_am_okuboweiss_threshold_value = -0.2 + config_am_okuboweiss_use_lat_lon_coords = .true. + config_am_okuboweiss_write_on_startup = .true. +/ +&am_meridionalheattransport + config_am_meridionalheattransport_compute_interval = '0000-00-00_01:00:00' + config_am_meridionalheattransport_compute_on_startup = .true. + config_am_meridionalheattransport_enable = .true. + config_am_meridionalheattransport_max_bin = -1.0e34 + config_am_meridionalheattransport_min_bin = -1.0e34 + config_am_meridionalheattransport_num_bins = 180 + config_am_meridionalheattransport_output_stream = 'meridionalHeatTransportOutput' + config_am_meridionalheattransport_region_group = '' + config_am_meridionalheattransport_write_on_startup = .true. +/ +&am_testcomputeinterval + config_am_testcomputeinterval_compute_interval = '00-00-01_00:00:00' + config_am_testcomputeinterval_compute_on_startup = .true. + config_am_testcomputeinterval_enable = .false. + config_am_testcomputeinterval_output_stream = 'testComputeIntervalOutput' + config_am_testcomputeinterval_write_on_startup = .true. 
+/ +&am_highfrequencyoutput + config_am_highfrequencyoutput_compute_interval = 'output_interval' + config_am_highfrequencyoutput_compute_on_startup = .false. + config_am_highfrequencyoutput_enable = .true. + config_am_highfrequencyoutput_output_stream = 'highFrequencyOutput' + config_am_highfrequencyoutput_write_on_startup = .false. +/ +&am_timefilters + config_am_timefilters_compute_cell_centered_values = .true. + config_am_timefilters_compute_interval = 'dt' + config_am_timefilters_compute_on_startup = .true. + config_am_timefilters_enable = .false. + config_am_timefilters_initialize_filters = .true. + config_am_timefilters_output_stream = 'timeFiltersOutput' + config_am_timefilters_restart_stream = 'timeFiltersRestart' + config_am_timefilters_tau = '90_00:00:00' + config_am_timefilters_write_on_startup = .true. +/ +&am_lagrparttrack + config_am_lagrparttrack_compute_interval = 'dt' + config_am_lagrparttrack_compute_on_startup = .false. + config_am_lagrparttrack_enable = .false. + config_am_lagrparttrack_filter_number = 0 + config_am_lagrparttrack_input_stream = 'lagrPartTrackInput' + config_am_lagrparttrack_output_stream = 'lagrPartTrackOutput' + config_am_lagrparttrack_region_stream = 'lagrPartTrackRegions' + config_am_lagrparttrack_reset_criteria = 'none' + config_am_lagrparttrack_reset_global_timestamp = '0000_00:00:00' + config_am_lagrparttrack_reset_if_inside_region = .false. + config_am_lagrparttrack_reset_if_outside_region = .false. + config_am_lagrparttrack_restart_stream = 'lagrPartTrackRestart' + config_am_lagrparttrack_write_on_startup = .true. +/ +&am_eliassenpalm + config_am_eliassenpalm_compute_interval = 'output_interval' + config_am_eliassenpalm_compute_on_startup = .true. + config_am_eliassenpalm_debug = .false. + config_am_eliassenpalm_enable = .false. + config_am_eliassenpalm_nbuoyancylayers = 45 + config_am_eliassenpalm_output_stream = 'eliassenPalmOutput' + config_am_eliassenpalm_restart_stream = 'eliassenPalmRestart' + config_am_eliassenpalm_rhomax_buoycoor = 1080 + config_am_eliassenpalm_rhomin_buoycoor = 900 + config_am_eliassenpalm_write_on_startup = .true. +/ +&am_mixedlayerdepths + config_am_mixedlayerdepths_compute_interval = '0000-00-00_01:00:00' + config_am_mixedlayerdepths_compute_on_startup = .true. + config_am_mixedlayerdepths_crit_dens_threshold = 0.03 + config_am_mixedlayerdepths_crit_temp_threshold = 0.2 + config_am_mixedlayerdepths_den_gradient_threshold = 5E-8 + config_am_mixedlayerdepths_dgradient = .true. + config_am_mixedlayerdepths_dthreshold = .true. + config_am_mixedlayerdepths_enable = .true. + config_am_mixedlayerdepths_interp_method = 1 + config_am_mixedlayerdepths_output_stream = 'mixedLayerDepthsOutput' + config_am_mixedlayerdepths_reference_pressure = 1.0E5 + config_am_mixedlayerdepths_temp_gradient_threshold = 5E-7 + config_am_mixedlayerdepths_tgradient = .true. + config_am_mixedlayerdepths_tthreshold = .true. + config_am_mixedlayerdepths_write_on_startup = .true. +/ +&am_regionalstatsdaily + config_am_regionalstatsdaily_1d_weighting_field = 'areaCell' + config_am_regionalstatsdaily_1d_weighting_function = 'mul' + config_am_regionalstatsdaily_2d_weighting_field = 'volumeCell' + config_am_regionalstatsdaily_2d_weighting_function = 'mul' + config_am_regionalstatsdaily_compute_interval = 'output_interval' + config_am_regionalstatsdaily_compute_on_startup = .false. + config_am_regionalstatsdaily_enable = .false. 
+ config_am_regionalstatsdaily_input_stream = 'regionalMasksInput' + config_am_regionalstatsdaily_operation = 'avg' + config_am_regionalstatsdaily_output_stream = 'regionalStatsDailyOutput' + config_am_regionalstatsdaily_region_group = 'all' + config_am_regionalstatsdaily_region_type = 'cell' + config_am_regionalstatsdaily_restart_stream = 'regionalMasksInput' + config_am_regionalstatsdaily_vertical_dimension = 'nVertLevels' + config_am_regionalstatsdaily_vertical_mask = 'cellMask' + config_am_regionalstatsdaily_write_on_startup = .false. +/ +&am_regionalstatsweekly + config_am_regionalstatsweekly_1d_weighting_field = 'areaCell' + config_am_regionalstatsweekly_1d_weighting_function = 'mul' + config_am_regionalstatsweekly_2d_weighting_field = 'volumeCell' + config_am_regionalstatsweekly_2d_weighting_function = 'mul' + config_am_regionalstatsweekly_compute_interval = 'output_interval' + config_am_regionalstatsweekly_compute_on_startup = .false. + config_am_regionalstatsweekly_enable = .false. + config_am_regionalstatsweekly_input_stream = 'regionalMasksInput' + config_am_regionalstatsweekly_operation = 'avg' + config_am_regionalstatsweekly_output_stream = 'regionalStatsWeeklyOutput' + config_am_regionalstatsweekly_region_group = 'all' + config_am_regionalstatsweekly_region_type = 'cell' + config_am_regionalstatsweekly_restart_stream = 'regionalMasksInput' + config_am_regionalstatsweekly_vertical_dimension = 'nVertLevels' + config_am_regionalstatsweekly_vertical_mask = 'cellMask' + config_am_regionalstatsweekly_write_on_startup = .false. +/ +&am_regionalstatsmonthly + config_am_regionalstatsmonthly_1d_weighting_field = 'areaCell' + config_am_regionalstatsmonthly_1d_weighting_function = 'mul' + config_am_regionalstatsmonthly_2d_weighting_field = 'volumeCell' + config_am_regionalstatsmonthly_2d_weighting_function = 'mul' + config_am_regionalstatsmonthly_compute_interval = 'output_interval' + config_am_regionalstatsmonthly_compute_on_startup = .false. + config_am_regionalstatsmonthly_enable = .false. + config_am_regionalstatsmonthly_input_stream = 'regionalMasksInput' + config_am_regionalstatsmonthly_operation = 'avg' + config_am_regionalstatsmonthly_output_stream = 'regionalStatsMonthlyOutput' + config_am_regionalstatsmonthly_region_group = 'all' + config_am_regionalstatsmonthly_region_type = 'cell' + config_am_regionalstatsmonthly_restart_stream = 'regionalMasksInput' + config_am_regionalstatsmonthly_vertical_dimension = 'nVertLevels' + config_am_regionalstatsmonthly_vertical_mask = 'cellMask' + config_am_regionalstatsmonthly_write_on_startup = .false. +/ +&am_regionalstatscustom + config_am_regionalstatscustom_1d_weighting_field = 'areaCell' + config_am_regionalstatscustom_1d_weighting_function = 'mul' + config_am_regionalstatscustom_2d_weighting_field = 'volumeCell' + config_am_regionalstatscustom_2d_weighting_function = 'mul' + config_am_regionalstatscustom_compute_interval = 'output_interval' + config_am_regionalstatscustom_compute_on_startup = .false. + config_am_regionalstatscustom_enable = .false. 
+ config_am_regionalstatscustom_input_stream = 'regionalMasksInput' + config_am_regionalstatscustom_operation = 'avg' + config_am_regionalstatscustom_output_stream = 'regionalStatsCustomOutput' + config_am_regionalstatscustom_region_group = 'all' + config_am_regionalstatscustom_region_type = 'cell' + config_am_regionalstatscustom_restart_stream = 'regionalMasksInput' + config_am_regionalstatscustom_vertical_dimension = 'nVertLevels' + config_am_regionalstatscustom_vertical_mask = 'cellMask' + config_am_regionalstatscustom_write_on_startup = .false. +/ +&am_timeseriesstatsdaily + config_am_timeseriesstatsdaily_backward_output_offset = '00-00-01_00:00:00' + config_am_timeseriesstatsdaily_compute_interval = '00-00-00_01:00:00' + config_am_timeseriesstatsdaily_compute_on_startup = .false. + config_am_timeseriesstatsdaily_duration_intervals = 'repeat_interval' + config_am_timeseriesstatsdaily_enable = .false. + config_am_timeseriesstatsdaily_operation = 'avg' + config_am_timeseriesstatsdaily_output_stream = 'timeSeriesStatsDailyOutput' + config_am_timeseriesstatsdaily_reference_times = 'initial_time' + config_am_timeseriesstatsdaily_repeat_intervals = 'reset_interval' + config_am_timeseriesstatsdaily_reset_intervals = '00-00-01_00:00:00' + config_am_timeseriesstatsdaily_restart_stream = 'timeSeriesStatsDailyRestart' + config_am_timeseriesstatsdaily_write_on_startup = .false. +/ +&am_timeseriesstatsmonthly + config_am_timeseriesstatsmonthly_backward_output_offset = '00-01-00_00:00:00' + config_am_timeseriesstatsmonthly_compute_interval = '00-00-00_01:00:00' + config_am_timeseriesstatsmonthly_compute_on_startup = .false. + config_am_timeseriesstatsmonthly_duration_intervals = 'repeat_interval' + config_am_timeseriesstatsmonthly_enable = .true. + config_am_timeseriesstatsmonthly_operation = 'avg' + config_am_timeseriesstatsmonthly_output_stream = 'timeSeriesStatsMonthlyOutput' + config_am_timeseriesstatsmonthly_reference_times = 'initial_time' + config_am_timeseriesstatsmonthly_repeat_intervals = 'reset_interval' + config_am_timeseriesstatsmonthly_reset_intervals = '00-01-00_00:00:00' + config_am_timeseriesstatsmonthly_restart_stream = 'timeSeriesStatsMonthlyRestart' + config_am_timeseriesstatsmonthly_write_on_startup = .false. +/ +&am_timeseriesstatsclimatology + config_am_timeseriesstatsclimatology_backward_output_offset = '00-03-00_00:00:00' + config_am_timeseriesstatsclimatology_compute_interval = '00-00-00_01:00:00' + config_am_timeseriesstatsclimatology_compute_on_startup = .false. + config_am_timeseriesstatsclimatology_duration_intervals = '00-03-00_00:00:00;00-03-00_00:00:00;00-03-00_00:00:00;00-03-00_00:00:00' + config_am_timeseriesstatsclimatology_enable = .false. + config_am_timeseriesstatsclimatology_operation = 'avg' + config_am_timeseriesstatsclimatology_output_stream = 'timeSeriesStatsClimatologyOutput' + config_am_timeseriesstatsclimatology_reference_times = '00-03-01_00:00:00;00-06-01_00:00:00;00-09-01_00:00:00;00-12-01_00:00:00' + config_am_timeseriesstatsclimatology_repeat_intervals = '01-00-00_00:00:00;01-00-00_00:00:00;01-00-00_00:00:00;01-00-00_00:00:00' + config_am_timeseriesstatsclimatology_reset_intervals = '1000-00-00_00:00:00;1000-00-00_00:00:00;1000-00-00_00:00:00;1000-00-00_00:00:00' + config_am_timeseriesstatsclimatology_restart_stream = 'timeSeriesStatsClimatologyRestart' + config_am_timeseriesstatsclimatology_write_on_startup = .false. 
+/ +&am_timeseriesstatscustom + config_am_timeseriesstatscustom_backward_output_offset = '00-00-01_00:00:00' + config_am_timeseriesstatscustom_compute_interval = '00-00-00_01:00:00' + config_am_timeseriesstatscustom_compute_on_startup = .false. + config_am_timeseriesstatscustom_duration_intervals = 'repeat_interval' + config_am_timeseriesstatscustom_enable = .false. + config_am_timeseriesstatscustom_operation = 'avg' + config_am_timeseriesstatscustom_output_stream = 'timeSeriesStatsCustomOutput' + config_am_timeseriesstatscustom_reference_times = 'initial_time' + config_am_timeseriesstatscustom_repeat_intervals = 'reset_interval' + config_am_timeseriesstatscustom_reset_intervals = '00-00-07_00:00:00' + config_am_timeseriesstatscustom_restart_stream = 'timeSeriesStatsCustomRestart' + config_am_timeseriesstatscustom_write_on_startup = .false. +/ +&am_pointwisestats + config_am_pointwisestats_compute_interval = 'output_interval' + config_am_pointwisestats_compute_on_startup = .true. + config_am_pointwisestats_enable = .false. + config_am_pointwisestats_output_stream = 'pointwiseStatsOutput' + config_am_pointwisestats_write_on_startup = .true. +/ +&am_debugdiagnostics + config_am_debugdiagnostics_check_state = .true. + config_am_debugdiagnostics_compute_interval = 'dt' + config_am_debugdiagnostics_compute_on_startup = .true. + config_am_debugdiagnostics_enable = .false. + config_am_debugdiagnostics_output_stream = 'debugDiagnosticsOutput' + config_am_debugdiagnostics_write_on_startup = .false. +/ +&am_rpncalculator + config_am_rpncalculator_compute_interval = '0010-00-00_00:00:00' + config_am_rpncalculator_compute_on_startup = .true. + config_am_rpncalculator_enable = .false. + config_am_rpncalculator_expression_1 = 'a b *' + config_am_rpncalculator_expression_2 = 'none' + config_am_rpncalculator_expression_3 = 'none' + config_am_rpncalculator_expression_4 = 'none' + config_am_rpncalculator_output_name_1 = 'volumeCell' + config_am_rpncalculator_output_name_2 = 'none' + config_am_rpncalculator_output_name_3 = 'none' + config_am_rpncalculator_output_name_4 = 'none' + config_am_rpncalculator_output_stream = 'none' + config_am_rpncalculator_variable_a = 'layerThickness' + config_am_rpncalculator_variable_b = 'areaCell' + config_am_rpncalculator_variable_c = 'none' + config_am_rpncalculator_variable_d = 'none' + config_am_rpncalculator_variable_e = 'none' + config_am_rpncalculator_variable_f = 'none' + config_am_rpncalculator_variable_g = 'none' + config_am_rpncalculator_variable_h = 'none' + config_am_rpncalculator_write_on_startup = .false. +/ +&am_transecttransport + config_am_transecttransport_compute_interval = 'output_interval' + config_am_transecttransport_compute_on_startup = .true. + config_am_transecttransport_enable = .false. + config_am_transecttransport_output_stream = 'transectTransportOutput' + config_am_transecttransport_transect_group = 'all' + config_am_transecttransport_write_on_startup = .true. +/ +&am_eddyproductvariables + config_am_eddyproductvariables_compute_interval = 'dt' + config_am_eddyproductvariables_compute_on_startup = .true. + config_am_eddyproductvariables_enable = .false. + config_am_eddyproductvariables_output_stream = 'eddyProductVariablesOutput' + config_am_eddyproductvariables_write_on_startup = .false. +/ +&am_mocstreamfunction + config_am_mocstreamfunction_compute_interval = 'output_interval' + config_am_mocstreamfunction_compute_on_startup = .true. + config_am_mocstreamfunction_enable = .false. 
+ config_am_mocstreamfunction_max_bin = -1.0e34 + config_am_mocstreamfunction_min_bin = -1.0e34 + config_am_mocstreamfunction_normal_velocity_value = 'normalVelocity' + config_am_mocstreamfunction_num_bins = 180 + config_am_mocstreamfunction_output_stream = 'mocStreamfunctionOutput' + config_am_mocstreamfunction_region_group = 'all' + config_am_mocstreamfunction_transect_group = 'all' + config_am_mocstreamfunction_vertical_velocity_value = 'vertVelocityTop' + config_am_mocstreamfunction_write_on_startup = .true. +/ diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-01-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-01-01.nc new file mode 100644 index 000000000..37a050ac3 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-01-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-02-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-02-01.nc new file mode 100644 index 000000000..6622c37b1 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-02-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-03-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-03-01.nc new file mode 100644 index 000000000..2574eba76 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-03-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-04-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-04-01.nc new file mode 100644 index 000000000..b5f51d2c4 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-04-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-05-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-05-01.nc new file mode 100644 index 000000000..956d3b4c5 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-05-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-06-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-06-01.nc new file mode 100644 index 000000000..9ffde565b Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-06-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-07-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-07-01.nc new file mode 100644 index 000000000..2fcc8a830 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-07-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-08-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-08-01.nc new file mode 100644 index 000000000..2e6b8613a Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-08-01.nc differ diff --git 
a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-09-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-09-01.nc
new file mode 100644
index 000000000..643eff4e0
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-09-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-10-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-10-01.nc
new file mode 100644
index 000000000..1bc2a8d50
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-10-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-11-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-11-01.nc
new file mode 100644
index 000000000..edf838ce9
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-11-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-12-01.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-12-01.nc
new file mode 100644
index 000000000..cbb61d268
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology/mpaso.hist.am.timeSeriesStatsMonthly.0001-12-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology/mpaso.rst.0001-01-01_00000.nc b/mpas_analysis/test/test_mpas_climatology/mpaso.rst.0001-01-01_00000.nc
new file mode 120000
index 000000000..880a52c2e
--- /dev/null
+++ b/mpas_analysis/test/test_mpas_climatology/mpaso.rst.0001-01-01_00000.nc
@@ -0,0 +1 @@
+../test_interpolate/mpasMesh.nc
\ No newline at end of file
diff --git a/mpas_analysis/test/test_mpas_climatology/streams.ocean b/mpas_analysis/test/test_mpas_climatology/streams.ocean
new file mode 100644
index 000000000..1a025681b
--- /dev/null
+++ b/mpas_analysis/test/test_mpas_climatology/streams.ocean
@@ -0,0 +1,604 @@
+[604 lines of XML stream definitions; the markup was stripped in extraction]
diff --git a/mpas_analysis/test/test_timekeeping.py b/mpas_analysis/test/test_timekeeping.py
index 27161ffe8..f26738f6e 100644
--- a/mpas_analysis/test/test_timekeeping.py
+++ b/mpas_analysis/test/test_timekeeping.py
@@ -4,10 +4,6 @@
 Author
 ------
 Xylar Asay-Davis
-
-Last Modified
--------------
-02/17/2017
 """
 
 import pytest
diff --git a/preprocess_masks/make_ice_shelf_masks.py b/preprocess_masks/make_ice_shelf_masks.py
new file mode 100755
index 000000000..4e4136506
--- /dev/null
+++ b/preprocess_masks/make_ice_shelf_masks.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+'''
+Make a mask file for a given mesh from ice-shelf geometric features.
+
+The -m flag is used to specify the name of the ACME mesh to which the
+masks should be applied.
+
+Requires:
+  * a local link to the MPAS mask creator MpasMaskCreator.x
+  * a local link to a mesh file named <mesh_name>_mesh.nc describing the
+    desired mesh
+  * the region file iceShelves.geojson produced by running
+    ./driver_scripts/setup_ice_shelves.py in the geometric_features repo
+
+Produces:
+  * <mesh_name>_iceShelfMasks.nc, the mask file
+
+Author: Xylar Asay-Davis
+'''
+
+import subprocess
+import argparse
+
+
+parser = \
+    argparse.ArgumentParser(description=__doc__,
+                            formatter_class=argparse.RawTextHelpFormatter)
+parser.add_argument('-m', '--mesh_name', dest='mesh_name',
+                    help='The ACME name of the mesh', metavar='MESH_NAME',
+                    required=True)
+args = parser.parse_args()
+
+meshFileName = '{}_mesh.nc'.format(args.mesh_name)
+maskFileName = '{}_iceShelfMasks.nc'.format(args.mesh_name)
+regionFileName = 'iceShelves.geojson'
+
+subprocess.check_call(['./MpasMaskCreator.x', meshFileName, maskFileName,
+                       '-f', regionFileName])
diff --git a/preprocess_observations/README.md b/preprocess_observations/README.md
new file mode 100644
index 000000000..ba0dc5c90
--- /dev/null
+++ b/preprocess_observations/README.md
@@ -0,0 +1,8 @@
+# Observations Preprocessing Scripts
+
+This directory contains scripts for preprocessing observations. To run the
+scripts, copy them to the base directory (`..`), change the paths to the
+original observational data sets (see comments in each script for sources
+of the relevant data) and run the script to perform preprocessing. The
+scripts typically compute climatologies and/or perform remapping to a
+comparison grid.
\ No newline at end of file
diff --git a/preprocess_observations/mds.py b/preprocess_observations/mds.py
new file mode 100644
index 000000000..7e81bfa13
--- /dev/null
+++ b/preprocess_observations/mds.py
@@ -0,0 +1,605 @@
+import sys
+import re
+import glob
+import numpy as np
+from operator import mul
+
+debug = False
+
+################################################################################
+# metafile parsing
+
+# for python2.5
+try: next
+except NameError:
+    def next(obj): return obj.next()
+
+_currentline = ''
+
+class ParseError(ValueError):
+    def __str__(self):
+        metafile = self.args[0]
+        lines = self.args[1:]
+        try:
+            name = metafile.name
+        except AttributeError:
+            name = metafile
+
+        return '\n'.join(('in metafile: '+name,)
+                         + lines
+                         + ('in: ' + _currentline,))
+
+
+# these deal with comments in the metafile
+
+_comment_pattern = re.compile(
+    r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
+    re.DOTALL | re.MULTILINE
+    )
+
+def _comment_replacer(match):
+    s = match.group(0)
+    if s.startswith('/'):
+        return ""
+    else:
+        return s
+
+def strip_comments(text):
+    """ strips C and C++ style comments from text """
+    return re.sub(_comment_pattern, _comment_replacer, text)
+
+
+_string_pattern = re.compile(r"'(.*)'$")
+
+def parse1(s):
+    """ convert one item to appropriate type """
+    m = _string_pattern.match(s)
+    if m:
+        s = m.group(1)
+        # unquote quotes
+        s = re.sub(r"''", "'", s)
+        return s
+
+    if '.'
in s or 'e' in s.lower(): + return float(s) + else: + try: + return int(s) + except ValueError: + raise ParseError("Cannot parse value: " + s) + + +_closing = {'[':']', + '{':'}', + } + +def parsemeta(metafile): + """ parses metafile (file object or filename) into a dictionary of lists + of floats, ints or strings + """ + global _currentline + + try: + lines = open(metafile) + except TypeError: + lines = iter(metafile) + + d = {} + for line in lines: + line = strip_comments(line) + # skip empty lines + if re.match(r'\s*$', line): + continue + + m = re.match(r' *(\w*) *= *(.*?) *$', line) + if m: + key,line = m.groups() + else: + raise ParseError(metafile,line) + + # look for the opening delimiter ('[' or '{') + opening = line[0] + try: + closing = _closing[opening] + except KeyError: + raise ParseError(metafile,line,'Values must be enclosed in [] or {}.') + + # read more lines until a matching closing delimiter is found + while closing not in line: + try: + nextline = next(lines) + except StopIteration: + raise ParseError(metafile,line,'No closing ' + closing + ' found.') + + line += ' ' + strip_comments(nextline).rstrip() + + if line[-2:] != closing + ';': + raise ParseError(metafile,line, + 'Values must be enclosed in "[ ];" or "{ };".') + + # remove delimiters + line = line[1:-2].strip(" ,") + _currentline = line + + if opening == '[': + # [] can contain any type of values, separated by commas + val = [ parse1(s) for s in re.split(r',? *',line) ] + else: + # {} can only contain single quote-delimited strings separated by space + val = [ s.rstrip() for s in re.split(r"' *'", line.strip("'")) ] + + d[key] = val + + return d + +################################################################################ + +def message(*args): + sys.stdout.write(' '.join([str(s) for s in args]) + '\n') + + +def warning(*args): + sys.stderr.write(' '.join([str(s) for s in args]) + '\n') + + +def aslist(i): + """ if iterable, turn into list, otherwise put into list """ + try: + res = list(i) + except TypeError: + res = [i] + return res + + +def fromfileshape(filename,dtype,shape=None,**kwargs): + return np.fromfile(filename, dtype, **kwargs).reshape(shape) + + +def scanforfiles(fname): + """ return list of iteration numbers for which metafiles with base fname exist """ + import glob + allfiles = glob.glob(fname + '.' + 10*'[0-9]' + '.001.001.meta') + if len(allfiles) == 0: + allfiles = glob.glob(fname + '.' + 10*'[0-9]' + '.meta') + off = -5 + else: + off = -13 + + itrs = [ int(s[off-10:off]) for s in allfiles ] + itrs.sort() + return itrs + + +def readmeta(f): + """ read meta file and extract tile/timestep-specific parameters """ + meta = parsemeta(f) + dimList = meta.pop('dimList') + # pythonize + gdims = tuple(dimList[-3::-3]) + i0s = [ i-1 for i in dimList[-2::-3] ] + ies = dimList[-1::-3] + # remove file-specific parameters + timeInterval = meta.pop('timeInterval', None) + timeStepNumber = meta.pop('timeStepNumber', None) + map2gl = meta.pop('map2glob', None) + # put back only global dimensions + meta['dimList'] = list(gdims[::-1]) + return gdims,i0s,ies,timeStepNumber,timeInterval,map2gl,meta + + +_typeprefixes = {'ieee-be':'>', + 'b' :'>', + '>' :'>', + 'ieee-le':'<', + 'l' :'<', + '<' :'<', + } +_typesuffixes = {'float32':'f4', + 'float64':'f8', + } + +def rdmds(fnamearg,itrs=-1,machineformat='b',rec=None,fill_value=0, + returnmeta=False,astype=float,region=None,lev=(), + usememmap=False,mm=False,squeeze=True,verbose=False): + """ a = rdmds(fname,...) + a = rdmds(fname,itrs,...) 
+        a,its,meta = rdmds(fname,...,returnmeta=True)
+
+    Read meta-data files as written by MITgcm.
+
+    Without itrs, will try to read
+
+        fname.meta or fname.001.001.meta, ...
+
+    If itrs is a list of integers or an integer, it will read the
+    corresponding
+
+        fname.000000iter.meta, ...
+
+    If itrs is NaN, it will read all iterations for which files are found.
+    If itrs is Inf, it will read the highest iteration found.
+
+    fname may contain shell wildcards, which is useful for tile files
+    organized into directories, e.g.,
+
+        T = rdmds('prefix*/T', 2880)
+
+    will read prefix0000/T.0000002880.*, prefix0001/T.0000002880.*, ...
+    (and any others that match the wildcard, so be careful how you name
+    things!)
+
+    Returns:
+
+        a    :: numpy array of the data read
+        its  :: list of iteration numbers read (only if itrs=NaN or Inf)
+        meta :: dictionary of metadata (only if returnmeta=True)
+
+    Keyword arguments:
+
+        machineformat :: endianness ('b' or 'l', default 'b')
+        rec           :: list of records to read (default all)
+                         useful for pickups and multi-field diagnostics files
+        fill_value    :: fill value for missing (blank) tiles (default 0)
+        astype        :: data type to return (default: double precision)
+                         None: keep data type/precision of file
+        region        :: (x0,x1,y0,y1) read only this region
+                         (default (0,nx,0,ny))
+        lev           :: list of levels to read, or, for multiple dimensions
+                         (excluding x,y), tuple(!) of lists (see examples
+                         below)
+        usememmap     :: if True, use a memory map for reading data
+                         (default False); recommended when using lev, or
+                         region with global files, to save memory and,
+                         possibly, time
+
+    Examples:
+
+        XC = rdmds('XC')
+        XC = rdmds('res_*/XC')
+        T = rdmds('T.0000002880')
+        T = rdmds('T',2880)
+        T2 = rdmds('T',[2880,5760])
+        T,its = rdmds('T',numpy.Inf)
+        VVEL = rdmds('pickup',2880,rec=range(50,100))
+        a5 = rdmds('diags',2880,rec=0,lev=[5])
+        a = rdmds('diags',2880,rec=0,lev=([0],[0,1,5,6,7]))
+        from numpy import r_
+        a = rdmds('diags',2880,rec=0,lev=([0],r_[:2,5:8]))  # same as previous
+        a = rdmds('diags',2880,rec=0)[0, [0,1,5,6,7], ...]  # same, but less efficient
+        a = rdmds('diags',2880)[0, 0, [0,1,5,6,7], ...]
# even less efficient + """ + import functools + usememmap = usememmap or mm + if usememmap: + readdata = np.memmap + else: + readdata = fromfileshape + + # add iteration number to file name unless itrs is -1 + additrs = itrs != -1 + if itrs is np.nan: + # all iterations + itrs = scanforfiles(fnamearg) + if verbose: warning('Reading {0} time levels: '.format(len(itrs)), *itrs) + returnits = True + itrsislist = True + elif itrs is np.inf: + # last iteration + itrs = scanforfiles(fnamearg) + if len(itrs): + if verbose: warning('Found {0} time levels, reading'.format(len(itrs)), itrs[-1]) + else: + if verbose: warning('Found 0 time levels for {}'.format(fnamearg)) + itrs = itrs[-1:] + returnits = True + itrsislist = False + else: + returnits = False + itrsislist = np.iterable(itrs) + + # always make itrs a list + itrs = aslist(itrs) + + allrec = rec is None + reclist = aslist(rec) + if not isinstance(lev,tuple): + lev = (lev,) + levs = tuple( aslist(l) for l in lev ) + levdims = tuple(len(l) for l in levs) + levinds = np.ix_(*levs) + nlev = len(levdims) + + if usememmap: + recsatonce = True + readdata = np.memmap + else: + recsatonce = allrec + readdata = fromfileshape + + try: + typepre = _typeprefixes[machineformat] + except KeyError: + raise ValueError('Allowed machineformats: ' + ' '.join(_typeprefixes)) + + arr = None + metaref = {} + timeStepNumbers = [] + timeIntervals = [] + for iit,it in enumerate(itrs): + if additrs: + fname = fnamearg + '.{0:010d}'.format(int(it)) + else: + fname = fnamearg + + metafiles = glob.glob(fname + 2*('.'+3*'[0-9]') + '.meta') or glob.glob(fname+'.meta') + if len(metafiles) == 0: + raise IOError('No files found for ' + fname + '.meta') + + if verbose: warning(metafiles[0]) + + if debug: warning('Found',len(metafiles),'metafiles for iteration',it) + + for metafile in metafiles: + gdims,i0s,ies,timestep,timeinterval,map2gl,meta = readmeta(metafile) + if arr is None: + # initialize, allocate + try: + dataprec, = meta['dataprec'] + except KeyError: + dataprec, = meta['format'] + tp = typepre + _typesuffixes[dataprec] + size = np.dtype(tp).itemsize + if astype is None: astype = tp + recshape = tuple( ie-i0 for i0,ie in zip(i0s,ies) ) + count = functools.reduce(mul, recshape) + nrecords, = meta['nrecords'] + tileshape = (nrecords,) + recshape + if allrec: + reclist = range(nrecords) + recinds = np.s_[:,] + levinds + else: + recinds = np.ix_(reclist, *levs) + + if region is None: + ri0,rie,rj0,rje = 0,gdims[-1],0,gdims[-2] + else: + ri0,rie,rj0,rje = region + if ri0 < 0: ri0 += gdims[-1] + if rie < 0: rie += gdims[-1] + if rj0 < 0: rj0 += gdims[-2] + if rje < 0: rje += gdims[-2] + + assert nlev+2 <= len(gdims) + rdims = levdims + gdims[len(levdims):-2] + (rje-rj0,rie-ri0) + # always include itrs and rec dimensions and squeeze later + arr = np.empty((len(itrs),len(reclist))+rdims, astype) + arr[...] 
= fill_value + metaref = meta + else: + if meta != metaref: + raise ValueError('Meta files not compatible') + + datafile = metafile[:-4] + 'data' + + if region is not None: + if map2gl is None: + # overlap of tile with region: + i0 = min(rie, max(ri0, i0s[-1])) + ie = min(rie, max(ri0, ies[-1])) + j0 = min(rje, max(rj0, i0s[-2])) + je = min(rje, max(rj0, ies[-2])) + # source indices + I0 = i0 - i0s[-1] + Ie = ie - i0s[-1] + J0 = j0 - i0s[-2] + Je = je - i0s[-2] + # target indices + i0s[-1] = i0 - ri0 + ies[-1] = ie - ri0 + i0s[-2] = j0 - rj0 + ies[-2] = je - rj0 + else: + raise NotImplementedError('Region selection is not implemented for map2glob != [0,1]') + + sl = tuple( slice(i0,ie) for i0,ie in zip(i0s,ies) ) + if map2gl is None: + # part of arr that will receive tile (all records) + arrtile = arr[(iit,slice(None))+sl] + else: + ny,nx = arr.shape[-2:] + i0 = i0s[-1] + j0 = i0s[-2] + ie = ies[-1] + je = ies[-2] + # "flat" stride for j + jstride = map2gl[1]*nx + map2gl[0] + n = (je-j0)*jstride + # start of a jstride by je-j0 block that contains this tile + ii0 = min(i0+nx*j0, nx*ny-n) + # tile starts at ioff+i0 + ioff = nx*j0 - ii0 + # flatten x,y dimensions + arrflat = arr.reshape(arr.shape[:-2]+(nx*ny,)) + # extract tile + arrmap = arrflat[...,ii0:ii0+n].reshape(arr.shape[:-2]+(je-j0,jstride))[...,:,ioff+i0:ioff+ie] + # slice non-x,y dimensions (except records) + arrtile = arrmap[(iit,slice(None))+sl[:-2]] + del arrflat,arrmap + + if recsatonce: + if region is None: + arrtile[...] = readdata(datafile, tp, shape=tileshape)[recinds] + else: + if Ie > I0 and Je > J0: + if debug: message(datafile, I0,Ie,J0,Je) + arrtile[...] = readdata(datafile, tp, shape=tileshape)[recinds + np.s_[...,J0:Je,I0:Ie]] + else: + f = open(datafile) + for irec,recnum in enumerate(reclist): + if recnum < 0: recnum += nrecords + f.seek(recnum*count*size) + if region is None: + arrtile[irec] = np.fromfile(f, tp, count=count).reshape(recshape)[levinds] + else: + if Ie > I0 and Je > J0: + if debug: message(datafile, I0,Ie,J0,Je) + tilerec = np.fromfile(f, tp, count=count).reshape(recshape) + arrtile[irec] = tilerec[levinds + np.s_[...,J0:Je,I0:Ie]] + f.close() + + if timestep is not None: + timeStepNumbers.extend(timestep) + + if timeinterval is not None: + timeIntervals.append(timeinterval) + + # put list of iteration numbers back into metadata dictionary + if len(timeStepNumbers): + metaref['timeStepNumber'] = timeStepNumbers + + if len(timeIntervals): + metaref['timeInterval'] = timeIntervals + + if arr is None: + arr = np.array([]) + else: + # squeeze singleton iteration, record and level dimensions like matlab version + dims = (len(itrs),len(reclist)) + levdims + if squeeze: + # squeeze all singleton dimensions + squeezed = tuple( d for d in dims if d > 1 ) + else: + # squeeze all that came from scalar arguments + keepers = [itrsislist, np.iterable(rec)] + [np.iterable(l) for l in lev] + squeezed = tuple( d for d,keep in zip(dims, keepers) if keep ) + + arr = arr.reshape(squeezed+arr.shape[2+nlev:]) + + if returnmeta: + meta = dict((k.lower(),v) for k,v in metaref.items()) + return arr,itrs,meta +# elif returnits: +# return arr,itrs + else: + return arr + + +def wrmds(fbase, arr, itr=None, dataprec='float32', ndims=None, nrecords=None, + times=None, fields=None, simulation=None, machineformat='b', + deltat=None, dimlist=None): + ''' wrmds(fbase, arr, itr=None, ...) + + Write array arr to an mds meta/data file set. 
If itr is given,
+    the files will be named fbase.0000000itr.data and fbase.0000000itr.meta,
+    otherwise just fbase.data and fbase.meta.
+
+    Parameters
+    ----------
+    dataprec      :: precision of resulting file ('float32' or 'float64')
+    ndims         :: number of non-record dimensions; extra (leading)
+                     dimensions will be folded into 1 record dimension
+    nrecords      :: number of records; will fold as many leading dimensions
+                     as necessary (has to match shape!)
+    times         :: times to write into meta file.  Either a single float
+                     or a list of two for a time interval
+    fields        :: list of fields
+    simulation    :: string describing the simulation
+    machineformat :: 'b' or 'l' for big or little endian
+    deltat        :: time step; provide in place of either times or itr to
+                     have one computed from the other
+    dimlist       :: dimensions as will be stored in file (only useful when
+                     passing meta data from an existing file to wrmds as
+                     **kwargs)
+    '''
+    if type(dataprec) == type([]): dataprec, = dataprec
+    if type(ndims) == type([]): ndims, = ndims
+    if type(nrecords) == type([]): nrecords, = nrecords
+    if type(simulation) == type([]): simulation, = simulation
+    if type(machineformat) == type([]): machineformat, = machineformat
+    if type(deltat) == type([]): deltat, = deltat
+
+    tp = _typeprefixes[machineformat]
+    try:
+        tp = tp + _typesuffixes[dataprec]
+    except KeyError:
+        raise ValueError("dataprec must be 'float32' or 'float64'.")
+
+    if ndims is None:
+        if nrecords is None:
+            ndims = min(3, len(arr.shape))
+        else:
+            # see how many leading dims we need to make up nrecords
+            dims = list(arr.shape[::-1])
+            n = 1
+            while n < nrecords:
+                n *= dims.pop()
+
+            assert n == nrecords
+            ndims = len(dims)
+
+    dims = arr.shape[-1:-ndims-1:-1]
+    nrec = np.prod(arr.shape[:-ndims], dtype=int)
+    if nrecords is not None and nrecords != nrec:
+        raise ValueError('Shape/nrecords mismatch')
+    if dimlist is not None and tuple(dimlist) != dims:
+        raise ValueError('Shape/dimlist mismatch: {} vs {}'.format(dims, dimlist))
+
+    if arr.ndim > ndims + 1:
+        sys.stderr.write("Warning: folding several dimensions into record dimension.\n")
+
+#    arr = arr.reshape((-1,)+arr.shape[-ndims:])
+
+    if times is not None:
+        try:
+            iter(times)
+        except TypeError:
+            times = [times]
+
+    if deltat is not None:
+        if itr is None:
+            itr = int(times[-1]//deltat)
+        elif times is None:
+            times = [deltat*itr]
+        else:
+            sys.stderr.write('Warning: discarding deltat.\n')
+
+    if itr is not None:
+        fbase = fbase + '.{:010d}'.format(itr)
+
+    with open(fbase + '.meta', 'w') as f:
+        if simulation is not None:
+            f.write(" simulation = { '" + simulation + "' };\n")
+
+        f.write(" nDims = [ {:3d} ];\n".format(ndims))
+
+        if max(dims) < 10000:
+            fmt = '{:5d}'
+        else:
+            fmt = '{:10d}'
+
+        fmt = fmt + ',' + fmt + ',' + fmt
+
+        f.write(" dimList = [\n " +
+                ",\n ".join(fmt.format(d, 1, d) for d in dims) +
+                "\n ];\n")
+
+        # skipping m2gl
+
+        f.write(" dataprec = [ '" + dataprec + "' ];\n")
+
+        f.write(" nrecords = [ {:5d} ];\n".format(nrec))
+
+        if itr is not None:
+            f.write(" timeStepNumber = [ {:10d} ];\n".format(itr))
+
+        if times is not None:
+            f.write(" timeInterval = [" +
+                    "".join("{:20.12E}".format(t) for t in times) +
+                    " ];\n")
+
+        if fields is not None:
+            nflds = len(fields)
+            f.write(" nFlds = [ {:4d} ];\n".format(nflds))
+            f.write(" fldList = {\n")
+            for row in range((nflds+19)//20):
+                for field in fields[20*row:20*(row+1)]:
+                    f.write(" '{:<8s}'".format(field))
+                f.write("\n")
+            f.write(" };\n")
+
+    arr.astype(tp).tofile(fbase + '.data')
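As a quick orientation, here is a minimal round-trip sketch of the wrmds/rdmds pair defined above; the file prefix 'sanity' and the field names 'U' and 'V' are hypothetical, chosen only for illustration:

import numpy as np

from mds import wrmds, rdmds, parsemeta

# two records of a 3x4 field; ndims=2 folds the leading dimension into
# the record dimension, so the file holds 2 records
a = np.arange(24., dtype='float64').reshape(2, 3, 4)
wrmds('sanity', a, itr=10, dataprec='float64', ndims=2, fields=['U', 'V'])

# reads sanity.0000000010.meta and sanity.0000000010.data back; singleton
# iteration dimensions are squeezed, so the shapes should match
b = rdmds('sanity', 10)
assert np.allclose(a, b)

# the .meta file itself parses to a dictionary of lists
meta = parsemeta('sanity.0000000010.meta')
print meta['nrecords']  # [2]

diff --git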
a/preprocess_observations/remap_SOSE_T_S.py b/preprocess_observations/remap_SOSE_T_S.py
new file mode 100644
index 000000000..17587af55
--- /dev/null
+++ b/preprocess_observations/remap_SOSE_T_S.py
@@ -0,0 +1,211 @@
+import numpy
+import xarray
+import matplotlib.pyplot as plt
+import matplotlib.colors as colors
+import sys
+from scipy.io import loadmat
+import os
+
+from mpas_analysis.shared.interpolation import Remapper
+from mpas_analysis.shared.grid import LatLonGridDescriptor
+from mpas_analysis.shared.climatology.climatology \
+    import get_antarctic_stereographic_comparison_descriptor
+from mpas_analysis.configuration.MpasAnalysisConfigParser \
+    import MpasAnalysisConfigParser
+
+from mds import rdmds
+
+
+def get_bottom_indices(cellFraction):
+    nx, ny, nz = cellFraction.shape
+    botIndices = -1*numpy.ones((nx, ny), int)
+    for zIndex in range(nz):
+        mask = cellFraction[:, :, zIndex] > 0.
+        botIndices[mask] = zIndex
+    return botIndices
+
+
+def get_monthly_average(filePrefix):
+    field, itrs, metadata = rdmds(filePrefix, rec=[0], returnmeta=True)
+    nz, ny, nx = field.shape
+    yearCount = metadata['nrecords'][0]/12
+    dims = [12, nx, ny, nz]
+
+    mask3D = cellFraction <= 0.
+    mask2D = botIndices == -1
+    xIndices, yIndices = numpy.meshgrid(numpy.arange(nx), numpy.arange(ny),
+                                        indexing='ij')
+    monthlyClimatologies = numpy.ma.masked_all(dims)
+    botMonthlyClimatologies = numpy.ma.masked_all((12, nx, ny))
+    for month in range(12):
+        first = True
+        for year in range(yearCount):
+            print '{:04d}-{:02d}'.format(year+2005, month+1)
+            recordIndex = year*12 + month
+            field = rdmds(filePrefix, rec=[recordIndex])
+            field = field.transpose(2, 1, 0)
+
+            field = numpy.ma.masked_array(field, mask=mask3D)
+            if first:
+                monthlyClimatologies[month, :, :, :] = field/float(yearCount)
+                first = False
+            else:
+                monthlyClimatologies[month, :, :, :] = \
+                    monthlyClimatologies[month, :, :, :] + \
+                    field/float(yearCount)
+        # extract the sea-floor values from the finished climatology for
+        # this month (not from the last year alone)
+        botMonthlyClimatologies[month, :, :] = \
+            numpy.ma.masked_array(
+                monthlyClimatologies[month, :, :, :][xIndices, yIndices,
+                                                     botIndices],
+                mask=mask2D)
+
+    monthlyClimatologies = monthlyClimatologies.transpose(0, 2, 1, 3)
+    botMonthlyClimatologies = botMonthlyClimatologies.transpose(0, 2, 1)
+    return monthlyClimatologies, botMonthlyClimatologies
+
+
+inGridName = 'SouthernOcean_0.167x0.167degree'
+
+inTFileName = '/media/xylar/extra_data/data_overflow/observations/' \
+    'SouthernOcean/SOSE/monthly/THETA_mnthlyBar.0000000100'
+inSFileName = '/media/xylar/extra_data/data_overflow/observations/' \
+    'SouthernOcean/SOSE/monthly/SALT_mnthlyBar.0000000100'
+inGridFileName = '/media/xylar/extra_data/data_overflow/observations/' \
+    'SouthernOcean/SOSE/grid.mat'
+
+prefix = 'SOSE_2005-2010_monthly_'
+
+config = MpasAnalysisConfigParser()
+config.read('config.default')
+
+# the comparison grid name is needed to build the output file names, so
+# compute the comparison descriptor before using it
+outDescriptor = get_antarctic_stereographic_comparison_descriptor(config)
+outGridName = outDescriptor.meshName
+
+cacheTFileName = '{}_pot_temp_{}.nc'.format(prefix, inGridName)
+cacheSFileName = '{}_salinity_{}.nc'.format(prefix, inGridName)
+outTFileName = '{}_pot_temp_{}.nc'.format(prefix, outGridName)
+outSFileName = '{}_salinity_{}.nc'.format(prefix, outGridName)
+
+if not os.path.exists(cacheTFileName) or not os.path.exists(cacheSFileName):
+    matGrid = loadmat(inGridFileName)
+    # lat/lon is a tensor grid so we can use 1-D arrays
+    lon = matGrid['XC'][:, 0]
+    lat = matGrid['YC'][0, :]
+    z = matGrid['RC'][:, 0]
+    cellFraction = matGrid['hFacC']
+
+    botIndices = get_bottom_indices(cellFraction)
+
+if os.path.exists(cacheTFileName):
+    dsT = xarray.open_dataset(cacheTFileName)
+else:
+    field, botField = get_monthly_average(inTFileName)
+
+    description = 'Monthly potential temperature climatologies from ' \
+        '2005-2010 average of the Southern Ocean State Estimate ' \
+        '(SOSE)'
+    botDescription = 'Monthly potential temperature climatologies at sea ' \
+        'floor from 2005-2010 average from SOSE'
+    dictionary = {'dims': ['Time', 'lon', 'lat', 'depth'],
+                  'coords': {'month': {'dims': ('Time'),
+                                       'data': range(1, 13),
+                                       'attrs': {'units': 'months'}},
+                             'year': {'dims': ('Time'),
+                                      'data': numpy.ones(12),
+                                      'attrs': {'units': 'years'}},
+                             'lon': {'dims': ('lon'),
+                                     'data': lon,
+                                     'attrs': {'units': 'degrees'}},
+                             'lat': {'dims': ('lat'),
+                                     'data': lat,
+                                     'attrs': {'units': 'degrees'}},
+                             'depth': {'dims': ('depth'),
+                                       'data': z,
+                                       'attrs': {'units': 'm'}}},
+                  'data_vars': {'theta':
+                                {'dims': ('Time', 'lat', 'lon', 'depth'),
+                                 'data': field,
+                                 'attrs': {'units': '$^\circ$C',
+                                           'description': description}},
+                                'botTheta':
+                                {'dims': ('Time', 'lat', 'lon'),
+                                 'data': botField,
+                                 'attrs': {'units': '$^\circ$C',
+                                           'description': botDescription}}}}
+
+    dsT = xarray.Dataset.from_dict(dictionary)
+    dsT.to_netcdf(cacheTFileName)
+
+if os.path.exists(cacheSFileName):
+    dsS = xarray.open_dataset(cacheSFileName)
+else:
+    field, botField = get_monthly_average(inSFileName)
+
+    description = 'Monthly salinity climatologies from 2005-2010 ' \
+        'average of the Southern Ocean State Estimate (SOSE)'
+    botDescription = 'Monthly salinity climatologies at sea floor ' \
+        'from 2005-2010 average from SOSE'
+    dictionary = {'dims': ['Time', 'lon', 'lat', 'depth'],
+                  'coords': {'month': {'dims': ('Time'),
+                                       'data': range(1, 13),
+                                       'attrs': {'units': 'months'}},
+                             'year': {'dims': ('Time'),
+                                      'data': numpy.ones(12),
+                                      'attrs': {'units': 'years'}},
+                             'lon': {'dims': ('lon'),
+                                     'data': lon,
+                                     'attrs': {'units': 'degrees'}},
+                             'lat': {'dims': ('lat'),
+                                     'data': lat,
+                                     'attrs': {'units': 'degrees'}},
+                             'depth': {'dims': ('depth'),
+                                       'data': z,
+                                       'attrs': {'units': 'm'}}},
+                  'data_vars': {'salinity':
+                                {'dims': ('Time', 'lat', 'lon', 'depth'),
+                                 'data': field,
+                                 'attrs': {'units': 'PSU',
+                                           'description': description}},
+                                'botSalinity':
+                                {'dims': ('Time', 'lat', 'lon'),
+                                 'data': botField,
+                                 'attrs': {'units': 'PSU',
+                                           'description': botDescription}}}}
+
+    dsS = xarray.Dataset.from_dict(dictionary)
+    dsS.to_netcdf(cacheSFileName)
+
+inDescriptor = LatLonGridDescriptor.read(cacheTFileName, latVarName='lat',
+                                         lonVarName='lon')
+
+mappingFileName = 'map_{}_to_{}.nc'.format(inGridName, outGridName)
+
+remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)
+
+remapper.build_mapping_file(method='bilinear')
+
+remappedT = remapper.remap(dsT, renormalizationThreshold=0.01)
+
+remappedT.attrs['history'] = ' '.join(sys.argv)
+remappedT.to_netcdf(outTFileName)
+
+remappedS = remapper.remap(dsS, renormalizationThreshold=0.01)
+
+remappedS.attrs['history'] = ' '.join(sys.argv)
+remappedS.to_netcdf(outSFileName)
+
+normT = colors.Normalize(vmin=-2.0, vmax=2.0)
+normS = colors.Normalize(vmin=33.0, vmax=35.0)
+
+plt.figure()
+plt.imshow(remappedT.botTheta.values[0, :, :], origin='lower', cmap='RdBu_r',
+           norm=normT)
+plt.colorbar()
+plt.figure()
+plt.imshow(remappedS.botSalinity.values[0, :, :], origin='lower',
+           cmap='RdBu_r', norm=normS)
+plt.colorbar()
+
+plt.show()
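A toy check of the get_bottom_indices helper above may be useful: because later wet levels overwrite earlier ones, each column ends up holding the index of its deepest level with nonzero cell fraction, while all-dry (land) columns keep the flag value -1. This is a sketch with made-up cell fractions, assuming get_bottom_indices from the script above is in scope:

import numpy

# two columns, three vertical levels: the first column is wet down to
# level 1 (zero-based), the second is entirely dry (land)
cellFraction = numpy.zeros((2, 1, 3))
cellFraction[0, 0, 0:2] = 1.

botIndices = get_bottom_indices(cellFraction)  # defined in the script above
print botIndices
# [[ 1]
#  [-1]]

diff --git a/preprocess_observations/remap_rignot.py b/preprocess_observations/remap_rignot.py
new file mode 100644
index 000000000..3abaef9e0
---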
/dev/null
+++ b/preprocess_observations/remap_rignot.py
@@ -0,0 +1,71 @@
+import numpy
+import xarray
+import matplotlib.pyplot as plt
+import matplotlib.colors as colors
+import pyproj
+import sys
+
+from mpas_analysis.shared.interpolation import Remapper
+from mpas_analysis.shared.grid import ProjectionGridDescriptor
+from mpas_analysis.shared.mpas_xarray.mpas_xarray import subset_variables
+from mpas_analysis.shared.climatology \
+    import get_antarctic_stereographic_comparison_descriptor
+from mpas_analysis.configuration.MpasAnalysisConfigParser \
+    import MpasAnalysisConfigParser
+
+inFileName = '/media/xylar/extra_data/data_overflow/observations/Antarctica/' \
+    'Rignot_et_al._2013/Ant_MeltingRate.nc'
+
+config = MpasAnalysisConfigParser()
+config.read('config.default')
+
+ds = xarray.open_dataset(inFileName)
+ds = subset_variables(ds, ['melt_actual', 'xaxis', 'yaxis'])
+lx = numpy.abs(1e-3*(ds.xaxis.values[-1]-ds.xaxis.values[0]))
+ly = numpy.abs(1e-3*(ds.yaxis.values[-1]-ds.yaxis.values[0]))
+
+maskedMeltRate = numpy.ma.masked_array(ds.melt_actual,
+                                       mask=(ds.melt_actual.values == 0.))
+
+ds['meltRate'] = xarray.DataArray(maskedMeltRate, dims=ds.melt_actual.dims,
+                                  coords=ds.melt_actual.coords,
+                                  attrs=ds.melt_actual.attrs)
+
+ds = ds.drop('melt_actual')
+
+inGridName = '{}x{}km_1.0km_Antarctic_stereo'.format(lx, ly)
+
+projection = pyproj.Proj('+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 '
+                         '+k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84')
+
+inDescriptor = ProjectionGridDescriptor(projection)
+
+inDescriptor.read(inFileName, xVarName='xaxis', yVarName='yaxis',
+                  meshName=inGridName)
+
+outDescriptor = get_antarctic_stereographic_comparison_descriptor(config)
+outGridName = outDescriptor.meshName
+
+outFileName = 'Rignot_2013_melt_rates_{}.nc'.format(outGridName)
+
+mappingFileName = 'map_{}_to_{}.nc'.format(inGridName, outGridName)
+
+remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)
+
+remapper.build_mapping_file(method='bilinear')
+
+remappedDataset = remapper.remap(ds, renormalizationThreshold=0.01)
+
+remappedDataset.attrs['history'] = ' '.join(sys.argv)
+remappedDataset.to_netcdf(outFileName)
+
+norm = colors.SymLogNorm(linthresh=1, linscale=1, vmin=-100.0, vmax=100.0)
+
+plt.figure()
+plt.imshow(maskedMeltRate, origin='upper', norm=norm)
+plt.colorbar()
+plt.figure()
+plt.imshow(remappedDataset.meltRate.values, origin='lower', norm=norm)
+plt.colorbar()
+
+plt.show()
diff --git a/run_analysis.py b/run_analysis.py
index 2905c5b61..57d04ed09 100755
--- a/run_analysis.py
+++ b/run_analysis.py
@@ -17,6 +17,7 @@
 import warnings
 import subprocess
 import time
+from collections import OrderedDict
 
 from mpas_analysis.configuration.MpasAnalysisConfigParser \
     import MpasAnalysisConfigParser
@@ -25,12 +26,125 @@ make_directories
 
 
+def build_analysis_list(config, isSubtask):  # {{{
+    """
+    Build a list of analysis tasks based on the 'generate' config option.
+ + Authors + ------- + Xylar Asay-Davis + """ + + # choose the right rendering backend, depending on whether we're displaying + # to the screen + if not config.getboolean('plot', 'displayToScreen'): + mpl.use('Agg') + + # analysis can only be imported after the right MPL renderer is selected + from mpas_analysis import ocean + from mpas_analysis import sea_ice + + # analyses will be a list of analysis classes + analyses = [] + + # Ocean Analyses + analyses.extend(ocean.TimeSeriesOHC.create_tasks(config)) + analyses.extend(ocean.TimeSeriesSST.create_tasks(config)) + analyses.extend(ocean.IndexNino34.create_tasks(config)) + analyses.extend(ocean.MeridionalHeatTransport.create_tasks(config)) + analyses.extend(ocean.StreamfunctionMOC.create_tasks(config)) + + analyses.extend(ocean.ClimatologyMapSST.create_tasks(config)) + analyses.extend(ocean.ClimatologyMapMLD.create_tasks(config)) + analyses.extend(ocean.ClimatologyMapSSS.create_tasks(config)) + + analyses.extend(ocean.TimeSeriesAntarcticMelt.create_tasks(config)) + analyses.extend(ocean.ClimatologyMapMeltAntarctic.create_tasks(config)) + analyses.extend(ocean.ClimatologyMapSoseTemperature.create_tasks(config)) + analyses.extend(ocean.ClimatologyMapSoseSalinity.create_tasks(config)) + + # Sea Ice Analyses + analyses.extend(sea_ice.TimeSeriesSeaIce.create_tasks(config)) + + analyses.extend(sea_ice.ClimatologyMapSeaIceConc.create_tasks(config)) + analyses.extend(sea_ice.ClimatologyMapSeaIceThick.create_tasks(config)) + + possibleAnalyses = OrderedDict() + for analysisTask in analyses: + possibleAnalyses[analysisTask.taskName] = analysisTask + + # check which analysis we actually want to generate and only keep those + analysesToGenerate = OrderedDict() + for analysisTask in possibleAnalyses.itervalues(): + # update the dictionary with this task and perhaps its prerequisites + analysesToAdd = add_task_and_prereqisites(analysisTask, + possibleAnalyses, + analysesToGenerate, + isPrerequisite=False, + isSubtask=isSubtask) + analysesToGenerate.update(analysesToAdd) + + return analysesToGenerate # }}} + + +def add_task_and_prereqisites(analysisTask, possibleAnalyses, + analysesToGenerate, isPrerequisite, + isSubtask): # {{{ + """ + If a task has been requested through the generate config option or + if it is a prerequisite of a requested task, add it to the dictionary of + tasks to generate. 
+
+
+def add_task_and_prerequisites(analysisTask, possibleAnalyses,
+                               analysesToGenerate, isPrerequisite,
+                               isSubtask):  # {{{
+    """
+    If a task has been requested through the generate config option or
+    if it is a prerequisite of a requested task, add it to the dictionary of
+    tasks to generate.
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+
+    analysesToAdd = OrderedDict()
+    # for each analysis task, check if we want to generate this task
+    # and if the analysis task has a valid configuration
+    if isPrerequisite or analysisTask.check_generate():
+        add = False
+        try:
+            analysisTask.setup_and_check()
+            add = True
+        except (Exception, BaseException):
+            traceback.print_exc(file=sys.stdout)
+            print "ERROR: analysis task {} failed during check and " \
+                "will not be run".format(analysisTask.taskName)
+        if add and not isSubtask:
+            # first, we should try to add the prerequisites
+            prereqs = analysisTask.prerequisiteTasks
+            if prereqs is not None:
+                for prereq in prereqs:
+                    if prereq not in analysesToGenerate.keys():
+                        prereqToAdd = add_task_and_prerequisites(
+                            possibleAnalyses[prereq], possibleAnalyses,
+                            analysesToGenerate, isPrerequisite=True,
+                            isSubtask=isSubtask)
+                        if len(prereqToAdd.keys()) == 0:
+                            # a prerequisite failed setup_and_check
+                            print "ERROR: a prerequisite of analysis task {}" \
+                                " failed during check and will not be" \
+                                " run".format(analysisTask.taskName)
+                            add = False
+                            break
+                        # the prerequisite (and its prerequisites) should be
+                        # added
+                        analysesToAdd.update(prereqToAdd)
+        if add:
+            analysesToAdd[analysisTask.taskName] = analysisTask
+
+    return analysesToAdd  # }}}
+
+
 def update_generate(config, generate):  # {{{
     """
     Update the 'generate' config option using a string from the command line.
 
-    Author: Xylar Asay-Davis
-    Last Modified: 03/07/2017
+    Authors
+    -------
+    Xylar Asay-Davis
     """
 
     # overwrite the 'generate' in config with a string that parses to
@@ -42,8 +156,7 @@ def update_generate(config, generate):  # {{{
     config.set('output', 'generate', generateString)  # }}}
 
 
-def run_parallel_tasks(config, analyses, configFiles, taskCount):
-    # {{{
+def run_analysis(config, analyses, configFiles, isSubtask):  # {{{
     """
     Run this script once each for several parallel tasks.
 
@@ -52,40 +165,99 @@
     Xylar Asay-Davis
     """
 
-    taskNames = [analysisTask.taskName for analysisTask in analyses]
+    taskCount = config.getWithDefault('execute', 'parallelTaskCount',
+                                      default=1)
 
-    taskCount = min(taskCount, len(taskNames))
+    isParallel = not isSubtask and taskCount > 1 and len(analyses) > 1
 
-    (processes, logs) = launch_tasks(taskNames[0:taskCount], config,
-                                     configFiles)
-    remainingTasks = taskNames[taskCount:]
-    while len(processes) > 0:
-        (taskName, process) = wait_for_task(processes)
-        if process.returncode == 0:
-            print "Task {} has finished successfully.".format(taskName)
+    for analysisTask in analyses.itervalues():
+        if analysisTask.prerequisiteTasks is None or isSubtask:
+            analysisTask.status = 'ready'
         else:
-            print "ERROR in task {}.  See log file {} for details".format(
-                taskName, logs[taskName].name)
-        logs[taskName].close()
-        # remove the process from the process dictionary (no need to bother)
-        processes.pop(taskName)
-
-        if len(remainingTasks) > 0:
-            (process, log) = launch_tasks(remainingTasks[0:1], config,
-                                          configFiles)
-            # merge the new process and log into these dictionaries
-            processes.update(process)
-            logs.update(log)
-            remainingTasks = remainingTasks[1:]
+            analysisTask.status = 'blocked'
+
+    processes = {}
+    logs = {}
+
+    # run each analysis task
+    lastException = None
+
+    runningCount = 0
+    while True:
+        # update the status of any blocked tasks
+        for analysisTask in analyses.itervalues():
+            if analysisTask.status == 'blocked':
+                prereqStatus = [analyses[prereq].status for prereq in
                                analysisTask.prerequisiteTasks]
+                if any([status == 'fail' for status in prereqStatus]):
+                    # a prerequisite failed so this task cannot succeed
+                    analysisTask.status = 'fail'
+                if all([status == 'success' for status in prereqStatus]):
+                    # no unfinished prerequisites so we can run this task
+                    analysisTask.status = 'ready'
+
+        unfinishedCount = 0
+        for analysisTask in analyses.itervalues():
+            if analysisTask.status not in ['success', 'fail']:
+                unfinishedCount += 1
+
+        if unfinishedCount <= 0:
+            # we're done
+            break
+
+        # launch new tasks
+        for taskName, analysisTask in analyses.items():
+            if analysisTask.status == 'ready':
+                if isParallel:
+                    process, logFile = launch_task(taskName, config,
+                                                   configFiles)
+                    processes[taskName] = process
+                    logs[taskName] = logFile
+                    analysisTask.status = 'running'
+                    runningCount += 1
+                    if runningCount >= taskCount:
+                        break
+                else:
+                    exception = run_task(config, analysisTask)
+                    if exception is None:
+                        analysisTask.status = 'success'
+                    else:
+                        lastException = exception
+                        analysisTask.status = 'fail'
+
+        if isParallel:
+            # wait for a task to finish
+            (taskName, process) = wait_for_task(processes)
+            analysisTask = analyses[taskName]
+            runningCount -= 1
+            processes.pop(taskName)
+            if process.returncode == 0:
+                print "Task {} has finished successfully.".format(taskName)
+                analysisTask.status = 'success'
+            else:
+                print "ERROR in task {}.  See log file {} for details".format(
+                    taskName, logs[taskName].name)
+                analysisTask.status = 'fail'
+            logs[taskName].close()
+
+    if not isParallel and config.getboolean('plot', 'displayToScreen'):
+        import matplotlib.pyplot as plt
+        plt.show()
+
+    # raise the last exception so the process exits with an error
+    if lastException is not None:
+        raise lastException
+    # }}}
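The scheduling loop above is a simple status machine: tasks start out `'ready'` or `'blocked'`, a blocked task fails as soon as any prerequisite fails, and it becomes ready once all prerequisites succeed. A self-contained sketch of just that promotion step, with hypothetical task names (written with `elif`, which is equivalent here since the two conditions cannot both hold for a non-empty prerequisite list):

```python
def update_blocked(statuses, prereqs):
    # promote or fail blocked tasks based on their prerequisites' statuses
    for name in statuses:
        if statuses[name] != 'blocked':
            continue
        prereqStatus = [statuses[p] for p in prereqs[name]]
        if any(s == 'fail' for s in prereqStatus):
            statuses[name] = 'fail'    # a prerequisite failed
        elif all(s == 'success' for s in prereqStatus):
            statuses[name] = 'ready'   # all prerequisites succeeded

statuses = {'clim': 'success', 'mapSST': 'blocked',
            'moc': 'fail', 'mocTimeSeries': 'blocked'}
prereqs = {'mapSST': ['clim'], 'mocTimeSeries': ['moc']}
update_blocked(statuses, prereqs)
print('{} {}'.format(statuses['mapSST'], statuses['mocTimeSeries']))
# ready fail
```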
 
 
-def launch_tasks(taskNames, config, configFiles):  # {{{
+def launch_task(taskName, config, configFiles):  # {{{
     """
-    Launch one or more tasks
+    Launch a parallel task
 
-    Author: Xylar Asay-Davis
-    Last Modified: 03/08/2017
+    Authors
+    -------
+    Xylar Asay-Davis
     """
 
     thisFile = os.path.realpath(__file__)
@@ -96,25 +268,21 @@
     else:
         commandPrefix = commandPrefix.split(' ')
 
-    processes = {}
-    logs = {}
-    for taskName in taskNames:
-        args = commandPrefix + [thisFile, '--generate', taskName] + configFiles
+    args = commandPrefix + [thisFile, '--subtask', '--generate', taskName] \
+        + configFiles
 
-        logFileName = '{}/{}.log'.format(logsDirectory, taskName)
+    logFileName = '{}/{}.log'.format(logsDirectory, taskName)
 
-        # write the command to the log file
-        logFile = open(logFileName, 'w')
-        logFile.write('Command: {}\n'.format(' '.join(args)))
-        # make sure the command gets written before the rest of the log
-        logFile.flush()
-        print 'Running {}'.format(taskName)
-        process = subprocess.Popen(args, stdout=logFile,
-                                   stderr=subprocess.STDOUT)
-        processes[taskName] = process
-        logs[taskName] = logFile
+    # write the command to the log file
+    logFile = open(logFileName, 'w')
+    logFile.write('Command: {}\n'.format(' '.join(args)))
+    # make sure the command gets written before the rest of the log
+    logFile.flush()
+    print 'Running {}'.format(taskName)
+    process = subprocess.Popen(args, stdout=logFile,
+                               stderr=subprocess.STDOUT)
 
-    return (processes, logs)  # }}}
+    return (process, logFile)  # }}}
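`launch_task` re-invokes this same script as a child process with `--subtask --generate <taskName>`, sending both stdout and stderr to a per-task log file. The same `Popen` pattern in isolation (the command here is just a stand-in for an actual subtask):

```python
import subprocess

args = ['python', '-c', "print('pretend this is one analysis task')"]

logFile = open('exampleTask.log', 'w')
logFile.write('Command: {}\n'.format(' '.join(args)))
logFile.flush()  # make sure the header lands before the subprocess output

process = subprocess.Popen(args, stdout=logFile, stderr=subprocess.STDOUT)
process.wait()
logFile.close()
print('return code: {}'.format(process.returncode))
```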
 
 
 def wait_for_task(processes):  # {{{
@@ -122,8 +290,9 @@
     Wait for the next process to finish and check its status.  Returns both
     the task name and the process that finished.
 
-    Author: Xylar Asay-Davis
-    Last Modified: 03/08/2017
+    Authors
+    -------
+    Xylar Asay-Davis
     """
 
     # first, check if any process has already finished
@@ -144,8 +313,9 @@
     """
     Returns whether a given process is currently running
 
-    Author: Xylar Asay-Davis
-    Last Modified: 03/08/2017
+    Authors
+    -------
+    Xylar Asay-Davis
     """
 
     try:
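Only the docstrings of `wait_for_task` and `is_running` change in this patch, so their bodies are elided by the hunk context. For reference, one standard-library way to test whether a `Popen` child is still alive is to check `poll()`, which returns `None` until the process exits; this sketch is illustrative and not necessarily how the helper itself is implemented:

```python
import subprocess
import time

process = subprocess.Popen(['python', '-c', 'import time; time.sleep(0.5)'])

# poll() returns None while the child is running and its return code
# once it has exited
while process.poll() is None:
    time.sleep(0.1)
print('finished with return code {}'.format(process.returncode))
```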
@@ -156,108 +326,56 @@
     return True  # }}}
 
 
-def build_analysis_list(config):  # {{{
+def run_task(config, analysisTask):  # {{{
     """
-    Build a list of analysis modules based on the 'generate' config option.
+    Run a single analysis task, time the task, write out the config file
+    (including any modifications specific to the task) and return the
+    exception raised (if any)
 
-    Author: Xylar Asay-Davis
-    Last Modified: 03/07/2017
+    Authors
+    -------
+    Xylar Asay-Davis
     """
 
-    # choose the right rendering backend, depending on whether we're displaying
-    # to the screen
-    if not config.getboolean('plot', 'displayToScreen'):
-        mpl.use('Agg')
-
-    # analysis can only be imported after the right MPL renderer is selected
-    from mpas_analysis import ocean
-    from mpas_analysis import sea_ice
-
-    # analyses will be a list of analysis classes
-    analyses = []
-
-    # Ocean Analyses
-    analyses.append(ocean.TimeSeriesOHC(config))
-    analyses.append(ocean.TimeSeriesSST(config))
-    analyses.append(ocean.IndexNino34(config))
-    analyses.append(ocean.MeridionalHeatTransport(config))
-    analyses.append(ocean.StreamfunctionMOC(config))
-
-    analyses.append(ocean.ClimatologyMapSST(config))
-    analyses.append(ocean.ClimatologyMapMLD(config))
-    analyses.append(ocean.ClimatologyMapSSS(config))
-
-    # Sea Ice Analyses
-    analyses.append(sea_ice.TimeSeriesSeaIce(config))
-    analyses.append(sea_ice.ClimatologyMapSeaIce(config))
-
-    # check which analysis we actually want to generate and only keep those
-    analysesToGenerate = []
-    for analysisTask in analyses:
-        # for each anlaysis module, check if we want to generate this task
-        # and if the analysis task has a valid configuration
-        if analysisTask.check_generate():
-            add = False
-            try:
-                analysisTask.setup_and_check()
-                add = True
-            except:
-                traceback.print_exc(file=sys.stdout)
-                print "ERROR: analysis module {} failed during check and " \
-                    "will not be run".format(analysisTask.taskName)
-            if add:
-                analysesToGenerate.append(analysisTask)
-
-    return analysesToGenerate  # }}}
-
-
-def run_analysis(config, analyses):  # {{{
-
-    # run each analysis task
-    lastException = None
-    for analysisTask in analyses:
-        # write out a copy of the configuration to document the run
-        logsDirectory = build_config_full_path(config, 'output',
-                                               'logsSubdirectory')
-        try:
-            startTime = time.clock()
-            analysisTask.run()
-            runDuration = time.clock() - startTime
-            m, s = divmod(runDuration, 60)
-            h, m = divmod(int(m), 60)
-            print 'Execution time: {}:{:02d}:{:05.2f}'.format(h, m, s)
-        except (Exception, BaseException) as e:
-            if isinstance(e, KeyboardInterrupt):
-                raise e
-            traceback.print_exc(file=sys.stdout)
-            print "ERROR: analysis module {} failed during run".format(
-                analysisTask.taskName)
-            lastException = e
-
-        configFileName = '{}/configs/config.{}'.format(logsDirectory,
-                                                       analysisTask.taskName)
-        configFile = open(configFileName, 'w')
-        config.write(configFile)
-        configFile.close()
-
-    if config.getboolean('plot', 'displayToScreen'):
-        import matplotlib.pyplot as plt
-        plt.show()
-
-    # raise the last exception so the process exits with an error
-    if lastException is not None:
-        raise lastException
-
-    return  # }}}
+    # write out a copy of the configuration to document the run
+    logsDirectory = build_config_full_path(config, 'output',
+                                           'logsSubdirectory')
+    exception = None
+    try:
+        startTime = time.clock()
+        analysisTask.run()
+        runDuration = time.clock() - startTime
+        m, s = divmod(runDuration, 60)
+        h, m = divmod(int(m), 60)
+        print 'Execution time: {}:{:02d}:{:05.2f}'.format(h, m, s)
+    except (Exception, BaseException) as e:
+        if isinstance(e, KeyboardInterrupt):
+            raise e
+        traceback.print_exc(file=sys.stdout)
+        print "ERROR: analysis task {} failed during run".format(
+            analysisTask.taskName)
+        exception = e
+
+    configFileName = '{}/configs/config.{}'.format(logsDirectory,
+                                                   analysisTask.taskName)
+    configFile = open(configFileName, 'w')
+    config.write(configFile)
+    configFile.close()
+
+    return exception  # }}}
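The nested `divmod` calls in `run_task` convert a duration in seconds into an `H:MM:SS.ss` string. A worked example with a concrete duration:

```python
runDuration = 3725.5  # seconds, e.g. a time.clock() difference

m, s = divmod(runDuration, 60)  # m = 62.0 minutes, s = 5.5 seconds
h, m = divmod(int(m), 60)       # h = 1 hour, m = 2 minutes
print('Execution time: {}:{:02d}:{:05.2f}'.format(h, m, s))
# Execution time: 1:02:05.50
```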
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description=__doc__,
         formatter_class=argparse.RawTextHelpFormatter)
+    parser.add_argument("--subtask", dest="subtask", action='store_true',
+                        help="Flag indicating that this process is a subtask "
+                             "launched as part of a parallel run")
     parser.add_argument("-g", "--generate", dest="generate",
                         help="A list of analysis modules to generate "
-                        "(nearly identical generate option in config file).",
+                             "(nearly identical to the 'generate' option in "
+                             "the config file).",
                         metavar="ANALYSIS1[,ANALYSIS2,ANALYSIS3,...]")
     parser.add_argument('configFiles', metavar='CONFIG', type=str, nargs='+',
                         help='config file')
@@ -286,14 +404,8 @@ def run_analysis(config, analyses):  # {{{
     make_directories(logsDirectory)
     make_directories('{}/configs/'.format(logsDirectory))
 
-    analyses = build_analysis_list(config)
-
-    parallelTaskCount = config.getWithDefault('execute', 'parallelTaskCount',
-                                              default=1)
+    analyses = build_analysis_list(config, args.subtask)
 
-    if parallelTaskCount <= 1 or len(analyses) == 1:
-        run_analysis(config, analyses)
-    else:
-        run_parallel_tasks(config, analyses, configFiles, parallelTaskCount)
+    run_analysis(config, analyses, configFiles, args.subtask)
 
 # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
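With these flags, a parent run and its subtasks share one entry point: the parent calls `launch_task`, which re-runs this script with `--subtask --generate <taskName>`. A small argparse check of how such a command line is parsed (the task and config-file names are placeholders):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--subtask', dest='subtask', action='store_true')
parser.add_argument('-g', '--generate', dest='generate',
                    metavar='ANALYSIS1[,ANALYSIS2,ANALYSIS3,...]')
parser.add_argument('configFiles', metavar='CONFIG', type=str, nargs='+')

args = parser.parse_args(['--subtask', '--generate', 'climatologyMapSST',
                          'config.myrun'])
print('{} {} {}'.format(args.subtask, args.generate, args.configFiles))
# True climatologyMapSST ['config.myrun']
```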