Merge branch 'develop' into feature/prepsnowobs_ens
CoryMartin-NOAA committed Apr 16, 2024
2 parents 1f35c8c + 420459c commit 929a12b
Showing 20 changed files with 787 additions and 188 deletions.
2 changes: 0 additions & 2 deletions parm/atm/obs/lists/gdas_prototype_3d.yaml.j2
@@ -7,6 +7,4 @@ observers:
{% include 'atm/obs/config/ompsnp_npp.yaml.j2' %}
{% include 'atm/obs/config/ompstc_npp.yaml.j2' %}
{% include 'atm/obs/config/omi_aura.yaml.j2' %}
{% include 'atm/obs/config/seviri_m08.yaml.j2' %}
{% include 'atm/obs/config/seviri_m11.yaml.j2' %}
{% endfilter %}
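
The observer list above is just a chain of Jinja2 {% include %} statements wrapped in a {% filter %} block, so dropping the two SEVIRI lines removes those instruments from the rendered YAML without touching anything else. Below is a self-contained toy sketch of that include-inside-filter pattern; the template text is a stand-in, not the real files, which the workflow renders through its own Jinja tooling (e.g. wxflow's parse_j2yaml).

# Self-contained sketch of the include-inside-filter pattern used by the
# observer lists; the template text here is a toy stand-in, not the real file.
from jinja2 import DictLoader, Environment

templates = {
    "lists/observers.yaml.j2": (
        "observers:\n"
        "{% filter indent(width=2) %}\n"
        "{% include 'config/omi_aura.yaml.j2' %}\n"
        "{% endfilter %}\n"
    ),
    "config/omi_aura.yaml.j2": "- obs space:\n    name: omi_aura\n",
}

env = Environment(loader=DictLoader(templates))
print(env.get_template("lists/observers.yaml.j2").render())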
63 changes: 0 additions & 63 deletions parm/atm/utils/fv3jedi_fv3inc.yaml.j2

This file was deleted.

62 changes: 62 additions & 0 deletions parm/atm/utils/fv3jedi_fv3inc_lgetkf.yaml.j2
@@ -0,0 +1,62 @@
variable change:
variable change name: Model2GeoVaLs
input variables: &bkgvars [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr,surface_geopotential_height]
output variables: &fv3incrvars [ua,va,t,sphum,ice_wat,liq_wat,o3mr,delp,hydrostatic_delz]
jedi increment variables: [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr]
fv3 increment variables: *fv3incrvars
background geometry:
fms initialization:
namelist filename: ./fv3jedi/fmsmpp.nml
field table filename: ./fv3jedi/field_table
akbk: ./fv3jedi/akbk.nc4
layout:
- {{ layout_x }}
- {{ layout_y }}
npx: {{ npx_ges }}
npy: {{ npy_ges }}
npz: {{ npz_ges }}
field metadata override: ./fv3jedi/fv3jedi_fieldmetadata_history.yaml
jedi increment geometry:
fms initialization:
namelist filename: ./fv3jedi/fmsmpp.nml
field table filename: ./fv3jedi/field_table
akbk: ./fv3jedi/akbk.nc4
layout:
- {{ layout_x }}
- {{ layout_y }}
npx: {{ npx_ges }}
npy: {{ npy_ges }}
npz: {{ npz_ges }}
field metadata override: ./fv3jedi/fv3jedi_fieldmetadata_history.yaml
fv3 increment geometry:
fms initialization:
namelist filename: ./fv3jedi/fmsmpp.nml
field table filename: ./fv3jedi/field_table
akbk: ./fv3jedi/akbk.nc4
layout:
- {{ layout_x }}
- {{ layout_y }}
npx: {{ npx_ges }}
npy: {{ npy_ges }}
npz: {{ npz_ges }}
field metadata override: ./fv3jedi/fv3jedi_fieldmetadata_fv3inc.yaml
members from template:
template:
background input:
datetime: '{{ current_cycle | to_isotime }}'
filetype: cube sphere history
provider: ufs
datapath: ./bkg/mem%mem%
filename: {{ EPREFIX }}atmf006.nc
state variables: *bkgvars
jedi increment input:
filetype: cube sphere history
filename: ./anl/mem%mem%/atminc.{{ current_cycle | to_fv3time }}.nc4
provider: ufs
fv3 increment output:
filetype: auxgrid
gridtype: gaussian
filename: ./anl/mem%mem%/atminc.
pattern: '%mem%'
nmembers: {{ NMEM_ENS }}
zero padding: 3
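
In the LGETKF variant above, the 'members from template' block tells the fv3jedi_fv3inc.x application to stamp out one background/increment pair per ensemble member: every occurrence of the pattern '%mem%' is replaced by the member index, zero-padded to three digits, for {{ NMEM_ENS }} members. The Python sketch below only illustrates that expansion; the substitution happens inside fv3-jedi, and the concrete paths are placeholders with the Jinja variables already filled in.

# Illustration of the '%mem%' expansion driven by 'members from template';
# nmembers and the paths are stand-in values, not taken from a real cycle.
nmembers = 3                      # stands in for {{ NMEM_ENS }}
zero_padding = 3
pattern = "%mem%"

templated_paths = [
    "./bkg/mem%mem%/enkfgdas.t12z.atmf006.nc",       # background input (placeholder prefix)
    "./anl/mem%mem%/atminc.20240416.120000.nc4",     # jedi increment input (placeholder time)
]

for member in range(1, nmembers + 1):
    member_id = str(member).zfill(zero_padding)      # 1 -> '001', 2 -> '002', ...
    for path in templated_paths:
        print(path.replace(pattern, member_id))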
62 changes: 62 additions & 0 deletions parm/atm/utils/fv3jedi_fv3inc_variational.yaml.j2
@@ -0,0 +1,62 @@
variable change:
variable change name: Model2GeoVaLs
input variables: &bkgvars [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr,phis]
output variables: &fv3incrvars [ua,va,t,sphum,ice_wat,liq_wat,o3mr,delp,hydrostatic_delz]
jedi increment variables: [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr]
fv3 increment variables: *fv3incrvars
background geometry:
fms initialization:
namelist filename: ./fv3jedi/fmsmpp.nml
field table filename: ./fv3jedi/field_table
akbk: ./fv3jedi/akbk.nc4
layout:
- {{ layout_x }}
- {{ layout_y }}
npx: {{ npx_ges }}
npy: {{ npy_ges }}
npz: {{ npz_ges }}
field metadata override: ./fv3jedi/fv3jedi_fieldmetadata_restart.yaml
jedi increment geometry:
fms initialization:
namelist filename: ./fv3jedi/fmsmpp.nml
field table filename: ./fv3jedi/field_table
akbk: ./fv3jedi/akbk.nc4
layout:
- {{ layout_x }}
- {{ layout_y }}
npx: {{ npx_anl }}
npy: {{ npy_anl }}
npz: {{ npz_anl }}
field metadata override: ./fv3jedi/fv3jedi_fieldmetadata_history.yaml
fv3 increment geometry:
fms initialization:
namelist filename: ./fv3jedi/fmsmpp.nml
field table filename: ./fv3jedi/field_table
akbk: ./fv3jedi/akbk.nc4
layout:
- {{ layout_x }}
- {{ layout_y }}
npx: {{ npx_anl }}
npy: {{ npy_anl }}
npz: {{ npz_anl }}
field metadata override: ./fv3jedi/fv3jedi_fieldmetadata_fv3inc.yaml
members:
- background input:
datapath: ./bkg
filetype: fms restart
datetime: '{{ current_cycle | to_isotime }}'
filename_core: '{{ current_cycle | to_fv3time }}.fv_core.res.nc'
filename_trcr: '{{ current_cycle | to_fv3time }}.fv_tracer.res.nc'
filename_sfcd: '{{ current_cycle | to_fv3time }}.sfc_data.nc'
filename_sfcw: '{{ current_cycle | to_fv3time }}.fv_srf_wnd.res.nc'
filename_cplr: '{{ current_cycle | to_fv3time }}.coupler.res'
state variables: *bkgvars
jedi increment input:
filetype: cube sphere history
filename: ./anl/atminc.{{ current_cycle | to_fv3time }}.nc4
provider: ufs
fv3 increment output:
filetype: auxgrid
gridtype: gaussian
filename: ./anl/atminc.
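
Both fv3inc templates are Jinja2 files whose {{ }} placeholders (layout, grid dimensions, cycle time) are filled in by the workflow at run time, with current_cycle passed through the to_isotime and to_fv3time filters. A rough sketch of that rendering with plain jinja2 and simplified stand-in filters follows; the workflow itself goes through wxflow's parse_j2yaml, which registers the real filters, and the context values below are examples only.

# Hedged sketch: fill the fv3inc template's {{ }} placeholders with jinja2.
# to_isotime/to_fv3time below are simplified stand-ins for wxflow's filters.
from datetime import datetime
from jinja2 import Environment, FileSystemLoader

def to_isotime(dt):
    return dt.strftime("%Y-%m-%dT%H:%M:%SZ")

def to_fv3time(dt):
    return dt.strftime("%Y%m%d.%H%M%S")

env = Environment(loader=FileSystemLoader("parm"))   # assumed template root
env.filters["to_isotime"] = to_isotime
env.filters["to_fv3time"] = to_fv3time

context = {                                          # example values only
    "layout_x": 2, "layout_y": 2,
    "npx_ges": 49, "npy_ges": 49, "npz_ges": 127,
    "npx_anl": 49, "npy_anl": 49, "npz_anl": 127,
    "current_cycle": datetime(2024, 4, 16, 12),
}
template = env.get_template("atm/utils/fv3jedi_fv3inc_variational.yaml.j2")
print(template.render(**context))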

7 changes: 7 additions & 0 deletions parm/soca/obs/obs_stats.yaml.j2
@@ -0,0 +1,7 @@
time window:
begin: '1900-01-01T00:00:00Z'
end: '2035-01-01T00:00:00Z'
bound to include: begin

obs spaces:
{{ obs_spaces }}
77 changes: 74 additions & 3 deletions scripts/exgdas_global_marine_analysis_post.py
@@ -22,7 +22,11 @@
import glob
import shutil
from datetime import datetime, timedelta
from wxflow import FileHandler, Logger
from wxflow import AttrDict, FileHandler, Logger, parse_j2yaml
from multiprocessing import Process
import subprocess
import netCDF4
import re

logger = Logger()

@@ -36,8 +40,6 @@ def list_all_files(dir_in, dir_out, wc='*', fh_list=[]):
return fh_list


logger.info(f"---------------- Copy from RUNDIR to COMOUT")

com_ocean_analysis = os.getenv('COM_OCEAN_ANALYSIS')
com_ice_restart = os.getenv('COM_ICE_RESTART')
anl_dir = os.getenv('DATA')
@@ -52,6 +54,8 @@ def list_all_files(dir_in, dir_out, wc='*', fh_list=[]):
bdate = datetime.strftime(bdatedt, '%Y-%m-%dT%H:00:00Z')
mdate = datetime.strftime(datetime.strptime(cdate, '%Y%m%d%H'), '%Y-%m-%dT%H:00:00Z')

logger.info(f"---------------- Copy from RUNDIR to COMOUT")

post_file_list = []

# Make a copy of the IAU increment
@@ -106,3 +110,70 @@ def list_all_files(dir_in, dir_out, wc='*', fh_list=[]):
os.path.join(com_ocean_analysis, 'yaml'), wc='*.yaml', fh_list=fh_list)

FileHandler({'copy': fh_list}).sync()

# obs space statistics
logger.info(f"---------------- Compute basic stats")
diags_list = glob.glob(os.path.join(os.path.join(com_ocean_analysis, 'diags', '*.nc4')))
obsstats_j2yaml = str(os.path.join(os.getenv('HOMEgfs'), 'sorc', 'gdas.cd',
'parm', 'soca', 'obs', 'obs_stats.yaml.j2'))


# function to create a minimalist ioda obs space
def create_obs_space(data):
os_dict = {"obs space": {
"name": data["obs_space"],
"obsdatain": {
"engine": {"type": "H5File", "obsfile": data["obsfile"]}
},
"simulated variables": [data["variable"]]
},
"variable": data["variable"],
"experiment identifier": data["pslot"],
"csv output": data["csv_output"]
}
return os_dict


# attempt to extract the experiment id from the path
pslot = os.path.normpath(com_ocean_analysis).split(os.sep)[-5]

# iterate through the obs spaces and generate the yaml for gdassoca_obsstats.x
obs_spaces = []
for obsfile in diags_list:

# define an obs space name
obs_space = re.sub(r'\.\d{10}\.nc4$', '', os.path.basename(obsfile))

# get the variable name, assume 1 variable per file
nc = netCDF4.Dataset(obsfile, 'r')
variable = next(iter(nc.groups["ObsValue"].variables))
nc.close()

# filling values for the templated yaml
data = {'obs_space': os.path.basename(obsfile),
'obsfile': obsfile,
'pslot': pslot,
'variable': variable,
'csv_output': os.path.join(com_ocean_analysis,
f"{RUN}.t{cyc}z.ocn.{obs_space}.stats.csv")}
obs_spaces.append(create_obs_space(data))

# create the yaml
data = {'obs_spaces': obs_spaces}
conf = parse_j2yaml(path=obsstats_j2yaml, data=data)
stats_yaml = 'diag_stats.yaml'
conf.save(stats_yaml)

# run the application
# TODO(GorA): this should be setup properly in the g-w once gdassoca_obsstats is in develop
gdassoca_obsstats_exec = os.path.join(os.getenv('HOMEgfs'),
'sorc', 'gdas.cd', 'build', 'bin', 'gdassoca_obsstats.x')
command = f"{os.getenv('launcher')} {gdassoca_obsstats_exec} {stats_yaml}"
logger.info(f"{command}")
result = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

# issue a warning if the process has failed
if result.returncode != 0:
logger.warning(f"{command} has failed")
if result.stderr:
print("STDERR:", result.stderr.decode())
2 changes: 1 addition & 1 deletion sorc/femps
2 changes: 1 addition & 1 deletion sorc/fv3-jedi-lm
5 changes: 5 additions & 0 deletions test/atm/global-workflow/CMakeLists.txt
@@ -16,6 +16,11 @@ add_test(NAME test_gdasapp_atm_jjob_var_run
${PROJECT_BINARY_DIR} ${PROJECT_SOURCE_DIR}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/atm/global-workflow/testrun)

add_test(NAME test_gdasapp_atm_jjob_var_inc
COMMAND ${PROJECT_SOURCE_DIR}/test/atm/global-workflow/jjob_var_inc.sh
${PROJECT_BINARY_DIR} ${PROJECT_SOURCE_DIR}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/atm/global-workflow/testrun)

add_test(NAME test_gdasapp_atm_jjob_var_final
COMMAND ${PROJECT_SOURCE_DIR}/test/atm/global-workflow/jjob_var_final.sh
${PROJECT_BINARY_DIR} ${PROJECT_SOURCE_DIR}
55 changes: 55 additions & 0 deletions test/atm/global-workflow/jjob_var_inc.sh
@@ -0,0 +1,55 @@
#! /usr/bin/env bash

set -x
bindir=$1
srcdir=$2

# Set g-w HOMEgfs
topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." && pwd -P)
export HOMEgfs=$topdir

# Set variables for ctest
export PSLOT=gdas_test
export EXPDIR=$bindir/test/atm/global-workflow/testrun/experiments/$PSLOT
export PDY=20210323
export cyc=18
export CDATE=${PDY}${cyc}
export ROTDIR=$bindir/test/atm/global-workflow/testrun/ROTDIRS/$PSLOT
export RUN=gdas
export CDUMP=gdas
export DATAROOT=$bindir/test/atm/global-workflow/testrun/RUNDIRS/$PSLOT
export COMIN_GES=${bindir}/test/atm/bkg
export pid=${pid:-$$}
export jobid=$pid
export COMROOT=$DATAROOT
export NMEM_ENS=0
export ACCOUNT=da-cpu

# Set python path for workflow utilities and tasks
wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow"
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
export PYTHONPATH

# Determine machine from config.base
machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"')

# Set NETCDF and UTILROOT variables (used in config.base)
if [[ $machine = 'HERA' ]]; then
NETCDF=$( which ncdump )
export NETCDF
export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2"
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
ncdump=$( which ncdump )
NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 )
export NETCDF
export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2
fi

# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
else
${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
fi
6 changes: 3 additions & 3 deletions test/atm/global-workflow/jjob_var_run.sh
@@ -50,9 +50,9 @@ fi

# Execute j-job
if [[ $machine = 'HERA' ]]; then
- sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN
+ sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
- sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN
+ sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
else
- ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN
+ ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
fi