Commit
Merge branch 'develop' into feature/move_jcb
danholdaway committed Jun 13, 2024
2 parents 0cda73b + 9ffce97 commit 368c9c5
Showing 47 changed files with 1,175 additions and 703 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/norms.yaml
@@ -25,4 +25,4 @@ jobs:
- name: Run C++ linter on utils
run: |
cd $GITHUB_WORKSPACE/GDASApp/utils/test/
./cpplint.py --quiet --recursive $GITHUB_WORKSPACE/GDASApp/utils
./cpplint.py --quiet --recursive --exclude=$GITHUB_WORKSPACE/GDASApp/utils/obsproc/rtofs/*.cc $GITHUB_WORKSPACE/GDASApp/utils
2 changes: 2 additions & 0 deletions CMakeLists.txt
@@ -8,6 +8,8 @@ cmake_minimum_required( VERSION 3.20 FATAL_ERROR )
find_package(ecbuild 3.5 REQUIRED HINTS ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/../ecbuild)

project(GDASApp VERSION 1.0.0 LANGUAGES C CXX Fortran )
# include_directories(${CMAKE_SOURCE_DIR})
include_directories(${CMAKE_CURRENT_SOURCE_DIR})

include(GNUInstallDirs)
enable_testing()
5 changes: 5 additions & 0 deletions mains/gdas.cc
100644 → 100755
@@ -14,6 +14,7 @@
#include "ufo/instantiateObsFilterFactory.h"
#include "ufo/ObsTraits.h"

#include "oops/runs/ConvertToStructuredGrid.h"
#include "oops/runs/ConvertState.h"
#include "oops/runs/HofX4D.h"
#include "oops/runs/LocalEnsembleDA.h"
@@ -52,6 +53,9 @@ int runApp(int argc, char** argv, const std::string traits, const std::string ap
// Define a map from app names to lambda functions that create unique_ptr to Applications
std::map<std::string, std::function<std::unique_ptr<oops::Application>()>> apps;

apps["converttostructuredgrid"] = []() {
return std::make_unique<oops::ConvertToStructuredGrid<Traits>>();
};
apps["convertstate"] = []() {
return std::make_unique<oops::ConvertState<Traits>>();
};
@@ -96,6 +100,7 @@ int main(int argc, char ** argv) {
// Check that the application is recognized
// ----------------------------------------
const std::set<std::string> validApps = {
"converttostructuredgrid",
"convertstate",
"hofx4d",
"localensembleda",
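The gdas.cc change registers the new ConvertToStructuredGrid application alongside the existing ones: its header is included, a factory lambda is added to the name-to-application map in runApp(), and the name is added to the validApps whitelist in main(). Below is a minimal, self-contained sketch of that registry pattern; the Application, ConvertState, and ConvertToStructuredGrid classes here are toy stand-ins for illustration, not the oops types used in the real file.

#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <string>

// Toy stand-in for oops::Application (hypothetical, for illustration only).
struct Application {
  virtual ~Application() = default;
  virtual int execute() const = 0;
};

struct ConvertState : Application {
  int execute() const override { std::cout << "running convertstate\n"; return 0; }
};

struct ConvertToStructuredGrid : Application {
  int execute() const override { std::cout << "running converttostructuredgrid\n"; return 0; }
};

int main(int argc, char** argv) {
  // Name -> factory map, mirroring the runApp() hunk above.
  std::map<std::string, std::function<std::unique_ptr<Application>()>> apps;
  apps["convertstate"] = []() { return std::make_unique<ConvertState>(); };
  apps["converttostructuredgrid"] = []() { return std::make_unique<ConvertToStructuredGrid>(); };

  const std::string name = (argc > 1) ? argv[1] : "converttostructuredgrid";
  const auto it = apps.find(name);
  if (it == apps.end()) {
    std::cerr << "unknown application: " << name << std::endl;
    return 1;
  }
  return it->second()->execute();  // construct the selected application, then run it
}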
4 changes: 2 additions & 2 deletions parm/aero/berror/aero_diagb.yaml.j2
@@ -56,5 +56,5 @@ rescale: 2.0 # rescales the filtered std. dev. by "rescale"
number of halo points: 4
number of neighbors: 16
simple smoothing:
horizontal iterations: 2
vertical iterations: 1
horizontal iterations: 0
vertical iterations: 0
File renamed without changes.
7 changes: 7 additions & 0 deletions parm/aero/obs/config/viirs_n20_aod.yaml.j2
@@ -28,11 +28,18 @@
obs filters:
- filter: PreQC
maxvalue: 1
- filter: Domain Check
where:
- variable:
name: latitude@MetaData
minvalue: -60
maxvalue: 60
- filter: Bounds Check
filter variables:
- name: aerosolOpticalDepth
channels: 4
minvalue: 0
maxvalue: 4.9
action:
name: reject
- filter: Background Check
50 changes: 50 additions & 0 deletions parm/aero/obs/config/viirs_n21_aod.yaml.j2
@@ -0,0 +1,50 @@
- obs space:
name: viirs_n21_aod
obsdatain:
engine:
type: H5File
obsfile: "{{ DATA }}/obs/{{ OPREFIX }}viirs_n21.{{ current_cycle | to_YMDH }}.nc4"
obsdataout:
engine:
type: H5File
obsfile: "{{ DATA }}/diags/diag_viirs_n21_{{ current_cycle | to_YMDH }}.nc4"
io pool:
max pool size: 1
simulated variables: [aerosolOpticalDepth]
channels: 4
get values:
interpolation method: barycentric
time interpolation: linear
obs operator:
name: AodCRTM
Absorbers: [H2O,O3]
obs options:
Sensor_ID: v.viirs-m_j2
EndianType: little_endian
CoefficientPath: "{{ DATA }}/crtm/"
AerosolOption: aerosols_gocart_default
obs error:
covariance model: diagonal
obs filters:
- filter: PreQC
maxvalue: 1
- filter: Domain Check
where:
- variable:
name: latitude@MetaData
minvalue: -60
maxvalue: 60
- filter: Bounds Check
filter variables:
- name: aerosolOpticalDepth
channels: 4
minvalue: 0
maxvalue: 4.9
action:
name: reject
- filter: Background Check
channels: 4
threshold: 3.0
action:
name: inflate error
inflation factor: 3.0
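The three viirs_*_aod.yaml.j2 configs now share the same QC chain: PreQC <= 1, a Domain Check keeping latitudes in [-60, 60], a Bounds Check rejecting channel-4 AOD outside [0, 4.9], and a Background Check that inflates the obs error by a factor of 3 when the departure is too large. The sketch below is only a toy restatement of those thresholds in C++ (the AodObs struct and applyFilters are hypothetical), not the UFO filter implementation.

#include <cmath>
#include <iostream>
#include <vector>

// One channel-4 AOD observation and the fields the filters look at.
struct AodObs {
  int preQc;
  double latitude;
  double aod;        // observed aerosolOpticalDepth
  double hofx;       // model equivalent
  double obsError;
  bool rejected = false;
};

void applyFilters(std::vector<AodObs>& obs) {
  const double bgThreshold = 3.0, inflation = 3.0;
  for (auto& o : obs) {
    if (o.preQc > 1) o.rejected = true;                              // PreQC filter
    if (o.latitude < -60.0 || o.latitude > 60.0) o.rejected = true;  // Domain Check
    if (o.aod < 0.0 || o.aod > 4.9) o.rejected = true;               // Bounds Check -> reject
    if (!o.rejected && std::abs(o.aod - o.hofx) > bgThreshold * o.obsError) {
      o.obsError *= inflation;                                       // Background Check -> inflate error
    }
  }
}

int main() {
  std::vector<AodObs> obs = {{0, 10.0, 0.5, 0.4, 0.1},
                             {0, 75.0, 0.5, 0.4, 0.1},
                             {0, 10.0, 5.5, 0.4, 0.1}};
  applyFilters(obs);
  for (const auto& o : obs)
    std::cout << (o.rejected ? "rejected" : "kept") << " (obsError=" << o.obsError << ")\n";
  return 0;
}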
7 changes: 7 additions & 0 deletions parm/aero/obs/config/viirs_npp_aod.yaml.j2
@@ -28,11 +28,18 @@
obs filters:
- filter: PreQC
maxvalue: 1
- filter: Domain Check
where:
- variable:
name: latitude@MetaData
minvalue: -60
maxvalue: 60
- filter: Bounds Check
filter variables:
- name: aerosolOpticalDepth
channels: 4
minvalue: 0
maxvalue: 4.9
action:
name: reject
- filter: Background Check
2 changes: 1 addition & 1 deletion sorc/fv3
Submodule fv3 updated 1 file
+3 −1 CMakeLists.txt
2 changes: 1 addition & 1 deletion sorc/gsw
2 changes: 1 addition & 1 deletion sorc/land-imsproc
2 changes: 1 addition & 1 deletion sorc/land-jediincr
35 changes: 0 additions & 35 deletions test/atm/global-workflow/config.atmanl

This file was deleted.

24 changes: 0 additions & 24 deletions test/atm/global-workflow/config.atmensanl

This file was deleted.

4 changes: 2 additions & 2 deletions test/atm/global-workflow/jcb-prototype_3dvar.yaml.j2
@@ -19,5 +19,5 @@ atm_obsdatain_suffix: ".{{ current_cycle | to_YMDH }}.nc"
# --------------
test_reference_filename: {{ HOMEgfs }}/sorc/gdas.cd/test/atm/global-workflow/3dvar.ref
test_output_filename: ./3dvar.out
float_relative_tolerance: 1.0e-3
float_absolute_tolerance: 1.0e-5
test_float_relative_tolerance: 1.0e-3
test_float_absolute_tolerance: 1.0e-5
4 changes: 2 additions & 2 deletions test/atm/global-workflow/jcb-prototype_lgetkf.yaml.j2
@@ -24,5 +24,5 @@ atm_obsdatain_suffix: ".{{ current_cycle | to_YMDH }}.nc"
# --------------
test_reference_filename: {{ HOMEgfs }}/sorc/gdas.cd/test/atm/global-workflow/lgetkf.ref
test_output_filename: ./lgetkf.out
float_relative_tolerance: 1.0e-3
float_absolute_tolerance: 1.0e-5
test_float_relative_tolerance: 1.0e-3
test_float_absolute_tolerance: 1.0e-5
6 changes: 2 additions & 4 deletions test/atm/global-workflow/jjob_ens_final.sh
@@ -46,10 +46,8 @@ elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
fi

# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE
if [[ $machine = 'HERA' || $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait --output=atmensanlfinal-%j.out ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE
else
${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE
fi
6 changes: 2 additions & 4 deletions test/atm/global-workflow/jjob_ens_inc.sh
@@ -46,10 +46,8 @@ elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
fi

# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT
if [[ $machine = 'HERA' || $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT --job-name=atmensanlfv3inc-%j.out
else
${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT
fi
6 changes: 2 additions & 4 deletions test/atm/global-workflow/jjob_ens_init.sh
@@ -109,10 +109,8 @@ for imem in $(seq 1 $NMEM_ENS); do
done

# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE
if [[ $machine = 'HERA' || $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait --output=atmensanlinit-%j.out ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE
else
${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE
fi
6 changes: 2 additions & 4 deletions test/atm/global-workflow/jjob_ens_run.sh
@@ -49,10 +49,8 @@ elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
fi

# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF
if [[ $machine = 'HERA' || $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL --wait --output=atmensanlletkf-%j.out ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF
else
${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF
fi
6 changes: 2 additions & 4 deletions test/atm/global-workflow/jjob_var_final.sh
@@ -46,10 +46,8 @@ elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
fi

# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE
if [[ $machine = 'HERA' || $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait --output=atmanlfinal-%j.out ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE
else
${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE
fi
6 changes: 2 additions & 4 deletions test/atm/global-workflow/jjob_var_inc.sh
@@ -46,10 +46,8 @@ elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
fi

# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
if [[ $machine = 'HERA' || $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait --output=atmanlfv3inc-%j.out ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
else
${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
fi
6 changes: 2 additions & 4 deletions test/atm/global-workflow/jjob_var_init.sh
@@ -130,10 +130,8 @@ done


# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE
if [[ $machine = 'HERA' || $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait --output=atmanlinit-%j.out ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE
else
${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE
fi
6 changes: 2 additions & 4 deletions test/atm/global-workflow/jjob_var_run.sh
@@ -49,10 +49,8 @@ elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
fi

# Execute j-job
if [[ $machine = 'HERA' ]]; then
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
if [[ $machine = 'HERA' || $machine = 'ORION' || $machine = 'HERCULES' ]]; then
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait --output=atmanlvar-%j.out ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
else
${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
fi
5 changes: 1 addition & 4 deletions test/atm/global-workflow/setup_workflow_exp.sh
@@ -23,9 +23,6 @@ expdir=$bindir/test/atm/global-workflow/testrun/experiments
rm -rf $comroot $expdir config

# copy config.yaml to local config
cp -r $configdir config
cp $srcdir/test/atm/global-workflow/config.atmanl config/
cp $srcdir/test/atm/global-workflow/config.atmensanl config/
cp $srcdir/test/atm/global-workflow/config.yaml .

# update paths in config.yaml
@@ -45,7 +42,7 @@ $srcdir/../../workflow/setup_expt.py gfs cycled --idate $idate \
--resensatmos $resensatmos \
--nens $nens \
--pslot $pslot \
--configdir $expdir/../config \
--configdir $configdir \
--comroot $comroot \
--expdir $expdir \
--yaml $expdir/../config.yaml
5 changes: 3 additions & 2 deletions test/soca/gw/CMakeLists.txt
@@ -52,8 +52,9 @@ set(jjob_list "JGLOBAL_PREP_OCEAN_OBS"
"JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN"
# "JGDAS_GLOBAL_OCEAN_ANALYSIS_LETKF"
"JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT"
"JGDAS_GLOBAL_OCEAN_ANALYSIS_POST"
"JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY")
"JGDAS_GLOBAL_OCEAN_ANALYSIS_POST")
# Temporary Removal while fixing/improving vrfy task
# "JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY")

set(setup "")
foreach(jjob ${jjob_list})
2 changes: 1 addition & 1 deletion utils/obsproc/CMakeLists.txt
@@ -1,2 +1,2 @@
# add_subdirectory(rtofs)
add_subdirectory(rtofs)
add_subdirectory(applications)
8 changes: 8 additions & 0 deletions utils/obsproc/NetCDFToIodaConverter.h
@@ -49,6 +49,9 @@ namespace gdasapp {
oops::Log::trace() << "NetCDFToIodaConverter::NetCDFToIodaConverter created." << std::endl;
}




// Method to write out a IODA file (writter called in destructor)
void writeToIoda() {
// Extract ioda variables from the provider's files
@@ -60,6 +63,8 @@

// Read the provider's netcdf file
gdasapp::obsproc::iodavars::IodaVars iodaVars = providerToIodaVars(inputFilenames_[myrank]);


for (int i = myrank + comm_.size(); i < inputFilenames_.size(); i += comm_.size()) {
iodaVars.append(providerToIodaVars(inputFilenames_[i]));
oops::Log::info() << " appending: " << inputFilenames_[i] << std::endl;
@@ -200,6 +205,9 @@
}
}




private:
// Virtual method that reads the provider's netcdf file and store the relevant
// info in a IodaVars struct
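Aside from the added blank lines, the visible context in NetCDFToIodaConverter.h shows how writeToIoda() spreads the input files across MPI tasks: each rank converts the file at its own index first, then every comm_.size()-th file after that, appending the results. Below is a stand-alone sketch of that round-robin split, using plain rank/size integers instead of the eckit communicator; filesForRank is a hypothetical helper, not part of GDASApp.

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Round-robin assignment of input files to MPI ranks, mirroring the loop in
// writeToIoda(): rank r handles files r, r + size, r + 2*size, ...
std::vector<std::string> filesForRank(const std::vector<std::string>& files,
                                      int rank, int size) {
  std::vector<std::string> mine;
  for (std::size_t i = rank; i < files.size(); i += size) {
    mine.push_back(files[i]);
  }
  return mine;
}

int main() {
  const std::vector<std::string> files = {"a.nc", "b.nc", "c.nc", "d.nc", "e.nc"};
  const int size = 2;  // pretend communicator size
  for (int rank = 0; rank < size; ++rank) {
    std::cout << "rank " << rank << ":";
    for (const auto& f : filesForRank(files, rank, size)) std::cout << " " << f;
    std::cout << "\n";
  }
  return 0;
}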
