From e2869a1247ad2ba72c1bfe82f7682323d5128f4c Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Tue, 19 Jul 2022 16:16:20 -0400 Subject: [PATCH 01/16] Updated GFS transfer*list files from operations (develop) (#918) Updated transfer list files from WCOSS2 ops. - In move to WCOSS2 the transfer*list files were moved into a new transfer folder under the upper-level parm folder. - The transfer*list files were updated to clean out unneeded paths and the beginning of the paths were updated from: `com/gfs/_ENVIR_` to: `_COMROOT_/gfs/_SHORTVER_` Refs: #419 --- parm/{ => transfer}/transfer_gdas_1a.list | 4 ++-- parm/{ => transfer}/transfer_gdas_1b.list | 4 ++-- parm/{ => transfer}/transfer_gdas_1c.list | 4 ++-- .../transfer_gdas_enkf_enkf_05.list | 6 ++---- .../transfer_gdas_enkf_enkf_10.list | 6 ++---- .../transfer_gdas_enkf_enkf_15.list | 6 ++---- .../transfer_gdas_enkf_enkf_20.list | 6 ++---- .../transfer_gdas_enkf_enkf_25.list | 6 ++---- .../transfer_gdas_enkf_enkf_30.list | 6 ++---- .../transfer_gdas_enkf_enkf_35.list | 6 ++---- .../transfer_gdas_enkf_enkf_40.list | 6 ++---- .../transfer_gdas_enkf_enkf_45.list | 6 ++---- .../transfer_gdas_enkf_enkf_50.list | 6 ++---- .../transfer_gdas_enkf_enkf_55.list | 6 ++---- .../transfer_gdas_enkf_enkf_60.list | 6 ++---- .../transfer_gdas_enkf_enkf_65.list | 6 ++---- .../transfer_gdas_enkf_enkf_70.list | 6 ++---- .../transfer_gdas_enkf_enkf_75.list | 6 ++---- .../transfer_gdas_enkf_enkf_80.list | 6 ++---- .../transfer_gdas_enkf_enkf_misc.list | 6 ++---- parm/{ => transfer}/transfer_gdas_misc.list | 16 ++++++++-------- parm/{ => transfer}/transfer_gfs_1.list | 4 ++-- parm/{ => transfer}/transfer_gfs_10a.list | 4 ++-- parm/{ => transfer}/transfer_gfs_10b.list | 4 ++-- parm/{ => transfer}/transfer_gfs_2.list | 4 ++-- parm/{ => transfer}/transfer_gfs_3.list | 4 ++-- parm/{ => transfer}/transfer_gfs_4.list | 4 ++-- parm/{ => transfer}/transfer_gfs_5.list | 4 ++-- parm/{ => transfer}/transfer_gfs_6.list | 4 ++-- parm/{ => 
transfer}/transfer_gfs_7.list | 4 ++-- parm/{ => transfer}/transfer_gfs_8.list | 4 ++-- parm/{ => transfer}/transfer_gfs_9a.list | 4 ++-- parm/{ => transfer}/transfer_gfs_9b.list | 4 ++-- parm/{ => transfer}/transfer_gfs_gempak.list | 4 ++-- parm/{ => transfer}/transfer_gfs_misc.list | 4 ++-- .../transfer_gfs_wave_restart1.list | 4 ++-- .../transfer_gfs_wave_restart2.list | 4 ++-- .../transfer_gfs_wave_restart3.list | 4 ++-- .../transfer_gfs_wave_rundata.list | 4 ++-- parm/{ => transfer}/transfer_gfs_wave_wave.list | 4 ++-- parm/{ => transfer}/transfer_rdhpcs_gdas.list | 4 ++-- .../transfer_rdhpcs_gdas_enkf_enkf_1.list | 4 ++-- .../transfer_rdhpcs_gdas_enkf_enkf_2.list | 4 ++-- .../transfer_rdhpcs_gdas_enkf_enkf_3.list | 4 ++-- .../transfer_rdhpcs_gdas_enkf_enkf_4.list | 4 ++-- .../transfer_rdhpcs_gdas_enkf_enkf_5.list | 4 ++-- .../transfer_rdhpcs_gdas_enkf_enkf_6.list | 4 ++-- .../transfer_rdhpcs_gdas_enkf_enkf_7.list | 4 ++-- .../transfer_rdhpcs_gdas_enkf_enkf_8.list | 4 ++-- parm/{ => transfer}/transfer_rdhpcs_gfs.list | 2 +- .../transfer_rdhpcs_gfs_nawips.list | 2 +- 51 files changed, 106 insertions(+), 140 deletions(-) rename parm/{ => transfer}/transfer_gdas_1a.list (96%) rename parm/{ => transfer}/transfer_gdas_1b.list (96%) rename parm/{ => transfer}/transfer_gdas_1c.list (96%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_05.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_10.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_15.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_20.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_25.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_30.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_35.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_40.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_45.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_50.list (92%) rename parm/{ => 
transfer}/transfer_gdas_enkf_enkf_55.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_60.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_65.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_70.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_75.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_80.list (92%) rename parm/{ => transfer}/transfer_gdas_enkf_enkf_misc.list (91%) rename parm/{ => transfer}/transfer_gdas_misc.list (89%) rename parm/{ => transfer}/transfer_gfs_1.list (96%) rename parm/{ => transfer}/transfer_gfs_10a.list (96%) rename parm/{ => transfer}/transfer_gfs_10b.list (96%) rename parm/{ => transfer}/transfer_gfs_2.list (98%) rename parm/{ => transfer}/transfer_gfs_3.list (98%) rename parm/{ => transfer}/transfer_gfs_4.list (98%) rename parm/{ => transfer}/transfer_gfs_5.list (98%) rename parm/{ => transfer}/transfer_gfs_6.list (98%) rename parm/{ => transfer}/transfer_gfs_7.list (98%) rename parm/{ => transfer}/transfer_gfs_8.list (98%) rename parm/{ => transfer}/transfer_gfs_9a.list (96%) rename parm/{ => transfer}/transfer_gfs_9b.list (96%) rename parm/{ => transfer}/transfer_gfs_gempak.list (96%) rename parm/{ => transfer}/transfer_gfs_misc.list (96%) rename parm/{ => transfer}/transfer_gfs_wave_restart1.list (96%) rename parm/{ => transfer}/transfer_gfs_wave_restart2.list (96%) rename parm/{ => transfer}/transfer_gfs_wave_restart3.list (96%) rename parm/{ => transfer}/transfer_gfs_wave_rundata.list (96%) rename parm/{ => transfer}/transfer_gfs_wave_wave.list (96%) rename parm/{ => transfer}/transfer_rdhpcs_gdas.list (94%) rename parm/{ => transfer}/transfer_rdhpcs_gdas_enkf_enkf_1.list (92%) rename parm/{ => transfer}/transfer_rdhpcs_gdas_enkf_enkf_2.list (92%) rename parm/{ => transfer}/transfer_rdhpcs_gdas_enkf_enkf_3.list (92%) rename parm/{ => transfer}/transfer_rdhpcs_gdas_enkf_enkf_4.list (92%) rename parm/{ => transfer}/transfer_rdhpcs_gdas_enkf_enkf_5.list (92%) rename 
parm/{ => transfer}/transfer_rdhpcs_gdas_enkf_enkf_6.list (92%) rename parm/{ => transfer}/transfer_rdhpcs_gdas_enkf_enkf_7.list (92%) rename parm/{ => transfer}/transfer_rdhpcs_gdas_enkf_enkf_8.list (92%) rename parm/{ => transfer}/transfer_rdhpcs_gfs.list (97%) rename parm/{ => transfer}/transfer_rdhpcs_gfs_nawips.list (95%) diff --git a/parm/transfer_gdas_1a.list b/parm/transfer/transfer_gdas_1a.list similarity index 96% rename from parm/transfer_gdas_1a.list rename to parm/transfer/transfer_gdas_1a.list index 01e67f6dd0..00c5306f3b 100644 --- a/parm/transfer_gdas_1a.list +++ b/parm/transfer/transfer_gdas_1a.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/*atmf000* @@ -39,7 +39,7 @@ com/gfs/_ENVIR_/gdas._PDY_/ B 100 -com/gfs/_ENVIR_/gdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/*atmf000* diff --git a/parm/transfer_gdas_1b.list b/parm/transfer/transfer_gdas_1b.list similarity index 96% rename from parm/transfer_gdas_1b.list rename to parm/transfer/transfer_gdas_1b.list index ef4829740b..998c71d843 100644 --- a/parm/transfer_gdas_1b.list +++ b/parm/transfer/transfer_gdas_1b.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/*atmf007* @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/gdas._PDY_/ B 100 -com/gfs/_ENVIR_/gdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/*atmf007* diff --git a/parm/transfer_gdas_1c.list b/parm/transfer/transfer_gdas_1c.list similarity index 96% rename from parm/transfer_gdas_1c.list rename to parm/transfer/transfer_gdas_1c.list index 7c8ff99481..42b1bd4f64 100644 --- a/parm/transfer_gdas_1c.list +++ b/parm/transfer/transfer_gdas_1c.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ - /??/atmos/*atmf000* - /??/atmos/*atmf001* - /??/atmos/*atmf002* @@ -44,7 +44,7 @@ com/gfs/_ENVIR_/gdas._PDY_/ B 100 -com/gfs/_ENVIR_/gdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ - /??/atmos/*atmf000* - /??/atmos/*atmf001* - /??/atmos/*atmf002* diff --git a/parm/transfer_gdas_enkf_enkf_05.list b/parm/transfer/transfer_gdas_enkf_enkf_05.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_05.list rename to parm/transfer/transfer_gdas_enkf_enkf_05.list index 518636342c..fe1be06e8c 100644 --- a/parm/transfer_gdas_enkf_enkf_05.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_05.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem001/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem001/ diff --git a/parm/transfer_gdas_enkf_enkf_10.list b/parm/transfer/transfer_gdas_enkf_enkf_10.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_10.list rename to parm/transfer/transfer_gdas_enkf_enkf_10.list index db2b5c2568..b2298be8af 100644 --- a/parm/transfer_gdas_enkf_enkf_10.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_10.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem006/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem006/ diff --git a/parm/transfer_gdas_enkf_enkf_15.list b/parm/transfer/transfer_gdas_enkf_enkf_15.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_15.list rename to parm/transfer/transfer_gdas_enkf_enkf_15.list index 30e10b51cc..435de61bba 100644 --- a/parm/transfer_gdas_enkf_enkf_15.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_15.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem011/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem011/ diff --git a/parm/transfer_gdas_enkf_enkf_20.list b/parm/transfer/transfer_gdas_enkf_enkf_20.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_20.list rename to parm/transfer/transfer_gdas_enkf_enkf_20.list index 493bb2cc52..e329d227bd 100644 --- a/parm/transfer_gdas_enkf_enkf_20.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_20.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem016/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem016/ diff --git a/parm/transfer_gdas_enkf_enkf_25.list b/parm/transfer/transfer_gdas_enkf_enkf_25.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_25.list rename to parm/transfer/transfer_gdas_enkf_enkf_25.list index 8e91b1af34..fb6d964369 100644 --- a/parm/transfer_gdas_enkf_enkf_25.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_25.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem021/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem021/ diff --git a/parm/transfer_gdas_enkf_enkf_30.list b/parm/transfer/transfer_gdas_enkf_enkf_30.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_30.list rename to parm/transfer/transfer_gdas_enkf_enkf_30.list index d29b79e871..bce22494a4 100644 --- a/parm/transfer_gdas_enkf_enkf_30.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_30.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem026/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem026/ diff --git a/parm/transfer_gdas_enkf_enkf_35.list b/parm/transfer/transfer_gdas_enkf_enkf_35.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_35.list rename to parm/transfer/transfer_gdas_enkf_enkf_35.list index 60e69aaeb6..6397c6693e 100644 --- a/parm/transfer_gdas_enkf_enkf_35.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_35.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem031/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem031/ diff --git a/parm/transfer_gdas_enkf_enkf_40.list b/parm/transfer/transfer_gdas_enkf_enkf_40.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_40.list rename to parm/transfer/transfer_gdas_enkf_enkf_40.list index 1ce4d8e3d9..d8a85e529c 100644 --- a/parm/transfer_gdas_enkf_enkf_40.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_40.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem036/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem036/ diff --git a/parm/transfer_gdas_enkf_enkf_45.list b/parm/transfer/transfer_gdas_enkf_enkf_45.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_45.list rename to parm/transfer/transfer_gdas_enkf_enkf_45.list index 50b2c35d6f..fd8b1d5299 100644 --- a/parm/transfer_gdas_enkf_enkf_45.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_45.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem041/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem041/ diff --git a/parm/transfer_gdas_enkf_enkf_50.list b/parm/transfer/transfer_gdas_enkf_enkf_50.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_50.list rename to parm/transfer/transfer_gdas_enkf_enkf_50.list index 69a895bffb..dd7721505b 100644 --- a/parm/transfer_gdas_enkf_enkf_50.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_50.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem046/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem046/ diff --git a/parm/transfer_gdas_enkf_enkf_55.list b/parm/transfer/transfer_gdas_enkf_enkf_55.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_55.list rename to parm/transfer/transfer_gdas_enkf_enkf_55.list index a1cbd458e7..4606feb727 100644 --- a/parm/transfer_gdas_enkf_enkf_55.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_55.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem051/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem051/ diff --git a/parm/transfer_gdas_enkf_enkf_60.list b/parm/transfer/transfer_gdas_enkf_enkf_60.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_60.list rename to parm/transfer/transfer_gdas_enkf_enkf_60.list index 494b8003a3..e5764082dc 100644 --- a/parm/transfer_gdas_enkf_enkf_60.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_60.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem056/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem056/ diff --git a/parm/transfer_gdas_enkf_enkf_65.list b/parm/transfer/transfer_gdas_enkf_enkf_65.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_65.list rename to parm/transfer/transfer_gdas_enkf_enkf_65.list index 63d5af0575..15e12a0660 100644 --- a/parm/transfer_gdas_enkf_enkf_65.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_65.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem061/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem061/ diff --git a/parm/transfer_gdas_enkf_enkf_70.list b/parm/transfer/transfer_gdas_enkf_enkf_70.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_70.list rename to parm/transfer/transfer_gdas_enkf_enkf_70.list index b5d484407f..88accbffb9 100644 --- a/parm/transfer_gdas_enkf_enkf_70.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_70.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem066/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem066/ diff --git a/parm/transfer_gdas_enkf_enkf_75.list b/parm/transfer/transfer_gdas_enkf_enkf_75.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_75.list rename to parm/transfer/transfer_gdas_enkf_enkf_75.list index f66df06e64..3db4f41a03 100644 --- a/parm/transfer_gdas_enkf_enkf_75.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_75.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem071/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem071/ diff --git a/parm/transfer_gdas_enkf_enkf_80.list b/parm/transfer/transfer_gdas_enkf_enkf_80.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_80.list rename to parm/transfer/transfer_gdas_enkf_enkf_80.list index cbf1a8a9cd..58ca72515d 100644 --- a/parm/transfer_gdas_enkf_enkf_80.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_80.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem076/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem076/ diff --git a/parm/transfer_gdas_enkf_enkf_misc.list b/parm/transfer/transfer_gdas_enkf_enkf_misc.list similarity index 91% rename from parm/transfer_gdas_enkf_enkf_misc.list rename to parm/transfer/transfer_gdas_enkf_enkf_misc.list index 14d052889e..141b4af1da 100644 --- a/parm/transfer_gdas_enkf_enkf_misc.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_misc.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/* @@ -33,8 +32,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/* diff --git a/parm/transfer_gdas_misc.list b/parm/transfer/transfer_gdas_misc.list similarity index 89% rename from parm/transfer_gdas_misc.list rename to parm/transfer/transfer_gdas_misc.list index 6687262a45..3d7fe47a88 100644 --- a/parm/transfer_gdas_misc.list +++ b/parm/transfer/transfer_gdas_misc.list @@ -30,30 +30,30 @@ #B 4500000 -com/gfs/_ENVIR_/syndat/ +_COMROOT_/gfs/_SHORTVER_/syndat/ B 180 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /data_counts._MONPREV_/*** - * B 16000000 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /data_counts._MONCUR_/*** - * B 16000000 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /satcounts._MONPREV_/*** - * B 16000000 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /satcounts._MONCUR_/*** - * B 16000000 -com/gfs/_ENVIR_/sdm_rtdm/ +_COMROOT_/gfs/_SHORTVER_/sdm_rtdm/ + /obcount_30day/ + /obcount_30day/gdas/ + /obcount_30day/gdas/gdas._PDYm1_/*** @@ -61,14 +61,14 @@ com/gfs/_ENVIR_/sdm_rtdm/ - * B 2000000 -com/gfs/_ENVIR_/sdm_rtdm/ +_COMROOT_/gfs/_SHORTVER_/sdm_rtdm/ + /avgdata/ + /avgdata/obcount_30davg.gdas._MONPREV_ + /avgdata/obcount_30davg.gdas.current - * B 256000 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /index.shtml + /index_backup.shtml - * diff --git a/parm/transfer_gfs_1.list b/parm/transfer/transfer_gfs_1.list similarity index 96% rename from parm/transfer_gfs_1.list rename to parm/transfer/transfer_gfs_1.list index ce70343214..84e852ff82 100644 --- a/parm/transfer_gfs_1.list +++ b/parm/transfer/transfer_gfs_1.list @@ -24,7 +24,7 @@ # directory are 
included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ - /??/atmos/gfs.t??z.atmf???.nc @@ -36,7 +36,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - /??/wave/* B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ - /??/atmos/gfs.t??z.atmf???.nc diff --git a/parm/transfer_gfs_10a.list b/parm/transfer/transfer_gfs_10a.list similarity index 96% rename from parm/transfer_gfs_10a.list rename to parm/transfer/transfer_gfs_10a.list index a938573073..3beb6d15fd 100644 --- a/parm/transfer_gfs_10a.list +++ b/parm/transfer/transfer_gfs_10a.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.master.*1 @@ -35,7 +35,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - * B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.master.*1 diff --git a/parm/transfer_gfs_10b.list b/parm/transfer/transfer_gfs_10b.list similarity index 96% rename from parm/transfer_gfs_10b.list rename to parm/transfer/transfer_gfs_10b.list index 83467d6d25..34522d1e2f 100644 --- a/parm/transfer_gfs_10b.list +++ b/parm/transfer/transfer_gfs_10b.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.master.*0 @@ -35,7 +35,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - * B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.master.*0 diff --git a/parm/transfer_gfs_2.list b/parm/transfer/transfer_gfs_2.list similarity index 98% rename from parm/transfer_gfs_2.list rename to parm/transfer/transfer_gfs_2.list index 373ebfae05..f0ea9bc9c5 100644 --- a/parm/transfer_gfs_2.list +++ b/parm/transfer/transfer_gfs_2.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf000.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf000.nc diff --git a/parm/transfer_gfs_3.list b/parm/transfer/transfer_gfs_3.list similarity index 98% rename from parm/transfer_gfs_3.list rename to parm/transfer/transfer_gfs_3.list index a5218e198a..636077381e 100644 --- a/parm/transfer_gfs_3.list +++ b/parm/transfer/transfer_gfs_3.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf001.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf001.nc diff --git a/parm/transfer_gfs_4.list b/parm/transfer/transfer_gfs_4.list similarity index 98% rename from parm/transfer_gfs_4.list rename to parm/transfer/transfer_gfs_4.list index 37acec25ab..b45e4027ff 100644 --- a/parm/transfer_gfs_4.list +++ b/parm/transfer/transfer_gfs_4.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf002.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf002.nc diff --git a/parm/transfer_gfs_5.list b/parm/transfer/transfer_gfs_5.list similarity index 98% rename from parm/transfer_gfs_5.list rename to parm/transfer/transfer_gfs_5.list index 01e01c2447..21f59df4f8 100644 --- a/parm/transfer_gfs_5.list +++ b/parm/transfer/transfer_gfs_5.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf003.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf003.nc diff --git a/parm/transfer_gfs_6.list b/parm/transfer/transfer_gfs_6.list similarity index 98% rename from parm/transfer_gfs_6.list rename to parm/transfer/transfer_gfs_6.list index de661359f7..5e90f975fc 100644 --- a/parm/transfer_gfs_6.list +++ b/parm/transfer/transfer_gfs_6.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf004.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf004.nc diff --git a/parm/transfer_gfs_7.list b/parm/transfer/transfer_gfs_7.list similarity index 98% rename from parm/transfer_gfs_7.list rename to parm/transfer/transfer_gfs_7.list index 841d671944..e3b8dad532 100644 --- a/parm/transfer_gfs_7.list +++ b/parm/transfer/transfer_gfs_7.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf005.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf005.nc diff --git a/parm/transfer_gfs_8.list b/parm/transfer/transfer_gfs_8.list similarity index 98% rename from parm/transfer_gfs_8.list rename to parm/transfer/transfer_gfs_8.list index 744ef24e70..df146fd207 100644 --- a/parm/transfer_gfs_8.list +++ b/parm/transfer/transfer_gfs_8.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf006.nc @@ -60,7 +60,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf006.nc diff --git a/parm/transfer_gfs_9a.list b/parm/transfer/transfer_gfs_9a.list similarity index 96% rename from parm/transfer_gfs_9a.list rename to parm/transfer/transfer_gfs_9a.list index 2f3c34aaaa..44d316c81e 100644 --- a/parm/transfer_gfs_9a.list +++ b/parm/transfer/transfer_gfs_9a.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.sfcf??0.nc @@ -36,7 +36,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.sfcf??0.nc diff --git a/parm/transfer_gfs_9b.list b/parm/transfer/transfer_gfs_9b.list similarity index 96% rename from parm/transfer_gfs_9b.list rename to parm/transfer/transfer_gfs_9b.list index fd87ee88d4..b2571dfb7c 100644 --- a/parm/transfer_gfs_9b.list +++ b/parm/transfer/transfer_gfs_9b.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.sfcf??1.nc @@ -36,7 +36,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.sfcf??1.nc diff --git a/parm/transfer_gfs_gempak.list b/parm/transfer/transfer_gfs_gempak.list similarity index 96% rename from parm/transfer_gfs_gempak.list rename to parm/transfer/transfer_gfs_gempak.list index 1db0755a47..e491821d69 100644 --- a/parm/transfer_gfs_gempak.list +++ b/parm/transfer/transfer_gfs_gempak.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gempak/ @@ -33,7 +33,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gempak/ diff --git a/parm/transfer_gfs_misc.list b/parm/transfer/transfer_gfs_misc.list similarity index 96% rename from parm/transfer_gfs_misc.list rename to parm/transfer/transfer_gfs_misc.list index e8448e59cc..32f002d1e7 100644 --- a/parm/transfer_gfs_misc.list +++ b/parm/transfer/transfer_gfs_misc.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/sdm_rtdm/ +_COMROOT_/gfs/_SHORTVER_/sdm_rtdm/ + /avgdata/ + /avgdata/obcount_30davg.gfs._MONPREV_ + /avgdata/obcount_30davg.gfs.current @@ -32,7 +32,7 @@ com/gfs/_ENVIR_/sdm_rtdm/ B 256000 -com/gfs/_ENVIR_/sdm_rtdm/ +_COMROOT_/gfs/_SHORTVER_/sdm_rtdm/ + /obcount_30day/ + /obcount_30day/gfs/ + /obcount_30day/gfs/gfs._PDYm1_/*** diff --git a/parm/transfer_gfs_wave_restart1.list b/parm/transfer/transfer_gfs_wave_restart1.list similarity index 96% rename from parm/transfer_gfs_wave_restart1.list rename to parm/transfer/transfer_gfs_wave_restart1.list index 7a669f9cc6..cdac47428a 100644 --- a/parm/transfer_gfs_wave_restart1.list +++ b/parm/transfer/transfer_gfs_wave_restart1.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/wave/ + /??/wave/restart/ @@ -32,7 +32,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - * B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/wave/ + /??/wave/restart/ diff --git a/parm/transfer_gfs_wave_restart2.list b/parm/transfer/transfer_gfs_wave_restart2.list similarity index 96% rename from parm/transfer_gfs_wave_restart2.list rename to parm/transfer/transfer_gfs_wave_restart2.list index 1f789855a8..6f4eb289af 100644 --- a/parm/transfer_gfs_wave_restart2.list +++ b/parm/transfer/transfer_gfs_wave_restart2.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/wave/ + /??/wave/restart/ @@ -32,7 +32,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - * B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/wave/ + /??/wave/restart/ diff --git a/parm/transfer_gfs_wave_restart3.list b/parm/transfer/transfer_gfs_wave_restart3.list similarity index 96% rename from parm/transfer_gfs_wave_restart3.list rename to parm/transfer/transfer_gfs_wave_restart3.list index e5e7b2abd7..c8005e53eb 100644 --- a/parm/transfer_gfs_wave_restart3.list +++ b/parm/transfer/transfer_gfs_wave_restart3.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/wave/ + /??/wave/restart/ @@ -32,7 +32,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - * B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/wave/ + /??/wave/restart/ diff --git a/parm/transfer_gfs_wave_rundata.list b/parm/transfer/transfer_gfs_wave_rundata.list similarity index 96% rename from parm/transfer_gfs_wave_rundata.list rename to parm/transfer/transfer_gfs_wave_rundata.list index d7f977f76d..dfacfe48f7 100644 --- a/parm/transfer_gfs_wave_rundata.list +++ b/parm/transfer/transfer_gfs_wave_rundata.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/wave/ + /??/wave/rundata/ @@ -33,7 +33,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/wave/ + /??/wave/rundata/ diff --git a/parm/transfer_gfs_wave_wave.list b/parm/transfer/transfer_gfs_wave_wave.list similarity index 96% rename from parm/transfer_gfs_wave_wave.list rename to parm/transfer/transfer_gfs_wave_wave.list index 4628d02547..03cf074797 100644 --- a/parm/transfer_gfs_wave_wave.list +++ b/parm/transfer/transfer_gfs_wave_wave.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/wave/ + /??/wave/gridded/ @@ -36,7 +36,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - * B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/wave/ + /??/wave/gridded/ diff --git a/parm/transfer_rdhpcs_gdas.list b/parm/transfer/transfer_rdhpcs_gdas.list similarity index 94% rename from parm/transfer_rdhpcs_gdas.list rename to parm/transfer/transfer_rdhpcs_gdas.list index e3811d3aa6..a154b022ed 100644 --- a/parm/transfer_rdhpcs_gdas.list +++ b/parm/transfer/transfer_rdhpcs_gdas.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # transferred. -com/gfs/_ENVIR_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gdas.t??z*tcvitals* @@ -45,7 +45,7 @@ com/gfs/_ENVIR_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/gdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gdas.t??z*tcvitals* diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_1.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_1.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list index f924cbd377..aae14dc120 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_1.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /00/ + /00/atmos/ + /00/atmos/mem???/ @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ 
_REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /00/ + /00/atmos/ + /00/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_2.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_2.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list index f7b2f03f9e..1cf3b8f5e4 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_2.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /06/ + /06/atmos/ + /06/atmos/mem???/ @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /06/ + /06/atmos/ + /06/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_3.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_3.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list index f51726923b..ee0dae4c34 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_3.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /12/ + 
/12/atmos/ + /12/atmos/mem???/ @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /12/ + /12/atmos/ + /12/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_4.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_4.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list index 85c541beb8..29f1a601d1 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_4.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /18/ + /18/atmos/ + /18/atmos/mem???/ @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /18/ + /18/atmos/ + /18/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_5.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_5.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list index 44bf0f4662..7d1dd9ff6a 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_5.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ 
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /00/ + /00/atmos/ + /00/atmos/mem???/ @@ -38,7 +38,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /00/ + /00/atmos/ + /00/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_6.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_6.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list index 3af2fbae4d..124dbe3aad 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_6.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /06/ + /06/atmos/ + /06/atmos/mem???/ @@ -38,7 +38,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /06/ + /06/atmos/ + /06/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_7.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_7.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list index 9a86b20c42..58ff55b5d6 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_7.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z 
-com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /12/ + /12/atmos/ + /12/atmos/mem???/ @@ -38,7 +38,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /12/ + /12/atmos/ + /12/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_8.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_8.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list index 747be01fcd..99d3de2843 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_8.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /18/ + /18/atmos/ + /18/atmos/mem???/ @@ -38,7 +38,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /18/ + /18/atmos/ + /18/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gfs.list b/parm/transfer/transfer_rdhpcs_gfs.list similarity index 97% rename from parm/transfer_rdhpcs_gfs.list rename to parm/transfer/transfer_rdhpcs_gfs.list index 34e006e179..78eedd1f24 100644 --- a/parm/transfer_rdhpcs_gfs.list +++ b/parm/transfer/transfer_rdhpcs_gfs.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns 
match that file, it will be # transferred. -com/gfs/_ENVIR_/gfs._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.*bufr* diff --git a/parm/transfer_rdhpcs_gfs_nawips.list b/parm/transfer/transfer_rdhpcs_gfs_nawips.list similarity index 95% rename from parm/transfer_rdhpcs_gfs_nawips.list rename to parm/transfer/transfer_rdhpcs_gfs_nawips.list index 02d80bac9f..3465d3c360 100644 --- a/parm/transfer_rdhpcs_gfs_nawips.list +++ b/parm/transfer/transfer_rdhpcs_gfs_nawips.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # transferred. -com/gfs/_ENVIR_/gfs._PDY_/ _REMOTEPATH_/com/nawips/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gempak/ From 98f4d16e9bba86d2c433aa0521d960b566062a1f Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Thu, 21 Jul 2022 16:02:31 -0400 Subject: [PATCH 02/16] Add postsnd job when bufrsnd it on (#926) In the workflow refactoring, the addition of postsnd to the task list when bufrsnd is true was inadvertently left out. It is now added back in. 
--- workflow/applications.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/workflow/applications.py b/workflow/applications.py index e91475e36f..c53f3635ea 100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -186,6 +186,9 @@ def _cycled_configs(self): if self.do_gempak: configs += ['gempak'] + if self.do_bufrsnd: + configs += ['postsnd'] + if self.do_awips: configs += ['awips'] From ffcd5bbde7947902a73eebff7dfe04c2ab045b0a Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Fri, 22 Jul 2022 16:00:51 -0400 Subject: [PATCH 03/16] Add GDASapp (first wave of JEDI changes) (#871) Merge changes associated with g-w issue #521 from g-w branch feature/ufsda_gdasapp into develop. feature/ufsda_gdasapp contains g-w extensions for JEDI based atmospheric DA. Specifically, this PR adds the option to add JEDI based variational and/or ensemble DA jobs to replace GSI based DA jobs. The toggling on/off of JEDI_VAR and JEDI_ENS jobs is controlled via two new variables added to `config.base.emc.dyn` and `config.base.nco.static` ``` # DA engine export DO_JEDIVAR="NO" export DO_JEDIENS="NO" ``` When both variables are `NO`, the global workflow uses GSI based DA jobs. Thus, the PR does not alter the default behavior of the develop global workflow. When `DO_JEDIVAR=YES`, GSI jobs `anal` and `analdiag` are replaced by JEDI_VAR jobs `atmanalprep`, `atmanalrun`, and `atmanalpost`. When `DO_JEDIENS=YES`, GSI jobs `eobs`, `ediag`, and `eupd` are replaced by JEDI_ENS jobs `atmensanalprep`, `atmensanalrun`, and `atmensanalpost`. `checkout.sh`, `build_all.sh`, and `link_workflow.sh` are updated to clone, build, and install the GDASapp in the global workflow. Local directory `sorc/gdas.cd` contains the GDASApp superstructure plus the relevant components of JEDI needed to run GDASApp. 
Closes #521 --- .gitignore | 8 ++ env/HERA.env | 25 ++++++ env/JET.env | 17 ++++ env/ORION.env | 25 ++++++ jobs/rocoto/atmanalpost.sh | 13 +++ jobs/rocoto/atmanalprep.sh | 13 +++ jobs/rocoto/atmanalrun.sh | 13 +++ jobs/rocoto/atmensanalpost.sh | 13 +++ jobs/rocoto/atmensanalprep.sh | 13 +++ jobs/rocoto/atmensanalrun.sh | 13 +++ parm/config/config.atmanal | 24 +++++ parm/config/config.atmanalpost | 10 +++ parm/config/config.atmanalprep | 10 +++ parm/config/config.atmanalrun | 14 +++ parm/config/config.atmensanal | 25 ++++++ parm/config/config.atmensanalpost | 10 +++ parm/config/config.atmensanalprep | 10 +++ parm/config/config.atmensanalrun | 14 +++ parm/config/config.base.emc.dyn | 4 + parm/config/config.base.nco.static | 4 + parm/config/config.resources | 59 +++++++++++++ sorc/build_all.sh | 14 +++ sorc/build_gdas.sh | 26 ++++++ sorc/checkout.sh | 1 + sorc/link_workflow.sh | 26 +++++- workflow/applications.py | 40 +++++++-- workflow/rocoto/workflow_tasks.py | 136 ++++++++++++++++++++++++++++- 27 files changed, 567 insertions(+), 13 deletions(-) create mode 100755 jobs/rocoto/atmanalpost.sh create mode 100755 jobs/rocoto/atmanalprep.sh create mode 100755 jobs/rocoto/atmanalrun.sh create mode 100755 jobs/rocoto/atmensanalpost.sh create mode 100755 jobs/rocoto/atmensanalprep.sh create mode 100755 jobs/rocoto/atmensanalrun.sh create mode 100755 parm/config/config.atmanal create mode 100755 parm/config/config.atmanalpost create mode 100755 parm/config/config.atmanalprep create mode 100755 parm/config/config.atmanalrun create mode 100755 parm/config/config.atmensanal create mode 100755 parm/config/config.atmensanalpost create mode 100755 parm/config/config.atmensanalprep create mode 100755 parm/config/config.atmensanalrun create mode 100755 sorc/build_gdas.sh diff --git a/.gitignore b/.gitignore index 9bf41e1bac..6f5a7da9c5 100644 --- a/.gitignore +++ b/.gitignore @@ -82,6 +82,7 @@ sorc/gldas.fd sorc/gsi_enkf.fd sorc/gsi.fd sorc/enkf.fd +sorc/gdas.cd sorc/gsi_utils.fd 
sorc/gsi_monitor.fd sorc/ufs_utils.fd @@ -145,6 +146,12 @@ jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 jobs/JGFS_ATMOS_WAFS_GCIP jobs/JGFS_ATMOS_WAFS_GRIB2 jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 +jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST +jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP +jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN +jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST +jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP +jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN # scripts symlinks scripts/exemcsfc_global_sfc_prep.sh scripts/exgdas_atmos_gldas.sh @@ -188,6 +195,7 @@ ush/radmon_verf_angle.sh ush/radmon_verf_bcoef.sh ush/radmon_verf_bcor.sh ush/radmon_verf_time.sh +ush/ufsda ush/wafs_blending.sh ush/wafs_grib2.regrid.sh ush/wafs_intdsk.sh diff --git a/env/HERA.env b/env/HERA.env index ef780a8a49..f5c0a5efcd 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -4,6 +4,7 @@ if [ $# -ne 1 ]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" + echo "atmanalrun atmensanalrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -50,6 +51,30 @@ elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $ste export wavempexec=${launcher} export wave_mpmd=${mpmd} +elif [ $step = "atmanalrun" ]; then + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="$launcher -n \$ncmd --multi-prog" + + nth_max=$(($npe_node_max / $npe_node_atmanalrun)) + + export NTHREADS_ATMANAL=${nth_atmanalrun:-$nth_max} + [[ $NTHREADS_ATMANAL -gt $nth_max ]] && export NTHREADS_ATMANAL=$nth_max + export APRUN_ATMANAL="$launcher -n $npe_atmanalrun" + +elif [ $step = "atmensanalrun" ]; then + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="$launcher -n \$ncmd --multi-prog" + + nth_max=$(($npe_node_max / $npe_node_atmensanalrun)) + + export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-$nth_max} + [[ $NTHREADS_ATMENSANAL -gt $nth_max ]] && export 
NTHREADS_ATMENSANAL=$nth_max + export APRUN_ATMENSANAL="$launcher -n $npe_atmensanalrun" + elif [ $step = "anal" ]; then export MKL_NUM_THREADS=4 diff --git a/env/JET.env b/env/JET.env index 8d74aba5ad..21321b23c9 100755 --- a/env/JET.env +++ b/env/JET.env @@ -4,6 +4,7 @@ if [ $# -ne 1 ]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" + echo "atmanalrun atmensanalrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -44,6 +45,22 @@ elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $ste export wavempexec=${launcher} export wave_mpmd=${mpmd} +elif [ $step = "atmanalrun" ]; then + + nth_max=$(($npe_node_max / $npe_node_atmanalrun)) + + export NTHREADS_ATMANAL=${nth_atmanalrun:-$nth_max} + [[ $NTHREADS_ATMANAL -gt $nth_max ]] && export NTHREADS_ATMANAL=$nth_max + export APRUN_ATMANAL="$launcher $npe_atmanalrun" + +elif [ $step = "atmensanalrun" ]; then + + nth_max=$(($npe_node_max / $npe_node_atmensanalrun)) + + export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-$nth_max} + [[ $NTHREADS_ATMENSANAL -gt $nth_max ]] && export NTHREADS_ATMENSANAL=$nth_max + export APRUN_ATMENSANAL="$launcher $npe_atmensanalrun" + elif [ $step = "anal" ]; then nth_max=$(($npe_node_max / $npe_node_anal)) diff --git a/env/ORION.env b/env/ORION.env index bef649c43f..f3b58d987c 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -4,6 +4,7 @@ if [ $# -ne 1 ]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" + echo "atmanalrun atmensanalrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -49,6 +50,30 @@ elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $ste export wavempexec=${launcher} export wave_mpmd=${mpmd} +elif [ $step = "atmanalrun" ]; then + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="$launcher -n \$ncmd --multi-prog" + + nth_max=$(($npe_node_max / $npe_node_atmanalrun)) + + export NTHREADS_ATMANAL=${nth_atmanalrun:-$nth_max} + [[ $NTHREADS_ATMANAL -gt $nth_max ]] && export NTHREADS_ATMANAL=$nth_max + export APRUN_ATMANAL="$launcher -n $npe_atmanalrun" + +elif [ $step = "atmensanalrun" ]; then + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="$launcher -n \$ncmd --multi-prog" + + nth_max=$(($npe_node_max / $npe_node_atmensanalrun)) + + export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-$nth_max} + [[ $NTHREADS_ATMENSANAL -gt $nth_max ]] && export NTHREADS_ATMENSANAL=$nth_max + export APRUN_ATMENSANAL="$launcher -n $npe_atmensanalrun" + elif [ $step = "anal" ]; then export MKL_NUM_THREADS=4 diff --git a/jobs/rocoto/atmanalpost.sh b/jobs/rocoto/atmanalpost.sh new file mode 100755 index 0000000000..90a9b9bace --- /dev/null +++ b/jobs/rocoto/atmanalpost.sh @@ -0,0 +1,13 @@ +#!/bin/bash -x + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################################### +# Execute the JJOB +$HOMEgfs/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST +status=$? 
+exit $status diff --git a/jobs/rocoto/atmanalprep.sh b/jobs/rocoto/atmanalprep.sh new file mode 100755 index 0000000000..e4b76c8407 --- /dev/null +++ b/jobs/rocoto/atmanalprep.sh @@ -0,0 +1,13 @@ +#!/bin/bash -x + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################################### +# Execute the JJOB +$HOMEgfs/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP +status=$? +exit $status diff --git a/jobs/rocoto/atmanalrun.sh b/jobs/rocoto/atmanalrun.sh new file mode 100755 index 0000000000..cebe478b7e --- /dev/null +++ b/jobs/rocoto/atmanalrun.sh @@ -0,0 +1,13 @@ +#!/bin/bash -x + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################################### +# Execute the JJOB +$HOMEgfs/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN +status=$? +exit $status diff --git a/jobs/rocoto/atmensanalpost.sh b/jobs/rocoto/atmensanalpost.sh new file mode 100755 index 0000000000..ea6e490f20 --- /dev/null +++ b/jobs/rocoto/atmensanalpost.sh @@ -0,0 +1,13 @@ +#!/bin/bash -x + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################################### +# Execute the JJOB +$HOMEgfs/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST +status=$? +exit $status diff --git a/jobs/rocoto/atmensanalprep.sh b/jobs/rocoto/atmensanalprep.sh new file mode 100755 index 0000000000..5ed434c6bf --- /dev/null +++ b/jobs/rocoto/atmensanalprep.sh @@ -0,0 +1,13 @@ +#!/bin/bash -x + +############################################################### +# Source FV3GFS workflow modules +. 
$HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################################### +# Execute the JJOB +$HOMEgfs/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP +status=$? +exit $status diff --git a/jobs/rocoto/atmensanalrun.sh b/jobs/rocoto/atmensanalrun.sh new file mode 100755 index 0000000000..ddb3bb1432 --- /dev/null +++ b/jobs/rocoto/atmensanalrun.sh @@ -0,0 +1,13 @@ +#!/bin/bash -x + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################################### +# Execute the JJOB +$HOMEgfs/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN +status=$? +exit $status diff --git a/parm/config/config.atmanal b/parm/config/config.atmanal new file mode 100755 index 0000000000..9c11e9e593 --- /dev/null +++ b/parm/config/config.atmanal @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.atmanal ########## +# configuration common to all atm analysis tasks + +echo "BEGIN: config.atmanal" + +export OBS_YAML_DIR=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype.yaml +export ATMVARYAML=$HOMEgfs/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export BERROR_YAML=$HOMEgfs/sorc/gdas.cd/parm/atm/berror/hybvar_bump.yaml +export FV3JEDI_FIX=$HOMEgfs/fix/fix_jedi +export R2D2_OBS_DB='ufsda_test' +export R2D2_OBS_DUMP='oper_gdas' +export R2D2_OBS_SRC='ncdiag' +export R2D2_BC_SRC='gsi' +export R2D2_BC_DUMP='oper_gdas' +export R2D2_ARCH_DB='local' +export INTERP_METHOD='barycentric' + +export io_layout_x=1 +export io_layout_y=1 + +echo "END: config.atmanal" diff --git a/parm/config/config.atmanalpost b/parm/config/config.atmanalpost new file mode 100755 index 0000000000..fd5f3bbbcc --- /dev/null +++ b/parm/config/config.atmanalpost @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.atmanalpost 
########## +# Post Atm Analysis specific + +echo "BEGIN: config.atmanalpost" + +# Get task specific resources +. $EXPDIR/config.resources atmanalpost +echo "END: config.atmanalpost" diff --git a/parm/config/config.atmanalprep b/parm/config/config.atmanalprep new file mode 100755 index 0000000000..0014520f5f --- /dev/null +++ b/parm/config/config.atmanalprep @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.atmanalprep ########## +# Pre Atm Analysis specific + +echo "BEGIN: config.atmanalprep" + +# Get task specific resources +. $EXPDIR/config.resources atmanalprep +echo "END: config.atmanalprep" diff --git a/parm/config/config.atmanalrun b/parm/config/config.atmanalrun new file mode 100755 index 0000000000..5aaac6a01d --- /dev/null +++ b/parm/config/config.atmanalrun @@ -0,0 +1,14 @@ +#!/bin/bash -x + +########## config.atmanalrun ########## +# Atm Analysis specific + +echo "BEGIN: config.atmanalrun" + +# Get task specific resources +. $EXPDIR/config.resources atmanalrun + +# Task specific variables +export JEDIVAREXE=$HOMEgfs/exec/fv3jedi_var.x + +echo "END: config.atmanalrun" diff --git a/parm/config/config.atmensanal b/parm/config/config.atmensanal new file mode 100755 index 0000000000..4f8e244b2e --- /dev/null +++ b/parm/config/config.atmensanal @@ -0,0 +1,25 @@ +#!/bin/bash -x + +########## config.atmensanal ########## +# configuration common to all atm atmensanal analysis tasks + +echo "BEGIN: config.atmensanal" + +export OBS_YAML_DIR=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export BERROR_YAML=$HOMEgfs/sorc/gdas.cd/parm/atm/berror/hybvar_bump.yaml +export ATMENSYAML=$HOMEgfs/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export FV3JEDI_FIX=$HOMEgfs/fix/fix_jedi +export R2D2_OBS_DB='ufsda_test' +export R2D2_OBS_DUMP='oper_gdas' +export R2D2_OBS_SRC='ncdiag' +export R2D2_BC_SRC='gsi' +##export R2D2_BC_DUMP='oper_gdas' +export R2D2_BC_DUMP='prgdasens' +export R2D2_ARCH_DB='local' 
+export INTERP_METHOD='barycentric' + +export io_layout_x=1 # hardwired to 1,1 in yamltools.py +export io_layout_y=1 + +echo "END: config.atmensanal" diff --git a/parm/config/config.atmensanalpost b/parm/config/config.atmensanalpost new file mode 100755 index 0000000000..f79ee5b507 --- /dev/null +++ b/parm/config/config.atmensanalpost @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.atmensanalpost ########## +# Post Atm Analysis specific + +echo "BEGIN: config.atmensanalpost" + +# Get task specific resources +. $EXPDIR/config.resources atmensanalpost +echo "END: config.atmensanalpost" diff --git a/parm/config/config.atmensanalprep b/parm/config/config.atmensanalprep new file mode 100755 index 0000000000..b719b9ac6c --- /dev/null +++ b/parm/config/config.atmensanalprep @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.atmensanalprep ########## +# Pre Atm Analysis specific + +echo "BEGIN: config.atmensanalprep" + +# Get task specific resources +. $EXPDIR/config.resources atmensanalprep +echo "END: config.atmensanalprep" diff --git a/parm/config/config.atmensanalrun b/parm/config/config.atmensanalrun new file mode 100755 index 0000000000..aeb59d1805 --- /dev/null +++ b/parm/config/config.atmensanalrun @@ -0,0 +1,14 @@ +#!/bin/bash -x + +########## config.atmensanalrun ########## +# Atm LETKFs specific + +echo "BEGIN: config.atmensanalrun" + +# Get task specific resources +. 
$EXPDIR/config.resources atmensanalrun + +# Task specific variables +export JEDIENSEXE=$HOMEgfs/exec/fv3jedi_letkf.x + +echo "END: config.atmensanalrun" diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index c48260abdd..722818b22a 100755 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -320,6 +320,10 @@ fi export imp_physics=8 # Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" + # Hybrid related export DOHYBVAR="YES" export NMEM_ENKF=@NMEM_ENKF@ diff --git a/parm/config/config.base.nco.static b/parm/config/config.base.nco.static index 4980ecf752..48db9dd020 100755 --- a/parm/config/config.base.nco.static +++ b/parm/config/config.base.nco.static @@ -215,6 +215,10 @@ export DOBNDPNT_WAVE="YES" export imp_physics=8 # Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" + # Hybrid related export DOHYBVAR="YES" export NMEM_ENKF="80" diff --git a/parm/config/config.resources b/parm/config/config.resources index 47f9ee5bca..f9b3e4132a 100755 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -9,6 +9,8 @@ if [ $# -ne 1 ]; then echo "Must specify an input task argument to set resource variables!" 
echo "argument can be any one of the following:" echo "getic init coupled_ic aerosol_init" + echo "atmanalprep atmanalrun atmanalpost" + echo "atmensanalprep atmensanalrun atmensanalpost" echo "anal sfcanl analcalc analdiag gldas fcst post vrfy metp arch echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" @@ -136,6 +138,35 @@ elif [ $step = "waveawipsgridded" ]; then export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) export NTASKS=${npe_waveawipsgridded} +elif [ $step = "atmanalprep" ]; then + + export wtime_atmanalprep="00:10:00" + export npe_atmanalprep=1 + export nth_atmanalprep=1 + export npe_node_atmanalprep=$(echo "$npe_node_max / $nth_atmanalprep" | bc) + export memory_atmanalprep="3072M" + +elif [ $step = "atmanalrun" ]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanalrun="00:30:00" + export npe_atmanalrun=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_atmanalrun_gfs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_atmanalrun=1 + export nth_atmanalrun_gfs=$nth_atmanalrun + export native_atmanalrun="--exclusive" + export npe_node_atmanalrun=$(echo "$npe_node_max / $nth_atmanalrun" | bc) + +elif [ $step = "atmanalpost" ]; then + + export wtime_atmanalpost="00:30:00" + export npe_atmanalpost=$npe_node_max + export nth_atmanalpost=1 + export npe_node_atmanalpost=$(echo "$npe_node_max / $nth_atmanalpost" | bc) + elif [ $step = "anal" ]; then export wtime_anal="01:00:00" @@ -426,6 +457,34 @@ elif [ $step = "coupled_ic" ]; then export npe_node_coupled_ic=1 export nth_coupled_ic=1 +elif [ $step = "atmensanalprep" ]; then + + export wtime_atmensanalprep="00:10:00" + export npe_atmensanalprep=1 + export nth_atmensanalprep=1 + export npe_node_atmensanalprep=$(echo "$npe_node_max / $nth_atmensanalprep" | bc) + +elif [ $step = "atmensanalrun" ]; then + + # make below case dependent later + export layout_x=2 + export 
layout_y=3 + + export wtime_atmensanalrun="00:30:00" + export npe_atmensanalrun=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_atmensanalrun_gfs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_atmensanalrun=1 + export nth_atmensanalrun_gfs=$nth_atmensanalrun + export native_atmensanalrun="--exclusive" + export npe_node_atmensanalrun=$(echo "$npe_node_max / $nth_atmensanalrun" | bc) + +elif [ $step = "atmensanalpost" ]; then + + export wtime_atmensanalpost="00:30:00" + export npe_atmensanalpost=$npe_node_max + export nth_atmensanalpost=1 + export npe_node_atmensanalpost=$(echo "$npe_node_max / $nth_atmensanalpost" | bc) + elif [ $step = "eobs" -o $step = "eomg" ]; then export wtime_eobs="00:45:00" diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 956cc1400a..10f8b630c9 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -147,6 +147,20 @@ $Build_gsi_utils && { ((err+=$rc)) } +#------------------------------------ +# build gdas +#------------------------------------ +$Build_gdas && { + echo " .... Building GDASApp .... " + ./build_gdas.sh > $logs_dir/build_gdas.log 2>&1 + rc=$? + if [[ $rc -ne 0 ]] ; then + echo "Fatal error in building GDAS." + echo "The log file is in $logs_dir/build_gdas.log" + fi + ((err+=$rc)) +} + #------------------------------------ # build gsi monitor #------------------------------------ diff --git a/sorc/build_gdas.sh b/sorc/build_gdas.sh new file mode 100755 index 0000000000..f9238c9ab0 --- /dev/null +++ b/sorc/build_gdas.sh @@ -0,0 +1,26 @@ +#! /usr/bin/env bash +set -eux + +source ./machine-setup.sh > /dev/null 2>&1 +cwd=$(pwd) + +export BUILD_TARGET=$target + +# use more build jobs if on NOAA HPC +build_jobs=4 +case "${target}" in + hera|orion) + build_jobs=10 + ;; +esac + +# Check final exec folder exists +if [ ! 
-d "../exec" ]; then + mkdir ../exec +fi + +cd gdas.cd +BUILD_JOBS=$build_jobs ./build.sh -t $BUILD_TARGET + +exit + diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 978d932ba7..9e1caf9022 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -134,6 +134,7 @@ checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "67f5ab4" ; errs=$((errs + $?)) checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b" ; errs=$((errs + $?)) checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "acf8870" ; errs=$((errs + $?)) +checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "5952c9d" ; errs=$((errs + $?)) checkout "gldas.fd" "https://github.com/NOAA-EMC/GLDAS.git" "fd8ba62" ; errs=$((errs + $?)) checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "a2b0817" ; errs=$((errs + $?)) checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index a34643232a..22e9d99901 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -66,7 +66,8 @@ for dir in fix_aer \ fix_cpl \ fix_wave \ fix_reg2grb2 \ - fix_ugwd + fix_ugwd \ + fix_jedi do if [ -d $dir ]; then [[ $RUN_ENVIR = nco ]] && chmod -R 755 $dir @@ -141,6 +142,20 @@ cd ${pwd}/../fix ||exit 8 $LINK ../sorc/gsi_enkf.fd/fix fix_gsi +#------------------------------ +#--add GDASApp files +#------------------------------ +cd ${pwd}/../jobs ||exit 8 + $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP . + $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN . + $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST . + $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP . + $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN . + $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST . 
+cd ${pwd}/../ush ||exit 8 + $LINK ../sorc/gdas.cd/ush/ufsda . + + #------------------------------ #--add DA Monitor file (NOTE: ensure to use correct version) #------------------------------ @@ -254,6 +269,15 @@ for gldasexe in gdas2gldas gldas2gdas gldas_forcing gldas_model gldas_post $LINK ../sorc/gldas.fd/exec/$gldasexe . done +# GDASApp +for gdasexe in fv3jedi_addincrement.x fv3jedi_diffstates.x fv3jedi_ensvariance.x fv3jedi_hofx.x \ + fv3jedi_var.x fv3jedi_convertincrement.x fv3jedi_dirac.x fv3jedi_error_covariance_training.x \ + fv3jedi_letkf.x fv3jedi_convertstate.x fv3jedi_eda.x fv3jedi_forecast.x fv3jedi_plot_field.x \ + fv3jedi_data_checker.py fv3jedi_enshofx.x fv3jedi_hofx_nomodel.x fv3jedi_testdata_downloader.py; do + [[ -s $gdasexe ]] && rm -f $gdasexe + $LINK ../sorc/gdas.cd/build/bin/$gdasexe . +done + #------------------------------ #--link source code directories #------------------------------ diff --git a/workflow/applications.py b/workflow/applications.py index c53f3635ea..8256cd9699 100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -105,6 +105,8 @@ def __init__(self, configuration: Configuration) -> None: self.do_wafs = _base.get('DO_WAFS', False) self.do_vrfy = _base.get('DO_VRFY', True) self.do_metp = _base.get('DO_METP', False) + self.do_jedivar = _base.get('DO_JEDIVAR', False) + self.do_jediens = _base.get('DO_JEDIENS', False) self.do_hpssarch = _base.get('HPSSARCH', False) @@ -170,15 +172,25 @@ def _cycled_configs(self): Returns the config_files that are involved in the cycled app """ - configs = ['prep', - 'anal', 'sfcanl', 'analdiag', 'analcalc', - 'fcst', 'post', 'vrfy', 'arch'] + configs = ['prep'] + if self.do_jedivar: + configs += ['atmanalprep', 'atmanalrun', 'atmanalpost'] + else: + configs += ['anal', 'analdiag'] + + configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'arch'] + + if self.do_gldas: configs += ['gldas'] if self.do_hybvar: - configs += ['eobs', 'eomg', 'ediag', 'eupd', 'ecen', 'esfc', 
'efcs', 'echgres', 'epos', 'earc'] + if self.do_jediens: + configs += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost'] + else: + configs += ['eobs', 'eomg', 'ediag', 'eupd'] + configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] if self.do_metp: configs += ['metp'] @@ -319,10 +331,17 @@ def _get_cycled_task_names(self): This is the place where that order is set. """ - gdas_gfs_common_tasks_before_fcst = ['prep', 'anal', 'sfcanl', 'analcalc'] + gdas_gfs_common_tasks_before_fcst = ['prep'] gdas_gfs_common_tasks_after_fcst = ['post', 'vrfy'] gdas_gfs_common_cleanup_tasks = ['arch'] + if self.do_jedivar: + gdas_gfs_common_tasks_before_fcst += ['atmanalprep', 'atmanalrun', 'atmanalpost'] + else: + gdas_gfs_common_tasks_before_fcst += ['anal'] + + gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] + gldas_tasks = ['gldas'] wave_prep_tasks = ['waveinit', 'waveprep'] wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] @@ -331,12 +350,17 @@ def _get_cycled_task_names(self): hybrid_gdas_or_gfs_tasks = [] hybrid_gdas_tasks = [] if self.do_hybvar: - hybrid_gdas_or_gfs_tasks += ['eobs', 'eupd', 'echgres'] - hybrid_gdas_or_gfs_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] + if self.do_jediens: + hybrid_gdas_or_gfs_tasks += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost', 'echgres'] + else: + hybrid_gdas_or_gfs_tasks += ['eobs', 'eupd', 'echgres'] + hybrid_gdas_or_gfs_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] hybrid_gdas_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc'] # Collect all "gdas" cycle tasks - gdas_tasks = gdas_gfs_common_tasks_before_fcst + ['analdiag'] + gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() + if not self.do_jedivar: + gdas_tasks += ['analdiag'] if self.do_gldas: gdas_tasks += gldas_tasks diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 8e356a2a52..ea90ee37aa 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ 
-12,8 +12,10 @@ class Tasks: SERVICE_TASKS = ['arch', 'earc', 'getic'] VALID_TASKS = ['aerosol_init', 'coupled_ic', 'getic', 'init', 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'gldas', 'arch', + 'atmanalprep', 'atmanalrun', 'atmanalpost', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', + 'atmensanalprep', 'atmensanalrun', 'atmensanalpost', 'fcst', 'post', 'ocnpost', 'vrfy', 'metp', 'postsnd', 'awips', 'gempak', 'wafs', 'wafsblending', 'wafsblending0p25', @@ -336,7 +338,10 @@ def anal(self): def sfcanl(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + if self.app_config.do_jedivar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) @@ -348,7 +353,10 @@ def sfcanl(self): def analcalc(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + if self.app_config.do_jedivar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} deps.append(rocoto.add_dependency(dep_dict)) @@ -376,6 +384,65 @@ def analdiag(self): return task + def atmanalprep(self): + + suffix = self._base["SUFFIX"] + dump_suffix = self._base["DUMP_SUFFIX"] + gfs_cyc = self._base["gfs_cyc"] + dmpdir = self._base["DMPDIR"] + do_gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + deps = [] + dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009{suffix}' + dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = 
f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = self.cdump + if self.cdump in ['gfs'] and do_gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + + resources = self.get_resource('atmanalprep') + task = create_wf_task('atmanalprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + return task + + def atmanalrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalprep'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'gdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('atmanalrun') + task = create_wf_task('atmanalrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmanalpost(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'cycleexist', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmanalpost') + task = create_wf_task('atmanalpost', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + def gldas(self): deps = [] @@ -880,6 +947,61 @@ def eupd(self): return task + def atmensanalprep(self): + + suffix = self._base["SUFFIX"] + dump_suffix = self._base["DUMP_SUFFIX"] + gfs_cyc = self._base["gfs_cyc"] + dmpdir = self._base["DMPDIR"] + do_gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in 
self.app_config.eupd_cdumps else False + + deps = [] + dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009{suffix}' + dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = self.cdump + if self.cdump in ['gfs'] and do_gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + + resources = self.get_resource('atmensanalprep') + task = create_wf_task('atmensanalprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def atmensanalrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalprep'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'gdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmensanalrun') + task = create_wf_task('atmensanalrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmensanalpost(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('atmensanalpost') + task = create_wf_task('atmensanalpost', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + def ecen(self): self._is_this_a_gdas_task(self.cdump, 'ecen') @@ -912,7 +1034,10 @@ def _get_ecengroups(): deps = [] dep_dict = 
{'type': 'task', 'name': f'{self.cdump}analcalc'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{eupd_cdump}eupd'} + if self.app_config.do_jediens: + dep_dict = {'type': 'task', 'name': f'{eupd_cdump}atmensanalrun'} + else: + dep_dict = {'type': 'task', 'name': f'{eupd_cdump}eupd'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) @@ -940,7 +1065,10 @@ def esfc(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump}analcalc'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{eupd_cdump}eupd'} + if self.app_config.do_jediens: + dep_dict = {'type': 'task', 'name': f'{eupd_cdump}atmensanalrun'} + else: + dep_dict = {'type': 'task', 'name': f'{eupd_cdump}eupd'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) From 2cad536551180d25bfcfc2b5d35fe1089de7f3c3 Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Tue, 26 Jul 2022 13:25:37 -0400 Subject: [PATCH 04/16] WCOSS2 gempak ush scripts updates and cleanup of old release notes (#920) * WCOSS2 updates to gempak ush scripts - Add /gempak subfolder where needed in gempak ush scripts. - Remove unneeded commented out path settings from older iterations. * Removing older release notes - Cleaning out older GFS version release notes; includes current GFSv16.2.1 release notes, will commit GFSv16.3 release notes with implementation this fall. - Will then keep only the latest release notes moving forward. 
Refs: #419 --- docs/Release_Notes.gfs.v15.2.0.txt | 261 ----------- docs/Release_Notes.gfs.v15.2.2.txt | 269 ------------ docs/Release_Notes.gfs.v16.0.0.md | 413 ------------------ docs/Release_Notes.gfs.v16.1.0.txt | 193 -------- docs/Release_Notes.gfs_downstream.v15.2.0.txt | 104 ----- docs/Release_Notes.gfs_downstream.v16.0.0.txt | 114 ----- gempak/ush/gdas_ukmet_meta_ver.sh | 3 +- gempak/ush/gfs_meta_comp.sh | 6 +- gempak/ush/gfs_meta_crb.sh | 5 +- gempak/ush/gfs_meta_hur.sh | 2 +- gempak/ush/gfs_meta_mar_comp.sh | 4 +- gempak/ush/gfs_meta_sa2.sh | 2 +- 12 files changed, 8 insertions(+), 1368 deletions(-) delete mode 100644 docs/Release_Notes.gfs.v15.2.0.txt delete mode 100644 docs/Release_Notes.gfs.v15.2.2.txt delete mode 100644 docs/Release_Notes.gfs.v16.0.0.md delete mode 100644 docs/Release_Notes.gfs.v16.1.0.txt delete mode 100644 docs/Release_Notes.gfs_downstream.v15.2.0.txt delete mode 100644 docs/Release_Notes.gfs_downstream.v16.0.0.txt diff --git a/docs/Release_Notes.gfs.v15.2.0.txt b/docs/Release_Notes.gfs.v15.2.0.txt deleted file mode 100644 index 4f3cbcddd9..0000000000 --- a/docs/Release_Notes.gfs.v15.2.0.txt +++ /dev/null @@ -1,261 +0,0 @@ -GFS v15.2.0 RELEASE NOTES - - -PRELUDE (taken from GFS v15.2.0 SCN) - - GFS version 15.1 was implemented into operation at the 12Z cycle on June 12, 2019. It was the first - GFS implementation with the finite­ volume cubed-sphere (FV3) dynamical core as the Weather Service’s - Next Generation Global Prediction System (NGGPS). - - GFS version 15.2 is a minor upgrade. The major change to the system is to ingest new and replacement - satellite observations for data assimilation. It also contains a few other minor upgrades and bug fixes. - - 1) Assimilate new satellite observations - * GOES-17 AMVs - * GOES-17 has already replaced GOES-15 as the operational GOES-West satellite. 
Distribution of - GOES-15 products is expected to cease around December 2019 and active assimilation of GOES-17 - AMVs is required to prevent a gap in data coverage. - * Metop-C AMSU and MHS - * Metop-C is now the primary satellite in the 9:30 AM orbit. Assimilation of these data provide - additional impact as well as adding robustness to the system. - * KOMPSAT-5 (GPS-RO) - * Provides additional robustness to the system. - * Addition changes are made to VIIRS AMV ingest code to allow continued use after an expected change - to the BUFR format. - - 2) Assimilate buoyb sea-surface temperature (SST) data - TAC2BUFR changes in the buoy network resulted in a reduction of available SST measurements from buoys - to 10% or less of expected levels. Obsproc and GSI changes were required to restore data counts to - previous levels. - - 3) New product: Graphical Turbulence Guidance (GTG) - Global Turbulence product generated using NCAR GTG algorithm will start being disseminated on NOMADS. - The product will be available 3 hourly from F06 to F36 and horizontal resolution will be a quarter - degree. gfs.t{CC}Z.gtg.0p25.grb2f{HHH} - - 4) Update the Unified Post Processor(UPP) to address a mask issue of several land surface fields over - water bodies in grib2 products. - - This update will make GFS.v15.2 p-grid products to be more consistent with GFS.v14 products, and - remove spurious soil moisture along coastlines. These land surface fields include Soil Moisture, - Soil Temperature, Liquid Volumetric Soil Moisture, WEASD, Snow Depth, Water Runoff, GFLUX Ground - Heat Flux, WILT Wilting Point, and FLDCP Field Capacity. - - Affected product files are: - gfs.t{CC}Z.pgrb2.0p25.F{HHH} - gfs.t{CC}Z.pgrb2b.0p25.g{HHH} - gfs.t{CC}Z.pgrb2.0p50.F{HHH} - gfs.t{CC}Z.pgrb2b.0p50.g{HHH} - gfs.t{CC}Z.pgrb2.1p00.F{HHH} - gfs.t{CC}Z.pgrb2b.1p00.g{HHH} - gfs.t{CC}Z.sfluxgrbf{HHH}.grib2 - Where CC is cycle for 00, 06, 12, 18 UTC, and HHH is forecast hour. 
- - 5) The vessel icing program uses OISST as input. OISST will not be ported from WCOSS Phase 1 to Phase 3 - after Phase 1 is decommissioned in 2020. A decision was made to move the vessel icing program - within the Unified Post Processor(UPP) and use GFS forecast skin temperature as input. Current vessel - icing product in operation (sice.tCCz.siceg) has a 1-deg resolution and is disseminated to the public at - - ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/omb/prod/sice.yyyymmdd/ - - New vessel icing product will be included as a variable (ICEG) in GFS p-grid products gfs.tCCz.pgrb2.xpxx.fxxx - and gfs.tCCz.pgrb2b.xpxx.fxxx at 0.25, 0.5, and 1.0-deg resolutions, and be disseminated to the public at - - ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/gfs.yyyymmdd/ - - 6) Added three stations to station time series bufr soundings: - - 006011 62.02N 6.76W TOR 00 Thorshvan, Denmark 54 Usr Rqst 4-19 - 999913 15.51S 128.15E WYN 00 Wyndham aerodrome Australia 4 Usr Rqst 1-19 - 999914 57.48N 7.36W EGPL 00 Benbecula, Scotland, UK 6 Usr Rqst 1-19 - - The affected output files are: - gfs_yyyymmddhh.sfc - gfs_yyyymmddhh.snd - gfs.tCCz.bufrsnd.tar.gz - - Three additional files for the stations: - bufr.006011.yyyymmddhh - bufr.999913.yyyymmddhh - bufr.999914.yyyymmddhh - - 7) Reduction of water temperature biases in small lakes. - For small lakes adequate observations do not always exit to support the analysis of lake surface - temperature, often leading to significant departures from both the climatology and real-time observation. - Two changes were introduced to ensure that lake temperatures do not deviate from the climatology when - observations are not available. 
The first change is to replace a surface mask file at 0.5-degree - resolution with the one on the T1534 Gaussian grid (~13km) to prevent unrealistic SST climatology - from being used for updating the background of the near sea-surface temperature analysis over small - water bodies, such as those in the Great Salt Lake. The second change is to reduce the relaxation - time scale of the SST to climatology in GDAS forecast step from 90 days to 10 days. - - 8) Changes to NOAAPORT/SBN - Product Removals - * GADS FAX product which contains tropical surface analysis in TIF format with G4 compression. - - - -IMPLEMENTATION INSTRUCTIONS - - * NOAA Vlab GIT is used to manage GFS.v15.2 code. The SPA(s) handling the GFS.v15.2 implementation need to have - permission to clone Vlab gerrit repositories. So far Wojciech Cencek has been given access to all GFS.v15.2 - related git repositories. Please contact Kate.Friedman@noaa.gov or Hang.Lei@noaa.gov if there is any VLAB - access issue and/or the individual code managers listed under item #6) below. Please follow the following - steps to install the package on WCOSS DELL - - 1) cd $NWROOTp3 - 2) mkdir gfs.v15.2.0 - 3) cd gfs.v15.2.0 - 4) git clone --recursive gerrit:global-workflow . - 5) git checkout feature/dev-v15.2 - 6) cd sorc - 7) ./checkout.sh - This script extracts the following GFS components from gerrit - MODEL -- tag nemsfv3_gfsv15.2.1 Jun.Wang@noaa.gov - GSI -- tag fv3da_gfs.v15.2.0 Russ.Treadon@noaa.gov - UPP -- tag ncep_post_gtg.v1.1.4 Wen.Meng@noaa.gov - WAFS -- tag gfs_wafs.v5.0.9 Yali.Mao@noaa.gov - - - 8) ./build_all.sh - *This script compiles all GFS components. Runtime output from the build for each package is written - to log files in directory logs. To build an individual program, for instance, gsi, use build_gsi.sh. 
- - 9) ./link_fv3gfs.sh nco dell - - * Note: 1) ecflow suite definition and scripts are saved in gfs.v15.2.0/ecflow/ecf - 2) ncep_post_gtg.v1.1.4 contains restricted GTG (Graphic Turbulence Guidance) code provided by - NCAR. Please do not post the GTG code in any public domain. - - - - -JOB CHANGES - - * See docs/Release_Notes.gfs_downstream.v15.2.0.txt - - -SORC CHANGES - - * sorc/ - * checkout.sh - update to check out the following tags - * NEMSfv3gfs nemsfv3_gfsv15.2.1 - * ProdGSI fv3da_gfsv15.2.0 - * EMC_post_gtg ncep_post_gtg.v1.1.4 - * EMC_gfs_wafs gfs_wafs.v5.0.9 - * sorc/global_chgres.fd/ - * sfcsub.F - bug fix. set logical variable (lmask) to be false for a land surface variable - * sorc/global_cycle.fd/ - * sfcsub.F - bug fix. set logical variable (lmask) to be false for a land surface variable - - -SCRIPT CHANGES - - * scripts/ - * run_gfsmos_master.sh.cray - remove reference to COMROOTp1 - * run_gfsmos_master.sh.dell - remove reference to COMROOTp1 - * additional script changes documented in docs/Release_Notes.gfs_downstream.v15.2.0.txt - - -PARM/CONFIG CHANGES - - * parm/ - * Two files are modified to set a - * parm/config/config.base.emc.dyn - set 10 day relaxaion time scale to SST climatology in GDAS forecast - * parm/config/config.base.nco.static - set 10 day relaxaion time scale to SST climatology in GDAS forecast - - * Two files were modified for adding three bufr stations: - * parm/product/bufr_stalist.meteo.gfs - * parm/product/bufr_stalist.meteo.gfs3 - - -FIX CHANGES - - * Files in fix/fix_gsi altered by GFS DA v15.2. See GFS DA v15.2 release notes - (sorc/gsi.fd/doc/Release_Notes.gfs_da.v15.2.0.txt) for details - - -PRODUCT CHANGES - - * see SCN - - -RESOURCE INFORMATION - - * Frequency of run - * No change from GFS v15.1 - - * Commonly used libraries, compiler, and modules are defined in gfs.v15.2.0/modulefiles. 
For nemsfv3gfs, gsi, upp, wafs - they maintain their own module files under gfs.v15.2.0/sorc/(fv3gfs gsi gfs_post global_wafs).fd/modulefiles - * GSI updated to use bufr/11.3.0 - - * Data retention time under $COMROOTp3 for GFS.v15.2 should be the same as GFS.v15.1. - - * Disk space: - * About 4 Gb more per gdas cycle due to additional observation data in gdas and enkf diagnostic files - - * Computational resources and run times: - * Due to processing additional observation data the runtime for the following jobs increases with - respect to GFS v15.1 as noted below - * gfs_analysis : about 30 seconds longer (27.4 minutes for GFS v15.1 -vs- 27.9 minutes for GFS v15.2) - * gdas_enkf_select_obs : about 1 minute longer (3.7 for GFS v15.1 -vs- 4.7 for GFS v15.2) - * gdas_enkf_innovate_obs_grp*: about 30 seconds longer (14.8 for GFS v15.1 -vs - 15.3 for GFS v15.2) - * gdas_enkf_update : about 20 seconds longer (6.4 for GFS v15.1 -vs- 6.7 for GFS v15.2) - - - -PRE-IMPLEMENTATION TESTING REQUIREMENTS - - * Which production jobs should be tested as part of this implementation? - * The entire GFS v15.2 package needs to be installed and tested. EMC can run the same date - and compare NCO and EMC output to confirm the EMC and NCO tests reproduce each other - - * Does this change require a 30-day evaluation? - * No. - - - * Suggested evaluators - * Please contact the following EMC staff for the indicated components - Fanglin.Yang@noaa.gov - MODEL - Russ.Treadon@noaa.gov - DA - Wen.Meng@noaa.gov - UPP - Yali.Mao@noaa.gov - WAFS - Boi.Vuong@noaa.gov - downstream products - - -DISSEMINATION INFORMATION - - * Where should this output be sent? - * No change from GFS v15.1 - - * Who are the users? - * No change from GFS v15.1 - - * Which output files should be transferred from PROD WCOSS to DEV WCOSS? - * No change from GFS v15.1 - - * Directory changes - * No change from GFS v15.1 - - * File changes. 
- * See SCN - - -HPSS ARCHIVE - - No change from GFS v15.1 - - - -JOB DEPENDENCIES & FLOW DIAGRAM - * No change from GFS v15.1 - - -=========== -Prepared by -Fanglin.Yang@noaa -Russ.Treadon@noaa.gov -Boi.Vuong@noaa.gov -Wen.Meng@noaa.gov - - diff --git a/docs/Release_Notes.gfs.v15.2.2.txt b/docs/Release_Notes.gfs.v15.2.2.txt deleted file mode 100644 index c1978fcf23..0000000000 --- a/docs/Release_Notes.gfs.v15.2.2.txt +++ /dev/null @@ -1,269 +0,0 @@ -GFS v15.2.2 - updated by SPA on 11/13/2019 - -Fixed missing gempak pathes in GFS_GEMPAK_NCDC_UPAPGIF job that caused the black/white background switch in the Fax chart. - -Change: -jobs/JGFS_GEMPAK_NCDC_UPAPGIF - - -GFS v15.2.0 RELEASE NOTES - - -PRELUDE (taken from GFS v15.2.0 SCN) - - GFS version 15.1 was implemented into operation at the 12Z cycle on June 12, 2019. It was the first - GFS implementation with the finite­ volume cubed-sphere (FV3) dynamical core as the Weather Service’s - Next Generation Global Prediction System (NGGPS). - - GFS version 15.2 is a minor upgrade. The major change to the system is to ingest new and replacement - satellite observations for data assimilation. It also contains a few other minor upgrades and bug fixes. - - 1) Assimilate new satellite observations - * GOES-17 AMVs - * GOES-17 has already replaced GOES-15 as the operational GOES-West satellite. Distribution of - GOES-15 products is expected to cease around December 2019 and active assimilation of GOES-17 - AMVs is required to prevent a gap in data coverage. - * Metop-C AMSU and MHS - * Metop-C is now the primary satellite in the 9:30 AM orbit. Assimilation of these data provide - additional impact as well as adding robustness to the system. - * KOMPSAT-5 (GPS-RO) - * Provides additional robustness to the system. - * Addition changes are made to VIIRS AMV ingest code to allow continued use after an expected change - to the BUFR format. 
- - 2) Assimilate buoyb sea-surface temperature (SST) data - TAC2BUFR changes in the buoy network resulted in a reduction of available SST measurements from buoys - to 10% or less of expected levels. Obsproc and GSI changes were required to restore data counts to - previous levels. - - 3) New product: Graphical Turbulence Guidance (GTG) - Global Turbulence product generated using NCAR GTG algorithm will start being disseminated on NOMADS. - The product will be available 3 hourly from F06 to F36 and horizontal resolution will be a quarter - degree. gfs.t{CC}Z.gtg.0p25.grb2f{HHH} - - 4) Update the Unified Post Processor(UPP) to address a mask issue of several land surface fields over - water bodies in grib2 products. - - This update will make GFS.v15.2 p-grid products to be more consistent with GFS.v14 products, and - remove spurious soil moisture along coastlines. These land surface fields include Soil Moisture, - Soil Temperature, Liquid Volumetric Soil Moisture, WEASD, Snow Depth, Water Runoff, GFLUX Ground - Heat Flux, WILT Wilting Point, and FLDCP Field Capacity. - - Affected product files are: - gfs.t{CC}Z.pgrb2.0p25.F{HHH} - gfs.t{CC}Z.pgrb2b.0p25.g{HHH} - gfs.t{CC}Z.pgrb2.0p50.F{HHH} - gfs.t{CC}Z.pgrb2b.0p50.g{HHH} - gfs.t{CC}Z.pgrb2.1p00.F{HHH} - gfs.t{CC}Z.pgrb2b.1p00.g{HHH} - gfs.t{CC}Z.sfluxgrbf{HHH}.grib2 - Where CC is cycle for 00, 06, 12, 18 UTC, and HHH is forecast hour. - - 5) The vessel icing program uses OISST as input. OISST will not be ported from WCOSS Phase 1 to Phase 3 - after Phase 1 is decommissioned in 2020. A decision was made to move the vessel icing program - within the Unified Post Processor(UPP) and use GFS forecast skin temperature as input. 
Current vessel - icing product in operation (sice.tCCz.siceg) has a 1-deg resolution and is disseminated to the public at - - ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/omb/prod/sice.yyyymmdd/ - - New vessel icing product will be included as a variable (ICEG) in GFS p-grid products gfs.tCCz.pgrb2.xpxx.fxxx - and gfs.tCCz.pgrb2b.xpxx.fxxx at 0.25, 0.5, and 1.0-deg resolutions, and be disseminated to the public at - - ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/gfs.yyyymmdd/ - - 6) Added three stations to station time series bufr soundings: - - 006011 62.02N 6.76W TOR 00 Thorshvan, Denmark 54 Usr Rqst 4-19 - 999913 15.51S 128.15E WYN 00 Wyndham aerodrome Australia 4 Usr Rqst 1-19 - 999914 57.48N 7.36W EGPL 00 Benbecula, Scotland, UK 6 Usr Rqst 1-19 - - The affected output files are: - gfs_yyyymmddhh.sfc - gfs_yyyymmddhh.snd - gfs.tCCz.bufrsnd.tar.gz - - Three additional files for the stations: - bufr.006011.yyyymmddhh - bufr.999913.yyyymmddhh - bufr.999914.yyyymmddhh - - 7) Reduction of water temperature biases in small lakes. - For small lakes adequate observations do not always exit to support the analysis of lake surface - temperature, often leading to significant departures from both the climatology and real-time observation. - Two changes were introduced to ensure that lake temperatures do not deviate from the climatology when - observations are not available. The first change is to replace a surface mask file at 0.5-degree - resolution with the one on the T1534 Gaussian grid (~13km) to prevent unrealistic SST climatology - from being used for updating the background of the near sea-surface temperature analysis over small - water bodies, such as those in the Great Salt Lake. The second change is to reduce the relaxation - time scale of the SST to climatology in GDAS forecast step from 90 days to 10 days. - - 8) Changes to NOAAPORT/SBN - Product Removals - * GADS FAX product which contains tropical surface analysis in TIF format with G4 compression. 
- - - -IMPLEMENTATION INSTRUCTIONS - - * NOAA Vlab GIT is used to manage GFS.v15.2 code. The SPA(s) handling the GFS.v15.2 implementation need to have - permission to clone Vlab gerrit repositories. So far Wojciech Cencek has been given access to all GFS.v15.2 - related git repositories. Please contact Kate.Friedman@noaa.gov or Hang.Lei@noaa.gov if there is any VLAB - access issue and/or the individual code managers listed under item #6) below. Please follow the following - steps to install the package on WCOSS DELL - - 1) cd $NWROOTp3 - 2) mkdir gfs.v15.2.0 - 3) cd gfs.v15.2.0 - 4) git clone --recursive gerrit:global-workflow . - 5) git checkout feature/dev-v15.2 - 6) cd sorc - 7) ./checkout.sh - This script extracts the following GFS components from gerrit - MODEL -- tag nemsfv3_gfsv15.2.1 Jun.Wang@noaa.gov - GSI -- tag fv3da_gfs.v15.2.0 Russ.Treadon@noaa.gov - UPP -- tag ncep_post_gtg.v1.1.4 Wen.Meng@noaa.gov - WAFS -- tag gfs_wafs.v5.0.9 Yali.Mao@noaa.gov - - - 8) ./build_all.sh - *This script compiles all GFS components. Runtime output from the build for each package is written - to log files in directory logs. To build an individual program, for instance, gsi, use build_gsi.sh. - - 9) ./link_fv3gfs.sh nco dell - - * Note: 1) ecflow suite definition and scripts are saved in gfs.v15.2.0/ecflow/ecf - 2) ncep_post_gtg.v1.1.4 contains restricted GTG (Graphic Turbulence Guidance) code provided by - NCAR. Please do not post the GTG code in any public domain. - - - - -JOB CHANGES - - * See docs/Release_Notes.gfs_downstream.v15.2.0.txt - - -SORC CHANGES - - * sorc/ - * checkout.sh - update to check out the following tags - * NEMSfv3gfs nemsfv3_gfsv15.2.1 - * ProdGSI fv3da_gfsv15.2.0 - * EMC_post_gtg ncep_post_gtg.v1.1.4 - * EMC_gfs_wafs gfs_wafs.v5.0.9 - * sorc/global_chgres.fd/ - * sfcsub.F - bug fix. set logical variable (lmask) to be false for a land surface variable - * sorc/global_cycle.fd/ - * sfcsub.F - bug fix. 
set logical variable (lmask) to be false for a land surface variable - - -SCRIPT CHANGES - - * scripts/ - * run_gfsmos_master.sh.cray - remove reference to COMROOTp1 - * run_gfsmos_master.sh.dell - remove reference to COMROOTp1 - * additional script changes documented in docs/Release_Notes.gfs_downstream.v15.2.0.txt - - -PARM/CONFIG CHANGES - - * parm/ - * Two files are modified to set a - * parm/config/config.base.emc.dyn - set 10 day relaxaion time scale to SST climatology in GDAS forecast - * parm/config/config.base.nco.static - set 10 day relaxaion time scale to SST climatology in GDAS forecast - - * Two files were modified for adding three bufr stations: - * parm/product/bufr_stalist.meteo.gfs - * parm/product/bufr_stalist.meteo.gfs3 - - -FIX CHANGES - - * Files in fix/fix_gsi altered by GFS DA v15.2. See GFS DA v15.2 release notes - (sorc/gsi.fd/doc/Release_Notes.gfs_da.v15.2.0.txt) for details - - -PRODUCT CHANGES - - * see SCN - - -RESOURCE INFORMATION - - * Frequency of run - * No change from GFS v15.1 - - * Commonly used libraries, compiler, and modules are defined in gfs.v15.2.0/modulefiles. For nemsfv3gfs, gsi, upp, wafs - they maintain their own module files under gfs.v15.2.0/sorc/(fv3gfs gsi gfs_post global_wafs).fd/modulefiles - * GSI updated to use bufr/11.3.0 - - * Data retention time under $COMROOTp3 for GFS.v15.2 should be the same as GFS.v15.1. 
- - * Disk space: - * About 4 Gb more per gdas cycle due to additional observation data in gdas and enkf diagnostic files - - * Computational resources and run times: - * Due to processing additional observation data the runtime for the following jobs increases with - respect to GFS v15.1 as noted below - * gfs_analysis : about 30 seconds longer (27.4 minutes for GFS v15.1 -vs- 27.9 minutes for GFS v15.2) - * gdas_enkf_select_obs : about 1 minute longer (3.7 for GFS v15.1 -vs- 4.7 for GFS v15.2) - * gdas_enkf_innovate_obs_grp*: about 30 seconds longer (14.8 for GFS v15.1 -vs - 15.3 for GFS v15.2) - * gdas_enkf_update : about 20 seconds longer (6.4 for GFS v15.1 -vs- 6.7 for GFS v15.2) - - - -PRE-IMPLEMENTATION TESTING REQUIREMENTS - - * Which production jobs should be tested as part of this implementation? - * The entire GFS v15.2 package needs to be installed and tested. EMC can run the same date - and compare NCO and EMC output to confirm the EMC and NCO tests reproduce each other - - * Does this change require a 30-day evaluation? - * No. - - - * Suggested evaluators - * Please contact the following EMC staff for the indicated components - Fanglin.Yang@noaa.gov - MODEL - Russ.Treadon@noaa.gov - DA - Wen.Meng@noaa.gov - UPP - Yali.Mao@noaa.gov - WAFS - Boi.Vuong@noaa.gov - downstream products - - -DISSEMINATION INFORMATION - - * Where should this output be sent? - * No change from GFS v15.1 - - * Who are the users? - * No change from GFS v15.1 - - * Which output files should be transferred from PROD WCOSS to DEV WCOSS? - * No change from GFS v15.1 - - * Directory changes - * No change from GFS v15.1 - - * File changes. 
- * See SCN - - -HPSS ARCHIVE - - No change from GFS v15.1 - - - -JOB DEPENDENCIES & FLOW DIAGRAM - * No change from GFS v15.1 - - -=========== -Prepared by -Fanglin.Yang@noaa -Russ.Treadon@noaa.gov -Boi.Vuong@noaa.gov -Wen.Meng@noaa.gov - - diff --git a/docs/Release_Notes.gfs.v16.0.0.md b/docs/Release_Notes.gfs.v16.0.0.md deleted file mode 100644 index 19ce4e5600..0000000000 --- a/docs/Release_Notes.gfs.v16.0.0.md +++ /dev/null @@ -1,413 +0,0 @@ -GFS RELEASE NOTES (GFS.v16.0.0) -- October 9, 2020 - -------- -PRELUDE -------- - -* GFS version 16.0 is the first major upgrade to Finite Volume Cubed Sphere (FV3) dynamical core based GFS which replaced the spectral dynamical core in June 2019. In this upgrade, the number of model vertical layers is increased from 64 to 127 and the model top is extended from the upper stratosphere (~55 km height) to the mesopause (~80 km height). With this upgrade, for the first time, the operational stand alone global deterministic WAVEWATCH III based wave model Multi_1 (wave_multi_1.v3.3) is merged into the GFS system. The WAVEWATCH III model is updated and coupled to the GFS using a one-way coupling scheme where the atmospheric model provides forcing to the wave model using the NOAA Environmental Modeling System (NEMS). Major changes have also been made in other components of the forecast system including model physics, data assimilation, system infrastructure, post-processing and product generation. - -EMC has conducted a set of retrospective and real-time experiments, covering part of the 2018 hurricane season and the entire period from May 10, 2019 to the present, for a comprehensive evaluation of the model upgrades. GFSv16 showed improved forecast skills in many areas. For more details please refer to the Science Change Notice: https://docs.google.com/document/d/1pDLqP6ne2grEJ2vMfw7RnkwyzRsGpGPMb1d2DeDuu2E/edit. - -* GFS.v16 has been reorganized to use a COMPONENT directory structure to separate the atmos and wave components. 
- -* This release note describes the overall changes made to the entire system. More details about changes in science and structure of the data assimilation system are documented in gfs.v16.0.0/sorc/gsi.fd/doc/Release_Notes.gfsda.v16.0.0.txt. Details about downstream product generation is documented in Release_Notes.gfs_downstream.v16.0.0.txt. - ---------------------------- -IMPLEMENTATION INSTRUCTIONS ---------------------------- - -* The NOAA VLab and both the NOAA-EMC and NCAR organization spaces on GitHub.com are used to manage the GFS.v16 code. The SPA(s) handling the GFS.v16 implementation need to have permissions to clone VLab gerrit repositories and the private NCAR UPP_GTG repository. All NOAA-EMC organization repositories are publicly readable and do not require access permissions. Please contact Fanglin.Yang@noaa.gov if there is any VLAB access issue and/or the individual code managers listed under item #6) below. Please follow the following steps to install the package on WCOSS-Dell: - - 1) cd $NWROOTp3 - 2) mkdir gfs.v16.0.0 - 3) cd gfs.v16.0.0 - 4) git clone -b EMC-v16.0.0 https://github.com/NOAA-EMC/global-workflow.git . - 5) cd sorc - 6) ./checkout.sh -o - * This script extracts the following GFS components: - MODEL tag GFS.v16.0.13 Jun.Wang@noaa.gov - GSI tag gfsda.v16.0.0 Russ.Treadon@noaa.gov - GLDAS tag gldas_gfsv16_release.v1.11.0 Helin.Wei@noaa.gov - UFS_UTIL tag ops-gfsv16.0.0 George.Gayno@noaa.gov - POST tag upp_gfsv16_release.v1.1.0 Wen.Meng@noaa.gov - WAFS tag gfs_wafs.v6.0.9 Yali.Mao@noaa.gov - - 7) ./build_all.sh - *This script compiles all GFS components. Runtime output from the build for each package is written to log files in directory logs. To build an individual program, for instance, gsi, use build_gsi.sh. 
- - 8) ./link_fv3gfs.sh nco dell - - 9) Please use the script /gpfs/dell6/emc/modeling/noscrub/emc.glopara/para_gfs/misc/copyic_v16rt2_nco.sh on Mars to copy initial conditions from EMC real-time parallel v16rt2 to $COM directory to start NCO’s parallel from the dump step of next cycle. Please remember to change the COMROOT setting in this script to /gpfs/dell1/nco/ops/com. It will rsync v16rt2 data from the dev machine to either dev or prod machine. - -Instruction notes: ------------------- - -* The GSI build script ($HOMEgfs/sorc/build_gsi.sh) must be executed prior to $HOMEgfs/sorc/build_enkf_chgres_recenter_nc.sh. This automatically happens when executing $HOMEgfs/sorc/build_all.sh to build all GFS v16 executables. - -* The RTOFS curfile*h variable settings must be updated in scripts/exgfs_wave_prep.sh when the RTOFS implementation occurs. The “_1hrly” and “_3hrly” text will be removed to update the filenames: - - Before RTOFS implementation (current settings): - - curfile1h=${COMIN_WAV_CUR}/rtofs_glo_2ds_${fext}${fh3_rtofs}_1hrly_prog.nc - curfile3h=${COMIN_WAV_CUR}/rtofs_glo_2ds_${fext}${fh3_rtofs}_3hrly_prog.nc - - After RTOFS implementation: - - curfile1h=${COMIN_WAV_CUR}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc - curfile3h=${COMIN_WAV_CUR}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc - -* ecflow suite definition and scripts are saved in gfs.v16.0.0/ecflow/ecf - -* POST contains restricted GTG (Graphic Turbulence Guidance) code provided NCAR. Please do not post the GTG code in any public domain. - ------------ -JOB CHANGES ------------ - -Many job scripts have been added, removed or renamed to meet NCO script naming conventions for GFS.v16. 
- -Renamed job scripts are: - -* JGDAS_ENKF_RECENTER -> JGDAS_ENKF_ECEN -* JGDAS_GEMPAK -> JGDAS_ATMOS_GEMPAK -* JGDAS_GEMPAK_META -> JGDAS_ATMOS_GEMPAK_META_NCDC -* JGDAS_VMINMON -> JGDAS_ATMOS_VMINMON -* JGDAS_VERFRAD -> JGDAS_ATMOS_VERFRAD -* JGDAS_VERFOZN -> JGDAS_ATMOS_VERFOZN -* JGFS_AWIPS_20KM_1P0DEG -> JGFS_ATMOS_AWIPS_20KM_1P0DEG -* JGFS_AWIPS_G2 -> JGFS_ATMOS_AWIPS_G2 -* JGFS_CYCLONE_GENESIS -> JGFS_ATMOS_CYCLONE_GENESIS -* JGFS_CYCLONE_TRACKER -> JGFS_ATMOS_CYCLONE_TRACKER -* JGFS_FBWIND -> JGFS_ATMOS_FBWIND -* JGFS_GEMPAK -> JGFS_ATMOS_GEMPAK -* JGFS_GEMPAK_META -> JGFS_ATMOS_GEMPAK_META -* JGFS_GEMPAK_NCDC_UPAPGIF -> JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF -* JGFS_GEMPAK_PGRB2_SPEC -> JGFS_ATMOS_GEMPAK_PGRB2_SPEC -* JGFS_PGRB2_SPEC_NPOESS -> JGFS_ATMOS_PGRB2_SPEC_NPOESS -* JGFS_POSTSND -> JGFS_ATMOS_POSTSND -* JGFS_VMINMON -> JGFS_ATMOS_VMINMON -* JGFS_WAFS -> JGFS_ATMOS_WAFS -* JGFS_WAFS_BLENDING -> JGFS_ATMOS_WAFS_BLENDING -* JGFS_WAFS_GCIP -> JGFS_ATMOS_WAFS_GCIP -* JGFS_WAFS_GRIB2 -> JGFS_ATMOS_WAFS_GRIB2 -* JGLOBAL_ANALYSIS -> JGLOBAL_ATMOS_ANALYSIS -* JGLOBAL_EMCSFC_SFC_PREP -> JGLOBAL_ATMOS_EMCSFC_SFC_PREP -* JGLOBAL_ENKF_SELECT_OBS -> JGDAS_ENKF_SELECT_OBS -* JGLOBAL_ENKF_UPDATE -> JGDAS_ENKF_UPDATE -* JGLOBAL_TROPCY_QC_RELOC -> JGLOBAL_ATMOS_TROPCY_QC_RELOC -* JGLOBAL_NCEPPOST -> JGLOBAL_ATMOS_NCEPPOST -* JGLOBAL_POST_MANAGER -> JGLOBAL_ATMOS_POST_MANAGER - -New job scripts are: - -* JGDAS_ATMOS_ANALYSIS_DIAG -* JGDAS_ATMOS_CHGRES_FORENKF -* JGDAS_ATMOS_GLDAS -* JGDAS_ENKF_DIAG -* JGDAS_ENKF_SFC -* JGFS_ATMOS_FSU_GENESIS -* JGFS_ATMOS_WAFS_GRIB2_0P25 -* JGFS_ATMOS_WAFS_BLENDING_0P25 -* JGLOBAL_ATMOS_ANALYSIS_CALC -* JGLOBAL_WAVE_GEMPAK -* JGLOBAL_WAVE_INIT -* JGLOBAL_WAVE_POST_BNDPNT -* JGLOBAL_WAVE_POST_PNT -* JGLOBAL_WAVE_POST_SBS -* JGLOBAL_WAVE_PRDGEN_BULLS -* JGLOBAL_WAVE_PRDGEN_GRIDDED -* JGLOBAL_WAVE_PREP - -Removed job scripts are: - -* JGDAS_BULLS_NAVY -* JGDAS_TROPC -* JGFS_FAX -* JGFS_FAX_WAFS -* JGLOBAL_ENKF_INNOVATE_OBS - --------------- 
-SCRIPT CHANGES --------------- - -Many scripts have been added, removed or renamed to meet NCO script naming conventions for GFS.v16. - -Renamed scripts are: - -* exemcsfc_global_sfc_prep.sh.ecf -> exemcsfc_global_sfc_prep.sh -* exgdas_nawips.sh.ecf -> exgdas_atmos_nawips.sh -* exgdas_nceppost.sh.ecf -> exgdas_atmos_nceppost.sh -* exgdas_vrfminmon.sh.ecf -> exgdas_atmos_vminmon.sh -* exgdas_vrfyrad.sh.ecf -> exgdas_atmos_verfrad.sh -* exgdas_vrfyozn.sh.ecf -> exgdas_atmos_verfozn.sh -* exgempak_gdas_gif_ncdc.sh.ecf -> exgdas_atmos_gempak_gif_ncdc.sh -* exgempak_gfs_gif_ncdc_skew_t.sh.ecf -> exgfs_atmos_gempak_gif_ncdc_skew_t.sh -* exgfs_awips_20km_1p0deg.sh.ecf -> exgfs_atmos_awips_20km_1p0deg.sh -* exgfs_fbwind.sh.ecf -> exgfs_atmos_fbwind.sh -* exgfs_gempak_meta.sh.ecf -> exgfs_atmos_gempak_meta.sh -* exgfs_grib_awips.sh.ecf -> exgfs_atmos_grib_awips.sh -* exgfs_nawips.sh.ecf -> exgfs_atmos_nawips.sh -* exgfs_nceppost.sh.ecf -> exgfs_atmos_nceppost.sh -* exgfs_pmgr.sh.ecf -> exgfs_pmgr.sh -* exgfs_postsnd.sh.ecf -> exgfs_atmos_postsnd.sh -* exgfs_prdgen_manager.sh.ecf -> exgfs_prdgen_manager.sh -* exgfs_vrfminmon.sh.ecf -> exgfs_atmos_vminmon.sh -* exgfs_wafs_blending.sh.ecf -> exgfs_atmos_wafs_blending.sh -* exgfs_wafs_gcip.sh.ecf -> exgfs_atmos_wafs_gcip.sh -* exgfs_wafs_grib.sh.ecf -> exgfs_atmos_wafs_grib.sh -* exgfs_wafs_grib2.sh.ecf -> exgfs_atmos_wafs_grib2.sh -* exglobal_analysis_fv3gfs.sh.ecf -> exglobal_atmos_analysis.sh -* exglobal_enkf_fcst_fv3gfs.sh.ecf -> exgdas_enkf_fcst.sh -* exglobal_enkf_recenter_fv3gfs.sh.ecf -> exgdas_enkf_ecen.sh -* exglobal_enkf_post_fv3gfs.sh.ecf -> exgdas_enkf_post.sh -* exglobal_enkf_update_fv3gfs.sh.ecf -> exgdas_enkf_update.sh -* exglobal_fcst_nemsfv3gfs.sh -> exglobal_forecast.sh -* exglobal_grib2_special_npoess.sh.ecf -> exgfs_atmos_grib2_special_npoess.sh -* exglobal_innovate_obs_fv3gfs.sh.ecf -> exgdas_enkf_select_obs.sh -* exglobal_pmgr.sh.ecf -> exglobal_atmos_pmgr.sh -* exgoes_nawips.sh.ecf -> 
exgfs_atmos_goes_nawips.sh -* exnawips.sh.ecf -> exgfs_atmos_nawips.sh -* extropcy_qc_reloc.sh.ecf -> exglobal_atmos_tropcy_qc_reloc.sh - -New scripts are: - -* exgdas_atmos_gldas.sh -* exgdas_enkf_sfc.sh -* exgfs_atmos_wafs_grib2_0p25.sh -* exgfs_atmos_wafs_blending_0p25.sh -* exgfs_wave_init.sh -* exgfs_wave_nawips.sh -* exgfs_wave_post_bndpnt.sh -* exgfs_wave_post_gridded_sbs.sh -* exgfs_wave_post_pnt.sh -* exgfs_wave_prdgen_bulls.sh -* exgfs_wave_prdgen_gridded.sh -* exgfs_wave_prep.sh -* exgdas_atmos_chgres_forenkf.sh -* exglobal_atmos_analysis_calc.sh -* exglobal_diag.sh - -Removed scripts are: - -* exgdas_bulls_navy.sh.ecf -* exgdas_tropc.sh.ecf -* exgfs_fax.sh.ecf -* exgfs_fax_wafs.sh.ecf -* exgfs_grib_awips_g2.sh.ecf -* exgfs_grib_wafs.sh.ecf - -------------------- -PARM/CONFIG CHANGES -------------------- - -All JJOBS except for those used by downstream product generation source config files under ./gfs.v16.0.0/parm/config to set up job-specific parameters. The config.base is sourced by all JJOBS to set parameters that are common to either all JJOBS or are shared by more than one JJOBS. The config.anal is shared by a few analysis steps, config.wave is shared by the wave steps, and config.wafs is shared by the WAFS jobs. Below are the parm (config) files modified or added in GFS.v16. 
- -Modified configs: - -* config.anal -* config.arch -* config.awips -* config.base.emc.dyn -* config.base.nco.static -* config.earc -* config.ecen -* config.efcs -* config.eobs -* config.epos -* config.eupd -* config.fcst -* config.fv3 -* config.fv3ic -* config.gempak -* config.post -* config.postsnd -* config.prep -* config.prepbufr -* config.resources -* config.vrfy - -New configs: - -* config.analcalc -* config.analdiag -* config.echgres -* config.ediag -* config.esfc -* config.gldas -* config.metp -* config.wafs -* config.wafsblending -* config.wafsblending0p25 -* config.wafsgcip -* config.wafsgrib2 -* config.wafsgrib20p25 -* config.wave -* config.waveawipsbulls -* config.waveawipsgridded -* config.wavegempak -* config.waveinit -* config.wavepostbndpnt -* config.wavepostpnt -* config.wavepostsbs -* config.waveprep - ------------ -FIX CHANGES ------------ - -* All fixed fields used by the system are placed under gfs.v16.0.0/fix, and further categorized based on the type of applications. During the NCO implementation process the fix_gsi and wafs fix files are copied from external repositories via sorc/checkout.sh and linked under /fix via sorc/link_fv3gfs.sh. All other fix files are copied from EMC's local archives via sorc/link_fv3gfs.sh: fix_am, fix_fv3_gmted2010, fix_gldas, fix_orog, fix_verif, fix_wave_gfs - -The entire package takes 165 GB disk space to install. This ./fix directory alone takes ~153G. 
- -New fix files: - -* fix_am - new solar constants, Thompson MP climatology, salinity climatology -* fix_fv3_gmted2010 - new fix_sfc subfolder -* fix_gldas - new folder with files for GLDAS package -* fix_orog - new global lake files -* fix_verif - new grid2obs files -* fix_wave_gfs - new folder with files for wave component - ---------------- -PRODUCT CHANGES ---------------- - -* Please refer to GFSv16 SCN: https://docs.google.com/document/d/1pDLqP6ne2grEJ2vMfw7RnkwyzRsGpGPMb1d2DeDuu2E/edit - --------------------- -RESOURCE INFORMATION --------------------- - -* Frequency of run - 6 hourly cycle (00, 06, 12, 18Z) - no change from current operations - -* Commonly used libraries, compiler, and modules are defined in gfs.v16.0.0/modulefiles. For FV3, GSI, GLDAS, UPP, WAFS they maintain their own module files under gfs.v16.0.0/sorc/(fv3gfs gsi gldas gfs_post gfs_wafs).fd/modulefiles - -* Data retention time under $COMROOTp3 for GFS.v16 should be the same as GFS.v15. - -* Disk space: The current operational GFS.v15 takes about 10.7 TB online COM disk space per cycle, while GFS.v16 will require about 8.0 TB per cycle. 
- -* Computational resources and run times: - - * Please refer to the following document for the details of node usage,threading, and walltimes set in ECFLOW job cards for all jobs: - https://docs.google.com/spreadsheets/d/1XAa5mDWLQJSMgyxhR8W7RRuENJN7koJN-rIHLkTgieo/edit#gid=0 - - * Please refer to the following document for the high watermark test results for the overall computational cost of the system: - https://docs.google.com/presentation/d/1aNi5doryHO_lNhtTq-jGzFh9Wi4Xu1Z5DNb921nhw74/edit#slide=id.ga069802256_0_377 - -* Information about the major steps and actual runtimes from EMC high watermark tests are listed below: - - * JGLOBAL_FORECAST (GFS) - * 484 nodes, 3388 tasks, ptile=7, 4 threads/task - * Runtime: 125 minutes - - * JGLOBAL_FORECAST (GDAS) - * 119 nodes, 833 tasks, ptile=7, 4 threads/task - * Runtime: 22 minutes - - * JGLOBAL_ATMOS_ANALYSIS (GFS) - * 250 nodes, 1000 tasks, ptile=4, 7 threads/task - * Runtime: 29 minutes - - * JGLOBAL_ATMOS_ANALYSIS (GDAS) - * 250 nodes, 1000 tasks, ptile=4, 7 threads/task - * Runtime: 38 minutes - - * JGDAS_ENKF_SELECT_OBS - * 120 nodes, 480 tasks, ptile=4, 7 threads/task - * Runtime: 3.8 minutes - - * JGDAS_ENKF_UPDATE - * 240 nodes, 960 tasks, ptile=4, 7 threads/task - * Runtime: 26 minutes - - * JGDAS_ENKF_ECEN - * 20 nodes, 80 tasks, ptile=4, 7 threads/task - * Runtime: 4.4 minutes per realization - * Concurrently run 3 realizations of JGDAS_ENKF_ECEN. Total node usage for 3 jobs x 20 nodes each = 60 nodes. - - * JGDAS_ENKF_FCST - * 15 nodes, 420 tasks, ptile=28, 1 threads/task - * Runtime: 29 minutes per realization - * Concurrently run 40 realizations of JGDAS_ENKF_FCST. Each job processes 2 EnKF - members. Total node usage for 40 jobs x 15 nodes each = 600 nodes - * 40 EnKF forecast groups for GFS.v16 is an increase from the 20 EnKF forecast groups - currently run in operations. 
- - * JGDAS_ENKF_POST - * 20 nodes, 80 tasks, ptile=4, 7 threads/task - * Runtime: 11 minutes per realization - * Concurrently run 7 realizations of JGDAS_ENKF_POST. 7 forecasts processed, one - per job. Total node usage for 7 jobs x 20 nodes each = 140 nodes. - ---------------------------------------- -PRE-IMPLEMENTATION TESTING REQUIREMENTS ---------------------------------------- - -* Which production jobs should be tested as part of this implementation? - * All components of this package need to be tested. EMC is running a real-time parallel using the same system. We will work with the SPA to provide initial conditions from this parallel to run the NCO parallel during the implementation process. We will compare results from EMC and NCO parallels to ensure they reproduce each other. - -* Does this change require a 30-day evaluation? - * Yes, the entire GFS.v16 package requires a 30-day evaluation - -* Suggested evaluators - * Please contact fanglin.yang@noaa.gov, russ.treadon@noaa.gov, and kate.friedman@noaa.gov for evaluation. - -------------------------- -DISSEMINATION INFORMATION -------------------------- - -* Where should this output be sent? - * Please refer to GFSv16 SCN. Additionally, we have sent a request to NCO Dataflow to start sending new files WAFS_0p25_blended_YYYYMMDDHHfFF.grib2 to AWC only. - -* Who are the users? - * same as current operations plus multi_1 users - -* Which output files should be transferred from PROD WCOSS to DEV WCOSS? - * Same as current operational gfs, plus wave products. As there are certain changes in product names and types, EMC will provide support for NCO dataflow team to finalize the list.The amount of data to be transferred also depends on NCO’s network bandwidth. 
- -* Directory changes - - * Add $COMPONENT subfolder to gfs, gdas, and enkf paths for atmospheric component underneath the $cyc folder: - - $COMROOTp3/gfs/prod/gfs.$PDY/$cyc/atmos - $COMROOTp3/gfs/prod/gdas.$PDY/$cyc/atmos - $COMROOTp3/gfs/prod/enkf.gdas.$PDY/$cyc/atmos/memXXX - - * Introduce wave model via $COMPONENT subfolder under gfs and gfs $cyc folders: - - $COMROOTp3/gfs/prod/gfs.$PDY/$cyc/wave - $COMROOTp3/gfs/prod/gdas.$PDY/$cyc/wave - -* File changes - - * The UPP(post) file changes can be referred to: - https://docs.google.com/spreadsheets/d/1I-nqfVO67qE3uHah1p9UNbBPgcStXptEj91MBucSTb4/edit?usp=sharing - ------------- -HPSS ARCHIVE ------------- - -Please refer to the following document for current operational GFS.v15 archives and the proposed archives for GFS.v16: -https://docs.google.com/spreadsheets/d/1KkyXa-ZyWCjKul_kijUM4241VBzAerMifMOShLy0crY/edit#gid=0 - -Please check WCOSS /gpfs/dell1/nco/ops/nwprod/runhistory.v2.3.2/parm/gfs to see a full list of GFS.v15 files archived in HPSS tape. - -------------------------------- -JOB DEPENDENCIES & FLOW DIAGRAM -------------------------------- - -GDAS and GFS flowchart (downstream jobs compressed): -https://docs.google.com/presentation/d/1grydJSn3LxNishdHOxwOQMyxkLsEzlIfj1PHiTUrAkE/edit#slide=id.g6ee6c85d17_0_0 - -=========== -Prepared by -Kate.Friedman@noaa.gov -Fanglin.Yang@noaa.gov -Russ.Treadon@noaa.gov -Jun.Wang@noaa.gov -Helin.Wei@noaa.gov -George.Gayno@noaa.gov -Wen.Meng@noaa.gov -Yali.Mao@noaa.gov -Jessica.Meixner@noaa.gov -=========== diff --git a/docs/Release_Notes.gfs.v16.1.0.txt b/docs/Release_Notes.gfs.v16.1.0.txt deleted file mode 100644 index 82960aee09..0000000000 --- a/docs/Release_Notes.gfs.v16.1.0.txt +++ /dev/null @@ -1,193 +0,0 @@ -GFS V16.1.0 RELEASE NOTES - - -PRELUDE - - NOAA awarded Delivery Order 2 (DO-2) of its commercial radio occultation (RO) - data purchase to GeoOptics on February 19, 2021. 
This purchase covers 1300 - occultations a day over a six month period with the data flow starting on - March 17, 2021. - - The quality of the GeoOptics occultations was examined during Delivery - Order 1 (DO-1) in December 2020 and January 2021. Forecast impact assessment - was conducted using half resolution parallels in the GFSv16 framework. - - Testing of the DO-2 data is being carried out using a real time full - resolution parallel, v161rt1. It was initially started in mid-February - to test reproducibility with the GFSv16 NCO parallel, then adjusted to reduce - computational footprint. Data from GeoOptics began to be assimilated as soon - as it was available on March 17. Configuration for the use of the commercial - data was informed by the results of DO-1 experimentation, with the quality - control and observation errors being treated as other RO data with the - exception of lowering the upper bound of data. - - This package also addresses several GFS v16 bugzilla tickets. GFS v16 - bugzilla tickets #1196 and #1205 are resolved in this package. The error - reported in bugzilla ticket #1206 has been documented to be compiler, not - code, specific. The DA aspect of GFS v16 bugzilla tickets #216, #1198, #1218, - #1221, and #1222 are also addressed by this package. None of the DA - bugzilla changes alter analysis results. - - GFSv16 introduced a bug where the sign of the layer height (delz) increment - was flipped. The included bugfix reverts the sign as originally intended. - This fix was tested in both a low resolution setting and with a full - resolution parallel, v16rt2c. Once v16rt2c was seen as viable, it also - began assimilating the GeoOptics data so both changes were tested in one - package. - - There are several wave related fixes being included in this update: - * Including the RW-NH* boundary points which are used in NHC's - offline NWPS system were removed from multi_1 to GFSv16, which - also requires a minor script bug fix. 
- * Fix the interpolation to the East Pacific wave grid to include - all of the American Samoa islands. - * Unmask the Gulf of California, Red Sea and Persian Gulf in the - global 0p25 interpolated wave output grid. - - -CHANGES TO RESOURCES AND FILE SIZES - - Impact of including RW-NHC points is: There should be no impact to the - forecast job timing, however the *wave.out_pnt.points* files in the - wave/rundata com directories will each increase in size by 6MB. The - JGLOBAL_WAVE_POST_BNDPNTBLL and JGLOBAL_WAVE_POST_BNDPNT will now take - approximately 20 minutes longer. The gfswave.tZZz.ibpbull_tar will - increase in size to be 78M, gfswave.tZZz.ibpcbull_tar will now be 42M - and the gfswave.tZZz.ibp_tar file will now be 11 GB. - - Impact of East Pacific and global 0p25 wave grid fixes: There are no - impacts in terms of resources or file size. - - -IMPLEMENTATION INSTRUCTIONS - - The NOAA VLab and both the NOAA-EMC and NCAR organization spaces on GitHub.com - are used to manage the GFS.v16.1 code. The SPA(s) handling the GFS.v16.1 - implementation need to have permissions to clone VLab gerrit repositories and - the private NCAR UPP_GTG repository. All NOAA-EMC organization repositories are - publicly readable and do not require access permissions. Please follow the - following steps to install the package on WCOSS-Dell: - - 1) cd $NWROOTp3 - - 2) mkdir gfs.v16.1.0 - - 3) cd gfs.v16.1.0 - - 4) git clone -b EMC-v16.1.0 https://github.com/NOAA-EMC/global-workflow.git . - - 5) cd sorc - - 6) ./checkout.sh -o - * This script extracts the following GFS components: - MODEL tag GFS.v16.0.16 Jun.Wang@noaa.gov - GSI tag gfsda.v16.1.0 Russ.Treadon@noaa.gov - GLDAS tag gldas_gfsv16_release.v1.12.0 Helin.Wei@noaa.gov - UFS_UTILS tag ops-gfsv16.0.0 George.Gayno@noaa.gov - POST tag upp_gfsv16_release.v1.1.3 Wen.Meng@noaa.gov - WAFS tag gfs_wafs.v6.0.21 Yali.Mao@noaa.gov - - 7) ./build_all.sh - * This script compiles all GFS components. 
Runtime output from the build for - each package is written to log files in directory logs. To build an - individual program, for instance, gsi, use build_gsi.sh. - - 8) ./link_fv3gfs.sh nco dell - - -SORC CHANGES - -* sorc/ - * checkout.sh will checkout the following changed model tags: - * MODEL; tag GFS.v16.0.16 - There are no changes in this tag - compared to operations. The tag number was updated from - tag GFS.v16.0.15 to incorporate the local changes made in operations. - - * GSI; tag gfsda.v16.1.0 - See release notes - doc/Release_Notes.gfsda.v16.1.0.txt in the GSI tag for details. - - * POST; tag upp_gfsv16_release.v1.1.3 - There are no changes in this tag - compared to operations. The tag number was updated from - tag upp_gfsv16_release.v1.1.1 to incorporate the local changes made - in operations. - - * WAFS; tag gfs_wafs.v6.0.21 - There are no changes in this tag compared to - operations. The tag number was updated from gfs_wafs.v6.0.17 to - incorporate the local changes made in operations. - - -JOBS CHANGES - -* No change from GFS v16.0 - - -PARM/CONFIG CHANGES - -* No change from GFS v16.0 - - -SCRIPT CHANGES - -* There is one bug fix for waves that is a script change in scripts/exgfs_wave_post_pnt.sh - to ensure all point output is created (for example for the RW-NH* points). - - -FIX CHANGES - -* The contents of fix/fix_gsi have been updated. See release notes - doc/Release_Notes.gfsda.v16.1.0.txt in the NOAA-EMC/GSI tag gfsda.v16.1.0 - for details. 
- -* For the wave fix to include the RW-NH-* boundary points for the wave - point output, the following two fix files can be copied to the - fix/fix_wave_gfs folder: - /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix_wave_gfs_v16.1.1/wave_gfs.buoys - /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix_wave_gfs_v16.1.1/wave_gfs.buoys.full - - -* For the wave fix to the East Pacific wave grid interpolation the following - fix file with fix can be copied to the fix/fix_wave_gfs folder: - /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix_wave_gfs_v16.1.1/ep_10m_interp.inp.tmpl - -* For the wave fix to include the Gulf of California, Red Sea, and Persian Gulf - the following to fix files need to be copied to the fix/fix_wave_gfs folder: - /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix_wave_gfs_v16.1.1/ww3_grid.inp.glo_15mxt - /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix_wave_gfs_v16.1.1/WHTGRIDINT.bin.glo_15mxt - - -PRE-IMPLEMENTATION TESTING REQUIREMENTS - -* Which production jobs should be tested as part of this implementation? - * The entire GFS v16.1 package needs to be installed and tested. - -* Does this change require a 30-day evaluation? - * No. - - -DISSEMINATION INFORMATION - -* Where should this output be sent? - * No change from GFS v16.0 - -* Who are the users? - * No change from GFS v16.0 - -* Which output files should be transferred from PROD WCOSS to DEV WCOSS? 
- * No change from GFS v16.0 - -* Directory changes - * No change from GFS v16.0 - -* File changes - * No change from GFS v16.0 - - -HPSS ARCHIVE - -* No change from GFS v16.0 - - -JOB DEPENDENCIES AND FLOW DIAGRAM - -* No change from GFS v16.0 - diff --git a/docs/Release_Notes.gfs_downstream.v15.2.0.txt b/docs/Release_Notes.gfs_downstream.v15.2.0.txt deleted file mode 100644 index b1de8d6649..0000000000 --- a/docs/Release_Notes.gfs_downstream.v15.2.0.txt +++ /dev/null @@ -1,104 +0,0 @@ -RELEASE NOTES: GFS.v15.2.0 downstream products - released October 19, 2019 (tentative date) - -CODE CHANGES - No code change - -JOB CHANGES - The followig jobs (J-job) have been removed from GFS v15.2.0 - - JGFS_FAX (retired in June 12, 2019) - - JGFS_FAX_WAFS (retired in June 12, 2019) - - JGDAS_TROPC (NCO approved to remove in GFS V15.2.0) - - The remain GFS downstream jobs are below: - - JGDAS_GEMPAK - - JGDAS_GEMPAK_META_NCDC - - JGFS_AWIPS_G2 - - JGFS_FBWIND - - JGFS_GEMPAK - - JGFS_GEMPAK_META - - JGFS_PGRB2_SPEC_GEMPAK - - JGFS_AWIPS_20KM_1P0DEG - - JGFS_GEMPAK_NCDC_UPAPGIF - - JGFS_PGRB2_SPEC_NPOESS - - JGDAS_BULLS_NAVY - - All Job cards have same setting for testing on DELL with real-time GFS v15.2 data. 
- ( An example: JGFS_GEMPAK ) - # LSBATCH: User input - #BSUB -J gfs_gempak_00 - #BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_00.o%J - #BSUB -q debug - #BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output - #BSUB -W 00:30 - #BSUB -P GFS-T2O - #BSUB -n 24 # 24 tasks - #BSUB -R span[ptile=12] # 12 task per node - #BSUB -R affinity[core(1):distribute=balance] # using 12 cores on node and bind to 1 - # core per task and distribute across sockets - - #################################### - ## Load the GRIB Utilities modules - #################################### - module load EnvVars/1.0.2 - module load ips/18.0.1.163 - module load CFP/2.0.1 - module load impi/18.0.1 - module load lsf/10.1 - module load prod_util/1.1.3 - module load prod_envir/1.0.3 - module load grib_util/1.1.0 - ########################################### - # Now set up GEMPAK/NTRANS environment - ########################################### - module load gempak/7.3.1 - module list - -FV3 GFS DRIVER: - All drivers are used to test GFS downtream jobs in gfs.v15.2.0/driver/product/run_*_dell.sh_xx where is xx is cycle - -The followig jobs, scripts, parm have been modified to meet NCO - EE2 implementation standards. 
-JOB CHANGES - JGDAS_BULLS_NAVY -removed dependencies for gempak on phase 1 - JGDAS_GEMPAK -removed dependencies for gempak on phase 1 - JGDAS_GEMPAK_META_NCDC - JGFS_AWIPS_20KM_1P0DEG - JGFS_AWIPS_G2 - JGFS_FBWIND - JGFS_GEMPAK -removed dependencies for gempak on phase 1 - JGFS_GEMPAK_META -removed dependencies for gempak on phase 1 - JGFS_GEMPAK_NCDC_UPAPGIF -removed dependencies for gempak on phase 1 - JGFS_PGRB2_SPEC_GEMPAK -removed dependencies for gempak on phase 1 - JGFS_PGRB2_SPEC_NPOESS - -SCRIPTS CHANGES - exgdas_bulls_navy.sh.ecf -removed dependencies for sstoi_grb on phase 1 - exgdas_nawips.sh.ecf -removed dependencies for gempak on phase 1 - exgempak_gdas_gif_ncdc.sh.ecf -removed dependencies for gempak on phase 1 - exgempak_gfs_gif_ncdc_skew_t.sh.ecf -removed dependencies for gempak on phase 1 - exgfs_awips_20km_1p0deg.sh.ecf - exgfs_fbwind.sh.ecf - exgfs_gempak_meta.sh.ecf -removed dependencies for gempak on phase 1 - exgfs_grib_awips_g2.sh.ecf - exgfs_grib_awips.sh.ecf - exgfs_nawips.sh.ecf -removed dependencies for gempak on phase 1 - exglobal_grib2_special_npoess.sh.ecf - exgoes_nawips.sh.ecf -removed dependencies for gempak on phase 1 - exnawips.sh.ecf -removed dependencies for gempak on phase 1 - -PARM CHANGES - No changes in parm cards for AWIPS products from GFS v15.1 - -USH CHANGES - mkbull_ntc.sh -removed dependencies for sstoi_grb on phase 1 - -GEMPAK CHANGES - -Adding the diretory dictionaries -removed dependencies for gempak on phase 1 - (Note: This directory comes from phase 1) - - All gempak's USH and FIX have been modified to remove all dependencies on Phase I - -ECFLOW CHANGES - -Removed GFS FAX, GFS FAX WAFS and GDAS_TROPC in ecflow suite definition and scripts - -=========== -Prepared by -Boi.Vuong@noaa.gov diff --git a/docs/Release_Notes.gfs_downstream.v16.0.0.txt b/docs/Release_Notes.gfs_downstream.v16.0.0.txt deleted file mode 100644 index 5ee6238b9d..0000000000 --- a/docs/Release_Notes.gfs_downstream.v16.0.0.txt +++ /dev/null @@ 
-1,114 +0,0 @@ -RELEASE NOTES: GFS.v16.0.0 downstream products - released March 31, 2020 (tentative date) - -CODE CHANGES - No code change - -AWIPS CHANGES - Removed field "5WAVH" in All parm crads for AWIPS 20km and 1.0 deg (UPP planed to remove in GFS v16.0) - -GEMPAK CHANGES - Removed simulated GOES 12/13 in GEMPAK and PGRB2 - Added simulated GOES 16/17 in GEMPAK and PGRB2 in LAT/LON 0.25 deg - -JOB CHANGES - - Removed JGDAS_BULLS_NAVY - to be retired in GFS V16.0 - - Removed JGDAS_TROPC - Rteired in GFS v15.2.0 - - The remain GFS downstream jobs are following: - - JGDAS_ATMOS_GEMPAK - - JGDAS_ATMOS_GEMPAK_META_NCDC - - JGFS_ATMOS_AWIPS_G2 - - JGFS_ATMOS_FBWIND - - JGFS_ATMOS_GEMPAK - - JGFS_ATMOS_GEMPAK_META - - JGFS_ATMOS_GEMPAK_PGRB2_SPEC - - JGFS_ATMOS_AWIPS_20KM_1P0DEG - - JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF - - JGFS_ATMOS_PGRB2_SPEC_NPOESS - -SCRIPTS CHANGES - The following script have been removed from GFS v16.0.0 - - exgfs_grib_awips_g2.sh.ecf (retired in June 12, 2019) - - exgdas_bulls_navy.sh.ecf (Plan to retire in GFSS v16.0.0) GDAS and NAVY bulletins - - Removed WINTEMV bulltin in script exgfs_atmos_fbwind.sh (WINTEMV bulletin plan to retire in GFS v16.0.0) - -PARM/wmo - - Removed parm in grid 160,161,213,254,225 -----> Retired in GFS v15.2.7 - --DRIVER - All Job cards have same setting for testing on DELL with real-time GFS v15.2 data. 
- ( An example: JGFS_ATMOS_GEMPAK ) - # LSBATCH: User input - #BSUB -J gfs_gempak_00 - #BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_00.o%J - #BSUB -q debug - #BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output - #BSUB -W 00:30 - #BSUB -P GFS-DEV - #BSUB -n 24 # 24 tasks - #BSUB -R span[ptile=12] # 12 task per node - #BSUB -R affinity[core(1):distribute=balance] # using 12 cores on node and bind to 1 - # core per task and distribute across sockets - - #################################### - ## Load the GRIB Utilities modules - #################################### - module load EnvVars/1.0.2 - module load ips/18.0.1.163 - module load CFP/2.0.1 - module load impi/18.0.1 - module load lsf/10.1 - module load prod_util/1.1.4 - module load prod_envir/1.0.3 - module load grib_util/1.1.0 - ########################################### - # Now set up GEMPAK/NTRANS environment - ########################################### - module load gempak/7.3.3 - module list - -FV3 GFS DRIVER: - All drivers are used to test GFS downtream jobs in gfs.v16.0.0/driver/product/run_*_dell.sh_xx where is xx is cycle - -The followig jobs, scripts, parm have been modified to meet NCO - EE2 implementation standards. 
-JOB CHANGES (no changes) - JGDAS_ATMOS_GEMPAK - JGDAS_ATMOS_GEMPAK_META_NCDC - JGFS_ATMOS_AWIPS_G2 - JGFS_ATMOS_FBWIND - JGFS_ATMOS_GEMPAK - JGFS_ATMOS_GEMPAK_META - JGFS_ATMOS_GEMPAK_PGRB2_SPEC - JGFS_ATMOS_AWIPS_20KM_1P0DEG - JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF - JGFS_ATMOS_PGRB2_SPEC_NPOESS - -SCRIPTS CHANGES - exgdas_atmos_nawips.sh - exgdas_atmos_gempak_gif_ncdc.sh - exgfs_atmos_gempak_gif_ncdc_skew_t.sh - exgfs_atmos_awips_20km_1p0deg.sh - exgfs_atmos_fbwind.sh - exgfs_atmos_gempak_meta.sh - exgfs_atmos_grib_awips.sh - exgfs_atmos_nawips.sh - exgFS_atmos_grib2_special_npoess.sh - exgfs_atmos_goes_nawips.sh -removed dependencies for gempak on phase 1 - exgfs_atmos_nawips.sh -removed dependencies for gempak on phase 1 - -PARM CHANGES - Removed 5WAVH in parm cards for AWIPS products in GFS v16.0.0 - (NCEP POST stopped producing 5WAVH (plan to retire "5WAVH") in GFS v16.0.0 - -USH CHANGES - No changes - -GEMPAK CHANGES - - Added nagrib.tbl file in gempak's FIX directory - -ECFLOW CHANGES - -Removed GFS FAX, GFS FAX WAFS and GDAS_TROPC in ecflow suite definition and scripts - -=========== -Prepared by -Boi.Vuong@noaa.gov diff --git a/gempak/ush/gdas_ukmet_meta_ver.sh b/gempak/ush/gdas_ukmet_meta_ver.sh index dcc350ea74..845fa1cc6b 100755 --- a/gempak/ush/gdas_ukmet_meta_ver.sh +++ b/gempak/ush/gdas_ukmet_meta_ver.sh @@ -139,8 +139,7 @@ for area in $areas sdatenum=$sdate9 cyclenum=$cycle9 fi - # JY grid="$COMROOT/nawips/${envir}/ukmet.20${sdatenum}/ukmet_20${sdatenum}${cyclenum}${dgdattim}" - grid="${COMINukmet}.20${sdatenum}/ukmet_20${sdatenum}${cyclenum}${dgdattim}" + grid="${COMINukmet}.20${sdatenum}/gempak/ukmet_20${sdatenum}${cyclenum}${dgdattim}" # 500 MB HEIGHT METAFILE diff --git a/gempak/ush/gfs_meta_comp.sh b/gempak/ush/gfs_meta_comp.sh index 4e9a3d8820..9bd27c5736 100755 --- a/gempak/ush/gfs_meta_comp.sh +++ b/gempak/ush/gfs_meta_comp.sh @@ -217,8 +217,7 @@ export err=$?;err_chk done # COMPARE THE 1200 UTC GFS MODEL TO THE 0000 UTC UKMET MODEL grid="F-${MDL} 
| ${PDY2}/${cyc}00" - # JY export HPCUKMET=$COMROOT/nawips/prod/ukmet.${PDY} - export HPCUKMET=${COMINukmet}.${PDY} + export HPCUKMET=${COMINukmet}.${PDY}/gempak grid2="F-UKMETHPC | ${PDY2}/0000" # for gfsfhr in 00 12 24 36 48 60 84 108 for gfsfhr in 00 12 24 84 108 @@ -593,8 +592,7 @@ export err=$?;err_chk done # COMPARE THE 0000 UTC GFS MODEL TO THE 1200 UTC UKMET FROM YESTERDAY grid="F-${MDL} | ${PDY2}/${cyc}00" - #XXW export HPCUKMET=${MODEL}/ukmet.${PDYm1} - export HPCUKMET=${COMINukmet}.${PDYm1} + export HPCUKMET=${COMINukmet}.${PDYm1}/gempak grid2="F-UKMETHPC | ${PDY2m1}/1200" # for gfsfhr in 00 12 24 36 48 60 84 108 for gfsfhr in 00 12 24 84 108 diff --git a/gempak/ush/gfs_meta_crb.sh b/gempak/ush/gfs_meta_crb.sh index 4800578238..82fa7795e8 100755 --- a/gempak/ush/gfs_meta_crb.sh +++ b/gempak/ush/gfs_meta_crb.sh @@ -260,11 +260,8 @@ export err=$?;err_chk if [ ${cyc} -eq 00 ] ; then - # BV export MODEL=/com/nawips/prod - # JY export HPCECMWF=${MODEL}/ecmwf.${PDY} - # JY export HPCUKMET=${MODEL}/ukmet.${PDYm1} export HPCECMWF=${COMINecmwf}.${PDY}/gempak - export HPCUKMET=${COMINukmet}.${PDYm1} + export HPCUKMET=${COMINukmet}.${PDYm1}/gempak grid1="F-${MDL} | ${PDY2}/${cyc}00" grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12" grid3="F-UKMETHPC | ${PDY2m1}/1200" diff --git a/gempak/ush/gfs_meta_hur.sh b/gempak/ush/gfs_meta_hur.sh index 9590f07b3e..aed25d6d78 100755 --- a/gempak/ush/gfs_meta_hur.sh +++ b/gempak/ush/gfs_meta_hur.sh @@ -338,7 +338,7 @@ if [ ${cyc} -eq 00 ] ; then # JY export HPCECMWF=${MODEL}/ecmwf.${PDY} # JY export HPCUKMET=${MODEL}/ukmet.${PDY} export HPCECMWF=${COMINecmwf}.${PDY}/gempak - export HPCUKMET=${COMINukmet}.${PDY} + export HPCUKMET=${COMINukmet}.${PDY}/gempak grid1="F-${MDL} | ${PDY2}/${cyc}00" grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12" grid3="F-UKMETHPC | ${PDY2}/${cyc}00" diff --git a/gempak/ush/gfs_meta_mar_comp.sh b/gempak/ush/gfs_meta_mar_comp.sh index d7262814b5..a55fa3c642 100755 --- 
a/gempak/ush/gfs_meta_mar_comp.sh +++ b/gempak/ush/gfs_meta_mar_comp.sh @@ -181,7 +181,7 @@ export err=$?;err_chk done # COMPARE THE 1200 UTC GFS MODEL TO THE 0000 UTC UKMET MODEL grid="F-${MDL} | ${PDY2}/${cyc}00" - export HPCUKMET=${COMINukmet}.${PDY} + export HPCUKMET=${COMINukmet}.${PDY}/gempak grid2="F-UKMETHPC | ${PDY2}/0000" # for gfsfhr in 00 12 24 36 48 60 84 108 for gfsfhr in 00 12 24 84 108 @@ -534,7 +534,7 @@ export err=$?;err_chk done # COMPARE THE 0000 UTC GFS MODEL TO THE 1200 UTC UKMET FROM YESTERDAY grid="F-${MDL} | ${PDY2}/${cyc}00" - export HPCUKMET=${COMINukmet}.${PDYm1} + export HPCUKMET=${COMINukmet}.${PDYm1}/gempak grid2="F-UKMETHPC | ${PDY2m1}/1200" # for gfsfhr in 00 12 24 36 48 60 84 108 for gfsfhr in 00 12 24 84 108 diff --git a/gempak/ush/gfs_meta_sa2.sh b/gempak/ush/gfs_meta_sa2.sh index de538a249d..a566031030 100755 --- a/gempak/ush/gfs_meta_sa2.sh +++ b/gempak/ush/gfs_meta_sa2.sh @@ -303,7 +303,7 @@ do ukmetfhr=${gfsfhr} fi gfsfhr="F${gfsfhr}" - grid3="${COMINukmet}.${PDY}/ukmet_${PDY}00f${ukmetfhr}" + grid3="${COMINukmet}.${PDY}/gempak/ukmet_${PDY}00f${ukmetfhr}" $GEMEXE/gdplot2_nc << EOF25 \$MAPFIL = mepowo.gsf From 4eb296f7e82459b1d8188636ca3db60b5fa10091 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Tue, 26 Jul 2022 14:50:30 -0400 Subject: [PATCH 05/16] Optimize DA clone, build, and link (#931) The PR contains changes to optimize the DA clone, build, and link. Changes are made to `checkout.sh`, `build_all.sh`, and `link_workflow.sh` in the g-w `sorc/` directory. These changes are in g-w branch `feature/clone` Two arguments are added to `checkout.sh` to allow the user to specify which DA package to build the global workflow with. 
These options are - `-g`: clone from the [GSI](https://github.com/NOAA-EMC/GSI) repo and build the g-w for GSI-based DA - `-u`: clone from the [GDASApp](https://github.com/NOAA-EMC/GDASApp) repo and build the g-w for UFS-based DA If no option is specified, `checkout.sh` does not clone any DA and DA related repos. This is the default behavior of `checkout.sh`. (_DA related_ repos include [GLDAS](https://github.com/NOAA-EMC/GLDAS), [GSI-utils](https://github.com/NOAA-EMC/GSI-utils), and [GSI-Monitor](https://github.com/NOAA-EMC/GSI-Monitor).) `build_all.sh` is modified to detect which repos and have been cloned and to build accordingly. `link_workflow.sh` is modified to detect which directories are present and link/copy accordingly. Closes #930 --- sorc/build_all.sh | 52 +++++++++---- sorc/checkout.sh | 56 ++++++++++---- sorc/link_workflow.sh | 165 +++++++++++++++++++++++++++--------------- 3 files changed, 184 insertions(+), 89 deletions(-) diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 10f8b630c9..198fd351e0 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -120,9 +120,10 @@ $Build_ufs_model && { } #------------------------------------ -# build GSI and EnKF +# build GSI and EnKF - optional checkout #------------------------------------ -$Build_gsi_enkf && { +if [ -d gsi_enkf.fd ]; then + $Build_gsi_enkf && { echo " .... Building gsi and enkf .... " ./build_gsi_enkf.sh $_ops_opt $_verbose_opt > $logs_dir/build_gsi_enkf.log 2>&1 rc=$? @@ -132,11 +133,15 @@ $Build_gsi_enkf && { fi ((err+=$rc)) } +else + echo " .... Skip building gsi and enkf .... " +fi #------------------------------------ # build gsi utilities #------------------------------------ -$Build_gsi_utils && { +if [ -d gsi_utils.fd ]; then + $Build_gsi_utils && { echo " .... Building gsi utilities .... " ./build_gsi_utils.sh $_ops_opt $_verbose_opt > $logs_dir/build_gsi_utils.log 2>&1 rc=$? @@ -146,25 +151,33 @@ $Build_gsi_utils && { fi ((err+=$rc)) } +else + echo " .... 
Skip building gsi utilities .... " +fi #------------------------------------ -# build gdas -#------------------------------------ -$Build_gdas && { - echo " .... Building GDASApp .... " - ./build_gdas.sh > $logs_dir/build_gdas.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building GDAS." - echo "The log file is in $logs_dir/build_gdas.log" - fi - ((err+=$rc)) +# build gdas - optional checkout +#------------------------------------ +if [ -d gdas.cd ]; then + $Build_gdas && { + echo " .... Building GDASApp .... " + ./build_gdas.sh $_verbose_opt > $logs_dir/build_gdas.log 2>&1 + rc=$? + if [[ $rc -ne 0 ]] ; then + echo "Fatal error in building GDASApp." + echo "The log file is in $logs_dir/build_gdas.log" + fi + ((err+=$rc)) } +else + echo " .... Skip building GDASApp .... " +fi #------------------------------------ # build gsi monitor #------------------------------------ -$Build_gsi_monitor && { +if [ -d gsi_monitor.fd ]; then + $Build_gsi_monitor && { echo " .... Building gsi monitor .... " ./build_gsi_monitor.sh $_ops_opt $_verbose_opt > $logs_dir/build_gsi_monitor.log 2>&1 rc=$? @@ -174,6 +187,9 @@ $Build_gsi_monitor && { fi ((err+=$rc)) } +else + echo " .... Skip building gsi monitor .... " +fi #------------------------------------ # build UPP @@ -206,7 +222,8 @@ $Build_ufs_utils && { #------------------------------------ # build gldas #------------------------------------ -$Build_gldas && { +if [ -d gldas.fd ]; then + $Build_gldas && { echo " .... Building gldas .... " ./build_gldas.sh $_verbose_opt > $logs_dir/build_gldas.log 2>&1 rc=$? @@ -216,6 +233,9 @@ $Build_gldas && { fi ((err+=$rc)) } +else + echo " .... Skip building gldas .... 
" +fi #------------------------------------ # build gfs_wafs - optional checkout diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 9e1caf9022..fcf7235ae7 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -18,6 +18,10 @@ Usage: $BASH_SOURCE [-c][-h][-m ufs_hash][-o] Check out this UFS hash instead of the default -o: Check out operational-only code (GTG and WAFS) + -g: + Check out GSI for GSI-based DA + -u: + Check out GDASApp for UFS-based DA EOF exit 1 } @@ -56,7 +60,7 @@ function checkout() { fi cd "${topdir}" - if [[ -d "${dir}" && "${CLEAN:-NO}" == "YES" ]]; then + if [[ -d "${dir}" && $CLEAN == "YES" ]]; then echo "|-- Removing existing clone in ${dir}" rm -Rf "$dir" fi @@ -96,13 +100,30 @@ function checkout() { return 0 } -while getopts ":chm:o" option; do +# Set defaults for variables toggled by options +export CLEAN="NO" +CHECKOUT_GSI="NO" +CHECKOUT_GDAS="NO" +checkout_gtg="NO" +checkout_wafs="NO" +ufs_model_hash="b97375c" + +# Parse command line arguments +while getopts ":chgum:o" option; do case $option in c) echo "Recieved -c flag, will delete any existing directories and start clean" export CLEAN="YES" ;; + g) + echo "Receieved -g flag for optional checkout of GSI-based DA" + CHECKOUT_GSI="YES" + ;; h) usage;; + u) + echo "Received -u flag for optional checkout of UFS-based DA" + CHECKOUT_GDAS="YES" + ;; o) echo "Received -o flag for optional checkout of operational-only codes" checkout_gtg="YES" @@ -130,20 +151,29 @@ mkdir -p ${logdir} # The checkout version should always be a speciifc commit (hash or tag), not a branch errs=0 -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-b97375c}" ; errs=$((errs + $?)) -checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "67f5ab4" ; errs=$((errs + $?)) -checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b" ; errs=$((errs + $?)) -checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "acf8870" ; errs=$((errs + 
$?)) -checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "5952c9d" ; errs=$((errs + $?)) -checkout "gldas.fd" "https://github.com/NOAA-EMC/GLDAS.git" "fd8ba62" ; errs=$((errs + $?)) -checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "a2b0817" ; errs=$((errs + $?)) -checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) - -if [[ "${checkout_wafs:-NO}" == "YES" ]]; then +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash}"; errs=$((errs + $?)) +checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "a2b0817" ; errs=$((errs + $?)) +checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) + +if [[ $CHECKOUT_GSI == "YES" ]]; then + checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "67f5ab4"; errs=$((errs + $?)) +fi + +if [[ $CHECKOUT_GDAS == "YES" ]]; then + checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "5952c9d"; errs=$((errs + $?)) +fi + +if [[ $CHECKOUT_GSI == "YES" || $CHECKOUT_GDAS == "YES" ]]; then + checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b"; errs=$((errs + $?)) + checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "acf8870"; errs=$((errs + $?)) + checkout "gldas.fd" "https://github.com/NOAA-EMC/GLDAS.git" "fd8ba62"; errs=$((errs + $?)) +fi + +if [[ $checkout_wafs == "YES" ]]; then checkout "gfs_wafs.fd" "https://github.com/NOAA-EMC/EMC_gfs_wafs.git" "014a0b8"; errs=$((errs + $?)) fi -if [[ "${checkout_gtg:-NO}" == "YES" ]]; then +if [[ $checkout_gtg == "YES" ]]; then ################################################################################ # checkout_gtg ## yes: The gtg code at NCAR private repository is available for ops. GFS only. 
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 22e9d99901..1ac7ad5b56 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -28,6 +28,7 @@ SLINK="ln -fs" pwd=$(pwd -P) # Link post +[[ -d upp.fd ]] && rm -rf upp.fd $LINK ufs_model.fd/FV3/upp upp.fd #------------------------------ @@ -66,8 +67,7 @@ for dir in fix_aer \ fix_cpl \ fix_wave \ fix_reg2grb2 \ - fix_ugwd \ - fix_jedi + fix_ugwd do if [ -d $dir ]; then [[ $RUN_ENVIR = nco ]] && chmod -R 755 $dir @@ -85,13 +85,17 @@ fi #--------------------------------------- #--add files from external repositories #--------------------------------------- -cd ${pwd}/../jobs ||exit 8 +if [ -d ../sorc/gldas.fd ]; then + cd ${pwd}/../jobs ||exit 8 $LINK ../sorc/gldas.fd/jobs/JGDAS_ATMOS_GLDAS . +fi cd ${pwd}/../parm ||exit 8 # [[ -d post ]] && rm -rf post # $LINK ../sorc/upp.fd/parm post - [[ -d gldas ]] && rm -rf gldas - $LINK ../sorc/gldas.fd/parm gldas + if [ -d ../sorc/gldas.fd ]; then + [[ -d gldas ]] && rm -rf gldas + $LINK ../sorc/gldas.fd/parm gldas + fi cd ${pwd}/../parm/post ||exit 8 for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \ postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \ @@ -104,15 +108,19 @@ cd ${pwd}/../parm/post ||exit 8 done cd ${pwd}/../scripts ||exit 8 $LINK ../sorc/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh . - $LINK ../sorc/gldas.fd/scripts/exgdas_atmos_gldas.sh . + if [ -d ../sorc/gldas.fd ]; then + $LINK ../sorc/gldas.fd/scripts/exgdas_atmos_gldas.sh . + fi cd ${pwd}/../ush ||exit 8 for file in emcsfc_ice_blend.sh fv3gfs_driver_grid.sh fv3gfs_make_orog.sh global_cycle_driver.sh \ emcsfc_snow.sh fv3gfs_filter_topo.sh global_cycle.sh fv3gfs_make_grid.sh ; do $LINK ../sorc/ufs_utils.fd/ush/$file . 
done - for file in gldas_archive.sh gldas_forcing.sh gldas_get_data.sh gldas_process_data.sh gldas_liscrd.sh gldas_post.sh ; do + if [ -d ../sorc/gldas.fd ]; then + for file in gldas_archive.sh gldas_forcing.sh gldas_get_data.sh gldas_process_data.sh gldas_liscrd.sh gldas_post.sh ; do $LINK ../sorc/gldas.fd/ush/$file . - done + done + fi #----------------------------------- @@ -137,29 +145,43 @@ fi #------------------------------ #--add GSI fix directory #------------------------------ -cd ${pwd}/../fix ||exit 8 +if [ -d ../sorc/gsi_enkf.fd ]; then + cd ${pwd}/../fix ||exit 8 [[ -d fix_gsi ]] && rm -rf fix_gsi $LINK ../sorc/gsi_enkf.fd/fix fix_gsi +fi +#------------------------------ +#--add GDASApp fix directory +#------------------------------ +if [ -d ../sorc/gdas.cd ]; then + cd ${pwd}/../fix ||exit 8 + [[ -d fix_gdas ]] && rm -rf fix_gdas + $LINK $FIX_DIR/fix_gdas . +fi #------------------------------ #--add GDASApp files #------------------------------ -cd ${pwd}/../jobs ||exit 8 +if [ -d ../sorc/gdas.cd ]; then + cd ${pwd}/../jobs ||exit 8 $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP . $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN . $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST . $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP . $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN . $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST . -cd ${pwd}/../ush ||exit 8 + cd ${pwd}/../ush ||exit 8 $LINK ../sorc/gdas.cd/ush/ufsda . +fi #------------------------------ #--add DA Monitor file (NOTE: ensure to use correct version) #------------------------------ -cd ${pwd}/../fix ||exit 8 +if [ -d ../sorc/gsi_monitor.fd ]; then + + cd ${pwd}/../fix ||exit 8 [[ -d gdas ]] && rm -rf gdas mkdir -p gdas cd gdas @@ -170,12 +192,12 @@ cd ${pwd}/../fix ||exit 8 $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar . 
$LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt . $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt . -cd ${pwd}/../jobs ||exit 8 + cd ${pwd}/../jobs ||exit 8 $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/jobs/JGDAS_ATMOS_VMINMON . $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/jobs/JGFS_ATMOS_VMINMON . $LINK ../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/jobs/JGDAS_ATMOS_VERFOZN . $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/jobs/JGDAS_ATMOS_VERFRAD . -cd ${pwd}/../parm ||exit 8 + cd ${pwd}/../parm ||exit 8 [[ -d mon ]] && rm -rf mon mkdir -p mon cd mon @@ -184,12 +206,12 @@ cd ${pwd}/../parm ||exit 8 # $LINK ../../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm . $LINK ../../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm . # $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm . -cd ${pwd}/../scripts ||exit 8 + cd ${pwd}/../scripts ||exit 8 $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/scripts/exgdas_atmos_vminmon.sh . $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/scripts/exgfs_atmos_vminmon.sh . $LINK ../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh . $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh . -cd ${pwd}/../ush ||exit 8 + cd ${pwd}/../ush ||exit 8 $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_costs.pl . $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_gnorms.pl . $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_reduct.pl . 
@@ -199,6 +221,7 @@ cd ${pwd}/../ush ||exit 8 $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh . $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh . $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh . +fi #------------------------------ @@ -244,101 +267,120 @@ for ufs_utilsexe in \ done # GSI -for exe in enkf.x gsi.x; do +if [ -d ../sorc/gsi_enkf.fd ]; then + for exe in enkf.x gsi.x; do [[ -s $exe ]] && rm -f $exe $LINK ../sorc/gsi_enkf.fd/install/bin/$exe . -done + done +fi # GSI Utils -for exe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ +if [ -d ../sorc/gsi_utils.fd ]; then + for exe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x \ interp_inc.x recentersigp.x;do [[ -s $exe ]] && rm -f $exe $LINK ../sorc/gsi_utils.fd/install/bin/$exe . -done + done +fi # GSI Monitor -for exe in oznmon_horiz.x oznmon_time.x radmon_angle.x \ +if [ -d ../sorc/gsi_monitor.fd ]; then + for exe in oznmon_horiz.x oznmon_time.x radmon_angle.x \ radmon_bcoef.x radmon_bcor.x radmon_time.x; do [[ -s $exe ]] && rm -f $exe $LINK ../sorc/gsi_monitor.fd/install/bin/$exe . -done + done +fi -for gldasexe in gdas2gldas gldas2gdas gldas_forcing gldas_model gldas_post gldas_rst; do +if [ -d ../sorc/gldas.fd ]; then + for gldasexe in gdas2gldas gldas2gdas gldas_forcing gldas_model gldas_post gldas_rst; do [[ -s $gldasexe ]] && rm -f $gldasexe $LINK ../sorc/gldas.fd/exec/$gldasexe . 
-done + done +fi # GDASApp -for gdasexe in fv3jedi_addincrement.x fv3jedi_diffstates.x fv3jedi_ensvariance.x fv3jedi_hofx.x \ +if [ -d ../sorc/gdas.cd ]; then + for gdasexe in fv3jedi_addincrement.x fv3jedi_diffstates.x fv3jedi_ensvariance.x fv3jedi_hofx.x \ fv3jedi_var.x fv3jedi_convertincrement.x fv3jedi_dirac.x fv3jedi_error_covariance_training.x \ fv3jedi_letkf.x fv3jedi_convertstate.x fv3jedi_eda.x fv3jedi_forecast.x fv3jedi_plot_field.x \ fv3jedi_data_checker.py fv3jedi_enshofx.x fv3jedi_hofx_nomodel.x fv3jedi_testdata_downloader.py; do [[ -s $gdasexe ]] && rm -f $gdasexe $LINK ../sorc/gdas.cd/build/bin/$gdasexe . -done + done +fi #------------------------------ #--link source code directories #------------------------------ cd ${pwd}/../sorc || exit 8 - [[ -d gsi.fd ]] && rm -rf gsi.fd - $SLINK gsi_enkf.fd/src/gsi gsi.fd + if [ -d gsi_enkf.fd ]; then + [[ -d gsi.fd ]] && rm -rf gsi.fd + $SLINK gsi_enkf.fd/src/gsi gsi.fd - [[ -d enkf.fd ]] && rm -rf enkf.fd - $SLINK gsi_enkf.fd/src/enkf enkf.fd + [[ -d enkf.fd ]] && rm -rf enkf.fd + $SLINK gsi_enkf.fd/src/enkf enkf.fd + fi - [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd - $SLINK gsi_utils.fd/src/netcdf_io/calc_analysis.fd calc_analysis.fd + if [ -d gsi_utils.fd ]; then + [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd + $SLINK gsi_utils.fd/src/netcdf_io/calc_analysis.fd calc_analysis.fd - [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd - $SLINK gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd calc_increment_ens.fd + [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd + $SLINK gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd calc_increment_ens.fd - [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd - $SLINK gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd calc_increment_ens_ncio.fd + [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd + $SLINK gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd 
calc_increment_ens_ncio.fd - [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd - $SLINK gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd getsfcensmeanp.fd + [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd + $SLINK gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd getsfcensmeanp.fd - [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd - $SLINK gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd getsigensmeanp_smooth.fd + [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd + $SLINK gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd getsigensmeanp_smooth.fd - [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd - $SLINK gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd getsigensstatp.fd + [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd + $SLINK gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd getsigensstatp.fd - [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd - $SLINK gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd recentersigp.fd + [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd + $SLINK gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd recentersigp.fd - [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd - $SLINK gsi_utils.fd/src/netcdf_io/interp_inc.fd interp_inc.fd + [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd + $SLINK gsi_utils.fd/src/netcdf_io/interp_inc.fd interp_inc.fd + fi - [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd - $SLINK gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd + if [ -d gsi_monitor.fd ] ; then + [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd + $SLINK gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd - [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd - $SLINK gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd + [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd + $SLINK gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd - [[ -d 
radmon_angle.fd ]] && rm -rf radmon_angle.fd - $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd + [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd + $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd - [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd - $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd + [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd + $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd - [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd - $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd + [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd + $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd - [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd - $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd + [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd + $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd + fi + [[ -d gfs_ncep_post.fd ]] && rm -rf gfs_ncep_post.fd $SLINK upp.fd/sorc/ncep_post.fd gfs_ncep_post.fd for prog in fregrid make_hgrid make_solo_mosaic ; do + [[ -d ${prog}.fd ]] && rm -rf ${prog}.fd $SLINK ufs_utils.fd/sorc/fre-nctools.fd/tools/$prog ${prog}.fd done for prog in global_cycle.fd \ emcsfc_ice_blend.fd \ emcsfc_snow2mdl.fd ;do + [[ -d $prog ]] && rm -rf $prog $SLINK ufs_utils.fd/sorc/$prog $prog done @@ -354,9 +396,12 @@ cd ${pwd}/../sorc || exit 8 $SLINK gfs_wafs.fd/sorc/wafs_setmissing.fd wafs_setmissing.fd fi - for prog in gdas2gldas.fd gldas2gdas.fd gldas_forcing.fd gldas_model.fd gldas_post.fd gldas_rst.fd ;do + if [ -d gldas.fd ]; then + for prog in gdas2gldas.fd gldas2gdas.fd gldas_forcing.fd gldas_model.fd gldas_post.fd gldas_rst.fd 
;do + [[ -d $prog ]] && rm -rf $prog $SLINK gldas.fd/sorc/$prog $prog - done + done + fi #------------------------------ # copy $HOMEgfs/parm/config/config.base.nco.static as config.base for operations From 490de7bae1322cbaa8ee3d52406034c8edf62dd3 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 26 Jul 2022 22:05:59 -0400 Subject: [PATCH 06/16] Remove obsolete platforms (WCOSS1, Dell, Cray, Theia) references. (#922) Removes code related to decommissioned HPC platforms WCOSS 1 (Dell & Cray) and Theia. Some references remain in scripts outside the global-workflow repo that are cloned as part of `checkout.sh`. Scripts from the `driver` directory that were hard-wired for one of the WCOSS1 platforms are also removed. Additionally, this commit also switches to using serial netCDF for resolutions C48. C96, C192. Running with parallel netCDF (on Hera) gave errors when testing at C96 for the deterministic forecast. If someone gives a very compelling reason to use parallel netCDF at these resolutions as default, I would be very interested in what they have to say. 
Closes #680 --- docs/doxygen/compile | 18 +- docs/doxygen/mainpage.h | 8 +- docs/note_fixfield.txt | 5 +- driver/gdas/para_config.gdas_analysis_high | 33 - driver/gdas/para_config.gdas_enkf_fcst | 66 -- .../para_config.gdas_enkf_inflate_recenter | 28 - .../gdas/para_config.gdas_enkf_innovate_obs | 100 --- driver/gdas/para_config.gdas_enkf_post | 29 - driver/gdas/para_config.gdas_enkf_select_obs | 33 - driver/gdas/para_config.gdas_enkf_update | 33 - driver/gdas/para_config.gdas_forecast_high | 32 - driver/gdas/para_config.gdas_gldas | 63 -- driver/gdas/run_JGDAS_NCEPPOST.sh | 110 --- driver/gdas/test_emcsfc.sh | 46 - driver/gdas/test_exgdas_tropc.sh | 126 --- driver/gdas/test_gdas_analysis_high.sh | 90 -- driver/gdas/test_gdas_enkf_fcst.sh | 89 -- .../gdas/test_gdas_enkf_inflate_recenter.sh | 83 -- driver/gdas/test_gdas_enkf_innovate_obs.sh | 96 -- driver/gdas/test_gdas_enkf_post.sh | 79 -- driver/gdas/test_gdas_enkf_select_obs.sh | 89 -- driver/gdas/test_gdas_enkf_update.sh | 83 -- driver/gdas/test_gdas_forecast_high.sh | 83 -- driver/gdas/test_gdas_gldas.sh | 127 --- driver/gdas/test_jgdas_tropc_cray.sh | 133 --- driver/gdas/test_jgdas_tropcy_qc_reloc.ecf | 126 --- .../gdas/test_jgdas_tropcy_qc_reloc_cray.sh | 154 ---- driver/gdas/test_jgdas_verfozn.sh | 82 -- driver/gdas/test_jgdas_verfozn_cray.sh | 79 -- driver/gdas/test_jgdas_verfrad.sh | 74 -- driver/gdas/test_jgdas_verfrad_cray.sh | 71 -- driver/gdas/test_jgdas_vminmon.sh | 73 -- driver/gdas/test_jgdas_vminmon_cray.sh | 69 -- driver/gfs/para_config.gfs_analysis | 31 - driver/gfs/para_config.gfs_forecast_high | 32 - driver/gfs/para_config.gfs_forecast_low | 32 - driver/gfs/test_emcsfc.sh | 46 - driver/gfs/test_gfs_analysis.sh | 90 -- driver/gfs/test_gfs_forecast_high.sh | 82 -- driver/gfs/test_gfs_forecast_low.sh | 82 -- driver/gfs/test_jgfs_cyclone_tracker.ecf | 121 --- driver/gfs/test_jgfs_cyclone_tracker_cray.sh | 167 ---- driver/gfs/test_jgfs_tropcy_qc_reloc.ecf | 124 --- 
driver/gfs/test_jgfs_tropcy_qc_reloc_cray.sh | 155 ---- driver/gfs/test_jgfs_vminmon.sh | 84 -- driver/gfs/test_jgfs_vminmon_cray.sh | 68 -- driver/product/change_gfs_downstream_date.sh | 46 - driver/product/change_gfs_downstream_envir.sh | 35 - driver/product/compile_gfsv152.sh | 20 - driver/product/compile_gfsv160.sh | 17 - driver/product/compile_gfsv160_hera.sh | 15 - driver/product/rmo_clean_gfs_output | 23 - ...un_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_00 | 129 --- ...un_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_06 | 129 --- ...un_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_12 | 132 --- ...un_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_18 | 130 --- .../product/run_JGDAS_ATMOS_GEMPAK_dell.sh_00 | 118 --- .../product/run_JGDAS_ATMOS_GEMPAK_dell.sh_06 | 119 --- .../product/run_JGDAS_ATMOS_GEMPAK_dell.sh_12 | 118 --- .../product/run_JGDAS_ATMOS_GEMPAK_dell.sh_18 | 119 --- ...un_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_00 | 123 --- ...un_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_06 | 123 --- ...un_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_12 | 123 --- ...un_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_18 | 123 --- .../run_JGFS_ATMOS_AWIPS_G2_dell.sh_00 | 121 --- .../run_JGFS_ATMOS_AWIPS_G2_dell.sh_06 | 121 --- .../run_JGFS_ATMOS_AWIPS_G2_dell.sh_12 | 121 --- .../run_JGFS_ATMOS_AWIPS_G2_dell.sh_18 | 121 --- .../product/run_JGFS_ATMOS_FBWIND_dell.sh_00 | 113 --- .../product/run_JGFS_ATMOS_FBWIND_dell.sh_06 | 113 --- .../product/run_JGFS_ATMOS_FBWIND_dell.sh_12 | 113 --- .../product/run_JGFS_ATMOS_FBWIND_dell.sh_18 | 113 --- .../run_JGFS_ATMOS_GEMPAK_META_dell.sh_00 | 131 --- .../run_JGFS_ATMOS_GEMPAK_META_dell.sh_06 | 131 --- .../run_JGFS_ATMOS_GEMPAK_META_dell.sh_12 | 133 --- .../run_JGFS_ATMOS_GEMPAK_META_dell.sh_18 | 131 --- ..._JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_00 | 123 --- ..._JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_06 | 123 --- ..._JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_12 | 125 --- ..._JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_18 | 123 --- ...un_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_00 | 139 --- 
...un_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_06 | 138 --- ...un_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_12 | 140 --- ...un_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_18 | 138 --- .../product/run_JGFS_ATMOS_GEMPAK_dell.sh_00 | 119 --- .../product/run_JGFS_ATMOS_GEMPAK_dell.sh_06 | 119 --- .../product/run_JGFS_ATMOS_GEMPAK_dell.sh_12 | 119 --- .../product/run_JGFS_ATMOS_GEMPAK_dell.sh_18 | 119 --- ...un_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_00 | 116 --- ...un_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_06 | 116 --- ...un_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_12 | 117 --- ...un_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_18 | 116 --- driver/product/run_JGFS_NCEPPOST | 136 --- driver/product/run_JGFS_NCEPPOST.sh | 115 --- driver/product/run_gfs_downstream.sh | 30 - driver/product/run_gfs_downstream_change | 12 - driver/product/run_postsnd.sh | 191 ---- driver/product/run_postsnd.sh.cray | 160 ---- driver/product/run_postsnd.sh.dell | 191 ---- driver/product/run_postsnd.sh.hera | 185 ---- driver/product/run_postsnd.sh.jet | 172 ---- .../analysis/create/jenkfgdas_diag.ecf | 2 +- .../analysis/create/jenkfgdas_select_obs.ecf | 2 +- .../analysis/create/jenkfgdas_update.ecf | 2 +- .../analysis/recenter/ecen/jenkfgdas_ecen.ecf | 2 +- .../analysis/recenter/jenkfgdas_sfc.ecf | 2 +- .../enkfgdas/forecast/jenkfgdas_fcst.ecf | 2 +- .../enkfgdas/post/jenkfgdas_post_master.ecf | 2 +- .../atmos/analysis/jgdas_atmos_analysis.ecf | 2 +- .../analysis/jgdas_atmos_analysis_calc.ecf | 2 +- .../analysis/jgdas_atmos_analysis_diag.ecf | 2 +- .../gdas/atmos/init/jgdas_atmos_gldas.ecf | 2 +- .../atmos/post/jgdas_atmos_post_master.ecf | 2 +- .../jgdas_atmos_chgres_forenkf.ecf | 2 +- .../gdas/atmos/verf/jgdas_atmos_verfozn.ecf | 2 +- .../gdas/atmos/verf/jgdas_atmos_verfrad.ecf | 2 +- .../gdas/atmos/verf/jgdas_atmos_vminmon.ecf | 2 +- ecf/scripts/gdas/jgdas_forecast.ecf | 2 +- .../gdas/wave/init/jgdas_wave_init.ecf | 2 +- .../gdas/wave/post/jgdas_wave_postpnt.ecf | 2 +- .../gdas/wave/post/jgdas_wave_postsbs.ecf | 2 +- 
.../gdas/wave/prep/jgdas_wave_prep.ecf | 2 +- .../atmos/analysis/jgfs_atmos_analysis.ecf | 2 +- .../analysis/jgfs_atmos_analysis_calc.ecf | 2 +- .../gfs/atmos/gempak/jgfs_atmos_gempak.ecf | 2 +- .../atmos/gempak/jgfs_atmos_gempak_meta.ecf | 2 +- .../gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf | 2 +- .../gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf | 2 +- .../gempak/jgfs_atmos_pgrb2_spec_gempak.ecf | 2 +- .../gfs/atmos/post/jgfs_atmos_post_master.ecf | 2 +- .../jgfs_atmos_awips_master.ecf | 2 +- .../awips_g2/jgfs_atmos_awips_g2_master.ecf | 2 +- .../bufr_sounding/jgfs_atmos_postsnd.ecf | 2 +- .../bulletins/jgfs_atmos_fbwind.ecf | 2 +- .../grib2_wafs/jgfs_atmos_wafs_blending.ecf | 2 +- .../jgfs_atmos_wafs_blending_0p25.ecf | 2 +- .../grib2_wafs/jgfs_atmos_wafs_grib2.ecf | 2 +- .../grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf | 2 +- .../grib_wafs/jgfs_atmos_wafs_master.ecf | 2 +- .../post_processing/jgfs_atmos_wafs_gcip.ecf | 2 +- .../gfs/atmos/verf/jgfs_atmos_vminmon.ecf | 2 +- ecf/scripts/gfs/jgfs_forecast.ecf | 2 +- ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf | 2 +- .../gfs/wave/post/jgfs_wave_post_bndpnt.ecf | 2 +- .../wave/post/jgfs_wave_post_bndpntbll.ecf | 2 +- .../gfs/wave/post/jgfs_wave_postpnt.ecf | 2 +- .../gfs/wave/post/jgfs_wave_postsbs.ecf | 2 +- .../gfs/wave/post/jgfs_wave_prdgen_bulls.ecf | 2 +- .../wave/post/jgfs_wave_prdgen_gridded.ecf | 2 +- ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf | 2 +- env/WCOSS_C.env | 245 ------ env/WCOSS_DELL_P3.env | 244 ----- jobs/JGFS_ATMOS_CYCLONE_TRACKER | 19 +- jobs/JGLOBAL_WAVE_GEMPAK | 8 +- jobs/rocoto/prep.sh | 2 +- modulefiles/module-setup.csh.inc | 32 +- modulefiles/module_base.wcoss_dell_p3.lua | 67 -- modulefiles/modulefile.ww3.wcoss_dell_p3.lua | 23 - modulefiles/workflow_utils.wcoss_dell_p3.lua | 34 - parm/config/config.aero | 27 +- parm/config/config.base.nco.static | 2 +- parm/config/config.coupled_ic | 4 +- parm/config/config.fv3 | 8 +- parm/config/config.fv3.nco.static | 10 +- parm/config/config.post | 8 +- 
parm/config/config.prepbufr | 4 +- parm/config/config.resources | 71 +- parm/config/config.vrfy | 17 +- scripts/exgdas_atmos_chgres_forenkf.sh | 3 +- scripts/exgdas_atmos_nceppost.sh | 30 +- scripts/exgdas_efsoi.sh | 3 +- scripts/exgdas_efsoi_update.sh | 5 +- scripts/exgdas_enkf_ecen.sh | 1 - scripts/exgdas_enkf_fcst.sh | 1 - scripts/exgdas_enkf_post.sh | 1 - scripts/exgdas_enkf_select_obs.sh | 1 - scripts/exgdas_enkf_sfc.sh | 1 - scripts/exgdas_enkf_update.sh | 1 - scripts/exgfs_wave_init.sh | 17 +- scripts/exgfs_wave_post_gridded_sbs.sh | 53 +- scripts/exgfs_wave_post_pnt.sh | 55 +- scripts/exglobal_atmos_analysis.sh | 19 +- scripts/exglobal_atmos_analysis_calc.sh | 9 +- scripts/exglobal_atmos_sfcanl.sh | 1 - scripts/exglobal_diag.sh | 11 +- scripts/run_gfsmos_master.sh.cray | 330 ------- scripts/run_gfsmos_master.sh.dell | 765 ---------------- scripts/run_gfsmos_master.sh.hera | 833 ------------------ sorc/build_gfs_util.sh | 4 +- sorc/build_ufs_utils.sh | 3 - sorc/enkf_chgres_recenter.fd/makefile | 27 - sorc/enkf_chgres_recenter_nc.fd/makefile | 28 - sorc/fbwndgfs.fd/fbwndgfs.f | 0 sorc/fbwndgfs.fd/makefile.GENERIC | 81 -- sorc/fbwndgfs.fd/makefile.theia | 81 -- sorc/fbwndgfs.fd/makefile.wcoss | 81 -- sorc/fbwndgfs.fd/makefile.wcoss_cray | 81 -- sorc/fbwndgfs.fd/makefile.wcoss_dell_p3 | 81 -- sorc/fv3nc2nemsio.fd/makefile.sh | 24 - sorc/gaussian_sfcanl.fd/CMakeLists.txt | 2 + sorc/gaussian_sfcanl.fd/Makefile | 22 - sorc/gaussian_sfcanl.fd/makefile.sh | 19 - .../weight_gen/CMakeLists.txt | 1 + sorc/gaussian_sfcanl.fd/weight_gen/README | 4 +- .../weight_gen/run.theia.sh | 20 +- .../weight_gen/scrip.fd/CMakeLists.txt | 18 + .../weight_gen/scrip.fd/make.sh | 60 -- .../weight_gen/scrip.fd/makefile | 14 - sorc/gfs_bufr.fd/bfrhdr.f | 0 sorc/gfs_bufr.fd/bfrize.f | 0 sorc/gfs_bufr.fd/buff.f | 0 sorc/gfs_bufr.fd/calwxt_gfs_baldwin.f | 0 sorc/gfs_bufr.fd/calwxt_gfs_ramer.f | 0 sorc/gfs_bufr.fd/funcphys.f | 0 sorc/gfs_bufr.fd/gfsbufr.f | 0 sorc/gfs_bufr.fd/gslp.f | 0 
sorc/gfs_bufr.fd/lcl.f | 0 sorc/gfs_bufr.fd/machine.f | 0 sorc/gfs_bufr.fd/makefile_module | 79 -- sorc/gfs_bufr.fd/meteorg.f | 0 sorc/gfs_bufr.fd/modstuff1.f | 0 sorc/gfs_bufr.fd/mstadb.f | 0 sorc/gfs_bufr.fd/newsig1.f | 0 sorc/gfs_bufr.fd/physcons.f | 0 sorc/gfs_bufr.fd/rsearch.f | 0 sorc/gfs_bufr.fd/svp.f | 0 sorc/gfs_bufr.fd/tdew.f | 0 sorc/gfs_bufr.fd/terp3.f | 0 sorc/gfs_bufr.fd/vintg.f | 0 sorc/link_workflow.sh | 14 +- sorc/machine-setup.sh | 92 +- sorc/ncl.setup | 40 +- sorc/reg2grb2.fd/reg2grb2.f | 0 sorc/reg2grb2.fd/regdiag.f | 0 sorc/regrid_nemsio.fd/Makefile | 159 ---- sorc/regrid_nemsio.fd/Makefile.dependency | 9 - sorc/supvit.fd/makefile | 31 - sorc/supvit.fd/supvit_modules.f | 0 sorc/syndat_getjtbul.fd/getjtbul.f | 0 sorc/syndat_getjtbul.fd/makefile | 23 - sorc/syndat_maksynrc.fd/makefile | 21 - sorc/syndat_maksynrc.fd/maksynrc.f | 0 sorc/syndat_qctropcy.fd/makefile | 23 - sorc/syndat_qctropcy.fd/qctropcy.f | 0 sorc/tave.fd/makefile | 25 - sorc/tave.fd/tave.f | 0 sorc/tocsbufr.fd/makefile_module | 82 -- sorc/tocsbufr.fd/tocsbufr.f | 0 sorc/vint.fd/makefile | 27 - sorc/vint.fd/vint.f | 0 ush/forecast_predet.sh | 15 +- ush/fv3gfs_downstream_nems.sh | 16 +- ush/fv3gfs_regrid_nemsio.sh | 1 - ush/gsi_utils.py | 16 +- ush/load_fv3gfs_modules.sh | 37 +- ush/wave_grib2_sbs.sh | 27 +- ush/wave_grid_interp_sbs.sh | 21 +- ush/wave_outp_cat.sh | 23 +- ush/wave_outp_spec.sh | 5 +- ush/wave_prnc_cur.sh | 11 +- util/modulefiles/gfs_util.wcoss_dell_p3 | 22 - util/sorc/compile_gfs_util_wcoss.sh | 20 +- .../mkgfsawps.fd/compile_mkgfsawps_wcoss.sh | 14 +- util/sorc/mkgfsawps.fd/makefile.wcoss_cray | 56 -- util/sorc/mkgfsawps.fd/makefile.wcoss_dell_p3 | 53 -- .../overgridid.fd/compile_overgridid_wcoss.sh | 12 +- .../rdbfmsua.fd/compile_rdbfmsua_wcoss.sh | 14 +- util/sorc/rdbfmsua.fd/makefile.wcoss_cray | 84 -- util/sorc/rdbfmsua.fd/makefile.wcoss_dell_p3 | 84 -- .../webtitle.fd/compile_webtitle_wcoss.sh | 14 +- workflow/README_ecflow.md | 142 +-- workflow/hosts.py | 6 
+- workflow/hosts/wcoss_dell_p3.yaml | 19 - 273 files changed, 431 insertions(+), 14430 deletions(-) delete mode 100644 driver/gdas/para_config.gdas_analysis_high delete mode 100755 driver/gdas/para_config.gdas_enkf_fcst delete mode 100755 driver/gdas/para_config.gdas_enkf_inflate_recenter delete mode 100755 driver/gdas/para_config.gdas_enkf_innovate_obs delete mode 100755 driver/gdas/para_config.gdas_enkf_post delete mode 100755 driver/gdas/para_config.gdas_enkf_select_obs delete mode 100755 driver/gdas/para_config.gdas_enkf_update delete mode 100755 driver/gdas/para_config.gdas_forecast_high delete mode 100755 driver/gdas/para_config.gdas_gldas delete mode 100755 driver/gdas/run_JGDAS_NCEPPOST.sh delete mode 100755 driver/gdas/test_emcsfc.sh delete mode 100755 driver/gdas/test_exgdas_tropc.sh delete mode 100755 driver/gdas/test_gdas_analysis_high.sh delete mode 100755 driver/gdas/test_gdas_enkf_fcst.sh delete mode 100755 driver/gdas/test_gdas_enkf_inflate_recenter.sh delete mode 100755 driver/gdas/test_gdas_enkf_innovate_obs.sh delete mode 100755 driver/gdas/test_gdas_enkf_post.sh delete mode 100755 driver/gdas/test_gdas_enkf_select_obs.sh delete mode 100755 driver/gdas/test_gdas_enkf_update.sh delete mode 100755 driver/gdas/test_gdas_forecast_high.sh delete mode 100755 driver/gdas/test_gdas_gldas.sh delete mode 100755 driver/gdas/test_jgdas_tropc_cray.sh delete mode 100755 driver/gdas/test_jgdas_tropcy_qc_reloc.ecf delete mode 100755 driver/gdas/test_jgdas_tropcy_qc_reloc_cray.sh delete mode 100755 driver/gdas/test_jgdas_verfozn.sh delete mode 100755 driver/gdas/test_jgdas_verfozn_cray.sh delete mode 100755 driver/gdas/test_jgdas_verfrad.sh delete mode 100755 driver/gdas/test_jgdas_verfrad_cray.sh delete mode 100755 driver/gdas/test_jgdas_vminmon.sh delete mode 100755 driver/gdas/test_jgdas_vminmon_cray.sh delete mode 100755 driver/gfs/para_config.gfs_analysis delete mode 100755 driver/gfs/para_config.gfs_forecast_high delete mode 100755 
driver/gfs/para_config.gfs_forecast_low delete mode 100755 driver/gfs/test_emcsfc.sh delete mode 100755 driver/gfs/test_gfs_analysis.sh delete mode 100755 driver/gfs/test_gfs_forecast_high.sh delete mode 100755 driver/gfs/test_gfs_forecast_low.sh delete mode 100755 driver/gfs/test_jgfs_cyclone_tracker.ecf delete mode 100755 driver/gfs/test_jgfs_cyclone_tracker_cray.sh delete mode 100755 driver/gfs/test_jgfs_tropcy_qc_reloc.ecf delete mode 100755 driver/gfs/test_jgfs_tropcy_qc_reloc_cray.sh delete mode 100755 driver/gfs/test_jgfs_vminmon.sh delete mode 100755 driver/gfs/test_jgfs_vminmon_cray.sh delete mode 100755 driver/product/change_gfs_downstream_date.sh delete mode 100755 driver/product/change_gfs_downstream_envir.sh delete mode 100755 driver/product/compile_gfsv152.sh delete mode 100755 driver/product/compile_gfsv160.sh delete mode 100755 driver/product/compile_gfsv160_hera.sh delete mode 100755 driver/product/rmo_clean_gfs_output delete mode 100755 driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_00 delete mode 100755 driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_06 delete mode 100755 driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_12 delete mode 100755 driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_18 delete mode 100755 driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_00 delete mode 100755 driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_06 delete mode 100755 driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_12 delete mode 100755 driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_18 delete mode 100755 driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_00 delete mode 100755 driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_06 delete mode 100755 driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_12 delete mode 100755 driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_18 delete mode 100755 driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_00 delete mode 100755 driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_06 delete mode 
100755 driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_12 delete mode 100755 driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_18 delete mode 100755 driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_00 delete mode 100755 driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_06 delete mode 100755 driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_12 delete mode 100755 driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_18 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_00 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_06 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_12 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_18 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_00 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_06 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_12 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_18 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_00 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_06 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_12 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_18 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_00 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_06 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_12 delete mode 100755 driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_18 delete mode 100755 driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_00 delete mode 100755 driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_06 delete mode 100755 driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_12 delete mode 100755 driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_18 delete mode 100755 driver/product/run_JGFS_NCEPPOST delete mode 100755 
driver/product/run_JGFS_NCEPPOST.sh delete mode 100755 driver/product/run_gfs_downstream.sh delete mode 100755 driver/product/run_gfs_downstream_change delete mode 100755 driver/product/run_postsnd.sh delete mode 100755 driver/product/run_postsnd.sh.cray delete mode 100755 driver/product/run_postsnd.sh.dell delete mode 100644 driver/product/run_postsnd.sh.hera delete mode 100755 driver/product/run_postsnd.sh.jet delete mode 100755 env/WCOSS_C.env delete mode 100755 env/WCOSS_DELL_P3.env delete mode 100644 modulefiles/module_base.wcoss_dell_p3.lua delete mode 100644 modulefiles/modulefile.ww3.wcoss_dell_p3.lua delete mode 100644 modulefiles/workflow_utils.wcoss_dell_p3.lua delete mode 100755 scripts/run_gfsmos_master.sh.cray delete mode 100755 scripts/run_gfsmos_master.sh.dell delete mode 100755 scripts/run_gfsmos_master.sh.hera delete mode 100755 sorc/enkf_chgres_recenter.fd/makefile delete mode 100644 sorc/enkf_chgres_recenter_nc.fd/makefile mode change 100755 => 100644 sorc/fbwndgfs.fd/fbwndgfs.f delete mode 100755 sorc/fbwndgfs.fd/makefile.GENERIC delete mode 100755 sorc/fbwndgfs.fd/makefile.theia delete mode 100755 sorc/fbwndgfs.fd/makefile.wcoss delete mode 100755 sorc/fbwndgfs.fd/makefile.wcoss_cray delete mode 100755 sorc/fbwndgfs.fd/makefile.wcoss_dell_p3 delete mode 100755 sorc/fv3nc2nemsio.fd/makefile.sh delete mode 100755 sorc/gaussian_sfcanl.fd/Makefile delete mode 100755 sorc/gaussian_sfcanl.fd/makefile.sh create mode 100644 sorc/gaussian_sfcanl.fd/weight_gen/CMakeLists.txt create mode 100644 sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/CMakeLists.txt delete mode 100755 sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/make.sh delete mode 100755 sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/makefile mode change 100755 => 100644 sorc/gfs_bufr.fd/bfrhdr.f mode change 100755 => 100644 sorc/gfs_bufr.fd/bfrize.f mode change 100755 => 100644 sorc/gfs_bufr.fd/buff.f mode change 100755 => 100644 sorc/gfs_bufr.fd/calwxt_gfs_baldwin.f mode change 100755 => 100644 
sorc/gfs_bufr.fd/calwxt_gfs_ramer.f mode change 100755 => 100644 sorc/gfs_bufr.fd/funcphys.f mode change 100755 => 100644 sorc/gfs_bufr.fd/gfsbufr.f mode change 100755 => 100644 sorc/gfs_bufr.fd/gslp.f mode change 100755 => 100644 sorc/gfs_bufr.fd/lcl.f mode change 100755 => 100644 sorc/gfs_bufr.fd/machine.f delete mode 100755 sorc/gfs_bufr.fd/makefile_module mode change 100755 => 100644 sorc/gfs_bufr.fd/meteorg.f mode change 100755 => 100644 sorc/gfs_bufr.fd/modstuff1.f mode change 100755 => 100644 sorc/gfs_bufr.fd/mstadb.f mode change 100755 => 100644 sorc/gfs_bufr.fd/newsig1.f mode change 100755 => 100644 sorc/gfs_bufr.fd/physcons.f mode change 100755 => 100644 sorc/gfs_bufr.fd/rsearch.f mode change 100755 => 100644 sorc/gfs_bufr.fd/svp.f mode change 100755 => 100644 sorc/gfs_bufr.fd/tdew.f mode change 100755 => 100644 sorc/gfs_bufr.fd/terp3.f mode change 100755 => 100644 sorc/gfs_bufr.fd/vintg.f mode change 100755 => 100644 sorc/reg2grb2.fd/reg2grb2.f mode change 100755 => 100644 sorc/reg2grb2.fd/regdiag.f delete mode 100644 sorc/regrid_nemsio.fd/Makefile delete mode 100644 sorc/regrid_nemsio.fd/Makefile.dependency delete mode 100644 sorc/supvit.fd/makefile mode change 100755 => 100644 sorc/supvit.fd/supvit_modules.f mode change 100755 => 100644 sorc/syndat_getjtbul.fd/getjtbul.f delete mode 100755 sorc/syndat_getjtbul.fd/makefile delete mode 100755 sorc/syndat_maksynrc.fd/makefile mode change 100755 => 100644 sorc/syndat_maksynrc.fd/maksynrc.f delete mode 100755 sorc/syndat_qctropcy.fd/makefile mode change 100755 => 100644 sorc/syndat_qctropcy.fd/qctropcy.f delete mode 100755 sorc/tave.fd/makefile mode change 100755 => 100644 sorc/tave.fd/tave.f delete mode 100755 sorc/tocsbufr.fd/makefile_module mode change 100755 => 100644 sorc/tocsbufr.fd/tocsbufr.f delete mode 100755 sorc/vint.fd/makefile mode change 100755 => 100644 sorc/vint.fd/vint.f delete mode 100755 util/modulefiles/gfs_util.wcoss_dell_p3 delete mode 100755 util/sorc/mkgfsawps.fd/makefile.wcoss_cray 
delete mode 100755 util/sorc/mkgfsawps.fd/makefile.wcoss_dell_p3 delete mode 100755 util/sorc/rdbfmsua.fd/makefile.wcoss_cray delete mode 100755 util/sorc/rdbfmsua.fd/makefile.wcoss_dell_p3 delete mode 100644 workflow/hosts/wcoss_dell_p3.yaml diff --git a/docs/doxygen/compile b/docs/doxygen/compile index 226f267ac9..1273edab98 100755 --- a/docs/doxygen/compile +++ b/docs/doxygen/compile @@ -1,20 +1,22 @@ -#!/bin/ksh -x +#!/bin/bash -machine=${1:-${machine:-"WCOSS_C"}} +set -ex + +machine=${1:-${machine:-"HERA"}} machine=$(echo $machine | tr '[a-z]' '[A-Z]') if [ $machine = "HERA" ]; then doxygen=/usr/bin/doxygen -elif [ $machine = "WCOSS_C" ]; then - doxygen=/gpfs/hps3/emc/hwrf/noscrub/soft/doxygen-1.8.10/bin/doxygen -elif [ $machine = "WCOSS" ]; then - doxygen=/hwrf/noscrub/soft/doxygen-1.8.10/bin/doxygen +elif [ $machine = "ORION" ]; then + doxygen=/bin/doxygen elif [ $machine = "JET" ]; then doxygen=/contrib/doxygen/1.8.10/bin/doxygen else - echo "machine $machine is unrecognized, ABORT!" - echo "try system doxygen" + echo "machine $machine is unrecognized!" + echo "trying system doxygen" doxygen=$(which doxygen) + rc=$? 
+ [[ $rc -ne 0 ]] && (echo "doxygen not found, ABORT!"; exit 1) fi $doxygen diff --git a/docs/doxygen/mainpage.h b/docs/doxygen/mainpage.h index ac26cd861c..40e8e6f946 100644 --- a/docs/doxygen/mainpage.h +++ b/docs/doxygen/mainpage.h @@ -21,7 +21,7 @@ This is a very much a work in progress and any issues should be reported back an To setup an experiment, a python script \c setup_expt.py (located in \c fv3gfs/ush) can be used: $> setup_expt.py -h - usage: setup_expt.py [-h] [--machine {HERA,WCOSS_C}] --pslot PSLOT + usage: setup_expt.py [-h] --pslot PSLOT [--configdir CONFIGDIR] [--idate IDATE] [--icsdir ICSDIR] [--resdet RESDET] [--resens RESENS] [--comrot COMROT] [--expdir EXPDIR] [--nens NENS] [--cdump CDUMP] @@ -32,8 +32,6 @@ To setup an experiment, a python script \c setup_expt.py (located in \ optional arguments: -h, --help show this help message and exit - --machine machine name - (default: WCOSS_C) --pslot parallel experiment name [REQUIRED] (default: None) --configdir full path to directory containing the config files @@ -58,8 +56,8 @@ To setup an experiment, a python script \c setup_expt.py (located in \ The above script creates directories \c EXPDIR and \c COMROT. It will make links for initial conditions from a location provided via the \c --icsdir argument for a chosen resolution for the control \c --resdet and the ensemble \c --resens. Experiment name is controlled by the input argument \c --pslot. The script will ask user input in case any of the directories already exist. It will copy experiment configuration files into the \c EXPDIR from \c CONFIGDIR. Sample initial conditions for a few resolutions are available at:
-Theia: /scratch4/NCEPDEV/da/noscrub/Rahul.Mahajan/ICS
-WCOSS Cray: /gpfs/hps/emc/da/noscrub/Rahul.Mahajan/ICS +Hera: TODO: /path/here/for/initial/conditions
+Orion: TODO: /path/here/for/initial/conditions
Next step is for the user to go through the individual config files (atleast \c config.base) and customize the experiment configuration based on user needs. A stock configuration will be provided at a later stage, but it is imperative that the user understand the steps involved in the system. diff --git a/docs/note_fixfield.txt b/docs/note_fixfield.txt index 292947353c..3b22de5e13 100644 --- a/docs/note_fixfield.txt +++ b/docs/note_fixfield.txt @@ -2,9 +2,8 @@ For EMC, the fix fields for running the model are not included in git repository. They are saved locally on all platforms -Venus/Mars: /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix -Surge/Luna: /gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix -Hera: /scratch1/NCEPDEV/global/glopara/fix +Hera: /scratch1/NCEPDEV/global/glopara/fix +Orion: /work/noaa/global/glopara/fix ------------------------------------------------------------------------------ 09/28/2018 diff --git a/driver/gdas/para_config.gdas_analysis_high b/driver/gdas/para_config.gdas_analysis_high deleted file mode 100644 index 053c201541..0000000000 --- a/driver/gdas/para_config.gdas_analysis_high +++ /dev/null @@ -1,33 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=YES -export SENDCOM=YES -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff 
--git a/driver/gdas/para_config.gdas_enkf_fcst b/driver/gdas/para_config.gdas_enkf_fcst deleted file mode 100755 index 34d6551cb9..0000000000 --- a/driver/gdas/para_config.gdas_enkf_fcst +++ /dev/null @@ -1,66 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgdas=$NWTEST/gdas.${gdas_ver} -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - -# Set beginning and ending ensemble member on EnKF group. -if [[ $ENSGRP -eq 1 ]]; then - export ENSBEG=1 - export ENSEND=8 -elif [[ $ENSGRP -eq 2 ]]; then - export ENSBEG=9 - export ENSEND=16 -elif [[ $ENSGRP -eq 3 ]]; then - export ENSBEG=17 - export ENSEND=24 -elif [[ $ENSGRP -eq 4 ]]; then - export ENSBEG=25 - export ENSEND=32 -elif [[ $ENSGRP -eq 5 ]]; then - export ENSBEG=33 - export ENSEND=40 -elif [[ $ENSGRP -eq 6 ]]; then - export ENSBEG=41 - export ENSEND=48 -elif [[ $ENSGRP -eq 7 ]]; then - export ENSBEG=49 - export ENSEND=56 -elif [[ $ENSGRP -eq 8 ]]; then - export ENSBEG=57 - export ENSEND=64 -elif [[ $ENSGRP -eq 9 ]]; then - export ENSBEG=65 - export ENSEND=72 -elif [[ $ENSGRP -eq 10 ]]; then - export ENSBEG=73 - export ENSEND=80 -else - echo "***ERROR*** INVALID ENSGRP=$ENSGRP" - exit -fi diff --git a/driver/gdas/para_config.gdas_enkf_inflate_recenter b/driver/gdas/para_config.gdas_enkf_inflate_recenter deleted file mode 100755 index fab9c4a333..0000000000 --- a/driver/gdas/para_config.gdas_enkf_inflate_recenter +++ /dev/null @@ -1,28 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export 
COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=YES -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_enkf_innovate_obs b/driver/gdas/para_config.gdas_enkf_innovate_obs deleted file mode 100755 index a2827e1dfb..0000000000 --- a/driver/gdas/para_config.gdas_enkf_innovate_obs +++ /dev/null @@ -1,100 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - -# Set beginning and ending ensemble member on EnKF group. 
-if [[ $ENSGRP -eq 1 ]]; then - export ENSBEG=1 - export ENSEND=4 -elif [[ $ENSGRP -eq 2 ]]; then - export ENSBEG=5 - export ENSEND=8 -elif [[ $ENSGRP -eq 3 ]]; then - export ENSBEG=9 - export ENSEND=12 -elif [[ $ENSGRP -eq 4 ]]; then - export ENSBEG=13 - export ENSEND=16 -elif [[ $ENSGRP -eq 5 ]]; then - export ENSBEG=17 - export ENSEND=20 -elif [[ $ENSGRP -eq 6 ]]; then - export ENSBEG=21 - export ENSEND=24 -elif [[ $ENSGRP -eq 7 ]]; then - export ENSBEG=25 - export ENSEND=28 -elif [[ $ENSGRP -eq 8 ]]; then - export ENSBEG=29 - export ENSEND=32 -elif [[ $ENSGRP -eq 9 ]]; then - export ENSBEG=33 - export ENSEND=36 -elif [[ $ENSGRP -eq 10 ]]; then - export ENSBEG=37 - export ENSEND=40 -elif [[ $ENSGRP -eq 11 ]]; then - export ENSBEG=41 - export ENSEND=44 -elif [[ $ENSGRP -eq 12 ]]; then - export ENSBEG=45 - export ENSEND=48 -elif [[ $ENSGRP -eq 13 ]]; then - export ENSBEG=49 - export ENSEND=52 -elif [[ $ENSGRP -eq 14 ]]; then - export ENSBEG=53 - export ENSEND=56 -elif [[ $ENSGRP -eq 15 ]]; then - export ENSBEG=57 - export ENSEND=60 -elif [[ $ENSGRP -eq 16 ]]; then - export ENSBEG=61 - export ENSEND=64 -elif [[ $ENSGRP -eq 17 ]]; then - export ENSBEG=65 - export ENSEND=68 -elif [[ $ENSGRP -eq 18 ]]; then - export ENSBEG=69 - export ENSEND=72 -elif [[ $ENSGRP -eq 19 ]]; then - export ENSBEG=73 - export ENSEND=76 -elif [[ $ENSGRP -eq 20 ]]; then - export ENSBEG=77 - export ENSEND=80 -else - echo "***ERROR*** INVALID ENSGRP=$ENSGRP" - exit -fi diff --git a/driver/gdas/para_config.gdas_enkf_post b/driver/gdas/para_config.gdas_enkf_post deleted file mode 100755 index 58d68cc3e1..0000000000 --- a/driver/gdas/para_config.gdas_enkf_post +++ /dev/null @@ -1,29 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=YES -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export 
GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_enkf_select_obs b/driver/gdas/para_config.gdas_enkf_select_obs deleted file mode 100755 index 04121afe92..0000000000 --- a/driver/gdas/para_config.gdas_enkf_select_obs +++ /dev/null @@ -1,33 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_enkf_update b/driver/gdas/para_config.gdas_enkf_update deleted file mode 100755 index 03b5385f8d..0000000000 --- a/driver/gdas/para_config.gdas_enkf_update +++ /dev/null @@ -1,33 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=YES -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export 
GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -export GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_forecast_high b/driver/gdas/para_config.gdas_forecast_high deleted file mode 100755 index 4a25ae632a..0000000000 --- a/driver/gdas/para_config.gdas_forecast_high +++ /dev/null @@ -1,32 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gespath=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgdas=$NWTEST/gdas.${gdas_ver} -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - diff --git a/driver/gdas/para_config.gdas_gldas b/driver/gdas/para_config.gdas_gldas deleted file mode 100755 index a766601e22..0000000000 --- a/driver/gdas/para_config.gdas_gldas +++ /dev/null @@ -1,63 +0,0 @@ -################################################################## -# This parameter file set up the environment variables used in -# the J-Job scripts to run the gldas land analsis system -################################################################## -# module load for untility in run_all of lsf directory - -export GDATE=${PDY:-20191025} -export cyc=${cyc:-00} -export 
model=${model:-noah} -export QUEUE=debug -export PROJECT_CODE=NLDAS-T2O - -#export workdir=/gpfs/dell2/emc/modeling/noscrub/$LOGNAME/global-workflow -export workdir=${workdir:-$HOMEgfs} - -export gldas_ver=${gldas_ver:-v2.3.0} -export HOMEgldas=${HOMEgldas:-$workdir/sorc/gldas.fd} -export FIXgldas=${FIXgldas:-$HOMEgldas/fix} -export PARMgldas=${PARMgldas:-$HOMEgldas/parm} -export EXECgldas=${EXECgldas:-$HOMEgldas/exec} -export USHgldas=${USHgldas:-$HOMEgldas/ush} - -#export FIXgldas=/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_gldas -#export topodir=/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_fv3_gmted2010/C768/ -export FIXgldas=${FIXgldas:-$workdir/fix/fix_gldas} -export topodir=${topodir:-$workdir/fix/fix_fv3_gmted2010/C768} - - -export COM_IN=${COM_IN:-$workdir/com/gldas.${gldas_ver}} -export COM_OUT=${COM_OUT:-$COM_IN} - -# set up com and decom directory used -# GFS OPS -if [ ${model} == 'noah' ]; then -export COMROOT=/gpfs/dell1/nco/ops/com -export DCOMROOT=/gpfs/dell1/nco/ops/dcom -fi - -##GFS Retrospective test -#if [ ${model} == 'noah' ]; then -#export COMROOT=/gpfs/dell2/emc/retros/noscrub/Youlong.Xia/com -#export DCOMROOT=/gpfs/dell2/emc/retros/noscrub/Youlong.Xia/com -#fi - -# gfsv16 systhesis test -if [ ${model} == 'noahmp' ]; then -export COMROOT=/gpfs/dell2/emc/retros/noscrub/Youlong.Xia/com -export DCOMROOT=/gpfs/dell2/emc/retros/noscrub/Youlong.Xia/com -fi - -export COMINgdas=${COMINgdas:-${COMROOT}/gfs/prod} -export DCOMIN=${DCOM_IN:-${DCOMROOT}/prod} - - -#export DATA_IN=/gpfs/dell2/ptmp/$LOGNAME/tmpnwprd -#export jlogfile=$DATA_IN/jlogfile -export DATA=/gpfs/dell2/ptmp/$LOGNAME/tmpnwprd -export jlogfile=$DATA/jlogfile - -export SENDCOM=${SENDCOM:-YES} -export SENDECF=${SENDECF:-NO} -export SENDDBN=${SENDDBN:-NO} - diff --git a/driver/gdas/run_JGDAS_NCEPPOST.sh b/driver/gdas/run_JGDAS_NCEPPOST.sh deleted file mode 100755 index 374ba25b50..0000000000 --- a/driver/gdas/run_JGDAS_NCEPPOST.sh +++ /dev/null @@ 
-1,110 +0,0 @@ -#!/bin/sh - -#BSUB -o out_gdas_nemsio_p25_para_mpiio.%J -#BSUB -e err_gdas_nemsio_p25_para_mpiio.%J -#BSUB -J NEMSPOST -#BSUB -extsched 'CRAYLINUX[]' -R '1*{select[craylinux && !vnode]} + 96*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -#BSUB -W 00:40 -#BSUB -q dev -#BSUB -P GFS-T2O -#BSUB -M 1000 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/Hui-Ya.Chuang/nems_sample_output_T1534 - -set -x - -# specify user's own post working directory for testing -export svndir=/gpfs/hps/emc/global/noscrub/Hui-Ya.Chuang/post_trunk -export MP_LABELIO=yes -export OMP_NUM_THREADS=1 -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2048M -export MP_LABELIO=yes -export MP_STDOUTMODE=ordered - -############################################ -# Loading module -############################################ -. $MODULESHOME/init/ksh -module load PrgEnv-intel ESMF-intel-haswell/3_1_0rp5 cfp-intel-sandybridge iobuf craype-hugepages2M craype-haswell -#module load cfp-intel-sandybridge/1.1.0 -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_envir/1.1.0 -module load prod_util/1.0.4 -module load grib_util/1.0.3 - -# specify PDY (the cycle start yyyymmdd) and cycle -export PDY=20170212 -export cyc=00 -export cycle=t${cyc}z - - -# specify the directory environment for executable, it's either para or prod -export envir=prod - -# set up running dir - -export user=$(whoami) -export DATA=/gpfs/hps/ptmp/${user}/gdas.${PDY}${cyc}_nemsio_mpiio -mkdir -p $DATA -cd $DATA -rm -f ${DATA}/* - -#################################### -# Specify RUN Name and model -#################################### -export NET=gfs -#export RUN=gdas - -#################################### -# Determine Job Output Name on System -#################################### -#export pgmout="OUTPUT.${pid}" -#export pgmerr=errfile - -#################################### -# SENDSMS - Flag Events on SMS -# SENDCOM - Copy Files From TMPDIR to $COMOUT -# SENDDBN - Issue DBNet Client Calls -# RERUN - 
Rerun posts from beginning (default no) -# VERBOSE - Specify Verbose Output in global_postgp.sh -#################################### -export SAVEGES=NO -export SENDSMS=NO -export SENDCOM=YES -export SENDDBN=NO -export RERUN=NO -export VERBOSE=YES - -export HOMEglobal=${svndir} -export HOMEgfs=${svndir} -export HOMEgdas=${svndir} - -############################################## -# Define COM directories -############################################## -export COMIN=/gpfs/hps/emc/global/noscrub/Hui-Ya.Chuang/para_look_alike/gdas.${PDY} -# specify my own COMOUT dir to mimic operations -export COMOUT=$DATA -mkdir -p $COMOUT - -date - -#export OUTTYP=4 -# need to set FIXglobal to global share superstructure if testing post in non -# super structure environement -export FIXglobal=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/q3fy17_final/global_shared.v14.1.0/fix -export APRUN="aprun -j 1 -n24 -N8 -d1 -cc depth" -export nemsioget=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/q3fy17_final/global_shared.v14.1.0/exec/nemsio_get - -export KEEPDATA=YES -#export POSTGRB2TBL=$HOMEglobal/parm/params_grib2_tbl_new -$HOMEgfs/jobs/JGDAS_NCEPPOST - -############################################################# - -date - -echo $? - - - diff --git a/driver/gdas/test_emcsfc.sh b/driver/gdas/test_emcsfc.sh deleted file mode 100755 index 5a89e75202..0000000000 --- a/driver/gdas/test_emcsfc.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/sh - -#-------------------------------------------------------------- -# Run the JGDAS_EMCSFC_SFC_PREP j-job on wcoss cray -# -# Invoke as follows: -# 'cat $script | bsub' -#-------------------------------------------------------------- - -#BSUB -oo emcsfc.log -#BSUB -eo emcsfc.log -#BSUB -q dev_shared -#BSUB -R rusage[mem=2000] -#BSUB -J emcsfc -#BSUB -P GFS-T2O -#BSUB -cwd . 
-#BSUB -W 0:03 - -set -x - -export cyc="00" -export job=emcsfc_sfc_prep_${cyc} -export KEEPDATA="YES" -export SENDECF="NO" -export SENDCOM="YES" -export RUN_ENVIR="nco" - -export DATA="/gpfs/hps/stmp/$LOGNAME/tmpnwprd/${job}" -export jlogfile="/gpfs/hps/stmp/$LOGNAME/jlogfile" - -module load prod_envir/1.1.0 - -export envir="prod" -export COMROOT="/gpfs/hps/stmp/${LOGNAME}"${COMROOT} - -export NWROOT="/gpfs/hps/emc/global/noscrub/George.Gayno/q3fy17_final" -export global_shared_ver="v14.1.0" - -module load grib_util/1.0.3 -module load prod_util/1.0.5 - -export jobid="LLgdas_emcsfc_sfc_prep" -export gdas_ver="v14.1.0" -$NWROOT/gdas.${gdas_ver}/jobs/JGDAS_EMCSFC_SFC_PREP - -exit 0 diff --git a/driver/gdas/test_exgdas_tropc.sh b/driver/gdas/test_exgdas_tropc.sh deleted file mode 100755 index 351c29c81b..0000000000 --- a/driver/gdas/test_exgdas_tropc.sh +++ /dev/null @@ -1,126 +0,0 @@ -#BSUB -J JGDAS_TROPC_TEST_06 -#BSUB -o /ptmpp2/Qingfu.Liu/com2/jgdas_tropc_06.o%J -#BSUB -e /ptmpp2/Qingfu.Liu/com2/jgdas_tropc_06.o%J -#BSUB -L /bin/sh -#BSUB -q debug -#BSUB -W 00:30 -#BSUB -cwd /ptmpp2/Qingfu.Liu/com2 -#BSUB -P GFS-T2O -##BSUB -R rusage[mem=5000] -##BSUB -R affinity[core] -#BSUB -n 1 -#BSUB -R span[ptile=1] -#BSUB -R affinity[cpu(32):distribute=balance] -#BSUB -a poe -#BSUB -x - -#%include -#%include -. 
/usrx/local/Modules/default/init/ksh -module use /nwprod2/modulefiles -module load grib_util/v1.0.1 -module load prod_util/v1.0.1 -module load ibmpe ics lsf -#module load grib_util/v1.0.1 -module use /nwpara2/modulefiles -module load util_shared/v1.0.3 - -set -x - -export OMP_NUM_THREADS=32 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:32 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -#export envir=dev2 -export envir=prod -export cyc=06 -export job=jgdas_tropc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -export NWROOT=/global/save/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -export DATAROOT=/ptmpp2/Qingfu.Liu - -#export COMROOT=/com2 -export COMROOT=/ptmpp2/Qingfu.Liu/com2 -export COMDATEROOT=/com2 -export DATA_DUMPJB=/ptmpp2/Qingfu.Liu/com2/111 - -#export DCOMROOT=/dcom - -export COMROOTp1=/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t00z - -#which setpdy.sh -setpdy.sh -. PDY - -#export PDY=20160216 - -export COMIN=/com/gfs/prod/gdas.${PDY} - -#export utilscript=/nwprod2/util/ush -#export utilexec=/nwprod2/util/exec -#export utilities=/nwprod2/util/ush -#export HOMEutil=/nwprod2/util -#export HOMEgfs=/nwprod2/util -#export HOMEgraph=/nwprod2/util - -export utilscript=$NWROOT/util/ush -export utilexec=$NWROOT/util/exec -export utilities=$NWROOT/util/ush -export HOMEutil=$NWROOT/util -#export HOMEgfs=$NWROOT/util -export HOMEgraph=$NWROOT/util - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . 
$VERSION_FILE -else - ecflow_client --abort - exit -fi - -#export shared_global_home=$NWROOT/shared_nco_20160129 -export HOMEgfs=$NWROOT/gfs.v13.0.0 -#export HOMEgdas=$NWROOT/gdas.v13.0.0 -export HOMEgdas=$NWROOT/gdas_nco_20160129 - -# CALL executable job script here -#export HOMERELO=${HOMEgdas} -#export HOMESYND=${HOMERELO} -#export envir_getges=prod -$HOMEgdas/jobs/JGDAS_TROPC - -if [ $? -ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job that creates GFS TC track forecasts -###################################################################### - -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gdas/test_gdas_analysis_high.sh b/driver/gdas/test_gdas_analysis_high.sh deleted file mode 100755 index b57727c2d2..0000000000 --- a/driver/gdas/test_gdas_analysis_high.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_analysis_high.o%J -#BSUB -e gdas_analysis_high.o%J -#BSUB -J gdas_analysis_high -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=240 -export ntasks=480 -export ptile=2 -export threads=12 - -export CDATE=2017030806 - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_analysis_high_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para 
-export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export crtm_ver=2.2.4 -export prod_envir_ver=1.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. $MODULESHOME/init/sh -module load crtm-intel/${crtm_ver} -module load prod_envir/$prod_envir_ver -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export OMP_NUM_THREADS_CY=24 -export NTHREADS=$OMP_NUM_THREADS_CY -export NTHREADS_GSI=$threads -export NTHSTACK=1024000000 - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_analysis_high -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ANALYSIS_HIGH - -exit diff --git a/driver/gdas/test_gdas_enkf_fcst.sh b/driver/gdas/test_gdas_enkf_fcst.sh deleted file mode 100755 index 097cb2711c..0000000000 --- a/driver/gdas/test_gdas_enkf_fcst.sh +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/sh --login - -#BSUB -L 
/bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_fcst.o%J -#BSUB -e gdas_enkf_fcst.o%J -#BSUB -J gdas_enkf_fcst -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -# 20 nodes = 18 compute nodes + 2 i/o nodes -# set WRT_GROUP=2 for 2 i/o nodes (see ../parm/gdas_enkf_fcst.parm) -# set WRTPE_PER_GROUP=4 to match ptile -export NODES=20 -export ntasks=80 -export ptile=4 -export threads=6 - -export CDATE=2017011900 -export ENSGRP=1 - -export grp=$ENSGRP -if [[ $grp -lt 10 ]]; then - export grp=0$grp -fi - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_fcst_grp${grp}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages4M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true -export NTHREADS_EFCS=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_fcst -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_FCST - -exit diff --git a/driver/gdas/test_gdas_enkf_inflate_recenter.sh b/driver/gdas/test_gdas_enkf_inflate_recenter.sh deleted file mode 100755 index d188ac80e2..0000000000 --- a/driver/gdas/test_gdas_enkf_inflate_recenter.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_inflate_recenter.o%J -#BSUB -e gdas_enkf_inflate_recenter.o%J -#BSUB -J gdas_enkf_inflate_recenter -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x -ulimit -s unlimited -ulimit -a - -export NODES=14 -export ntasks=80 -export ptile=6 -export threads=1 - -export CDATE=2017011900 - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | 
cut -c9-10) -export job=gdas_enkf_inflate_recenter_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 - - -############################################################# -# Load modules -############################################################# -. $MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages2M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export NTHREADS_ENKF=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_inflate_recenter -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_INFLATE_RECENTER - -exit diff --git a/driver/gdas/test_gdas_enkf_innovate_obs.sh b/driver/gdas/test_gdas_enkf_innovate_obs.sh deleted file mode 100755 index 15313e809a..0000000000 --- a/driver/gdas/test_gdas_enkf_innovate_obs.sh +++ /dev/null @@ -1,96 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_innovate_obs.o%J -#BSUB -e gdas_enkf_innovate_obs.o%J -#BSUB -J 
gdas_enkf_innovate_obs -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=12 -export ntasks=144 -export ptile=12 -export threads=2 - -export CDATE=2017030806 -export ENSGRP=1 - -export grp=$ENSGRP -if [[ $grp -lt 20 ]]; then - export grp=0$grp -fi - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_innovate_obs_grp${grp}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export crtm_ver=2.2.4 -export prod_envir_ver=1.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load crtm-intel/${crtm_ver} -module load prod_envir/$prod_envir_ver -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export OMP_NUM_THREADS_CY=24 -export NTHREADS=$OMP_NUM_THREADS_CY -export NTHREADS_GSI=$threads -export NTHSTACK=1024000000 - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_innovate_obs -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_INNOVATE_OBS - -exit diff --git a/driver/gdas/test_gdas_enkf_post.sh b/driver/gdas/test_gdas_enkf_post.sh deleted file mode 100755 index 472e3f26ba..0000000000 --- a/driver/gdas/test_gdas_enkf_post.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_post.o%J -#BSUB -e gdas_enkf_post.o%J -#BSUB -J gdas_enkf_post -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=80 -export ntasks=80 -export ptile=1 -export threads=24 - -export CDATE=2017011900 - -############################################################# -# Specify whether the run is production or development 
-############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_post_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 - - -############################################################# -# Load modules -############################################################# -. $MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages4M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export NTHREADS_EPOS=$threads -export FORT_BUFFERED=true - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_post -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_POST - -exit diff --git a/driver/gdas/test_gdas_enkf_select_obs.sh b/driver/gdas/test_gdas_enkf_select_obs.sh deleted file mode 100755 index 8636b74710..0000000000 --- a/driver/gdas/test_gdas_enkf_select_obs.sh +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P 
GFS-T2O -#BSUB -o gdas_enkf_select_obs.o%J -#BSUB -e gdas_enkf_select_obs.o%J -#BSUB -J gdas_enkf_select_obs -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=12 -export ntasks=144 -export ptile=12 -export threads=2 - -export CDATE=2017030806 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_select_obs_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export crtm_ver=2.2.4 -export prod_envir_ver=1.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load crtm-intel/${crtm_ver} -module load prod_envir/$prod_envir_ver -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export OMP_NUM_THREADS_CY=24 -export NTHREADS=$OMP_NUM_THREADS_CY -export NTHREADS_GSI=$threads -export NTHSTACK=1024000000 - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_select_obs -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_SELECT_OBS - -exit diff --git a/driver/gdas/test_gdas_enkf_update.sh b/driver/gdas/test_gdas_enkf_update.sh deleted file mode 100755 index cf0d8f4318..0000000000 --- a/driver/gdas/test_gdas_enkf_update.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_update.o%J -#BSUB -e gdas_enkf_update.o%J -#BSUB -J gdas_enkf_update -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=40 -export ntasks=240 -export ptile=6 -export threads=4 - -export CDATE=2017030806 - - -############################################################# -# Specify whether the run is production or development 
-############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_update_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. $MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export NTHREADS_ENKF=$nthreads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_update -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_UPDATE - -exit diff --git a/driver/gdas/test_gdas_forecast_high.sh b/driver/gdas/test_gdas_forecast_high.sh deleted file mode 100755 index a8d8af6116..0000000000 --- a/driver/gdas/test_gdas_forecast_high.sh +++ 
/dev/null @@ -1,83 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_forecast_high.o%J -#BSUB -e gdas_forecast_high.o%J -#BSUB -J gdas_forecast_high -#BSUB -q devonprod -#BSUB -M 768 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -# 55 nodes = 49 compute nodes + 6 i/o nodes -# set WRT_GROUP=6 for 6 i/o nodes (see ../parm/gdas_forecast_high.parm) -# set WRTPE_PER_GROUP=4 to match ptile -export NODES=55 -export ntasks=220 -export ptile=4 -export threads=2 - -export CDATE=2017012506 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_forecast_high_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export gfs_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.5 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages16M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=1024m -export OMP_NUM_THREADS=$threads -export NTHREADS=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_forecast_high -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_FORECAST_HIGH - -exit - diff --git a/driver/gdas/test_gdas_gldas.sh b/driver/gdas/test_gdas_gldas.sh deleted file mode 100755 index 92e40ced3d..0000000000 --- a/driver/gdas/test_gdas_gldas.sh +++ /dev/null @@ -1,127 +0,0 @@ -#!/bin/sh - -#BSUB -o /gpfs/dell2/ptmp/Youlong.Xia/gdas_gldas.o%J -#BSUB -e /gpfs/dell2/ptmp/Youlong.Xia/gdas_gldas.o%J -#BSUB -P NLDAS-T2O -#BSUB -J jgdas_gldas_12 -#BSUB -W 01:00 -#BSUB -q dev -#BSUB -n 112 # number of tasks -#BSUB -R span[ptile=28] # tasks per node -#BSUB -R affinity[core(1):distribute=balance] -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' - -set -x - -date - -export NODES=4 -export ntasks=112 -export ptile=28 -export threads=1 - -export launcher="mpirun -n" -export npe_gaussian=6 -export npe_gldas=112 -export APRUN_GAUSSIAN="$launcher $npe_gaussian" -export APRUN_GLDAS="$launcher $npe_gldas" - -export CDATE=${CDATE:-2019110700} - -############################################################# -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=$(echo $CDATE | cut -c1-8) - 
-export PDY1=$(expr $PDY - 1) - -export cyc=$(echo $CDATE | cut -c9-10) -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load theUtilities module -##################################### -module purge -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_envir/1.0.2 -module load prod_util/1.1.4 -module load grib_util/1.1.0 -module load NetCDF/4.5.0 -########################################### -# Now set up environment -########################################### -module list - -############################################ -# GDAS META PRODUCT GENERATION -############################################ -# set envir=prod or para to test with data in prod or para -# export envir=prod - export envir=para - -export RUN=${RUN:-gdas} - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gldas_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - export HOMEgw=/gpfs/dell2/emc/modeling/noscrub/$LOGNAME/global-workflow -if [ $envir = "prod" ] ; then -# This setting is for testing with GDAS (production) - export HOMEgldas=/nwprod/gldas.${gldas_ver} - export COMIN=/gpfs/dell1/nco/ops/com/gfs/prod/${RUN}.${PDY} ### NCO PROD - export COMROOT=/gpfs/dell1/nco/ops/com - export DCOMROOT=/gpfs/dell1/nco/ops/dcom -elif [ $envir = "para" ] ; then -# This setting is for testing with GDAS (production) - export HOMEgldas=${HOMEgldas:-$HOMEgfs/sorc/gldas.fd} - export COMIN=/gpfs/dell1/nco/ops/com/gfs/prod/${RUN}.${PDY} ### NCO PROD - export COMROOT=/gpfs/dell1/nco/ops/com - export DCOMROOT=/gpfs/dell1/nco/ops/dcom -else -# export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/prfv3rt1/${RUN}.${PDY}/${cyc}/nawips ### EMC PARA Realtime -# export 
COMINgdas=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/prfv3rt1/${RUN} ### EMC PARA Realtime - export workdir=${workdir:-$HOMEgfs} - export HOMEgldas=$workdir/sorc/gldas.fd - export COMROOT=$workdir/com - export DCOMROOT=$workdir/dcom - export COMINgdas=$COMROOT - export DCOMIN=$DCOMROOT - export COMIN=$workdir/comin - export COMOUT=$workdir/comout -fi - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTncdc $COMOUTukmet $COMOUTecmwf -fi - -# Set user specific variables -############################################################# -#export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$HOMEgw/driver/gdas/para_config.gdas_gldas -#export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs -export JOBGLOBAL=$HOMEgldas/jobs - -############################################################# -# Execute job -############################################################# -echo $JOBGLOBAL/JGDAS_ATMOS_GLDAS -$JOBGLOBAL/JGDAS_ATMOS_GLDAS - -exit - diff --git a/driver/gdas/test_jgdas_tropc_cray.sh b/driver/gdas/test_jgdas_tropc_cray.sh deleted file mode 100755 index c24b17a903..0000000000 --- a/driver/gdas/test_jgdas_tropc_cray.sh +++ /dev/null @@ -1,133 +0,0 @@ -#!/bin/bash -#BSUB -J JGDAS_TROPC -#BSUB -W 0:30 -####BSUB -extsched 'CRAYLINUX[]' -R '1*{order[slots] select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -BSUB -extsched 'CRAYLINUX[]' -#BSUB -o /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/test_jgdas_tropc.o%J -#BSUB -e /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/test_jgdas_tropc.o%J -#BSUB -q "dev" -#BSUB -P "GFS-T2O" -#BSUB -M 500 - -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_util -module unload grib_util -module load grib_util/1.0.3 -module load util_shared/1.0.3 -##module load crtm-intel/2.2.3 -module load PrgEnv-intel craype cray-mpich ESMF-intel-haswell/3_1_0rp5 -module load NCL-gnu-sandybridge/6.3.0 -module load gcc -module list - -set -x - 
-export OMP_NUM_THREADS=12 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:24 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=para -#export envir=prod -export cyc=06 -export job=jgdas_tropc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -#export NWROOT=/global/save/Qingfu.Liu -export NWROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu - -export DATAROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export DATAROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu -#export COMROOT=/com2 -export COMROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export COMDATEROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMDATEROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -#export COMDATEROOT=/gpfs/tp2/nco/ops/com -export DATA_DUMPJB=/gpfs/hps/ptmp/Qingfu.Liu/com/111 - -#export DCOMROOT=/dcom -export DCOMROOT=/gpfs/tp1/nco/ops/dcom - -#export COMROOTp1=/gpfs/gp1/nco/ops/com -export COMROOTp1=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMROOTp1=/gpfs/tp2/nco/ops/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t${cyc}z -export LOUD=on -export BACK=on - -##which setpdy.sh -##setpdy.sh -##. PDY - -#export PDY=20150723 -#export PDY=20140814 -export PDY=20170108 - -#export COMIN=/gpfs/tp2/nco/ops/com/gfs/prod/gdas.${PDY} -export COMIN=/gpfs/hps/ptmp/Qingfu.Liu/com/gfs/para/gdas.${PDY} -#export COMIN=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/para/gdas.${PDY} - -#export NWPROOT=/gpfs/tp1/nco/ops/nwprod/util -#export utilscript=$NWPROOT/util/ush -#export utilexec=$NWPROOT/util/exec -#export utilities=$NWPROOT/util/ush -#export HOMEutil=$NWPROOT/util -#export HOMEgraph=$NWPROOT/util - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . 
$VERSION_FILE -else - ecflow_client --abort - exit -fi - -#export DUMP=/gpfs/hps/nco/ops/nwprod/hwrf_dump.v3.2.1/ush/dumpjb -#export HOMEobsproc_dump=/gpfs/hps/nco/ops/nwprod/hwrf_dump.v3.2.1 -export DUMP=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/obsproc_dump.tkt-351.crayport/ush/dumpjb -export HOMEobsproc_dump=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/obsproc_dump.tkt-351.crayport -#export FIXobsproc_bufr_dumplist=/gpfs/hps/nco/ops/nwprod/obsproc_bufr_dumplist.v1.2.0/fix -export FIXobsproc_bufr_dumplist=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/fix -export HOMEobsproc_shared_bufr_dumplist=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver -#export HOMEobroc_bufr_dumplist=/gpfs/hps/nco/ops/nwprod/obsproc_bufr_dumplist.v1.2.0 - -export HOMEgfs=$NWROOT/gfs.v14.1.0 -export HOMEgdas=$NWROOT/gdas.v14.1.0 - -# CALL executable job script here -#export HOMERELO=${HOMEgdas} -#export HOMESYND=${HOMERELO} -#export envir_getges=prod -$HOMEgdas/jobs/JGDAS_TROPC - -if [ $? -ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job that creates GFS TC track forecasts -###################################################################### - -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gdas/test_jgdas_tropcy_qc_reloc.ecf b/driver/gdas/test_jgdas_tropcy_qc_reloc.ecf deleted file mode 100755 index c413513dcf..0000000000 --- a/driver/gdas/test_jgdas_tropcy_qc_reloc.ecf +++ /dev/null @@ -1,126 +0,0 @@ -#BSUB -J Relocation_TEST_06 -#BSUB -o /ptmpp2/Qingfu.Liu/com2/gdas_tropcy_qc_reloc_06.o%J -#BSUB -e /ptmpp2/Qingfu.Liu/com2/gdas_tropcy_qc_reloc_06.o%J -#BSUB -L /bin/sh -#BSUB -q debug -#BSUB -W 00:30 -#BSUB 
-cwd /ptmpp2/Qingfu.Liu/com2 -#BSUB -P GFS-T2O -##BSUB -R rusage[mem=5000] -##BSUB -R affinity[core] -#BSUB -n 7 -#BSUB -R span[ptile=1] -#BSUB -R affinity[cpu(32):distribute=balance] -#BSUB -a poe -#BSUB -x - -#%include -#%include -. /usrx/local/Modules/default/init/ksh -module use /nwprod2/modulefiles -module load grib_util/v1.0.1 -module load prod_util/v1.0.1 -module load ibmpe ics lsf - -set -x - -export OMP_NUM_THREADS=32 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:32 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=dev2 -export cyc=06 -export job=gdas_tropcy_qc_reloc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -export NWROOT=/global/save/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -export DATAROOT=/ptmpp2/Qingfu.Liu - -#export COMROOT=/com2 -export COMROOT=/ptmpp2/Qingfu.Liu/com2 -export COMDATEROOT=/com2 - -#export DCOMROOT=/dcom - -export COMROOTp1=/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t00z - -#which setpdy.sh -setpdy.sh -. PDY - -export COMINgfs=/com/gfs/prod/gfs.${PDY} -export COMINgdas=/com/gfs/prod/gdas.${PDY} - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -export shared_global_home=$NWROOT/shared_nco_20160129 -export gfs_global_home=$NWROOT/gfs_nco_20160129 -export gdas_global_home=$NWROOT/gdas_nco_20160129 - -export files_override=F - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gdas_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -###################################################################### -############################################################# -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# # syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# # tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) -# -# Usage: bsub < test_jgdas_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gdas.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################## -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gdas/test_jgdas_tropcy_qc_reloc_cray.sh b/driver/gdas/test_jgdas_tropcy_qc_reloc_cray.sh deleted file mode 100755 index 4e757e5085..0000000000 --- a/driver/gdas/test_jgdas_tropcy_qc_reloc_cray.sh +++ /dev/null @@ -1,154 +0,0 @@ -#!/bin/bash -#BSUB -J t1534 -#BSUB -W 0:30 -#BSUB -extsched 'CRAYLINUX[]' -R '1*{order[slots] select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -#BSUB -o /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/gdas_tropcy_qc_reloc_06.o%J -#BSUB -e /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/gdas_tropcy_qc_reloc_06.o%J -###BSUB -o 
t574.stdout.%J -###BSUB -e t574.stderr.%J -#BSUB -q "dev" -#BSUB -P "GFS-T2O" -#BSUB -M 200 -###BSUB -M "60" - - -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_util -module load prod_envir/1.1.0 -module unload grib_util -module load grib_util/1.0.3 -##module load crtm-intel/2.2.3 -module load PrgEnv-intel craype cray-mpich ESMF-intel-haswell/3_1_0rp5 -module list - -#export MKL_CBWR=AVX -#ulimit -s unlimited -#ulimit -a - -set -x - -export OMP_NUM_THREADS=24 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:24 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=para -export cyc=06 -export job=gdas_tropcy_qc_reloc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -#export NWROOT=/global/save/Qingfu.Liu -export NWROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -#export DATAROOT=/ptmpp2/Qingfu.Liu -export DATAROOT=/gpfs/hps/ptmp/Qingfu.Liu - -#export COMROOT=/com2 -#export COMROOT=/ptmpp2/Qingfu.Liu/com2 -#export COMROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMDATEROOT=/com -export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export COMDATEROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -#export DCOMROOT=/dcom - -#export COMROOTp1=/gpfs/gp1/nco/ops/com -#export COMROOTp1=/gpfs/tp1/nco/ops/com -export COMROOTp1=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t${cyc}z - -#which setpdy.sh -#setpdy.sh -#. 
PDY - -export PDY=20140814 -##export PDY=20150723 - -#export COMINgfs=/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/com/gfs/prod/gdas.${PDY} -#export COMINgfs=/gpfs/tp1/nco/ops/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/gpfs/tp1/nco/ops/com/gfs/prod/gdas.${PDY} -export COMINgfs=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/$envir/gfs.${PDY} -export COMINgdas=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/$envir/gdas.${PDY} -export ARCHSYND=${COMINsyn:-$(compath.py gfs/prod/syndat)} -export HOMENHC=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/guidance/storm-data/ncep -#export GETGES_COM=/gpfs/tp1/nco/ops/com -export GETGES_COM=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export GESROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -export shared_global_home=$NWROOT/global_shared.v14.1.0 -export gfs_global_home=$NWROOT/gfs.v14.1.0 -export gdas_global_home=$NWROOT/gdas.v14.1.0 - -export files_override=F -export PROCESS_TROPCY=NO -export copy_back=NO -export SENDCOM=NO -export APRNRELOC="time aprun -b -j1 -n7 -N1 -d24 -cc depth " -export APRNGETTX="time aprun -q -j1 -n1 -N1 -d1 -cc depth" -#export APRNRELOC="time aprun -b -j0 -n7 -N1 -d32 -cc depth" - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gdas_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -###################################################################### -############################################################# -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# # syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# # tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) -# -# Usage: bsub < test_jgdas_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gdas.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################## -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gdas/test_jgdas_verfozn.sh b/driver/gdas/test_jgdas_verfozn.sh deleted file mode 100755 index 699849b782..0000000000 --- a/driver/gdas/test_jgdas_verfozn.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_verfozn.o%J -#BSUB -e gdas_verfozn.o%J -#BSUB -J gdas_verfozn -#BSUB -q dev_shared -#BSUB -n 1 -#BSUB -R affinity[core] -#BSUB -M 100 -#BSUB -W 00:05 -#BSUB -a poe -#BSUB -P GFS-T2O - -##------------------------------------------------------------ -## This is the test driver script for the wcoss/ibm systems -## to run the JGDAS_VERFOZN job. 
-##------------------------------------------------------------ - -set -x - -export PDATE=${PDATE:-2018020806} -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} - - -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_verfozn.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=${envir:-test} - -me=$(hostname | cut -c1) -export DATAROOT=${DATAROOT:-/gpfs/${me}d2/emc/da/noscrub/${LOGNAME}/test_data} -export COMROOT=${COMROOT:-/ptmpp1/${LOGNAME}/com} -export OZN_WORK_DIR=${OZN_WORK_DIR:-/stmpp1/${LOGNAME}/oznmon.${pid}} - -#------------------------------------------------------------ -# Specify versions -# -export gfs_ver=v15.0.0 - - -#------------------------------------------------------------ -# Load modules -# -. /usrx/local/Modules/3.2.9/init/ksh -module use /nwprod2/modulefiles -module load prod_util -module load util_shared - - -module list - - -#------------------------------------------------------------ -# WCOSS environment settings -# -export POE=YES - - -#------------------------------------------------------------ -# Set user specific variables -# -export OZNMON_SUFFIX=${OZNMON_SUFFIX:-testozn} -export NWTEST=${NWTEST:-/gpfs/${me}d2/emc/da/noscrub/${LOGNAME}/gfs.${gfs_ver}} - -export HOMEgfs_ozn=${HOMEgfs_ozn:-${NWTEST}} -export SCRgfs_ozn=${SCRgfs_ozn:-${HOMEgfs_ozn}/scripts} -JOBgfs_ozn=${JOBgfs_ozn:-${HOMEgfs_ozn}/jobs} - -export HOMEoznmon=${HOMEoznmon:-${NWTEST}} -export COM_IN=${COM_IN:-$DATAROOT} -export OZN_TANKDIR=${OZN_TANKDIR:-${COMROOT}/${OZNMON_SUFFIX}} - -#------------------------------------------------------------ -# Execute job -# -${JOBgfs_ozn}/JGDAS_VERFOZN - -exit - diff --git a/driver/gdas/test_jgdas_verfozn_cray.sh b/driver/gdas/test_jgdas_verfozn_cray.sh deleted file mode 100755 index fb2457624a..0000000000 --- a/driver/gdas/test_jgdas_verfozn_cray.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_verfozn.o%J -#BSUB -e gdas_verfozn.o%J -#BSUB -J gdas_verfozn 
-#BSUB -q dev -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -P GFS-T2O -#BSUB -R "select[mem>80] rusage[mem=80]" - -##------------------------------------------------------------ -## This is the test driver script for the cray systems -## to run the JGDAS_VERFOZN job. -##------------------------------------------------------------ - -set -x - -export PDATE=${PDATE:-2018020812} -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} - - -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_verfozn.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=${envir:-test} - -me=$(hostname | cut -c1) -export DATAROOT=${DATAROOT:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}/test_data} -export COMROOT=${COMROOT:-/gpfs/hps2/ptmp/${LOGNAME}/com} -export OZN_WORK_DIR=${OZN_WORK_DIR:-/gpfs/hps2/stmp/${LOGNAME}/oznmon.${pid}} - -#------------------------------------------------------------ -# Specify versions -# -export gfs_ver=v15.0.0 - - -#------------------------------------------------------------ -# Load modules -# -. 
$MODULESHOME/init/ksh - -module load prod_util -module load util_shared - -module list - - -#------------------------------------------------------------ -# WCOSS environment settings -# -export POE=YES - - -#------------------------------------------------------------ -# Set user specific variables -# -export OZNMON_SUFFIX=${OZNMON_SUFFIX:-testozn} -export NWTEST=${NWTEST:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}/gfs.${gfs_ver}} - -export HOMEgfs_ozn=${HOMEgfs_ozn:-${NWTEST}} -export SCRgfs_ozn=${SCRgfs_ozn:-${HOMEgfs_ozn}/scripts} -JOBgfs_ozn=${JOBgfs_ozn:-${HOMEgfs_ozn}/jobs} - -export HOMEoznmon=${HOMEoznmon:-${NWTEST}} -export COM_IN=${COM_IN:-$DATAROOT} -export OZN_TANKDIR=${OZN_TANKDIR:-${COMROOT}/${OZNMON_SUFFIX}} - -#------------------------------------------------------------ -# Execute job -# -${JOBgfs_ozn}/JGDAS_VERFOZN - -exit - diff --git a/driver/gdas/test_jgdas_verfrad.sh b/driver/gdas/test_jgdas_verfrad.sh deleted file mode 100755 index f286185185..0000000000 --- a/driver/gdas/test_jgdas_verfrad.sh +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_verfrad.o%J -#BSUB -e gdas_verfrad.o%J -#BSUB -J gdas_verfrad -#BSUB -q dev_shared -#BSUB -n 1 -#BSUB -R affinity[core] -#BSUB -M 100 -#BSUB -W 00:20 -#BSUB -a poe -#BSUB -P GFS-T2O - -set -x - -export PDATE=${PDATE:-2018022112} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_verfrad.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -me=$(hostname | cut -c1) -export DATAROOT=${DATAROOT:-/gpfs/${me}d2/emc/da/noscrub/${LOGNAME}/test_data} -export COMROOT=${COMROOT:-/ptmpp1/$LOGNAME/com} - - -############################################################# -# Specify versions -############################################################# -export 
gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. /usrx/local/Modules/3.2.9/init/ksh -module use /nwprod2/modulefiles -module load prod_util -module load util_shared - - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export RADMON_SUFFIX=${RADMON_SUFFIX:-testrad} -export NWTEST=${NWTEST:-/gpfs/${me}d2/emc/da/noscrub/Edward.Safford/gfs.${gfs_ver}} -export HOMEgfs=${HOMEgfs:-${NWTEST}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -export HOMEradmon=${HOMEradmon:-${NWTEST}} -export COM_IN=${COM_IN:-${DATAROOT}} -export TANKverf=${TANKverf:-${COMROOT}/${RADMON_SUFFIX}} - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_VERFRAD - -exit - diff --git a/driver/gdas/test_jgdas_verfrad_cray.sh b/driver/gdas/test_jgdas_verfrad_cray.sh deleted file mode 100755 index d9668984ad..0000000000 --- a/driver/gdas/test_jgdas_verfrad_cray.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_verfrad.o%J -#BSUB -e gdas_verfrad.o%J -#BSUB -J gdas_verfrad -#BSUB -q dev -#BSUB -M 100 -#BSUB -W 00:20 -#BSUB -P GFS-T2O -#BSUB -R "select[mem>100] rusage[mem=100]" - -set -x - -export PDATE=${PDATE:-2016100106} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_verfrad.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export 
envir=para - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 -#export global_shared_ver=v14.1.0 -#export gdas_radmon_ver=v2.0.0 -#export radmon_shared_ver=v2.0.4 - - -############################################################# -# Load modules -############################################################# -. $MODULESHOME/init/ksh - -module load prod_util - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export DATAROOT=${DATAROOT:-/gpfs/hps3/emc/da/noscrub/$LOGNAME/test_data} -export COMROOT=${COMROOT:-/gpfs/hps2/ptmp/$LOGNAME/com} -export RADMON_SUFFIX=${RADMON_SUFFIX:-testrad} -export NWTEST=${NWTEST:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}/gfs.${gfs_ver}} -export HOMEgfs=${HOMEgfs:-${NWTEST}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -export HOMEradmon=${HOMEradmon:-${NWTEST}} -export COM_IN=${COM_IN:-${DATAROOT}} -export TANKverf=${TANKverf:-${COMROOT}/${RADMON_SUFFIX}} - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_VERFRAD - -exit - diff --git a/driver/gdas/test_jgdas_vminmon.sh b/driver/gdas/test_jgdas_vminmon.sh deleted file mode 100755 index 983a0e7c24..0000000000 --- a/driver/gdas/test_jgdas_vminmon.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_vminmon.o%J -#BSUB -e gdas_vminmon.o%J -#BSUB -J gdas_vminmon -#BSUB -q dev_shared -#BSUB -n 1 -#BSUB -R affinity[core] -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -a poe -#BSUB -P GFS-T2O - -set -x - -export PDATE=${PDATE:-2016030706} - 
-############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -me=$(hostname | cut -c1) - -export job=gdas_vminmon.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/${me}d2/emc/da/noscrub/Edward.Safford/test_data -export COMROOT=/ptmpp1/$LOGNAME/com - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. /usrx/local/Modules/3.2.9/init/ksh -module use /nwprod2/modulefiles -module load grib_util -module load prod_util - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export MINMON_SUFFIX=${MINMON_SUFFIX:-testminmon} -export NWTEST=${NWTEST:-/gpfs/${me}d2/emc/da/noscrub/Edward.Safford} -export HOMEgfs=${HOMEgfs:-${NWTEST}/gfs.${gfs_ver}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} - -export COM_IN=${COM_IN:-${DATAROOT}} -export M_TANKverf=${M_TANKverf:-${COMROOT}/${MINMON_SUFFIX}} - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_VMINMON - -exit - diff --git a/driver/gdas/test_jgdas_vminmon_cray.sh b/driver/gdas/test_jgdas_vminmon_cray.sh deleted file mode 100755 index 6078d2ade4..0000000000 --- a/driver/gdas/test_jgdas_vminmon_cray.sh +++ /dev/null @@ 
-1,69 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_vminmon.o%J -#BSUB -e gdas_vminmon.o%J -#BSUB -J gdas_vminmon -#BSUB -q dev -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -P GFS-T2O -#BSUB -R "select[mem>80] rusage[mem=80]" - -set -x - -export PDATE=${PDATE:-2016030700} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_vminmon.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=${DATAROOT:-/gpfs/hps3/emc/da/noscrub/$LOGNAME/test_data} -export COMROOT=${COMROOT:-/gpfs/hps2/ptmp/$LOGNAME/com} - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/ksh - -module load prod_util -module load pm5 - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export MINMON_SUFFIX=${MINMON_SUFFIX:-testminmon} -export NWTEST=${NWTEST:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}} -export HOMEgfs=${HOMEgfs:-${NWTEST}/gfs.${gfs_ver}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -export COM_IN=${COM_IN:-${DATAROOT}} -export M_TANKverf=${M_TANKverf:-${COMROOT}/${MINMON_SUFFIX}} - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_VMINMON - -exit - diff --git a/driver/gfs/para_config.gfs_analysis b/driver/gfs/para_config.gfs_analysis deleted file mode 100755 index e910b5a1f7..0000000000 --- a/driver/gfs/para_config.gfs_analysis +++ /dev/null @@ -1,31 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=YES -export SENDCOM=YES -export gesenvir=para -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gfs/para_config.gfs_forecast_high b/driver/gfs/para_config.gfs_forecast_high deleted file mode 100755 index c1454c4110..0000000000 --- 
a/driver/gfs/para_config.gfs_forecast_high +++ /dev/null @@ -1,32 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gespath=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgfs=$NWTEST/gfs.${gfs_ver} -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - diff --git a/driver/gfs/para_config.gfs_forecast_low b/driver/gfs/para_config.gfs_forecast_low deleted file mode 100755 index c1454c4110..0000000000 --- a/driver/gfs/para_config.gfs_forecast_low +++ /dev/null @@ -1,32 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gespath=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgfs=$NWTEST/gfs.${gfs_ver} -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - diff --git a/driver/gfs/test_emcsfc.sh b/driver/gfs/test_emcsfc.sh deleted file mode 100755 index 477b1ddbc7..0000000000 --- a/driver/gfs/test_emcsfc.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/sh - -#-------------------------------------------------------------- -# Run the 
JGFS_EMCSFC_SFC_PREP j-job on wcoss cray -# -# Invoke as follows: -# 'cat $script | bsub' -#-------------------------------------------------------------- - -#BSUB -oo emcsfc.log -#BSUB -eo emcsfc.log -#BSUB -q dev_shared -#BSUB -R rusage[mem=2000] -#BSUB -J emcsfc -#BSUB -P GFS-T2O -#BSUB -cwd . -#BSUB -W 0:03 - -set -x - -export cyc="00" -export job=emcsfc_sfc_prep_${cyc} -export KEEPDATA="YES" -export SENDECF="NO" -export SENDCOM="YES" -export RUN_ENVIR="nco" - -export DATA="/gpfs/hps/stmp/$LOGNAME/tmpnwprd/${job}" -export jlogfile="/gpfs/hps/stmp/$LOGNAME/jlogfile" - -module load prod_envir/1.1.0 - -export envir="prod" -export COMROOT="/gpfs/hps/stmp/${LOGNAME}"${COMROOT} - -export NWROOT="/gpfs/hps/emc/global/noscrub/George.Gayno/q3fy17_final" -export global_shared_ver="v14.1.0" - -module load grib_util/1.0.3 -module load prod_util/1.0.5 - -export jobid="LLgfs_emcsfc_sfc_prep" -export gfs_ver="v14.1.0" -$NWROOT/gfs.${gfs_ver}/jobs/JGFS_EMCSFC_SFC_PREP - -exit 0 diff --git a/driver/gfs/test_gfs_analysis.sh b/driver/gfs/test_gfs_analysis.sh deleted file mode 100755 index 4b697f6a7f..0000000000 --- a/driver/gfs/test_gfs_analysis.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gfs_analysis.o%J -#BSUB -e gfs_analysis.o%J -#BSUB -J gfs_analysis -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/q3fy17_final/gfs.v14.1.0/driver - -set -x - -export NODES=240 -export ntasks=480 -export ptile=2 -export threads=12 - -export CDATE=2017040700 - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gfs_analysis_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para 
-export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export crtm_ver=2.2.4 -export prod_envir_ver=1.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.8 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. $MODULESHOME/init/sh -module load crtm-intel/${crtm_ver} -module load prod_envir/$prod_envir_ver -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export OMP_NUM_THREADS_CY=24 -export NTHREADS=$OMP_NUM_THREADS_CY -export NTHREADS_GSI=$threads -export NTHSTACK=1024000000 - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/q3fy17_final -export PARA_CONFIG=$NWTEST/gfs.${gfs_ver}/driver/para_config.gfs_analysis -export JOBGLOBAL=$NWTEST/gfs.${gfs_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_ANALYSIS - -exit diff --git a/driver/gfs/test_gfs_forecast_high.sh b/driver/gfs/test_gfs_forecast_high.sh deleted file mode 100755 index ceef917ee8..0000000000 --- a/driver/gfs/test_gfs_forecast_high.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/sh --login - -#BSUB -L 
/bin/sh -#BSUB -P GFS-T2O -#BSUB -o gfs_forecast_high.o%J -#BSUB -e gfs_forecast_high.o%J -#BSUB -J gfs_forecast_high -#BSUB -q devonprod -#BSUB -M 768 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 02:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gfs.v14.1.0/driver - -set -x - -# 65 nodes = 59 compute nodes + 6 i/o nodes -# set WRT_GROUP=6 for 6 i/o nodes (see ../parm/gfs_forecast_high.parm) -# set WRTPE_PER_GROUP=4 to match ptile -export NODES=65 -export ntasks=260 -export ptile=4 -export threads=6 - -export CDATE=2017012506 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gfs_forecast_high_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export global_shared_ver=v14.1.0 -export gfs_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.5 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages16M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=1024m -export OMP_NUM_THREADS=$threads -export NTHREADS=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gfs.${gfs_ver}/driver/para_config.gfs_forecast_high -export JOBGLOBAL=$NWTEST/gfs.${gfs_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_FORECAST_HIGH - -exit - diff --git a/driver/gfs/test_gfs_forecast_low.sh b/driver/gfs/test_gfs_forecast_low.sh deleted file mode 100755 index af36679da0..0000000000 --- a/driver/gfs/test_gfs_forecast_low.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gfs_forecast_low.o%J -#BSUB -e gfs_forecast_low.o%J -#BSUB -J gfs_forecast_low -#BSUB -q devonprod -#BSUB -M 768 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gfs.v14.1.0/driver - -set -x - -# 20 nodes = 18 compute nodes + 2 i/o nodes -# set WRT_GROUP=2 for 2 i/o nodes (see ../parm/gfs_forecast_low.parm) -# set WRTPE_PER_GROUP=4 to match ptile -export NODES=20 -export ntasks=80 -export ptile=4 -export threads=6 - -export CDATE=2017012506 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut 
-c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gfs_forecast_low_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export global_shared_ver=v14.1.0 -export gfs_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.5 - - -############################################################# -# Load modules -############################################################# -. $MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages16M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=1024m -export OMP_NUM_THREADS=$threads -export NTHREADS=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gfs.${gfs_ver}/driver/para_config.gfs_forecast_low -export JOBGLOBAL=$NWTEST/gfs.${gfs_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_FORECAST_LOW - -exit - diff --git a/driver/gfs/test_jgfs_cyclone_tracker.ecf b/driver/gfs/test_jgfs_cyclone_tracker.ecf deleted file mode 100755 index 326ac94fc3..0000000000 --- a/driver/gfs/test_jgfs_cyclone_tracker.ecf +++ /dev/null @@ -1,121 +0,0 @@ -#BSUB -J GFS_tracker_TEST_06 -#BSUB -o /ptmpp2/Qingfu.Liu/com2/gfs_cyclone_traker_06.o%J -#BSUB -e /ptmpp2/Qingfu.Liu/com2/gfs_cyclone_traker_06.o%J -#BSUB -L /bin/sh -#BSUB -q debug -#BSUB -W 
00:30 -#BSUB -cwd /ptmpp2/Qingfu.Liu/com2 -#BSUB -P GFS-T2O -##BSUB -R rusage[mem=5000] -##BSUB -R affinity[core] -#BSUB -n 1 -#BSUB -R span[ptile=1] -#BSUB -R affinity[cpu(32):distribute=balance] -#BSUB -a poe -#BSUB -x - -#%include -#%include -. /usrx/local/Modules/default/init/ksh -module use /nwprod2/modulefiles -module load grib_util/v1.0.1 -module load prod_util/v1.0.1 -module load ibmpe ics lsf - -set -x - -export OMP_NUM_THREADS=32 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:32 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=dev2 -export cyc=06 -export job=gfs_cyclone_tracker_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -export NWROOT=/global/save/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -export DATAROOT=/ptmpp2/Qingfu.Liu - -#export COMROOT=/com2 -export COMROOT=/ptmpp2/Qingfu.Liu/com2 -export COMDATEROOT=/com2 - -export shared_global_home=$NWROOT/shared_nco_20160129 -export gfs_global_home=$NWROOT/gfs_nco_20160129 -export gdas_global_home=$NWROOT/gdas_nco_20160129 -#export DCOMROOT=/dcom - -export COMROOTp1=/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t00z - -#which setpdy.sh -setpdy.sh -. PDY - -export COMINgfs=/com/gfs/prod/gfs.${PDY} -export COMINgdas=/com/gfs/prod/gdas.${PDY} - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -# CALL executable job script here -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMERELO=$shared_global_home -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gfs_global_home/jobs/JGFS_ATMOS_CYCLONE_TRACKER - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -######################################################################## -###################################################################### -#PURPOSE: Executes the job JGFS_ATMOS_CYCLONE_TRACKER -###################################################################### -############################################################### -## Function been tested: creates GFS TC forecast track -## -## Calling sequence: JGFS_ATMOS_CYCLONE_TRACKER, global_extrkr.sh -## -## Initial condition: provide hours (cyc=?) -## -## Usage: bsub < test_jgfs_cyclone_tracker -## -## Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -## COMINgdas=/com/gfs/prod/gdas.${PDY} -## -## Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gfs.$PDY -## -## Result verification: compare with the operational results -################################################################ -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gfs/test_jgfs_cyclone_tracker_cray.sh b/driver/gfs/test_jgfs_cyclone_tracker_cray.sh deleted file mode 100755 index 595512503e..0000000000 --- a/driver/gfs/test_jgfs_cyclone_tracker_cray.sh +++ /dev/null @@ -1,167 +0,0 @@ -#!/bin/bash -#BSUB -J t1534 -#BSUB -W 0:30 -#BSUB -extsched 'CRAYLINUX[]' -R '1*{order[slots] select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -#BSUB -o /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gfs.v14.1.0/driver/test_jgfs_cyclone_tracker_06.o%J -#BSUB -e /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gfs.v14.1.0/driver/test_jgfs_cyclone_tracker_06.o%J -###BSUB -o t574.stdout.%J -###BSUB -e t574.stderr.%J -#BSUB -q "dev" -#BSUB -P "GFS-T2O" -#BSUB -M 200 -###BSUB -M "60" - - -module use /gpfs/hps/nco/ops/nwprod/modulefiles 
-module load prod_util -module load prod_envir/1.1.0 -module unload grib_util -module load grib_util/1.0.3 -##module load crtm-intel/2.2.3 -module load PrgEnv-intel craype cray-mpich ESMF-intel-haswell/3_1_0rp5 -module list - -#export MKL_CBWR=AVX -#ulimit -s unlimited -#ulimit -a - -set -x - -export OMP_NUM_THREADS=24 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:24 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -#export envir=prod -export envir=para -export cyc=06 -export job=test_jgfs_cyclone_tracker_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -#export NWROOT=/global/save/Qingfu.Liu -export NWROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -#export DATAROOT=/ptmpp2/Qingfu.Liu -export DATAROOT=/gpfs/hps/ptmp/Qingfu.Liu - -#export COMROOT=/com2 -#export COMROOT=/ptmpp2/Qingfu.Liu/com2 -#export COMROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -#export COMDATEROOT=/com -#export COMROOT=/gpfs/hps/ptmp/emc.glopara/com2 -#export COMDATEROOT=/com2 -export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export COMDATEROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -#export DCOMROOT=/dcom - -#export COMROOTp1=/gpfs/gp1/nco/ops/com -#export COMROOTp1=/gpfs/tp2/nco/ops/com -#export COMROOTp1=/gpfs/gp2/nco/ops/com -export COMROOTp1=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t${cyc}z - -#which setpdy.sh -#setpdy.sh -#. 
PDY - -export archsyndir=${COMINsyn:-$(compath.py gfs/prod/syndat)} -export WGRIB2=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.3/exec/wgrib2 -export GRB2INDEX=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.3/exec/grb2index -export GRBINDEX2=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.3/exec/grb2index - -#export PDY=20150723 -export PDY=20140814 - -#export COMINgfs=/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/com/gfs/prod/gdas.${PDY} -#export COMINgfs=/gpfs/gp2/nco/ops/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/gpfs/gp2/nco/ops/com/gfs/prod/gdas.${PDY} -export COMINgfs=$COMROOT/gfs/$envir/gfs.${PDY} -export COMINgdas=$COMROOT/gfs/$envir/gdas.${PDY} -export ARCHSYND=${COMINsyn:-$(compath.py gfs/prod/syndat)} -export HOMENHC=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/guidance/storm-data/ncep -#export GETGES_COM=/gpfs/gp2/nco/ops/com -#export GESROOT=/gpfs/gp2/nco/ops/com -#export GESROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export GETGES_COM=/gpfs/hps/ptmp/Qingfu.Liu/com -export GESROOT=$COMROOT -export GETGES_COM=$COMROOT - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -export shared_global_home=$NWROOT/global_shared.v14.1.0 -export gfs_global_home=$NWROOT/gfs.v14.1.0 -export gdas_global_home=$NWROOT/gdas.v14.1.0 - -export files_override=F -export PROCESS_TROPCY=NO -export copy_back=NO -export SENDCOM=NO -export APRNRELOC="time aprun -b -j1 -n7 -N1 -d24 -cc depth " -export APRNGETTX="time aprun -q -j1 -n1 -N1 -d1 -cc depth" -#export APRNRELOC="time aprun -b -j0 -n7 -N1 -d32 -cc depth" - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gfs_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -###################################################################### -############################################################# -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# # syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# # tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) -# -# Usage: bsub < test_jgfs_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gfs.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################## -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gfs/test_jgfs_tropcy_qc_reloc.ecf b/driver/gfs/test_jgfs_tropcy_qc_reloc.ecf deleted file mode 100755 index 07c85f36b9..0000000000 --- a/driver/gfs/test_jgfs_tropcy_qc_reloc.ecf +++ /dev/null @@ -1,124 +0,0 @@ -#BSUB -J Relocation_TEST_06 -#BSUB -o /ptmpp2/Qingfu.Liu/com2/gfs_tropcy_qc_reloc_06.o%J -#BSUB -e /ptmpp2/Qingfu.Liu/com2/gfs_tropcy_qc_reloc_06.o%J -#BSUB -L /bin/sh -#BSUB -q debug -#BSUB -W 00:30 -#BSUB -cwd /ptmpp2/Qingfu.Liu/com2 -#BSUB -P GFS-T2O -##BSUB -R rusage[mem=5000] -##BSUB -R affinity[core] -#BSUB -n 7 -#BSUB -R span[ptile=1] -#BSUB -R affinity[cpu(32):distribute=balance] -#BSUB -a poe -#BSUB -x - -#%include 
-#%include -. /usrx/local/Modules/default/init/ksh -module use /nwprod2/modulefiles -module load grib_util/v1.0.1 -module load prod_util/v1.0.1 -module load ibmpe ics lsf - -set -x - -export OMP_NUM_THREADS=32 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:32 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=dev2 -export cyc=06 -export job=gfs_tropcy_qc_reloc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -export NWROOT=/global/save/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -export DATAROOT=/ptmpp2/Qingfu.Liu - -#export COMROOT=/com2 -export COMROOT=/ptmpp2/Qingfu.Liu/com2 -export COMDATEROOT=/com2 - -#export DCOMROOT=/dcom - -export shared_global_home=$NWROOT/shared_nco_20160129 -export gfs_global_home=$NWROOT/gfs_nco_20160129 -export gdas_global_home=$NWROOT/gdas_nco_20160129 - -export COMROOTp1=/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t00z - -#which setpdy.sh -setpdy.sh -. PDY - -export COMINgfs=/com/gfs/prod/gfs.${PDY} -export COMINgdas=/com/gfs/prod/gdas.${PDY} - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gfs_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -##PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -####################################################################### -############################################################## -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) -# -# Usage: bsub < test_jgfs_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gfs.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################### -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gfs/test_jgfs_tropcy_qc_reloc_cray.sh b/driver/gfs/test_jgfs_tropcy_qc_reloc_cray.sh deleted file mode 100755 index 1c82464246..0000000000 --- a/driver/gfs/test_jgfs_tropcy_qc_reloc_cray.sh +++ /dev/null @@ -1,155 +0,0 @@ -#!/bin/bash -#BSUB -J t1534 -#BSUB -W 0:30 -#BSUB -extsched 'CRAYLINUX[]' -R '1*{order[slots] select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -#BSUB -o /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gfs.v14.1.0/driver/gfs_tropcy_qc_reloc_06.o%J -#BSUB -e /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gfs.v14.1.0/driver/gfs_tropcy_qc_reloc_06.o%J -###BSUB -o t574.stdout.%J -###BSUB 
-e t574.stderr.%J -#BSUB -q "dev" -#BSUB -P "GFS-T2O" -#BSUB -M 200 -###BSUB -M "60" - - -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_util -module load prod_envir/1.1.0 -module unload grib_util -module load grib_util/1.0.3 -##module load crtm-intel/2.2.3 -module load PrgEnv-intel craype cray-mpich ESMF-intel-haswell/3_1_0rp5 -module list - -#export MKL_CBWR=AVX -#ulimit -s unlimited -#ulimit -a - -set -x - -export OMP_NUM_THREADS=24 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:24 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=para -export cyc=06 -export job=gfs_tropcy_qc_reloc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -#export NWROOT=/global/save/Qingfu.Liu -export NWROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -#export DATAROOT=/ptmpp2/Qingfu.Liu -export DATAROOT=/gpfs/hps/ptmp/Qingfu.Liu - -#export COMROOT=/com2 -#export COMROOT=/ptmpp2/Qingfu.Liu/com2 -#export COMROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMDATEROOT=/com -export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export COMDATEROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -#export DCOMROOT=/dcom - -#export COMROOTp1=/gpfs/gp1/nco/ops/com -#export COMROOTp1=/gpfs/tp1/nco/ops/com -export COMROOTp1=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t${cyc}z - -#which setpdy.sh -#setpdy.sh -#. 
PDY - -export PDY=20140814 -##export PDY=20150723 - -#export COMINgfs=/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/com/gfs/prod/gdas.${PDY} -#export COMINgfs=/gpfs/tp1/nco/ops/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/gpfs/tp1/nco/ops/com/gfs/prod/gdas.${PDY} -export COMINgfs=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/$envir/gfs.${PDY} -export COMINgdas=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/$envir/gdas.${PDY} -export ARCHSYND=${COMINsyn:-$(compath.py gfs/prod/syndat)} -export HOMENHC=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/guidance/storm-data/ncep -#export GETGES_COM=/gpfs/tp1/nco/ops/com -export GETGES_COM=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export GESROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -export shared_global_home=$NWROOT/global_shared.v14.1.0 -export gfs_global_home=$NWROOT/gfs.v14.1.0 -export gdas_global_home=$NWROOT/gdas.v14.1.0 - -export files_override=F -export PROCESS_TROPCY=NO -export copy_back=NO -export SENDCOM=NO -export APRNRELOC="time aprun -b -j1 -n7 -N1 -d24 -cc depth " -export APRNGETTX="time aprun -q -j1 -n1 -N1 -d1 -cc depth" -#export APRNRELOC="time aprun -b -j0 -n7 -N1 -d32 -cc depth" - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gfs_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -###################################################################### -############################################################# -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# # syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# # tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) -# -# Usage: bsub < test_jgfs_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gfs.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################## -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gfs/test_jgfs_vminmon.sh b/driver/gfs/test_jgfs_vminmon.sh deleted file mode 100755 index 1483b3352e..0000000000 --- a/driver/gfs/test_jgfs_vminmon.sh +++ /dev/null @@ -1,84 +0,0 @@ -#!/bin/ksh - -#BSUB -o gfs_vminmon.o%J -#BSUB -e gfs_vminmon.o%J -#BSUB -J gfs_vminmon -#BSUB -q dev_shared -#BSUB -n 1 -#BSUB -R affinity[core] -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -a poe -#BSUB -P GFS-T2O - -set -x - -export NET='gfs' -export RUN='gfs' -export PDATE=${PDATE:-2016030206} - -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gfs_vminmon.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export 
envir=para -export Z=${Z:-gz} -me=$(hostname | cut -c1) -export DATAROOT=${DATAROOT:-/gpfs/${me}d2/emc/da/noscrub/Edward.Safford/test_data} -export COMROOT=${COMROOT:-/ptmpp1/$LOGNAME/com} - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. /usrx/local/Modules/3.2.9/init/ksh -module use /nwprod2/modulefiles -#module load grib_util -module load prod_util -#module load util_shared - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export MINMON_SUFFIX=testminmon -export NWTEST=${NWTEST:-/gpfs/${me}d2/emc/da/noscrub/Edward.Safford} -export HOMEgfs=${HOMEgfs:-${NWTEST}/gfs.${gfs_ver}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -#export HOMEminmon=${HOMEminmon:-${NWTEST}/global_shared.${global_shared_ver}} - -export COM_IN=${COM_IN:-${DATAROOT}} -export M_TANKverf=${M_TANKverf:-${COMROOT}/${MINMON_SUFFIX}} - -jlogdir=${jlogdir:-/ptmpp1/${LOGNAME}/jlogs} -if [[ ! 
-d ${jlogdir} ]]; then - mkdir -p ${jlogdir} -fi - -export jlogfile=${jlogfile:-${jlogdir}/${MINMON_SUFFIX}.${NET}.${RUN}.jlogfile} -if [[ -e ${jlogfile} ]]; then - rm -f ${jlogfile} -fi - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_VMINMON - -exit - diff --git a/driver/gfs/test_jgfs_vminmon_cray.sh b/driver/gfs/test_jgfs_vminmon_cray.sh deleted file mode 100755 index fd3c6f19a3..0000000000 --- a/driver/gfs/test_jgfs_vminmon_cray.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/ksh - -#BSUB -o gfs_vminmon.o%J -#BSUB -e gfs_vminmon.o%J -#BSUB -J gfs_vminmon -#BSUB -q dev -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -P GFS-T2O -#BSUB -R "select[mem>80] rusage[mem=80]" - -set -x - -export PDATE=${PDATE:-2016030800} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gfs_vminmon.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=${DATAROOT:-/gpfs/hps3/emc/da/noscrub/$LOGNAME/test_data} -export COMROOT=${COMROOT:-/gpfs/hps2/ptmp/$LOGNAME/com} - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/ksh - -module load prod_util -module load pm5 - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export MINMON_SUFFIX=${MINMON_SUFFIX:-testminmon} -export NWTEST=${NWTEST:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}} -export HOMEgfs=${HOMEgfs:-${NWTEST}/gfs.${gfs_ver}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -export COM_IN=${COM_IN:-${DATAROOT}} -export M_TANKverf=${M_TANKverf:-${COMROOT}/${MINMON_SUFFIX}} - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_VMINMON - -exit - diff --git a/driver/product/change_gfs_downstream_date.sh b/driver/product/change_gfs_downstream_date.sh deleted file mode 100755 index c9d4f10e29..0000000000 --- a/driver/product/change_gfs_downstream_date.sh +++ /dev/null @@ -1,46 +0,0 @@ -set -x - -# export cyc=12 -# export cyc=18 -export fhr=012 -export dir=$( pwd ) -export PDY=$(date -u +%Y%m%d) -export PDY1=$(expr $PDY - 1) - -export olddate=20200712 -export newdate=20200922 - -export gdas=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc} -export gdasgp=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc}/gempak -export gdasmeta=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc}/gempak/meta - -export gdastest=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc} -export gdastestgp=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc}/gempak -export gdastestmeta=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc}/gempak/meta - -for cyc in 00 06 12 18 -# for cyc in 00 -do -sed -i "s/${olddate}/${newdate}/g" run_JGDAS_ATMOS_GEMPAK_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" 
run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_GEMPAK_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_AWIPS_G2_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_FBWIND_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_GEMPAK_META_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_${cyc} - -sed -i s/envir=prod/envir=para/g run_JGDAS_ATMOS_GEMPAK_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_GEMPAK_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_AWIPS_G2_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_FBWIND_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_GEMPAK_META_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_${cyc} - -done diff --git a/driver/product/change_gfs_downstream_envir.sh b/driver/product/change_gfs_downstream_envir.sh deleted file mode 100755 index e10e22fb1f..0000000000 --- a/driver/product/change_gfs_downstream_envir.sh +++ /dev/null @@ -1,35 +0,0 @@ -set -x - -# export cyc=12 -# export cyc=18 -export fhr=012 -export dir=$( pwd ) -export PDY=$(date -u +%Y%m%d) -export PDY1=$(expr $PDY - 1) - -export olddate=20200106 -export newdate=20200712 - -export gdas=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc} -export 
gdasgp=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc}/gempak -export gdasmeta=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc}/gempak/meta - -export gdastest=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc} -export gdastestgp=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc}/gempak -export gdastestmeta=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc}/gempak/meta - -# for cyc in 00 06 12 18 -for cyc in 00 -do - vi run_JGDAS_ATMOS_GEMPAK_dell.sh_${cyc} - vi run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_${cyc} - vi run_JGFS_ATMOS_GEMPAK_dell.sh_${cyc} - vi run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_${cyc} - vi run_JGFS_ATMOS_AWIPS_G2_dell.sh_${cyc} - vi run_JGFS_ATMOS_FBWIND_dell.sh_${cyc} - vi run_JGFS_ATMOS_GEMPAK_META_dell.sh_${cyc} - vi run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_${cyc} - vi run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_${cyc} - vi run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_${cyc} - -done diff --git a/driver/product/compile_gfsv152.sh b/driver/product/compile_gfsv152.sh deleted file mode 100755 index 73685f0c9d..0000000000 --- a/driver/product/compile_gfsv152.sh +++ /dev/null @@ -1,20 +0,0 @@ - - -set -x - -export version=v15.2.0 - -cd /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/sorc - -build_gdas.sh -build_gfs_fbwndgfs.sh -build_gfs_overpdtg2.sh -build_gfs_wintemv.sh -build_gfs_util.sh - -cp /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/trim_rh.sh /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/ush - -cd /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/util/sorc -compile_gfs_util_wcoss.sh - - diff --git a/driver/product/compile_gfsv160.sh b/driver/product/compile_gfsv160.sh deleted file mode 100755 index 0c80153378..0000000000 --- a/driver/product/compile_gfsv160.sh +++ /dev/null @@ -1,17 +0,0 @@ - - -set -x - -export version=v16.0.0 - -cd /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/sorc - -build_gfs_fbwndgfs.sh -build_gfs_util.sh - -cp 
/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/trim_rh.sh /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/ush - -cd /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/util/sorc -compile_gfs_util_wcoss.sh - - diff --git a/driver/product/compile_gfsv160_hera.sh b/driver/product/compile_gfsv160_hera.sh deleted file mode 100755 index 2d555e9977..0000000000 --- a/driver/product/compile_gfsv160_hera.sh +++ /dev/null @@ -1,15 +0,0 @@ - - -set -x - -export version=v16.0.0 - -cd /scratch2/NCEPDEV/stmp3/Boi.Vuong/gfs.v16.0.0/sorc - -./build_gfs_fbwndgfs.sh -./build_gfs_util.sh - -cp /scratch2/NCEPDEV/stmp3/Boi.Vuong/trim_rh.sh /scratch2/NCEPDEV/stmp3/Boi.Vuong/gfs.$version/ush - -cd /scratch2/NCEPDEV/stmp3/Boi.Vuong/gfs.$version/util/sorc -sh compile_gfs_util_wcoss.sh diff --git a/driver/product/rmo_clean_gfs_output b/driver/product/rmo_clean_gfs_output deleted file mode 100755 index 2eecf595b8..0000000000 --- a/driver/product/rmo_clean_gfs_output +++ /dev/null @@ -1,23 +0,0 @@ - -set -x - -cd /gpfs/dell2/ptmp/Boi.Vuong/output -ls -l -sleep 3 -cd /gpfs/dell2/ptmp/Boi.Vuong -ls -l -sleep 3 - -rm -rf /gpfs/dell2/ptmp/Boi.Vuong/output -rm -rf /gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p /gpfs/dell2/ptmp/Boi.Vuong -mkdir -m 775 -p /gpfs/dell2/ptmp/Boi.Vuong/output -mkdir -m 775 -p /gpfs/dell2/ptmp/Boi.Vuong/com - -chmod -R 775 /gpfs/dell2/ptmp/Boi.Vuong/output /gpfs/dell2/ptmp/Boi.Vuong/com - -cd /gpfs/dell2/ptmp/Boi.Vuong/output -ls -exit - diff --git a/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_00 b/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_00 deleted file mode 100755 index bae8bc42df..0000000000 --- a/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_00 +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/sh - -#BSUB -J jgdas_gempak_meta_ncdc_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gdas_gempak_meta_ncdc_00.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd 
/gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GDAS GEMPAK META PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para -# export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gempak_meta_ncdc_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} 
-mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export model=${model:-gdas} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} -export COMINgdas=${COMINgdas:-$(compath.py ${NET}/${envir}/${RUN})} - -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgdas=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN} ### EMC PARA Realtime - -fi -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} - -export COMOUTncdc=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMOUTukmet=${COMOUT} -export COMOUTecmwf=${COMOUT} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTncdc $COMOUTukmet $COMOUTecmwf -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC diff --git 
a/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_06 b/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_06 deleted file mode 100755 index 557efb82de..0000000000 --- a/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_06 +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/sh - -#BSUB -J jgdas_gempak_meta_ncdc_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gdas_gempak_meta_ncdc_06.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GDAS GEMPAK META PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para -# export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gempak_meta_ncdc_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} 
${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export model=${model:-gdas} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} -export COMINgdas=${COMINgdas:-$(compath.py ${NET}/${envir}/${RUN})} - -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgdas=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN} ### EMC PARA Realtime - -fi -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} - -export 
COMOUTncdc=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMOUTukmet=${COMOUT} -export COMOUTecmwf=${COMOUT} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTncdc $COMOUTukmet $COMOUTecmwf -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC diff --git a/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_12 b/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_12 deleted file mode 100755 index 7212dec812..0000000000 --- a/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_12 +++ /dev/null @@ -1,132 +0,0 @@ -#!/bin/sh - -#BSUB -J jgdas_gempak_meta_ncdc_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gdas_gempak_meta_ncdc_12.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GDAS GEMPAK META PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para -# export 
envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gempak_meta_ncdc_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export model=${model:-gdas} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} -export COMINgdas=${COMINgdas:-$(compath.py 
${NET}/${envir}/${RUN})} - -else - -# export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime -# export COMINgdas=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN} ### EMC PARA Realtime - export COMIN=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/para/gdas.20200922/12/atmos/gempak ### Boi PARA Realtime - export COMINgdas=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/para/gdas ### Boi PARA Realtime - -fi -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} - -export COMOUTncdc=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMOUTukmet=${COMOUT} -export COMOUTecmwf=${COMOUT} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTncdc $COMOUTukmet $COMOUTecmwf -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC diff --git a/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_18 b/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_18 deleted file mode 100755 index 15f8fca7a1..0000000000 --- a/driver/product/run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_18 +++ /dev/null @@ -1,130 +0,0 @@ -#!/bin/sh - -#BSUB -J jgdas_gempak_meta_ncdc_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gdas_gempak_meta_ncdc_18.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - 
-#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GDAS GEMPAK META PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para -# export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gempak_meta_ncdc_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export 
PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export model=${model:-gdas} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} -export COMINgdas=${COMINgdas:-$(compath.py ${NET}/${envir}/${RUN})} - -else - - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgdas=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN} ### EMC PARA Realtime - -fi -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} - -export COMOUTncdc=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMOUTukmet=${COMOUT} -export COMOUTecmwf=${COMOUT} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTncdc $COMOUTukmet $COMOUTecmwf -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC diff --git a/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_00 b/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_00 deleted file mode 100755 index 6deb0f9168..0000000000 --- a/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_00 +++ /dev/null @@ -1,118 +0,0 @@ -#!/bin/sh - -#BSUB -J gdas_gempak_00 -#BSUB -o 
/gpfs/dell2/ptmp/Boi.Vuong/output/gdas_gempak_00.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GDAS GEMPAK PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para -# export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id 
(pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export model=${model:-gdas} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGDAS_ATMOS_GEMPAK diff --git a/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_06 b/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_06 deleted file mode 100755 index eb1d9b32ea..0000000000 --- a/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_06 +++ /dev/null @@ -1,119 +0,0 @@ -#!/bin/sh - -#BSUB -J gdas_gempak_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gdas_gempak_06.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=1] 
# 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GDAS GEMPAK PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export 
DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export model=${model:-gdas} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGDAS_ATMOS_GEMPAK diff --git a/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_12 b/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_12 deleted file mode 100755 index 32b2ae1829..0000000000 --- a/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_12 +++ /dev/null @@ -1,118 +0,0 @@ -#!/bin/sh - -#BSUB -J gdas_gempak_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gdas_gempak_12.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P 
GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GDAS GEMPAK PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME 
directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export model=${model:-gdas} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGDAS_ATMOS_GEMPAK diff --git a/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_18 b/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_18 deleted file mode 100755 index 7974bfecf6..0000000000 --- a/driver/product/run_JGDAS_ATMOS_GEMPAK_dell.sh_18 +++ /dev/null @@ -1,119 +0,0 @@ -#!/bin/sh - -#BSUB -J gdas_gempak_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gdas_gempak_18.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) 
-export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GDAS GEMPAK PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export 
EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export model=${model:-gdas} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGDAS_ATMOS_GEMPAK diff --git a/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_00 b/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_00 deleted file mode 100755 index 74a6484c24..0000000000 --- a/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_00 +++ /dev/null @@ -1,123 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_awips_f012_20km_1p00_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_awips_f012_20km_1p00_00.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 
1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -################################################ -# GFS_AWIPS_20KM_1P00 AWIPS PRODUCT GENERATION -################################################ - -export fcsthrs=012 - -############################################ -# user defined -############################################ -# set envir=para or para to test with data in prod or para -# export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_awips_f${fcsthrs}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# 
Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -export MPIRUN_AWIPSCFP="mpirun -n 4 cfp " - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG diff --git a/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_06 b/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_06 deleted file mode 100755 index 7a74d22acf..0000000000 --- a/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_06 +++ /dev/null @@ -1,123 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_awips_f012_20km_1p00_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_awips_f012_20km_1p00_06.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 
-export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -################################################ -# GFS_AWIPS_20KM_1P00 AWIPS PRODUCT GENERATION -################################################ - -export fcsthrs=012 - -############################################ -# user defined -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_awips_f${fcsthrs}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name 
and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -export MPIRUN_AWIPSCFP="mpirun -n 4 cfp " - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG diff --git a/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_12 b/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_12 deleted file mode 100755 index e8ad9e9e20..0000000000 --- a/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_12 +++ /dev/null @@ -1,123 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_awips_f012_20km_1p00_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_awips_f012_20km_1p00_12.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - 
-set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -################################################ -# GFS_AWIPS_20KM_1P00 AWIPS PRODUCT GENERATION -################################################ - -export fcsthrs=012 - -############################################ -# user defined -############################################ -# set envir=para or para to test with data in prod or para - export envir=para -# export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_awips_f${fcsthrs}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model 
-#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -export MPIRUN_AWIPSCFP="mpirun -n 4 cfp " - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG diff --git a/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_18 b/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_18 deleted file mode 100755 index 40404067dc..0000000000 --- a/driver/product/run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_18 +++ /dev/null @@ -1,123 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_awips_f012_20km_1p00_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_awips_f012_20km_1p00_18.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa 
-export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -################################################ -# GFS_AWIPS_20KM_1P00 AWIPS PRODUCT GENERATION -################################################ - -export fcsthrs=012 - -############################################ -# user defined -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_awips_f${fcsthrs}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model 
-#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -export MPIRUN_AWIPSCFP="mpirun -n 4 cfp " - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG diff --git a/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_00 b/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_00 deleted file mode 100755 index 1fdffa726c..0000000000 --- a/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_00 +++ /dev/null @@ -1,121 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_awips_f12_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_awips_f12_00.o%J -#BSUB -q debug -#BSUB -n 4 # number of tasks -#BSUB -R span[ptile=2] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - 
-#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -######################################## -# GFS_AWIPS_G2 AWIPS PRODUCT GENERATION -######################################## - -export fcsthrs=012 - -############################################ -# User Define COM, COMOUTwmo, COMIN directories -############################################ -# set envir=para or para to test with data in prod or para - export envir=para -export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_awips_f${fcsthrs}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export 
NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 diff --git a/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_06 b/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_06 deleted file mode 100755 index efa6146f91..0000000000 --- a/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_06 +++ /dev/null @@ -1,121 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_awips_f12_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_awips_f12_06.o%J -#BSUB -q debug -#BSUB -n 4 # number of tasks -#BSUB -R span[ptile=2] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### 
-module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -######################################## -# GFS_AWIPS_G2 AWIPS PRODUCT GENERATION -######################################## - -export fcsthrs=012 - -############################################ -# User Define COM, COMOUTwmo, COMIN directories -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_awips_f${fcsthrs}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - 
-############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 diff --git a/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_12 b/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_12 deleted file mode 100755 index 6a6218700c..0000000000 --- a/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_12 +++ /dev/null @@ -1,121 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_awips_f12_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_awips_f12_12.o%J -#BSUB -q debug -#BSUB -n 4 # number of tasks -#BSUB -R span[ptile=2] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load 
CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -######################################## -# GFS_AWIPS_G2 AWIPS PRODUCT GENERATION -######################################## - -export fcsthrs=012 - -############################################ -# User Define COM, COMOUTwmo, COMIN directories -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_awips_f${fcsthrs}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories 
-############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 diff --git a/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_18 b/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_18 deleted file mode 100755 index 70f732bf9b..0000000000 --- a/driver/product/run_JGFS_ATMOS_AWIPS_G2_dell.sh_18 +++ /dev/null @@ -1,121 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_awips_f12_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_awips_f12_18.o%J -#BSUB -q debug -#BSUB -n 4 # number of tasks -#BSUB -R span[ptile=2] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module 
load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -######################################## -# GFS_AWIPS_G2 AWIPS PRODUCT GENERATION -######################################## - -export fcsthrs=012 - -############################################ -# User Define COM, COMOUTwmo, COMIN directories -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_awips_f${fcsthrs}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with 
GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 diff --git a/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_00 b/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_00 deleted file mode 100755 index 3852d0092b..0000000000 --- a/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_00 +++ /dev/null @@ -1,113 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_fbwind_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_fbwind_00.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -############################################## -# Define COM, 
COMOUTwmo, COMIN directories -############################################## -# set envir=para or para to test with data in prod or para - export envir=para -export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_fbwind_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir 
-m 775 -p $COMOUT $COMOUTwmo -fi - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_FBWIND diff --git a/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_06 b/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_06 deleted file mode 100755 index 2008f44310..0000000000 --- a/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_06 +++ /dev/null @@ -1,113 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_fbwind_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_fbwind_06.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_fbwind_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export 
DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job 
-############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_FBWIND diff --git a/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_12 b/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_12 deleted file mode 100755 index 13af5f5fe9..0000000000 --- a/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_12 +++ /dev/null @@ -1,113 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_fbwind_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_fbwind_12.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -# set envir=para or para to test with data in prod or para - export envir=para -# export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_fbwind_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - 
-############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_FBWIND diff --git a/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_18 b/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_18 deleted file mode 100755 index d4352db500..0000000000 --- a/driver/product/run_JGFS_ATMOS_FBWIND_dell.sh_18 +++ /dev/null @@ -1,113 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_fbwind_18 
-#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_fbwind_18.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_fbwind_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export 
EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_FBWIND diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_00 b/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_00 deleted file mode 100755 index e033eb3731..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_00 +++ /dev/null @@ -1,131 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_gempak_meta_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_meta_00.o%J -#BSUB -q debug -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -n 28 # 28 tasks -#BSUB -R span[ptile=14] # 14 task per node -#BSUB -R affinity[core(1):distribute=balance] # using 14 cores on node and bind to 1 - # core per task and distribute 
across sockets - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200114 - -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_meta_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################# -#set the fcst hrs for all the cycles -############################################# -export fhbeg=00 -export fhend=384 -export fhinc=12 - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory 
-########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p -m 775 $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} - export COMINgempak=${COMINgempak:-${COMROOT}/${NET}/${envir}} -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgempak=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para ### EMC PARA Realtime - -fi -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} -export COMINnam=${COMINnam:-$(compath.py nam/prod/nam)} - -if [ ! 
-f $COMOUT ] ; then - mkdir -p -m 775 $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_META diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_06 b/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_06 deleted file mode 100755 index 6959859b5c..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_06 +++ /dev/null @@ -1,131 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_gempak_meta_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_meta_06.o%J -#BSUB -q debug -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -n 28 # 28 tasks -#BSUB -R span[ptile=14] # 14 task per node -#BSUB -R affinity[core(1):distribute=balance] # using 14 cores on node and bind to 1 - # core per task and distribute across sockets - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_meta_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES 
-export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################# -#set the fcst hrs for all the cycles -############################################# -export fhbeg=00 -export fhend=384 -export fhinc=12 - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p -m 775 $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} - export COMINgempak=${COMINgempak:-${COMROOT}/${NET}/${envir}} -else - export 
COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgempak=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para ### EMC PARA Realtime - -fi -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} -export COMINnam=${COMINnam:-$(compath.py nam/prod/nam)} - -if [ ! -f $COMOUT ] ; then - mkdir -p -m 775 $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_META diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_12 b/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_12 deleted file mode 100755 index ea5e7599da..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_12 +++ /dev/null @@ -1,133 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_gempak_meta_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_meta_12.o%J -#BSUB -q debug -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -n 28 # 28 tasks -#BSUB -R span[ptile=14] # 14 task per node -#BSUB -R affinity[core(1):distribute=balance] # using 14 cores on node and bind to 1 - # core per task and distribute across sockets - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment 
-########################################### -module load gempak/7.3.3 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_meta_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################# -#set the fcst hrs for all the cycles -############################################# -export fhbeg=00 -export fhend=384 -export fhinc=12 - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p -m 775 $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model 
-#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} - export COMINgempak=${COMINgempak:-${COMROOT}/${NET}/${envir}} -else -# export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime -# export COMINgempak=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para ### EMC PARA Realtime - export COMIN=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/para/gfs.20200922/12/atmos/gempak - export COMINgempak=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/para - -fi -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} -export COMINnam=${COMINnam:-$(compath.py nam/prod/nam)} - -if [ ! 
-f $COMOUT ] ; then - mkdir -p -m 775 $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_META diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_18 b/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_18 deleted file mode 100755 index c1ad53b100..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_META_dell.sh_18 +++ /dev/null @@ -1,131 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_gempak_meta_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_meta_18.o%J -#BSUB -q debug -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -n 28 # 28 tasks -#BSUB -R span[ptile=14] # 14 task per node -#BSUB -R affinity[core(1):distribute=balance] # using 14 cores on node and bind to 1 - # core per task and distribute across sockets - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_meta_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES 
-export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################# -#set the fcst hrs for all the cycles -############################################# -export fhbeg=00 -export fhend=384 -export fhinc=12 - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p -m 775 $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} - export COMINgempak=${COMINgempak:-${COMROOT}/${NET}/${envir}} -else - export 
COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgempak=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para ### EMC PARA Realtime - -fi -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak/meta - -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} -export COMINnam=${COMINnam:-$(compath.py nam/prod/nam)} - -if [ ! -f $COMOUT ] ; then - mkdir -p -m 775 $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_META diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_00 b/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_00 deleted file mode 100755 index 6a91b052e9..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_00 +++ /dev/null @@ -1,123 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_gempak_upapgif_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_upapgif_00.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200114 - -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load 
gempak/7.3.3 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_upapgif_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} -export MODEL=GFS - -############################################## -# Define COM directories 
-############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} - export COMINgfs=${COMINgfs:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgfs=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_06 b/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_06 deleted file mode 100755 index 7228e4f36a..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_06 +++ /dev/null @@ -1,123 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_gempak_upapgif_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_upapgif_06.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 
-module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_upapgif_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export 
NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} -export MODEL=GFS - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} - export COMINgfs=${COMINgfs:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgfs=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_12 b/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_12 deleted file mode 100755 index d44c484db6..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_12 +++ /dev/null @@ -1,125 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_gempak_upapgif_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_upapgif_12.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module 
-#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_upapgif_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export 
USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} -export MODEL=GFS - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} - export COMINgfs=${COMINgfs:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} -else -# export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime -# export COMINgfs=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - - export COMIN=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/para/gfs.20200922/12/atmos/gempak *** Boi PARA realtime - export COMINgfs=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/para/gfs.20200922/12/atmos *** Boi PARA realtime -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_18 b/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_18 deleted file mode 100755 index e59b2ce39d..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_18 +++ /dev/null @@ -1,123 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_gempak_upapgif_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_upapgif_18.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd 
/gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_upapgif_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export 
DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} -export MODEL=GFS - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/gempak} - export COMINgfs=${COMINgfs:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos/gempak ### EMC PARA Realtime - export COMINgfs=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_00 b/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_00 deleted file mode 100755 index d75205798c..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_00 +++ /dev/null @@ -1,139 +0,0 @@ -#!/bin/sh - -#BSUB -J 
jgfs_pgrb2_spec_gempak_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_pgrb2_spec_gempak_00.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=2] # 2 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION -############################################ - -export LAUNCH_MODE=MPI - -############################################### -# Set MP variables -############################################### -export OMP_NUM_THREADS=1 -export MP_LABELIO=yes -export MP_PULSE=0 -export MP_DEBUG_NOTIMEOUT=yes - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_pgrb2_spec_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export 
NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=gfs - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${NET}.${PDY})/${cyc}} - export COMIN=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gfs.20200922/00 -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${NET}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -################################################################# -# Execute the script for the regular grib 
-################################################################# -export DATA_HOLD=$DATA -export DATA=$DATA_HOLD/SPECIAL -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_06 b/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_06 deleted file mode 100755 index 67cb3bd8c6..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_06 +++ /dev/null @@ -1,138 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_pgrb2_spec_gempak_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_pgrb2_spec_gempak_06.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=2] # 2 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION -############################################ - -export LAUNCH_MODE=MPI - -############################################### -# Set MP variables -############################################### -export OMP_NUM_THREADS=1 -export MP_LABELIO=yes -export MP_PULSE=0 -export 
MP_DEBUG_NOTIMEOUT=yes - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_pgrb2_spec_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=gfs - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS 
(production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${NET}.${PDY})/${cyc}} -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${NET}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -################################################################# -# Execute the script for the regular grib -################################################################# -export DATA_HOLD=$DATA -export DATA=$DATA_HOLD/SPECIAL -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_12 b/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_12 deleted file mode 100755 index c84760622c..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_12 +++ /dev/null @@ -1,140 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_pgrb2_spec_gempak_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_pgrb2_spec_gempak_12.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=2] # 2 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up 
GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION -############################################ - -export LAUNCH_MODE=MPI - -############################################### -# Set MP variables -############################################### -export OMP_NUM_THREADS=1 -export MP_LABELIO=yes -export MP_PULSE=0 -export MP_DEBUG_NOTIMEOUT=yes - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_pgrb2_spec_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} 
-export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=gfs - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${NET}.${PDY})/${cyc}} - export COMIN=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gfs.20200922/12 -else -# export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - export COMIN=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/para/gfs.20200922/12/atmos %%%% Boi PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${NET}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -################################################################# -# Execute the script for the regular grib -################################################################# -export DATA_HOLD=$DATA -export DATA=$DATA_HOLD/SPECIAL -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_18 b/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_18 deleted file mode 100755 index 6675407304..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_18 +++ /dev/null @@ -1,138 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_pgrb2_spec_gempak_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_pgrb2_spec_gempak_18.o%J -#BSUB -q debug -#BSUB -n 2 # number of tasks -#BSUB -R span[ptile=2] # 2 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P 
GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION -############################################ - -export LAUNCH_MODE=MPI - -############################################### -# Set MP variables -############################################### -export OMP_NUM_THREADS=1 -export MP_LABELIO=yes -export MP_PULSE=0 -export MP_DEBUG_NOTIMEOUT=yes - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_pgrb2_spec_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# 
Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=gfs - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${NET}.${PDY})/${cyc}} -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${NET}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -################################################################# -# Execute the script for the regular grib -################################################################# -export DATA_HOLD=$DATA -export DATA=$DATA_HOLD/SPECIAL -mkdir -p $DATA -cd $DATA - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC diff --git a/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_00 
b/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_00 deleted file mode 100755 index 6eb92a1294..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_00 +++ /dev/null @@ -1,119 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_gempak_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_00.o%J -#BSUB -q debug -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -n 24 # 24 tasks -#BSUB -R span[ptile=12] # 12 task per node -#BSUB -R affinity[core(1):distribute=balance] # using 12 cores on node and bind to 1 core per task and distribute across sockets - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# Define COM, COMOUTwmo, COMIN directories -############################################ -# set envir=para or para to test with data in prod or para - export envir=para -export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles 
-export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else -# export COMIN=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/${RUN}.${PDY}/${cyc} ### Boi PARA -# export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/prfv3rt3b/gfs.${PDY}/${cyc} ### EMC PARA Realtime - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs.20200922/00 -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK diff --git 
a/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_06 b/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_06 deleted file mode 100755 index 7008c8c2b0..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_06 +++ /dev/null @@ -1,119 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_gempak_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_06.o%J -#BSUB -q debug -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -n 24 # 24 tasks -#BSUB -R span[ptile=12] # 12 task per node -#BSUB -R affinity[core(1):distribute=balance] # using 12 cores on node and bind to 1 core per task and distribute across sockets - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# Define COM, COMOUTwmo, COMIN directories -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p 
${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else -# export COMIN=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/${RUN}.${PDY}/${cyc} ### Boi PARA - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/gfs.${PDY}/${cyc}/atmos ### EMC PARA Realtime - export ILPOST=3 -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK diff --git 
a/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_12 b/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_12 deleted file mode 100755 index a15fdc77fc..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_12 +++ /dev/null @@ -1,119 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_gempak_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_12.o%J -#BSUB -q debug -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -n 24 # 24 tasks -#BSUB -R span[ptile=12] # 12 task per node -#BSUB -R affinity[core(1):distribute=balance] # using 12 cores on node and bind to 1 core per task and distribute across sockets - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# Define COM, COMOUTwmo, COMIN directories -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p 
${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else -# export COMIN=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/${RUN}.${PDY}/${cyc} ### Boi PARA - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/gfs.${PDY}/${cyc}/atmos ### EMC PARA Realtime - export ILPOST=3 -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK diff --git 
a/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_18 b/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_18 deleted file mode 100755 index e772c06a58..0000000000 --- a/driver/product/run_JGFS_ATMOS_GEMPAK_dell.sh_18 +++ /dev/null @@ -1,119 +0,0 @@ -#!/bin/sh - -#BSUB -J gfs_gempak_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_18.o%J -#BSUB -q debug -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -n 24 # 24 tasks -#BSUB -R span[ptile=12] # 12 task per node -#BSUB -R affinity[core(1):distribute=balance] # using 12 cores on node and bind to 1 core per task and distribute across sockets - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -#################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -########################################### -# Now set up GEMPAK/NTRANS environment -########################################### -module load gempak/7.3.3 -module list - -############################################ -# Define COM, COMOUTwmo, COMIN directories -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_gempak_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p 
${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else -# export COMIN=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/${RUN}.${PDY}/${cyc} ### Boi PARA - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/gfs.${PDY}/${cyc}/atmos ### EMC PARA Realtime - export ILPOST=3 -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos/gempak - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_GEMPAK diff --git 
a/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_00 b/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_00 deleted file mode 100755 index a9c848724c..0000000000 --- a/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_00 +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_pgrb2_spec_npoess_00 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_pgrb2_spec_npoess_00.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=00 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -############################################ -# GFS PGRB2_SPECIAL_POST PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_pgrb2_spec_npoess_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - 
-############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS diff --git a/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_06 b/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_06 deleted file 
mode 100755 index 5657ad324a..0000000000 --- a/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_06 +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_pgrb2_spec_npoess_06 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_pgrb2_spec_npoess_06.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=06 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -############################################ -# GFS PGRB2_SPECIAL_POST PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_pgrb2_spec_npoess_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - 
-########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS diff --git a/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_12 b/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_12 deleted file mode 100755 index 0f2952c540..0000000000 --- a/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_12 +++ /dev/null @@ -1,117 +0,0 @@ -#!/bin/sh - -#BSUB -J 
jgfs_pgrb2_spec_npoess_12 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_pgrb2_spec_npoess_12.o%J -#BSUB -q debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=12 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -############################################ -# GFS PGRB2_SPECIAL_POST PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_pgrb2_spec_npoess_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## 
-export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD - export COMIN=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.20200922/12 -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS diff --git a/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_18 b/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_18 deleted file mode 100755 index cacb6b7d07..0000000000 --- a/driver/product/run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_18 +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/sh - -#BSUB -J jgfs_pgrb2_spec_npoess_18 -#BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_pgrb2_spec_npoess_18.o%J -#BSUB -q 
debug -#BSUB -n 1 # number of tasks -#BSUB -R span[ptile=1] # 1 task per node -#BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output -#BSUB -W 00:30 -#BSUB -P GFS-DEV -#BSUB -R affinity[core(1):distribute=balance] - -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=20200922 - -export PDY1=$(expr $PDY - 1) - -export cyc=18 -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load the GRIB Utilities module -##################################### -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_util/1.1.4 -module load prod_envir/1.0.3 -module load grib_util/1.1.0 -module list - -############################################ -# GFS PGRB2_SPECIAL_POST PRODUCT GENERATION -############################################ -# set envir=para or para to test with data in prod or para - export envir=para - export envir=para - -export SENDCOM=YES -export KEEPDATA=YES -export job=gfs_pgrb2_spec_npoess_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -# Set FAKE DBNET for testing -export SENDDBN=YES -export DBNROOT=/gpfs/hps/nco/ops/nwprod/prod_util.v1.0.24/fakedbn - -export DATAROOT=/gpfs/dell2/ptmp/Boi.Vuong/output -export NWROOT=/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git -export COMROOT2=/gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p ${COMROOT2} ${COMROOT2}/logs ${COMROOT2}/logs/jlogfiles -export jlogfile=${COMROOT2}/logs/jlogfiles/jlogfile.${jobid} - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v16.0.0 - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p $DATA -cd $DATA - -################################ -# 
Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} - -################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export model=${model:-gfs} - -############################################## -# Define COM directories -############################################## -if [ $envir = "prod" ] ; then -# This setting is for testing with GFS (production) - export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}} ### NCO PROD -else - export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/v16rt2/gfs/para/${RUN}.${PDY}/${cyc}/atmos ### EMC PARA Realtime - -fi - -export COMOUT=${COMROOT2}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/atmos -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi - -############################################# -# run the GFS job -############################################# -sh $HOMEgfs/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS diff --git a/driver/product/run_JGFS_NCEPPOST b/driver/product/run_JGFS_NCEPPOST deleted file mode 100755 index b4a6baf9f9..0000000000 --- a/driver/product/run_JGFS_NCEPPOST +++ /dev/null @@ -1,136 +0,0 @@ -#!/bin/sh - -#BSUB -o gfs_post.o%J -#BSUB -e gfs_post.o%J -#BSUB -J gfs_post -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 02:00 -#BSUB -q devhigh -#BSUB -P GFS-T2O -#BSUB -M 1000 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gfs.v14.1.0/driver - -set -x - -export NODES=3 -export ntasks=24 -export ptile=8 -export threads=1 - -# specify user's own post working directory for 
testing -export svndir=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export MP_LABELIO=yes - -export OMP_NUM_THREADS=$threads - - -############################################ -# Loading module -############################################ -. $MODULESHOME/init/ksh -module load PrgEnv-intel ESMF-intel-haswell/3_1_0rp5 cfp-intel-sandybridge iobuf craype-hugepages2M craype-haswell -#module load cfp-intel-sandybridge/1.1.0 -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_envir/1.1.0 -module load prod_util/1.0.4 -module load grib_util/1.0.3 -##module load crtm-intel/2.2.4 -module list - -export hwrf_ver=v10.0.6 - -# specify PDY (the cycle start yyyymmdd) and cycle -export CDATE=2017052500 -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export cycle=t${cyc}z - - -# specify the directory environment for executable, it's either para or prod -export envir=prod - -# set up running dir - -export job=gfs_post_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -export DATA=/gpfs/hps/stmp/$LOGNAME/test/$jobid -mkdir -p $DATA -cd $DATA -rm -f ${DATA}/* - -#################################### -# Specify RUN Name and model -#################################### -export NET=gfs -export RUN=gfs - -#################################### -# Determine Job Output Name on System -#################################### -#export pgmout="OUTPUT.${pid}" -#export pgmerr=errfile - -#################################### -# SENDSMS - Flag Events on SMS -# SENDCOM - Copy Files From TMPDIR to $COMOUT -# SENDDBN - Issue DBNet Client Calls -# RERUN - Rerun posts from beginning (default no) -# VERBOSE - Specify Verbose Output in global_postgp.sh -#################################### -export SAVEGES=NO -export SENDSMS=NO -export SENDCOM=YES -export SENDDBN=NO -export RERUN=NO -export VERBOSE=YES - -export HOMEglobal=${svndir}/global_shared.v14.1.0 -export HOMEgfs=${svndir}/gfs.v14.1.0 -############################################## -# Define 
COM directories -############################################## -##export COMIN=$COMROOThps/gfs/para/gfs.${PDY} -export COMIN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para/gfs.${PDY} -export COMOUT=/gpfs/hps/ptmp/$LOGNAME/com2/gfs/test/gfs.$PDY -mkdir -p $COMOUT - -############################################## -# Define GES directories -############################################## -gespath=$GESROOThps -export GESdir=$gespath/${RUN}.${PDY} - -#################################### -# Specify Forecast Hour Range -#################################### - -export allfhr="anl 00 01 06 12 60 120 180 240 252 384" -for post_times in $allfhr -do -export post_times - -date - -#export OUTTYP=4 -# need to set FIXglobal to global share superstructure if testing post in non -# super structure environement -export FIXglobal=$svndir/global_shared.v14.1.0/fix -export APRUN="aprun -j 1 -n${ntasks} -N${ptile} -d${threads} -cc depth" -export nemsioget=$svndir/global_shared.v14.1.0/exec/nemsio_get - -export KEEPDATA=YES -export REMOVE_DATA=NO -#export POSTGRB2TBL=$HOMEglobal/parm/params_grib2_tbl_new -$HOMEgfs/jobs/JGFS_NCEPPOST - -############################################################# - -date - -echo $? - -done - - diff --git a/driver/product/run_JGFS_NCEPPOST.sh b/driver/product/run_JGFS_NCEPPOST.sh deleted file mode 100755 index e2169b1215..0000000000 --- a/driver/product/run_JGFS_NCEPPOST.sh +++ /dev/null @@ -1,115 +0,0 @@ -#!/bin/sh - -#BSUB -a poe -#BSUB -P GFS-T2O -#BSUB -eo gfspost1.dayfile.%J -#BSUB -oo gfspost1.dayfile.%J -#BSUB -J gfspost1 -#BSUB -network type=sn_all:mode=US -#BSUB -q "debug2" -#BSUB -n 24 -#BSUB -R span[ptile=8] -#BSUB -R affinity[core(3)] -#BSUB -x -#BSUB -W 00:15 - -############################################################# -# Function been tested: GFS master pgb file for a giving hour. 
-# -# Calling sequence: run_JGFS_NCEPPOST.sh -> JGFS_NCEPPOST -> exgfs_nceppost.sh -> global_nceppost.sh -> ncep_post -# -# Initial condition: CDATE=2016020900 (where /global/noscrub/emc.glopara/com/gfs/para/gfs.${PDY}${cyc} has data -# post_times="12" (Which hour of the master grib2 file to generate) -# GRIBVERSION=${GRIBVERSION:-'grib2'} (Grib2 data for the master pgb) -# -# -# Usage: bsub& /dev/null } ) then source /apps/lmod/lmod/init/$__ms_shell endif @@ -19,34 +19,6 @@ else if ( { test -d /work } ) then source /apps/lmod/init/$__ms_shell endif module purge -else if ( { test -d /gpfs/hps -a -e /etc/SuSE-release } ) then - # We are on NOAA Luna or Surge - if ( ! { module help >& /dev/null } ) then - source /opt/modules/default/init/$__ms_shell - endif - module purge - module purge - unset _LMFILES_ - unset LOADEDMODULES - module use /opt/modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/alt-modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /usrx/local/prod/modulefiles -else if ( { test -d /dcom -a -d /hwrf } ) then - # We are on NOAA Tide or Gyre - if ( ! { module help >& /dev/null } ) then - source /usrx/local/Modules/default/init/$__ms_shell - endif - module purge -else if ( { test -L /usrx && sh -c "readlink /usrx 2> /dev/null | grep dell" } ) then - # We are on WCOSS Mars or Venus - if ( ! { module help >& /dev/null } ) then - source /usrx/local/prod/lmod/lmod/init/$__ms_shell - endif - module purge else if ( { test -d /glade } ) then # We are on NCAR Yellowstone if ( ! { module help >& /dev/null } ) then @@ -54,7 +26,7 @@ else if ( { test -d /glade } ) then endif module purge else if ( { test -d /lustre -a -d /ncrc } ) then - # We are on GAEA. + # We are on GAEA. if ( ! { module help >& /dev/null } ) then # We cannot simply load the module command. 
The GAEA # /etc/csh.login modifies a number of module-related variables diff --git a/modulefiles/module_base.wcoss_dell_p3.lua b/modulefiles/module_base.wcoss_dell_p3.lua deleted file mode 100644 index 30b73bde49..0000000000 --- a/modulefiles/module_base.wcoss_dell_p3.lua +++ /dev/null @@ -1,67 +0,0 @@ -help([[ -Load environment to run GFS on WCOSS-Dell -]]) - -prepend_path("MODULEPATH", "/usrx/local/nceplibs/dev/hpc-stack/libs/hpc-stack/modulefiles/stack") - -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-ips", "18.0.1.163")) -load(pathJoin("hpc-impi", "18.0.1")) - -load(pathJoin("lsf", "10.1")) -load(pathJoin("EnvVars", "1.0.3")) -load(pathJoin("HPSS", "5.0.2.5")) -load(pathJoin("NCL", "6.4.0")) - -load(pathJoin("prod_util", "1.2.2")) -load(pathJoin("prod_envir", "1.1.0")) -load(pathJoin("grib_util", "1.2.2")) -load(pathJoin("util_shared", "1.3.0")) - -load(pathJoin("crtm", "2.3.0")) -setenv("CRTM_FIX","/gpfs/dell1/nco/ops/nwprod/lib/crtm/v2.3.0/fix") - -load(pathJoin("NCO", "4.7.0")) -load(pathJoin("CFP", "2.0.2")) -setenv("USE_CFP","YES") -load("pm5") - -load(pathJoin("jasper", "2.0.25")) -load(pathJoin("zlib", "1.2.11")) -load(pathJoin("png", "1.6.35")) - -load(pathJoin("hdf5", "1.10.6")) -load(pathJoin("netcdf", "4.7.4")) -load(pathJoin("pio", "2.5.2")) -load(pathJoin("esmf", "8.2.1b04")) -load(pathJoin("fms", "2021.03")) - -load(pathJoin("bacio", "2.4.1")) -load(pathJoin("g2", "3.4.2")) -load(pathJoin("g2tmpl", "1.10.0")) -load(pathJoin("ip", "3.3.3")) -load(pathJoin("nemsio", "2.5.2")) -load(pathJoin("sp", "2.3.3")) -load(pathJoin("w3emc", "2.7.3")) -load(pathJoin("w3nco", "2.4.1")) - -load(pathJoin("wgrib2", "2.0.8")) -setenv("WGRIB2","wgrib2") - -append_path("MODULEPATH", "/gpfs/dell1/nco/ops/nwprod/modulefiles/") -load(pathJoin("gempak", "7.3.3")) - -load(pathJoin("bufr_dumplist", "2.0.0")) -load(pathJoin("dumpjb", "5.1.0")) - -load(pathJoin("cdo", "1.9.8")) - --- Temporary until official hpc-stack is updated -prepend_path("MODULEPATH", 
"/gpfs/dell2/emc/modeling/noscrub/Walter.Kolczynski/save/hpc-stack/modulefiles/stack") -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-python", "3.6.3")) -load(pathJoin("ufswm", "1.0.0")) -load(pathJoin("met", "9.1")) -load(pathJoin("metplus", "3.1")) - -whatis("Description: GFS run environment") diff --git a/modulefiles/modulefile.ww3.wcoss_dell_p3.lua b/modulefiles/modulefile.ww3.wcoss_dell_p3.lua deleted file mode 100644 index 321698a9b6..0000000000 --- a/modulefiles/modulefile.ww3.wcoss_dell_p3.lua +++ /dev/null @@ -1,23 +0,0 @@ -help([[ -Build environment for WW3 on WCOSS-Dell -]]) - -prepend_path("MODULEPATH", "/usrx/local/nceplibs/dev/hpc-stack/libs/hpc-stack/modulefiles/stack") - -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-ips", "18.0.1.163")) -load(pathJoin("hpc-impi", "18.0.1")) - -load(pathJoin("cmake", "3.20.0")) - -load(pathJoin("jasper", "2.0.25")) -load(pathJoin("zlib", "1.2.11")) -load(pathJoin("png", "1.6.35")) - -load(pathJoin("bacio", "2.4.1")) -load(pathJoin("g2", "3.4.1")) - -load(pathJoin("hdf5", "1.10.6")) -load(pathJoin("netcdf", "4.7.4")) - -load(pathJoin("w3nco", "2.4.1")) diff --git a/modulefiles/workflow_utils.wcoss_dell_p3.lua b/modulefiles/workflow_utils.wcoss_dell_p3.lua deleted file mode 100644 index 2a3a67e463..0000000000 --- a/modulefiles/workflow_utils.wcoss_dell_p3.lua +++ /dev/null @@ -1,34 +0,0 @@ -help([[ -Build environment for workflow utilities on WCOSS-Dell -]]) - -prepend_path("MODULEPATH", "/usrx/local/nceplibs/dev/hpc-stack/libs/hpc-stack/modulefiles/stack") - -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-ips", "18.0.1.163")) -load(pathJoin("hpc-impi", "18.0.1")) - -load(pathJoin("cmake", "3.20.2")) - -load(pathJoin("jasper", "2.0.25")) -load(pathJoin("zlib", "1.2.11")) -load(pathJoin("png", "1.6.35")) - -load(pathJoin("hdf5", "1.10.6")) -load(pathJoin("netcdf", "4.7.4")) - -load(pathJoin("bacio", "2.4.1")) -load(pathJoin("g2", "3.4.1")) -load(pathJoin("w3nco", "2.4.1")) -load(pathJoin("w3emc", 
"2.7.3")) -load(pathJoin("sp", "2.3.3")) -load(pathJoin("ip", "3.3.3")) -load(pathJoin("nemsio", "2.5.2")) -load(pathJoin("nemsiogfs", "2.5.3")) -load(pathJoin("ncio", "1.0.0")) -load(pathJoin("landsfcutil", "2.4.1")) -load(pathJoin("sigio", "2.3.2")) -load(pathJoin("bufr", "11.4.0")) - -load(pathJoin("wgrib2", "2.0.8")) -setenv("WGRIB2","wgrib2") diff --git a/parm/config/config.aero b/parm/config/config.aero index e05fc79770..9b6b2a5ca6 100644 --- a/parm/config/config.aero +++ b/parm/config/config.aero @@ -1,24 +1,27 @@ #!/bin/ksh -x -# + # UFS-Aerosols settings -# + # Directory containing GOCART configuration files. Defaults to parm/chem if unset. AERO_CONFIG_DIR=$HOMEgfs/parm/chem -# + # Path to the input data tree case $machine in - "WCOSS_DELL_P3") AERO_INPUTS_DIR="/gpfs/dell2/emc/modeling/noscrub/Walter.Kolczynski/global-workflow/gocart_emissions" ;; - "HERA") AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions";; - "ORION") AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions";; - *) - echo "FATAL ERROR: Machine $machine unsupported for aerosols" - exit 2 - ;; + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; esac -# + # Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) AERO_EMIS_FIRE=QFED -# + # Aerosol convective scavenging factors (list of string array elements) # Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers # Scavenging factors are set to 0 (no scavenging) if unset diff --git a/parm/config/config.base.nco.static b/parm/config/config.base.nco.static index 48db9dd020..7dae9d5dd9 100755 --- a/parm/config/config.base.nco.static +++ b/parm/config/config.base.nco.static @@ -6,7 +6,7 @@ echo "BEGIN: config.base" # Machine environment -export machine="WCOSS_DELL_P3" +export machine="WCOSS2" # EMC parallel or NCO production export RUN_ENVIR="nco" diff --git a/parm/config/config.coupled_ic b/parm/config/config.coupled_ic index a7f3394983..7b4b6f51aa 100755 --- a/parm/config/config.coupled_ic +++ b/parm/config/config.coupled_ic @@ -7,9 +7,7 @@ echo "BEGIN: config.coupled_ic" # Get task specific resources source $EXPDIR/config.resources coupled_ic -if [[ "$machine" == "WCOSS_DELL_P3" ]]; then - export BASE_CPLIC="/gpfs/dell2/emc/modeling/noscrub/Walter.Kolczynski/global-workflow/IC" -elif [[ "$machine" == "HERA" ]]; then +if [[ "$machine" == "HERA" ]]; then export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" elif [[ "$machine" == "ORION" ]]; then export BASE_CPLIC="/work/noaa/global/wkolczyn/noscrub/global-workflow/IC" diff --git a/parm/config/config.fv3 b/parm/config/config.fv3 index 8c4b74ee87..93f836f642 100755 --- a/parm/config/config.fv3 +++ b/parm/config/config.fv3 @@ -20,11 +20,7 @@ case_in=$1 echo "BEGIN: config.fv3" -if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export npe_node_max=28 -elif [[ "$machine" = "WCOSS_C" ]]; then - export npe_node_max=24 -elif [[ "$machine" = "JET" ]]; then +if [[ "$machine" = "JET" ]]; then if [[ "$PARTITION_BATCH" = "xjet" ]]; then export npe_node_max=24 elif [[ "$PARTITION_BATCH" = "vjet" || "$PARTITION_BATCH" = "sjet" ]]; then @@ -163,7 +159,7 @@ export kchunk3d=1 # Determine whether to use parallel NetCDF based on resolution case $case_in in "C48" | "C96" | "C192") - export OUTPUT_FILETYPE_ATM="netcdf_parallel" + export OUTPUT_FILETYPE_ATM="netcdf" export 
OUTPUT_FILETYPE_SFC="netcdf" ;; "C384" | "C768" | "C1152" | "C3072") diff --git a/parm/config/config.fv3.nco.static b/parm/config/config.fv3.nco.static index f7c6981491..619ef6399b 100755 --- a/parm/config/config.fv3.nco.static +++ b/parm/config/config.fv3.nco.static @@ -20,11 +20,7 @@ case_in=$1 echo "BEGIN: config.fv3" -if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export npe_node_max=28 -elif [[ "$machine" = "WCOSS_C" ]]; then - export npe_node_max=24 -elif [[ "$machine" = "JET" ]]; then +if [[ "$machine" = "JET" ]]; then export npe_node_max=24 elif [[ "$machine" = "HERA" ]]; then export npe_node_max=40 @@ -89,8 +85,8 @@ case $case_in in export layout_y=8 export layout_x_gfs=6 export layout_y_gfs=6 - export npe_wav=35 - export npe_wav_gfs=35 + export npe_wav=35 + export npe_wav_gfs=35 export nth_fv3=1 export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 diff --git a/parm/config/config.post b/parm/config/config.post index 71aaae22ec..2ca6c3d753 100755 --- a/parm/config/config.post +++ b/parm/config/config.post @@ -15,7 +15,7 @@ echo "BEGIN: config.post" export NPOSTGRP=42 export OUTTYP=4 export MODEL_OUT_FORM=binarynemsiompiio -if [ $OUTPUT_FILE = "netcdf" ]; then +if [ $OUTPUT_FILE = "netcdf" ]; then export MODEL_OUT_FORM=netcdfpara fi @@ -34,11 +34,7 @@ export nth_postgp=1 export GFS_DOWNSTREAM="YES" export downset=2 -if [ $machine = "WCOSS_DELL_P3" ]; then - export npe_dwn=28 -else - export npe_dwn=24 -fi +export npe_dwn=24 export GRIBVERSION='grib2' export SENDCOM="YES" diff --git a/parm/config/config.prepbufr b/parm/config/config.prepbufr index 904d946774..b86cb89d12 100755 --- a/parm/config/config.prepbufr +++ b/parm/config/config.prepbufr @@ -11,9 +11,9 @@ echo "BEGIN: config.prepbufr" # Set variables if [ $machine = "HERA" ]; then - export GESROOT=/scratch1/NCEPDEV/rstprod # set by module prod_envir on WCOSS_C + export GESROOT=/scratch1/NCEPDEV/rstprod elif [ $machine = "ORION" ]; then - export 
GESROOT=/dev/null + export GESROOT=/dev/null fi echo "END: config.prepbufr" diff --git a/parm/config/config.resources b/parm/config/config.resources index f9b3e4132a..cda6f2edce 100755 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -26,14 +26,7 @@ step=$1 echo "BEGIN: config.resources" -if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export npe_node_max=28 - if [ "$QUEUE" = "dev2" -o "$QUEUE" = "devonprod2" -o "$QUEUE" = "devmax2" ]; then # WCOSS Dell 3.5 - export npe_node_max=40 - fi -elif [[ "$machine" = "WCOSS_C" ]]; then - export npe_node_max=24 -elif [[ "$machine" = "JET" ]]; then +if [[ "$machine" = "JET" ]]; then if [[ "$PARTITION_BATCH" = "xjet" ]]; then export npe_node_max=24 elif [[ "$PARTITION_BATCH" = "vjet" || "$PARTITION_BATCH" = "sjet" ]]; then @@ -60,11 +53,7 @@ elif [ $step = "aerosol_init" ]; then export nth_aerosol_init=1 export npe_node_aerosol_init=$(echo "$npe_node_max / $nth_aerosol_init" | bc) export NTASKS=${npe_aerosol_init} - export memory_aerosol_init="4G" - if [ $machine = "HERA" ]; then - # Hera needs a bit more memory since it has fewer CPU/core - export memory_aerosol_init="6G" - fi + export memory_aerosol_init="6G" elif [ $step = "waveinit" ]; then @@ -181,13 +170,9 @@ elif [ $step = "anal" ]; then export npe_anal=84 export npe_anal_gfs=84 fi - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export nth_anal=4 - fi export nth_anal_gfs=$nth_anal export npe_node_anal=$(echo "$npe_node_max / $nth_anal" | bc) export nth_cycle=$nth_anal - if [[ "$machine" == "WCOSS_C" ]]; then export memory_anal="3072M"; fi elif [ $step = "analcalc" ]; then @@ -195,7 +180,6 @@ elif [ $step = "analcalc" ]; then export npe_analcalc=127 export nth_analcalc=1 export npe_node_analcalc=$npe_node_max - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_analcalc=127 ; fi elif [ $step = "analdiag" ]; then @@ -203,7 +187,6 @@ elif [ $step = "analdiag" ]; then export npe_analdiag=112 export nth_analdiag=1 export 
npe_node_analdiag=$npe_node_max - if [[ "$machine" == "WCOSS_C" ]]; then export memory_analdiag="3072M"; fi elif [ $step = "sfcanl" ]; then @@ -221,8 +204,6 @@ elif [ $step = "gldas" ]; then export npe_gaussian=96 export nth_gaussian=1 export npe_node_gaussian=24 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_gldas=112 ; fi - if [[ "$machine" == "WCOSS_C" ]]; then export memory_gldas="3072M"; fi elif [ $step = "fcst" ]; then @@ -269,8 +250,6 @@ elif [ $step = "fcst" ]; then export npe_node_fcst=$(echo "$npe_node_max / $nth_fcst" | bc) export npe_node_fcst_gfs=$(echo "$npe_node_max / $nth_fcst_gfs" | bc) - if [[ "$machine" == "WCOSS_C" ]]; then export memory_fcst="1024M"; fi - if [[ $DO_WAVE == "YES" ]]; then case $waveGRD in 'gnh_10m aoc_9km gsh_15m') export WAVPETS=140 ;; @@ -336,8 +315,6 @@ elif [ $step = "post" ]; then export nth_post=1 export npe_node_post=12 export npe_node_dwn=$npe_node_max - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_node_post=28 ; fi - if [[ "$machine" == "WCOSS_C" ]]; then export memory_post="3072M"; fi elif [ $step = "wafs" ]; then @@ -390,9 +367,7 @@ elif [ $step = "vrfy" ]; then export npe_node_vrfy=1 export npe_vrfy_gfs=1 export npe_node_vrfy_gfs=1 - if [[ "$machine" == "WCOSS_C" ]]; then - export memory_vrfy="3072M" - elif [[ "$machine" == "HERA" ]]; then + if [[ "$machine" == "HERA" ]]; then export memory_vrfy="16384M" fi @@ -405,11 +380,6 @@ elif [ $step = "metp" ]; then export wtime_metp_gfs="06:00:00" export npe_metp_gfs=4 export npe_node_metp_gfs=4 - if [[ "$machine" == "WCOSS_C" ]]; then - export memory_metp="3072M" - elif [[ "$machine" == "THEIA" ]]; then - export memory_metp="16384M" - fi elif [ $step = "echgres" ]; then @@ -424,11 +394,7 @@ elif [ $step = "init" ]; then export npe_init=24 export nth_init=1 export npe_node_init=6 - if [ $machine = "WCOSS_DELL_P3" ]; then - export memory_init="10G" - else - export memory_init="70G" - fi + export memory_init="70G" elif [ $step = "init_chem" ]; then @@ 
-500,14 +466,9 @@ elif [ $step = "eobs" -o $step = "eomg" ]; then fi export npe_eomg=$npe_eobs export nth_eobs=2 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_eobs=7; fi export nth_eomg=$nth_eobs export npe_node_eobs=$(echo "$npe_node_max / $nth_eobs" | bc) export npe_node_eomg=$npe_node_eobs - if [[ "$machine" == "WCOSS_C" ]]; then - export memory_eobs="3072M" - export memory_eomg=$memory_eobs - fi elif [ $step = "ediag" ]; then @@ -515,7 +476,6 @@ elif [ $step = "ediag" ]; then export npe_ediag=56 export nth_ediag=1 export npe_node_ediag=$npe_node_max - if [[ "$machine" == "WCOSS_C" ]]; then export memory_ediag="3072M"; fi elif [ $step = "eupd" ]; then @@ -523,9 +483,6 @@ elif [ $step = "eupd" ]; then if [ $CASE = "C768" ]; then export npe_eupd=480 export nth_eupd=6 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export nth_eupd=7 - fi if [[ "$machine" = "HERA" ]]; then export npe_eupd=150 export nth_eupd=40 @@ -533,9 +490,6 @@ elif [ $step = "eupd" ]; then elif [ $CASE = "C384" ]; then export npe_eupd=270 export nth_eupd=2 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export nth_eupd=9 - fi if [[ "$machine" = "HERA" ]]; then export npe_eupd=100 export nth_eupd=40 @@ -549,20 +503,15 @@ elif [ $step = "eupd" ]; then fi fi export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) - if [[ "$machine" == "WCOSS_C" ]]; then - export memory_eupd="3072M" - fi elif [ $step = "ecen" ]; then export wtime_ecen="00:10:00" export npe_ecen=80 export nth_ecen=6 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_ecen=7; fi if [ $CASE = "C384" -o $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export nth_ecen=2; fi export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) export nth_cycle=$nth_ecen - if [[ "$machine" == "WCOSS_C" ]]; then export memory_ecen="3072M"; fi elif [ $step = "esfc" ]; then @@ -571,7 +520,6 @@ elif [ $step = "esfc" ]; then export npe_node_esfc=$npe_node_max export nth_esfc=1 export nth_cycle=$nth_esfc - if [[ "$machine" 
== "WCOSS_C" ]]; then export memory_esfc="3072M"; fi elif [ $step = "efcs" ]; then @@ -583,16 +531,13 @@ elif [ $step = "efcs" ]; then export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) export nth_efcs=${nth_fv3:-2} export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) - if [[ "$machine" == "WCOSS_C" ]]; then export memory_efcs="254M"; fi elif [ $step = "epos" ]; then export wtime_epos="00:15:00" export npe_epos=80 export nth_epos=6 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_epos=7; fi export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) - if [[ "$machine" == "WCOSS_C" ]]; then export memory_epos="254M"; fi elif [ $step = "postsnd" ]; then @@ -607,7 +552,6 @@ elif [ $step = "postsnd" ]; then export npe_node_postsnd=4 fi if [[ "$machine" = "HERA" ]]; then export npe_node_postsnd=2; fi - if [[ "$machine" == "WCOSS_C" ]]; then export memory_postsnd="254M"; fi elif [ $step = "awips" ]; then @@ -615,12 +559,6 @@ elif [ $step = "awips" ]; then export npe_awips=4 export npe_node_awips=4 export nth_awips=2 - if [[ "$machine" == "WCOSS_DELL_P3" ]]; then - export npe_awips=2 - export npe_node_awips=2 - export nth_awips=1 - fi - if [[ "$machine" == "WCOSS_C" ]]; then export memory_awips="2048M"; fi elif [ $step = "gempak" ]; then @@ -628,7 +566,6 @@ elif [ $step = "gempak" ]; then export npe_gempak=17 export npe_node_gempak=4 export nth_gempak=3 - if [[ "$machine" == "WCOSS_C" ]]; then export memory_gempak="254M"; fi else diff --git a/parm/config/config.vrfy b/parm/config/config.vrfy index 0eda2537a9..cd3b7150ce 100755 --- a/parm/config/config.vrfy +++ b/parm/config/config.vrfy @@ -41,12 +41,7 @@ if [ $VRFYFITS = "YES" ]; then export CONVNETC="YES" fi - if [ $machine = "WCOSS_C" ]; then - export fitdir="$BASE_GIT/verif/global/parafits.fv3nems/batrun" - export PREPQFITSH="$fitdir/subfits_cray_nems" - elif [ $machine = "WCOSS_DELL_P3" ]; then - export PREPQFITSH="$fitdir/subfits_dell_nems" - elif [ $machine = "HERA" ]; then + if [ $machine = 
"HERA" ]; then export PREPQFITSH="$fitdir/subfits_hera_slurm" elif [ $machine = "ORION" ]; then export PREPQFITSH="$fitdir/subfits_orion_netcdf" @@ -111,10 +106,10 @@ if [ "$VRFYTRAK" = "YES" ]; then export TRACKERSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_TRACKER" if [ "$CDUMP" = "gdas" ]; then - export FHOUT_CYCLONE=3 + export FHOUT_CYCLONE=3 export FHMAX_CYCLONE=$FHMAX else - export FHOUT_CYCLONE=6 + export FHOUT_CYCLONE=6 export FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) fi export COMINsyn=${COMINsyn:-${COMROOT}/gfs/prod/syndat} @@ -133,11 +128,7 @@ fi if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then - if [ $machine = "WCOSS_C" ] ; then - export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.cray" - elif [ $machine = "WCOSS_DELL_P3" ] ; then - export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.dell" - elif [ $machine = "HERA" ] ; then + if [ $machine = "HERA" ] ; then export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.hera" else echo "WARNING: MOS package is not enabled on $machine!" diff --git a/scripts/exgdas_atmos_chgres_forenkf.sh b/scripts/exgdas_atmos_chgres_forenkf.sh index 805b8be6d0..97f323b00f 100755 --- a/scripts/exgdas_atmos_chgres_forenkf.sh +++ b/scripts/exgdas_atmos_chgres_forenkf.sh @@ -14,7 +14,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Dell / Hera # ################################################################################ @@ -174,7 +173,7 @@ EOF export pgm=$CHGRESNCEXEC . 
prep_step - $APRUN_CHGRES $CHGRESNCEXEC chgres_nc_gauss0$FHR.nml + $APRUN_CHGRES $CHGRESNCEXEC chgres_nc_gauss0$FHR.nml export err=$?; err_chk fi done diff --git a/scripts/exgdas_atmos_nceppost.sh b/scripts/exgdas_atmos_nceppost.sh index 8616b35453..403cdb4bca 100755 --- a/scripts/exgdas_atmos_nceppost.sh +++ b/scripts/exgdas_atmos_nceppost.sh @@ -1,6 +1,6 @@ ##################################################################### echo "-----------------------------------------------------" -echo " exgdas_nceppost.sh" +echo " exgdas_nceppost.sh" echo " Sep 07 - Chuang - Modified script to run unified post" echo " July 14 - Carlis - Changed to 0.25 deg grib2 master file" echo " Feb 16 - Lin - Modify to use Vertical Structure" @@ -62,7 +62,7 @@ if [ $OUTTYP -eq 4 ] ; then else export SUFFIX= fi -export machine=${machine:-WCOSS_C} +export machine=${machine:-WCOSS2} ########################### # Specify Output layers @@ -97,7 +97,7 @@ if [ ${stime} = "anl" ]; then export OUTTYP=${OUTTYP:-4} - # specify output file name from chgres which is input file name to nceppost + # specify output file name from chgres which is input file name to nceppost # if model already runs gfs io, make sure GFSOUT is linked to the gfsio file # new imported variable for global_nceppost.sh @@ -108,7 +108,7 @@ if [ ${stime} = "anl" ]; then if [ $GRIBVERSION = 'grib2' ]; then export POSTGRB2TBL=${POSTGRB2TBL:-${g2tmpl_ROOT}/share/params_grib2_tbl_new} - export PostFlatFile=${PostFlatFile:-$PARMpost/postxconfig-NT-GFS-ANL.txt} + export PostFlatFile=${PostFlatFile:-$PARMpost/postxconfig-NT-GFS-ANL.txt} export CTLFILE=$PARMpost/postcntrl_gfs_anl.xml fi @@ -124,9 +124,9 @@ if [ ${stime} = "anl" ]; then export PGBOUT2=pgbfile.grib2 export PGIOUT2=pgifile.grib2.idx export IGEN=$IGEN_ANL - export FILTER=0 + export FILTER=0 - # specify fhr even for analysis because postgp uses it + # specify fhr even for analysis because postgp uses it # export fhr=00 $POSTGPSH @@ -166,7 +166,7 @@ if [ ${stime} = "anl" ]; 
then fi fi fi - rm pgbfile.grib2 + rm pgbfile.grib2 else #### atmanl file not found need failing job echo " *** FATAL ERROR: No model anl file output " @@ -177,12 +177,12 @@ else ## not_anl if_stimes SLEEP_LOOP_MAX=$(expr $SLEEP_TIME / $SLEEP_INT) ############################################################ - # Loop Through the Post Forecast Files + # Loop Through the Post Forecast Files ############################################################ for fhr in $post_times; do ############################### - # Start Looping for the + # Start Looping for the # existence of the restart files ############################### set -x @@ -197,7 +197,7 @@ else ## not_anl if_stimes fi ############################### # If we reach this point assume - # fcst job never reached restart + # fcst job never reached restart # period and error exit ############################### if [ $ic -eq $SLEEP_LOOP_MAX ]; then @@ -212,7 +212,7 @@ else ## not_anl if_stimes postmsg "$msg" ############################### - # Put restart files into /nwges + # Put restart files into /nwges # for backup to start Model Fcst ############################### [[ -f flxfile ]] && rm flxfile @@ -258,7 +258,7 @@ else ## not_anl if_stimes fi else if [ $fhr -eq 0 ]; then - export PostFlatFile=$PARMpost/postxconfig-NT-GFS-F00.txt + export PostFlatFile=$PARMpost/postxconfig-NT-GFS-F00.txt export CTLFILE=${CTLFILEGFS:-$PARMpost/postcntrl_gfs_f00.xml} else export CTLFILE=${CTLFILEGFS:-$PARMpost/postcntrl_gfs.xml} @@ -306,11 +306,11 @@ else ## not_anl if_stimes if [ $SENDCOM = 'YES' ]; then if [ $GRIBVERSION = 'grib2' ] ; then - if [ $INLINE_POST = ".false." ]; then + if [ $INLINE_POST = ".false." 
]; then cp $PGBOUT2 $COMOUT/${MASTERFHR} fi $GRB2INDEX $PGBOUT2 $COMOUT/${MASTERFHRIDX} - fi + fi # Model generated flux files will be in nemsio after FY17 upgrade # use post to generate Grib2 flux files @@ -344,7 +344,7 @@ else ## not_anl if_stimes $DBNROOT/bin/dbn_alert MODEL ${run}_SGB_GB2 $job $COMOUT/${PREFIX}sfluxgrbf${fhr}.grib2 $DBNROOT/bin/dbn_alert MODEL ${run}_SGB_GB2_WIDX $job $COMOUT/${PREFIX}sfluxgrbf${fhr}.grib2.idx fi - fi + fi [[ -f pgbfile.grib2 ]] && rm pgbfile.grib2 [[ -f flxfile ]] && rm flxfile diff --git a/scripts/exgdas_efsoi.sh b/scripts/exgdas_efsoi.sh index 3ca5d05eb9..f730634c83 100755 --- a/scripts/exgdas_efsoi.sh +++ b/scripts/exgdas_efsoi.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: Hera # ################################################################################ @@ -154,7 +153,7 @@ $NLN $COMIN_GES_ENS/${GPREFIX}atmf006.ensmean${GSUFFIX} sfg_${CDATE}_fhr03_ensme # The following deals with different files with the same local name (assuming # a 24hr EFSOI forecast): # both are hybrid analyses from gdas - one from CDATE saved during the -# corresponding GDAS cycle in the efsoigdas tree to be used in +# corresponding GDAS cycle in the efsoigdas tree to be used in # the localization advection in EFSOI, the other from VDATE to be used # for verification. 
diff --git a/scripts/exgdas_efsoi_update.sh b/scripts/exgdas_efsoi_update.sh index ca1ca5917f..cf1542cc57 100755 --- a/scripts/exgdas_efsoi_update.sh +++ b/scripts/exgdas_efsoi_update.sh @@ -14,7 +14,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Cray/Theia # ################################################################################ @@ -289,7 +288,7 @@ cat > enkf.nml << EOFnml paranc=$paranc,write_fv3_incr=$write_fv3_incr, efsoi_cycling=.true., $WRITE_INCR_ZERO - $NAM_ENKF + $NAM_ENKF / &satobs_enkf sattypes_rad(1) = 'amsua_n15', dsis(1) = 'amsua_n15', @@ -409,7 +408,7 @@ export ERR=$rc export err=$ERR $ERRSCRIPT || exit 2 -# save for EFSOI task +# save for EFSOI task $NCP $COMOUT_ANL_ENS/$GBIASe $COMOUT_ANL_ENSFSOI # save for EFSOI localization advection $NCP $ATMANL_GSI_ENSRES $COMOUT_ANL_ENSFSOI diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh index f3671968ff..eaa120ca47 100755 --- a/scripts/exgdas_enkf_ecen.sh +++ b/scripts/exgdas_enkf_ecen.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Cray/Theia # ################################################################################ diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh index 3263115057..2dfcaa419a 100755 --- a/scripts/exgdas_enkf_fcst.sh +++ b/scripts/exgdas_enkf_fcst.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Cray/Theia # #### ################################################################################ diff --git a/scripts/exgdas_enkf_post.sh b/scripts/exgdas_enkf_post.sh index ebf877b3cc..5e8ecc7298 100755 --- a/scripts/exgdas_enkf_post.sh +++ b/scripts/exgdas_enkf_post.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Cray/Theia # ################################################################################ diff --git a/scripts/exgdas_enkf_select_obs.sh b/scripts/exgdas_enkf_select_obs.sh index 4a0f0d3ee3..488bec6c2a 100755 --- 
a/scripts/exgdas_enkf_select_obs.sh +++ b/scripts/exgdas_enkf_select_obs.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Cray/Theia # ################################################################################ diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 407a6097a7..4595f84324 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Cray/Theia # ################################################################################ diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh index 0f639d12e3..c76cc47931 100755 --- a/scripts/exgdas_enkf_update.sh +++ b/scripts/exgdas_enkf_update.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Cray/Theia # ################################################################################ diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh index 83d7859cb8..8c268a6176 100755 --- a/scripts/exgfs_wave_init.sh +++ b/scripts/exgfs_wave_init.sh @@ -20,7 +20,6 @@ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # ############################################################################### # @@ -54,7 +53,7 @@ # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. - if [ -z ${NTASKS} ] + if [ -z ${NTASKS} ] then echo "FATAL ERROR: requires NTASKS to be set " err=1; export err;${errchk} @@ -138,7 +137,7 @@ fi done -# 1.a.1 Execute parallel or serialpoe +# 1.a.1 Execute parallel or serialpoe if [ "$nmoddef" -gt '0' ] then @@ -161,7 +160,7 @@ echo ' ------------------------------------' echo ' ' [[ "$LOUD" = YES ]] && set -x - + if [ "$NTASKS" -gt '1' ] then if [ ${CFP_MP:-"NO"} = "YES" ]; then @@ -174,7 +173,7 @@ ./cmdfile exit=$? 
fi - + if [ "$exit" != '0' ] then set +x @@ -186,8 +185,8 @@ echo ' ' [[ "$LOUD" = YES ]] && set -x fi - - fi + + fi # 1.a.3 File check @@ -200,7 +199,7 @@ echo " mod_def.$grdID succesfully created/copied " echo ' ' [[ "$LOUD" = YES ]] && set -x - else + else set +x echo ' ' echo '********************************************** ' @@ -215,7 +214,7 @@ done # --------------------------------------------------------------------------- # -# 2. Ending +# 2. Ending set +x echo ' ' diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh index 2eb6a82211..622eb57880 100755 --- a/scripts/exgfs_wave_post_gridded_sbs.sh +++ b/scripts/exgfs_wave_post_gridded_sbs.sh @@ -8,23 +8,22 @@ # # Author: Jose-Henrique Alves Org: NCEP/EMC Date: 2019-12-06 # Abstract: This script is the postprocessor for the wave component in GFS. -# This version runs side-by-side with the GFS fcst step. +# This version runs side-by-side with the GFS fcst step. # It executes several scripts forpreparing and creating output data # as follows: # -# wave_grib2_sbs.sh : generates GRIB2 files. -# wave_grid_interp_ush.sh : interpolates data from new grids to old grids +# wave_grib2_sbs.sh : generates GRIB2 files. +# wave_grid_interp_ush.sh : interpolates data from new grids to old grids # # Script history log: -# 2019-12-06 J-Henrique Alves: First Version adapted from HTolman post.sh 2007 +# 2019-12-06 J-Henrique Alves: First Version adapted from HTolman post.sh 2007 # 2020-06-10 J-Henrique Alves: Porting to R&D machine Hera -# 2020-07-31 Jessica Meixner: Removing points, now gridded data only +# 2020-07-31 Jessica Meixner: Removing points, now gridded data only # # $Id$ # # Attributes: # Language: Bourne-again (Bash) Shell -# Machine: WCOSS-DELL-P3 # ############################################################################### # @@ -59,7 +58,7 @@ # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. 
- if [ -z ${NTASKS} ] + if [ -z ${NTASKS} ] then echo "FATAL ERROR: requires NTASKS to be set " err=1; export err;${errchk} @@ -88,15 +87,15 @@ [[ "$LOUD" = YES ]] && set -x -# 0.c.3 Define CDATE_POST +# 0.c.3 Define CDATE_POST export CDATE_POST=${CDATE} export FHRUN=0 # --------------------------------------------------------------------------- # # 1. Get files that are used by most child scripts - export DOGRB_WAV=${DOGRB_WAV:-'YES'} #Create grib2 files - export DOGRI_WAV=${DOGRI_WAV:-'NO'} #Create interpolated grids + export DOGRB_WAV=${DOGRB_WAV:-'YES'} #Create grib2 files + export DOGRI_WAV=${DOGRI_WAV:-'NO'} #Create interpolated grids exit_code=0 @@ -106,10 +105,10 @@ echo '-----------------------' [[ "$LOUD" = YES ]] && set -x -# 1.a Model definition files and output files (set up using poe) +# 1.a Model definition files and output files (set up using poe) # 1.a.1 Copy model definition files - for grdID in $waveGRD $wavepostGRD $waveinterpGRD + for grdID in $waveGRD $wavepostGRD $waveinterpGRD do if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] then @@ -121,8 +120,8 @@ fi done -# 1.a.2 Check that model definition files exist - for grdID in $waveGRD $wavepostGRD $waveinterpGRD +# 1.a.2 Check that model definition files exist + for grdID in $waveGRD $wavepostGRD $waveinterpGRD do if [ ! 
-f mod_def.$grdID ] then @@ -142,7 +141,7 @@ [[ "$LOUD" = YES ]] && set -x fi done - + # 1.b Input template files @@ -154,7 +153,7 @@ then cp -f $PARMwave/${intGRD}_interp.inp.tmpl ${intGRD}_interp.inp.tmpl fi - + if [ -f ${intGRD}_interp.inp.tmpl ] then set +x @@ -209,7 +208,7 @@ set +x echo ' ' echo " Input files read and processed at : $(date)" - echo ' ' + echo ' ' echo ' Data summary : ' echo ' ---------------------------------------------' echo " Sufficient data for GRID interpolation : $DOGRI_WAV" @@ -227,7 +226,7 @@ echo ' Making command file for sbs grib2 and GRID Interpolation ' [[ "$LOUD" = YES ]] && set -x -# 1.a.2 Loop over forecast time to generate post files +# 1.a.2 Loop over forecast time to generate post files # When executed side-by-side, serial mode (cfp when run after the fcst step) # Contingency for RERUN=YES if [ "${RERUN}" = "YES" ]; then @@ -244,7 +243,7 @@ fhrg=$fhr iwaitmax=120 # Maximum loop cycles for waiting until wave component output file is ready (fails after max) while [ $fhr -le $FHMAX_WAV ]; do - + ymdh=$($NDATE $fhr $CDATE) YMD=$(echo $ymdh | cut -c1-8) HMS="$(echo $ymdh | cut -c9-10)0000" @@ -271,17 +270,17 @@ for wavGRD in ${waveGRD} ; do gfile=$COMIN/rundata/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS} while [ ! -s ${gfile} ]; do sleep 10; let iwait=iwait+1; done - if [ $iwait -eq $iwaitmax ]; then + if [ $iwait -eq $iwaitmax ]; then echo '*************************************************** ' echo " FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.$grdID " echo '*************************************************** ' echo ' ' [[ "$LOUD" = YES ]] && set -x - echo "$WAV_MOD_TAG post $grdID $date $cycle : field output missing." + echo "$WAV_MOD_TAG post $grdID $date $cycle : field output missing." 
err=3; export err;${errchk} exit $err fi - ln -s ${gfile} ./out_grd.${wavGRD} + ln -s ${gfile} ./out_grd.${wavGRD} done if [ "$DOGRI_WAV" = 'YES' ] @@ -308,7 +307,7 @@ fi echo "${GRIBDATA}/${fcmdigrd}.${nigrd}" >> ${fcmdnow} chmod 744 ${fcmdigrd}.${nigrd} - nigrd=$((nigrd+1)) + nigrd=$((nigrd+1)) done fi @@ -344,18 +343,18 @@ nlines=$( wc -l ${fcmdnow} | awk '{print $1}' ) while [ $iline -le $nlines ]; do line=$( sed -n ''$iline'p' ${fcmdnow} ) - if [ -z "$line" ]; then + if [ -z "$line" ]; then break else - if [ "$ifirst" = 'yes' ]; then - echo "#!/bin/sh" > cmdmfile.$nfile + if [ "$ifirst" = 'yes' ]; then + echo "#!/bin/sh" > cmdmfile.$nfile echo "$nfile cmdmfile.$nfile" >> cmdmprog chmod 744 cmdmfile.$nfile fi echo $line >> cmdmfile.$nfile nfile=$(( nfile + 1 )) if [ $nfile -eq $NTASKS ]; then - nfile=0 + nfile=0 ifirst='no' fi iline=$(( iline + 1 )) diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh index f515e35d01..c3cab2999d 100755 --- a/scripts/exgfs_wave_post_pnt.sh +++ b/scripts/exgfs_wave_post_pnt.sh @@ -11,13 +11,13 @@ # It executes several scripts forpreparing and creating output data # as follows: # -# wave_outp_spec.sh : generates spectral data for output locations -# wave_outp_bull.sh : generates bulletins for output locations +# wave_outp_spec.sh : generates spectral data for output locations +# wave_outp_bull.sh : generates bulletins for output locations # wave_outp_cat.sh : cats the by hour into the single output file -# wave_tar.sh : tars the spectral and bulletin multiple files +# wave_tar.sh : tars the spectral and bulletin multiple files # # Script history log: -# 2019-12-06 J-Henrique Alves: First Version adapted from HTolman post.sh 2007 +# 2019-12-06 J-Henrique Alves: First Version adapted from HTolman post.sh 2007 # 2020-06-10 J-Henrique Alves: Porting to R&D machine Hera # 2020-07-30 Jessica Meixner: Points only - no gridded data # 2020-09-29 Jessica Meixner: optimized by changing loop structures @@ -26,7 +26,6 
@@ # # Attributes: # Language: Bourne-again (Bash) Shell -# Machine: WCOSS-DELL-P3 # ############################################################################### # @@ -61,7 +60,7 @@ # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. - if [ -z ${NTASKS} ] + if [ -z ${NTASKS} ] then echo "FATAL ERROR: requires NTASKS to be set " err=1; export err;${errchk} @@ -77,7 +76,7 @@ export STA_DIR=$DATA/station_ascii_files if [ -d $STA_DIR ] - then + then rm -rf ${STA_DIR} fi mkdir -p ${STA_DIR} @@ -107,7 +106,7 @@ echo '-----------------------' [[ "$LOUD" = YES ]] && set -x -# 1.a Model definition files and output files (set up using poe) +# 1.a Model definition files and output files (set up using poe) # 1.a.1 Set up the parallel command tasks @@ -150,7 +149,7 @@ [[ "$LOUD" = YES ]] && set -x fi done - + # 1.c Output locations file rm -f buoy.loc @@ -246,28 +245,28 @@ -e "s/ITYPE/0/g" \ -e "s/FORMAT/F/g" \ ww3_outp_spec.inp.tmpl > ww3_outp.inp - + ln -s mod_def.$waveuoutpGRD mod_def.ww3 YMD=$(echo $CDATE | cut -c1-8) HMS="$(echo $CDATE | cut -c9-10)0000" if [ -f $COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} ] then - ln -s $COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} ./out_pnt.${waveuoutpGRD} + ln -s $COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} ./out_pnt.${waveuoutpGRD} else echo '*************************************************** ' echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.${waveuoutpGRD}.${YMD}.${HMS} " echo '*************************************************** ' echo ' ' [[ "$LOUD" = YES ]] && set -x - echo "$WAV_MOD_TAG post $waveuoutpGRD $CDATE $cycle : field output missing." + echo "$WAV_MOD_TAG post $waveuoutpGRD $CDATE $cycle : field output missing." err=4; export err;${errchk} fi - + rm -f buoy_tmp.loc buoy_log.ww3 ww3_oup.inp ln -fs ./out_pnt.${waveuoutpGRD} ./out_pnt.ww3 ln -fs ./mod_def.${waveuoutpGRD} ./mod_def.ww3 export pgm=ww3_outp;. 
prep_step - $EXECwave/ww3_outp > buoy_lst.loc 2>&1 + $EXECwave/ww3_outp > buoy_lst.loc 2>&1 export err=$?;err_chk @@ -326,7 +325,7 @@ set +x echo ' ' echo " Input files read and processed at : $(date)" - echo ' ' + echo ' ' echo ' Data summary : ' echo ' ---------------------------------------------' echo " Sufficient data for spectral files : $DOSPC_WAV ($Nb points)" @@ -336,7 +335,7 @@ [[ "$LOUD" = YES ]] && set -x # --------------------------------------------------------------------------- # -# 2. Make files for processing boundary points +# 2. Make files for processing boundary points # # 2.a Command file set-up @@ -348,10 +347,10 @@ touch cmdfile chmod 744 cmdfile -# 1.a.2 Loop over forecast time to generate post files +# 1.a.2 Loop over forecast time to generate post files fhr=$FHMIN_WAV while [ $fhr -le $FHMAX_WAV_PNT ]; do - + echo " Creating the wave point scripts at : $(date)" ymdh=$($NDATE $fhr $CDATE) YMD=$(echo $ymdh | cut -c1-8) @@ -360,7 +359,7 @@ FH3=$(printf %03i $fhr) rm -f tmpcmdfile.${FH3} - touch tmpcmdfile.${FH3} + touch tmpcmdfile.${FH3} mkdir output_$YMDHMS cd output_$YMDHMS @@ -371,9 +370,9 @@ pfile=$COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} if [ -f ${pfile} ] - then + then ln -fs ${pfile} ./out_pnt.${waveuoutpGRD} - else + else echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.$waveuoutpGRD.${YMD}.${HMS} " echo ' ' [[ "$LOUD" = YES ]] && set -x @@ -382,7 +381,7 @@ fi cd $DATA - + if [ "$DOSPC_WAV" = 'YES' ] then export dtspec=3600. 
@@ -443,18 +442,18 @@ nlines=$( wc -l cmdfile | awk '{print $1}' ) while [ $iline -le $nlines ]; do line=$( sed -n ''$iline'p' cmdfile ) - if [ -z "$line" ]; then + if [ -z "$line" ]; then break else - if [ "$ifirst" = 'yes' ]; then - echo "#!/bin/sh" > cmdmfile.$nfile + if [ "$ifirst" = 'yes' ]; then + echo "#!/bin/sh" > cmdmfile.$nfile echo "$nfile cmdmfile.$nfile" >> cmdmprog chmod 744 cmdmfile.$nfile fi echo $line >> cmdmfile.$nfile nfile=$(( nfile + 1 )) if [ $nfile -eq $NTASKS ]; then - nfile=0 + nfile=0 ifirst='no' fi iline=$(( iline + 1 )) @@ -482,7 +481,7 @@ exit=$? else chmod 744 cmdfile - ./cmdfile + ./cmdfile exit=$? fi @@ -500,7 +499,7 @@ exit $err fi -# 2.b Loop over each buoy to cat the final buoy file for all fhr +# 2.b Loop over each buoy to cat the final buoy file for all fhr cd $DATA diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh index 5984ba7895..da3621838c 100755 --- a/scripts/exglobal_atmos_analysis.sh +++ b/scripts/exglobal_atmos_analysis.sh @@ -13,9 +13,8 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Cray / Theia # -################################################################################ +################################################################################# # Set environment. export VERBOSE=${VERBOSE:-"YES"} @@ -426,7 +425,7 @@ if [ $USE_CORRELATED_OBERRS == "YES" ]; then exit 1 fi -# Correlated error utlizes mkl lapack. Found it necesary to fix the +# Correlated error utlizes mkl lapack. Found it necesary to fix the # number of mkl threads to ensure reproducible results independent # of the job configuration. export MKL_NUM_THREADS=1 @@ -601,7 +600,7 @@ fi if [ $GENDIAG = "YES" ] ; then if [ $lrun_subdirs = ".true." 
] ; then if [ -d $DIAG_DIR ]; then - rm -rf $DIAG_DIR + rm -rf $DIAG_DIR fi npe_m1="$(($npe_gsi-1))" for pe in $(seq 0 $npe_m1); do @@ -681,9 +680,9 @@ EOFunzip diag_file=$(echo $type | cut -d',' -f1) if [ $USE_CFP = "YES" ] ; then echo "$nm $DATA/unzip.sh $diag_file $DIAG_SUFFIX" | tee -a $DATA/mp_unzip.sh - if [ ${CFP_MP:-"NO"} = "YES" ]; then - nm=$((nm+1)) - fi + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi else fname=$(echo $diag_file | cut -d'.' -f1) date=$(echo $diag_file | cut -d'.' -f2) @@ -959,7 +958,7 @@ export err=$?; err_chk ############################################################## -# If full analysis field written, calculate analysis increment +# If full analysis field written, calculate analysis increment # here before releasing FV3 forecast if [ $DO_CALC_INCREMENT = "YES" ]; then $CALCINCPY @@ -1005,8 +1004,8 @@ cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA ############################################################## -# Add this statement to release the forecast job once the -# atmopsheric analysis and updated surface RESTARTS are +# Add this statement to release the forecast job once the +# atmopsheric analysis and updated surface RESTARTS are # available. 
Do not release forecast when RUN=enkf ############################################################## if [ $SENDECF = "YES" -a "$RUN" != "enkf" ]; then diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc.sh index 5160815b44..02a45de52e 100755 --- a/scripts/exglobal_atmos_analysis_calc.sh +++ b/scripts/exglobal_atmos_analysis_calc.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Dell / Hera # ################################################################################ @@ -151,7 +150,7 @@ rm -rf dir.* ############################################################## # If analysis increment is written by GSI, produce an analysis file here -if [ $DO_CALC_ANALYSIS == "YES" ]; then +if [ $DO_CALC_ANALYSIS == "YES" ]; then # link analysis and increment files $NLN $ATMANL siganl $NLN $ATMINC siginc.nc @@ -173,12 +172,12 @@ if [ $DO_CALC_ANALYSIS == "YES" ]; then $NLN $ATMG03 sigf03 $NLN $ATMGES sigf06 $NLN $ATMG09 sigf09 - + [[ -f $ATMG04 ]] && $NLN $ATMG04 sigf04 [[ -f $ATMG05 ]] && $NLN $ATMG05 sigf05 [[ -f $ATMG07 ]] && $NLN $ATMG07 sigf07 [[ -f $ATMG08 ]] && $NLN $ATMG08 sigf08 - + # Link hourly backgrounds (if present) if [ -f $ATMG04 -a -f $ATMG05 -a -f $ATMG07 -a -f $ATMG08 ]; then nhr_obsbin=1 @@ -195,7 +194,7 @@ fi if [ $DOGAUSFCANL = "YES" ]; then export APRUNSFC=$APRUN_GAUSFCANL export OMP_NUM_THREADS_SFC=$NTHREADS_GAUSFCANL - + $GAUSFCANLSH export err=$?; err_chk fi diff --git a/scripts/exglobal_atmos_sfcanl.sh b/scripts/exglobal_atmos_sfcanl.sh index 50a6f42d00..a8c275c10d 100755 --- a/scripts/exglobal_atmos_sfcanl.sh +++ b/scripts/exglobal_atmos_sfcanl.sh @@ -13,7 +13,6 @@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Dell # ################################################################################ diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh index 89d23a992e..e2422b5877 100755 --- a/scripts/exglobal_diag.sh +++ b/scripts/exglobal_diag.sh @@ -13,7 +13,6 
@@ # # Attributes: # Language: POSIX shell -# Machine: WCOSS-Dell / Hera # ################################################################################ @@ -197,9 +196,9 @@ EOFdiag if [ $count -gt 1 ]; then if [ $USE_CFP = "YES" ]; then echo "$nm $DATA/diag.sh $lrun_subdirs $binary_diag $type $loop $string $CDATE $DIAG_COMPRESS $DIAG_SUFFIX" | tee -a $DATA/mp_diag.sh - if [ ${CFP_MP:-"NO"} = "YES" ]; then - nm=$((nm+1)) - fi + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi else if [ $binary_diag = ".true." ]; then cat ${prefix}${type}_${loop}* > diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} @@ -212,7 +211,7 @@ EOFdiag elif [ $count -eq 1 ]; then cat ${prefix}${type}_${loop}* > diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} if [ $DIAG_COMPRESS = "YES" ]; then - $COMPRESS diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} + $COMPRESS diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} fi echo "diag_${type}_${string}.${CDATE}*" >> ${diaglist[n]} numfile[n]=$(expr ${numfile[n]} + 1) @@ -250,7 +249,7 @@ EOFdiag for rtype in $rlist; do ${CHGRP_CMD} *${rtype}* done - + # If requested, create diagnostic file tarballs if [ $DIAG_TARBALL = "YES" ]; then echo $(date) START tar diagnostic files >&2 diff --git a/scripts/run_gfsmos_master.sh.cray b/scripts/run_gfsmos_master.sh.cray deleted file mode 100755 index e013f0b34c..0000000000 --- a/scripts/run_gfsmos_master.sh.cray +++ /dev/null @@ -1,330 +0,0 @@ -#!/bin/sh -####################################################################### -# run_gfsmos.sh -# -# History: -# 03/29/13 Scallion Created (Adapted from Eric Engle's -# "master" scripts) -# 12/03/13 Scallion Added ptype/wxgrid -# 04/18/18 J Wagner Set up to run GFSMOS after GFS FV3 retros -# -# Purpose: -# To run the GFS-MOS operational suite -# -####################################################################### -set -x - -if (( $# > 1 )); then - echo "Incorrect number of arguments." - echo "Syntax: $0 [PDYCYC (optional)]" - echo "Exiting..." 
- exit 1 -fi - -####################################################################### -# Source bash_profile to run with proper modules on cron -####################################################################### -#. ~/.profile - -####################################################################### -# Set global variables neede in the run script and/or each individual -# job script. -####################################################################### - - -. $MODULESHOME/init/sh 2>/dev/null -module load prod_envir/1.1.0 -module load cfp-intel-sandybridge -module load craype-hugepages2M -export FORT_BUFFERED=TRUE -export KMP_AFFINITY=disabled -export envir=prod -#export QUEUE=dev - -#------------------ -export SENDCOM_SAVE=$SENDCOM -export SENDCOM=YES -#------------------ - -dateIn=$1 -export PDY=$(echo $dateIn | cut -c 1-8) -export cyc=$(echo $dateIn | cut -c 9-10) -export prevday=$($NDATE -24 ${PDY}00 | cut -c1-8) - -# -# VARIABLES TO SET -# PTMPROOT and STMPROOT should be set to the user's directories -# COMDATEROOT defined by module prod_util -#export PTMPROOT=/gpfs/hps3/ptmp/$USER -export PTMPROOT=$ROTDIR/gfsmos.$PDY -#export STMPROOT=/gpfs/hps3/stmp/$USER -export STMPROOT=$RUNDIR/gfsmos.$PDY -export DATAROOT=$STMPROOT -export MODELROOT=$NWROOThps -export MODELDIR=$MODELROOT/gfsmos.v5.0.6 -export CODEDIR=$MODELROOT/mos_shared.v2.6.1 - -if [[ $SITE == "SURGE" ]]; then - export siteprefix=g -elif [[ $SITE == "LUNA" ]]; then - export siteprefix=t -else - echo "SITE $SITE not recognized" - exit -fi - -export range=${range:-"both"} -export skipmodel=n -export skipprep=n -export stnonly=Y -export cycle="t${cyc}z" -export pid="gfs.$$" -export dailylog=$PTMPROOT/dailylog/log.$PDY -export jlogfile=$dailylog/jlogfile_gfsmos -mkdir -p $dailylog - -export SENDDBN=NO -export GET_IOPROFILE=NO - -# Specify Execution Areas -export HOMEmdl=$MODELDIR -export HOMEcode=$CODEDIR - -if [ ! -d $HOMEmdl ]; then - echo "$HOMEmdl does not exist" - exit 1 -fi - -if [ ! 
-d $HOMEcode ]; then - echo "$HOMEcode does not exist" - exit 1 -fi - -# Load modules -module load prod_util - -module unload grib_util/1.0.3 -module use /usrx/local/nceplibs/modulefiles -module load grib_util/1.1.0 - -# VARIABLES TO SET -# GFSDIR should be set to the directory containing the input GFS FV3 data -# COMOUT should be set to the directory where the GFSMOS output files will be saved -#export GFSDIR=/gpfs/hps3/ptmp/emc.glopara/fv3fy18retro2 -export GFSDIR=$ROTDIR -export COMINgfs=$GFSDIR/gfs.${PDY}/${cyc} -export COMOUT=$ROTDIR/gfsmos.$PDY - -if [[ ! -d $PTMPROOT/qprod ]]; then - mkdir -p $PTMPROOT/qprod -fi - -if [[ ! -d $COMOUT ]]; then - mkdir -p $COMOUT -fi - -export PCOM=$COMOUT - -if [[ ! -d $PCOM ]]; then - mkdir -p $PCOM -fi - - -# NOTE: On WCOSS_C the directory from which bsub -# is executed must exist when the submitted job -# begins. Otherwise, the submitted job fails -# with TERM_CWD_NOTEXIST error. - -mkdir -p $DATAROOT -cd $DATAROOT - - -######################################################################## -# JGFSMOS_PREP47 -######################################################################## -export job=gfsmos_prep_${cyc}_${pid} -export COMIN=$GFSDIR -jobCard=$HOMEmdl/jobs/JGFSMOS_PREP -# Define DATA and create directory -export DATA=$DATAROOT/gfsmos_prep_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog - -export PTILE=7 -export NTASK=7 -export OMP_NUM_THREADS=1 - -bsub -J $job \ - -o $logfile \ - -q $QUEUE \ - -W 2:00 \ - -M 2500 \ - -P $ACCOUNT \ - -extsched 'CRAYLINUX[]' \ - -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ - $jobCard - -######################################################################## -# JGFSMOS_FORECAST -######################################################################## -if [ $prevday -lt $(date -u +%Y%m%d -d "48 hours ago") ]; then - export COMINhourly=$PTMPROOT/hourly.$PDY - if [[ ! 
-d $COMINhourly ]]; then - mkdir -p $COMINhourly - fi - \cp /gpfs/${siteprefix}d1/mdl/mdlstat/save/rotate/hry/${PDY}03 $COMINhourly/sfctbl.03 - \cp /gpfs/${siteprefix}d1/mdl/mdlstat/save/rotate/hry/${PDY}09 $COMINhourly/sfctbl.09 - \cp /gpfs/${siteprefix}d1/mdl/mdlstat/save/rotate/hry/${PDY}15 $COMINhourly/sfctbl.15 - \cp /gpfs/${siteprefix}d1/mdl/mdlstat/save/rotate/hry/${PDY}21 $COMINhourly/sfctbl.21 -fi - -# -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT -# -export job=gfsmos_fcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_FORECAST -export DATA=$DATAROOT/gfsmos_fcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -deps="done(gfsmos_prep_${cyc}_${pid})" -complist="metar cooprfcmeso tstms" -complist2="" -export PTILE=3 -export NTASK=3 -export OMP_NUM_THREADS=1 - -bsub -J ${job} -oo $logfile -q ${QUEUE} -P $ACCOUNT \ - -W 2:30 -M 2000 \ - -extsched 'CRAYLINUX[]' \ - -R '1*{select[craylinux && !vnode]} + 72*{select[craylinux && vnode]span[ptile=24]}' \ - -w "$deps" \ - $jobCard -# -######################################################################## -# JGFSMOS_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_PRDGEN -export job=gfsmos_prdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$DATAROOT/gfsmos_prdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -deps="done(gfsmos_fcst_${cyc}_${pid})" -nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' -export PTILE=1 -export NTASK=1 -export OMP_NUM_THREADS=1 -# -bsub -J ${job} -oo $logfile -q ${QUEUE} -P $ACCOUNT \ - -W 1:00 -M 2000 \ - -extsched 'CRAYLINUX[]' \ - -R "$nodes" \ - -w "$deps" \ - $jobCard -# -######################################################################## -# EXTENDED-RANGE JOBS 
-######################################################################## - -######################################################################## -# JGFSMOS_EXT_PREP47 -######################################################################## -export job=gfsmos_extprep_${cyc}_${pid} -export COMIN=$GFSDIR -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_PREP -# Define DATA and create directory -export DATA=$DATAROOT/gfsmos_extprep_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog - -export PTILE=5 -export NTASK=10 -export OMP_NUM_THREADS=1 - -bsub -J $job \ - -o $logfile \ - -q $QUEUE \ - -W 2:00 \ - -M 2500 \ - -P $ACCOUNT \ - -extsched 'CRAYLINUX[]' \ - -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ - $jobCard - - -# Skip EXT_FORECAST for 06/18 -if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -######################################################################## -# JGFSMOS_EXT_FORECAST -######################################################################## -# -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT -# -export job=gfsmos_extfcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_FORECAST -export DATA=$DATAROOT/gfsmos_extfcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" && "$range" == "both" ]]; then - deps="done(gfsmos_extprep_${cyc}_${pid}) && done(gfsmos_prdgen_${cyc}_${pid})" -elif [[ "$skipprep" != "y" && "$range" == "ext" ]]; then - deps="done(gfsmos_extprep_${cyc}_${pid})" -elif [[ "$skipprep" == "y" && "$range" == "ext" ]]; then - deps="" -else - deps="done(gfsmos_prdgen_${cyc}_${pid})" -fi -# -export PTILE=4 -export NTASK=10 -export OMP_NUM_THREADS=1 -# -bsub -J ${job} -oo $logfile -q $QUEUE -P $ACCOUNT \ - -W 4:00 -M 2000 \ - -extsched 'CRAYLINUX[]' \ - -R '1*{select[craylinux && !vnode]} + 72*{select[craylinux && vnode]span[ptile=24]}' \ - -w "$deps" \ - $jobCard -# -fi #endif for skipping 06/18 ext_fcst 
-######################################################################### -## JGFSMOS_EXT_PRDGEN -######################################################################### -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_PRDGEN -export job=gfsmos_extprdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$DATAROOT/gfsmos_extprdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$cyc" == "06" || "$cyc" == "18" ]]; then - deps="done(gfsmos_prdgen_${cyc}_${pid})" -elif [[ "$range" == "both" ]]; then - deps="done(gfsmos_extfcst_${cyc}_${pid}) && done(gfsmos_prdgen_${cyc}_${pid})" -else - deps="done(gfsmos_extfcst_${cyc}_${pid})" -fi -nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' -export PTILE=1 -export NTASK=1 -export OMP_NUM_THREADS=1 - -bsub -J ${job} -oo $logfile -q ${QUEUE} -P $ACCOUNT \ - -W 2:00 -M 2000 \ - -extsched 'CRAYLINUX[]' \ - -R "$nodes" \ - -w "$deps" \ - $jobCard - - -#--------------------- -export SENDCOM=$SENDCOM_SAVE diff --git a/scripts/run_gfsmos_master.sh.dell b/scripts/run_gfsmos_master.sh.dell deleted file mode 100755 index eb255e11f5..0000000000 --- a/scripts/run_gfsmos_master.sh.dell +++ /dev/null @@ -1,765 +0,0 @@ -#!/bin/sh -####################################################################### -# run_gfsmos.sh -# -# History: -# 03/29/13 Scallion Created (Adapted from Eric Engle's -# "master" scripts) -# 12/03/13 Scallion Added ptype/wxgrid -# -# Purpose: -# To run the GFS-MOS operational suite -# -####################################################################### -set -x - -if (( $# > 1 )); then - echo "Incorrect number of arguments." - echo "Syntax: $0 [PDYCYC (optional)]" - echo "Exiting..." 
- exit 1 -fi - -##export PATH=./:$PATH - -####################################################################### -# Only run on the dev machine -####################################################################### -#/u/Scott.Scallion/bin/run-on-dev.sh -#[[ $? -eq 1 ]] && exit 1 - -####################################################################### -# Source bash_profile to run with proper modules on cron -####################################################################### -#elim . ~/.bash_profile 1> /dev/null 2>&1 -#. ~/.bash_profile - -####################################################################### -# Check the host to determine whether tide or gyre is prod -####################################################################### -#chkhost=$(hostname | cut -c1) -if [[ $SITE == "GYRE" ]] || [[ $SITE == "SURGE" ]] || [[ $SITE == "VENUS" ]]; then - gort="g" -elif [[ $SITE == "TIDE" ]] || [[ $SITE == "LUNA" ]] || [[ $SITE == "MARS" ]]; then - gort="t" -fi - -####################################################################### -# Set global variables neede in the run script and/or each individual -# job script. -####################################################################### -. 
$MODULESHOME/init/bash -#module purge 2>/dev/null -module load EnvVars/1.0.2 2>/dev/null -module load ips/18.0.1.163 2>/dev/null -module load impi/18.0.1 2>/dev/null -module load lsf/10.1 2>/dev/null -module load prod_envir/1.0.3 2>/dev/null -module load prod_util/1.1.4 2>/dev/null -module load CFP/2.0.1 2>/dev/null - -module use -a /gpfs/dell1/nco/ops/nwpara/modulefiles/compiler_prod/ips/18.0.1 -module load grib_util/1.1.1 2>/dev/null - -export FORT_BUFFERED=TRUE -export KMP_AFFINITY=disabled -export envir=prod -export RUN_ENVIR=${RUN_ENVIR:-""} -#export QUEUE=dev - -#------------------ -export SENDCOM=YES -export SENDCOM_SAVE=$SENDCOM -#------------------ - -#-------------------------------- -# COMDATEROOT defined by module prod_util -##export PTMPROOT=/gpfs/dell2/ptmp/$USER -##export STMPROOT=/gpfs/dell2/stmp/$USER -##export MODELROOT=/gpfs/dell2/mdl/mdlstat/noscrub/usr/$USER/nwprod -##export MODELDIR=$MODELROOT/gfsmos.v5.0.6 -##export CODEDIR=$MODELROOT/mos_shared.v2.6.1 - -export PTMPROOT=$ROTDIR/gfsmos.$PDY -export STMPROOT=$RUNDIR/gfsmos.$PDY -export DATAROOT=$STMPROOT -##export MODELROOT=/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/gfsmos -export MODELROOT=/gpfs/dell2/mdl/mdlstat/noscrub/usr/Scott.Scallion/gfsv16 -#export MODELDIR=$MODELROOT/gfsmos.v5.2.0.1 -export MODELDIR=$MODELROOT/gfsmos.v5.2.0.1-restructure -export CODEDIR=/gpfs/dell1/nco/ops/nwpara/mos_shared.v2.6.5 -#-------------------------------- - -#-------------------------------- -##export PDY=20180419 -##export PDY=$(date -u +%Y%m%d) -##export prevday=$(date -u --date="${PDY} 0000 UTC 24 hours ago" +%Y%m%d) - -dateIn=$1 -#if [ $REALTIME = "YES" ]; then -# GDATE=$($NDATE -24 $dateIn) -# dateIn=$GDATE -#fi -export PDY=$(echo $dateIn | cut -c 1-8) -export cyc=$(echo $dateIn | cut -c 9-10) -export prevday=$($NDATE -24 ${PDY}00 | cut -c1-8) -#-------------------------------- - - -#-------------------------------- -##let hour=$(date -u +"%-H") -##if [[ $hour -ge 4 && $hour -lt 10 ]]; then -## 
export cyc=00 -## export range=both -##elif [[ $hour -ge 10 && $hour -lt 16 ]]; then -## export cyc=06 -## export range=both -##elif [[ $hour -ge 16 && $hour -lt 22 ]]; then -## export cyc=12 -## export range=both -##elif [[ $hour -ge 22 && $hour -le 23 ]]; then -## export cyc=18 -## export range=both -##else -## export cyc=18 -## export PDY=$prevday -## export range=both -##fi -## -##cyc_list="00 06 12 18" -##if [[ $# == 1 ]] && [[ $cyc_list =~ $1 ]]; then -## export cyc=$1 -## if [ "$cyc" == "00" -o "$cyc" == "12" ]; then -## export range=both -## else -## export range=both -## fi -##elif [[ $# == 1 ]]; then -## echo "$1 is not a valid cycle (choose 00 or 12)" -## exit 1 -##fi - -# ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -export range=${range:-"both"} - -#if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -# if [ $cyc -eq 00 ]; then -# export range=both -# else -# export range=short -# fi -# ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -export stnonly='Y' -export skipmodel=n -export skipprep=n -export cycle="t${cyc}z" -export pid="gfs_qprod.$$" -export dailylog=$PTMPROOT/dailylog/log.$PDY -export jlogfile=$dailylog/jlogfile_gfsmos -mkdir -p $dailylog - -export SENDDBN=NO -export SENDDBN_NTC=NO -export GET_IOPROFILE=NO - -# Specify Execution Areas -export HOMEmdl=$MODELDIR -export HOMEcode=$CODEDIR -#export utilscript=/gpfs/hps/mdl/mdlstat/noscrub/usr/Scott.Scallion/ush - -if [ ! -d $HOMEmdl ]; then - echo "$HOMEmdl does not exist" - exit 1 -fi - -if [ ! 
-d $HOMEcode ]; then - echo "$HOMEcode does not exist" - exit 1 -fi - - -#------------------------------------- -# Define COMOUT (COMIN will vary by job) -#export GFSDIR=$COMROOThps/gfs/prod/gfs.$PDY -##export GFSDIR=$COMROOThps/gfs/prod/gfs.$PDY -##export COMINgfs=$GFSDIR -##export COMOUT=$PTMPROOT/qprod/gfsmos.$PDY - -export GFSDIR=$COMROOT/gfs/prod/gfs.${PDY} -if [[ "$RUN_ENVIR" = "emc" ]] ; then - export GFSDIR=$ROTDIR/gfs.${PDY} -fi -export COMINgfs=$GFSDIR -export COMOUT=$ROTDIR/gfsmos.$PDY - -#export COMINm1=$PTMPROOT/gfsmos.$prevday -#export COMINm1=$PTMPROOT/qprod/gfsmos_gmos_pre-nbmv2.$prevday -#export COMINm1=$PTMPROOT/qprod/gfsmos.$prevday - -if [[ ! -d $PTMPROOT/qprod ]]; then - mkdir -p $PTMPROOT/qprod -fi - -if [[ ! -d $COMOUT ]]; then - mkdir -p $COMOUT -fi - -export COMOUTwmo=$PTMPROOT/wmo - -if [[ ! -d $COMOUTwmo ]]; then - mkdir -p $COMOUTwmo -fi - - -# NOTE: On WCOSS_DELL_P3 the directory from which bsub -# is executed must exist when the submitted job -# begins. Otherwise, the submitted job fails -# with TERM_CWD_NOTEXIST error. - -mkdir -p $DATAROOT -cd $DATAROOT - - -if [ "$range" == "short" -o "$range" == "both" ]; then -######################################################################## -######################################################################## -# SHORT-RANGE JOBS -######################################################################## -######################################################################## - -######################################################################## -# Wait for 1 degree model data -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f096 ]]; then -## echo "Model file found. Proceeding..." 
-## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done -## -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." -## exit 1 -##fi -## -##fi #endif for skipmodel - -######################################################################## -# Wait for 1/4 degree model data -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.0p25.f096 ]]; then -## echo "Model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done - -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." -## exit 1 -##fi -## -##fi #endif for skipmodel - -######################################################################## -# Wait for 1.0 degree GFS model files before running (Pacific MOS) -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f096 ]]; then -## echo "1.0 degree model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done -## -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." 
-## exit 1 -##fi -## -##fi #endif for skipmodel - - -######################################################################## -# JGFSMOS_PREP47 -######################################################################## -if [[ "$skipprep" != "y" ]]; then - export job=gfsmos_prep_${cyc}_${pid} - export COMIN=$GFSDIR - jobCard=$HOMEmdl/jobs/JGFSMOS_STN_PREP -# Define DATA and create directory - export DATA=$STMPROOT/qprod/gfsmos_prep_${cyc} - export logfile=$dailylog/$job.out - export out_dir=$dailylog - - export NTASK=3 - export PTILE=3 - export OMP_NUM_THREADS=1 - - bsub -J $job \ - -o $logfile \ - -q $QUEUE \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 0:25 \ - -P $ACCOUNT \ - $jobCard - -fi #end for skipprep - -######################################################################## -# JGFSMOS_FORECAST -######################################################################## -if [[ ! -d /gpfs/dell1/nco/ops/com/mos/prod/hry_mos.$PDY ]]; then - export ROTATE=/gpfs/dell2/mdl/mdlstat/noscrub/rotate - export COMINhry_mos=$PTMPROOT/hourly.$PDY - if [[ ! 
-d $COMINhry_mos ]]; then - mkdir -p $COMINhry_mos - fi - \cp $ROTATE/hry/${PDY}03 $COMINhry_mos/sfctbl.03 - \cp $ROTATE/hry/${PDY}09 $COMINhry_mos/sfctbl.09 - \cp $ROTATE/hry/${PDY}15 $COMINhry_mos/sfctbl.15 - \cp $ROTATE/hry/${PDY}21 $COMINhry_mos/sfctbl.21 -fi - -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT - -export job=gfsmos_fcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_STN_FORECAST -export DATA=$STMPROOT/qprod/gfsmos_fcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" ]]; then - deps="done(gfsmos_prep_${cyc}_${pid})" -else - deps="" -fi -if [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" != "Y" ]]; then - complist="metar pac cooprfcmeso goe higoe akgoe tstms" - complist2="copopo3 coptype akpopo3 akptype" -else - complist="metar cooprfcmeso tstms" - complist2="" -fi - -if [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" != "Y" ]]; then - export NTASK=11 - export PTILE=1 - export OMP_NUM_THREADS=1 -elif [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" == "Y" ]]; then - export NTASK=5 - export PTILE=5 - export OMP_NUM_THREADS=1 -else - export NTASK=4 - export PTILE=4 - export OMP_NUM_THREADS=1 -fi - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 0:20 \ - -P $ACCOUNT \ - -w "$deps" \ - $jobCard - -######################################################################## -# JGFSMOS_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_STN_PRDGEN -export job=gfsmos_prdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$STMPROOT/qprod/gfsmos_prdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -deps="done(gfsmos_fcst_${cyc}_${pid})" -# Set Nodes -if [ $cyc -eq 00 -o $cyc -eq 12 ] && [[ "$stnonly" != "Y" ]]; then - 
#nodes='1*{select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=20 -else - #nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=1 -fi - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 0:30 \ - -P $ACCOUNT \ - -w "$deps" \ - $jobCard - -######################################################################## -# JGFSMOS_WX_PRDGEN (00z and 12z only) -######################################################################## -#if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -# jobCard=$HOMEmdl/jobs/JGFSMOS_WX_PRDGEN -# export job=gfsmos_wx_prdgen_${cyc}_${pid} -# # Change COMIN back to COMOUT -# export COMIN=$COMOUT -# # Define DATA and create directory -# export DATA=$STMPROOT/qprod/gfsmos_wx_prdgen_${cyc} -# export logfile=$dailylog/$job.out -# export out_dir=$dailylog -# # Set dependencies -# deps="done(gfsmos_prdgen_${cyc}_${pid})" -# -# export NTASK=2 -# export PTILE=1 -# export OMP_NUM_THREADS=20 -# -# #bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -# # -W 1:00 -M 1000 \ -# # -extsched 'CRAYLINUX[]' \ -# # -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ -# # -w "$deps" \ -# # $jobCard -# -# bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -M 3000 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard -#fi - -fi #endif short - -######################################################################## -######################################################################## -# EXTENDED-RANGE JOBS -######################################################################## -######################################################################## - -if [ "$range" == "ext" -o "$range" == "both" ]; then - 
-######################################################################## -# Wait for 1/4 degree model data -######################################################################## -#if [ "$skipmodel" != "y" ]; then -#let attempts=1 -#proj_list=$(seq -f %03g 252 12 384) -#for tau in $proj_list -#do -# while [[ $attempts -le 120 ]] -# do -# if [[ -f $GFSDIR/gfs.$cycle.pgrb2.0p25.f${tau} && -f $GFSDIR/gfs.$cycle.pgrb2.0p50.f${tau} ]]; then -# echo "Model file found. Proceeding to next..." -# break -# else -# if [[ $attempts -le 60 ]]; then -# sleep 60 -# else -# sleep 180 -# fi -# attempts=$((attempts+1)) -# fi -# done -#done -# -#if [[ $attempts -gt 120 ]]; then -# echo "Waited 4 hours for model file, but it did not appear." -# echo "Exiting..." -# exit 1 -#fi - -#fi #endif for skipmodel - -######################################################################## -# Wait for 1.0/2.5 degree GFS model files before running (Pacific GFS) -######################################################################## -#if [ "$skipmodel" != "y" ]; then -#let attempts1deg=1 -#proj_list=$(seq -f %03g 204 12 384) -#for tau in $proj_list -#do -# while [[ $attempts1deg -le 120 ]] -# do -## ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -# #if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f384 && -f $GFSDIR/gfs.$cycle.pgrb2.2p50.f240 ]]; then -# if [ $cyc -eq 00 ]; then -# waitfile=$GFSDIR/gfs.$cycle.pgrb2.1p00.f384 -# else -# waitfile=$GFSDIR/gfs.$cycle.pgrb2.1p00.f180 -# fi -# if [[ -f $waitfile ]]; then -## ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -# #echo "1.0/2.5 degree model files found. Proceeding to next..." -# echo "1.0 degree model files found. Proceeding to next..." -# break -# else -# if [[ $attempts1deg -le 60 ]]; then -# sleep 60 -# else -# sleep 180 -# fi -# attempts1deg=$((aattempts1deg+1)) -# fi -# done -#done -# -#if [[ $attempts1deg -gt 120 ]]; then -# echo "Waited 4 hours for 1.0 degree model file, but it did not appear." 
-# echo "Exiting..." -# exit 1 -#fi - -#fi #endif for skipmodel - -######################################################################## -# JGFSMOS_EXT_PREP47 -######################################################################## -if [[ "$skipprep" != "y" ]]; then - export job=gfsmos_extprep_${cyc}_${pid} - export COMIN=$GFSDIR - jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_PREP -# Define DATA and create directory - export DATA=$STMPROOT/qprod/gfsmos_extprep_${cyc} - export logfile=$dailylog/$job.out - export out_dir=$dailylog - - export NTASK=2 - export PTILE=2 - export OMP_NUM_THREADS=1 - -# bsub -J $job \ -# -o $logfile \ -# -q $QUEUE \ -# -W 2:00 \ -# -M 2500 \ -# -P MDLST-T2O \ -# -extsched 'CRAYLINUX[]' \ -# -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ -# $jobCard - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 2:00 \ - -P $ACCOUNT \ - $jobCard - -fi #end for skipprep - -# Skip EXT_FORECAST for 06/18 -if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -######################################################################## -# JGFSMOS_EXT_FORECAST -######################################################################## -if [[ ! -d /gpfs/dell1/nco/ops/com/mos/prod/hry_mos.$PDY ]]; then - export ROTATE=/gpfs/dell2/mdl/mdlstat/noscrub/rotate - export COMINhry_mos=$PTMPROOT/hourly.$PDY - if [[ ! 
-d $COMINhry_mos ]]; then - mkdir -p $COMINhry_mos - fi - \cp $ROTATE/hry/${PDY}03 $COMINhry_mos/sfctbl.03 - \cp $ROTATE/hry/${PDY}09 $COMINhry_mos/sfctbl.09 - \cp $ROTATE/hry/${PDY}15 $COMINhry_mos/sfctbl.15 - \cp $ROTATE/hry/${PDY}21 $COMINhry_mos/sfctbl.21 -fi - -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT - -export job=gfsmos_extfcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_FORECAST -export DATA=$STMPROOT/qprod/gfsmos_extfcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" && "$range" == "both" ]]; then - deps="done(gfsmos_extprep_${cyc}_${pid}) && done(gfsmos_fcst_${cyc}_${pid})" -elif [[ "$skipprep" != "y" && "$range" == "ext" ]]; then - deps="done(gfsmos_extprep_${cyc}_${pid})" -elif [[ "$skipprep" == "y" && "$range" == "ext" ]]; then - deps="" -else - deps="done(gfsmos_fcst_${cyc}_${pid})" -fi - -if [[ $stnonly != "Y" ]]; then - export NTASK=10 - export PTILE=1 - export OMP_NUM_THREADS=1 -else - export NTASK=3 - export PTILE=3 - export OMP_NUM_THREADS=1 -fi - -#bsub -J ${job} -oo $logfile -q $QUEUE -P MDLST-T2O \ -# -W 1:00 -M 2000 \ -# -extsched 'CRAYLINUX[]' \ -# -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ -# -w "$deps" \ -# $jobCard - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 1:00 \ - -P $ACCOUNT \ - -w "$deps" \ - $jobCard - -fi #endif for skipping 06/18 ext_fcst -######################################################################## -# JGFSMOS_EXT_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_PRDGEN -export job=gfsmos_extprdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$STMPROOT/qprod/gfsmos_extprdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies 
-if [[ "$cyc" == "06" || "$cyc" == "18" ]]; then - deps="done(gfsmos_prdgen_${cyc}_${pid})" -elif [[ "$range" == "both" ]]; then - deps="done(gfsmos_extfcst_${cyc}_${pid}) && done(gfsmos_prdgen_${cyc}_${pid})" -else - deps="done(gfsmos_extfcst_${cyc}_${pid})" -fi -# Set Nodes -if [ $cyc -eq 00 -o $cyc -eq 12 ] && [[ "$stnonly" != "Y" ]]; then - #nodes='1*{select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=20 -else - #nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=1 -fi - - -#bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -# -W 1:00 -M 2000 \ -# -extsched 'CRAYLINUX[]' \ -# -R "$nodes" \ -# -w "$deps" \ -# $jobCard - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 1:00 \ - -P $ACCOUNT \ - -w "$deps" \ - $jobCard - -# Exit here with $cyc is 06 or 18 -if [ $cyc -eq 06 -o $cyc -eq 18 ]; then - exit 0 -fi -######################################################################## -# JGFSMOS_WX_EXT_PRDGEN -######################################################################## -#jobCard=$HOMEmdl/jobs/JGFSMOS_WX_EXT_PRDGEN -#export job=gfsmos_wx_extprdgen_${cyc}_${pid} -## Change COMIN back to COMOUT -#export COMIN=$COMOUT -## Define DATA and create directory -#export DATA=$STMPROOT/qprod/gfsmos_wx_extprdgen_${cyc} -#export logfile=$dailylog/$job.out -#export out_dir=$dailylog -## Set dependencies -#if [[ "$range" == "both" ]]; then -# deps="done(gfsmos_extprdgen_${cyc}_${pid}) && done(gfsmos_wx_prdgen_${cyc}_${pid})" -#else -# deps="done(gfsmos_extprdgen_${cyc}_${pid})" -#fi -# -#export NTASK=1 -#export PTILE=1 -#export OMP_NUM_THREADS=20 - -##bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -## -W 1:00 -M 1000 \ -## -extsched 'CRAYLINUX[]' \ -## -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && 
vnode]span[ptile=24]}' \ -## -w "$deps" \ -## $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -M 3000 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -fi #endif for ext/both - -#--------------------- -export SENDCOM=$SENDCOM_SAVE - -exit 0 diff --git a/scripts/run_gfsmos_master.sh.hera b/scripts/run_gfsmos_master.sh.hera deleted file mode 100755 index 04bfda61c5..0000000000 --- a/scripts/run_gfsmos_master.sh.hera +++ /dev/null @@ -1,833 +0,0 @@ -#!/bin/sh -####################################################################### -# run_gfsmos.sh -# -# History: -# 03/29/13 Scallion Created (Adapted from Eric Engle's -# "master" scripts) -# 12/03/13 Scallion Added ptype/wxgrid -# -# Purpose: -# To run the GFS-MOS operational suite -# -####################################################################### -set -x - -if (( $# > 1 )); then - echo "Incorrect number of arguments." - echo "Syntax: $0 [PDYCYC (optional)]" - echo "Exiting..." - exit 1 -fi - -export PATH=./:$PATH - -####################################################################### -# Only run on the dev machine -####################################################################### -#/u/Scott.Scallion/bin/run-on-dev.sh -#[[ $? -eq 1 ]] && exit 1 - -####################################################################### -# Source bash_profile to run with proper modules on cron -####################################################################### -#elim . ~/.bash_profile 1> /dev/null 2>&1 -#. 
~/.bash_profile - -####################################################################### -# Check the host to determine whether tide or gyre is prod -####################################################################### -#chkhost=$(hostname | cut -c1) -#if [[ $SITE == "GYRE" ]] || [[ $SITE == "SURGE" ]] || [[ $SITE == "VENUS" ]]; then -# gort="g" -#elif [[ $SITE == "TIDE" ]] || [[ $SITE == "LUNA" ]] || [[ $SITE == "MARS" ]]; then -# gort="t" -#fi - -####################################################################### -# Set global variables neede in the run script and/or each individual -# job script. -####################################################################### -#. $MODULESHOME/init/bash -#module purge 2>/dev/null -#module load EnvVars/1.0.2 2>/dev/null -#module load ips/18.0.1.163 2>/dev/null -#module load impi/18.0.1 2>/dev/null -#module load lsf/10.1 2>/dev/null -#module load prod_envir/1.0.3 2>/dev/null -#module load prod_util/1.1.3 2>/dev/null -#module load CFP/2.0.1 2>/dev/null -# -#module use -a /gpfs/dell1/nco/ops/nwpara/modulefiles/compiler_prod/ips/18.0.1 -#module load grib_util/1.1.1 2>/dev/null - -# HERA -module load intel/18.0.5.274 -module load impi/2018.0.4 - -module use /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles -module load bacio/2.0.3 -module load bufr/11.3.0 -module load g2/3.1.1 -module load jasper/1.900.1 -module load png/1.2.44 -module load w3emc/2.4.0 -module load w3nco/2.0.7 -module load z/1.2.11 - -module use /scratch1/NCEPDEV/mdl/nwprod/modulefiles -module load prod_util/1.0.14 - -module use /scratch1/NCEPDEV/mdl/apps/modulefiles -module load CFP/2.0.1 - -export FORT_BUFFERED=TRUE -export KMP_AFFINITY=disabled -export envir=prod -export RUN_ENVIR=${RUN_ENVIR:-""} -#export QUEUE=dev - -#------------------ -export SENDCOM=YES -export SENDCOM_SAVE=$SENDCOM -#------------------ - -#-------------------------------- -# COMDATEROOT defined by module prod_util -##export PTMPROOT=/gpfs/dell2/ptmp/$USER -##export 
STMPROOT=/gpfs/dell2/stmp/$USER -##export MODELROOT=/gpfs/dell2/mdl/mdlstat/noscrub/usr/$USER/nwprod -##export MODELDIR=$MODELROOT/gfsmos.v5.0.6 -##export CODEDIR=$MODELROOT/mos_shared.v2.6.1 - -export PTMPROOT=$ROTDIR/gfsmos.$PDY -export STMPROOT=$RUNDIR/gfsmos.$PDY -export DATAROOT=$STMPROOT -##export MODELROOT=/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/gfsmos -#export MODELROOT=/gpfs/dell2/mdl/mdlstat/noscrub/usr/Scott.Scallion/gfsv16 -#export MODELROOT=/scratch1/NCEPDEV/mdl/nwprod -export MODELROOT=/scratch1/NCEPDEV/global/glopara/git/global-workflow -#export MODELDIR=$MODELROOT/gfsmos.v5.2.0.1 -export MODELDIR=$MODELROOT/gfsmos.v5.2.0 -#export CODEDIR=/gpfs/dell1/nco/ops/nwpara/mos_shared.v2.6.5 -export CODEDIR=$MODELROOT/mos_shared.v2.6.5 -#-------------------------------- - -#-------------------------------- -##export PDY=20180419 -##export PDY=$(date -u +%Y%m%d) -##export prevday=$(date -u --date="${PDY} 0000 UTC 24 hours ago" +%Y%m%d) - -dateIn=$1 -#if [ $REALTIME = "YES" ]; then -# GDATE=$($NDATE -24 $dateIn) -# dateIn=$GDATE -#fi -export PDY=$(echo $dateIn | cut -c 1-8) -export cyc=$(echo $dateIn | cut -c 9-10) -export prevday=$($NDATE -24 ${PDY}00 | cut -c1-8) -#-------------------------------- - - -#-------------------------------- -##let hour=$(date -u +"%-H") -##if [[ $hour -ge 4 && $hour -lt 10 ]]; then -## export cyc=00 -## export range=both -##elif [[ $hour -ge 10 && $hour -lt 16 ]]; then -## export cyc=06 -## export range=both -##elif [[ $hour -ge 16 && $hour -lt 22 ]]; then -## export cyc=12 -## export range=both -##elif [[ $hour -ge 22 && $hour -le 23 ]]; then -## export cyc=18 -## export range=both -##else -## export cyc=18 -## export PDY=$prevday -## export range=both -##fi -## -##cyc_list="00 06 12 18" -##if [[ $# == 1 ]] && [[ $cyc_list =~ $1 ]]; then -## export cyc=$1 -## if [ "$cyc" == "00" -o "$cyc" == "12" ]; then -## export range=both -## else -## export range=both -## fi -##elif [[ $# == 1 ]]; then -## echo "$1 is not a valid 
cycle (choose 00 or 12)" -## exit 1 -##fi - -# ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -export range=${range:-"both"} - -#if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -# if [ $cyc -eq 00 ]; then -# export range=both -# else -# export range=short -# fi -# ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -export stnonly='Y' -export skipmodel=n -export skipprep=n -export cycle="t${cyc}z" -export pid="gfs_qprod.$$" -export dailylog=$PTMPROOT/dailylog/log.$PDY -export jlogfile=$dailylog/jlogfile_gfsmos -mkdir -p $dailylog - -export SENDDBN=NO -export SENDDBN_NTC=NO -export GET_IOPROFILE=NO - -# Specify Execution Areas -export HOMEmdl=$MODELDIR -export HOMEcode=$CODEDIR -#export utilscript=/gpfs/hps/mdl/mdlstat/noscrub/usr/Scott.Scallion/ush - -if [ ! -d $HOMEmdl ]; then - echo "$HOMEmdl does not exist" - exit 1 -fi - -if [ ! -d $HOMEcode ]; then - echo "$HOMEcode does not exist" - exit 1 -fi - - -#------------------------------------- -# Define COMOUT (COMIN will vary by job) -#export GFSDIR=$COMROOThps/gfs/prod/gfs.$PDY -##export GFSDIR=$COMROOThps/gfs/prod/gfs.$PDY -##export COMINgfs=$GFSDIR -##export COMOUT=$PTMPROOT/qprod/gfsmos.$PDY - -export GFSDIR=$COMROOT/gfs/prod/gfs.${PDY} -if [[ "$RUN_ENVIR" = "emc" ]] ; then - export GFSDIR=$ROTDIR/gfs.${PDY} -fi -export COMINgfs=$GFSDIR -export COMOUT=$ROTDIR/gfsmos.$PDY - -#export COMINm1=$PTMPROOT/gfsmos.$prevday -#export COMINm1=$PTMPROOT/qprod/gfsmos_gmos_pre-nbmv2.$prevday -#export COMINm1=$PTMPROOT/qprod/gfsmos.$prevday - -if [[ ! -d $PTMPROOT/qprod ]]; then - mkdir -p $PTMPROOT/qprod -fi - -if [[ ! -d $COMOUT ]]; then - mkdir -p $COMOUT -fi - -export COMOUTwmo=$PTMPROOT/wmo - -if [[ ! -d $COMOUTwmo ]]; then - mkdir -p $COMOUTwmo -fi - - -# NOTE: On WCOSS_DELL_P3 the directory from which bsub -# is executed must exist when the submitted job -# begins. Otherwise, the submitted job fails -# with TERM_CWD_NOTEXIST error. 
- -mkdir -p $DATAROOT -cd $DATAROOT - - -if [ "$range" == "short" -o "$range" == "both" ]; then -######################################################################## -######################################################################## -# SHORT-RANGE JOBS -######################################################################## -######################################################################## - -######################################################################## -# Wait for 1 degree model data -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f096 ]]; then -## echo "Model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done -## -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." -## exit 1 -##fi -## -##fi #endif for skipmodel - -######################################################################## -# Wait for 1/4 degree model data -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.0p25.f096 ]]; then -## echo "Model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done - -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." 
-## exit 1 -##fi -## -##fi #endif for skipmodel - -######################################################################## -# Wait for 1.0 degree GFS model files before running (Pacific MOS) -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f096 ]]; then -## echo "1.0 degree model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done -## -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." -## exit 1 -##fi -## -##fi #endif for skipmodel - - -######################################################################## -# JGFSMOS_PREP47 -######################################################################## -if [[ "$skipprep" != "y" ]]; then - export job=gfsmos_prep_${cyc}_${pid} - export COMIN=$GFSDIR - jobCard=$HOMEmdl/jobs/JGFSMOS_STN_PREP -# Define DATA and create directory - export DATA=$STMPROOT/qprod/gfsmos_prep_${cyc} - export logfile=$dailylog/$job.out - export out_dir=$dailylog - - export NTASK=3 - export PTILE=3 - export OMP_NUM_THREADS=1 - -# bsub -J $job \ -# -o $logfile \ -# -q $QUEUE \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 0:25 \ -# -P $ACCOUNT \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=1g -t 00:25:00 -o $logfile $jobCard -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=1g -t 01:00:00 -o $logfile $jobCard 1> temp -JGFSMOS_STN_PREP_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -fi #end for skipprep - -######################################################################## -# JGFSMOS_FORECAST -######################################################################## -#if [[ ! 
-d /gpfs/dell1/nco/ops/com/mos/prod/hry_mos.$PDY ]]; then -# export ROTATE=/gpfs/dell2/mdl/mdlstat/noscrub/rotate -# #export COMINhry_mos=$PTMPROOT/hourly.$PDY -# export COMINhry_mos=/scratch1/NCEPDEV/mdl/Michael.N.Baker/hry/hry_mos.$PDY -# if [[ ! -d $COMINhry_mos ]]; then -# mkdir -p $COMINhry_mos -# fi -# \cp $ROTATE/hry/${PDY}03 $COMINhry_mos/sfctbl.03 -# \cp $ROTATE/hry/${PDY}09 $COMINhry_mos/sfctbl.09 -# \cp $ROTATE/hry/${PDY}15 $COMINhry_mos/sfctbl.15 -# \cp $ROTATE/hry/${PDY}21 $COMINhry_mos/sfctbl.21 -#fi -export COMINhry_mos=/scratch1/NCEPDEV/mdl/Michael.N.Baker/hry/hry_mos.$PDY - -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT - -export job=gfsmos_fcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_STN_FORECAST -export DATA=$STMPROOT/qprod/gfsmos_fcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" ]]; then - #ORIG deps="done(gfsmos_prep_${cyc}_${pid})" - deps="afterany:$JGFSMOS_STN_PREP_JOBID" -else - deps="" -fi -if [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" != "Y" ]]; then - complist="metar pac cooprfcmeso goe higoe akgoe tstms" - complist2="copopo3 coptype akpopo3 akptype" -else - complist="metar cooprfcmeso tstms" - complist2="" -fi - -if [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" != "Y" ]]; then - export NTASK=11 - export PTILE=1 - export OMP_NUM_THREADS=1 -elif [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" == "Y" ]]; then - export NTASK=5 - export PTILE=5 - export OMP_NUM_THREADS=1 -else - export NTASK=4 - export PTILE=4 - export OMP_NUM_THREADS=1 -fi - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 0:20 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 00:20:00 -o $logfile $jobCard -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE 
--mem-per-cpu=4g -t 01:00:00 -o $logfile --dependency=$deps $jobCard 1> temp -JGFSMOS_STN_FORECAST_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -######################################################################## -# JGFSMOS_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_STN_PRDGEN -export job=gfsmos_prdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$STMPROOT/qprod/gfsmos_prdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -#ORIG deps="done(gfsmos_fcst_${cyc}_${pid})" -deps="afterany:$JGFSMOS_STN_FORECAST_JOBID" -# Set Nodes -if [ $cyc -eq 00 -o $cyc -eq 12 ] && [[ "$stnonly" != "Y" ]]; then - #nodes='1*{select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - #ORIG export OMP_NUM_THREADS=20 - export OMP_NUM_THREADS=1 -else - #nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=1 -fi - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 0:30 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 00:30:00 -o $logfile $jobCard -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 02:00:00 -o $logfile --dependency=$deps $jobCard 1> temp -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --exclusive -t 02:00:00 -o $logfile --dependency=$deps $jobCard 1> temp -JGFSMOS_STN_PRDGEN_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -######################################################################## -# JGFSMOS_WX_PRDGEN (00z and 12z only) -######################################################################## -#if [ $cyc -eq 
00 -o $cyc -eq 12 ]; then -# jobCard=$HOMEmdl/jobs/JGFSMOS_WX_PRDGEN -# export job=gfsmos_wx_prdgen_${cyc}_${pid} -# # Change COMIN back to COMOUT -# export COMIN=$COMOUT -# # Define DATA and create directory -# export DATA=$STMPROOT/qprod/gfsmos_wx_prdgen_${cyc} -# export logfile=$dailylog/$job.out -# export out_dir=$dailylog -# # Set dependencies -# deps="done(gfsmos_prdgen_${cyc}_${pid})" -# -# export NTASK=2 -# export PTILE=1 -# export OMP_NUM_THREADS=20 -# -# #bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -# # -W 1:00 -M 1000 \ -# # -extsched 'CRAYLINUX[]' \ -# # -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ -# # -w "$deps" \ -# # $jobCard -# -# bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -M 3000 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard -#fi - -fi #endif short - -######################################################################## -######################################################################## -# EXTENDED-RANGE JOBS -######################################################################## -######################################################################## - -if [ "$range" == "ext" -o "$range" == "both" ]; then - -######################################################################## -# Wait for 1/4 degree model data -######################################################################## -#if [ "$skipmodel" != "y" ]; then -#let attempts=1 -#proj_list=$(seq -f %03g 252 12 384) -#for tau in $proj_list -#do -# while [[ $attempts -le 120 ]] -# do -# if [[ -f $GFSDIR/gfs.$cycle.pgrb2.0p25.f${tau} && -f $GFSDIR/gfs.$cycle.pgrb2.0p50.f${tau} ]]; then -# echo "Model file found. Proceeding to next..." 
-# break -# else -# if [[ $attempts -le 60 ]]; then -# sleep 60 -# else -# sleep 180 -# fi -# attempts=$((attempts+1)) -# fi -# done -#done -# -#if [[ $attempts -gt 120 ]]; then -# echo "Waited 4 hours for model file, but it did not appear." -# echo "Exiting..." -# exit 1 -#fi - -#fi #endif for skipmodel - -######################################################################## -# Wait for 1.0/2.5 degree GFS model files before running (Pacific GFS) -######################################################################## -#if [ "$skipmodel" != "y" ]; then -#let attempts1deg=1 -#proj_list=$(seq -f %03g 204 12 384) -#for tau in $proj_list -#do -# while [[ $attempts1deg -le 120 ]] -# do -## ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -# #if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f384 && -f $GFSDIR/gfs.$cycle.pgrb2.2p50.f240 ]]; then -# if [ $cyc -eq 00 ]; then -# waitfile=$GFSDIR/gfs.$cycle.pgrb2.1p00.f384 -# else -# waitfile=$GFSDIR/gfs.$cycle.pgrb2.1p00.f180 -# fi -# if [[ -f $waitfile ]]; then -## ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -# #echo "1.0/2.5 degree model files found. Proceeding to next..." -# echo "1.0 degree model files found. Proceeding to next..." -# break -# else -# if [[ $attempts1deg -le 60 ]]; then -# sleep 60 -# else -# sleep 180 -# fi -# attempts1deg=$((aattempts1deg+1)) -# fi -# done -#done -# -#if [[ $attempts1deg -gt 120 ]]; then -# echo "Waited 4 hours for 1.0 degree model file, but it did not appear." -# echo "Exiting..." 
-# exit 1 -#fi - -#fi #endif for skipmodel - -######################################################################## -# JGFSMOS_EXT_PREP47 -######################################################################## -if [[ "$skipprep" != "y" ]]; then - export job=gfsmos_extprep_${cyc}_${pid} - export COMIN=$GFSDIR - jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_PREP -# Define DATA and create directory - export DATA=$STMPROOT/qprod/gfsmos_extprep_${cyc} - export logfile=$dailylog/$job.out - export out_dir=$dailylog - - export NTASK=2 - export PTILE=2 - export OMP_NUM_THREADS=1 - -# bsub -J $job \ -# -o $logfile \ -# -q $QUEUE \ -# -W 2:00 \ -# -M 2500 \ -# -P MDLST-T2O \ -# -extsched 'CRAYLINUX[]' \ -# -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ -# $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 2:00 \ -# -P $ACCOUNT \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 00:10:00 -o $logfile $jobCard -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 01:00:00 -o $logfile $jobCard 1> temp -JGFSMOS_EXT_STN_PREP_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -fi #end for skipprep - -# Skip EXT_FORECAST for 06/18 -if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -######################################################################## -# JGFSMOS_EXT_FORECAST -######################################################################## -#if [[ ! -d /gpfs/dell1/nco/ops/com/mos/prod/hry_mos.$PDY ]]; then -# export ROTATE=/gpfs/dell2/mdl/mdlstat/noscrub/rotate -# #export COMINhry_mos=$PTMPROOT/hourly.$PDY -# export COMINhry_mos=/scratch1/NCEPDEV/mdl/Michael.N.Baker/hry/hry_mos.$PDY -# if [[ ! 
-d $COMINhry_mos ]]; then -# mkdir -p $COMINhry_mos -# fi -# \cp $ROTATE/hry/${PDY}03 $COMINhry_mos/sfctbl.03 -# \cp $ROTATE/hry/${PDY}09 $COMINhry_mos/sfctbl.09 -# \cp $ROTATE/hry/${PDY}15 $COMINhry_mos/sfctbl.15 -# \cp $ROTATE/hry/${PDY}21 $COMINhry_mos/sfctbl.21 -#fi -export COMINhry_mos=/scratch1/NCEPDEV/mdl/Michael.N.Baker/hry/hry_mos.$PDY - -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT - -export job=gfsmos_extfcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_FORECAST -export DATA=$STMPROOT/qprod/gfsmos_extfcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" && "$range" == "both" ]]; then - #ORIG deps="done(gfsmos_extprep_${cyc}_${pid}) && done(gfsmos_fcst_${cyc}_${pid})" - deps="afterany:${JGFSMOS_EXT_STN_PREP_JOBID},${JGFSMOS_STN_FORECAST_JOBID}" -elif [[ "$skipprep" != "y" && "$range" == "ext" ]]; then - #ORIG deps="done(gfsmos_extprep_${cyc}_${pid})" - deps="afterany:$JGFSMOS_EXT_STN_PREP_JOBID" -elif [[ "$skipprep" == "y" && "$range" == "ext" ]]; then - deps="" -else - #ORIG deps="done(gfsmos_fcst_${cyc}_${pid})" - deps="afterany:$JGFSMOS_STN_FORECAST_JOBID" -fi - -if [[ $stnonly != "Y" ]]; then - export NTASK=10 - export PTILE=1 - export OMP_NUM_THREADS=1 -else - export NTASK=3 - export PTILE=3 - export OMP_NUM_THREADS=1 -fi - -#bsub -J ${job} -oo $logfile -q $QUEUE -P MDLST-T2O \ -# -W 1:00 -M 2000 \ -# -extsched 'CRAYLINUX[]' \ -# -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ -# -w "$deps" \ -# $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 01:00:00 -o $logfile $jobCard -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 02:00:00 -o 
$logfile --dependency=$deps $jobCard 1> temp -JGFSMOS_EXT_STN_FORECAST_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -fi #endif for skipping 06/18 ext_fcst -######################################################################## -# JGFSMOS_EXT_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_PRDGEN -export job=gfsmos_extprdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$STMPROOT/qprod/gfsmos_extprdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$cyc" == "06" || "$cyc" == "18" ]]; then - #ORIG deps="done(gfsmos_prdgen_${cyc}_${pid})" - deps="afterany:$JGFSMOS_STN_PRDGEN_JOBID" -elif [[ "$range" == "both" ]]; then - #ORIG deps="done(gfsmos_extfcst_${cyc}_${pid}) && done(gfsmos_prdgen_${cyc}_${pid})" - deps="afterany:${JGFSMOS_EXT_STN_FORECAST_JOBID},${JGFSMOS_STN_PRDGEN_JOBID}" -else - #ORIG deps="done(gfsmos_extfcst_${cyc}_${pid})" - deps="afterany:$JGFSMOS_EXT_STN_FORECAST" -fi -# Set Nodes -if [ $cyc -eq 00 -o $cyc -eq 12 ] && [[ "$stnonly" != "Y" ]]; then - #nodes='1*{select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - #ORIG export OMP_NUM_THREADS=20 - export OMP_NUM_THREADS=1 -else - #nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=1 -fi - - -#bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -# -W 1:00 -M 2000 \ -# -extsched 'CRAYLINUX[]' \ -# -R "$nodes" \ -# -w "$deps" \ -# $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 01:00:00 -o $logfile $jobCard -#sbatch -A $ACCOUNT 
-J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 02:00:00 -o $logfile --dependency=$deps $jobCard #NOTE: No need to redirect stdout. -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --exclusive -t 02:00:00 -o $logfile --dependency=$deps $jobCard #NOTE: No need to redirect stdout. - -# Exit here with $cyc is 06 or 18 -if [ $cyc -eq 06 -o $cyc -eq 18 ]; then - exit 0 -fi -######################################################################## -# JGFSMOS_WX_EXT_PRDGEN -######################################################################## -#jobCard=$HOMEmdl/jobs/JGFSMOS_WX_EXT_PRDGEN -#export job=gfsmos_wx_extprdgen_${cyc}_${pid} -## Change COMIN back to COMOUT -#export COMIN=$COMOUT -## Define DATA and create directory -#export DATA=$STMPROOT/qprod/gfsmos_wx_extprdgen_${cyc} -#export logfile=$dailylog/$job.out -#export out_dir=$dailylog -## Set dependencies -#if [[ "$range" == "both" ]]; then -# deps="done(gfsmos_extprdgen_${cyc}_${pid}) && done(gfsmos_wx_prdgen_${cyc}_${pid})" -#else -# deps="done(gfsmos_extprdgen_${cyc}_${pid})" -#fi -# -#export NTASK=1 -#export PTILE=1 -#export OMP_NUM_THREADS=20 - -##bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -## -W 1:00 -M 1000 \ -## -extsched 'CRAYLINUX[]' \ -## -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ -## -w "$deps" \ -## $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -M 3000 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -fi #endif for ext/both - -#--------------------- -export SENDCOM=$SENDCOM_SAVE - -exit 0 diff --git a/sorc/build_gfs_util.sh b/sorc/build_gfs_util.sh index 4844bf0068..675d1c9609 100755 --- a/sorc/build_gfs_util.sh +++ b/sorc/build_gfs_util.sh @@ -7,12 +7,12 @@ export dir=$( pwd ) cd ../util/sorc # Check for gfs_util folders exist -if [ ! -d "./mkgfsawps.fd" ]; then +if [ ! 
-d "./mkgfsawps.fd" ]; then echo " " echo " GFS_UTIL folders DO NOT exist " echo " " exit -fi +fi echo "" echo " Building ... Executables for GFS_UTILITIES " diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh index a45b008650..480dda9b89 100755 --- a/sorc/build_ufs_utils.sh +++ b/sorc/build_ufs_utils.sh @@ -4,9 +4,6 @@ set -eux source ./machine-setup.sh > /dev/null 2>&1 cwd=$(pwd) -if [ $target = wcoss_dell_p3 ]; then target=dell; fi -if [ $target = wcoss_cray ]; then target=cray; fi - cd ufs_utils.fd ./build_all.sh diff --git a/sorc/enkf_chgres_recenter.fd/makefile b/sorc/enkf_chgres_recenter.fd/makefile deleted file mode 100755 index 2a5f36b369..0000000000 --- a/sorc/enkf_chgres_recenter.fd/makefile +++ /dev/null @@ -1,27 +0,0 @@ -SHELL= /bin/sh - -LIBS= $(NEMSIO_LIB) $(BACIO_LIB4) $(W3NCO_LIBd) $(IP_LIBd) $(SP_LIBd) - -CMD= enkf_chgres_recenter.x - -OBJS = driver.o input_data.o interp.o output_data.o utils.o setup.o - -$(CMD): $(OBJS) - $(FC) $(FFLAGS) -o $(CMD) $(OBJS) $(LIBS) - -driver.o: setup.o output_data.o interp.o input_data.o driver.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c driver.f90 -interp.o: setup.o utils.o output_data.o input_data.o interp.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c interp.f90 -input_data.o: setup.o utils.o input_data.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c input_data.f90 -output_data.o: setup.o utils.o input_data.o output_data.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c output_data.f90 -setup.o: setup.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c setup.f90 -utils.o: utils.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c utils.f90 -clean: - rm -f *.o *.mod ${CMD} -install: - -cp $(CMD) ../../exec/. 
diff --git a/sorc/enkf_chgres_recenter_nc.fd/makefile b/sorc/enkf_chgres_recenter_nc.fd/makefile deleted file mode 100644 index c9f4c7be37..0000000000 --- a/sorc/enkf_chgres_recenter_nc.fd/makefile +++ /dev/null @@ -1,28 +0,0 @@ -SHELL= /bin/sh - -LIBS= $(FV3GFS_NCIO_LIB) $(BACIO_LIB4) $(W3NCO_LIB4) $(IP_LIB4) $(SP_LIB4) -L$(NETCDF)/lib -lnetcdff -lnetcdf -lhdf5_hl -lhdf5 -lz - -CMD= enkf_chgres_recenter_nc.x - -OBJS = driver.o input_data.o interp.o output_data.o utils.o setup.o - -$(CMD): $(OBJS) - $(FC) $(FFLAGS) -o $(CMD) $(OBJS) $(LIBS) - -driver.o: setup.o output_data.o interp.o input_data.o driver.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c driver.f90 -interp.o: setup.o utils.o output_data.o input_data.o interp.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c interp.f90 -input_data.o: setup.o utils.o input_data.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c input_data.f90 -output_data.o: setup.o utils.o input_data.o output_data.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c output_data.f90 -setup.o: setup.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c setup.f90 -utils.o: utils.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c utils.f90 -clean: - rm -f *.o *.mod ${CMD} -install: - -cp $(CMD) ../../exec/. - diff --git a/sorc/fbwndgfs.fd/fbwndgfs.f b/sorc/fbwndgfs.fd/fbwndgfs.f old mode 100755 new mode 100644 diff --git a/sorc/fbwndgfs.fd/makefile.GENERIC b/sorc/fbwndgfs.fd/makefile.GENERIC deleted file mode 100755 index f38539916f..0000000000 --- a/sorc/fbwndgfs.fd/makefile.GENERIC +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. 
-# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. 
-# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fbwndgfs.fd/makefile.theia b/sorc/fbwndgfs.fd/makefile.theia deleted file mode 100755 index f38539916f..0000000000 --- a/sorc/fbwndgfs.fd/makefile.theia +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. 
-# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fbwndgfs.fd/makefile.wcoss b/sorc/fbwndgfs.fd/makefile.wcoss deleted file mode 100755 index f38539916f..0000000000 --- a/sorc/fbwndgfs.fd/makefile.wcoss +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. 
-# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. 
-# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fbwndgfs.fd/makefile.wcoss_cray b/sorc/fbwndgfs.fd/makefile.wcoss_cray deleted file mode 100755 index 0ebe267cb9..0000000000 --- a/sorc/fbwndgfs.fd/makefile.wcoss_cray +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. 
-# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ftn -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -axCORE-AVX2 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fbwndgfs.fd/makefile.wcoss_dell_p3 b/sorc/fbwndgfs.fd/makefile.wcoss_dell_p3 deleted file mode 100755 index f38539916f..0000000000 --- a/sorc/fbwndgfs.fd/makefile.wcoss_dell_p3 +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. 
-# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. 
-# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fv3nc2nemsio.fd/makefile.sh b/sorc/fv3nc2nemsio.fd/makefile.sh deleted file mode 100755 index 30c60cf7f0..0000000000 --- a/sorc/fv3nc2nemsio.fd/makefile.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/ksh -set -x - -machine=${1:-"cray"} - -source ../../modulefiles/module-setup.sh.inc -module use ../../modulefiles -module load modulefile.fv3nc2nemsio.$machine - -LIBnetcdf=$($NETCDF/bin/nf-config --flibs) -INCnetcdf=$($NETCDF/bin/nf-config --fflags) -export NETCDF_LDFLAGS=$LIBnetcdf -export NETCDF_INCLUDE=$INCnetcdf - - -$FCMP $FFLAGS -c kinds.f90 -$FCMP $FFLAGS -c constants.f90 -$FCMP $FFLAGS $NETCDF_INCLUDE -I $NEMSIO_INC -c fv3_module.f90 -$FCMP $FFLAGS $NETCDF_INCLUDE -I $NEMSIO_INC -I. -o fv3nc2nemsio.x fv3_main.f90 fv3_module.o $NETCDF_LDFLAGS $NEMSIO_LIB $BACIO_LIB4 $W3NCO_LIBd - -mv fv3nc2nemsio.x ../../exec/. 
-rm -f *.o *.mod - -exit 0 diff --git a/sorc/gaussian_sfcanl.fd/CMakeLists.txt b/sorc/gaussian_sfcanl.fd/CMakeLists.txt index 6447fdaf6a..b83035e86e 100644 --- a/sorc/gaussian_sfcanl.fd/CMakeLists.txt +++ b/sorc/gaussian_sfcanl.fd/CMakeLists.txt @@ -1,3 +1,5 @@ +add_subdirectory(weight_gen) + list(APPEND fortran_src gaussian_sfcanl.f90 ) diff --git a/sorc/gaussian_sfcanl.fd/Makefile b/sorc/gaussian_sfcanl.fd/Makefile deleted file mode 100755 index 69cd35f7ae..0000000000 --- a/sorc/gaussian_sfcanl.fd/Makefile +++ /dev/null @@ -1,22 +0,0 @@ -SHELL= /bin/sh - -CMD= gaussian_sfcanl.exe - -OBJS = gaussian_sfcanl.o - -build: $(CMD) - -$(CMD): $(OBJS) - $(FCOMP) $(FFLAGS) -I$(NEMSIO_INC) $(NETCDF_INCLUDE) -o $(CMD) $(OBJS) $(NETCDF_LDFLAGS_F) $(NEMSIO_LIB) $(BACIO_LIB4) $(W3NCO_LIBd) $(SP_LIB4) - -gaussian_sfcanl.o: gaussian_sfcanl.f90 - $(FCOMP) $(FFLAGS) -I$(NEMSIO_INC) $(NETCDF_INCLUDE) -c gaussian_sfcanl.f90 - -install: - cp ${CMD} ../../exec - -clean: - rm -f *.o *.mod ${CMD} ../../exec/${CMD} - -test: - @echo NO TESTS YET diff --git a/sorc/gaussian_sfcanl.fd/makefile.sh b/sorc/gaussian_sfcanl.fd/makefile.sh deleted file mode 100755 index b1c5adefde..0000000000 --- a/sorc/gaussian_sfcanl.fd/makefile.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -export FFLAGS="-O3 -fp-model precise -g -r8 -i4" -# for debugging -#export FFLAGS="-g -r8 -i4 -warn unused -check bounds" - -export NETCDF_INCLUDE="-I${NETCDF}/include" -export NETCDF_LDFLAGS_F="-L${NETCDF}/lib -lnetcdf -lnetcdff -L${HDF5}/lib -lhdf5 " - -make clean -make build -err=$? 
-if [ $err -ne 0 ]; then - echo ERROR BUILDING GAUSSIAN_SFCANL - exit 2 -fi -make install - -exit diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/CMakeLists.txt b/sorc/gaussian_sfcanl.fd/weight_gen/CMakeLists.txt new file mode 100644 index 0000000000..db3cbf17a8 --- /dev/null +++ b/sorc/gaussian_sfcanl.fd/weight_gen/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(scrip.fd) diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/README b/sorc/gaussian_sfcanl.fd/weight_gen/README index 304c6f0e0e..10294dfc33 100644 --- a/sorc/gaussian_sfcanl.fd/weight_gen/README +++ b/sorc/gaussian_sfcanl.fd/weight_gen/README @@ -1,5 +1,5 @@ Creates the ESMF integration weight files to transform from cubed-sphere grids -to comparable (in resolution) global gaussian grids. +to comparable (in resolution) global gaussian grids. First, compile the program that creates the 'scrip' files for the global gaussian grids. For each resolution, two grids are created: @@ -8,7 +8,7 @@ To compile, cd to ./scrip.fd and type 'make.sh'. Currently, only compiles/runs on Theia. Then, run the 'run.theia.ksh' script for the resolution desired. -Script first calls the 'scrip' program, then calls ESMF utility +Script first calls the 'scrip' program, then calls ESMF utility 'RegridWeightGen' to create the interpolation weight files. Weight files for the following transforms are created: diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/run.theia.sh b/sorc/gaussian_sfcanl.fd/weight_gen/run.theia.sh index c1673fd655..afcd0f18ec 100755 --- a/sorc/gaussian_sfcanl.fd/weight_gen/run.theia.sh +++ b/sorc/gaussian_sfcanl.fd/weight_gen/run.theia.sh @@ -53,7 +53,7 @@ module load hdf5/1.8.14 RWG=/scratch4/NCEPDEV/nems/noscrub/emc.nemspara/soft/esmf/7.1.0r/bin/ESMF_RegridWeightGen #------------------------------------------------------------------------ -# Path to the 'mosaic' and 'grid' files for each cubed-sphere +# Path to the 'mosaic' and 'grid' files for each cubed-sphere # resolution. 
#------------------------------------------------------------------------ @@ -79,46 +79,46 @@ case $CRES in "C48" ) LONB="192" LATB="94" - LATB2="96" + LATB2="96" ;; "C96" ) LONB="384" LATB="192" - LATB2="194" + LATB2="194" ;; "C128" ) LONB="512" LATB="256" - LATB2="258" + LATB2="258" ;; "C192" ) LONB="768" LATB="384" - LATB2="386" + LATB2="386" ;; "C384" ) LONB="1536" LATB="768" - LATB2="770" + LATB2="770" ;; "C768" ) LONB="3072" LATB="1536" - LATB2="1538" + LATB2="1538" ;; "C1152" ) LONB="4608" LATB="2304" - LATB2="2306" + LATB2="2306" ;; "C3072" ) LONB="12288" LATB="6144" - LATB2="6146" + LATB2="6146" ;; * ) echo "GRID NOT SUPPORTED" - exit 3 + exit 3 ;; esac diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/CMakeLists.txt b/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/CMakeLists.txt new file mode 100644 index 0000000000..4534bb0ab6 --- /dev/null +++ b/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/CMakeLists.txt @@ -0,0 +1,18 @@ +list(APPEND fortran_src + scrip.f90 +) + +if(CMAKE_Fortran_COMPILER_ID MATCHES "^(Intel)$") + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -r8 -i4 -convert big_endian") +elseif(CMAKE_Fortran_COMPILER_ID MATCHES "^(GNU)$") + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fdefault-real-8") +endif() + +set(exe_name scrip.x) +add_executable(${exe_name} ${fortran_src}) +target_link_libraries( + ${exe_name} + sp::sp_d + NetCDF::NetCDF_Fortran) + +install(TARGETS ${exe_name} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/make.sh b/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/make.sh deleted file mode 100755 index 12ed3eefd9..0000000000 --- a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/make.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/sh - -set -x - -mac=$(hostname -f) - -case $mac in - -#--------------------------------------------------------------------------------- -# BUILD PROGRAM ON WCOSS Phase 1/2. 
-#--------------------------------------------------------------------------------- - -g????.ncep.noaa.gov | t????.ncep.noaa.gov) - - echo "WCOSS PHASE 1/2 BUILD NOT ADDED YET" - exit 1 ;; - -#--------------------------------------------------------------------------------- -# BUILD PROGRAM ON WCOSS CRAY. -#--------------------------------------------------------------------------------- - -llogin? | slogin?) - - echo "WCOSS CRAY BUILD NOT ADDED YET" - exit 1 ;; - -#--------------------------------------------------------------------------------- -# BUILD PROGRAM ON HERA. -#--------------------------------------------------------------------------------- - -hfe??) - - source /apps/lmod/lmod/init/sh - module purge - - module load intel/18.0.5.274 - - export FCOMP=ifort - export FFLAGS="-O0 -g -traceback -r8 -i4 -convert big_endian -check bounds" - - module load netcdf/4.7.0 - module load hdf5/1.10.5 - export NETCDF_INCLUDE="-I${NETCDF}/include" - export NETCDF_LDFLAGS_F="-L${NETCDF}/lib -lnetcdf -lnetcdff -L${HDF5}/lib -lhdf5 -lhdf5_fortran" - - module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles - module load sp/2.0.2 - - make clean - make - rc=$? ;; - -*) - - echo "DOES NOT BUILD ON THIS MACHINE." 
- exit 1 ;; - -esac - -exit diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/makefile b/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/makefile deleted file mode 100755 index 74949b96bb..0000000000 --- a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/makefile +++ /dev/null @@ -1,14 +0,0 @@ -SHELL= /bin/sh - -CMD= scrip.exe - -OBJS = scrip.o - -$(CMD): $(OBJS) - $(FCOMP) $(FFLAGS) $(NETCDF_INCLUDE) -o $(CMD) $(OBJS) $(NETCDF_LDFLAGS_F) $(SP_LIBd) - -scrip.o: scrip.f90 - $(FCOMP) $(FFLAGS) $(NETCDF_INCLUDE) -c scrip.f90 - -clean: - rm -f *.o *.mod ${CMD} *.exe.* diff --git a/sorc/gfs_bufr.fd/bfrhdr.f b/sorc/gfs_bufr.fd/bfrhdr.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/bfrize.f b/sorc/gfs_bufr.fd/bfrize.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/buff.f b/sorc/gfs_bufr.fd/buff.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/calwxt_gfs_baldwin.f b/sorc/gfs_bufr.fd/calwxt_gfs_baldwin.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/calwxt_gfs_ramer.f b/sorc/gfs_bufr.fd/calwxt_gfs_ramer.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/funcphys.f b/sorc/gfs_bufr.fd/funcphys.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/gfsbufr.f b/sorc/gfs_bufr.fd/gfsbufr.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/gslp.f b/sorc/gfs_bufr.fd/gslp.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/lcl.f b/sorc/gfs_bufr.fd/lcl.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/machine.f b/sorc/gfs_bufr.fd/machine.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/makefile_module b/sorc/gfs_bufr.fd/makefile_module deleted file mode 100755 index d9d5374a7a..0000000000 --- a/sorc/gfs_bufr.fd/makefile_module +++ /dev/null @@ -1,79 +0,0 @@ -##################################################################################### -# gfs_bufr using module compile standard -# # 11/08/2019 guang.ping.lou@noaa.gov: Create NetCDF version -# 
##################################################################################### -# set -eux -# - -FC = $(myFC) $(myFCFLAGS) -CPP = $(myCPP) $(myCPPFLAGS) - -FFLAGS = -I$(NETCDF_INCLUDES) \ - -I$(NEMSIO_INC) \ - -I$(SIGIO_INC) \ - -I$(W3EMC_INC4) - -LIBS = -L$(NETCDF_LIBRARIES) -lnetcdff -lnetcdf \ - -L$(HDF5_LIBRARIES) -lhdf5_hl -lhdf5 -lz \ - $(NEMSIO_LIB) \ - $(W3EMC_LIB4) \ - $(W3NCO_LIB4) \ - $(BUFR_LIB4) \ - $(BACIO_LIB4) \ - $(SP_LIB4) \ - $(SIGIO_LIB) - -SRCM = gfsbufr.f -OBJS = physcons.o funcphys.o meteorg.o bfrhdr.o newsig1.o terp3.o\ - bfrize.o vintg.o buff.o rsearch.o \ - svp.o calpreciptype.o lcl.o mstadb.o tdew.o\ - machine.o gslp.o modstuff1.o read_nemsio.o read_netcdf_p.o - -CMD = ../../exec/gfs_bufr - -$(CMD): $(SRCM) $(OBJS) - $(FC) $(FFLAGS) $(SRCM) $(OBJS) $(LIBS) -o $(CMD) - -machine.o: machine.f - $(FC) $(FFLAGS) -free -c machine.f -physcons.o: physcons.f machine.o - $(FC) $(FFLAGS) -free -c physcons.f -funcphys.o: funcphys.f physcons.o - $(FC) $(FFLAGS) -free -c funcphys.f -gslp.o: gslp.f - $(FC) $(FFLAGS) -free -c gslp.f -modstuff1.o: modstuff1.f - $(FC) $(INC) $(FFLAGS) -free -c modstuff1.f -meteorg.o: meteorg.f physcons.o funcphys.o - $(FC) $(INC) $(FFLAGS) -c meteorg.f -read_netcdf_p.o: read_netcdf_p.f - $(FC) $(INC) $(FFLAGS) -c read_netcdf_p.f -read_nemsio.o: read_nemsio.f - $(FC) $(INC) $(FFLAGS) -c read_nemsio.f -bfrhdr.o: bfrhdr.f - $(FC) $(FFLAGS) -c bfrhdr.f -newsig1.o: newsig1.f - $(FC) $(FFLAGS) -c newsig1.f -terp3.o: terp3.f - $(FC) $(FFLAGS) -c terp3.f -bfrize.o: bfrize.f - $(FC) $(FFLAGS) -c bfrize.f -vintg.o: vintg.f - $(FC) $(FFLAGS) -c vintg.f -buff.o: buff.f - $(FC) $(FFLAGS) -c buff.f -rsearch.o: rsearch.f - $(FC) $(FFLAGS) -c rsearch.f -svp.o: svp.f - $(FC) $(FFLAGS) -c svp.f -calpreciptype.o: calpreciptype.f physcons.o funcphys.o - $(FC) $(FFLAGS) -FR -c calpreciptype.f -lcl.o: lcl.f - $(FC) $(FFLAGS) -c lcl.f -mstadb.o: mstadb.f - $(FC) $(FFLAGS) -c mstadb.f -tdew.o: tdew.f - $(FC) $(FFLAGS) -c tdew.f - -clean: - 
/bin/rm -f $(OBJS) *.mod gfs_bufr diff --git a/sorc/gfs_bufr.fd/meteorg.f b/sorc/gfs_bufr.fd/meteorg.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/modstuff1.f b/sorc/gfs_bufr.fd/modstuff1.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/mstadb.f b/sorc/gfs_bufr.fd/mstadb.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/newsig1.f b/sorc/gfs_bufr.fd/newsig1.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/physcons.f b/sorc/gfs_bufr.fd/physcons.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/rsearch.f b/sorc/gfs_bufr.fd/rsearch.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/svp.f b/sorc/gfs_bufr.fd/svp.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/tdew.f b/sorc/gfs_bufr.fd/tdew.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/terp3.f b/sorc/gfs_bufr.fd/terp3.f old mode 100755 new mode 100644 diff --git a/sorc/gfs_bufr.fd/vintg.f b/sorc/gfs_bufr.fd/vintg.f old mode 100755 new mode 100644 diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 1ac7ad5b56..df44202afe 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -8,16 +8,16 @@ machine=${2} if [ $# -lt 2 ]; then echo '***ERROR*** must specify two arguements: (1) RUN_ENVIR, (2) machine' - echo ' Syntax: link_workflow.sh ( nco | emc ) ( cray | dell | hera | orion | jet | stampede )' + echo ' Syntax: link_workflow.sh ( nco | emc ) ( hera | orion | jet | stampede )' exit 1 fi if [ $RUN_ENVIR != emc -a $RUN_ENVIR != nco ]; then - echo ' Syntax: link_workflow.sh ( nco | emc ) ( cray | dell | hera | orion | jet | stampede )' + echo ' Syntax: link_workflow.sh ( nco | emc ) ( hera | orion | jet | stampede )' exit 1 fi -if [ $machine != cray -a $machine != dell -a $machine != hera -a $machine != orion -a $machine != jet -a $machine != stampede ]; then - echo ' Syntax: link_workflow.sh ( nco | emc ) ( cray | dell | hera | orion | jet | stampede )' +if [ $machine != hera -a 
$machine != orion -a $machine != jet -a $machine != stampede ]; then + echo ' Syntax: link_workflow.sh ( nco | emc ) ( hera | orion | jet | stampede )' exit 1 fi @@ -34,11 +34,7 @@ $LINK ufs_model.fd/FV3/upp upp.fd #------------------------------ #--model fix fields #------------------------------ -if [ $machine = "cray" ]; then - FIX_DIR="/gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix" -elif [ $machine = "dell" ]; then - FIX_DIR="/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix_NEW" -elif [ $machine = "hera" ]; then +if [ $machine = "hera" ]; then FIX_DIR="/scratch1/NCEPDEV/global/glopara/fix_NEW" elif [ $machine = "orion" ]; then FIX_DIR="/work/noaa/global/glopara/fix_NEW" diff --git a/sorc/machine-setup.sh b/sorc/machine-setup.sh index d64c7ddf77..27c4d33ee9 100644 --- a/sorc/machine-setup.sh +++ b/sorc/machine-setup.sh @@ -27,9 +27,7 @@ if [[ -d /work ]] ; then source /apps/lmod/lmod/init/$__ms_shell fi target=orion - module purge - export myFC=mpiifort export FCOMP=mpiifort @@ -37,64 +35,19 @@ if [[ -d /work ]] ; then elif [[ -d /scratch1 ]] ; then # We are on NOAA Hera if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 + echo load the module command 1>&2 source /apps/lmod/lmod/init/$__ms_shell fi target=hera - module purge - export myFC=mpiifort export FCOMP=mpiifort -##--------------------------------------------------------------------------- -elif [[ -d /gpfs/hps && -e /etc/SuSE-release ]] ; then - # We are on NOAA Luna or Surge - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /opt/modules/default/init/$__ms_shell - fi - - target=wcoss_cray - # Silence the "module purge" to avoid the expected error messages - # related to modules that load modules. 
- module purge > /dev/null 2>&1 - module use /usrx/local/prod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /opt/cray/alt-modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/modulefiles - module purge > /dev/null 2>&1 - # Workaround until module issues are fixed: - #unset _LMFILES_ - #unset LOADEDMODULES - echo y 2> /dev/null | module clear > /dev/null 2>&1 - module use /usrx/local/prod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /opt/cray/alt-modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/modulefiles - module load modules - -##--------------------------------------------------------------------------- -elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then - # We are on NOAA Venus or Mars - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /usrx/local/prod/lmod/lmod/init/$__ms_shell - fi - target=wcoss_dell_p3 - module purge - ##--------------------------------------------------------------------------- elif [[ -d /glade ]] ; then # We are on NCAR Yellowstone if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 + echo load the module command 1>&2 . /usr/share/Modules/init/$__ms_shell fi target=yellowstone @@ -102,9 +55,7 @@ elif [[ -d /glade ]] ; then ##--------------------------------------------------------------------------- elif [[ -d /lustre && -d /ncrc ]] ; then - # We are on GAEA. - # We are on GAEA. - echo gaea + # We are on GAEA. if ( ! eval module help > /dev/null 2>&1 ) ; then # We cannot simply load the module command. 
The GAEA # /etc/profile modifies a number of module-related variables @@ -118,7 +69,7 @@ elif [[ -d /lustre && -d /ncrc ]] ; then fi module purge module purge -# clean up after purge + # clean up after purge unset _LMFILES_ unset _LMFILES_000 unset _LMFILES_001 @@ -144,18 +95,17 @@ elif [[ -d /lustre && -d /ncrc ]] ; then source /etc/profile unset __ms_source_etc_profile fi - -target=gaea - -# GWV ADD -module load craype -module load intel -export NCEPLIBS=/lustre/f2/dev/ncep/George.Vandenberghe/NEWCOPY/l508/lib/ -module use $NCEPLIBS/modulefiles -export myFC=ftn -export WRFPATH=$NCEPLIBS/wrf.shared.new/v1.1.1/src -export FCOMP=ftn -# END GWV ADD + target=gaea + + # GWV ADD + module load craype + module load intel + export NCEPLIBS=/lustre/f2/dev/ncep/George.Vandenberghe/NEWCOPY/l508/lib/ + module use $NCEPLIBS/modulefiles + export WRFPATH=$NCEPLIBS/wrf.shared.new/v1.1.1/src + export myFC=ftn + export FCOMP=ftn + # END GWV ADD ##--------------------------------------------------------------------------- elif [[ -d /lfs3 ]] ; then @@ -166,13 +116,11 @@ elif [[ -d /lfs3 ]] ; then fi target=jet module purge - -#export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib - export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib -export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/NCEPLIBS.15X - module use $NCEPLIBS/modulefiles -export WRFPATH=$NCEPLIBS/wrf.shared.new/v1.1.1/src -export myFC=mpiifort + export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib + export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/NCEPLIBS.15X + module use $NCEPLIBS/modulefiles + export WRFPATH=$NCEPLIBS/wrf.shared.new/v1.1.1/src + export myFC=mpiifort else echo WARNING: UNKNOWN PLATFORM 1>&2 diff --git a/sorc/ncl.setup b/sorc/ncl.setup index c848fb9887..de01309038 100644 --- a/sorc/ncl.setup +++ b/sorc/ncl.setup @@ -1,28 +1,12 @@ -if [ $target == wcoss_dell_p3 ] -then -module load NCL/6.4.0 -fi - -if [ $target == wcoss_cray ] -then -module load gcc/4.9.2 -module load NCL-gnu-haswell/6.3.0 -fi - 
-if [ $target == wcoss ] -then -module load ncarg/v6.1.0 -fi - -if [ $target == jet ] -then -module load ncl/6.5.0 -export NCARG_LIB=$NCARG_ROOT/lib -fi - -if [ $target == hera ] -then -module load ncl/6.5.0 -export NCARG_LIB=$NCARG_ROOT/lib -fi - +#!/bin/bash + +set +x +case $target in + 'jet'|'hera') + module load ncl/6.5.0 + export NCARG_LIB=$NCARG_ROOT/lib + ;; + *) + echo "[${BASH_SOURCE}]: unknown $target" + ;; +esac diff --git a/sorc/reg2grb2.fd/reg2grb2.f b/sorc/reg2grb2.fd/reg2grb2.f old mode 100755 new mode 100644 diff --git a/sorc/reg2grb2.fd/regdiag.f b/sorc/reg2grb2.fd/regdiag.f old mode 100755 new mode 100644 diff --git a/sorc/regrid_nemsio.fd/Makefile b/sorc/regrid_nemsio.fd/Makefile deleted file mode 100644 index 4a089699b4..0000000000 --- a/sorc/regrid_nemsio.fd/Makefile +++ /dev/null @@ -1,159 +0,0 @@ -#============================================================================== -# -# REGRID_NEMSIO Makefile -# -#============================================================================== - -#----------------------------------------------------------------------------- -# -- Parent make (calls child make) -- -#----------------------------------------------------------------------------- - -# ------------- -# General Rules -# ------------- - -SHELL=/bin/sh - -RM = /bin/rm -f -MKDIR = /bin/mkdir -p - -#------------ -# Include machine dependent compile & load options -#------------ - -MAKE_CONF = -include $(MAKE_CONF) - -# ------------- -# This makefile -# ------------- - -MAKE_FILE = Makefile - -# ----------- -# Load module -# ----------- - -EXE_FILE = regrid_nemsio - -# -------------------- -# Installing directory -# -------------------- - -INSTALL_DIR = ../../exec/ - -# -------- -# Log file -# -------- - -LOG_FILE = log.make.$(EXE_FILE) - -# --------------- -# Call child make -# --------------- - -"" : - @$(MAKE) -f $(MAKE_FILE) all - -# ------------ -# Make install -# ------------ - -install: - @echo - @echo '==== INSTALL 
=================================================' - @if [ -e $(INSTALL_DIR) ]; then \ - if [ ! -d $(INSTALL_DIR) ]; then \ - echo '### Fail to create installing directory ###' ;\ - echo '### Stop the installation ###' ;\ - exit ;\ - fi ;\ - else \ - echo " mkdir -p $(INSTALL_DIR)" ;\ - mkdir -p $(INSTALL_DIR) ;\ - fi - cp $(EXE_FILE) $(INSTALL_DIR) - @cd $(INSTALL_DIR) ; ls -l $(pwd)/$(EXE_FILE) - -#----------- -# Make clean -# ---------- - -clean: - @echo - @echo '==== CLEAN ===================================================' - - $(RM) $(EXE_FILE) *.o *.mod - - $(RM) log.make.$(EXE_FILE) - -#----------------------------------------------------------------------------- -# -- Child make -- -#----------------------------------------------------------------------------- - -# --------- -# Libraries -# --------- - -INCS = $(NETCDF_INCLUDE) -I$(NEMSIO_INC) -LIBS = $(NEMSIO_LIB) $(BACIO_LIB4) $(W3NCO_LIBd) $(SP_LIB4) $(NETCDF_LDFLAGS) - -# ------------ -# Source files -# ------------ - -SRCSF90 = \ - kinds.f90 \ - constants.f90 \ - physcons.f90 \ - mpi_interface.f90 \ - namelist_def.f90 \ - variable_interface.f90 \ - netcdfio_interface.f90 \ - interpolation_interface.f90 \ - gfs_nems_interface.f90 \ - fv3_interface.f90 - -SRCS = $(SRCSF77) $(SRCSF90) - -# ------------ -# Object files -# ------------ - -OBJS = ${SRCSF90:.f90=.o} ${SRCSF77:.f=.o} - -# ------------ -# Dependencies -# ------------ -MAKE_DEPEND = Makefile.dependency -include $(MAKE_DEPEND) - -# ----------------------- -# Default compiling rules -# ----------------------- - -.SUFFIXES : -.SUFFIXES : .F90 .f90 .f .c .o - -.f90.o : - @echo - @echo '---> Compiling $<' - $(F90) $(FCFFLAGS) $(INCS) $(OPTIMIZATION) $(DEBUG) -c $< - -.f.o : - @echo - @echo '---> Compiling $<' - $(F77) $(FCFFLAGS) $(OPTIMIZATION) $(DEBUG) -c $< - -# ------------------------ -# Call compiler and linker -# ------------------------ - -all: REGRID_NEMSIO - -REGRID_NEMSIO: $(OBJS) - $(LD) $(LDFLAGS) $(OBJS) $(INCS) main.f90 $(LIBS) -o 
$(EXE_FILE) > $(LOG_FILE) - -help: - @ echo "Available targets:" - @ echo " make creates executable" - @ echo " make install creates exec & places it in bin" - @ echo " make clean cleans objects, exec, and alien files" diff --git a/sorc/regrid_nemsio.fd/Makefile.dependency b/sorc/regrid_nemsio.fd/Makefile.dependency deleted file mode 100644 index 52f6e80077..0000000000 --- a/sorc/regrid_nemsio.fd/Makefile.dependency +++ /dev/null @@ -1,9 +0,0 @@ -kinds.o: kinds.f90 -constants.o: constants.f90 kinds.o -physcons.o: physcons.f90 kinds.o -variable_interface.o: variable_interface.f90 namelist_def.o physcons.o constants.o kinds.o -namelist_def.o: namelist_def.f90 mpi_interface.o kinds.o -netcdfio_interface.o: netcdfio_interface.f90 kinds.o -interpolation_interface.o: interpolation_interface.f90 constants.o kinds.o namelist_def.o netcdfio_interface.o -gfs_nems_interface.o: gfs_nems_interface.f90 variable_interface.o constants.o kinds.o mpi_interface.o namelist_def.o -fv3_interface.o: fv3_interface.f90 variable_interface.o interpolation_interface.o constants.o kinds.o mpi_interface.o namelist_def.o netcdfio_interface.o gfs_nems_interface.o diff --git a/sorc/supvit.fd/makefile b/sorc/supvit.fd/makefile deleted file mode 100644 index 288e42beff..0000000000 --- a/sorc/supvit.fd/makefile +++ /dev/null @@ -1,31 +0,0 @@ -SHELL= /bin/sh -ISIZE = 4 -RSIZE = 8 -COMP= ifort -##LIBS_SUP= -L/contrib/nceplibs/nwprod/lib -lw3emc_d -lw3nco_d -lg2_d -lbacio_4 -ljasper -lpng -lz -LDFLAGS= -##ccs FFLAGS= -O -qflttrap=ov:zero:inv:enable -qcheck -qextchk -qwarn64 -qintsize=$(ISIZE) -qrealsize=$(RSIZE) -# FFLAGS= -O2 -check bounds -check format -xHost -fpe0 -# DEBUG= -check bounds -check format -FFLAGS= -O2 -g -i$(ISIZE) -r$(RSIZE) - -supvit: supvit_main.f supvit_modules.o - @echo " " - @echo " Compiling program that sorts and updates vitals records...." 
- $(COMP) $(FFLAGS) $(LDFLAGS) supvit_modules.o supvit_main.f $(LIBS_SUP) -o supvit - @echo " " - -supvit_modules.o: supvit_modules.f - @echo " " - @echo " Compiling the modules....." - $(COMP) -c supvit_modules.f -o supvit_modules.o - @echo " " - -CMD = supvit - -clean: - -rm -f *.o *.mod - -install: - mv $(CMD) ../../exec/$(CMD) - diff --git a/sorc/supvit.fd/supvit_modules.f b/sorc/supvit.fd/supvit_modules.f old mode 100755 new mode 100644 diff --git a/sorc/syndat_getjtbul.fd/getjtbul.f b/sorc/syndat_getjtbul.fd/getjtbul.f old mode 100755 new mode 100644 diff --git a/sorc/syndat_getjtbul.fd/makefile b/sorc/syndat_getjtbul.fd/makefile deleted file mode 100755 index 3ac5730f31..0000000000 --- a/sorc/syndat_getjtbul.fd/makefile +++ /dev/null @@ -1,23 +0,0 @@ -SHELL= /bin/sh -#LIBS= -L/nwprod/lib -lw3nco_v2.0.5_4 -#LIBS= -L/contrib/nceplibs/nwprod/lib -lw3nco_v2.0.5_4 -FC= ifort -#DEBUG = -ftrapuv -check all -fp-stack-check -fstack-protector -##DEBUG = -ftrapuv -fp-stack-check -fstack-protector -FFLAGS= -O3 -g -traceback -assume noold_ldout_format $(DEBUG) -LDFLAGS= -SRCS= getjtbul.f -OBJS= getjtbul.o -CMD= syndat_getjtbul - -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS_SYN_GET) - -clean: - -rm -f $(OBJS) - -install: - -mv $(CMD) ../../exec/$(CMD) - diff --git a/sorc/syndat_maksynrc.fd/makefile b/sorc/syndat_maksynrc.fd/makefile deleted file mode 100755 index 9adcb17e26..0000000000 --- a/sorc/syndat_maksynrc.fd/makefile +++ /dev/null @@ -1,21 +0,0 @@ -SHELL= /bin/sh -#LIBS= -L/nwprod/lib -lw3nco_v2.0.5_4 -lbacio_v2.0.1_4 -##LIBS_SYN_MAK= -L/contrib/nceplibs/nwprod/lib -lw3nco_v2.0.5_4 -lbacio_v2.0.1_4 -FC= ifort -#DEBUG = -ftrapuv -check all -check nooutput_conversion -fp-stack-check -fstack-protector -FFLAGS= -O3 -g -traceback -assume noold_ldout_format $(DEBUG) -LDFLAGS= -SRCS= maksynrc.f -OBJS= maksynrc.o -CMD= syndat_maksynrc - -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS_SYN_MAK) - -clean: - -rm -f $(OBJS) 
- -install: - mv $(CMD) ../../exec/$(CMD) diff --git a/sorc/syndat_maksynrc.fd/maksynrc.f b/sorc/syndat_maksynrc.fd/maksynrc.f old mode 100755 new mode 100644 diff --git a/sorc/syndat_qctropcy.fd/makefile b/sorc/syndat_qctropcy.fd/makefile deleted file mode 100755 index d667c26cbe..0000000000 --- a/sorc/syndat_qctropcy.fd/makefile +++ /dev/null @@ -1,23 +0,0 @@ -SHELL= /bin/sh -#LIBS= -L/nwprod/lib -lw3nco_v2.0.5_8 -##LIBS= -L/contrib/nceplibs/nwprod/lib -lw3nco_v2.0.5_8 -FC= ifort -#DEBUG = -ftrapuv -check all -check noarg_temp_created -fp-stack-check -fstack-protector -## if '-check all' enabled, include '-check noarg_temp_created' to avoid warning msgs indicating -## slight performance hit due to chosen method of passing array arguments to w3difdat -FFLAGS= -O3 -g -traceback -r8 -i8 -assume byterecl -assume noold_ldout_format $(DEBUG) -LDFLAGS= -SRCS= qctropcy.f -OBJS= qctropcy.o -CMD= syndat_qctropcy - -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS_SYN_QCT) - -clean: - -rm -f $(OBJS) - -install: - mv $(CMD) ../../exec/$(CMD) diff --git a/sorc/syndat_qctropcy.fd/qctropcy.f b/sorc/syndat_qctropcy.fd/qctropcy.f old mode 100755 new mode 100644 diff --git a/sorc/tave.fd/makefile b/sorc/tave.fd/makefile deleted file mode 100755 index 3ccaf4b87b..0000000000 --- a/sorc/tave.fd/makefile +++ /dev/null @@ -1,25 +0,0 @@ -SHELL= /bin/sh -ISIZE = 4 -RSIZE = 8 -COMP= ifort -##INC = /contrib/nceplibs/nwprod/lib/incmod/g2_d -##LIBS= -L/contrib/nceplibs/nwprod/lib -lw3emc_d -lw3nco_d -lg2_d -lbacio_4 -ljasper -lpng -lz -LDFLAGS= -# DEBUG= -check all -debug all -traceback -FFLAGS= -O2 -g -traceback -I $(INC) -i$(ISIZE) -r$(RSIZE) -# FFLAGS= -O3 -I $(INC) -i$(ISIZE) -r$(RSIZE) - -tave: tave.f - @echo " " - @echo " Compiling the interpolation program....." 
- $(COMP) $(FFLAGS) $(LDFLAGS) tave.f $(LIBS) -o tave.x - @echo " " - -CMD = tave.x - -clean: - -rm -f *.o *.mod - -install: - mv $(CMD) ../../exec/$(CMD) - diff --git a/sorc/tave.fd/tave.f b/sorc/tave.fd/tave.f old mode 100755 new mode 100644 diff --git a/sorc/tocsbufr.fd/makefile_module b/sorc/tocsbufr.fd/makefile_module deleted file mode 100755 index 06f5ba7092..0000000000 --- a/sorc/tocsbufr.fd/makefile_module +++ /dev/null @@ -1,82 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 11:21:07 AM on 10/28/94 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. 
-# -SRCS= tocsbufr.f - -OBJS= tocsbufr.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = $(myFC) -LDFLAGS = $(myFCFLAGS) -LIBS = $(W3EMC_LIB4) \ - $(W3NCO_LIB4) \ - $(BUFR_LIB4) \ - $(BACIO_LIB4) \ - $(SP_LIB4) \ - $(SIGIO_LIB) -CMD = ../../exec/tocsbufr -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = $(FFLAGSM) -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/tocsbufr.fd/tocsbufr.f b/sorc/tocsbufr.fd/tocsbufr.f old mode 100755 new mode 100644 diff --git a/sorc/vint.fd/makefile b/sorc/vint.fd/makefile deleted file mode 100755 index 06647d1fc6..0000000000 --- a/sorc/vint.fd/makefile +++ /dev/null @@ -1,27 +0,0 @@ -SHELL= /bin/sh -ISIZE = 4 -RSIZE = 8 -COMP= ifort -##INC = /contrib/nceplibs/nwprod/lib/incmod/g2_d -##LIBS= -L/contrib/nceplibs/nwprod/lib -lw3emc_d -lw3nco_d -lg2_d -lbacio_4 -ljasper -lpng -lz -LDFLAGS= -# FFLAGS= -O3 -I $(INC) -i$(ISIZE) -r$(RSIZE) -# DEBUG= -check all -debug all -traceback -FFLAGS= -O2 -g -traceback -I $(INC) -i$(ISIZE) -r$(RSIZE) - -vint: vint.f - @echo " " - @echo " Compiling the interpolation program....." 
- $(COMP) $(FFLAGS) $(LDFLAGS) vint.f $(LIBS) -o vint.x - @echo " " - -.PHONY: clean - -CMD = vint.x - -clean: - -rm -f *.o *.mod - -install: - mv $(CMD) ../../exec/$(CMD) - diff --git a/sorc/vint.fd/vint.f b/sorc/vint.fd/vint.f old mode 100755 new mode 100644 diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index ac536d72de..6965d7f30e 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -14,7 +14,7 @@ common_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for shared through models" pwd=$(pwd) - machine=${machine:-"WCOSS_C"} + machine=${machine:-"WCOSS2"} machine=$(echo $machine | tr '[a-z]' '[A-Z]') CASE=${CASE:-C768} CDATE=${CDATE:-2017032500} @@ -143,19 +143,6 @@ FV3_GFS_predet(){ rCDUMP=${rCDUMP:-$CDUMP} - #------------------------------------------------------------------ - # setup the runtime environment - if [ $machine = "WCOSS_C" ] ; then - HUGEPAGES=${HUGEPAGES:-hugepages4M} - . $MODULESHOME/init/sh 2>/dev/null - module load iobuf craype-$HUGEPAGES 2>/dev/null - export MPICH_GNI_COLL_OPT_OFF=${MPICH_GNI_COLL_OPT_OFF:-MPI_Alltoallv} - export MKL_CBWR=AVX2 - export WRTIOBUF=${WRTIOBUF:-"4M"} - export NC_BLKSZ=${NC_BLKSZ:-"4M"} - export IOBUF_PARAMS="*nemsio:verbose:size=${WRTIOBUF},*:verbose:size=${NC_BLKSZ}" - fi - #------------------------------------------------------- if [ ! 
-d $ROTDIR ]; then mkdir -p $ROTDIR; fi mkdata=NO diff --git a/ush/fv3gfs_downstream_nems.sh b/ush/fv3gfs_downstream_nems.sh index 3138fe75f0..68da1ce8b8 100755 --- a/ush/fv3gfs_downstream_nems.sh +++ b/ush/fv3gfs_downstream_nems.sh @@ -90,15 +90,11 @@ fi $WGRIB2 $PGBOUT2 | grep -F -f $paramlist | $WGRIB2 -i -grib tmpfile1_$fhr3 $PGBOUT2 export err=$?; err_chk -#if [ $machine = WCOSS -o $machine = WCOSS_C -a $downset = 2 ]; then if [ $downset = 2 ]; then $WGRIB2 $PGBOUT2 | grep -F -f $paramlistb | $WGRIB2 -i -grib tmpfile2_$fhr3 $PGBOUT2 export err=$?; err_chk fi -#----------------------------------------------------- -#----------------------------------------------------- -#if [ $machine = WCOSS -o $machine = WCOSS_C -o $machine = WCOSS_DELL_P3 ]; then #----------------------------------------------------- #----------------------------------------------------- export nset=1 @@ -139,7 +135,7 @@ while [ $nset -le $totalset ]; do if [[ $rc -eq 0 ]] ; then export end=$(expr ${end} + 1) fi - # if final record is land, add next record icec + # if final record is land, add next record icec $WGRIB2 -d $end $tmpfile |egrep -i "land" export rc=$? 
if [[ $rc -eq 0 ]] ; then @@ -170,14 +166,14 @@ while [ $nset -le $totalset ]; do export MP_PGMMODEL=mpmd export MP_CMDFILE=$DATA/poescript launcher=${APRUN_DWN:-"aprun -j 1 -n 24 -N 24 -d 1 cfp"} - if [ $machine = WCOSS_C -o $machine = WCOSS_DELL_P3 -o $machine = WCOSS2 ] ; then + if [ $machine = WCOSS2 ] ; then $launcher $MP_CMDFILE elif [ $machine = HERA -o $machine = ORION -o $machine = JET -o $machine = S4 ] ; then if [ -s $DATA/poescript_srun ]; then rm -f $DATA/poescript_srun; fi touch $DATA/poescript_srun nm=0 cat $DATA/poescript | while read line; do - echo "$nm $line" >> $DATA/poescript_srun + echo "$nm $line" >> $DATA/poescript_srun nm=$((nm+1)) done ${launcher:-"srun --export=ALL"} -n $nm --multi-prog $DATA/poescript_srun @@ -218,8 +214,8 @@ while [ $nset -le $totalset ]; do # $WGRIB2 land.grb -set_grib_type same -new_grid_interpolation bilinear -new_grid_winds earth -new_grid $grid0p25 newland.grb # $WGRIB2 newland.grb -set_byte 4 11 218 -grib newnewland.grb # cat ./newnewland.grb >> pgb2file_${fhr3}_0p25 - # $CNVGRIB -g21 newnewland.grb newnewland.grb1 - # cat ./newnewland.grb1 >> pgbfile_${fhr3}_0p25 + # $CNVGRIB -g21 newnewland.grb newnewland.grb1 + # cat ./newnewland.grb1 >> pgbfile_${fhr3}_0p25 ##0p5 degree # rm -f newland.grb newnewland.grb newnewland.grb1 # $WGRIB2 land.grb -set_grib_type same -new_grid_interpolation bilinear -new_grid_winds earth -new_grid $grid0p5 newland.grb @@ -243,7 +239,7 @@ while [ $nset -le $totalset ]; do cp pgb2file_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb2.1p00.anl $WGRIB2 -s pgb2file_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2.0p50.anl.idx $WGRIB2 -s pgb2file_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2.1p00.anl.idx - if [ "$PGB1F" = 'YES' ]; then + if [ "$PGB1F" = 'YES' ]; then cp pgbfile_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb.1p00.anl $GRBINDEX $COMOUT/${PREFIX}pgrb.1p00.anl $COMOUT/${PREFIX}pgrb.1p00.anl.idx fi diff --git a/ush/fv3gfs_regrid_nemsio.sh b/ush/fv3gfs_regrid_nemsio.sh index d43f5a5503..19a050520f 100755 --- 
a/ush/fv3gfs_regrid_nemsio.sh +++ b/ush/fv3gfs_regrid_nemsio.sh @@ -17,7 +17,6 @@ # # Attributes: # Language: Portable Operating System Interface (POSIX) Shell -# Machine: WCOSS-CRAY, Theia ################################################################################ # Set environment. diff --git a/ush/gsi_utils.py b/ush/gsi_utils.py index 79c6e627e0..b33be51adb 100644 --- a/ush/gsi_utils.py +++ b/ush/gsi_utils.py @@ -72,12 +72,8 @@ def get_ncdims(ncfile): """ try: import netCDF4 as nc - except ImportError: - print("Python Error!") - print("netCDF4 Python module not available. Do you have the proper Python available in your environment?") - print("Hera: module use -a /contrib/modulefiles && module load anaconda/2.3.0") - print("Dell: module load python/3.6.3") - print(" ") + except ImportError as err: + raise ImportError(f"Unable to import netCDF4 module\n{err}") ncf = nc.Dataset(ncfile) ncdims = {} for d in ncf.dimensions.keys(): @@ -117,12 +113,8 @@ def get_timeinfo(ncfile): """ try: import netCDF4 as nc - except ImportError: - print("Python Error!") - print("netCDF4 Python module not available. 
Do you have the proper Python available in your environment?") - print("Hera: module use -a /contrib/modulefiles && module load anaconda/2.3.0") - print("Dell: module load python/3.6.3") - print(" ") + except ImportError as err: + raise ImportError(f"Unable to import netCDF4 module\n{err}") import datetime as dt import re ncf = nc.Dataset(ncfile) diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh index 2f1a301db0..e78ec23bf6 100755 --- a/ush/load_fv3gfs_modules.sh +++ b/ush/load_fv3gfs_modules.sh @@ -1,45 +1,34 @@ #!/bin/sh -#set -x ############################################################### # Setup runtime environment by loading modules ulimit_s=$( ulimit -S -s ) -#ulimit -S -s 10000 set +x # Find module command and purge: -source "$HOMEgfs/modulefiles/module-setup.sh.inc" +source "$HOMEgfs/modulefiles/module-setup.sh.inc" # Load our modules: -module use "$HOMEgfs/modulefiles" +module use "$HOMEgfs/modulefiles" if [[ -d /lfs3 ]] ; then - # We are on NOAA Jet - module load module_base.jet + # We are on NOAA Jet + module load module_base.jet elif [[ -d /scratch1 ]] ; then - # We are on NOAA Hera - module load module_base.hera + # We are on NOAA Hera + module load module_base.hera elif [[ -d /work ]] ; then - # We are on MSU Orion - module load module_base.orion -elif [[ -d /gpfs/hps && -e /etc/SuSE-release ]] ; then - # We are on NOAA Luna or Surge - module load module_base.wcoss_c -elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then - # We are on NOAA Mars or Venus - module load module_base.wcoss_dell_p3 -elif [[ -d /dcom && -d /hwrf ]] ; then - # We are on NOAA Tide or Gyre - module load module_base.wcoss + # We are on MSU Orion + module load module_base.orion elif [[ -d /glade ]] ; then - # We are on NCAR Yellowstone - module load module_base.cheyenne + # We are on NCAR Yellowstone + module load module_base.cheyenne elif [[ -d /lustre && -d /ncrc ]] ; then - # We are on GAEA. 
- module load module_base.gaea + # We are on GAEA. + module load module_base.gaea else - echo WARNING: UNKNOWN PLATFORM + echo WARNING: UNKNOWN PLATFORM fi set -x diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh index 20f6b8b1e0..2e2584a891 100755 --- a/ush/wave_grib2_sbs.sh +++ b/ush/wave_grib2_sbs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# +# ################################################################################ # # UNIX Script Documentation Block @@ -17,11 +17,10 @@ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # -# Requirements: -# - wgrib2 with IPOLATES library -# +# Requirements: +# - wgrib2 with IPOLATES library +# ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations @@ -39,8 +38,8 @@ alertName=$(echo $RUN|tr [a-z] [A-Z]) - grdID=$1 - gribDIR=${grdID}_grib + grdID=$1 + gribDIR=${grdID}_grib rm -rfd ${gribDIR} mkdir ${gribDIR} err=$? @@ -119,7 +118,7 @@ # 0.e Links to working directory ln -s ${DATA}/mod_def.$grdID mod_def.ww3 - ln -s ${DATA}/output_${ymdh}0000/out_grd.$grdID out_grd.ww3 + ln -s ${DATA}/output_${ymdh}0000/out_grd.$grdID out_grd.ww3 # --------------------------------------------------------------------------- # # 1. Generate GRIB file with all data @@ -139,7 +138,7 @@ ${DATA}/ww3_grib2.${grdID}.inp.tmpl > ww3_grib.inp - echo "ww3_grib.inp" + echo "ww3_grib.inp" cat ww3_grib.inp # 1.b Run GRIB packing program @@ -163,13 +162,13 @@ exit 3 fi - if [ $fht -gt 0 ]; then + if [ $fht -gt 0 ]; then $WGRIB2 gribfile -set_date $CDATE -set_ftime "$fhr hour fcst" -grib ${COMOUT}/gridded/${outfile} err=$? - else - $WGRIB2 gribfile -set_date $CDATE -set_ftime "$fhr hour fcst" -set table_1.4 1 -set table_1.2 1 -grib ${COMOUT}/gridded/${outfile} + else + $WGRIB2 gribfile -set_date $CDATE -set_ftime "$fhr hour fcst" -set table_1.4 1 -set table_1.2 1 -grib ${COMOUT}/gridded/${outfile} err=$? 
- fi + fi if [ $err != 0 ] then @@ -239,7 +238,7 @@ echo "${outfile} is global.0p50, not alert out" fi - + # --------------------------------------------------------------------------- # # 3. Clean up the directory diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh index 5fb1ce7bcb..bb68333b17 100755 --- a/ush/wave_grid_interp_sbs.sh +++ b/ush/wave_grid_interp_sbs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# +# ################################################################################ # # UNIX Script Documentation Block @@ -17,11 +17,10 @@ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # -# Requirements: -# - wgrib2 with IPOLATES library -# +# Requirements: +# - wgrib2 with IPOLATES library +# ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations @@ -37,7 +36,7 @@ cd $GRDIDATA - grdID=$1 + grdID=$1 ymdh=$2 dt=$3 nst=$4 @@ -89,18 +88,18 @@ # 0.c Links to files rm -f ${DATA}/output_${ymdh}0000/out_grd.$grdID - + if [ ! -f ${DATA}/${grdID}_interp.inp.tmpl ]; then cp $PARMwave/${grdID}_interp.inp.tmpl ${DATA} fi - ln -sf ${DATA}/${grdID}_interp.inp.tmpl . + ln -sf ${DATA}/${grdID}_interp.inp.tmpl . for ID in $waveGRD do ln -sf ${DATA}/output_${ymdh}0000/out_grd.$ID . done - for ID in $waveGRD $grdID + for ID in $waveGRD $grdID do ln -sf ${DATA}/mod_def.$ID . done @@ -156,7 +155,7 @@ cp -f ./WHTGRIDINT.bin ${DATA}/WHTGRIDINT.bin.${grdID} cp -f ./WHTGRIDINT.bin ${FIXwave}/WHTGRIDINT.bin.${grdID} fi - + if [ "$err" != '0' ] then @@ -196,7 +195,7 @@ # # fi - fi + fi # --------------------------------------------------------------------------- # # 2. 
Clean up the directory diff --git a/ush/wave_outp_cat.sh b/ush/wave_outp_cat.sh index 56d1b63896..536e4203a5 100755 --- a/ush/wave_outp_cat.sh +++ b/ush/wave_outp_cat.sh @@ -1,22 +1,21 @@ #!/bin/bash -# +# ################################################################################ # # UNIX Script Documentation Block -# Script name: wave_outp_cat.sh -# Script description: Gathers ASCII data files for all fhr for each buoy +# Script name: wave_outp_cat.sh +# Script description: Gathers ASCII data files for all fhr for each buoy # # Author: Jessica Meixner Org: NCEP/EMC Date: 2020-08-27 -# Abstract: Cats spec files from each fhr into one for each buoy +# Abstract: Cats spec files from each fhr into one for each buoy # # Script history log: -# 2020-08-27 Jessica Meixner creation of script +# 2020-08-27 Jessica Meixner creation of script # # $Id$ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # ################################################################################ # --------------------------------------------------------------------------- # @@ -30,7 +29,7 @@ # Use LOUD variable to turn on/off trace. Defaults to YES (on). export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES [[ "$LOUD" != YES ]] && set +x - + bloc=$1 MAXHOUR=$2 specdir=$3 @@ -69,7 +68,7 @@ # --------------------------------------------------------------------------- # -# 1. Cat for a buoy all fhr into one file +# 1. Cat for a buoy all fhr into one file set +x echo " Generate input file for ww3_outp." 
@@ -79,7 +78,7 @@ then outfile=${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.bull coutfile=${STA_DIR}/c${specdir}/$WAV_MOD_TAG.$buoy.cbull - rm outfile coutfile + rm outfile coutfile else outfile=${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.spec rm outfile @@ -94,9 +93,9 @@ then outfilefhr=${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull coutfilefhr=${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull - else + else outfilefhr=${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec - fi + fi if [ -f $outfilefhr ] then @@ -105,7 +104,7 @@ cat $outfilefhr >> ${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.bull cat $coutfilefhr >> ${STA_DIR}/c${specdir}/$WAV_MOD_TAG.$buoy.cbull rm $outfilefhr $coutfilefhr - else + else cat $outfilefhr >> ${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.spec #rm $outfilefhr fi diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh index da91d3f17f..e48d637307 100755 --- a/ush/wave_outp_spec.sh +++ b/ush/wave_outp_spec.sh @@ -1,5 +1,5 @@ #!/bin/bash -# +# ################################################################################ # # UNIX Script Documentation Block @@ -17,7 +17,6 @@ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # ################################################################################ # --------------------------------------------------------------------------- # @@ -31,7 +30,7 @@ # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES [[ "$LOUD" != YES ]] && set +x - + bloc=$1 ymdh=$2 specdir=$3 diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh index d48af0c71f..bb98fee07b 100755 --- a/ush/wave_prnc_cur.sh +++ b/ush/wave_prnc_cur.sh @@ -1,5 +1,5 @@ #!/bin/sh -# +# ################################################################################ # # UNIX Script Documentation Block @@ -12,13 +12,12 @@ # Script history log: # 2019-10-02 J-Henrique Alves: origination, first version # 2019-11-02 J-Henrique Alves Ported to global-workflow. -# 2020-06-10 J-Henrique Alves Ported R&D machine Hera +# 2020-06-10 J-Henrique Alves Ported R&D machine Hera # # $Id$ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # ################################################################################ # @@ -46,12 +45,12 @@ mv -f cur_temp3.nc cur_uv_${PDY}_${fext}${fh3}_flat.nc # If weights need to be regenerated due to CDO ver change, use: # $CDO genbil,r4320x2160 rtofs_glo_2ds_f000_3hrly_prog.nc weights.nc cp ${FIXwave}/weights_rtofs_to_r4320x2160.nc ./weights.nc - + # Interpolate to regular 5 min grid $CDO remap,r4320x2160,weights.nc cur_uv_${PDY}_${fext}${fh3}_flat.nc cur_5min_01.nc # Perform 9-point smoothing twice to make RTOFS data less noisy when -# interpolating from 1/12 deg RTOFS grid to 1/6 deg wave grid +# interpolating from 1/12 deg RTOFS grid to 1/6 deg wave grid if [ "WAV_CUR_CDO_SMOOTH" = "YES" ]; then $CDO -f nc -smooth9 cur_5min_01.nc cur_5min_02.nc $CDO -f nc -smooth9 cur_5min_02.nc cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc @@ -62,7 +61,7 @@ fi # Cleanup rm -f cur_temp[123].nc cur_5min_??.nc cur_glo_uv_${PDY}_${fext}${fh3}.nc weights.nc -if [ ${flagfirst} = "T" ] +if [ ${flagfirst} = "T" ] then sed -e "s/HDRFL/T/g" ${PARMwave}/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp else diff --git a/util/modulefiles/gfs_util.wcoss_dell_p3 b/util/modulefiles/gfs_util.wcoss_dell_p3 deleted file mode 100755 
index bde874a371..0000000000 --- a/util/modulefiles/gfs_util.wcoss_dell_p3 +++ /dev/null @@ -1,22 +0,0 @@ -#%Module##################################################### -## Module file for GFS util -############################################################# -# -# Loading required system modules -# - module load ips/18.0.1.163 - module load impi/18.0.1 - module load NCL/6.4.0 - -# Loading GEMPAK module - module use -a /gpfs/dell1/nco/ops/nwprod/modulefiles/ - module load gempak/7.3.3 - -# Loading Intel-Compiled NCEP Libraries - module load bacio/2.0.3 - module load w3emc/2.4.0 - module load w3nco/2.2.0 - module load ip/3.0.2 - module load sp/2.0.3 - module load g2/3.2.0 - module load bufr/11.3.0 diff --git a/util/sorc/compile_gfs_util_wcoss.sh b/util/sorc/compile_gfs_util_wcoss.sh index 2fe4e2c7cf..724626d3ea 100755 --- a/util/sorc/compile_gfs_util_wcoss.sh +++ b/util/sorc/compile_gfs_util_wcoss.sh @@ -2,7 +2,7 @@ ###################################################################### # -# Build executable GFS utility for GFS V16.0.0 +# Build executable GFS utility for GFS V16.0.0 # ###################################################################### @@ -10,21 +10,13 @@ LMOD_EXACT_MATCH=no source ../../sorc/machine-setup.sh > /dev/null 2>&1 cwd=$(pwd) -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then +if [ "$target" = "hera" ] ; then echo " " - echo " You are on WCOSS: $target " + echo " You are on $target " echo " " -elif [ "$target" = "wcoss" ] ; then +else echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V16.0.0 " - echo " " - echo " " - exit -else - echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." + echo " Your machine $target is not supported" echo " The script $0 can not continue. Aborting!" 
echo " " exit @@ -33,7 +25,7 @@ echo " " # Load required modules source ../modulefiles/gfs_util.${target} -module list +module list dirlist="overgridid rdbfmsua webtitle mkgfsawps" set -x diff --git a/util/sorc/mkgfsawps.fd/compile_mkgfsawps_wcoss.sh b/util/sorc/mkgfsawps.fd/compile_mkgfsawps_wcoss.sh index 7c7a5022e3..5d12f3e53c 100755 --- a/util/sorc/mkgfsawps.fd/compile_mkgfsawps_wcoss.sh +++ b/util/sorc/mkgfsawps.fd/compile_mkgfsawps_wcoss.sh @@ -3,21 +3,13 @@ LMOD_EXACT_MATCH=no source ../../../sorc/machine-setup.sh > /dev/null 2>&1 cwd=$(pwd) -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then +if [ "$target" = "hera" ]; then echo " " - echo " You are on WCOSS: $target " + echo " You are on $target " echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V15.0.0 " - echo " " - echo " " - exit else echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." + echo " Your machine $target is not supported" echo " The script $0 can not continue. Aborting!" 
echo " " exit diff --git a/util/sorc/mkgfsawps.fd/makefile.wcoss_cray b/util/sorc/mkgfsawps.fd/makefile.wcoss_cray deleted file mode 100755 index b1bd05f7e9..0000000000 --- a/util/sorc/mkgfsawps.fd/makefile.wcoss_cray +++ /dev/null @@ -1,56 +0,0 @@ -SHELL=/bin/sh -# -SRCS= mkgfsawps.f - -OBJS= mkgfsawps.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort - -LDFLAGS = -IOMP5_LIB=/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libiomp5.a - -LIBS = -Xlinker --start-group ${W3NCO_LIBd} ${W3NCO_LIBd} ${IP_LIBd} ${SP_LIBd} ${BACIO_LIB4} ${IOMP5_LIB} - -CMD = mkgfsawps -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -convert big_endian -r8 -i4 -assume noold_ldout_format - -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. 
-# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - rm -f $(OBJS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - -rm -f $(OBJS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/util/sorc/mkgfsawps.fd/makefile.wcoss_dell_p3 b/util/sorc/mkgfsawps.fd/makefile.wcoss_dell_p3 deleted file mode 100755 index 86f3c417b1..0000000000 --- a/util/sorc/mkgfsawps.fd/makefile.wcoss_dell_p3 +++ /dev/null @@ -1,53 +0,0 @@ -SHELL=/bin/sh -# -SRCS= mkgfsawps.f - -OBJS= mkgfsawps.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort - -LDFLAGS = -IOMP5_LIB=/usrx/local/prod/intel/2018UP01/lib/intel64/libiomp5.a - -LIBS = -Xlinker --start-group ${W3NCO_LIBd} ${W3NCO_LIBd} ${IP_LIBd} ${SP_LIBd} ${BACIO_LIB4} ${IOMP5_LIB} - -CMD = mkgfsawps -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -convert big_endian -r8 -i4 -assume noold_ldout_format - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. 
-# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - rm -f $(OBJS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - -rm -f $(OBJS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/util/sorc/overgridid.fd/compile_overgridid_wcoss.sh b/util/sorc/overgridid.fd/compile_overgridid_wcoss.sh index 3c55640367..d7b0e0185c 100755 --- a/util/sorc/overgridid.fd/compile_overgridid_wcoss.sh +++ b/util/sorc/overgridid.fd/compile_overgridid_wcoss.sh @@ -10,21 +10,13 @@ LMOD_EXACT_MATCH=no source ../../../sorc/machine-setup.sh > /dev/null 2>&1 cwd=$(pwd) -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then +if [ "$target" = "hera" ]; then echo " " echo " You are on $target " echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V15.0.0 " - echo " " - echo " " - exit else echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." + echo " Your machine $target is not supported" echo " The script $0 can not continue. Aborting!" 
echo " " exit diff --git a/util/sorc/rdbfmsua.fd/compile_rdbfmsua_wcoss.sh b/util/sorc/rdbfmsua.fd/compile_rdbfmsua_wcoss.sh index 63d0ccdb2a..2ffcdc6190 100755 --- a/util/sorc/rdbfmsua.fd/compile_rdbfmsua_wcoss.sh +++ b/util/sorc/rdbfmsua.fd/compile_rdbfmsua_wcoss.sh @@ -10,21 +10,13 @@ LMOD_EXACT_MATCH=no source ../../../sorc/machine-setup.sh > /dev/null 2>&1 cwd=$(pwd) -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then +if [ "$target" = "hera" ]; then echo " " - echo " You are on WCOSS: $target " + echo " You are on $target " echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V15.0.0 " - echo " " - echo " " - exit else echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." + echo " Your machine $target is not supported." echo " The script $0 can not continue. Aborting!" echo " " exit diff --git a/util/sorc/rdbfmsua.fd/makefile.wcoss_cray b/util/sorc/rdbfmsua.fd/makefile.wcoss_cray deleted file mode 100755 index 69d183f394..0000000000 --- a/util/sorc/rdbfmsua.fd/makefile.wcoss_cray +++ /dev/null @@ -1,84 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 11:21:07 AM on 10/28/94 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. 
-# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -OBJS= rdbfmsua.o - - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# -FC = ifort -# FFLAGS = -O3 -q32 -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -# FFLAGS = -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -FFLAGS = -I${GEMINC} -I${OS_INC} -# LDFLAGS = -O3 -q32 -s -# LDFLAGS = -Wl,-Map,MAPFILE - -# BRIDGE=/gpfs/dell1/nco/ops/nwpara/gempak.v7.3.1/nawips/os/linux3.10.0_x86_64/lib/libbridge.a -BRIDGE=${GEMOLB}/libbridge.a - -LIBS = ${DECOD_UT_LIB} ${BUFR_LIB4} \ - -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} - -# -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} -# -L/nwprod/gempak/nawips1/os/linux2.6.32_x86_64/lib -lgemlib -lappl -lsyslib -lcgemlib -lbridge -lncepBUFR \ -# -lgfortran - -CMD = rdbfmsua - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# CFLAGS= -O3 -q32 - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. 
-# - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - - -# The following rule reads the required NAWIPS definitions and then recursively -# runs this same makefile with a new target in the spawned shell. -# - -clean: - -rm -f ${OBJS} - -clobber: clean - -rm -f ${CMD} - -void: clobber - -rm -f ${SRCS} makefile diff --git a/util/sorc/rdbfmsua.fd/makefile.wcoss_dell_p3 b/util/sorc/rdbfmsua.fd/makefile.wcoss_dell_p3 deleted file mode 100755 index 69d183f394..0000000000 --- a/util/sorc/rdbfmsua.fd/makefile.wcoss_dell_p3 +++ /dev/null @@ -1,84 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 11:21:07 AM on 10/28/94 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. 
-# -OBJS= rdbfmsua.o - - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# -FC = ifort -# FFLAGS = -O3 -q32 -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -# FFLAGS = -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -FFLAGS = -I${GEMINC} -I${OS_INC} -# LDFLAGS = -O3 -q32 -s -# LDFLAGS = -Wl,-Map,MAPFILE - -# BRIDGE=/gpfs/dell1/nco/ops/nwpara/gempak.v7.3.1/nawips/os/linux3.10.0_x86_64/lib/libbridge.a -BRIDGE=${GEMOLB}/libbridge.a - -LIBS = ${DECOD_UT_LIB} ${BUFR_LIB4} \ - -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} - -# -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} -# -L/nwprod/gempak/nawips1/os/linux2.6.32_x86_64/lib -lgemlib -lappl -lsyslib -lcgemlib -lbridge -lncepBUFR \ -# -lgfortran - -CMD = rdbfmsua - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# CFLAGS= -O3 -q32 - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - - -# The following rule reads the required NAWIPS definitions and then recursively -# runs this same makefile with a new target in the spawned shell. 
-# - -clean: - -rm -f ${OBJS} - -clobber: clean - -rm -f ${CMD} - -void: clobber - -rm -f ${SRCS} makefile diff --git a/util/sorc/webtitle.fd/compile_webtitle_wcoss.sh b/util/sorc/webtitle.fd/compile_webtitle_wcoss.sh index 40cdc22f40..d0d8f79bde 100755 --- a/util/sorc/webtitle.fd/compile_webtitle_wcoss.sh +++ b/util/sorc/webtitle.fd/compile_webtitle_wcoss.sh @@ -10,21 +10,13 @@ LMOD_EXACT_MATCH=no source ../../../sorc/machine-setup.sh > /dev/null 2>&1 cwd=$(pwd) -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then +if [ "$target" = "hera" ]; then echo " " - echo " You are on WCOSS: $target " + echo " You are on $target " echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V15.0.0 " - echo " " - echo " " - exit else echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." + echo " Your machine $target is not supported." echo " The script $0 can not continue. Aborting!" echo " " exit diff --git a/workflow/README_ecflow.md b/workflow/README_ecflow.md index 6b9114480e..5dd73617ed 100644 --- a/workflow/README_ecflow.md +++ b/workflow/README_ecflow.md @@ -14,7 +14,7 @@ variables that are set in the shell, then using the ecFlow API, a definition file is created. While the application creates the definition file it also uses the path defined as `ECFgfs`, which will be elaborated on later in this guide, and creates the folders and scripts that match the definition file, setting the -`ECFHome` variable in the definition file to match the `ECFgfs` parameter. +`ECFHome` variable in the definition file to match the `ECFgfs` parameter. Please refer to the [setup the YAML](#configuring-the-yaml-file) section for instructions on how to setup the YAML file for what you want. @@ -94,20 +94,20 @@ shell environment or by specifying a value in the config.base file. 
To use an en the YAML file has a reserved word prefix `env.`. The code functions by parsing the YAML file into a dictionary then doing a recursive search over that dictionary to determine if the `env.` prefix is used anywhere, either a value or key. When a node uses that syntax, the application will search first -the current shell environment variables for a match, if none exists, then it will search +the current shell environment variables for a match, if none exists, then it will search the `config.base` file for any configurations that may have been exported from there. Finally, it will -then replace the string `env.PARAMETER` with the value from the shell or `config.base` file in the +then replace the string `env.PARAMETER` with the value from the shell or `config.base` file in the dictionary that was imported. The original YAML file will remain unchanged. **NOTE:** The environment variable cannot be used in conjunction with a string so trying to use `env.ECFgfs/include` will return only the value for `ECFgfs`, it will not append any strings or -values to the beginning or end of the value. +values to the beginning or end of the value. Example: -Entering `env.FHMAX_GFS` as a value for a node will use the value that was +Entering `env.FHMAX_GFS` as a value for a node will use the value that was specified in the `config.base` file for the `FHMAX_GFS` export. This will be reflected in the final -definition file. It will not be updated in the original YAML file, that will remain as -`env.FHMAX_GFS`. +definition file. It will not be updated in the original YAML file, that will remain as +`env.FHMAX_GFS`. ### Script Repository @@ -245,7 +245,7 @@ Once a suite line has been added, families need to be added under a `nodes:` hea First add the `nodes:` dictionary line under the suite name, then create the hierarchical structure for the families. Families can be dictionary objects under other families. 
In the example below, the suites -`prod00` and `prod06` will have the family `gfs`. +`prod00` and `prod06` will have the family `gfs`. Then only the `prod00` suite will have the family `gdas` added to it. Once the family structure has been setup, add in a `tasks` dictionary under the @@ -306,14 +306,14 @@ endsuite ### Adding edits Edits can be added to either families, tasks or suites by putting an `edits:` -dictionary tag and then listing the edits below. The format for edits will be -the edit variable on the left and then the value on the right. +dictionary tag and then listing the edits below. The format for edits will be +the edit variable on the left and then the value on the right. -So in this example below, lets consider that we want the `RUN` value to be +So in this example below, lets consider that we want the `RUN` value to be `GFS` for both the `prod00` and `prod06` suite but we wnat the `CYC` value -to be `00` for the `prod00` suite and `06` for the `prod06` suite. So in -that case we would use the individual declaration for the suites for the -`CYC` value only and then the listed suites declaration for the rest. +to be `00` for the `prod00` suite and `06` for the `prod06` suite. So in +that case we would use the individual declaration for the suites for the +`CYC` value only and then the listed suites declaration for the rest. * Example ```YAML @@ -367,7 +367,7 @@ endsuite #### Task Setup Extras Tasks are added in as a dictionary under the `tasks:` header. So if you want to add -multiple tasks to a family, do not add them in list syntax, add them as hashes to the dictionary. +multiple tasks to a family, do not add them in list syntax, add them as hashes to the dictionary. * Example ```YAML @@ -422,26 +422,26 @@ endsuite #### Task Script Repository and Templates When adding tasks, it is possible that you may want to run a task for every forecast hour in a large range -but not want to copy and paste the same script for every forecast hour. 
With the generator application, you -can specify a `template:` parameter. After defining the [script repo](#script-repository) parameter, the -application will search the defined directory for the template script. It will then copy the template script to +but not want to copy and paste the same script for every forecast hour. With the generator application, you +can specify a `template:` parameter. After defining the [script repo](#script-repository) parameter, the +application will search the defined directory for the template script. It will then copy the template script to the destination folder for the suite with an adjusted name. In the example below, you can see the range used for the `jgfs_atmos_post_f` forecast hour task with a template. Please refer to the [ranges and lists](#ranges-and-lists) section of this document for information on how to set up a range but for the purposes of the example below, we are focusing on the template. What is relevant here is that we want 4 instances of the `jgfs_atmos_post_f` forecast hour script to be in place and use the same -`jgfs_atmos_post_master` script for the template. +`jgfs_atmos_post_master` script for the template. In addition to the resultant defintion file, noted below is the folder that was created for the `prod00` suite. The -`prod00` folder is located at the `$HOMEecf`, in the case below you cans see it is defined as -`/usr1/knevins/global-workflow/ecf` location and contains four instances of the `jgfs_atmos_post_master` -script, each renamed to match the `task` name in the definition file. +`prod00` folder is located at the `$HOMEecf`, in the case below you cans see it is defined as +`/usr1/knevins/global-workflow/ecf` location and contains four instances of the `jgfs_atmos_post_master` +script, each renamed to match the `task` name in the definition file. -**NOTE:** A special template value is `skip`. 
If you use `template: skip` in a task, the generator will know that -the script is in fact not in the script repository and it will not attempt to copy or create it but it will -add it to the definition file. This is useful in conjunction with the [defstatus](#defstatus) parameter so the -suite will skip already done tasks and there won't be a representation of it in the final directory. +**NOTE:** A special template value is `skip`. If you use `template: skip` in a task, the generator will know that +the script is in fact not in the script repository and it will not attempt to copy or create it but it will +add it to the definition file. This is useful in conjunction with the [defstatus](#defstatus) parameter so the +suite will skip already done tasks and there won't be a representation of it in the final directory. * Example ```YAML @@ -499,13 +499,13 @@ prod00 #### Events To add an event, you first need to add the `events:` dictionary heading underneath the node to which it needs to be -added. Then underneath that `events:` heading, as a list object, add the list of events that you want have attached. +added. Then underneath that `events:` heading, as a list object, add the list of events that you want have attached. -**NOTE:** Events can be ranges or list objects, please see the section below on creating lists or ranges. +**NOTE:** Events can be ranges or list objects, please see the section below on creating lists or ranges. **NOTE:** Events must be added in a list. This is not the same structure as adding tasks, which are dictionary objects, -the events list is an actual list so please make sure to add a hyphen, `-`, in front of every event that you wish to -add. +the events list is an actual list so please make sure to add a hyphen, `-`, in front of every event that you wish to +add. 
* Example ```YAML @@ -549,9 +549,9 @@ To add a trigger, add a `triggers:` dictionary heading underneath the task or fa item with the identifier for what you want the trigger to look for. So for a task, it would be `- task: task_name` or for a family it would be `- family: family_name` -**NOTE:** It was mentioned above but an important distinction from tasks is that triggers need to be in list format. -The reason for this is due to triggers being either families or tasks, and that is determined by the dictionary -label for the list item. +**NOTE:** It was mentioned above but an important distinction from tasks is that triggers need to be in list format. +The reason for this is due to triggers being either families or tasks, and that is determined by the dictionary +label for the list item. **NOTE:** By default, multiple triggers are added to a node with __AND__ @@ -559,7 +559,7 @@ Triggers can also have the following items associated with it: * `event:` * This is listed as part of the list item but in it's own `event:` header. The `event:` must exist within the suite or it will be rejected. - * Events can be lists or ranges. + * Events can be lists or ranges. * `state:` * This will identify the state of the task or family in the trigger. States are generally `active`, `complete`, or `queued`. @@ -637,16 +637,16 @@ endsuite At the time of this README, the use case for the def status was to be able to add nodes to a definition file, and have them marked as complete so that the ecflow run knows that the script is there but acknowleges as done without having to do anything. This is useful when running development tasks, that rely on an operational task, but the operational task -is already done and nothing else needs to be executed. +is already done and nothing else needs to be executed. To add defstatus to a task or family, add a `defstatus:` parameter underneath the node, not a dictionary, this will be a key/value pair. 
It will have a value associated with it so the item will look like `defstatus: value` -**NOTE:** A defstatus can be added to a family or a task object. Both are acceptable formats. +**NOTE:** A defstatus can be added to a family or a task object. Both are acceptable formats. -**NOTE:** When a defstatus is defined for a parent object, all child nodes under the object inherit that so in the +**NOTE:** When a defstatus is defined for a parent object, all child nodes under the object inherit that so in the example below, all families and tasks are considered complete and since the `template: skip` value is there for the -task, the script generator will not attempt to look for it in the script repo. +task, the script generator will not attempt to look for it in the script repo. * Example ```YAML @@ -695,35 +695,35 @@ endsuite ### Repeats -Repeats are in a standalone section because of the nature of how ecflow handles repeating tasks. Ecflow has multiple +Repeats are in a standalone section because of the nature of how ecflow handles repeating tasks. Ecflow has multiple methods for handling repeating tasks but they lack a lot of the specificity that one would hope. Trying to identify something as simple as run every 6 hours for the next three days is a rather complex setup. With that, after adding a repat, please double check the setup to make sure that the code has done the repeat type that you are looking to -accomplish. +accomplish. Repeats are declared with the `repeat:` key value and the value has a specific syntax as follows: - `YYYYMMDD(HH)? to YYYYMMDD(HH)? (by DD:HH:MM)?` -where the items in the `()?` are optional. + `YYYYMMDD(HH)? to YYYYMMDD(HH)? (by DD:HH:MM)?` +where the items in the `()?` are optional. -The first value is the start time specified in year, month, day with a hour value as optional. The second value +The first value is the start time specified in year, month, day with a hour value as optional. 
The second value is the end date in year, month, day format with an hour as an optional value. The third is the increment time in day, hour and minute format. The day is optional as well in third value. It can be read as starting at the first value, repeat until the second value is reached and increment by the third value. If no third value is specified -increment by 1 hour. +increment by 1 hour. -The value `2022032400 to 2022042400` is valid as is the value `2022032400 to 2022042400 by 18:00`. +The value `2022032400 to 2022042400` is valid as is the value `2022032400 to 2022042400 by 18:00`. * If the repeat string has the start and end dates on the same day, just a `time` string with a `date` option will -be used. +be used. * If the repeat string has the start and end on different days but within a 24 hour window, there will be a start date with a repeats and a time string added to the definition file. -* If the repeat spans multiple days, it requires a combination of time, date and crons in the definition file. +* If the repeat spans multiple days, it requires a combination of time, date and crons in the definition file. To elaborate on the example below of `2022032400 to 2022042400 by 18:00`. That will be read as starting at 00Z on -March 24th 2022, run every 18 hours until April 24th 2022. This will be reflected in the definition file with a -`date` value of March 24th, `24.3.2022` to start, a `time` value of `00:00` indicating start, a relative `time` +March 24th 2022, run every 18 hours until April 24th 2022. This will be reflected in the definition file with a +`date` value of March 24th, `24.3.2022` to start, a `time` value of `00:00` indicating start, a relative `time` value of `+18:00` to indicate that after running and waiting 18 hours, run again, and a `repeat` value -to indicate that this needs to happen 42 times to get to April 24th. +to indicate that this needs to happen 42 times to get to April 24th. 
* Example ```YAML @@ -756,19 +756,19 @@ endsuite ## Ranges and Lists -If you need to have multiple forecast hours or have a similar node object with just a few characters difference, the -concept of ranges and lists will be very useful in this situation. Families, tasks, or even triggers and events can +If you need to have multiple forecast hours or have a similar node object with just a few characters difference, the +concept of ranges and lists will be very useful in this situation. Families, tasks, or even triggers and events can have ranges or lists associated with them to shorten the creation of the definition YAML. The goal is to have one -line that can create multiple suites, familes, or tasks or even events. +line that can create multiple suites, familes, or tasks or even events. A range is a basic counting structure that follows the [Python range](https://docs.python.org/3.3/library/stdtypes.html?highlight=range#range) class object format. It is specified in one of the following three formats: * `( $MAX_VALUE )` * `( $START_VALUE, $MAX_VALUE )` * `( $START_VALUE, $MAX_VALUE, $STEP )` -As you can see from the examples, if only one value is specified then it uses that as the max value, if two, then a -start and end, and three includes an increment. It uses default values of 0 for the start value and 1 for the increment -if nothing else is specified. +As you can see from the examples, if only one value is specified then it uses that as the max value, if two, then a +start and end, and three includes an increment. It uses default values of 0 for the start value and 1 for the increment +if nothing else is specified. ### Range Hierarchy @@ -776,18 +776,18 @@ The code also uses a heirarchy structure so that range values can be passed down to modify them slightly. To use a parent counter, use the same notation as the list or range but do not put any values in the notation. 
So if there is a range of `(4)` for a parent node and the child node has the notation `( )` in it then when the parent node uses the value `1`, so will the child node. An example of this would be that if a parent node has a -string value like `jgfs_atmos_post_f( 4 )` there will be 4 objects created in the definition file, -`jgfs_atmos_post_f000`, `jgfs_atmos_post_f001`, `jgfs_atmos_post_f002`, `jgfs_atmos_post_f003`. -Then if that task has an edit that reads `FHR: 'f( )'` then the node `jgfs_atmos_post_f001` will have an edit that -reads `FHR: f001` and so on. +string value like `jgfs_atmos_post_f( 4 )` there will be 4 objects created in the definition file, +`jgfs_atmos_post_f000`, `jgfs_atmos_post_f001`, `jgfs_atmos_post_f002`, `jgfs_atmos_post_f003`. +Then if that task has an edit that reads `FHR: 'f( )'` then the node `jgfs_atmos_post_f001` will have an edit that +reads `FHR: f001` and so on. -If there is no maximum value as well, you can also modify the increment or start values. In the same example from -above if `jgfs_atmos_post_f( 4 )` is the node definition but you wanted the edit value to start at 1 instead of +If there is no maximum value as well, you can also modify the increment or start values. In the same example from +above if `jgfs_atmos_post_f( 4 )` is the node definition but you wanted the edit value to start at 1 instead of 0, then using `FHRGRP: '( 1, )'` which uses 1 as the start value but as you can see has no max value, will set the -value of the edit in node `jgfs_atmos_post_f001` to `FHRGRP: 002`. Similar can also be done for something like -the incremenet value so if the edit was specified as `FHRGRP: '( ,,6 )'` the value for the edit in node -`jgfs_atmos_post_f001` would be set to `FHRGRP: 006` because it would incrememnt by 6 but still use the same -parent counter for the base since no start or max value was specified. +value of the edit in node `jgfs_atmos_post_f001` to `FHRGRP: 002`. 
Similar can also be done for something like +the incremenet value so if the edit was specified as `FHRGRP: '( ,,6 )'` the value for the edit in node +`jgfs_atmos_post_f001` would be set to `FHRGRP: 006` because it would incrememnt by 6 but still use the same +parent counter for the base since no start or max value was specified. * Example ```YAML @@ -866,14 +866,14 @@ endsuite Lists are similar to the ranges but use the `[ ]` bracket syntax. Items in the list can be of any type and will run the same way as ranges. The list cane be either within a string or just a list format for YAML and both should be -interpreted by the generator properly. +interpreted by the generator properly. -**NOTE:** Lists will also match ranges if they are equal in length. So if you have a range of four and a list of -four, when the first element of the range is used, the first element of the list is also used. +**NOTE:** Lists will also match ranges if they are equal in length. So if you have a range of four and a list of +four, when the first element of the range is used, the first element of the list is also used. **NOTE:** Lists do not inheret the parent values directly. They read the position but given the flexibility needed -it does not pass down the actual values. The code could be updated to do that easily if it turns out to be a -future need but due to potential conflicts, it was not set that way in this version. +it does not pass down the actual values. The code could be updated to do that easily if it turns out to be a +future need but due to potential conflicts, it was not set that way in this version. 
* Example ```YAML diff --git a/workflow/hosts.py b/workflow/hosts.py index c54112dce1..57bce4e5e4 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -24,7 +24,7 @@ class Host: """ SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', - 'WCOSS_DELL_P3', 'WCOSS2'] + 'WCOSS2'] def __init__(self, host=None): @@ -48,10 +48,6 @@ def detect(cls): machine = 'ORION' elif os.path.exists('/lfs4/HFIP'): machine = 'JET' - elif os.path.exists('/gpfs') and os.path.exists('/etc/SuSE-release'): - machine = 'WCOSS_C' - elif os.path.exists('/gpfs/dell2'): - machine = 'WCOSS_DELL_P3' elif os.path.exists('/lfs/f1'): machine = 'WCOSS2' diff --git a/workflow/hosts/wcoss_dell_p3.yaml b/workflow/hosts/wcoss_dell_p3.yaml deleted file mode 100644 index daa9595c31..0000000000 --- a/workflow/hosts/wcoss_dell_p3.yaml +++ /dev/null @@ -1,19 +0,0 @@ -base_git: '/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git' -base_svn: '/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git' -dmpdir: '/gpfs/dell3/emc/global/dump' -nwprod: '${NWROOT:-"/gpfs/dell1/nco/ops/nwprod"}' -comroot: '${COMROOT:-"/gpfs/dell1/nco/ops/com"}' -homedir: '/gpfs/dell2/emc/modeling/noscrub/$USER' -stmp: '/gpfs/dell3/stmp/$USER' -ptmp: '/gpfs/dell3/ptmp/$USER' -noscrub: $HOMEDIR -account: GFS-DEV -scheduler: lsf -queue: dev -queue_service: dev_transfer -partition_batch: None -chgrp_rstprod: 'YES' -chgrp_cmd: 'chgrp rstprod' -hpssarch: 'YES' -localarch: 'NO' -atardir: '/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT' \ No newline at end of file From f04f3ba4dfd5b358aea425f65f82d40917597776 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 26 Jul 2022 22:07:42 -0400 Subject: [PATCH 07/16] change gdasechgres dependency to just gdasefcs01 instead of gdasefmn (#933) Replaces the dependency of `gdasechgres` on `gdasefmn` with `gdasefcs01`. Presently, `gdasechgres` has 2 dependencies: - `gdasfcst` - deterministic forecast - `gdasefmn` - ensemble forecasts (all of them). 
The work done in `gdasechgres` actually depends only on the `mem001/atmos/gdas.tHHz.atmf006.nc`. This file is used as a template as well as obtaining `hgtsfc`. As such, there is no reason to depend on the entire ensemble of forecasts to be complete before `gdasechgres` can start. --- workflow/rocoto/workflow_tasks.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index ea90ee37aa..9154ac37f7 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -437,7 +437,7 @@ def atmanalpost(self): dep_dict = {'type': 'cycleexist', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - + resources = self.get_resource('atmanalpost') task = create_wf_task('atmanalpost', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) @@ -999,7 +999,7 @@ def atmensanalpost(self): resources = self.get_resource('atmensanalpost') task = create_wf_task('atmensanalpost', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) - + return task def ecen(self): @@ -1109,7 +1109,7 @@ def echgres(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'metatask', 'name': f'{self.cdump}efmn'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}efcs01'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) From 1ed89c7d202974bd4ccd2b048fedc56382edc9ec Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Thu, 28 Jul 2022 15:35:37 -0400 Subject: [PATCH 08/16] bring GDASApp jjobs and exscripts to global-workflow (#941) --- .gitignore | 6 - jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST | 123 +++++++++++++ jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP | 123 +++++++++++++ jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN | 123 +++++++++++++ 
jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST | 126 +++++++++++++ jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP | 126 +++++++++++++ jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN | 126 +++++++++++++ scripts/exgdas_global_atmos_analysis_post.py | 45 +++++ scripts/exgdas_global_atmos_analysis_prep.py | 44 +++++ scripts/exgdas_global_atmos_analysis_run.sh | 181 +++++++++++++++++++ scripts/exgdas_global_atmos_ensanal_post.py | 44 +++++ scripts/exgdas_global_atmos_ensanal_run.sh | 172 ++++++++++++++++++ sorc/link_workflow.sh | 7 - 13 files changed, 1233 insertions(+), 13 deletions(-) create mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST create mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP create mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN create mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST create mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP create mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN create mode 100755 scripts/exgdas_global_atmos_analysis_post.py create mode 100755 scripts/exgdas_global_atmos_analysis_prep.py create mode 100755 scripts/exgdas_global_atmos_analysis_run.sh create mode 100755 scripts/exgdas_global_atmos_ensanal_post.py create mode 100755 scripts/exgdas_global_atmos_ensanal_run.sh diff --git a/.gitignore b/.gitignore index 6f5a7da9c5..60151e3ab0 100644 --- a/.gitignore +++ b/.gitignore @@ -146,12 +146,6 @@ jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 jobs/JGFS_ATMOS_WAFS_GCIP jobs/JGFS_ATMOS_WAFS_GRIB2 jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 -jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST -jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP -jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN -jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST -jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP -jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN # scripts symlinks scripts/exemcsfc_global_sfc_prep.sh scripts/exgdas_atmos_gldas.sh diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST new file mode 100755 index 0000000000..7784695445 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST @@ -0,0 +1,123 @@ 
+#!/bin/bash +##set -ex +set -x +set -u + +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base atmanal atmanalpost" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env atmanalpost +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA || (echo "$DATA does not exist. ABORT!"; exit 1) + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." 
+export APREFIX="${CDUMP}.t${cyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +export COMOUT=${COMOUT:-$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT} + +mkdir -p $COMOUT + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkf$GDUMP.$gPDY/$gcyc/$COMPONENT" + +# NOTE BELOW IS A HACK FOR TESTING +# PLEASE FIX THIS LATER +# ASK @aerorahul +# HOW TO HANDLE DIFFERENT COMPILERS/ETC. FOR MODEL VS DA +# PROD_UTIL, ETC. DO NOT EXIST FOR JEDI MODULE VERSIONS +module purge +module use $HOMEgfs/sorc/gdas.cd/modulefiles +module load GDAS/orion +export PYTHONPATH=$HOMEgfs/sorc/gdas.cd/ush/:$PYTHONPATH + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPOSTPY:-$HOMEgfs/sorc/gdas.cd/scripts/exgdas_global_atmos_analysis_post.py} +$EXSCRIPT +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP new file mode 100755 index 0000000000..cde886e36b --- /dev/null +++ b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP @@ -0,0 +1,123 @@ +#!/bin/bash +##set -ex +set -x +set -u + +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base atmanal atmanalprep" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + 
. $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env atmanalprep +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA || (echo "$DATA does not exist. ABORT!"; exit 1) + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." 
+export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +export COMOUT=${COMOUT:-$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT} + +mkdir -p $COMOUT + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkf$GDUMP.$gPDY/$gcyc/$COMPONENT" + +# NOTE BELOW IS A HACK FOR TESTING +# PLEASE FIX THIS LATER +# ASK @aerorahul +# HOW TO HANDLE DIFFERENT COMPILERS/ETC. FOR MODEL VS DA +# PROD_UTIL, ETC. DO NOT EXIST FOR JEDI MODULE VERSIONS +module purge +module use $HOMEgfs/sorc/gdas.cd/modulefiles +module load GDAS/orion +export PYTHONPATH=$HOMEgfs/sorc/gdas.cd/ush/:$PYTHONPATH + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-$HOMEgfs/sorc/gdas.cd/scripts/exgdas_global_atmos_analysis_prep.py} +$EXSCRIPT +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN new file mode 100755 index 0000000000..aadf4d7a42 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN @@ -0,0 +1,123 @@ +#!/bin/bash +##set -ex +set -x +set -u + +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base atmanal atmanalrun" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . 
$config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env atmanalrun +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA || (echo "$DATA does not exist. ABORT!"; exit 1) + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." 
+export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +export COMOUT=${COMOUT:-$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT} + +mkdir -p $COMOUT + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkf$GDUMP.$gPDY/$gcyc/$COMPONENT" + +# NOTE BELOW IS A HACK FOR TESTING +# PLEASE FIX THIS LATER +# ASK @aerorahul +# HOW TO HANDLE DIFFERENT COMPILERS/ETC. FOR MODEL VS DA +# PROD_UTIL, ETC. DO NOT EXIST FOR JEDI MODULE VERSIONS +module purge +module use $HOMEgfs/sorc/gdas.cd/modulefiles +module load GDAS/orion +export PYTHONPATH=$HOMEgfs/sorc/gdas.cd/ush/:$PYTHONPATH + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASRUNSH:-$HOMEgfs/sorc/gdas.cd/scripts/exgdas_global_atmos_analysis_run.sh} +$EXSCRIPT +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST new file mode 100755 index 0000000000..f1e85447d2 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST @@ -0,0 +1,126 @@ +#!/bin/bash +##set -ex +set -x +set -u + +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base atmensanal atmensanalpost" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . 
$config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env atmensanalpost +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA || (echo "$DATA does not exist. ABORT!"; exit 1) + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." 
+export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +export COMOUT=${COMOUT:-$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT} +export COMOUT_ENS=${COMOUT_ENS:-$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT} + +mkdir -p $COMOUT +mkdir -p $COMOUT_ENS + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkf$GDUMP.$gPDY/$gcyc/$COMPONENT" + +# NOTE BELOW IS A HACK FOR TESTING +# PLEASE FIX THIS LATER +# ASK @aerorahul +# HOW TO HANDLE DIFFERENT COMPILERS/ETC. FOR MODEL VS DA +# PROD_UTIL, ETC. DO NOT EXIST FOR JEDI MODULE VERSIONS +module purge +module use $HOMEgfs/sorc/gdas.cd/modulefiles +module load GDAS/orion +export PYTHONPATH=$HOMEgfs/sorc/gdas.cd/ush/:$PYTHONPATH + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPOSTPY:-$HOMEgfs/sorc/gdas.cd/scripts/exgdas_global_atmos_ensanal_post.py} +$EXSCRIPT +status=$? 
+[[ $status -ne 0 ]] && exit $status + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP new file mode 100755 index 0000000000..618984bcab --- /dev/null +++ b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP @@ -0,0 +1,126 @@ +#!/bin/bash +##set -ex +set -x +set -u + +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base atmensanal atmensanalprep" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env atmensanalprep +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA || (echo "$DATA does not exist. ABORT!"; exit 1) + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. 
./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +export COMOUT=${COMOUT:-$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT} +export COMOUT_ENS=${COMOUT_ENS:-$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT} + +mkdir -p $COMOUT +mkdir -p $COMOUT_ENS + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkf$GDUMP.$gPDY/$gcyc/$COMPONENT" + +# NOTE BELOW IS A HACK FOR TESTING +# PLEASE FIX THIS LATER +# ASK @aerorahul +# HOW TO HANDLE DIFFERENT COMPILERS/ETC. FOR MODEL VS DA +# PROD_UTIL, ETC. DO NOT EXIST FOR JEDI MODULE VERSIONS +module purge +module use $HOMEgfs/sorc/gdas.cd/modulefiles +module load GDAS/orion +export PYTHONPATH=$HOMEgfs/sorc/gdas.cd/ush/:$PYTHONPATH + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-$HOMEgfs/sorc/gdas.cd/scripts/exgdas_global_atmos_analysis_prep.py} +$EXSCRIPT +status=$? 
+[[ $status -ne 0 ]] && exit $status + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN new file mode 100755 index 0000000000..d3b2c7f90e --- /dev/null +++ b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN @@ -0,0 +1,126 @@ +#!/bin/bash +##set -ex +set -x +set -u + +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base atmensanal atmensanalrun" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env atmensanalrun +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA || (echo "$DATA does not exist. ABORT!"; exit 1) + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. 
./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +export COMOUT=${COMOUT:-$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT} +export COMOUT_ENS=${COMOUT_ENS:-$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT} + +mkdir -p $COMOUT +mkdir -p $COMOUT_ENS + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkf$GDUMP.$gPDY/$gcyc/$COMPONENT" + +# NOTE BELOW IS A HACK FOR TESTING +# PLEASE FIX THIS LATER +# ASK @aerorahul +# HOW TO HANDLE DIFFERENT COMPILERS/ETC. FOR MODEL VS DA +# PROD_UTIL, ETC. DO NOT EXIST FOR JEDI MODULE VERSIONS +module purge +module use $HOMEgfs/sorc/gdas.cd/modulefiles +module load GDAS/orion +export PYTHONPATH=$HOMEgfs/sorc/gdas.cd/ush/:$PYTHONPATH + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASRUNSH:-$HOMEgfs/sorc/gdas.cd/scripts/exgdas_global_atmos_ensanal_run.sh} +$EXSCRIPT +status=$? 
+[[ $status -ne 0 ]] && exit $status + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/scripts/exgdas_global_atmos_analysis_post.py b/scripts/exgdas_global_atmos_analysis_post.py new file mode 100755 index 0000000000..2f17ee4aea --- /dev/null +++ b/scripts/exgdas_global_atmos_analysis_post.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +################################################################################ +# UNIX Script Documentation Block +# . . +# Script name: exgdas_global_atmos_analysis_post.py +# Script description: Post atmospheric analysis script. +# +# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-29 +# +# Abstract: This script runs after the atmospheric analysis and +# archives each diagnostic file into the R2D2 local user database. 
+# +# $Id$ +# +# Attributes: +# Language: Python3 +# +################################################################################ + +# import os and sys to add ush to path +import logging +import os +import sys + +# set up logger +logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + +# get absolute path of ush/ directory either from env or relative to this file +my_dir = os.path.dirname(__file__) +my_home = os.path.dirname(os.path.dirname(my_dir)) +gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') +sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) +logging.info(f"sys.path={sys.path}") + +# import UFSDA utilities +import ufsda + +# get configuration based on environment variables +config = ufsda.misc_utils.get_env_config(component='atm') +config['DIAG_DIR'] = os.path.join(os.environ['COMOUT'], 'diags') +config['BIAS_OUT_DIR'] = os.path.join(os.environ['COMOUT'], 'bc') +config['provider'] = 'ncdiag' + +# use R2D2 to archive diags and bias correction coefficient files +ufsda.archive.atm_diags(config) diff --git a/scripts/exgdas_global_atmos_analysis_prep.py b/scripts/exgdas_global_atmos_analysis_prep.py new file mode 100755 index 0000000000..fc48c5d060 --- /dev/null +++ b/scripts/exgdas_global_atmos_analysis_prep.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 +################################################################################ +# UNIX Script Documentation Block +# . . +# Script name: exgdas_global_atmos_analysis_prep.py +# Script description: Stages files and generates YAML for Global Atmosphere Analysis +# +# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-21 +# +# Abstract: This script stages necessary input files and produces YAML +# configuration input file for FV3-JEDI executable(s) needed +# to produce a UFS Global Atmospheric Analysis. 
+# +# $Id$ +# +# Attributes: +# Language: Python3 +# +################################################################################ + +# import os and sys to add ush to path +import logging +import os +import sys + +# set up logger +logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + +# get absolute path of ush/ directory either from env or relative to this file +my_dir = os.path.dirname(__file__) +my_home = os.path.dirname(os.path.dirname(my_dir)) +gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') +sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) +logging.info(f"sys.path={sys.path}") + +# import UFSDA utilities +import ufsda + +# get configuration based on environment variables +config = ufsda.misc_utils.get_env_config(component='atm') + +# use R2D2 to stage obs and bias correction coefficient files +ufsda.stage.atm_obs(config) +ufsda.stage.bias_obs(config) diff --git a/scripts/exgdas_global_atmos_analysis_run.sh b/scripts/exgdas_global_atmos_analysis_run.sh new file mode 100755 index 0000000000..b5aaf24cbc --- /dev/null +++ b/scripts/exgdas_global_atmos_analysis_run.sh @@ -0,0 +1,181 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_global_atmos_analysis_run.sh +# Script description: Runs the global atmospheric analysis with FV3-JEDI +# +# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-28 +# +# Abstract: This script makes a global model atmospheric analysis using FV3-JEDI +# and also (for now) updates increment files using a python ush utility +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: Orion +# +################################################################################ + +# Set environment. 
+export VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories +pwd=$(pwd) + +# Utilities +export NLN=${NLN:-"/bin/ln -sf"} +export INCPY=${INCPY:-"$HOMEgfs/sorc/gdas.cd/ush/jediinc2fv3.py"} +export GENYAML=${GENYAML:-"$HOMEgfs/sorc/gdas.cd/ush/genYAML"} +export GETOBSYAML=${GETOBSYAML:-"$HOMEgfs/sorc/gdas.cd/ush/get_obs_list.py"} + +################################################################################ +# make subdirectories +mkdir -p $DATA/fv3jedi +mkdir -p $DATA/obs +mkdir -p $DATA/diags +mkdir -p $DATA/bc +mkdir -p $DATA/anl + +################################################################################ +# generate YAML file +cat > $DATA/temp.yaml << EOF +template: ${ATMVARYAML} +output: $DATA/fv3jedi_var.yaml +config: + atm: true + BERROR_YAML: $BERROR_YAML + OBS_DIR: obs + DIAG_DIR: diags + CRTM_COEFF_DIR: crtm + BIAS_IN_DIR: obs + BIAS_OUT_DIR: bc + OBS_PREFIX: $OPREFIX + BIAS_PREFIX: $GPREFIX + OBS_LIST: $OBS_LIST + OBS_YAML_DIR: $OBS_YAML_DIR + BKG_DIR: bkg + fv3jedi_staticb_dir: berror + fv3jedi_fix_dir: fv3jedi + fv3jedi_fieldset_dir: fv3jedi + fv3jedi_fieldmetadata_dir: fv3jedi + OBS_DATE: '$CDATE' + BIAS_DATE: '$GDATE' + ANL_DIR: anl/ + NMEM_ENKF: '$NMEM_ENKF' + INTERP_METHOD: '$INTERP_METHOD' +EOF +$GENYAML --config $DATA/temp.yaml + +################################################################################ +# link observations to $DATA +$GETOBSYAML --config $DATA/fv3jedi_var.yaml --output $DATA/${OPREFIX}obsspace_list +files=$(cat $DATA/${OPREFIX}obsspace_list) +for file in $files; do + basefile=$(basename $file) + $NLN $COMOUT/$basefile $DATA/obs/$basefile +done + +# link backgrounds to $DATA +# linking FMS RESTART files for now +# change to (or make optional) for cube sphere history later +$NLN ${COMIN_GES}/RESTART $DATA/bkg + + +# optionally link ensemble backgrounds to $DATA +if [ $DOHYBVAR = "YES" ]; then + mkdir -p $DATA/ens + fhrs="06" + if [ $l4densvar = 
".true." ]; then + fhrs="03 04 05 06 07 08 09" + fi + + for imem in $(seq 1 $NMEM_ENKF); do + memchar="mem"$(printf %03i $imem) + for fhr in $fhrs; do + $NLN ${COMIN_GES_ENS}/$memchar/RESTART $DATA/ens/$memchar + done + done + +fi + +################################################################################ +# link fix files to $DATA +# static B +CASE_BERROR=${CASE_BERROR:-${CASE_ANL:-$CASE}} +$NLN $FV3JEDI_FIX/bump/$CASE_BERROR/ $DATA/berror + +# vertical coordinate +LAYERS=$(expr $LEVS - 1) +$NLN $FV3JEDI_FIX/fv3jedi/fv3files/akbk${LAYERS}.nc4 $DATA/fv3jedi/akbk.nc4 + +# other FV3-JEDI fix files +$NLN $FV3JEDI_FIX/fv3jedi/fv3files/fmsmpp.nml $DATA/fv3jedi/fmsmpp.nml +$NLN $FV3JEDI_FIX/fv3jedi/fv3files/field_table_gfdl $DATA/fv3jedi/field_table + +# fieldmetadata +$NLN $FV3JEDI_FIX/fv3jedi/fieldmetadata/gfs-restart.yaml $DATA/fv3jedi/gfs-restart.yaml + +# fieldsets +fieldsets="dynamics.yaml ufo.yaml" +for fieldset in $fieldsets; do + $NLN $FV3JEDI_FIX/fv3jedi/fieldsets/$fieldset $DATA/fv3jedi/$fieldset +done + +# CRTM coeffs +$NLN $FV3JEDI_FIX/crtm/2.3.0_jedi $DATA/crtm + +# Link executable to $DATA +$NLN $JEDIVAREXE $DATA/fv3jedi_var.x + +################################################################################ +# run executable +export pgm=$JEDIVAREXE +. prep_step +$APRUN_ATMANAL $DATA/fv3jedi_var.x $DATA/fv3jedi_var.yaml 1>&1 2>&2 +export err=$?; err_chk + +################################################################################ +# translate FV3-JEDI increment to FV3 readable format +atminc_jedi=$DATA/anl/atminc.${PDY}_${cyc}0000z.nc4 +atminc_fv3=$COMOUT/${CDUMP}.${cycle}.atminc.nc +if [ -s $atminc_jedi ]; then + $INCPY $atminc_jedi $atminc_fv3 + export err=$? 
+else + echo "***WARNING*** missing $atminc_jedi ABORT" + export err=99 +fi +err_chk + +################################################################################ +# Create log file noting creating of analysis increment file +echo "$CDUMP $CDATE atminc and tiled sfcanl done at `date`" > $COMOUT/${CDUMP}.${cycle}.loginc.txt + +################################################################################ +# Copy diags and YAML to $COMOUT +cp -r $DATA/fv3jedi_var.yaml $COMOUT/${CDUMP}.${cycle}.fv3jedi_var.yaml +cp -rf $DATA/diags $COMOUT/ +cp -rf $DATA/bc $COMOUT/ + +# ***WARNING*** PATCH +# Copy abias, abias_pc, and abias_air from previous cycle to current cycle +# Deterministic abias used in enkf cycle +alist="abias abias_air abias_int abias_pc" +for abias in $alist; do + cp $COMIN_GES/${GPREFIX}${abias} $COMOUT/${APREFIX}${abias} +done + +################################################################################ +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err + +################################################################################ diff --git a/scripts/exgdas_global_atmos_ensanal_post.py b/scripts/exgdas_global_atmos_ensanal_post.py new file mode 100755 index 0000000000..6c5384953f --- /dev/null +++ b/scripts/exgdas_global_atmos_ensanal_post.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 +################################################################################ +# UNIX Script Documentation Block +# . . +# Script name: exgdas_global_atmos_analysis_post.py +# Script description: Post atmospheric analysis script. +# +# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-29 +# +# Abstract: This script runs after the atmospheric analysis and +# archives each diagnostic file into the R2D2 local user database. 
+# +# $Id$ +# +# Attributes: +# Language: Python3 +# +################################################################################ + +# import os and sys to add ush to path +import logging +import os +import sys + +# set up logger +logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + +# get absolute path of ush/ directory either from env or relative to this file +my_dir = os.path.dirname(__file__) +my_home = os.path.dirname(os.path.dirname(my_dir)) +gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') +sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) +logging.info(f"sys.path={sys.path}") + +# import UFSDA utilities +import ufsda + +# get configuration based on environment variables +config = ufsda.misc_utils.get_env_config(component='atm') +config['DIAG_DIR'] = os.path.join(os.environ['COMOUT_ENS'], 'diags') +config['provider'] = 'ncdiag_lgetkf' + +# use R2D2 to archive hofx files +ufsda.archive.atm_diags(config) diff --git a/scripts/exgdas_global_atmos_ensanal_run.sh b/scripts/exgdas_global_atmos_ensanal_run.sh new file mode 100755 index 0000000000..27f3339b58 --- /dev/null +++ b/scripts/exgdas_global_atmos_ensanal_run.sh @@ -0,0 +1,172 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_global_atmos_analysis_run.sh +# Script description: Runs the global atmospheric analysis with FV3-JEDI +# +# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-28 +# +# Abstract: This script makes a global model atmospheric analysis using FV3-JEDI +# and also (for now) updates increment files using a python ush utility +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: Orion +# +################################################################################ + +# Set environment. 
+export VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories +pwd=$(pwd) + +# Utilities +export NLN=${NLN:-"/bin/ln -sf"} +export INCPY=${INCPY:-"$HOMEgfs/sorc/gdas.cd/ush/jediinc2fv3.py"} +export GENYAML=${GENYAML:-"$HOMEgfs/sorc/gdas.cd/ush/genYAML"} +export GETOBSYAML=${GETOBSYAML:-"$HOMEgfs/sorc/gdas.cd/ush/get_obs_list.py"} + +################################################################################ +# make subdirectories +mkdir -p $DATA/fv3jedi +mkdir -p $DATA/obs +mkdir -p $DATA/diags +mkdir -p $DATA/bc +mkdir -p $DATA/anl + +################################################################################ +# generate YAML file +cat > $DATA/temp.yaml << EOF +template: ${ATMENSYAML} +output: $DATA/fv3jedi_ens.yaml +config: + atm: true + BERROR_YAML: $BERROR_YAML + OBS_DIR: obs + DIAG_DIR: diags + CRTM_COEFF_DIR: crtm + BIAS_IN_DIR: obs + BIAS_OUT_DIR: bc + OBS_PREFIX: $OPREFIX + BIAS_PREFIX: $GPREFIX + OBS_LIST: $OBS_LIST + OBS_YAML_DIR: $OBS_YAML_DIR + BKG_DIR: bkg + fv3jedi_staticb_dir: berror + fv3jedi_fix_dir: fv3jedi + fv3jedi_fieldset_dir: fv3jedi + fv3jedi_fieldmetadata_dir: fv3jedi + OBS_DATE: '$CDATE' + BIAS_DATE: '$GDATE' + ANL_DIR: anl/ + NMEM_ENKF: '$NMEM_ENKF' + INTERP_METHOD: '$INTERP_METHOD' +EOF +$GENYAML --config $DATA/temp.yaml + +################################################################################ +# link observations to $DATA +$GETOBSYAML --config $DATA/fv3jedi_ens.yaml --output $DATA/${OPREFIX}obsspace_list +files=$(cat $DATA/${OPREFIX}obsspace_list) +for file in $files; do + basefile=$(basename $file) + $NLN $COMIN/$basefile $DATA/obs/$basefile +done + +# link backgrounds to $DATA +# linking FMS RESTART files for now +# change to (or make optional) for cube sphere history later +##$NLN ${COMIN_GES}/RESTART $DATA/bkg + + +# Link ensemble backgrounds to $DATA. 
Make directories +# for ensemble output +if [ $DOHYBVAR = "YES" -o $DO_JEDIENS = "YES" ]; then + mkdir -p $DATA/bkg + for imem in $(seq 1 $NMEM_ENKF); do + memchar="mem"$(printf %03i $imem) + mkdir -p $DATA/bkg/$memchar + $NLN ${COMIN_GES_ENS}/$memchar/RESTART $DATA/bkg/$memchar + mkdir -p $DATA/anl/$memchar + done +fi + +################################################################################ +# link fix files to $DATA +# static B +##CASE_BERROR=${CASE_BERROR:-${CASE_ANL:-$CASE}} +##$NLN $FV3JEDI_FIX/bump/$CASE_BERROR/ $DATA/berror + +# vertical coordinate +LAYERS=$(expr $LEVS - 1) +$NLN $FV3JEDI_FIX/fv3jedi/fv3files/akbk${LAYERS}.nc4 $DATA/fv3jedi/akbk.nc4 + +# other FV3-JEDI fix files +$NLN $FV3JEDI_FIX/fv3jedi/fv3files/fmsmpp.nml $DATA/fv3jedi/fmsmpp.nml +$NLN $FV3JEDI_FIX/fv3jedi/fv3files/field_table_gfdl $DATA/fv3jedi/field_table + +# fieldmetadata +$NLN $FV3JEDI_FIX/fv3jedi/fieldmetadata/gfs-restart.yaml $DATA/fv3jedi/gfs-restart.yaml + +# fieldsets +fieldsets="dynamics.yaml ufo.yaml" +for fieldset in $fieldsets; do + $NLN $FV3JEDI_FIX/fv3jedi/fieldsets/$fieldset $DATA/fv3jedi/$fieldset +done + +# CRTM coeffs +$NLN $FV3JEDI_FIX/crtm/2.3.0_jedi $DATA/crtm + +# Link executable to $DATA +$NLN $JEDIENSEXE $DATA/fv3jedi_ens.x + +################################################################################ +# run executable +export pgm=$JEDIVAREXE +. prep_step +$APRUN_ATMENSANAL $DATA/fv3jedi_ens.x $DATA/fv3jedi_ens.yaml 1>&1 2>&2 +export err=$?; err_chk + +################################################################################ +# translate FV3-JEDI increment to FV3 readable format +for imem in $(seq 1 $NMEM_ENKF); do + memchar="mem"$(printf %03i $imem) + atminc_jedi=$DATA/anl/$memchar/atminc.${PDY}_${cyc}0000z.nc4 + atminc_fv3=$COMOUT_ENS/$memchar/${CDUMP}.${cycle}.atminc.nc + mkdir -p $COMOUT_ENS/$memchar + if [ -s $atminc_jedi ]; then + $INCPY $atminc_jedi $atminc_fv3 + export err=$? 
+ else + echo "***WARNING*** missing $atminc_jedi ABORT" + export err=99 + fi + err_chk +done + +################################################################################ +# Create log file noting creating of analysis increment file +echo "$CDUMP $CDATE atminc done at `date`" > $COMOUT_ENS/${CDUMP}.${cycle}.loginc.txt + +################################################################################ +# Copy diags and YAML to $COMOUT +cp -r $DATA/fv3jedi_ens.yaml $COMOUT_ENS/${CDUMP}.${cycle}.fv3jedi_ens.yaml +cp -rf $DATA/diags $COMOUT_ENS/ + + +################################################################################ +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err + +################################################################################ diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index df44202afe..8eb4585922 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -160,13 +160,6 @@ fi #--add GDASApp files #------------------------------ if [ -d ../sorc/gdas.cd ]; then - cd ${pwd}/../jobs ||exit 8 - $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP . - $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN . - $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST . - $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP . - $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN . - $LINK ../sorc/gdas.cd/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST . cd ${pwd}/../ush ||exit 8 $LINK ../sorc/gdas.cd/ush/ufsda . fi From e4b01b99f50c674635477ff3e2e962b9d5ed54aa Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Thu, 28 Jul 2022 18:28:28 -0400 Subject: [PATCH 09/16] Remove Cray, Dell, WCOSS1 from module-setup.sh.inc (#943) This file was missed in the initial cleanup. 
--- modulefiles/module-setup.sh.inc | 33 ++------------------------------- 1 file changed, 2 insertions(+), 31 deletions(-) diff --git a/modulefiles/module-setup.sh.inc b/modulefiles/module-setup.sh.inc index 75c7249ea4..b55643719c 100644 --- a/modulefiles/module-setup.sh.inc +++ b/modulefiles/module-setup.sh.inc @@ -34,35 +34,6 @@ elif [[ -d /work ]] ; then source /apps/lmod/lmod/init/$__ms_shell fi module purge -elif [[ -d /gpfs/hps && -e /etc/SuSE-release ]] ; then - # We are on NOAA Luna or Surge - if ( ! eval module help > /dev/null 2>&1 ) ; then - source /opt/modules/default/init/$__ms_shell - fi - module purge - module purge - # Workaround until module issues are fixed: - unset _LMFILES_ - unset LOADEDMODULES - module use /opt/modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/alt-modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /usrx/local/prod/modulefiles -elif [[ -d /dcom && -d /hwrf ]] ; then - # We are on NOAA Tide or Gyre - if ( ! eval module help > /dev/null 2>&1 ) ; then - source /usrx/local/Modules/default/init/$__ms_shell - fi - module purge -elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then - # We are on NOAA Mars or Venus - if ( ! eval module help > /dev/null 2>&1 ) ; then - source /usrx/local/prod/lmod/lmod/init/$__ms_shell - fi - module purge elif [[ -d /glade ]] ; then # We are on NCAR Yellowstone if ( ! eval module help > /dev/null 2>&1 ) ; then @@ -70,7 +41,7 @@ elif [[ -d /glade ]] ; then fi module purge elif [[ -d /lustre && -d /ncrc ]] ; then - # We are on GAEA. + # We are on GAEA. if ( ! eval module help > /dev/null 2>&1 ) ; then # We cannot simply load the module command. 
The GAEA # /etc/profile modifies a number of module-related variables @@ -83,7 +54,7 @@ elif [[ -d /lustre && -d /ncrc ]] ; then __ms_source_etc_profile=no fi module purge - # clean up after purge + # clean up after purge unset _LMFILES_ unset _LMFILES_000 unset _LMFILES_001 From 949513642d33cb3976d0f8e7dd273aedec505a17 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Thu, 28 Jul 2022 18:36:58 -0400 Subject: [PATCH 10/16] minimal intervention to create a data-atmosphere xml (#936) --- parm/config/config.base.emc.dyn | 7 ++++- parm/config/config.coupled_ic | 1 + workflow/applications.py | 30 ++++++++++++------- workflow/rocoto/workflow_tasks.py | 49 ++++++++++++++++++++++--------- workflow/setup_expt.py | 2 +- 5 files changed, 63 insertions(+), 26 deletions(-) diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index 722818b22a..e26eb35404 100755 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -152,6 +152,7 @@ export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} export APP=@APP@ # Defaults: +export DO_ATM="YES" export DO_COUPLED="NO" export DO_WAVE="NO" export DO_OCN="NO" @@ -164,7 +165,6 @@ export cplwav2atm=".false." 
case "${APP}" in ATM) - echo "APP=ATM; will use defaults" export confignamevarfornems="atm" ;; ATMA) @@ -177,6 +177,11 @@ case "${APP}" in export WAVE_CDUMP="both" export confignamevarfornems="leapfrog_atm_wav" ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; S2S*) export DO_COUPLED="YES" export DO_OCN="YES" diff --git a/parm/config/config.coupled_ic b/parm/config/config.coupled_ic index 7b4b6f51aa..15f15efa0c 100755 --- a/parm/config/config.coupled_ic +++ b/parm/config/config.coupled_ic @@ -17,5 +17,6 @@ export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c export CPL_ICEIC=CPC export CPL_OCNIC=CPC3Dvar export CPL_WAVIC=GEFSwave20210528v2 +export CPL_DATM=CDEPS_DATM echo "END: config.coupled_ic" diff --git a/workflow/applications.py b/workflow/applications.py index 8256cd9699..1766c4071f 100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -93,6 +93,7 @@ def __init__(self, configuration: Configuration) -> None: self.model_app = _base.get('APP', 'ATM') self.do_hybvar = _base.get('DOHYBVAR', False) + self.do_atm = _base.get('DO_ATM', True) self.do_wave = _base.get('DO_WAVE', False) self.do_wave_bnd = _base.get('DOBNDPNT_WAVE', False) self.do_ocean = _base.get('DO_OCN', False) @@ -181,12 +182,12 @@ def _cycled_configs(self): configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'arch'] - + if self.do_gldas: configs += ['gldas'] if self.do_hybvar: - if self.do_jediens: + if self.do_jediens: configs += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost'] else: configs += ['eobs', 'eomg', 'ediag', 'eupd'] @@ -224,9 +225,14 @@ def _forecast_only_configs(self): Returns the config_files that are involved in the forecast-only app """ - configs = ['fcst', 'post', 'vrfy', 'arch'] + configs = ['fcst'] + + if self.do_atm: + configs += ['post', 'vrfy'] + + configs += ['arch'] - if self.model_app in ['S2S', 'S2SW', 'S2SWA']: + if self.model_app in ['S2S', 'S2SW', 'S2SWA', 'NG-GODAS']: configs += ['coupled_ic'] else: configs += 
['init'] @@ -239,7 +245,7 @@ def _forecast_only_configs(self): if self.do_ocean or self.do_ice: configs += ['ocnpost'] - if self.do_metp: + if self.do_atm and self.do_metp: configs += ['metp'] if self.do_gempak: @@ -436,7 +442,7 @@ def _get_forecast_only_task_names(self): tasks = [] - if 'S2S' in self.model_app: + if self.model_app in ['S2S', 'S2SW', 'S2SWA', 'NG-GODAS']: tasks += ['coupled_ic'] else: if self.do_hpssarch: @@ -452,12 +458,16 @@ def _get_forecast_only_task_names(self): tasks += ['fcst'] - tasks += ['post'] - if 'S2S' in self.model_app: + if self.do_atm: + tasks += ['post'] + + if self.model_app in ['S2S', 'S2SW', 'S2SWA', 'NG-GODAS']: tasks += ['ocnpost'] - tasks += ['vrfy'] - if self.do_metp: + if self.do_atm: + tasks += ['vrfy'] + + if self.do_atm and self.do_metp: tasks += ['metp'] if self.do_wave: diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 9154ac37f7..22d4ee2c14 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -140,13 +140,20 @@ def coupled_ic(self): deps = [] # Atm ICs - atm_res = self._base.get('CASE', 'C384') - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/{self.cdump}" - for file in ['gfs_ctrl.nc'] + \ - [f'{datatype}_data.tile{tile}.nc' - for datatype in ['gfs', 'sfc'] - for tile in range(1, self.n_tiles + 1)]: - data = f"{prefix}/{atm_res}/INPUT/{file}" + if self.app_config.do_atm: + atm_res = self._base.get('CASE', 'C384') + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/{self.cdump}" + for file in ['gfs_ctrl.nc'] + \ + [f'{datatype}_data.tile{tile}.nc' + for datatype in ['gfs', 'sfc'] + for tile in range(1, self.n_tiles + 1)]: + data = f"{prefix}/{atm_res}/INPUT/{file}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + else: # data-atmosphere + # TODO - need more information about how these forcings are stored + prefix = 
f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_DATM']}/@Y@m@d@H" + data = f"{prefix}/gefs.@Y@m.nc" dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) @@ -474,14 +481,28 @@ def fcst(self): @property def _fcst_forecast_only(self): dependencies = [] + deps = [] - data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/INPUT/sfc_data.tile6.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies.append(rocoto.create_dependency(dep_condition='or', dep=deps)) + if self.app_config.do_atm: + data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/INPUT/sfc_data.tile6.nc' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies.append(rocoto.create_dependency(dep_condition='or', dep=deps)) + + else: # data-atmosphere + data = f'&ICSDIR;/@Y@m@d@H/datm/gefs.@Y@m.nc' # GEFS forcing + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + data = '&ICSDIR;/@Y@m@d@H/ocn/MOM.res.nc' # TODO - replace with actual ocean IC + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + data = '&ICSDIR;/@Y@m@d@H/ice/cice5_model.res.nc' # TODO - replace with actual ice IC + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies.append(rocoto.create_dependency(dep_condition='and', dep=deps)) if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: wave_job = 'waveprep' if self.app_config.model_app in ['ATMW'] else 'waveinit' diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py 
index 000f939cea..566e936d24 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -232,7 +232,7 @@ def input_args(): # forecast only mode additional arguments forecasts.add_argument('--app', help='UFS application', type=str, choices=[ - 'ATM', 'ATMA', 'ATMW', 'S2S', 'S2SW', 'S2SWA'], required=False, default='ATM') + 'ATM', 'ATMA', 'ATMW', 'S2S', 'S2SW', 'S2SWA', 'NG-GODAS'], required=False, default='ATM') args = parser.parse_args() From e480093446797e556bc1371f9f80610a7c9a6d4b Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Fri, 29 Jul 2022 13:42:02 -0400 Subject: [PATCH 11/16] Add preamble, convert to bash, and remove env (#929) This is the first in a wave of commits to improve and standardize the scripts within global workflow. In this commit, all scripts run during execution are converted to bash and a preamble is added to every script that is not sourced by another script. Every script executed during a forecast cycle is converted to bash. This was mostly straightforward, though there were a couple Korne-shell conventions (primarily using `typeset` to format strings) that had to be replaced with bash-compatable alternatives like `printf`. This in turn required a few modification to prevent zero-padded numbers from being treated as octals (other may have been pre-existing bugs). The preamble contains a number of feature to standardize code and improve script flow and debugging. - First, it uses two variables, `$STRICT` and `$TRACE` to control the behavior of `set`. When `$STRICT` is `"YES"`, error on undefined variables (`set -u`) and exit on non-zero return (`set -e`) are turned on. When `$TRACE` is `"YES"`, command trace (`set -x`) is turned on and a useful string is set to `$PS4` that gives the name and line number of the script. Both `$STRICT` and `$TRACE` default to `"YES"`. They also set up commands, `$ERR_EXIT_ON` and `$TRACE_ON`, that will restore the setting of each in the event a script needs to temporarily turn them off. 
- Second, the preamble sets up primative timing of scripts using Posix `date`. - Third, it echos the script is beginning and at what time. - Finally, it also establishes a postamble function and sets it to run as a trap of EXIT. The postamble will use the end time to calculate the run time of the script, then print that the script has ended at what time, how long has elapsed, and the exit code. By setting this up as a trap instead of just calling it at the end of the script, it ensures the postamble is called even if the script exits early because there is an error. - In response to this standardization, parts of scripts that performed these preamble functions (announcing start/end, `set -x`, etc) have been deleted. For some scripts where temporarily turning off `-x` or `-e` is needed, they now use `$ERR_EXIT_ON` and `$TRACE_ON` to return to the correct state afterwards, instead of blindly turning the setting back on. - Additionally, some modifications were needed to comply with `set -eu`. Mostly taking care of undefined variables, but also a couple instances where a non-zero return code had to be dealt with. If users wish to use their own preamble script instead, the default script can be overridden by setting `$PREAMBLE_SCRIPT` before the run begins. Instance where scripts would print the full list of environment variables have been removed. These can be spot added back in to debug as necessary. Alternatively, a future PR will add them back in in a standardized way. `rstprod.sh` is added to the link list from gsi_monitor.fd, as it is needed for the radmon scripts. The placeholders for AWIPS and GEMPAK in the Hera and Orion environment scripts were replaced with the correct definitions. There were also other modifications to AWIPS and GEMPAK scripts to get it working for development (AWIPS still isn't and will be fixed in the future). 
GSI scripts that were brought in recently had all of their backticks replaced with `$( )` as was done with all other script previously. Refs: #397 --- .gitignore | 1 + env/HERA.env | 14 +- env/ORION.env | 14 +- jobs/JGDAS_ATMOS_ANALYSIS_DIAG | 19 +-- jobs/JGDAS_ATMOS_CHGRES_FORENKF | 19 +-- jobs/JGDAS_ATMOS_GEMPAK | 13 +- jobs/JGDAS_ATMOS_GEMPAK_META_NCDC | 11 +- jobs/JGDAS_EFSOI | 19 +-- jobs/JGDAS_EFSOI_ECEN | 19 +-- jobs/JGDAS_EFSOI_FCST | 20 +-- jobs/JGDAS_EFSOI_POST | 20 +-- jobs/JGDAS_EFSOI_SFC | 20 +-- jobs/JGDAS_EFSOI_UPDATE | 19 +-- jobs/JGDAS_ENKF_DIAG | 18 +- jobs/JGDAS_ENKF_ECEN | 18 +- jobs/JGDAS_ENKF_FCST | 18 +- jobs/JGDAS_ENKF_POST | 18 +- jobs/JGDAS_ENKF_SELECT_OBS | 18 +- jobs/JGDAS_ENKF_SFC | 18 +- jobs/JGDAS_ENKF_UPDATE | 17 +- jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST | 8 +- jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP | 8 +- jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN | 8 +- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST | 8 +- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP | 8 +- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN | 8 +- jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG | 12 +- jobs/JGFS_ATMOS_AWIPS_G2 | 12 +- jobs/JGFS_ATMOS_CYCLONE_GENESIS | 21 +-- jobs/JGFS_ATMOS_CYCLONE_TRACKER | 19 +-- jobs/JGFS_ATMOS_FBWIND | 11 +- jobs/JGFS_ATMOS_FSU_GENESIS | 17 +- jobs/JGFS_ATMOS_GEMPAK | 38 +++-- jobs/JGFS_ATMOS_GEMPAK_META | 14 +- jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF | 14 +- jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC | 16 +- jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS | 11 +- jobs/JGFS_ATMOS_POSTSND | 13 +- jobs/JGLOBAL_ATMOS_ANALYSIS | 18 +- jobs/JGLOBAL_ATMOS_ANALYSIS_CALC | 18 +- jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP | 15 +- jobs/JGLOBAL_ATMOS_NCEPPOST | 21 +-- jobs/JGLOBAL_ATMOS_POST_MANAGER | 8 +- jobs/JGLOBAL_ATMOS_SFCANL | 20 +-- jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC | 19 +-- jobs/JGLOBAL_FORECAST | 17 +- jobs/JGLOBAL_WAVE_GEMPAK | 13 +- jobs/JGLOBAL_WAVE_INIT | 9 +- jobs/JGLOBAL_WAVE_POST_BNDPNT | 18 +- jobs/JGLOBAL_WAVE_POST_BNDPNTBLL | 16 +- jobs/JGLOBAL_WAVE_POST_PNT | 22 +-- jobs/JGLOBAL_WAVE_POST_SBS | 26 ++- 
jobs/JGLOBAL_WAVE_PRDGEN_BULLS | 10 +- jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED | 25 +-- jobs/JGLOBAL_WAVE_PREP | 9 +- jobs/rocoto/aerosol_init.sh | 6 +- jobs/rocoto/anal.sh | 8 +- jobs/rocoto/analcalc.sh | 6 +- jobs/rocoto/analdiag.sh | 6 +- jobs/rocoto/arch.sh | 40 +++-- jobs/rocoto/awips.sh | 8 +- jobs/rocoto/coupled_ic.sh | 14 +- jobs/rocoto/earc.sh | 17 +- jobs/rocoto/ecen.sh | 6 +- jobs/rocoto/echgres.sh | 8 +- jobs/rocoto/ediag.sh | 6 +- jobs/rocoto/efcs.sh | 5 +- jobs/rocoto/eobs.sh | 6 +- jobs/rocoto/eomg.sh | 6 +- jobs/rocoto/epos.sh | 6 +- jobs/rocoto/esfc.sh | 6 +- jobs/rocoto/eupd.sh | 6 +- jobs/rocoto/fcst.sh | 6 +- jobs/rocoto/gempak.sh | 6 +- jobs/rocoto/getic.sh | 6 +- jobs/rocoto/gldas.sh | 5 +- jobs/rocoto/init.sh | 8 +- jobs/rocoto/metp.sh | 6 +- jobs/rocoto/ocnpost.sh | 7 +- jobs/rocoto/post.sh | 29 +--- jobs/rocoto/postsnd.sh | 6 +- jobs/rocoto/prep.sh | 6 +- jobs/rocoto/sfcanl.sh | 6 +- jobs/rocoto/vrfy.sh | 6 +- jobs/rocoto/wafs.sh | 6 +- jobs/rocoto/wafsblending.sh | 9 +- jobs/rocoto/wafsblending0p25.sh | 9 +- jobs/rocoto/wafsgcip.sh | 9 +- jobs/rocoto/wafsgrib2.sh | 9 +- jobs/rocoto/wafsgrib20p25.sh | 9 +- jobs/rocoto/waveawipsbulls.sh | 7 +- jobs/rocoto/waveawipsgridded.sh | 6 +- jobs/rocoto/wavegempak.sh | 6 +- jobs/rocoto/waveinit.sh | 7 +- jobs/rocoto/wavepostbndpnt.sh | 7 +- jobs/rocoto/wavepostbndpntbll.sh | 7 +- jobs/rocoto/wavepostpnt.sh | 7 +- jobs/rocoto/wavepostsbs.sh | 7 +- jobs/rocoto/waveprep.sh | 7 +- parm/config/config.aero | 2 +- parm/config/config.aerosol_init | 2 +- parm/config/config.anal | 4 +- parm/config/config.analcalc | 2 +- parm/config/config.analdiag | 2 +- parm/config/config.arch | 2 +- parm/config/config.awips | 2 +- parm/config/config.base.emc.dyn | 2 +- parm/config/config.base.nco.static | 2 +- parm/config/config.coupled_ic | 2 +- parm/config/config.defaults.s2sw | 2 +- parm/config/config.earc | 2 +- parm/config/config.ecen | 2 +- parm/config/config.echgres | 2 +- parm/config/config.ediag | 2 +- 
parm/config/config.efcs | 2 +- parm/config/config.eobs | 2 +- parm/config/config.epos | 2 +- parm/config/config.esfc | 2 +- parm/config/config.eupd | 2 +- parm/config/config.fcst | 2 +- parm/config/config.fv3 | 2 +- parm/config/config.fv3.nco.static | 2 +- parm/config/config.gempak | 2 +- parm/config/config.getic | 2 +- parm/config/config.gldas | 2 +- parm/config/config.ice | 2 +- parm/config/config.init | 2 +- parm/config/config.metp | 2 +- parm/config/config.nsst | 2 +- parm/config/config.ocn | 2 +- parm/config/config.ocnpost | 2 +- parm/config/config.post | 2 +- parm/config/config.postsnd | 2 +- parm/config/config.prep | 2 +- parm/config/config.prepbufr | 2 +- parm/config/config.resources | 4 +- parm/config/config.sfcanl | 2 +- parm/config/config.vrfy | 2 +- parm/config/config.wafs | 2 +- parm/config/config.wafsblending | 2 +- parm/config/config.wafsblending0p25 | 2 +- parm/config/config.wafsgcip | 2 +- parm/config/config.wafsgrib2 | 2 +- parm/config/config.wafsgrib20p25 | 2 +- parm/config/config.wave | 2 +- parm/config/config.waveawipsbulls | 2 +- parm/config/config.waveawipsgridded | 2 +- parm/config/config.wavegempak | 2 +- parm/config/config.waveinit | 2 +- parm/config/config.wavepostbndpnt | 2 +- parm/config/config.wavepostbndpntbll | 2 +- parm/config/config.wavepostpnt | 2 +- parm/config/config.wavepostsbs | 2 +- parm/config/config.waveprep | 2 +- scripts/exgdas_atmos_chgres_forenkf.sh | 16 +- scripts/exgdas_atmos_gempak_gif_ncdc.sh | 9 +- scripts/exgdas_atmos_nawips.sh | 41 ++--- scripts/exgdas_atmos_nceppost.sh | 58 +++---- scripts/exgdas_efsoi.sh | 18 +- scripts/exgdas_efsoi_update.sh | 19 +-- scripts/exgdas_enkf_ecen.sh | 16 +- scripts/exgdas_enkf_fcst.sh | 18 +- scripts/exgdas_enkf_post.sh | 20 +-- scripts/exgdas_enkf_select_obs.sh | 16 +- scripts/exgdas_enkf_sfc.sh | 16 +- scripts/exgdas_enkf_update.sh | 19 +-- scripts/exgdas_global_atmos_analysis_run.sh | 13 +- scripts/exgdas_global_atmos_ensanal_run.sh | 13 +- 
scripts/exgfs_atmos_awips_20km_1p0deg.sh | 36 ++-- scripts/exgfs_atmos_fbwind.sh | 62 ++----- scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh | 35 ++-- scripts/exgfs_atmos_gempak_meta.sh | 38 ++--- scripts/exgfs_atmos_goes_nawips.sh | 44 ++--- scripts/exgfs_atmos_grib2_special_npoess.sh | 46 ++---- scripts/exgfs_atmos_grib_awips.sh | 44 ++--- scripts/exgfs_atmos_nawips.sh | 44 ++--- scripts/exgfs_atmos_nceppost.sh | 84 +++++----- scripts/exgfs_atmos_postsnd.sh | 27 +-- scripts/exgfs_pmgr.sh | 17 +- scripts/exgfs_prdgen_manager.sh | 16 +- scripts/exgfs_wave_init.sh | 49 ++---- scripts/exgfs_wave_nawips.sh | 35 ++-- scripts/exgfs_wave_post_gridded_sbs.sh | 66 +++----- scripts/exgfs_wave_post_pnt.sh | 85 ++++------ scripts/exgfs_wave_prdgen_bulls.sh | 60 +++---- scripts/exgfs_wave_prdgen_gridded.sh | 41 ++--- scripts/exgfs_wave_prep.sh | 53 +++--- scripts/exglobal_atmos_analysis.sh | 17 +- scripts/exglobal_atmos_analysis_calc.sh | 17 +- scripts/exglobal_atmos_pmgr.sh | 14 +- scripts/exglobal_atmos_sfcanl.sh | 15 +- scripts/exglobal_atmos_tropcy_qc_reloc.sh | 47 ++---- scripts/exglobal_diag.sh | 21 +-- scripts/exglobal_forecast.sh | 22 +-- scripts/run_reg2grb2.sh | 6 +- scripts/run_regrid.sh | 7 +- sorc/link_workflow.sh | 2 +- ush/cplvalidate.sh | 2 +- ush/drive_makeprepbufr.sh | 6 +- ush/file_utils.sh | 27 +++ ush/forecast_det.sh | 3 +- ush/forecast_postdet.sh | 13 +- ush/forecast_predet.sh | 7 +- ush/fv3gfs_downstream_nems.sh | 28 ++-- ush/fv3gfs_dwn_nems.sh | 8 +- ush/fv3gfs_nc2nemsio.sh | 8 +- ush/fv3gfs_regrid_nemsio.sh | 14 +- ush/fv3gfs_remap.sh | 6 +- ush/fv3gfs_remap_weights.sh | 2 +- ush/gaussian_sfcanl.sh | 23 +-- ush/getdump.sh | 8 +- ush/getges.sh | 19 ++- ush/gfs_bfr2gpk.sh | 5 +- ush/gfs_bufr.sh | 12 +- ush/gfs_bufr_netcdf.sh | 12 +- ush/gfs_nceppost.sh | 38 ++--- ush/gfs_sndp.sh | 12 +- ush/gfs_transfer.sh | 31 ++-- ush/gfs_truncate_enkf.sh | 7 +- ush/global_extrkr.sh | 154 +++++++++--------- ush/global_nceppost.sh | 20 +-- ush/global_savefits.sh | 5 +- 
ush/hpssarch_gen.sh | 5 +- ush/inter_flux.sh | 10 +- ush/link_crtm_fix.sh | 4 +- ush/load_fv3gfs_modules.sh | 13 +- ush/mod_icec.sh | 12 +- ush/nems_configure.sh | 2 +- ush/parsing_model_configure_DATM.sh | 2 +- ush/parsing_model_configure_FV3.sh | 2 +- ush/parsing_namelists_CICE.sh | 2 + ush/parsing_namelists_FV3.sh | 45 ++--- ush/parsing_namelists_MOM6.sh | 1 + ush/parsing_namelists_WW3.sh | 5 +- ush/preamble.sh | 86 ++++++++++ ush/scale_dec.sh | 7 +- ush/syndat_getjtbul.sh | 16 +- ush/syndat_qctropcy.sh | 106 +++--------- ush/trim_rh.sh | 7 +- ush/tropcy_relocate.sh | 32 ++-- ush/tropcy_relocate_extrkr.sh | 96 +++++------ ush/wave_grib2_sbs.sh | 46 ++---- ush/wave_grid_interp_sbs.sh | 38 ++--- ush/wave_grid_moddef.sh | 34 ++-- ush/wave_outp_cat.sh | 37 ++--- ush/wave_outp_spec.sh | 45 ++--- ush/wave_prnc_cur.sh | 8 +- ush/wave_prnc_ice.sh | 59 +++---- ush/wave_tar.sh | 44 +++-- 249 files changed, 1615 insertions(+), 2307 deletions(-) create mode 100644 ush/file_utils.sh create mode 100644 ush/preamble.sh diff --git a/.gitignore b/.gitignore index 60151e3ab0..df54a09892 100644 --- a/.gitignore +++ b/.gitignore @@ -190,6 +190,7 @@ ush/radmon_verf_bcoef.sh ush/radmon_verf_bcor.sh ush/radmon_verf_time.sh ush/ufsda +ush/rstprod.sh ush/wafs_blending.sh ush/wafs_grib2.regrid.sh ush/wafs_intdsk.sh diff --git a/env/HERA.env b/env/HERA.env index f5c0a5efcd..124b3e8b9a 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash if [ $# -ne 1 ]; then @@ -261,9 +261,17 @@ elif [ $step = "postsnd" ]; then elif [ $step = "awips" ]; then - echo "WARNING: $step is not enabled on $machine!" + nth_max=$(($npe_node_max / $npe_node_awips)) + + export NTHREADS_AWIPS=${nth_awips:-2} + [[ $NTHREADS_AWIPS -gt $nth_max ]] && export NTHREADS_AWIPS=$nth_max + export APRUN_AWIPSCFP="$launcher -n $npe_awips --multi-prog" elif [ $step = "gempak" ]; then - echo "WARNING: $step is not enabled on $machine!" 
+ nth_max=$(($npe_node_max / $npe_node_gempak)) + + export NTHREADS_GEMPAK=${nth_gempak:-1} + [[ $NTHREADS_GEMPAK -gt $nth_max ]] && export NTHREADS_GEMPAK=$nth_max + export APRUN="$launcher -n $npe_gempak --multi-prog" fi diff --git a/env/ORION.env b/env/ORION.env index f3b58d987c..bef0661f47 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash if [ $# -ne 1 ]; then @@ -259,9 +259,17 @@ elif [ $step = "postsnd" ]; then elif [ $step = "awips" ]; then - echo "WARNING: $step is not enabled on $machine!" + nth_max=$(($npe_node_max / $npe_node_awips)) + + export NTHREADS_AWIPS=${nth_awips:-2} + [[ $NTHREADS_AWIPS -gt $nth_max ]] && export NTHREADS_AWIPS=$nth_max + export APRUN_AWIPSCFP="$launcher -n $npe_awips --multi-prog" elif [ $step = "gempak" ]; then - echo "WARNING: $step is not enabled on $machine!" + nth_max=$(($npe_node_max / $npe_node_gempak)) + + export NTHREADS_GEMPAK=${nth_gempak:-1} + [[ $NTHREADS_GEMPAK -gt $nth_max ]] && export NTHREADS_GEMPAK=$nth_max + export APRUN="$launcher -n $npe_gempak --multi-prog" fi diff --git a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG index da16176ab3..4b2728e13f 100755 --- a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG +++ b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -112,11 +110,6 @@ fi ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - - ${ANALDIAGSH:-$SCRgfs/exglobal_diag.sh} status=$? [[ $status -ne 0 ]] && exit $status @@ -133,15 +126,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." 
- - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ATMOS_CHGRES_FORENKF b/jobs/JGDAS_ATMOS_CHGRES_FORENKF index cf69bdd770..d2268df767 100755 --- a/jobs/JGDAS_ATMOS_CHGRES_FORENKF +++ b/jobs/JGDAS_ATMOS_CHGRES_FORENKF @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -97,11 +95,6 @@ export COMIN_GES_ENS="$ROTDIR/enkfgdas.$gPDY/$gcyc/$COMPONENT" ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - - ${CHGRESFCSTSH:-$SCRgfs/exgdas_atmos_chgres_forenkf.sh} status=$? [[ $status -ne 0 ]] && exit $status @@ -118,15 +111,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ATMOS_GEMPAK b/jobs/JGDAS_ATMOS_GEMPAK index e2d3d89da7..8865432450 100755 --- a/jobs/JGDAS_ATMOS_GEMPAK +++ b/jobs/JGDAS_ATMOS_GEMPAK @@ -1,8 +1,6 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################################ # GDAS GEMPAK PRODUCT GENERATION @@ -73,7 +71,6 @@ fi export pgmout=OUTPUT.$$ -env if [ -f $DATA/poescrip ]; then rm $DATA/poescript @@ -107,11 +104,6 @@ APRUNCFP=$(eval echo $APRUN_GEMPAKCFP) $APRUNCFP $DATA/poescript export err=$?; err_chk -######################################################## - -msg="JOB $job HAS COMPLETED NORMALLY!" 
-postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -126,4 +118,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC index 12951c2c53..ffb46db0f9 100755 --- a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC +++ b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC @@ -1,8 +1,6 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################################ # GDAS GEMPAK META NCDC PRODUCT GENERATION @@ -86,7 +84,6 @@ fi export pgmout=OUTPUT.$$ -env ######################################################## # Execute the script. @@ -107,9 +104,6 @@ $SRCgfs/exgdas_atmos_gempak_gif_ncdc.sh export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -124,4 +118,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGDAS_EFSOI b/jobs/JGDAS_EFSOI index af60ddeb7b..6c90468989 100755 --- a/jobs/JGDAS_EFSOI +++ b/jobs/JGDAS_EFSOI @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -97,10 +95,6 @@ mkdir -p $OSENSE_SAVE_DIR ############################################################### # Run relevant exglobal script -env -msg="HAS BEGUN on `hostname`" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT ${EFSOIUPDSH:-$SCRgfs/exgdas_efsoi.sh} status=$? @@ -117,11 +111,6 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -msg="ENDED NORMALLY." 
-postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## @@ -129,5 +118,5 @@ cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_EFSOI_ECEN b/jobs/JGDAS_EFSOI_ECEN index 28d04b9f5c..864bb1ae4a 100755 --- a/jobs/JGDAS_EFSOI_ECEN +++ b/jobs/JGDAS_EFSOI_ECEN @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -106,11 +104,6 @@ export COMIN_GES_ENS="$ROTDIR/enkf$CDUMP.$gPDY/$gcyc/$COMPONENT" ############################################################### # Run relevant script -env -msg="HAS BEGUN on `hostname`" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${ENKFRECENSH:-$SCRgfs/exgdas_enkf_ecen.sh} status=$? @@ -129,15 +122,11 @@ if [ -e "$pgmout" ] ; then fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_EFSOI_FCST b/jobs/JGDAS_EFSOI_FCST index a9e4fc78f1..2794ee9c93 100755 --- a/jobs/JGDAS_EFSOI_FCST +++ b/jobs/JGDAS_EFSOI_FCST @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -93,11 +91,6 @@ export ENSBEG=$((ENSEND - NMEM_EFCSGRP + 1)) ############################################################### # Run relevant script -env -msg="HAS BEGUN on `hostname`" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${ENKFFCSTSH:-$SCRgfs/exgdas_enkf_fcst.sh} status=$? 
@@ -129,16 +122,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_EFSOI_POST b/jobs/JGDAS_EFSOI_POST index f71f865b7e..aca5bd51ed 100755 --- a/jobs/JGDAS_EFSOI_POST +++ b/jobs/JGDAS_EFSOI_POST @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -84,11 +82,6 @@ export LEVS=$((LEVS-1)) ############################################################### # Run relevant script -env -msg="HAS BEGUN on `hostname`" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${ENKFPOSTSH:-$SCRgfs/exgdas_enkf_post.sh} status=$? @@ -106,16 +99,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_EFSOI_SFC b/jobs/JGDAS_EFSOI_SFC index 5924393c33..5ba5fb6393 100755 --- a/jobs/JGDAS_EFSOI_SFC +++ b/jobs/JGDAS_EFSOI_SFC @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -118,11 +116,6 @@ export COMIN_GES_ENS="$ROTDIR/enkf$CDUMP.$gPDY/$gcyc/$COMPONENT" ############################################################### # Run relevant script -env -msg="HAS BEGUN on `hostname`" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${ENKFRESFCSH:-$SCRgfs/exgdas_enkf_sfc.sh} status=$? @@ -140,16 +133,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_EFSOI_UPDATE b/jobs/JGDAS_EFSOI_UPDATE index 3d730eab7e..e5efd6241a 100755 --- a/jobs/JGDAS_EFSOI_UPDATE +++ b/jobs/JGDAS_EFSOI_UPDATE @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -92,10 +90,6 @@ mkdir -p $COMOUT_ANL_ENSFSOI ############################################################### # Run relevant exglobal script -env -msg="HAS BEGUN on `hostname`" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT ${EFSOIUPDSH:-$SCRgfs/exgdas_efsoi_update.sh} status=$? @@ -121,11 +115,6 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -msg="ENDED NORMALLY." 
-postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## @@ -133,5 +122,5 @@ cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ENKF_DIAG b/jobs/JGDAS_ENKF_DIAG index 1c26192569..5ce8d86b78 100755 --- a/jobs/JGDAS_ENKF_DIAG +++ b/jobs/JGDAS_ENKF_DIAG @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -147,10 +145,6 @@ done ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - ${ANALDIAGSH:-$SCRgfs/exglobal_diag.sh} status=$? @@ -168,15 +162,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ENKF_ECEN b/jobs/JGDAS_ENKF_ECEN index f52abfe420..1e7a51b5ae 100755 --- a/jobs/JGDAS_ENKF_ECEN +++ b/jobs/JGDAS_ENKF_ECEN @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -103,10 +101,6 @@ export COMIN_GES_ENS="$ROTDIR/enkf$CDUMP.$gPDY/$gcyc/$COMPONENT" ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - ${ENKFRECENSH:-$SCRgfs/exgdas_enkf_ecen.sh} status=$? @@ -124,15 +118,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." 
- - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ENKF_FCST b/jobs/JGDAS_ENKF_FCST index 88d61c0870..68b3a53204 100755 --- a/jobs/JGDAS_ENKF_FCST +++ b/jobs/JGDAS_ENKF_FCST @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -83,10 +81,6 @@ export ENSBEG=$((ENSEND - NMEM_EFCSGRP + 1)) ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - ${ENKFFCSTSH:-$SCRgfs/exgdas_enkf_fcst.sh} status=$? @@ -126,15 +120,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ENKF_POST b/jobs/JGDAS_ENKF_POST index 7309305c0f..dcc6335e44 100755 --- a/jobs/JGDAS_ENKF_POST +++ b/jobs/JGDAS_ENKF_POST @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -77,10 +75,6 @@ export LEVS=$((LEVS-1)) ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - ${ENKFPOSTSH:-$SCRgfs/exgdas_enkf_post.sh} status=$? @@ -98,15 +92,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." 
- - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ENKF_SELECT_OBS b/jobs/JGDAS_ENKF_SELECT_OBS index 919eec5bc5..92bd78b04c 100755 --- a/jobs/JGDAS_ENKF_SELECT_OBS +++ b/jobs/JGDAS_ENKF_SELECT_OBS @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -149,10 +147,6 @@ done ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - ${INVOBSSH:-$SCRgfs/exgdas_enkf_select_obs.sh} status=$? @@ -178,15 +172,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ENKF_SFC b/jobs/JGDAS_ENKF_SFC index a0383f2cf0..54f196234a 100755 --- a/jobs/JGDAS_ENKF_SFC +++ b/jobs/JGDAS_ENKF_SFC @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -104,10 +102,6 @@ export COMIN_GES_ENS="$ROTDIR/enkf$CDUMP.$gPDY/$gcyc/$COMPONENT" ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - ${ENKFRESFCSH:-$SCRgfs/exgdas_enkf_sfc.sh} status=$? @@ -125,15 +119,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." 
- - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_ENKF_UPDATE b/jobs/JGDAS_ENKF_UPDATE index 07bf37f7f8..dafd9b13f2 100755 --- a/jobs/JGDAS_ENKF_UPDATE +++ b/jobs/JGDAS_ENKF_UPDATE @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -81,9 +79,6 @@ export COMOUT_ANL_ENS="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT ${ENKFUPDSH:-$SCRgfs/exgdas_enkf_update.sh} status=$? @@ -109,15 +104,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST index 7784695445..d8cd0416cd 100755 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST +++ b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST @@ -1,11 +1,6 @@ #!/bin/bash -##set -ex -set -x -set -u - -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################# # Source relevant config files @@ -119,5 +114,4 @@ fi cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP index cde886e36b..01895d4d5c 100755 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP +++ b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP @@ -1,11 +1,6 @@ #!/bin/bash -##set -ex -set -x -set -u - -export PS4='$SECONDS + ' -date +source 
"$HOMEgfs/ush/preamble.sh" ############################# # Source relevant config files @@ -119,5 +114,4 @@ fi cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN index aadf4d7a42..7998623851 100755 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN +++ b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN @@ -1,11 +1,6 @@ #!/bin/bash -##set -ex -set -x -set -u - -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################# # Source relevant config files @@ -119,5 +114,4 @@ fi cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST index f1e85447d2..871ba7a35f 100755 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST +++ b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST @@ -1,11 +1,6 @@ #!/bin/bash -##set -ex -set -x -set -u - -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################# # Source relevant config files @@ -122,5 +117,4 @@ fi cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP index 618984bcab..b284e90e67 100755 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP +++ b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP @@ -1,11 +1,6 @@ #!/bin/bash -##set -ex -set -x -set -u - -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################# # Source relevant config files @@ -122,5 +117,4 @@ fi cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN index d3b2c7f90e..b3f5c0fa90 100755 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN +++ b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN @@ -1,11 +1,6 @@ #!/bin/bash -##set -ex -set -x -set -u - -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################# # 
Source relevant config files @@ -122,5 +117,4 @@ fi cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date exit 0 diff --git a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG index ea70ae7b14..2528013e39 100755 --- a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +++ b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG @@ -1,9 +1,8 @@ -#!/bin/sh +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -set -xa -export PS4='$SECONDS + ' export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} -date ########################################### # GFS_AWIPS_20KM AWIPS PRODUCT GENERATION @@ -62,7 +61,6 @@ fi export pgmout=OUTPUT.$$ -env ######################################################## # Execute the script. @@ -76,9 +74,6 @@ $HOMEgfs/scripts/exgfs_atmos_awips_20km_1p0deg.sh $fcsthrs export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -93,4 +88,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGFS_ATMOS_AWIPS_G2 b/jobs/JGFS_ATMOS_AWIPS_G2 index 634cf07cdd..9dd2fdca63 100755 --- a/jobs/JGFS_ATMOS_AWIPS_G2 +++ b/jobs/JGFS_ATMOS_AWIPS_G2 @@ -1,8 +1,7 @@ -#!/bin/sh +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -set -xa -export PS4='$SECONDS + ' -date export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} ######################################## @@ -63,7 +62,6 @@ fi export pgmout=OUTPUT.$$ -env ######################################################## # Execute the script. @@ -73,9 +71,6 @@ cd $DATA/awips_g1 $HOMEgfs/scripts/exgfs_atmos_grib_awips.sh $fcsthrs export err=$?; err_chk -msg="JOB $job HAS COMPLETED NORMALLY!" 
-postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -90,4 +85,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGFS_ATMOS_CYCLONE_GENESIS b/jobs/JGFS_ATMOS_CYCLONE_GENESIS index 090e1e1bfb..79d43ebb1e 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_GENESIS +++ b/jobs/JGFS_ATMOS_CYCLONE_GENESIS @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -99,8 +97,6 @@ export COMOUT=${ROTDIR}/${RUN}.${PDY}/${cyc}/$COMPONENT export JYYYY=$(echo ${PDY} | cut -c1-4) export COMINgenvit=${COMINgenvit:-${COMOUT}/genesis_vital_${JYYYY}} export COMOUTgenvit=${COMOUTgenvit:-${COMOUT}/genesis_vital_${JYYYY}} -#export COMINgenvit=${COMINgenvit:-${DATA}/genesis_vital_${JYYYY}} -#export COMOUTgenvit=${COMOUTgenvit:-${DATA}/genesis_vital_${JYYYY}} export COMINsyn=${COMINsyn:-$(compath.py gfs/prod/syndat)} @@ -109,11 +105,6 @@ mkdir -m 775 -p $COMOUTgenvit ############################################## # Run relevant script ############################################## -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${SCRIPTens_tracker}/exgfs_tc_genesis.sh export err=$?; err_chk @@ -125,15 +116,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGFS_ATMOS_CYCLONE_TRACKER b/jobs/JGFS_ATMOS_CYCLONE_TRACKER index d2e44e115b..4b05ea0b80 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_TRACKER +++ b/jobs/JGFS_ATMOS_CYCLONE_TRACKER @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -115,10 +113,6 @@ fi ############################################## # Run relevant script ############################################## -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT ############################################################# # Execute the script @@ -153,16 +147,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGFS_ATMOS_FBWIND b/jobs/JGFS_ATMOS_FBWIND index 0c43bf2643..42e459dd0b 100755 --- a/jobs/JGFS_ATMOS_FBWIND +++ b/jobs/JGFS_ATMOS_FBWIND @@ -1,8 +1,6 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################################ # GFS FBWIND PRODUCT GENERATION @@ -62,7 +60,6 @@ fi export pgmout=OUTPUT.$$ -env ######################################################## # Execute the script. @@ -70,9 +67,6 @@ $HOMEgfs/scripts/exgfs_atmos_fbwind.sh export err=$?;err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -87,4 +81,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGFS_ATMOS_FSU_GENESIS b/jobs/JGFS_ATMOS_FSU_GENESIS index 57aa709e06..eb3069bfcb 100755 --- a/jobs/JGFS_ATMOS_FSU_GENESIS +++ b/jobs/JGFS_ATMOS_FSU_GENESIS @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -119,10 +117,6 @@ fi ############################################## # Run relevant script ############################################## -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT ############################################################# # Execute the script @@ -136,14 +130,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGFS_ATMOS_GEMPAK b/jobs/JGFS_ATMOS_GEMPAK index 173fad7d51..502bb96a7a 100755 --- a/jobs/JGFS_ATMOS_GEMPAK +++ b/jobs/JGFS_ATMOS_GEMPAK @@ -1,13 +1,30 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################################ # GFS GEMPAK PRODUCT GENERATION ############################################ +############################# +# Source relevant config files +############################# +configs="base gempak" +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env gempak +status=$? 
+[[ $status -ne 0 ]] && exit $status + ########################################################## # obtain unique process id (pid) and make temp directory ########################################################## @@ -73,7 +90,6 @@ fi export pgmout=OUTPUT.$$ -env rm -f poescript @@ -129,6 +145,10 @@ echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs35_atl 180 GFS_GEMPAK_WWB &> $DATA/g echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs40 180 GFS_GEMPAK_WWB &> $DATA/gfs40.$$.1 " >>poescript echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs40 180 GFS_GEMPAK_WWB &> $DATA/gfs40.$$.2 " >>poescript +# Add task number to the MPMD script +nl -n ln -v 0 poescript > poescript.new +mv poescript.new poescript + cat poescript chmod 775 $DATA/poescript @@ -139,7 +159,7 @@ ntasks=${NTASKS_GEMPAK:-$(cat $DATA/poescript | wc -l)} ptile=${PTILE_GEMPAK:-4} threads=${NTHREADS_GEMPAK:-1} export OMP_NUM_THREADS=$threads -APRUN="mpirun -n $ntasks cfp " +APRUN=${APRUN:-"mpirun -n $ntasks cfp "} APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-$APRUN} APRUNCFP=$(eval echo $APRUN_GEMPAKCFP) @@ -147,11 +167,6 @@ APRUNCFP=$(eval echo $APRUN_GEMPAKCFP) $APRUNCFP $DATA/poescript export err=$?; err_chk -cat $DATA/gfs*.$$.? - -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -166,4 +181,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGFS_ATMOS_GEMPAK_META b/jobs/JGFS_ATMOS_GEMPAK_META index d3590b88bf..9d6683a521 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_META +++ b/jobs/JGFS_ATMOS_GEMPAK_META @@ -1,8 +1,6 @@ -#!/bin/sh +#! 
/usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################################ # GFS GEMPAK META PRODUCT GENERATION @@ -83,16 +81,12 @@ export COMINnam=${COMINnam:-$(compath.py nam/prod/nam)} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - if [ $SENDCOM = YES ] ; then mkdir -m 775 -p $COMOUT fi export pgmout=OUTPUT.$$ -env ######################################################## # Execute the script. @@ -100,9 +94,6 @@ $SRCgfs/exgfs_atmos_gempak_meta.sh export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -117,4 +108,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF index cc24556892..4b8a04e6a9 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF +++ b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF @@ -1,8 +1,6 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################################ # GFS GEMPAK NCDC PRODUCT GENERATION @@ -84,10 +82,6 @@ fi export pgmout=OUTPUT.$$ -env - -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" ######################################################## # Execute the script. @@ -95,9 +89,6 @@ $SRCgfs/exgfs_atmos_gempak_gif_ncdc_skew_t.sh export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" 
-postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -112,4 +103,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC index 0f57c6adb2..d8d05b27f2 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC +++ b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC @@ -1,8 +1,6 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" ############################################ # GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION @@ -63,13 +61,9 @@ if [ $SENDCOM = YES ] ; then mkdir -m 775 -p $COMOUT fi -env export DATA_HOLD=$DATA -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - ################################################################# # Execute the script for the regular grib ################################################################# @@ -85,7 +79,6 @@ export finc=3 export fstart=000 echo "RUNS the Program" -set -xa ######################################################## # Execute the script. @@ -107,7 +100,6 @@ export finc=3 export fstart=000 echo "RUNS the Program" -set -xa ######################################################## # Execute the script. @@ -115,9 +107,6 @@ $SRCgfs/exgfs_atmos_goes_nawips.sh export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - echo "end of program" cd $DATA_HOLD echo "######################################" @@ -138,4 +127,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS index 7cc9f69309..8ae1170800 100755 --- a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS +++ b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS @@ -1,8 +1,6 @@ -#!/bin/sh +#! 
/usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} @@ -65,7 +63,6 @@ fi export pgmout=OUTPUT.$$ -env #################################### # Specify Forecast Hour Range @@ -124,9 +121,6 @@ $HOMEgfs/scripts/exgfs_atmos_grib2_special_npoess.sh export err=$?;err_chk ############################################################# -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ @@ -141,4 +135,3 @@ if [ "$KEEPDATA" != "YES" ] ; then rm -rf $DATA fi -date diff --git a/jobs/JGFS_ATMOS_POSTSND b/jobs/JGFS_ATMOS_POSTSND index 5faf16f778..013e6d1648 100755 --- a/jobs/JGFS_ATMOS_POSTSND +++ b/jobs/JGFS_ATMOS_POSTSND @@ -1,9 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date ############################# # Source relevant config files @@ -92,7 +91,6 @@ export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} [[ ! -d $pcom ]] && mkdir -p $pcom [[ ! -d $COMAWP ]] && mkdir -p $COMAWP -env ######################################################## # Execute the script. @@ -112,15 +110,12 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS b/jobs/JGLOBAL_ATMOS_ANALYSIS index af28afe3b6..df1f4ab474 100755 --- a/jobs/JGLOBAL_ATMOS_ANALYSIS +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! 
/usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -140,10 +138,6 @@ fi ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - ${ANALYSISSH:-$SCRgfs/exglobal_atmos_analysis.sh} status=$? @@ -171,15 +165,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC index 63c143200f..39438e32b7 100755 --- a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -127,10 +125,6 @@ export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} ############################################################### # Run relevant script -env -echo "HAS BEGUN on $(hostname)" -$LOGSCRIPT - ${ANALCALCSH:-$SCRgfs/exglobal_atmos_analysis_calc.sh} status=$? @@ -148,15 +142,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -echo "ENDED NORMALLY." - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP b/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP index 22389d6f05..c0aab4e921 100755 --- a/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP +++ b/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP @@ -1,9 +1,8 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date ############################# # Source relevant config files @@ -73,10 +72,6 @@ export BLENDED_ICE_FILE_m6hrs=${BLENDED_ICE_FILE_m6hrs:-${COMINgfs_m6hrs}/${RUN} # Run relevant script ############################################################### -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" - ${EMCSFCPREPSH:-$SCRgfs/exemcsfc_global_sfc_prep.sh} status=$? [[ $status -ne 0 ]] && exit $status @@ -92,15 +87,11 @@ if [ -e ${pgmout} ]; then cat $pgmout fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date exit 0 diff --git a/jobs/JGLOBAL_ATMOS_NCEPPOST b/jobs/JGLOBAL_ATMOS_NCEPPOST index d4b0983605..4014ebf5e6 100755 --- a/jobs/JGLOBAL_ATMOS_NCEPPOST +++ b/jobs/JGLOBAL_ATMOS_NCEPPOST @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -72,7 +70,7 @@ export COMPONENT=${COMPONENT:-atmos} ############################################## export APRUNP=${APRUN:-$APRUN_NP} export RERUN=${RERUN:-NO} -export HOMECRTM=${HOMECRTM:-${NWROOT}/lib/crtm/${crtm_ver}} +export HOMECRTM=${HOMECRTM:-${NWROOT:-}/lib/crtm/${crtm_ver:-}} export FIXCRTM=${CRTM_FIX:-${HOMECRTM}/fix} export PARMpost=${PARMpost:-$HOMEgfs/parm/post} export INLINE_POST=${WRITE_DOPOST:-".false."} @@ -118,11 +116,6 @@ export SLEEP_INT=5 ############################################################### # Run relevant exglobal script -env -msg="HAS BEGUN on $(hostname)" -postmsg "$msg" -$LOGSCRIPT - $SCRgfs/ex${RUN}_atmos_nceppost.sh status=$? 
@@ -139,15 +132,11 @@ if [ -e "$pgmout" ]; then cat $pgmout fi -msg="ENDED NORMALLY." -postmsg "$msg" - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_ATMOS_POST_MANAGER b/jobs/JGLOBAL_ATMOS_POST_MANAGER index 94c848627f..b931a7aa90 100755 --- a/jobs/JGLOBAL_ATMOS_POST_MANAGER +++ b/jobs/JGLOBAL_ATMOS_POST_MANAGER @@ -1,17 +1,16 @@ -#!/bin/sh +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ######################################## # GFS post manager ######################################## export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date ############################# # Source relevant config files ############################# -set -x configs="base post" export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} @@ -101,4 +100,3 @@ export COMOUT=${COMOUT:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} $HOMEgfs/scripts/exglobal_atmos_pmgr.sh ######################################################## -date diff --git a/jobs/JGLOBAL_ATMOS_SFCANL b/jobs/JGLOBAL_ATMOS_SFCANL index b5c7c7b953..7d0e70782b 100755 --- a/jobs/JGLOBAL_ATMOS_SFCANL +++ b/jobs/JGLOBAL_ATMOS_SFCANL @@ -1,10 +1,8 @@ -#!/bin/bash -set -x +#! 
/usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -18,7 +16,6 @@ for config in $configs; do [[ $status -ne 0 ]] && exit $status done - ########################################## # Source machine runtime environment ########################################## @@ -105,10 +102,6 @@ fi ############################################################### # Run relevant script -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${SFCANALSH:-$SCRgfs/exglobal_atmos_sfcanl.sh} status=$? @@ -126,16 +119,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi - -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC index e49e6a0244..5496861e5f 100755 --- a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +++ b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC @@ -1,10 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "$HOMEgfs/ush/preamble.sh" +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################# # Source relevant config files @@ -104,11 +102,6 @@ export BKGFREQ=1 # for hourly relocation ############################################## # Run relevant script ############################################## -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${TROPCYQCRELOSH:-$SCRgfs/exglobal_atmos_tropcy_qc_reloc.sh} status=$? @@ -122,15 +115,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi -msg="ENDED NORMALLY." 
-postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST index d72a07614f..40e8f46051 100755 --- a/jobs/JGLOBAL_FORECAST +++ b/jobs/JGLOBAL_FORECAST @@ -1,9 +1,8 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date #-------------------------------- if [ $RUN_ENVIR = "emc" ]; then @@ -135,11 +134,6 @@ fi ############################################################### # Run relevant exglobal script -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${FORECASTSH:-$SCRgfs/exglobal_forecast.sh} status=$? @@ -157,14 +151,11 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_WAVE_GEMPAK b/jobs/JGLOBAL_WAVE_GEMPAK index aee0207acd..591dcff393 100755 --- a/jobs/JGLOBAL_WAVE_GEMPAK +++ b/jobs/JGLOBAL_WAVE_GEMPAK @@ -1,9 +1,6 @@ -#!/bin/bash +#! /usr/bin/env bash - -date -set -xa -export PS4='$SECONDS + ' +source "$HOMEgfs/ush/preamble.sh" # JY - 10/29, move the block in the front, otherwise PDY is not defined for COMIN export DATA=${DATA:-${DATAROOT}/${jobid:?}} @@ -18,10 +15,6 @@ export cycle=${cycle:-t${cyc}z} setpdy.sh . 
PDY -env -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - # export NET=${NET:-gfs} @@ -63,5 +56,5 @@ if [ "$KEEPDATA" != "YES" ]; then rm -rf $DATA fi -date + exit 0 diff --git a/jobs/JGLOBAL_WAVE_INIT b/jobs/JGLOBAL_WAVE_INIT index bab8f04742..013dff7e70 100755 --- a/jobs/JGLOBAL_WAVE_INIT +++ b/jobs/JGLOBAL_WAVE_INIT @@ -1,9 +1,8 @@ -#!/bin/bash +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -date export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e ############################# # Source relevant config files @@ -79,5 +78,5 @@ $HOMEgfs/scripts/exgfs_wave_init.sh cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNT b/jobs/JGLOBAL_WAVE_POST_BNDPNT index 93b690ad1d..0821a9fdaf 100755 --- a/jobs/JGLOBAL_WAVE_POST_BNDPNT +++ b/jobs/JGLOBAL_WAVE_POST_BNDPNT @@ -1,9 +1,8 @@ -#!/bin/bash +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -date export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e ############################# # Source relevant config files @@ -68,12 +67,9 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} mkdir -p $COMOUT/station -env | sort # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic -# Set wave model ID tag to include member number -# if ensemble; waveMEMB var empty in deterministic membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG @@ -91,11 +87,9 @@ export DOBNDPNT_WAV='YES' #not boundary points $HOMEgfs/scripts/exgfs_wave_post_pnt.sh err=$? if [ $err -ne 0 ]; then - msg="FATAL ERROR: ex-script of GWES_POST failed!" -else - msg="$job completed normally!" + echo "FATAL ERROR: ex-script of GWES_POST failed!" 
+ exit ${err} fi -postmsg "$jlogfile" "$msg" ########################################## # Remove the Temporary working directory @@ -103,5 +97,5 @@ postmsg "$jlogfile" "$msg" cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL index 5d37dd35fc..404ab14d9e 100755 --- a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL +++ b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL @@ -1,9 +1,8 @@ -#!/bin/bash +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -date export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e ############################# # Source relevant config files @@ -69,7 +68,6 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} mkdir -p $COMOUT/station -env | sort # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic @@ -92,11 +90,9 @@ export DOBNDPNT_WAV='YES' #boundary points $HOMEgfs/scripts/exgfs_wave_post_pnt.sh err=$? if [ $err -ne 0 ]; then - msg="FATAL ERROR: ex-script of GFS_WAVE_POST_PNT failed!" -else - msg="$job completed normally!" + echo "FATAL ERROR: ex-script of GFS_WAVE_POST_PNT failed!" + exit ${err} fi -postmsg "$jlogfile" "$msg" ########################################## # Remove the Temporary working directory @@ -104,5 +100,5 @@ postmsg "$jlogfile" "$msg" cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_WAVE_POST_PNT b/jobs/JGLOBAL_WAVE_POST_PNT index 092916b7f6..acde66e7a5 100755 --- a/jobs/JGLOBAL_WAVE_POST_PNT +++ b/jobs/JGLOBAL_WAVE_POST_PNT @@ -1,16 +1,15 @@ -#!/bin/bash +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -date export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e ############################# # Source relevant config files ############################# configs="base wave wavepostsbs wavepostpnt" export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +config_path=${EXPDIR:-${NWROOT:-}/gfs.${gfs_ver}/parm/config} for config in $configs; do . $config_path/config.$config status=$? @@ -29,8 +28,8 @@ export NET=${NET:-gfs} export RUN=${RUN:-gfs} export COMPONENT=${COMPONENT:-wave} -export HOMEgefs=${HOMEgefs:-$NWROOT/$NET.${gefs_ver}} -export HOMEgfs=${HOMEgfs:-$NWROOT/$NET.${gfs_ver}} +export HOMEgefs=${HOMEgefs:-${NWROOT:-}/$NET.${gefs_ver:-}} +export HOMEgfs=${HOMEgfs:-${NWROOT:-}/$NET.${gfs_ver}} # Add default errchk = err_chk export errchk=${errchk:-err_chk} @@ -68,7 +67,6 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} mkdir -p $COMOUT/station -env | sort # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic @@ -91,11 +89,9 @@ export DOBNDPNT_WAV='NO' #not boundary points $HOMEgfs/scripts/exgfs_wave_post_pnt.sh err=$? if [ $err -ne 0 ]; then - msg="FATAL ERROR: ex-script of GWES_POST failed!" -else - msg="$job completed normally!" + echo "FATAL ERROR: ex-script of GWES_POST failed!" + exit ${err} fi -postmsg "$jlogfile" "$msg" ########################################## # Remove the Temporary working directory @@ -103,5 +99,5 @@ postmsg "$jlogfile" "$msg" cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_WAVE_POST_SBS b/jobs/JGLOBAL_WAVE_POST_SBS index d798e28def..868cf8b242 100755 --- a/jobs/JGLOBAL_WAVE_POST_SBS +++ b/jobs/JGLOBAL_WAVE_POST_SBS @@ -1,16 +1,15 @@ -#!/bin/bash +#!
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -date export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e ############################# # Source relevant config files ############################# configs="base wave wavepostsbs" export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +config_path=${EXPDIR:-${NWROOT:-}/gfs.${gfs_ver}/parm/config} for config in $configs; do . $config_path/config.$config status=$? @@ -29,8 +28,8 @@ export NET=${NET:-gfs} export RUN=${RUN:-gfs} export COMPONENT=${COMPONENT:-wave} -export HOMEgefs=${HOMEgefs:-$NWROOT/$NET.${gefs_ver}} -export HOMEgfs=${HOMEgfs:-$NWROOT/$NET.${gfs_ver}} +export HOMEgefs=${HOMEgefs:-${NWROOT:-}/$NET.${gefs_ver:-}} +export HOMEgfs=${HOMEgfs:-${NWROOT:-}/$NET.${gfs_ver}} # Add default errchk = err_chk export errchk=${errchk:-err_chk} @@ -66,13 +65,12 @@ fi export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -export COMINice=${COMINice:-${COMROOTp2}/omb/prod} +export COMINice=${COMINice:-${COMROOTp2:-${COMROOT}}/omb/prod} export COMINwnd=${COMINwnd:-${COMROOT}/gfs/prod} -export COMIN_WAV_CUR=${COMIN_WAV_CUR:-${COMROOTp2}/rtofs/prod} +export COMIN_WAV_CUR=${COMIN_WAV_CUR:-${COMROOTp2:-${COMROOT}}/rtofs/prod} mkdir -p $COMOUT/gridded -env | sort # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic @@ -89,11 +87,9 @@ export CFP_VERBOSE=1 $HOMEgfs/scripts/exgfs_wave_post_gridded_sbs.sh err=$? if [ $err -ne 0 ]; then - msg="FATAL ERROR: ex-script of GWES_POST failed!" -else - msg="$job completed normally!" + echo "FATAL ERROR: ex-script of GWES_POST failed!" 
+ exit $err fi -postmsg "$jlogfile" "$msg" ########################################## # Remove the Temporary working directory @@ -101,5 +97,5 @@ postmsg "$jlogfile" "$msg" cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS index db8738dcb9..617217dfac 100755 --- a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS +++ b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS @@ -1,8 +1,6 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export PS4=' $SECONDS + ' -set -xa +source "$HOMEgfs/ush/preamble.sh" export DATA=${DATA:-${DATAROOT}/${jobid:?}} mkdir -p $DATA @@ -16,7 +14,6 @@ export cycle=${cycle:-t${cyc}z} # Set PDY setpdy.sh . PDY -env export NET=${NET:-gfs} export RUN=${RUN:-gfs} @@ -56,6 +53,7 @@ if [ "$KEEPDATA" != "YES" ]; then cd $DATAROOT rm -rf $DATA fi -date + + exit 0 diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED index 6e9f8ea5c2..45cea6d4e2 100755 --- a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED +++ b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED @@ -1,8 +1,6 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export PS4=' $SECONDS + ' -set -xa +source "$HOMEgfs/ush/preamble.sh" export DATA=${DATA:-${DATAROOT}/${jobid:?}} mkdir -p $DATA @@ -16,7 +14,6 @@ export cycle=${cycle:-t${cyc}z} # Set PDY setpdy.sh . PDY - env # PATH for working directory export NET=${NET:-gfs} @@ -45,21 +42,6 @@ if [ $SENDCOM = YES ]; then mkdir -p $COMOUT $PCOM fi -# JY - move up -#export DATA=${DATA:-${DATAROOT}/${jobid:?}} -#mkdir -p $DATA -#cd $DATA -# -####################################### -### Set up the cycle variable -####################################### -#export cycle=${cycle:-t${cyc}z} - -## Set PDY -# setpdy.sh -# . 
PDY -# env - ################################### # Execute the Script ################################### @@ -73,6 +55,7 @@ if [ "$KEEPDATA" != "YES" ]; then cd $DATAROOT rm -rf $DATA fi -date + + exit 0 diff --git a/jobs/JGLOBAL_WAVE_PREP b/jobs/JGLOBAL_WAVE_PREP index 666ecb3b9e..5878e36444 100755 --- a/jobs/JGLOBAL_WAVE_PREP +++ b/jobs/JGLOBAL_WAVE_PREP @@ -1,9 +1,8 @@ -#!/bin/bash +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -date export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e ############################# # Source relevant config files @@ -101,5 +100,5 @@ $HOMEgfs/scripts/exgfs_wave_prep.sh cd $DATAROOT [[ $KEEPDATA = "NO" ]] && rm -rf $DATA -date + exit 0 diff --git a/jobs/rocoto/aerosol_init.sh b/jobs/rocoto/aerosol_init.sh index d95f043e70..34ccc0fe26 100755 --- a/jobs/rocoto/aerosol_init.sh +++ b/jobs/rocoto/aerosol_init.sh @@ -1,6 +1,6 @@ -#!/bin/bash +#! /usr/bin/env bash -set -x +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -34,5 +34,5 @@ fi ############################################################## # Exit cleanly -set +x + exit 0 diff --git a/jobs/rocoto/anal.sh b/jobs/rocoto/anal.sh index 5f39309dbb..cd7fdc932a 100755 --- a/jobs/rocoto/anal.sh +++ b/jobs/rocoto/anal.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_ATMOS_ANALYSIS status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/analcalc.sh b/jobs/rocoto/analcalc.sh index df5915086e..d80756cfc7 100755 --- a/jobs/rocoto/analcalc.sh +++ b/jobs/rocoto/analcalc.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC status=$? + + exit $status diff --git a/jobs/rocoto/analdiag.sh b/jobs/rocoto/analdiag.sh index 6e29a69600..f9d97360c6 100755 --- a/jobs/rocoto/analdiag.sh +++ b/jobs/rocoto/analdiag.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGDAS_ATMOS_ANALYSIS_DIAG status=$? + + exit $status diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh index e6ce577d1c..c9441b5a75 100755 --- a/jobs/rocoto/arch.sh +++ b/jobs/rocoto/arch.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -60,9 +62,11 @@ PDY_MOS=$(echo $CDATE_MOS | cut -c1-8) COMIN=${COMINatmos:-"$ROTDIR/$CDUMP.$PDY/$cyc/atmos"} cd $COMIN +source "${HOMEgfs}/ush/file_utils.sh" + [[ ! 
-d $ARCDIR ]] && mkdir -p $ARCDIR -$NCP ${APREFIX}gsistat $ARCDIR/gsistat.${CDUMP}.${CDATE} -$NCP ${APREFIX}pgrb2.1p00.anl $ARCDIR/pgbanl.${CDUMP}.${CDATE}.grib2 +nb_copy ${APREFIX}gsistat $ARCDIR/gsistat.${CDUMP}.${CDATE} +nb_copy ${APREFIX}pgrb2.1p00.anl $ARCDIR/pgbanl.${CDUMP}.${CDATE}.grib2 # Archive 1 degree forecast GRIB2 files for verification if [ $CDUMP = "gfs" ]; then @@ -71,16 +75,16 @@ if [ $CDUMP = "gfs" ]; then while [ $fhr -le $fhmax ]; do fhr2=$(printf %02i $fhr) fhr3=$(printf %03i $fhr) - $NCP ${APREFIX}pgrb2.1p00.f$fhr3 $ARCDIR/pgbf${fhr2}.${CDUMP}.${CDATE}.grib2 - (( fhr = 10#$fhr + 10#$FHOUT_GFS )) + nb_copy ${APREFIX}pgrb2.1p00.f$fhr3 $ARCDIR/pgbf${fhr2}.${CDUMP}.${CDATE}.grib2 + fhr=$((10#$fhr + 10#$FHOUT_GFS )) done fi if [ $CDUMP = "gdas" ]; then flist="000 003 006 009" for fhr in $flist; do fname=${APREFIX}pgrb2.1p00.f${fhr} - fhr2=$(printf %02i $fhr) - $NCP $fname $ARCDIR/pgbf${fhr2}.${CDUMP}.${CDATE}.grib2 + fhr2=$(printf %02i $((10#$fhr))) + nb_copy $fname $ARCDIR/pgbf${fhr2}.${CDUMP}.${CDATE}.grib2 done fi @@ -97,15 +101,17 @@ if [ $CDUMP = "gdas" -a -s gdas.t${cyc}z.cyclone.trackatcfunix ]; then fi if [ $CDUMP = "gfs" ]; then - $NCP storms.gfso.atcf_gen.$CDATE ${ARCDIR}/. - $NCP storms.gfso.atcf_gen.altg.$CDATE ${ARCDIR}/. - $NCP trak.gfso.atcfunix.$CDATE ${ARCDIR}/. - $NCP trak.gfso.atcfunix.altg.$CDATE ${ARCDIR}/. + nb_copy storms.gfso.atcf_gen.$CDATE ${ARCDIR}/. + nb_copy storms.gfso.atcf_gen.altg.$CDATE ${ARCDIR}/. + nb_copy trak.gfso.atcfunix.$CDATE ${ARCDIR}/. + nb_copy trak.gfso.atcfunix.altg.$CDATE ${ARCDIR}/. 
mkdir -p ${ARCDIR}/tracker.$CDATE/$CDUMP blist="epac natl" for basin in $blist; do - cp -rp $basin ${ARCDIR}/tracker.$CDATE/$CDUMP + if [[ -f $basin ]]; then + cp -rp $basin ${ARCDIR}/tracker.$CDATE/$CDUMP + fi done fi @@ -121,8 +127,8 @@ if [ $CDUMP = "gfs" -a $FITSARC = "YES" ]; then fhr3=$(printf %03i $fhr) sfcfile=${prefix}.sfcf${fhr3}${ASUFFIX} sigfile=${prefix}.atmf${fhr3}${ASUFFIX} - $NCP $sfcfile $VFYARC/${CDUMP}.$PDY/$cyc/ - $NCP $sigfile $VFYARC/${CDUMP}.$PDY/$cyc/ + nb_copy $sfcfile $VFYARC/${CDUMP}.$PDY/$cyc/ + nb_copy $sigfile $VFYARC/${CDUMP}.$PDY/$cyc/ (( fhr = 10#$fhr + 6 )) done fi @@ -228,12 +234,14 @@ if [ $CDUMP = "gfs" ]; then #--save mdl gfsmos output from all cycles in the 18Z archive directory if [ -d gfsmos.$PDY_MOS -a $cyc -eq 18 ]; then + set +e $TARCMD -P -cvf $ATARDIR/$CDATE_MOS/gfsmos.tar ./gfsmos.$PDY_MOS status=$? if [ $status -ne 0 -a $CDATE -ge $firstday ]; then echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE gfsmos.tar failed" exit $status fi + ${ERR_EXIT_ON:-set -e} fi elif [ $CDUMP = "gdas" ]; then @@ -260,12 +268,14 @@ fi # Turn on extended globbing options shopt -s extglob for targrp in $targrp_list; do + set +e $TARCMD -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt) status=$? if [ $status -ne 0 -a $CDATE -ge $firstday ]; then echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE ${targrp}.tar failed" exit $status fi + ${ERR_EXIT_ON:-set -e} done # Turn extended globbing back off shopt -u extglob @@ -396,4 +406,6 @@ COMIN="$ROTDIR/$CDUMP.$rPDY" ############################################################### + + exit 0 diff --git a/jobs/rocoto/awips.sh b/jobs/rocoto/awips.sh index 8d94cdef20..f8e5646aa6 100755 --- a/jobs/rocoto/awips.sh +++ b/jobs/rocoto/awips.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -73,7 +75,7 @@ for fhr in $fhrlst; do fhmax=84 if [ $fhr -ge $fhmin -a $fhr -le $fhmax ] ; then if [[ $(expr $fhr % 3) -eq 0 ]]; then - fhr3=$(printf %03i $fhr) + fhr3=$(printf %03d $((10#$fhr))) # Check for input file existence. If not present, sleep # Loop SLEEP_LOOP_MAX times. Abort if not found. @@ -145,4 +147,6 @@ done ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + + exit 0 diff --git a/jobs/rocoto/coupled_ic.sh b/jobs/rocoto/coupled_ic.sh index 973d6b4ad4..1be2a216b5 100755 --- a/jobs/rocoto/coupled_ic.sh +++ b/jobs/rocoto/coupled_ic.sh @@ -1,6 +1,6 @@ -#!/bin/bash +#! /usr/bin/env bash -set -x +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -55,7 +55,7 @@ rc=$? if [[ $rc -ne 0 ]] ; then echo "FATAL: Unable to copy $BASE_CPLIC/$CPL_ATMIC/$CDATE/$CDUMP/* to $ICSDIR/$CDATE/atmos/ (Error code $rc)" fi -((err+=$rc)) +err=$((err + rc)) # Setup Ocean IC files @@ -64,7 +64,7 @@ rc=$? if [[ $rc -ne 0 ]] ; then echo "FATAL: Unable to copy $BASE_CPLIC/$CPL_OCNIC/$CDATE/ocn/$OCNRES/MOM*.nc to $ICSDIR/$CDATE/ocn/ (Error code $rc)" fi -((err+=$rc)) +err=$((err + rc)) #Setup Ice IC files cp $BASE_CPLIC/$CPL_ICEIC/$CDATE/ice/$ICERES/cice5_model_${ICERESdec}.res_$CDATE.nc $ICSDIR/$CDATE/ice/cice_model_${ICERESdec}.res_$CDATE.nc @@ -72,7 +72,7 @@ rc=$? if [[ $rc -ne 0 ]] ; then echo "FATAL: Unable to copy $BASE_CPLIC/$CPL_ICEIC/$CDATE/ice/$ICERES/cice5_model_${ICERESdec}.res_$CDATE.nc to $ICSDIR/$CDATE/ice/cice_model_${ICERESdec}.res_$CDATE.nc (Error code $rc)" fi -((err+=$rc)) +err=$((err + rc)) if [ $DO_WAVE = "YES" ]; then [[ ! 
-d $ICSDIR/$CDATE/wav ]] && mkdir -p $ICSDIR/$CDATE/wav @@ -83,7 +83,7 @@ if [ $DO_WAVE = "YES" ]; then if [[ $rc -ne 0 ]] ; then echo "FATAL: Unable to copy $BASE_CPLIC/$CPL_WAVIC/$CDATE/wav/$grdID/*restart.$grdID to $ICSDIR/$CDATE/wav/ (Error code $rc)" fi - ((err+=$rc)) + err=$((err + rc)) done fi @@ -112,5 +112,5 @@ fi ############################################################## # Exit cleanly -set +x + exit 0 diff --git a/jobs/rocoto/earc.sh b/jobs/rocoto/earc.sh index 5d57abf26e..8b80b4b9e8 100755 --- a/jobs/rocoto/earc.sh +++ b/jobs/rocoto/earc.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -56,6 +58,7 @@ fi cd $ROTDIR +source "${HOMEgfs}/ush/file_utils.sh" ################################################################### # ENSGRP > 0 archives a group of ensemble members @@ -134,24 +137,26 @@ if [ $ENSGRP -eq 0 ]; then [ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE fi + set +e $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}.tar $(cat $ARCH_LIST/enkf${CDUMP}.txt) status=$? if [ $status -ne 0 -a $CDATE -ge $firstday ]; then echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}.tar failed" exit $status fi + ${ERR_EXIT_ON:-set -eu} fi #-- Archive online for verification and diagnostics [[ ! 
-d $ARCDIR ]] && mkdir -p $ARCDIR cd $ARCDIR - $NCP $ROTDIR/enkf${CDUMP}.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.enkfstat enkfstat.${CDUMP}.$CDATE - $NCP $ROTDIR/enkf${CDUMP}.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.gsistat.ensmean gsistat.${CDUMP}.${CDATE}.ensmean + nb_copy $ROTDIR/enkf${CDUMP}.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.enkfstat enkfstat.${CDUMP}.$CDATE + nb_copy $ROTDIR/enkf${CDUMP}.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.gsistat.ensmean gsistat.${CDUMP}.${CDATE}.ensmean if [ $CDUMP_ENKF != "GDAS" ]; then - $NCP $ROTDIR/enkfgfs.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.enkfstat enkfstat.gfs.$CDATE - $NCP $ROTDIR/enkfgfs.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.gsistat.ensmean gsistat.gfs.${CDATE}.ensmean + nb_copy $ROTDIR/enkfgfs.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.enkfstat enkfstat.gfs.$CDATE + nb_copy $ROTDIR/enkfgfs.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.gsistat.ensmean gsistat.gfs.${CDATE}.ensmean fi fi @@ -219,4 +224,6 @@ for ctype in $clist; do done ############################################################### + + exit 0 diff --git a/jobs/rocoto/ecen.sh b/jobs/rocoto/ecen.sh index 8c88a63ef5..dd4a8ac8a6 100755 --- a/jobs/rocoto/ecen.sh +++ b/jobs/rocoto/ecen.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -24,4 +26,6 @@ done ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/echgres.sh b/jobs/rocoto/echgres.sh index 733257349e..3171388f6a 100755 --- a/jobs/rocoto/echgres.sh +++ b/jobs/rocoto/echgres.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGDAS_ATMOS_CHGRES_FORENKF status=$? 
-exit $status + + +exit ${status} diff --git a/jobs/rocoto/ediag.sh b/jobs/rocoto/ediag.sh index cb9df99b9b..b09a7f4963 100755 --- a/jobs/rocoto/ediag.sh +++ b/jobs/rocoto/ediag.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGDAS_ENKF_DIAG status=$? + + exit $status diff --git a/jobs/rocoto/efcs.sh b/jobs/rocoto/efcs.sh index 04d5eb3c3e..4454ad6c8b 100755 --- a/jobs/rocoto/efcs.sh +++ b/jobs/rocoto/efcs.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,5 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGDAS_ENKF_FCST status=$? + exit $status diff --git a/jobs/rocoto/eobs.sh b/jobs/rocoto/eobs.sh index c635f9ed44..f6dc275578 100755 --- a/jobs/rocoto/eobs.sh +++ b/jobs/rocoto/eobs.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGDAS_ENKF_SELECT_OBS status=$? + + exit $status diff --git a/jobs/rocoto/eomg.sh b/jobs/rocoto/eomg.sh index a0519c5318..de981c02bb 100755 --- a/jobs/rocoto/eomg.sh +++ b/jobs/rocoto/eomg.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGDAS_ENKF_INNOVATE_OBS status=$? + + exit $status diff --git a/jobs/rocoto/epos.sh b/jobs/rocoto/epos.sh index e7ad93e69e..1039b8ab20 100755 --- a/jobs/rocoto/epos.sh +++ b/jobs/rocoto/epos.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -25,4 +27,6 @@ done ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/esfc.sh b/jobs/rocoto/esfc.sh index 50ee46e97a..d830c59c50 100755 --- a/jobs/rocoto/esfc.sh +++ b/jobs/rocoto/esfc.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGDAS_ENKF_SFC status=$? + + exit $status diff --git a/jobs/rocoto/eupd.sh b/jobs/rocoto/eupd.sh index 1d8dc3b119..d202c45aef 100755 --- a/jobs/rocoto/eupd.sh +++ b/jobs/rocoto/eupd.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGDAS_ENKF_UPDATE status=$? + + exit $status diff --git a/jobs/rocoto/fcst.sh b/jobs/rocoto/fcst.sh index 199c89724a..d59872c60c 100755 --- a/jobs/rocoto/fcst.sh +++ b/jobs/rocoto/fcst.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_FORECAST status=$? + + exit $status diff --git a/jobs/rocoto/gempak.sh b/jobs/rocoto/gempak.sh index 2b119ab3f4..5b7f43ce47 100755 --- a/jobs/rocoto/gempak.sh +++ b/jobs/rocoto/gempak.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -66,4 +68,6 @@ $GEMPAKSH ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + + exit 0 diff --git a/jobs/rocoto/getic.sh b/jobs/rocoto/getic.sh index 700799b677..84008e6ca0 100755 --- a/jobs/rocoto/getic.sh +++ b/jobs/rocoto/getic.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -158,4 +160,6 @@ cd $DATAROOT ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/gldas.sh b/jobs/rocoto/gldas.sh index 87fba5a211..db16dd883f 100755 --- a/jobs/rocoto/gldas.sh +++ b/jobs/rocoto/gldas.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -12,4 +14,5 @@ status=$? $HOMEgfs/jobs/JGDAS_ATMOS_GLDAS status=$? + exit $status diff --git a/jobs/rocoto/init.sh b/jobs/rocoto/init.sh index ed03799e2b..0432750e72 100755 --- a/jobs/rocoto/init.sh +++ b/jobs/rocoto/init.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -53,7 +55,7 @@ export RUNICSH=${RUNICSH:-${GDASINIT_DIR}/run_v16.chgres.sh} # Check if init is needed and run if so if [[ $gfs_ver = "v16" && $EXP_WARM_START = ".true." && $CASE = $OPS_RES ]]; then echo "Detected v16 $OPS_RES warm starts, will not run init. Exiting..." - exit 0 + else # Run chgres_cube if [ ! 
-d $OUTDIR ]; then mkdir -p $OUTDIR ; fi @@ -70,4 +72,6 @@ cd $DATAROOT ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/metp.sh b/jobs/rocoto/metp.sh index e18d35fc20..80138b9026 100755 --- a/jobs/rocoto/metp.sh +++ b/jobs/rocoto/metp.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -73,4 +75,6 @@ fi ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + + exit 0 diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh index ad42680dba..0f6413ec43 100755 --- a/jobs/rocoto/ocnpost.sh +++ b/jobs/rocoto/ocnpost.sh @@ -1,5 +1,6 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## CICE5/MOM6 post driver script @@ -155,4 +156,6 @@ status=$? if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATA ; fi ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/post.sh b/jobs/rocoto/post.sh index 55cf654b4a..b32e8c511d 100755 --- a/jobs/rocoto/post.sh +++ b/jobs/rocoto/post.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## NCEP post driver script @@ -24,27 +26,10 @@ fi #--------------------------------------------------------------- for fhr in $fhrlst; do - - if [ ! -f $restart_file${fhr}.nemsio -a ! -f $restart_file${fhr}.nc -a ! -f $restart_file${fhr}.txt ]; then - echo "Nothing to process for FHR = $fhr, cycle, wait for 5 minutes" - sleep 300 - fi - if [ ! -f $restart_file${fhr}.nemsio -a ! -f $restart_file${fhr}.nc -a ! 
-f $restart_file${fhr}.txt ]; then - echo "Nothing to process for FHR = $fhr, cycle, skip" - continue - fi - - #master=$ROTDIR/${CDUMP}.${PDY}/${cyc}/$COMPONENT/${CDUMP}.t${cyc}z.master.grb2f${fhr} - pgb0p25=$ROTDIR/${CDUMP}.${PDY}/${cyc}/$COMPONENT/${CDUMP}.t${cyc}z.pgrb2.0p25.f${fhr} - if [ ! -s $pgb0p25 ]; then - export post_times=$fhr - $HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST - status=$? - [[ $status -ne 0 ]] && exit $status - fi - + export post_times=$fhr + $HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST + status=$? + [[ $status -ne 0 ]] && exit $status done -############################################################### -# Exit out cleanly exit 0 diff --git a/jobs/rocoto/postsnd.sh b/jobs/rocoto/postsnd.sh index 5472a9206b..fadfaa6d9e 100755 --- a/jobs/rocoto/postsnd.sh +++ b/jobs/rocoto/postsnd.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -12,5 +14,7 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGFS_ATMOS_POSTSND status=$? + + exit $status diff --git a/jobs/rocoto/prep.sh b/jobs/rocoto/prep.sh index 3c96078747..7d22adc7aa 100755 --- a/jobs/rocoto/prep.sh +++ b/jobs/rocoto/prep.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -126,4 +128,6 @@ fi ################################################################################ # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/sfcanl.sh b/jobs/rocoto/sfcanl.sh index 457b205c43..7b9812f37b 100755 --- a/jobs/rocoto/sfcanl.sh +++ b/jobs/rocoto/sfcanl.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -10,4 +12,6 @@ status=$? # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_ATMOS_SFCANL status=$? 
+ + exit $status diff --git a/jobs/rocoto/vrfy.sh b/jobs/rocoto/vrfy.sh index ccc4f01388..57bf1814b9 100755 --- a/jobs/rocoto/vrfy.sh +++ b/jobs/rocoto/vrfy.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -180,4 +182,6 @@ fi ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + + exit 0 diff --git a/jobs/rocoto/wafs.sh b/jobs/rocoto/wafs.sh index f50f1b6801..8aab955cc8 100755 --- a/jobs/rocoto/wafs.sh +++ b/jobs/rocoto/wafs.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -52,4 +54,6 @@ done ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + + exit 0 diff --git a/jobs/rocoto/wafsblending.sh b/jobs/rocoto/wafsblending.sh index bbdb9f8205..2793986e80 100755 --- a/jobs/rocoto/wafsblending.sh +++ b/jobs/rocoto/wafsblending.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -32,9 +34,10 @@ echo "=============== START TO RUN WAFSBLENDING ===============" # Execute the JJOB $HOMEgfs/jobs/JGFS_ATMOS_WAFS_BLENDING status=$? -exit $status ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi -exit 0 + + +exit $status diff --git a/jobs/rocoto/wafsblending0p25.sh b/jobs/rocoto/wafsblending0p25.sh index 70f7c69005..fb06284f55 100755 --- a/jobs/rocoto/wafsblending0p25.sh +++ b/jobs/rocoto/wafsblending0p25.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -32,9 +34,10 @@ echo "=============== START TO RUN WAFSBLENDING0P25 ===============" # Execute the JJOB $HOMEgfs/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 status=$? -exit $status ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi -exit 0 + + +exit $status diff --git a/jobs/rocoto/wafsgcip.sh b/jobs/rocoto/wafsgcip.sh index 8ecc6fb922..f3e98a03da 100755 --- a/jobs/rocoto/wafsgcip.sh +++ b/jobs/rocoto/wafsgcip.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -32,9 +34,10 @@ echo "=============== START TO RUN WAFSGCIP ===============" # Execute the JJOB $HOMEgfs/jobs/JGFS_ATMOS_WAFS_GCIP status=$? -exit $status ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi -exit 0 + + +exit $status diff --git a/jobs/rocoto/wafsgrib2.sh b/jobs/rocoto/wafsgrib2.sh index d6c379db8e..c7dbead30d 100755 --- a/jobs/rocoto/wafsgrib2.sh +++ b/jobs/rocoto/wafsgrib2.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -32,9 +34,10 @@ echo "=============== START TO RUN WAFSGRIB2 ===============" # Execute the JJOB $HOMEgfs/jobs/JGFS_ATMOS_WAFS_GRIB2 status=$? 
-exit $status ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi -exit 0 + + +exit $status diff --git a/jobs/rocoto/wafsgrib20p25.sh b/jobs/rocoto/wafsgrib20p25.sh index ef95f47867..e99ee210d9 100755 --- a/jobs/rocoto/wafsgrib20p25.sh +++ b/jobs/rocoto/wafsgrib20p25.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -32,9 +34,10 @@ echo "=============== START TO RUN WAFSGRIB20p25 ===============" # Execute the JJOB $HOMEgfs/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 status=$? -exit $status ############################################################### # Force Exit out cleanly if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi -exit 0 + + +exit $status diff --git a/jobs/rocoto/waveawipsbulls.sh b/jobs/rocoto/waveawipsbulls.sh index 6461a7aac9..1e1e1cd4e2 100755 --- a/jobs/rocoto/waveawipsbulls.sh +++ b/jobs/rocoto/waveawipsbulls.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -32,5 +34,6 @@ echo "=============== START TO RUN WAVE PRDGEN BULLS ===============" # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_WAVE_PRDGEN_BULLS status=$? -exit $status + +exit $status diff --git a/jobs/rocoto/waveawipsgridded.sh b/jobs/rocoto/waveawipsgridded.sh index 70e85dc2c7..3627ba62c4 100755 --- a/jobs/rocoto/waveawipsgridded.sh +++ b/jobs/rocoto/waveawipsgridded.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -32,4 +34,6 @@ echo "=============== START TO RUN WAVE PRDGEN GRIDDED ===============" # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED status=$? 
+ + exit $status diff --git a/jobs/rocoto/wavegempak.sh b/jobs/rocoto/wavegempak.sh index 870f6281b4..d4cf1667fc 100755 --- a/jobs/rocoto/wavegempak.sh +++ b/jobs/rocoto/wavegempak.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -30,4 +32,6 @@ echo "=============== START TO RUN WAVE GEMPAK ===============" # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_WAVE_GEMPAK status=$? + + exit $status diff --git a/jobs/rocoto/waveinit.sh b/jobs/rocoto/waveinit.sh index 14a6a5cdeb..5995b85302 100755 --- a/jobs/rocoto/waveinit.sh +++ b/jobs/rocoto/waveinit.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -15,7 +17,4 @@ $HOMEgfs/jobs/JGLOBAL_WAVE_INIT status=$? [[ $status -ne 0 ]] && exit $status -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wavepostbndpnt.sh b/jobs/rocoto/wavepostbndpnt.sh index 52c5d9fafe..fe0e2a0723 100755 --- a/jobs/rocoto/wavepostbndpnt.sh +++ b/jobs/rocoto/wavepostbndpnt.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -15,7 +17,4 @@ $HOMEgfs/jobs/JGLOBAL_WAVE_POST_BNDPNT status=$? [[ $status -ne 0 ]] && exit $status -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wavepostbndpntbll.sh b/jobs/rocoto/wavepostbndpntbll.sh index 72fb77bd1e..cea3c0bc6b 100755 --- a/jobs/rocoto/wavepostbndpntbll.sh +++ b/jobs/rocoto/wavepostbndpntbll.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -15,7 +17,4 @@ $HOMEgfs/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL status=$? [[ $status -ne 0 ]] && exit $status -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wavepostpnt.sh b/jobs/rocoto/wavepostpnt.sh index f00c09550b..1b1d8c9765 100755 --- a/jobs/rocoto/wavepostpnt.sh +++ b/jobs/rocoto/wavepostpnt.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -15,7 +17,4 @@ $HOMEgfs/jobs/JGLOBAL_WAVE_POST_PNT status=$? [[ $status -ne 0 ]] && exit $status -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wavepostsbs.sh b/jobs/rocoto/wavepostsbs.sh index d8ec7cc268..fb4fdfbd8b 100755 --- a/jobs/rocoto/wavepostsbs.sh +++ b/jobs/rocoto/wavepostsbs.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -15,7 +17,4 @@ $HOMEgfs/jobs/JGLOBAL_WAVE_POST_SBS status=$? [[ $status -ne 0 ]] && exit $status -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/waveprep.sh b/jobs/rocoto/waveprep.sh index 9705fbc31a..c55c8526d9 100755 --- a/jobs/rocoto/waveprep.sh +++ b/jobs/rocoto/waveprep.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### echo @@ -15,7 +17,4 @@ $HOMEgfs/jobs/JGLOBAL_WAVE_PREP status=$? 
[[ $status -ne 0 ]] && exit $status -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/parm/config/config.aero b/parm/config/config.aero index 9b6b2a5ca6..74c5cb7fa5 100644 --- a/parm/config/config.aero +++ b/parm/config/config.aero @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash # UFS-Aerosols settings diff --git a/parm/config/config.aerosol_init b/parm/config/config.aerosol_init index 430640ad94..0e586e0231 100644 --- a/parm/config/config.aerosol_init +++ b/parm/config/config.aerosol_init @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.aerosol_init ########## diff --git a/parm/config/config.anal b/parm/config/config.anal index 01955c2da9..6d3a48c82e 100755 --- a/parm/config/config.anal +++ b/parm/config/config.anal @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.anal ########## # Analysis specific @@ -33,7 +33,7 @@ fi # Set parameters specific to L127 if [ $LEVS = "128" ]; then export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," - export SETUP="gpstop=55,nsig_ext=56,$SETUP" + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" fi # Set namelist option for LETKF diff --git a/parm/config/config.analcalc b/parm/config/config.analcalc index 5866ce5ac6..c02aafc2c3 100755 --- a/parm/config/config.analcalc +++ b/parm/config/config.analcalc @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.analcalc ########## # GFS post-anal specific (non-diag) diff --git a/parm/config/config.analdiag b/parm/config/config.analdiag index 285e614d37..7b128d3bad 100755 --- a/parm/config/config.analdiag +++ b/parm/config/config.analdiag @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash ########## config.analdiag ########## # GFS post-anal specific (diag) diff --git a/parm/config/config.arch b/parm/config/config.arch index fca519c414..c705e0b7ed 100755 --- a/parm/config/config.arch +++ b/parm/config/config.arch @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.arch ########## # Archive specific diff --git a/parm/config/config.awips b/parm/config/config.awips index 6167b91f7e..9003e9f6b0 100755 --- a/parm/config/config.awips +++ b/parm/config/config.awips @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.awips ########## # GFS awips step specific diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index e26eb35404..af18173d60 100755 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -1,4 +1,4 @@ -#!/bin/bash -x +#! /usr/bin/env bash ########## config.base ########## # Common to all steps diff --git a/parm/config/config.base.nco.static b/parm/config/config.base.nco.static index 7dae9d5dd9..4612e82814 100755 --- a/parm/config/config.base.nco.static +++ b/parm/config/config.base.nco.static @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.base ########## # Common to all steps diff --git a/parm/config/config.coupled_ic b/parm/config/config.coupled_ic index 15f15efa0c..0df82591d9 100755 --- a/parm/config/config.coupled_ic +++ b/parm/config/config.coupled_ic @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.coupled_ic ########## diff --git a/parm/config/config.defaults.s2sw b/parm/config/config.defaults.s2sw index a2926ffff5..5032a998ad 100644 --- a/parm/config/config.defaults.s2sw +++ b/parm/config/config.defaults.s2sw @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash # Empty variables must include a space otherwise they will be overwritten diff --git a/parm/config/config.earc b/parm/config/config.earc index 7cb1de235f..de73a93731 100755 --- a/parm/config/config.earc +++ b/parm/config/config.earc @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.earc ########## # Ensemble archive specific diff --git a/parm/config/config.ecen b/parm/config/config.ecen index c9609e3ff8..2b686c6b48 100755 --- a/parm/config/config.ecen +++ b/parm/config/config.ecen @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.ecen ########## # Ensemble recentering specific diff --git a/parm/config/config.echgres b/parm/config/config.echgres index cbf176c92f..478c6b4bcf 100755 --- a/parm/config/config.echgres +++ b/parm/config/config.echgres @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.echgres ########## # regrid full-res forecast for use in ensemble-res analysis generation diff --git a/parm/config/config.ediag b/parm/config/config.ediag index 192b5d0b48..12b142088d 100755 --- a/parm/config/config.ediag +++ b/parm/config/config.ediag @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.ediag ########## # GFS ensemble post-eobs specific diff --git a/parm/config/config.efcs b/parm/config/config.efcs index af84dc730f..a1f0fe49cc 100755 --- a/parm/config/config.efcs +++ b/parm/config/config.efcs @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.efcs ########## # Ensemble forecast specific, dependency: config.fcst diff --git a/parm/config/config.eobs b/parm/config/config.eobs index e46dde2f34..21f982addc 100755 --- a/parm/config/config.eobs +++ b/parm/config/config.eobs @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash ########## config.eobs config.eomg ########## # Ensemble innovation specific, dependency config.anal diff --git a/parm/config/config.epos b/parm/config/config.epos index 441a1ff995..8026a2ba2e 100755 --- a/parm/config/config.epos +++ b/parm/config/config.epos @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.epos ########## # Ensemble post processing specific diff --git a/parm/config/config.esfc b/parm/config/config.esfc index 53cbb09175..2bb3d48bb4 100755 --- a/parm/config/config.esfc +++ b/parm/config/config.esfc @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.esfc ########## # Ensemble surface specific diff --git a/parm/config/config.eupd b/parm/config/config.eupd index 0e9d42e093..1ac90d2b75 100755 --- a/parm/config/config.eupd +++ b/parm/config/config.eupd @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.eupd ########## # Ensemble update specific, dependency config.anal diff --git a/parm/config/config.fcst b/parm/config/config.fcst index e73c35d15d..2c380b9111 100755 --- a/parm/config/config.fcst +++ b/parm/config/config.fcst @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.fcst ########## # Forecast specific diff --git a/parm/config/config.fv3 b/parm/config/config.fv3 index 93f836f642..c8c959362e 100755 --- a/parm/config/config.fv3 +++ b/parm/config/config.fv3 @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.fv3 ########## # FV3 model resolution specific parameters diff --git a/parm/config/config.fv3.nco.static b/parm/config/config.fv3.nco.static index 619ef6399b..9181ca88e9 100755 --- a/parm/config/config.fv3.nco.static +++ b/parm/config/config.fv3.nco.static @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash ########## config.fv3 ########## # FV3 model resolution specific parameters diff --git a/parm/config/config.gempak b/parm/config/config.gempak index 2bc49dcf17..a2b5ecbaf5 100755 --- a/parm/config/config.gempak +++ b/parm/config/config.gempak @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.gempak ########## # GFS gempak step specific diff --git a/parm/config/config.getic b/parm/config/config.getic index 4671cc9c4a..fce3f9ecf6 100755 --- a/parm/config/config.getic +++ b/parm/config/config.getic @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.getic ########## # Fetching GFS initial conditions specific diff --git a/parm/config/config.gldas b/parm/config/config.gldas index 4410c2ceab..8d503d0368 100755 --- a/parm/config/config.gldas +++ b/parm/config/config.gldas @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.gldas ########## # GDAS gldas step specific diff --git a/parm/config/config.ice b/parm/config/config.ice index 101fc82452..3a6916600f 100644 --- a/parm/config/config.ice +++ b/parm/config/config.ice @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash export NX_GLB="1440" export NY_GLB="1080" diff --git a/parm/config/config.init b/parm/config/config.init index eeb04a7d40..2301b1cdc1 100755 --- a/parm/config/config.init +++ b/parm/config/config.init @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.init ########## # Prepare initial conditions diff --git a/parm/config/config.metp b/parm/config/config.metp index 2c39d9b431..4be7151ffa 100755 --- a/parm/config/config.metp +++ b/parm/config/config.metp @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.metp ########## # METplus verification step specific diff --git a/parm/config/config.nsst b/parm/config/config.nsst index ef8767834d..b4c58eedb3 100755 --- a/parm/config/config.nsst +++ b/parm/config/config.nsst @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash ########## config.nsst ########## # NSST specific diff --git a/parm/config/config.ocn b/parm/config/config.ocn index fbb750d90b..1675713e7c 100644 --- a/parm/config/config.ocn +++ b/parm/config/config.ocn @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash # OCNRES is currently being set in config.base # case "$CASE" in diff --git a/parm/config/config.ocnpost b/parm/config/config.ocnpost index b734bab758..89304df7f4 100755 --- a/parm/config/config.ocnpost +++ b/parm/config/config.ocnpost @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.ocnpost ########## diff --git a/parm/config/config.post b/parm/config/config.post index 2ca6c3d753..a545f0fafc 100755 --- a/parm/config/config.post +++ b/parm/config/config.post @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.post ########## # Post specific diff --git a/parm/config/config.postsnd b/parm/config/config.postsnd index d64b401cdb..53d66bf4f6 100755 --- a/parm/config/config.postsnd +++ b/parm/config/config.postsnd @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.postsnd ########## # GFS bufr sounding step specific diff --git a/parm/config/config.prep b/parm/config/config.prep index c04be70a09..ac172bf5b8 100755 --- a/parm/config/config.prep +++ b/parm/config/config.prep @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.prep ########## # Prep step specific diff --git a/parm/config/config.prepbufr b/parm/config/config.prepbufr index b86cb89d12..2d6ececc5b 100755 --- a/parm/config/config.prepbufr +++ b/parm/config/config.prepbufr @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.prepbufr ########## # PREPBUFR specific configuration diff --git a/parm/config/config.resources b/parm/config/config.resources index cda6f2edce..b2390970ae 100755 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash ########## config.resources ########## # Set resource information for job tasks @@ -563,7 +563,7 @@ elif [ $step = "awips" ]; then elif [ $step = "gempak" ]; then export wtime_gempak="02:00:00" - export npe_gempak=17 + export npe_gempak=28 export npe_node_gempak=4 export nth_gempak=3 diff --git a/parm/config/config.sfcanl b/parm/config/config.sfcanl index 76da647eef..9592fb77c9 100644 --- a/parm/config/config.sfcanl +++ b/parm/config/config.sfcanl @@ -1,4 +1,4 @@ -#!/bin/bash -x +#! /usr/bin/env bash ########## config.sfcanl ########## # GFS surface analysis specific diff --git a/parm/config/config.vrfy b/parm/config/config.vrfy index cd3b7150ce..312c6fe9ad 100755 --- a/parm/config/config.vrfy +++ b/parm/config/config.vrfy @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.vrfy ########## # Verification step specific diff --git a/parm/config/config.wafs b/parm/config/config.wafs index 8bf5577030..fe2ba8cae7 100755 --- a/parm/config/config.wafs +++ b/parm/config/config.wafs @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wafs ########## diff --git a/parm/config/config.wafsblending b/parm/config/config.wafsblending index dfd7d1715e..e49ffbdb88 100755 --- a/parm/config/config.wafsblending +++ b/parm/config/config.wafsblending @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wafsblending ########## diff --git a/parm/config/config.wafsblending0p25 b/parm/config/config.wafsblending0p25 index 28a2de90ff..947baab2bb 100755 --- a/parm/config/config.wafsblending0p25 +++ b/parm/config/config.wafsblending0p25 @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wafsblending0p25 ########## diff --git a/parm/config/config.wafsgcip b/parm/config/config.wafsgcip index 793dae1694..4909795c30 100755 --- a/parm/config/config.wafsgcip +++ b/parm/config/config.wafsgcip @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash ########## config.wafsgcip ########## diff --git a/parm/config/config.wafsgrib2 b/parm/config/config.wafsgrib2 index 27b137cd8c..0d657788e0 100755 --- a/parm/config/config.wafsgrib2 +++ b/parm/config/config.wafsgrib2 @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wafsgrib2 ########## # Post specific diff --git a/parm/config/config.wafsgrib20p25 b/parm/config/config.wafsgrib20p25 index 8b55333c00..40cf80df22 100755 --- a/parm/config/config.wafsgrib20p25 +++ b/parm/config/config.wafsgrib20p25 @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wafsgrib20p25 ########## diff --git a/parm/config/config.wave b/parm/config/config.wave index a3b06a8041..f69adda3ec 100755 --- a/parm/config/config.wave +++ b/parm/config/config.wave @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wave ########## # Wave steps specific diff --git a/parm/config/config.waveawipsbulls b/parm/config/config.waveawipsbulls index ec39bfb646..e3748e9cd1 100755 --- a/parm/config/config.waveawipsbulls +++ b/parm/config/config.waveawipsbulls @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.waveawipsbulls ########## # Wave steps specific diff --git a/parm/config/config.waveawipsgridded b/parm/config/config.waveawipsgridded index 7f2972bb24..e84352558e 100755 --- a/parm/config/config.waveawipsgridded +++ b/parm/config/config.waveawipsgridded @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.waveawipsgridded ########## # Wave steps specific diff --git a/parm/config/config.wavegempak b/parm/config/config.wavegempak index ec1f59d25c..66af59f2a4 100755 --- a/parm/config/config.wavegempak +++ b/parm/config/config.wavegempak @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash ########## config.wavegempak ########## # Wave steps specific diff --git a/parm/config/config.waveinit b/parm/config/config.waveinit index 93960e5e25..61715f7f01 100755 --- a/parm/config/config.waveinit +++ b/parm/config/config.waveinit @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.waveinit ########## # Wave steps specific diff --git a/parm/config/config.wavepostbndpnt b/parm/config/config.wavepostbndpnt index eb3bb72ef8..eaa1626e62 100755 --- a/parm/config/config.wavepostbndpnt +++ b/parm/config/config.wavepostbndpnt @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wavepostbndpnt ########## # Wave steps specific diff --git a/parm/config/config.wavepostbndpntbll b/parm/config/config.wavepostbndpntbll index d26d70fa7a..bb7224cc70 100755 --- a/parm/config/config.wavepostbndpntbll +++ b/parm/config/config.wavepostbndpntbll @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wavepostbndpntbll ########## # Wave steps specific diff --git a/parm/config/config.wavepostpnt b/parm/config/config.wavepostpnt index 276ca230a6..8befb91760 100755 --- a/parm/config/config.wavepostpnt +++ b/parm/config/config.wavepostpnt @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wavepostpnt ########## # Wave steps specific diff --git a/parm/config/config.wavepostsbs b/parm/config/config.wavepostsbs index 7eea92f100..12880dd020 100755 --- a/parm/config/config.wavepostsbs +++ b/parm/config/config.wavepostsbs @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.wavepostsbs ########## # Wave steps specific diff --git a/parm/config/config.waveprep b/parm/config/config.waveprep index 1a9770bf4e..1c9a40c1d8 100755 --- a/parm/config/config.waveprep +++ b/parm/config/config.waveprep @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! 
/usr/bin/env bash ########## config.waveprep ########## # Wave steps specific diff --git a/scripts/exgdas_atmos_chgres_forenkf.sh b/scripts/exgdas_atmos_chgres_forenkf.sh index 97f323b00f..afc7cc9f5e 100755 --- a/scripts/exgdas_atmos_chgres_forenkf.sh +++ b/scripts/exgdas_atmos_chgres_forenkf.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#! /usr/bin/env bash ################################################################################ #### UNIX Script Documentation Block # . . @@ -17,12 +17,7 @@ # ################################################################################ -# Set environment. -export VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -149,7 +144,7 @@ if [ $DO_CALC_ANALYSIS == "YES" ]; then [[ -f $DATA/mp_chgres.sh ]] && rm $DATA/mp_chgres.sh fi - nfhrs=`echo $IAUFHRS_ENKF | sed 's/,/ /g'` + nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') for FHR in $nfhrs; do echo "Regridding deterministic forecast for forecast hour $FHR" rm -f chgres_nc_gauss0$FHR.nml @@ -203,8 +198,5 @@ fi cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err diff --git a/scripts/exgdas_atmos_gempak_gif_ncdc.sh b/scripts/exgdas_atmos_gempak_gif_ncdc.sh index 884ae1cf25..3671d5511f 100755 --- a/scripts/exgdas_atmos_gempak_gif_ncdc.sh +++ b/scripts/exgdas_atmos_gempak_gif_ncdc.sh @@ -1,16 +1,14 @@ -#!/bin/sh +#! /usr/bin/env bash ############################################################## # Add the NCDC GIF processing to the end of the gempak_gif job # There is no timing issue with the NCDC GIF, so it is # okay to just add it here. If timing becomes a problem # in the future, we should move it above somewhere else. 
############################################################## -export PS4='exgempakgif_ncdc:$SECONDS + ' -set -xa + +source "$HOMEgfs/ush/preamble.sh" cd $DATA -msg="The NCDC GIF processing has begun" -postmsg "$jlogfile" "$msg" export NTS=$USHgempak/restore @@ -60,4 +58,5 @@ then done fi + exit diff --git a/scripts/exgdas_atmos_nawips.sh b/scripts/exgdas_atmos_nawips.sh index 48146edf60..4836065aa7 100755 --- a/scripts/exgdas_atmos_nawips.sh +++ b/scripts/exgdas_atmos_nawips.sh @@ -1,30 +1,26 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################### -echo "----------------------------------------------------" -echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -echo "----------------------------------------------------" -echo "History: Mar 2000 - First implementation of this new script." -echo "S Lilly: May 2008 - add logic to make sure that all of the " -echo " data produced from the restricted ECMWF" -echo " data on the CCS is properly protected." +# echo "----------------------------------------------------" +# echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" +# echo "----------------------------------------------------" +# echo "History: Mar 2000 - First implementation of this new script." +# echo "S Lilly: May 2008 - add logic to make sure that all of the " +# echo " data produced from the restricted ECMWF" +# echo " data on the CCS is properly protected." ##################################################################### -set -xa +source "$HOMEgfs/ush/preamble.sh" "${2}" cd $DATA RUN=$1 fend=$2 DBN_ALERT_TYPE=$3 -export 'PS4=$RUN:$SECONDS + ' - DATA_RUN=$DATA/$RUN mkdir -p $DATA_RUN cd $DATA_RUN -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl export err=$? 
if [[ $err -ne 0 ]] ; then @@ -74,13 +70,10 @@ pdsext=no maxtries=180 fhcnt=$fstart while [ $fhcnt -le $fend ] ; do - typeset -Z3 fhr - - fhr=$fhcnt + fhr=$(printf "%03d" $fhcnt) fhcnt3=$(expr $fhr % 3) - fhr3=$fhcnt - typeset -Z3 fhr3 + fhr3=$(printf "%03d" $fhcnt) GEMGRD=${RUN}_${PDY}${cyc}f${fhr3} @@ -173,16 +166,6 @@ done $GEMEXE/gpend ##################################################################### -# GOOD RUN -set +x -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -set -x -##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgdas_atmos_nceppost.sh b/scripts/exgdas_atmos_nceppost.sh index 403cdb4bca..005911db6a 100755 --- a/scripts/exgdas_atmos_nceppost.sh +++ b/scripts/exgdas_atmos_nceppost.sh @@ -1,34 +1,33 @@ +#! /usr/bin/env bash + ##################################################################### -echo "-----------------------------------------------------" -echo " exgdas_nceppost.sh" -echo " Sep 07 - Chuang - Modified script to run unified post" -echo " July 14 - Carlis - Changed to 0.25 deg grib2 master file" -echo " Feb 16 - Lin - Modify to use Vertical Structure" -echo " Aug 17 - Meng - Modify to use 3-digit forecast hour naming" -echo " master and flux files" -echo " Dec 17 - Meng - Link sfc data file to flxfile " -echo " since fv3gfs does not output sfc files any more." -echo " Dec 17 - Meng - Add fv3gfs_downstream_nems.sh for pgb processing " -echo " and remove writing data file to /nwges" -echo " Jan 18 - Meng - For EE2 standard, move IDRT POSTGPVARS setting" -echo " from j-job script." -echo " Feb 18 - Meng - Removed legacy setting for generating grib1 data" -echo " and reading sigio model outputs." 
-echo " Aug 20 - Meng - Remove .ecf extentsion per EE2 review." -echo " Sep 20 - Meng - Update clean up files per EE2 review." -echo " Mar 21 - Meng - Update POSTGRB2TBL default setting." -echo " Oct 21 - Meng - Remove jlogfile for wcoss2 transition." -echo " Feb 22 - Lin - Exception handling if anl input not found." -echo "-----------------------------------------------------" +# echo "-----------------------------------------------------" +# echo " exgdas_nceppost.sh" +# echo " Sep 07 - Chuang - Modified script to run unified post" +# echo " July 14 - Carlis - Changed to 0.25 deg grib2 master file" +# echo " Feb 16 - Lin - Modify to use Vertical Structure" +# echo " Aug 17 - Meng - Modify to use 3-digit forecast hour naming" +# echo " master and flux files" +# echo " Dec 17 - Meng - Link sfc data file to flxfile " +# echo " since fv3gfs does not output sfc files any more." +# echo " Dec 17 - Meng - Add fv3gfs_downstream_nems.sh for pgb processing " +# echo " and remove writing data file to /nwges" +# echo " Jan 18 - Meng - For EE2 standard, move IDRT POSTGPVARS setting" +# echo " from j-job script." +# echo " Feb 18 - Meng - Removed legacy setting for generating grib1 data" +# echo " and reading sigio model outputs." +# echo " Aug 20 - Meng - Remove .ecf extentsion per EE2 review." +# echo " Sep 20 - Meng - Update clean up files per EE2 review." +# echo " Mar 21 - Meng - Update POSTGRB2TBL default setting." +# echo " Oct 21 - Meng - Remove jlogfile for wcoss2 transition." +# echo " Feb 22 - Lin - Exception handling if anl input not found." 
+# echo "-----------------------------------------------------" ##################################################################### -set -x +source "$HOMEgfs/ush/preamble.sh" cd $DATA -msg="HAS BEGUN on $(hostname)" -postmsg "$msg" - export POSTGPSH=${POSTGPSH:-$USHgfs/gfs_nceppost.sh} export GFSDOWNSH=${GFSDOWNSH:-$USHgfs/fv3gfs_downstream_nems.sh} export GFSDWNSH=${GFSDWNSH:-$USHgfs/fv3gfs_dwn_nems.sh} @@ -185,7 +184,6 @@ else ## not_anl if_stimes # Start Looping for the # existence of the restart files ############################### - set -x export pgm="postcheck" ic=1 while [ $ic -le $SLEEP_LOOP_MAX ]; do @@ -206,10 +204,6 @@ else ## not_anl if_stimes err_chk fi done - set -x - - msg="Starting post for fhr=$fhr" - postmsg "$msg" ############################### # Put restart files into /nwges @@ -351,10 +345,6 @@ else ## not_anl if_stimes done fi ## end_if_times -#cat $pgmout -#msg='ENDED NORMALLY.' -#postmsg "$jlogfile" "$msg" - exit 0 ################## END OF SCRIPT ####################### diff --git a/scripts/exgdas_efsoi.sh b/scripts/exgdas_efsoi.sh index f730634c83..95d20bb46e 100755 --- a/scripts/exgdas_efsoi.sh +++ b/scripts/exgdas_efsoi.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -16,12 +17,7 @@ # ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. 
pwd=$(pwd) @@ -140,7 +136,7 @@ $NLN $VLOCALEIG vlocal_eig.dat ################################################################################ # Ensemble guess, observational data and analyses/increments -nfhrs=`echo $IAUFHRS_ENKF | sed 's/,/ /g'` +nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') for imem in $(seq 1 $NMEM_ENKF); do memchar="mem"$(printf %03i $imem) mkdir ${memchar} @@ -257,8 +253,6 @@ $NCP osense_${CDATE}.dat $OSENSE_SAVE_DIR/$OSENSEOUT cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + + exit $err diff --git a/scripts/exgdas_efsoi_update.sh b/scripts/exgdas_efsoi_update.sh index cf1542cc57..7e755f4ecf 100755 --- a/scripts/exgdas_efsoi_update.sh +++ b/scripts/exgdas_efsoi_update.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -17,12 +18,7 @@ # ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -174,7 +170,6 @@ for ftype in \$flist; do tar -xvf \$fname done EOFuntar - set -x chmod 755 $DATA/untar.sh fi @@ -193,7 +188,7 @@ else tar -xvf $fname done fi -nfhrs=`echo $IAUFHRS_ENKF | sed 's/,/ /g'` +nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') for imem in $(seq 1 $NMEM_ENKF); do memchar="mem"$(printf %03i $imem) if [ $lobsdiag_forenkf = ".false." 
]; then @@ -421,8 +416,6 @@ cat stdout stderr > $COMOUT_ANL_ENSFSOI/$ENKFSTAT cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + + exit $err diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh index eaa120ca47..91e7483be9 100755 --- a/scripts/exgdas_enkf_ecen.sh +++ b/scripts/exgdas_enkf_ecen.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -16,12 +17,7 @@ # ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -369,8 +365,6 @@ done # loop over analysis times in window # Postprocessing cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + + exit $err diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh index 2dfcaa419a..cd79681887 100755 --- a/scripts/exgdas_enkf_fcst.sh +++ b/scripts/exgdas_enkf_fcst.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -17,12 +18,7 @@ #### ################################################################################ -# Set environment. -export VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -176,7 +172,7 @@ for imem in $(seq $ENSBEG $ENSEND); do err_exit "FATAL ERROR: forecast of member $cmem FAILED. 
Aborting job" fi - ((rc+=ra)) + rc=$((rc+ra)) fi @@ -227,8 +223,6 @@ export err=$rc; err_chk # Postprocessing cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATATOP -set +x -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXITING $0 with return code $err >&2 -fi + + exit $err diff --git a/scripts/exgdas_enkf_post.sh b/scripts/exgdas_enkf_post.sh index 5e8ecc7298..2ef2895d19 100755 --- a/scripts/exgdas_enkf_post.sh +++ b/scripts/exgdas_enkf_post.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -16,12 +17,7 @@ # ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -112,7 +108,7 @@ for fhr in $(seq $FHMIN $FHOUT $FHMAX); do $APRUN_EPOS ${DATA}/$(basename $GETSFCENSMEANEXEC) ./ sfcf${fhrchar}.ensmean sfcf${fhrchar} $NMEM_ENKF ra=$? - ((rc+=ra)) + rc=$((rc+ra)) export_pgm=$GETATMENSMEANEXEC . prep_step @@ -123,7 +119,7 @@ for fhr in $(seq $FHMIN $FHOUT $FHMAX); do $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENKF fi ra=$? - ((rc+=ra)) + rc=$((rc+ra)) done export err=$rc; err_chk @@ -161,8 +157,6 @@ fi # Postprocessing cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + + exit $err diff --git a/scripts/exgdas_enkf_select_obs.sh b/scripts/exgdas_enkf_select_obs.sh index 488bec6c2a..92e1fd8c60 100755 --- a/scripts/exgdas_enkf_select_obs.sh +++ b/scripts/exgdas_enkf_select_obs.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . 
@@ -16,12 +17,7 @@ # ################################################################################ -# Set environment. -export VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -119,8 +115,6 @@ export err=$?; err_chk # Postprocessing cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + + exit $err diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 4595f84324..4589a59356 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -16,12 +17,7 @@ # ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -196,8 +192,6 @@ fi # Postprocessing cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + + exit $err diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh index c76cc47931..422b2e54e2 100755 --- a/scripts/exgdas_enkf_update.sh +++ b/scripts/exgdas_enkf_update.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -16,12 +17,7 @@ # ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories. 
pwd=$(pwd) @@ -171,7 +167,6 @@ for ftype in \$flist; do tar -xvf \$fname done EOFuntar - set -x chmod 755 $DATA/untar.sh fi @@ -190,7 +185,7 @@ else tar -xvf $fname done fi -nfhrs=`echo $IAUFHRS_ENKF | sed 's/,/ /g'` +nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') for imem in $(seq 1 $NMEM_ENKF); do memchar="mem"$(printf %03i $imem) if [ $lobsdiag_forenkf = ".false." ]; then @@ -398,8 +393,6 @@ cat stdout stderr > $COMOUT_ANL_ENS/$ENKFSTAT # Postprocessing cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + + exit $err diff --git a/scripts/exgdas_global_atmos_analysis_run.sh b/scripts/exgdas_global_atmos_analysis_run.sh index b5aaf24cbc..0f2edbd746 100755 --- a/scripts/exgdas_global_atmos_analysis_run.sh +++ b/scripts/exgdas_global_atmos_analysis_run.sh @@ -19,11 +19,7 @@ ################################################################################ # Set environment. -export VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories pwd=$(pwd) @@ -155,7 +151,7 @@ err_chk ################################################################################ # Create log file noting creating of analysis increment file -echo "$CDUMP $CDATE atminc and tiled sfcanl done at `date`" > $COMOUT/${CDUMP}.${cycle}.loginc.txt +echo "$CDUMP $CDATE atminc and tiled sfcanl done at $(date)" > $COMOUT/${CDUMP}.${cycle}.loginc.txt ################################################################################ # Copy diags and YAML to $COMOUT @@ -172,10 +168,7 @@ for abias in $alist; do done ################################################################################ -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err ################################################################################ diff --git 
a/scripts/exgdas_global_atmos_ensanal_run.sh b/scripts/exgdas_global_atmos_ensanal_run.sh index 27f3339b58..68d53d3695 100755 --- a/scripts/exgdas_global_atmos_ensanal_run.sh +++ b/scripts/exgdas_global_atmos_ensanal_run.sh @@ -19,11 +19,7 @@ ################################################################################ # Set environment. -export VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" # Directories pwd=$(pwd) @@ -154,7 +150,7 @@ done ################################################################################ # Create log file noting creating of analysis increment file -echo "$CDUMP $CDATE atminc done at `date`" > $COMOUT_ENS/${CDUMP}.${cycle}.loginc.txt +echo "$CDUMP $CDATE atminc done at $(date)" > $COMOUT_ENS/${CDUMP}.${cycle}.loginc.txt ################################################################################ # Copy diags and YAML to $COMOUT @@ -163,10 +159,7 @@ cp -rf $DATA/diags $COMOUT_ENS/ ################################################################################ -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err ################################################################################ diff --git a/scripts/exgfs_atmos_awips_20km_1p0deg.sh b/scripts/exgfs_atmos_awips_20km_1p0deg.sh index 1f4414c1b4..3f9f84f237 100755 --- a/scripts/exgfs_atmos_awips_20km_1p0deg.sh +++ b/scripts/exgfs_atmos_awips_20km_1p0deg.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! 
/usr/bin/env bash + ############################################################################## # UTILITY SCRIPT NAME : exgfs_awips_20km_1p0deg.sh # DATE WRITTEN : 11/01/2017 @@ -10,13 +11,16 @@ # 1st argument - Forecast Hour - format of 3I (3 digits) # ############################################################################### -echo "------------------------------------------------" -echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing" -echo "------------------------------------------------" -echo "History: NOV 2017 - First implementation of this new script to " -echo " process GFS AWIPS 20km and 1.0 deg grids products " -echo " " +# echo "------------------------------------------------" +# echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing" +# echo "------------------------------------------------" +# echo "History: NOV 2017 - First implementation of this new script to " +# echo " process GFS AWIPS 20km and 1.0 deg grids products " +# echo " " ############################################################################### + +source "$HOMEgfs/ush/preamble.sh" + fcsthrs="$1" num=$# job_name=$(echo $job|sed 's/[jpt]gfs/gfs/') @@ -38,8 +42,6 @@ fi cd $DATA -set -x - ############################################### # Wait for the availability of the pgrb file ############################################### @@ -60,9 +62,6 @@ do fi done -######################################## -msg="HAS BEGUN!" 
-postmsg "$jlogfile" "$msg" ######################################## echo " ------------------------------------------" @@ -75,7 +74,7 @@ echo "#######################################" echo " Process GRIB AWIP GRIB2 PRODUCTS " echo "#######################################" echo " " -set -x +${TRACE_ON:-set -x} # Set type of Interpolation for WGRIB2 export opt1=' -set_grib_type same -new_grid_winds earth ' @@ -251,16 +250,5 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi -############################################################################################ -# GOOD RUN -set +x -echo "**************JOB EXGFS_AWIPS_20KM_1P0DEG.SH.ECF COMPLETED NORMALLY ON THE WCOSS" -echo "**************JOB EXGFS_AWIPS_20KM_1P0DEG.SH.ECF COMPLETED NORMALLY ON THE WCOSS" -echo "**************JOB EXGFS_AWIPS_20KM_1P0DEG.SH.ECF COMPLETED NORMALLY ON THE WCOSS" -set -x -############################################################################################ - -msg="HAS COMPLETED NORMALLY!" -postmsg "$jlogfile" "$msg" ############## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_fbwind.sh b/scripts/exgfs_atmos_fbwind.sh index 2ca5870cbf..a4ecd248f0 100755 --- a/scripts/exgfs_atmos_fbwind.sh +++ b/scripts/exgfs_atmos_fbwind.sh @@ -1,27 +1,27 @@ -#!/bin/ksh -echo "------------------------------------------------" -echo "JGFS_BULLS - 24hr GFS processing" -echo "------------------------------------------------" -echo "History: Jul 2004 - First implementation of this new script." -echo " FBWNDGFS (FB Winds) program for 15 sites outside" -echo " the Hawaiian Islands." -echo " Feb 2006 - L Sager Send bulletins to TOC via NTC. " -echo " Jul 2014 - B Vuong Modified to use GFS master GRIB2" -echo " and Add bulletins WINTEMV process." -echo " Sep 2016 - B Vuong Modified to use GFS 0p25 deg GRIB2" -echo " Nov 2019 - B Vuong Removed WINTEMV bulletin (retired)" +#! 
/usr/bin/env bash + +##################################################################### +# echo "------------------------------------------------" +# echo "JGFS_BULLS - 24hr GFS processing" +# echo "------------------------------------------------" +# echo "History: Jul 2004 - First implementation of this new script." +# echo " FBWNDGFS (FB Winds) program for 15 sites outside" +# echo " the Hawaiian Islands." +# echo " Feb 2006 - L Sager Send bulletins to TOC via NTC. " +# echo " Jul 2014 - B Vuong Modified to use GFS master GRIB2" +# echo " and Add bulletins WINTEMV process." +# echo " Sep 2016 - B Vuong Modified to use GFS 0p25 deg GRIB2" +# echo " Nov 2019 - B Vuong Removed WINTEMV bulletin (retired)" ##################################################################### +source "$HOMEgfs/ush/preamble.sh" + cd $DATA ###################### # Set up Here Files. ###################### -set -x -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - job_name=$(echo $job|sed 's/[jpt]gfs/gfs/') set +x @@ -31,7 +31,7 @@ echo " Process Bulletins of forecast winds and temps for Hawaii " echo " and 15 sites outside of the Hawaiian Islands. " echo "#############################################################" echo " " -set -x +${TRACE_ON:-set -x} export pgm=bulls_fbwndgfs . prep_step @@ -82,35 +82,7 @@ then ${UTILgfs}/ush/make_ntc_bull.pl WMOBH NONE KWNO NONE tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name fi -# -# EMC is proposing to retire WINTEMV bulletin in GFS v16.0 -# - -# if test ${cycle} = 't00z' -o ${cycle} = 't12z' -# then -# -# set +x -# echo " " -# echo "#################################################" -# echo " Process 06, 12, 18 and 24 fcsthrs WINTEM Bulletins. 
" -# echo "#################################################" -# echo " " -# set -x -# sh $USHgfs/mkwintem.sh -# -#fi - -##################################################################### -# GOOD RUN -set +x -echo "**************JOB JGFS_FBWIND COMPLETED NORMALLY ON IBM-SP" -echo "**************JOB JGFS_FBWIND COMPLETED NORMALLY ON IBM-SP" -echo "**************JOB JGFS_FBWIND COMPLETED NORMALLY ON IBM-SP" -set -x ##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh b/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh index 4de9d33736..394c5c30d8 100755 --- a/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh +++ b/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh @@ -1,16 +1,15 @@ -#!/bin/sh +#! /usr/bin/env bash + ############################################################## # Add the NCDC GIF processing to the end of the gempak_gif job # There is no timing issue with the NCDC GIF, so it is # okay to just add it here. If timing becomes a problem # in the future, we should move it above somewhere else. 
############################################################## -export PS4='exgempakgif_ncdc_skewt:$SECONDS + ' -set -xa + +source "$HOMEgfs/ush/preamble.sh" cd $DATA -msg="The NCDC GIF processing has begun" -postmsg "$jlogfile" "$msg" export NTS=$USHgempak/restore @@ -58,21 +57,19 @@ then fi #################################################################################### -echo "-----------------------------------------------------------------------------" -echo "GFS MAG postprocessing script exmag_sigman_skew_k_gfs_gif_ncdc_skew_t.sh " -echo "-----------------------------------------------------------------------------" -echo "History: Mar 2012 added to processing for enhanced MAG skew_t" -echo "2012-03-11 Mabe -- reworked script to add significant level " -echo " data to existing mandatory level data in a new file" -echo "2013-04-24 Mabe -- Reworked to remove unneeded output with " -echo " conversion to WCOSS" +# echo "-----------------------------------------------------------------------------" +# echo "GFS MAG postprocessing script exmag_sigman_skew_k_gfs_gif_ncdc_skew_t.sh " +# echo "-----------------------------------------------------------------------------" +# echo "History: Mar 2012 added to processing for enhanced MAG skew_t" +# echo "2012-03-11 Mabe -- reworked script to add significant level " +# echo " data to existing mandatory level data in a new file" +# echo "2013-04-24 Mabe -- Reworked to remove unneeded output with " +# echo " conversion to WCOSS" # Add ms to filename to make it different since it has both mandatory # and significant level data $COMOUT/${RUN}.${cycle}.msupperair # $COMOUT/${RUN}.${cycle}.msupperairtble ##################################################################################### -set -x - cd $DATA export RSHPDY=$(echo $PDY | cut -c5-)$(echo $PDY | cut -c3-4) @@ -112,14 +109,10 @@ fi fi ############################################################ -# GOOD RUN -set +x -echo "********** JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF 
COMPLETED" -set -x -############################################################ + if [ -e "$pgmout" ] ; then cat $pgmout fi -msg="HAS COMPLETED NORMALLY!" + exit diff --git a/scripts/exgfs_atmos_gempak_meta.sh b/scripts/exgfs_atmos_gempak_meta.sh index dfd5b38392..cb64138c61 100755 --- a/scripts/exgfs_atmos_gempak_meta.sh +++ b/scripts/exgfs_atmos_gempak_meta.sh @@ -1,9 +1,6 @@ -#!/bin/ksh +#! /usr/bin/env bash -set -x - -msg="JOB $job HAS BEGUN" -postmsg "$jlogfile" "$msg" +source "$HOMEgfs/ush/preamble.sh" cd $DATA @@ -14,13 +11,13 @@ fhr=$fhend export numproc=23 while [ $fhr -ge $fhbeg ] ; do - typeset -Z3 fhr - ls -l $COMIN/$GEMGRD1${fhr} - err1=$? - if [ $err1 -eq 0 -o $fhr -eq $fhbeg ] ; then - break - fi - fhr=$(expr $fhr - $fhinc) + fhr=$(printf "%03d" $fhr) + ls -l $COMIN/$GEMGRD1${fhr} + err1=$? + if [ $err1 -eq 0 -o $fhr -eq $fhbeg ] ; then + break + fi + fhr=$(expr $fhr - $fhinc) done maxtries=180 @@ -73,9 +70,7 @@ do rm $DATA/poescript # fi - if [ $fhr -lt 100 ] ; then - typeset -Z2 fhr - fi + fhr=$(printf "%02d" $fhr) if [ $do_all -eq 1 ] ; then do_all=0 @@ -110,7 +105,7 @@ do # If this is the final fcst hour, alert the # file to all centers. 
# - if [ $fhr -ge $fhend ] ; then + if [ 10#$fhr -ge $fhend ] ; then export DBN_ALERT_TYPE=GFS_METAFILE_LAST fi @@ -130,7 +125,7 @@ do $APRUNCFP $DATA/poescript export err=$?; err_chk - typeset -Z3 fhr + fhr=$(printf "%03d" $fhr) if [ $fhr -eq 126 ] ; then let fhr=fhr+6 else @@ -139,14 +134,7 @@ do done ##################################################################### -# GOOD RUN -set +x -echo "**************JOB GFS_META COMPLETED NORMALLY on the IBM-SP" -echo "**************JOB GFS_META COMPLETED NORMALLY on the IBM-SP" -echo "**************JOB GFS_META COMPLETED NORMALLY on the IBM-SP" -set -x -##################################################################### -echo EXITING $0 + exit # diff --git a/scripts/exgfs_atmos_goes_nawips.sh b/scripts/exgfs_atmos_goes_nawips.sh index c18f2b8b42..7aae2e143c 100755 --- a/scripts/exgfs_atmos_goes_nawips.sh +++ b/scripts/exgfs_atmos_goes_nawips.sh @@ -1,16 +1,17 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################### -echo "----------------------------------------------------" -echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -echo "----------------------------------------------------" -echo "History: Mar 2000 - First implementation of this new script." -echo "S Lilly: May 2008 - add logic to make sure that all of the " -echo " data produced from the restricted ECMWF" -echo " data on the CCS is properly protected." -echo "C. Magee: 10/2013 - swap X and Y for rtgssthr Atl and Pac." +# echo "----------------------------------------------------" +# echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" +# echo "----------------------------------------------------" +# echo "History: Mar 2000 - First implementation of this new script." +# echo "S Lilly: May 2008 - add logic to make sure that all of the " +# echo " data produced from the restricted ECMWF" +# echo " data on the CCS is properly protected." +# echo "C. 
Magee: 10/2013 - swap X and Y for rtgssthr Atl and Pac." ##################################################################### -set -xa +source "$HOMEgfs/ush/preamble.sh" cd $DATA @@ -19,9 +20,6 @@ cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - # # NAGRIB_TABLE=$FIXgempak/nagrib.tbl NAGRIB=$GEMEXE/nagrib2 @@ -53,16 +51,10 @@ pdsext=no maxtries=180 fhcnt=$fstart while [ $fhcnt -le $fend ] ; do -# if [ $fhcnt -ge 100 ] ; then - typeset -Z3 fhr -# else -# typeset -Z2 fhr -# fi - fhr=$fhcnt + fhr=$(printf "%03d" $fhcnt) fhcnt3=$(expr $fhr % 3) - fhr3=$fhcnt - typeset -Z3 fhr3 + fhr3=$(printf "03d" $fhcnt) GRIBIN=$COMIN/${model}.${cycle}.${GRIB}${fhr}${EXT} GEMGRD=${RUN}_${PDY}${cyc}f${fhr3} @@ -126,17 +118,7 @@ EOF let fhcnt=fhcnt+finc done -##################################################################### -# GOOD RUN -set +x -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -set -x ##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_grib2_special_npoess.sh b/scripts/exgfs_atmos_grib2_special_npoess.sh index 1e81f9815b..ad24bf6435 100755 --- a/scripts/exgfs_atmos_grib2_special_npoess.sh +++ b/scripts/exgfs_atmos_grib2_special_npoess.sh @@ -1,18 +1,16 @@ -#!/bin/ksh +#! /usr/bin/env bash + ##################################################################### -echo "-----------------------------------------------------" -echo " exglobal_grib2_special_npoess.sh" -echo " Jan 2008 - Chuang - Produces 1x1 degree special Grib from master." 
-echo "-----------------------------------------------------" +# echo "-----------------------------------------------------" +# echo " exglobal_grib2_special_npoess.sh" +# echo " Jan 2008 - Chuang - Produces 1x1 degree special Grib from master." +# echo "-----------------------------------------------------" ##################################################################### -set -x +source "$HOMEgfs/ush/preamble.sh" cd $DATA -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" - ############################################################ # Define Variables: # ----------------- @@ -49,12 +47,12 @@ SLEEP_LOOP_MAX=$(expr $SLEEP_TIME / $SLEEP_INT) ############################################################################## export SHOUR=000 export FHOUR=024 -export fhr=$SHOUR -typeset -Z3 fhr +export fhr=$(printf "%03d" $SHOUR) + ############################################################ # Loop Through the Post Forecast Files ############################################################ -while test $fhr -le $FHOUR +while test 10#$fhr -le $FHOUR do ############################### @@ -87,10 +85,6 @@ do ###################################################################### # Process Global NPOESS 0.50 GFS GRID PRODUCTS IN GRIB2 F000 - F024 # ###################################################################### - set -x - msg="Starting half degree grib generation for fhr=$fhr" - postmsg "$jlogfile" "$msg" - paramlist=${PARMproduct}/global_npoess_paramlist_g2 cp $COMIN/gfs.t${cyc}z.pgrb2.0p50.f${fhr} tmpfile2 cp $COMIN/gfs.t${cyc}z.pgrb2b.0p50.f${fhr} tmpfile2b @@ -112,8 +106,7 @@ do echo "$PDY$cyc$fhr" > $COMOUT/${RUN}.t${cyc}z.control.halfdeg.npoess fi rm tmpfile pgb2file - export fhr=$(expr $fhr + $FHINC) - typeset -Z3 fhr + export fhr=$(printf "%03d" $(expr $fhr + $FHINC)) done @@ -122,14 +115,13 @@ done ################################################################ export SHOUR=000 export FHOUR=180 -export fhr=$SHOUR -typeset -Z3 fhr +export 
fhr=$(printf "%03d" $SHOUR) ################################# # Process GFS PGRB2_SPECIAL_POST ################################# -while test $fhr -le $FHOUR +while test 10#$fhr -le $FHOUR do ############################### # Start Looping for the @@ -158,10 +150,7 @@ do err_chk fi done - set -x - - msg="Starting special grib file generation for fhr=$fhr" - postmsg "$jlogfile" "$msg" + ${TRACE_ON:-set -x} ############################### # Put restart files into /nwges @@ -208,13 +197,8 @@ do export fhour=$(expr ${fhr} % 6 ) fi - export fhr=$(expr $fhr + $FHINC) - typeset -Z3 fhr + export fhr=$(printf "%03d" $(expr $fhr + $FHINC)) done -######################################################## - -msg='ENDED NORMALLY.' -postmsg "$jlogfile" "$msg" ################## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_grib_awips.sh b/scripts/exgfs_atmos_grib_awips.sh index 2e5ec91526..5252d71983 100755 --- a/scripts/exgfs_atmos_grib_awips.sh +++ b/scripts/exgfs_atmos_grib_awips.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ###################################################################### # UTILITY SCRIPT NAME : exgfs_grib_awips.sh # DATE WRITTEN : 10/04/2004 @@ -9,29 +10,27 @@ # 1st argument - Forecast Hour - format of 2I # ##################################################################### -echo "------------------------------------------------" -echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing" -echo "------------------------------------------------" -echo "History: OCT 2004 - First implementation of this new script." 
-echo " JUN 2014 - Modified to remove process for AWIPS in GRIB2" -echo " to script exgfs_grib_awips_g2.sh and this " -echo " script only process AWIPS GRIB1 (211 and 225)" -echo " AUG 2015 - Modified for WCOSS phase2" -echo " FEB 2019 - Removed grid 225" +# echo "------------------------------------------------" +# echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing" +# echo "------------------------------------------------" +# echo "History: OCT 2004 - First implementation of this new script." +# echo " JUN 2014 - Modified to remove process for AWIPS in GRIB2" +# echo " to script exgfs_grib_awips_g2.sh and this " +# echo " script only process AWIPS GRIB1 (211 and 225)" +# echo " AUG 2015 - Modified for WCOSS phase2" +# echo " FEB 2019 - Removed grid 225" ##################################################################### -set +x + +source "$HOMEgfs/ush/preamble.sh" + fcsthrs="$1" num=$# job_name=$(echo $job|sed 's/[jpt]gfs/gfs/') -typeset -Z3 fcsthrs +fcsthrs=$(printf "%03d" $fcsthrs) -export PS4='gfs_grib_awips:f$fcsthrs:$SECONDS + ' export SCALEDEC=${SCALDEC:-$USHgfs/scale_dec.sh} -#if [ $fhcsthrs -t 100 ]; then -# fcsthrs=0$fcsthrs -#fi if test "$num" -ge 1 then echo "" @@ -49,8 +48,6 @@ fi cd $DATA/awips_g1 -set -x - ############################################### # Wait for the availability of the pgrb file ############################################### @@ -86,7 +83,7 @@ echo "###############################################" echo " Process GFS GRIB1 AWIP PRODUCTS (211) " echo "###############################################" echo " " -set -x +${TRACE_ON:-set -x} cp $COMIN/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs} tmpfile2 cp $COMIN/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs} tmpfile2b @@ -149,16 +146,7 @@ if [ -e "$pgmout" ] ; then cat $pgmout fi -############################################################################### -# GOOD RUN -set +x -echo "**************JOB EXGFS_GRIB_AWIPS.SH.ECF COMPLETED NORMALLY ON THE IBM" -echo "**************JOB EXGFS_GRIB_AWIPS.SH.ECF 
COMPLETED NORMALLY ON THE IBM" -echo "**************JOB EXGFS_GRIB_AWIPS.SH.ECF COMPLETED NORMALLY ON THE IBM" -set -x ############################################################################### -msg="HAS COMPLETED NORMALLY!" -postmsg "$jlogfile" "$msg" ############## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_nawips.sh b/scripts/exgfs_atmos_nawips.sh index 8e1ba652d6..5b75173506 100755 --- a/scripts/exgfs_atmos_nawips.sh +++ b/scripts/exgfs_atmos_nawips.sh @@ -1,15 +1,16 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################### -echo "----------------------------------------------------" -echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -echo "----------------------------------------------------" -echo "History: Mar 2000 - First implementation of this new script." -echo "S Lilly: May 2008 - add logic to make sure that all of the " -echo " data produced from the restricted ECMWF" -echo " data on the CCS is properly protected." +# echo "----------------------------------------------------" +# echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" +# echo "----------------------------------------------------" +# echo "History: Mar 2000 - First implementation of this new script." +# echo "S Lilly: May 2008 - add logic to make sure that all of the " +# echo " data produced from the restricted ECMWF" +# echo " data on the CCS is properly protected." ##################################################################### -set -xa +source "$HOMEgfs/ush/preamble.sh" "${2}" #### If EMC GFS PARA runs hourly file are not available, The ILPOST #### will set to 3 hour in EMC GFS PARA. 
@@ -21,16 +22,10 @@ RUN=$1 fend=$2 DBN_ALERT_TYPE=$3 -export 'PS4=$RUN:$SECONDS + ' - DATA_RUN=$DATA/$RUN mkdir -p $DATA_RUN cd $DATA_RUN -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - - # NAGRIB=$GEMEXE/nagrib2_nc # @@ -49,20 +44,17 @@ maxtries=360 fhcnt=$fstart while [ $fhcnt -le $fend ] ; do -if mkdir lock.$fhcnt ; then +if [[ $(mkdir lock.${fhcnt}) == 0 ]] ; then cd lock.$fhcnt cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl - typeset -Z3 fhr - - fhr=$fhcnt + fhr=$(printf "%03d" $fhcnt) fhcnt3=$(expr $fhr % 3) - fhr3=$fhcnt - typeset -Z3 fhr3 + fhr3=$(printf "%03d" $fhcnt) GEMGRD=${RUN}_${PDY}${cyc}f${fhr3} @@ -189,16 +181,6 @@ done $GEMEXE/gpend ##################################################################### -# GOOD RUN -set +x -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -set -x -##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_nceppost.sh b/scripts/exgfs_atmos_nceppost.sh index e653a36e39..ae94472aeb 100755 --- a/scripts/exgfs_atmos_nceppost.sh +++ b/scripts/exgfs_atmos_nceppost.sh @@ -1,47 +1,46 @@ +#! /usr/bin/env bash + ##################################################################### -echo "-----------------------------------------------------" -echo " exgfs_nceppost.sh" -echo " Apr 99 - Michaud - Generated to post global forecast" -echo " Mar 03 - Zhu - Add post for 0.5x0.5 degree" -echo " Nov 03 - Gilbert - Modified from exglobal_post.sh.sms" -echo " to run only one master post job." 
-echo " Jan 07 - Cooke - Add DBNet Alert for Master files" -echo " May 07 - Chuang - Modified scripts to run unified post" -echo " Feb 10 - Carlis - Add 12-hr accum precip bucket at f192" -echo " Jun 12 - Wang - Add option for grb2" -echo " Jul 14 - Carlis - Add 0.25 deg master " -echo " Mar 17 - F Yang - Modified for running fv3gfs" -echo " Aug 17 - Meng - Add flags for turning on/off flx, gtg " -echo " and satellite look like file creation" -echo " and use 3-digit forecast hour naming" -echo " post output files" -echo " Dec 17 - Meng - Link sfc data file to flxfile " -echo " since fv3gfs does not output sfc files any more." -echo " Dec 17 - Meng - Add fv3gfs_downstream_nems.sh for pgb processing " -echo " Jan 18 - Meng - Add flag PGBF for truning on/off pgb processing. " -echo " Jan 18 - Meng - For EE2 standard, move IDRT POSTGPVARS setting" -echo " from j-job script." -echo " Feb 18 - Meng - Removed legacy setting for generating grib1 data" -echo " and reading sigio model outputs." -echo " Aug 20 - Meng - Remove .ecf extentsion per EE2 review." -echo " Sep 20 - Meng - Update clean up files per EE2 review." -echo " Dec 20 - Meng - Add alert for special data file." -echo " Mar 21 - Meng - Update POSTGRB2TBL default setting." -echo " Jun 21 - Mao - Instead of err_chk, catch err and print out" -echo " WAFS failure warnings to avoid job crashing" -echo " Oct 21 - Meng - Remove jlogfile for wcoss2 transition." -echo " Feb 22 - Lin - Exception handling if anl input not found." -echo "-----------------------------------------------------" +# echo "-----------------------------------------------------" +# echo " exgfs_nceppost.sh" +# echo " Apr 99 - Michaud - Generated to post global forecast" +# echo " Mar 03 - Zhu - Add post for 0.5x0.5 degree" +# echo " Nov 03 - Gilbert - Modified from exglobal_post.sh.sms" +# echo " to run only one master post job." 
+# echo " Jan 07 - Cooke - Add DBNet Alert for Master files" +# echo " May 07 - Chuang - Modified scripts to run unified post" +# echo " Feb 10 - Carlis - Add 12-hr accum precip bucket at f192" +# echo " Jun 12 - Wang - Add option for grb2" +# echo " Jul 14 - Carlis - Add 0.25 deg master " +# echo " Mar 17 - F Yang - Modified for running fv3gfs" +# echo " Aug 17 - Meng - Add flags for turning on/off flx, gtg " +# echo " and satellite look like file creation" +# echo " and use 3-digit forecast hour naming" +# echo " post output files" +# echo " Dec 17 - Meng - Link sfc data file to flxfile " +# echo " since fv3gfs does not output sfc files any more." +# echo " Dec 17 - Meng - Add fv3gfs_downstream_nems.sh for pgb processing " +# echo " Jan 18 - Meng - Add flag PGBF for truning on/off pgb processing. " +# echo " Jan 18 - Meng - For EE2 standard, move IDRT POSTGPVARS setting" +# echo " from j-job script." +# echo " Feb 18 - Meng - Removed legacy setting for generating grib1 data" +# echo " and reading sigio model outputs." +# echo " Aug 20 - Meng - Remove .ecf extentsion per EE2 review." +# echo " Sep 20 - Meng - Update clean up files per EE2 review." +# echo " Dec 20 - Meng - Add alert for special data file." +# echo " Mar 21 - Meng - Update POSTGRB2TBL default setting." +# echo " Jun 21 - Mao - Instead of err_chk, catch err and print out" +# echo " WAFS failure warnings to avoid job crashing" +# echo " Oct 21 - Meng - Remove jlogfile for wcoss2 transition." +# echo " Feb 22 - Lin - Exception handling if anl input not found." 
+# echo "-----------------------------------------------------" ##################################################################### -set -x +source "$HOMEgfs/ush/preamble.sh" cd $DATA # specify model output format type: 4 for nemsio, 3 for sigio -msg="HAS BEGUN on $(hostname)" -postmsg "$msg" - export POSTGPSH=${POSTGPSH:-$USHgfs/gfs_nceppost.sh} export GFSDOWNSH=${GFSDOWNSH:-$USHgfs/fv3gfs_downstream_nems.sh} export GFSDOWNSHF=${GFSDOWNSHF:-$USHgfs/inter_flux.sh} @@ -242,7 +241,6 @@ else ## not_anl if_stime # Start Looping for the # existence of the restart files ############################### - set -x export pgm="postcheck" ic=1 while [ $ic -le $SLEEP_LOOP_MAX ]; do @@ -263,10 +261,6 @@ else ## not_anl if_stime err_chk fi done - set -x - - msg="Starting post for fhr=$fhr" - postmsg "$msg" ############################### # Put restart files into /nwges @@ -478,7 +472,7 @@ else ## not_anl if_stime ########################## WAFS start ########################## # Generate WAFS products on ICAO standard level. # Do not need to be sent out to public, WAFS package will process the data. - if [[ $WAFSF = "YES" && $fhr -le 120 ]]; then + if [[ $WAFSF = "YES" && 10#$fhr -le 120 ]]; then if [[ $RUN = gfs && $GRIBVERSION = 'grib2' ]]; then export OUTTYP=${OUTTYP:-4} @@ -527,9 +521,7 @@ else ## not_anl if_stime #---------------------------------- fi ## end_if_stime -#cat $pgmout -#msg='ENDED NORMALLY.' -#postmsg "$jlogfile" "$msg" + exit 0 diff --git a/scripts/exgfs_atmos_postsnd.sh b/scripts/exgfs_atmos_postsnd.sh index 67b9ce56fa..114ab4e234 100755 --- a/scripts/exgfs_atmos_postsnd.sh +++ b/scripts/exgfs_atmos_postsnd.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! 
/usr/bin/env bash + ################################################################ # Script Name: exgfs_atmos_postsnd.sh.sms # Script Description: Generate GFS BUFR sounding files @@ -18,12 +19,11 @@ # 8) 2019-10-18 Guang Ping Lou Transition to reading in NetCDF model data # 9) 2019-12-18 Guang Ping Lou generalizing to reading in NetCDF or nemsio ################################################################ -set -xa + +source "$HOMEgfs/ush/preamble.sh" cd $DATA -######################################## -msg="HAS BEGUN" -#postmsg "$jlogfile" "$msg" + ######################################## ################################################### @@ -165,21 +165,6 @@ ${APRUN_POSTSNDCFP} cmdfile sh $USHbufrsnd/gfs_bfr2gpk.sh fi -################################################ -# Convert the bufr soundings into GEMPAK files -################################################ -##$USHbufrsnd/gfs_bfr2gpk.sh - -##################################################################### -# GOOD RUN -set +x -echo "**************JOB GFS_meteogrm COMPLETED NORMALLY ON THE IBM" -echo "**************JOB GFS_meteogrm COMPLETED NORMALLY ON THE IBM" -echo "**************JOB GFS_meteogrm COMPLETED NORMALLY ON THE IBM" -set -x -##################################################################### - -msg='HAS COMPLETED NORMALLY.' -#postmsg "$jlogfile" "$msg" + ############## END OF SCRIPT ####################### diff --git a/scripts/exgfs_pmgr.sh b/scripts/exgfs_pmgr.sh index d16db66032..a417bbed55 100755 --- a/scripts/exgfs_pmgr.sh +++ b/scripts/exgfs_pmgr.sh @@ -1,13 +1,14 @@ -#! /bin/ksh +#! 
/usr/bin/env bash + # # Script name: exgfs_pmgr.sh.sms # # This script monitors the progress of the gfs_fcst job # -set -x + +source "$HOMEgfs/ush/preamble.sh" hour=00 -typeset -Z2 hour TEND=384 TCP=385 @@ -17,15 +18,12 @@ fi while [ $hour -lt $TCP ]; do + hour=$(printf "%02d" $hour) echo $hour >>posthours - if [ $hour -lt 240 ] + if [ 10#$hour -lt 240 ] then - if [ $hour -eq 99 ] - then - typeset -Z3 hour - fi # JY if [ $hour -lt 12 ] - if [ $hour -lt 120 ] + if [ 10#$hour -lt 120 ] then let "hour=hour+1" else @@ -74,6 +72,5 @@ do done -echo Exiting $0 exit diff --git a/scripts/exgfs_prdgen_manager.sh b/scripts/exgfs_prdgen_manager.sh index 6584178a7e..7d0a95696b 100755 --- a/scripts/exgfs_prdgen_manager.sh +++ b/scripts/exgfs_prdgen_manager.sh @@ -1,13 +1,14 @@ -#! /bin/ksh +#! /usr/bin/env bash + # # Script name: exgfs_pmgr.sh.sms # # This script monitors the progress of the gfs_fcst job # -set -x + +source "$HOMEgfs/ush/preamble.sh" hour=00 -typeset -Z2 hour TEND=384 TCP=385 @@ -17,13 +18,11 @@ fi while [ $hour -lt $TCP ]; do + hour=$(printf "%02d" $hour) echo $hour >>pgrb2_hours - if [ $hour -lt 240 ] + if [ 10#$hour -lt 240 ] then - if [ $hour -eq 99 ]; then - typeset -Z3 hour - fi - if [ $hour -lt 120 ] + if [ 10#$hour -lt 120 ] then let "hour=hour+1" else @@ -71,6 +70,5 @@ do done -echo Exiting $0 exit diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh index 8c268a6176..31c39fd52a 100755 --- a/scripts/exgfs_wave_init.sh +++ b/scripts/exgfs_wave_init.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -25,20 +25,15 @@ # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - set -x +source "$HOMEgfs/ush/preamble.sh" + +# 0.a Basic modes of operation err=0 - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x cd $DATA - echo "HAS BEGUN on $(hostname)" - echo "Starting MWW3 INIT CONFIG SCRIPT for ${CDUMP}wave" - set +x echo ' ' echo ' ********************************' @@ -49,7 +44,7 @@ echo ' ' echo "Starting at : $(date)" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. @@ -63,7 +58,7 @@ echo ' ' echo " Script set to run with $NTASKS tasks " echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # --------------------------------------------------------------------------- # @@ -73,7 +68,7 @@ echo 'Preparing input files :' echo '-----------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 1.a Model definition files @@ -93,14 +88,14 @@ then set +x echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cp $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID else set +x echo " Mod def file for $grdID not found in ${COMIN}/rundata. Setting up to generate ..." echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ -f $PARMwave/ww3_grid.inp.$grdID ] then cp $PARMwave/ww3_grid.inp.$grdID ww3_grid.inp.$grdID @@ -112,7 +107,7 @@ echo ' ' echo " ww3_grid.inp.$grdID copied ($PARMwave/ww3_grid.inp.$grdID)." 
echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -121,7 +116,7 @@ echo '*********************************************************** ' echo " grdID = $grdID" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=2;export err;${errchk} fi @@ -146,7 +141,7 @@ echo ' ' echo " Generating $nmoddef mod def files" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # Set number of processes for mpmd wavenproc=$(wc -l cmdfile | awk '{print $1}') @@ -159,8 +154,7 @@ echo " Executing the mod_def command file at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x - + ${TRACE_ON:-set -x} if [ "$NTASKS" -gt '1' ] then if [ ${CFP_MP:-"NO"} = "YES" ]; then @@ -183,7 +177,7 @@ echo '********************************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} fi fi @@ -198,7 +192,7 @@ echo ' ' echo " mod_def.$grdID succesfully created/copied " echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -208,7 +202,7 @@ echo " grdID = $grdID" echo ' ' sed "s/^/$grdID.out : /g" $grdID.out - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=3;export err;${errchk} fi done @@ -216,14 +210,5 @@ # --------------------------------------------------------------------------- # # 2. Ending - set +x - echo ' ' - echo "Ending at : $(date)" - echo ' ' - echo ' *** End of MWW3 Init Config ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - - exit $err # End of MWW3 init config script ------------------------------------------- # diff --git a/scripts/exgfs_wave_nawips.sh b/scripts/exgfs_wave_nawips.sh index c4fa9e764d..8d41578d7e 100755 --- a/scripts/exgfs_wave_nawips.sh +++ b/scripts/exgfs_wave_nawips.sh @@ -1,16 +1,17 @@ -#!/bin/ksh +#! 
/usr/bin/env bash + ################################################################### -echo "----------------------------------------------------" -echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -echo "----------------------------------------------------" -echo "History: Mar 2000 - First implementation of this new script." -echo "Sept 2011 - First implementation of this new script based on" -echo " /nwprod/scripts/exnawips.sh.sms" -echo " March 2020- Modified for GEFSv12.0" +# echo "----------------------------------------------------" +# echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" +# echo "----------------------------------------------------" +# echo "History: Mar 2000 - First implementation of this new script." +# echo "Sept 2011 - First implementation of this new script based on" +# echo " /nwprod/scripts/exnawips.sh.sms" +# echo " March 2020- Modified for GEFSv12.0" # March-2020 Roberto.Padilla@noaa.gov ##################################################################### -set -xa +source "$HOMEgfs/ush/preamble.sh" #export grids=${grids:-'glo_30m at_10m ep_10m wc_10m ao_9km'} #Interpolated grids export grids=${grids:-'glo_10m gso_15m ao_9km'} #Native grids @@ -90,7 +91,7 @@ while [ $fhcnt -le $FHMAX_WAV ]; do echo '**************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog err=1;export err;${errchk} || exit ${err} fi @@ -111,7 +112,7 @@ while [ $fhcnt -le $FHMAX_WAV ]; do echo '************************************************************* ' echo ' ' echo $msg - #[[ "$LOUD" = YES ]] && set -x + #${TRACE_ON:-set -x} echo "$RUNwave $grdID prdgen $date $cycle : error in grbindex." 
>> $wavelog err=2;export err;err_chk else @@ -177,14 +178,6 @@ while [ $fhcnt -le $FHMAX_WAV ]; do let fhcnt=fhcnt+inc done ##################################################################### -# GOOD RUN -set +x -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -set -x -##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" + + ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh index 622eb57880..b602ba3a0e 100755 --- a/scripts/exgfs_wave_post_gridded_sbs.sh +++ b/scripts/exgfs_wave_post_gridded_sbs.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -29,12 +29,10 @@ # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - set -x - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +source "$HOMEgfs/ush/preamble.sh" + +# 0.a Basic modes of operation # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic @@ -42,7 +40,6 @@ cd $DATA - echo "HAS BEGUN on $(hostname)" echo "Starting WAVE POSTPROCESSOR SCRIPT for $WAV_MOD_TAG" set +x @@ -54,7 +51,7 @@ echo "Starting at : $(date)" echo '-------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. 
@@ -84,7 +81,7 @@ echo " Interpolated grids : $waveinterpGRD" echo " Post-process grids : $wavepostGRD" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 0.c.3 Define CDATE_POST @@ -103,7 +100,7 @@ echo ' ' echo 'Preparing input files :' echo '-----------------------' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 1.a Model definition files and output files (set up using poe) @@ -114,7 +111,7 @@ then set +x echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cp -f $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID fi @@ -131,14 +128,14 @@ echo " FATAL ERROR : NO MOD_DEF FILE mod_def.$grdID " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=2; export err;${errchk} exit $err DOGRB_WAV='NO' else set +x echo "File mod_def.$grdID found. Syncing to all nodes ..." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} fi done @@ -158,7 +155,7 @@ then set +x echo " ${intGRD}_interp.inp.tmpl copied. Syncing to all nodes ..." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -166,7 +163,7 @@ echo '*** ERROR : NO TEMPLATE FOR GRINT INPUT FILE *** ' echo '*********************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "$WAV_MOD_TAG post $date $cycle : GRINT template file missing." exit_code=1 DOGRI_WAV='NO' @@ -187,7 +184,7 @@ then set +x echo " ww3_grib2.${grbGRD}.inp.tmpl copied. Syncing to all nodes ..." 
- [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -195,7 +192,7 @@ echo "*** ERROR : NO TEMPLATE FOR ${grbGRD} GRIB INPUT FILE *** " echo '*********************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit_code=2 DOGRB_WAV='NO' fi @@ -214,7 +211,7 @@ echo " Sufficient data for GRID interpolation : $DOGRI_WAV" echo " Sufficient data for GRIB files : $DOGRB_WAV" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # --------------------------------------------------------------------------- # # 2. Make consolidated grib2 file for side-by-side grids and interpolate @@ -224,12 +221,12 @@ set +x echo ' Making command file for sbs grib2 and GRID Interpolation ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 1.a.2 Loop over forecast time to generate post files # When executed side-by-side, serial mode (cfp when run after the fcst step) # Contingency for RERUN=YES - if [ "${RERUN}" = "YES" ]; then + if [ "${RERUN-NO}" = "YES" ]; then fhr=$((FHRUN + FHMIN_WAV)) if [ $FHMAX_HF_WAV -gt 0 ] && [ $FHOUT_HF_WAV -gt 0 ] && [ $fhr -lt $FHMAX_HF_WAV ]; then FHINCG=$FHOUT_HF_WAV @@ -275,7 +272,7 @@ echo " FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.$grdID " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "$WAV_MOD_TAG post $grdID $date $cycle : field output missing." 
err=3; export err;${errchk} exit $err @@ -370,7 +367,7 @@ echo " Executing the grib2_sbs scripts at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ "$wavenproc" -gt '1' ] then @@ -395,7 +392,7 @@ echo '*************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=4; export err;${errchk} exit $err fi @@ -419,7 +416,7 @@ echo '********************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=5; export err;${errchk} exit $err fi @@ -439,23 +436,6 @@ # --------------------------------------------------------------------------- # # 7. Ending output - set +x - echo ' ' - echo "Ending at : $(date)" - echo '-----------' - echo ' ' - echo ' *** End of MWW3 postprocessor ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - - if [ "$exit_code" -ne '0' ] - then - echo " FATAL ERROR: Problem in MWW3 POST" - err=6; export err;${errchk} - exit $err - else - echo " Side-by-Side Wave Post Completed Normally " - exit 0 - fi +echo "$exit_code" # End of MWW3 prostprocessor script ---------------------------------------- # diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh index c3cab2999d..cf42db0bb4 100755 --- a/scripts/exgfs_wave_post_pnt.sh +++ b/scripts/exgfs_wave_post_pnt.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -31,12 +31,10 @@ # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - set -x - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +source "$HOMEgfs/ush/preamble.sh" + +# 0.a Basic modes of operation cd $DATA @@ -56,7 +54,7 @@ echo "Starting at : $(date)" echo '-------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. @@ -93,7 +91,7 @@ echo '-------------------' echo " Output points : $waveuoutpGRD" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # --------------------------------------------------------------------------- # # 1. Get files that are used by most child scripts @@ -104,7 +102,7 @@ echo ' ' echo 'Preparing input files :' echo '-----------------------' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 1.a Model definition files and output files (set up using poe) @@ -114,16 +112,17 @@ touch cmdfile chmod 744 cmdfile - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # Copy model definition files + iloop=0 for grdID in $waveuoutpGRD do if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] then set +x echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cp -f $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID iloop=$(expr $iloop + 1) @@ -140,13 +139,13 @@ echo " FATAL ERROR : NO MOD_DEF FILE mod_def.$grdID " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=2; export err;${errchk} exit $err else set +x echo "File mod_def.$grdID found. Syncing to all nodes ..." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} fi done @@ -170,7 +169,7 @@ then set +x echo " buoy.loc and buoy.ibp copied and processed ($PARMwave/wave_${NET}.buoys)." 
- [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -178,7 +177,7 @@ echo ' FATAL ERROR : NO BUOY LOCATION FILE ' echo '************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=3; export err;${errchk} exit $err DOSPC_WAV='NO' @@ -196,7 +195,7 @@ then set +x echo " ww3_outp_spec.inp.tmpl copied. Syncing to all grids ..." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -204,7 +203,7 @@ echo '*** ERROR : NO TEMPLATE FOR SPEC INPUT FILE *** ' echo '*********************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit_code=3 DOSPC_WAV='NO' DOBLL_WAV='NO' @@ -219,7 +218,7 @@ then set +x echo " ww3_outp_bull.inp.tmpl copied. Syncing to all nodes ..." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -227,7 +226,7 @@ echo '*** ERROR : NO TEMPLATE FOR BULLETIN INPUT FILE *** ' echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit_code=4 DOBLL_WAV='NO' fi @@ -257,7 +256,7 @@ echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.${waveuoutpGRD}.${YMD}.${HMS} " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "$WAV_MOD_TAG post $waveuoutpGRD $CDATE $cycle : field output missing." err=4; export err;${errchk} fi @@ -281,7 +280,7 @@ echo ' ' cat buoy_tmp.loc echo "$WAV_MOD_TAG post $date $cycle : buoy log file failed to be created." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=5;export err;${errchk} DOSPC_WAV='NO' DOBLL_WAV='NO' @@ -304,7 +303,7 @@ then set +x echo 'Buoy log file created. Syncing to all nodes ...' 
- [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -312,7 +311,7 @@ echo '*** ERROR : NO BUOY LOG FILE CREATED *** ' echo '**************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=6;export err;${errchk} DOSPC_WAV='NO' DOBLL_WAV='NO' @@ -332,7 +331,7 @@ echo " Sufficient data for bulletins : $DOBLL_WAV ($Nb points)" echo " Boundary points : $DOBNDPNT_WAV" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # --------------------------------------------------------------------------- # # 2. Make files for processing boundary points @@ -341,7 +340,7 @@ set +x echo ' Making command file for wave post points ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} rm -f cmdfile touch cmdfile @@ -375,7 +374,7 @@ else echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.$waveuoutpGRD.${YMD}.${HMS} " echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=7; export err;${errchk} exit $err fi @@ -469,7 +468,7 @@ echo " Executing the wave point scripts at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ "$wavenproc" -gt '1' ] then @@ -494,7 +493,7 @@ echo '*************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=8; export err;${errchk} exit $err fi @@ -561,7 +560,7 @@ echo " Executing the boundary point cat script at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ "$wavenproc" -gt '1' ] then @@ -586,7 +585,7 @@ echo '*************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=9; export err;${errchk} exit $err fi @@ -605,7 +604,7 @@ echo ' ' echo ' Making command file for taring all point output files.' 
- [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 6.b Spectral data files @@ -663,7 +662,7 @@ echo " Executing the wave_tar scripts at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ "$wavenproc" -gt '1' ] then @@ -688,7 +687,7 @@ echo '*************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=10; export err;${errchk} exit $err fi @@ -696,23 +695,7 @@ # --------------------------------------------------------------------------- # # 4. Ending output - set +x - echo ' ' - echo "Ending at : $(date)" - echo '-----------' - echo ' ' - echo ' *** End of MWW3 pnt postprocessor ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - if [ "$exit_code" -ne '0' ] - then - echo " FATAL ERROR: Problem in MWW3 PNT POST" - err=11; export err;${errchk} - exit $err - else - echo " Point Wave Post Completed Normally " - exit 0 - fi +exit $exit_code # End of MWW3 point prostprocessor script ---------------------------------------- # diff --git a/scripts/exgfs_wave_prdgen_bulls.sh b/scripts/exgfs_wave_prdgen_bulls.sh index dc46136a54..10bdee523b 100755 --- a/scripts/exgfs_wave_prdgen_bulls.sh +++ b/scripts/exgfs_wave_prdgen_bulls.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ############################################################################### # # # This script is the product generator ("graphics job") for the # @@ -16,11 +17,10 @@ ############################################################################### # --------------------------------------------------------------------------- # # 0. Preparations + +source "$HOMEgfs/ush/preamble.sh" + # 0.a Basic modes of operation - set -xa - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x # PATH for working and home directories export RUNwave=${RUNwave:-${RUN}${COMPONENT}} @@ -40,10 +40,6 @@ cd $DATA export wavelog=${DATA}/${RUNwave}_prdgbulls.log - postmsg "$jlogfile" "HAS BEGUN on $(hostname)" - - msg="Starting MWW3 BULLETINS PRODUCTS SCRIPT" - postmsg "$jlogfile" "$msg" touch $wavelog # 0.b Date and time stuff export date=$PDY @@ -58,12 +54,12 @@ echo "Starting at : $(date)" echo ' ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 1. Get necessary files set +x echo " Copying bulletins from $COMIN" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 1.a Link the input file and untar it BullIn=$COMIN/station/${RUNwave}.$cycle.cbull_tar @@ -79,7 +75,7 @@ echo '************************************ ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} msg="FATAL ERROR ${RUNwave} prdgen $date $cycle : bulletin tar missing." echo $msg >> $wavelog export err=1; ${errchk} @@ -88,14 +84,14 @@ set +x echo " Untarring bulletins ..." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} tar -xf cbull.tar OK=$? if [ "$OK" = '0' ]; then set +x echo " Unpacking successfull ..." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} rm -f cbull.tar else msg="ABNORMAL EXIT: ERROR IN BULLETIN UNTAR" @@ -107,7 +103,7 @@ echo '****************************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "${RUNwave} prdgen $date $cycle : bulletin untar error." 
>> $wavelog err=2;export err;err_chk exit $err @@ -117,7 +113,7 @@ set +x echo ' Nb=$(ls -1 *.cbull | wc -l)' Nb=$(ls -1 *.cbull | wc -l) - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo ' ' echo " Number of bulletin files : $Nb" echo ' --------------------------' @@ -135,7 +131,7 @@ echo '******************************************* ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "${RUNwave} prdgen $date $cycle : Bulletin header data file missing." >> $wavelog err=3;export err;err_chk exit $err @@ -148,7 +144,7 @@ echo ' Sourcing data file with header info ...' # 2.b Set up environment variables - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} . awipsbull.data # 2.c Generate list of bulletins to process @@ -166,9 +162,8 @@ echo " Processing $bull ($headr $oname) ..." if [ -z "$headr" ] || [ ! -s $fname ]; then - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} msg="ABNORMAL EXIT: MISSING BULLETING INFO" - postmsg "$jlogfile" "$msg" set +x echo ' ' echo '******************************************** ' @@ -176,20 +171,20 @@ echo '******************************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "${RUNwave} prdgen $date $cycle : Missing bulletin data." >> $wavelog err=4;export err;err_chk exit $err fi - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} formbul.pl -d $headr -f $fname -j $job -m ${RUNwave} \ -p $PCOM -s NO -o $oname > formbul.out 2>&1 OK=$? if [ "$OK" != '0' ] || [ ! -f $oname ]; then - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cat formbul.out msg="ABNORMAL EXIT: ERROR IN formbul" postmsg "$jlogfile" "$msg" @@ -200,7 +195,7 @@ echo '************************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "${RUNwave} prdgen $date $cycle : error in formbul." >> $wavelog err=5;export err;err_chk exit $err @@ -211,7 +206,7 @@ done # 3. 
Send output files to the proper destination - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ "$SENDCOM" = YES ]; then cp awipsbull.$cycle.${RUNwave} $PCOM/awipsbull.$cycle.${RUNwave} if [ "$SENDDBN_NTC" = YES ]; then @@ -219,7 +214,7 @@ else if [ "${envir}" = "para" ] || [ "${envir}" = "test" ] || [ "${envir}" = "dev" ]; then echo "Making NTC bulletin for parallel environment, but do not alert." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} (export SENDDBN=NO; make_ntc_bull.pl WMOBH NONE KWBC NONE \ $DATA/awipsbull.$cycle.${RUNwave} $PCOM/awipsbull.$cycle.${RUNwave}) fi @@ -229,23 +224,12 @@ # --------------------------------------------------------------------------- # # 4. Clean up - set +x; [[ "$LOUD" = YES ]] && set -v + set -v rm -f ${RUNwave}.*.cbull awipsbull.data set +v # --------------------------------------------------------------------------- # # 5. Ending output - set +x - echo ' ' - echo ' ' - echo "Ending at : $(date)" - echo ' ' - echo ' *** End of MWW3 BULLETINS product generation ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - - msg="$job completed normally" - postmsg "$jlogfile" "$msg" # End of MWW3 product generation script -------------------------------------- # diff --git a/scripts/exgfs_wave_prdgen_gridded.sh b/scripts/exgfs_wave_prdgen_gridded.sh index ed6141afec..b56fb15819 100755 --- a/scripts/exgfs_wave_prdgen_gridded.sh +++ b/scripts/exgfs_wave_prdgen_gridded.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ############################################################################### # # # This script is the product generator ("graphics job") for the # @@ -17,11 +18,10 @@ ############################################################################### # --------------------------------------------------------------------------- # # 0. Preparations + +source "$HOMEgfs/ush/preamble.sh" + # 0.a Basic modes of operation - set -xa - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x export RUNwave=${RUNwave:-${RUN}${COMPONENT}} export envir=${envir:-ops} @@ -63,14 +63,14 @@ echo " AWIPS grib fields" echo " Wave Grids : $grids" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # --------------------------------------------------------------------------- # # 1. Get necessary files echo ' ' echo 'Preparing input files :' echo '-----------------------' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} #======================================================================= ASWELL=(SWELL1 SWELL2) # Indices of HS from partitions @@ -120,7 +120,7 @@ echo '**************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog err=1;export err;${errchk} || exit ${err} fi @@ -177,12 +177,12 @@ # 2.a.1 Set up for tocgrib2 echo " Do set up for tocgrib2." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} #AWIPSGRB=awipsgrib.$grdID.f${fhr} AWIPSGRB=awipsgrib # 2.a.2 Make GRIB index echo " Make GRIB index for tocgrib2." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} $GRB2INDEX gribfile.$grdID.f${fhr} gribindex.$grdID.f${fhr} OK=$? @@ -197,7 +197,7 @@ echo '******************************************** ' echo ' ' echo $msg - #[[ "$LOUD" = YES ]] && set -x + #${TRACE_ON:-set -x} echo "$RUNwave $grdID prdgen $date $cycle : error in grbindex." >> $wavelog err=4;export err;err_chk fi @@ -205,7 +205,7 @@ # 2.a.3 Run AWIPS GRIB packing program tocgrib2 echo " Run tocgrib2" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} export pgm=tocgrib2 export pgmout=tocgrib2.out . prep_step @@ -227,7 +227,7 @@ echo '*************************************** ' echo ' ' echo $msg - #[[ "$LOUD" = YES ]] && set -x + #${TRACE_ON:-set -x} echo "$RUNwave prdgen $date $cycle : error in tocgrib2." 
>> $wavelog err=5;export err;err_chk else @@ -236,13 +236,13 @@ # 2.a.7 Get the AWIPS grib bulletin out ... #set +x echo " Get awips GRIB bulletins out ..." - #[[ "$LOUD" = YES ]] && set -x + #${TRACE_ON:-set -x} if [ "$SENDCOM" = 'YES' ] then #set +x echo " Saving $AWIPSGRB.$grdOut.f${fhr} as grib2.$cycle.awipsww3_${grdID}.f${fhr}" echo " in $PCOM" - #[[ "$LOUD" = YES ]] && set -x + #${TRACE_ON:-set -x} cp $AWIPSGRB.$grdID.f${fhr} $PCOM/grib2.$cycle.f${fhr}.awipsww3_${grdOut} #set +x fi @@ -268,22 +268,13 @@ # --------------------------------------------------------------------------- # # 5. Clean up - set +x; [[ "$LOUD" = YES ]] && set -v + set -v rm -f gribfile gribindex.* awipsgrb.* awipsbull.data set +v # --------------------------------------------------------------------------- # # 6. Ending output - echo ' ' - echo ' ' - echo "Ending at : $(date)" - echo ' ' - echo ' *** End of MWW3 product generation ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - msg="$job completed normally" - postmsg "$jlogfile" "$msg" # End of GFSWAVE product generation script -------------------------------------- # diff --git a/scripts/exgfs_wave_prep.sh b/scripts/exgfs_wave_prep.sh index 2e2e18985f..f3ecf388be 100755 --- a/scripts/exgfs_wave_prep.sh +++ b/scripts/exgfs_wave_prep.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -39,12 +39,10 @@ ############################################################################### # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - set -x - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +source "$HOMEgfs/ush/preamble.sh" + +# 0.a Basic modes of operation # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic @@ -66,7 +64,7 @@ echo ' ' echo "Starting at : $(date)" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ "$INDRUN" = 'no' ] then @@ -138,7 +136,7 @@ echo " starting time : $time_beg" echo " ending time : $time_end" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. @@ -155,7 +153,7 @@ echo 'Preparing input files :' echo '-----------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 1.a Model definition files @@ -175,7 +173,7 @@ then set +x echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cp $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID else @@ -187,7 +185,7 @@ echo " grdID = $grdID" echo ' ' echo "FATAL ERROR: NO MODEL DEFINITION FILE" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=2;export err;${errchk} fi done @@ -227,7 +225,7 @@ echo ' ' echo " ww3_prnc.${type}.$grdID.inp.tmpl copied ($PARMwave)." echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else set +x echo ' ' @@ -238,7 +236,7 @@ echo ' ' echo "ABNORMAL EXIT: NO FILE $file" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=4;export err;${errchk} fi done @@ -267,7 +265,7 @@ echo ' ' sed "s/^/wave_prnc_ice.out : /g" wave_prnc_ice.out echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=5;export err;${errchk} else mv -f wave_prnc_ice.out $DATA/outtmp @@ -275,7 +273,7 @@ echo ' ' echo ' Ice field unpacking successful.' 
echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} fi else echo ' ' @@ -297,7 +295,7 @@ echo '*** FATAL ERROR : Not set-up to preprocess wind *** ' echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=6;export err;${errchk} fi @@ -315,7 +313,7 @@ echo ' ' echo ' Concatenate binary current fields ...' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # Prepare files for cfp process rm -f cmdfile @@ -386,13 +384,13 @@ else curfile=${curfile3h} fi - set $setoff + set -x echo ' ' echo '************************************** ' echo "*** FATAL ERROR: NO CUR FILE $curfile *** " echo '************************************** ' echo ' ' - set $seton + ${TRACE_ON:-set -x} echo "FATAL ERROR - NO CURRENT FILE (RTOFS)" err=11;export err;${errchk} exit $err @@ -425,7 +423,7 @@ echo " Executing the curr prnc cmdfile at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ $wavenproc -gt '1' ] then @@ -450,7 +448,7 @@ echo '********************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} fi files=$(ls ${WAVECUR_DID}.* 2> /dev/null) @@ -464,7 +462,7 @@ echo '******************************************** ' echo ' ' echo "ABNORMAL EXIT: NO ${WAVECUR_FID}.* FILES FOUND" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=11;export err;${errchk} fi @@ -495,14 +493,7 @@ # --------------------------------------------------------------------------- # # 4. 
Ending output - set +x - echo ' ' - echo "Ending at : $(date)" - echo ' ' - echo ' *** End of MWW3 preprocessor ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - exit $err +exit $err # End of MWW3 preprocessor script ------------------------------------------- # diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh index da3621838c..7970b9b3d8 100755 --- a/scripts/exglobal_atmos_analysis.sh +++ b/scripts/exglobal_atmos_analysis.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -17,11 +18,8 @@ ################################################################################# # Set environment. -export VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi + +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -1011,13 +1009,10 @@ cd $pwd if [ $SENDECF = "YES" -a "$RUN" != "enkf" ]; then ecflow_client --event release_fcst fi -echo "$CDUMP $CDATE atminc done at `date`" > $COMOUT/${APREFIX}loginc.txt +echo "$CDUMP $CDATE atminc done at $(date)" > $COMOUT/${APREFIX}loginc.txt ################################################################################ -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err ################################################################################ diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc.sh index 02a45de52e..2fa44c16b4 100755 --- a/scripts/exglobal_atmos_analysis_calc.sh +++ b/scripts/exglobal_atmos_analysis_calc.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -17,11 +18,8 @@ ################################################################################ # Set environment. 
-export VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi + +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -199,16 +197,13 @@ if [ $DOGAUSFCANL = "YES" ]; then export err=$?; err_chk fi -echo "$CDUMP $CDATE atmanl and sfcanl done at `date`" > $COMOUT/${APREFIX}loganl.txt +echo "$CDUMP $CDATE atmanl and sfcanl done at $(date)" > $COMOUT/${APREFIX}loganl.txt ################################################################################ # Postprocessing cd $pwd [[ $mkdata = "YES" ]] && rm -rf $DATA -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err diff --git a/scripts/exglobal_atmos_pmgr.sh b/scripts/exglobal_atmos_pmgr.sh index 1a00eda1a6..6e4c2ed3f4 100755 --- a/scripts/exglobal_atmos_pmgr.sh +++ b/scripts/exglobal_atmos_pmgr.sh @@ -1,13 +1,14 @@ -#! /bin/ksh +#! /usr/bin/env bash + # # Script name: exgfs_pmgr.sh.sms # # This script monitors the progress of the gfs_fcst job # -set -x + +source "$HOMEgfs/ush/preamble.sh" hour=00 -typeset -Z2 hour case $RUN in gfs) @@ -25,11 +26,9 @@ if [ -e posthours ]; then fi while [ $hour -lt $TCP ]; do + hour=$(printf "%02d" $hour) echo $hour >>posthours - if [ $hour -lt 120 ]; then - if [ $hour -eq 99 ]; then - typeset -Z3 hour - fi + if [ 10#$hour -lt 120 ]; then let "hour=hour+1" else let "hour=hour+3" @@ -69,6 +68,5 @@ while [ $icnt -lt 1000 ]; do fi done -echo Exiting $0 exit diff --git a/scripts/exglobal_atmos_sfcanl.sh b/scripts/exglobal_atmos_sfcanl.sh index a8c275c10d..899e0ae84a 100755 --- a/scripts/exglobal_atmos_sfcanl.sh +++ b/scripts/exglobal_atmos_sfcanl.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -17,11 +18,8 @@ ################################################################################ # Set environment. 
-export VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi + +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -217,10 +215,7 @@ cd $pwd ################################################################################ -set +x -if [ $VERBOSE = "YES" ]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err ################################################################################ diff --git a/scripts/exglobal_atmos_tropcy_qc_reloc.sh b/scripts/exglobal_atmos_tropcy_qc_reloc.sh index f158b10ff9..6f96d7cfb4 100755 --- a/scripts/exglobal_atmos_tropcy_qc_reloc.sh +++ b/scripts/exglobal_atmos_tropcy_qc_reloc.sh @@ -1,21 +1,20 @@ +#! /usr/bin/env bash + ############################################################################ -echo "---------------------------------------------------------------------" -echo "exglobal_atmos_tropcy_qc_reloc.sh - Tropical Cyclone QC/Relocation Prcocessing" -echo "---------------------------------------------------------------------" -echo "History: Jun 13 2006 - Original script." -echo " March 2013 - No changes needed for WCOSS transition" -echo " MP_LABELIO default added" -echo " Oct 2013 - Use main USH vars as part of minor pkg cleanup" +# echo "---------------------------------------------------------------------" +# echo "exglobal_atmos_tropcy_qc_reloc.sh - Tropical Cyclone QC/Relocation Prcocessing" +# echo "---------------------------------------------------------------------" +# echo "History: Jun 13 2006 - Original script." 
+# echo " March 2013 - No changes needed for WCOSS transition" +# echo " MP_LABELIO default added" +# echo " Oct 2013 - Use main USH vars as part of minor pkg cleanup" ############################################################################ -set -x +source "$HOMEgfs/ush/preamble.sh" # Make sure we are in the $DATA directory cd $DATA -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" - cat break > $pgmout export COMSP=$COMOUT/${RUN}.${cycle}. @@ -26,9 +25,6 @@ cdate10=$( ${NDATE:?} -$tmhr $PDY$cyc) NET_uc=$(echo $RUN | tr [a-z] [A-Z]) tmmark_uc=$(echo $tmmark | tr [a-z] [A-Z]) -msg="$NET_uc ANALYSIS TIME IS $PDY$cyc" -postmsg "$jlogfile" "$msg" - iflag=0 if [ $RUN = ndas ]; then if [ $DO_RELOCATE = NO ]; then @@ -71,9 +67,7 @@ if [ "$PROCESS_TROPCY" = 'YES' ]; then cd $COMOUT pwd - set +x ls -ltr *syndata* - set -x cd $ARCHSYND pwd;ls -ltr cat syndat_dateck @@ -156,27 +150,8 @@ fi ######################################################## -# GOOD RUN -set +x -echo " " -echo " ****** PROCESSING COMPLETED NORMALLY" -echo " ****** PROCESSING COMPLETED NORMALLY" -echo " ****** PROCESSING COMPLETED NORMALLY" -echo " ****** PROCESSING COMPLETED NORMALLY" -echo " " -set -x - - # save standard output -cat break $pgmout break > allout -cat allout -# rm allout - -sleep 10 +cat break $pgmout break -if [ $iflag -eq 0 ]; then - msg='ENDED NORMALLY.' - postmsg "$jlogfile" "$msg" -fi ################## END OF SCRIPT ####################### diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh index e2422b5877..0423a9fc70 100755 --- a/scripts/exglobal_diag.sh +++ b/scripts/exglobal_diag.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -17,11 +18,8 @@ ################################################################################ # Set environment. 
-export VERBOSE=${VERBOSE:-"YES"} -if [[ "$VERBOSE" = "YES" ]]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi + +source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) @@ -247,7 +245,9 @@ EOFdiag # Restrict diagnostic files containing rstprod data rlist="conv_gps conv_ps conv_pw conv_q conv_sst conv_t conv_uv saphir" for rtype in $rlist; do - ${CHGRP_CMD} *${rtype}* + set +e + ${CHGRP_CMD} *${rtype}* + ${STRICT_ON:-set -e} done # If requested, create diagnostic file tarballs @@ -285,11 +285,8 @@ if [[ "$REMOVE_DIAG_DIR" = "YES" && "$err" = "0" ]]; then fi cd $pwd -[[ $mkdata = "YES" ]] && rm -rf $DATA +[[ "${mkdata:-YES}" = "YES" ]] && rm -rf $DATA + -set +x -if [[ "$VERBOSE" = "YES" ]]; then - echo $(date) EXITING $0 with return code $err >&2 -fi exit $err diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh index 4c398e5055..3f2ad87caf 100755 --- a/scripts/exglobal_forecast.sh +++ b/scripts/exglobal_forecast.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ ## UNIX Script Documentation Block ## Script name: exglobal_fcst_nemsfv3gfs.sh @@ -76,11 +77,7 @@ # Main body starts here ####################### -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" SCRIPTDIR=$(dirname $(readlink -f "$0") )/../ush echo "MAIN: environment loaded for $machine platform,Current Script locates in $SCRIPTDIR." @@ -131,7 +128,7 @@ case $RUN in 'gefs') FV3_GEFS_predet;; esac [[ $cplflx = .true. ]] && MOM6_predet -#[[ $cplwav = .true. ]] && WW3_predet #no WW3_predet at this time +[[ $cplwav = .true. ]] && WW3_predet [[ $cplice = .true. 
]] && CICE_predet case $RUN in @@ -219,14 +216,5 @@ fi echo "MAIN: Output copied to COMROT" #------------------------------------------------------------------ -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXITING $0 with return code $err >&2 -fi -if [ $err != 0 ]; then - echo "MAIN: $confignamevarfornems Forecast failed" - exit $err -else - echo "MAIN: $confignamevarfornems Forecast completed at normal status" - exit 0 -fi +exit $err diff --git a/scripts/run_reg2grb2.sh b/scripts/run_reg2grb2.sh index e1b1e927bf..2284088f47 100755 --- a/scripts/run_reg2grb2.sh +++ b/scripts/run_reg2grb2.sh @@ -1,5 +1,6 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" #requires grib_util module @@ -69,3 +70,4 @@ $executable > reg2grb2.$CDATE.$IDATE.out grid2p05="0 6 0 0 0 0 0 0 720 361 0 0 90000000 0 48 -90000000 359500000 500000 500000 0" #### $NWPROD/util/exec/copygb2 -g "${grid2p05}" -i0 -x $outfile $outfile0p5 $COPYGB2 -g "${grid2p05}" -i0 -x $outfile $outfile0p5 + diff --git a/scripts/run_regrid.sh b/scripts/run_regrid.sh index 2e59e0aafe..6d18eeb693 100755 --- a/scripts/run_regrid.sh +++ b/scripts/run_regrid.sh @@ -1,7 +1,7 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" -echo "Entered $0" MOM6REGRID=${MOM6REGRID:-$HOMEgfs} export EXEC_DIR=$MOM6REGRID/exec export USH_DIR=$MOM6REGRID/ush @@ -23,3 +23,4 @@ ls -alrt $NCL $USH_DIR/icepost.ncl $NCL $USH_DIR/ocnpost.ncl ##################################################################### + diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 8eb4585922..af00b790c6 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -210,9 +210,9 @@ if [ -d ../sorc/gsi_monitor.fd ]; then $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh . $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh . 
$LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh . + $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/rstprod.sh . fi - #------------------------------ #--link executables #------------------------------ diff --git a/ush/cplvalidate.sh b/ush/cplvalidate.sh index 29db7b3ad9..754fa72102 100755 --- a/ush/cplvalidate.sh +++ b/ush/cplvalidate.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## This script validates $confignamevarfornems diff --git a/ush/drive_makeprepbufr.sh b/ush/drive_makeprepbufr.sh index e945b36743..31154c10bb 100755 --- a/ush/drive_makeprepbufr.sh +++ b/ush/drive_makeprepbufr.sh @@ -1,4 +1,5 @@ -#!/bin/sh -x +#! /usr/bin/env bash + ############################################################### # < next few lines under version control, D O N O T E D I T > # $Date$ @@ -17,6 +18,8 @@ ## CDUMP : cycle name (gdas / gfs) ############################################################### +source "$HOMEgfs/ush/preamble.sh" + ############################################################### # Source relevant configs configs="base prep prepbufr" @@ -135,4 +138,5 @@ $NCP $DATA/prepbufr.acft_profiles $COMOUT/${APREFIX}prepbufr.acft_profiles ############################################################### # Exit out cleanly if [ $KEEPDATA = "NO" ] ; then rm -rf $DATA ; fi + exit 0 diff --git a/ush/file_utils.sh b/ush/file_utils.sh new file mode 100644 index 0000000000..544a270b0a --- /dev/null +++ b/ush/file_utils.sh @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +nb_copy() { + # + # TODO - Extend this to take multiple arguments for file_in (like cp) + # + # Copy a file if it exists, print a warning otherwise but don't + # error. 
+ # + # Syntax + # nb_copy file_in file_out + # + # Arguments + # file_in: the file to copy + # file_out: the destination of the copy + # + # Environment variables + # NCP: Command to use to copy (default: cp) + # + local file_in="${1}" + local file_out="${2}" + if [[ -f ${file_in} ]]; then + ${NCP:-cp} ${file_in} ${file_out} + else + echo "WARNING: No file ${file_in} found (pwd: $(pwd))" + fi +} diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh index 206fa1884c..f3823cde99 100755 --- a/ush/forecast_det.sh +++ b/ush/forecast_det.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## "forecast_det.sh" @@ -8,7 +8,6 @@ ## This script is a definition of functions. ##### - # For all non-evironment variables # Cycling and forecast hour specific parameters diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 140e41ba55..089ff7dd5b 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## This script defines functions for data I/O and namelist. @@ -91,7 +91,7 @@ EOF exit 1 fi $NLN $increment_file $DATA/INPUT/fv_increment$i.nc - IAU_INC_FILES="'fv_increment$i.nc',$IAU_INC_FILES" + IAU_INC_FILES="'fv_increment$i.nc',${IAU_INC_FILES:-}" done read_increment=".false." 
res_latlon_dynamics="" @@ -643,7 +643,7 @@ WW3_postdet() { export WRDIR=${ROTDIR}/${CDUMPRSTwave}.${WRPDY}/${WRcyc}/wave/restart export RSTDIR_WAVE=$ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/restart export datwave=$COMOUTwave/rundata - export wavprfx=${CDUMPwave}${WAV_MEMBER} + export wavprfx=${CDUMPwave}${WAV_MEMBER:-} #Copy initial condition files: for wavGRD in $waveGRD ; do @@ -651,7 +651,7 @@ WW3_postdet() { if [ $RERUN = "NO" ]; then waverstfile=${WRDIR}/${sPDY}.${scyc}0000.restart.${wavGRD} else - waverstfile=${RSTDIR_WAVE}/${PDYT}.${cyct}0000.restart.${wavGRD} + waverstfile=${RSTDIR_WAVE}/${PDYT}.${cyct}0000.restart.${wavGRD} fi else waverstfile=${RSTDIR_WAVE}/${sPDY}.${scyc}0000.restart.${wavGRD} @@ -820,7 +820,7 @@ MOM6_postdet() { if [ $fhr = 'anl' ]; then continue fi - if [ -z $last_fhr ]; then + if [ -z ${last_fhr:-} ]; then last_fhr=$fhr continue fi @@ -997,7 +997,8 @@ GOCART_rc() { cat ${AERO_CONFIG_DIR}/ExtData.${AERO_EMIS_FIRE:-none} ; \ echo "%%" ; \ } > $DATA/AERO_ExtData.rc - [[ $status -ne 0 ]] && exit $status + status=$? + if (( status != 0 )); then exit $status; fi fi fi } diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 6965d7f30e..947fae59cd 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## "forecast_def.sh" @@ -8,7 +8,6 @@ ## This script is a definition of functions. 
##### - # For all non-evironment variables # Cycling and forecast hour specific parameters common_predet(){ @@ -125,9 +124,9 @@ FV3_GFS_predet(){ cores_per_node=${cores_per_node:-${npe_node_fcst:-24}} ntiles=${ntiles:-6} if [ $MEMBER -lt 0 ]; then - NTASKS_TOT=${NTASKS_TOT:-$npe_fcst_gfs} + NTASKS_TOT=${NTASKS_TOT:-${npe_fcst_gfs:-0}} else - NTASKS_TOT=${NTASKS_TOT:-$npe_efcs} + NTASKS_TOT=${NTASKS_TOT:-${npe_efcs:-0}} fi TYPE=${TYPE:-"nh"} # choices: nh, hydro diff --git a/ush/fv3gfs_downstream_nems.sh b/ush/fv3gfs_downstream_nems.sh index 68da1ce8b8..a1257ec4af 100755 --- a/ush/fv3gfs_downstream_nems.sh +++ b/ush/fv3gfs_downstream_nems.sh @@ -1,5 +1,4 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash #----------------------------------------------------------------------- #-Hui-Ya Chuang, January 2014: First version. @@ -31,8 +30,7 @@ set -x # 1. Modify sea icea cover via land-sea mask. #----------------------------------------------------------------------- - -echo "!!!!!CREATING $RUN DOWNSTREAM PRODUCTS FOR FH = $FH !!!!!!" +source "$HOMEgfs/ush/preamble.sh" "$FH" export downset=${downset:-1} export DATA=${DATA:-/ptmpd2/$LOGNAME/test} @@ -130,16 +128,27 @@ while [ $nset -le $totalset ]; do # if final record of each piece is ugrd, add vgrd # copygb will only interpolate u and v together #$WGRIB2 -d $end $tmpfile |grep -i ugrd - $WGRIB2 -d $end $tmpfile |egrep -i "ugrd|ustm|uflx|u-gwd" + # grep returns 1 if no match is found, so temporarily turn off exit on non-zero rc + set +e + $WGRIB2 -d $end $tmpfile | egrep -i "ugrd|ustm|uflx|u-gwd" export rc=$? + ${ERR_EXIT_ON:-set -eu} if [[ $rc -eq 0 ]] ; then export end=$(expr ${end} + 1) + elif [[ $rc -gt 1 ]]; then + echo "FATAL: WGRIB2 failed with error code ${rc}" + exit $rc fi - # if final record is land, add next record icec - $WGRIB2 -d $end $tmpfile |egrep -i "land" + # if final record is land, add next record icec + set +e + $WGRIB2 -d $end $tmpfile | egrep -i "land" export rc=$? 
+ ${ERR_EXIT_ON:-set -eu} if [[ $rc -eq 0 ]] ; then export end=$(expr ${end} + 1) + elif [[ $rc -gt 1 ]]; then + echo "FATAL: WGRIB2 failed with error code ${rc}" + exit $rc fi if [ $iproc -eq $nproc ]; then export end=$ncount @@ -176,6 +185,7 @@ while [ $nset -le $totalset ]; do echo "$nm $line" >> $DATA/poescript_srun nm=$((nm+1)) done + nm=$(wc -l < $DATA/poescript_srun) ${launcher:-"srun --export=ALL"} -n $nm --multi-prog $DATA/poescript_srun else $launcher @@ -283,8 +293,4 @@ while [ $nset -le $totalset ]; do export nset=$(expr $nset + 1 ) done -echo "!!!!!!CREATION OF SELECT $RUN DOWNSTREAM PRODUCTS COMPLETED FOR FHR = $FH !!!!!!!" -#--------------------------------------------------------------- - - exit 0 diff --git a/ush/fv3gfs_dwn_nems.sh b/ush/fv3gfs_dwn_nems.sh index b49daee45c..eb29445b36 100755 --- a/ush/fv3gfs_dwn_nems.sh +++ b/ush/fv3gfs_dwn_nems.sh @@ -1,5 +1,4 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash # this script generates 0.25/0.5/1/2.5 deg pgb files for each small Grib file # Hui-Ya Chuang 01/2014: First Version @@ -12,6 +11,8 @@ set -x # Wen Meng 10/2019: Use bilinear interpolation for LAND, It can trancate land-sea mask as 0 or 1. # Wen Meng 11/2019: Teak sea ice cover via land-sea mask. +source "$HOMEgfs/ush/preamble.sh" + export tmpfile=$1 export fhr3=$2 export iproc=$3 @@ -42,6 +43,7 @@ export grid2p5="latlon 0:144:2.5 90:73:-2.5" export PGB1F=${PGB1F:-"NO"} export PGBS=${PGBS:-"NO"} +optncpu=${optncpu:-} if [ $nset = 1 ]; then if [ "$PGBS" = "YES" ]; then @@ -105,5 +107,3 @@ fi # $CNVGRIB -g21 pgb2file_${fhr3}_${iproc}_1p0 pgbfile_${fhr3}_${iproc}_1p0 # $CNVGRIB -g21 pgb2file_${fhr3}_${iproc}_2p5 pgbfile_${fhr3}_${iproc}_2p5 #---------------------------------------------------------------------------------------------- - -exit 0 diff --git a/ush/fv3gfs_nc2nemsio.sh b/ush/fv3gfs_nc2nemsio.sh index 4b239e18a3..99eea9ce5f 100755 --- a/ush/fv3gfs_nc2nemsio.sh +++ b/ush/fv3gfs_nc2nemsio.sh @@ -1,5 +1,5 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash + #---------------------------------------------------------------------------- #--Fanglin Yang, October 2016: convert FV3 NetCDF files to NEMSIO format. # Note FV3 lat-lon grid is located at the center of each grid box, @@ -8,6 +8,8 @@ set -x # X(1,1)=[0.25E,89.75S], X(nlon,nlat)=[359.75E,89.75N] #--------------------------------------------------------------------------- +source "$HOMEgfs/ush/preamble.sh" + export CDATE=${CDATE:-"2016100300"} export GG=${master_grid:-"0p25deg"} # 1deg 0p5deg 0p25deg 0p125deg export FHZER=${FHZER:-6} # accumulation bucket in hours @@ -67,5 +69,5 @@ for fhour in $(echo $fdiag | sed "s/,/ /g"); do done #--------------------------------------------------- -echo $(date) EXITING $0 with return code $err >&2 + exit $err diff --git a/ush/fv3gfs_regrid_nemsio.sh b/ush/fv3gfs_regrid_nemsio.sh index 19a050520f..7b92c27cde 100755 --- a/ush/fv3gfs_regrid_nemsio.sh +++ b/ush/fv3gfs_regrid_nemsio.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ################################################################################ # UNIX Script Documentation Block @@ -19,12 +19,7 @@ # Language: Portable Operating System Interface (POSIX) Shell ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = YES ] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" #------------------------------------------------------- # Directories and paths @@ -120,8 +115,5 @@ for ftype in atm sfc; do done #------------------------------------------------------------------ -set +x -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err diff --git a/ush/fv3gfs_remap.sh b/ush/fv3gfs_remap.sh index d5258e0975..b1c3546d97 100755 --- a/ush/fv3gfs_remap.sh +++ b/ush/fv3gfs_remap.sh @@ -1,11 +1,12 @@ -#!/bin/ksh -set -ax +#! 
/usr/bin/env bash #-------------------------------------- #-- remap FV3 6 tiles to global array #-- Fanglin Yang, October 2016 #-------------------------------------- +source "$HOMEgfs/ush/preamble.sh" + export CDATE=${CDATE:-"2016100300"} export CASE=${CASE:-"C192"} # C48 C96 C192 C384 C768 C1152 C3072 export GG=${master_grid:-"0p25deg"} # 1deg 0p5deg 0p25deg 0p125deg @@ -114,6 +115,5 @@ for type in atmos_4xdaily nggps2d nggps3d ; do done -echo $(date) EXITING $0 with return code $err >&2 exit $err diff --git a/ush/fv3gfs_remap_weights.sh b/ush/fv3gfs_remap_weights.sh index a4140a7c88..15dfc73e3f 100755 --- a/ush/fv3gfs_remap_weights.sh +++ b/ush/fv3gfs_remap_weights.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash #BSUB -L /bin/sh #BSUB -P FV3GFS-T2O #BSUB -oo log.weights diff --git a/ush/gaussian_sfcanl.sh b/ush/gaussian_sfcanl.sh index 866cd6d255..147afd5497 100755 --- a/ush/gaussian_sfcanl.sh +++ b/ush/gaussian_sfcanl.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -109,12 +110,7 @@ # ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"NO"} -if [[ "$VERBOSE" = "YES" ]] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" CASE=${CASE:-C768} res=$(echo $CASE | cut -c2-) @@ -144,7 +140,7 @@ DATA=${DATA:-$(pwd)} COMOUT=${COMOUT:-$(pwd)} # Filenames. 
-XC=${XC} +XC=${XC:-} GAUSFCANLEXE=${GAUSFCANLEXE:-$EXECgfs/gaussian_sfcanl.exe} SIGLEVEL=${SIGLEVEL:-$FIXam/global_hyblev.l${LEVSP1}.txt} @@ -160,7 +156,7 @@ export REDERR=${REDERR:-'2>'} # Set defaults ################################################################################ # Preprocessing -$INISCRIPT +${INISCRIPT:-} pwd=$(pwd) if [[ -d $DATA ]] then @@ -236,10 +232,5 @@ $ERRSCRIPT||exit 2 # Postprocessing cd $pwd [[ $mkdata = YES ]]&&rmdir $DATA -$ENDSCRIPT -set +x -if [[ "$VERBOSE" = "YES" ]] -then - echo $(date) EXITING $0 with return code $err >&2 -fi -exit $err + +exit ${err} diff --git a/ush/getdump.sh b/ush/getdump.sh index e5487e34a2..462ca5e755 100755 --- a/ush/getdump.sh +++ b/ush/getdump.sh @@ -1,5 +1,6 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" COMPONENT=${COMPONENT:-atmos} @@ -38,6 +39,3 @@ else fi exit 0 - - - diff --git a/ush/getges.sh b/ush/getges.sh index 62ce0eea3f..2fb54fccc7 100755 --- a/ush/getges.sh +++ b/ush/getges.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################################ # # Name: getges.sh Author: Mark Iredell @@ -74,6 +75,9 @@ # ################################################################################ #------------------------------------------------------------------------------- + +source "$HOMEgfs/ush/preamble.sh" + # Set some default parameters. fhbeg=03 # hour to begin searching backward for guess fhinc=03 # hour to increment backward in search @@ -151,7 +155,7 @@ if [[ $gfile = '?' || $# -gt 1 || $err -ne 0 || -z $valid ||\ fi exit 1 fi -[[ $quiet = NO ]]&&set -x + if [[ $envir != prod && $envir != test && $envir != para && $envir != dump && $envir != pr? 
&& $envir != dev ]];then netwk=$envir envir=prod @@ -1345,8 +1349,9 @@ while [[ $fh -le $fhend ]];do ghp2=$fhp2;[[ $ghp2 -lt 100 ]]&&ghp2=0$ghp2 ghp3=$fhp3;[[ $ghp3 -lt 100 ]]&&ghp3=0$ghp3 id=$($NDATE -$fh $valid) - typeset -L8 day=$id - typeset -R2 cyc=$id + + day=$(echo $id | xargs | cut -c8) + cyc=$(echo $id | xargs | rev | cut -c1-2 | rev) eval list=\$getlist$fh [[ -z "$list" ]]&&list=${geslist} for ges_var in $list;do @@ -1369,8 +1374,10 @@ fi # Either copy guess to a file or write guess name to standard output. if [[ -z "$gfile" ]];then echo $ges - exit $? + err=$? else cp $ges $gfile - exit $? + err=$? fi + +exit ${err} diff --git a/ush/gfs_bfr2gpk.sh b/ush/gfs_bfr2gpk.sh index 5971817f00..c11ec62735 100755 --- a/ush/gfs_bfr2gpk.sh +++ b/ush/gfs_bfr2gpk.sh @@ -1,4 +1,5 @@ -#!/bin/sh +#! /usr/bin/env bash + ######################################################################### # # # Script: gfs_bfr2gpk # @@ -9,7 +10,7 @@ # Log: # # K. Brill/HPC 04/12/05 # ######################################################################### -set -x +source "$HOMEgfs/ush/preamble.sh" # Set GEMPAK paths. diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh index 5788bbcc43..07bebd5ac0 100755 --- a/ush/gfs_bufr.sh +++ b/ush/gfs_bufr.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + # # UTILITY SCRIPT NAME : gfsbufr.sh # AUTHOR : Hua-Lu Pan @@ -16,10 +17,9 @@ # 2018-05-22 Guang Ping Lou: Making it work for both GFS and FV3GFS # 2018-05-30 Guang Ping Lou: Make sure all files are available. 
# 2019-10-10 Guang Ping Lou: Read in NetCDF files -echo "History: February 2003 - First implementation of this utility script" +# echo "History: February 2003 - First implementation of this utility script" # - -set -ax +source "$HOMEgfs/ush/preamble.sh" if test "$F00FLAG" = "YES" then @@ -112,4 +112,6 @@ ln -sf ${STNLIST:-$PARMbufrsnd/bufr_stalist.meteo.gfs} fort.8 ln -sf $PARMbufrsnd/bufr_ij13km.txt fort.7 ${APRUN_POSTSND} $EXECbufrsnd/gfs_bufr < gfsparm > out_gfs_bufr_$FEND -export err=$?;err_chk +export err=$? + +exit ${err} diff --git a/ush/gfs_bufr_netcdf.sh b/ush/gfs_bufr_netcdf.sh index 9733e02c3d..30d7631da3 100755 --- a/ush/gfs_bufr_netcdf.sh +++ b/ush/gfs_bufr_netcdf.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + # # UTILITY SCRIPT NAME : gfsbufr.sh # AUTHOR : Hua-Lu Pan @@ -16,10 +17,9 @@ # 2018-05-22 Guang Ping Lou: Making it work for both GFS and FV3GFS # 2018-05-30 Guang Ping Lou: Make sure all files are available. # 2019-10-10 Guang Ping Lou: Read in NetCDF files -echo "History: February 2003 - First implementation of this utility script" +# echo "History: February 2003 - First implementation of this utility script" # - -set -ax +source "$HOMEgfs/ush/preamble.sh" if test "$F00FLAG" = "YES" then @@ -112,4 +112,6 @@ ln -sf ${STNLIST:-$PARMbufrsnd/bufr_stalist.meteo.gfs} fort.8 ln -sf $PARMbufrsnd/bufr_ij13km.txt fort.7 ${APRUN_POSTSND} $EXECbufrsnd/gfs_bufr < gfsparm > out_gfs_bufr_$FEND -export err=$?;err_chk +export err=$? + +exit ${err} diff --git a/ush/gfs_nceppost.sh b/ush/gfs_nceppost.sh index 6c75572d10..a8bee62d4d 100755 --- a/ush/gfs_nceppost.sh +++ b/ush/gfs_nceppost.sh @@ -1,3 +1,4 @@ +#! /usr/bin/env bash ################################################################################ #### UNIX Script Documentation Block @@ -187,16 +188,13 @@ #### ################################################################################ # Set environment. 
-export VERBOSE=${VERBOSE:-"NO"} -if [[ "$VERBOSE" = "YES" ]]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" + # Command line arguments. -export SIGINP=${1:-${SIGINP}} -export FLXINP=${2:-${FLXINP}} -export FLXIOUT=${3:-${FLXIOUT}} -export PGBOUT=${4:-${PGBOUT}} +export SIGINP=${1:-${SIGINP:-}} +export FLXINP=${2:-${FLXINP:-}} +export FLXIOUT=${3:-${FLXIOUT:-}} +export PGBOUT=${4:-${PGBOUT:-}} #export PGIOUT=${5:-${PGIOUT}} export PGIOUT=${PGIOUT:-pgb.idx} export IO=${6:-${IO:-0}} @@ -210,15 +208,15 @@ export EXECgfs=${EXECgfs:-$NWPROD/exec} export USHgfs=${USHgfs:-$NWPROD/ush} export DATA=${DATA:-$(pwd)} # Filenames. -export MP=${MP:-$([[ $LOADL_STEP_TYPE = PARALLEL ]]&&echo "p"||echo "s")} -export XC=${XC} +export MP=${MP:-$([[ ${LOADL_STEP_TYPE:-SERIAL} = PARALLEL ]]&&echo "p"||echo "s")} +export XC=${XC:-} export POSTGPEXEC=${POSTGPEXEC:-${EXECgfs}/gfs_ncep_post} export OVERPARMEXEC=${OVERPARMEXEC:-${EXECgfs}/overparm_grib} export POSTGPLIST=${POSTGPLIST:-/dev/null} -export INISCRIPT=${INISCRIPT} +export INISCRIPT=${INISCRIPT:-} export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} -export LOGSCRIPT=${LOGSCRIPT} -export ENDSCRIPT=${ENDSCRIPT} +export LOGSCRIPT=${LOGSCRIPT:-} +export ENDSCRIPT=${ENDSCRIPT:-} export GFSOUT=${GFSOUT:-gfsout} export CTLFILE=${CTLFILE:-$NWPROD/parm/gfs_cntrl.parm} #export MODEL_OUT_FORM=${MODEL_OUT_FORM:-binarynemsiompiio} @@ -236,15 +234,13 @@ export GENPSICHI=${GENPSICHI:-NO} export GENPSICHIEXE=${GENPSICHIEXE:-${EXECgfs}/genpsiandchi} export ens=${ens:-NO} #export D3DINP=${D3DINP:-/dev/null} -typeset -L1 l=$PGMOUT +l=$(echo $PGMOUT | xargs | cut -c1) [[ $l = '&' ]]&&a=''||a='>' export REDOUT=${REDOUT:-'1>'$a} -typeset -L1 l=$PGMERR +l=$(echo $PGMERR | xargs | cut -c1) [[ $l = '&' ]]&&a=''||a='>' export REDERR=${REDERR:-'2>'$a} ################################################################################ -# Preprocessing -$INISCRIPT # Chuang: Run chgres if OUTTYP=1 or 0 @@ -420,9 +416,5 
@@ fi # Postprocessing cd $pwd [[ $mkdata = YES ]]&&rmdir $DATA -$ENDSCRIPT -set +x -if [[ "$VERBOSE" = "YES" ]]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err diff --git a/ush/gfs_sndp.sh b/ush/gfs_sndp.sh index 53bc6fd9d6..a0616e27b4 100755 --- a/ush/gfs_sndp.sh +++ b/ush/gfs_sndp.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################ # Script Name: gfs_sndp.sh # Script Description: Format GFS BUFR sounding files for AWIPS @@ -6,7 +7,7 @@ # 1) 2004-09-10 Steve Gilbert First Implementation ################################################################ -set -x +source "$HOMEgfs/ush/preamble.sh" # Create "collectives" consisting of groupings of the soundings # into files designated by geographical region. Each input @@ -16,7 +17,6 @@ export m=$1 mkdir $DATA/$m cd $DATA/$m cp $FIXbufrsnd/gfs_collective${m}.list $DATA/$m/. -set +x CCCC=KWBC file_list=gfs_collective${m}.list @@ -60,7 +60,6 @@ EOF rm $DATA/${m}/bufrout done -set -x # if test $SENDCOM = 'NO' if test $SENDCOM = 'YES' then @@ -71,8 +70,3 @@ set -x cp $DATA/${m}/gfs_collective$m.fil ${COMOUT}/bufr.${cycle}/. fi -## let "m=m+1" - -## done - -#exit diff --git a/ush/gfs_transfer.sh b/ush/gfs_transfer.sh index f8b00ea623..9d23ec849a 100755 --- a/ush/gfs_transfer.sh +++ b/ush/gfs_transfer.sh @@ -1,21 +1,22 @@ -#!/bin/ksh +#! /usr/bin/env bash ##################################################################### -echo "-----------------------------------------------------" -echo " Script: gfs_transfer.sh" -echo " " -echo " Purpose - Copy GFS Posts to /nwges and /com" -echo " Alert posted files to DBNet" -echo " " -echo " History - " -echo " Cooke - 04/21/05 - Inital version, based off of" -echo " global_transfer.sh" -echo " Meng - 01/04/18 - Remove writing data file to /nwges." 
-echo " Meng - 09/14/20 - Update model output format to netcdf for GFS V16" -echo "-----------------------------------------------------" +# echo "-----------------------------------------------------" +# echo " Script: gfs_transfer.sh" +# echo " " +# echo " Purpose - Copy GFS Posts to /nwges and /com" +# echo " Alert posted files to DBNet" +# echo " " +# echo " History - " +# echo " Cooke - 04/21/05 - Inital version, based off of" +# echo " global_transfer.sh" +# echo " Meng - 01/04/18 - Remove writing data file to /nwges." +# echo " Meng - 09/14/20 - Update model output format to netcdf for GFS V16" +# echo "-----------------------------------------------------" ##################################################################### -set -xa - + +source "$HOMEgfs/ush/preamble.sh" + # export CNVGRIB=/nwprod/util/exec/cnvgrib # export GRB2INDX=/nwprod/util/exec/grb2index # export WGRIB2=/nwprod/util/exec/wgrib2 diff --git a/ush/gfs_truncate_enkf.sh b/ush/gfs_truncate_enkf.sh index 8d9e2b959c..c7bdfad0c4 100755 --- a/ush/gfs_truncate_enkf.sh +++ b/ush/gfs_truncate_enkf.sh @@ -1,6 +1,6 @@ -#!/bin/ksh +#! /usr/bin/env bash -set -x +source "$HOMEgfs/ush/preamble.sh" member=$1 export SIGINP=$2 @@ -45,12 +45,11 @@ export APRUNC=${APRUNC:-""} export VERBOSE=YES echo "execute $CHGRESSH for $member" -eval "$CHGRESSH" +$CHGRESSH rc=$? export ERR=$rc export err=$ERR -echo EXITING $0 with return code $err exit $err diff --git a/ush/global_extrkr.sh b/ush/global_extrkr.sh index 67624a9898..ad0b249b28 100755 --- a/ush/global_extrkr.sh +++ b/ush/global_extrkr.sh @@ -1,54 +1,51 @@ -#!/bin/ksh +#! 
/usr/bin/env bash -#module load ics -export PS4='+t+$SECONDS extrkr.sh:$LINENO -- ' +source "$HOMEgfs/ush/preamble.sh" userid=$LOGNAME -set +x ############################################################################## -cat< $(date)" -set -x +${TRACE_ON:-set -x} set +x echo " " echo "TIMING: Before call to gettrk at $(date)" echo " " -set -x +${TRACE_ON:-set -x} ##/usrx/local/bin/getrusage -a /hwrf/save/Qingfu.Liu/trak/para/exec/gettrk <${namelist} @@ -1462,11 +1456,11 @@ set +x echo " " echo "TIMING: After call to gettrk at $(date)" echo " " -set -x +${TRACE_ON:-set -x} set +x echo "+++ TIMING: AFTER gettrk ---> $(date)" -set -x +${TRACE_ON:-set -x} #--------------------------------------------------------------# # Send a message to the jlogfile for each storm that used @@ -1496,7 +1490,7 @@ echo " -----------------------------------------------" echo " NOW COPYING OUTPUT TRACK FILES TO COM " echo " -----------------------------------------------" echo " " -set -x +${TRACE_ON:-set -x} if [[ ! -e "$track_file_path" ]] ; then $postmsg "$jlogfile" "WARNING: tracker output file does not exist. This is probably an error. File: $track_file_path" @@ -1665,12 +1659,12 @@ if [ ${gettrk_rcc} -eq 0 ]; then echo " " echo "+++ Adding records to TPC ATCFUNIX directory: /tpcprd/atcf_unix/${at}${NO}${syyyy}" echo " " - set -x + ${TRACE_ON:-set -x} else set +x echo " " echo "There is no TPC ATCFUNIX directory for: /tpcprd/atcf_unix/${at}${NO}${syyyy}" - set -x + ${TRACE_ON:-set -x} fi done fi @@ -1697,7 +1691,7 @@ else echo "!!! model= ${atcfout}, forecast initial time = ${PDY}${CYL}" echo "!!! Exiting...." echo " " - set -x + ${TRACE_ON:-set -x} err_exit " FAILED ${jobid} - ERROR RUNNING GETTRK IN TRACKER SCRIPT- ABNORMAL EXIT" fi diff --git a/ush/global_nceppost.sh b/ush/global_nceppost.sh index cbc9ba6ccc..45aebe8e4f 100755 --- a/ush/global_nceppost.sh +++ b/ush/global_nceppost.sh @@ -1,3 +1,4 @@ +#! 
/usr/bin/env bash ################################################################################ #### UNIX Script Documentation Block @@ -183,11 +184,8 @@ #### ################################################################################ # Set environment. -export VERBOSE=${VERBOSE:-"NO"} -if [[ "$VERBOSE" = "YES" ]]; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" + # Command line arguments. export SIGINP=${1:-${SIGINP}} export FLXINP=${2:-${FLXINP}} @@ -233,15 +231,13 @@ export GENPSICHI=${GENPSICHI:-NO} export GENPSICHIEXE=${GENPSICHIEXE:-${EXECglobal}/genpsiandchi} export ens=${ens:-NO} #export D3DINP=${D3DINP:-/dev/null} -typeset -L1 l=$PGMOUT +l=$(echo $PGMOUT | xargs | cut -c1) [[ $l = '&' ]]&&a=''||a='>' export REDOUT=${REDOUT:-'1>'$a} -typeset -L1 l=$PGMERR +l=$(echo $PGMERR | xargs | cut -c1) [[ $l = '&' ]]&&a=''||a='>' export REDERR=${REDERR:-'2>'$a} ################################################################################ -# Preprocessing -$INISCRIPT # Chuang: Run chgres if OUTTYP=1 or 0 @@ -484,9 +480,5 @@ fi # Postprocessing cd $pwd [[ $mkdata = YES ]]&&rmdir $DATA -$ENDSCRIPT -set +x -if [[ "$VERBOSE" = "YES" ]]; then - echo $(date) EXITING $0 with return code $err >&2 -fi + exit $err diff --git a/ush/global_savefits.sh b/ush/global_savefits.sh index 9efbf778af..f26132dd8a 100755 --- a/ush/global_savefits.sh +++ b/ush/global_savefits.sh @@ -1,9 +1,10 @@ -#!/bin/ksh -set -xeua +#! /usr/bin/env bash ######################################################## # save fit and horiz files for all analysis cycles ######################################################## +source "$HOMEgfs/ush/preamble.sh" + export FIT_DIR=${FIT_DIR:-$COMOUT/fits} export HORZ_DIR=${HORZ_DIR:-$COMOUT/horiz} export fh1=06 diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index ab90f3351a..9785de98ac 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -1,11 +1,10 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash ################################################### # Fanglin Yang, 20180318 # --create bunches of files to be archived to HPSS ################################################### - +source "$HOMEgfs/ush/preamble.sh" type=${1:-gfs} ##gfs, gdas, enkfgdas or enkfggfs diff --git a/ush/inter_flux.sh b/ush/inter_flux.sh index 98a9248caf..6b3d434069 100755 --- a/ush/inter_flux.sh +++ b/ush/inter_flux.sh @@ -1,5 +1,6 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" "$FH" #----------------------------------------------------------------------- #-Wen Meng, 03/2019: First version. @@ -7,9 +8,6 @@ set -x # into lat-lon grids. #----------------------------------------------------------------------- - -echo "!!!!!CREATING $RUN FLUX PRODUCTS FOR FH = $FH !!!!!!" - export CNVGRIB=${CNVGRIB:-${NWPROD:-/nwprod}/util/exec/cnvgrib21} export COPYGB2=${COPYGB2:-${NWPROD:-/nwprod}/util/exec/copygb2} export WGRIB2=${WGRIB2:-${NWPROD:-/nwprod}/util/exec/wgrib2} @@ -62,8 +60,6 @@ fi cp fluxfile_${fhr3}_1p00 $COMOUT/${PREFIX}flux.1p00.f${fhr3} #--------------------------------------------------------------- -echo "!!!!!CREATION OF SELECT $RUN FLUX PRODUCTS COMPLETED FOR FHR = $FH !!!!!" -#--------------------------------------------------------------- exit 0 diff --git a/ush/link_crtm_fix.sh b/ush/link_crtm_fix.sh index 3307b5f6dd..0d4d8dc55b 100755 --- a/ush/link_crtm_fix.sh +++ b/ush/link_crtm_fix.sh @@ -1,4 +1,6 @@ -#! /bin/sh +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" # Get CRTM fix directory from (in this order): # 1. First argument to script, or diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh index e78ec23bf6..3979d9184d 100755 --- a/ush/load_fv3gfs_modules.sh +++ b/ush/load_fv3gfs_modules.sh @@ -1,11 +1,14 @@ -#!/bin/sh +#! /usr/bin/env bash ############################################################### +if [[ "${DEBUG_WORKFLOW:-NO}" == "NO" ]]; then + echo "Loading modules quietly..." 
+ set +x +fi + # Setup runtime environment by loading modules ulimit_s=$( ulimit -S -s ) -set +x - # Find module command and purge: source "$HOMEgfs/modulefiles/module-setup.sh.inc" @@ -31,8 +34,8 @@ else echo WARNING: UNKNOWN PLATFORM fi -set -x - # Restore stack soft limit: ulimit -S -s "$ulimit_s" unset ulimit_s + +${TRACE_ON:-set -x} diff --git a/ush/mod_icec.sh b/ush/mod_icec.sh index bb8c22182f..f62131846e 100755 --- a/ush/mod_icec.sh +++ b/ush/mod_icec.sh @@ -1,13 +1,15 @@ -#!/bin/sh -set -x +#! /usr/bin/env bash + #This script is used for modifing icee via land-sea mask #Wen Meng 11/2019: First Version +source "$HOMEgfs/ush/preamble.sh" + f=$1 export WGRIB2=${WGRIB2:-${NWPROD:-/nwprod}/util/exec/wgrib2} -$WGRIB2 $optncpu $f \ +$WGRIB2 ${optncpu:-} $f \ -if 'LAND' -rpn 'sto_1' -fi \ -if 'ICEC' -rpn 'rcl_1:0:==:*' -fi \ -set_grib_type same \ @@ -17,7 +19,3 @@ export err=$?; err_chk mv $f.new $f exit 0 - -#-if 'ICEC' -rpn 'rcl_1:-1:*:1:+:*' -fi \ - - diff --git a/ush/nems_configure.sh b/ush/nems_configure.sh index 990272a41f..04fea90f35 100755 --- a/ush/nems_configure.sh +++ b/ush/nems_configure.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## This script writes nems.configure file diff --git a/ush/parsing_model_configure_DATM.sh b/ush/parsing_model_configure_DATM.sh index a2e7c8c918..ecd3fa6dd6 100755 --- a/ush/parsing_model_configure_DATM.sh +++ b/ush/parsing_model_configure_DATM.sh @@ -1,4 +1,4 @@ -#! /bin/sh +#! /usr/bin/env bash ##### ## "parsing_model_configure_DATM.sh" diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh index 4c35179e90..4574b6e352 100755 --- a/ush/parsing_model_configure_FV3.sh +++ b/ush/parsing_model_configure_FV3.sh @@ -1,4 +1,4 @@ -#! /bin/sh +#! 
/usr/bin/env bash ##### ## "parsing_model_configure_FV3.sh" diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh index 0c7dbd0d43..f7e00fd070 100755 --- a/ush/parsing_namelists_CICE.sh +++ b/ush/parsing_namelists_CICE.sh @@ -1,3 +1,5 @@ +#! /usr/bin/env bash + # parsing namelist of CICE CICE_namelists(){ diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 63e46dc20a..41fdb04f1b 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -1,4 +1,5 @@ -#! /bin/sh +#! /usr/bin/env bash + ##### ## "parsing_namelist_FV3.sh" ## This script writes namelist for FV3 model @@ -34,7 +35,7 @@ EOF cat $DIAG_TABLE >> diag_table fi -if [ ! -z "${AERO_DIAG_TABLE}" ]; then +if [ ! -z "${AERO_DIAG_TABLE:-}" ]; then cat ${AERO_DIAG_TABLE} >> diag_table fi @@ -44,7 +45,7 @@ cat $DIAG_TABLE_APPEND >> diag_table $NCP $DATA_TABLE data_table # build field_table -if [ ! -z "${AERO_FIELD_TABLE}" ]; then +if [ ! -z "${AERO_FIELD_TABLE:-}" ]; then nrec=$( cat ${FIELD_TABLE} | wc -l ) prec=${nrec} if (( dnats > 0 )); then @@ -69,20 +70,20 @@ cat > input.nml < input.nml < input.nml < input.nml <> input.nml <> input.nml <> input.nml <> input.nml << EOF - $nam_stochy_nml + ${nam_stochy_nml:-} / EOF @@ -635,13 +636,13 @@ EOF ISEED_LNDP = ${ISEED_LNDP:-$ISEED} lndp_var_list = ${lndp_var_list} lndp_prt_list = ${lndp_prt_list} - $nam_sfcperts_nml + ${nam_sfcperts_nml:-} / EOF else cat >> input.nml << EOF &nam_sfcperts - $nam_sfcperts_nml + ${nam_sfcperts_nml:-} / EOF fi diff --git a/ush/parsing_namelists_MOM6.sh b/ush/parsing_namelists_MOM6.sh index 617c774483..49d6ea5ff5 100755 --- a/ush/parsing_namelists_MOM6.sh +++ b/ush/parsing_namelists_MOM6.sh @@ -1,3 +1,4 @@ +#! /usr/bin/env bash MOM6_namelists(){ diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh index 06d22814bb..209fe9d11a 100755 --- a/ush/parsing_namelists_WW3.sh +++ b/ush/parsing_namelists_WW3.sh @@ -1,3 +1,4 @@ +#! 
/usr/bin/env bash WW3_namelists(){ @@ -69,7 +70,7 @@ WW3_namelists(){ echo " starting time : $time_beg" echo " ending time : $time_end" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} @@ -107,7 +108,7 @@ WW3_namelists(){ then set +x echo " buoy.loc copied ($PARMwave/wave_${NET}.buoys)." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} else echo " FATAL ERROR : buoy.loc ($PARMwave/wave_${NET}.buoys) NOT FOUND" exit 12 diff --git a/ush/preamble.sh b/ush/preamble.sh new file mode 100644 index 0000000000..bfa326f103 --- /dev/null +++ b/ush/preamble.sh @@ -0,0 +1,86 @@ +#! /usr/bin/env bash + +####### +# Preamble script to be SOURCED at the beginning of every script. Sets +# useful PS4 and optionally turns on set -x and set -eu. Also sets up +# crude script timing and provides a postamble that runs on exit. +# +# Syntax: +# preamble.sh [id] +# +# Aruguments: +# id: Optional identifier string. Use when running the same script +# multiple times in the same job (e.g. MPMD) +# +# Input environment variables: +# TRACE (YES/NO): Whether to echo every command (set -x) [default: "YES"] +# STRICT (YES/NO): Whether to exit immediately on error or undefined variable +# (set -eu) [default: "YES"] +# +####### +set +x +if [[ -v '1' ]]; then + id="(${1})" +else + id="" +fi + +# Record the start time so we can calculate the elapsed time later +start_time=$(date +%s) + +# Get the base name of the calling script +_calling_script=$(basename ${BASH_SOURCE[1]}) + +# Announce the script has begun +echo "Begin ${_calling_script} at $(date -u)" + +# Stage our variables +export STRICT=${STRICT:-"YES"} +export TRACE=${TRACE:-"YES"} +export ERR_EXIT_ON="" +export TRACE_ON="" + +if [[ $STRICT == "YES" ]]; then + # Exit on error and undefined variable + export ERR_EXIT_ON="set -eu" +fi +if [[ $TRACE == "YES" ]]; then + export TRACE_ON="set -x" + # Print the script name and line number of each command as it is executed + export PS4='+ $(basename $BASH_SOURCE)[$LINENO]'"$id: " +fi 
+ +postamble() { + # + # Commands to execute when a script ends. + # + # Syntax: + # postamble script start_time rc + # + # Arguments: + # script: name of the script ending + # start_time: start time of script (in seconds) + # rc: the exit code of the script + # + + set +x + script=${1} + start_time=${2} + rc=${3} + + # Calculate the elapsed time + end_time=$(date +%s) + elapsed_sec=$((end_time - start_time)) + elapsed=$(date -d@${elapsed_sec} -u +%H:%M:%S) + + # Announce the script has ended, then pass the error code up + echo "End ${script} at $(date -u) with error code ${rc:-0} (time elapsed: ${elapsed})" + exit ${rc} +} + +# Place the postamble in a trap so it is always called no matter how the script exits +trap "postamble ${_calling_script} ${start_time} \$?" EXIT + +# Turn on our settings +$ERR_EXIT_ON +$TRACE_ON diff --git a/ush/scale_dec.sh b/ush/scale_dec.sh index 8fba2f703b..59e2bab14e 100755 --- a/ush/scale_dec.sh +++ b/ush/scale_dec.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + # # This script uses WGRIB2 to change binary scale factor # and Decimal scale factor in GRIB2 file @@ -7,7 +8,8 @@ # D = decimal scaling or the text 'same' with no quotes # B = binary scaling or the text 'same' with no quotes # -set -x + +source "$HOMEgfs/ush/preamble.sh" f=$1 @@ -22,4 +24,5 @@ $WGRIB2 $f -not_if ':(TMP|PWAT|WEASD):' -grib $f.new \ -set_scaling 0 0 -grib_out $f.new export err=$?; err_chk mv $f.new $f + exit 0 diff --git a/ush/syndat_getjtbul.sh b/ush/syndat_getjtbul.sh index dc3c0f6482..89196d0596 100755 --- a/ush/syndat_getjtbul.sh +++ b/ush/syndat_getjtbul.sh @@ -1,3 +1,4 @@ +#! 
/usr/bin/env bash # Script to recover JTWC Bulletins from Tank # $TANK_TROPCY/$ymddir/wtxtbul/tropcyc @@ -25,8 +26,7 @@ # jlogfile - path to job log file (skipped over by this script if not # passed in) - -set -xua +source "$HOMEgfs/ush/preamble.sh" EXECSYND=${EXECSYND:-${HOMESYND}/exec} @@ -39,7 +39,7 @@ positional parameter 1" echo echo $msg echo - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout set +u [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" @@ -94,7 +94,7 @@ echo " pdym1 is $pdym1" echo echo " ymddir is $ymddir" echo -set -x +${TRACE_ON:-set -x} find=$ymd" "$hour echo "looking for string $find in $jtwcdir/tropcyc" >> $pgmout @@ -159,7 +159,7 @@ set +x echo echo 'The foreground exit status for SYNDAT_GETJTBUL is ' $errget echo -set -x +${TRACE_ON:-set -x} if [ "$errget" -gt '0' ];then if [ "$errget" -eq '1' ];then msg="No JTWC bulletins in $jtwcdir/tropcyc, no JTWC tcvitals \ @@ -182,7 +182,7 @@ RETURN CODE $errget" echo echo $msg echo - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout set +u [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" @@ -194,7 +194,7 @@ rec. passed to qctropcy" echo echo $msg echo - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout set +u [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" @@ -206,7 +206,7 @@ echo "----------------------------------------------------------" echo "*********** COMPLETED PROGRAM syndat_getjtbul **********" echo "----------------------------------------------------------" echo -set -x +${TRACE_ON:-set -x} if [ "$errget" -eq '0' ];then echo "Completed JTWC tcvitals records are:" >> $pgmout diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh index 1f1f64b548..571a7543b5 100755 --- a/ush/syndat_qctropcy.sh +++ b/ush/syndat_qctropcy.sh @@ -1,5 +1,4 @@ - -set +x +#! 
/usr/bin/env bash # SCRIPT NAME : syndat_qctropcy.sh # AUTHOR : Steven Lord/Hua-Lu pan/Dennis Keyser/Diane Stokes @@ -12,19 +11,19 @@ set +x # prediction centers by the executable syndat_qctropcy # # -echo "History: JUN 1997 - First implementation of this utility script" -echo " JUL 1997 - Added tcvitals made manually by SDM; Added " -echo " jtwc/fnoc tcvitals " -echo " MAR 2000 Converted to IBM-SP " -echo " MAR 2013 Converted to WCOSS " -echo " Added option files_override which can set " -echo " namelist var used for logical variable " -echo " FILES in syndat_qctropcy to control final " -echo " copying of records and file manipulation. " -echo " (typically F for testing, otherwise not set)" -echo " Added dateck fallback if archive file misg." -echo " OCT 2013 Remove defaults for parm, exec, fix and ush " -echo " directories. These must now be passed in. " +# echo "History: JUN 1997 - First implementation of this utility script" +# echo " JUL 1997 - Added tcvitals made manually by SDM; Added " +# echo " jtwc/fnoc tcvitals " +# echo " MAR 2000 Converted to IBM-SP " +# echo " MAR 2013 Converted to WCOSS " +# echo " Added option files_override which can set " +# echo " namelist var used for logical variable " +# echo " FILES in syndat_qctropcy to control final " +# echo " copying of records and file manipulation. " +# echo " (typically F for testing, otherwise not set)" +# echo " Added dateck fallback if archive file misg." +# echo " OCT 2013 Remove defaults for parm, exec, fix and ush " +# echo " directories. These must now be passed in. " # # # Positional parameters passed in: @@ -72,7 +71,7 @@ echo " directories. These must now be passed in. 
" # (Default: not set) # TIMEIT - optional time and resource reporting (Default: not set) -set -xua +source "$HOMEgfs/ush/preamble.sh" ARCHSYND=${ARCHSYND:-$COMROOTp3/gfs/prod/syndat} HOMENHCp1=${HOMENHCp1:-/gpfs/?p1/nhc/save/guidance/storm-data/ncep} @@ -96,11 +95,8 @@ set +x echo echo $msg echo -set -x +${TRACE_ON:-set -x} echo $msg >> $pgmout -set +u -[ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" -set -u if [ "$#" -ne '1' ]; then msg="**NON-FATAL ERROR PROGRAM SYNDAT_QCTROPCY run date not in \ @@ -109,21 +105,15 @@ positional parameter 1" echo echo $msg echo - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u msg="**NO TROPICAL CYCLONE tcvitals processed --> non-fatal" set +x echo echo $msg echo - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u # Copy null files into "${COMSP}syndata.tcvitals.$tmmark" and # "${COMSP}jtwc-fnoc.tcvitals.$tmmark" so later ftp attempts will find and @@ -147,7 +137,7 @@ set +x echo echo "Run date is $CDATE10" echo -set -x +${TRACE_ON:-set -x} year=$(echo $CDATE10 | cut -c1-4) @@ -169,11 +159,8 @@ if [ $dateck_size -lt 10 ]; then echo 1900010100 > dateck set +x echo -e "\n${msg}\n" - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi @@ -201,11 +188,8 @@ if [ -n "$files_override" ]; then # for testing, typically want FILES=F fi set +x echo -e "\n${msg}\n" - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi echo " &INPUT RUNID = '${net}_${tmmark}_${cyc}', FILES = $files " > vitchk.inp @@ -289,28 +273,22 @@ set +x echo echo "The foreground exit status for SYNDAT_QCTROPCY is " $errqct echo -set -x +${TRACE_ON:-set -x} if [ "$errqct" -gt '0' ];then msg="**NON-FATAL ERROR PROGRAM SYNDAT_QCTROPCY RETURN CODE $errqct" set +x echo echo $msg echo - set -x + 
${TRACE_ON:-set -x} echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u msg="**NO TROPICAL CYCLONE tcvitals processed --> non-fatal" set +x echo echo $msg echo - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u # In the event of a ERROR in PROGRAM SYNDAT_QCTROPCY, copy null files into # "${COMSP}syndata.tcvitals.$tmmark" and "${COMSP}jtwc-fnoc.tcvitals.$tmmark" @@ -333,19 +311,7 @@ echo "----------------------------------------------------------" echo "********** COMPLETED PROGRAM syndat_qctropcy **********" echo "----------------------------------------------------------" echo -set -x - -if [ -s current ]; then - msg="program SYNDAT_QCTROPCY completed normally - tcvitals records \ -processed" -else -msg="no records available for program SYNDAT_QCTROPCY - null tcvitals file \ -produced" -fi -set +u -[ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" -set -u - +${TRACE_ON:-set -x} if [ "$copy_back" = 'YES' ]; then cat lthistry>>$ARCHSYND/syndat_lthistry.$year @@ -390,11 +356,8 @@ $HOMENHC/tcvitals successfully updated by syndat_qctropcy" echo echo $msg echo - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi else @@ -405,11 +368,8 @@ not changed by syndat_qctropcy" echo echo $msg echo - set -x + ${TRACE_ON:-set -x} echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi @@ -428,16 +388,4 @@ fi # Write JTWC/FNOC Tcvitals to /com path since not saved anywhere else [ $SENDCOM = YES ] && cp fnoc ${COMSP}jtwc-fnoc.tcvitals.$tmmark -msg="TROPICAL CYCLONE TCVITALS QC PROCESSING HAS COMPLETED FOR $CDATE10" -set +x -echo -echo $msg -echo -set -x -echo $msg >> $pgmout -echo " " >> $pgmout -set +u -[ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" -set -u - exit diff --git a/ush/trim_rh.sh b/ush/trim_rh.sh index 9140e97124..2de2e17c7b 100755 --- a/ush/trim_rh.sh +++ 
b/ush/trim_rh.sh @@ -1,14 +1,15 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash #This is scripts is used to trim RH vaule larger than 100. # Wen Meng 12/2017: First Version +source "$HOMEgfs/ush/preamble.sh" + f=$1 export WGRIB2=${WGRIB2:-${NWPROD:-/nwprod}/util/exec/wgrib2} -$WGRIB2 $optncpu $f -not_if ':RH:' -grib $f.new \ +$WGRIB2 ${optncpu:-} $f -not_if ':RH:' -grib $f.new \ -if ':RH:' -rpn "10:*:0.5:+:floor:1000:min:10:/" -set_grib_type same \ -set_scaling -1 0 -grib_out $f.new export err=$?; err_chk diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh index 44205b1846..e3a82efaf7 100755 --- a/ush/tropcy_relocate.sh +++ b/ush/tropcy_relocate.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + #### UNIX Script Documentation Block # # Script name: tropcy_relocate.sh @@ -210,7 +211,7 @@ # #### -set -aux +source "$HOMEgfs/ush/preamble.sh" MACHINE=${MACHINE:-$(hostname -s | cut -c 1-3)} @@ -255,7 +256,7 @@ then echo "problem with obtaining date record;" echo "ABNORMAL EXIT!!!!!!!!!!!" echo - set -x + ${TRACE_ON:-set -x} if [ -s $DATA/err_exit ]; then $DATA/err_exit else @@ -273,7 +274,7 @@ set +x echo echo "CENTER DATE/TIME FOR RELOCATION PROCESSING IS $CDATE10" echo -set -x +${TRACE_ON:-set -x} #---------------------------------------------------------------------------- @@ -343,7 +344,7 @@ if [ $modhr -ne 0 ]; then not a multiple of 3-hrs;" echo "ABNORMAL EXIT!!!!!!!!!!!" 
echo - set -x + ${TRACE_ON:-set -x} if [ -s $DATA/err_exit ]; then $DATA/err_exit else @@ -366,14 +367,14 @@ echo " Get TCVITALS file valid for -$fhr hrs relative to center" echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + ${TRACE_ON:-set -x} $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -f $fhr -t tcvges tcvitals.m${fhr} set +x echo echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + ${TRACE_ON:-set -x} fi done @@ -416,7 +417,7 @@ echo " Get global sigma GUESS valid for $fhr hrs relative to center" echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + ${TRACE_ON:-set -x} $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -t $stype $sges errges=$? @@ -428,7 +429,7 @@ echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" to center relocation date/time;" echo "ABNORMAL EXIT!!!!!!!!!!!" echo - set -x + ${TRACE_ON:-set -x} if [ -s $DATA/err_exit ]; then $DATA/err_exit else @@ -460,7 +461,7 @@ to center relocation date/time;" echo echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + ${TRACE_ON:-set -x} fi if [ ! -s $pges ]; then set +x @@ -470,7 +471,7 @@ echo " Get global pressure grib GUESS valid for $fhr hrs relative to center" echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + ${TRACE_ON:-set -x} $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -t $ptype $pges errges=$? @@ -482,7 +483,7 @@ echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" relative to center relocation date/time;" echo "ABNORMAL EXIT!!!!!!!!!!!" 
echo - set -x + ${TRACE_ON:-set -x} if [ -s $DATA/err_exit ]; then $DATA/err_exit else @@ -495,7 +496,7 @@ relative to center relocation date/time;" echo echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + ${TRACE_ON:-set -x} fi done @@ -567,7 +568,7 @@ else echo "$USHRELO/tropcy_relocate_extrkr.sh failed" echo "ABNORMAL EXIT!!!!!!!!!!!" echo - set -x + ${TRACE_ON:-set -x} if [ -s $DATA/err_exit ]; then $DATA/err_exit "Script $USHRELO/tropcy_relocate_extrkr.sh failed" else @@ -650,7 +651,7 @@ else # check for success # ----------------- - echo; set -x + echo; ${TRACE_ON:-set -x} if [ "$errSTATUS" -gt '0' ]; then if [ -s $DATA/err_exit ]; then $DATA/err_exit "Script RELOCATE_GES failed" @@ -737,5 +738,6 @@ $CDATE10" fi + exit 0 diff --git a/ush/tropcy_relocate_extrkr.sh b/ush/tropcy_relocate_extrkr.sh index a245dca98e..79295cead0 100755 --- a/ush/tropcy_relocate_extrkr.sh +++ b/ush/tropcy_relocate_extrkr.sh @@ -1,8 +1,9 @@ -#!/bin/ksh +#! /usr/bin/env bash + # This script is executed by the script tropcy_relocate.sh # -------------------------------------------------------- -set -aeux +source "$HOMEgfs/ush/preamble.sh" export machine=${machine:-ZEUS} export machine=$(echo $machine|tr '[a-z]' '[A-Z]') @@ -238,7 +239,7 @@ cmodel=$(echo ${cmodel} | tr "[A-Z]" "[a-z]") case ${cmodel} in - gdas) set +x; echo " "; echo " ++ operational GDAS chosen"; set -x; + gdas) set +x; echo " "; echo " ++ operational GDAS chosen"; ${TRACE_ON:-set -x}; fcstlen=9 ; fcsthrs="" for fhr in $( seq 0 $BKGFREQ 9); do @@ -271,48 +272,48 @@ case ${cmodel} in # jpdtn=0 for deterministic data. 
g2_jpdtn=0 model=8;; - gfs) set +x; echo " "; echo " ++ operational GFS chosen"; set -x; + gfs) set +x; echo " "; echo " ++ operational GFS chosen"; ${TRACE_ON:-set -x}; fcsthrsgfs=' 00 06 12 18 24 30 36 42 48 54 60 66 72 78'; gfsdir=$COMIN; gfsgfile=gfs.t${dishh}z.pgrbf; model=1;; - mrf) set +x; echo " "; echo " ++ operational MRF chosen"; set -x; + mrf) set +x; echo " "; echo " ++ operational MRF chosen"; ${TRACE_ON:-set -x}; fcsthrsmrf=' 00 12 24 36 48 60 72'; mrfdir=$COMIN; mrfgfile=drfmr.t${dishh}z.pgrbf; model=2;; - ukmet) set +x; echo " "; echo " ++ operational UKMET chosen"; set -x; + ukmet) set +x; echo " "; echo " ++ operational UKMET chosen"; ${TRACE_ON:-set -x}; fcsthrsukmet=' 00 12 24 36 48 60 72'; ukmetdir=$COMIN; ukmetgfile=ukmet.t${dishh}z.ukmet; model=3;; - ecmwf) set +x; echo " "; echo " ++ operational ECMWF chosen"; set -x; + ecmwf) set +x; echo " "; echo " ++ operational ECMWF chosen"; ${TRACE_ON:-set -x}; fcsthrsecmwf=' 00 24 48 72'; ecmwfdir=$COMIN; ecmwfgfile=ecmgrb25.t12z; model=4;; - ngm) set +x; echo " "; echo " ++ operational NGM chosen"; set -x; + ngm) set +x; echo " "; echo " ++ operational NGM chosen"; ${TRACE_ON:-set -x}; fcsthrsngm=' 00 06 12 18 24 30 36 42 48'; ngmdir=$COMIN; ngmgfile=ngm.t${dishh}z.pgrb.f; model=5;; - nam) set +x; echo " "; echo " ++ operational Early NAM chosen"; set -x; + nam) set +x; echo " "; echo " ++ operational Early NAM chosen"; ${TRACE_ON:-set -x}; fcsthrsnam=' 00 06 12 18 24 30 36 42 48'; namdir=$COMIN; namgfile=nam.t${dishh}z.awip32; model=6;; - ngps) set +x; echo " "; echo " ++ operational NAVGEM chosen"; set -x; + ngps) set +x; echo " "; echo " ++ operational NAVGEM chosen"; ${TRACE_ON:-set -x}; fcsthrsngps=' 00 12 24 36 48 60 72'; #ngpsdir=/com/hourly/prod/hourly.${CENT}${symd}; ngpsdir=$OMIN; ngpsgfile=fnoc.t${dishh}z; model=7;; other) set +x; echo " "; echo " Model selected by user is ${cmodel}, which is a "; - echo "user-defined model, NOT operational...."; echo " "; set -x; + echo "user-defined 
model, NOT operational...."; echo " "; ${TRACE_ON:-set -x}; model=9;; *) set +x; echo " "; echo " !!! Model selected is not recognized."; echo " Model= ---> ${cmodel} <--- ..... Please submit the script again...."; - echo " "; set -x; exit 8;; + echo " "; ${TRACE_ON:-set -x}; exit 8;; esac @@ -376,7 +377,7 @@ if [ ${cmodel} = 'other' ]; then echo " replace the forecast hour characters 00 with XX. Please check the" echo " name in the kickoff script and qsub it again. Exiting....." echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -399,7 +400,7 @@ if [ ${cmodel} = 'other' ]; then echo " " echo " !!! Exiting loop, only processing 14 forecast files ...." echo " " - set -x + ${TRACE_ON:-set -x} break fi @@ -414,7 +415,7 @@ if [ ${cmodel} = 'other' ]; then echo " " echo " +++ Found file ${fnamebeg}${fhour}${fnameend}" echo " " - set -x + ${TRACE_ON:-set -x} let fhrct=fhrct+1 else fflag='n' @@ -434,7 +435,7 @@ if [ ${cmodel} = 'other' ]; then echo " !!! Please check the directory to make sure the file" echo " !!! is there and then submit this job again." echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -443,7 +444,7 @@ if [ ${cmodel} = 'other' ]; then echo " Max forecast hour is $maxhour" echo " List of forecast hours: $fcsthrsother" echo " " - set -x + ${TRACE_ON:-set -x} # -------------------------------------------------- # In order for the fortran program to know how many @@ -525,7 +526,7 @@ if [ ${numvitrecs} -eq 0 ]; then echo "!!! It could just be that there are no storms for the current" echo "!!! time. Please check the dates and submit this job again...." echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -573,19 +574,17 @@ pgm=$(basename $SUPVX) if [ -s $DATA/prep_step ]; then set +e . 
$DATA/prep_step - set -e + ${ERR_EXIT_ON:-set -eu} else [ -f errfile ] && rm errfile export XLFUNITS=0 unset $(env | grep XLFUNIT | awk -F= '{print $1}') - set +u - if [ -z "$XLFRTEOPTS" ]; then + if [ -z "${XLFRTEOPTS:-}" ]; then export XLFRTEOPTS="unit_vars=yes" else export XLFRTEOPTS="${XLFRTEOPTS}:unit_vars=yes" fi - set -u fi @@ -614,14 +613,14 @@ set +x echo echo 'The foreground exit status for SUPVIT is ' $err echo -set -x +${TRACE_ON:-set -x} if [ $err -eq 0 ]; then set +x echo " " echo " Normal end for program supvitql (which updates TC vitals file)." echo " " - set -x + ${TRACE_ON:-set -x} else set +x echo " " @@ -631,7 +630,7 @@ else echo "!!! model= ${cmodel}, forecast initial time = ${symd}${dishh}" echo "!!! Exiting...." echo " " - set -x + ${TRACE_ON:-set -x} fi if [ -s $DATA/err_chk ]; then $DATA/err_chk @@ -661,7 +660,7 @@ if [ ${numvitrecs} -eq 0 ]; then echo "!!! File ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} is empty." echo "!!! Please check the dates and submit this job again...." echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -677,7 +676,7 @@ echo " Below is a list of the storms to be processed: " | tee -a storm_list echo " " | tee -a storm_list cat ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} | tee -a storm_list echo " " | tee -a storm_list -set -x +${TRACE_ON:-set -x} set +u [ -n "../$pgmout" ] && cat storm_list >> ../$pgmout @@ -730,7 +729,7 @@ echo " NOW CUTTING APART INPUT GRIB FILES TO " echo " CREATE 1 BIG GRIB INPUT FILE " echo " -----------------------------------------" echo " " -set -x +${TRACE_ON:-set -x} #grid='255 0 151 71 70000 190000 128 0000 340000 1000 1000 64' #grid='255 0 360 181 90000 0000 128 -90000 -1000 1000 1000 64' @@ -757,7 +756,7 @@ if [ ${model} -eq 5 ]; then echo " !!! in the analysis data." 
echo " *******************************************************************" echo " " - set -x + ${TRACE_ON:-set -x} fi if [ -s ${vdir}/ngmlatlon.pgrb.${symd}${dishh} ]; then @@ -773,7 +772,7 @@ if [ ${model} -eq 5 ]; then echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " !!! NGM File missing: ${ngmdir}/${ngmgfile}${fhour}" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" - set -x + ${TRACE_ON:-set -x} continue fi if [ -s $TMPDIR/tmpixfile ]; then rm $TMPDIR/tmpixfile; fi @@ -784,7 +783,7 @@ if [ ${model} -eq 5 ]; then echo " " echo " Extracting NGM GRIB data for forecast hour = $fhour" echo " " - set -x + ${TRACE_ON:-set -x} g1=${ngmdir}/${ngmgfile}${fhour} @@ -808,7 +807,7 @@ if [ ${model} -eq 5 ]; then echo "!!! sure you've allocated enough memory for this job (error 134 using $COPYGB is " echo "!!! typically due to using more memory than you've allocated). Exiting....." echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -847,7 +846,7 @@ if [ ${model} -eq 6 ]; then echo " !!! in the analysis data." echo " *******************************************************************" echo " " - set -x + ${TRACE_ON:-set -x} fi if [ -s ${vdir}/namlatlon.pgrb.${symd}${dishh} ]; then @@ -863,7 +862,7 @@ if [ ${model} -eq 6 ]; then echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " !!! Early NAM File missing: ${namdir}/${namgfile}${fhour}.tm00" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" - set -x + ${TRACE_ON:-set -x} continue fi if [ -s $TMPDIR/tmpixfile ]; then rm $TMPDIR/tmpixfile; fi @@ -874,7 +873,7 @@ if [ ${model} -eq 6 ]; then echo " " echo " Extracting Early NAM GRIB data for forecast hour = $fhour" echo " " - set -x + ${TRACE_ON:-set -x} g1=${namdir}/${namgfile}${fhour}.tm00 @@ -899,7 +898,7 @@ if [ ${model} -eq 6 ]; then echo "!!! sure you've allocated enough memory for this job (error 134 using $COPYGB is " echo "!!! typically due to using more memory than you've allocated). Exiting....." 
echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -947,7 +946,7 @@ if [ ${model} -eq 4 ]; then echo " " echo " !!! Due to missing ECMWF file, execution is ending...." echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -990,7 +989,7 @@ if [ ${model} -eq 1 ]; then echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " !!! GFS File missing: ${gfsdir}/${gfsgfile}${fhour}" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" - set -x + ${TRACE_ON:-set -x} continue fi @@ -1061,7 +1060,7 @@ if [ ${model} -eq 8 ]; then echo " !!! gdas File missing: $gfile" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + ${TRACE_ON:-set -x} continue fi @@ -1110,7 +1109,7 @@ if [ ${model} -eq 8 ]; then echo " !!! gdas File missing: $gfile" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + ${TRACE_ON:-set -x} continue fi @@ -1165,7 +1164,7 @@ if [ ${model} -eq 2 ]; then echo " !!! MRF File missing: ${mrfdir}/${mrfgfile}${fhour}" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + ${TRACE_ON:-set -x} continue fi @@ -1220,7 +1219,7 @@ if [ ${model} -eq 3 ]; then echo " !!! UKMET File missing: ${ukmetdir}/${ukmetgfile}${fhour}" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + ${TRACE_ON:-set -x} continue fi @@ -1261,7 +1260,7 @@ if [ ${model} -eq 7 ]; then echo " " echo " !!! Due to missing NAVGEM file, execution is ending...." echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -1336,7 +1335,7 @@ if [ ${model} -eq 9 ]; then echo "!!! Forecast File missing: ${otherdir}/${fnamebeg}00${fnameend}" echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + ${TRACE_ON:-set -x} continue fi @@ -1410,7 +1409,7 @@ if [ ${model} -eq 9 ]; then echo "!!! sure you've allocated enough memory for this job (error 134 using $COPYGB is " echo "!!! typically due to using more memory than you've allocated). Exiting....." 
echo " " - set -x + ${TRACE_ON:-set -x} exit 8 fi @@ -1441,9 +1440,9 @@ while [ $ist -le 15 ] do if [ ${stormflag[${ist}]} -ne 1 ] then - set +x; echo "Storm number $ist NOT selected for processing"; set -x + set +x; echo "Storm number $ist NOT selected for processing"; ${TRACE_ON:-set -x} else - set +x; echo "Storm number $ist IS selected for processing...."; set -x + set +x; echo "Storm number $ist IS selected for processing...."; ${TRACE_ON:-set -x} fi let ist=ist+1 done @@ -1562,7 +1561,7 @@ set +x echo echo 'The foreground exit status for GETTRK is ' $err echo -set -x +${TRACE_ON:-set -x} if [ -s $DATA/err_chk ]; then $DATA/err_chk @@ -1581,5 +1580,6 @@ fi cp ${vdir}/trak.${cmodel}.all.${symdh} ${DATA}/model_track.all + exit 0 diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh index 2e2584a891..a4463156f6 100755 --- a/ush/wave_grib2_sbs.sh +++ b/ush/wave_grib2_sbs.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -24,15 +24,10 @@ ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +# 0.a Basic modes of operation cd $GRIBDATA @@ -51,7 +46,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_grib2 (COULD NOT CREATE TEMP DIRECTORY) *** ' echo '******************************************************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 fi @@ -86,7 +81,7 @@ echo '! 
Make GRIB files |' echo '+--------------------------------+' echo " Model ID : $WAV_MOD_TAG" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ -z "$CDATE" ] || [ -z "$cycle" ] || [ -z "$EXECwave" ] || \ [ -z "$COMOUT" ] || [ -z "$WAV_MOD_TAG" ] || [ -z "$SENDCOM" ] || \ @@ -99,7 +94,7 @@ echo '*** EXPORTED VARIABLES IN postprocessor NOT SET ***' echo '***************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 fi @@ -113,7 +108,7 @@ echo " Number of times : Single SBS echo " GRIB field flags : $gribflags" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 0.e Links to working directory @@ -127,7 +122,7 @@ set +x echo " Generate input file for ww3_grib2" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} sed -e "s/TIME/$tstart/g" \ -e "s/DT/$dtgrib/g" \ @@ -145,7 +140,7 @@ set +x echo " Run ww3_grib2" echo " Executing $EXECwave/ww3_grib" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} export pgm=ww3_grib;. prep_step $EXECwave/ww3_grib > grib2_${grdnam}_${FH3}.out 2>&1 @@ -158,11 +153,11 @@ echo '*** FATAL ERROR : ERROR IN ww3_grib encoding *** ' echo '************************************************ ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 3 fi - if [ $fht -gt 0 ]; then + if [ $fhr -gt 0 ]; then $WGRIB2 gribfile -set_date $CDATE -set_ftime "$fhr hour fcst" -grib ${COMOUT}/gridded/${outfile} err=$? else @@ -178,7 +173,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_grib2 *** ' echo '********************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 3 fi @@ -209,7 +204,7 @@ echo ' ' echo " Error in moving grib file ${outfile} to com" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 4 fi if [ ! 
-s $COMOUT/gridded/${outfile} ] @@ -222,7 +217,7 @@ echo ' ' echo " Error in moving grib file ${outfile}.idx to com" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 4 fi @@ -231,7 +226,7 @@ set +x echo " Alerting GRIB file as $COMOUT/gridded/${outfile}" echo " Alerting GRIB index file as $COMOUT/gridded/${outfile}.idx" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_GB2 $job $COMOUT/gridded/${outfile} $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_GB2_WIDX $job $COMOUT/gridded/${outfile}.idx else @@ -246,7 +241,7 @@ set +x echo " Removing work directory after success." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cd ../ mv -f ${gribDIR} done.${gribDIR} @@ -256,13 +251,8 @@ echo ' ' echo " File ${COMOUT}/gridded/${outfile} found, skipping generation process" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} fi - set +x - echo ' ' - echo "End of ww3_grib2.sh at" - date - [[ "$LOUD" = YES ]] && set -x # End of ww3_grib2.sh -------------------------------------------------- # diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh index bb68333b17..59a604d0f5 100755 --- a/ush/wave_grid_interp_sbs.sh +++ b/ush/wave_grid_interp_sbs.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -24,15 +24,10 @@ ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +# 0.a Basic modes of operation cd $GRDIDATA @@ -53,7 +48,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_grid_interp (COULD NOT CREATE TEMP DIRECTORY) *** ' echo '************************************************************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 fi @@ -68,7 +63,7 @@ echo '! Make GRID files |' echo '+--------------------------------+' echo " Model ID : $WAV_MOD_TAG" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ -z "$CDATE" ] || [ -z "$cycle" ] || [ -z "$EXECwave" ] || \ [ -z "$COMOUT" ] || [ -z "$WAV_MOD_TAG" ] || [ -z "$SENDCOM" ] || \ @@ -81,7 +76,7 @@ echo '***************************************************' echo ' ' echo "$CDATE $cycle $EXECwave $COMOUT $WAV_MOD_TAG $SENDCOM $SENDDBN $waveGRD" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 fi @@ -123,7 +118,7 @@ set +x echo ' ' echo " Copying $FIXwave/WHTGRIDINT.bin.${grdID} " - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cp $FIXwave/WHTGRIDINT.bin.${grdID} ${DATA} wht_OK='yes' else @@ -143,7 +138,7 @@ set +x echo " Run ww3_gint echo " Executing $EXECwave/ww3_gint - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} export pgm=ww3_gint;. 
prep_step $EXECwave/ww3_gint 1> gint.${grdID}.out 2>&1 @@ -165,7 +160,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_gint interpolation * ' echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 3 fi @@ -181,14 +176,14 @@ then set +x echo " Saving GRID file as $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE}" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cp ${DATA}/output_${ymdh}0000/out_grd.$grdID $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE} # if [ "$SENDDBN" = 'YES' ] # then # set +x # echo " Alerting GRID file as $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE} -# [[ "$LOUD" = YES ]] && set -x +# ${TRACE_ON:-set -x} # # PUT DBNET ALERT HERE .... @@ -200,16 +195,7 @@ # --------------------------------------------------------------------------- # # 2. Clean up the directory - set +x - echo " Removing work directory after success." - [[ "$LOUD" = YES ]] && set -x - cd ../ mv -f grint_${grdID}_${ymdh} done.grint_${grdID}_${ymdh} - set +x - echo ' ' - echo "End of ww3_interp.sh at" - date - # End of ww3_grid_interp.sh -------------------------------------------- # diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh index a9a12b6efe..80c041df37 100755 --- a/ush/wave_grid_moddef.sh +++ b/ush/wave_grid_moddef.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -19,15 +19,10 @@ # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +# 0.a Basic modes of operation echo "Generating mod_def file" @@ -43,7 +38,7 @@ echo '+--------------------------------+' echo " Grid : $1" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 0.b Check if grid set @@ -55,7 +50,7 @@ echo '*** Grid not identifife in ww3_mod_def.sh ***' echo '**************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 else grdID=$1 @@ -72,7 +67,7 @@ echo '*** EXPORTED VARIABLES IN ww3_mod_def.sh NOT SET ***' echo '*********************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 2 fi @@ -84,7 +79,7 @@ echo ' Creating mod_def file ...' echo " Executing $EXECwave/ww3_grid" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} rm -f ww3_grid.inp ln -sf ../ww3_grid.inp.$grdID ww3_grid.inp @@ -100,7 +95,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_grid *** ' echo '******************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 3 fi @@ -115,19 +110,14 @@ echo '*** FATAL ERROR : MOD DEF FILE NOT FOUND *** ' echo '******************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 4 fi # --------------------------------------------------------------------------- # # 3. Clean up - cd .. - rm -rf moddef_$grdID - - set +x - echo ' ' - echo 'End of ww3_mod_def.sh at' - date +cd .. +rm -rf moddef_$grdID # End of ww3_mod_def.sh ------------------------------------------------- # diff --git a/ush/wave_outp_cat.sh b/ush/wave_outp_cat.sh index 536e4203a5..7adf77dbf0 100755 --- a/ush/wave_outp_cat.sh +++ b/ush/wave_outp_cat.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! 
/usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -20,16 +20,10 @@ ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +source "$HOMEgfs/ush/preamble.sh" +# 0.a Basic modes of operation bloc=$1 MAXHOUR=$2 specdir=$3 @@ -44,7 +38,7 @@ echo '*** LOCATION ID IN ww3_outp_spec.sh NOT SET ***' echo '***********************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 else buoy=$bloc @@ -62,7 +56,7 @@ echo '*** EXPORTED VARIABLES IN ww3_outp_cat.sh NOT SET ***' echo '******************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 3 fi @@ -72,16 +66,20 @@ set +x echo " Generate input file for ww3_outp." 
- [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ "$specdir" = "bull" ] then outfile=${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.bull coutfile=${STA_DIR}/c${specdir}/$WAV_MOD_TAG.$buoy.cbull - rm outfile coutfile + for f in outfile coutfile; do + if [[ -f ${f} ]]; then rm ${f}; fi + done else outfile=${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.spec - rm outfile + if [[ -f ${outfile} ]]; then + rm ${outfile} + fi fi fhr=$FHMIN_WAV @@ -115,7 +113,7 @@ echo "*** FATAL ERROR : OUTPUT DATA FILE FOR BOUY $bouy at ${ymdh} NOT FOUND *** " echo '************************************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=2; export err;${errchk} exit $err fi @@ -139,14 +137,9 @@ echo " FATAL ERROR : OUTPUTFILE ${outfile} not created " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} err=2; export err;${errchk} exit $err fi - set +x - echo ' ' - echo 'End of ww3_outp_cat.sh at' - date - # End of ww3_outp_cat.sh ---------------------------------------------------- # diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh index e48d637307..a652d36745 100755 --- a/ush/wave_outp_spec.sh +++ b/ush/wave_outp_spec.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -21,16 +21,10 @@ ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x - - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +source "$HOMEgfs/ush/preamble.sh" +# 0.a Basic modes of operation bloc=$1 ymdh=$2 specdir=$3 @@ -51,7 +45,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_outp_spec (COULD NOT CREATE TEMP DIRECTORY) *** ' echo '****************************************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 fi @@ -63,7 +57,7 @@ echo '! Make spectral file |' echo '+--------------------------------+' echo " Model ID : $WAV_MOD_TAG" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 0.b Check if buoy location set @@ -75,7 +69,7 @@ echo '*** LOCATION ID IN ww3_outp_spec.sh NOT SET ***' echo '***********************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 else buoy=$bloc @@ -90,7 +84,7 @@ echo " Location ID/# : $buoy (${point})" echo " Spectral output start time : $ymdh " echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} break fi done < tmp_list.loc @@ -101,7 +95,7 @@ echo '*** LOCATION ID IN ww3_outp_spec.sh NOT RECOGNIZED ***' echo '******************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 2 fi fi @@ -119,7 +113,7 @@ echo '*** EXPORTED VARIABLES IN ww3_outp_spec.sh NOT SET ***' echo '******************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 3 fi @@ -131,7 +125,7 @@ set +x echo " Output starts at $tstart." echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 0.e sync important files @@ -150,7 +144,7 @@ set +x echo " Generate input file for ww3_outp." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} if [ "$specdir" = "bull" ] then @@ -177,7 +171,7 @@ set +x echo " Executing $EXECwave/ww3_outp" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} export pgm=ww3_outp;. 
prep_step $EXECwave/ww3_outp 1> outp_${specdir}_${buoy}.out 2>&1 @@ -192,7 +186,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_outp *** ' echo '******************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 4 fi @@ -236,18 +230,13 @@ echo '*** FATAL ERROR : OUTPUT DATA FILE FOR BOUY $bouy NOT FOUND *** ' echo '***************************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 5 fi # 3.b Clean up the rest - cd .. - rm -rf ${specdir}_${bloc} - - set +x - echo ' ' - echo 'End of ww3_outp_spec.sh at' - date +cd .. +rm -rf ${specdir}_${bloc} # End of ww3_outp_spec.sh ---------------------------------------------------- # diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh index bb98fee07b..7b193313d3 100755 --- a/ush/wave_prnc_cur.sh +++ b/ush/wave_prnc_cur.sh @@ -1,5 +1,5 @@ -#!/bin/sh -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -21,7 +21,8 @@ # ################################################################################ # -set -x + +source "$HOMEgfs/ush/preamble.sh" ymdh_rtofs=$1 curfile=$2 @@ -94,4 +95,3 @@ fi mv -f current.ww3 ${DATA}/${WAVECUR_DID}.${ymdh_rtofs} cd ${DATA} - diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh index f73646a07a..16473dbd1f 100755 --- a/ush/wave_prnc_ice.sh +++ b/ush/wave_prnc_ice.sh @@ -1,4 +1,5 @@ -#!/bin/sh +#! /usr/bin/env bash + ############################################################################### # # # This script preprocesses ice fields for the ocean wave models. # @@ -25,13 +26,13 @@ # # --------------------------------------------------------------------------- # # 0. 
Preparations + +source "$HOMEgfs/ush/preamble.sh" + # 0.a Basic modes of operation cd $DATA - seton='-xa' - setoff='+xa' - set $seton - + rm -rf ice mkdir ice cd ice @@ -40,7 +41,7 @@ # 0.b Define directories and the search path. # The tested variables should be exported by the postprocessor script. - set $setoff + set +x echo ' ' echo '+--------------------------------+' echo '! Make ice fields |' @@ -50,7 +51,7 @@ echo " Ice grid ID : $WAVEICE_FID" echo " Ice file : $WAVICEFILE" echo ' ' - set $seton + ${TRACE_ON:-set -x} echo "Making ice fields." if [ -z "$YMDH" ] || [ -z "$cycle" ] || \ @@ -58,14 +59,14 @@ [ -z "$WAV_MOD_TAG" ] || [ -z "$WAVEICE_FID" ] || [ -z "$SENDCOM" ] || \ [ -z "$COMIN_WAV_ICE" ] then - set $setoff + set +x echo ' ' echo '**************************************************' echo '*** EXPORTED VARIABLES IN preprocessor NOT SET ***' echo '**************************************************' echo ' ' exit 1 - set $seton + ${TRACE_ON:-set -x} echo "NON-FATAL ERROR - EXPORTED VARIABLES IN preprocessor NOT SET" fi @@ -86,17 +87,17 @@ if [ -f ice.grib ] then - set $setoff + set +x echo " ice.grib copied ($file)." - set $seton + ${TRACE_ON:-set -x} else - set $setoff + set +x echo ' ' echo '************************************** ' echo "*** FATAL ERROR: NO ICE FILE $file *** " echo '************************************** ' echo ' ' - set $seton + ${TRACE_ON:-set -x} echo "FATAL ERROR - NO ICE FILE (GFS GRIB)" exit 2 fi @@ -105,9 +106,9 @@ # 2. Process the GRIB packed ice file # 2.a Unpack data - set $setoff + set +x echo ' Extracting data from ice.grib ...' 
- set $seton + ${TRACE_ON:-set -x} $WGRIB2 ice.grib -netcdf icean_5m.nc 2>&1 > wgrib.out @@ -117,13 +118,13 @@ if [ "$err" != '0' ] then cat wgrib.out - set $setoff + set +x echo ' ' echo '**************************************** ' echo '*** ERROR IN UNPACKING GRIB ICE FILE *** ' echo '**************************************** ' echo ' ' - set $seton + ${TRACE_ON:-set -x} echo "ERROR IN UNPACKING GRIB ICE FILE." exit 3 fi @@ -135,10 +136,10 @@ # 2.d Run through preprocessor wave_prep - set $setoff + set +x echo ' Run through preprocessor ...' echo ' ' - set $seton + ${TRACE_ON:-set -x} cp -f ${DATA}/ww3_prnc.ice.$WAVEICE_FID.inp.tmpl ww3_prnc.inp @@ -150,13 +151,13 @@ if [ "$err" != '0' ] then cat prnc_${WAVEICE_FID}_${cycle}.out - set $setoff + set +x echo ' ' echo '******************************************** ' echo '*** WARNING: NON-FATAL ERROR IN ww3_prnc *** ' echo '******************************************** ' echo ' ' - set $seton + ${TRACE_ON:-set -x} echo "WARNING: NON-FATAL ERROR IN ww3_prnc." exit 4 fi @@ -177,25 +178,17 @@ icefile=${CDUMP}wave.${WAVEICE_FID}.$cycle.ice fi - set $setoff + set +x echo " Saving ice.ww3 as $COMOUT/rundata/${icefile}" - set $seton + ${TRACE_ON:-set -x} cp ice.ww3 $COMOUT/rundata/${icefile} rm -f ice.ww3 # --------------------------------------------------------------------------- # # 4. Clean up the directory - set $setoff - echo " Removing work directory after success." - set $seton +cd .. - cd .. - rm -rf ice - - set $setoff - echo ' ' - echo 'End of waveice.sh at' - date +rm -rf ice # End of waveice.sh --------------------------------------------------------- # diff --git a/ush/wave_tar.sh b/ush/wave_tar.sh index f071d2d490..452601dceb 100755 --- a/ush/wave_tar.sh +++ b/ush/wave_tar.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! 
/usr/bin/env bash + ############################################################################### # # # This script tars the sectral or bulletin files into a single file and # @@ -23,15 +24,10 @@ # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set -x +# 0.a Basic modes of operation cd $DATA echo "Making TAR FILE" @@ -46,7 +42,7 @@ echo " ID : $1" echo " Type : $2" echo " Number of files : $3" - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} # 0.b Check if type set @@ -59,7 +55,7 @@ echo '*** VARIABLES IN ww3_tar.sh NOT SET ***' echo '********************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 1 else ID=$1 @@ -89,7 +85,7 @@ echo '*** EXPORTED VARIABLES IN ww3_tar.sh NOT SET ***' echo '*****************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 2 fi @@ -101,7 +97,7 @@ set +x echo ' ' echo ' Making tar file ...' - set -x + ${TRACE_ON:-set -x} count=0 countMAX=5 @@ -125,7 +121,7 @@ echo '*** FATAL ERROR : TAR CREATION FAILED *** ' echo '***************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 3 fi @@ -136,7 +132,7 @@ else set +x echo ' All files not found for tar. Sleeping 10 seconds and trying again ..' 
- [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} sleep 10 count=$(expr $count + 1) fi @@ -151,7 +147,7 @@ echo '*** FATAL ERROR : TAR CREATION FAILED *** ' echo '***************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 3 fi @@ -171,7 +167,7 @@ echo '*** FATAL ERROR : SPECTRAL TAR COMPRESSION FAILED *** ' echo '***************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 4 fi fi @@ -185,7 +181,7 @@ set +x echo ' ' echo " Moving tar file ${file_name} to $COMOUT ..." - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} cp ${file_name} $COMOUT/station/. @@ -199,7 +195,7 @@ echo '*** FATAL ERROR : TAR COPY FAILED *** ' echo '************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} exit 4 fi @@ -209,21 +205,19 @@ echo ' ' echo " Alerting TAR file as $COMOUT/station/${file_name}" echo ' ' - [[ "$LOUD" = YES ]] && set -x + ${TRACE_ON:-set -x} $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_TAR $job $COMOUT/station/${file_name} fi # --------------------------------------------------------------------------- # # 4. Final clean up - cd $DATA +cd $DATA - set +x; [[ "$LOUD" = YES ]] && set -v +if [[ ${KEEPDATA:-NO} == "NO" ]]; then + set -v rm -rf ${STA_DIR}/${type} set +v - - echo ' ' - echo 'End of ww3_tar.sh at' - date +fi # End of ww3_tar.sh ----------------------------------------------------- # From 145b67f70f44abbb713e19073016bacfbfcb8184 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Fri, 29 Jul 2022 13:42:38 -0400 Subject: [PATCH 12/16] Initial commit of directory comparison tools (#934) Adds a new `test/` directory to the top level. Inside are miscellaneous scripts I have used to test bitwise identicality of experiments. 
Main scripts: - `diff_ROTDIR.sh`: Compares two output directories - `diff_UFS_rundir.sh`: Compares two UFS run directories Other scripts and file are helpers to these two main scripts. May eventually form starting point of a global workflow regression test (#267) Refs #267 --- test/README.md | 115 +++++++++++++++++++++++ test/coordinates.lst | 8 ++ test/diff_ROTDIR.sh | 162 +++++++++++++++++++++++++++++++++ test/diff_UFS_rundir.sh | 110 ++++++++++++++++++++++ test/diff_grib_files.py | 74 +++++++++++++++ test/netcdf_op_functions.sh | 177 ++++++++++++++++++++++++++++++++++++ test/test_utils.sh | 26 ++++++ 7 files changed, 672 insertions(+) create mode 100644 test/README.md create mode 100644 test/coordinates.lst create mode 100755 test/diff_ROTDIR.sh create mode 100755 test/diff_UFS_rundir.sh create mode 100755 test/diff_grib_files.py create mode 100644 test/netcdf_op_functions.sh create mode 100644 test/test_utils.sh diff --git a/test/README.md b/test/README.md new file mode 100644 index 0000000000..8d9d273ce2 --- /dev/null +++ b/test/README.md @@ -0,0 +1,115 @@ +# Global workflow comparison tools +A collection of tools to compare two different global workflow experiments for bitwise identicality. + +## Disclaimer + +These tools are still a work-in-progress. Use at your own risk. There is no guarantee every relevant file will be compared (but feel free to make a pull request adding more). + +# Usage + +## Quick start +### To compare two UFS run directories +``` +./diff_UFS_rundir.sh dirA dirB +``` +Where `dirA` and `dirB` are the two UFS run directories. 
+ + +### To compare two ROTDIRs +``` +./diff_ROTDIR.sh dirA dirB +``` +Where `dirA` and `dirB` are the two cycle directories (`.../gfs.YYYYMMDD/HH/`) + +OR + +``` +./diff_ROTDIR.sh rotdir cdate expA expB +``` + +Where: +- `rotdir` is the root of your rotdirs (the portion of path the experiments share) +- `cdate` is the datetime of the cycle in YYYYMMDDHH format +- `expA` and `expB` are the experiment names ($PSLOT) of each experiment + +## Description + +There are currently two tools included in this package: +* `diff_UFS_rundir.sh` will compare two UFS run directories (must have retained them by setting `KEEPDATA` to `YES` in config.base) +* `diff_ROTDIR.sh` will compare entire ROTDIRs + +Both scripts work similarly. You will need two experiments to compare. Typically this means a "baseline" experiment using the current develop and whatever feature you are working on. Experiments need to be for the same cycle and use all the same settings, otherwise there is no chance of them matching. Except for specific text files, file lists are constructed by globbing the first experiment directory, so if the second experiment contains files that would otherwise be included, they will be skipped. + +There are three classes of files compared: +- Text files, by simple posix diff +- GRiB2 files, using correlation from `wgrib2` +- NetCDF files, using NetCDF Operators (nco) + +Text and grib2 files are processed first and complete quickly. NetCDF processing is currently a lot slower. + +Any variables listed in the coordinates.lst file will be ignored when comparing NetCDFs. This is because coordinate variables are not differenced, so when iterating through the variables of the difference they will be non-zero. + +## Output + +Output will appear like this: +``` +=== <filename> === +<comparison> + +``` + +For text files, it will be the output of posix diff, which is just an empty string when identical: +``` +...
+ +=== field_table === + + +=== input.nml === +310,313c310,313 +< FNGLAC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/develop/fix/fix_am/global_glacier.2x2.grb' +< FNMXIC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/develop/fix/fix_am/global_maxice.2x2.grb' +< FNTSFC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/develop/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb' +< FNSNOC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/develop/fix/fix_am/global_snoclim.1.875.grb' +--- +> FNGLAC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/add_preamble/fix/fix_am/global_glacier.2x2.grb' +> FNMXIC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/add_preamble/fix/fix_am/global_maxice.2x2.grb' +> FNTSFC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/add_preamble/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb' +> FNSNOC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/add_preamble/fix/fix_am/global_snoclim.1.875.grb' + +... +``` +(Text diffs have two extra blank line to separate the output.) + +Grib files will look like this if they are identical: +``` +=== GFSFLX.GrbF00 === +All fields are identical! +=== GFSFLX.GrbF03 === +All fields are identical! +=== GFSFLX.GrbF06 === +All fields are identical! +=== GFSFLX.GrbF09 === +All fields are identical! +=== GFSFLX.GrbF12 === +All fields are identical! + +... + +``` + +And NetCDFs will look like this: +``` +=== atmf000.nc === +0 differences found +=== atmf003.nc === +0 differences found +=== atmf006.nc === +0 differences found +=== atmf009.nc === +0 differences found + +... +``` + +If any variables in a grib or NetCDF do not match, they will be listed instead. 
diff --git a/test/coordinates.lst b/test/coordinates.lst new file mode 100644 index 0000000000..f175c2f047 --- /dev/null +++ b/test/coordinates.lst @@ -0,0 +1,8 @@ +grid_xt +grid_yt +lat +lon +pfull +phalf +time +time_iso diff --git a/test/diff_ROTDIR.sh b/test/diff_ROTDIR.sh new file mode 100755 index 0000000000..3a864f5f09 --- /dev/null +++ b/test/diff_ROTDIR.sh @@ -0,0 +1,162 @@ +#! /bin/env bash + +# +# Differences relevant output files in two different experiment ROTDIRs. +# Text files are compared via posix diff. GRiB files are compared via +# correlation reported by wgrib2. NetCDF files are compared by using +# NetCDF operators to calculate a diff then make sure all non-coordinate +# variable differences are zero. File lists are created by globbing key +# directories under the first experiment given. +# +# Syntax: +# diff_ROTDIR.sh [-c coord_file][-h] rotdir cdate expA expB +# +# OR +# +# diff_ROTDIR.sh [-c coord_file][-h] dirA dirB +# +# Arguments: +# rotdir: root rotdir where ROTDIRS are held +# cdate: experiment date/cycle in YYYYMMDDHH format +# expA, expB: experiment ids (PSLOT) to compare +# +# dirA, dirB: full paths to the cycle directories to be compared +# (${rotdir}/${exp}/gfs.${YYYYMMDD}/${cyc}) +# +# Options: +# -c coord_file: file containing a list of coordinate variables +# -h: print usage message and exit +# + +set -eu + +usage() { + # + # Print usage statement + # + echo <<- 'EOF' + Differences relevant output files in two different experiment ROTDIRs. + Text files are compared via posix diff. GRiB files are compared via + correlation reported by wgrib2. NetCDF files are compared by using + NetCDF operators to calculate a diff then make sure all non-coordinate + variable differences are zero. File lists are created by globbing key + directories under the first experiment given. 
+ + Syntax: + diff_ROTDIR.sh [-c coord_file][-h] rotdir cdate expA expB + + OR + + diff_ROTDIR.sh [-c coord_file][-h] dirA dirB + + Arguments: + rotdir: root rotdir where ROTDIRS are held + cdate: experiment date/cycle in YYYYMMDDHH format + expA, expB: experiment ids (PSLOT) to compare + + dirA, dirB: full paths to the cycle directories to be compared + (${rotdir}/${exp}/gfs.${YYYYMMDD}/${cyc}) + + Options: + -c coord_file: file containing a list of coordinate variables + -h: print usage message and exit + EOF +} + +while getopts ":c:h" option; do + case "${option}" in + c) coord_file=${OPTARG} ;; + h) usage; exit 0 ;; + *) echo "Unknown option ${option}"; exit 1 ;; + esac +done + +num_args=$# +case $num_args in + 2) # Direct directory paths + dirA=$1 + dirB=$2 + ;; + 4) # Derive directory paths + rotdir=$1 + date=$2 + expA=$3 + expB=$4 + + YYYYMMDD=$(echo $date | cut -c1-8) + cyc=$(echo $date | cut -c9-10) + dirA="$rotdir/$expA/gfs.${YYYYMMDD}/${cyc}" + dirB="$rotdir/$expB/gfs.${YYYYMMDD}/${cyc}" + ;; + *) # Unknown option + echo "${num_args} is not a valid number of arguments, use 2 or 4" + usage + exit 1 + ;; +esac + +temp_file=".diff.nc" + +# Contains a bunch of NetCDF Operator shortcuts (will load nco module) +source ./netcdf_op_functions.sh +source ./test_utils.sh + +coord_file="${coord_file:-./coordinates.lst}" + +## Text files +files="" +files="${files} atmos/input.nml" # This file will be different because of the fix paths +files="${files} $(basename_list 'atmos/' "$dirA/atmos/storms.*" "$dirA/atmos/trak.*")" +if [[ -d $dirA/ice ]]; then + files="${files} ice/ice_in" +fi +if [[ -d $dirA/ocean ]]; then + files="${files} ocean/MOM_input" +fi +# if [[ -d $dirA/wave ]]; then +# files="${files} $(basename_list 'wave/station/' "$dirA/wave/station/*bull_tar")" +# fi + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + diff $fileA $fileB || : +done + +## GRiB files + +module load wgrib2/2.0.8 + +files="" 
+files="${files} $(basename_list 'atmos/' $dirA/atmos/*grb2* $dirA/atmos/*.flux.*)" +if [[ -d $dirA/wave ]]; then + files="${files} $(basename_list 'wave/gridded/' $dirA/wave/gridded/*.grib2)" +fi +if [[ -d $dirA/ocean ]]; then + files="${files} $(basename_list 'ocean/' $dirA/ocean/*grb2)" +fi + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + ./diff_grib_files.py $fileA $fileB +done + +## NetCDF Files +files="" +files="${files} $(basename_list 'atmos/' $dirA/atmos/*.nc)" +if [[ -d $dirA/ice ]]; then + files="${files} $(basename_list 'ice/' $dirA/ice/*.nc)" +fi +if [[ -d $dirA/ocean ]]; then + files="${files} $(basename_list 'ocean/' $dirA/ocean/*.nc)" +fi + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + nccmp -q $fileA $fileB $coord_file +done diff --git a/test/diff_UFS_rundir.sh b/test/diff_UFS_rundir.sh new file mode 100755 index 0000000000..fac2242a65 --- /dev/null +++ b/test/diff_UFS_rundir.sh @@ -0,0 +1,110 @@ +#! /bin/env bash + +# +# Differences relevant output files in two UFS model directories. GRiB files +# are compared via correlation reported by wgrib2. NetCDF files are compared +# by using NetCDF operators to calculate a diff then make sure all non- +# coordinate variable differences are zero. +# +# Syntax: +# diff_UFS_rundir.sh [-c coord_file][-h] dirA dirB +# +# Arguments: +# dirA, dirB: full paths to the UFS run directories to be compared +# +# Options: +# -c coord_file: file containing a list of coordinate variables +# -h: print usage message and exit +# + +set -eu + +usage() { + # + # Print usage statement + # + echo <<- 'EOF' + Differences relevant output files in two UFS model directories. GRiB files + are compared via correlation reported by wgrib2. NetCDF files are compared + by using NetCDF operators to calculate a diff then make sure all non- + coordinate variable differences are zero. 
+ + Syntax: + diff_UFS_rundir.sh [-c coord_file][-h] dirA dirB + + Arguments: + dirA, dirB: full paths to the UFS run directories to be compared + + Options: + -c coord_file: file containing a list of coordinate variables + -h: print usage message and exit + EOF +} + +while getopts ":c:h" option; do + case "${option}" in + c) coord_file=${OPTARG} ;; + h) usage; exit 0 ;; + *) echo "Unknown option ${option}"; exit 1 ;; + esac +done + +num_args=$# +case $num_args in + 2) # Direct directory paths + dirA=$1 + dirB=$2 + ;; + *) # Unknown option + echo "${num_args} is not a valid number of arguments, use 2" + usage + exit 1 + ;; +esac + +source ./netcdf_op_functions.sh +source ./test_utils.sh + +temp_file=".diff.nc" +coord_file="${coord_file:-./coordinates.lst}" + +# Input files +files="data_table diag_table fd_nems.yaml field_table ice_in input.nml med_modelio.nml \ + model_configure nems.configure pio_in ww3_multi.inp ww3_shel.inp" + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + if [[ -f "$fileA" ]]; then + diff $fileA $fileB || : + else + echo ; echo; fi +done + +# GRiB files +files="$(basename_list '' $dirA/GFSFLX.Grb*)" + +module load wgrib2/2.0.8 + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + ./diff_grib_files.py $fileA $fileB +done + +# NetCDF Files +files="" +files="${files} $(basename_list '' $dirA/atmf*.nc $dirA/sfcf*.nc)" +if [[ -d "$dirA/history" ]]; then + files="$(basename_list 'history/' $dirA/history/*.nc)" +fi + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + nccmp -q $fileA $fileB $coord_file +done + diff --git a/test/diff_grib_files.py b/test/diff_grib_files.py new file mode 100755 index 0000000000..43619f143d --- /dev/null +++ b/test/diff_grib_files.py @@ -0,0 +1,74 @@ +#! /bin/env python3 +''' +Compares two grib2 files and prints any variables that have a + non-identity correlation.
+ +Syntax +------ +diff_grib_files.py fileA fileB + +Parameters +---------- +fileA: string + Path to the first grib2 file +fileB: string + Path to the second grib2 file + +''' + +import re +import sys +import subprocess + +# TODO - Update to also check the min just in case the grib files have a constant offset + +def count_nonid_corr(test_string: str, quiet=False): + ''' + Scan a wgrib2 print of the correlation between two values and count + how many variables have a non-identity correlation. Any such variables + are printed. + + wgrib2 is assumed to be invoked by the following command: + wgrib2 {fileA} -var -rpn 'sto_1' -import_grib {fileB} -rpn 'rcl_1:print_corr' + + Parameters + ---------- + test_string: str + STDOUT from wgrib2 call. + + quiet: bool, optional + Whether to suppress print messages of non-identity variables and summary. + + Returns + ------- + int + Number of non-identity correlations represented in the string. + + + ''' + pattern = re.compile(r"(\d+:\d+:)(?P<var>.*):rpn_corr=(?P<corr>.*)") + matches = [m.groupdict() for m in pattern.finditer(test_string)] + + count = 0 + for match in matches: + if float(match['corr']) != 1.0: + count = count + 1 + if not quiet: + print(f"{match['var']}: corr={match['corr']}") + + if not quiet: + if count == 0: + print("All fields are identical!") + else: + print(f"{count} variables are different") + + return count + +if __name__ == '__main__': + fileA = sys.argv[1] + fileB = sys.argv[2] + + wgrib2_cmd = f"wgrib2 {fileA} -var -rpn 'sto_1' -import_grib {fileB} -rpn 'rcl_1:print_corr'" + + string = subprocess.run(wgrib2_cmd, shell=True, stdout=subprocess.PIPE).stdout.decode("utf-8") + count_nonid_corr(string) diff --git a/test/netcdf_op_functions.sh b/test/netcdf_op_functions.sh new file mode 100644 index 0000000000..0085855ea3 --- /dev/null +++ b/test/netcdf_op_functions.sh @@ -0,0 +1,177 @@ +#!
/bin/env bash + +if [ -t 0 ]; then + module load nco/4.9.3 +fi + +## NetCDF operator shortcuts +# From nco.sourceforge.net/nco.html#Filters-for-ncks +# ncattget $att_nm $var_nm $fl_nm : What attributes does variable have? +function ncattget { ncks --trd -M -m ${3} | grep -E -i "^${2} attribute [0-9]+: ${1}" | cut -f 11- -d ' ' | sort ; } +# ncunits $att_val $fl_nm : Which variables have given units? +function ncunits { ncks --trd -m ${2} | grep -E -i " attribute [0-9]+: units.+ ${1}" | cut -f 1 -d ' ' | sort ; } +# ncavg $var_nm $fl_nm : What is mean of variable? +function ncavg { + temp_file=${PTMP:-$HOME}/foo.nc + ncwa -y avg -O -C -v ${1} ${2} ${temp_file} + ncks --trd -H -C -v ${1} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncavg $var_nm $fl_nm : What is mean of variable? +function ncavg { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo=${1}.avg();print(foo)" ${2} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncdmnlst $fl_nm : What dimensions are in file? +function ncdmnlst { ncks --cdl -m ${1} | cut -d ':' -f 1 | cut -d '=' -s -f 1 ; } +# ncvardmnlst $var_nm $fl_nm : What dimensions are in a variable? +function ncvardmnlst { ncks --trd -m -v ${1} ${2} | grep -E -i "^${1} dimension [0-9]+: " | cut -f 4 -d ' ' | sed 's/,//' ; } +# ncvardmnlatlon $var_nm $fl_nm : Does variable contain both lat and lon dimensions? +# function ncvardmnlatlon { flg=$(ncks -C -v ${1} -m ${2} | grep -E -i "${1}\(" | grep -E "lat.*lon|lon.*lat") ; [[ ! -z "$flg" ]] && echo "Yes, ${1} has both lat and lon dimensions" || echo "No, ${1} does not have both lat and lon dimensions" } +# ncdmnsz $dmn_nm $fl_nm : What is dimension size? +function ncdmnsz { ncks --trd -m -M ${2} | grep -E -i ": ${1}, size =" | cut -f 7 -d ' ' | uniq ; } +# ncgrplst $fl_nm : What groups are in file? +function ncgrplst { ncks -m ${1} | grep 'group:' | cut -d ':' -f 2 | cut -d ' ' -f 2 | sort ; } +# ncvarlst $fl_nm : What variables are in file? 
+function ncvarlst { ncks --trd -m ${1} | grep -E ': type' | cut -f 1 -d ' ' | sed 's/://' | sort ; } +# ncmax $var_nm $fl_nm : What is maximum of variable? +function ncmax { + temp_file=${PTMP:-$HOME}/foo.nc + ncwa -y max -O -C -v ${1} ${2} ${temp_file} + ncks --trd -H -C -v ${1} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncmax $var_nm $fl_nm : What is maximum of variable? +function ncmax { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo=${1}.max();print(foo)" ${2} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncmdn $var_nm $fl_nm : What is median of variable? +function ncmdn { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo=gsl_stats_median_from_sorted_data(${1}.sort());print(foo)" ${2} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncmin $var_nm $fl_nm : What is minimum of variable? +function ncmin { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo=${1}.min();print(foo)" ${2} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncrng $var_nm $fl_nm : What is range of variable? +function ncrng { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo_min=${1}.min();foo_max=${1}.max();print(foo_min,\"%f\");print(\" to \");print(foo_max,\"%f\")" ${2} ${temp_file} + rm ${temp_file} +} +# ncmode $var_nm $fl_nm : What is mode of variable? +function ncmode { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo=gsl_stats_median_from_sorted_data(${1}.sort());print(foo)" ${2} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncrecsz $fl_nm : What is record dimension size? +function ncrecsz { ncks --trd -M ${1} | grep -E -i "^Root record dimension 0:" | cut -f 10- -d ' ' ; } +# nctypget $var_nm $fl_nm : What type is variable? 
+function nctypget { ncks --trd -m -v ${1} ${2} | grep -E -i "^${1}: type" | cut -f 3 -d ' ' | cut -f 1 -d ',' ; } + +function nccorr() { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo_min=${1}.min();foo_max=${1}.max();print(foo_min,\"%f\");print(\" to \");print(foo_max,\"%f\")" ${2} ${temp_file} + rm ${temp_file} +} + +# Heavily modified from original +function nccmp() { + # + # Compare two netcdf files + # + # Uses ncdiff to create a difference of two NetCDFs, then checks to + # make sure all non-coordinate fields of the diff are zero. + # + # Syntax: + # nccmp [-q][-z] fileA fileB coord_file + # + # Arguments: + # fileA, fileB: NetCDFs to be compared + # coord_file: File containing coordinate variables + # + # Options: + # -q: quiet mode (implies -z) + # -z: suppress displaying fields with zero difference + # + # Notes: + # Will create a temporary file .diff in the $PTMP directory + # if PTMP is defined, otherwise .diff is created in the + # current directory. + # + + local OPTIND + suppress_msg="" + hide_zeros="NO" + quiet="NO" + while getopts ":qz" option; do + case "${option}" in + q) quiet="YES" ;& + z) suppress_msg=" (Suppressing zero difference fields)" + hide_zeros="YES" + ;; + *) echo "Unknown option ${option}" + ;; + esac + done + shift "$((OPTIND-1))" + fileA="${1}" + fileB="${2}" + coord_file="${3:-/dev/null}" + temp_file="${PTMP:-$(pwd)}/.diff" + if [[ ${quiet} == "NO" ]]; then + echo + echo "Comparing ${fileA} and ${fileB}" + fi + # Create diff of the files + ncdiff ${fileA} ${fileB} ${temp_file} --overwrite + if [[ ${quiet} == "NO" ]]; then + echo "Difference report:${suppress_msg}" + echo "(Coordinate variables will always be non-zero)" + fi + count=0 + # Check each variable + for var in $(ncvarlst ${temp_file}); do + if [[ $(egrep -o "^${var}\$" ${coord_file} | wc -l) == 0 ]]; then + # Variable is not in coordinate list + max=$(ncmax $var $temp_file 2> /dev/null) + if [[ -z $max ]]; then + echo "Error reading max of ${var}" + 
count=$((count + 1)) + continue + fi + min=$(ncmin $var $temp_file 2> /dev/null) + if [[ -z $min ]]; then + echo "Error reading min of ${var}" + count=$((count + 1)) + continue + fi + if [[ ${hide_zeros} == "NO" ]] || (( $(echo "$max != 0 || $min != 0" | bc) )); then + # Min/max is not zero or we are not hiding zeros + echo "${var}: ${min}..${max}" + count=$((count + 1)) + fi + else + # + # ncdiff doesn't difference coordinate variables. Instead coordinates + # are just placed in the diff file. While this is generally what we + # want, when checking for equivalence we need to ignore them. + # + if [[ ${quiet} == "NO" ]]; then + echo "Coordinate ${var} ignored" + fi + fi + done + rm $temp_file + echo "${count} differences found" +} + + diff --git a/test/test_utils.sh b/test/test_utils.sh new file mode 100644 index 0000000000..b00e1d49cf --- /dev/null +++ b/test/test_utils.sh @@ -0,0 +1,26 @@ +#! /bin/env bash + +basename_list() { + # + # Take a list of paths, determines the base name, then + # prepends it to a base path. + # + # Syntax: + # basename_list base file_in* + # + # Arguments: + # base: Common root directory of all paths in list + # file_in: List of paths relative to $base/ + # + # Returns: + # List of paths constructed by prepending $base to each + # item in $file_in + # + base="${1}" + list="" + + for file_in in "${@:2}"; do + list="$list ${base}$(basename $file_in)" + done + echo $list +} From 395720cef000ef221c49c93d4f68417b7fda64b6 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Mon, 1 Aug 2022 17:38:50 -0400 Subject: [PATCH 13/16] Add ocean post to archive dependencies (#949) The archive job was not waiting for ocean post to complete because there was no dependency. 
Fixes #948 --- workflow/rocoto/workflow_tasks.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 22d4ee2c14..c34605b52a 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -905,6 +905,9 @@ def arch(self): if self.app_config.do_wave_bnd: dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpnt'} deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_ocean: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocnpost'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('arch') From b2155ad3dc999a2f41aeace58f3b199a8ddde65c Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Wed, 3 Aug 2022 10:04:06 -0400 Subject: [PATCH 14/16] Fix GLDAS j-job link (#954) The cd was misplaced when checking for the existence of gldas.fd to create the link for the j-job, so the directory was never found and the job never linked. --- sorc/link_workflow.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index af00b790c6..d3a78422a5 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -81,8 +81,8 @@ fi #--------------------------------------- #--add files from external repositories #--------------------------------------- +cd ${pwd}/../jobs ||exit 8 if [ -d ../sorc/gldas.fd ]; then - cd ${pwd}/../jobs ||exit 8 $LINK ../sorc/gldas.fd/jobs/JGDAS_ATMOS_GLDAS . fi cd ${pwd}/../parm ||exit 8 From a658e75e579ebc4f454377e5a9cf4c1fc54e4e3d Mon Sep 17 00:00:00 2001 From: Jessica Meixner Date: Wed, 3 Aug 2022 18:34:34 +0000 Subject: [PATCH 15/16] Updates for P8 (#946) These are the final updates for Prototype 8. 
These changes include: * Updating to the latest ufs-weather-model hash (in progress, waiting for PR) which will update the calculation of 2m T * A small update to the organic carbon coefficients for p8, raises them from 0.3 -> 0.4 for oc1 and oc2 * Uses 10km input files for aerosols * Sets do_gsl_drag_tofd=false by default, which helps with stability of the model Closes #937 --- parm/chem/ExtData.other | 8 ++++---- parm/config/config.aero | 2 +- parm/config/config.fcst | 4 ++-- sorc/checkout.sh | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/parm/chem/ExtData.other b/parm/chem/ExtData.other index f302117a62..5eb1e1dd0b 100644 --- a/parm/chem/ExtData.other +++ b/parm/chem/ExtData.other @@ -11,12 +11,12 @@ TROPP 'Pa' Y N - 0.0 1.0 DU_SRC NA N Y - none none du_src ExtData/Dust/gocart.dust_source.v5a.x1152_y721.nc # FENGSHA input files. Note: regridding should be N or E - Use files with _FillValue != NaN -DU_CLAY '1' Y E - none none clayfrac ExtData/Dust/FENGSHA_SOILGRIDS2019_GEFSv12_v1.2.nc -DU_SAND '1' Y E - none none sandfrac ExtData/Dust/FENGSHA_SOILGRIDS2019_GEFSv12_v1.2.nc +DU_CLAY '1' Y E - none none clayfrac ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_SAND '1' Y E - none none sandfrac ExtData/Dust/FENGSHA_p81_10km_inputs.nc DU_SILT '1' Y E - none none siltfrac /dev/null DU_SSM '1' Y E - none none ssm /dev/null:1.0 -DU_RDRAG '1' Y E %y4-%m2-%d2t12:00:00 none none albedo_drag ExtData/Dust/FENGSHA_Albedo_drag_v1.nc -DU_UTHRES '1' Y E - none none uthres ExtData/Dust/randomforestensemble_uthres.nc +DU_RDRAG '1' Y E %y4-%m2-%d2t12:00:00 none none albedo_drag ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_UTHRES '1' Y E - none none uthres ExtData/Dust/FENGSHA_p81_10km_inputs.nc #====== Sulfate Sources ================================================= # Anthropogenic (BF & FF) emissions -- allowed to input as two layers diff --git a/parm/config/config.aero b/parm/config/config.aero index 74c5cb7fa5..3aeb33790e 100644 --- a/parm/config/config.aero +++ 
b/parm/config/config.aero @@ -25,7 +25,7 @@ AERO_EMIS_FIRE=QFED # Aerosol convective scavenging factors (list of string array elements) # Element syntax: ':'. Use = * to set default factor for all aerosol tracers # Scavenging factors are set to 0 (no scavenging) if unset -aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" # # Number of diagnostic aerosol tracers (default: 0) aero_diag_tracers=2 diff --git a/parm/config/config.fcst b/parm/config/config.fcst index 2c380b9111..cb1add3fff 100755 --- a/parm/config/config.fcst +++ b/parm/config/config.fcst @@ -122,7 +122,7 @@ if [ $gwd_opt -eq 2 ]; then #export do_gsl_drag_tofd=".true." #export do_ugwp_v1_orog_only=".false." - #--used for UFS p8b + #--used for UFS p8 export knob_ugwp_version=0 export do_ugwp=".false." export do_tofd=".false." @@ -132,7 +132,7 @@ if [ $gwd_opt -eq 2 ]; then export do_ugwp_v0_nst_only=".false." export do_gsl_drag_ls_bl=".false." export do_gsl_drag_ss=".true." - export do_gsl_drag_tofd=".true." + export do_gsl_drag_tofd=".false." export do_ugwp_v1_orog_only=".false." 
export launch_level=$(echo "$LEVS/2.35" |bc) fi diff --git a/sorc/checkout.sh b/sorc/checkout.sh index fcf7235ae7..ff3792f67a 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -106,7 +106,7 @@ CHECKOUT_GSI="NO" CHECKOUT_GDAS="NO" checkout_gtg="NO" checkout_wafs="NO" -ufs_model_hash="b97375c" +ufs_model_hash="Prototype-P8" # Parse command line arguments while getopts ":chgum:o" option; do From 1026b2c96eb8d987527b7b38a5d75f5b1786533c Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Wed, 3 Aug 2022 14:45:48 -0400 Subject: [PATCH 16/16] Remove SDATE=CDATE IAU block in NCO config.base (#963) - Remove the block in config.base.nco.static that checks if CDATE=SDATE and turns IAU settings to 0. - This block is not needed in operations and causes issues in pre-implementation developer testing when starting a new warm-started parallel with wave restarts. Refs: #960 --- parm/config/config.base.nco.static | 4 ---- 1 file changed, 4 deletions(-) diff --git a/parm/config/config.base.nco.static b/parm/config/config.base.nco.static index 4612e82814..a94f0be863 100755 --- a/parm/config/config.base.nco.static +++ b/parm/config/config.base.nco.static @@ -194,10 +194,6 @@ export IAU_OFFSET=6 export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble export IAUFHRS_ENKF="3,6,9" export IAU_DELTHRS_ENKF=6 -if [[ "$SDATE" = "$CDATE" ]]; then - export IAU_OFFSET=0 - export IAU_FHROT=0 -fi # Use Jacobians in eupd and thereby remove need to run eomg export lobsdiag_forenkf=".true."