Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

clean up variable and subroutine names for LDAS grid #637

Merged
merged 13 commits into from
Mar 22, 2023
Merged
14 changes: 7 additions & 7 deletions src/Applications/LDAS_App/preprocess_ldas_routines.F90
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ module preprocess_ldas_routines

use LDAS_TileCoordRoutines, ONLY: &
LDAS_create_grid_g, &
get_tile_grid, &
get_minExtent_grid, &
io_domain_files

use nr_ran2_gasdev, ONLY: &
Expand Down Expand Up @@ -537,9 +537,9 @@ subroutine domain_setup( &
! determine smallest subgrid of tile_grid_d that contains all
! catchments/tiles in domain

call get_tile_grid( N_cat_domain, tile_coord%i_indg, tile_coord%j_indg, &
tile_coord%min_lon, tile_coord%min_lat, tile_coord%max_lon, tile_coord%max_lat, &
tile_grid_g, tile_grid_d)
tile_grid_d = get_minExtent_grid( N_cat_domain, tile_coord%i_indg, tile_coord%j_indg, &
tile_coord%min_lon, tile_coord%min_lat, tile_coord%max_lon, tile_coord%max_lat, &
tile_grid_g)

! output domain files

Expand Down Expand Up @@ -3027,9 +3027,9 @@ subroutine LDAS_read_til_file( tile_file, catch_file, tile_grid_g, tile_coord_la
N_tile_land=i
allocate(tile_coord_land(N_tile_land))
tile_coord_land=tile_coord(1:N_tile_land)
! hash_[x]_indg is not written into the tile_coord file and not needed in preprocessing
tile_coord_land%hash_i_indg = nint(nodata_generic)
tile_coord_land%hash_j_indg = nint(nodata_generic)
! pert_[x]_indg is not written into the tile_coord file and not needed in preprocessing
tile_coord_land%pert_i_indg = nint(nodata_generic)
tile_coord_land%pert_j_indg = nint(nodata_generic)
if(present(f2g)) then
allocate(f2g(fid))
f2g = f2g_tmp(1:fid)
Expand Down
87 changes: 49 additions & 38 deletions src/Components/GEOSldas_GridComp/GEOS_LdasGridComp.F90
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@ module GEOS_LdasGridCompMod

use EASE_conv, only: ease_inverse
use LDAS_TileCoordType, only: tile_coord_type , T_TILECOORD_STATE, TILECOORD_WRAP
use LDAS_TileCoordType, only: grid_def_type, io_grid_def_type
use LDAS_TileCoordRoutines, only: get_tile_grid, get_ij_ind_from_latlon, io_domain_files
use LDAS_TileCoordType, only: grid_def_type, io_grid_def_type, operator (==)
use LDAS_TileCoordRoutines, only: get_minExtent_grid, get_ij_ind_from_latlon, io_domain_files
use LDAS_ConvertMod, only: esmf2ldas
use LDAS_PertRoutinesMod, only: get_pert_grid
use LDAS_ensdrv_functions,ONLY: get_io_filename
Expand Down Expand Up @@ -406,9 +406,10 @@ subroutine Initialize(gc, import, export, clock, rc)
integer :: N_catf
integer :: LSM_CHOICE

type(grid_def_type) :: tile_grid_g
type(grid_def_type) :: tile_grid_f
type(grid_def_type) :: tile_grid_l
type(grid_def_type) :: tile_grid_g, pert_grid_g
type(grid_def_type) :: tile_grid_f, pert_grid_f
type(grid_def_type) :: tile_grid_l, pert_grid_l

type(date_time_type):: start_time
type(ESMF_Time) :: CurrentTime
!type(CubedSphereGridFactory) :: cubed_sphere_factory
Expand Down Expand Up @@ -619,39 +620,50 @@ subroutine Initialize(gc, import, export, clock, rc)
close(10)
call io_grid_def_type('w', logunit, tile_grid_f, 'tile_grid_f')

block
type(grid_def_type) :: latlon_tmp_g
integer :: perturbations

call MAPL_GetResource(MAPL, perturbations, 'PERTURBATIONS:', default=0, rc=status)
if(trim(grid_type) == "Cubed-Sphere" ) then

_ASSERT(index(tile_grid_g%gridtype, 'c3') /=0, "tile_grid_g does not describe a cubed-sphere grid")

!1) generate a lat-lon grid for landpert and land assim ( 4*N_lonX3*N_lon)
call get_pert_grid(tile_grid_g, latlon_tmp_g)
tile_grid_g = latlon_tmp_g
!2) get hash index
do i = 1, N_catf
call get_ij_ind_from_latlon(latlon_tmp_g,tile_coord_f(i)%com_lat,tile_coord_f(i)%com_lon, &
tile_coord_f(i)%hash_i_indg,tile_coord_f(i)%hash_j_indg)
enddo
!3) re-generate tile_grid_f in Lat-Lon
call get_tile_grid(N_catf, tile_coord_f%hash_i_indg, tile_coord_f%hash_j_indg, &
tile_coord_f%min_lon, tile_coord_f%min_lat, tile_coord_f%max_lon, tile_coord_f%max_lat, &
tile_grid_g, tile_grid_f)

endif
end block
! get a grid for perturbations and EnKF:
!
! tile grid ! pert grid
! (defines tile space) ! (used for perturbations and as "hash" grid in EnKF analysis)
! ===========================================================================================================
! lat/lon ! same as tile_grid (i.e., lat/lon)
! -----------------------------------------------------------------------------------------------------------
! EASEv[X] ! same as tile_grid (i.e., EASE)
! -----------------------------------------------------------------------------------------------------------
! cubed-sphere ! lat/lon grid of resolution similar to that of (cubed-sphere) tile_grid

pert_grid_g = get_pert_grid(tile_grid_g)

if ( .not. (pert_grid_g==tile_grid_g) ) then
gmao-rreichle marked this conversation as resolved.
Show resolved Hide resolved

! arrive here when tile_grid_g is cubed-sphere and pert_grid_g is lat/lon after call to get_pert_grid() above

!1) get pert_i_indg, pert_j_indg for tiles in (full) domain relative to pert_grid_g
do i = 1, N_catf
call get_ij_ind_from_latlon(pert_grid_g,tile_coord_f(i)%com_lat,tile_coord_f(i)%com_lon, &
tile_coord_f(i)%pert_i_indg,tile_coord_f(i)%pert_j_indg)
enddo
!2) determine pert_grid_f
pert_grid_f = get_minExtent_grid(N_catf, tile_coord_f%pert_i_indg, tile_coord_f%pert_j_indg, &
tile_coord_f%min_lon, tile_coord_f%min_lat, tile_coord_f%max_lon, tile_coord_f%max_lat, &
pert_grid_g)

else

pert_grid_f = tile_grid_f

! note that %pert_i_indg and %pert_j_indg were initialized to %i_indg and %j_indg
! in io_tile_coord_type() when tile_coord was read via io_domain_files()

endif
endif

call MPI_BCAST(N_catf,1,MPI_INTEGER,0,mpicomm,mpierr)
if (.not. IamRoot) allocate(tile_coord_f(N_catf))

call MPI_BCAST(tile_coord_f,N_catf, MPI_tile_coord_type,0,mpicomm, mpierr)
call MPI_BCAST(pert_grid_g, 1, MPI_grid_def_type, 0,mpicomm, mpierr)
call MPI_BCAST(pert_grid_f, 1, MPI_grid_def_type, 0,mpicomm, mpierr)
call MPI_BCAST(tile_grid_g, 1, MPI_grid_def_type, 0,mpicomm, mpierr)
call MPI_BCAST(tile_grid_f, 1, MPI_grid_def_type, 0,mpicomm, mpierr)

block
integer, allocatable :: f2tile_id(:), tile_id2f(:)
Expand Down Expand Up @@ -682,17 +694,16 @@ subroutine Initialize(gc, import, export, clock, rc)

allocate(tcinternal%tile_coord_f,source = tile_coord_f)

call get_tile_grid(land_nt_local, &
tcinternal%tile_coord%hash_i_indg, tcinternal%tile_coord%hash_j_indg, &
pert_grid_l = get_minExtent_grid(land_nt_local, &
tcinternal%tile_coord%pert_i_indg, tcinternal%tile_coord%pert_j_indg, &
tcinternal%tile_coord%min_lon, tcinternal%tile_coord%min_lat, &
tcinternal%tile_coord%max_lon, tcinternal%tile_coord%max_lat, &
tile_grid_g,tile_grid_l)

! re-arrange tile_coord_f
pert_grid_g)

tcinternal%grid_g = tile_grid_g
tcinternal%grid_f = tile_grid_f
tcinternal%grid_l = tile_grid_l
tcinternal%pgrid_g = pert_grid_g
tcinternal%pgrid_f = pert_grid_f
tcinternal%pgrid_l = pert_grid_l
tcinternal%tgrid_g = tile_grid_g
gmao-rreichle marked this conversation as resolved.
Show resolved Hide resolved

do i = 1, NUM_ENSEMBLE
call MAPL_GetObjectFromGC(gcs(METFORCE(i)), CHILD_MAPL, rc=status)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1078,8 +1078,6 @@ subroutine Initialize(gc, import, export, clock, rc)
! mapping f to re-ordered f so it is continuous for mpi_gather
! rf -- ordered by processors. Within the processor, ordered by MAPL grid
integer, allocatable :: f2rf(:) ! mapping re-ordered rf to f for the LDASsa output
type(grid_def_type) :: tile_grid_g
type(grid_def_type) :: tile_grid_f
character(len=300) :: seed_fname
character(len=300) :: fname_tpl
character(len=14) :: datestamp
Expand Down Expand Up @@ -1842,8 +1840,8 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC )
NUM_ENSEMBLE, N_catl, N_catf, N_obsl_max, &
trim(out_path), trim(exp_id), exp_domain, &
met_force, lai, cat_param, mwRTM_param, &
tile_coord_l, tile_coord_rf, tcinternal%grid_f, &
tcinternal%grid_f, tcinternal%grid_l, tcinternal%grid_g, &
tile_coord_l, tile_coord_rf, &
tcinternal%tgrid_g, tcinternal%pgrid_f, tcinternal%pgrid_g, &
weiyuan-jiang marked this conversation as resolved.
Show resolved Hide resolved
N_catl_vec, low_ind, l2rf, rf2l, &
N_force_pert, N_progn_pert, force_pert_param, progn_pert_param, &
update_type, &
Expand Down Expand Up @@ -1880,7 +1878,7 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC )
date_time_new, trim(out_path), trim(exp_id), &
N_obsl, N_obs_param, NUM_ENSEMBLE, &
N_catl, tile_coord_l, &
N_catf, tile_coord_rf, tcinternal%grid_f, tcinternal%grid_g, &
N_catf, tile_coord_rf, tcinternal%pgrid_f, tcinternal%pgrid_g, &
N_catl_vec, low_ind, rf2l, N_catg, rf2g, &
obs_param, &
met_force, lai, &
Expand Down Expand Up @@ -1959,7 +1957,7 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC )
if (out_smapL4SMaup) &
call write_smapL4SMaup( 'analysis', date_time_new, trim(out_path), &
trim(exp_id), NUM_ENSEMBLE, N_catl, N_catf, N_obsl, tile_coord_rf, &
tcinternal%grid_g, N_catl_vec, low_ind, &
tcinternal%tgrid_g, N_catl_vec, low_ind, &
N_obs_param, obs_param, Observations_l, cat_param, cat_progn )

end if ! end if (.true.)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -943,10 +943,10 @@ subroutine apply_adapt_P( N_pert, pert_param, pert_adapt_param,

! get mean and std in tile space

call grid2tile( pert_grid, N_catd, tile_coord%hash_i_indg,tile_coord%hash_j_indg, & !tile_coord, &
call grid2tile( pert_grid, N_catd, tile_coord%pert_i_indg,tile_coord%pert_j_indg, & !tile_coord, &
pert_param(n)%mean, mu )

call grid2tile( pert_grid, N_catd, tile_coord%hash_i_indg,tile_coord%hash_j_indg, & !tile_coord, &
call grid2tile( pert_grid, N_catd, tile_coord%pert_i_indg,tile_coord%pert_j_indg, & !tile_coord, &
pert_param(n)%std, sg )

select case (pert_param(n)%typ)
Expand Down
Loading