clean up variable and subroutine names for Ldas grid #637

Merged · 13 commits · Mar 22, 2023
8 changes: 4 additions & 4 deletions src/Applications/LDAS_App/preprocess_ldas_routines.F90
@@ -46,7 +46,7 @@ module preprocess_ldas_routines

use LDAS_TileCoordRoutines, ONLY: &
LDAS_create_grid_g, &
-       get_tile_grid, &
+       get_minExtent_grid, &
io_domain_files

use nr_ran2_gasdev, ONLY: &
@@ -537,9 +537,9 @@ subroutine domain_setup( &
! determine smallest subgrid of tile_grid_d that contains all
! catchments/tiles in domain

-    call get_tile_grid( N_cat_domain, tile_coord%i_indg, tile_coord%j_indg, &
-         tile_coord%min_lon, tile_coord%min_lat, tile_coord%max_lon, tile_coord%max_lat, &
-         tile_grid_g, tile_grid_d)
+    tile_grid_d = get_minExtent_grid( N_cat_domain, tile_coord%i_indg, tile_coord%j_indg, &
+         tile_coord%min_lon, tile_coord%min_lat, tile_coord%max_lon, tile_coord%max_lat, &
+         tile_grid_g)

! output domain files

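The pattern of this change, repeated throughout the PR, replaces an output-argument subroutine with a function that returns the shrunken grid. A minimal sketch of what the new interface could look like, assuming a simplified grid_def_type with N_lon/N_lat and corner fields (the real type is in LDAS_TileCoordType and the real routine in LDAS_TileCoordRoutines; neither is shown in this diff):

   ! Hedged sketch only -- NOT the GEOSldas implementation.  It assumes a
   ! simplified grid_def_type; the real one is defined in LDAS_TileCoordType.
   module minextent_sketch
      implicit none

      type :: grid_def_type
         integer :: N_lon, N_lat          ! grid dimensions (assumed fields)
         real    :: ll_lon, ll_lat        ! lower-left corner
         real    :: ur_lon, ur_lat        ! upper-right corner
      end type grid_def_type

   contains

      ! smallest subgrid of grid_in that contains all N_tile tiles
      function get_minExtent_grid(N_tile, i_indg, j_indg, min_lon, min_lat, &
                                  max_lon, max_lat, grid_in) result(grid_out)
         integer,             intent(in) :: N_tile
         integer,             intent(in) :: i_indg(N_tile), j_indg(N_tile)
         real,                intent(in) :: min_lon(N_tile), min_lat(N_tile)
         real,                intent(in) :: max_lon(N_tile), max_lat(N_tile)
         type(grid_def_type), intent(in) :: grid_in
         type(grid_def_type)             :: grid_out

         grid_out = grid_in                               ! start from the parent grid
         grid_out%N_lon  = maxval(i_indg) - minval(i_indg) + 1
         grid_out%N_lat  = maxval(j_indg) - minval(j_indg) + 1
         grid_out%ll_lon = minval(min_lon)                ! shrink to the tile bounding box
         grid_out%ll_lat = minval(min_lat)
         grid_out%ur_lon = maxval(max_lon)
         grid_out%ur_lat = maxval(max_lat)
      end function get_minExtent_grid

   end module minextent_sketch

With the function form, call sites read naturally as an assignment (tile_grid_d = get_minExtent_grid(..., tile_grid_g), as in the hunk above) rather than passing the result grid as a trailing output argument.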
66 changes: 30 additions & 36 deletions src/Components/GEOSldas_GridComp/GEOS_LdasGridComp.F90
@@ -18,7 +18,7 @@ module GEOS_LdasGridCompMod
use EASE_conv, only: ease_inverse
use LDAS_TileCoordType, only: tile_coord_type , T_TILECOORD_STATE, TILECOORD_WRAP
use LDAS_TileCoordType, only: grid_def_type, io_grid_def_type
-  use LDAS_TileCoordRoutines, only: get_tile_grid, get_ij_ind_from_latlon, io_domain_files
+  use LDAS_TileCoordRoutines, only: get_minExtent_grid, get_ij_ind_from_latlon, io_domain_files
use LDAS_ConvertMod, only: esmf2ldas
use LDAS_PertRoutinesMod, only: get_pert_grid
use LDAS_ensdrv_functions,ONLY: get_io_filename
@@ -406,9 +406,10 @@ subroutine Initialize(gc, import, export, clock, rc)
integer :: N_catf
integer :: LSM_CHOICE

-    type(grid_def_type) :: tile_grid_g
-    type(grid_def_type) :: tile_grid_f
-    type(grid_def_type) :: tile_grid_l
+    type(grid_def_type) :: tile_grid_g, pert_grid_g
+    type(grid_def_type) :: tile_grid_f, pert_grid_f
+    type(grid_def_type) :: tile_grid_l, pert_grid_l

type(date_time_type):: start_time
type(ESMF_Time) :: CurrentTime
!type(CubedSphereGridFactory) :: cubed_sphere_factory
@@ -619,39 +620,33 @@ subroutine Initialize(gc, import, export, clock, rc)
close(10)
call io_grid_def_type('w', logunit, tile_grid_f, 'tile_grid_f')

-            block
-              type(grid_def_type) :: latlon_tmp_g
-              integer :: perturbations

-              call MAPL_GetResource(MAPL, perturbations, 'PERTURBATIONS:', default=0, rc=status)
-              if(trim(grid_type) == "Cubed-Sphere" ) then

-                _ASSERT(index(tile_grid_g%gridtype, 'c3') /=0, "tile_grid_g does not describe a cubed-sphere grid")

-                !1) generate a lat-lon grid for landpert and land assim ( 4*N_lonX3*N_lon)
-                call get_pert_grid(tile_grid_g, latlon_tmp_g)
-                tile_grid_g = latlon_tmp_g
-                !2) get hash index
-                do i = 1, N_catf
-                   call get_ij_ind_from_latlon(latlon_tmp_g,tile_coord_f(i)%com_lat,tile_coord_f(i)%com_lon, &
-                        tile_coord_f(i)%hash_i_indg,tile_coord_f(i)%hash_j_indg)
-                enddo
-                !3) re-generate tile_grid_f in Lat-Lon
-                call get_tile_grid(N_catf, tile_coord_f%hash_i_indg, tile_coord_f%hash_j_indg, &
-                     tile_coord_f%min_lon, tile_coord_f%min_lat, tile_coord_f%max_lon, tile_coord_f%max_lat, &
-                     tile_grid_g, tile_grid_f)
+            pert_grid_g = get_pert_grid(tile_grid_g)
+            pert_grid_f = get_pert_grid(tile_grid_f)
+
+            if(trim(grid_type) == "Cubed-Sphere" ) then

+               _ASSERT(index(tile_grid_g%gridtype, 'c3') /=0, "tile_grid_g does not describe a cubed-sphere grid")

+               !1) get hash index for cubed-sphere
+               do i = 1, N_catf
+                  call get_ij_ind_from_latlon(pert_grid_g,tile_coord_f(i)%com_lat,tile_coord_f(i)%com_lon, &
+                       tile_coord_f(i)%hash_i_indg,tile_coord_f(i)%hash_j_indg)
+               enddo
+               !2) re-generate pert_grid_f in Lat-Lon
+               pert_grid_f = get_minExtent_grid(N_catf, tile_coord_f%hash_i_indg, tile_coord_f%hash_j_indg, &
+                    tile_coord_f%min_lon, tile_coord_f%min_lat, tile_coord_f%max_lon, tile_coord_f%max_lat, &
+                    pert_grid_g)
+
            endif
-            end block

else
endif
endif

call MPI_BCAST(N_catf,1,MPI_INTEGER,0,mpicomm,mpierr)
if (.not. IamRoot) allocate(tile_coord_f(N_catf))

call MPI_BCAST(tile_coord_f,N_catf, MPI_tile_coord_type,0,mpicomm, mpierr)
-   call MPI_BCAST(tile_grid_g, 1, MPI_grid_def_type, 0,mpicomm, mpierr)
-   call MPI_BCAST(tile_grid_f, 1, MPI_grid_def_type, 0,mpicomm, mpierr)
+   call MPI_BCAST(pert_grid_g, 1, MPI_grid_def_type, 0,mpicomm, mpierr)
+   call MPI_BCAST(pert_grid_f, 1, MPI_grid_def_type, 0,mpicomm, mpierr)

block
integer, allocatable :: f2tile_id(:), tile_id2f(:)
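In the hunk above, get_pert_grid also changes from an output-argument subroutine to a function, which removes the latlon_tmp_g scratch variable. A hedged sketch of what such a function might do for the cubed-sphere case, based only on the "( 4*N_lonX3*N_lon)" comment in the deleted lines; the real routine is in LDAS_PertRoutinesMod and may differ:

   ! Hedged sketch only -- the real get_pert_grid lives in LDAS_PertRoutinesMod.
   ! It assumes gridtype strings like 'c360' for cubed-sphere and a grid_def_type
   ! with gridtype/N_lon/N_lat fields; all of that is an assumption here.
   function get_pert_grid(grid_in) result(grid_out)
      type(grid_def_type), intent(in) :: grid_in
      type(grid_def_type)             :: grid_out
      integer :: N_face

      grid_out = grid_in
      if (index(grid_in%gridtype, 'c') == 1) then   ! cubed-sphere c<N> input (assumed naming)
         N_face = grid_in%N_lon                     ! cells per cube face (assumed)
         grid_out%gridtype = 'LatLon'
         grid_out%N_lon    = 4*N_face               ! 4N x 3N global lat-lon grid,
         grid_out%N_lat    = 3*N_face               ! per the deleted comment
      end if                                        ! a lat-lon input passes through
   end function get_pert_grid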
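The hash-index loop above maps each tile's center-of-mass lat/lon onto the perturbation grid via get_ij_ind_from_latlon. For a regular lat-lon grid this is a simple index computation; a minimal sketch, assuming ll_lon/ll_lat origin and dlon/dlat spacing fields (assumptions, like the type fields in the earlier sketch):

   ! Hedged sketch of the index lookup; the real routine is in
   ! LDAS_TileCoordRoutines.  The dlon/dlat/ll_lon/ll_lat fields are assumptions.
   subroutine get_ij_ind_from_latlon(grid, lat, lon, i_ind, j_ind)
      type(grid_def_type), intent(in)  :: grid
      real,                intent(in)  :: lat, lon
      integer,             intent(out) :: i_ind, j_ind

      ! 1-based index of the grid cell containing (lat,lon)
      i_ind = floor( (lon - grid%ll_lon) / grid%dlon ) + 1
      j_ind = floor( (lat - grid%ll_lat) / grid%dlat ) + 1
   end subroutine get_ij_ind_from_latlon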
@@ -682,17 +677,16 @@ subroutine Initialize(gc, import, export, clock, rc)

allocate(tcinternal%tile_coord_f,source = tile_coord_f)

-   call get_tile_grid(land_nt_local, &
+   pert_grid_l = get_minExtent_grid(land_nt_local, &
tcinternal%tile_coord%hash_i_indg, tcinternal%tile_coord%hash_j_indg, &
tcinternal%tile_coord%min_lon, tcinternal%tile_coord%min_lat, &
tcinternal%tile_coord%max_lon, tcinternal%tile_coord%max_lat, &
-        tile_grid_g,tile_grid_l)
+        pert_grid_g)

! re-arrange tile_coord_f

-   tcinternal%grid_g = tile_grid_g
-   tcinternal%grid_f = tile_grid_f
-   tcinternal%grid_l = tile_grid_l
+   tcinternal%pgrid_g = pert_grid_g
+   tcinternal%pgrid_f = pert_grid_f
+   tcinternal%pgrid_l = pert_grid_l

do i = 1, NUM_ENSEMBLE
call MAPL_GetObjectFromGC(gcs(METFORCE(i)), CHILD_MAPL, rc=status)
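The MPI_BCAST calls in the earlier hunk send grid_def_type values in a single call, which presumes a committed derived MPI datatype (MPI_grid_def_type). A sketch of how such a type can be built with MPI_Type_create_struct; the 4-integer/8-real member layout here is an assumption, not the actual GEOSldas construction:

   ! Hedged sketch: building an MPI datatype for a derived type so it can be
   ! passed to MPI_BCAST directly.  The member layout is an assumption.
   subroutine create_mpi_grid_def_type(MPI_grid_def_type)
      use mpi
      implicit none
      integer, intent(out) :: MPI_grid_def_type
      integer :: ierr
      integer :: blocklens(2), types(2)
      integer(kind=MPI_ADDRESS_KIND) :: displs(2)

      blocklens = (/ 4, 8 /)                 ! 4 integers, then 8 reals (assumed)
      types     = (/ MPI_INTEGER, MPI_REAL /)
      displs(1) = 0_MPI_ADDRESS_KIND
      displs(2) = 4_MPI_ADDRESS_KIND * 4     ! byte offset past the integers

      call MPI_Type_create_struct(2, blocklens, displs, types, MPI_grid_def_type, ierr)
      call MPI_Type_commit(MPI_grid_def_type, ierr)
   end subroutine create_mpi_grid_def_type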
@@ -1078,8 +1078,6 @@ subroutine Initialize(gc, import, export, clock, rc)
! mapping f to re-orderd f so it is continous for mpi_gather
! rf -- ordered by processors. Within the processor, ordered by MAPL grid
integer, allocatable :: f2rf(:) ! mapping re-orderd rf to f for the LDASsa output
-   type(grid_def_type) :: tile_grid_g
-   type(grid_def_type) :: tile_grid_f
character(len=300) :: seed_fname
character(len=300) :: fname_tpl
character(len=14) :: datestamp
@@ -1842,8 +1840,8 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC )
NUM_ENSEMBLE, N_catl, N_catf, N_obsl_max, &
trim(out_path), trim(exp_id), exp_domain, &
met_force, lai, cat_param, mwRTM_param, &
-        tile_coord_l, tile_coord_rf, tcinternal%grid_f, &
-        tcinternal%grid_f, tcinternal%grid_l, tcinternal%grid_g, &
+        tile_coord_l, tile_coord_rf, &
+        tcinternal%pgrid_f, tcinternal%pgrid_g, &
N_catl_vec, low_ind, l2rf, rf2l, &
N_force_pert, N_progn_pert, force_pert_param, progn_pert_param, &
update_type, &
@@ -1880,7 +1878,7 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC )
date_time_new, trim(out_path), trim(exp_id), &
N_obsl, N_obs_param, NUM_ENSEMBLE, &
N_catl, tile_coord_l, &
-        N_catf, tile_coord_rf, tcinternal%grid_f, tcinternal%grid_g, &
+        N_catf, tile_coord_rf, tcinternal%pgrid_f, tcinternal%pgrid_g, &
N_catl_vec, low_ind, rf2l, N_catg, rf2g, &
obs_param, &
met_force, lai, &
@@ -1959,7 +1957,7 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC )
if (out_smapL4SMaup) &
call write_smapL4SMaup( 'analysis', date_time_new, trim(out_path), &
trim(exp_id), NUM_ENSEMBLE, N_catl, N_catf, N_obsl, tile_coord_rf, &
-        tcinternal%grid_g, N_catl_vec, low_ind, &
+        tcinternal%pgrid_g, N_catl_vec, low_ind, &
N_obs_param, obs_param, Observations_l, cat_param, cat_progn )

end if ! end if (.true.)