task_list: Replace int8 encoding with atom_pair_type
oschuett committed Jun 29, 2020
1 parent ac9d9e6 commit a9c3103
Showing 5 changed files with 161 additions and 164 deletions.
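The type itself is introduced in src/task_list_types.F, one of the five changed files not expanded in this view. As background, a minimal sketch of the change, with every component name an assumption rather than a quotation from this commit: previously each communicated atom pair was packed arithmetically into a single INTEGER(kind=int_8) that every consumer had to decode; the commit replaces that packed value with a small derived type whose components are read by name.

MODULE task_list_types_sketch
   IMPLICIT NONE
   ! Old scheme (schematic): pack (rank, row, col, image) into one 64-bit
   ! integer, e.g.
   !   pair = ((rank*natoms + row - 1)*natoms + col - 1)*nimages + image - 1
   ! so decoding requires knowing natoms and nimages.
   ! New scheme: carry the same data as named components.
   TYPE atom_pair_type
      INTEGER :: rank   ! MPI rank that sends or receives the matrix block
      INTEGER :: row    ! row atom index of the block
      INTEGER :: col    ! column atom index of the block
      INTEGER :: image  ! periodic image the block belongs to
   END TYPE atom_pair_type
END MODULE task_list_types_sketch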
17 changes: 10 additions & 7 deletions src/qs_collocate_density.F
@@ -108,7 +108,8 @@ MODULE qs_collocate_density
USE rs_pw_interface, ONLY: density_rs2pw,&
density_rs2pw_basic
USE task_list_methods, ONLY: rs_distribute_matrix
- USE task_list_types, ONLY: task_list_type,&
+ USE task_list_types, ONLY: atom_pair_type,&
+                            task_list_type,&
task_type

!$ USE OMP_LIB, ONLY: omp_get_max_threads, omp_get_thread_num, omp_get_num_threads
@@ -1430,7 +1431,6 @@ SUBROUTINE calculate_rho_elec(matrix_p, matrix_p_kp, rho, rho_gspace, total_rho,
jkind, jkind_old, jpgf, jset, jset_old, lb, lbr, lbw, maxco, maxsgf_set, n, na1, na2, &
natoms, nb1, nb2, nblock, ncoa, ncob, nimages, nr, nrlevel, nseta, nsetb, ntasks, &
nthread, nw, nxy, nz, nzsize, sgfa, sgfb, ub
- INTEGER(kind=int_8), DIMENSION(:), POINTER :: atom_pair_recv, atom_pair_send
INTEGER, DIMENSION(:), POINTER :: la_max, la_min, lb_max, lb_min, npgfa, &
npgfb, nsgfa, nsgfb
INTEGER, DIMENSION(:, :), POINTER :: first_sgfa, first_sgfb
@@ -1443,6 +1443,7 @@ SUBROUTINE calculate_rho_elec(matrix_p, matrix_p_kp, rho, rho_gspace, total_rho,
REAL(KIND=dp), DIMENSION(:, :), POINTER :: p_block, pab, sphi_a, sphi_b, work, &
zeta, zetb
REAL(KIND=dp), DIMENSION(:, :, :), POINTER :: pabt, workt
+ TYPE(atom_pair_type), DIMENSION(:), POINTER :: atom_pair_recv, atom_pair_send
TYPE(cell_type), POINTER :: cell
TYPE(cube_info_type), DIMENSION(:), POINTER :: cube_info
TYPE(dbcsr_p_type), DIMENSION(:), POINTER :: deltap
@@ -1641,8 +1642,9 @@ SUBROUTINE calculate_rho_elec(matrix_p, matrix_p_kp, rho, rho_gspace, total_rho,

! distribute the matrix
IF (distributed_rs_grids) THEN
- CALL rs_distribute_matrix(rs_descs, deltap, atom_pair_send, atom_pair_recv, &
-                           natoms, nimages, scatter=.TRUE.)
+ CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=deltap, &
+                           atom_pair_send=atom_pair_send, atom_pair_recv=atom_pair_recv, &
+                           nimages=nimages, scatter=.TRUE.)
ENDIF

! map all tasks on the grids
@@ -1940,7 +1942,6 @@ SUBROUTINE calculate_drho_elec(matrix_p, matrix_p_kp, drho, drho_gspace, qs_env,
ikind_old, img, img_old, ipgf, iset, iset_old, itask, ithread, jatom, jatom_old, jkind, &
jkind_old, jpgf, jset, jset_old, maxco, maxsgf_set, na1, na2, natoms, nb1, nb2, ncoa, &
ncob, nimages, nseta, nsetb, ntasks, nthread, sgfa, sgfb
- INTEGER(kind=int_8), DIMENSION(:), POINTER :: atom_pair_recv, atom_pair_send
INTEGER, DIMENSION(:), POINTER :: la_max, la_min, lb_max, lb_min, npgfa, &
npgfb, nsgfa, nsgfb
INTEGER, DIMENSION(:, :), POINTER :: first_sgfa, first_sgfb
@@ -1952,6 +1953,7 @@ SUBROUTINE calculate_drho_elec(matrix_p, matrix_p_kp, drho, drho_gspace, qs_env,
REAL(KIND=dp), DIMENSION(:, :), POINTER :: p_block, pab, sphi_a, sphi_b, work, &
zeta, zetb
REAL(KIND=dp), DIMENSION(:, :, :), POINTER :: pabt, workt
+ TYPE(atom_pair_type), DIMENSION(:), POINTER :: atom_pair_recv, atom_pair_send
TYPE(cell_type), POINTER :: cell
TYPE(cube_info_type), DIMENSION(:), POINTER :: cube_info
TYPE(dbcsr_p_type), DIMENSION(:), POINTER :: deltap
@@ -2076,8 +2078,9 @@ SUBROUTINE calculate_drho_elec(matrix_p, matrix_p_kp, drho, drho_gspace, qs_env,

! distribute the matrix
IF (distributed_rs_grids) THEN
- CALL rs_distribute_matrix(rs_descs, deltap, atom_pair_send, atom_pair_recv, &
-                           natoms, nimages, scatter=.TRUE.)
+ CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=deltap, &
+                           atom_pair_send=atom_pair_send, atom_pair_recv=atom_pair_recv, &
+                           nimages=nimages, scatter=.TRUE.)
ENDIF

! map all tasks on the grids
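Note what disappears from the rs_distribute_matrix calls above: natoms is gone while nimages stays. With the packed encoding the routine needed natoms to split a pair back into its indices; a typed pair is self-describing, and nimages survives only because the matrix arrays are still indexed per image. A schematic before/after of the consuming code, with all local names assumed and the component names taken from the sketch above:

! Sketch only: how one pair is read before and after this commit.
SUBROUTINE decode_pair_sketch(packed, natoms, nimages, typed)
   USE kinds, ONLY: int_8
   USE task_list_types, ONLY: atom_pair_type
   INTEGER(kind=int_8), INTENT(IN)  :: packed          ! old packed pair
   INTEGER, INTENT(IN)              :: natoms, nimages
   TYPE(atom_pair_type), INTENT(IN) :: typed           ! new typed pair
   INTEGER                          :: row, col, image
   INTEGER(kind=int_8)              :: rest

   ! Before: arithmetic decode, only possible with natoms/nimages at hand.
   image = INT(MODULO(packed, INT(nimages, int_8))) + 1
   rest = packed/INT(nimages, int_8)
   col = INT(MODULO(rest, INT(natoms, int_8))) + 1
   row = INT(rest/INT(natoms, int_8)) + 1

   ! After: named access; natoms drops out of the interface entirely.
   row = typed%row
   col = typed%col
   image = typed%image
END SUBROUTINE decode_pair_sketch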
26 changes: 14 additions & 12 deletions src/qs_integrate_potential_product.F
@@ -46,8 +46,7 @@ MODULE qs_integrate_potential_product
USE grid_api, ONLY: integrate_pgf_product
USE input_constants, ONLY: do_admm_exch_scaling_merlot
USE kinds, ONLY: default_string_length,&
-                              dp,&
-                              int_8
+                              dp
USE memory_utilities, ONLY: reallocate
USE orbital_pointers, ONLY: ncoset
USE particle_types, ONLY: particle_type
@@ -66,7 +65,8 @@
rs_grid_retain
USE rs_pw_interface, ONLY: potential_pw2rs
USE task_list_methods, ONLY: rs_distribute_matrix
- USE task_list_types, ONLY: task_list_type,&
+ USE task_list_types, ONLY: atom_pair_type,&
+                            task_list_type,&
task_type
USE virial_types, ONLY: virial_type

@@ -201,15 +201,14 @@ SUBROUTINE integrate_v_rspace_low(v_rspace, hmat, hmat_kp, pmat, pmat_kp, qs_env
ipair, ipgf, ipgf_new, iset, iset_new, iset_old, itask, ithread, jatom, jkind, jkind_old, &
jpgf, jpgf_new, jset, jset_new, jset_old, maxco, maxpgf, maxset, maxsgf_set, na1, na2, &
nb1, nb2, ncoa, ncob, nimages, nkind, nseta, nsetb, nthread, offs_dv, sgfa, sgfb
- INTEGER(KIND=int_8), DIMENSION(:), POINTER :: atom_pair_recv, atom_pair_send
INTEGER, ALLOCATABLE, DIMENSION(:) :: atom_of_kind, kind_of
INTEGER, ALLOCATABLE, DIMENSION(:, :) :: block_touched
INTEGER, DIMENSION(:), POINTER :: la_max, la_min, lb_max, lb_min, npgfa, &
npgfb, nsgfa, nsgfb
INTEGER, DIMENSION(:, :), POINTER :: first_sgfa, first_sgfb
LOGICAL :: atom_pair_changed, atom_pair_done, distributed_grids, do_kp, found, h_duplicated, &
has_threads, my_compute_tau, my_force_adm, my_gapw, new_set_pair_coming, p_duplicated, &
-    pab_required, scatter, use_subpatch, use_virial
+    pab_required, use_subpatch, use_virial
REAL(KIND=dp) :: admm_scal_fac, eps_rho_rspace, f, &
prefactor, radius, scalef, zetp
REAL(KIND=dp), DIMENSION(3) :: force_a, force_b, ra, rab, rab_inv, rb, &
@@ -222,6 +221,7 @@ SUBROUTINE integrate_v_rspace_low(v_rspace, hmat, hmat_kp, pmat, pmat_kp, qs_env
REAL(KIND=dp), DIMENSION(:, :, :), POINTER :: habt, hadb, hdab, pabt, workt
REAL(kind=dp), DIMENSION(:, :, :, :), POINTER :: hadbt, hdabt
TYPE(admm_type), POINTER :: admm_env
+ TYPE(atom_pair_type), DIMENSION(:), POINTER :: atom_pair_recv, atom_pair_send
TYPE(atomic_kind_type), DIMENSION(:), POINTER :: atomic_kind_set
TYPE(cell_type), POINTER :: cell
TYPE(cube_info_type), DIMENSION(:), POINTER :: cube_info
@@ -446,8 +446,9 @@ SUBROUTINE integrate_v_rspace_low(v_rspace, hmat, hmat_kp, pmat, pmat_kp, qs_env
END DO

IF (distributed_grids .AND. pab_required) THEN
- CALL rs_distribute_matrix(rs_descs, deltap, atom_pair_send, atom_pair_recv, &
-                           natom, nimages, scatter=.TRUE.)
+ CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=deltap, &
+                           atom_pair_send=atom_pair_send, atom_pair_recv=atom_pair_recv, &
+                           nimages=nimages, scatter=.TRUE.)
ENDIF

IF (debug_this_module) THEN
@@ -817,16 +818,17 @@ SUBROUTINE integrate_v_rspace_low(v_rspace, hmat, hmat_kp, pmat, pmat_kp, qs_env
IF (h_duplicated) THEN
! Reconstruct H matrix if using distributed RS grids
! note send and recv direction reversed WRT collocate
- scatter = .FALSE.
IF (do_kp) THEN
-    CALL rs_distribute_matrix(rs_descs, dhmat, atom_pair_recv, atom_pair_send, &
-                              natom, nimages, scatter, hmats=hmat_kp)
+    CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=dhmat, &
+                              atom_pair_send=atom_pair_recv, atom_pair_recv=atom_pair_send, & ! swapped
+                              nimages=nimages, scatter=.FALSE., hmats=hmat_kp)
ELSE
ALLOCATE (htemp(1))
htemp(1)%matrix => hmat%matrix

-    CALL rs_distribute_matrix(rs_descs, dhmat, atom_pair_recv, atom_pair_send, &
-                              natom, nimages, scatter, hmats=htemp)
+    CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=dhmat, &
+                              atom_pair_send=atom_pair_recv, atom_pair_recv=atom_pair_send, & ! swapped
+                              nimages=nimages, scatter=.FALSE., hmats=htemp)

NULLIFY (htemp(1)%matrix)
DEALLOCATE (htemp)
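Two details in the gather direction above: the send and receive pair lists are passed swapped relative to the collocate path, and the local scatter variable is dropped in favor of an explicit scatter=.FALSE. at the call site. The keyword interface these call sites imply, reconstructed as an assumption (the actual declaration lives in src/task_list_methods.F, which this view does not expand, and the dummy-argument attributes here are guesses):

! Assumed signature, inferred from the call sites in this diff.
SUBROUTINE rs_distribute_matrix(rs_descs, pmats, atom_pair_send, &
                                atom_pair_recv, nimages, scatter, hmats)
   TYPE(realspace_grid_desc_p_type), DIMENSION(:), POINTER :: rs_descs
   TYPE(dbcsr_p_type), DIMENSION(:), POINTER :: pmats
   TYPE(atom_pair_type), DIMENSION(:), POINTER :: atom_pair_send, atom_pair_recv
   INTEGER, INTENT(IN) :: nimages
   LOGICAL, INTENT(IN) :: scatter
   TYPE(dbcsr_p_type), DIMENSION(:), OPTIONAL, POINTER :: hmats
   ! scatter=.TRUE. : P blocks flow from their DBCSR owners to the ranks
   !                  that collocate them (pmats = deltap above).
   ! scatter=.FALSE.: reverse flow; integrated blocks are accumulated back
   !                  into hmats (pmats = dhmat above), which is why the
   !                  send/recv lists are exchanged at the call sites.
END SUBROUTINE rs_distribute_matrix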
32 changes: 18 additions & 14 deletions src/qs_linres_current.F
@@ -68,8 +68,7 @@ MODULE qs_linres_current
section_vals_get_subs_vals,&
section_vals_type
USE kinds, ONLY: default_path_length,&
-                              dp,&
-                              int_8
+                              dp
USE mathconstants, ONLY: twopi
USE memory_utilities, ONLY: reallocate
USE orbital_pointers, ONLY: ncoset
@@ -126,7 +125,8 @@ MODULE qs_linres_current
USE task_list_methods, ONLY: distribute_tasks,&
rs_distribute_matrix,&
task_list_inner_loop
- USE task_list_types, ONLY: reallocate_tasks,&
+ USE task_list_types, ONLY: atom_pair_type,&
+                            reallocate_tasks,&
task_type
#include "./base/base_uses.f90"

@@ -595,7 +595,6 @@ SUBROUTINE calculate_jrho_resp(mat_d0, mat_jp, mat_jp_rii, mat_jp_riii, iB, idir
jatom_old, jkind, jkind_old, jpgf, jset, jset_old, maxco, maxpgf, maxset, maxsgf, &
maxsgf_set, na1, na2, natom, nb1, nb2, ncoa, ncob, nimages, nkind, nseta, nsetb, ntasks, &
nthread, sgfa, sgfb
- INTEGER(kind=int_8), DIMENSION(:), POINTER :: atom_pair_recv, atom_pair_send
INTEGER, DIMENSION(:), POINTER :: la_max, la_min, lb_max, lb_min, npgfa, &
npgfb, nsgfa, nsgfb
INTEGER, DIMENSION(:, :), POINTER :: first_sgfa, first_sgfb
@@ -612,6 +611,7 @@ SUBROUTINE calculate_jrho_resp(mat_d0, mat_jp, mat_jp_rii, mat_jp_riii, iB, idir
jpab_a, jpab_b, jpab_c, jpab_d, jpblock_a, jpblock_b, jpblock_c, jpblock_d, rpgfa, rpgfb, &
sphi_a, sphi_b, work, zeta, zetb
REAL(KIND=dp), DIMENSION(:, :, :), POINTER :: jpabt_a, jpabt_b, jpabt_c, jpabt_d, workt
+ TYPE(atom_pair_type), DIMENSION(:), POINTER :: atom_pair_recv, atom_pair_send
TYPE(cell_type), POINTER :: cell
TYPE(cp_para_env_type), POINTER :: para_env
TYPE(cube_info_type), DIMENSION(:), POINTER :: cube_info
@@ -871,8 +871,8 @@ SUBROUTINE calculate_jrho_resp(mat_d0, mat_jp, mat_jp_rii, mat_jp_riii, iB, idir
ENDIF

! sorts / redistributes the task list
- CALL distribute_tasks(rs_descs, ntasks, natom, nimages, &
-                       tasks, atom_pair_send, atom_pair_recv, &
+ CALL distribute_tasks(rs_descs=rs_descs, ntasks=ntasks, natoms=natom, tasks=tasks, &
+                       atom_pair_send=atom_pair_send, atom_pair_recv=atom_pair_recv, &
symmetric=.FALSE., reorder_rs_grid_ranks=.TRUE., &
skip_load_balance_distributed=.FALSE.)

@@ -941,14 +941,18 @@ SUBROUTINE calculate_jrho_resp(mat_d0, mat_jp, mat_jp_rii, mat_jp_riii, iB, idir
!-------------------------------------------------------------

IF (distributed_rs_grids) THEN
- CALL rs_distribute_matrix(rs_descs, deltajp_a, atom_pair_send, atom_pair_recv, &
-                           natom, nimages, scatter=.TRUE.)
- CALL rs_distribute_matrix(rs_descs, deltajp_b, atom_pair_send, atom_pair_recv, &
-                           natom, nimages, scatter=.TRUE.)
- CALL rs_distribute_matrix(rs_descs, deltajp_c, atom_pair_send, atom_pair_recv, &
-                           natom, nimages, scatter=.TRUE.)
- IF (do_igaim) CALL rs_distribute_matrix(rs_descs, deltajp_d, atom_pair_send, atom_pair_recv, &
-                                         natom, nimages, scatter=.TRUE.)
+ CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=deltajp_a, &
+                           atom_pair_send=atom_pair_send, atom_pair_recv=atom_pair_recv, &
+                           nimages=nimages, scatter=.TRUE.)
+ CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=deltajp_b, &
+                           atom_pair_send=atom_pair_send, atom_pair_recv=atom_pair_recv, &
+                           nimages=nimages, scatter=.TRUE.)
+ CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=deltajp_c, &
+                           atom_pair_send=atom_pair_send, atom_pair_recv=atom_pair_recv, &
+                           nimages=nimages, scatter=.TRUE.)
+ IF (do_igaim) CALL rs_distribute_matrix(rs_descs=rs_descs, pmats=deltajp_d, &
+                                         atom_pair_send=atom_pair_send, atom_pair_recv=atom_pair_recv, &
+                                         nimages=nimages, scatter=.TRUE.)
ENDIF

ithread = 0
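On the producing side, distribute_tasks now hands back the typed pair lists directly, which is why its nimages argument vanishes from the call above while natoms remains. A hedged sketch of the construction step, with every helper array an invented placeholder (the real routine is distribute_tasks in src/task_list_methods.F):

! Sketch only: building a typed pair list instead of packing integers.
ALLOCATE (atom_pair_send(npairs))                    ! npairs: assumed counter
DO ipair = 1, npairs
   atom_pair_send(ipair)%rank = dest_rank(ipair)     ! assumed helper arrays
   atom_pair_send(ipair)%row = pair_row(ipair)
   atom_pair_send(ipair)%col = pair_col(ipair)
   atom_pair_send(ipair)%image = pair_img(ipair)
END DO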
