type(states_elec_t), intent(in)  :: st
type(namespace_t),   intent(in)  :: namespace
type(mesh_t),        intent(in)  :: mesh
integer,             intent(out) :: blocksize(2)
integer,             intent(out) :: total_np
if (.not. st%scalapack_compatible) then
  message(1) = "Attempt to use ScaLAPACK when processes have not been distributed in compatible layout."
  message(2) = "You need to set ScaLAPACKCompatible = yes in the input file and re-run."
if (mesh%parallel_in_domains) then
  blocksize(1) = maxval(mesh%pv%np_local_vec) + (st%d%dim - 1) * &
    maxval(mesh%pv%np_local_vec + mesh%pv%np_bndry + mesh%pv%np_ghost)
else
  blocksize(1) = mesh%np + (st%d%dim - 1)*mesh%np_part
end if
if (st%parallel_in_states) then
  blocksize(2) = maxval(st%dist%num)

total_np = blocksize(1)*st%dom_st_proc_grid%nprow

assert(st%d%dim*mesh%np_part >= blocksize(1))
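The values returned here are exactly what a ScaLAPACK array descriptor needs. The following is a minimal sketch of how a caller could feed them to the standard ScaLAPACK routines numroc and descinit; the wrapper name build_states_descriptor and the arguments nst, ictxt, nprow and myrow (taken from the existing BLACS grid) are illustrative assumptions, not part of this module, and the relevant Octopus modules are assumed to be used.

subroutine build_states_descriptor(st, namespace, mesh, nst, ictxt, nprow, myrow, desc)
  type(states_elec_t), intent(in)  :: st
  type(namespace_t),   intent(in)  :: namespace
  type(mesh_t),        intent(in)  :: mesh
  integer,             intent(in)  :: nst, ictxt, nprow, myrow
  integer,             intent(out) :: desc(9)

  integer :: blocksize(2), total_np, lld, info
  integer, external :: numroc

  call states_elec_parallel_blacs_blocksize(st, namespace, mesh, blocksize, total_np)

  ! rows of the (total_np x nst) coefficient matrix stored on this process
  lld = max(1, numroc(total_np, blocksize(1), myrow, 0, nprow))
  ! descriptor of a matrix distributed in blocksize(1) x blocksize(2) blocks
  call descinit(desc, total_np, nst, blocksize(1), blocksize(2), 0, 0, ictxt, lld, info)
end subroutine build_states_descriptor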
type(states_elec_t), intent(inout) :: this

assert(allocated(this%group%psib))

if (this%mpi_grp%size == 1) then

assert(.not. allocated(this%group%rma_win))
safe_allocate(this%group%rma_win(1:this%group%nblocks, 1:this%nik))

do iqn = this%d%kpt%start, this%d%kpt%end
  do ib = 1, this%group%nblocks
    if (this%group%block_is_local(ib, iqn)) then
      ! the batch is stored here: expose it through an RMA window
      call this%group%psib(ib, iqn)%remote_access_start(this%mpi_grp, this%group%rma_win(ib, iqn))
    else
      ! mpi_win_create is collective over the group, so processes that do not
      ! own this block participate with a zero-size window
      call mpi_win_create(0, int(0, mpi_address_kind), 1, &
        mpi_info_null, this%mpi_grp%comm, this%group%rma_win(ib, iqn), mpi_err)
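Because mpi_win_create is collective over the communicator, every process in the states group must take part in the call for every block, including blocks it does not own; that is why the non-local branch above exposes a zero-size window. A self-contained sketch of this pattern in plain MPI Fortran, independent of the Octopus types (names are illustrative; runs with any number of processes):

program window_exposure_sketch
  use mpi
  implicit none
  integer :: rank, win, ierr
  integer(mpi_address_kind) :: winsize
  real(8), allocatable :: buf(:)

  call mpi_init(ierr)
  call mpi_comm_rank(mpi_comm_world, rank, ierr)

  if (mod(rank, 2) == 0) then
    ! even ranks own data and expose it
    allocate(buf(100))
    buf = real(rank, 8)
    winsize = int(100*8, mpi_address_kind)
    call mpi_win_create(buf, winsize, 8, mpi_info_null, mpi_comm_world, win, ierr)
  else
    ! the other ranks must still join the collective call: zero-size window
    allocate(buf(0))
    call mpi_win_create(buf, int(0, mpi_address_kind), 1, mpi_info_null, mpi_comm_world, win, ierr)
  end if

  call mpi_win_free(win, ierr)
  call mpi_finalize(ierr)
end program window_exposure_sketch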
if (.not. allocated(this%group%rma_win)) return

assert(allocated(this%group%psib))

do iqn = this%d%kpt%start, this%d%kpt%end
  do ib = 1, this%group%nblocks
    if (this%group%block_is_local(ib, iqn)) then
      call this%group%psib(ib, iqn)%remote_access_stop(this%group%rma_win(ib, iqn))
    else
      ! free the zero-size window created for non-local blocks
      call mpi_win_free(this%group%rma_win(ib, iqn), mpi_err)
    end if
  end do
end do

safe_deallocate_a(this%group%rma_win)
integer,           intent(in) :: np
integer,           intent(in) :: ib
integer,           intent(in) :: ik
logical, optional, intent(in) :: packed

if (states_are_real(st)) then
  call dwfs_elec_init(psib, st%d%dim, st%group%block_range(ib, 1), st%group%block_range(ib, 2), &
    np, ik, packed=packed)
else
  call zwfs_elec_init(psib, st%d%dim, st%group%block_range(ib, 1), st%group%block_range(ib, 2), &
    np, ik, packed=packed)
end if
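A hedged usage sketch of the interface documented below, states_elec_parallel_allocate_batch(st, psib, np, ib, ik, packed). The pointer declaration of psib (mirroring the get_block interface further down) and the choice of mesh%np for the number of points are assumptions made for illustration only.

class(wfs_elec_t), pointer :: psib

! allocate a batch holding the states of block ib at k-point ik,
! with one value per local mesh point, in packed storage
call states_elec_parallel_allocate_batch(st, psib, mesh%np, ib, ik, packed=.true.)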
type(mesh_t),               intent(in)  :: mesh
integer,                    intent(in)  :: ib
integer,                    intent(in)  :: iqn
class(wfs_elec_t), pointer, intent(out) :: psib

integer :: total_size
if (this%group%block_is_local(ib, iqn)) then
  ! the block is stored on this process: return a pointer to the local batch
  psib => this%group%psib(ib, iqn)

assert(allocated(this%group%rma_win))

! pack the batch and fetch the owner's data into the packed buffer
call psib%do_pack(copy = .false.)
assert(product(psib%pack_size) < huge(0_int32))
total_size = product(int(psib%pack_size, int32))

call mpi_win_lock(mpi_lock_shared, this%group%block_node(ib), 0, this%group%rma_win(ib, iqn), mpi_err)

if (states_are_real(this)) then
  call mpi_get(psib%dff_pack(1, 1), total_size, mpi_double_precision, &
    this%group%block_node(ib), int(0, mpi_address_kind), total_size, mpi_double_precision, &
    this%group%rma_win(ib, iqn), mpi_err)
else
  call mpi_get(psib%zff_pack(1, 1), total_size, mpi_double_complex, &
    this%group%block_node(ib), int(0, mpi_address_kind), total_size, mpi_double_complex, &
    this%group%rma_win(ib, iqn), mpi_err)
end if

call mpi_win_unlock(this%group%block_node(ib), this%group%rma_win(ib, iqn), mpi_err)
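The lock/get/unlock sequence above is the standard passive-target one-sided pattern: the origin locks the owner's window in shared mode, copies the packed coefficients with mpi_get, and unlocks, without the owning process taking part. A self-contained illustration in plain MPI Fortran, independent of the Octopus types (run with at least two processes):

program passive_target_get_sketch
  use mpi
  implicit none
  integer, parameter :: n = 8
  integer :: rank, win, ierr
  integer(mpi_address_kind) :: winsize
  real(8) :: buf(n), remote(n)

  call mpi_init(ierr)
  call mpi_comm_rank(mpi_comm_world, rank, ierr)

  buf = real(rank, 8)
  winsize = int(n*8, mpi_address_kind)
  ! every process exposes its buffer (collective call)
  call mpi_win_create(buf, winsize, 8, mpi_info_null, mpi_comm_world, win, ierr)

  if (rank == 1) then
    ! fetch rank 0's buffer, as states_elec_parallel_get_block does for a non-local batch
    call mpi_win_lock(mpi_lock_shared, 0, 0, win, ierr)
    call mpi_get(remote(1), n, mpi_double_precision, 0, int(0, mpi_address_kind), &
      n, mpi_double_precision, win, ierr)
    call mpi_win_unlock(0, win, ierr)
  end if

  call mpi_win_free(win, ierr)
  call mpi_finalize(ierr)
end program passive_target_get_sketch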
integer, intent(in) :: ib

if (this%group%block_is_local(ib, psib%ik)) then

safe_deallocate_p(psib)
#include "states_elec_parallel_inc.F90"

#include "complex.F90"
#include "states_elec_parallel_inc.F90"
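The implementation file is included once per numeric type, which is how the d* (real) and z* (complex) gather routines listed below are produced from a single source. A generic illustration of this include-twice technique; the file name template_inc.F90 and the macros R_TYPE and FNAME are hypothetical placeholders, not the actual Octopus headers:

! template_inc.F90 (hypothetical) -- included once per numeric type
subroutine FNAME(aa, total)
  implicit none
  R_TYPE, intent(in)  :: aa(:)
  R_TYPE, intent(out) :: total
  total = sum(aa)
end subroutine FNAME

! in the including file, preprocessed with -cpp:
#define R_TYPE real(8)
#define FNAME dsum_all
#include "template_inc.F90"
#undef R_TYPE
#undef FNAME

#define R_TYPE complex(8)
#define FNAME zsum_all
#include "template_inc.F90"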
This module implements batches of mesh functions.
This module defines the meshes, which are used in Octopus.
character(len=256), dimension(max_lines), public :: message
to be output by fatal, warning
subroutine, public messages_fatal(no_lines, only_root_writes, namespace)
integer, public :: mpi_err
used to store return values of mpi calls
subroutine, public profiling_out(label)
Increment out counter and sum up difference between entry and exit time.
subroutine, public profiling_in(label, exclude)
Increment in counter and save entry time.
pure logical function, public states_are_real(st)
This module provides routines for communicating states when using states parallelization (a usage sketch follows this list).
subroutine, public states_elec_parallel_remote_access_stop(this)
stop remote memory access for states on other processors
subroutine dstates_elec_parallel_gather_3(st, dims, psi)
gather distributed states into a local array
subroutine dstates_elec_parallel_gather_1(st, aa)
gather a one-dimensional array, distributed over states
subroutine zstates_elec_parallel_gather_1(st, aa)
gather a one-dimensional array, distributed over states
subroutine, public states_elec_parallel_get_block(this, mesh, ib, iqn, psib)
retrieve wave functions from a different node
subroutine, public states_elec_parallel_allocate_batch(st, psib, np, ib, ik, packed)
Initializes a wfs_elec_t for np points, given the block index ib and the k-point index ik.
subroutine, public states_elec_parallel_remote_access_start(this)
start remote memory access for states on other processors
subroutine zstates_elec_parallel_gather_3(st, dims, psi)
gather distributed states into a local array
subroutine, public states_elec_parallel_blacs_blocksize(st, namespace, mesh, blocksize, total_np)
determine the block size for a BLACS distribution
subroutine, public states_elec_parallel_release_block(this, ib, psib)
release a block previously obtained with states_elec_parallel_get_block
subroutine, public zwfs_elec_init(this, dim, st_start, st_end, np, ik, special, packed)
initialize an empty wfs_elec_t object
subroutine, public dwfs_elec_init(this, dim, st_start, st_end, np, ik, special, packed)
initialize an empty wfs_elec_t object
Describes mesh distribution to nodes.
The states_elec_t class contains all electronic wave functions.
batches of electronic states
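Taken together, the routines above are meant to be used as a bracketed sequence: start remote access, fetch and release blocks, stop remote access. The following is a hedged sketch built only from the interfaces documented above; the wrapper routine and the indices ib and iqn are illustrative, and the remote-access start/stop calls are collective over the states group.

subroutine fetch_one_block(st, mesh, ib, iqn)
  type(states_elec_t), target, intent(inout) :: st
  type(mesh_t),                intent(in)    :: mesh
  integer,                     intent(in)    :: ib, iqn

  class(wfs_elec_t), pointer :: psib

  ! expose all locally stored batches for one-sided access (collective)
  call states_elec_parallel_remote_access_start(st)

  ! obtain block ib of k-point iqn; a local block is returned directly,
  ! a remote one is copied over MPI RMA into a temporary batch
  call states_elec_parallel_get_block(st, mesh, ib, iqn, psib)

  ! ... use psib here ...

  ! release the (possibly temporary) batch
  call states_elec_parallel_release_block(st, ib, psib)

  ! close the access epoch and free the MPI windows (collective)
  call states_elec_parallel_remote_access_stop(st)
end subroutine fetch_one_block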