20 #ifndef OHMMS_COMMUNICATE_H 21 #define OHMMS_COMMUNICATE_H 28 #include "mpi3/environment.hpp" 29 namespace mpi3 = boost::mpi3;
#if defined(HAVE_MPI)
  /// Implicit conversion to the raw MPI communicator handle wrapped by this
  /// object, so a Communicate can be passed directly to MPI_* calls.
  operator MPI_Comm() const { return myMPI; }
130 void setName(
const char* aname,
int alen) {
myName = std::string(aname, alen); }
#ifndef HOST_NAME_MAX
#ifdef _POSIX_HOST_NAME_MAX
#define HOST_NAME_MAX _POSIX_HOST_NAME_MAX
/// Split this communicator into sub-communicators, one per distinct value of
/// `key`.  NOTE(review): MPI_Comm_split's signature is (comm, color, key, out);
/// the `key` parameter here is passed as the MPI *color*, while `myrank` is
/// passed as the MPI key so rank ordering is preserved within each new group.
/// @param key  color selecting which sub-communicator this rank joins
/// @param comm [out] receives the new sub-communicator handle
void split_comm(int key, MPI_Comm& comm)
{
  MPI_Comm_split(myMPI, key, myrank, &comm);
}
/// Reduce `n` elements between the two buffers (MPI_Reduce-style; which
/// pointer is the send vs. receive buffer is not visible here — confirm at
/// the definition).
void reduce(T* restrict, T* restrict, int n);
/// Broadcast `n` elements of T to all ranks of this communicator.
void bcast(T* restrict, int n);
/// Blocking point-to-point send of a single object to rank `dest` with `tag`.
void send(int dest, int tag, T&);
/// Gather `sb` from every rank into `rb` on rank `dest` (default rank 0).
void gather(T& sb, T& rb, int dest = 0);
/// Variable-length gather: `counts`/`displ` give per-rank element counts and
/// displacements in the receive buffer.
template<typename T, typename IT>
void gatherv(T& sb, T& rb, IT& counts, IT& displ, int dest = 0);
/// Variable-length gather whose concatenated result is delivered to all ranks.
template<typename T, typename IT>
void allgatherv(T& sb, T& rb, IT& counts, IT& displ);
/// Scatter `sb` on rank `dest` into each rank's `rb`.
void scatter(T& sb, T& rb, int dest = 0);
/// Variable-length scatter from rank `source`.
template<typename T, typename IT>
void scatterv(T& sb, T& rb, IT& counts, IT& displ, int source = 0);
/// Pointer-buffer gatherv: each rank contributes `n` local elements;
/// per-rank sizes/offsets on the root come from `counts`/`displ`.
template<typename T, typename IT>
void gatherv(T* sb, T* rb, int n, IT& counts, IT& displ, int dest = 0);
/// In-place gatherv operating on `buf` with an explicit MPI datatype handle.
template<typename T, typename TMPI, typename IT>
void gatherv_in_place(T* buf, TMPI& datatype, IT& counts, IT& displ, int dest = 0);
/// Underlying boost::mpi3 communicator; `mutable` — presumably so const
/// member functions can still issue communication calls (confirm).
mutable mpi3::communicator comm;
#endif // OHMMS_COMMUNICATE_H
virtual ~Communicate()
Destructor; performs proper finalization of the communication library.
void initialize(int argc, char **argv)
request isend(int dest, int tag, T &)
request irecv(int source, int tag, T &)
int rank() const
return the rank
void send(int dest, int tag, T &)
void gatherv(T &sb, T &rb, IT &counts, IT &displ, int dest=0)
void cleanupMessage(void *)
mpi_comm_type myMPI
Raw communicator.
Communicate * Controller
Global Communicator for a process.
int size() const
return the number of tasks
Wrapping information on parallelism.
int getGroupID() const
return the group id
const std::string & getName() const
std::string myName
Communicator name.
void allgather(T &sb, T &rb, int count)
void setName(const char *aname, int alen)
void gather(T &sb, T &rb, int dest=0)
bool isGroupLeader()
Returns true if the current MPI rank is the group leader.
void gatherv_in_place(T *buf, TMPI &datatype, IT &counts, IT &displ, int dest=0)
int d_ngroups
Total number of groups in the parent communicator.
int d_groupid
Group ID of the current communicator in the parent communicator.
void setName(const std::string &aname)
Communicate NodeComm() const
provide a node/shared-memory communicator from current (parent) communicator
int getNumGroups() const
Returns the number of intra-communicators that belong to the same group.
void scatter(T &sb, T &rb, int dest=0)
std::unique_ptr< Communicate > GroupLeaderComm
Group Leader Communicator.
void reduce_in_place(T *restrict, int n)
mpi_comm_type getMPI() const
Returns the raw communicator handle (typically MPI_COMM_WORLD).
Communicate * getGroupLeaderComm()
static const int MPI_REQUEST_NULL
void barrier_and_abort(const std::string &msg) const
static const int MPI_COMM_NULL
void allgatherv(T &sb, T &rb, IT &counts, IT &displ)
void scatterv(T &sb, T &rb, IT &counts, IT &displ, int source=0)