Introduction
The C++ (cpp) `CommMap` examples below are extracted from popular open-source projects; refer to them for typical usage.
Programming language: C++ (Cpp)
Class/type: CommMap (referenced in the code as the dependent type `typename CommMap<T>::Type`)
Example #1 — File: fei_CommMap.hpp, Project: cakeisalie/oomphlib_003
void addItemsToCommMap(int proc, size_t numItems, const T* items,
typename CommMap<T>::Type& comm_map,
bool keep_sorted_and_unique = true)
{
typename CommMap<T>::Type::iterator iter = comm_map.find(proc);
if (iter == comm_map.end()) {
iter = comm_map.insert(std::make_pair(proc,std::vector<T>())).first;
}
std::vector<T>& comm_items = iter->second;
if (keep_sorted_and_unique) {
for(size_t i=0; i<numItems; ++i) {
fei::sortedListInsert(items[i], comm_items);
}
}
else {
for(size_t i=0; i<numItems; ++i) {
comm_items.push_back(items[i]);
}
}
}
Example #2 — File: fei_CommUtils.hpp, Project: 00liujj/trilinos
/// Exchange CommMap contents between MPI processes: each (proc, items)
/// entry in 'sendCommMap' is sent to that proc, and data arriving from
/// peer processes is stored in 'recvCommMap', keyed by sending proc.
///
/// @param comm MPI communicator over which the exchange takes place.
/// @param sendCommMap proc -> items to send to that proc.
/// @param recvCommMap output: proc -> items received from that proc.
/// @param recvProcsKnownOnEntry if true, recvCommMap's keys already name
///        the procs we will receive from; otherwise recvCommMap is cleared
///        and the recv-procs are discovered via mirrorProcs().
/// @param recvLengthsKnownOnEntry if true, each recvCommMap vector is
///        assumed to be pre-sized to the incoming message length; otherwise
///        lengths are exchanged first and the vectors resized here.
/// @return 0 on success, -1 if the length exchange fails.
int exchangeCommMapData(MPI_Comm comm,
const typename CommMap<T>::Type& sendCommMap,
typename CommMap<T>::Type& recvCommMap,
bool recvProcsKnownOnEntry = false,
bool recvLengthsKnownOnEntry = false)
{
if (!recvProcsKnownOnEntry) {
recvCommMap.clear();
}
//FEI_SER marks a serial (no-MPI) build; all communication below is
//compiled out and the function trivially returns 0.
#ifndef FEI_SER
int tag = 11120;
MPI_Datatype mpi_dtype = fei::mpiTraits<T>::mpi_type();
//The procs we send to are exactly the keys of sendCommMap.
std::vector<int> sendProcs;
fei::copyKeysToVector(sendCommMap, sendProcs);
std::vector<int> recvProcs;
if (recvProcsKnownOnEntry) {
fei::copyKeysToVector(recvCommMap, recvProcs);
}
else {
//NOTE(review): mirrorProcs presumably computes, for our send-list, the
//set of procs that will be sending to us — confirm against its definition.
mirrorProcs(comm, sendProcs, recvProcs);
//Insert an empty entry per recv-proc (numItems==0, items==NULL) so that
//recvCommMap has a slot for every expected sender.
for(size_t i=0; i<recvProcs.size(); ++i) {
addItemsToCommMap<T>(recvProcs[i], 0, NULL, recvCommMap);
}
}
if (!recvLengthsKnownOnEntry) {
//First exchange message lengths: send each peer the size of the vector
//destined for it, then resize our recv buffers accordingly.
std::vector<int> tmpIntData(sendProcs.size());
std::vector<int> recvLengths(recvProcs.size());
typename fei::CommMap<T>::Type::const_iterator
s_iter = sendCommMap.begin(), s_end = sendCommMap.end();
for(size_t i=0; s_iter != s_end; ++s_iter, ++i) {
tmpIntData[i] = s_iter->second.size();
}
if ( exchangeIntData(comm, sendProcs, tmpIntData, recvProcs, recvLengths) != 0) {
return(-1);
}
//recvLengths[i] corresponds to recvProcs[i]; size each buffer so the
//matching MPI_Irecv below has room for the full message.
for(size_t i=0; i<recvProcs.size(); ++i) {
std::vector<T>& rdata = recvCommMap[recvProcs[i]];
rdata.resize(recvLengths[i]);
}
}
//launch Irecv's for recv-data (one non-blocking receive per entry in
//recvCommMap, posted before any send to avoid unmatched messages):
std::vector<MPI_Request> mpiReqs;
mpiReqs.resize(recvProcs.size());
typename fei::CommMap<T>::Type::iterator
r_iter = recvCommMap.begin(), r_end = recvCommMap.end();
size_t req_offset = 0;
for(; r_iter != r_end; ++r_iter) {
int rproc = r_iter->first;
std::vector<T>& recv_vec = r_iter->second;
int len = recv_vec.size();
//Zero-length messages are still received; pass NULL rather than
//dereferencing an empty vector.
T* recv_buf = len > 0 ? &recv_vec[0] : NULL;
CHK_MPI( MPI_Irecv(recv_buf, len, mpi_dtype, rproc,
tag, comm, &mpiReqs[req_offset++]) );
}
//send the send-data:
typename fei::CommMap<T>::Type::const_iterator
s_iter = sendCommMap.begin(), s_end = sendCommMap.end();
for(; s_iter != s_end; ++s_iter) {
int sproc = s_iter->first;
const std::vector<T>& send_vec = s_iter->second;
int len = send_vec.size();
//const_cast is needed because pre-MPI-3 MPI_Send takes a non-const
//buffer pointer; the data is not modified.
T* send_buf = len>0 ? const_cast<T*>(&send_vec[0]) : NULL;
CHK_MPI( MPI_Send(send_buf, len, mpi_dtype, sproc, tag, comm) );
}
//complete the Irecvs (one Waitany per posted request; completion order
//does not matter since each buffer was sized and bound above):
for(size_t i=0; i<mpiReqs.size(); ++i) {
int index;
MPI_Status status;
CHK_MPI( MPI_Waitany(mpiReqs.size(), &mpiReqs[0], &index, &status) );
}
#endif
return(0);
}