#42: Rename global method to zoltan_get_global_comm()
thearusable authored and JacobDomagala committed Aug 25, 2023
1 parent 4b4af31 commit 1ba29cd
Showing 58 changed files with 330 additions and 330 deletions.
2 changes: 1 addition & 1 deletion packages/zoltan/README.developer
@@ -69,7 +69,7 @@ printf("GID: " ZOLTAN_ID_SPEC ", LID %d\n", my_gid, my_lid);

To send a ZOLTAN_ID_TYPE in an MPI message, use ZOLTAN_ID_MPI_TYPE:

-    MPI_Bcast(&gid, 1, ZOLTAN_ID_MPI_TYPE, 0, MPI_Comm_Default());
+    MPI_Bcast(&gid, 1, ZOLTAN_ID_MPI_TYPE, 0, zoltan_get_global_comm());

To silence compiler warnings, you can properly specify a constant of type ZOLTAN_ID_TYPE using ZOLTAN_ID_CONSTANT:

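A hedged aside, since the file's own ZOLTAN_ID_CONSTANT example is collapsed above: assuming the macro simply attaches the unsigned-integer suffix that matches the configured width of ZOLTAN_ID_TYPE, a use might look like this.

    /* Hypothetical sketch, not from this diff: a properly suffixed
       constant avoids signed/unsigned warnings when compared with a
       ZOLTAN_ID_TYPE value. */
    if (my_gid != ZOLTAN_ID_CONSTANT(0))
        printf("GID: " ZOLTAN_ID_SPEC "\n", my_gid);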
20 changes: 10 additions & 10 deletions packages/zoltan/src/Utilities/Communication/comm_default.c
@@ -52,21 +52,21 @@
extern "C" {
#endif

-static pthread_mutex_t global_comm_lock;
-static MPI_Comm Global_Zoltan_Comm = MPI_COMM_WORLD;
+static pthread_mutex_t zoltan_global_mpi_lock;
+static MPI_Comm Zoltan_Global_MPI_Comm = MPI_COMM_WORLD;

/* Function to set the default communicator */
-inline void initialize_global_comm(MPI_Comm comm) {
-  pthread_mutex_lock(&global_comm_lock);
-  Global_Zoltan_Comm = comm;
-  pthread_mutex_unlock(&global_comm_lock);
+inline void zoltan_initialize_global_comm(MPI_Comm comm) {
+  pthread_mutex_lock(&zoltan_global_mpi_lock);
+  Zoltan_Global_MPI_Comm = comm;
+  pthread_mutex_unlock(&zoltan_global_mpi_lock);
}

/* Function to get the default communicator */
-inline MPI_Comm MPI_Comm_Default() {
-  pthread_mutex_lock(&global_comm_lock);
-  MPI_Comm comm = Global_Zoltan_Comm;
-  pthread_mutex_unlock(&global_comm_lock);
+inline MPI_Comm zoltan_get_global_comm() {
+  pthread_mutex_lock(&zoltan_global_mpi_lock);
+  MPI_Comm comm = Zoltan_Global_MPI_Comm;
+  pthread_mutex_unlock(&zoltan_global_mpi_lock);
return comm;
}

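For context on the renamed pair above, a minimal usage sketch. It assumes the two functions are declared in zoltan_comm.h (the mem.c hunk below includes that header) and that a host code wants Zoltan to operate on a sub-communicator instead of MPI_COMM_WORLD; the split key is illustrative only.

    #include <mpi.h>
    #include <zoltan_comm.h>   /* assumed to declare the two functions above */

    int main(int argc, char **argv)
    {
      int rank;
      MPI_Comm sub_comm;

      MPI_Init(&argc, &argv);
      MPI_Comm_rank(MPI_COMM_WORLD, &rank);

      /* Illustrative split: even and odd ranks get separate communicators. */
      MPI_Comm_split(MPI_COMM_WORLD, rank % 2, rank, &sub_comm);

      /* Install it once, up front; later library-internal calls such as
         MPI_Comm_rank(zoltan_get_global_comm(), ...) then see sub_comm. */
      zoltan_initialize_global_comm(sub_comm);

      MPI_Comm_free(&sub_comm);
      MPI_Finalize();
      return 0;
    }

The mutex makes the set and get operations atomic with respect to each other, but the communicator should normally be installed once, before any other thread calls into Zoltan.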
2 changes: 1 addition & 1 deletion packages/zoltan/src/Utilities/Communication/comm_do.c
@@ -145,7 +145,7 @@ char *recv_data) /* array of data I'll own after comm */

/* Check input parameters */
if (!plan) {
-    MPI_Comm_rank(MPI_Comm_Default(), &my_proc);
+    MPI_Comm_rank(zoltan_get_global_comm(), &my_proc);
ZOLTAN_COMM_ERROR("Communication plan = NULL", yo, my_proc);
return ZOLTAN_FATAL;
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/Utilities/Communication/comm_info.c
@@ -90,7 +90,7 @@ int i, j, k, my_proc;

/* Check input parameters */
if (!plan) {
-    MPI_Comm_rank(MPI_Comm_Default(), &my_proc);
+    MPI_Comm_rank(zoltan_get_global_comm(), &my_proc);
ZOLTAN_COMM_ERROR("Communication plan = NULL", yo, my_proc);
return ZOLTAN_FATAL;
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/Utilities/Memory/mem.c
@@ -80,7 +80,7 @@ static int nfree = 0; /* number of calls to free */
#define GET_RANK(a) *(a)=0
#else
#include <zoltan_comm.h>
-#define GET_RANK(a) MPI_Comm_rank(MPI_Comm_Default(), (a))
+#define GET_RANK(a) MPI_Comm_rank(zoltan_get_global_comm(), (a))
#endif

#define MAX_STRING_LEN 50
2 changes: 1 addition & 1 deletion packages/zoltan/src/Utilities/Timer/zoltan_timer.c
@@ -88,7 +88,7 @@ extern "C" {
#define FATALERROR(yo, str) \
{ \
int ppproc; \
-  MPI_Comm_rank(MPI_Comm_Default(), &ppproc); \
+  MPI_Comm_rank(zoltan_get_global_comm(), &ppproc); \
ZOLTAN_PRINT_ERROR(ppproc, yo, str); \
return ZOLTAN_FATAL; \
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/ch/ch_init_dist.c
@@ -133,7 +133,7 @@ int max_assignment, have_assignments;
/* Broadcast initial assignments if they exist.
* Assignments can be used for partitions and/or processors.
*/
-  MPI_Comm_rank(MPI_Comm_Default(), &proc);
+  MPI_Comm_rank(zoltan_get_global_comm(), &proc);

/* First, tell other processors whether the assignments array is NULL. */
if (proc == host_proc)
10 changes: 5 additions & 5 deletions packages/zoltan/src/driver/dr_chaco_io.c
@@ -118,7 +118,7 @@ int read_chaco_file(int Proc,
file_error = (fp == NULL);
}

-  MPI_Bcast(&file_error, 1, MPI_INT, 0, MPI_Comm_Default());
+  MPI_Bcast(&file_error, 1, MPI_INT, 0, zoltan_get_global_comm());

if (file_error) {
sprintf(cmesg, "fatal: Could not open Chaco graph file %s",
@@ -236,14 +236,14 @@ for (i=0; i<nvtxs; i++) { /* move 2/3 of points much closer to "a" */

/* Distribute graph */

-  if (!chaco_dist_graph(MPI_Comm_Default(), pio_info, 0, &gnvtxs, &nvtxs,
+  if (!chaco_dist_graph(zoltan_get_global_comm(), pio_info, 0, &gnvtxs, &nvtxs,
&start, &adj, &vwgt_dim, &vwgts, &ewgt_dim, &ewgts,
&ndim, &x, &y, &z, &assignments)) {
Gen_Error(0, "fatal: Error returned from chaco_dist_graph");
return 0;
}

-  MPI_Bcast(&base, 1, MPI_INT, 0, MPI_Comm_Default());
+  MPI_Bcast(&base, 1, MPI_INT, 0, zoltan_get_global_comm());

if (!chaco_setup_mesh_struct(Proc, Num_Proc, prob, mesh, pio_info, gnvtxs, nvtxs,
start, adj, vwgt_dim, vwgts, ewgt_dim, ewgts,
@@ -341,7 +341,7 @@ int i;
* Each element has one set of coordinates (i.e., node) if a coords file
* was provided; zero otherwise.
*/
-  MPI_Bcast( &no_geom, 1, MPI_INT, 0, MPI_Comm_Default());
+  MPI_Bcast( &no_geom, 1, MPI_INT, 0, zoltan_get_global_comm());
if (no_geom)
mesh->eb_nnodes[0] = 0;
else
@@ -551,7 +551,7 @@ void chaco_init_local_ids(
int i;
int Proc;

-  MPI_Comm_rank(MPI_Comm_Default(), &Proc);
+  MPI_Comm_rank(zoltan_get_global_comm(), &Proc);

*num_vtx = ch_dist_max_num_vtx(assignments);
*vtx_list = (int *) malloc(((int)*num_vtx) * sizeof(int));
8 changes: 4 additions & 4 deletions packages/zoltan/src/driver/dr_chaco_io.c.shockstem
@@ -118,7 +118,7 @@ int read_chaco_file(int Proc,
file_error = (fp == NULL);
}

-  MPI_Bcast(&file_error, 1, MPI_INT, 0, MPI_Comm_Default());
+  MPI_Bcast(&file_error, 1, MPI_INT, 0, zoltan_get_global_comm());

if (file_error) {
sprintf(cmesg, "fatal: Could not open Chaco graph file %s",
@@ -218,7 +218,7 @@ printf("%d KDDKDD NEARESTSFILE %s\n", Proc, chaco_fname); fflush(stdout);
}

/* Distribute graph */
-  if (!chaco_dist_graph(MPI_Comm_Default(), pio_info, 0, &gnvtxs, &nvtxs,
+  if (!chaco_dist_graph(zoltan_get_global_comm(), pio_info, 0, &gnvtxs, &nvtxs,
&start, &adj, &vwgt_dim, &vwgts, &ewgt_dim, &ewgts,
&ndim, &x, &y, &z, &assignments) != 0) {
Gen_Error(0, "fatal: Error returned from chaco_dist_graph");
@@ -315,7 +315,7 @@ int i;
* Each element has one set of coordinates (i.e., node) if a coords file
* was provided; zero otherwise.
*/
-  MPI_Bcast( &no_geom, 1, MPI_INT, 0, MPI_Comm_Default());
+  MPI_Bcast( &no_geom, 1, MPI_INT, 0, zoltan_get_global_comm());
if (no_geom)
mesh->eb_nnodes[0] = 0;
else
@@ -516,7 +516,7 @@ void chaco_init_local_ids(
int i;
int Proc;

-  MPI_Comm_rank(MPI_Comm_Default(), &Proc);
+  MPI_Comm_rank(zoltan_get_global_comm(), &Proc);

*num_vtx = ch_dist_max_num_vtx(assignments);
*vtx_list = (int *) malloc(*num_vtx * sizeof(int));
4 changes: 2 additions & 2 deletions packages/zoltan/src/driver/dr_dd.c
@@ -65,8 +65,8 @@ int build_elem_dd(MESH_INFO_PTR mesh)
int maxelems;

MPI_Allreduce(&(mesh->num_elems), &maxelems, 1, MPI_INT, MPI_MAX,
-                MPI_Comm_Default());
-  if (Zoltan_DD_Create(&(mesh->dd), MPI_Comm_Default(), 1, 0, 0, maxelems, 0) != 0){
+                zoltan_get_global_comm());
+  if (Zoltan_DD_Create(&(mesh->dd), zoltan_get_global_comm(), 1, 0, 0, maxelems, 0) != 0){
Gen_Error(0, "fatal: NULL returned from Zoltan_DD_Create()\n");
return 0;
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/driver/dr_ddCPP.cpp
@@ -58,7 +58,7 @@ int build_elem_dd(MESH_INFO_PTR mesh)
{
destroy_elem_dd();

-  dd = new Zoltan_DD(MPI_Comm_Default(), 1, 0, 0, 0, 0);
+  dd = new Zoltan_DD(zoltan_get_global_comm(), 1, 0, 0, 0, 0);

return update_elem_dd(mesh);
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/driver/dr_err.c
@@ -171,7 +171,7 @@ void error_report(int Proc)
}
}

-  MPI_Abort(MPI_Comm_Default(), -1);
+  MPI_Abort(zoltan_get_global_comm(), -1);
}

#ifdef __cplusplus
20 changes: 10 additions & 10 deletions packages/zoltan/src/driver/dr_eval.c
@@ -78,7 +78,7 @@ ZOLTAN_ID_TYPE gsumcuts, gmaxcuts, gmincuts, elemcount;
ZOLTAN_ID_TYPE gsumelems, gmaxelems, gminelems;
double gsumload, gmaxload, gminload;

-  MPI_Comm_rank(MPI_Comm_Default(), &proc);
+  MPI_Comm_rank(zoltan_get_global_comm(), &proc);

for (i = 0; i < mesh->necmap; i++) {
cuts += mesh->ecmap_cnt[i];
@@ -89,19 +89,19 @@ double gsumload, gmaxload, gminload;
load += mesh->elements[i].cpu_wgt[0];
}

-  MPI_Allreduce(&cuts, &gsumcuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_SUM, MPI_Comm_Default());
-  MPI_Allreduce(&cuts, &gmaxcuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_MAX, MPI_Comm_Default());
-  MPI_Allreduce(&cuts, &gmincuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_MIN, MPI_Comm_Default());
+  MPI_Allreduce(&cuts, &gsumcuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_SUM, zoltan_get_global_comm());
+  MPI_Allreduce(&cuts, &gmaxcuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_MAX, zoltan_get_global_comm());
+  MPI_Allreduce(&cuts, &gmincuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_MIN, zoltan_get_global_comm());

elemcount = mesh->num_elems - mesh->blank_count;

-  MPI_Allreduce(&elemcount, &gsumelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_SUM, MPI_Comm_Default());
-  MPI_Allreduce(&elemcount, &gmaxelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_MAX, MPI_Comm_Default());
-  MPI_Allreduce(&elemcount, &gminelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_MIN, MPI_Comm_Default());
+  MPI_Allreduce(&elemcount, &gsumelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_SUM, zoltan_get_global_comm());
+  MPI_Allreduce(&elemcount, &gmaxelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_MAX, zoltan_get_global_comm());
+  MPI_Allreduce(&elemcount, &gminelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_MIN, zoltan_get_global_comm());

-  MPI_Allreduce(&load, &gsumload, 1, MPI_DOUBLE, MPI_SUM, MPI_Comm_Default());
-  MPI_Allreduce(&load, &gmaxload, 1, MPI_DOUBLE, MPI_MAX, MPI_Comm_Default());
-  MPI_Allreduce(&load, &gminload, 1, MPI_DOUBLE, MPI_MIN, MPI_Comm_Default());
+  MPI_Allreduce(&load, &gsumload, 1, MPI_DOUBLE, MPI_SUM, zoltan_get_global_comm());
+  MPI_Allreduce(&load, &gmaxload, 1, MPI_DOUBLE, MPI_MAX, zoltan_get_global_comm());
+  MPI_Allreduce(&load, &gminload, 1, MPI_DOUBLE, MPI_MIN, zoltan_get_global_comm());

if (proc == 0) {
printf("DRIVER EVAL: load: max %f min %f sum %f\n",
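Why gather sum, max, and min of the load? The customary balance figure is the heaviest part relative to the average. A hedged sketch, not part of this commit, of deriving it from the values reduced above; nproc and the print line are assumptions.

    /* Hypothetical post-processing: gsumload, gmaxload, and proc come from
       the hunk above; everything else here is illustrative. */
    int nproc;
    MPI_Comm_size(zoltan_get_global_comm(), &nproc);
    double avgload = gsumload / (double) nproc;
    double imbalance = (avgload > 0.0) ? gmaxload / avgload : 1.0;
    if (proc == 0)
      printf("DRIVER EVAL: load imbalance: %f\n", imbalance);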
8 changes: 4 additions & 4 deletions packages/zoltan/src/driver/dr_exoII_io.c
@@ -230,9 +230,9 @@ int read_exoII_file(int Proc,

/* Perform reduction on necessary fields of element blocks. kdd 2/2001 */
MPI_Allreduce(nnodes, mesh->eb_nnodes, mesh->num_el_blks, MPI_INT, MPI_MAX,
-                MPI_Comm_Default());
+                zoltan_get_global_comm());
MPI_Allreduce(etypes, mesh->eb_etypes, mesh->num_el_blks, MPI_INT, MPI_MIN,
-                MPI_Comm_Default());
+                zoltan_get_global_comm());
for (i = 0; i < mesh->num_el_blks; i++) {
strcpy(mesh->eb_names[i], get_elem_name(mesh->eb_etypes[i]));
}
@@ -893,7 +893,7 @@ static int read_comm_map_info(int pexoid, int Proc, PROB_INFO_PTR prob,
* for the adjacent elements in this communication map.
*/

-  ierr = Zoltan_Comm_Create(&comm_obj, max_len, proc_ids, MPI_Comm_Default(),
+  ierr = Zoltan_Comm_Create(&comm_obj, max_len, proc_ids, zoltan_get_global_comm(),
msg, &nrecv);
if (ierr != ZOLTAN_OK) {
Gen_Error(0, "fatal: Error returned from Zoltan_Comm_Create");
@@ -1016,7 +1016,7 @@ char cmesg[256];
char *str = "Proc";

/* generate the parallel filename for this processor */
-  MPI_Comm_size(MPI_Comm_Default(), &Num_Proc);
+  MPI_Comm_size(zoltan_get_global_comm(), &Num_Proc);
gen_par_filename(pio_info->pexo_fname, tmp_nem_fname, pio_info, Proc,
Num_Proc);
/*
8 changes: 4 additions & 4 deletions packages/zoltan/src/driver/dr_exoII_ioCPP.cpp
@@ -226,9 +226,9 @@ int read_exoII_file(int Proc,

/* Perform reduction on necessary fields of element blocks. kdd 2/2001 */
MPI_Allreduce(nnodes, mesh->eb_nnodes, mesh->num_el_blks,
-                MPI_INT, MPI_MAX, MPI_Comm_Default()) ;
+                MPI_INT, MPI_MAX, zoltan_get_global_comm()) ;
MPI_Allreduce(etypes, mesh->eb_etypes, mesh->num_el_blks,
-                MPI_INT, MPI_MIN, MPI_Comm_Default());
+                MPI_INT, MPI_MIN, zoltan_get_global_comm());
for (i = 0; i < mesh->num_el_blks; i++) {
strcpy(mesh->eb_names[i], get_elem_name(mesh->eb_etypes[i]));
}
@@ -888,7 +888,7 @@ static int read_comm_map_info(int pexoid, int Proc, PROB_INFO_PTR prob,
* for the adjacent elements in this communication map.
*/

-  comm_obj = new Zoltan_Comm(max_len, proc_ids, MPI_Comm_Default(), msg, &nrecv);
+  comm_obj = new Zoltan_Comm(max_len, proc_ids, zoltan_get_global_comm(), msg, &nrecv);

if (nrecv != max_len) {
Gen_Error(0, "fatal: Error returned from Zoltan_Comm constructor");
@@ -990,7 +990,7 @@ char cmesg[256];

/* generate the parallel filename for this processor */
int Num_Proc = 0;
-  MPI_Comm_size(MPI_Comm_Default(), &Num_Proc);
+  MPI_Comm_size(zoltan_get_global_comm(), &Num_Proc);

gen_par_filename(pio_info->pexo_fname, tmp_nem_fname, pio_info, Proc,
Num_Proc);
10 changes: 5 additions & 5 deletions packages/zoltan/src/driver/dr_gnuplot.c
@@ -190,7 +190,7 @@ int output_gnu(const char *cmd_file,
/* Sort by part numbers. Assumes # parts >= # proc. */
if (nelems > 0)
Zoltan_quicksort_pointer_inc_int_int(index, parts, NULL, 0, nelems-1);
-    MPI_Allreduce(&max_part, &gmax_part, 1, MPI_INT, MPI_MAX, MPI_Comm_Default());
+    MPI_Allreduce(&max_part, &gmax_part, 1, MPI_INT, MPI_MAX, zoltan_get_global_comm());
gnum_part = gmax_part + 1;
}

@@ -267,10 +267,10 @@
}
}

-    MPI_Reduce(&locMinX,&globMinX,1,MPI_FLOAT,MPI_MIN,0,MPI_Comm_Default());
-    MPI_Reduce(&locMinY,&globMinY,1,MPI_FLOAT,MPI_MIN,0,MPI_Comm_Default());
-    MPI_Reduce(&locMaxX,&globMaxX,1,MPI_FLOAT,MPI_MAX,0,MPI_Comm_Default());
-    MPI_Reduce(&locMaxY,&globMaxY,1,MPI_FLOAT,MPI_MAX,0,MPI_Comm_Default());
+    MPI_Reduce(&locMinX,&globMinX,1,MPI_FLOAT,MPI_MIN,0,zoltan_get_global_comm());
+    MPI_Reduce(&locMinY,&globMinY,1,MPI_FLOAT,MPI_MIN,0,zoltan_get_global_comm());
+    MPI_Reduce(&locMaxX,&globMaxX,1,MPI_FLOAT,MPI_MAX,0,zoltan_get_global_comm());
+    MPI_Reduce(&locMaxY,&globMaxY,1,MPI_FLOAT,MPI_MAX,0,zoltan_get_global_comm());

}
else if (pio_info->file_type == NEMESIS_FILE) { /* Nemesis input file */
(The remaining changed files of this 58-file commit are truncated in this view.)