Test-local routine: verifies that DistributeParticles splits a fixed particle total across MPI ranks per the quotient+remainder policy, conserves the global count, and yields a rank-consistent remainder.
17{
18 PetscMPIInt rank = 0, size = 1;
19 PetscInt local_particles = 0;
20 PetscInt remainder = 0;
21 PetscInt global_particles = 0;
22 PetscInt remainder_min = 0, remainder_max = 0;
23 const PetscInt total_particles = 137;
24
25 PetscFunctionBeginUser;
26 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
27 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
28 PetscCall(
PicurvAssertBool((PetscBool)(size >= 2),
"unit-mpi requires at least two MPI ranks"));
29
31
33 total_particles / size + (((PetscInt)rank < remainder) ? 1 : 0),
34 local_particles,
35 "local particle share should match quotient+remainder policy"));
36
37 PetscCallMPI(MPI_Allreduce(&local_particles, &global_particles, 1, MPIU_INT, MPI_SUM, PETSC_COMM_WORLD));
38 PetscCall(
PicurvAssertIntEqual(total_particles, global_particles,
"distributed particle count must conserve total particles"));
39
40 PetscCallMPI(MPI_Allreduce(&remainder, &remainder_min, 1, MPIU_INT, MPI_MIN, PETSC_COMM_WORLD));
41 PetscCallMPI(MPI_Allreduce(&remainder, &remainder_max, 1, MPIU_INT, MPI_MAX, PETSC_COMM_WORLD));
42 PetscCall(
PicurvAssertIntEqual(remainder_min, remainder_max,
"all ranks should report the same remainder"));
43 PetscFunctionReturn(0);
44}
PetscErrorCode DistributeParticles(PetscInt numParticles, PetscMPIInt rank, PetscMPIInt size, PetscInt *particlesPerProcess, PetscInt *remainder)
Distributes particles evenly across MPI processes, handling any remainders.
PetscErrorCode PicurvAssertIntEqual(PetscInt expected, PetscInt actual, const char *context)
Shared test-support routine.
PetscErrorCode PicurvAssertBool(PetscBool value, const char *context)
Shared test-support routine.