19 PetscMPIInt rank = 0, size = 1;
20 PetscInt local_particles = 0;
21 PetscInt remainder = 0;
22 PetscInt global_particles = 0;
23 PetscInt remainder_min = 0, remainder_max = 0;
24 const PetscInt total_particles = 137;
26 PetscFunctionBeginUser;
27 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
28 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
29 PetscCall(
PicurvAssertBool((PetscBool)(size >= 2),
"unit-mpi requires at least two MPI ranks"));
34 total_particles / size + (((PetscInt)rank < remainder) ? 1 : 0),
36 "local particle share should match quotient+remainder policy"));
38 PetscCallMPI(MPI_Allreduce(&local_particles, &global_particles, 1, MPIU_INT, MPI_SUM, PETSC_COMM_WORLD));
39 PetscCall(
PicurvAssertIntEqual(total_particles, global_particles,
"distributed particle count must conserve total particles"));
41 PetscCallMPI(MPI_Allreduce(&remainder, &remainder_min, 1, MPIU_INT, MPI_MIN, PETSC_COMM_WORLD));
42 PetscCallMPI(MPI_Allreduce(&remainder, &remainder_max, 1, MPIU_INT, MPI_MAX, PETSC_COMM_WORLD));
43 PetscCall(
PicurvAssertIntEqual(remainder_min, remainder_max,
"all ranks should report the same remainder"));
44 PetscFunctionReturn(0);
56 PetscMPIInt rank = 0, size = 1;
57 PetscReal global_min_x = 0.0;
58 PetscReal global_max_x = 0.0;
59 PetscReal expected_global_max_x = 0.0;
61 PetscFunctionBeginUser;
62 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
63 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
64 PetscCall(
PicurvAssertBool((PetscBool)(size >= 2),
"unit-mpi requires at least two MPI ranks"));
67 expected_global_max_x = ((PetscReal)(user->
IM - 1) / (PetscReal)user->
IM) + 1.0e-6;
71 PetscCall(
PicurvAssertBool((PetscBool)(boxes != NULL),
"all ranks should hold the gathered bounding-box table"));
73 for (PetscMPIInt r = 0; r < size; ++r) {
82 PetscCallMPI(MPI_Allreduce(&local_bbox.
min_coords.
x, &global_min_x, 1, MPIU_REAL, MPI_MIN, PETSC_COMM_WORLD));
83 PetscCallMPI(MPI_Allreduce(&local_bbox.
max_coords.
x, &global_max_x, 1, MPIU_REAL, MPI_MAX, PETSC_COMM_WORLD));
84 PetscCall(
PicurvAssertBool((PetscBool)(global_min_x <= 0.0),
"global min x should include domain start"));
85 PetscCall(
PicurvAssertRealNear(expected_global_max_x, global_max_x, 1.0e-10,
"global max x should match the normalized physical-node domain end"));
89 PetscFunctionReturn(0);
100 PetscMPIInt rank = 0, size = 1;
101 PetscInt *cell_ids = NULL;
102 PetscReal *positions = NULL;
105 PetscFunctionBeginUser;
106 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
107 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
108 PetscCall(
PicurvAssertIntEqual(2, size,
"restart migration unit test expects exactly two MPI ranks"));
110 PetscCall(PetscMemzero(&my_cell_info,
sizeof(my_cell_info)));
117 PetscCallMPI(MPI_Allgather(&my_cell_info,
sizeof(
RankCellInfo), MPI_BYTE,
121 PetscCall(DMSwarmGetField(user->
swarm,
"DMSwarm_CellID", NULL, NULL, (
void **)&cell_ids));
122 PetscCall(DMSwarmGetField(user->
swarm,
"position", NULL, NULL, (
void **)&positions));
127 positions[0] = 0.875;
138 PetscCall(DMSwarmRestoreField(user->
swarm,
"position", NULL, NULL, (
void **)&positions));
139 PetscCall(DMSwarmRestoreField(user->
swarm,
"DMSwarm_CellID", NULL, NULL, (
void **)&cell_ids));
142 PetscCall(DMSwarmGetLocalSize(user->
swarm, &nlocal));
144 "restart migration should move the foreign particle onto the owning rank"));
147 PetscFunctionReturn(0);
162 ierr = PetscInitialize(&argc, &argv, NULL,
"PICurv MPI-focused runtime tests");
167 ierr =
PicurvRunTests(
"unit-mpi", cases,
sizeof(cases) /
sizeof(cases[0]));
173 ierr = PetscFinalize();
Header file for Particle Motion and migration-related functions.
PetscErrorCode MigrateRestartParticlesUsingCellID(UserCtx *user)
Fast-path migration for restart particles using preloaded Cell IDs.
Header file for Particle Swarm management functions.
PetscErrorCode DistributeParticles(PetscInt numParticles, PetscMPIInt rank, PetscMPIInt size, PetscInt *particlesPerProcess, PetscInt *remainder)
Distributes particles evenly across MPI processes, handling any remainders.
Public interface for grid, solver, and metric setup routines.
PetscErrorCode BroadcastAllBoundingBoxes(UserCtx *user, BoundingBox **bboxlist)
Broadcasts the bounding box information collected on rank 0 to all other ranks.
PetscErrorCode ComputeLocalBoundingBox(UserCtx *user, BoundingBox *localBBox)
Computes the local bounding box of the grid on the current process.
PetscErrorCode GatherAllBoundingBoxes(UserCtx *user, BoundingBox **allBBoxes)
Gathers local bounding boxes from all MPI processes to rank 0.
PetscErrorCode GetOwnedCellRange(const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global_out, PetscInt *xm_cell_local_out)
Determines the global starting index and number of CELLS owned by the current processor in a specified dimension.
int main(int argc, char **argv)
Runs the unit-mpi PETSc test binary.
static PetscErrorCode TestRestartCellIdMigrationMovesParticleToOwner(void)
Tests restart fast-path migration using preloaded cell ownership metadata.
static PetscErrorCode TestDistributeParticlesCollectiveConsistency(void)
Tests collective particle distribution consistency across MPI ranks.
static PetscErrorCode TestBoundingBoxCollectivesMultiRank(void)
Tests multi-rank bounding-box gather and broadcast helpers.
PetscErrorCode PicurvCreateMinimalContexts(SimCtx **simCtx_out, UserCtx **user_out, PetscInt mx, PetscInt my, PetscInt mz)
Builds minimal SimCtx and UserCtx fixtures for C unit tests.
PetscErrorCode PicurvAssertRealNear(PetscReal expected, PetscReal actual, PetscReal tol, const char *context)
Asserts that two real values agree within tolerance.
PetscErrorCode PicurvDestroyMinimalContexts(SimCtx **simCtx_ptr, UserCtx **user_ptr)
Destroys minimal SimCtx/UserCtx fixtures and all owned PETSc objects.
PetscErrorCode PicurvCreateSwarmPair(UserCtx *user, PetscInt nlocal, const char *post_field_name)
Creates matched solver and post-processing swarms for tests.
PetscErrorCode PicurvRunTests(const char *suite_name, const PicurvTestCase *cases, size_t case_count)
Runs a named C test suite and prints pass/fail progress markers.
PetscErrorCode PicurvAssertIntEqual(PetscInt expected, PetscInt actual, const char *context)
Asserts that two integer values are equal.
PetscErrorCode PicurvAssertBool(PetscBool value, const char *context)
Asserts that one boolean condition is true.
Shared declarations for the PICurv C test fixture and assertion layer.
Named test case descriptor consumed by PicurvRunTests.
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
RankCellInfo * RankCellInfoMap
Defines a 3D axis-aligned bounding box.
A lean struct to hold the global cell ownership range for a single MPI rank.
The master context for the entire simulation.
User-defined context containing data specific to a single computational grid level.