PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
Functions
test_mpi_kernels.c File Reference

C unit tests for MPI collective helper routines. More...

#include "test_support.h"
#include "ParticleMotion.h"
#include "ParticleSwarm.h"
#include "grid.h"
#include <stdlib.h>
Include dependency graph for test_mpi_kernels.c:

Go to the source code of this file.

Functions

static PetscErrorCode TestDistributeParticlesCollectiveConsistency (void)
 Tests collective particle distribution consistency across MPI ranks.
 
static PetscErrorCode TestBoundingBoxCollectivesMultiRank (void)
 Tests multi-rank bounding-box gather and broadcast helpers.
 
static PetscErrorCode TestRestartCellIdMigrationMovesParticleToOwner (void)
 Tests restart fast-path migration using preloaded cell ownership metadata.
 
int main (int argc, char **argv)
 Runs the unit-mpi PETSc test binary.
 

Detailed Description

C unit tests for MPI collective helper routines.

Definition in file test_mpi_kernels.c.

Function Documentation

◆ TestDistributeParticlesCollectiveConsistency()

static PetscErrorCode TestDistributeParticlesCollectiveConsistency ( void  )
static

Tests collective particle distribution consistency across MPI ranks.

Definition at line 17 of file test_mpi_kernels.c.

18{
19 PetscMPIInt rank = 0, size = 1;
20 PetscInt local_particles = 0;
21 PetscInt remainder = 0;
22 PetscInt global_particles = 0;
23 PetscInt remainder_min = 0, remainder_max = 0;
24 const PetscInt total_particles = 137;
25
26 PetscFunctionBeginUser;
27 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
28 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
29 PetscCall(PicurvAssertBool((PetscBool)(size >= 2), "unit-mpi requires at least two MPI ranks"));
30
31 PetscCall(DistributeParticles(total_particles, rank, size, &local_particles, &remainder));
32
33 PetscCall(PicurvAssertIntEqual(
34 total_particles / size + (((PetscInt)rank < remainder) ? 1 : 0),
35 local_particles,
36 "local particle share should match quotient+remainder policy"));
37
38 PetscCallMPI(MPI_Allreduce(&local_particles, &global_particles, 1, MPIU_INT, MPI_SUM, PETSC_COMM_WORLD));
39 PetscCall(PicurvAssertIntEqual(total_particles, global_particles, "distributed particle count must conserve total particles"));
40
41 PetscCallMPI(MPI_Allreduce(&remainder, &remainder_min, 1, MPIU_INT, MPI_MIN, PETSC_COMM_WORLD));
42 PetscCallMPI(MPI_Allreduce(&remainder, &remainder_max, 1, MPIU_INT, MPI_MAX, PETSC_COMM_WORLD));
43 PetscCall(PicurvAssertIntEqual(remainder_min, remainder_max, "all ranks should report the same remainder"));
44 PetscFunctionReturn(0);
45}
PetscErrorCode DistributeParticles(PetscInt numParticles, PetscMPIInt rank, PetscMPIInt size, PetscInt *particlesPerProcess, PetscInt *remainder)
Distributes particles evenly across MPI processes, handling any remainders.
PetscErrorCode PicurvAssertIntEqual(PetscInt expected, PetscInt actual, const char *context)
Asserts that two integer values are equal.
PetscErrorCode PicurvAssertBool(PetscBool value, const char *context)
Asserts that a boolean condition is true.
Here is the call graph for this function:
Here is the caller graph for this function:

◆ TestBoundingBoxCollectivesMultiRank()

static PetscErrorCode TestBoundingBoxCollectivesMultiRank ( void  )
static

Tests multi-rank bounding-box gather and broadcast helpers.

Definition at line 50 of file test_mpi_kernels.c.

51{
52 SimCtx *simCtx = NULL;
53 UserCtx *user = NULL;
54 BoundingBox local_bbox;
55 BoundingBox *boxes = NULL;
56 PetscMPIInt rank = 0, size = 1;
57 PetscReal global_min_x = 0.0;
58 PetscReal global_max_x = 0.0;
59 PetscReal expected_global_max_x = 0.0;
60
61 PetscFunctionBeginUser;
62 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
63 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
64 PetscCall(PicurvAssertBool((PetscBool)(size >= 2), "unit-mpi requires at least two MPI ranks"));
65
66 PetscCall(PicurvCreateMinimalContexts(&simCtx, &user, 8, 6, 4));
67 expected_global_max_x = ((PetscReal)(user->IM - 1) / (PetscReal)user->IM) + 1.0e-6;
68 PetscCall(ComputeLocalBoundingBox(user, &local_bbox));
69 PetscCall(GatherAllBoundingBoxes(user, &boxes));
70 PetscCall(BroadcastAllBoundingBoxes(user, &boxes));
71 PetscCall(PicurvAssertBool((PetscBool)(boxes != NULL), "all ranks should hold the gathered bounding-box table"));
72
73 for (PetscMPIInt r = 0; r < size; ++r) {
74 PetscCall(PicurvAssertBool((PetscBool)(boxes[r].min_coords.x <= boxes[r].max_coords.x), "bbox x-range should be valid"));
75 PetscCall(PicurvAssertBool((PetscBool)(boxes[r].min_coords.y <= boxes[r].max_coords.y), "bbox y-range should be valid"));
76 PetscCall(PicurvAssertBool((PetscBool)(boxes[r].min_coords.z <= boxes[r].max_coords.z), "bbox z-range should be valid"));
77 }
78
79 PetscCall(PicurvAssertRealNear(local_bbox.min_coords.x, boxes[rank].min_coords.x, 1.0e-10, "local bbox min x preserved"));
80 PetscCall(PicurvAssertRealNear(local_bbox.max_coords.x, boxes[rank].max_coords.x, 1.0e-10, "local bbox max x preserved"));
81
82 PetscCallMPI(MPI_Allreduce(&local_bbox.min_coords.x, &global_min_x, 1, MPIU_REAL, MPI_MIN, PETSC_COMM_WORLD));
83 PetscCallMPI(MPI_Allreduce(&local_bbox.max_coords.x, &global_max_x, 1, MPIU_REAL, MPI_MAX, PETSC_COMM_WORLD));
84 PetscCall(PicurvAssertBool((PetscBool)(global_min_x <= 0.0), "global min x should include domain start"));
85 PetscCall(PicurvAssertRealNear(expected_global_max_x, global_max_x, 1.0e-10, "global max x should match the normalized physical-node domain end"));
86
87 free(boxes);
88 PetscCall(PicurvDestroyMinimalContexts(&simCtx, &user));
89 PetscFunctionReturn(0);
90}
PetscErrorCode BroadcastAllBoundingBoxes(UserCtx *user, BoundingBox **bboxlist)
Broadcasts the bounding box information collected on rank 0 to all other ranks.
Definition grid.c:883
PetscErrorCode ComputeLocalBoundingBox(UserCtx *user, BoundingBox *localBBox)
Computes the local bounding box of the grid on the current process.
Definition grid.c:679
PetscErrorCode GatherAllBoundingBoxes(UserCtx *user, BoundingBox **allBBoxes)
Gathers local bounding boxes from all MPI processes to rank 0.
Definition grid.c:821
PetscErrorCode PicurvCreateMinimalContexts(SimCtx **simCtx_out, UserCtx **user_out, PetscInt mx, PetscInt my, PetscInt mz)
Builds minimal SimCtx and UserCtx fixtures for C unit tests.
PetscErrorCode PicurvAssertRealNear(PetscReal expected, PetscReal actual, PetscReal tol, const char *context)
Asserts that two real values agree within tolerance.
PetscErrorCode PicurvDestroyMinimalContexts(SimCtx **simCtx_ptr, UserCtx **user_ptr)
Destroys minimal SimCtx/UserCtx fixtures and all owned PETSc objects.
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
PetscScalar x
Definition variables.h:101
PetscInt IM
Definition variables.h:820
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
The master context for the entire simulation.
Definition variables.h:643
User-defined context containing data specific to a single computational grid level.
Definition variables.h:811
Here is the call graph for this function:
Here is the caller graph for this function:

◆ TestRestartCellIdMigrationMovesParticleToOwner()

static PetscErrorCode TestRestartCellIdMigrationMovesParticleToOwner ( void  )
static

Tests restart fast-path migration using preloaded cell ownership metadata.

Definition at line 95 of file test_mpi_kernels.c.

96{
97 SimCtx *simCtx = NULL;
98 UserCtx *user = NULL;
99 RankCellInfo my_cell_info;
100 PetscMPIInt rank = 0, size = 1;
101 PetscInt *cell_ids = NULL;
102 PetscReal *positions = NULL;
103 PetscInt nlocal = 0;
104
105 PetscFunctionBeginUser;
106 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
107 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
108 PetscCall(PicurvAssertIntEqual(2, size, "restart migration unit test expects exactly two MPI ranks"));
109
110 PetscCall(PetscMemzero(&my_cell_info, sizeof(my_cell_info)));
111 PetscCall(PicurvCreateMinimalContexts(&simCtx, &user, 8, 4, 4));
112 PetscCall(PicurvCreateSwarmPair(user, 1, "ske"));
113 PetscCall(GetOwnedCellRange(&user->info, 0, &my_cell_info.xs_cell, &my_cell_info.xm_cell));
114 PetscCall(GetOwnedCellRange(&user->info, 1, &my_cell_info.ys_cell, &my_cell_info.ym_cell));
115 PetscCall(GetOwnedCellRange(&user->info, 2, &my_cell_info.zs_cell, &my_cell_info.zm_cell));
116 PetscCall(PetscMalloc1(size, &user->RankCellInfoMap));
117 PetscCallMPI(MPI_Allgather(&my_cell_info, sizeof(RankCellInfo), MPI_BYTE,
118 user->RankCellInfoMap, sizeof(RankCellInfo), MPI_BYTE,
119 PETSC_COMM_WORLD));
120
121 PetscCall(DMSwarmGetField(user->swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_ids));
122 PetscCall(DMSwarmGetField(user->swarm, "position", NULL, NULL, (void **)&positions));
123 if (rank == 0) {
124 cell_ids[0] = user->RankCellInfoMap[1].xs_cell;
125 cell_ids[1] = user->RankCellInfoMap[1].ys_cell;
126 cell_ids[2] = user->RankCellInfoMap[1].zs_cell;
127 positions[0] = 0.875;
128 positions[1] = 0.5;
129 positions[2] = 0.5;
130 } else {
131 cell_ids[0] = user->RankCellInfoMap[1].xs_cell;
132 cell_ids[1] = user->RankCellInfoMap[1].ys_cell;
133 cell_ids[2] = user->RankCellInfoMap[1].zs_cell;
134 positions[0] = 0.95;
135 positions[1] = 0.5;
136 positions[2] = 0.5;
137 }
138 PetscCall(DMSwarmRestoreField(user->swarm, "position", NULL, NULL, (void **)&positions));
139 PetscCall(DMSwarmRestoreField(user->swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_ids));
140
141 PetscCall(MigrateRestartParticlesUsingCellID(user));
142 PetscCall(DMSwarmGetLocalSize(user->swarm, &nlocal));
143 PetscCall(PicurvAssertIntEqual((rank == 0) ? 0 : 2, nlocal,
144 "restart migration should move the foreign particle onto the owning rank"));
145
146 PetscCall(PicurvDestroyMinimalContexts(&simCtx, &user));
147 PetscFunctionReturn(0);
148}
PetscErrorCode MigrateRestartParticlesUsingCellID(UserCtx *user)
Fast-path migration for restart particles using preloaded Cell IDs.
PetscErrorCode GetOwnedCellRange(const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global_out, PetscInt *xm_cell_local_out)
Determines the global starting index and number of CELLS owned by the current processor in a specified dimension.
Definition setup.c:1883
PetscErrorCode PicurvCreateSwarmPair(UserCtx *user, PetscInt nlocal, const char *post_field_name)
Creates matched solver and post-processing swarms for tests.
PetscInt ys_cell
Definition variables.h:187
PetscInt xs_cell
Definition variables.h:187
PetscInt zm_cell
Definition variables.h:188
PetscInt zs_cell
Definition variables.h:187
PetscInt xm_cell
Definition variables.h:188
RankCellInfo * RankCellInfoMap
Definition variables.h:881
PetscInt ym_cell
Definition variables.h:188
DMDALocalInfo info
Definition variables.h:818
A lean struct to hold the global cell ownership range for a single MPI rank.
Definition variables.h:186
Here is the call graph for this function:
Here is the caller graph for this function:

◆ main()

int main ( int  argc,
char **  argv 
)

Runs the unit-mpi PETSc test binary.

Definition at line 153 of file test_mpi_kernels.c.

154{
155 PetscErrorCode ierr;
156 const PicurvTestCase cases[] = {
157 {"distribute-particles-collective-consistency", TestDistributeParticlesCollectiveConsistency},
158 {"bounding-box-collectives-multi-rank", TestBoundingBoxCollectivesMultiRank},
159 {"restart-cellid-migration-moves-particle-to-owner", TestRestartCellIdMigrationMovesParticleToOwner},
160 };
161
162 ierr = PetscInitialize(&argc, &argv, NULL, "PICurv MPI-focused runtime tests");
163 if (ierr) {
164 return (int)ierr;
165 }
166
167 ierr = PicurvRunTests("unit-mpi", cases, sizeof(cases) / sizeof(cases[0]));
168 if (ierr) {
169 PetscFinalize();
170 return (int)ierr;
171 }
172
173 ierr = PetscFinalize();
174 return (int)ierr;
175}
static PetscErrorCode TestRestartCellIdMigrationMovesParticleToOwner(void)
Tests restart fast-path migration using preloaded cell ownership metadata.
static PetscErrorCode TestDistributeParticlesCollectiveConsistency(void)
Tests collective particle distribution consistency across MPI ranks.
static PetscErrorCode TestBoundingBoxCollectivesMultiRank(void)
Tests multi-rank bounding-box gather and broadcast helpers.
PetscErrorCode PicurvRunTests(const char *suite_name, const PicurvTestCase *cases, size_t case_count)
Runs a named C test suite and prints pass/fail progress markers.
Named test case descriptor consumed by PicurvRunTests.
Here is the call graph for this function: