/*
 * PICurv 0.1.0
 * A Parallel Particle-In-Cell Solver for Curvilinear LES
 *
 * interpolation.c
 */
1/**
2 * @file interpolation.c
3 * @brief Main program for DMSwarm interpolation using the fdf-curvIB method.
4 *
5 * Provides routines for interpolation between corner-based and center-based
6 * fields in the cell-centered DM (fda), plus partial usage examples for
7 * DMSwarm-based field sampling.
8 */
9
10#include "interpolation.h"
11
12// Number of weights used in certain trilinear interpolation examples
13#define NUM_WEIGHTS 8
14// Define a buffer size for error messages if not already available
15#ifndef ERROR_MSG_BUFFER_SIZE
16#define ERROR_MSG_BUFFER_SIZE 256 // Or use PETSC_MAX_PATH_LEN if appropriate
17#endif
18
19#undef __FUNCT__
20#define __FUNCT__ "InterpolateFieldFromCornerToCenter_Vector"
21/**
22 * @brief Interpolates a vector field from corner nodes to cell centers, respecting application conventions.
23 *
24 * For each interior computational cell (k,j,i), this function calculates the cell-centered value
25 * as the arithmetic average of the values at its 8 surrounding corner nodes.
26 *
27 * This function adheres to the convention that the valid computational domain for cell-centered
28 * quantities excludes the first and last indices in each direction (e.g., cells at i=0 and i=IM).
29 * The loop bounds are constructed to match this, making it a direct counterpart to
30 * functions like ComputeCellCentersAndSpacing.
31 *
32 * @param[in] field_arr Input: Ghosted 3D array of corner-node data, accessed via GLOBAL indices.
33 * @param[out] centfield_arr Output: 3D array for interpolated cell-center values, accessed via GLOBAL indices.
34 * @param[in] user User context containing DMDA information.
35 *
36 * @return PetscErrorCode 0 on success.
37 */
39 Cmpnts ***field_arr, /* Input: Ghosted local array view from user->fda (global node indices) */
40 Cmpnts ***centfield_arr, /* Output: Array view for cell-centered data (global indices) */
41 UserCtx *user)
42{
43 PetscErrorCode ierr;
44 DMDALocalInfo info;
45
46 PetscFunctionBeginUser;
47
48 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
49
50 // Get local and global grid dimensions
51 PetscInt xs = info.xs, xe = info.xs + info.xm;
52 PetscInt ys = info.ys, ye = info.ys + info.ym;
53 PetscInt zs = info.zs, ze = info.zs + info.zm;
54 PetscInt mx = info.mx, my = info.my, mz = info.mz;
55
56 // Determine loop bounds to compute for interior cells only, matching the code's convention.
57 // Start at index 1 if the process owns the global boundary at index 0.
58 PetscInt is = (xs == 0) ? 1 : xs;
59 PetscInt js = (ys == 0) ? 1 : ys;
60 PetscInt ks = (zs == 0) ? 1 : zs;
61
62 // Stop one cell short if the process owns the global boundary at the max index.
63 PetscInt ie = (xe == mx) ? xe - 1 : xe;
64 PetscInt je = (ye == my) ? ye - 1 : ye;
65 PetscInt ke = (ze == mz) ? ze - 1 : ze;
66
67 // Loop over the locally owned INTERIOR cells.
68 for (PetscInt k = ks; k < ke; k++) {
69 for (PetscInt j = js; j < je; j++) {
70 for (PetscInt i = is; i < ie; i++) {
71 // Calculate cell center value as the average of its 8 corner nodes
72 centfield_arr[k][j][i].x = 0.125 * (field_arr[k][j][i].x + field_arr[k][j-1][i].x +
73 field_arr[k-1][j][i].x + field_arr[k-1][j-1][i].x +
74 field_arr[k][j][i-1].x + field_arr[k][j-1][i-1].x +
75 field_arr[k-1][j][i-1].x + field_arr[k-1][j-1][i-1].x);
76
77 centfield_arr[k][j][i].y = 0.125 * (field_arr[k][j][i].y + field_arr[k][j-1][i].y +
78 field_arr[k-1][j][i].y + field_arr[k-1][j-1][i].y +
79 field_arr[k][j][i-1].y + field_arr[k][j-1][i-1].y +
80 field_arr[k-1][j][i-1].y + field_arr[k-1][j-1][i-1].y);
81
82 centfield_arr[k][j][i].z = 0.125 * (field_arr[k][j][i].z + field_arr[k][j-1][i].z +
83 field_arr[k-1][j][i].z + field_arr[k-1][j-1][i].z +
84 field_arr[k][j][i-1].z + field_arr[k][j-1][i-1].z +
85 field_arr[k-1][j][i-1].z + field_arr[k-1][j-1][i-1].z);
86 }
87 }
88 }
89
90 PetscFunctionReturn(0);
91}
92
93#undef __FUNCT__
94#define __FUNCT__ "InterpolateFieldFromCornerToCenter_Scalar"
95/**
96 * @brief Interpolates a scalar field from corner nodes to cell centers, respecting application conventions.
97 *
98 * For each interior computational cell (k,j,i), this function calculates the cell-centered value
99 * as the arithmetic average of the values at its 8 surrounding corner nodes.
100 *
101 * This function adheres to the convention that the valid computational domain for cell-centered
102 * quantities excludes the first and last indices in each direction (e.g., cells at i=0 and i=IM).
103 * The loop bounds are constructed to match this.
104 *
105 * @param[in] field_arr Input: Ghosted 3D array of corner-node data, accessed via GLOBAL indices.
106 * @param[out] centfield_arr Output: 3D array for interpolated cell-center values, accessed via GLOBAL indices.
107 * @param[in] user User context containing DMDA information.
108 *
109 * @return PetscErrorCode 0 on success.
110 */
112 PetscReal ***field_arr, /* Input: Ghosted local array view from user->fda (global node indices) */
113 PetscReal ***centfield_arr, /* Output: Array view for cell-centered data (global indices) */
114 UserCtx *user)
115{
116 PetscErrorCode ierr;
117 DMDALocalInfo info;
118
119 PetscFunctionBeginUser;
120
121 ierr = DMDAGetLocalInfo(user->da, &info); CHKERRQ(ierr);
122
123 // Get local and global grid dimensions
124 PetscInt xs = info.xs, xe = info.xs + info.xm;
125 PetscInt ys = info.ys, ye = info.ys + info.ym;
126 PetscInt zs = info.zs, ze = info.zs + info.zm;
127 PetscInt mx = info.mx, my = info.my, mz = info.mz;
128
129 // Determine loop bounds to compute for interior cells only, matching the code's convention.
130 // Start at index 1 if the process owns the global boundary at index 0.
131 PetscInt is = (xs == 0) ? 1 : xs;
132 PetscInt js = (ys == 0) ? 1 : ys;
133 PetscInt ks = (zs == 0) ? 1 : zs;
134
135 // Stop one cell short if the process owns the global boundary at the max index.
136 PetscInt ie = (xe == mx) ? xe - 1 : xe;
137 PetscInt je = (ye == my) ? ye - 1 : ye;
138 PetscInt ke = (ze == mz) ? ze - 1 : ze;
139
140 // Loop over the locally owned INTERIOR cells.
141 for (PetscInt k = ks; k < ke; k++) {
142 for (PetscInt j = js; j < je; j++) {
143 for (PetscInt i = is; i < ie; i++) {
144 // Calculate cell center value as the average of its 8 corner nodes
145 centfield_arr[k][j][i] = 0.125 * (field_arr[k][j][i] + field_arr[k][j-1][i] +
146 field_arr[k-1][j][i] + field_arr[k-1][j-1][i] +
147 field_arr[k][j][i-1] + field_arr[k][j-1][i-1] +
148 field_arr[k-1][j][i-1] + field_arr[k-1][j-1][i-1]);
149 }
150 }
151 }
152
153 PetscFunctionReturn(0);
154}
155
156#undef __FUNCT__
157#define __FUNCT__ "TestCornerToCenterInterpolation"
158/**
159 * @brief Tests the InterpolateFieldFromCornerToCenter function by reproducing the Cent vector.
160 *
161 * This function serves as a unit test. It performs the following steps:
162 * 1. Takes the corner-centered nodal coordinates (from DMGetCoordinatesLocal) as input.
163 * 2. Uses the `InterpolateFieldFromCornerToCenter` macro to interpolate these coordinates to
164 * the cell centers, storing the result in a new temporary vector.
165 * 3. Compares this new vector with the `user->Cent` vector, which is assumed to have been
166 * computed by `ComputeCellCentersAndSpacing` and serves as the ground truth.
167 * 4. A 2-norm of the difference is computed. If it is below a small tolerance, the test passes.
168 *
169 * @note This function should be called immediately after `ComputeCellCentersAndSpacing` has
170 * been successfully executed.
171 *
172 * @param user The UserCtx for a specific grid level.
173 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
174 */
176{
177 PetscErrorCode ierr;
178 Vec lCoords, TestCent;
179 Cmpnts ***coor_arr, ***test_cent_arr;
180 PetscReal diff_norm;
181
182 PetscFunctionBeginUser;
183
184 // 1. Create a temporary vector to hold the result of our interpolation.
185 // It must have the same layout and size as the ground-truth user->Cent vector.
186 ierr = VecDuplicate(user->Cent, &TestCent); CHKERRQ(ierr);
187
188 // 2. Get the input (corner coordinates) and output (our test vector) arrays.
189 ierr = DMGetCoordinatesLocal(user->da, &lCoords); CHKERRQ(ierr);
190 ierr = DMDAVecGetArrayRead(user->fda, lCoords, &coor_arr); CHKERRQ(ierr);
191 ierr = DMDAVecGetArray(user->fda, TestCent, &test_cent_arr); CHKERRQ(ierr);
192
193 // 3. Call the generic interpolation macro.
194 // The macro will see that `coor_arr` is of type `Cmpnts***` and correctly
195 // call the `InterpolateFieldFromCornerToCenter_Vector` function.
196 ierr = InterpolateFieldFromCornerToCenter(coor_arr, test_cent_arr, user); CHKERRQ(ierr);
197
198 // 4. Restore the arrays.
199 ierr = DMDAVecRestoreArrayRead(user->fda, lCoords, &coor_arr); CHKERRQ(ierr);
200 ierr = DMDAVecRestoreArray(user->fda, TestCent, &test_cent_arr); CHKERRQ(ierr);
201
202 // 5. IMPORTANT: Assemble the vector so its values are communicated across processors
203 // and it's ready for global operations like VecNorm.
204 ierr = VecAssemblyBegin(TestCent); CHKERRQ(ierr);
205 ierr = VecAssemblyEnd(TestCent); CHKERRQ(ierr);
206
207 // 6. Compare the result with the ground truth.
208 // We compute TestCent = -1.0 * user->Cent + 1.0 * TestCent.
209 // This calculates the difference vector: TestCent - user->Cent.
210 ierr = VecAXPY(TestCent, -1.0, user->Cent); CHKERRQ(ierr);
211
212 // Now, compute the L2 norm of the difference vector. If the functions are
213 // identical, the norm should be zero (or very close due to floating point).
214 ierr = VecNorm(TestCent, NORM_2, &diff_norm); CHKERRQ(ierr);
215
216 // 7. Report the result and clean up.
217 if (diff_norm < 1.0e-12) {
218 LOG_ALLOW(GLOBAL,LOG_DEBUG,"[SUCCESS] Test passed. Norm of difference is %g.\n", (double)diff_norm);
219 } else {
220 LOG_ALLOW(GLOBAL,LOG_DEBUG, "[FAILURE] Test failed. Norm of difference is %g.\n", (double)diff_norm);
221 }
222
223 ierr = VecDestroy(&TestCent); CHKERRQ(ierr);
224
225 PetscFunctionReturn(0);
226}
227
228#undef __FUNCT__
229#define __FUNCT__ "InterpolateFieldFromCenterToCorner_Vector"
230/**
231 * @brief Interpolates a vector field from cell centers to corner nodes.
232 *
233 *
234 * @param[in] centfield_arr Input: 3D array (ghosted) of cell-centered data, accessed via GLOBAL indices.
235 * @param[out] corner_arr Output: 3D array where interpolated node values are stored,
236 * also accessed via GLOBAL indices for the owned part.
237 * @param[in] user User context containing DMDA information.
238 *
239 * @return PetscErrorCode 0 on success.
240 */
242 Cmpnts ***centfield_arr, /* Input: Ghosted local array from Vec (read) */
243 Cmpnts ***corner_arr, /* Output: global array from Vec (write) */
244 UserCtx *user)
245{
246 PetscErrorCode ierr;
247 DMDALocalInfo info;
248 PetscMPIInt rank;
250 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
251 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
252
253 // Node ownership range (GLOBAL indices)
254 PetscInt xs_node = info.xs, xm_node = info.xm, xe_node = xs_node + xm_node;
255 PetscInt ys_node = info.ys, ym_node = info.ym, ye_node = ys_node + ym_node;
256 PetscInt zs_node = info.zs, zm_node = info.zm, ze_node = zs_node + zm_node;
257
258 PetscInt nCellsX = info.mx - 2; // Number of cells in x-direction
259 PetscInt nCellsY = info.my - 2; // Number of cells in y-direction
260 PetscInt nCellsZ = info.mz - 2; // Number of cells in z-direction
261
262
263 // Global grid dimensions (used for valid cell check)
264 PetscInt IM = info.mx - 1; // Total nodes in i-direction
265 PetscInt JM = info.my - 1; // Total nodes in j-direction
266 PetscInt KM = info.mz - 1; // Total nodes in k-direction
267
268 // Ghosted array starting indices (from the same info struct)
269 PetscInt gxs = info.gxs;
270 PetscInt gys = info.gys;
271 PetscInt gzs = info.gzs;
272
273 PetscInt xs = info.xs;
274 PetscInt ys = info.ys;
275 PetscInt zs = info.zs;
276
278 "[Rank %d] Starting -- Node ownership k=%d..%d, j=%d..%d, i=%d..%d\n",
279 rank, zs_node, ze_node-1, ys_node, ye_node-1, xs_node, xe_node-1);
280
281 // Loop over the GLOBAL indices of the NODES owned by this processor
282 for (PetscInt k = zs_node; k < ze_node; k++) {
283 for (PetscInt j = ys_node; j < ye_node; j++) {
284 for (PetscInt i = xs_node; i < xe_node; i++) {
285 Cmpnts sum = {0.0, 0.0, 0.0};
286 PetscInt count = 0;
287
288 // DEBUG 1 TEST
289 /*
290 if(rank == 1 && i == 24 && j == 12 && k == 49){
291 PetscInt i_cell_A = i - 1;
292 PetscInt i_cell_B = i;
293
294 Cmpnts ucat_A = centfield_arr[k][j][i_cell_A]; // 23
295 Cmpnts ucat_B = centfield_arr[k][j][i_cell_B]; // 24 (out-of-bounds if IM=25)
296
297 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Read { Valid Cell }Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f) and {Out-of-Bounds Cell}Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
298 rank, k, j, i,
299 k, j, i_cell_A, ucat_A.x, ucat_A.y, ucat_A.z,
300 k, j, i_cell_B, ucat_B.x, ucat_B.y, ucat_B.z);
301
302 }
303 */
304
305 // Skip processing the unused last node in each dimension.
306 if(i >= IM || j >= JM || k >= KM){
307 continue;
308 }
309 // Loop over the 8 potential cells surrounding node N(k,j,i) and accumulate values.
310 // The index offsets correspond to the relative position of the indices(shifted) that represent cell-centered field values of cells that share the node N(k,j,i) as a corner
311 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
312 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
313 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
314
315 // These are still GLOBAL cell indices
316 PetscInt global_cell_k = k + dk_offset;
317 PetscInt global_cell_j = j + dj_offset;
318 PetscInt global_cell_i = i + di_offset;
319
320 // Check if this corresponds to a valid GLOBAL cell index
321 if (global_cell_i >= 0 && global_cell_i < nCellsX &&
322 global_cell_j >= 0 && global_cell_j < nCellsY &&
323 global_cell_k >= 0 && global_cell_k < nCellsZ)
324 {
325 Cmpnts cell_val = centfield_arr[global_cell_k + 1][global_cell_j + 1][global_cell_i + 1];
326
327 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,49,"[Rank %d] successful read from [%d][%d][%d] -> (%.2f, %.2f, %.2f)\n",
328 rank,global_cell_k,global_cell_j,global_cell_i,cell_val.x, cell_val.y, cell_val.z);
329
330 sum.x += cell_val.x;
331 sum.y += cell_val.y;
332 sum.z += cell_val.z;
333 count++;
334 }
335 }
336 }
337 }
338
339 PetscInt i_global_write = i; // Global index in GLOBAL array.
340 PetscInt j_global_write = j;
341 PetscInt k_global_write = k;
342
343 // We write directly into the array using the global loop indices.
344 if (count > 0) {
345 corner_arr[k_global_write][j_global_write][i_global_write].x = sum.x / (PetscReal)count;
346 corner_arr[k_global_write][j_global_write][i_global_write].y = sum.y / (PetscReal)count;
347 corner_arr[k_global_write][j_global_write][i_global_write].z = sum.z / (PetscReal)count;
348 } else {
349 // This case should ideally not happen for a valid owned node, but as a failsafe:
350 corner_arr[k_global_write][j_global_write][i_global_write] = (Cmpnts){0.0, 0.0, 0.0};
351 }
352
353 // DEBUG 2
354 /*
355 if(rank == 1){
356 if(i == 11 && j == 11 && k == 49){
357 Cmpnts ucat_node = corner_arr[k][j][i];
358 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
359 rank, k, j, i,
360 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
361 }
362 }
363
364 if(rank == 0 && i == 24 && j == 12 && k == 0){
365 Cmpnts ucat_node = corner_arr[k][j][i];
366 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
367 rank, k, j, i,
368 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
369 }
370 */
371 // LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,48,"[Rank %d] Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
372 }
373 }
374 }
376 return 0;
377}
378
379#undef __FUNCT__
380#define __FUNCT__ "InterpolateFieldFromCenterToCorner_Scalar"
381/**
382 * @brief Interpolates a scalar field from cell centers to corner nodes.
383 *
384 *
385 * @param[in] centfield_arr Input: 3D array (ghosted) of cell-centered data, accessed via GLOBAL indices.
386 * @param[out] corner_arr Output: 3D array where interpolated node values are stored,
387 * also accessed via GLOBAL indices for the owned part.
388 * @param[in] user User context containing DMDA information.
389 *
390 * @return PetscErrorCode 0 on success.
391 */
393 PetscReal ***centfield_arr, /* Input: Ghosted local array from Vec (read) */
394 PetscReal ***corner_arr, /* Output: global array from Vec (write) */
395 UserCtx *user)
396{
397 PetscErrorCode ierr;
398 DMDALocalInfo info;
399 PetscMPIInt rank;
401 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
402 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
403
404 // Node ownership range (GLOBAL indices)
405 PetscInt xs_node = info.xs, xm_node = info.xm, xe_node = xs_node + xm_node;
406 PetscInt ys_node = info.ys, ym_node = info.ym, ye_node = ys_node + ym_node;
407 PetscInt zs_node = info.zs, zm_node = info.zm, ze_node = zs_node + zm_node;
408
409 PetscInt nCellsX = info.mx - 2; // Number of cells in x-direction
410 PetscInt nCellsY = info.my - 2; // Number of cells in y-direction
411 PetscInt nCellsZ = info.mz - 2; // Number of cells in z-direction
412
413
414 // Global grid dimensions (used for valid cell check)
415 PetscInt IM = info.mx - 1; // Total nodes in i-direction
416 PetscInt JM = info.my - 1; // Total nodes in j-direction
417 PetscInt KM = info.mz - 1; // Total nodes in k-direction
418
419 // Ghosted array starting indices (from the same info struct)
420 PetscInt gxs = info.gxs;
421 PetscInt gys = info.gys;
422 PetscInt gzs = info.gzs;
423
424 PetscInt xs = info.xs;
425 PetscInt ys = info.ys;
426 PetscInt zs = info.zs;
427
429 "[Rank %d] Starting -- Node ownership k=%d..%d, j=%d..%d, i=%d..%d\n",
430 rank, zs_node, ze_node-1, ys_node, ye_node-1, xs_node, xe_node-1);
431
432 // Loop over the GLOBAL indices of the NODES owned by this processor
433 for (PetscInt k = zs_node; k < ze_node; k++) {
434 for (PetscInt j = ys_node; j < ye_node; j++) {
435 for (PetscInt i = xs_node; i < xe_node; i++) {
436 PetscReal sum = 0.0;
437 PetscInt count = 0;
438
439 // DEBUG 1 TEST
440 /*
441 if(rank == 1 && i == 24 && j == 12 && k == 49){
442 PetscInt i_cell_A = i - 1;
443 PetscInt i_cell_B = i;
444
445 Cmpnts ucat_A = centfield_arr[k][j][i_cell_A]; // 23
446 Cmpnts ucat_B = centfield_arr[k][j][i_cell_B]; // 24 (out-of-bounds if IM=25)
447
448 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Read { Valid Cell }Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f) and {Out-of-Bounds Cell}Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
449 rank, k, j, i,
450 k, j, i_cell_A, ucat_A.x, ucat_A.y, ucat_A.z,
451 k, j, i_cell_B, ucat_B.x, ucat_B.y, ucat_B.z);
452
453 }
454 */
455
456 // Skip processing the unused last node in each dimension.
457 if(i >= IM || j >= JM || k >= KM){
458 continue;
459 }
460 // Loop over the 8 potential cells surrounding node N(k,j,i) and accumulate values.
461 // The index offsets correspond to the relative position of the indices(shifted) that represent cell-centered field values of cells that share the node N(k,j,i) as a corner
462 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
463 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
464 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
465
466 // These are still GLOBAL cell indices
467 PetscInt global_cell_k = k + dk_offset;
468 PetscInt global_cell_j = j + dj_offset;
469 PetscInt global_cell_i = i + di_offset;
470
471 // Check if this corresponds to a valid GLOBAL cell index
472 if (global_cell_i >= 0 && global_cell_i < nCellsX &&
473 global_cell_j >= 0 && global_cell_j < nCellsY &&
474 global_cell_k >= 0 && global_cell_k < nCellsZ)
475 {
476 PetscReal cell_val = centfield_arr[global_cell_k + 1][global_cell_j + 1][global_cell_i + 1];
477
478 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,49,"[Rank %d] successful read from [%d][%d][%d] -> (%.2f)\n",
479 rank,global_cell_k,global_cell_j,global_cell_i,cell_val);
480
481 sum += cell_val;
482 count++;
483 }
484 }
485 }
486 }
487
488 PetscInt i_global_write = i; // Global index in GLOBAL array.
489 PetscInt j_global_write = j;
490 PetscInt k_global_write = k;
491
492 // We write directly into the array using the global loop indices.
493 if (count > 0) {
494 corner_arr[k_global_write][j_global_write][i_global_write] = sum / (PetscReal)count;
495 } else {
496 // This case should ideally not happen for a valid owned node, but as a failsafe:
497 corner_arr[k_global_write][j_global_write][i_global_write] = 0.0;
498 }
499
500 // DEBUG 2
501 /*
502 if(rank == 1){
503 if(i == 11 && j == 11 && k == 49){
504 Cmpnts ucat_node = corner_arr[k][j][i];
505 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
506 rank, k, j, i,
507 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
508 }
509 }
510
511 if(rank == 0 && i == 24 && j == 12 && k == 0){
512 Cmpnts ucat_node = corner_arr[k][j][i];
513 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
514 rank, k, j, i,
515 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
516 }
517 */
518 // LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,48,"[Rank %d] Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
519 }
520 }
521 }
523 return 0;
524}
525
526#undef __FUNCT
527#define __FUNCT "PiecWiseLinearInterpolation_Scalar"
528/**
529 * @brief Retrieves the scalar value at the cell (iCell, jCell, kCell).
530 *
531 * This function does a simple piecewise (zeroth‐order) interpolation
532 * by returning fieldScal[kCell][jCell][iCell], ignoring any fractional coordinates.
533 *
534 * @param[in] fieldName A string identifying the field (e.g. "temperature").
535 * @param[in] fieldScal 3D array of PetscReal (indexed as [k][j][i]).
536 * @param[in] iCell, jCell, kCell Integral cell indices to sample.
537 * @param[out] val Pointer to a PetscReal that receives the sampled scalar.
538 *
539 * @return PetscErrorCode 0 on success
540 */
542 const char *fieldName,
543 PetscReal ***fieldScal,
544 PetscInt iCell,
545 PetscInt jCell,
546 PetscInt kCell,
547 PetscReal *val)
548{
549 PetscFunctionBegin;
550 *val = fieldScal[kCell][jCell][iCell];
551
552 // Optional logging
554 "Field '%s' at (i=%d, j=%d, k=%d) => val=%.6f\n",
555 fieldName, iCell, jCell, kCell, *val);
556
557 PetscFunctionReturn(0);
558}
559
560#undef __FUNCT
561#define __FUNCT "PiecWiseLinearInterpolation_Vector"
562/**
563 * @brief Retrieves the vector (Cmpnts) at the cell (iCell, jCell, kCell).
564 *
565 * This function simply sets:
566 * vec->x = fieldVec[kCell][jCell][iCell].x
567 * vec->y = fieldVec[kCell][jCell][iCell].y
568 * vec->z = fieldVec[kCell][jCell][iCell].z
569 * effectively a nearest‐cell or piecewise approach.
570 *
571 * @param[in] fieldName String identifying the field (e.g. "velocity").
572 * @param[in] fieldVec 3D array of Cmpnts (indexed as [k][j][i]).
573 * @param[in] iCell, jCell, kCell Integral cell indices to sample.
574 * @param[out] vec Pointer to a Cmpnts struct that receives the sampled vector.
575 *
576 * @return PetscErrorCode 0 on success
577 */
579 const char *fieldName,
580 Cmpnts ***fieldVec,
581 PetscInt iCell,
582 PetscInt jCell,
583 PetscInt kCell,
584 Cmpnts *vec)
585{
586 PetscFunctionBegin;
587 vec->x = fieldVec[kCell][jCell][iCell].x;
588 vec->y = fieldVec[kCell][jCell][iCell].y;
589 vec->z = fieldVec[kCell][jCell][iCell].z;
590
591 // Optional logging
593 "Field '%s' at (i=%d, j=%d, k=%d) => (x=%.6f, y=%.6f, z=%.6f)\n",
594 fieldName, iCell, jCell, kCell, vec->x, vec->y, vec->z);
595
596 PetscFunctionReturn(0);
597}
598
599
600#undef __FUNCT
601#define __FUNCT "ComputeTrilinearWeights"
602
603/**
604 * @brief Computes the trilinear interpolation weights from the interpolation coefficients.
605 *
606 * This function computes the weights for trilinear interpolation at the eight corners of a cell
607 * using the interpolation coefficients provided along the x, y, and z directions.
608 *
609 * @param[in] a1 Interpolation coefficient along the x-direction (normalized coordinate within the cell).
610 * @param[in] a2 Interpolation coefficient along the y-direction (normalized coordinate within the cell).
611 * @param[in] a3 Interpolation coefficient along the z-direction (normalized coordinate within the cell).
612 * @param[out] w Array of 8 weights, each corresponding to one corner of the cell.
613 *
614 * @note
615 * - The coefficients `a1`, `a2`, and `a3` should be in the range [0, 1].
616 * - The order of weights corresponds to the eight corners of a hexahedral cell.
617 */
618static inline void ComputeTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w) {
619 LOG_ALLOW(GLOBAL, LOG_VERBOSE, "Computing weights for a1=%f, a2=%f, a3=%f.\n", a1, a2, a3);
620
621 // Ensure a1, a2, a3 are within [0,1]
622 a1 = PetscMax(0.0, PetscMin(1.0, a1));
623 a2 = PetscMax(0.0, PetscMin(1.0, a2));
624 a3 = PetscMax(0.0, PetscMin(1.0, a3));
625
626 const PetscReal oa1 = 1.0 - a1;
627 const PetscReal oa2 = 1.0 - a2;
628 const PetscReal oa3 = 1.0 - a3;
629
630 w[0] = oa1 * oa2 * oa3; /* cornerOffsets[0] => (0,0,0) */
631 w[1] = a1 * oa2 * oa3; /* cornerOffsets[1] => (1,0,0) */
632 w[2] = oa1 * a2 * oa3; /* cornerOffsets[2] => (0,1,0) */
633 w[3] = a1 * a2 * oa3; /* cornerOffsets[3] => (1,1,0) */
634 w[4] = oa1 * oa2 * a3; /* cornerOffsets[4] => (0,0,1) */
635 w[5] = a1 * oa2 * a3; /* cornerOffsets[5] => (1,0,1) */
636 w[6] = oa1 * a2 * a3; /* cornerOffsets[6] => (0,1,1) */
637 w[7] = a1 * a2 * a3; /* cornerOffsets[7] => (1,1,1) */
638
639 // Log the computed weights for debugging
640 LOG_ALLOW(LOCAL,LOG_VERBOSE, "Weights computed - "
641 "w0=%f, w1=%f, w2=%f, w3=%f, w4=%f, w5=%f, w6=%f, w7=%f. \n",
642 w[0], w[1], w[2], w[3], w[4], w[5], w[6], w[7]);
643}
644
645#undef __FUNCT
646#define __FUNCT "TrilinearInterpolation_Scalar"
647
648/**
649 * @brief Computes the trilinear Interpolateted scalar at a given point.
650 *
651 * @param[in] fieldName A string representing the name of the scalar field (e.g., "temperature").
652 * @param[in] fieldScal 3D array of the field from a DMDA (indexed as [k][j][i]),
653 * each cell a PetscReal.
654 * @param[in] i, j, k Integral cell indices (the "lower" corner in each dimension).
655 * @param[in] a1, a2, a3 Normalized coordinates within the cell ([0,1] range).
656 * @param[out] val Pointer to a PetscReal that will store the Interpolateted scalar.
657 *
658 * This function uses the standard 8-corner trilinear formula via `ComputeTrilinearWeights()`.
659 * If a different scheme is desired, implement a new function with the same PetscInterface.
660 */
662 const char *fieldName,
663 PetscReal ***fieldScal,
664 PetscInt i,
665 PetscInt j,
666 PetscInt k,
667 PetscReal a1,
668 PetscReal a2,
669 PetscReal a3,
670 PetscReal *val)
671{
672 PetscFunctionBegin; // PETSc macro for error/stack tracing
673
674 // Compute the 8 corner weights
675 PetscReal wcorner[8];
676 ComputeTrilinearWeights(a1, a2, a3, wcorner);
677
678 // Offsets for cell corners
679 PetscInt i1 = i + 1;
680 PetscInt j1 = j + 1;
681 PetscInt k1 = k + 1;
682
683 // Initialize the output scalar
684 PetscReal sum = 0.0;
685
686 // Corner 0 => (i, j, k)
687 sum += wcorner[0] * fieldScal[k ][j ][i ];
688 // Corner 1 => (i+1, j, k)
689 sum += wcorner[1] * fieldScal[k ][j ][i1];
690 // Corner 2 => (i, j+1, k)
691 sum += wcorner[2] * fieldScal[k ][j1][i ];
692 // Corner 3 => (i+1, j+1, k)
693 sum += wcorner[3] * fieldScal[k ][j1][i1];
694 // Corner 4 => (i, j, k+1)
695 sum += wcorner[4] * fieldScal[k1][j ][i ];
696 // Corner 5 => (i+1, j, k+1)
697 sum += wcorner[5] * fieldScal[k1][j ][i1];
698 // Corner 6 => (i, j+1, k+1)
699 sum += wcorner[6] * fieldScal[k1][j1][i ];
700 // Corner 7 => (i+1, j+1, k+1)
701 sum += wcorner[7] * fieldScal[k1][j1][i1];
702
703 *val = sum;
704
705 // Logging (optional)
707 "Field '%s' at (i=%d, j=%d, k=%d), "
708 "a1=%.6f, a2=%.6f, a3=%.6f -> val=%.6f.\n",
709 fieldName, i, j, k, a1, a2, a3, *val);
710
711 // LOG_ALLOW_SYNC(GLOBAL, LOG_INFO,
712 // "TrilinearInterpolation_Scalar: Completed interpolation for field '%s' across local cells.\n",
713 // fieldName);
714
715 PetscFunctionReturn(0);
716}
717
718
719#undef __FUNCT
720#define __FUNCT "TrilinearInterpolation_Vector"
721/**
722 * @brief Computes the trilinear Interpolateted vector (e.g., velocity) at a given point.
723 *
724 * @param[in] fieldName A string representing the name of the vector field (e.g., "velocity").
725 * @param[in] fieldVec 3D array of the field from a DMDA (indexed as [k][j][i]),
726 * each cell of type Cmpnts.
727 * @param[in] i, j, k Integral cell indices (the "lower" corner in each dimension).
728 * @param[in] a1, a2, a3 Normalized coordinates within the cell ([0,1] range).
729 * @param[out] vec Pointer to a Cmpnts struct that will store the Interpolateted vector (x, y, z).
730 *
731 * This function uses the standard 8-corner trilinear formula via `ComputeTrilinearWeights()`.
732 * If a different scheme is desired, implement a new function with the same PetscInterface.
733 */
734/**
735 * @brief Computes the trilinear Interpolateted vector at a given point, with partial weighting if corners go out of range.
736 *
737 * If any of the 8 corners (i or i+1, j or j+1, k or k+1) is out of [0..mx), [0..my), [0..mz),
738 * that corner is skipped and the corresponding weight is omitted. The total is normalized
739 * by the sum of used weights, so we get a partial interpolation near boundaries.
740 */
742 const char *fieldName,
743 Cmpnts ***fieldVec, /* 3D array [k][j][i], dimension [mz][my][mx] */
744 PetscInt i, // local cell index i
745 PetscInt j, // local cell index j
746 PetscInt k, // local cell index k
747 PetscReal a1,
748 PetscReal a2,
749 PetscReal a3,
750 Cmpnts *vec)
751{
752 PetscFunctionBegin; // PETSc macro for error/stack tracing
753
754 // Compute the 8 corner weights
755 PetscErrorCode ierr;
756 PetscReal wcorner[8];
757 PetscMPIInt rank;
758
759 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
760
761 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] Computing Trilinear Weights.\n",rank);
762 ComputeTrilinearWeights(a1, a2, a3, wcorner);
763
764 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] Trilinear weights computed for local cell %d,%d,%d.\n",rank,i,j,k);
765
766 // For partial interpolation, we'll keep track of how many corners are valid
767 // and how much sum of weights is used. Then we do a final normalization.
768 PetscReal sumW = 0.0;
769 Cmpnts accum = {0.0, 0.0, 0.0};
770
771 // The eight corner indices, with their weights:
772 // corners: (i,j,k), (i+1,j,k), (i,j+1,k), (i+1,j+1,k), etc.
773 // We store them in an array to iterate cleanly.
774 const PetscInt cornerOffsets[8][3] = {
775 {0, 0, 0},
776 {1, 0, 0},
777 {0, 1, 0},
778 {1, 1, 0},
779 {0, 0, 1},
780 {1, 0, 1},
781 {0, 1, 1},
782 {1, 1, 1}
783 };
784
785 // Weighted partial sum
786 for (PetscInt c = 0; c < 8; c++) {
787 const PetscInt di = cornerOffsets[c][0];
788 const PetscInt dj = cornerOffsets[c][1];
789 const PetscInt dk = cornerOffsets[c][2];
790 PetscInt iC = i + di;
791 PetscInt jC = j + dj;
792 PetscInt kC = k + dk;
793
794 /*
795 // skip if out of domain
796 // (Assuming you know global domain is [0..mx), [0..my), [0..mz).)
797 if (iC < 0 || iC >= (PetscInt)userGlobalMx ||
798 jC < 0 || jC >= (PetscInt)userGlobalMy ||
799 kC < 0 || kC >= (PetscInt)userGlobalMz)
800 {
801 // skip this corner
802 continue;
803 }
804
805 */
806
807 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] %s[%d][%d][%d] = (%.4f,%.4f,%.4f).\n",rank,fieldName,kC,jC,iC,fieldVec[kC][jC][iC].x,fieldVec[kC][jC][iC].y,fieldVec[kC][jC][iC].z);
808
809 // Otherwise, accumulate
810 accum.x += wcorner[c] * fieldVec[kC][jC][iC].x;
811 accum.y += wcorner[c] * fieldVec[kC][jC][iC].y;
812 accum.z += wcorner[c] * fieldVec[kC][jC][iC].z;
813 sumW += wcorner[c];
814 }
815
816 // If sumW=0 => out-of-range or zero weighting => set (0,0,0)
817 if (sumW > 1.0e-14) {
818 vec->x = accum.x / sumW;
819 vec->y = accum.y / sumW;
820 vec->z = accum.z / sumW;
821 } else {
822 vec->x = 0.0; vec->y = 0.0; vec->z = 0.0;
823 }
824
825 PetscFunctionReturn(0);
826}
827
828
829#undef __FUNCT
830#define __FUNCT "InterpolateEulerFieldToSwarmForParticle"
831/**
832 * @brief Interpolates a single field (scalar or vector) for one particle, storing the result in a swarm array.
833 *
834 * This helper function is used inside a larger loop over local particles.
835 *
836 * @param[in] fieldName A string identifying the field (e.g. "velocity", "temperature", etc.).
837 * @param[in] fieldPtr A Pointer to the local DMDA array for the field:
838 * - (PetscReal ***) if scalar (blockSize = 1)
839 * - (Cmpnts ***) if vector (blockSize = 3)
840 * @param[in] particle A Pointer to the Particle struct containing cell ID and weight information.
841 * @param[out] swarmOut A Pointer to the DMSwarm output array:
842 * - (PetscReal*) if scalar dof=1
843 * - (PetscReal*) if vector dof=3 (storing x,y,z in consecutive reals)
844 * - or (Cmpnts*) if you store the result as a Cmpnts struct
845 * @param[in] p The local particle index (used to compute the correct offset in swarmOut).
846 * @param[in] blockSize The number of degrees of freedom (1 => scalar, 3 => vector).
847 *
848 * This routine demonstrates a switch between:
849 * - PiecewiseLinearInterpolation (zeroth order / nearest cell)
850 * - TrilinearInterpolation (8-corner weighted).
851 *
852 * By default, PiecewiseLinearInterpolation is active, while the TrilinearInterpolation calls are commented out.
853 * To switch to trilinear, simply comment/uncomment appropriately.
854 *
855 * Logging calls are provided (LOG_ALLOW) that you can adapt to your existing logging system.
856 *
857 * @return PetscErrorCode 0 on success, non-zero on error.
858 */
859static inline PetscErrorCode InterpolateEulerFieldToSwarmForParticle(
860 const char *fieldName,
861 void *fieldPtr, /* typed Pointer => either (PetscReal***) or (Cmpnts***) */
862 Particle *particle, /* particle struct containing Cell ID and weight information. */
863 void *swarmOut, /* typed Pointer => (PetscReal*) or (Cmpnts*) or dof=3 array */
864 PetscInt p, /* particle index */
865 PetscInt blockSize) /* dof=1 => scalar, dof=3 => vector */
866{
867 PetscErrorCode ierr;
868 PetscFunctionBegin;
869
870 PetscInt iCell = particle->cell[0];
871 PetscInt jCell = particle->cell[1];
872 PetscInt kCell = particle->cell[2];
873 PetscReal a1 = particle->weights.x;
874 PetscReal a2 = particle->weights.y;
875 PetscReal a3 = particle->weights.z;
876
878
879 // Optional logging at start
881 "field='%s', blockSize=%d, "
882 "cell IDs=(%d,%d,%d), weights=(%.4f,%.4f,%.4f)\n",
883 fieldName, blockSize, iCell, jCell, kCell, a1, a2, a3);
884
885 /*
886 If blockSize=1, we PetscInterpret the fieldPtr as a 3D array of PetscReal (scalar).
887 If blockSize=3, we PetscInterpret the fieldPtr as a 3D array of Cmpnts (vector).
888 */
889 if (blockSize == 1) {
890 /* Scalar field: Cast fieldPtr to (PetscReal ***). */
891 PetscReal ***fieldScal = (PetscReal ***) fieldPtr;
892 PetscReal val;
893
894 // Currently using trilinear.
895 ierr = TrilinearInterpolation(fieldName, fieldScal,
896 iCell, jCell, kCell,
897 a1, a2, a3,
898 &val);
899 CHKERRQ(ierr);
900
901 // Alternative (commented) call to PiecewiseLinearInterpolation (zeroth order) :
902 // PetscErrorCode ierr = PiecewiseLinearInterpolation(fieldName,
903 // fieldScal,
904 // iCell, jCell, kCell,
905 // &val);
906 // CHKERRQ(ierr);
907
908 // Write the scalar result to the swarm output at index [p].
909 ((PetscReal*)swarmOut)[p] = val;
910
912 "field='%s', result=%.6f "
913 "stored at swarmOut index p=%d.\n", fieldName, val, (PetscInt)p);
914 }
915 else if (blockSize == 3) {
916 /* Vector field: Cast fieldPtr to (Cmpnts ***). */
917 Cmpnts ***fieldVec = (Cmpnts ***) fieldPtr;
918 Cmpnts vec;
919
920
921
922
923 // Piecewise interpolation (zeroth order).
924 // PetscErrorCode ierr = PieceWiseLinearInterpolation(fieldName,
925 // fieldVec,
926 // iCell, jCell, kCell,
927 // &vec);
928 // CHKERRQ(ierr);
929
930 // Alternative (commented) call to trilinear:
931 ierr = TrilinearInterpolation(fieldName, fieldVec,
932 iCell, jCell, kCell,
933 a1, a2, a3,
934 &vec);
935 CHKERRQ(ierr);
936
937 // If swarmOut is an array of 3 reals per particle:
938 ((PetscReal*)swarmOut)[3*p + 0] = vec.x;
939 ((PetscReal*)swarmOut)[3*p + 1] = vec.y;
940 ((PetscReal*)swarmOut)[3*p + 2] = vec.z;
941
943 "field='%s', result=(%.6f,%.6f,%.6f) "
944 "stored at swarmOut[3p..3p+2], p=%d.\n",
945 fieldName, vec.x, vec.y, vec.z, (PetscInt)p);
946
947 /*
948 If you store the vector result as a Cmpnts in the swarm, do instead:
949 ((Cmpnts*)swarmOut)[p] = vec;
950 but ensure your DMSwarm field is sized for a Cmpnts struct.
951 */
952 }
953 else {
954 /* If blockSize isn't 1 or 3, we raise an error. */
955 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP,
956 "InterpolateEulerFieldToSwarmForParticle: only blockSize=1 or 3 supported, got %d.",
957 (PetscInt)blockSize);
958 }
959
961 PetscFunctionReturn(0);
962}
963
964#undef __FUNCT__
965#define __FUNCT__ "InterpolateEulerFieldToSwarm"
966
967/**
968 * @brief Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles,
969 * using a robust, PETSc-idiomatic two-stage process.
970 *
971 * This function first converts the cell-centered input data to corner-node data,
972 * storing this intermediate result in a PETSc Vec to correctly handle the communication
973 * of ghost-point information across parallel ranks. It then performs a final trilinear
974 * interpolation from the ghosted corner data to each particle's location.
975 *
976 * Workflow:
977 * 1. Create temporary PETSc Vecs (`cornerGlobal`, `cornerLocal`) to manage the
978 * intermediate corner-node data, using the existing nodal DMDA (`user->fda`).
979 * 2. Call a dispatch macro that uses the runtime block size (`bs`) to select the
980 * correct underlying center-to-corner function, writing results into `cornerGlobal`.
981 * 3. Perform a ghost-point exchange (`DMGlobalToLocal`) to transfer the boundary
982 * data from `cornerGlobal` into the ghost regions of `cornerLocal`.
983 * 4. Loop over all local particles. For each particle:
984 * a. Convert its global cell index to a local index relative to the ghosted array.
985 * b. Check if the particle's interpolation stencil is fully contained within
986 * the owned+ghost region. If not, log a warning and set the result to zero.
987 * c. Perform the final trilinear interpolation using the ghosted `cornerLocal` data.
988 * 5. Restore all PETSc objects to prevent memory leaks.
989 *
990 * @param[in] user User context with DMDA, DMSwarm, etc.
991 * @param[in] fieldlocal_cellCentered Vec (from fda) with cell-centered data (e.g., Ucat).
992 * @param[in] fieldName Human-readable field name for logging (e.g., "Ucat").
993 * @param[in] swarmOutFieldName Name of the DMSwarm field where interpolation results go.
994 *
995 * @return PetscErrorCode 0 on success.
996 */
998 UserCtx *user,
999 Vec fieldLocal_cellCentered,
1000 const char *fieldName,
1001 const char *swarmOutFieldName)
1002{
1003 PetscErrorCode ierr;
1004 DM fda = user->fda;
1005 DM swarm = user->swarm;
1006 PetscInt bs;
1007 DMDALocalInfo info;
1008 PetscMPIInt rank;
1009
1010 // Generic pointers to the raw data arrays
1011 void *cellCenterPtr_read;
1012 void *cornerPtr_read_with_ghosts;
1013
1014 // Swarm-related pointers
1015 PetscInt *cellIDs = NULL;
1016 PetscReal *weights = NULL;
1017 PetscInt64 *pids = NULL; // For logging particle IDs in warnings
1018 void *swarmOut = NULL;
1019 PetscReal *pos = NULL;
1020 PetscInt *status = NULL;
1021 PetscInt nLocal;
1022
1023 PetscFunctionBegin;
1025 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1026 ierr = DMDAGetLocalInfo(fda, &info); CHKERRQ(ierr);
1027 ierr = VecGetBlockSize(fieldLocal_cellCentered, &bs); CHKERRQ(ierr);
1028 if (bs != 1 && bs != 3) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "BlockSize must be 1 or 3.");
1029
1030 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting for field '%s'... \n", fieldName);
1031 // Select the appropriate DM for intermediate corner data based on block size
1032 DM dm_corner = (bs == 3) ? user->fda : user->da;
1033
1034 /* STAGE 1: Center-to-Corner Calculation with Communication */
1035
1036 // (A) Create or reuse the corner vectors.
1037 if(user->CellFieldAtCorner){
1038 PetscInt cached_bs;
1039 ierr = VecGetBlockSize(user->CellFieldAtCorner, &cached_bs); CHKERRQ(ierr);
1040 if(cached_bs != bs){
1041 LOG_ALLOW(LOCAL, LOG_WARNING, "[Rank %d] Block size mismatch for existing corner vector and current field %s(Cached: %d, New: %d). Replacing.\n", rank, fieldName, cached_bs, bs);
1042 ierr = VecDestroy(&user->CellFieldAtCorner); CHKERRQ(ierr);
1043 ierr = VecDestroy(&user->lCellFieldAtCorner); CHKERRQ(ierr);
1044 user->CellFieldAtCorner = NULL;
1045 user->lCellFieldAtCorner = NULL;
1046 }
1047 }
1048
1049 if(user->CellFieldAtCorner == NULL){
1050 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Creating new global corner vector for field '%s'.\n", rank, fieldName);
1051 ierr = DMCreateGlobalVector(dm_corner, &user->CellFieldAtCorner); CHKERRQ(ierr);
1052 }else{
1053 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Reusing existing global corner vector for field '%s'.\n", rank, fieldName);
1054 }
1055
1056 ierr = VecSet(user->CellFieldAtCorner,0.0); CHKERRQ(ierr);
1057 Vec cornerGlobal = user->CellFieldAtCorner;
1058
1059 if(user->lCellFieldAtCorner == NULL){
1060 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Creating new local corner vector for field '%s'.\n", rank, fieldName);
1061 ierr = DMCreateLocalVector(dm_corner, &user->lCellFieldAtCorner); CHKERRQ(ierr);
1062 }else{
1063 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Reusing existing local corner vector for field '%s'.\n", rank, fieldName);
1064 }
1065
1066 ierr = VecSet(user->lCellFieldAtCorner,0.0); CHKERRQ(ierr);
1067 Vec cornerLocal = user->lCellFieldAtCorner;
1068
1069 // (B) Get a read-only array from the input cell-centered vector
1070 ierr = DMDAVecGetArrayRead(dm_corner, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);
1071
1072 PetscInt size;
1073 ierr = VecGetSize(cornerGlobal, &size); CHKERRQ(ierr);
1074 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Corner global vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);
1075
1076 size = 0;
1077
1078 ierr = VecGetSize(cornerLocal, &size); CHKERRQ(ierr);
1079 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Corner local vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);
1080
1081 size = 0;
1082
1083 ierr = VecGetSize(fieldLocal_cellCentered, &size); CHKERRQ(ierr);
1084 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Cell-centered local vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);
1085
1086 PetscInt xs,ys,zs,gxs,gys,gzs;
1087
1088 ierr = DMDAGetCorners(dm_corner,&xs,&ys,&zs,NULL,NULL,NULL); CHKERRQ(ierr);
1089 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] DMDAGetCorners for field '%s': xs=%d, ys=%d, zs=%d.\n", rank, fieldName, (PetscInt)xs, (PetscInt)ys, (PetscInt)zs);
1090
1091 ierr = DMDAGetGhostCorners(dm_corner,&gxs,&gys,&gzs,NULL,NULL,NULL); CHKERRQ(ierr);
1092 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] DMDAGetGhostCorners for field '%s': gxs=%d, gys=%d, gzs=%d.\n", rank, fieldName, (PetscInt)gxs, (PetscInt)gys, (PetscInt)gzs);
1093
1094 // DEBUG: Inspect Ucat ghost cells before interpolation
1095 /*
1096 if (bs == 3) {
1097 Cmpnts ***ucat_array = (Cmpnts***)cellCenterPtr_read;
1098 DMDALocalInfo info_debug;
1099 ierr = DMDAGetLocalInfo(fda, &info_debug); CHKERRQ(ierr);
1100
1101 // Only print on rank 0 to avoid clutter
1102 if (rank == 0) {
1103 PetscPrintf(PETSC_COMM_SELF, "\nDEBUG: Inspecting Ucat Ghost Cells...\n");
1104 PetscPrintf(PETSC_COMM_SELF, "--------------------------------------------------\n");
1105
1106 // --- Check the MINIMUM-SIDE (-Xi) ---
1107 // The first physical cell is at index info.xs (local index).
1108 // The first ghost cell is at info.xs - 1.
1109 PetscInt i_first_phys = info_debug.xs;
1110 PetscInt i_first_ghost = info_debug.xs - 1;
1111 PetscInt j_mid = info_debug.ys + info_debug.ym / 2;
1112 PetscInt k_mid = info_debug.zs + info_debug.zm / 2;
1113
1114 PetscPrintf(PETSC_COMM_SELF, "MIN-SIDE (-Xi):\n");
1115 PetscPrintf(PETSC_COMM_SELF, " Ghost Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
1116 k_mid, j_mid, i_first_ghost,
1117 ucat_array[k_mid][j_mid][i_first_ghost].x,
1118 ucat_array[k_mid][j_mid][i_first_ghost].y,
1119 ucat_array[k_mid][j_mid][i_first_ghost].z);
1120 PetscPrintf(PETSC_COMM_SELF, " First Phys Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
1121 k_mid, j_mid, i_first_phys,
1122 ucat_array[k_mid][j_mid][i_first_phys].x,
1123 ucat_array[k_mid][j_mid][i_first_phys].y,
1124 ucat_array[k_mid][j_mid][i_first_phys].z);
1125
1126
1127 // --- Check the MAXIMUM-SIDE (+Xi) ---
1128 // The last physical cell is at index info.xs + info.xm - 1.
1129 // The first ghost cell on the max side is at info.xs + info.xm.
1130 PetscInt i_last_phys = info_debug.xs + info_debug.xm - 1;
1131 PetscInt i_last_ghost = info_debug.xs + info_debug.xm;
1132
1133 PetscPrintf(PETSC_COMM_SELF, "MAX-SIDE (+Xi):\n");
1134 PetscPrintf(PETSC_COMM_SELF, " Last Phys Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
1135 k_mid, j_mid, i_last_phys,
1136 ucat_array[k_mid][j_mid][i_last_phys].x,
1137 ucat_array[k_mid][j_mid][i_last_phys].y,
1138 ucat_array[k_mid][j_mid][i_last_phys].z);
1139 PetscPrintf(PETSC_COMM_SELF, " Ghost Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
1140 k_mid, j_mid, i_last_ghost,
1141 ucat_array[k_mid][j_mid][i_last_ghost].x,
1142 ucat_array[k_mid][j_mid][i_last_ghost].y,
1143 ucat_array[k_mid][j_mid][i_last_ghost].z);
1144 PetscPrintf(PETSC_COMM_SELF, "--------------------------------------------------\n\n");
1145 }
1146 }
1147 */
1148 // DEBUG
1149
1150 // (C) Perform the center-to-corner interpolation directly into the global corner vector
1151 void *cornerPtr_write = NULL;
1152 ierr = DMDAVecGetArray(dm_corner, cornerGlobal, &cornerPtr_write); CHKERRQ(ierr);
1153
1154 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Starting center-to-corner interpolation for '%s'.\n", rank, fieldName);
1155
1156 // SINGLE, CLEAN CALL SITE: The macro handles the runtime dispatch based on 'bs'.
1157 ierr = InterpolateFieldFromCenterToCorner(bs, cellCenterPtr_read, cornerPtr_write, user); CHKERRQ(ierr);
1158
1159 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Finished center-to-corner interpolation for '%s'.\n", rank, fieldName);
1160 ierr = DMDAVecRestoreArray(dm_corner, cornerGlobal, &cornerPtr_write); CHKERRQ(ierr);
1161
1162 ierr = DMDAVecRestoreArrayRead(dm_corner, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);
1163
1164 ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
1165
1166 //////////// DEBUG
1167 /*
1168 // Log a synchronized header message from all ranks.
1169 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerGlobal BEFORE ghost exchange...\n");
1170
1171 // VecView prints to a PETSc viewer. PETSC_VIEWER_STDOUT_WORLD is a global, synchronized viewer.
1172 // We only need to call it if logging is active for this function.
1173 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1174 ierr = VecView(cornerGlobal, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
1175 }
1176
1177 // Log a synchronized footer message.
1178 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerGlobal.\n");
1179 */
1180 ////////// DEBUG
1181
1182 // ierr = PetscBarrier((PetscObject)cornerGlobal); CHKERRQ(ierr);
1183
1184 // (D) CRITICAL STEP: Communicate the newly computed corner data to fill ghost regions
1185 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Beginning ghost exchange for corner data...\n", rank);
1186
1187 ierr = DMGlobalToLocalBegin(dm_corner, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
1188 ierr = DMGlobalToLocalEnd(dm_corner, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
1189
1190 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Ghost exchange for corner data complete.\n", rank);
1191
1192 LOG_ALLOW_SYNC(GLOBAL, LOG_VERBOSE, "getting array from %s on all ranks.\n", dm_corner == user->fda ? "fda" : "da");
1193 if (is_function_allowed(__func__) && (int)(LOG_VERBOSE) <= (int)get_log_level()) {
1194 // DEBUG: Inspect cornerLocal after ghost exchange
1195
1196 ierr = LOG_FIELD_ANATOMY(user,"CornerField", "After Corner Velocity Interpolated"); CHKERRQ(ierr);
1197
1198 // DEBUG
1199 //ierr = DMView(user->fda, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
1200 }
1201
1202 /////// DEBUG
1203 /*
1204 // Log a synchronized header message from all ranks.
1205 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerLocal AFTER ghost exchange...\n");
1206
1207 // Here, we want each rank to print its own local vector.
1208 // PETSC_VIEWER_STDOUT_SELF is the correct tool for this. The LOG_ALLOW_SYNC
1209 // wrapper will ensure the output from each rank is printed sequentially.
1210 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1211 PetscMPIInt rank_d;
1212 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank_d); CHKERRQ(ierr);
1213
1214 // Use a synchronized print to create a clean header for each rank's output
1215 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\n--- cornerLocal (Rank %d) ---\n", rank_d);
1216 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
1217
1218 // Print the local vector's contents for this rank
1219 ierr = VecView(cornerLocal, PETSC_VIEWER_STDOUT_SELF); CHKERRQ(ierr);
1220
1221 // Use a synchronized print to create a clean footer
1222 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "--- End cornerLocal (Rank %d) ---\n", rank_d);
1223 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
1224 }
1225
1226 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerLocal.\n");
1227 */
1228 ///// DEBUG
1229
1230 // STAGE 2: Particle Interpolation using Ghosted Corner Data */
1231
1232 // (E) Get the local, GHOSTED array of corner data for the final interpolation step
1233 ierr = DMDAVecGetArrayRead(dm_corner, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);
1234
1235 // (F) Retrieve swarm fields for the particle loop
1236 ierr = DMSwarmGetLocalSize(swarm, &nLocal); CHKERRQ(ierr);
1237 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1238 ierr = DMSwarmGetField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
1239 ierr = DMSwarmGetField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1240 ierr = DMSwarmGetField(swarm, "position", NULL, NULL, (void**)&pos); CHKERRQ(ierr);
1241 ierr = DMSwarmGetField(swarm, "DMSwarm_location_status", NULL, NULL, (void**)&status); CHKERRQ(ierr);
1242 ierr = DMSwarmGetField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1243
1244 LOG_ALLOW(LOCAL,LOG_TRACE," Rank %d holds data upto & including %d,%d,%d.\n",rank,info.gxs + info.gxm,info.gys+info.gym,info.gzs+info.gzm);
1245
1246 // (G) Loop over each local particle
1247 for (PetscInt p = 0; p < nLocal; p++) {
1248
1249 Particle particle;
1250
1251 ierr = UnpackSwarmFields(p,pids,weights,pos,cellIDs,NULL,status,NULL,NULL,NULL,&particle); CHKERRQ(ierr);
1252
1254 "[Rank %d] Particle PID %lld: global cell=(%d,%d,%d), weights=(%.4f,%.4f,%.4f)\n",
1255 rank, (long long)particle.PID, particle.cell[0], particle.cell[1], particle.cell[2],
1256 particle.weights.x, particle.weights.y, particle.weights.z);
1257 // Safety check: Ensure the entire 8-node stencil is within the valid memory
1258 // region (owned + ghosts) of the local array.
1259
1260 if (particle.cell[0] < info.gxs || particle.cell[0] >= info.gxs + info.gxm - 1 ||
1261 particle.cell[1] < info.gys || particle.cell[1] >= info.gys + info.gym - 1 ||
1262 particle.cell[2] < info.gzs || particle.cell[2] >= info.gzs + info.gzm - 1)
1263 {
1265 "[Rank %d] Particle PID %lld in global cell (%d,%d,%d) is in an un-interpolatable region (requires ghosts of ghosts or is out of bounds). Zeroing field '%s'.\n",
1266 rank, (long long)particle.PID, particle.cell[0], particle.cell[1], particle.cell[2], fieldName);
1267 if (bs == 3) {
1268 ((PetscReal*)swarmOut)[3*p + 0] = 0.0;
1269 ((PetscReal*)swarmOut)[3*p + 1] = 0.0;
1270 ((PetscReal*)swarmOut)[3*p + 2] = 0.0;
1271 } else {
1272 ((PetscReal*)swarmOut)[p] = 0.0;
1273 }
1274 continue;
1275 }
1276
1278 fieldName,
1279 cornerPtr_read_with_ghosts,
1280 &particle,
1281 swarmOut, p, bs);
1282 CHKERRQ(ierr);
1283 }
1284
1285 // (H) Restore all retrieved arrays and vectors */
1286 ierr = DMDAVecRestoreArrayRead(dm_corner, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);
1287
1288 // (I) Restore swarm fields
1289 ierr = DMSwarmRestoreField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1290 ierr = DMSwarmRestoreField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1291 ierr = DMSwarmRestoreField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
1292 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1293 ierr = DMSwarmRestoreField(swarm, "position", NULL, NULL, (void**)&pos); CHKERRQ(ierr);
1294 ierr = DMSwarmRestoreField(swarm, "DMSwarm_location_status", NULL, NULL, (void**)&status); CHKERRQ(ierr);
1295 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished for field '%s'.\n", fieldName);
1297 PetscFunctionReturn(0);
1298}
1299
1300#undef __FUNCT__
1301#define __FUNCT__ "InterpolateAllFieldsToSwarm"
1302 /**
1303 * @brief Interpolates all relevant fields from the DMDA to the DMSwarm.
1304 *
1305 * Currently, it Interpolatetes:
1306 * - user->Ucat (vector field) into the DMSwarm field "swarmVelocity".
1307 *
1308 * To add more fields, duplicate the call to InterpolateOneFieldOverSwarm and provide:
1309 * - The global Vec for that field (e.g. user->Tcat for temperature),
1310 * - A human-readable field name (for logging),
1311 * - A DMSwarm output field name (e.g. "swarmTemperature").
1312 *
1313 * @param[in,out] user Pointer to a UserCtx containing:
1314 * - user->da (DM for the grid),
1315 * - user->swarm (DMSwarm for particles),.
1316 * - user->Ucat (Vec for the vector field),
1317 * - possibly more fields like user->Tcat, user->Pcat, etc.
1318 *
1319 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1320 */
1322{
1323 PetscErrorCode ierr;
1324 PetscMPIInt rank;
1325 PetscFunctionBegin;
1326
1328
1329 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);
1330
1331 /*
1332 1) Interpolate the 'velocity' field (user->Ucat) into DMSwarm's "swarmVelocity".
1333 - The function InterpolateOneFieldOverSwarm() loops over all local particles,
1334 retrieves iCell, jCell, kCell, (a1,a2,a3) from the DMSwarm,
1335 and calls the appropriate trilinear interpolation routine.
1336
1337 - "velocity" => just a label used in logging or debugging.
1338 - "swarmVelocity" => the DMSwarm field name where we store the result
1339 (assume dof=3 if user->Ucat has blockSize=3).
1340 */
1341
1343 " Interpolation of ucat to velocity begins on rank %d.\n",rank);
1344 // Make sure to pass the *LOCAL* Vector to the function below!
1345 ierr = InterpolateEulerFieldToSwarm(user, user->lUcat,
1346 "Ucat",
1347 "velocity"); CHKERRQ(ierr);
1349 "Diffusivity", "Diffusivity"); CHKERRQ(ierr);
1350
1352 "DiffusivityGradient", "DiffusivityGradient"); CHKERRQ(ierr);
1353 /* Add fields as necessary here*/
1354
1355 ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
1357 "[rank %d]Completed Interpolateting all fields to the swarm.\n",rank);
1358
1360
1361 PetscFunctionReturn(0);
1362}
1363
1364/////////////////////// Scatter from particles to euler fields
1365
1366/**
1367 * @brief Functions for scattering particle data (scalar or vector) onto
1368 * Eulerian grid fields by averaging contributions within each cell.
1369 *
1370 * This file provides a modular set of functions to perform particle-to-grid
1371 * projection, specifically calculating cell-averaged quantities from particle properties.
1372 * It assumes a PETSc environment using DMDA for the grids and DMSwarm for particles.
1373 *
1374 * Key Features:
1375 * - Handles both scalar (DOF=1) and vector (DOF=3) particle fields.
1376 * - Uses a pre-calculated particle count vector (`ParticleCount`) for normalization.
1377 * - Implicitly determines the target Eulerian DM (typically `user->da` for scalars,
1378 * `user->fda` for vectors) based on standard field names ("P", "Nvert", "Ucat", "Ucont","Psi").
 * - Modifies existing Eulerian field vectors (e.g., `user->P`, `user->Ucat`, `user->Psi`) in place.
1380 * - Provides a high-level wrapper function (`ScatterAllParticleFieldsToEulerFields`)
1381 * to easily scatter a standard set of fields.
1382 * - Uses only the base `SETERRQ` macro for error reporting to maximize compiler compatibility.
1383 *
1384 * Dependencies:
1385 * - PETSc library (DMDA, DMSwarm, Vec, IS, PetscLog, etc.)
1386 * - A `UserCtx` struct (defined elsewhere, e.g., "userctx.h") containing pointers
1387 * to relevant DMs (`da`, `fda`), Vecs (`ParticleCount`, `P`, `Nvert`, `Ucat`, etc.),
1388 * and the `DMSwarm` object (`swarm`).
1389 * - A custom DMSwarm field named `"DMSwarm_CellID"` (blockSize=3, type=PETSC_INT)
1390 * must be registered and populated with the local cell indices for each particle.
1391 * - Logging infrastructure (`LOG_ALLOW`, etc.) assumed to be defined elsewhere.
1392 *
1393 * @defgroup scatter_module Particle-to-Grid Scattering
1394 * @{
1395 */
1396
1397//-----------------------------------------------------------------------------
1398// Internal Helper Modules (Lower-level building blocks)
1399//-----------------------------------------------------------------------------
1400/**
1401 * @defgroup scatter_module_internal Internal Scattering Helpers
1402 * @ingroup scatter_module
1403 * @brief Lower-level functions used by the main scattering routines.
1404 * @{
1405 */
1406
1407#undef __FUNCT__
1408#define __FUNCT__ "GetScatterTargetInfo"
1409/**
1410 * @brief Determines the target Eulerian DM and expected DOF for scattering a given particle field.
1411 *
1412 * Based on hardcoded rules mapping particle field names ("P", "Nvert", "Ucat", "Ucont")
1413 * to user context DMs (`user->da` or `user->fda`). This function encapsulates the
1414 * policy of where different fields should be scattered. Modify this function to
1415 * add rules for custom fields.
1416 *
1417 * @param[in] user Pointer to the UserCtx containing required DMs (`da`, `fda`).
1418 * @param[in] particleFieldName Name of the particle field.
1419 * @param[out] targetDM Pointer to store the determined target DM (`user->da` or `user->fda`).
1420 * @param[out] expected_dof Pointer to store the expected DOF (1 or 3) for this field.
1421 *
1422 * @return PetscErrorCode Returns 0 on success. Error codes:
1423 * - `PETSC_ERR_ARG_NULL` if required inputs are NULL.
1424 * - `PETSC_ERR_ARG_WRONG` if `particleFieldName` is not recognized.
1425 */
1426PetscErrorCode GetScatterTargetInfo(UserCtx *user, const char *particleFieldName,
1427 DM *targetDM, PetscInt *expected_dof)
1428{
1429 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1430 PetscFunctionBeginUser;
1431
1433
1434 // --- Input Validation ---
1435 // Check for NULL pointers in essential inputs
1436 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL.");
1437 if (!user->da) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->da is NULL.");
1438 if (!user->fda) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->fda is NULL."); // Needed for vector fields
1439 if (!particleFieldName) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "particleFieldName is NULL.");
1440 if (!targetDM) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output targetDM pointer is NULL.");
1441 if (!expected_dof) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output expected_dof pointer is NULL.");
1442
1443 // --- Determine Target DM and DOF based on Field Name ---
1444 // Compare the input field name with known scalar fields targeting 'da'
1445 if (strcmp(particleFieldName, "Psi") == 0 || strcmp(particleFieldName, "Nvert") == 0) {
1446 *expected_dof = 1; // Scalar fields have DOF 1
1447 *targetDM = user->da; // Target the primary scalar DMDA
1448 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Field '%s' targets DM 'da' (DOF=1).\n", particleFieldName);
1449 }
1450 // Compare with known vector fields targeting 'fda'
1451 else if (strcmp(particleFieldName, "Ucat") == 0 || strcmp(particleFieldName, "Ucont") == 0) {
1452 *expected_dof = 3; // Vector fields have DOF 3
1453 *targetDM = user->fda; // Target the vector DMDA (often node-based)
1454 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Field '%s' targets DM 'fda' (DOF=3).\n", particleFieldName);
1455 }
1456 // --- Add rules for other fields here ---
1457 // else if (strcmp(particleFieldName, "SomeOtherScalar") == 0) { *expected_dof = 1; *targetDM = user->da; }
1458 // else if (strcmp(particleFieldName, "SomeOtherVector") == 0) { *expected_dof = 3; *targetDM = user->someOtherDM; }
1459 else {
1460 // The provided field name doesn't match any known rules
1461 *targetDM = NULL; // Indicate failure
1462 *expected_dof = 0;
1463 // Format the error message manually
1464 PetscSNPrintf(msg, sizeof(msg), "Field name '%s' is not recognized for automatic DM selection.", particleFieldName);
1465 // Use SETERRQ with the formatted message and appropriate error code
1466 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); // Use WRONG argument error code
1467 }
1468
1470
1471 PetscFunctionReturn(0);
1472}
1473
1474#undef __FUNCT__
1475#define __FUNCT__ "GetPersistentLocalVector"
1476/**
1477 * @brief Retrieves the persistent local vector (e.g., lPsi, lUcat) for a given field name.
1478 *
1479 * @param user User context containing the persistent vectors.
1480 * @param fieldName Name of the field ("Psi", "Ucat", etc.).
1481 * @param localVec Output pointer to the vector.
1482 * @return PetscErrorCode
1483 */
1484PetscErrorCode GetPersistentLocalVector(UserCtx *user, const char *fieldName, Vec *localVec)
1485{
1486 PetscFunctionBeginUser;
1487
1488 if (!user || !fieldName || !localVec)
1489 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Null input in GetPersistentLocalVector");
1490
1491 if (strcmp(fieldName, "Psi") == 0) {
1492 if (!user->lPsi) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->lPsi is not initialized");
1493 *localVec = user->lPsi;
1494 }
1495 else if (strcmp(fieldName, "Ucat") == 0) {
1496 if (!user->lUcat) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->lUcat is not initialized");
1497 *localVec = user->lUcat;
1498 }
1499 else if (strcmp(fieldName, "Ucont") == 0) {
1500 if (!user->lUcont) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->lUcont is not initialized");
1501 *localVec = user->lUcont;
1502 }
1503 else if (strcmp(fieldName, "Nvert") == 0) {
1504 if (!user->lNvert) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->lNvert is not initialized");
1505 *localVec = user->lNvert;
1506 }
1507 else {
1508 char msg[ERROR_MSG_BUFFER_SIZE];
1509 PetscSNPrintf(msg, sizeof(msg), "Field '%s' does not have a mapped persistent local vector.", fieldName);
1510 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg);
1511 }
1512
1513 PetscFunctionReturn(0);
1514}
1515
1516#undef __FUNCT__
1517#define __FUNCT__ "AccumulateParticleField"
1518/**
1519 * @brief Accumulates particle field values into a ghosted Local Vector.
1520 *
1521 * This function iterates over local particles and sums their specified field property
1522 * into the provided grid vector. It supports both scalar (DOF=1) and vector (DOF=3) fields.
1523 *
1524 * **CRITICAL USAGE NOTES:**
1525 * 1. `localAccumulatorVec` MUST be a Local Vector (obtained via DMGetLocalVector or DMCreateLocalVector).
1526 * It must have memory allocated for ghost slots. Passing a Global Vector will trigger an error.
1527 * 2. The vector is NOT zeroed internally. The caller must zero it if fresh accumulation is desired.
1528 * 3. This function performs pure accumulation. It does NOT communicate ghost values to owners.
1529 * The caller is responsible for calling DMLocalToGlobalBegin/End afterwards.
1530 *
1531 * @param[in] swarm The DMSwarm object containing the particles.
1532 * @param[in] particleFieldName The name of the particle field to accumulate (e.g., "Psi", "Ucat").
1533 * @param[in] gridSumDM The DMDA defining the grid topology.
1534 * @param[in,out] localAccumulatorVec The ghosted local vector to accumulate into.
1535 *
1536 * @return PetscErrorCode 0 on success.
1537 */
1538PetscErrorCode AccumulateParticleField(DM swarm, const char *particleFieldName,
1539 DM gridSumDM, Vec localAccumulatorVec)
1540{
1541 PetscErrorCode ierr;
1542 PetscInt dof;
1543 PetscInt nlocal, p;
1544 const PetscReal *particle_arr = NULL;
1545 const PetscInt *cell_id_arr = NULL;
1546
1547 // DMDA Accessors
1548 PetscScalar ***arr_1d = NULL; // For scalar fields
1549 PetscScalar ****arr_3d = NULL; // For vector fields
1550
1551 // Ghosted dimension variables
1552 PetscInt gxs, gys, gzs, gxm, gym, gzm;
1553 PetscMPIInt rank;
1554 char msg[ERROR_MSG_BUFFER_SIZE];
1555
1556 PetscFunctionBeginUser;
1558
1559 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1560
1561 // --- 1. Validation & Setup ---
1562 if (!swarm || !gridSumDM || !localAccumulatorVec)
1563 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Null input in AccumulateParticleField.");
1564
1565 // Get DMDA information
1566 ierr = DMDAGetInfo(gridSumDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1567 // Get Ghosted Corners (Global indices of the ghosted patch start, and its dimensions)
1568 ierr = DMDAGetGhostCorners(gridSumDM, &gxs, &gys, &gzs, &gxm, &gym, &gzm); CHKERRQ(ierr);
1569
1570 // --- 2. Verify Vector Type (Global vs Local) ---
1571 {
1572 PetscInt vecSize;
1573 PetscInt expectedLocalSize = gxm * gym * gzm * dof;
1574 ierr = VecGetSize(localAccumulatorVec, &vecSize); CHKERRQ(ierr);
1575
1576 if (vecSize != expectedLocalSize) {
1577 PetscSNPrintf(msg, sizeof(msg),
1578 "Vector dimension mismatch! Expected Ghosted Local Vector size %d (gxm*gym*gzm*dof), got %d. "
1579 "Did you pass a Global Vector instead of a Local Vector?",
1580 expectedLocalSize, vecSize);
1581 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg);
1582 }
1583 }
1584
1585 // --- 3. Acquire Particle Data ---
1586 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1587 // These calls will fail nicely if the field doesn't exist
1588 ierr = DMSwarmGetField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
1589 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);
1590
1591 // --- 4. Acquire Grid Accessors ---
1592 // DMDAVecGetArray* handles the mapping from Global (i,j,k) to the underlying Local Array index
1593 if (dof == 1) {
1594 ierr = DMDAVecGetArray(gridSumDM, localAccumulatorVec, &arr_1d); CHKERRQ(ierr);
1595 } else if (dof == 3) {
1596 ierr = DMDAVecGetArrayDOF(gridSumDM, localAccumulatorVec, &arr_3d); CHKERRQ(ierr);
1597 } else {
1598 PetscSNPrintf(msg, sizeof(msg), "Unsupported DOF=%d. AccumulateParticleField supports DOF 1 or 3.", dof);
1599 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, msg);
1600 }
1601
1602 // --- 5. Accumulate Loop ---
1603 // Iterate through all local particles
1604 for (p = 0; p < nlocal; ++p) {
1605 // Retrieve Geometric Grid Index (0-based)
1606 PetscInt i_geom = cell_id_arr[p * 3 + 0];
1607 PetscInt j_geom = cell_id_arr[p * 3 + 1];
1608 PetscInt k_geom = cell_id_arr[p * 3 + 2];
1609
1610 // Apply Shift (+1) to match Memory Layout (Index 0 is boundary/ghost)
1611 PetscInt i = i_geom + 1;
1612 PetscInt j = j_geom + 1;
1613 PetscInt k = k_geom + 1;
1614
1615 // Bounds Check: Ensure (i,j,k) falls within the Local Ghosted Patch.
1616 // This allows writing to ghost slots which will later be reduced to the owner rank.
1617 if (i >= gxs && i < gxs + gxm &&
1618 j >= gys && j < gys + gym &&
1619 k >= gzs && k < gzs + gzm)
1620 {
1621 if (dof == 1) {
1622 arr_1d[k][j][i] += particle_arr[p];
1623 } else {
1624 // For DOF=3, unroll the loop for slight optimization
1625 arr_3d[k][j][i][0] += particle_arr[p * 3 + 0];
1626 arr_3d[k][j][i][1] += particle_arr[p * 3 + 1];
1627 arr_3d[k][j][i][2] += particle_arr[p * 3 + 2];
1628 }
1629 }
1630 // Note: Particles outside the ghost layer are skipped. This is expected behavior
1631 // if particles have not yet been migrated or localized correctly.
1632 }
1633
1634 // --- 6. Restore Arrays and Fields ---
1635 if (dof == 1) {
1636 ierr = DMDAVecRestoreArray(gridSumDM, localAccumulatorVec, &arr_1d); CHKERRQ(ierr);
1637 } else {
1638 ierr = DMDAVecRestoreArrayDOF(gridSumDM, localAccumulatorVec, &arr_3d); CHKERRQ(ierr);
1639 }
1640
1641 ierr = DMSwarmRestoreField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
1642 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);
1643
1645 PetscFunctionReturn(0);
1646}
1647
1648//#undef __FUNCT__
1649//#define __FUNCT__ "AccumulateParticleField"
1650
1651/**
1652 * @brief Accumulates a particle field (scalar or vector) into a target grid sum vector.
1653 *
1654 * This function iterates through local particles, identifies their cell using the
1655 * `"DMSwarm_CellID"` field, and adds the particle's field value (`particleFieldName`)
1656 * to the corresponding cell location in the `gridSumVec`. It handles both scalar
1657 * (DOF=1) and vector (DOF=3) fields automatically based on the DOF of `gridSumDM`.
1658 *
1659 * IMPORTANT: The caller must ensure `gridSumVec` is zeroed before calling this
1660 * function if a fresh sum calculation is desired.
1661 *
1662 * @param[in] swarm The DMSwarm containing particles.
1663 * @param[in] particleFieldName Name of the field on the particles (must match DOF of `gridSumDM`).
1664 * @param[in] gridSumDM The DMDA associated with `gridSumVec`. Its DOF determines
1665 * how many components are accumulated.
1666 * @param[in,out] gridSumVec The Vec (associated with `gridSumDM`) to accumulate sums into.
1667 *
1668 * @return PetscErrorCode 0 on success. Errors if fields don't exist, DMs are incompatible,
1669 * or memory access fails.
1670 */
1671/*
1672 PetscErrorCode AccumulateParticleField(DM swarm, const char *particleFieldName,
1673 DM gridSumDM, Vec gridSumVec)
1674{
1675 PetscErrorCode ierr;
1676 PetscInt dof; // DOF determined from gridSumDM
1677 PetscInt nlocal, p; // Local particle count and loop index
1678 const PetscReal *particle_arr = NULL; // Pointer to particle field data array (assuming Real)
1679 const PetscInt *cell_id_arr = NULL; // Pointer to particle cell ID array ("DMSwarm_CellID", Int)
1680 //PetscScalar *sum_arr_ptr = NULL; // Pointer to grid sum vector data array (Scalar)
1681 PetscScalar ***arr_1d = NULL; // Pointer to grid sum vector data array (for DOF=1)
1682 PetscScalar ****arr_3d = NULL; // Pointer to grid sum vector data array (for DOF=3)
1683 PetscInt gxs, gys, gzs; // Start indices of local ghosted patch (often 0)
1684 PetscInt gxm, gym, gzm; // Dimensions of local ghosted patch (including ghosts)
1685 PetscMPIInt rank; // MPI rank for logging
1686 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1687
1688 PetscFunctionBeginUser;
1689
1690 PROFILE_FUNCTION_BEGIN;
1691
1692 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1693
1694 // --- Get DOF from the target grid DM ---
1695 ierr = DMDAGetInfo(gridSumDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1696 // Validate that the DOF is supported (currently 1 or 3)
1697 if (dof != 1 && dof != 3) {
1698 PetscSNPrintf(msg, sizeof(msg), "gridSumDM DOF must be 1 or 3, got %d.", dof);
1699 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg);
1700 }
1701
1702 // --- Get Particle Data Arrays ---
1703 // DMSwarmGetField will return an error if the field doesn't exist, caught by CHKERRQ.
1704 ierr = DMSwarmGetField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
1705 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);
1706
1707 // Get number of local particles *after* successfully getting field pointers
1708 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1709
1710 // --- Get Grid Sum Vector Array & Dimensions ---
1711 if(dof == 1){
1712 ierr = DMDAVecGetArray(gridSumVec, &sum_arr_ptr); CHKERRQ(ierr);
1713 }
1714
1715 // Get dimensions needed for calculating flat index within the local ghosted array
1716 ierr = DMDAGetGhostCorners(gridSumDM, &gxs, &gys, &gzs, &gxm, &gym, &gzm); CHKERRQ(ierr);
1717
1718 // --- Accumulate Locally ---
1719 LOG_ALLOW(LOCAL, LOG_DEBUG, "(Rank %d): Accumulating '%s' (DOF=%d) from %d particles using CellID field 'DMSwarm_CellID'.\n", rank, particleFieldName, dof, nlocal);
1720 // Loop over all particles owned by this process
1721 for (p = 0; p < nlocal; ++p) {
1722 // Extract local cell indices (relative to start of ghosted patch, [0..gxm-1], etc.)
1723 // Assumes DMSwarm_CellID stores (i, j, k) contiguously for each particle.
1724 PetscInt pidx_geom = cell_id_arr[p * 3 + 0]; // Local i-index
1725 PetscInt pidy_geom = cell_id_arr[p * 3 + 1]; // Local j-index
1726 PetscInt pidz_geom = cell_id_arr[p * 3 + 2]; // Local k-index
1727
1728 // Apply the index shift to convert from geometric to cell-centered field indexing
1729 PetscInt pidx_field = pidx_geom + 1;
1730 PetscInt pidy_field = pidy_geom + 1;
1731 PetscInt pidz_field = pidz_geom + 1;
1732
1733 // Bounds Check: Ensure the particle's cell index is within the valid local ghosted region
1734 if (pidx_field >= 0 && pidx_field < gxm && pidy_field >= 0 && pidy_field < gym && pidz_field >= 0 && pidz_field < gzm)
1735 {
1736 // Calculate the flat 1D index for this cell within the linear ghosted array
1737 // Uses PETSc's standard C-style row-major ordering (k-slowest, j-middle, i-fastest)
1738 // Corrected: k (slowest), j, i (fastest)
1739 PetscInt cell_flat_idx = (pidz_field * gym + pidy_field) * gxm + pidx_field;
1740
1741 // Calculate the base index for this particle's data in particle_arr
1742 PetscInt particle_base_idx = p * dof;
1743 // Calculate the base index for this cell's data in sum_arr_ptr
1744 PetscInt grid_base_idx = cell_flat_idx * dof;
1745
1746 // Add particle components to the grid sum vector components
1747 for (PetscInt c = 0; c < dof; ++c) {
1748 sum_arr_ptr[grid_base_idx + c] += particle_arr[particle_base_idx + c];
1749 }
1750 } else {
1751 // Log a warning if a particle's CellID is outside the expected local region.
1752 // This might indicate particles needing migration or boundary issues.
1753 LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank %d): Particle %d (field '%s') has out-of-bounds CellID (%d, %d, %d). Ghosted dims: %dx%dx%d. Skipping.\n",
1754 rank, p, particleFieldName, pidx_field, pidy_field, pidz_field, gxm, gym, gzm);
1755 }
1756 } // End of particle loop
1757
1758 // --- Restore Access to Arrays ---
1759 ierr = VecRestoreArray(gridSumVec, &sum_arr_ptr); CHKERRQ(ierr);
1760 ierr = DMSwarmRestoreField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
1761 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);
1762
1763 // --- Assemble Global Sum Vector ---
1764 // Crucial for parallel execution: sums contributions for cells shared across process boundaries.
1765 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Assembling global sum vector for '%s'.\n", particleFieldName);
1766 ierr = VecAssemblyBegin(gridSumVec); CHKERRQ(ierr);
1767 ierr = VecAssemblyEnd(gridSumVec); CHKERRQ(ierr);
1768
1769 PROFILE_FUNCTION_END;
1770
1771 PetscFunctionReturn(0);
1772}
1773*/
1774
1775#undef __FUNCT__
1776#define __FUNCT__ "NormalizeGridVectorByCount"
1777
1778/**
1779 * @brief Normalizes a grid vector of sums by a grid vector of counts to produce an average.
1780 *
1781 * Calculates `avgVec[i] = sumVec[i] / countVec[i]` for each component of each
1782 * cell owned by the current process, provided `countVec[i] > 0`. Otherwise, sets `avgVec[i] = 0`.
1783 * Handles both scalar (DOF=1) and vector (DOF=3) data fields based on the DOF of `dataDM`.
1784 * Uses DMDA multi-dimensional array accessors (`DMDAVecGetArray...`) for safe and convenient indexing.
1785 *
1786 * @param[in] countDM The DMDA associated with `countVec` (must have DOF=1).
1787 * @param[in] countVec The Vec containing particle counts per cell (read-only).
1788 * @param[in] dataDM The DMDA associated with `sumVec` and `avgVec` (must have DOF=1 or DOF=3).
1789 * @param[in] sumVec The Vec containing the accumulated sums per cell (read-only).
1790 * @param[in,out] avgVec The Vec where the calculated averages will be stored (overwritten). Must be
1791 * associated with `dataDM`.
1792 *
1793 * @return PetscErrorCode 0 on success. Errors on incompatible DMs or memory access failure.
1794 */
1795PetscErrorCode NormalizeGridVectorByCount(DM countDM, Vec countVec,
1796 DM dataDM, Vec sumVec, Vec avgVec)
1797{
1798 PetscErrorCode ierr;
1799 PetscInt data_dof;
1800 PetscInt count_dof;
1801 PetscMPIInt rank;
1802 char msg[ERROR_MSG_BUFFER_SIZE];
1803
1804 // Pointers for DMDA array accessors - declare specific types
1805 PetscScalar ***count_arr_3d = NULL; // For DOF=1 count vector (3D DMDA)
1806 PetscScalar ***sum_arr_scalar = NULL; // For DOF=1 sum vector (3D DMDA)
1807 PetscScalar ***avg_arr_scalar = NULL; // For DOF=1 avg vector (3D DMDA)
1808 PetscScalar ****sum_arr_vector = NULL; // For DOF=3 sum vector (3D DMDA + DOF)
1809 PetscScalar ****avg_arr_vector = NULL; // For DOF=3 avg vector (3D DMDA + DOF)
1810
1811
1812 PetscFunctionBeginUser;
1813
1815
1816 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1817
1818 // --- Validation ---
1819 ierr = DMDAGetInfo(countDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &count_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1820 ierr = DMDAGetInfo(dataDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &data_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1821 if (count_dof != 1) { PetscSNPrintf(msg, sizeof(msg), "countDM must have DOF=1, got %d.", count_dof); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); }
1822 if (data_dof != 1 && data_dof != 3) { PetscSNPrintf(msg, sizeof(msg), "dataDM DOF must be 1 or 3, got %d.", data_dof); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); }
1823
1824 // --- Get Array Access using appropriate DMDA accessors ---
1825 ierr = DMDAVecGetArrayRead(countDM, countVec, &count_arr_3d); CHKERRQ(ierr);
1826
1827 if (data_dof == 1) {
1828 ierr = DMDAVecGetArrayRead(dataDM, sumVec, &sum_arr_scalar); CHKERRQ(ierr);
1829 ierr = DMDAVecGetArray(dataDM, avgVec, &avg_arr_scalar); CHKERRQ(ierr);
1830 } else { // data_dof == 3
1831 ierr = DMDAVecGetArrayDOFRead(dataDM, sumVec, &sum_arr_vector); CHKERRQ(ierr);
1832 ierr = DMDAVecGetArrayDOF(dataDM, avgVec, &avg_arr_vector); CHKERRQ(ierr);
1833 }
1834
1835 // Get the corners (global start indices) and dimensions of the *local owned* region
1836 PetscInt xs, ys, zs, xm, ym, zm;
1837 ierr = DMDAGetCorners(countDM, &xs, &ys, &zs, &xm, &ym, &zm); CHKERRQ(ierr);
1838
1839 // --- Normalize Over Owned Cells ---
1840 LOG_ALLOW(LOCAL, LOG_DEBUG, "(Rank %d): Normalizing DOF=%d data over owned range [%d:%d, %d:%d, %d:%d].\n",
1841 rank, data_dof, xs, xs+xm, ys, ys+ym, zs, zs+zm);
1842
1843 // Loop using GLOBAL indices (i, j, k) over the range owned by this process
1844 for (PetscInt k = zs; k < zs + zm; ++k) {
1845 for (PetscInt j = ys; j < ys + ym; ++j) {
1846 for (PetscInt i = xs; i < xs + xm; ++i) {
1847
1848 // Access the count using standard 3D indexing
1849 PetscScalar count = count_arr_3d[k][j][i];
1850
1851 if (PetscRealPart(count) > 0.5) { // Use tolerance for float comparison
1852 if (data_dof == 1) {
1853 // Access scalar sum/avg using standard 3D indexing
1854 avg_arr_scalar[k][j][i] = sum_arr_scalar[k][j][i] / count;
1855 } else { // data_dof == 3
1856 // Access vector components using DOF indexing on the last dimension
1857 for (PetscInt c = 0; c < data_dof; ++c) {
1858 avg_arr_vector[k][j][i][c] = sum_arr_vector[k][j][i][c] / count;
1859 }
1860 }
1861 } else { // count is zero or negative
1862 // Set average to zero
1863 if (data_dof == 1) {
1864 avg_arr_scalar[k][j][i] = 0.0;
1865 } else { // data_dof == 3
1866 for (PetscInt c = 0; c < data_dof; ++c) {
1867 avg_arr_vector[k][j][i][c] = 0.0;
1868 }
1869 }
1870 } // end if count > 0.5
1871 } // end i loop
1872 } // end j loop
1873 } // end k loop
1874
1875 // --- Restore Arrays using appropriate functions ---
1876 ierr = DMDAVecRestoreArrayRead(countDM, countVec, &count_arr_3d); CHKERRQ(ierr);
1877 if (data_dof == 1) {
1878 ierr = DMDAVecRestoreArrayRead(dataDM, sumVec, &sum_arr_scalar); CHKERRQ(ierr);
1879 ierr = DMDAVecRestoreArray(dataDM, avgVec, &avg_arr_scalar); CHKERRQ(ierr);
1880 } else { // data_dof == 3
1881 ierr = DMDAVecRestoreArrayDOFRead(dataDM, sumVec, &sum_arr_vector); CHKERRQ(ierr);
1882 ierr = DMDAVecRestoreArrayDOF(dataDM, avgVec, &avg_arr_vector); CHKERRQ(ierr);
1883 }
1884
1885 // --- Assemble Final Average Vector ---
1886 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Assembling final average vector (DOF=%d).\n", data_dof);
1887 ierr = VecAssemblyBegin(avgVec); CHKERRQ(ierr);
1888 ierr = VecAssemblyEnd(avgVec); CHKERRQ(ierr);
1889
1890
1892
1893 PetscFunctionReturn(0);
1894}
1895
1896/** @} */ // End of scatter_module_internal group
1897
1898//-----------------------------------------------------------------------------
1899// User-Facing API
1900//-----------------------------------------------------------------------------
1901//-----------------------------------------------------------------------------
1902// MODULE 4: Internal Scatter Orchestration Helper - No PetscErrorClear
1903//-----------------------------------------------------------------------------
1904
1905#undef __FUNCT__
1906#define __FUNCT__ "ScatterParticleFieldToEulerField_Internal"
1907/**
1908 * @brief Internal helper function to orchestrate the scatter operation (accumulate + normalize).
1909 * @ingroup scatter_module_internal
1910 *
1911 * Manages the temporary sum vector and calls the accumulation and normalization
1912 * functions. Assumes caller determined target DM and DOF. Checks for particle field existence.
1913 * NOTE: If DMSwarmGetField fails, the subsequent SETERRQ will overwrite the original error.
1914 *
1915 * @param[in] user Pointer to UserCtx containing swarm, ParticleCount, da.
1916 * @param[in] particleFieldName Name of the field in the DMSwarm.
1917 * @param[in] targetDM The DMDA where the final average and intermediate sum reside.
1918 * @param[in] expected_dof The expected DOF (1 or 3) for the targetDM and field.
1919 * @param[in,out] eulerFieldAverageVec The pre-created Vec associated with targetDM to store the result.
1920 *
1921 * @return PetscErrorCode 0 on success. Errors if particle field doesn't exist or
1922 * underlying helpers fail.
1923 */
1925 const char *particleFieldName,
1926 DM targetDM,
1927 PetscInt expected_dof,
1928 Vec eulerFieldAverageVec)
1929{
1930 PetscErrorCode ierr;
1931 Vec globalsumVec = NULL;
1932 Vec localsumVec = NULL;
1933 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1934
1935 PetscFunctionBeginUser;
1936
1938
1939 if (!user || !user->swarm || !user->ParticleCount || !particleFieldName || !targetDM || !eulerFieldAverageVec)
1940 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "NULL input provided to ScatterParticleFieldToEulerField_Internal.");
1941
1942 // --- Check if Particle Field Exists ---
1943 // Attempt a GetField call; if it fails, the field doesn't exist.
1944 // We let CHKERRQ handle the error directly if the field doesn't exist OR
1945 // we catch it specifically to provide a more tailored message.
1946
1947 /*
1948 LOG_ALLOW(GLOBAL,LOG_DEBUG,"Field %s being accessed to check existence \n",particleFieldName);
1949 ierr = DMSwarmGetField(user->swarm, particleFieldName, NULL, NULL, NULL);
1950 if (ierr) { // If GetField returns an error
1951 PetscSNPrintf(msg, sizeof(msg), "Particle field '%s' not found in DMSwarm for scattering.", particleFieldName);
1952 // Directly set the error, overwriting the one from GetField
1953 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, msg);
1954 }
1955 ierr = DMSwarmRestoreField(user->swarm, particleFieldName, NULL, NULL, NULL);
1956 */
1957
1958 // --- Setup Temporary Sum Vector ---
1959 ierr = VecDuplicate(eulerFieldAverageVec, &globalsumVec); CHKERRQ(ierr);
1960 ierr = VecSet(globalsumVec, 0.0); CHKERRQ(ierr);
1961 ierr = PetscSNPrintf(msg, sizeof(msg), "TempSum_%s", particleFieldName); CHKERRQ(ierr);
1962 ierr = PetscObjectSetName((PetscObject)globalsumVec, msg); CHKERRQ(ierr);
1963
1964 // create local vector for accumulation
1965 ierr = DMGetLocalVector(targetDM, &localsumVec); CHKERRQ(ierr);
1966 ierr = VecSet(localsumVec, 0.0); CHKERRQ(ierr); // Must be zeroed before accumulation
1967 ierr = PetscSNPrintf(msg, sizeof(msg), "LocalTempSum_%s", particleFieldName); CHKERRQ(ierr);
1968 ierr = PetscObjectSetName((PetscObject)localsumVec, msg); CHKERRQ(ierr);
1969
1970 // --- Accumulate ---
1971 // This will call DMSwarmGetField again. If it failed above, it will likely fail here too,
1972 // unless the error was cleared somehow between the check and here (unlikely).
1973 // If the check above was skipped (Option 1), this is where the error for non-existent
1974 // field will be caught by CHKERRQ.
1975 ierr = AccumulateParticleField(user->swarm, particleFieldName, targetDM, localsumVec); CHKERRQ(ierr);
1976
1977 // --- Local to Global Sum ---
1978 ierr = DMLocalToGlobalBegin(targetDM, localsumVec, ADD_VALUES, globalsumVec); CHKERRQ(ierr);
1979 ierr = DMLocalToGlobalEnd(targetDM, localsumVec, ADD_VALUES, globalsumVec); CHKERRQ(ierr);
1980 // Return local vector to DM
1981 ierr = DMRestoreLocalVector(targetDM, &localsumVec); CHKERRQ(ierr);
1982
1983 // Calculate the number of particles per cell.
1984 ierr = CalculateParticleCountPerCell(user); CHKERRQ(ierr);
1985 // --- Normalize ---
1986 ierr = NormalizeGridVectorByCount(user->da, user->ParticleCount, targetDM, globalsumVec, eulerFieldAverageVec); CHKERRQ(ierr);
1987
1988 // --- Cleanup ---
1989 ierr = VecDestroy(&globalsumVec); CHKERRQ(ierr);
1990
1991
1993
1994 PetscFunctionReturn(0);
1995}
1996
1997#undef __FUNCT__
1998#define __FUNCT__ "ScatterParticleFieldToEulerField"
1999
2000/**
2001 * @brief Scatters a particle field (scalar or vector) to the corresponding Eulerian field average.
2002 * @ingroup scatter_module
2003 *
2004 * This is the primary user-facing function for scattering a single field.
2005 * It determines the target Eulerian DM (`user->da` or `user->fda`) based on the
2006 * `particleFieldName`, validates that the provided `eulerFieldAverageVec` is compatible
2007 * with that target DM, and then orchestrates the scatter operation by calling
2008 * an internal helper function. The final averaged result is stored IN-PLACE in
2009 * the `eulerFieldAverageVec`.
2010 *
2011 * @param[in] user Pointer to UserCtx containing `da`, `fda`, `swarm`, `ParticleCount`.
2012 * @param[in] particleFieldName Name of the field in the DMSwarm (e.g., "Psi", "Ucat").
2013 * @param[in,out] eulerFieldAverageVec Pre-created Vec associated with the correct target DM
2014 * (implicitly `da` or `fda`). Result stored here.
2015 * This vector should be zeroed by the caller if desired
2016 * before this function (e.g., using `VecSet`). The wrapper
2017 * `ScatterAllParticleFieldsToEulerFields` handles this zeroing.
2018 *
2019 * @return PetscErrorCode 0 on success. Errors on NULL input, unrecognized field name,
2020 * incompatible target vector, or if the particle field doesn't exist.
2021 */
2023 const char *particleFieldName,
2024 Vec eulerFieldAverageVec)
2025{
2026 PetscErrorCode ierr;
2027 DM targetDM = NULL; // Will point to user->da or user->fda
2028 PetscInt expected_dof = 0; // Will be 1 or 3
2029 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
2030
2031 PetscFunctionBeginUser;
2032
2034
2035 // --- Essential Input Validation ---
2036 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL.");
2037 if (!user->swarm) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->swarm is NULL.");
2038 if (!user->ParticleCount) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->ParticleCount is NULL.");
2039 if (!eulerFieldAverageVec) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output eulerFieldAverageVec is NULL.");
2040 // particleFieldName validity checked within GetScatterTargetInfo
2041
2042 // --- Determine Target DM & DOF using the helper function ---
2043 ierr = GetScatterTargetInfo(user, particleFieldName, &targetDM, &expected_dof); CHKERRQ(ierr);
2044 // If GetScatterTargetInfo failed (e.g., unknown name), CHKERRQ would have returned.
2045
2046 // --- Validate the provided Target Vec's Compatibility ---
2047 DM vec_dm;
2048 PetscInt vec_dof;
2049 // Check that the provided average vector has a DM associated with it
2050 ierr = VecGetDM(eulerFieldAverageVec, &vec_dm); CHKERRQ(ierr);
2051 if (!vec_dm) {
2052 PetscSNPrintf(msg, sizeof(msg), "Provided eulerFieldAverageVec for field '%s' does not have an associated DM.", particleFieldName);
2053 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, msg);
2054 }
2055 // Get the block size (DOF) of the provided vector
2056 ierr = VecGetBlockSize(eulerFieldAverageVec, &vec_dof); CHKERRQ(ierr);
2057 // Compare the vector's associated DM with the one determined by the field name
2058 if (vec_dm != targetDM) {
2059 const char *target_dm_name = "targetDM", *vec_dm_name = "vec_dm";
2060 // Get actual names if possible for a more informative error message
2061 PetscObjectGetName((PetscObject)targetDM, &target_dm_name);
2062 PetscObjectGetName((PetscObject)vec_dm, &vec_dm_name);
2063 PetscSNPrintf(msg, sizeof(msg), "Provided eulerFieldAverageVec associated with DM '%s', but field '%s' requires scatter to DM '%s'.", vec_dm_name, particleFieldName, target_dm_name);
2064 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, msg);
2065 }
2066 // Compare the vector's DOF with the one expected for the field name
2067 if (vec_dof != expected_dof) {
2068 PetscSNPrintf(msg, sizeof(msg), "Field '%s' requires DOF %d, but provided eulerFieldAverageVec has DOF %d.", particleFieldName, expected_dof, vec_dof);
2069 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, msg);
2070 }
2071
2072 // --- Perform Scatter using Internal Helper ---
2073 // Log intent before calling the core logic
2074 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering field '%s' (DOF=%d).\n", particleFieldName, expected_dof);
2075 ierr = ScatterParticleFieldToEulerField_Internal(user, // Pass user context
2076 particleFieldName, // Name of particle field
2077 targetDM, // Determined target DM (da or fda)
2078 expected_dof, // Determined DOF (1 or 3)
2079 eulerFieldAverageVec); // The output vector
2080 CHKERRQ(ierr); // Handle potential errors from the internal function
2081
2082 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully scattered field '%s'.\n", particleFieldName);
2083
2085
2086 PetscFunctionReturn(0);
2087}
2088
2089#undef __FUNCT__
2090#define __FUNCT__ "ScatterAllParticleFieldsToEulerFields"
2091/**
2092 * @brief Scatters a predefined set of particle fields to their corresponding Eulerian fields.
2093 * @ingroup scatter_module
2094 *
2095 * This convenience function calls the unified `ScatterParticleFieldToEulerField`
2096 * for a standard set of fields (currently just "Psi", others commented out). It assumes
2097 * the target Eulerian Vec objects (e.g., `user->Psi`, `user->Ucat`) exist in the
2098 * UserCtx structure and are correctly associated with their respective DMs (`user->da`
2099 * or `user->fda`).
2100 *
2101 * **IMPORTANT:** It zeros the target Vecs before scattering. Ensure `user->ParticleCount`
2102 * has been computed accurately *before* calling this function, reflecting the current
2103 * particle distribution.
2104 *
2105 * @param[in,out] user Pointer to the UserCtx structure containing all required DMs,
2106 * Vecs (`ParticleCount`, target Eulerian fields like `P`, `Ucat`), and `swarm`.
2107 *
2108 * @return PetscErrorCode 0 on success. Errors if prerequisites (like ParticleCount)
2109 * are missing or if underlying scatter calls fail (e.g., particle field missing).
2110 */
2112{
2113 PetscErrorCode ierr;
2114 PetscFunctionBeginUser;
2116
2117 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting scattering of specified particle fields to Eulerian grids.\n");
2118
2119 // --- Pre-computation Check: Ensure Particle Counts are Ready ---
2120 if (!user->ParticleCount) {
2121 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "UserCtx->ParticleCount is NULL. Compute counts before calling ScatterAllParticleFieldsToEulerFields.");
2122 }
2123
2124 // --- Scatter Particle Field "Psi" -> Eulerian Field user->Psi (on da) ---
2125 // Check if the target Eulerian vector 'user->Psi' exists.
2126 if (user->Psi) {
2127
2128 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Psi' to user->Psi.\n");
2129 // Zero the target vector before accumulating the new average for this step/call.
2130
2131 // Debug Verification ------------------------------------------------
2132 Vec swarm_Psi;
2133 PetscReal Avg_Psi,Avg_swarm_Psi;
2134
2135 ierr = VecMean(user->Psi,&Avg_Psi);
2136 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Scalar(Psi) before scatter: %.4f.\n",Avg_Psi);
2137
2138 ierr = DMSwarmCreateGlobalVectorFromField(user->swarm,"Psi",&swarm_Psi);
2139 ierr = VecMean(swarm_Psi,&Avg_swarm_Psi);
2140
2141 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Particle Scalar(Psi): %.4f.\n",Avg_swarm_Psi);
2142
2143 ierr = DMSwarmDestroyGlobalVectorFromField(user->swarm,"Psi",&swarm_Psi);
2144 // Debug----------------------------------------------------------------
2145
2146 //ierr = VecSet(user->P, 0.0); CHKERRQ(ierr);
2147 // Call the unified scatter function. It will handle DM determination and validation.
2148 // It will also error out if the *particle* field "Psi" doesn't exist in the swarm.
2149 ierr = ScatterParticleFieldToEulerField(user, "Psi", user->Psi); CHKERRQ(ierr);
2150 ierr = VecMean(user->Psi,&Avg_Psi);
2151
2152 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Scalar(Psi) after scatter: %.4f.\n",Avg_Psi);
2153 } else {
2154 // Only log a warning if the target Eulerian field is missing in the context.
2155 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Psi': UserCtx->Psi is NULL.\n");
2156 }
2157
2158 // --- (Commented Out) Scatter Other Fields ---
2159 // To enable scattering for other fields, uncomment the relevant block
2160 // AND ensure:
2161 // 1. The corresponding particle field (e.g., "Nvert", "Ucat") exists in user->swarm.
2162 // 2. The corresponding target Eulerian Vec (e.g., user->Nvert, user->Ucat) exists in user->ctx.
2163 // 3. The target Eulerian Vec is associated with the correct DM (da for Nvert, fda for Ucat).
2164
2165 /*
2166 // Scatter Particle Field "Nvert" -> Eulerian Field user->Nvert (on da)
2167 if (user->Nvert) {
2168 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Nvert' to user->Nvert.\n");
2169 ierr = VecSet(user->Nvert, 0.0); CHKERRQ(ierr);
2170 ierr = ScatterParticleFieldToEulerField(user, "Nvert", user->Nvert); CHKERRQ(ierr);
2171 } else {
2172 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Nvert': UserCtx->Nvert is NULL.\n");
2173 }
2174 */
2175
2176 /*
2177 // Scatter Particle Field "Ucat" -> Eulerian Field user->Ucat (on fda)
2178 if (user->Ucat) {
2179 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Ucat' to user->Ucat.\n");
2180 ierr = VecSet(user->Ucat, 0.0); CHKERRQ(ierr);
2181 ierr = ScatterParticleFieldToEulerField(user, "Ucat", user->Ucat); CHKERRQ(ierr);
2182 } else {
2183 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Ucat': UserCtx->Ucat is NULL.\n");
2184 }
2185 */
2186
2187 /*
2188 // Scatter Particle Field "Ucont" -> Eulerian Field user->Ucont (on fda)
2189 if (user->Ucont) {
2190 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Ucont' to user->Ucont.\n");
2191 ierr = VecSet(user->Ucont, 0.0); CHKERRQ(ierr);
2192 ierr = ScatterParticleFieldToEulerField(user, "Ucont", user->Ucont); CHKERRQ(ierr);
2193 } else {
2194 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Ucont': UserCtx->Ucont is NULL.\n");
2195 }
2196 */
2197
2198 // Add more fields as needed following the pattern above...
2199
2200 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished scattering specified particle fields.\n");
2202 PetscFunctionReturn(0);
2203}
2204
2205/** @} */ // End of scatter_module group
2206
2207
2208#undef __FUNCT__
2209#define __FUNCT__ "InterpolateCornerToFaceCenter_Scalar"
2210
2211/**
2212 * @brief Interpolates a scalar field from corner nodes to all face centers.
2213 *
2214 * This routine computes the average of the four corner-node values
2215 * defining each face of a hexahedral cell:
2216 * - X-faces (perpendicular to X): face between (i-1,i) in X-dir
2217 * - Y-faces (perpendicular to Y): face between (j-1,j) in Y-dir
2218 * - Z-faces (perpendicular to Z): face between (k-1,k) in Z-dir
2219 *
2220 * @param[in] corner_arr Ghosted node-centered array (global indexing) from user->fda.
2221 * @param[out] faceX_arr Local array for X-faces sized [zm][ym][xm+1].
2222 * @param[out] faceY_arr Local array for Y-faces sized [zm][ym+1][xm].
2223 * @param[out] faceZ_arr Local array for Z-faces sized [zm+1][ym][xm].
2224 * @param[in] user User context containing DMDA 'fda' and GetOwnedCellRange.
2225 *
2226 * @return PetscErrorCode 0 on success, non-zero on failure.
2227 */
2229 PetscReal ***corner_arr,
2230 PetscReal ***faceX_arr,
2231 PetscReal ***faceY_arr,
2232 PetscReal ***faceZ_arr,
2233 UserCtx *user)
2234{
2235 PetscErrorCode ierr;
2236 DMDALocalInfo info;
2237
2238 PetscFunctionBeginUser;
2239
2241
2242 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2243
2244 // Determine owned-cell ranges based on corner-node ownership
2245 PetscInt xs, xm, ys, ym, zs, zm;
2246 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2247 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2248 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2249
2250 // Global exclusive end indices for cells
2251 PetscInt xe = xs + xm;
2252 PetscInt ye = ys + ym;
2253 PetscInt ze = zs + zm;
2254
2255 // --- X‐faces: loops k=zs..ze-1, j=ys..ye-1, i=xs..xe (xm+1 faces per row) ---
2256 for (PetscInt k = zs; k < ze; ++k) {
2257 PetscInt k_loc = k - zs;
2258 for (PetscInt j = ys; j < ye; ++j) {
2259 PetscInt j_loc = j - ys;
2260 for (PetscInt i = xs; i <= xe; ++i) {
2261 PetscInt i_loc = i - xs; // 0..xm
2262 // Average the four corners of the Y-Z face at X = i
2263 PetscReal sum = corner_arr[k ][j ][i]
2264 + corner_arr[k+1][j ][i]
2265 + corner_arr[k ][j+1][i]
2266 + corner_arr[k+1][j+1][i];
2267 faceX_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2268 }
2269 }
2270 }
2271
2272 // --- Y‐faces: loops k=zs..ze-1, j=ys..ye (ym+1 faces), i=xs..xe-1 ---
2273 for (PetscInt k = zs; k < ze; ++k) {
2274 PetscInt k_loc = k - zs;
2275 for (PetscInt j = ys; j <= ye; ++j) {
2276 PetscInt j_loc = j - ys; // 0..ym
2277 for (PetscInt i = xs; i < xe; ++i) {
2278 PetscInt i_loc = i - xs;
2279 // Average the four corners of the X-Z face at Y = j
2280 PetscReal sum = corner_arr[k ][j][i ]
2281 + corner_arr[k+1][j][i ]
2282 + corner_arr[k ][j][i+1]
2283 + corner_arr[k+1][j][i+1];
2284 faceY_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2285 }
2286 }
2287 }
2288
2289 // --- Z‐faces: loops k=zs..ze (zm+1), j=ys..ye-1, i=xs..xe-1 ---
2290 for (PetscInt k = zs; k <= ze; ++k) {
2291 PetscInt k_loc = k - zs;
2292 for (PetscInt j = ys; j < ye; ++j) {
2293 PetscInt j_loc = j - ys;
2294 for (PetscInt i = xs; i < xe; ++i) {
2295 PetscInt i_loc = i - xs;
2296 // Average the four corners of the X-Y face at Z = k
2297 PetscReal sum = corner_arr[k][j ][i ]
2298 + corner_arr[k][j ][i+1]
2299 + corner_arr[k][j+1][i ]
2300 + corner_arr[k][j+1][i+1];
2301 faceZ_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2302 }
2303 }
2304 }
2305
2307
2308 PetscFunctionReturn(0);
2309}
2310
2311#undef __FUNCT__
2312#define __FUNCT__ "InterpolateCornerToFaceCenter_Vector"
2313
2314/**
2315 * @brief Interpolates a vector field from corner nodes to all face centers.
2316 *
2317 * Identical to the scalar version, except it averages each component of the
2318 * Cmpnts struct at the four corner-nodes per face.
2319 *
2320 * @param[in] corner_arr Ghosted 3-component array (global node indices).
2321 * @param[out] faceX_arr Local array of Cmpnts for X-faces sized [zm][ym][xm+1].
2322 * @param[out] faceY_arr Local array of Cmpnts for Y-faces sized [zm][ym+1][xm].
2323 * @param[out] faceZ_arr Local array of Cmpnts for Z-faces sized [zm+1][ym][xm].
2324 * @param[in] user User context containing DMDA 'fda'.
2325 *
2326 * @return PetscErrorCode 0 on success.
2327 */
2329 Cmpnts ***corner_arr,
2330 Cmpnts ***faceX_arr,
2331 Cmpnts ***faceY_arr,
2332 Cmpnts ***faceZ_arr,
2333 UserCtx *user)
2334{
2335 PetscErrorCode ierr;
2336 DMDALocalInfo info;
2337 PetscMPIInt rank;
2338
2339 PetscFunctionBeginUser;
2340
2342
2343 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2345 "Rank %d starting InterpolateFieldFromCornerToFaceCenter_Vector.\n", rank);
2346
2347 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2348
2349 PetscInt xs, xm, ys, ym, zs, zm;
2350 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2351 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2352 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2353
2354 PetscInt xe = xs + xm;
2355 PetscInt ye = ys + ym;
2356 PetscInt ze = zs + zm;
2357
2358 // X-faces
2359 for (PetscInt k = zs; k < ze; ++k) {
2360 PetscInt k_loc = k - zs;
2361 for (PetscInt j = ys; j < ye; ++j) {
2362 PetscInt j_loc = j - ys;
2363 for (PetscInt i = xs; i <= xe; ++i) {
2364 PetscInt i_loc = i - xs;
2365 Cmpnts sum = {0,0,0};
2366 sum.x = corner_arr[k ][j ][i].x + corner_arr[k+1][j ][i].x
2367 + corner_arr[k ][j+1][i].x + corner_arr[k+1][j+1][i].x;
2368 sum.y = corner_arr[k ][j ][i].y + corner_arr[k+1][j ][i].y
2369 + corner_arr[k ][j+1][i].y + corner_arr[k+1][j+1][i].y;
2370 sum.z = corner_arr[k ][j ][i].z + corner_arr[k+1][j ][i].z
2371 + corner_arr[k ][j+1][i].z + corner_arr[k+1][j+1][i].z;
2372 faceX_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2373 faceX_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2374 faceX_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2375 }
2376 }
2377 }
2378
2380 "Rank %d x-face Interpolation complete.\n", rank);
2381
2382 // Y-faces
2383 for (PetscInt k = zs; k < ze; ++k) {
2384 PetscInt k_loc = k - zs;
2385 for (PetscInt j = ys; j <= ye; ++j) {
2386 PetscInt j_loc = j - ys;
2387 for (PetscInt i = xs; i < xe; ++i) {
2388 PetscInt i_loc = i - xs;
2389 Cmpnts sum = {0,0,0};
2390 sum.x = corner_arr[k ][j][i ].x + corner_arr[k+1][j][i ].x
2391 + corner_arr[k ][j][i+1].x + corner_arr[k+1][j][i+1].x;
2392 sum.y = corner_arr[k ][j][i ].y + corner_arr[k+1][j][i ].y
2393 + corner_arr[k ][j][i+1].y + corner_arr[k+1][j][i+1].y;
2394 sum.z = corner_arr[k ][j][i ].z + corner_arr[k+1][j][i ].z
2395 + corner_arr[k ][j][i+1].z + corner_arr[k+1][j][i+1].z;
2396 faceY_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2397 faceY_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2398 faceY_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2399 }
2400 }
2401 }
2402
2404 "Rank %d y-face Interpolation complete.\n", rank);
2405
2406 // Z-faces
2407 for (PetscInt k = zs; k <= ze; ++k) {
2408 PetscInt k_loc = k - zs;
2409 for (PetscInt j = ys; j < ye; ++j) {
2410 PetscInt j_loc = j - ys;
2411 for (PetscInt i = xs; i < xe; ++i) {
2412 PetscInt i_loc = i - xs;
2413 Cmpnts sum = {0,0,0};
2414 sum.x = corner_arr[k][j ][i ].x + corner_arr[k][j ][i+1].x
2415 + corner_arr[k][j+1][i ].x + corner_arr[k][j+1][i+1].x;
2416 sum.y = corner_arr[k][j ][i ].y + corner_arr[k][j ][i+1].y
2417 + corner_arr[k][j+1][i ].y + corner_arr[k][j+1][i+1].y;
2418 sum.z = corner_arr[k][j ][i ].z + corner_arr[k][j ][i+1].z
2419 + corner_arr[k][j+1][i ].z + corner_arr[k][j+1][i+1].z;
2420 faceZ_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2421 faceZ_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2422 faceZ_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2423 }
2424 }
2425 }
2426
2428 "Rank %d z-face Interpolation complete.\n", rank);
2429
2431 PetscFunctionReturn(0);
2432}
PetscErrorCode UnpackSwarmFields(PetscInt i, const PetscInt64 *PIDs, const PetscReal *weights, const PetscReal *positions, const PetscInt *cellIndices, PetscReal *velocities, PetscInt *LocStatus, PetscReal *diffusivity, Cmpnts *diffusivitygradient, PetscReal *psi, Particle *particle)
Initializes a Particle struct with data from DMSwarm fields.
PetscErrorCode GetScatterTargetInfo(UserCtx *user, const char *particleFieldName, DM *targetDM, PetscInt *expected_dof)
Determines the target Eulerian DM and expected DOF for scattering a given particle field.
PetscErrorCode NormalizeGridVectorByCount(DM countDM, Vec countVec, DM dataDM, Vec sumVec, Vec avgVec)
Normalizes a grid vector of sums by a grid vector of counts to produce an average.
PetscErrorCode GetPersistentLocalVector(UserCtx *user, const char *fieldName, Vec *localVec)
Retrieves the persistent local vector (e.g., lPsi, lUcat) for a given field name.
PetscErrorCode AccumulateParticleField(DM swarm, const char *particleFieldName, DM gridSumDM, Vec gridSumVec)
Accumulates a particle field (scalar or vector) into a target grid sum vector.
static PetscErrorCode ScatterParticleFieldToEulerField_Internal(UserCtx *user, const char *particleFieldName, DM targetDM, PetscInt expected_dof, Vec eulerFieldAverageVec)
Internal helper function to orchestrate the scatter operation (accumulate + normalize).
PetscErrorCode CalculateParticleCountPerCell(UserCtx *user)
Counts particles in each cell of the DMDA 'da' and stores the result in user->ParticleCount.
PetscErrorCode ScatterAllParticleFieldsToEulerFields(UserCtx *user)
Scatters a predefined set of particle fields to their corresponding Eulerian fields.
PetscErrorCode ScatterParticleFieldToEulerField(UserCtx *user, const char *particleFieldName, Vec eulerFieldAverageVec)
Scatters a particle field (scalar or vector) to the corresponding Eulerian field average.
PetscErrorCode InterpolateFieldFromCornerToCenter_Vector(Cmpnts ***field_arr, Cmpnts ***centfield_arr, UserCtx *user)
Interpolates a vector field from corner nodes to cell centers, respecting application conventions.
PetscErrorCode InterpolateCornerToFaceCenter_Vector(Cmpnts ***corner_arr, Cmpnts ***faceX_arr, Cmpnts ***faceY_arr, Cmpnts ***faceZ_arr, UserCtx *user)
Interpolates a vector field from corner nodes to all face centers.
PetscErrorCode InterpolateFieldFromCornerToCenter_Scalar(PetscReal ***field_arr, PetscReal ***centfield_arr, UserCtx *user)
Interpolates a scalar field from corner nodes to cell centers, respecting application conventions.
PetscErrorCode InterpolateAllFieldsToSwarm(UserCtx *user)
Interpolates all relevant fields from the DMDA to the DMSwarm.
static PetscErrorCode InterpolateEulerFieldToSwarmForParticle(const char *fieldName, void *fieldPtr, Particle *particle, void *swarmOut, PetscInt p, PetscInt blockSize)
Interpolates a single field (scalar or vector) for one particle, storing the result in a swarm array.
PetscErrorCode TrilinearInterpolation_Vector(const char *fieldName, Cmpnts ***fieldVec, PetscInt i, PetscInt j, PetscInt k, PetscReal a1, PetscReal a2, PetscReal a3, Cmpnts *vec)
Computes the trilinearly interpolated vector (e.g., velocity) at a given point.
PetscErrorCode TrilinearInterpolation_Scalar(const char *fieldName, PetscReal ***fieldScal, PetscInt i, PetscInt j, PetscInt k, PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *val)
Computes the trilinearly interpolated scalar at a given point.
PetscErrorCode PieceWiseLinearInterpolation_Scalar(const char *fieldName, PetscReal ***fieldScal, PetscInt iCell, PetscInt jCell, PetscInt kCell, PetscReal *val)
Retrieves the scalar value at the cell (iCell, jCell, kCell).
#define ERROR_MSG_BUFFER_SIZE
PetscErrorCode TestCornerToCenterInterpolation(UserCtx *user)
Tests the InterpolateFieldFromCornerToCenter function by reproducing the Cent vector.
PetscErrorCode InterpolateFieldFromCenterToCorner_Vector(Cmpnts ***centfield_arr, Cmpnts ***corner_arr, UserCtx *user)
Interpolates a vector field from cell centers to corner nodes.
static void ComputeTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w)
Computes the trilinear interpolation weights from the interpolation coefficients.
PetscErrorCode InterpolateCornerToFaceCenter_Scalar(PetscReal ***corner_arr, PetscReal ***faceX_arr, PetscReal ***faceY_arr, PetscReal ***faceZ_arr, UserCtx *user)
Interpolates a scalar field from corner nodes to all face centers.
PetscErrorCode InterpolateEulerFieldToSwarm(UserCtx *user, Vec fieldLocal_cellCentered, const char *fieldName, const char *swarmOutFieldName)
Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles, using a robust,...
PetscErrorCode PieceWiseLinearInterpolation_Vector(const char *fieldName, Cmpnts ***fieldVec, PetscInt iCell, PetscInt jCell, PetscInt kCell, Cmpnts *vec)
Retrieves the vector (Cmpnts) at the cell (iCell, jCell, kCell).
PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar(PetscReal ***centfield_arr, PetscReal ***corner_arr, UserCtx *user)
Interpolates a scalar field from cell centers to corner nodes.
#define TrilinearInterpolation(fieldName, fieldPtr, i, j, k, a1, a2, a3, outPtr)
Macro that calls either the scalar or vector trilinear interpolation function based on the type of th...
#define InterpolateFieldFromCenterToCorner(blockSize, centfield_ptr, corner_ptr, user_ctx)
Macro to dispatch to the correct scalar or vector center-to-corner function based on a runtime block ...
#define InterpolateFieldFromCornerToCenter(field, centfield, user)
Generic macro to call the appropriate interpolation function based on the field type.
PetscBool is_function_allowed(const char *functionName)
Checks if a given function is in the allow-list.
Definition logging.c:157
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
----— DEBUG ---------------------------------------— #define LOG_ALLOW(scope, level,...
Definition logging.h:267
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:45
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:46
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:200
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:746
PetscErrorCode LOG_FIELD_ANATOMY(UserCtx *user, const char *field_name, const char *stage_name)
Logs the anatomy of a specified field at key boundary locations, respecting the solver's specific gri...
Definition logging.c:1460
LogLevel get_log_level()
Retrieves the current logging level from the environment variable LOG_LEVEL.
Definition logging.c:39
#define LOG_LOOP_ALLOW_EXACT(scope, level, var, val, fmt,...)
Logs a custom message if a variable equals a specific value.
Definition logging.h:348
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:33
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:31
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:32
@ LOG_VERBOSE
Extremely detailed logs, typically for development use only.
Definition logging.h:34
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:737
PetscErrorCode GetOwnedCellRange(const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global, PetscInt *xm_cell_local)
Determines the global starting index and number of CELLS owned by the current processor in a specifie...
Definition setup.c:1756
Vec lDiffusivityGradient
Definition variables.h:759
PetscInt cell[3]
Definition variables.h:167
Vec lNvert
Definition variables.h:755
Vec lCellFieldAtCorner
Definition variables.h:764
Vec lPsi
Definition variables.h:801
PetscScalar x
Definition variables.h:101
PetscScalar z
Definition variables.h:101
Vec ParticleCount
Definition variables.h:800
Vec CellFieldAtCorner
Definition variables.h:764
Vec lUcont
Definition variables.h:755
Vec lUcat
Definition variables.h:755
PetscScalar y
Definition variables.h:101
Cmpnts weights
Definition variables.h:170
Vec Cent
Definition variables.h:776
Vec lDiffusivity
Definition variables.h:758
PetscInt64 PID
Definition variables.h:166
Vec Psi
Definition variables.h:801
A 3D point or vector with PetscScalar components.
Definition variables.h:100
Defines a particle's core properties for Lagrangian tracking.
Definition variables.h:165
User-defined context containing data specific to a single computational grid level.
Definition variables.h:728