PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
interpolation.c
Go to the documentation of this file.
1/**
2 * @file interpolation.c
3 * @brief Main program for DMSwarm interpolation using the fdf-curvIB method.
4 *
5 * Provides routines for interpolation between corner-based and center-based
6 * fields in the cell-centered DM (fda), plus partial usage examples for
7 * DMSwarm-based field sampling.
8 */
9
10#include "interpolation.h"
11
12// Number of weights used in certain trilinear interpolation examples
13#define NUM_WEIGHTS 8
14// Define a buffer size for error messages if not already available
15#ifndef ERROR_MSG_BUFFER_SIZE
16#define ERROR_MSG_BUFFER_SIZE 256 // Or use PETSC_MAX_PATH_LEN if appropriate
17#endif
18
19#undef __FUNCT__
20#define __FUNCT__ "InterpolateFieldFromCornerToCenter_Vector"
21/**
22 * @brief Internal helper implementation: `InterpolateFieldFromCornerToCenter_Vector()`.
23 * @details Local to this translation unit.
24 */
26 Cmpnts ***field_arr, /* Input: Ghosted local array view from user->fda (global node indices) */
27 Cmpnts ***centfield_arr, /* Output: Array view for cell-centered data (global indices) */
28 UserCtx *user)
29{
30 PetscErrorCode ierr;
31 DMDALocalInfo info;
32
33 PetscFunctionBeginUser;
34
35 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
36
37 // Get local and global grid dimensions
38 PetscInt xs = info.xs, xe = info.xs + info.xm;
39 PetscInt ys = info.ys, ye = info.ys + info.ym;
40 PetscInt zs = info.zs, ze = info.zs + info.zm;
41 PetscInt mx = info.mx, my = info.my, mz = info.mz;
42
43 // Determine loop bounds to compute for interior cells only, matching the code's convention.
44 // Start at index 1 if the process owns the global boundary at index 0.
45 PetscInt is = (xs == 0) ? 1 : xs;
46 PetscInt js = (ys == 0) ? 1 : ys;
47 PetscInt ks = (zs == 0) ? 1 : zs;
48
49 // Stop one cell short if the process owns the global boundary at the max index.
50 PetscInt ie = (xe == mx) ? xe - 1 : xe;
51 PetscInt je = (ye == my) ? ye - 1 : ye;
52 PetscInt ke = (ze == mz) ? ze - 1 : ze;
53
54 // Loop over the locally owned INTERIOR cells.
55 for (PetscInt k = ks; k < ke; k++) {
56 for (PetscInt j = js; j < je; j++) {
57 for (PetscInt i = is; i < ie; i++) {
58 // Calculate cell center value as the average of its 8 corner nodes
59 centfield_arr[k][j][i].x = 0.125 * (field_arr[k][j][i].x + field_arr[k][j-1][i].x +
60 field_arr[k-1][j][i].x + field_arr[k-1][j-1][i].x +
61 field_arr[k][j][i-1].x + field_arr[k][j-1][i-1].x +
62 field_arr[k-1][j][i-1].x + field_arr[k-1][j-1][i-1].x);
63
64 centfield_arr[k][j][i].y = 0.125 * (field_arr[k][j][i].y + field_arr[k][j-1][i].y +
65 field_arr[k-1][j][i].y + field_arr[k-1][j-1][i].y +
66 field_arr[k][j][i-1].y + field_arr[k][j-1][i-1].y +
67 field_arr[k-1][j][i-1].y + field_arr[k-1][j-1][i-1].y);
68
69 centfield_arr[k][j][i].z = 0.125 * (field_arr[k][j][i].z + field_arr[k][j-1][i].z +
70 field_arr[k-1][j][i].z + field_arr[k-1][j-1][i].z +
71 field_arr[k][j][i-1].z + field_arr[k][j-1][i-1].z +
72 field_arr[k-1][j][i-1].z + field_arr[k-1][j-1][i-1].z);
73 }
74 }
75 }
76
77 PetscFunctionReturn(0);
78}
79
80#undef __FUNCT__
81#define __FUNCT__ "InterpolateFieldFromCornerToCenter_Scalar"
82/**
83 * @brief Internal helper implementation: `InterpolateFieldFromCornerToCenter_Scalar()`.
84 * @details Local to this translation unit.
85 */
87 PetscReal ***field_arr, /* Input: Ghosted local array view from user->fda (global node indices) */
88 PetscReal ***centfield_arr, /* Output: Array view for cell-centered data (global indices) */
89 UserCtx *user)
90{
91 PetscErrorCode ierr;
92 DMDALocalInfo info;
93
94 PetscFunctionBeginUser;
95
96 ierr = DMDAGetLocalInfo(user->da, &info); CHKERRQ(ierr);
97
98 // Get local and global grid dimensions
99 PetscInt xs = info.xs, xe = info.xs + info.xm;
100 PetscInt ys = info.ys, ye = info.ys + info.ym;
101 PetscInt zs = info.zs, ze = info.zs + info.zm;
102 PetscInt mx = info.mx, my = info.my, mz = info.mz;
103
104 // Determine loop bounds to compute for interior cells only, matching the code's convention.
105 // Start at index 1 if the process owns the global boundary at index 0.
106 PetscInt is = (xs == 0) ? 1 : xs;
107 PetscInt js = (ys == 0) ? 1 : ys;
108 PetscInt ks = (zs == 0) ? 1 : zs;
109
110 // Stop one cell short if the process owns the global boundary at the max index.
111 PetscInt ie = (xe == mx) ? xe - 1 : xe;
112 PetscInt je = (ye == my) ? ye - 1 : ye;
113 PetscInt ke = (ze == mz) ? ze - 1 : ze;
114
115 // Loop over the locally owned INTERIOR cells.
116 for (PetscInt k = ks; k < ke; k++) {
117 for (PetscInt j = js; j < je; j++) {
118 for (PetscInt i = is; i < ie; i++) {
119 // Calculate cell center value as the average of its 8 corner nodes
120 centfield_arr[k][j][i] = 0.125 * (field_arr[k][j][i] + field_arr[k][j-1][i] +
121 field_arr[k-1][j][i] + field_arr[k-1][j-1][i] +
122 field_arr[k][j][i-1] + field_arr[k][j-1][i-1] +
123 field_arr[k-1][j][i-1] + field_arr[k-1][j-1][i-1]);
124 }
125 }
126 }
127
128 PetscFunctionReturn(0);
129}
130
131#undef __FUNCT__
132#define __FUNCT__ "TestCornerToCenterInterpolation"
133/**
134 * @brief Internal helper implementation: `TestCornerToCenterInterpolation()`.
135 * @details Local to this translation unit.
136 */
138{
139 PetscErrorCode ierr;
140 Vec lCoords, TestCent;
141 Cmpnts ***coor_arr, ***test_cent_arr;
142 PetscReal diff_norm;
143
144 PetscFunctionBeginUser;
145
146 // 1. Create a temporary vector to hold the result of our interpolation.
147 // It must have the same layout and size as the ground-truth user->Cent vector.
148 ierr = VecDuplicate(user->Cent, &TestCent); CHKERRQ(ierr);
149
150 // 2. Get the input (corner coordinates) and output (our test vector) arrays.
151 ierr = DMGetCoordinatesLocal(user->da, &lCoords); CHKERRQ(ierr);
152 ierr = DMDAVecGetArrayRead(user->fda, lCoords, &coor_arr); CHKERRQ(ierr);
153 ierr = DMDAVecGetArray(user->fda, TestCent, &test_cent_arr); CHKERRQ(ierr);
154
155 // 3. Call the generic interpolation macro.
156 // The macro will see that `coor_arr` is of type `Cmpnts***` and correctly
157 // call the `InterpolateFieldFromCornerToCenter_Vector` function.
158 ierr = InterpolateFieldFromCornerToCenter(coor_arr, test_cent_arr, user); CHKERRQ(ierr);
159
160 // 4. Restore the arrays.
161 ierr = DMDAVecRestoreArrayRead(user->fda, lCoords, &coor_arr); CHKERRQ(ierr);
162 ierr = DMDAVecRestoreArray(user->fda, TestCent, &test_cent_arr); CHKERRQ(ierr);
163
164 // 5. IMPORTANT: Assemble the vector so its values are communicated across processors
165 // and it's ready for global operations like VecNorm.
166 ierr = VecAssemblyBegin(TestCent); CHKERRQ(ierr);
167 ierr = VecAssemblyEnd(TestCent); CHKERRQ(ierr);
168
169 // 6. Compare the result with the ground truth.
170 // We compute TestCent = -1.0 * user->Cent + 1.0 * TestCent.
171 // This calculates the difference vector: TestCent - user->Cent.
172 ierr = VecAXPY(TestCent, -1.0, user->Cent); CHKERRQ(ierr);
173
174 // Now, compute the L2 norm of the difference vector. If the functions are
175 // identical, the norm should be zero (or very close due to floating point).
176 ierr = VecNorm(TestCent, NORM_2, &diff_norm); CHKERRQ(ierr);
177
178 // 7. Report the result and clean up.
179 if (diff_norm < 1.0e-12) {
180 LOG_ALLOW(GLOBAL,LOG_DEBUG,"[SUCCESS] Test passed. Norm of difference is %g.\n", (double)diff_norm);
181 } else {
182 LOG_ALLOW(GLOBAL,LOG_DEBUG, "[FAILURE] Test failed. Norm of difference is %g.\n", (double)diff_norm);
183 }
184
185 ierr = VecDestroy(&TestCent); CHKERRQ(ierr);
186
187 PetscFunctionReturn(0);
188}
189
190#undef __FUNCT__
191#define __FUNCT__ "InterpolateFieldFromCenterToCorner_Vector"
192/**
193 * @brief Internal helper implementation: `InterpolateFieldFromCenterToCorner_Vector()`.
194 * @details Local to this translation unit.
195 */
197 Cmpnts ***centfield_arr, /* Input: Ghosted local array from Vec (read) */
198 Cmpnts ***corner_arr, /* Output: global array from Vec (write) */
199 UserCtx *user)
200{
201 PetscErrorCode ierr;
202 DMDALocalInfo info;
203 PetscMPIInt rank;
205 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
206 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
207
208 // Node ownership range (GLOBAL indices)
209 PetscInt xs_node = info.xs, xm_node = info.xm, xe_node = xs_node + xm_node;
210 PetscInt ys_node = info.ys, ym_node = info.ym, ye_node = ys_node + ym_node;
211 PetscInt zs_node = info.zs, zm_node = info.zm, ze_node = zs_node + zm_node;
212
213 PetscInt nCellsX = info.mx - 2; // Number of cells in x-direction
214 PetscInt nCellsY = info.my - 2; // Number of cells in y-direction
215 PetscInt nCellsZ = info.mz - 2; // Number of cells in z-direction
216
217
218 // Global grid dimensions (used for valid cell check)
219 PetscInt IM = info.mx - 1; // Total nodes in i-direction
220 PetscInt JM = info.my - 1; // Total nodes in j-direction
221 PetscInt KM = info.mz - 1; // Total nodes in k-direction
222
224 "[Rank %d] Starting -- Node ownership k=%d..%d, j=%d..%d, i=%d..%d\n",
225 rank, zs_node, ze_node-1, ys_node, ye_node-1, xs_node, xe_node-1);
226
227 // Loop over the GLOBAL indices of the NODES owned by this processor
228 for (PetscInt k = zs_node; k < ze_node; k++) {
229 for (PetscInt j = ys_node; j < ye_node; j++) {
230 for (PetscInt i = xs_node; i < xe_node; i++) {
231 Cmpnts sum = {0.0, 0.0, 0.0};
232 PetscInt count = 0;
233
234 // DEBUG 1 TEST
235 /*
236 if(rank == 1 && i == 24 && j == 12 && k == 49){
237 PetscInt i_cell_A = i - 1;
238 PetscInt i_cell_B = i;
239
240 Cmpnts ucat_A = centfield_arr[k][j][i_cell_A]; // 23
241 Cmpnts ucat_B = centfield_arr[k][j][i_cell_B]; // 24 (out-of-bounds if IM=25)
242
243 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Read { Valid Cell }Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f) and {Out-of-Bounds Cell}Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
244 rank, k, j, i,
245 k, j, i_cell_A, ucat_A.x, ucat_A.y, ucat_A.z,
246 k, j, i_cell_B, ucat_B.x, ucat_B.y, ucat_B.z);
247
248 }
249 */
250
251 // Skip processing the unused last node in each dimension.
252 if(i >= IM || j >= JM || k >= KM){
253 continue;
254 }
255 // Loop over the 8 potential cells surrounding node N(k,j,i) and accumulate values.
256 // The index offsets correspond to the relative position of the indices(shifted) that represent cell-centered field values of cells that share the node N(k,j,i) as a corner
257 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
258 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
259 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
260
261 // These are still GLOBAL cell indices
262 PetscInt global_cell_k = k + dk_offset;
263 PetscInt global_cell_j = j + dj_offset;
264 PetscInt global_cell_i = i + di_offset;
265
266 // Check if this corresponds to a valid GLOBAL cell index
267 if (global_cell_i >= 0 && global_cell_i < nCellsX &&
268 global_cell_j >= 0 && global_cell_j < nCellsY &&
269 global_cell_k >= 0 && global_cell_k < nCellsZ)
270 {
271 Cmpnts cell_val = centfield_arr[global_cell_k + 1][global_cell_j + 1][global_cell_i + 1];
272
273 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,49,"[Rank %d] successful read from [%d][%d][%d] -> (%.2f, %.2f, %.2f)\n",
274 rank,global_cell_k,global_cell_j,global_cell_i,cell_val.x, cell_val.y, cell_val.z);
275
276 sum.x += cell_val.x;
277 sum.y += cell_val.y;
278 sum.z += cell_val.z;
279 count++;
280 }
281 }
282 }
283 }
284
285 PetscInt i_global_write = i; // Global index in GLOBAL array.
286 PetscInt j_global_write = j;
287 PetscInt k_global_write = k;
288
289 // We write directly into the array using the global loop indices.
290 if (count > 0) {
291 corner_arr[k_global_write][j_global_write][i_global_write].x = sum.x / (PetscReal)count;
292 corner_arr[k_global_write][j_global_write][i_global_write].y = sum.y / (PetscReal)count;
293 corner_arr[k_global_write][j_global_write][i_global_write].z = sum.z / (PetscReal)count;
294 } else {
295 // This case should ideally not happen for a valid owned node, but as a failsafe:
296 corner_arr[k_global_write][j_global_write][i_global_write] = (Cmpnts){0.0, 0.0, 0.0};
297 }
298
299 // DEBUG 2
300 /*
301 if(rank == 1){
302 if(i == 11 && j == 11 && k == 49){
303 Cmpnts ucat_node = corner_arr[k][j][i];
304 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
305 rank, k, j, i,
306 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
307 }
308 }
309
310 if(rank == 0 && i == 24 && j == 12 && k == 0){
311 Cmpnts ucat_node = corner_arr[k][j][i];
312 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
313 rank, k, j, i,
314 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
315 }
316 */
317 // LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,48,"[Rank %d] Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
318 }
319 }
320 }
322 return 0;
323}
324
325#undef __FUNCT__
326#define __FUNCT__ "InterpolateFieldFromCenterToCorner_Scalar"
327/**
328 * @brief Internal helper implementation: `InterpolateFieldFromCenterToCorner_Scalar()`.
329 * @details Local to this translation unit.
330 */
332 PetscReal ***centfield_arr, /* Input: Ghosted local array from Vec (read) */
333 PetscReal ***corner_arr, /* Output: global array from Vec (write) */
334 UserCtx *user)
335{
336 PetscErrorCode ierr;
337 DMDALocalInfo info;
338 PetscMPIInt rank;
340 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
341 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
342
343 // Node ownership range (GLOBAL indices)
344 PetscInt xs_node = info.xs, xm_node = info.xm, xe_node = xs_node + xm_node;
345 PetscInt ys_node = info.ys, ym_node = info.ym, ye_node = ys_node + ym_node;
346 PetscInt zs_node = info.zs, zm_node = info.zm, ze_node = zs_node + zm_node;
347
348 PetscInt nCellsX = info.mx - 2; // Number of cells in x-direction
349 PetscInt nCellsY = info.my - 2; // Number of cells in y-direction
350 PetscInt nCellsZ = info.mz - 2; // Number of cells in z-direction
351
352
353 // Global grid dimensions (used for valid cell check)
354 PetscInt IM = info.mx - 1; // Total nodes in i-direction
355 PetscInt JM = info.my - 1; // Total nodes in j-direction
356 PetscInt KM = info.mz - 1; // Total nodes in k-direction
357
359 "[Rank %d] Starting -- Node ownership k=%d..%d, j=%d..%d, i=%d..%d\n",
360 rank, zs_node, ze_node-1, ys_node, ye_node-1, xs_node, xe_node-1);
361
362 // Loop over the GLOBAL indices of the NODES owned by this processor
363 for (PetscInt k = zs_node; k < ze_node; k++) {
364 for (PetscInt j = ys_node; j < ye_node; j++) {
365 for (PetscInt i = xs_node; i < xe_node; i++) {
366 PetscReal sum = 0.0;
367 PetscInt count = 0;
368
369 // DEBUG 1 TEST
370 /*
371 if(rank == 1 && i == 24 && j == 12 && k == 49){
372 PetscInt i_cell_A = i - 1;
373 PetscInt i_cell_B = i;
374
375 Cmpnts ucat_A = centfield_arr[k][j][i_cell_A]; // 23
376 Cmpnts ucat_B = centfield_arr[k][j][i_cell_B]; // 24 (out-of-bounds if IM=25)
377
378 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Read { Valid Cell }Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f) and {Out-of-Bounds Cell}Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
379 rank, k, j, i,
380 k, j, i_cell_A, ucat_A.x, ucat_A.y, ucat_A.z,
381 k, j, i_cell_B, ucat_B.x, ucat_B.y, ucat_B.z);
382
383 }
384 */
385
386 // Skip processing the unused last node in each dimension.
387 if(i >= IM || j >= JM || k >= KM){
388 continue;
389 }
390 // Loop over the 8 potential cells surrounding node N(k,j,i) and accumulate values.
391 // The index offsets correspond to the relative position of the indices(shifted) that represent cell-centered field values of cells that share the node N(k,j,i) as a corner
392 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
393 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
394 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
395
396 // These are still GLOBAL cell indices
397 PetscInt global_cell_k = k + dk_offset;
398 PetscInt global_cell_j = j + dj_offset;
399 PetscInt global_cell_i = i + di_offset;
400
401 // Check if this corresponds to a valid GLOBAL cell index
402 if (global_cell_i >= 0 && global_cell_i < nCellsX &&
403 global_cell_j >= 0 && global_cell_j < nCellsY &&
404 global_cell_k >= 0 && global_cell_k < nCellsZ)
405 {
406 PetscReal cell_val = centfield_arr[global_cell_k + 1][global_cell_j + 1][global_cell_i + 1];
407
408 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,49,"[Rank %d] successful read from [%d][%d][%d] -> (%.2f)\n",
409 rank,global_cell_k,global_cell_j,global_cell_i,cell_val);
410
411 sum += cell_val;
412 count++;
413 }
414 }
415 }
416 }
417
418 PetscInt i_global_write = i; // Global index in GLOBAL array.
419 PetscInt j_global_write = j;
420 PetscInt k_global_write = k;
421
422 // We write directly into the array using the global loop indices.
423 if (count > 0) {
424 corner_arr[k_global_write][j_global_write][i_global_write] = sum / (PetscReal)count;
425 } else {
426 // This case should ideally not happen for a valid owned node, but as a failsafe:
427 corner_arr[k_global_write][j_global_write][i_global_write] = 0.0;
428 }
429
430 // DEBUG 2
431 /*
432 if(rank == 1){
433 if(i == 11 && j == 11 && k == 49){
434 Cmpnts ucat_node = corner_arr[k][j][i];
435 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
436 rank, k, j, i,
437 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
438 }
439 }
440
441 if(rank == 0 && i == 24 && j == 12 && k == 0){
442 Cmpnts ucat_node = corner_arr[k][j][i];
443 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
444 rank, k, j, i,
445 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
446 }
447 */
448 // LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,48,"[Rank %d] Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
449 }
450 }
451 }
453 return 0;
454}
455
456#undef __FUNCT
457#define __FUNCT "PiecWiseLinearInterpolation_Scalar"
458/**
459 * @brief Internal helper implementation: `PieceWiseLinearInterpolation_Scalar()`.
460 * @details Local to this translation unit.
461 */
463 const char *fieldName,
464 PetscReal ***fieldScal,
465 PetscInt iCell,
466 PetscInt jCell,
467 PetscInt kCell,
468 PetscReal *val)
469{
470 PetscFunctionBegin;
471 *val = fieldScal[kCell][jCell][iCell];
472
473 // Optional logging
475 "Field '%s' at (i=%d, j=%d, k=%d) => val=%.6f\n",
476 fieldName, iCell, jCell, kCell, *val);
477
478 PetscFunctionReturn(0);
479}
480
481#undef __FUNCT
482#define __FUNCT "PiecWiseLinearInterpolation_Vector"
483/**
484 * @brief Internal helper implementation: `PieceWiseLinearInterpolation_Vector()`.
485 * @details Local to this translation unit.
486 */
488 const char *fieldName,
489 Cmpnts ***fieldVec,
490 PetscInt iCell,
491 PetscInt jCell,
492 PetscInt kCell,
493 Cmpnts *vec)
494{
495 PetscFunctionBegin;
496 vec->x = fieldVec[kCell][jCell][iCell].x;
497 vec->y = fieldVec[kCell][jCell][iCell].y;
498 vec->z = fieldVec[kCell][jCell][iCell].z;
499
500 // Optional logging
502 "Field '%s' at (i=%d, j=%d, k=%d) => (x=%.6f, y=%.6f, z=%.6f)\n",
503 fieldName, iCell, jCell, kCell, vec->x, vec->y, vec->z);
504
505 PetscFunctionReturn(0);
506}
507
508
509#undef __FUNCT
510#define __FUNCT "ComputeTrilinearWeights"
511
512/**
513 * @brief Internal helper implementation: `ComputeTrilinearWeights()`.
514 * @details Local to this translation unit.
515 */
516static inline void ComputeTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w) {
517 LOG_ALLOW(GLOBAL, LOG_VERBOSE, "Computing weights for a1=%f, a2=%f, a3=%f.\n", a1, a2, a3);
518
519 // Ensure a1, a2, a3 are within [0,1]
520 a1 = PetscMax(0.0, PetscMin(1.0, a1));
521 a2 = PetscMax(0.0, PetscMin(1.0, a2));
522 a3 = PetscMax(0.0, PetscMin(1.0, a3));
523
524 const PetscReal oa1 = 1.0 - a1;
525 const PetscReal oa2 = 1.0 - a2;
526 const PetscReal oa3 = 1.0 - a3;
527
528 w[0] = oa1 * oa2 * oa3; /* cornerOffsets[0] => (0,0,0) */
529 w[1] = a1 * oa2 * oa3; /* cornerOffsets[1] => (1,0,0) */
530 w[2] = oa1 * a2 * oa3; /* cornerOffsets[2] => (0,1,0) */
531 w[3] = a1 * a2 * oa3; /* cornerOffsets[3] => (1,1,0) */
532 w[4] = oa1 * oa2 * a3; /* cornerOffsets[4] => (0,0,1) */
533 w[5] = a1 * oa2 * a3; /* cornerOffsets[5] => (1,0,1) */
534 w[6] = oa1 * a2 * a3; /* cornerOffsets[6] => (0,1,1) */
535 w[7] = a1 * a2 * a3; /* cornerOffsets[7] => (1,1,1) */
536
537 // Log the computed weights for debugging
538 LOG_ALLOW(LOCAL,LOG_VERBOSE, "Weights computed - "
539 "w0=%f, w1=%f, w2=%f, w3=%f, w4=%f, w5=%f, w6=%f, w7=%f. \n",
540 w[0], w[1], w[2], w[3], w[4], w[5], w[6], w[7]);
541}
542
543/**
544 * @brief Unclamped trilinear weights for boundary extrapolation.
545 * @details Identical to ComputeTrilinearWeights() but without clamping
546 * a1, a2, a3 to [0,1]. Allows weights outside [0,1] for linear
547 * extrapolation at non-periodic boundaries. Weights still sum to 1.0.
548 */
549static inline void ComputeTrilinearWeightsUnclamped(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w) {
550 LOG_ALLOW(GLOBAL, LOG_VERBOSE, "Computing unclamped weights for a1=%f, a2=%f, a3=%f.\n", a1, a2, a3);
551
552 const PetscReal oa1 = 1.0 - a1;
553 const PetscReal oa2 = 1.0 - a2;
554 const PetscReal oa3 = 1.0 - a3;
555
556 w[0] = oa1 * oa2 * oa3;
557 w[1] = a1 * oa2 * oa3;
558 w[2] = oa1 * a2 * oa3;
559 w[3] = a1 * a2 * oa3;
560 w[4] = oa1 * oa2 * a3;
561 w[5] = a1 * oa2 * a3;
562 w[6] = oa1 * a2 * a3;
563 w[7] = a1 * a2 * a3;
564
565 LOG_ALLOW(LOCAL, LOG_VERBOSE, "Unclamped weights - "
566 "w0=%f, w1=%f, w2=%f, w3=%f, w4=%f, w5=%f, w6=%f, w7=%f.\n",
567 w[0], w[1], w[2], w[3], w[4], w[5], w[6], w[7]);
568}
569
570#undef __FUNCT
571#define __FUNCT "TrilinearInterpolation_Scalar"
572
573/**
574 * @brief Internal helper implementation: `TrilinearInterpolation_Scalar()`.
575 * @details Local to this translation unit.
576 */
578 const char *fieldName,
579 PetscReal ***fieldScal,
580 PetscInt i,
581 PetscInt j,
582 PetscInt k,
583 PetscReal a1,
584 PetscReal a2,
585 PetscReal a3,
586 PetscReal *val)
587{
588 PetscFunctionBegin; // PETSc macro for error/stack tracing
589
590 // Compute the 8 corner weights
591 PetscReal wcorner[8];
592 ComputeTrilinearWeights(a1, a2, a3, wcorner);
593
594 // Offsets for cell corners
595 PetscInt i1 = i + 1;
596 PetscInt j1 = j + 1;
597 PetscInt k1 = k + 1;
598
599 // Initialize the output scalar
600 PetscReal sum = 0.0;
601
602 // Corner 0 => (i, j, k)
603 sum += wcorner[0] * fieldScal[k ][j ][i ];
604 // Corner 1 => (i+1, j, k)
605 sum += wcorner[1] * fieldScal[k ][j ][i1];
606 // Corner 2 => (i, j+1, k)
607 sum += wcorner[2] * fieldScal[k ][j1][i ];
608 // Corner 3 => (i+1, j+1, k)
609 sum += wcorner[3] * fieldScal[k ][j1][i1];
610 // Corner 4 => (i, j, k+1)
611 sum += wcorner[4] * fieldScal[k1][j ][i ];
612 // Corner 5 => (i+1, j, k+1)
613 sum += wcorner[5] * fieldScal[k1][j ][i1];
614 // Corner 6 => (i, j+1, k+1)
615 sum += wcorner[6] * fieldScal[k1][j1][i ];
616 // Corner 7 => (i+1, j+1, k+1)
617 sum += wcorner[7] * fieldScal[k1][j1][i1];
618
619 *val = sum;
620
621 // Logging (optional)
623 "Field '%s' at (i=%d, j=%d, k=%d), "
624 "a1=%.6f, a2=%.6f, a3=%.6f -> val=%.6f.\n",
625 fieldName, i, j, k, a1, a2, a3, *val);
626
627 // LOG_ALLOW_SYNC(GLOBAL, LOG_INFO,
628 // "TrilinearInterpolation_Scalar: Completed interpolation for field '%s' across local cells.\n",
629 // fieldName);
630
631 PetscFunctionReturn(0);
632}
633
634
635#undef __FUNCT
636#define __FUNCT "TrilinearInterpolation_Vector"
637/**
638 * @brief Internal helper implementation: `TrilinearInterpolation_Vector()`.
639 * @details Local to this translation unit.
640 */
642 const char *fieldName,
643 Cmpnts ***fieldVec, /* 3D array [k][j][i], dimension [mz][my][mx] */
644 PetscInt i, // local cell index i
645 PetscInt j, // local cell index j
646 PetscInt k, // local cell index k
647 PetscReal a1,
648 PetscReal a2,
649 PetscReal a3,
650 Cmpnts *vec)
651{
652 PetscFunctionBegin; // PETSc macro for error/stack tracing
653
654 // Compute the 8 corner weights
655 PetscErrorCode ierr;
656 PetscReal wcorner[8];
657 PetscMPIInt rank;
658
659 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
660
661 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] Computing Trilinear Weights.\n",rank);
662 ComputeTrilinearWeights(a1, a2, a3, wcorner);
663
664 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] Trilinear weights computed for local cell %d,%d,%d.\n",rank,i,j,k);
665
666 // For partial interpolation, we'll keep track of how many corners are valid
667 // and how much sum of weights is used. Then we do a final normalization.
668 PetscReal sumW = 0.0;
669 Cmpnts accum = {0.0, 0.0, 0.0};
670
671 // The eight corner indices, with their weights:
672 // corners: (i,j,k), (i+1,j,k), (i,j+1,k), (i+1,j+1,k), etc.
673 // We store them in an array to iterate cleanly.
674 const PetscInt cornerOffsets[8][3] = {
675 {0, 0, 0},
676 {1, 0, 0},
677 {0, 1, 0},
678 {1, 1, 0},
679 {0, 0, 1},
680 {1, 0, 1},
681 {0, 1, 1},
682 {1, 1, 1}
683 };
684
685 // Weighted partial sum
686 for (PetscInt c = 0; c < 8; c++) {
687 const PetscInt di = cornerOffsets[c][0];
688 const PetscInt dj = cornerOffsets[c][1];
689 const PetscInt dk = cornerOffsets[c][2];
690 PetscInt iC = i + di;
691 PetscInt jC = j + dj;
692 PetscInt kC = k + dk;
693
694 /*
695 // skip if out of domain
696 // (Assuming you know global domain is [0..mx), [0..my), [0..mz).)
697 if (iC < 0 || iC >= (PetscInt)userGlobalMx ||
698 jC < 0 || jC >= (PetscInt)userGlobalMy ||
699 kC < 0 || kC >= (PetscInt)userGlobalMz)
700 {
701 // skip this corner
702 continue;
703 }
704
705 */
706
707 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] %s[%d][%d][%d] = (%.4f,%.4f,%.4f).\n",rank,fieldName,kC,jC,iC,fieldVec[kC][jC][iC].x,fieldVec[kC][jC][iC].y,fieldVec[kC][jC][iC].z);
708
709 // Otherwise, accumulate
710 accum.x += wcorner[c] * fieldVec[kC][jC][iC].x;
711 accum.y += wcorner[c] * fieldVec[kC][jC][iC].y;
712 accum.z += wcorner[c] * fieldVec[kC][jC][iC].z;
713 sumW += wcorner[c];
714 }
715
716 // If sumW=0 => out-of-range or zero weighting => set (0,0,0)
717 if (sumW > 1.0e-14) {
718 vec->x = accum.x / sumW;
719 vec->y = accum.y / sumW;
720 vec->z = accum.z / sumW;
721 } else {
722 vec->x = 0.0; vec->y = 0.0; vec->z = 0.0;
723 }
724
725 PetscFunctionReturn(0);
726}
727
728
729#undef __FUNCT
730#define __FUNCT "InterpolateEulerFieldToSwarmForParticle"
731/**
732 * @brief Internal helper implementation: `InterpolateEulerFieldToSwarmForParticle()`.
733 * @details Local to this translation unit.
734 */
735static inline PetscErrorCode InterpolateEulerFieldToSwarmForParticle(
736 const char *fieldName,
737 void *fieldPtr, /* typed Pointer => either (PetscReal***) or (Cmpnts***) */
738 Particle *particle, /* particle struct containing Cell ID and weight information. */
739 void *swarmOut, /* typed Pointer => (PetscReal*) or (Cmpnts*) or dof=3 array */
740 PetscInt p, /* particle index */
741 PetscInt blockSize) /* dof=1 => scalar, dof=3 => vector */
742{
743 PetscErrorCode ierr;
744 PetscFunctionBegin;
745
746 PetscInt iCell = particle->cell[0];
747 PetscInt jCell = particle->cell[1];
748 PetscInt kCell = particle->cell[2];
749 PetscReal a1 = particle->weights.x;
750 PetscReal a2 = particle->weights.y;
751 PetscReal a3 = particle->weights.z;
752
754
755 // Optional logging at start
757 "field='%s', blockSize=%d, "
758 "cell IDs=(%d,%d,%d), weights=(%.4f,%.4f,%.4f)\n",
759 fieldName, blockSize, iCell, jCell, kCell, a1, a2, a3);
760
761 /*
762 If blockSize=1, we PetscInterpret the fieldPtr as a 3D array of PetscReal (scalar).
763 If blockSize=3, we PetscInterpret the fieldPtr as a 3D array of Cmpnts (vector).
764 */
765 if (blockSize == 1) {
766 /* Scalar field: Cast fieldPtr to (PetscReal ***). */
767 PetscReal ***fieldScal = (PetscReal ***) fieldPtr;
768 PetscReal val;
769
770 // Currently using trilinear.
771 ierr = TrilinearInterpolation(fieldName, fieldScal,
772 iCell, jCell, kCell,
773 a1, a2, a3,
774 &val);
775 CHKERRQ(ierr);
776
777 // Alternative (commented) call to PiecewiseLinearInterpolation (zeroth order) :
778 // PetscErrorCode ierr = PiecewiseLinearInterpolation(fieldName,
779 // fieldScal,
780 // iCell, jCell, kCell,
781 // &val);
782 // CHKERRQ(ierr);
783
784 // Write the scalar result to the swarm output at index [p].
785 ((PetscReal*)swarmOut)[p] = val;
786
788 "field='%s', result=%.6f "
789 "stored at swarmOut index p=%d.\n", fieldName, val, (PetscInt)p);
790 }
791 else if (blockSize == 3) {
792 /* Vector field: Cast fieldPtr to (Cmpnts ***). */
793 Cmpnts ***fieldVec = (Cmpnts ***) fieldPtr;
794 Cmpnts vec;
795
796
797
798
799 // Piecewise interpolation (zeroth order).
800 // PetscErrorCode ierr = PieceWiseLinearInterpolation(fieldName,
801 // fieldVec,
802 // iCell, jCell, kCell,
803 // &vec);
804 // CHKERRQ(ierr);
805
806 // Alternative (commented) call to trilinear:
807 ierr = TrilinearInterpolation(fieldName, fieldVec,
808 iCell, jCell, kCell,
809 a1, a2, a3,
810 &vec);
811 CHKERRQ(ierr);
812
813 // If swarmOut is an array of 3 reals per particle:
814 ((PetscReal*)swarmOut)[3*p + 0] = vec.x;
815 ((PetscReal*)swarmOut)[3*p + 1] = vec.y;
816 ((PetscReal*)swarmOut)[3*p + 2] = vec.z;
817
819 "field='%s', result=(%.6f,%.6f,%.6f) "
820 "stored at swarmOut[3p..3p+2], p=%d.\n",
821 fieldName, vec.x, vec.y, vec.z, (PetscInt)p);
822
823 /*
824 If you store the vector result as a Cmpnts in the swarm, do instead:
825 ((Cmpnts*)swarmOut)[p] = vec;
826 but ensure your DMSwarm field is sized for a Cmpnts struct.
827 */
828 }
829 else {
830 /* If blockSize isn't 1 or 3, we raise an error. */
831 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP,
832 "InterpolateEulerFieldToSwarmForParticle: only blockSize=1 or 3 supported, got %d.",
833 (PetscInt)blockSize);
834 }
835
837 PetscFunctionReturn(0);
838}
839
840#undef __FUNCT__
841#define __FUNCT__ "InterpolateEulerFieldFromCenterToSwarm"
842
843/**
/**
 * @brief Direct cell-center trilinear interpolation (second-order on curvilinear grids).
 * @details For each particle, determines the 8 nearest cell centers via octant
 *          detection, constructs a dual cell, computes face-distance-based
 *          trilinear weights, and interpolates directly from cell-centered data.
 *          At non-periodic boundaries where the dual cell cannot be fully formed,
 *          octant clamping with unclamped trilinear extrapolation preserves
 *          second-order accuracy. Periodic boundaries use ghost cell data directly.
 *          No intermediate corner staging or extra ghost exchange is needed.
 *
 * @param user                    Simulation context: grids (da/fda), boundary face
 *                                descriptors, cell-center coordinates (lCent), swarm.
 * @param fieldLocal_cellCentered Ghosted LOCAL vector of cell-centered data; its
 *                                block size must be 1 (scalar) or 3 (vector).
 * @param fieldName               Label passed to the trilinear kernels and logging.
 * @param swarmOutFieldName       DMSwarm field (bs-matched) that receives the result.
 *
 * @return 0 on success, PETSc error code otherwise.
 *
 * NOTE(review): the defining signature line of this function is not visible in this
 * view of the file (the parameter list below is its continuation). The dispatcher
 * later in this file calls it as InterpolateEulerFieldFromCenterToSwarm — confirm
 * against the repository copy.
 */
    UserCtx *user,
    Vec fieldLocal_cellCentered,
    const char *fieldName,
    const char *swarmOutFieldName)
{
  PetscErrorCode ierr;
  DM swarm = user->swarm;
  PetscInt bs;                 // block size of the input field: 1 (scalar) or 3 (vector)
  DMDALocalInfo info;          // local/ghost layout of user->fda
  PetscMPIInt rank;
  void *fieldPtr = NULL;       // raw array view of the field; cast per bs in Step 8
  Cmpnts ***cent = NULL;       // cell-center coordinates (from user->lCent)

  PetscInt *cellIDs = NULL;    // per-particle host-cell indices ("DMSwarm_CellID")
  PetscReal *weights = NULL;   // per-particle parametric weights in the host cell
  PetscInt64 *pids = NULL;     // particle IDs, used for diagnostics
  void *swarmOut = NULL;       // output swarm field (bs reals per particle)
  PetscReal *pos = NULL;       // particle positions
  PetscInt *status = NULL;     // particle location status
  PetscInt nLocal;             // number of particles owned by this rank

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
  ierr = VecGetBlockSize(fieldLocal_cellCentered, &bs); CHKERRQ(ierr);
  if (bs != 1 && bs != 3) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "BlockSize must be 1 or 3.");

  LOG_ALLOW(GLOBAL, LOG_INFO, "Starting direct cell-center interpolation for field '%s'...\n", fieldName);

  /* Number of physical cells in each direction (node counts minus the two
     boundary node layers, per this file's grid convention). */
  PetscInt nCellsX = info.mx - 2;
  PetscInt nCellsY = info.my - 2;
  PetscInt nCellsZ = info.mz - 2;

  /* Periodic flags per direction (read off the negative-side face descriptors) */
  PetscBool x_periodic = (user->boundary_faces[BC_FACE_NEG_X].mathematical_type == PERIODIC);
  PetscBool y_periodic = (user->boundary_faces[BC_FACE_NEG_Y].mathematical_type == PERIODIC);
  PetscBool z_periodic = (user->boundary_faces[BC_FACE_NEG_Z].mathematical_type == PERIODIC);

  /* Get read-only arrays for the cell-centered field and cell center coordinates.
     Vector data (bs==3) lives on user->fda; scalar data (bs==1) on user->da. */
  DM dm_field = (bs == 3) ? user->fda : user->da;
  ierr = DMDAVecGetArrayRead(dm_field, fieldLocal_cellCentered, &fieldPtr); CHKERRQ(ierr);
  ierr = DMDAVecGetArrayRead(user->fda, user->lCent, (void *)&cent); CHKERRQ(ierr);

  /* Retrieve swarm fields */
  ierr = DMSwarmGetLocalSize(swarm, &nLocal); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "position", NULL, NULL, (void**)&pos); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "DMSwarm_location_status", NULL, NULL, (void**)&status); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);

  /* Loop over each local particle */
  for (PetscInt p = 0; p < nLocal; p++) {

    Particle particle;
    ierr = UnpackSwarmFields(p, pids, weights, pos, cellIDs, NULL, status, NULL, NULL, NULL, &particle); CHKERRQ(ierr);

    /* Step 1: Get host cell indices and existing weights (parametric coordinates
       of the particle inside its host cell, each in [0,1]). */
    PetscInt ci = particle.cell[0];
    PetscInt cj = particle.cell[1];
    PetscInt ck = particle.cell[2];
    PetscReal a1 = particle.weights.x;
    PetscReal a2 = particle.weights.y;
    PetscReal a3 = particle.weights.z;

    /* Step 2: Determine octant. Step back one cell in a direction when the
       particle sits in the lower half (weight < 0.5) of its host cell, so that
       (bi,bj,bk) is the base cell of the 2x2x2 dual-cell stencil. */
    PetscInt oi = (a1 < 0.5) ? -1 : 0;
    PetscInt oj = (a2 < 0.5) ? -1 : 0;
    PetscInt ok = (a3 < 0.5) ? -1 : 0;
    PetscInt bi = ci + oi;
    PetscInt bj = cj + oj;
    PetscInt bk = ck + ok;

    /* Step 3: Clamp octant at non-periodic boundaries. Clamping pulls the
       stencil back inside the physical cell range; the flag switches Step 8
       to unclamped (extrapolating) trilinear weights. */
    PetscBool needs_unclamped = PETSC_FALSE;

    if (!x_periodic) {
      if (bi < 0) { bi = 0; needs_unclamped = PETSC_TRUE; }
      if (bi + 1 >= nCellsX) { bi = nCellsX - 2; needs_unclamped = PETSC_TRUE; }
    }
    if (!y_periodic) {
      if (bj < 0) { bj = 0; needs_unclamped = PETSC_TRUE; }
      if (bj + 1 >= nCellsY) { bj = nCellsY - 2; needs_unclamped = PETSC_TRUE; }
    }
    if (!z_periodic) {
      if (bk < 0) { bk = 0; needs_unclamped = PETSC_TRUE; }
      if (bk + 1 >= nCellsZ) { bk = nCellsZ - 2; needs_unclamped = PETSC_TRUE; }
    }

    /* Step 4: Bounds check — dual cell stencil must fit in ghosted region */
    if (bi + 1 < info.gxs || bi + 1 >= info.gxs + info.gxm - 1 ||
        bj + 1 < info.gys || bj + 1 >= info.gys + info.gym - 1 ||
        bk + 1 < info.gzs || bk + 1 >= info.gzs + info.gzm - 1)
    {
      /* NOTE(review): the opening LOG_ALLOW(...) call line for the warning below
         is not visible in this view of the file — confirm against the repository
         copy before editing. */
               "[Rank %d] Particle PID %lld: dual cell (%d,%d,%d) out of ghosted region. Zeroing '%s'.\n",
               rank, (long long)particle.PID, bi, bj, bk, fieldName);
      if (bs == 3) {
        ((PetscReal*)swarmOut)[3*p + 0] = 0.0;
        ((PetscReal*)swarmOut)[3*p + 1] = 0.0;
        ((PetscReal*)swarmOut)[3*p + 2] = 0.0;
      } else {
        ((PetscReal*)swarmOut)[p] = 0.0;
      }
      continue;
    }

    /* Step 5: Construct dual cell from lCent (shifted index: cent[k+1][j+1][i+1] for cell (i,j,k))
     * Vertex ordering follows GetCellVerticesFromGrid() convention (walkingsearch.c:423-430). */
    Cell dual_cell;
    dual_cell.vertices[0] = cent[bk + 1][bj + 1][bi + 1]; /* (bi,   bj,   bk  ) */
    dual_cell.vertices[1] = cent[bk + 1][bj + 1][bi + 2]; /* (bi+1, bj,   bk  ) */
    dual_cell.vertices[2] = cent[bk + 1][bj + 2][bi + 2]; /* (bi+1, bj+1, bk  ) */
    dual_cell.vertices[3] = cent[bk + 1][bj + 2][bi + 1]; /* (bi,   bj+1, bk  ) */
    dual_cell.vertices[4] = cent[bk + 2][bj + 2][bi + 1]; /* (bi,   bj+1, bk+1) */
    dual_cell.vertices[5] = cent[bk + 2][bj + 2][bi + 2]; /* (bi+1, bj+1, bk+1) */
    dual_cell.vertices[6] = cent[bk + 2][bj + 1][bi + 2]; /* (bi+1, bj,   bk+1) */
    dual_cell.vertices[7] = cent[bk + 2][bj + 1][bi + 1]; /* (bi,   bj,   bk+1) */

    /* Step 6: Compute face distances from particle to dual cell faces */
    PetscReal d[NUM_FACES];
    ierr = CalculateDistancesToCellFaces(particle.loc, &dual_cell, d, 1e-11); CHKERRQ(ierr);

    /* Step 7: Compute trilinear parametric coordinates from face distances.
       Each coordinate is (distance to the low face) / (sum of opposing face
       distances); the a3 denominator is written FRONT+BACK, which is the same
       sum in a different order. */
    PetscReal a1_new = d[LEFT]   / (d[LEFT]   + d[RIGHT]);
    PetscReal a2_new = d[BOTTOM] / (d[BOTTOM] + d[TOP]);
    PetscReal a3_new = d[BACK]   / (d[FRONT]  + d[BACK]);

    /* NOTE(review): the opening LOG_ALLOW(...) call line for the message below is
       not visible in this view of the file — confirm against the repository copy. */
             "[Rank %d] PID %lld: dual base=(%d,%d,%d), new weights=(%.4f,%.4f,%.4f), unclamped=%d\n",
             rank, (long long)particle.PID, bi, bj, bk, a1_new, a2_new, a3_new, (int)needs_unclamped);

    /* Step 8: Call TrilinearInterpolation directly with shifted dual-cell indices.
     * Shifted index: pass (bi+1, bj+1, bk+1) so the kernel accesses
     * field[bk+1..bk+2][bj+1..bj+2][bi+1..bi+2] = cells (bi..bi+1, bj..bj+1, bk..bk+1). */
    if (bs == 1) {
      PetscReal ***fieldScal = (PetscReal ***)fieldPtr;
      PetscReal val;
      if (needs_unclamped) {
        /* Boundary path: weights may fall outside [0,1] (extrapolation).
           NOTE(review): w[0..7] are consumed here in i-fastest order, which
           differs from the Step 5 vertex ordering — assumed to match
           ComputeTrilinearWeightsUnclamped()'s output order; verify there. */
        PetscReal w[8];
        ComputeTrilinearWeightsUnclamped(a1_new, a2_new, a3_new, w);
        val = w[0] * fieldScal[bk+1][bj+1][bi+1] + w[1] * fieldScal[bk+1][bj+1][bi+2]
            + w[2] * fieldScal[bk+1][bj+2][bi+1] + w[3] * fieldScal[bk+1][bj+2][bi+2]
            + w[4] * fieldScal[bk+2][bj+1][bi+1] + w[5] * fieldScal[bk+2][bj+1][bi+2]
            + w[6] * fieldScal[bk+2][bj+2][bi+1] + w[7] * fieldScal[bk+2][bj+2][bi+2];
      } else {
        ierr = TrilinearInterpolation_Scalar(fieldName, fieldScal,
                                             bi + 1, bj + 1, bk + 1, a1_new, a2_new, a3_new, &val); CHKERRQ(ierr);
      }
      ((PetscReal*)swarmOut)[p] = val;
    } else {
      Cmpnts ***fieldVec = (Cmpnts ***)fieldPtr;
      Cmpnts vec;
      if (needs_unclamped) {
        /* Same extrapolating stencil as the scalar branch, per component. */
        PetscReal w[8];
        ComputeTrilinearWeightsUnclamped(a1_new, a2_new, a3_new, w);
        vec.x = w[0]*fieldVec[bk+1][bj+1][bi+1].x + w[1]*fieldVec[bk+1][bj+1][bi+2].x
              + w[2]*fieldVec[bk+1][bj+2][bi+1].x + w[3]*fieldVec[bk+1][bj+2][bi+2].x
              + w[4]*fieldVec[bk+2][bj+1][bi+1].x + w[5]*fieldVec[bk+2][bj+1][bi+2].x
              + w[6]*fieldVec[bk+2][bj+2][bi+1].x + w[7]*fieldVec[bk+2][bj+2][bi+2].x;
        vec.y = w[0]*fieldVec[bk+1][bj+1][bi+1].y + w[1]*fieldVec[bk+1][bj+1][bi+2].y
              + w[2]*fieldVec[bk+1][bj+2][bi+1].y + w[3]*fieldVec[bk+1][bj+2][bi+2].y
              + w[4]*fieldVec[bk+2][bj+1][bi+1].y + w[5]*fieldVec[bk+2][bj+1][bi+2].y
              + w[6]*fieldVec[bk+2][bj+2][bi+1].y + w[7]*fieldVec[bk+2][bj+2][bi+2].y;
        vec.z = w[0]*fieldVec[bk+1][bj+1][bi+1].z + w[1]*fieldVec[bk+1][bj+1][bi+2].z
              + w[2]*fieldVec[bk+1][bj+2][bi+1].z + w[3]*fieldVec[bk+1][bj+2][bi+2].z
              + w[4]*fieldVec[bk+2][bj+1][bi+1].z + w[5]*fieldVec[bk+2][bj+1][bi+2].z
              + w[6]*fieldVec[bk+2][bj+2][bi+1].z + w[7]*fieldVec[bk+2][bj+2][bi+2].z;
      } else {
        ierr = TrilinearInterpolation_Vector(fieldName, fieldVec,
                                             bi + 1, bj + 1, bk + 1, a1_new, a2_new, a3_new, &vec); CHKERRQ(ierr);
      }
      ((PetscReal*)swarmOut)[3*p + 0] = vec.x;
      ((PetscReal*)swarmOut)[3*p + 1] = vec.y;
      ((PetscReal*)swarmOut)[3*p + 2] = vec.z;
    }
  }

  /* Restore arrays and swarm fields (reverse order of acquisition) */
  ierr = DMDAVecRestoreArrayRead(dm_field, fieldLocal_cellCentered, &fieldPtr); CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(user->fda, user->lCent, (void *)&cent); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "position", NULL, NULL, (void**)&pos); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "DMSwarm_location_status", NULL, NULL, (void**)&status); CHKERRQ(ierr);

  LOG_ALLOW(GLOBAL, LOG_INFO, "Finished direct cell-center interpolation for field '%s'.\n", fieldName);
  PetscFunctionReturn(0);
}
1049
#undef __FUNCT__
#define __FUNCT__ "InterpolateEulerFieldFromCornerToSwarm"

/**
 * @brief Corner-averaged interpolation path (legacy).
 * @details Stages cell-centered data to corners via unweighted averaging,
 *          then trilinear interpolation from corners to particles.
 *          Local to this translation unit.
 *
 * Two stages:
 *   1) Center-to-corner: average the cell-centered field onto grid corners
 *      (global corner vector), then ghost-exchange into a local corner vector.
 *   2) Corner-to-particle: trilinear interpolation from the ghosted corner
 *      array into the requested swarm output field.
 *
 * The intermediate corner vectors (user->CellFieldAtCorner / lCellFieldAtCorner)
 * are cached on the context and recreated only when the block size changes.
 *
 * NOTE(review): the defining signature line of this function is not visible in
 * this view of the file (the parameter list below is its continuation) — confirm
 * against the repository copy.
 */
    UserCtx *user,
    Vec fieldLocal_cellCentered,
    const char *fieldName,
    const char *swarmOutFieldName)
{
  PetscErrorCode ierr;
  DM fda = user->fda;
  DM swarm = user->swarm;
  PetscInt bs;            // block size of the input field: 1 (scalar) or 3 (vector)
  DMDALocalInfo info;
  PetscMPIInt rank;

  // Generic pointers to the raw data arrays
  void *cellCenterPtr_read;
  void *cornerPtr_read_with_ghosts;

  // Swarm-related pointers
  PetscInt *cellIDs = NULL;
  PetscReal *weights = NULL;
  PetscInt64 *pids = NULL;   // For logging particle IDs in warnings
  void *swarmOut = NULL;
  PetscReal *pos = NULL;
  PetscInt *status = NULL;
  PetscInt nLocal;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(fda, &info); CHKERRQ(ierr);
  ierr = VecGetBlockSize(fieldLocal_cellCentered, &bs); CHKERRQ(ierr);
  if (bs != 1 && bs != 3) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "BlockSize must be 1 or 3.");

  LOG_ALLOW(GLOBAL, LOG_INFO, "Starting for field '%s'... \n", fieldName);
  // Select the appropriate DM for intermediate corner data based on block size
  DM dm_corner = (bs == 3) ? user->fda : user->da;

  /* STAGE 1: Center-to-Corner Calculation with Communication */

  // (A) Create or reuse the corner vectors.
  if(user->CellFieldAtCorner){
    PetscInt cached_bs;
    ierr = VecGetBlockSize(user->CellFieldAtCorner, &cached_bs); CHKERRQ(ierr);
    if(cached_bs != bs){
      // Cached vector was built for a field of a different block size: rebuild both.
      LOG_ALLOW(LOCAL, LOG_WARNING, "[Rank %d] Block size mismatch for existing corner vector and current field %s(Cached: %d, New: %d). Replacing.\n", rank, fieldName, cached_bs, bs);
      ierr = VecDestroy(&user->CellFieldAtCorner); CHKERRQ(ierr);
      ierr = VecDestroy(&user->lCellFieldAtCorner); CHKERRQ(ierr);
      user->CellFieldAtCorner = NULL;
      user->lCellFieldAtCorner = NULL;
    }
  }

  if(user->CellFieldAtCorner == NULL){
    LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Creating new global corner vector for field '%s'.\n", rank, fieldName);
    ierr = DMCreateGlobalVector(dm_corner, &user->CellFieldAtCorner); CHKERRQ(ierr);
  }else{
    LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Reusing existing global corner vector for field '%s'.\n", rank, fieldName);
  }

  ierr = VecSet(user->CellFieldAtCorner,0.0); CHKERRQ(ierr);
  Vec cornerGlobal = user->CellFieldAtCorner;

  if(user->lCellFieldAtCorner == NULL){
    LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Creating new local corner vector for field '%s'.\n", rank, fieldName);
    ierr = DMCreateLocalVector(dm_corner, &user->lCellFieldAtCorner); CHKERRQ(ierr);
  }else{
    LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Reusing existing local corner vector for field '%s'.\n", rank, fieldName);
  }

  ierr = VecSet(user->lCellFieldAtCorner,0.0); CHKERRQ(ierr);
  Vec cornerLocal = user->lCellFieldAtCorner;

  // (B) Get a read-only array from the input cell-centered vector
  ierr = DMDAVecGetArrayRead(dm_corner, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);

  // Diagnostic size dumps (trace level only); 'size' is reused for each query.
  PetscInt size;
  ierr = VecGetSize(cornerGlobal, &size); CHKERRQ(ierr);
  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Corner global vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);

  size = 0;

  ierr = VecGetSize(cornerLocal, &size); CHKERRQ(ierr);
  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Corner local vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);

  size = 0;

  ierr = VecGetSize(fieldLocal_cellCentered, &size); CHKERRQ(ierr);
  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Cell-centered local vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);

  PetscInt xs,ys,zs,gxs,gys,gzs;

  ierr = DMDAGetCorners(dm_corner,&xs,&ys,&zs,NULL,NULL,NULL); CHKERRQ(ierr);
  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] DMDAGetCorners for field '%s': xs=%d, ys=%d, zs=%d.\n", rank, fieldName, (PetscInt)xs, (PetscInt)ys, (PetscInt)zs);

  ierr = DMDAGetGhostCorners(dm_corner,&gxs,&gys,&gzs,NULL,NULL,NULL); CHKERRQ(ierr);
  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] DMDAGetGhostCorners for field '%s': gxs=%d, gys=%d, gzs=%d.\n", rank, fieldName, (PetscInt)gxs, (PetscInt)gys, (PetscInt)gzs);

  // DEBUG: Inspect Ucat ghost cells before interpolation
  /*
  if (bs == 3) {
    Cmpnts ***ucat_array = (Cmpnts***)cellCenterPtr_read;
    DMDALocalInfo info_debug;
    ierr = DMDAGetLocalInfo(fda, &info_debug); CHKERRQ(ierr);

    // Only print on rank 0 to avoid clutter
    if (rank == 0) {
      PetscPrintf(PETSC_COMM_SELF, "\nDEBUG: Inspecting Ucat Ghost Cells...\n");
      PetscPrintf(PETSC_COMM_SELF, "--------------------------------------------------\n");

      // --- Check the MINIMUM-SIDE (-Xi) ---
      // The first physical cell is at index info.xs (local index).
      // The first ghost cell is at info.xs - 1.
      PetscInt i_first_phys = info_debug.xs;
      PetscInt i_first_ghost = info_debug.xs - 1;
      PetscInt j_mid = info_debug.ys + info_debug.ym / 2;
      PetscInt k_mid = info_debug.zs + info_debug.zm / 2;

      PetscPrintf(PETSC_COMM_SELF, "MIN-SIDE (-Xi):\n");
      PetscPrintf(PETSC_COMM_SELF, "  Ghost Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
                  k_mid, j_mid, i_first_ghost,
                  ucat_array[k_mid][j_mid][i_first_ghost].x,
                  ucat_array[k_mid][j_mid][i_first_ghost].y,
                  ucat_array[k_mid][j_mid][i_first_ghost].z);
      PetscPrintf(PETSC_COMM_SELF, "  First Phys Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
                  k_mid, j_mid, i_first_phys,
                  ucat_array[k_mid][j_mid][i_first_phys].x,
                  ucat_array[k_mid][j_mid][i_first_phys].y,
                  ucat_array[k_mid][j_mid][i_first_phys].z);


      // --- Check the MAXIMUM-SIDE (+Xi) ---
      // The last physical cell is at index info.xs + info.xm - 1.
      // The first ghost cell on the max side is at info.xs + info.xm.
      PetscInt i_last_phys = info_debug.xs + info_debug.xm - 1;
      PetscInt i_last_ghost = info_debug.xs + info_debug.xm;

      PetscPrintf(PETSC_COMM_SELF, "MAX-SIDE (+Xi):\n");
      PetscPrintf(PETSC_COMM_SELF, "  Last Phys Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
                  k_mid, j_mid, i_last_phys,
                  ucat_array[k_mid][j_mid][i_last_phys].x,
                  ucat_array[k_mid][j_mid][i_last_phys].y,
                  ucat_array[k_mid][j_mid][i_last_phys].z);
      PetscPrintf(PETSC_COMM_SELF, "  Ghost Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
                  k_mid, j_mid, i_last_ghost,
                  ucat_array[k_mid][j_mid][i_last_ghost].x,
                  ucat_array[k_mid][j_mid][i_last_ghost].y,
                  ucat_array[k_mid][j_mid][i_last_ghost].z);
      PetscPrintf(PETSC_COMM_SELF, "--------------------------------------------------\n\n");
    }
  }
  */
  // DEBUG

  // (C) Perform the center-to-corner interpolation directly into the global corner vector
  void *cornerPtr_write = NULL;
  ierr = DMDAVecGetArray(dm_corner, cornerGlobal, &cornerPtr_write); CHKERRQ(ierr);

  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Starting center-to-corner interpolation for '%s'.\n", rank, fieldName);

  // SINGLE, CLEAN CALL SITE: The macro handles the runtime dispatch based on 'bs'.
  ierr = InterpolateFieldFromCenterToCorner(bs, cellCenterPtr_read, cornerPtr_write, user); CHKERRQ(ierr);

  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Finished center-to-corner interpolation for '%s'.\n", rank, fieldName);
  ierr = DMDAVecRestoreArray(dm_corner, cornerGlobal, &cornerPtr_write); CHKERRQ(ierr);

  ierr = DMDAVecRestoreArrayRead(dm_corner, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);

  ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);

  //////////// DEBUG
  /*
  // Log a synchronized header message from all ranks.
  LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerGlobal BEFORE ghost exchange...\n");

  // VecView prints to a PETSc viewer. PETSC_VIEWER_STDOUT_WORLD is a global, synchronized viewer.
  // We only need to call it if logging is active for this function.
  if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
    ierr = VecView(cornerGlobal, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
  }

  // Log a synchronized footer message.
  LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerGlobal.\n");
  */
  ////////// DEBUG

  // ierr = PetscBarrier((PetscObject)cornerGlobal); CHKERRQ(ierr);

  // (D) CRITICAL STEP: Communicate the newly computed corner data to fill ghost regions
  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Beginning ghost exchange for corner data...\n", rank);

  ierr = DMGlobalToLocalBegin(dm_corner, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dm_corner, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);

  LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Ghost exchange for corner data complete.\n", rank);

  LOG_ALLOW_SYNC(GLOBAL, LOG_VERBOSE, "getting array from %s on all ranks.\n", dm_corner == user->fda ? "fda" : "da");
  if (is_function_allowed(__func__) && (int)(LOG_VERBOSE) <= (int)get_log_level()) {
    // DEBUG: Inspect cornerLocal after ghost exchange

    ierr = LOG_FIELD_ANATOMY(user,"CornerField", "After Corner Velocity Interpolated"); CHKERRQ(ierr);

    // DEBUG
    //ierr = DMView(user->fda, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
  }

  /////// DEBUG
  /*
  // Log a synchronized header message from all ranks.
  LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerLocal AFTER ghost exchange...\n");

  // Here, we want each rank to print its own local vector.
  // PETSC_VIEWER_STDOUT_SELF is the correct tool for this. The LOG_ALLOW_SYNC
  // wrapper will ensure the output from each rank is printed sequentially.
  if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
    PetscMPIInt rank_d;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank_d); CHKERRQ(ierr);

    // Use a synchronized print to create a clean header for each rank's output
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\n--- cornerLocal (Rank %d) ---\n", rank_d);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    // Print the local vector's contents for this rank
    ierr = VecView(cornerLocal, PETSC_VIEWER_STDOUT_SELF); CHKERRQ(ierr);

    // Use a synchronized print to create a clean footer
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "--- End cornerLocal (Rank %d) ---\n", rank_d);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
  }

  LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerLocal.\n");
  */
  ///// DEBUG

  // STAGE 2: Particle Interpolation using Ghosted Corner Data */

  // (E) Get the local, GHOSTED array of corner data for the final interpolation step
  ierr = DMDAVecGetArrayRead(dm_corner, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);

  // (F) Retrieve swarm fields for the particle loop
  ierr = DMSwarmGetLocalSize(swarm, &nLocal); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "position", NULL, NULL, (void**)&pos); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, "DMSwarm_location_status", NULL, NULL, (void**)&status); CHKERRQ(ierr);
  ierr = DMSwarmGetField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);

  LOG_ALLOW(LOCAL,LOG_TRACE," Rank %d holds data upto & including %d,%d,%d.\n",rank,info.gxs + info.gxm,info.gys+info.gym,info.gzs+info.gzm);

  // (G) Loop over each local particle
  for (PetscInt p = 0; p < nLocal; p++) {

    Particle particle;

    ierr = UnpackSwarmFields(p,pids,weights,pos,cellIDs,NULL,status,NULL,NULL,NULL,&particle); CHKERRQ(ierr);

    /* NOTE(review): the opening LOG_ALLOW(...) call line for the message below is
       not visible in this view of the file — confirm against the repository copy. */
             "[Rank %d] Particle PID %lld: global cell=(%d,%d,%d), weights=(%.4f,%.4f,%.4f)\n",
             rank, (long long)particle.PID, particle.cell[0], particle.cell[1], particle.cell[2],
             particle.weights.x, particle.weights.y, particle.weights.z);
    // Safety check: Ensure the entire 8-node stencil is within the valid memory
    // region (owned + ghosts) of the local array.

    if (particle.cell[0] < info.gxs || particle.cell[0] >= info.gxs + info.gxm - 1 ||
        particle.cell[1] < info.gys || particle.cell[1] >= info.gys + info.gym - 1 ||
        particle.cell[2] < info.gzs || particle.cell[2] >= info.gzs + info.gzm - 1)
    {
      /* NOTE(review): the opening LOG_ALLOW(...) call line for the warning below
         is not visible in this view of the file — confirm against the repository copy. */
               "[Rank %d] Particle PID %lld in global cell (%d,%d,%d) is in an un-interpolatable region (requires ghosts of ghosts or is out of bounds). Zeroing field '%s'.\n",
               rank, (long long)particle.PID, particle.cell[0], particle.cell[1], particle.cell[2], fieldName);
      if (bs == 3) {
        ((PetscReal*)swarmOut)[3*p + 0] = 0.0;
        ((PetscReal*)swarmOut)[3*p + 1] = 0.0;
        ((PetscReal*)swarmOut)[3*p + 2] = 0.0;
      } else {
        ((PetscReal*)swarmOut)[p] = 0.0;
      }
      continue;
    }

    /* NOTE(review): the opening line of the corner-to-particle interpolation call
       (the function name and 'ierr =') is not visible in this view of the file —
       the arguments below are its continuation; confirm against the repository copy. */
                fieldName,
                cornerPtr_read_with_ghosts,
                &particle,
                swarmOut, p, bs);
    CHKERRQ(ierr);
  }

  // (H) Restore all retrieved arrays and vectors */
  ierr = DMDAVecRestoreArrayRead(dm_corner, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);

  // (I) Restore swarm fields
  ierr = DMSwarmRestoreField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "position", NULL, NULL, (void**)&pos); CHKERRQ(ierr);
  ierr = DMSwarmRestoreField(swarm, "DMSwarm_location_status", NULL, NULL, (void**)&status); CHKERRQ(ierr);
  LOG_ALLOW(GLOBAL, LOG_INFO, "Finished for field '%s'.\n", fieldName);
  PetscFunctionReturn(0);
}
1361
#undef __FUNCT__
#define __FUNCT__ "InterpolateEulerFieldToSwarm"

/**
 * @brief Dispatches grid-to-particle interpolation to the method selected in the control file.
 * @details Routes to InterpolateEulerFieldFromCenterToSwarm (direct trilinear, second-order)
 *          or InterpolateEulerFieldFromCornerToSwarm (corner-averaged, legacy) based on
 *          user->simCtx->interpolationMethod.
 *
 * @param user                    Simulation context carrying the method selection.
 * @param fieldLocal_cellCentered Ghosted LOCAL cell-centered field vector (bs 1 or 3).
 * @param fieldName               Field label forwarded to the selected routine.
 * @param swarmOutFieldName       DMSwarm output field name forwarded to the selected routine.
 *
 * @return 0 on success, PETSc error code otherwise.
 *
 * NOTE(review): the defining signature line and the 'if (...)' condition line that
 * performs the dispatch are not visible in this view of the file; per the @details
 * above the condition tests user->simCtx->interpolationMethod — confirm against
 * the repository copy.
 */
    UserCtx *user,
    Vec fieldLocal_cellCentered,
    const char *fieldName,
    const char *swarmOutFieldName)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;

    ierr = InterpolateEulerFieldFromCenterToSwarm(user, fieldLocal_cellCentered,
                                                  fieldName, swarmOutFieldName); CHKERRQ(ierr);
  } else {
    // Legacy fallback: corner staging plus ghost exchange before particle sampling.
    ierr = InterpolateEulerFieldFromCornerToSwarm(user, fieldLocal_cellCentered,
                                                  fieldName, swarmOutFieldName); CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
1390
#undef __FUNCT__
#define __FUNCT__ "InterpolateAllFieldsToSwarm"
/**
 * @brief Internal helper implementation: `InterpolateAllFieldsToSwarm()`.
 * @details Interpolates the standard set of Eulerian fields (Ucat, Diffusivity,
 *          DiffusivityGradient) onto the swarm via InterpolateEulerFieldToSwarm(),
 *          then synchronizes all ranks with a barrier. Local to this translation unit.
 *
 * NOTE(review): the defining signature line is not visible in this view of the
 * file; a 'user' context is clearly in scope (user->lUcat below) — confirm the
 * exact parameter list against the repository copy.
 */
{
  PetscErrorCode ierr;
  PetscMPIInt rank;
  PetscFunctionBegin;


  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);

  /*
    1) Interpolate the 'velocity' field (user->Ucat) into DMSwarm's "swarmVelocity".
       - The function InterpolateOneFieldOverSwarm() loops over all local particles,
         retrieves iCell, jCell, kCell, (a1,a2,a3) from the DMSwarm,
         and calls the appropriate trilinear interpolation routine.

       - "velocity" => just a label used in logging or debugging.
       - "swarmVelocity" => the DMSwarm field name where we store the result
         (assume dof=3 if user->Ucat has blockSize=3).
  */

  /* NOTE(review): the opening LOG_ALLOW(...) call line for the message below is
     not visible in this view of the file — confirm against the repository copy. */
           " Interpolation of ucat to velocity begins on rank %d.\n",rank);
  // Make sure to pass the *LOCAL* Vector to the function below!
  ierr = InterpolateEulerFieldToSwarm(user, user->lUcat,
                                      "Ucat",
                                      "velocity"); CHKERRQ(ierr);
  /* NOTE(review): the opening line of this call (function name and local-vector
     argument) is not visible in this view — the line below is its continuation. */
                                      "Diffusivity", "Diffusivity"); CHKERRQ(ierr);

  /* NOTE(review): likewise, the opening line of this call is not visible here. */
                                      "DiffusivityGradient", "DiffusivityGradient"); CHKERRQ(ierr);
  /* Add fields as necessary here*/

  ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
  /* NOTE(review): the opening LOG_ALLOW(...) call line for the message below is
     not visible in this view of the file. */
           "[rank %d]Completed Interpolateting all fields to the swarm.\n",rank);


  PetscFunctionReturn(0);
}
1439
1440/////////////////////// Scatter from particles to euler fields
1441
1442/**
1443 * @brief Functions for scattering particle data (scalar or vector) onto
1444 * Eulerian grid fields by averaging contributions within each cell.
1445 *
1446 * This file provides a modular set of functions to perform particle-to-grid
1447 * projection, specifically calculating cell-averaged quantities from particle properties.
1448 * It assumes a PETSc environment using DMDA for the grids and DMSwarm for particles.
1449 *
1450 * Key Features:
1451 * - Handles both scalar (DOF=1) and vector (DOF=3) particle fields.
1452 * - Uses a pre-calculated particle count vector (`ParticleCount`) for normalization.
1453 * - Implicitly determines the target Eulerian DM (typically `user->da` for scalars,
1454 * `user->fda` for vectors) based on standard field names ("P", "Nvert", "Ucat", "Ucont","Psi").
 * - Modifies existing Eulerian field vectors (e.g., `user->P`, `user->Ucat`, `user->Psi`) in place.
1456 * - Provides a high-level wrapper function (`ScatterAllParticleFieldsToEulerFields`)
1457 * to easily scatter a standard set of fields.
1458 * - Uses only the base `SETERRQ` macro for error reporting to maximize compiler compatibility.
1459 *
1460 * Dependencies:
1461 * - PETSc library (DMDA, DMSwarm, Vec, IS, PetscLog, etc.)
1462 * - A `UserCtx` struct (defined elsewhere, e.g., "userctx.h") containing pointers
1463 * to relevant DMs (`da`, `fda`), Vecs (`ParticleCount`, `P`, `Nvert`, `Ucat`, etc.),
1464 * and the `DMSwarm` object (`swarm`).
1465 * - A custom DMSwarm field named `"DMSwarm_CellID"` (blockSize=3, type=PETSC_INT)
1466 * must be registered and populated with the local cell indices for each particle.
1467 * - Logging infrastructure (`LOG_ALLOW`, etc.) assumed to be defined elsewhere.
1468 *
1469 * @defgroup scatter_module Particle-to-Grid Scattering
1470 * @{
1471 */
1472
1473//-----------------------------------------------------------------------------
1474// Internal Helper Modules (Lower-level building blocks)
1475//-----------------------------------------------------------------------------
1476/**
1477 * @defgroup scatter_module_internal Internal Scattering Helpers
1478 * @ingroup scatter_module
1479 * @brief Lower-level functions used by the main scattering routines.
1480 * @{
1481 */
1482
1483#undef __FUNCT__
1484#define __FUNCT__ "GetScatterTargetInfo"
1485/* Implementation for GetScatterTargetInfo declared in include/interpolation.h. */
1486PetscErrorCode GetScatterTargetInfo(UserCtx *user, const char *particleFieldName,
1487 DM *targetDM, PetscInt *expected_dof)
1488{
1489 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1490 PetscFunctionBeginUser;
1491
1493
1494 // --- Input Validation ---
1495 // Check for NULL pointers in essential inputs
1496 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL.");
1497 if (!user->da) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->da is NULL.");
1498 if (!user->fda) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->fda is NULL."); // Needed for vector fields
1499 if (!particleFieldName) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "particleFieldName is NULL.");
1500 if (!targetDM) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output targetDM pointer is NULL.");
1501 if (!expected_dof) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output expected_dof pointer is NULL.");
1502
1503 // --- Determine Target DM and DOF based on Field Name ---
1504 // Compare the input field name with known scalar fields targeting 'da'
1505 if (strcmp(particleFieldName, "Psi") == 0 || strcmp(particleFieldName, "Nvert") == 0) {
1506 *expected_dof = 1; // Scalar fields have DOF 1
1507 *targetDM = user->da; // Target the primary scalar DMDA
1508 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Field '%s' targets DM 'da' (DOF=1).\n", particleFieldName);
1509 }
1510 // Compare with known vector fields targeting 'fda'
1511 else if (strcmp(particleFieldName, "Ucat") == 0 || strcmp(particleFieldName, "Ucont") == 0) {
1512 *expected_dof = 3; // Vector fields have DOF 3
1513 *targetDM = user->fda; // Target the vector DMDA (often node-based)
1514 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Field '%s' targets DM 'fda' (DOF=3).\n", particleFieldName);
1515 }
1516 // --- Add rules for other fields here ---
1517 // else if (strcmp(particleFieldName, "SomeOtherScalar") == 0) { *expected_dof = 1; *targetDM = user->da; }
1518 // else if (strcmp(particleFieldName, "SomeOtherVector") == 0) { *expected_dof = 3; *targetDM = user->someOtherDM; }
1519 else {
1520 // The provided field name doesn't match any known rules
1521 *targetDM = NULL; // Indicate failure
1522 *expected_dof = 0;
1523 // Format the error message manually
1524 PetscSNPrintf(msg, sizeof(msg), "Field name '%s' is not recognized for automatic DM selection.", particleFieldName);
1525 // Use SETERRQ with the formatted message and appropriate error code
1526 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%s", msg); // Use WRONG argument error code
1527 }
1528
1530
1531 PetscFunctionReturn(0);
1532}
1533
1534#undef __FUNCT__
1535#define __FUNCT__ "GetPersistentLocalVector"
1536/**
1537 * @brief Internal helper implementation: `GetPersistentLocalVector()`.
1538 * @details Local to this translation unit.
1539 */
1540PetscErrorCode GetPersistentLocalVector(UserCtx *user, const char *fieldName, Vec *localVec)
1541{
1542 PetscFunctionBeginUser;
1543
1544 if (!user || !fieldName || !localVec)
1545 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Null input in GetPersistentLocalVector");
1546
1547 if (strcmp(fieldName, "Psi") == 0) {
1548 if (!user->lPsi) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->lPsi is not initialized");
1549 *localVec = user->lPsi;
1550 }
1551 else if (strcmp(fieldName, "Ucat") == 0) {
1552 if (!user->lUcat) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->lUcat is not initialized");
1553 *localVec = user->lUcat;
1554 }
1555 else if (strcmp(fieldName, "Ucont") == 0) {
1556 if (!user->lUcont) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->lUcont is not initialized");
1557 *localVec = user->lUcont;
1558 }
1559 else if (strcmp(fieldName, "Nvert") == 0) {
1560 if (!user->lNvert) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->lNvert is not initialized");
1561 *localVec = user->lNvert;
1562 }
1563 else {
1564 char msg[ERROR_MSG_BUFFER_SIZE];
1565 PetscSNPrintf(msg, sizeof(msg), "Field '%s' does not have a mapped persistent local vector.", fieldName);
1566 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%s", msg);
1567 }
1568
1569 PetscFunctionReturn(0);
1570}
1571
#undef __FUNCT__
#define __FUNCT__ "AccumulateParticleField"
/*
 * Internal implementation detail for public API `AccumulateParticleField`.
 * Documentation is maintained in include/interpolation.h to keep a single source
 * of truth for Doxygen output.
 *
 * Overview of the code below:
 *  - Sums each local particle's field value into the grid cell identified by
 *    its "DMSwarm_CellID" entry, writing into a GHOSTED LOCAL vector
 *    (localAccumulatorVec). Ghost-cell contributions are expected to be
 *    reduced onto the owning rank afterwards via DMLocalToGlobal(ADD_VALUES)
 *    by the caller.
 *  - Supports DOF=1 (scalar) and DOF=3 (vector) grid fields only.
 */
PetscErrorCode AccumulateParticleField(DM swarm, const char *particleFieldName,
                                       DM gridSumDM, Vec localAccumulatorVec)
{
    PetscErrorCode ierr;
    PetscInt dof;
    PetscInt nlocal, p;
    const PetscReal *particle_arr = NULL;
    const PetscInt *cell_id_arr = NULL;

    // DMDA Accessors
    PetscScalar ***arr_1d = NULL; // For scalar fields
    PetscScalar ****arr_3d = NULL; // For vector fields

    // Ghosted dimension variables
    PetscInt gxs, gys, gzs, gxm, gym, gzm;
    PetscMPIInt rank;
    char msg[ERROR_MSG_BUFFER_SIZE];

    PetscFunctionBeginUser;

    // NOTE(review): rank is fetched but not used below; presumably kept for
    // debug logging — confirm before removing.
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);

    // --- 1. Validation & Setup ---
    if (!swarm || !gridSumDM || !localAccumulatorVec)
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Null input in AccumulateParticleField.");

    // Get DMDA information (only the DOF is needed here)
    ierr = DMDAGetInfo(gridSumDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
    // Get Ghosted Corners (Global indices of the ghosted patch start, and its dimensions)
    ierr = DMDAGetGhostCorners(gridSumDM, &gxs, &gys, &gzs, &gxm, &gym, &gzm); CHKERRQ(ierr);

    // --- 2. Verify Vector Type (Global vs Local) ---
    // A ghosted LOCAL vector has exactly gxm*gym*gzm*dof entries; a GLOBAL
    // vector would report the domain-wide size instead. This catches the
    // most common caller mistake early with a descriptive message.
    {
        PetscInt vecSize;
        PetscInt expectedLocalSize = gxm * gym * gzm * dof;
        ierr = VecGetSize(localAccumulatorVec, &vecSize); CHKERRQ(ierr);

        if (vecSize != expectedLocalSize) {
            PetscSNPrintf(msg, sizeof(msg),
                "Vector dimension mismatch! Expected Ghosted Local Vector size %d (gxm*gym*gzm*dof), got %d. "
                "Did you pass a Global Vector instead of a Local Vector?",
                expectedLocalSize, vecSize);
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%s", msg);
        }
    }

    // --- 3. Acquire Particle Data ---
    ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
    // These calls will fail nicely if the field doesn't exist
    ierr = DMSwarmGetField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
    ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);

    // --- 4. Acquire Grid Accessors ---
    // DMDAVecGetArray* handles the mapping from Global (i,j,k) to the underlying Local Array index
    if (dof == 1) {
        ierr = DMDAVecGetArray(gridSumDM, localAccumulatorVec, &arr_1d); CHKERRQ(ierr);
    } else if (dof == 3) {
        ierr = DMDAVecGetArrayDOF(gridSumDM, localAccumulatorVec, &arr_3d); CHKERRQ(ierr);
    } else {
        PetscSNPrintf(msg, sizeof(msg), "Unsupported DOF=%d. AccumulateParticleField supports DOF 1 or 3.", dof);
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "%s", msg);
    }

    // --- 5. Accumulate Loop ---
    // Iterate through all local particles
    for (p = 0; p < nlocal; ++p) {
        // Retrieve Geometric Grid Index (0-based); CellID stores 3 ints per particle.
        PetscInt i_geom = cell_id_arr[p * 3 + 0];
        PetscInt j_geom = cell_id_arr[p * 3 + 1];
        PetscInt k_geom = cell_id_arr[p * 3 + 2];

        // Apply Shift (+1) to match Memory Layout (Index 0 is boundary/ghost)
        // NOTE(review): assumes the solver's convention that geometric cell 0
        // maps to array index 1 — confirm against the CellID producer.
        PetscInt i = i_geom + 1;
        PetscInt j = j_geom + 1;
        PetscInt k = k_geom + 1;

        // Bounds Check: Ensure (i,j,k) falls within the Local Ghosted Patch.
        // This allows writing to ghost slots which will later be reduced to the owner rank.
        if (i >= gxs && i < gxs + gxm &&
            j >= gys && j < gys + gym &&
            k >= gzs && k < gzs + gzm)
        {
            if (dof == 1) {
                arr_1d[k][j][i] += particle_arr[p];
            } else {
                // For DOF=3, unroll the loop for slight optimization
                arr_3d[k][j][i][0] += particle_arr[p * 3 + 0];
                arr_3d[k][j][i][1] += particle_arr[p * 3 + 1];
                arr_3d[k][j][i][2] += particle_arr[p * 3 + 2];
            }
        }
        // Note: Particles outside the ghost layer are skipped. This is expected behavior
        // if particles have not yet been migrated or localized correctly.
    }

    // --- 6. Restore Arrays and Fields ---
    // Restore in the reverse of acquisition; the else branch is safe because
    // dof was validated to be 1 or 3 above.
    if (dof == 1) {
        ierr = DMDAVecRestoreArray(gridSumDM, localAccumulatorVec, &arr_1d); CHKERRQ(ierr);
    } else {
        ierr = DMDAVecRestoreArrayDOF(gridSumDM, localAccumulatorVec, &arr_3d); CHKERRQ(ierr);
    }

    ierr = DMSwarmRestoreField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
    ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);

    PetscFunctionReturn(0);
}
1688
1689#undef __FUNCT__
1690#define __FUNCT__ "NormalizeGridVectorByCount"
1691
1692/* Implementation for NormalizeGridVectorByCount declared in include/interpolation.h. */
1693PetscErrorCode NormalizeGridVectorByCount(DM countDM, Vec countVec,
1694 DM dataDM, Vec sumVec, Vec avgVec)
1695{
1696 PetscErrorCode ierr;
1697 PetscInt data_dof;
1698 PetscInt count_dof;
1699 PetscMPIInt rank;
1700 char msg[ERROR_MSG_BUFFER_SIZE];
1701
1702 // Pointers for DMDA array accessors - declare specific types
1703 PetscScalar ***count_arr_3d = NULL; // For DOF=1 count vector (3D DMDA)
1704 PetscScalar ***sum_arr_scalar = NULL; // For DOF=1 sum vector (3D DMDA)
1705 PetscScalar ***avg_arr_scalar = NULL; // For DOF=1 avg vector (3D DMDA)
1706 PetscScalar ****sum_arr_vector = NULL; // For DOF=3 sum vector (3D DMDA + DOF)
1707 PetscScalar ****avg_arr_vector = NULL; // For DOF=3 avg vector (3D DMDA + DOF)
1708
1709
1710 PetscFunctionBeginUser;
1711
1713
1714 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1715
1716 // --- Validation ---
1717 ierr = DMDAGetInfo(countDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &count_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1718 ierr = DMDAGetInfo(dataDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &data_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1719 if (count_dof != 1) { PetscSNPrintf(msg, sizeof(msg), "countDM must have DOF=1, got %d.", count_dof); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%s", msg); }
1720 if (data_dof != 1 && data_dof != 3) { PetscSNPrintf(msg, sizeof(msg), "dataDM DOF must be 1 or 3, got %d.", data_dof); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%s", msg); }
1721
1722 // --- Get Array Access using appropriate DMDA accessors ---
1723 ierr = DMDAVecGetArrayRead(countDM, countVec, &count_arr_3d); CHKERRQ(ierr);
1724
1725 if (data_dof == 1) {
1726 ierr = DMDAVecGetArrayRead(dataDM, sumVec, &sum_arr_scalar); CHKERRQ(ierr);
1727 ierr = DMDAVecGetArray(dataDM, avgVec, &avg_arr_scalar); CHKERRQ(ierr);
1728 } else { // data_dof == 3
1729 ierr = DMDAVecGetArrayDOFRead(dataDM, sumVec, &sum_arr_vector); CHKERRQ(ierr);
1730 ierr = DMDAVecGetArrayDOF(dataDM, avgVec, &avg_arr_vector); CHKERRQ(ierr);
1731 }
1732
1733 // Get the corners (global start indices) and dimensions of the *local owned* region
1734 PetscInt xs, ys, zs, xm, ym, zm;
1735 ierr = DMDAGetCorners(countDM, &xs, &ys, &zs, &xm, &ym, &zm); CHKERRQ(ierr);
1736
1737 // --- Normalize Over Owned Cells ---
1738 LOG_ALLOW(LOCAL, LOG_DEBUG, "(Rank %d): Normalizing DOF=%d data over owned range [%d:%d, %d:%d, %d:%d].\n",
1739 rank, data_dof, xs, xs+xm, ys, ys+ym, zs, zs+zm);
1740
1741 // Loop using GLOBAL indices (i, j, k) over the range owned by this process
1742 for (PetscInt k = zs; k < zs + zm; ++k) {
1743 for (PetscInt j = ys; j < ys + ym; ++j) {
1744 for (PetscInt i = xs; i < xs + xm; ++i) {
1745
1746 // Access the count using standard 3D indexing
1747 PetscScalar count = count_arr_3d[k][j][i];
1748
1749 if (PetscRealPart(count) > 0.5) { // Use tolerance for float comparison
1750 if (data_dof == 1) {
1751 // Access scalar sum/avg using standard 3D indexing
1752 avg_arr_scalar[k][j][i] = sum_arr_scalar[k][j][i] / count;
1753 } else { // data_dof == 3
1754 // Access vector components using DOF indexing on the last dimension
1755 for (PetscInt c = 0; c < data_dof; ++c) {
1756 avg_arr_vector[k][j][i][c] = sum_arr_vector[k][j][i][c] / count;
1757 }
1758 }
1759 } else { // count is zero or negative
1760 // Set average to zero
1761 if (data_dof == 1) {
1762 avg_arr_scalar[k][j][i] = 0.0;
1763 } else { // data_dof == 3
1764 for (PetscInt c = 0; c < data_dof; ++c) {
1765 avg_arr_vector[k][j][i][c] = 0.0;
1766 }
1767 }
1768 } // end if count > 0.5
1769 } // end i loop
1770 } // end j loop
1771 } // end k loop
1772
1773 // --- Restore Arrays using appropriate functions ---
1774 ierr = DMDAVecRestoreArrayRead(countDM, countVec, &count_arr_3d); CHKERRQ(ierr);
1775 if (data_dof == 1) {
1776 ierr = DMDAVecRestoreArrayRead(dataDM, sumVec, &sum_arr_scalar); CHKERRQ(ierr);
1777 ierr = DMDAVecRestoreArray(dataDM, avgVec, &avg_arr_scalar); CHKERRQ(ierr);
1778 } else { // data_dof == 3
1779 ierr = DMDAVecRestoreArrayDOFRead(dataDM, sumVec, &sum_arr_vector); CHKERRQ(ierr);
1780 ierr = DMDAVecRestoreArrayDOF(dataDM, avgVec, &avg_arr_vector); CHKERRQ(ierr);
1781 }
1782
1783 // --- Assemble Final Average Vector ---
1784 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Assembling final average vector (DOF=%d).\n", data_dof);
1785 ierr = VecAssemblyBegin(avgVec); CHKERRQ(ierr);
1786 ierr = VecAssemblyEnd(avgVec); CHKERRQ(ierr);
1787
1788
1790
1791 PetscFunctionReturn(0);
1792}
1793
1794/** @} */ // End of scatter_module_internal group
1795
1796//-----------------------------------------------------------------------------
1797// User-Facing API
1798//-----------------------------------------------------------------------------
1799//-----------------------------------------------------------------------------
1800// MODULE 4: Internal Scatter Orchestration Helper - No PetscErrorClear
1801//-----------------------------------------------------------------------------
1802
1803#undef __FUNCT__
1804#define __FUNCT__ "ScatterParticleFieldToEulerField_Internal"
1805/**
1806 * @brief Internal helper implementation: `ScatterParticleFieldToEulerField_Internal()`.
1807 * @details Local to this translation unit.
1808 */
1810 const char *particleFieldName,
1811 DM targetDM,
1812 PetscInt expected_dof,
1813 Vec eulerFieldAverageVec)
1814{
1815 PetscErrorCode ierr;
1816 PetscInt target_dof = 0;
1817 Vec globalsumVec = NULL;
1818 Vec localsumVec = NULL;
1819 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1820
1821 PetscFunctionBeginUser;
1822
1824
1825 if (!user || !user->swarm || !user->ParticleCount || !particleFieldName || !targetDM || !eulerFieldAverageVec)
1826 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "NULL input provided to ScatterParticleFieldToEulerField_Internal.");
1827
1828 ierr = DMDAGetInfo(targetDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &target_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1829 if (target_dof != expected_dof) {
1830 PetscSNPrintf(msg, sizeof(msg),
1831 "Field '%s' expects DOF %d but targetDM reports DOF %d.",
1832 particleFieldName, expected_dof, target_dof);
1833 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "%s", msg);
1834 }
1835
1836 // --- Check if Particle Field Exists ---
1837 // Attempt a GetField call; if it fails, the field doesn't exist.
1838 // We let CHKERRQ handle the error directly if the field doesn't exist OR
1839 // we catch it specifically to provide a more tailored message.
1840
1841 /*
1842 LOG_ALLOW(GLOBAL,LOG_DEBUG,"Field %s being accessed to check existence \n",particleFieldName);
1843 ierr = DMSwarmGetField(user->swarm, particleFieldName, NULL, NULL, NULL);
1844 if (ierr) { // If GetField returns an error
1845 PetscSNPrintf(msg, sizeof(msg), "Particle field '%s' not found in DMSwarm for scattering.", particleFieldName);
1846 // Directly set the error, overwriting the one from GetField
1847 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, msg);
1848 }
1849 ierr = DMSwarmRestoreField(user->swarm, particleFieldName, NULL, NULL, NULL);
1850 */
1851
1852 // --- Setup Temporary Sum Vector ---
1853 ierr = VecDuplicate(eulerFieldAverageVec, &globalsumVec); CHKERRQ(ierr);
1854 ierr = VecSet(globalsumVec, 0.0); CHKERRQ(ierr);
1855 ierr = PetscSNPrintf(msg, sizeof(msg), "TempSum_%s", particleFieldName); CHKERRQ(ierr);
1856 ierr = PetscObjectSetName((PetscObject)globalsumVec, msg); CHKERRQ(ierr);
1857
1858 // create local vector for accumulation
1859 ierr = DMGetLocalVector(targetDM, &localsumVec); CHKERRQ(ierr);
1860 ierr = VecSet(localsumVec, 0.0); CHKERRQ(ierr); // Must be zeroed before accumulation
1861 ierr = PetscSNPrintf(msg, sizeof(msg), "LocalTempSum_%s", particleFieldName); CHKERRQ(ierr);
1862 ierr = PetscObjectSetName((PetscObject)localsumVec, msg); CHKERRQ(ierr);
1863
1864 // --- Accumulate ---
1865 // This will call DMSwarmGetField again. If it failed above, it will likely fail here too,
1866 // unless the error was cleared somehow between the check and here (unlikely).
1867 // If the check above was skipped (Option 1), this is where the error for non-existent
1868 // field will be caught by CHKERRQ.
1869 ierr = AccumulateParticleField(user->swarm, particleFieldName, targetDM, localsumVec); CHKERRQ(ierr);
1870
1871 // --- Local to Global Sum ---
1872 ierr = DMLocalToGlobalBegin(targetDM, localsumVec, ADD_VALUES, globalsumVec); CHKERRQ(ierr);
1873 ierr = DMLocalToGlobalEnd(targetDM, localsumVec, ADD_VALUES, globalsumVec); CHKERRQ(ierr);
1874 // Return local vector to DM
1875 ierr = DMRestoreLocalVector(targetDM, &localsumVec); CHKERRQ(ierr);
1876
1877 // Calculate the number of particles per cell.
1878 ierr = CalculateParticleCountPerCell(user); CHKERRQ(ierr);
1879 // --- Normalize ---
1880 ierr = NormalizeGridVectorByCount(user->da, user->ParticleCount, targetDM, globalsumVec, eulerFieldAverageVec); CHKERRQ(ierr);
1881
1882 // --- Cleanup ---
1883 ierr = VecDestroy(&globalsumVec); CHKERRQ(ierr);
1884
1885
1887
1888 PetscFunctionReturn(0);
1889}
1890
1891#undef __FUNCT__
1892#define __FUNCT__ "ScatterParticleFieldToEulerField"
1893
1894/* Implementation for ScatterParticleFieldToEulerField declared in include/interpolation.h. */
1896 const char *particleFieldName,
1897 Vec eulerFieldAverageVec)
1898{
1899 PetscErrorCode ierr;
1900 DM targetDM = NULL; // Will point to user->da or user->fda
1901 PetscInt expected_dof = 0; // Will be 1 or 3
1902 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1903
1904 PetscFunctionBeginUser;
1905
1907
1908 // --- Essential Input Validation ---
1909 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL.");
1910 if (!user->swarm) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->swarm is NULL.");
1911 if (!user->ParticleCount) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->ParticleCount is NULL.");
1912 if (!eulerFieldAverageVec) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output eulerFieldAverageVec is NULL.");
1913 // particleFieldName validity checked within GetScatterTargetInfo
1914
1915 // --- Determine Target DM & DOF using the helper function ---
1916 ierr = GetScatterTargetInfo(user, particleFieldName, &targetDM, &expected_dof); CHKERRQ(ierr);
1917 // If GetScatterTargetInfo failed (e.g., unknown name), CHKERRQ would have returned.
1918
1919 // --- Validate the provided Target Vec's Compatibility ---
1920 DM vec_dm;
1921 PetscInt vec_dof;
1922 // Check that the provided average vector has a DM associated with it
1923 ierr = VecGetDM(eulerFieldAverageVec, &vec_dm); CHKERRQ(ierr);
1924 if (!vec_dm) {
1925 PetscSNPrintf(msg, sizeof(msg), "Provided eulerFieldAverageVec for field '%s' does not have an associated DM.", particleFieldName);
1926 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "%s", msg);
1927 }
1928 // Get the block size (DOF) of the provided vector
1929 ierr = VecGetBlockSize(eulerFieldAverageVec, &vec_dof); CHKERRQ(ierr);
1930 // Compare the vector's associated DM with the one determined by the field name
1931 if (vec_dm != targetDM) {
1932 const char *target_dm_name = "targetDM", *vec_dm_name = "vec_dm";
1933 // Get actual names if possible for a more informative error message
1934 PetscObjectGetName((PetscObject)targetDM, &target_dm_name);
1935 PetscObjectGetName((PetscObject)vec_dm, &vec_dm_name);
1936 PetscSNPrintf(msg, sizeof(msg), "Provided eulerFieldAverageVec associated with DM '%s', but field '%s' requires scatter to DM '%s'.", vec_dm_name, particleFieldName, target_dm_name);
1937 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "%s", msg);
1938 }
1939 // Compare the vector's DOF with the one expected for the field name
1940 if (vec_dof != expected_dof) {
1941 PetscSNPrintf(msg, sizeof(msg), "Field '%s' requires DOF %d, but provided eulerFieldAverageVec has DOF %d.", particleFieldName, expected_dof, vec_dof);
1942 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "%s", msg);
1943 }
1944
1945 // --- Perform Scatter using Internal Helper ---
1946 // Log intent before calling the core logic
1947 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering field '%s' (DOF=%d).\n", particleFieldName, expected_dof);
1948 ierr = ScatterParticleFieldToEulerField_Internal(user, // Pass user context
1949 particleFieldName, // Name of particle field
1950 targetDM, // Determined target DM (da or fda)
1951 expected_dof, // Determined DOF (1 or 3)
1952 eulerFieldAverageVec); // The output vector
1953 CHKERRQ(ierr); // Handle potential errors from the internal function
1954
1955 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully scattered field '%s'.\n", particleFieldName);
1956
1958
1959 PetscFunctionReturn(0);
1960}
1961
1962#undef __FUNCT__
1963#define __FUNCT__ "ScatterAllParticleFieldsToEulerFields"
1964/* Implementation for ScatterAllParticleFieldsToEulerFields declared in include/interpolation.h. */
1966{
1967 PetscErrorCode ierr;
1968 PetscFunctionBeginUser;
1970
1971 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting scattering of specified particle fields to Eulerian grids.\n");
1972
1973 // --- Pre-computation Check: Ensure Particle Counts are Ready ---
1974 if (!user->ParticleCount) {
1975 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "UserCtx->ParticleCount is NULL. Compute counts before calling ScatterAllParticleFieldsToEulerFields.");
1976 }
1977
1978 // --- Scatter Particle Field "Psi" -> Eulerian Field user->Psi (on da) ---
1979 // Check if the target Eulerian vector 'user->Psi' exists.
1980 if (user->Psi) {
1981
1982 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Psi' to user->Psi.\n");
1983 // Zero the target vector before accumulating the new average for this step/call.
1984
1985 // Debug Verification ------------------------------------------------
1986 Vec swarm_Psi;
1987 PetscReal Avg_Psi,Avg_swarm_Psi;
1988
1989 ierr = VecMean(user->Psi,&Avg_Psi);
1990 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Scalar(Psi) before scatter: %.4f.\n",Avg_Psi);
1991
1992 ierr = DMSwarmCreateGlobalVectorFromField(user->swarm,"Psi",&swarm_Psi);
1993 ierr = VecMean(swarm_Psi,&Avg_swarm_Psi);
1994
1995 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Particle Scalar(Psi): %.4f.\n",Avg_swarm_Psi);
1996
1997 ierr = DMSwarmDestroyGlobalVectorFromField(user->swarm,"Psi",&swarm_Psi);
1998 // Debug----------------------------------------------------------------
1999
2000 //ierr = VecSet(user->P, 0.0); CHKERRQ(ierr);
2001 // Call the unified scatter function. It will handle DM determination and validation.
2002 // It will also error out if the *particle* field "Psi" doesn't exist in the swarm.
2003 ierr = ScatterParticleFieldToEulerField(user, "Psi", user->Psi); CHKERRQ(ierr);
2004 ierr = VecMean(user->Psi,&Avg_Psi);
2005
2006 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Scalar(Psi) after scatter: %.4f.\n",Avg_Psi);
2007 } else {
2008 // Only log a warning if the target Eulerian field is missing in the context.
2009 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Psi': UserCtx->Psi is NULL.\n");
2010 }
2011
2012 // --- (Commented Out) Scatter Other Fields ---
2013 // To enable scattering for other fields, uncomment the relevant block
2014 // AND ensure:
2015 // 1. The corresponding particle field (e.g., "Nvert", "Ucat") exists in user->swarm.
2016 // 2. The corresponding target Eulerian Vec (e.g., user->Nvert, user->Ucat) exists in user->ctx.
2017 // 3. The target Eulerian Vec is associated with the correct DM (da for Nvert, fda for Ucat).
2018
2019 /*
2020 // Scatter Particle Field "Nvert" -> Eulerian Field user->Nvert (on da)
2021 if (user->Nvert) {
2022 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Nvert' to user->Nvert.\n");
2023 ierr = VecSet(user->Nvert, 0.0); CHKERRQ(ierr);
2024 ierr = ScatterParticleFieldToEulerField(user, "Nvert", user->Nvert); CHKERRQ(ierr);
2025 } else {
2026 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Nvert': UserCtx->Nvert is NULL.\n");
2027 }
2028 */
2029
2030 /*
2031 // Scatter Particle Field "Ucat" -> Eulerian Field user->Ucat (on fda)
2032 if (user->Ucat) {
2033 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Ucat' to user->Ucat.\n");
2034 ierr = VecSet(user->Ucat, 0.0); CHKERRQ(ierr);
2035 ierr = ScatterParticleFieldToEulerField(user, "Ucat", user->Ucat); CHKERRQ(ierr);
2036 } else {
2037 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Ucat': UserCtx->Ucat is NULL.\n");
2038 }
2039 */
2040
2041 /*
2042 // Scatter Particle Field "Ucont" -> Eulerian Field user->Ucont (on fda)
2043 if (user->Ucont) {
2044 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Ucont' to user->Ucont.\n");
2045 ierr = VecSet(user->Ucont, 0.0); CHKERRQ(ierr);
2046 ierr = ScatterParticleFieldToEulerField(user, "Ucont", user->Ucont); CHKERRQ(ierr);
2047 } else {
2048 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Ucont': UserCtx->Ucont is NULL.\n");
2049 }
2050 */
2051
2052 // Add more fields as needed following the pattern above...
2053
2054 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished scattering specified particle fields.\n");
2056 PetscFunctionReturn(0);
2057}
2058
2059/** @} */ // End of scatter_module group
2060
2061
2062#undef __FUNCT__
2063#define __FUNCT__ "InterpolateCornerToFaceCenter_Scalar"
2064
2065/**
2066 * @brief Internal helper implementation: `InterpolateCornerToFaceCenter_Scalar()`.
2067 * @details Local to this translation unit.
2068 */
2070 PetscReal ***corner_arr,
2071 PetscReal ***faceX_arr,
2072 PetscReal ***faceY_arr,
2073 PetscReal ***faceZ_arr,
2074 UserCtx *user)
2075{
2076 PetscErrorCode ierr;
2077 DMDALocalInfo info;
2078
2079 PetscFunctionBeginUser;
2080
2082
2083 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2084
2085 // Determine owned-cell ranges based on corner-node ownership
2086 PetscInt xs, xm, ys, ym, zs, zm;
2087 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2088 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2089 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2090
2091 // Global exclusive end indices for cells
2092 PetscInt xe = xs + xm;
2093 PetscInt ye = ys + ym;
2094 PetscInt ze = zs + zm;
2095
2096 // --- X‐faces: loops k=zs..ze-1, j=ys..ye-1, i=xs..xe (xm+1 faces per row) ---
2097 for (PetscInt k = zs; k < ze; ++k) {
2098 PetscInt k_loc = k - zs;
2099 for (PetscInt j = ys; j < ye; ++j) {
2100 PetscInt j_loc = j - ys;
2101 for (PetscInt i = xs; i <= xe; ++i) {
2102 PetscInt i_loc = i - xs; // 0..xm
2103 // Average the four corners of the Y-Z face at X = i
2104 PetscReal sum = corner_arr[k ][j ][i]
2105 + corner_arr[k+1][j ][i]
2106 + corner_arr[k ][j+1][i]
2107 + corner_arr[k+1][j+1][i];
2108 faceX_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2109 }
2110 }
2111 }
2112
2113 // --- Y‐faces: loops k=zs..ze-1, j=ys..ye (ym+1 faces), i=xs..xe-1 ---
2114 for (PetscInt k = zs; k < ze; ++k) {
2115 PetscInt k_loc = k - zs;
2116 for (PetscInt j = ys; j <= ye; ++j) {
2117 PetscInt j_loc = j - ys; // 0..ym
2118 for (PetscInt i = xs; i < xe; ++i) {
2119 PetscInt i_loc = i - xs;
2120 // Average the four corners of the X-Z face at Y = j
2121 PetscReal sum = corner_arr[k ][j][i ]
2122 + corner_arr[k+1][j][i ]
2123 + corner_arr[k ][j][i+1]
2124 + corner_arr[k+1][j][i+1];
2125 faceY_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2126 }
2127 }
2128 }
2129
2130 // --- Z‐faces: loops k=zs..ze (zm+1), j=ys..ye-1, i=xs..xe-1 ---
2131 for (PetscInt k = zs; k <= ze; ++k) {
2132 PetscInt k_loc = k - zs;
2133 for (PetscInt j = ys; j < ye; ++j) {
2134 PetscInt j_loc = j - ys;
2135 for (PetscInt i = xs; i < xe; ++i) {
2136 PetscInt i_loc = i - xs;
2137 // Average the four corners of the X-Y face at Z = k
2138 PetscReal sum = corner_arr[k][j ][i ]
2139 + corner_arr[k][j ][i+1]
2140 + corner_arr[k][j+1][i ]
2141 + corner_arr[k][j+1][i+1];
2142 faceZ_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2143 }
2144 }
2145 }
2146
2148
2149 PetscFunctionReturn(0);
2150}
2151
2152#undef __FUNCT__
2153#define __FUNCT__ "InterpolateCornerToFaceCenter_Vector"
2154
2155/**
2156 * @brief Internal helper implementation: `InterpolateCornerToFaceCenter_Vector()`.
2157 * @details Local to this translation unit.
2158 */
2160 Cmpnts ***corner_arr,
2161 Cmpnts ***faceX_arr,
2162 Cmpnts ***faceY_arr,
2163 Cmpnts ***faceZ_arr,
2164 UserCtx *user)
2165{
2166 PetscErrorCode ierr;
2167 DMDALocalInfo info;
2168 PetscMPIInt rank;
2169
2170 PetscFunctionBeginUser;
2171
2173
2174 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2176 "Rank %d starting InterpolateFieldFromCornerToFaceCenter_Vector.\n", rank);
2177
2178 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2179
2180 PetscInt xs, xm, ys, ym, zs, zm;
2181 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2182 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2183 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2184
2185 PetscInt xe = xs + xm;
2186 PetscInt ye = ys + ym;
2187 PetscInt ze = zs + zm;
2188
2189 // X-faces
2190 for (PetscInt k = zs; k < ze; ++k) {
2191 PetscInt k_loc = k - zs;
2192 for (PetscInt j = ys; j < ye; ++j) {
2193 PetscInt j_loc = j - ys;
2194 for (PetscInt i = xs; i <= xe; ++i) {
2195 PetscInt i_loc = i - xs;
2196 Cmpnts sum = {0,0,0};
2197 sum.x = corner_arr[k ][j ][i].x + corner_arr[k+1][j ][i].x
2198 + corner_arr[k ][j+1][i].x + corner_arr[k+1][j+1][i].x;
2199 sum.y = corner_arr[k ][j ][i].y + corner_arr[k+1][j ][i].y
2200 + corner_arr[k ][j+1][i].y + corner_arr[k+1][j+1][i].y;
2201 sum.z = corner_arr[k ][j ][i].z + corner_arr[k+1][j ][i].z
2202 + corner_arr[k ][j+1][i].z + corner_arr[k+1][j+1][i].z;
2203 faceX_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2204 faceX_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2205 faceX_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2206 }
2207 }
2208 }
2209
2211 "Rank %d x-face Interpolation complete.\n", rank);
2212
2213 // Y-faces
2214 for (PetscInt k = zs; k < ze; ++k) {
2215 PetscInt k_loc = k - zs;
2216 for (PetscInt j = ys; j <= ye; ++j) {
2217 PetscInt j_loc = j - ys;
2218 for (PetscInt i = xs; i < xe; ++i) {
2219 PetscInt i_loc = i - xs;
2220 Cmpnts sum = {0,0,0};
2221 sum.x = corner_arr[k ][j][i ].x + corner_arr[k+1][j][i ].x
2222 + corner_arr[k ][j][i+1].x + corner_arr[k+1][j][i+1].x;
2223 sum.y = corner_arr[k ][j][i ].y + corner_arr[k+1][j][i ].y
2224 + corner_arr[k ][j][i+1].y + corner_arr[k+1][j][i+1].y;
2225 sum.z = corner_arr[k ][j][i ].z + corner_arr[k+1][j][i ].z
2226 + corner_arr[k ][j][i+1].z + corner_arr[k+1][j][i+1].z;
2227 faceY_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2228 faceY_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2229 faceY_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2230 }
2231 }
2232 }
2233
2235 "Rank %d y-face Interpolation complete.\n", rank);
2236
2237 // Z-faces
2238 for (PetscInt k = zs; k <= ze; ++k) {
2239 PetscInt k_loc = k - zs;
2240 for (PetscInt j = ys; j < ye; ++j) {
2241 PetscInt j_loc = j - ys;
2242 for (PetscInt i = xs; i < xe; ++i) {
2243 PetscInt i_loc = i - xs;
2244 Cmpnts sum = {0,0,0};
2245 sum.x = corner_arr[k][j ][i ].x + corner_arr[k][j ][i+1].x
2246 + corner_arr[k][j+1][i ].x + corner_arr[k][j+1][i+1].x;
2247 sum.y = corner_arr[k][j ][i ].y + corner_arr[k][j ][i+1].y
2248 + corner_arr[k][j+1][i ].y + corner_arr[k][j+1][i+1].y;
2249 sum.z = corner_arr[k][j ][i ].z + corner_arr[k][j ][i+1].z
2250 + corner_arr[k][j+1][i ].z + corner_arr[k][j+1][i+1].z;
2251 faceZ_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2252 faceZ_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2253 faceZ_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2254 }
2255 }
2256 }
2257
2259 "Rank %d z-face Interpolation complete.\n", rank);
2260
2262 PetscFunctionReturn(0);
2263}
PetscErrorCode CalculateParticleCountPerCell(UserCtx *user)
Counts particles in each cell of the DMDA 'da' and stores the result in user->ParticleCount.
PetscErrorCode UnpackSwarmFields(PetscInt i, const PetscInt64 *PIDs, const PetscReal *weights, const PetscReal *positions, const PetscInt *cellIndices, PetscReal *velocities, PetscInt *LocStatus, PetscReal *diffusivity, Cmpnts *diffusivitygradient, PetscReal *psi, Particle *particle)
Initializes a Particle struct with data from DMSwarm fields.
PetscErrorCode GetScatterTargetInfo(UserCtx *user, const char *particleFieldName, DM *targetDM, PetscInt *expected_dof)
Determines the target Eulerian DM and expected DOF for scattering a given particle field.
PetscErrorCode NormalizeGridVectorByCount(DM countDM, Vec countVec, DM dataDM, Vec sumVec, Vec avgVec)
Normalizes a grid vector of sums by a grid vector of counts to produce an average.
PetscErrorCode GetPersistentLocalVector(UserCtx *user, const char *fieldName, Vec *localVec)
Internal helper implementation: GetPersistentLocalVector().
PetscErrorCode AccumulateParticleField(DM swarm, const char *particleFieldName, DM gridSumDM, Vec gridSumVec)
Accumulates a particle field (scalar or vector) into a target grid sum vector.
PetscErrorCode ScatterAllParticleFieldsToEulerFields(UserCtx *user)
Scatters a predefined set of particle fields to their corresponding Eulerian fields.
PetscErrorCode ScatterParticleFieldToEulerField(UserCtx *user, const char *particleFieldName, Vec eulerFieldAverageVec)
Scatters a particle field (scalar or vector) to the corresponding Eulerian field average.
static PetscErrorCode ScatterParticleFieldToEulerField_Internal(UserCtx *user, const char *particleFieldName, DM targetDM, PetscInt expected_dof, Vec eulerFieldAverageVec)
Internal helper implementation: ScatterParticleFieldToEulerField_Internal().
PetscErrorCode InterpolateFieldFromCornerToCenter_Vector(Cmpnts ***field_arr, Cmpnts ***centfield_arr, UserCtx *user)
Internal helper implementation: InterpolateFieldFromCornerToCenter_Vector().
PetscErrorCode InterpolateCornerToFaceCenter_Vector(Cmpnts ***corner_arr, Cmpnts ***faceX_arr, Cmpnts ***faceY_arr, Cmpnts ***faceZ_arr, UserCtx *user)
Internal helper implementation: InterpolateCornerToFaceCenter_Vector().
PetscErrorCode InterpolateFieldFromCornerToCenter_Scalar(PetscReal ***field_arr, PetscReal ***centfield_arr, UserCtx *user)
Internal helper implementation: InterpolateFieldFromCornerToCenter_Scalar().
PetscErrorCode InterpolateAllFieldsToSwarm(UserCtx *user)
Internal helper implementation: InterpolateAllFieldsToSwarm().
static PetscErrorCode InterpolateEulerFieldToSwarmForParticle(const char *fieldName, void *fieldPtr, Particle *particle, void *swarmOut, PetscInt p, PetscInt blockSize)
Internal helper implementation: InterpolateEulerFieldToSwarmForParticle().
PetscErrorCode TrilinearInterpolation_Vector(const char *fieldName, Cmpnts ***fieldVec, PetscInt i, PetscInt j, PetscInt k, PetscReal a1, PetscReal a2, PetscReal a3, Cmpnts *vec)
Internal helper implementation: TrilinearInterpolation_Vector().
PetscErrorCode TrilinearInterpolation_Scalar(const char *fieldName, PetscReal ***fieldScal, PetscInt i, PetscInt j, PetscInt k, PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *val)
Internal helper implementation: TrilinearInterpolation_Scalar().
PetscErrorCode PieceWiseLinearInterpolation_Scalar(const char *fieldName, PetscReal ***fieldScal, PetscInt iCell, PetscInt jCell, PetscInt kCell, PetscReal *val)
Internal helper implementation: PieceWiseLinearInterpolation_Scalar().
static void ComputeTrilinearWeightsUnclamped(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w)
Unclamped trilinear weights for boundary extrapolation.
#define ERROR_MSG_BUFFER_SIZE
PetscErrorCode TestCornerToCenterInterpolation(UserCtx *user)
Internal helper implementation: TestCornerToCenterInterpolation().
static PetscErrorCode InterpolateEulerFieldFromCornerToSwarm(UserCtx *user, Vec fieldLocal_cellCentered, const char *fieldName, const char *swarmOutFieldName)
Corner-averaged interpolation path (legacy).
PetscErrorCode InterpolateFieldFromCenterToCorner_Vector(Cmpnts ***centfield_arr, Cmpnts ***corner_arr, UserCtx *user)
Internal helper implementation: InterpolateFieldFromCenterToCorner_Vector().
static void ComputeTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w)
Internal helper implementation: ComputeTrilinearWeights().
static PetscErrorCode InterpolateEulerFieldFromCenterToSwarm(UserCtx *user, Vec fieldLocal_cellCentered, const char *fieldName, const char *swarmOutFieldName)
Direct cell-center trilinear interpolation (second-order on curvilinear grids).
PetscErrorCode InterpolateCornerToFaceCenter_Scalar(PetscReal ***corner_arr, PetscReal ***faceX_arr, PetscReal ***faceY_arr, PetscReal ***faceZ_arr, UserCtx *user)
Internal helper implementation: InterpolateCornerToFaceCenter_Scalar().
PetscErrorCode InterpolateEulerFieldToSwarm(UserCtx *user, Vec fieldLocal_cellCentered, const char *fieldName, const char *swarmOutFieldName)
Dispatches grid-to-particle interpolation to the method selected in the control file.
PetscErrorCode PieceWiseLinearInterpolation_Vector(const char *fieldName, Cmpnts ***fieldVec, PetscInt iCell, PetscInt jCell, PetscInt kCell, Cmpnts *vec)
Internal helper implementation: PieceWiseLinearInterpolation_Vector().
PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar(PetscReal ***centfield_arr, PetscReal ***corner_arr, UserCtx *user)
Internal helper implementation: InterpolateFieldFromCenterToCorner_Scalar().
#define TrilinearInterpolation(fieldName, fieldPtr, i, j, k, a1, a2, a3, outPtr)
Macro that calls either the scalar or vector trilinear interpolation function based on the type of th...
#define InterpolateFieldFromCenterToCorner(blockSize, centfield_ptr, corner_ptr, user_ctx)
Macro to dispatch to the correct scalar or vector center-to-corner function based on a runtime block ...
#define InterpolateFieldFromCornerToCenter(field, centfield, user)
Generic macro to call the appropriate interpolation function based on the field type.
PetscBool is_function_allowed(const char *functionName)
Checks if a given function is in the allow-list.
Definition logging.c:183
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Synchronized logging macro that checks both the log level and whether the calling function is in the ...
Definition logging.h:252
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:44
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:45
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:199
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:779
PetscErrorCode LOG_FIELD_ANATOMY(UserCtx *user, const char *field_name, const char *stage_name)
Logs the anatomy of a specified field at key boundary locations, respecting the solver's specific gri...
Definition logging.c:1515
LogLevel get_log_level()
Retrieves the current logging level from the environment variable LOG_LEVEL.
Definition logging.c:84
#define LOG_LOOP_ALLOW_EXACT(scope, level, var, val, fmt,...)
Logs a custom message if a variable equals a specific value.
Definition logging.h:334
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:32
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:30
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:31
@ LOG_VERBOSE
Extremely detailed logs, typically for development use only.
Definition logging.h:33
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:770
PetscErrorCode GetOwnedCellRange(const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global_out, PetscInt *xm_cell_local_out)
Determines the global starting index and number of CELLS owned by the current processor in a specifie...
Definition setup.c:1883
Vec lDiffusivityGradient
Definition variables.h:841
Vec lCent
Definition variables.h:858
@ PERIODIC
Definition variables.h:260
BoundaryFaceConfig boundary_faces[6]
Definition variables.h:829
PetscInt cell[3]
Definition variables.h:167
Vec lNvert
Definition variables.h:837
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:814
Vec lCellFieldAtCorner
Definition variables.h:846
Vec lPsi
Definition variables.h:883
PetscScalar x
Definition variables.h:101
Cmpnts loc
Definition variables.h:168
InterpolationMethod interpolationMethod
Definition variables.h:744
PetscScalar z
Definition variables.h:101
@ INTERP_TRILINEAR
Definition variables.h:522
Vec ParticleCount
Definition variables.h:882
Vec CellFieldAtCorner
Definition variables.h:846
Vec lUcont
Definition variables.h:837
Vec lUcat
Definition variables.h:837
PetscScalar y
Definition variables.h:101
Cmpnts weights
Definition variables.h:170
@ TOP
Definition variables.h:145
@ FRONT
Definition variables.h:145
@ BOTTOM
Definition variables.h:145
@ BACK
Definition variables.h:145
@ LEFT
Definition variables.h:145
@ NUM_FACES
Definition variables.h:145
@ RIGHT
Definition variables.h:145
Vec Cent
Definition variables.h:858
Vec lDiffusivity
Definition variables.h:840
BCType mathematical_type
Definition variables.h:336
PetscInt64 PID
Definition variables.h:166
Cmpnts vertices[8]
Coordinates of the eight vertices of the cell.
Definition variables.h:161
Vec Psi
Definition variables.h:883
@ BC_FACE_NEG_X
Definition variables.h:245
@ BC_FACE_NEG_Z
Definition variables.h:247
@ BC_FACE_NEG_Y
Definition variables.h:246
Defines the vertices of a single hexahedral grid cell.
Definition variables.h:160
A 3D point or vector with PetscScalar components.
Definition variables.h:100
Defines a particle's core properties for Lagrangian tracking.
Definition variables.h:165
User-defined context containing data specific to a single computational grid level.
Definition variables.h:811
PetscErrorCode CalculateDistancesToCellFaces(const Cmpnts p, const Cell *cell, PetscReal *d, const PetscReal threshold)
Computes the signed distances from a point to each face of a cubic cell.