PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
interpolation.c
1/**
2 * @file interpolation.c
3 * @brief DMSwarm interpolation routines using the fdf-curvIB method.
4 *
5 * Provides routines for interpolation between corner-based and center-based
6 * fields in the cell-centered DM (fda), plus partial usage examples for
7 * DMSwarm-based field sampling.
8 */
9
10#include "interpolation.h"
11
12// Number of weights used in certain trilinear interpolation examples
13#define NUM_WEIGHTS 8
14// Define a buffer size for error messages if not already available
15#ifndef ERROR_MSG_BUFFER_SIZE
16#define ERROR_MSG_BUFFER_SIZE 256 // Or use PETSC_MAX_PATH_LEN if appropriate
17#endif
18
19
20/**
21 * @brief Interpolates a vector field from corner nodes to cell centers.
22 *
23 * This function estimates the value of a vector field at the center of each
24 * computational cell (whose origin node is owned by this rank) by averaging
25 * the values from the 8 corner nodes defining that cell.
26 *
27 * @param[in] field_arr Input: 3D array view (ghosted) of vector data at nodes,
28 * obtained from a Vec associated with user->fda. Accessed via GLOBAL node indices.
29 * Ghost values must be up-to-date.
30 * @param[out] centfield_arr Output: 3D local array (0-indexed) where interpolated cell center values
31 * are stored. Sized [zm_cell_local][ym_cell_local][xm_cell_local]
32 * by the caller. centfield_arr[k_loc][j_loc][i_loc] stores the value for
33 * the cell corresponding to the (i_loc, j_loc, k_loc)-th owned cell origin.
34 * @param[in] user User context containing DMDA information (fda, IM, JM, KM).
35 *
36 * @return PetscErrorCode 0 on success.
37 */
38PetscErrorCode InterpolateFieldFromCornerToCenter_Vector(
39 Cmpnts ***field_arr, /* Input: Ghosted local array view from user->fda (global node indices) */
40 Cmpnts ***centfield_arr, /* Output: Local 0-indexed array */
41 UserCtx *user)
42{
43 PetscErrorCode ierr;
44 PetscMPIInt rank;
45 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
46 LOG_ALLOW(GLOBAL, LOG_INFO,
47 "Rank %d starting InterpolateFieldFromCornerToCenter_Vector.\n", rank);
48
49 // Get local info based on the NODE-based DMDA (user->fda)
50 // This DM defines the ownership of the nodes from which we interpolate,
51 // and thus defines the cells whose centers we will compute.
52 DMDALocalInfo info_nodes_fda;
53 ierr = DMDAGetLocalInfo(user->fda, &info_nodes_fda); CHKERRQ(ierr);
54
55 // Determine owned CELL ranges based on owning their origin NODES (from user->fda)
56 PetscInt xs_cell_global_i, xm_cell_local_i; // Global start cell index, local count of owned cells
57 PetscInt ys_cell_global_j, ym_cell_local_j;
58 PetscInt zs_cell_global_k, zm_cell_local_k;
59
60
61 ierr = GetOwnedCellRange(&info_nodes_fda, 0, &xs_cell_global_i, &xm_cell_local_i); CHKERRQ(ierr);
62 ierr = GetOwnedCellRange(&info_nodes_fda, 1, &ys_cell_global_j, &ym_cell_local_j); CHKERRQ(ierr);
63 ierr = GetOwnedCellRange(&info_nodes_fda, 2, &zs_cell_global_k, &zm_cell_local_k); CHKERRQ(ierr);
64
65 // Exclusive end global cell indices
66 PetscInt xe_cell_global_i_excl = xs_cell_global_i + xm_cell_local_i;
67 PetscInt ye_cell_global_j_excl = ys_cell_global_j + ym_cell_local_j;
68 PetscInt ze_cell_global_k_excl = zs_cell_global_k + zm_cell_local_k;
69
70 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d : Processing Owned Cells (global indices) k=%d..%d (count %d), j=%d..%d (count %d), i=%d..%d (count %d)\n",
71 rank,
72 zs_cell_global_k, ze_cell_global_k_excl -1, zm_cell_local_k,
73 ys_cell_global_j, ye_cell_global_j_excl -1, ym_cell_local_j,
74 xs_cell_global_i, xe_cell_global_i_excl -1, xm_cell_local_i);
75
76 // Loop over the GLOBAL indices of the CELLS whose origin nodes are owned by this processor
77 // (according to user->fda partitioning)
78 for (PetscInt k_glob_cell = zs_cell_global_k; k_glob_cell < ze_cell_global_k_excl; k_glob_cell++) {
79 PetscInt k_local = k_glob_cell - zs_cell_global_k; // 0-based local index for centfield_arr
80
81 for (PetscInt j_glob_cell = ys_cell_global_j; j_glob_cell < ye_cell_global_j_excl; j_glob_cell++) {
82 PetscInt j_local = j_glob_cell - ys_cell_global_j; // 0-based local index
83
84 for (PetscInt i_glob_cell = xs_cell_global_i; i_glob_cell < xe_cell_global_i_excl; i_glob_cell++) {
85 PetscInt i_local = i_glob_cell - xs_cell_global_i; // 0-based local index
86
87 Cmpnts sum = {0.0, 0.0, 0.0};
88 // Count is always 8 for a hexahedral cell
89 // PetscInt count = 0; // Not strictly needed if we assume 8 corners
90
91 // Loop over the 8 corner NODES that define cell C(i_glob_cell, j_glob_cell, k_glob_cell)
92 for (PetscInt dk = 0; dk < 2; dk++) {
93 for (PetscInt dj = 0; dj < 2; dj++) {
94 for (PetscInt di = 0; di < 2; di++) {
95 PetscInt ni_glob = i_glob_cell + di; // Global NODE index i or i+1
96 PetscInt nj_glob = j_glob_cell + dj; // Global NODE index j or j+1
97 PetscInt nk_glob = k_glob_cell + dk; // Global NODE index k or k+1
98
99 // Access the input 'field_arr' (node values) using GLOBAL node indices.
100 // Ghosts in field_arr must be valid.
101 // DMDAVecGetArrayRead handles mapping global indices to local memory.
102 sum.x += field_arr[nk_glob][nj_glob][ni_glob].x;
103 sum.y += field_arr[nk_glob][nj_glob][ni_glob].y;
104 sum.z += field_arr[nk_glob][nj_glob][ni_glob].z;
105 // count++;
106 }
107 }
108 }
109
110 // Calculate average value representing the cell center
111 Cmpnts center_value;
112 center_value.x = sum.x / 8.0;
113 center_value.y = sum.y / 8.0;
114 center_value.z = sum.z / 8.0;
115
116 // Store result into the local, 0-indexed output array 'centfield_arr'
117 // Defensive check (should not be strictly necessary if caller allocated centfield_arr
118 // with dimensions xm_cell_local_i, ym_cell_local_j, zm_cell_local_k)
119 if (i_local < 0 || i_local >= xm_cell_local_i ||
120 j_local < 0 || j_local >= ym_cell_local_j ||
121 k_local < 0 || k_local >= zm_cell_local_k) {
122 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Local index out of bounds for centfield_arr write!");
123 }
124 centfield_arr[k_local][j_local][i_local] = center_value;
125
126 } // End loop i_glob_cell
127 } // End loop j_glob_cell
128 } // End loop k_glob_cell
129
130 LOG_ALLOW(GLOBAL, LOG_INFO,
131 "Rank %d completed InterpolateFieldFromCornerToCenter_Vector.\n", rank);
132 return 0;
133}
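/*
 * Editor's usage sketch (illustrative; not part of interpolation.c). It shows one
 * plausible calling sequence for InterpolateFieldFromCornerToCenter_Vector: obtain a
 * ghosted node array, allocate the caller-owned 0-indexed cell buffer, interpolate,
 * restore. The wrapper name, `nodeVecLocal`, and the hand-rolled allocation are
 * assumptions; the project may already provide an allocation helper.
 */
static PetscErrorCode Example_CornerToCenter_Vector(UserCtx *user, Vec nodeVecLocal)
{
  PetscErrorCode ierr;
  DMDALocalInfo  info;
  PetscInt       xs, xm, ys, ym, zs, zm;   /* owned-cell starts (unused here) and counts */
  Cmpnts      ***node_arr, ***cent_arr;

  ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
  ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
  ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
  ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);

  /* Caller-owned, 0-indexed buffer sized [zm][ym][xm] for the cell-center values. */
  ierr = PetscCalloc1(zm, &cent_arr); CHKERRQ(ierr);
  for (PetscInt k = 0; k < zm; k++) {
    ierr = PetscCalloc1(ym, &cent_arr[k]); CHKERRQ(ierr);
    for (PetscInt j = 0; j < ym; j++) { ierr = PetscCalloc1(xm, &cent_arr[k][j]); CHKERRQ(ierr); }
  }

  /* Ghosted, read-only view of the node-based field (global node indices). */
  ierr = DMDAVecGetArrayRead(user->fda, nodeVecLocal, &node_arr); CHKERRQ(ierr);
  ierr = InterpolateFieldFromCornerToCenter_Vector(node_arr, cent_arr, user); CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(user->fda, nodeVecLocal, &node_arr); CHKERRQ(ierr);

  /* ... consume cent_arr[k][j][i] for the owned cells, then free in reverse order ... */
  for (PetscInt k = 0; k < zm; k++) {
    for (PetscInt j = 0; j < ym; j++) { ierr = PetscFree(cent_arr[k][j]); CHKERRQ(ierr); }
    ierr = PetscFree(cent_arr[k]); CHKERRQ(ierr);
  }
  ierr = PetscFree(cent_arr); CHKERRQ(ierr);
  return 0;
}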
134
135// -----------------------------------------------------------------------------
136// Interpolation: Corner -> Center (scalar)
137// -----------------------------------------------------------------------------
138
139/**
140 * @brief Interpolates a scalar field from corner nodes to cell centers.
141 *
142 * This function estimates the value of a scalar field at the center of each
143 * computational cell (whose origin node is owned by this rank) by averaging
144 * the values from the 8 corner nodes defining that cell.
145 *
146 * @param[in] field_arr Input: 3D array view (ghosted) of scalar data at nodes,
147 * obtained from a Vec associated with user->da. Accessed via GLOBAL node indices.
148 * Ghost values must be up-to-date.
149 * @param[out] centfield_arr Output: 3D local array (0-indexed) where interpolated cell center values
150 * are stored. Sized [zm_cell_local][ym_cell_local][xm_cell_local]
151 * by the caller. centfield_arr[k_loc][j_loc][i_loc] stores the value for
152 * the cell corresponding to the (i_loc, j_loc, k_loc)-th owned cell origin.
153 * @param[in] user User context containing DMDA information (da, IM, JM, KM).
154 *
155 * @return PetscErrorCode 0 on success.
156 */
157PetscErrorCode InterpolateFieldFromCornerToCenter_Scalar(
158 PetscReal ***field_arr, /* Input: Ghosted local array view from user->da (global node indices) */
159 PetscReal ***centfield_arr, /* Output: Local 0-indexed array for cell centers */
160 UserCtx *user)
161{
162 PetscErrorCode ierr;
163 PetscMPIInt rank;
164 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
165 LOG_ALLOW(GLOBAL, LOG_INFO,
166 "Rank %d starting InterpolateFieldFromCornerToCenter_Scalar.\n", rank);
167
168 // Get local info based on user->da (which is node-based but defines cell origins)
169 DMDALocalInfo info_da_nodes;
170 ierr = DMDAGetLocalInfo(user->da, &info_da_nodes); CHKERRQ(ierr);
171
172 // Determine owned CELL ranges based on owning their origin NODES (from user->da)
173 PetscInt xs_cell_global_i, xm_cell_local_i; // Global start cell index, local count of owned cells
174 PetscInt ys_cell_global_j, ym_cell_local_j;
175 PetscInt zs_cell_global_k, zm_cell_local_k;
176
177 ierr = GetOwnedCellRange(&info_da_nodes, 0, &xs_cell_global_i, &xm_cell_local_i); CHKERRQ(ierr);
178 ierr = GetOwnedCellRange(&info_da_nodes, 1, &ys_cell_global_j, &ym_cell_local_j); CHKERRQ(ierr);
179 ierr = GetOwnedCellRange(&info_da_nodes, 2, &zs_cell_global_k, &zm_cell_local_k); CHKERRQ(ierr);
180
181 // Exclusive end global cell indices
182 PetscInt xe_cell_global_i_excl = xs_cell_global_i + xm_cell_local_i;
183 PetscInt ye_cell_global_j_excl = ys_cell_global_j + ym_cell_local_j;
184 PetscInt ze_cell_global_k_excl = zs_cell_global_k + zm_cell_local_k;
185
186 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d (IFCTCS): Processing Owned Cells (global indices) k=%d..%d (count %d), j=%d..%d (count %d), i=%d..%d (count %d)\n",
187 rank,
188 zs_cell_global_k, ze_cell_global_k_excl -1, zm_cell_local_k,
189 ys_cell_global_j, ye_cell_global_j_excl -1, ym_cell_local_j,
190 xs_cell_global_i, xe_cell_global_i_excl -1, xm_cell_local_i);
191
192 // Loop over the GLOBAL indices of the CELLS whose origin nodes are owned by this processor
193 // (according to user->da partitioning)
194 for (PetscInt k_glob_cell = zs_cell_global_k; k_glob_cell < ze_cell_global_k_excl; k_glob_cell++) {
195 PetscInt k_local = k_glob_cell - zs_cell_global_k; // 0-based local index for centfield_arr
196
197 for (PetscInt j_glob_cell = ys_cell_global_j; j_glob_cell < ye_cell_global_j_excl; j_glob_cell++) {
198 PetscInt j_local = j_glob_cell - ys_cell_global_j; // 0-based local index
199
200 for (PetscInt i_glob_cell = xs_cell_global_i; i_glob_cell < xe_cell_global_i_excl; i_glob_cell++) {
201 PetscInt i_local = i_glob_cell - xs_cell_global_i; // 0-based local index
202
203 PetscReal sum = 0.0;
204 // Count is always 8 for a hexahedral cell if all nodes are accessible
205 // PetscInt count = 0;
206
207 // Loop over the 8 NODE indices that define cell C(i_glob_cell, j_glob_cell, k_glob_cell)
208 for (PetscInt dk = 0; dk < 2; dk++) {
209 for (PetscInt dj = 0; dj < 2; dj++) {
210 for (PetscInt di = 0; di < 2; di++) {
211 PetscInt ni_glob = i_glob_cell + di; // Global NODE index for corner
212 PetscInt nj_glob = j_glob_cell + dj; // Global NODE index for corner
213 PetscInt nk_glob = k_glob_cell + dk; // Global NODE index for corner
214
215 // Access the input 'field_arr' (node values) using GLOBAL node indices.
216 sum += field_arr[nk_glob][nj_glob][ni_glob];
217 // count++;
218 }
219 }
220 }
221
222 PetscReal center_value = sum / 8.0;
223
224 // Store the calculated cell center value into the output array 'centfield_arr'
225 // using the LOCAL 0-based cell index.
226 // Defensive check (should not be strictly necessary if caller allocated centfield_arr correctly)
227 if (i_local < 0 || i_local >= xm_cell_local_i ||
228 j_local < 0 || j_local >= ym_cell_local_j ||
229 k_local < 0 || k_local >= zm_cell_local_k) {
230 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Local index out of bounds for centfield_arr write in Scalar version!");
231 }
232 centfield_arr[k_local][j_local][i_local] = center_value;
233
234 } // End loop i_glob_cell
235 } // End loop j_glob_cell
236 } // End loop k_glob_cell
237
238 LOG_ALLOW(GLOBAL, LOG_INFO,
239 "Rank %d completed InterpolateFieldFromCornerToCenter_Scalar.\n", rank);
240 return 0;
241}
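/*
 * Editor's sketch (illustrative; not part of interpolation.c). The scalar variant is
 * driven the same way as the vector one above, but with PetscReal data and arrays
 * taken from user->da. `pressureLocal` and `cent_arr` are hypothetical names; the
 * buffer must be caller-allocated with the owned-cell counts, as documented above.
 */
static PetscErrorCode Example_CornerToCenter_Scalar(UserCtx *user, Vec pressureLocal,
                                                    PetscReal ***cent_arr /* [zm][ym][xm], caller-allocated */)
{
  PetscErrorCode ierr;
  PetscReal   ***node_arr;

  ierr = DMDAVecGetArrayRead(user->da, pressureLocal, &node_arr); CHKERRQ(ierr);
  ierr = InterpolateFieldFromCornerToCenter_Scalar(node_arr, cent_arr, user); CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(user->da, pressureLocal, &node_arr); CHKERRQ(ierr);
  return 0;
}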
242
243// -----------------------------------------------------------------------------
244// Interpolation: Center -> Corner (scalar)
245// -----------------------------------------------------------------------------
246/**
247 * @brief Interpolates a scalar field from cell centers to corner nodes.
248 *
249 * This function estimates the value of a scalar field at each grid node by averaging
250 * the values from the cell centers of the cells surrounding that node (up to 8).
251 * It handles physical boundaries by averaging only the available adjacent cells.
252 *
253 * Assumes input `centfield_arr` is from a ghosted local vector associated with `user->da` (DOF=1, s=2)
254 * and output `field_arr` is from a ghosted local vector also associated with `user->da` (DOF=1, s=2).
255 * Input array uses GLOBAL cell indices, output array uses GLOBAL node indices.
256 *
257 * @param[in] centfield_arr Input: 3D array (ghosted) of scalar data at cell centers,
258 * accessed via GLOBAL cell indices (k=0..KM-1, j=0..JM-1, i=0..IM-1).
259 * @param[out] field_arr Output: 3D array (ghosted) where interpolated node values are stored,
260 * accessed via GLOBAL node indices (k=0..KM, j=0..JM, i=0..IM).
261 * @param[in] user User context containing DMDA information (da).
262 *
263 * @return PetscErrorCode 0 on success.
264 */
265PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar(
266 PetscReal ***centfield_arr, /* Input: Ghosted local array based on da (cell indices) */
267 PetscReal ***field_arr, /* Output: Ghosted local array based on da (node indices) */
268 UserCtx *user)
269{
270 PetscErrorCode ierr;
271 PetscMPIInt rank;
272 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
273 LOG_ALLOW(GLOBAL, LOG_INFO,
274 "Rank %d starting interpolation.\n", rank);
275
276 // Get local info based on the DMDA (da). This info primarily describes owned nodes.
277 DMDALocalInfo info;
278 ierr = DMDAGetLocalInfo(user->da, &info); CHKERRQ(ierr);
279
280 // Define the range of NODES owned by this processor using GLOBAL indices
281 PetscInt xs_node = info.xs;
282 PetscInt xm_node = info.xm;
283 PetscInt xe_node = xs_node + xm_node;
284 PetscInt ys_node = info.ys;
285 PetscInt ym_node = info.ym;
286 PetscInt ye_node = ys_node + ym_node;
287 PetscInt zs_node = info.zs;
288 PetscInt zm_node = info.zm;
289 PetscInt ze_node = zs_node + zm_node;
290
291 // Get Global dimensions (number of cells IM, JM, KM)
292 PetscInt IM = info.mx - 1;
293 PetscInt JM = info.my - 1;
294 PetscInt KM = info.mz - 1;
295
296
297 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Rank %d: Interpolating for owned NODES k=%d..%d, j=%d..%d, i=%d..%d\n",
298 rank, zs_node, ze_node, ys_node, ye_node, xs_node, xe_node);
299
300 // Loop over the GLOBAL indices of the NODES owned by this processor
301 for (PetscInt k = zs_node; k < ze_node; k++) { // Global NODE index k (0..KM)
302 for (PetscInt j = ys_node; j < ye_node; j++) { // Global NODE index j (0..JM)
303 for (PetscInt i = xs_node; i < xe_node; i++) { // Global NODE index i (0..IM)
304
305 PetscReal sum = 0.0;
306 PetscInt count = 0;
307
308 // Loop over the potential 8 CELL indices surrounding node (i,j,k)
309 // Cell indices are (i & i-1), (j & j-1), (k & k-1)
310 for (PetscInt dk = -1; dk <= 0; dk++) { // Relative cell k-index offset
311 for (PetscInt dj = -1; dj <= 0; dj++) { // Relative cell j-index offset
312 for (PetscInt di = -1; di <= 0; di++) { // Relative cell i-index offset
313
314 PetscInt ci = i + di; // Global CELL index of neighbor
315 PetscInt cj = j + dj; // Global CELL index of neighbor
316 PetscInt ck = k + dk; // Global CELL index of neighbor
317
318 // Check if this CELL index is within the valid global cell range (0..IM-1, etc.)
319 if (ci >= 0 && ci <= IM-1 &&
320 cj >= 0 && cj <= JM-1 &&
321 ck >= 0 && ck <= KM-1)
322 {
323 // Access the input 'centfield_arr' using GLOBAL cell indices.
324 // Relies on centfield_arr being from a ghosted local vector.
325 sum += centfield_arr[ck][cj][ci];
326 count++;
327 // Optional Debug Log
328 // LOG_LOOP_ALLOW(GLOBAL, LOG_DEBUG, i*j*k, 10000,
329 // " Rank %d | Node(i,j,k)=%d,%d,%d | Using Cell(ci,cj,ck)=%d,%d,%d | Value=%.3f | Count=%d\n",
330 // rank, i, j, k, ci, cj, ck, centfield_arr[ck][cj][ci], count);
331 }
332 } // end di loop
333 } // end dj loop
334 } // end dk loop
335
336 // Calculate average and store in the output 'field_arr' at the NODE index (i,j,k)
337 if (count > 0) {
338 // Store the result using the GLOBAL node index [k][j][i]
339 field_arr[k][j][i] = sum / (PetscReal)count;
340 } else {
341 // This indicates an issue - a node should always be adjacent to at least one cell
342 LOG_ALLOW(GLOBAL, LOG_WARNING,
343 "Rank %d: Node (i=%d,j=%d,k=%d) had count=0 surrounding cells! Check logic/ghosting.\n", rank, i, j, k);
344 // Assign a default value or handle error
345 field_arr[k][j][i] = 0.0; // Defaulting to zero might hide issues
346 }
347 } // End loop i (nodes)
348 } // End loop j (nodes)
349 } // End loop k (nodes)
350
351 LOG_ALLOW(GLOBAL, LOG_INFO,
352 "Rank %d completed interpolation.\n", rank);
353 return 0;
354}
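/*
 * Editor's sketch (illustrative; not part of interpolation.c). This routine reads
 * cell values adjacent to owned nodes, so its input must come from a ghosted local
 * Vec whose ghost cells are current. A minimal refresh-then-interpolate sequence,
 * assuming a global Vec `centGlobal` on user->da, a ghosted output Vec `nodeLocal`,
 * and the routine name reconstructed above (its declaration line was lost in
 * extraction):
 */
static PetscErrorCode Example_RefreshGhostsThenInterpolate(UserCtx *user, Vec centGlobal, Vec nodeLocal)
{
  PetscErrorCode ierr;
  Vec            centLocal;
  PetscReal   ***cent_arr, ***node_arr;

  ierr = DMGetLocalVector(user->da, &centLocal); CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(user->da, centGlobal, INSERT_VALUES, centLocal); CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(user->da, centGlobal, INSERT_VALUES, centLocal); CHKERRQ(ierr);

  ierr = DMDAVecGetArrayRead(user->da, centLocal, &cent_arr); CHKERRQ(ierr);
  ierr = DMDAVecGetArray(user->da, nodeLocal, &node_arr); CHKERRQ(ierr);
  ierr = InterpolateFieldFromCenterToCorner_Scalar(cent_arr, node_arr, user); CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(user->da, nodeLocal, &node_arr); CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(user->da, centLocal, &cent_arr); CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(user->da, &centLocal); CHKERRQ(ierr);
  return 0;
}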
355
356
357// -----------------------------------------------------------------------------
358// Interpolation: Center -> Corner (vector)
359// -----------------------------------------------------------------------------
360
361/**
362 * @brief Interpolates a vector field from cell centers to corner nodes.
363 *
364 * This function estimates the value of a vector field at each grid node by averaging
365 * the vector values from the cell centers of the cells surrounding that node (up to 8).
366 * It handles physical boundaries by averaging only the available adjacent cells.
367 *
368 * Assumes input `centfield_arr` is from a ghosted local vector (e.g., representing ucat,
369 * stored using node-indexing convention) and output `field_arr` is a ghosted local
370 * vector associated with `user->fda` (DOF=3, s=2), accessed using global node indices.
371 *
372 * @param[in] centfield_arr Input: 3D array (ghosted) of vector data conceptually at cell centers,
373 * accessed via GLOBAL indices respecting the storage convention
374 * (e.g., `ucat[k][j][i]` uses node index `i` but represents cell `C(i,j,k)` for interior).
375 * @param[out] field_arr Output: 3D array (ghosted) where interpolated node values are stored,
376 * accessed via GLOBAL node indices (k=0..KM, j=0..JM, i=0..IM).
377 * @param[in] user User context containing DMDA information (da and fda).
378 *
379 * @return PetscErrorCode 0 on success.
380 */
381PetscErrorCode InterpolateFieldFromCenterToCorner_Vector(
382 Cmpnts ***centfield_arr, /* Input: Ghosted local array (fda-based, cell data at node indices) */
383 Cmpnts ***field_arr, /* Output: local array (fda-based, true node data) */
384 UserCtx *user)
385{
386 PetscErrorCode ierr;
387 PetscMPIInt rank;
388 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
389
390 DMDALocalInfo info_nodes;
391 ierr = DMDAGetLocalInfo(user->fda, &info_nodes); CHKERRQ(ierr);
392
393 // Node ownership range (GLOBAL indices)
394 PetscInt xs_node = info_nodes.xs, xm_node = info_nodes.xm, xe_node = xs_node + xm_node;
395 PetscInt ys_node = info_nodes.ys, ym_node = info_nodes.ym, ye_node = ys_node + ym_node;
396 PetscInt zs_node = info_nodes.zs, zm_node = info_nodes.zm, ze_node = zs_node + zm_node;
397
398 // Global grid dimensions (NODES) - Used for global cell index check
399 PetscInt MX_node = user->IM; //info_nodes.mx;
400 PetscInt MY_node = user->JM; //info_nodes.my;
401 PetscInt MZ_node = user->KM; //info_nodes.mz;
402 PetscInt IM = MX_node - 1; // Max cell index i
403 PetscInt JM = MY_node - 1; // Max cell index j
404 PetscInt KM = MZ_node - 1; // Max cell index k
405
406
407 // Valid range for accessing the INPUT ghosted array (using NODE indices)
408 PetscInt gxs_node = info_nodes.gxs, gxm_node = info_nodes.gxm, gxe_node = gxs_node + gxm_node;
409 PetscInt gys_node = info_nodes.gys, gym_node = info_nodes.gym, gye_node = gys_node + gym_node;
410 PetscInt gzs_node = info_nodes.gzs, gzm_node = info_nodes.gzm, gze_node = gzs_node + gzm_node;
411
412 // Log only if this function is allowed by the list set in main()
413 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Rank %d: Interpolating for owned NODES k=%d..%d, j=%d..%d, i=%d..%d\n",
414 rank, zs_node, ze_node-1, ys_node, ye_node-1, xs_node, xe_node-1);
415
416 LOG_ALLOW(GLOBAL,LOG_DEBUG, "[%d] Dumping DM state (user->fda) BEFORE GetArray:\n", rank);
417
418
419 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)){
420 DMView(user->fda, PETSC_VIEWER_STDOUT_SELF);
421 // Inside InterpolateFieldFromCenterToCorner_Vector, before the loops:
422
423 PetscMPIInt rank_check;
424 MPI_Comm_rank(PETSC_COMM_WORLD, &rank_check);
425 if (rank_check == 1) { // Only for Rank 1
426 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] Attempting test read of OWNED INTERIOR centfield_arr[3][1][1]\n");
427 Cmpnts test_val_owned_interior = centfield_arr[7][1][1];
428 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] SUCCESS reading owned interior: x=%f\n", test_val_owned_interior.x);
429
430 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] Attempting test read of OWNED BOUNDARY centfield_arr[3][0][0]\n");
431 Cmpnts test_val_owned_boundary = centfield_arr[7][0][0];
432 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] SUCCESS reading owned boundary: x=%f\n", test_val_owned_boundary.x);
433
434 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] Attempting test read of GHOST centfield_arr[2][0][0]\n");
435 Cmpnts test_val_ghost = centfield_arr[7][0][0]; // This is the line that likely crashes
436 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] SUCCESS reading ghost: x=%f\n", test_val_ghost.x);
437
438 }
439 }
440
441 ierr = PetscBarrier(NULL);
442
443 // Proceed with the original loops...
444 // Loop over the GLOBAL indices of the NODES owned by this processor (k, j, i)
445 for (PetscInt k = zs_node; k < ze_node; k++) {
446 for (PetscInt j = ys_node; j < ye_node; j++) {
447 for (PetscInt i = xs_node; i < xe_node; i++) {
448 /*
449 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)){
450 // --- TEMPORARY TEST --- WORKS ONLY WHEN IM,JM,KM=5 !!!
451 if (rank == 1 && k == 3 && j == 0 && i == 0) {
452 LOG_ALLOW(GLOBAL,LOG_DEBUG, "[1] Test read inside loop for (3,0,0): Accessing centfield_arr[2][0][0]\n");
453 Cmpnts test_val_loop = centfield_arr[2][0][0]; // The crashing access
454 LOG_ALLOW(GLOBAL,LOG_DEBUG, "[1] Test read inside loop SUCCESS: x=%f\n", test_val_loop.x);
455 }
456 // --- END TEMPORARY TEST ---
457 }
458 */
459
460 Cmpnts sum = {0.0, 0.0, 0.0};
461 PetscInt count = 0;
462 PetscBool attempted_read = PETSC_FALSE; // Flag to track if read was tried
463
464 // Loop over the 8 potential cells surrounding node N(k,j,i)
465 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
466 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
467 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
468
469 // Calculate the NODE index where the relevant cell's data is stored
470 PetscInt node_idx_k = k + dk_offset;
471 PetscInt node_idx_j = j + dj_offset;
472 PetscInt node_idx_i = i + di_offset;
473
474
475 // Check if this NODE index corresponds to a valid GLOBAL cell index
476 PetscInt cell_idx_k = node_idx_k; // Cell index is same as node index for storage
477 PetscInt cell_idx_j = node_idx_j;
478 PetscInt cell_idx_i = node_idx_i;
479
480
481
482 if (cell_idx_i >= 0 && cell_idx_i <= MX_node && // Cell index check
483 cell_idx_j >= 0 && cell_idx_j <= MY_node &&
484 cell_idx_k >= 0 && cell_idx_k <= MZ_node)
485 {
486
487 /*
488 // Check if this NODE index is valid within the GLOBAL node domain (0..Mx-1)
489 // This implicitly checks if the corresponding cell is valid (0..Mx-2)
490 if (node_idx_i >= 0 && node_idx_i < MX_node -1 &&
491 node_idx_j >= 0 && node_idx_j < MY_node -1 &&
492 node_idx_k >= 0 && node_idx_k < MZ_node -1)
493 {
494 */
495
496 // Check if the NODE index is within the accessible LOCAL+GHOST range of centfield_arr
497 if (node_idx_i >= gxs_node && node_idx_i < gxe_node &&
498 node_idx_j >= gys_node && node_idx_j < gye_node &&
499 node_idx_k >= gzs_node && node_idx_k < gze_node)
500 {
501 // Log attempt just before read
502 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"PRE-READ: Rank %d targeting Node(k,j,i)=%d,%d,%d. Reading input centfield_arr[%d][%d][%d] (for cell C(%d,%d,%d))\n",
503 rank,
504 k,j,i,
505 node_idx_k,node_idx_j,node_idx_i,
506 cell_idx_k, cell_idx_j, cell_idx_i);
507
508 attempted_read = PETSC_TRUE; // Mark that we are attempting a read
509
510 // Convert GLOBAL neighbor index to LOCAL index for reading from the ghosted array
511 PetscInt k_local_read = node_idx_k - gzs_node;
512 PetscInt j_local_read = node_idx_j - gys_node;
513 PetscInt i_local_read = node_idx_i - gxs_node;
514 // ---> READ <---
515 Cmpnts cell_val = centfield_arr[k_local_read][j_local_read][i_local_read];
516// Cmpnts cell_val = centfield_arr[node_idx_k][node_idx_j][node_idx_i];
517
518 // Log success immediately after read
519 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"POST-READ: Rank %d successful read from [%d][%d][%d] -> (%.2f, %.2f, %.2f)\n",
520 rank,
521 node_idx_k,node_idx_j,node_idx_i,
522 cell_val.x, cell_val.y, cell_val.z);
523
524 sum.x += cell_val.x;
525 sum.y += cell_val.y;
526 sum.z += cell_val.z;
527 count++;
528
529 } else {
530 LOG_ALLOW(GLOBAL, LOG_WARNING, /* ... Ghost range warning ... */);
531 }
532 } // end global cell check
533 } // end di_offset
534 } // end dj_offset
535 } // end dk_offset
536
537 // ---> Convert GLOBAL node indices (k,j,i) to LOCAL indices for field_arr <---
538 // field_arr is dimensioned nx * ny * nz (local node dimensions)
539 // Global indices (k,j,i) range from (zs,ys,xs) to (ze-1, ye-1, xe-1)
540 // Local indices range from 0 to (zm-1, ym-1, xm-1)
541 PetscInt k_local = k - zs_node; // Offset by the starting global index
542 PetscInt j_local = j - ys_node;
543 PetscInt i_local = i - xs_node;
544 // Calculate average and write to output node (k,j,i)
545 if (count > 0) {
546 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"PRE-WRITE: Rank %d targeting Node(k,j,i)=%d,%d,%d. Writing avg (count=%d)\n", rank,k,j,i, count);
547
548 // --- Defensive check (optional but recommended) ---
549 if (k_local < 0 || k_local >= info_nodes.zm ||
550 j_local < 0 || j_local >= info_nodes.ym ||
551 i_local < 0 || i_local >= info_nodes.xm) {
552 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Calculated local write index out of bounds!");
553 }
554 // --- End check ---
555
556 // ---> Write using LOCAL indices <---
557 field_arr[k_local][j_local][i_local].x = sum.x / (PetscReal)count;
558 field_arr[k_local][j_local][i_local].y = sum.y / (PetscReal)count;
559 field_arr[k_local][j_local][i_local].z = sum.z / (PetscReal)count;
560
561 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"POST-WRITE: Rank %d successful write to field_arr[%d][%d][%d] (local)\n", rank, k_local,j_local,i_local); // Log local indices
562
563
564
565 } else {
566 LOG_ALLOW(GLOBAL, LOG_WARNING, // Use WARNING or ERROR
567 "Rank %d: Node (i=%d,j=%d,k=%d) had count=0 surrounding valid cells! Check logic/ghosting. Writing zero.\n", rank, i, j, k);
568 field_arr[k_local][j_local][i_local] = (Cmpnts){0.0, 0.0, 0.0};
569 }
570 // Add a log entry even if count=0 or no read was attempted for this node
571 if (!attempted_read && count==0) {
572 LOG_ALLOW(LOCAL, LOG_DEBUG,"NODE-COMPLETE: Rank %d Node(k,j,i)=%d,%d,%d finished loops, no valid cells/reads attempted.\n", rank, k, j, i);
573 } else if (count == 0 && attempted_read) {
574 // This case should ideally not happen if the ghost region check is correct
575 LOG_ALLOW(LOCAL, LOG_ERROR,"NODE-COMPLETE: Rank %d Node(k,j,i)=%d,%d,%d finished loops, attempted reads but count=0!\n", rank, k, j, i);
576 } else {
577 // This is the normal completion case after writing
578 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"NODE-COMPLETE: Rank %d Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
579 }
580
581 } // End loop node i
582 } // End loop node j
583 } // End loop node k
584
585 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, // Use INFO for completion
586 "Rank %d completed interpolation function.\n", rank); // Changed message slightly
587 return 0;
588}
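/*
 * Editor's note (illustrative; not part of interpolation.c). Unlike the Vec-based
 * variant further below, this version expects raw local buffers: it reads the input
 * at indices offset by the ghosted start (gxs, gys, gzs) and writes the output at
 * indices offset by the owned start (xs, ys, zs). A minimal sketch of the two index
 * mappings used for one global node (k, j, i):
 */
static inline void Example_CenterToCornerIndexMaps(const DMDALocalInfo *info,
                                                   PetscInt k, PetscInt j, PetscInt i, /* global node index */
                                                   PetscInt read_local[3], PetscInt write_local[3])
{
  /* Index into the ghosted input buffer (includes the ghost offset). */
  read_local[0]  = k - info->gzs;
  read_local[1]  = j - info->gys;
  read_local[2]  = i - info->gxs;
  /* Index into the owned-only output buffer. */
  write_local[0] = k - info->zs;
  write_local[1] = j - info->ys;
  write_local[2] = i - info->xs;
}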
589
590/**
591 * @brief Interpolates a vector field from cell centers to corner nodes.
592 *
593 * This version is adapted to write directly into a ghosted local array obtained from DMDAVecGetArray(),
594 * which allows using GLOBAL indices for writing to the OWNED portion of the array.
595 *
596 * @param[in] centfield_arr Input: 3D array (ghosted) of cell-centered data, accessed via GLOBAL indices.
597 * @param[out] corner_arr Output: 3D array (ghosted) where interpolated node values are stored,
598 * also accessed via GLOBAL indices for the owned part.
599 * @param[in] user User context containing DMDA information.
600 *
601 * @return PetscErrorCode 0 on success.
602 */
604 Cmpnts ***centfield_arr, /* Input: Ghosted local array from Vec (read) */
605 Cmpnts ***corner_arr, /* Output: Ghosted local array from Vec (write) */
606 UserCtx *user)
607{
608 PetscErrorCode ierr;
609 DMDALocalInfo info_nodes;
610 PetscMPIInt rank;
611 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
612 ierr = DMDAGetLocalInfo(user->fda, &info_nodes); CHKERRQ(ierr);
613
614 // Node ownership range (GLOBAL indices)
615 PetscInt xs_node = info_nodes.xs, xm_node = info_nodes.xm, xe_node = xs_node + xm_node;
616 PetscInt ys_node = info_nodes.ys, ym_node = info_nodes.ym, ye_node = ys_node + ym_node;
617 PetscInt zs_node = info_nodes.zs, zm_node = info_nodes.zm, ze_node = zs_node + zm_node;
618
619 // Global grid dimensions (used for valid cell check)
620 PetscInt IM = info_nodes.mx; // Total nodes in i-direction
621 PetscInt JM = info_nodes.my; // Total nodes in j-direction
622 PetscInt KM = info_nodes.mz; // Total nodes in k-direction
623
624 // Ghosted array starting indices (from the same info struct)
625 PetscInt gxs = info_nodes.gxs;
626 PetscInt gys = info_nodes.gys;
627 PetscInt gzs = info_nodes.gzs;
628
629 // Loop over the GLOBAL indices of the NODES owned by this processor
630 for (PetscInt k = zs_node; k < ze_node; k++) {
631 for (PetscInt j = ys_node; j < ye_node; j++) {
632 for (PetscInt i = xs_node; i < xe_node; i++) {
633 Cmpnts sum = {0.0, 0.0, 0.0};
634 PetscInt count = 0;
635
636 // Loop over the 8 potential cells surrounding node N(k,j,i)
637 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
638 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
639 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
640
641 // These are still GLOBAL cell indices
642 PetscInt global_cell_k = k + dk_offset;
643 PetscInt global_cell_j = j + dj_offset;
644 PetscInt global_cell_i = i + di_offset;
645
646 // Check if this corresponds to a valid GLOBAL cell index
647 if (global_cell_i >= 0 && global_cell_i < IM - 1 &&
648 global_cell_j >= 0 && global_cell_j < JM - 1 &&
649 global_cell_k >= 0 && global_cell_k < KM - 1)
650 {
651
652 // Now, read from the local array using the local index
653 Cmpnts cell_val = centfield_arr[global_cell_k][global_cell_j][global_cell_i];
654
655 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"[Rank %d] successful read from [%d][%d][%d] -> (%.2f, %.2f, %.2f)\n",
656 rank,global_cell_k,global_cell_j,global_cell_i,cell_val.x, cell_val.y, cell_val.z);
657
658 sum.x += cell_val.x;
659 sum.y += cell_val.y;
660 sum.z += cell_val.z;
661 count++;
662 }
663 }
664 }
665 }
666
667 // --- MODIFICATION: Write using GLOBAL indices ---
668 // The calculation of i_local, j_local, k_local is removed.
669 // We write directly into the array using the global loop indices.
670 if (count > 0) {
671 corner_arr[k][j][i].x = sum.x / (PetscReal)count;
672 corner_arr[k][j][i].y = sum.y / (PetscReal)count;
673 corner_arr[k][j][i].z = sum.z / (PetscReal)count;
674 } else {
675 // This case should ideally not happen for a valid owned node, but as a failsafe:
676 corner_arr[k][j][i] = (Cmpnts){0.0, 0.0, 0.0};
677 }
678
679 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"[Rank %d] Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
680 }
681 }
682 }
683 return 0;
684}
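/*
 * Editor's sketch (illustrative; not part of interpolation.c). A condensed version
 * of the Vec-based call sequence built around this variant, mirroring Stage 1 of the
 * swarm-interpolation driver at the end of this file. The dispatch macro
 * InterpolateFieldFromCenterToCorner(bs, ...) is the one used there; `centLocal`
 * (a ghosted local Vec of cell-centered data) and the wrapper name are assumptions.
 */
static PetscErrorCode Example_CenterToCornerWithGhostExchange(UserCtx *user, Vec centLocal, PetscInt bs)
{
  PetscErrorCode ierr;
  Vec            cornerGlobal, cornerLocal;
  void          *cent_read = NULL, *corner_write = NULL;

  ierr = DMGetGlobalVector(user->fda, &cornerGlobal); CHKERRQ(ierr);
  ierr = DMGetLocalVector(user->fda, &cornerLocal);   CHKERRQ(ierr);

  ierr = DMDAVecGetArrayRead(user->fda, centLocal, &cent_read); CHKERRQ(ierr);
  ierr = DMDAVecGetArray(user->fda, cornerLocal, &corner_write); CHKERRQ(ierr);
  ierr = InterpolateFieldFromCenterToCorner(bs, cent_read, corner_write, user); CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(user->fda, cornerLocal, &corner_write); CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(user->fda, centLocal, &cent_read); CHKERRQ(ierr);

  /* Assemble the owned corner values globally, then refresh ghosts for downstream use. */
  ierr = DMLocalToGlobalBegin(user->fda, cornerLocal, INSERT_VALUES, cornerGlobal); CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(user->fda, cornerLocal, INSERT_VALUES, cornerGlobal);   CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(user->fda, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(user->fda, cornerGlobal, INSERT_VALUES, cornerLocal);   CHKERRQ(ierr);

  /* ... use cornerLocal (e.g., for trilinear interpolation to particles), then: */
  ierr = DMRestoreLocalVector(user->fda, &cornerLocal);   CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(user->fda, &cornerGlobal); CHKERRQ(ierr);
  return 0;
}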
685
686/**
687 * @brief Interpolates a scalar field from cell centers to corner nodes.
688 *
689 * This version is adapted to write directly into a ghosted local array obtained from DMDAVecGetArray(),
690 * which allows using GLOBAL indices for writing to the OWNED portion of the array.
691 *
692 * @param[in] centfield_arr Input: 3D array (ghosted) of scalar data at cell centers, accessed via GLOBAL indices.
693 * @param[out] corner_arr Output: 3D array (ghosted) where interpolated node values are stored,
694 * also accessed via GLOBAL indices for the owned part.
695 * @param[in] user User context containing DMDA information.
696 *
697 * @return PetscErrorCode 0 on success.
698 */
700 PetscReal ***centfield_arr, /* Input: Ghosted local array from Vec (read) */
701 PetscReal ***corner_arr, /* Output: Ghosted local array from Vec (write) */
702 UserCtx *user)
703{
704 PetscErrorCode ierr;
705 DMDALocalInfo info_nodes;
706 ierr = DMDAGetLocalInfo(user->fda, &info_nodes); CHKERRQ(ierr);
707
708 // Node ownership range (GLOBAL indices)
709 PetscInt xs_node = info_nodes.xs, xe_node = info_nodes.xs + info_nodes.xm;
710 PetscInt ys_node = info_nodes.ys, ye_node = info_nodes.ys + info_nodes.ym;
711 PetscInt zs_node = info_nodes.zs, ze_node = info_nodes.zs + info_nodes.zm;
712
713 // Global grid dimensions (number of nodes)
714 PetscInt IM = info_nodes.mx;
715 PetscInt JM = info_nodes.my;
716 PetscInt KM = info_nodes.mz;
717
718 // Loop over the GLOBAL indices of the NODES owned by this processor
719 for (PetscInt k = zs_node; k < ze_node; k++) {
720 for (PetscInt j = ys_node; j < ye_node; j++) {
721 for (PetscInt i = xs_node; i < xe_node; i++) {
722 PetscReal sum = 0.0;
723 PetscInt count = 0;
724
725 // Loop over the 8 potential cells surrounding node N(k,j,i)
726 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
727 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
728 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
729 // This is the global index of the cell-centered data point
730 PetscInt cell_idx_k = k + dk_offset;
731 PetscInt cell_idx_j = j + dj_offset;
732 PetscInt cell_idx_i = i + di_offset;
733
734 // Check if this corresponds to a valid GLOBAL cell index.
735 // Cell indices run from 0 to NumberOfNodes-2.
736 if (cell_idx_i >= 0 && cell_idx_i < IM - 1 &&
737 cell_idx_j >= 0 && cell_idx_j < JM - 1 &&
738 cell_idx_k >= 0 && cell_idx_k < KM - 1)
739 {
740 sum += centfield_arr[cell_idx_k][cell_idx_j][cell_idx_i];
741 count++;
742 }
743 }
744 }
745 }
746
747 // Calculate average and write to the output array using GLOBAL indices
748 if (count > 0) {
749 corner_arr[k][j][i] = sum / (PetscReal)count;
750 } else {
751 corner_arr[k][j][i] = 0.0; // Failsafe
752 }
753 }
754 }
755 }
756 return 0;
757}
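/*
 * Editor's sketch (illustrative; not part of interpolation.c). The boundary handling
 * above averages only the cells that exist: for a grid with IM x JM x KM nodes
 * (valid cell indices 0..IM-2, etc.), a corner node averages 1 cell, an edge node 2,
 * a face node 4, and an interior node 8. This helper reproduces that count.
 */
static inline PetscInt Example_ContributingCellCount(PetscInt i, PetscInt j, PetscInt k,
                                                     PetscInt IM, PetscInt JM, PetscInt KM)
{
  PetscInt count = 0;
  for (PetscInt dk = -1; dk <= 0; dk++)
    for (PetscInt dj = -1; dj <= 0; dj++)
      for (PetscInt di = -1; di <= 0; di++) {
        const PetscInt ci = i + di, cj = j + dj, ck = k + dk;
        if (ci >= 0 && ci < IM - 1 && cj >= 0 && cj < JM - 1 && ck >= 0 && ck < KM - 1) count++;
      }
  return count; /* e.g., node (0,0,0) -> 1; an interior node -> 8 */
}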
758
759/**
760 * @brief Retrieves the scalar value at the cell (iCell, jCell, kCell).
761 *
762 * This function does a simple piecewise (zeroth‐order) interpolation
763 * by returning fieldScal[kCell][jCell][iCell], ignoring any fractional coordinates.
764 *
765 * @param[in] fieldName A string identifying the field (e.g. "temperature").
766 * @param[in] fieldScal 3D array of PetscReal (indexed as [k][j][i]).
767 * @param[in] iCell, jCell, kCell Integral cell indices to sample.
768 * @param[out] val Pointer to a PetscReal that receives the sampled scalar.
769 *
770 * @return PetscErrorCode 0 on success
771 */
772PetscErrorCode PieceWiseLinearInterpolation_Scalar(
773 const char *fieldName,
774 PetscReal ***fieldScal,
775 PetscInt iCell,
776 PetscInt jCell,
777 PetscInt kCell,
778 PetscReal *val)
779{
780 PetscFunctionBegin;
781 *val = fieldScal[kCell][jCell][iCell];
782
783 // Optional logging
784 LOG_ALLOW(LOCAL, LOG_DEBUG,
785 "PieceWiseLinearInterpolation_Scalar: Field '%s' at (i=%d, j=%d, k=%d) => val=%.6f\n",
786 fieldName, iCell, jCell, kCell, *val);
787
788 PetscFunctionReturn(0);
789}
790
791
792/**
793 * @brief Retrieves the vector (Cmpnts) at the cell (iCell, jCell, kCell).
794 *
795 * This function simply sets:
796 * vec->x = fieldVec[kCell][jCell][iCell].x
797 * vec->y = fieldVec[kCell][jCell][iCell].y
798 * vec->z = fieldVec[kCell][jCell][iCell].z
799 * effectively a nearest‐cell or piecewise approach.
800 *
801 * @param[in] fieldName String identifying the field (e.g. "velocity").
802 * @param[in] fieldVec 3D array of Cmpnts (indexed as [k][j][i]).
803 * @param[in] iCell, jCell, kCell Integral cell indices to sample.
804 * @param[out] vec Pointer to a Cmpnts struct that receives the sampled vector.
805 *
806 * @return PetscErrorCode 0 on success
807 */
808PetscErrorCode PieceWiseLinearInterpolation_Vector(
809 const char *fieldName,
810 Cmpnts ***fieldVec,
811 PetscInt iCell,
812 PetscInt jCell,
813 PetscInt kCell,
814 Cmpnts *vec)
815{
816 PetscFunctionBegin;
817 vec->x = fieldVec[kCell][jCell][iCell].x;
818 vec->y = fieldVec[kCell][jCell][iCell].y;
819 vec->z = fieldVec[kCell][jCell][iCell].z;
820
821 // Optional logging
822 LOG_ALLOW(LOCAL, LOG_DEBUG,
823 "PieceWiseLinearInterpolation_Vector: Field '%s' at (i=%d, j=%d, k=%d) => (x=%.6f, y=%.6f, z=%.6f)\n",
824 fieldName, iCell, jCell, kCell, vec->x, vec->y, vec->z);
825
826 PetscFunctionReturn(0);
827}
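/*
 * Editor's sketch (illustrative; not part of interpolation.c). The piecewise
 * (zeroth-order) samplers ignore fractional coordinates entirely, so a caller only
 * needs the cell indices. `ucat_arr` is a hypothetical ghosted array of Cmpnts, and
 * the routine name follows the log string above.
 */
static PetscErrorCode Example_NearestCellSample(Cmpnts ***ucat_arr, PetscInt iCell, PetscInt jCell,
                                                PetscInt kCell, Cmpnts *u_at_particle)
{
  PetscErrorCode ierr;
  ierr = PieceWiseLinearInterpolation_Vector("velocity", ucat_arr, iCell, jCell, kCell, u_at_particle); CHKERRQ(ierr);
  return 0;
}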
828
829/**
830 * @brief Computes the trilinear interpolation weights from the interpolation coefficients.
831 *
832 * This function computes the weights for trilinear interpolation at the eight corners of a cell
833 * using the interpolation coefficients provided along the x, y, and z directions.
834 *
835 * @param[in] a1 Interpolation coefficient along the x-direction (normalized coordinate within the cell).
836 * @param[in] a2 Interpolation coefficient along the y-direction (normalized coordinate within the cell).
837 * @param[in] a3 Interpolation coefficient along the z-direction (normalized coordinate within the cell).
838 * @param[out] w Array of 8 weights, each corresponding to one corner of the cell.
839 *
840 * @note
841 * - The coefficients `a1`, `a2`, and `a3` should be in the range [0, 1].
842 * - The order of weights corresponds to the eight corners of a hexahedral cell.
843 */
844static inline void ComputeTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w) {
845 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Computing weights for a1=%f, a2=%f, a3=%f.\n", a1, a2, a3);
846
847 // Ensure a1, a2, a3 are within [0,1]
848 a1 = PetscMax(0.0, PetscMin(1.0, a1));
849 a2 = PetscMax(0.0, PetscMin(1.0, a2));
850 a3 = PetscMax(0.0, PetscMin(1.0, a3));
851
852 const PetscReal oa1 = 1.0 - a1;
853 const PetscReal oa2 = 1.0 - a2;
854 const PetscReal oa3 = 1.0 - a3;
855
856 w[0] = oa1 * oa2 * oa3; /* cornerOffsets[0] => (0,0,0) */
857 w[1] = a1 * oa2 * oa3; /* cornerOffsets[1] => (1,0,0) */
858 w[2] = oa1 * a2 * oa3; /* cornerOffsets[2] => (0,1,0) */
859 w[3] = a1 * a2 * oa3; /* cornerOffsets[3] => (1,1,0) */
860 w[4] = oa1 * oa2 * a3; /* cornerOffsets[4] => (0,0,1) */
861 w[5] = a1 * oa2 * a3; /* cornerOffsets[5] => (1,0,1) */
862 w[6] = oa1 * a2 * a3; /* cornerOffsets[6] => (0,1,1) */
863 w[7] = a1 * a2 * a3; /* cornerOffsets[7] => (1,1,1) */
864
865 // Log the computed weights for debugging
866 LOG_ALLOW(LOCAL,LOG_DEBUG, "Weights computed - "
867 "w0=%f, w1=%f, w2=%f, w3=%f, w4=%f, w5=%f, w6=%f, w7=%f. \n",
868 w[0], w[1], w[2], w[3], w[4], w[5], w[6], w[7]);
869}
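/*
 * Editor's check (illustrative; not part of interpolation.c). The eight weights
 * always sum to 1, and at the cell centre (a1=a2=a3=0.5) every weight is 1/8, which
 * is exactly the 8-corner average used by the corner-to-center routines earlier in
 * this file.
 */
static void Example_CheckTrilinearWeights(void)
{
  PetscReal w[NUM_WEIGHTS], sum = 0.0;
  ComputeTrilinearWeights(0.5, 0.5, 0.5, w);
  for (PetscInt c = 0; c < NUM_WEIGHTS; c++) sum += w[c];  /* each w[c] == 0.125 */
  (void)sum;                                               /* sum == 1.0 */
}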
870
871/**
872 * @brief Computes the trilinearly interpolated scalar at a given point.
873 *
874 * @param[in] fieldName A string representing the name of the scalar field (e.g., "temperature").
875 * @param[in] fieldScal 3D array of the field from a DMDA (indexed as [k][j][i]),
876 * each cell a PetscReal.
877 * @param[in] i, j, k Integral cell indices (the "lower" corner in each dimension).
878 * @param[in] a1, a2, a3 Normalized coordinates within the cell ([0,1] range).
879 * @param[out] val Pointer to a PetscReal that will store the interpolated scalar.
880 *
881 * This function uses the standard 8-corner trilinear formula via `ComputeTrilinearWeights()`.
882 * If a different scheme is desired, implement a new function with the same interface.
883 */
884PetscErrorCode TrilinearInterpolation_Scalar(
885 const char *fieldName,
886 PetscReal ***fieldScal,
887 PetscInt i,
888 PetscInt j,
889 PetscInt k,
890 PetscReal a1,
891 PetscReal a2,
892 PetscReal a3,
893 PetscReal *val)
894{
895 PetscFunctionBegin; // PETSc macro for error/stack tracing
896
897 // Compute the 8 corner weights
898 PetscReal wcorner[8];
899 ComputeTrilinearWeights(a1, a2, a3, wcorner);
900
901 // Offsets for cell corners
902 PetscInt i1 = i + 1;
903 PetscInt j1 = j + 1;
904 PetscInt k1 = k + 1;
905
906 // Initialize the output scalar
907 PetscReal sum = 0.0;
908
909 // Corner 0 => (i, j, k)
910 sum += wcorner[0] * fieldScal[k ][j ][i ];
911 // Corner 1 => (i+1, j, k)
912 sum += wcorner[1] * fieldScal[k ][j ][i1];
913 // Corner 2 => (i, j+1, k)
914 sum += wcorner[2] * fieldScal[k ][j1][i ];
915 // Corner 3 => (i+1, j+1, k)
916 sum += wcorner[3] * fieldScal[k ][j1][i1];
917 // Corner 4 => (i, j, k+1)
918 sum += wcorner[4] * fieldScal[k1][j ][i ];
919 // Corner 5 => (i+1, j, k+1)
920 sum += wcorner[5] * fieldScal[k1][j ][i1];
921 // Corner 6 => (i, j+1, k+1)
922 sum += wcorner[6] * fieldScal[k1][j1][i ];
923 // Corner 7 => (i+1, j+1, k+1)
924 sum += wcorner[7] * fieldScal[k1][j1][i1];
925
926 *val = sum;
927
928 // Logging (optional)
929 LOG_ALLOW(LOCAL, LOG_DEBUG,
930 "TrilinearInterpolation_Scalar: Field '%s' at (i=%d, j=%d, k=%d), "
931 "a1=%.6f, a2=%.6f, a3=%.6f -> val=%.6f.\n",
932 fieldName, i, j, k, a1, a2, a3, *val);
933
934 // LOG_ALLOW_SYNC(GLOBAL, LOG_INFO,
935 // "TrilinearInterpolation_Scalar: Completed interpolation for field '%s' across local cells.\n",
936 // fieldName);
937
938 PetscFunctionReturn(0);
939}
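/*
 * Editor's check (illustrative; not part of interpolation.c). Trilinear interpolation
 * is exact for fields that vary linearly in each direction: sampling f = x + 2y + 3z
 * on one unit cell at (a1,a2,a3) should return a1 + 2*a2 + 3*a3. The 2x2x2 array
 * scaffolding and the routine name (reconstructed from the log string above) are
 * assumptions.
 */
static PetscErrorCode Example_TrilinearScalarOnUnitCell(void)
{
  PetscErrorCode ierr;
  /* Build a tiny 2x2x2 PetscReal*** view of f(x,y,z) = x + 2y + 3z on a unit cell. */
  PetscReal   data[2][2][2];
  PetscReal  *rows[2][2];
  PetscReal **planes[2];
  PetscReal ***field = planes;
  for (PetscInt k = 0; k < 2; k++) {
    planes[k] = rows[k];
    for (PetscInt j = 0; j < 2; j++) {
      rows[k][j] = data[k][j];
      for (PetscInt i = 0; i < 2; i++) data[k][j][i] = (PetscReal)i + 2.0*j + 3.0*k;
    }
  }

  PetscReal val;
  ierr = TrilinearInterpolation_Scalar("example", field, 0, 0, 0, 0.25, 0.5, 0.75, &val); CHKERRQ(ierr);
  /* val should be 0.25 + 2*0.5 + 3*0.75 = 3.5 */
  return 0;
}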
940
941
942/**
943 * @brief Computes the trilinearly interpolated vector (e.g., velocity) at a given point.
944 *
945 * @param[in] fieldName A string representing the name of the vector field (e.g., "velocity").
946 * @param[in] fieldVec 3D array of the field from a DMDA (indexed as [k][j][i]),
947 * each cell of type Cmpnts.
948 * @param[in] i, j, k Integral cell indices (the "lower" corner in each dimension).
949 * @param[in] a1, a2, a3 Normalized coordinates within the cell ([0,1] range).
950 * @param[out] vec Pointer to a Cmpnts struct that will store the interpolated vector (x, y, z).
951 *
952 * This function uses the standard 8-corner trilinear formula via `ComputeTrilinearWeights()`.
953 * If a different scheme is desired, implement a new function with the same interface.
954 */
955/**
956 * @brief Computes the trilinearly interpolated vector at a given point, with partial weighting if corners go out of range.
957 *
958 * If any of the 8 corners (i or i+1, j or j+1, k or k+1) is out of [0..mx), [0..my), [0..mz),
959 * that corner is skipped and the corresponding weight is omitted. The total is normalized
960 * by the sum of used weights, so we get a partial interpolation near boundaries.
961 */
962PetscErrorCode TrilinearInterpolation_Vector(
963 const char *fieldName,
964 Cmpnts ***fieldVec, /* 3D array [k][j][i], dimension [mz][my][mx] */
965 PetscInt i, // local cell index i
966 PetscInt j, // local cell index j
967 PetscInt k, // local cell index k
968 PetscReal a1,
969 PetscReal a2,
970 PetscReal a3,
971 Cmpnts *vec)
972{
973 PetscFunctionBegin; // PETSc macro for error/stack tracing
974
975 // Compute the 8 corner weights
976 PetscErrorCode ierr;
977 PetscReal wcorner[8];
978 PetscMPIInt rank;
979
980 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
981
982 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] Computing Trilinear Weights.\n",rank);
983 ComputeTrilinearWeights(a1, a2, a3, wcorner);
984
985 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] Trilinear weights computed for local cell %d,%d,%d.\n",rank,i,j,k);
986
987 // For partial interpolation, we'll keep track of how many corners are valid
988 // and how much sum of weights is used. Then we do a final normalization.
989 PetscReal sumW = 0.0;
990 Cmpnts accum = {0.0, 0.0, 0.0};
991
992 // The eight corner indices, with their weights:
993 // corners: (i,j,k), (i+1,j,k), (i,j+1,k), (i+1,j+1,k), etc.
994 // We store them in an array to iterate cleanly.
995 const PetscInt cornerOffsets[8][3] = {
996 {0, 0, 0},
997 {1, 0, 0},
998 {0, 1, 0},
999 {1, 1, 0},
1000 {0, 0, 1},
1001 {1, 0, 1},
1002 {0, 1, 1},
1003 {1, 1, 1}
1004 };
1005
1006 // Weighted partial sum
1007 for (PetscInt c = 0; c < 8; c++) {
1008 const PetscInt di = cornerOffsets[c][0];
1009 const PetscInt dj = cornerOffsets[c][1];
1010 const PetscInt dk = cornerOffsets[c][2];
1011 PetscInt iC = i + di;
1012 PetscInt jC = j + dj;
1013 PetscInt kC = k + dk;
1014
1015 /*
1016 // skip if out of domain
1017 // (Assuming you know global domain is [0..mx), [0..my), [0..mz).)
1018 if (iC < 0 || iC >= (PetscInt)userGlobalMx ||
1019 jC < 0 || jC >= (PetscInt)userGlobalMy ||
1020 kC < 0 || kC >= (PetscInt)userGlobalMz)
1021 {
1022 // skip this corner
1023 continue;
1024 }
1025
1026 */
1027
1028 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] %s[%d][%d][%d] = (%.4f,%.4f,%.4f).\n",rank,fieldName,kC,jC,iC,fieldVec[kC][jC][iC].x,fieldVec[kC][jC][iC].y,fieldVec[kC][jC][iC].z);
1029
1030 // Otherwise, accumulate
1031 accum.x += wcorner[c] * fieldVec[kC][jC][iC].x;
1032 accum.y += wcorner[c] * fieldVec[kC][jC][iC].y;
1033 accum.z += wcorner[c] * fieldVec[kC][jC][iC].z;
1034 sumW += wcorner[c];
1035 }
1036
1037 // If sumW=0 => out-of-range or zero weighting => set (0,0,0)
1038 if (sumW > 1.0e-14) {
1039 vec->x = accum.x / sumW;
1040 vec->y = accum.y / sumW;
1041 vec->z = accum.z / sumW;
1042 } else {
1043 vec->x = 0.0; vec->y = 0.0; vec->z = 0.0;
1044 }
1045
1046 PetscFunctionReturn(0);
1047}
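/*
 * Editor's note (illustrative; not part of interpolation.c). The out-of-domain corner
 * skip above is currently commented out, so the caller must guarantee that i+1, j+1
 * and k+1 are readable in the (ghosted) array. A minimal clamp, assuming mx/my/mz are
 * the node counts, keeps the 8-corner stencil of the "lower corner" (i,j,k) in range:
 */
static inline void Example_ClampCellIndexForTrilinear(PetscInt *i, PetscInt *j, PetscInt *k,
                                                      PetscInt mx, PetscInt my, PetscInt mz)
{
  /* The highest admissible lower corner is the last cell, i.e. node count - 2. */
  *i = PetscMin(PetscMax(*i, 0), mx - 2);
  *j = PetscMin(PetscMax(*j, 0), my - 2);
  *k = PetscMin(PetscMax(*k, 0), mz - 2);
}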
1048
1049
1050/**
1051 * @brief Interpolates a single field (scalar or vector) for one particle, storing the result in a swarm array.
1052 *
1053 * This helper function is used inside a larger loop over local particles.
1054 *
1055 * @param[in] fieldName A string identifying the field (e.g. "velocity", "temperature", etc.).
1056 * @param[in] fieldPtr A pointer to the local DMDA array for the field:
1057 * - (PetscReal ***) if scalar (blockSize = 1)
1058 * - (Cmpnts ***) if vector (blockSize = 3)
1059 * @param[in] iCell, jCell, kCell The cell indices for this particle (already clamped if needed).
1060 * @param[in] a1, a2, a3 Interpolation coefficients in [0..1].
1061 * If using PiecewiseLinearInterpolation, these are currently ignored.
1062 * @param[out] swarmOut A pointer to the DMSwarm output array:
1063 * - (PetscReal*) if scalar dof=1
1064 * - (PetscReal*) if vector dof=3 (storing x,y,z in consecutive reals)
1065 * - or (Cmpnts*) if you store the result as a Cmpnts struct
1066 * @param[in] p The local particle index (used to compute the correct offset in swarmOut).
1067 * @param[in] blockSize The number of degrees of freedom (1 => scalar, 3 => vector).
1068 *
1069 * This routine demonstrates a switch between:
1070 * - PiecewiseLinearInterpolation (zeroth order / nearest cell)
1071 * - TrilinearInterpolation (8-corner weighted).
1072 *
1073 * By default, TrilinearInterpolation is active, while the PiecewiseLinearInterpolation calls are commented out.
1074 * To switch to trilinear, simply comment/uncomment appropriately.
1075 *
1076 * Logging calls are provided (LOG_ALLOW) that you can adapt to your existing logging system.
1077 *
1078 * @return PetscErrorCode 0 on success, non-zero on error.
1079 */
1080static inline PetscErrorCode InterpolateEulerFieldToSwarmForParticle(
1081 const char *fieldName,
1082 void *fieldPtr, /* typed Pointer => either (PetscReal***) or (Cmpnts***) */
1083 PetscInt iCell,
1084 PetscInt jCell,
1085 PetscInt kCell,
1086 PetscReal a1,
1087 PetscReal a2,
1088 PetscReal a3,
1089 void *swarmOut, /* typed Pointer => (PetscReal*) or (Cmpnts*) or dof=3 array */
1090 PetscInt p, /* particle index */
1091 PetscInt blockSize) /* dof=1 => scalar, dof=3 => vector */
1092{
1093 PetscErrorCode ierr;
1094 PetscFunctionBegin;
1095
1096 // Optional logging at start
1097 LOG_ALLOW(LOCAL, LOG_DEBUG,
1098 "InterpolateEulerFieldToSwarmForParticle: field='%s', blockSize=%d, "
1099 "cell IDs=(%d,%d,%d), weights=(%.4f,%.4f,%.4f)\n",
1100 fieldName, blockSize, iCell, jCell, kCell, a1, a2, a3);
1101
1102 /*
1103 If blockSize=1, we interpret the fieldPtr as a 3D array of PetscReal (scalar).
1104 If blockSize=3, we interpret the fieldPtr as a 3D array of Cmpnts (vector).
1105 */
1106 if (blockSize == 1) {
1107 /* Scalar field: Cast fieldPtr to (PetscReal ***). */
1108 PetscReal ***fieldScal = (PetscReal ***) fieldPtr;
1109 PetscReal val;
1110
1111 // Currently using trilinear.
1112 ierr = TrilinearInterpolation(fieldName, fieldScal,
1113 iCell, jCell, kCell,
1114 a1, a2, a3,
1115 &val);
1116 CHKERRQ(ierr);
1117
1118 // Alternative (commented) call to PiecewiseLinearInterpolation (zeroth order) :
1119 // PetscErrorCode ierr = PiecewiseLinearInterpolation(fieldName,
1120 // fieldScal,
1121 // iCell, jCell, kCell,
1122 // &val);
1123 // CHKERRQ(ierr);
1124
1125 // Write the scalar result to the swarm output at index [p].
1126 ((PetscReal*)swarmOut)[p] = val;
1127
1128 LOG_ALLOW(LOCAL, LOG_DEBUG,
1129 "InterpolateEulerFieldToSwarmForParticle [Scalar]: field='%s', result=%.6f "
1130 "stored at swarmOut index p=%d.\n", fieldName, val, (PetscInt)p);
1131 }
1132 else if (blockSize == 3) {
1133 /* Vector field: Cast fieldPtr to (Cmpnts ***). */
1134 Cmpnts ***fieldVec = (Cmpnts ***) fieldPtr;
1135 Cmpnts vec;
1136
1137
1138
1139
1140 // Alternative (commented) call to piecewise interpolation (zeroth order):
1141 // PetscErrorCode ierr = PieceWiseLinearInterpolation(fieldName,
1142 // fieldVec,
1143 // iCell, jCell, kCell,
1144 // &vec);
1145 // CHKERRQ(ierr);
1146
1147 // Currently using trilinear.
1148 ierr = TrilinearInterpolation(fieldName, fieldVec,
1149 iCell, jCell, kCell,
1150 a1, a2, a3,
1151 &vec);
1152 CHKERRQ(ierr);
1153
1154 // If swarmOut is an array of 3 reals per particle:
1155 ((PetscReal*)swarmOut)[3*p + 0] = vec.x;
1156 ((PetscReal*)swarmOut)[3*p + 1] = vec.y;
1157 ((PetscReal*)swarmOut)[3*p + 2] = vec.z;
1158
1159 LOG_ALLOW(LOCAL, LOG_DEBUG,
1160 "InterpolateEulerFieldToSwarmForParticle [Vector]: field='%s', result=(%.6f,%.6f,%.6f) "
1161 "stored at swarmOut[3p..3p+2], p=%d.\n",
1162 fieldName, vec.x, vec.y, vec.z, (PetscInt)p);
1163
1164 /*
1165 If you store the vector result as a Cmpnts in the swarm, do instead:
1166 ((Cmpnts*)swarmOut)[p] = vec;
1167 but ensure your DMSwarm field is sized for a Cmpnts struct.
1168 */
1169 }
1170 else {
1171 /* If blockSize isn't 1 or 3, we raise an error. */
1172 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP,
1173 "InterpolateEulerFieldToSwarmForParticle: only blockSize=1 or 3 supported, got %d.",
1174 (PetscInt)blockSize);
1175 }
1176
1177 PetscFunctionReturn(0);
1178}
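/*
 * Editor's sketch (illustrative; not part of interpolation.c). How the per-particle
 * helper above is driven, condensed from the particle loop in the driver routine
 * below: `cellIDs` and `weights` hold three entries per particle, and for bs=3 the
 * result occupies swarmOut[3p..3p+2] (swarmOut[p] for bs=1). The wrapper name and
 * its parameters are assumptions.
 */
static PetscErrorCode Example_InterpolateOneParticle(const char *fieldName, Cmpnts ***corner_arr,
                                                     const PetscInt *cellIDs, const PetscReal *weights,
                                                     PetscReal *swarmOut, PetscInt p)
{
  PetscErrorCode ierr;
  ierr = InterpolateEulerFieldToSwarmForParticle(fieldName, (void*)corner_arr,
                                                 cellIDs[3*p + 0], cellIDs[3*p + 1], cellIDs[3*p + 2],
                                                 weights[3*p + 0], weights[3*p + 1], weights[3*p + 2],
                                                 (void*)swarmOut, p, 3 /* blockSize: vector */); CHKERRQ(ierr);
  return 0;
}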
1179
1180/**
1181 * @brief Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles,
1182 * using a robust, PETSc-idiomatic two-stage process.
1183 *
1184 * This function first converts the cell-centered input data to corner-node data,
1185 * storing this intermediate result in a PETSc Vec to correctly handle the communication
1186 * of ghost-point information across parallel ranks. It then performs a final trilinear
1187 * interpolation from the ghosted corner data to each particle's location.
1188 *
1189 * Workflow:
1190 * 1. Create temporary PETSc Vecs (`cornerGlobal`, `cornerLocal`) to manage the
1191 * intermediate corner-node data, using the existing nodal DMDA (`user->fda`).
1192 * 2. Call a dispatch macro that uses the runtime block size (`bs`) to select the
1193 * correct underlying center-to-corner function, writing results into `cornerGlobal`.
1194 * 3. Perform a ghost-point exchange (`DMGlobalToLocal`) to transfer the boundary
1195 * data from `cornerGlobal` into the ghost regions of `cornerLocal`.
1196 * 4. Loop over all local particles. For each particle:
1197 * a. Convert its global cell index to a local index relative to the ghosted array.
1198 * b. Check if the particle's interpolation stencil is fully contained within
1199 * the owned+ghost region. If not, log a warning and set the result to zero.
1200 * c. Perform the final trilinear interpolation using the ghosted `cornerLocal` data.
1201 * 5. Restore all PETSc objects to prevent memory leaks.
1202 *
1203 * @param[in] user User context with DMDA, DMSwarm, etc.
1204 * @param[in] fieldLocal_cellCentered Vec (from fda) with cell-centered data (e.g., Ucat).
1205 * @param[in] fieldName Human-readable field name for logging (e.g., "Ucat").
1206 * @param[in] swarmOutFieldName Name of the DMSwarm field where interpolation results go.
1207 *
1208 * @return PetscErrorCode 0 on success.
1209 */
1210PetscErrorCode InterpolateEulerFieldToSwarm(
1211 UserCtx *user,
1212 Vec fieldLocal_cellCentered,
1213 const char *fieldName,
1214 const char *swarmOutFieldName)
1215{
1216 PetscErrorCode ierr;
1217 DM fda = user->fda;
1218 DM swarm = user->swarm;
1219 PetscInt bs;
1220 DMDALocalInfo info;
1221 PetscMPIInt rank;
1222
1223 // Intermediate vectors to manage corner data and its ghosts
1224 Vec cornerGlobal, cornerLocal;
1225
1226 // Generic pointers to the raw data arrays
1227 void *cellCenterPtr_read;
1228 void *cornerPtr_read_with_ghosts;
1229
1230 // Swarm-related pointers
1231 PetscInt *cellIDs = NULL;
1232 PetscReal *weights = NULL;
1233 PetscInt *pids = NULL; // For logging particle IDs in warnings
1234 void *swarmOut = NULL;
1235 PetscInt nLocal;
1236
1237 PetscFunctionBegin;
1238 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1239 ierr = DMDAGetLocalInfo(fda, &info); CHKERRQ(ierr);
1240 ierr = VecGetBlockSize(fieldLocal_cellCentered, &bs); CHKERRQ(ierr);
1241 if (bs != 1 && bs != 3) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "BlockSize must be 1 or 3.");
1242
1243 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting for field '%s'... \n", fieldName);
1244
1245 /* STAGE 1: Center-to-Corner Calculation with Communication */
1246
1247 // (A) Get temporary Vecs from the DMDA pool to manage corner data
1248 ierr = DMGetGlobalVector(fda, &cornerGlobal); CHKERRQ(ierr);
1249 ierr = DMGetLocalVector(fda, &cornerLocal); CHKERRQ(ierr);
1250
1251 ierr = VecSet(cornerGlobal,0.0); CHKERRQ(ierr);
1252 ierr = VecSet(cornerLocal,0.0); CHKERRQ(ierr);
1253
1254 // (B) Get a read-only array from the input cell-centered vector
1255 ierr = DMDAVecGetArrayRead(fda, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);
1256
1257 // (C) Perform the center-to-corner interpolation into the local corner vector
1258 void *cornerPtr_write = NULL;
1259 ierr = DMDAVecGetArray(fda, cornerLocal, &cornerPtr_write); CHKERRQ(ierr);
1260
1261 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Starting center-to-corner interpolation for '%s'.\n", rank, fieldName);
1262
1263 // SINGLE, CLEAN CALL SITE: The macro handles the runtime dispatch based on 'bs'.
1264 ierr = InterpolateFieldFromCenterToCorner(bs, cellCenterPtr_read, cornerPtr_write, user); CHKERRQ(ierr);
1265
1266 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Finished center-to-corner interpolation for '%s'.\n", rank, fieldName);
1267 ierr = DMDAVecRestoreArray(fda, cornerLocal, &cornerPtr_write); CHKERRQ(ierr);
1268
1269 ierr = DMDAVecRestoreArrayRead(fda, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);
1270
1271 //////////// DEBUG
1272 /*
1273 // Log a synchronized header message from all ranks.
1274 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerGlobal BEFORE ghost exchange...\n");
1275
1276 // VecView prints to a PETSc viewer. PETSC_VIEWER_STDOUT_WORLD is a global, synchronized viewer.
1277 // We only need to call it if logging is active for this function.
1278 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1279 ierr = VecView(cornerGlobal, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
1280 }
1281
1282 // Log a synchronized footer message.
1283 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerGlobal.\n");
1284 */
1285 ////////// DEBUG
1286
1287 // ierr = PetscBarrier((PetscObject)cornerGlobal); CHKERRQ(ierr);
1288
1289 // (D) CRITICAL STEP: Communicate the newly computed corner data to fill ghost regions
1290 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Beginning ghost exchange for corner data...\n", rank);
1291 ierr = DMLocalToGlobalBegin(fda, cornerLocal, INSERT_VALUES, cornerGlobal); CHKERRQ(ierr);
1292 ierr = DMLocalToGlobalEnd(fda, cornerLocal, INSERT_VALUES, cornerGlobal); CHKERRQ(ierr);
1293
1294 ierr = DMGlobalToLocalBegin(fda, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
1295 ierr = DMGlobalToLocalEnd(fda, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
1296
1297 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Ghost exchange for corner data complete.\n", rank);
1298
1299 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: About to get array from FDA on all ranks.\n");
1300 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1301 ierr = DMView(user->fda, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
1302 }
1303 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished DMView.\n");
1304
1305 /////// DEBUG
1306 /*
1307 // Log a synchronized header message from all ranks.
1308 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerLocal AFTER ghost exchange...\n");
1309
1310 // Here, we want each rank to print its own local vector.
1311 // PETSC_VIEWER_STDOUT_SELF is the correct tool for this. The LOG_ALLOW_SYNC
1312 // wrapper will ensure the output from each rank is printed sequentially.
1313 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1314 PetscMPIInt rank_d;
1315 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank_d); CHKERRQ(ierr);
1316
1317 // Use a synchronized print to create a clean header for each rank's output
1318 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\n--- cornerLocal (Rank %d) ---\n", rank_d);
1319 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
1320
1321 // Print the local vector's contents for this rank
1322 ierr = VecView(cornerLocal, PETSC_VIEWER_STDOUT_SELF); CHKERRQ(ierr);
1323
1324 // Use a synchronized print to create a clean footer
1325 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "--- End cornerLocal (Rank %d) ---\n", rank_d);
1326 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
1327 }
1328
1329 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerLocal.\n");
1330 */
1331 ///// DEBUG
1332
1333 /* STAGE 2: Particle Interpolation using Ghosted Corner Data */
1334
1335 // (E) Get the local, GHOSTED array of corner data for the final interpolation step
1336 ierr = DMDAVecGetArrayRead(fda, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);
1337
1338 // (F) Retrieve swarm fields for the particle loop
1339 ierr = DMSwarmGetLocalSize(swarm, &nLocal); CHKERRQ(ierr);
1340 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1341 ierr = DMSwarmGetField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
1342 ierr = DMSwarmGetField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1343 ierr = DMSwarmGetField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1344
1345 // (G) Loop over each local particle
1346 for (PetscInt p = 0; p < nLocal; p++) {
1347 PetscInt iCell_global = cellIDs[3 * p + 0];
1348 PetscInt jCell_global = cellIDs[3 * p + 1];
1349 PetscInt kCell_global = cellIDs[3 * p + 2];
1350
 1351 // NOTE: the ghosted corner array returned by DMDAVecGetArrayRead is addressed with GLOBAL node indices,
 1352 PetscInt iCell_local = iCell_global; // so no ghost-start offset (info.gxs) is subtracted here
 1353 PetscInt jCell_local = jCell_global; // (info.gys)
 1354 PetscInt kCell_local = kCell_global; // (info.gzs)
1355
1356 // Safety check: Ensure the entire 8-node stencil is within the valid memory
1357 // region (owned + ghosts) of the local array.
1358
 1359 LOG_ALLOW(LOCAL, LOG_DEBUG, "Ghosted upper bounds on rank %d: %d, %d, %d.\n", rank, info.gxs + info.gxm, info.gys + info.gym, info.gzs + info.gzm);
1360
1361 if (iCell_local < 0 || iCell_local >= info.gxs + info.gxm - 1 ||
1362 jCell_local < 0 || jCell_local >= info.gys + info.gym - 1 ||
1363 kCell_local < 0 || kCell_local >= info.gzs + info.gzm - 1)
1364 {
 1365 LOG_ALLOW(LOCAL, LOG_WARNING,
 1366 "[Rank %d] Particle PID %lld in global cell (%d,%d,%d) is in an un-interpolatable region (requires ghosts of ghosts or is out of bounds). Zeroing field '%s'.\n",
1367 rank, (long long)pids[p], iCell_global, jCell_global, kCell_global, fieldName);
1368
1369 if (bs == 3) {
1370 ((PetscReal*)swarmOut)[3*p + 0] = 0.0;
1371 ((PetscReal*)swarmOut)[3*p + 1] = 0.0;
1372 ((PetscReal*)swarmOut)[3*p + 2] = 0.0;
1373 } else {
1374 ((PetscReal*)swarmOut)[p] = 0.0;
1375 }
1376 continue;
1377 }
1378
1379 PetscReal a1 = weights[3 * p + 0];
1380 PetscReal a2 = weights[3 * p + 1];
1381 PetscReal a3 = weights[3 * p + 2];
1382
 1383 ierr = InterpolateEulerFieldToSwarmForParticle(
 1384 fieldName,
1385 cornerPtr_read_with_ghosts,
1386 iCell_local, jCell_local, kCell_local,
1387 a1, a2, a3,
1388 swarmOut, p, bs);
1389 CHKERRQ(ierr);
1390 }
1391
1392 /* (H) Restore all retrieved arrays and vectors */
1393 ierr = DMDAVecRestoreArrayRead(fda, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);
1394 ierr = DMSwarmRestoreField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1395 ierr = DMSwarmRestoreField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1396 ierr = DMSwarmRestoreField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
1397 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1398
1399 ierr = DMRestoreLocalVector(fda, &cornerLocal); CHKERRQ(ierr);
1400 ierr = DMRestoreGlobalVector(fda, &cornerGlobal); CHKERRQ(ierr);
1401
1402 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished for field '%s'.\n", fieldName);
1403 PetscFunctionReturn(0);
1404}
1405
1406/**
1407 * @brief Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles,
1408 * converting the cell-center data to corner data first, then looping over particles.
1409 *
1410 * Steps:
1411 * 1) Check that the Vec has blockSize=1 or 3 (scalar vs. vector).
1412 * 2) Map the cell-centered Vec to a local array (fieldGlobal -> localPtr).
1413 * 3) Allocate a corner array (cornerPtr) via Allocate3DArray(...), sized (zm+1, ym+1, xm+1).
1414 * 4) Convert from cell-centers to corners via InterpolateFieldFromCenterToCorner(...).
1415 * 5) Restore the cell-centered array.
1416 * 6) Retrieve DMSwarm fields: "DMSwarm_CellID", "weight", and swarmOutFieldName.
1417 * 7) Loop over local particles, clamp i/j/k, skip or zero out if out of range, read (a1,a2,a3).
 1418 * 8) Call InterpolateEulerFieldToSwarmForParticle(...) with cornerPtr to do the final interpolation.
1419 * 9) Restore swarm fields, free the corner array.
1420 *
1421 * @param[in] user User context with:
1422 * - user->da (cell-centered DMDA),
1423 * - user->swarm (DMSwarm).
1424 * @param[in] fieldGlobal Vec with blockSize=1 or 3, storing the cell-centered field.
1425 * @param[in] fieldName Human-readable field name for logging (e.g. "velocity").
 1426 * @param[in] swarmOutFieldName Name of the DMSwarm field where interpolation results go.
1427 *
1428 * @return PetscErrorCode 0 on success, non-zero on error.
1429 */
1430/*
1431PetscErrorCode InterpolateEulerFieldToSwarm(
1432 UserCtx *user,
1433 Vec fieldGlobal, //DMDA Vec containing cell‐center data
1434 const char *fieldName, // e.g., "Ucat"
1435 const char *swarmOutFieldName) // Name of the output DMSwarm field
1436{
1437 PetscErrorCode ierr;
1438 DM fda = user->fda; // DM for cell‐center field data
 1439 // DM da = user->da; // DM for grid information (local indices)
 1440 DM swarm = user->swarm; // DMSwarm for particles
 1441 PetscInt bs; // Block size: 1 (scalar) or 3 (vector)
 1442 DMDALocalInfo info; // Local grid info
 1443 void *localPtr = NULL; // Pointer to cell-center data from fda
 1444 void *cornerPtr = NULL; // Will hold the typed corner array
1445 void *swarmOut = NULL; // Pointer to the swarm output field
1446 PetscInt *cellIDs = NULL; // Particle cell indices from swarm
1447 PetscReal *weights = NULL; // Interpolation coefficients from swarm
1448 PetscInt nLocal;
1449 PetscMPIInt rank;
1450
1451
1452
1453 PetscFunctionBegin;
1454 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1455 // (A) Check block size and get local domain info
1456 ierr = VecGetBlockSize(fieldGlobal, &bs); CHKERRQ(ierr);
1457 if (bs != 1 && bs != 3) {
1458 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP,
1459 "InterpolateEulerFieldToSwarm: blockSize must be 1 or 3, got %d.", (PetscInt)bs);
1460 }
1461 ierr = DMDAGetLocalInfo(fda, &info); CHKERRQ(ierr);
1462
1463 PetscInt xs_node = info.xs;
1464 PetscInt ys_node = info.ys;
1465 PetscInt zs_node = info.zs;
1466
1467 LOG_ALLOW(GLOBAL, LOG_INFO,
1468 "InterpolateEulerFieldToSwarm: Starting with field='%s', blockSize=%d, local domain: (%d x %d x %d)\n",
1469 fieldName, bs, info.xm, info.ym, info.zm);
1470
1471 // (B) Map the cell-centered Vec to a local array using the DM attached to fieldGlobal
1472 LOG_ALLOW(LOCAL,LOG_DEBUG, "[Rank %d] Dumping DM state (user->fda) BEFORE GetArray:\n", rank);
1473
1474 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)) DMView(user->fda, PETSC_VIEWER_STDOUT_SELF);
1475
1476 ierr = DMDAVecGetArrayRead(fda, fieldGlobal, &localPtr); CHKERRQ(ierr);
1477
1478 // Corner domain is larger than cell center domain
1479 PetscInt nz = info.zm;
1480 PetscInt ny = info.ym;
1481 PetscInt nx = info.xm;
1482 LOG_ALLOW(LOCAL, LOG_DEBUG,
1483 "[Rank %d] Allocating corner array of size (%d x %d x %d), blockSize = %d\n",
1484 rank,nx, ny, nz, bs);
1485 if (bs == 1) {
 1486 // Declare a typed pointer for scalar corners
1487 PetscReal ***cornerScal = NULL;
1488 ierr = Allocate3DArray(&cornerScal, nz, ny, nx); CHKERRQ(ierr);
1489 if (!cornerScal) {
1490 LOG_ALLOW(GLOBAL, LOG_DEBUG, "InterpolateEulerFieldToSwarm: 3D Array Allocation for scalar corners failed.\n");
1491 }
 1492 // Save the typed pointer into cornerPtr so later code can cast appropriately
1493 cornerPtr = (void*) cornerScal;
1494 // (D) Convert cell-center data to corners for scalar field
1495 ierr = InterpolateFieldFromCenterToCorner( (PetscReal ***) localPtr, cornerScal, user); CHKERRQ(ierr);
1496 } else {
1497 // For vector fields
1498 Cmpnts ***cornerVec = NULL;
1499 ierr = Allocate3DArray(&cornerVec, nz, ny, nx); CHKERRQ(ierr);
1500 if (!cornerVec) {
1501 LOG_ALLOW(GLOBAL, LOG_DEBUG, "InterpolateEulerFieldToSwarm: 3D Array Allocation for vector corners failed.\n");
1502 }
1503 cornerPtr = (void*) cornerVec;
1504
1505 // Comment out the actual call:
1506 ierr = InterpolateFieldFromCenterToCorner( (Cmpnts ***) localPtr, (Cmpnts ***)cornerPtr, user); CHKERRQ(ierr);
1507 */
1508 /*
1509 // --- DEBUG - BYPASS SECTION ---
1510 LOG_ALLOW(LOCAL,LOG_DEBUG, "[%d] DEBUG: Bypassing InterpolateFieldFromCenterToCorner call.\n", rank);
1511
1512
1513 // Instead, populate cornerPtr with fixed values
1514 // Use local indices 0..nz-1, 0..ny-1, 0..nx-1 because cornerPtr is a local array
1515 if (bs == 3) {
1516 Cmpnts ***cornerVec_bypass = (Cmpnts ***)cornerPtr;
1517 for (PetscInt k_local = 0; k_local < nz; ++k_local) {
1518 for (PetscInt j_local = 0; j_local < ny; ++j_local) {
1519 for (PetscInt i_local = 0; i_local < nx; ++i_local) {
1520 cornerVec_bypass[k_local][j_local][i_local].x = 1.0; // Or 0.0, or rank number
1521 cornerVec_bypass[k_local][j_local][i_local].y = 2.0;
1522 cornerVec_bypass[k_local][j_local][i_local].z = 3.0;
1523 }
1524 }
1525 }
1526 LOG_ALLOW(LOCAL,LOG_DEBUG, "[%d] DEBUG: Finished setting fixed values in cornerVec_bypass.\n", rank);
1527
1528 }
1529 */
1530 /*
1531 LOG_ALLOW(LOCAL, LOG_INFO,
 1532 "InterpolateEulerFieldToSwarm: Rank %d Completed center-to-corner interpolation for field='%s'.\n",
1533 rank,fieldName);
1534 }
1535 // (E) Restore the cell-centered array since we now have corner data
1536 ierr = DMDAVecRestoreArrayRead(fda, fieldGlobal, &localPtr); CHKERRQ(ierr);
1537
1538 // (F) Retrieve swarm fields: cell IDs, weights, and the output field
1539 ierr = DMSwarmGetLocalSize(swarm, &nLocal); CHKERRQ(ierr);
1540 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1541 ierr = DMSwarmGetField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1542 ierr = DMSwarmGetField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1543 LOG_ALLOW(GLOBAL, LOG_INFO,
1544 "InterpolateEulerFieldToSwarm: Found %d local particles to process for field='%s'.\n",
1545 nLocal, fieldName);
1546
1547 // (G) Loop over each local particle and perform the final Interpolatetion from corners
1548 for (PetscInt p = 0; p < nLocal; p++) {
1549 PetscInt iCell_global = (PetscInt)cellIDs[3*p + 0];
1550 PetscInt jCell_global = (PetscInt)cellIDs[3*p + 1];
1551 PetscInt kCell_global = (PetscInt)cellIDs[3*p + 2];
1552
1553// --- Convert GLOBAL cell index to LOCAL cell index ---
1554 // NOTE: This assumes the cell index corresponds directly to the
1555 // "lower-left-front" node index for interpolation purposes.
1556 // We need the local index relative to the cornerPtr array,
1557 // which is indexed from 0 based on owned NODES.
1558
1559 PetscInt iCell_local = iCell_global - xs_node;
1560 PetscInt jCell_local = jCell_global - ys_node;
1561 PetscInt kCell_local = kCell_global - zs_node;
1562
1563 // --- Clamp LOCAL indices and check bounds for the BASE cell index ---
1564 // This check ensures the base index (i,j,k) for the interpolation
1565 // is within the bounds of the locally allocated cornerPtr array.
1566 // The interpolation function will handle the +1 offsets.
1567 // Boundary clamp: adjust indices to be within [0, mx), [0, my), [0, mz)
1568 if (iCell_local >= user->info.mx) iCell_local = user->info.mx - 1;
1569 if (jCell_local >= user->info.my) jCell_local = user->info.my - 1;
1570 if (kCell_local >= user->info.mz) kCell_local = user->info.mz - 1;
1571
1572 if (iCell_local < 0 ||
1573 jCell_local < 0 ||
1574 kCell_local < 0 ||
1575 iCell_local >= user->info.xm-1 ||
1576 jCell_local >= user->info.ym-1 ||
1577 kCell_local >= user->info.zm-1)
1578 {
1579
1580 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] Rank/Domain boundary reached(cell wise): (k,j,i) - (%d,%d,%d).\n",rank,kCell_local,jCell_local,iCell_local);
1581
1582 // Out-of-range: set output to zero
1583 if (bs == 1) {
1584 ((PetscReal*)swarmOut)[p] = 0.0;
1585 } else {
1586 ((PetscReal*)swarmOut)[3*p + 0] = 0.0;
1587 ((PetscReal*)swarmOut)[3*p + 1] = 0.0;
1588 ((PetscReal*)swarmOut)[3*p + 2] = 0.0;
1589 }
1590 continue;
1591 }
1592
1593
 1594 // Retrieve interpolation coefficients (a1, a2, a3) for this particle
1595 PetscReal alpha1 = weights[3*p + 0];
1596 PetscReal alpha2 = weights[3*p + 1];
1597 PetscReal alpha3 = weights[3*p + 2];
1598 */
 1599 /* (Optional) If your final interpolation expects a corner offset, adjust here.
1600 For example, if the cell center corresponds to the average of corners at (iCell,jCell,kCell)
1601 and (iCell+1, jCell+1, kCell+1), you might add +1. For now, we pass indices as is.
1602 */
1603 /*
1604 PetscInt iUse = iCell_local; // + 1 if required
1605 PetscInt jUse = jCell_local; // + 1 if required
1606 PetscInt kUse = kCell_local; // + 1 if required
1607 */
 1608 /* (H) Call the per-particle interpolation function.
1609 This function will use your _Generic macro (TrilinearInterpolation or PiecewiseLinearInterpolation)
1610 on the corner data.
1611 */
1612 /*
1613 ierr = InterpolateEulerFieldToSwarmForParticle(
1614 fieldName, // e.g., "Ucat"
 1615 cornerPtr, // typed pointer: (PetscReal***) or (Cmpnts***)
 1616 iUse, jUse, kUse,
 1617 alpha1, alpha2, alpha3,
 1618 swarmOut, // pointer to swarm output array
1619 p, // particle index
1620 bs); // block size: 1 or 3
1621 CHKERRQ(ierr);
1622 }
1623
1624 // (I) Restore swarm fields
1625 ierr = DMSwarmRestoreField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1626 ierr = DMSwarmRestoreField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1627 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1628
1629 // (J) Deallocate the corner array using the generic deallocation macro
1630 if (bs == 1) {
1631 PetscReal ***cornerScal = (PetscReal ***) cornerPtr;
1632 ierr = Deallocate3DArray(cornerScal, info.zm, info.ym); CHKERRQ(ierr);
1633 } else {
1634 Cmpnts ***cornerVec = (Cmpnts ***) cornerPtr;
1635 ierr = Deallocate3DArray(cornerVec, info.zm, info.ym); CHKERRQ(ierr);
1636 }
1637
1638 LOG_ALLOW(GLOBAL, LOG_INFO,
 1639 "InterpolateEulerFieldToSwarm: Rank %d Completed interpolation of field='%s' for %d local particles.\n",
1640 rank,fieldName, nLocal);
1641
1642 PetscFunctionReturn(0);
1643}
1644*/
1645
1646#undef __FUNCT__
1647#define __FUNCT__ "InterpolateAllFieldsToSwarm"
1648 /**
1649 * @brief Interpolates all relevant fields from the DMDA to the DMSwarm.
1650 *
 1651 * Currently, it interpolates:
1652 * - user->Ucat (vector field) into the DMSwarm field "swarmVelocity".
1653 *
 1654 * To add more fields, duplicate the call to InterpolateEulerFieldToSwarm (see the example sketch after this function) and provide:
1655 * - The global Vec for that field (e.g. user->Tcat for temperature),
1656 * - A human-readable field name (for logging),
1657 * - A DMSwarm output field name (e.g. "swarmTemperature").
1658 *
1659 * @param[in,out] user Pointer to a UserCtx containing:
1660 * - user->da (DM for the grid),
1661 * - user->swarm (DMSwarm for particles),
1662 * - user->Ucat (Vec for the vector field),
1663 * - possibly more fields like user->Tcat, user->Pcat, etc.
1664 *
1665 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1666 */
1667PetscErrorCode InterpolateAllFieldsToSwarm(UserCtx *user)
1668{
1669 PetscErrorCode ierr;
1670 PetscMPIInt rank;
1671 PetscFunctionBegin;
1672
1674
1675 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);
1676
1677 /*
1678 1) Interpolate the 'velocity' field (user->Ucat) into DMSwarm's "swarmVelocity".
 1679 - The function InterpolateEulerFieldToSwarm() loops over all local particles,
 1680 retrieves iCell, jCell, kCell, (a1,a2,a3) from the DMSwarm,
 1681 and calls the appropriate trilinear interpolation routine.
1682
1683 - "velocity" => just a label used in logging or debugging.
1684 - "swarmVelocity" => the DMSwarm field name where we store the result
1685 (assume dof=3 if user->Ucat has blockSize=3).
1686 */
1687
 1688 LOG_ALLOW(GLOBAL, LOG_INFO,
 1689 "InterpolateAllFieldsToSwarm: Interpolation of Ucat to velocity begins on rank %d.\n", rank);
1690 // Make sure to pass the *GLOBAL* Vector to the function below!
1691 ierr = InterpolateEulerFieldToSwarm(user, user->lUcat,
1692 "Ucat",
1693 "velocity"); CHKERRQ(ierr);
1694 /*
 1695 2) (OPTIONAL) If you have more fields, you can interpolate them similarly.
1696
1697 For example, if user->Tcat is a scalar Vec for "temperature" and the DMSwarm
1698 has a field "Temperature" (with dof=1), you could do:
1699
 1700 ierr = InterpolateEulerFieldToSwarm(user, user->Tcat,
 1701 "temperature",
 1702 "swarmTemperature"); CHKERRQ(ierr);
 1703
 1704 For pressure:
 1705 ierr = InterpolateEulerFieldToSwarm(user, user->Pcat,
 1706 "pressure",
 1707 "swarmPressure"); CHKERRQ(ierr);
1708
1709 ...and so forth.
1710 */
1711
1712 /*
1713 3) Optionally, synchronize or log that all fields are done
1714 */
1715 ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
 1716 LOG_ALLOW(GLOBAL, LOG_INFO,
 1717 "[Rank %d] Completed interpolating all fields to the swarm.\n", rank);
1718
1720
1721 PetscFunctionReturn(0);
1722}
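/*
 * Example (editorial sketch, not part of the solver source): extending
 * InterpolateAllFieldsToSwarm() with an additional scalar field, following the
 * commented suggestion above. The names user->Tcat and "swarmTemperature" are
 * hypothetical placeholders; they are assumed to be a DOF=1 Vec in UserCtx and
 * a registered DMSwarm field, respectively.
 *
 * PetscErrorCode InterpolateAllFieldsToSwarm_WithTemperature(UserCtx *user)
 * {
 *   PetscErrorCode ierr;
 *   PetscFunctionBegin;
 *   // Velocity, exactly as in the routine above
 *   ierr = InterpolateEulerFieldToSwarm(user, user->lUcat, "Ucat", "velocity"); CHKERRQ(ierr);
 *   // Additional scalar field: same call pattern, different Vec and swarm field name
 *   ierr = InterpolateEulerFieldToSwarm(user, user->Tcat, "Tcat", "swarmTemperature"); CHKERRQ(ierr);
 *   ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
 *   PetscFunctionReturn(0);
 * }
 */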
1723
1724/////////////////////// Scatter from particles to euler fields
1725
1726/**
1727 * @brief Functions for scattering particle data (scalar or vector) onto
1728 * Eulerian grid fields by averaging contributions within each cell.
1729 *
1730 * This file provides a modular set of functions to perform particle-to-grid
1731 * projection, specifically calculating cell-averaged quantities from particle properties.
1732 * It assumes a PETSc environment using DMDA for the grids and DMSwarm for particles.
1733 *
1734 * Key Features:
1735 * - Handles both scalar (DOF=1) and vector (DOF=3) particle fields.
1736 * - Uses a pre-calculated particle count vector (`ParticleCount`) for normalization.
1737 * - Implicitly determines the target Eulerian DM (typically `user->da` for scalars,
 1738 * `user->fda` for vectors) based on standard field names ("P", "Nvert", "Ucat", "Ucont", "Psi").
 1739 * - Modifies existing Eulerian field vectors (e.g., `user->P`, `user->Ucat`, `user->Psi`) in place.
1740 * - Provides a high-level wrapper function (`ScatterAllParticleFieldsToEulerFields`)
1741 * to easily scatter a standard set of fields.
1742 * - Uses only the base `SETERRQ` macro for error reporting to maximize compiler compatibility.
1743 *
1744 * Dependencies:
1745 * - PETSc library (DMDA, DMSwarm, Vec, IS, PetscLog, etc.)
1746 * - A `UserCtx` struct (defined elsewhere, e.g., "userctx.h") containing pointers
1747 * to relevant DMs (`da`, `fda`), Vecs (`ParticleCount`, `P`, `Nvert`, `Ucat`, etc.),
1748 * and the `DMSwarm` object (`swarm`).
1749 * - A custom DMSwarm field named `"DMSwarm_CellID"` (blockSize=3, type=PETSC_INT)
1750 * must be registered and populated with the local cell indices for each particle.
1751 * - Logging infrastructure (`LOG_ALLOW`, etc.) assumed to be defined elsewhere.
1752 *
1753 * @defgroup scatter_module Particle-to-Grid Scattering
1754 * @{
1755 */
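/*
 * Example (editorial sketch): a minimal call sequence for this module inside a
 * time step, assuming particles have already been migrated and the
 * "DMSwarm_CellID" field is current. Only routines defined in this file are
 * used; the wrapper's documentation below requires user->ParticleCount to be
 * up to date before scattering.
 *
 *   ierr = CalculateParticleCountPerCell(user); CHKERRQ(ierr);         // fills user->ParticleCount
 *   ierr = ScatterAllParticleFieldsToEulerFields(user); CHKERRQ(ierr); // e.g. particle "Psi" -> user->Psi
 */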
1756
1757//-----------------------------------------------------------------------------
1758// Internal Helper Modules (Lower-level building blocks)
1759//-----------------------------------------------------------------------------
1760/**
1761 * @defgroup scatter_module_internal Internal Scattering Helpers
1762 * @ingroup scatter_module
1763 * @brief Lower-level functions used by the main scattering routines.
1764 * @{
1765 */
1766
1767/**
1768 * @brief Determines the target Eulerian DM and expected DOF for scattering a given particle field.
1769 *
1770 * Based on hardcoded rules mapping particle field names ("P", "Nvert", "Ucat", "Ucont")
1771 * to user context DMs (`user->da` or `user->fda`). This function encapsulates the
1772 * policy of where different fields should be scattered. Modify this function to
1773 * add rules for custom fields.
1774 *
1775 * @param[in] user Pointer to the UserCtx containing required DMs (`da`, `fda`).
1776 * @param[in] particleFieldName Name of the particle field.
1777 * @param[out] targetDM Pointer to store the determined target DM (`user->da` or `user->fda`).
1778 * @param[out] expected_dof Pointer to store the expected DOF (1 or 3) for this field.
1779 *
1780 * @return PetscErrorCode Returns 0 on success. Error codes:
1781 * - `PETSC_ERR_ARG_NULL` if required inputs are NULL.
1782 * - `PETSC_ERR_ARG_WRONG` if `particleFieldName` is not recognized.
1783 */
1784PetscErrorCode GetScatterTargetInfo(UserCtx *user, const char *particleFieldName,
1785 DM *targetDM, PetscInt *expected_dof)
1786{
1787 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1788 PetscFunctionBeginUser;
1789
1790 // --- Input Validation ---
1791 // Check for NULL pointers in essential inputs
1792 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL.");
1793 if (!user->da) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->da is NULL.");
1794 if (!user->fda) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->fda is NULL."); // Needed for vector fields
1795 if (!particleFieldName) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "particleFieldName is NULL.");
1796 if (!targetDM) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output targetDM pointer is NULL.");
1797 if (!expected_dof) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output expected_dof pointer is NULL.");
1798
1799 // --- Determine Target DM and DOF based on Field Name ---
1800 // Compare the input field name with known scalar fields targeting 'da'
1801 if (strcmp(particleFieldName, "Psi") == 0 || strcmp(particleFieldName, "Nvert") == 0) {
1802 *expected_dof = 1; // Scalar fields have DOF 1
1803 *targetDM = user->da; // Target the primary scalar DMDA
1804 LOG_ALLOW(GLOBAL, LOG_DEBUG, "GetScatterTargetInfo: Field '%s' targets DM 'da' (DOF=1).\n", particleFieldName);
1805 }
1806 // Compare with known vector fields targeting 'fda'
1807 else if (strcmp(particleFieldName, "Ucat") == 0 || strcmp(particleFieldName, "Ucont") == 0) {
1808 *expected_dof = 3; // Vector fields have DOF 3
1809 *targetDM = user->fda; // Target the vector DMDA (often node-based)
1810 LOG_ALLOW(GLOBAL, LOG_DEBUG, "GetScatterTargetInfo: Field '%s' targets DM 'fda' (DOF=3).\n", particleFieldName);
1811 }
1812 // --- Add rules for other fields here ---
1813 // else if (strcmp(particleFieldName, "SomeOtherScalar") == 0) { *expected_dof = 1; *targetDM = user->da; }
1814 // else if (strcmp(particleFieldName, "SomeOtherVector") == 0) { *expected_dof = 3; *targetDM = user->someOtherDM; }
1815 else {
1816 // The provided field name doesn't match any known rules
1817 *targetDM = NULL; // Indicate failure
1818 *expected_dof = 0;
1819 // Format the error message manually
1820 PetscSNPrintf(msg, sizeof(msg), "GetScatterTargetInfo: Field name '%s' is not recognized for automatic DM selection.", particleFieldName);
1821 // Use SETERRQ with the formatted message and appropriate error code
1822 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); // Use WRONG argument error code
1823 }
1824
1825 PetscFunctionReturn(0);
1826}
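/*
 * Example (editorial sketch): querying the scatter target for a field before
 * creating a compatible result vector. "Ucat" is one of the names recognised
 * above; the validation mirrors what ScatterParticleFieldToEulerField() does
 * further below.
 *
 *   DM       targetDM = NULL;
 *   PetscInt dof      = 0;
 *   Vec      avgVec   = NULL;
 *   ierr = GetScatterTargetInfo(user, "Ucat", &targetDM, &dof); CHKERRQ(ierr); // -> user->fda, dof = 3
 *   ierr = DMCreateGlobalVector(targetDM, &avgVec); CHKERRQ(ierr);             // vector compatible with the target DM
 *   ierr = ScatterParticleFieldToEulerField(user, "Ucat", avgVec); CHKERRQ(ierr);
 *   ierr = VecDestroy(&avgVec); CHKERRQ(ierr);
 */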
1827
1828
1829/**
1830 * @brief Accumulates a particle field (scalar or vector) into a target grid sum vector.
1831 *
1832 * This function iterates through local particles, identifies their cell using the
1833 * `"DMSwarm_CellID"` field, and adds the particle's field value (`particleFieldName`)
1834 * to the corresponding cell location in the `gridSumVec`. It handles both scalar
1835 * (DOF=1) and vector (DOF=3) fields automatically based on the DOF of `gridSumDM`.
1836 *
1837 * IMPORTANT: The caller must ensure `gridSumVec` is zeroed before calling this
1838 * function if a fresh sum calculation is desired.
1839 *
1840 * @param[in] swarm The DMSwarm containing particles.
1841 * @param[in] particleFieldName Name of the field on the particles (must match DOF of `gridSumDM`).
1842 * @param[in] gridSumDM The DMDA associated with `gridSumVec`. Its DOF determines
1843 * how many components are accumulated.
1844 * @param[in,out] gridSumVec The Vec (associated with `gridSumDM`) to accumulate sums into.
1845 *
1846 * @return PetscErrorCode 0 on success. Errors if fields don't exist, DMs are incompatible,
1847 * or memory access fails.
1848 */
1849PetscErrorCode AccumulateParticleField(DM swarm, const char *particleFieldName,
1850 DM gridSumDM, Vec gridSumVec)
1851{
1852 PetscErrorCode ierr;
1853 PetscInt dof; // DOF determined from gridSumDM
1854 PetscInt nlocal, p; // Local particle count and loop index
1855 const PetscReal *particle_arr = NULL; // Pointer to particle field data array (assuming Real)
1856 const PetscInt *cell_id_arr = NULL; // Pointer to particle cell ID array ("DMSwarm_CellID", Int)
1857 PetscScalar *sum_arr_ptr = NULL; // Pointer to grid sum vector data array (Scalar)
1858 PetscInt gxs, gys, gzs; // Start indices of local ghosted patch (often 0)
1859 PetscInt gxm, gym, gzm; // Dimensions of local ghosted patch (including ghosts)
1860 PetscMPIInt rank; // MPI rank for logging
1861 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1862
1863 PetscFunctionBeginUser;
1864 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1865
1866 // --- Get DOF from the target grid DM ---
1867 ierr = DMDAGetInfo(gridSumDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1868 // Validate that the DOF is supported (currently 1 or 3)
1869 if (dof != 1 && dof != 3) {
1870 PetscSNPrintf(msg, sizeof(msg), "AccumulateParticleField: gridSumDM DOF must be 1 or 3, got %d.", dof);
1871 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg);
1872 }
1873
1874 // --- Get Particle Data Arrays ---
1875 // DMSwarmGetField will return an error if the field doesn't exist, caught by CHKERRQ.
1876 ierr = DMSwarmGetField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
1877 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);
1878
1879 // Get number of local particles *after* successfully getting field pointers
1880 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1881
1882 // --- Get Grid Sum Vector Array & Dimensions ---
1883 ierr = VecGetArray(gridSumVec, &sum_arr_ptr); CHKERRQ(ierr);
1884 // Get dimensions needed for calculating flat index within the local ghosted array
1885 ierr = DMDAGetGhostCorners(gridSumDM, &gxs, &gys, &gzs, &gxm, &gym, &gzm); CHKERRQ(ierr);
1886
1887 // --- Accumulate Locally ---
1888 LOG_ALLOW(LOCAL, LOG_DEBUG, "AccumulateParticleField (Rank %d): Accumulating '%s' (DOF=%d) from %d particles using CellID field 'DMSwarm_CellID'.\n", rank, particleFieldName, dof, nlocal);
1889 // Loop over all particles owned by this process
1890 for (p = 0; p < nlocal; ++p) {
1891 // Extract local cell indices (relative to start of ghosted patch, [0..gxm-1], etc.)
1892 // Assumes DMSwarm_CellID stores (i, j, k) contiguously for each particle.
1893 PetscInt pidx = cell_id_arr[p * 3 + 0]; // Local i-index
1894 PetscInt pidy = cell_id_arr[p * 3 + 1]; // Local j-index
1895 PetscInt pidz = cell_id_arr[p * 3 + 2]; // Local k-index
1896
1897 // Bounds Check: Ensure the particle's cell index is within the valid local ghosted region
1898 if (pidx >= 0 && pidx < gxm && pidy >= 0 && pidy < gym && pidz >= 0 && pidz < gzm)
1899 {
 1900 // Calculate the flat 1D index for this cell within the linear ghosted array,
 1901 // using PETSc's standard C-style row-major ordering:
 1902 // k varies slowest, then j, then i (fastest)
1903 PetscInt cell_flat_idx = (pidz * gym + pidy) * gxm + pidx;
1904
1905 // Calculate the base index for this particle's data in particle_arr
1906 PetscInt particle_base_idx = p * dof;
1907 // Calculate the base index for this cell's data in sum_arr_ptr
1908 PetscInt grid_base_idx = cell_flat_idx * dof;
1909
1910 // Add particle components to the grid sum vector components
1911 for (PetscInt c = 0; c < dof; ++c) {
1912 sum_arr_ptr[grid_base_idx + c] += particle_arr[particle_base_idx + c];
1913 }
1914 } else {
1915 // Log a warning if a particle's CellID is outside the expected local region.
1916 // This might indicate particles needing migration or boundary issues.
1917 LOG_ALLOW(LOCAL, LOG_WARNING, "AccumulateParticleField (Rank %d): Particle %d (field '%s') has out-of-bounds CellID (%d, %d, %d). Ghosted dims: %dx%dx%d. Skipping.\n",
1918 rank, p, particleFieldName, pidx, pidy, pidz, gxm, gym, gzm);
1919 }
1920 } // End of particle loop
1921
1922 // --- Restore Access to Arrays ---
1923 ierr = VecRestoreArray(gridSumVec, &sum_arr_ptr); CHKERRQ(ierr);
1924 ierr = DMSwarmRestoreField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
1925 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);
1926
1927 // --- Assemble Global Sum Vector ---
1928 // Crucial for parallel execution: sums contributions for cells shared across process boundaries.
1929 LOG_ALLOW(GLOBAL, LOG_DEBUG, "AccumulateParticleField: Assembling global sum vector for '%s'.\n", particleFieldName);
1930 ierr = VecAssemblyBegin(gridSumVec); CHKERRQ(ierr);
1931 ierr = VecAssemblyEnd(gridSumVec); CHKERRQ(ierr);
1932
1933 PetscFunctionReturn(0);
1934}
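/*
 * Example (editorial sketch): accumulating a scalar particle field into a
 * freshly zeroed sum vector, as the note above requires. "Psi" is assumed to
 * be a registered DOF=1 swarm field; the sum vector lives on user->da.
 *
 *   Vec sumVec;
 *   ierr = DMCreateGlobalVector(user->da, &sumVec); CHKERRQ(ierr);
 *   ierr = VecSet(sumVec, 0.0); CHKERRQ(ierr);   // caller must zero before accumulating
 *   ierr = AccumulateParticleField(user->swarm, "Psi", user->da, sumVec); CHKERRQ(ierr);
 *   // sumVec now holds per-cell sums, assembled across ranks. Turning the sums
 *   // into averages is shown in the sketch after NormalizeGridVectorByCount() below.
 *   ierr = VecDestroy(&sumVec); CHKERRQ(ierr);   // destroy once the averages have been formed
 */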
1935
1936
1937/**
1938 * @brief Normalizes a grid vector of sums by a grid vector of counts to produce an average.
1939 *
1940 * Calculates `avgVec[i] = sumVec[i] / countVec[i]` for each component of each
1941 * cell owned by the current process, provided `countVec[i] > 0`. Otherwise, sets `avgVec[i] = 0`.
1942 * Handles both scalar (DOF=1) and vector (DOF=3) data fields based on the DOF of `dataDM`.
1943 * Uses DMDA multi-dimensional array accessors (`DMDAVecGetArray...`) for safe and convenient indexing.
1944 *
1945 * @param[in] countDM The DMDA associated with `countVec` (must have DOF=1).
1946 * @param[in] countVec The Vec containing particle counts per cell (read-only).
1947 * @param[in] dataDM The DMDA associated with `sumVec` and `avgVec` (must have DOF=1 or DOF=3).
1948 * @param[in] sumVec The Vec containing the accumulated sums per cell (read-only).
1949 * @param[in,out] avgVec The Vec where the calculated averages will be stored (overwritten). Must be
1950 * associated with `dataDM`.
1951 *
1952 * @return PetscErrorCode 0 on success. Errors on incompatible DMs or memory access failure.
1953 */
1954PetscErrorCode NormalizeGridVectorByCount(DM countDM, Vec countVec,
1955 DM dataDM, Vec sumVec, Vec avgVec)
1956{
1957 PetscErrorCode ierr;
1958 PetscInt data_dof;
1959 PetscInt count_dof;
1960 PetscMPIInt rank;
1961 char msg[ERROR_MSG_BUFFER_SIZE];
1962
1963 // Pointers for DMDA array accessors - declare specific types
1964 PetscScalar ***count_arr_3d = NULL; // For DOF=1 count vector (3D DMDA)
1965 PetscScalar ***sum_arr_scalar = NULL; // For DOF=1 sum vector (3D DMDA)
1966 PetscScalar ***avg_arr_scalar = NULL; // For DOF=1 avg vector (3D DMDA)
 1967 PetscScalar ****sum_arr_vector = NULL; // For DOF=3 sum vector (3D DMDA + trailing DOF index)
 1968 PetscScalar ****avg_arr_vector = NULL; // For DOF=3 avg vector (3D DMDA + trailing DOF index)
1969
1970
1971 PetscFunctionBeginUser;
1972 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1973
1974 // --- Validation ---
1975 ierr = DMDAGetInfo(countDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &count_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1976 ierr = DMDAGetInfo(dataDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &data_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1977 if (count_dof != 1) { PetscSNPrintf(msg, sizeof(msg), "countDM must have DOF=1, got %d.", count_dof); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); }
1978 if (data_dof != 1 && data_dof != 3) { PetscSNPrintf(msg, sizeof(msg), "dataDM DOF must be 1 or 3, got %d.", data_dof); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); }
1979
1980 // --- Get Array Access using appropriate DMDA accessors ---
1981 ierr = DMDAVecGetArrayRead(countDM, countVec, &count_arr_3d); CHKERRQ(ierr);
1982
1983 if (data_dof == 1) {
1984 ierr = DMDAVecGetArrayRead(dataDM, sumVec, &sum_arr_scalar); CHKERRQ(ierr);
1985 ierr = DMDAVecGetArray(dataDM, avgVec, &avg_arr_scalar); CHKERRQ(ierr);
1986 } else { // data_dof == 3
1987 ierr = DMDAVecGetArrayDOFRead(dataDM, sumVec, &sum_arr_vector); CHKERRQ(ierr);
1988 ierr = DMDAVecGetArrayDOF(dataDM, avgVec, &avg_arr_vector); CHKERRQ(ierr);
1989 }
1990
1991 // Get the corners (global start indices) and dimensions of the *local owned* region
1992 PetscInt xs, ys, zs, xm, ym, zm;
1993 ierr = DMDAGetCorners(countDM, &xs, &ys, &zs, &xm, &ym, &zm); CHKERRQ(ierr);
1994
1995 // --- Normalize Over Owned Cells ---
1996 LOG_ALLOW(LOCAL, LOG_DEBUG, "NormalizeGridVectorByCount (Rank %d): Normalizing DOF=%d data over owned range [%d:%d, %d:%d, %d:%d].\n",
1997 rank, data_dof, xs, xs+xm, ys, ys+ym, zs, zs+zm);
1998
1999 // Loop using GLOBAL indices (i, j, k) over the range owned by this process
2000 for (PetscInt k = zs; k < zs + zm; ++k) {
2001 for (PetscInt j = ys; j < ys + ym; ++j) {
2002 for (PetscInt i = xs; i < xs + xm; ++i) {
2003
2004 // Access the count using standard 3D indexing
2005 PetscScalar count = count_arr_3d[k][j][i];
2006
2007 if (PetscRealPart(count) > 0.5) { // Use tolerance for float comparison
2008 if (data_dof == 1) {
2009 // Access scalar sum/avg using standard 3D indexing
2010 avg_arr_scalar[k][j][i] = sum_arr_scalar[k][j][i] / count;
2011 } else { // data_dof == 3
 2012 // Access vector components via the trailing DOF index returned by DMDAVecGetArrayDOF
 2013 for (PetscInt c = 0; c < data_dof; ++c) {
 2014 avg_arr_vector[k][j][i][c] = sum_arr_vector[k][j][i][c] / count;
2015 }
2016 }
2017 } else { // count is zero or negative
2018 // Set average to zero
2019 if (data_dof == 1) {
2020 avg_arr_scalar[k][j][i] = 0.0;
2021 } else { // data_dof == 3
2022 for (PetscInt c = 0; c < data_dof; ++c) {
 2023 avg_arr_vector[k][j][i][c] = 0.0;
2024 }
2025 }
2026 } // end if count > 0.5
2027 } // end i loop
2028 } // end j loop
2029 } // end k loop
2030
2031 // --- Restore Arrays using appropriate functions ---
2032 ierr = DMDAVecRestoreArrayRead(countDM, countVec, &count_arr_3d); CHKERRQ(ierr);
2033 if (data_dof == 1) {
2034 ierr = DMDAVecRestoreArrayRead(dataDM, sumVec, &sum_arr_scalar); CHKERRQ(ierr);
2035 ierr = DMDAVecRestoreArray(dataDM, avgVec, &avg_arr_scalar); CHKERRQ(ierr);
2036 } else { // data_dof == 3
2037 ierr = DMDAVecRestoreArrayDOFRead(dataDM, sumVec, &sum_arr_vector); CHKERRQ(ierr);
2038 ierr = DMDAVecRestoreArrayDOF(dataDM, avgVec, &avg_arr_vector); CHKERRQ(ierr);
2039 }
2040
2041 // --- Assemble Final Average Vector ---
2042 LOG_ALLOW(GLOBAL, LOG_DEBUG, "NormalizeGridVectorByCount: Assembling final average vector (DOF=%d).\n", data_dof);
2043 ierr = VecAssemblyBegin(avgVec); CHKERRQ(ierr);
2044 ierr = VecAssemblyEnd(avgVec); CHKERRQ(ierr);
2045
2046 PetscFunctionReturn(0);
2047}
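/*
 * Example (editorial sketch): completing the cell average. This continues the
 * sketch after AccumulateParticleField() above; user->ParticleCount (DOF=1 on
 * user->da) is assumed to be up to date via CalculateParticleCountPerCell().
 *
 *   Vec avgVec;
 *   ierr = VecDuplicate(sumVec, &avgVec); CHKERRQ(ierr);
 *   ierr = NormalizeGridVectorByCount(user->da, user->ParticleCount,  // counts (DOF=1)
 *                                     user->da, sumVec, avgVec);      // scalar sums -> averages
 *   CHKERRQ(ierr);
 *   // avgVec[i] = sumVec[i] / count[i] where count > 0, else 0.
 *   ierr = VecDestroy(&avgVec); CHKERRQ(ierr);
 */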
2048
2049/** @} */ // End of scatter_module_internal group
2050
2051//-----------------------------------------------------------------------------
2052// User-Facing API
2053//-----------------------------------------------------------------------------
2054//-----------------------------------------------------------------------------
2055// MODULE 4: Internal Scatter Orchestration Helper - No PetscErrorClear
2056//-----------------------------------------------------------------------------
2057/**
2058 * @brief Internal helper function to orchestrate the scatter operation (accumulate + normalize).
2059 * @ingroup scatter_module_internal
2060 *
2061 * Manages the temporary sum vector and calls the accumulation and normalization
2062 * functions. Assumes caller determined target DM and DOF. Checks for particle field existence.
2063 * NOTE: If DMSwarmGetField fails, the subsequent SETERRQ will overwrite the original error.
2064 *
2065 * @param[in] user Pointer to UserCtx containing swarm, ParticleCount, da.
2066 * @param[in] particleFieldName Name of the field in the DMSwarm.
2067 * @param[in] targetDM The DMDA where the final average and intermediate sum reside.
2068 * @param[in] expected_dof The expected DOF (1 or 3) for the targetDM and field.
2069 * @param[in,out] eulerFieldAverageVec The pre-created Vec associated with targetDM to store the result.
2070 *
2071 * @return PetscErrorCode 0 on success. Errors if particle field doesn't exist or
2072 * underlying helpers fail.
2073 */
2074static PetscErrorCode ScatterParticleFieldToEulerField_Internal(UserCtx *user,
 2075 const char *particleFieldName,
2076 DM targetDM,
2077 PetscInt expected_dof,
2078 Vec eulerFieldAverageVec)
2079{
2080 PetscErrorCode ierr;
2081 Vec sumVec = NULL;
2082 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
2083
2084 PetscFunctionBeginUser;
2085
2086 if (!user || !user->swarm || !user->ParticleCount || !particleFieldName || !targetDM || !eulerFieldAverageVec)
2087 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "NULL input provided to ScatterParticleFieldToEulerField_Internal.");
2088
2089 // --- Check if Particle Field Exists ---
2090 // Attempt a GetField call; if it fails, the field doesn't exist.
2091 // We let CHKERRQ handle the error directly if the field doesn't exist OR
2092 // we catch it specifically to provide a more tailored message.
2093
2094 /*
2095 LOG_ALLOW(GLOBAL,LOG_DEBUG,"Field %s being accessed to check existence \n",particleFieldName);
2096 ierr = DMSwarmGetField(user->swarm, particleFieldName, NULL, NULL, NULL);
2097 if (ierr) { // If GetField returns an error
2098 PetscSNPrintf(msg, sizeof(msg), "Particle field '%s' not found in DMSwarm for scattering.", particleFieldName);
2099 // Directly set the error, overwriting the one from GetField
2100 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, msg);
2101 }
2102 ierr = DMSwarmRestoreField(user->swarm, particleFieldName, NULL, NULL, NULL);
2103 */
2104
2105 // --- Setup Temporary Sum Vector ---
2106 ierr = VecDuplicate(eulerFieldAverageVec, &sumVec); CHKERRQ(ierr);
2107 ierr = VecSet(sumVec, 0.0); CHKERRQ(ierr);
2108 ierr = PetscSNPrintf(msg, sizeof(msg), "TempSum_%s", particleFieldName); CHKERRQ(ierr);
2109 ierr = PetscObjectSetName((PetscObject)sumVec, msg); CHKERRQ(ierr);
2110
2111 // --- Accumulate ---
2112 // This will call DMSwarmGetField again. If it failed above, it will likely fail here too,
2113 // unless the error was cleared somehow between the check and here (unlikely).
2114 // If the check above was skipped (Option 1), this is where the error for non-existent
2115 // field will be caught by CHKERRQ.
2116 ierr = AccumulateParticleField(user->swarm, particleFieldName, targetDM, sumVec); CHKERRQ(ierr);
2117
2118 // Calculate the number of particles per cell.
 2119 ierr = CalculateParticleCountPerCell(user); CHKERRQ(ierr);
2120 // --- Normalize ---
2121 ierr = NormalizeGridVectorByCount(user->da, user->ParticleCount, targetDM, sumVec, eulerFieldAverageVec); CHKERRQ(ierr);
2122
2123 // --- Cleanup ---
2124 ierr = VecDestroy(&sumVec); CHKERRQ(ierr);
2125
2126 PetscFunctionReturn(0);
2127}
2128
2129/**
2130 * @brief Scatters a particle field (scalar or vector) to the corresponding Eulerian field average.
2131 * @ingroup scatter_module
2132 *
2133 * This is the primary user-facing function for scattering a single field.
2134 * It determines the target Eulerian DM (`user->da` or `user->fda`) based on the
2135 * `particleFieldName`, validates that the provided `eulerFieldAverageVec` is compatible
2136 * with that target DM, and then orchestrates the scatter operation by calling
2137 * an internal helper function. The final averaged result is stored IN-PLACE in
2138 * the `eulerFieldAverageVec`.
2139 *
2140 * @param[in] user Pointer to UserCtx containing `da`, `fda`, `swarm`, `ParticleCount`.
2141 * @param[in] particleFieldName Name of the field in the DMSwarm (e.g., "Psi", "Ucat").
2142 * @param[in,out] eulerFieldAverageVec Pre-created Vec associated with the correct target DM
2143 * (implicitly `da` or `fda`). Result stored here.
2144 * This vector should be zeroed by the caller if desired
2145 * before this function (e.g., using `VecSet`). The wrapper
2146 * `ScatterAllParticleFieldsToEulerFields` handles this zeroing.
2147 *
2148 * @return PetscErrorCode 0 on success. Errors on NULL input, unrecognized field name,
2149 * incompatible target vector, or if the particle field doesn't exist.
2150 */
2151PetscErrorCode ScatterParticleFieldToEulerField(UserCtx *user,
 2152 const char *particleFieldName,
2153 Vec eulerFieldAverageVec)
2154{
2155 PetscErrorCode ierr;
2156 DM targetDM = NULL; // Will point to user->da or user->fda
2157 PetscInt expected_dof = 0; // Will be 1 or 3
2158 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
2159
2160 PetscFunctionBeginUser;
2161
2162 // --- Essential Input Validation ---
2163 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL.");
2164 if (!user->swarm) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->swarm is NULL.");
2165 if (!user->ParticleCount) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->ParticleCount is NULL.");
2166 if (!eulerFieldAverageVec) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output eulerFieldAverageVec is NULL.");
2167 // particleFieldName validity checked within GetScatterTargetInfo
2168
2169 // --- Determine Target DM & DOF using the helper function ---
2170 ierr = GetScatterTargetInfo(user, particleFieldName, &targetDM, &expected_dof); CHKERRQ(ierr);
2171 // If GetScatterTargetInfo failed (e.g., unknown name), CHKERRQ would have returned.
2172
2173 // --- Validate the provided Target Vec's Compatibility ---
2174 DM vec_dm;
2175 PetscInt vec_dof;
2176 // Check that the provided average vector has a DM associated with it
2177 ierr = VecGetDM(eulerFieldAverageVec, &vec_dm); CHKERRQ(ierr);
2178 if (!vec_dm) {
2179 PetscSNPrintf(msg, sizeof(msg), "Provided eulerFieldAverageVec for field '%s' does not have an associated DM.", particleFieldName);
2180 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, msg);
2181 }
2182 // Get the block size (DOF) of the provided vector
2183 ierr = VecGetBlockSize(eulerFieldAverageVec, &vec_dof); CHKERRQ(ierr);
2184 // Compare the vector's associated DM with the one determined by the field name
2185 if (vec_dm != targetDM) {
2186 const char *target_dm_name = "targetDM", *vec_dm_name = "vec_dm";
2187 // Get actual names if possible for a more informative error message
2188 PetscObjectGetName((PetscObject)targetDM, &target_dm_name);
2189 PetscObjectGetName((PetscObject)vec_dm, &vec_dm_name);
2190 PetscSNPrintf(msg, sizeof(msg), "Provided eulerFieldAverageVec associated with DM '%s', but field '%s' requires scatter to DM '%s'.", vec_dm_name, particleFieldName, target_dm_name);
2191 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, msg);
2192 }
2193 // Compare the vector's DOF with the one expected for the field name
2194 if (vec_dof != expected_dof) {
2195 PetscSNPrintf(msg, sizeof(msg), "Field '%s' requires DOF %d, but provided eulerFieldAverageVec has DOF %d.", particleFieldName, expected_dof, vec_dof);
2196 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, msg);
2197 }
2198
2199 // --- Perform Scatter using Internal Helper ---
2200 // Log intent before calling the core logic
2201 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ScatterParticleFieldToEulerField: Scattering field '%s' (DOF=%d).\n", particleFieldName, expected_dof);
2202 ierr = ScatterParticleFieldToEulerField_Internal(user, // Pass user context
2203 particleFieldName, // Name of particle field
2204 targetDM, // Determined target DM (da or fda)
2205 expected_dof, // Determined DOF (1 or 3)
2206 eulerFieldAverageVec); // The output vector
2207 CHKERRQ(ierr); // Handle potential errors from the internal function
2208
2209 LOG_ALLOW(GLOBAL, LOG_INFO, "ScatterParticleFieldToEulerField: Successfully scattered field '%s'.\n", particleFieldName);
2210 PetscFunctionReturn(0);
2211}
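/*
 * Example (editorial sketch): scattering the particle scalar "Psi" into the
 * existing Eulerian vector user->Psi, which is exactly what the wrapper below
 * does. The target Vec must already be associated with the DM selected by
 * GetScatterTargetInfo() (user->da for "Psi").
 *
 *   ierr = VecSet(user->Psi, 0.0); CHKERRQ(ierr);  // optional: start from a clean field
 *   ierr = ScatterParticleFieldToEulerField(user, "Psi", user->Psi); CHKERRQ(ierr);
 */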
2212
2213#undef __FUNCT__
2214#define __FUNCT__ "ScatterAllParticleFieldsToEulerFields"
2215/**
2216 * @brief Scatters a predefined set of particle fields to their corresponding Eulerian fields.
2217 * @ingroup scatter_module
2218 *
2219 * This convenience function calls the unified `ScatterParticleFieldToEulerField`
2220 * for a standard set of fields (currently just "Psi", others commented out). It assumes
2221 * the target Eulerian Vec objects (e.g., `user->Psi`, `user->Ucat`) exist in the
2222 * UserCtx structure and are correctly associated with their respective DMs (`user->da`
2223 * or `user->fda`).
2224 *
2225 * **IMPORTANT:** It zeros the target Vecs before scattering. Ensure `user->ParticleCount`
2226 * has been computed accurately *before* calling this function, reflecting the current
2227 * particle distribution.
2228 *
2229 * @param[in,out] user Pointer to the UserCtx structure containing all required DMs,
2230 * Vecs (`ParticleCount`, target Eulerian fields like `P`, `Ucat`), and `swarm`.
2231 *
2232 * @return PetscErrorCode 0 on success. Errors if prerequisites (like ParticleCount)
2233 * are missing or if underlying scatter calls fail (e.g., particle field missing).
2234 */
2235PetscErrorCode ScatterAllParticleFieldsToEulerFields(UserCtx *user)
2236{
2237 PetscErrorCode ierr;
2238 PetscFunctionBeginUser;
2240
2241 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting scattering of specified particle fields to Eulerian grids.\n");
2242
2243 // --- Pre-computation Check: Ensure Particle Counts are Ready ---
2244 if (!user->ParticleCount) {
2245 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "UserCtx->ParticleCount is NULL. Compute counts before calling ScatterAllParticleFieldsToEulerFields.");
2246 }
2247
2248 // --- Scatter Particle Field "Psi" -> Eulerian Field user->P (on da) ---
 2249 // Check if the target Eulerian vector 'user->Psi' exists.
2250 if (user->Psi) {
2251
2252 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Psi' to user->Psi.\n");
2253 // Zero the target vector before accumulating the new average for this step/call.
2254
2255 // Debug Verification ------------------------------------------------
2256 Vec swarm_Psi;
2257 PetscReal Avg_Psi,Avg_swarm_Psi;
2258
 2259 ierr = VecMean(user->Psi, &Avg_Psi); CHKERRQ(ierr);
 2260 LOG_ALLOW(GLOBAL, LOG_DEBUG, " Average of Scalar(Psi) before scatter: %.4f.\n", Avg_Psi);
 2261
 2262 ierr = DMSwarmCreateGlobalVectorFromField(user->swarm, "Psi", &swarm_Psi); CHKERRQ(ierr);
 2263 ierr = VecMean(swarm_Psi, &Avg_swarm_Psi); CHKERRQ(ierr);
 2264
 2265 LOG_ALLOW(GLOBAL, LOG_DEBUG, " Average of Particle Scalar(Psi): %.4f.\n", Avg_swarm_Psi);
 2266
 2267 ierr = DMSwarmDestroyGlobalVectorFromField(user->swarm, "Psi", &swarm_Psi); CHKERRQ(ierr);
2268 // Debug----------------------------------------------------------------
2269
2270 //ierr = VecSet(user->P, 0.0); CHKERRQ(ierr);
2271 // Call the unified scatter function. It will handle DM determination and validation.
2272 // It will also error out if the *particle* field "Psi" doesn't exist in the swarm.
2273 ierr = ScatterParticleFieldToEulerField(user, "Psi", user->Psi); CHKERRQ(ierr);
 2274 ierr = VecMean(user->Psi, &Avg_Psi); CHKERRQ(ierr);
2275
2276 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Scalar(Psi) after scatter: %.4f.\n",Avg_Psi);
2277 } else {
2278 // Only log a warning if the target Eulerian field is missing in the context.
2279 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Psi': UserCtx->Psi is NULL.\n");
2280 }
2281
2282 // --- (Commented Out) Scatter Other Fields ---
2283 // To enable scattering for other fields, uncomment the relevant block
2284 // AND ensure:
2285 // 1. The corresponding particle field (e.g., "Nvert", "Ucat") exists in user->swarm.
 2286 // 2. The corresponding target Eulerian Vec (e.g., user->Nvert, user->Ucat) exists in the UserCtx.
2287 // 3. The target Eulerian Vec is associated with the correct DM (da for Nvert, fda for Ucat).
2288
2289 /*
2290 // Scatter Particle Field "Nvert" -> Eulerian Field user->Nvert (on da)
2291 if (user->Nvert) {
2292 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Nvert' to user->Nvert.\n");
2293 ierr = VecSet(user->Nvert, 0.0); CHKERRQ(ierr);
2294 ierr = ScatterParticleFieldToEulerField(user, "Nvert", user->Nvert); CHKERRQ(ierr);
2295 } else {
2296 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Nvert': UserCtx->Nvert is NULL.\n");
2297 }
2298 */
2299
2300 /*
2301 // Scatter Particle Field "Ucat" -> Eulerian Field user->Ucat (on fda)
2302 if (user->Ucat) {
2303 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Ucat' to user->Ucat.\n");
2304 ierr = VecSet(user->Ucat, 0.0); CHKERRQ(ierr);
2305 ierr = ScatterParticleFieldToEulerField(user, "Ucat", user->Ucat); CHKERRQ(ierr);
2306 } else {
2307 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Ucat': UserCtx->Ucat is NULL.\n");
2308 }
2309 */
2310
2311 /*
2312 // Scatter Particle Field "Ucont" -> Eulerian Field user->Ucont (on fda)
2313 if (user->Ucont) {
2314 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Ucont' to user->Ucont.\n");
2315 ierr = VecSet(user->Ucont, 0.0); CHKERRQ(ierr);
2316 ierr = ScatterParticleFieldToEulerField(user, "Ucont", user->Ucont); CHKERRQ(ierr);
2317 } else {
2318 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Ucont': UserCtx->Ucont is NULL.\n");
2319 }
2320 */
2321
2322 // Add more fields as needed following the pattern above...
2323
2324 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished scattering specified particle fields.\n");
2326 PetscFunctionReturn(0);
2327}
2328
2329/** @} */ // End of scatter_module group
2330
2331/**
2332 * @brief Interpolates a scalar field from corner nodes to all face centers.
2333 *
2334 * This routine computes the average of the four corner-node values
2335 * defining each face of a hexahedral cell:
2336 * - X-faces (perpendicular to X): face between (i-1,i) in X-dir
2337 * - Y-faces (perpendicular to Y): face between (j-1,j) in Y-dir
2338 * - Z-faces (perpendicular to Z): face between (k-1,k) in Z-dir
2339 *
2340 * @param[in] corner_arr Ghosted node-centered array (global indexing) from user->fda.
2341 * @param[out] faceX_arr Local array for X-faces sized [zm][ym][xm+1].
2342 * @param[out] faceY_arr Local array for Y-faces sized [zm][ym+1][xm].
2343 * @param[out] faceZ_arr Local array for Z-faces sized [zm+1][ym][xm].
 2344 * @param[in] user User context providing the node-based DMDA 'fda' (owned ranges are obtained via GetOwnedCellRange).
2345 *
2346 * @return PetscErrorCode 0 on success, non-zero on failure.
2347 */
2348PetscErrorCode InterpolateCornerToFaceCenter_Scalar(
 2349 PetscReal ***corner_arr,
2350 PetscReal ***faceX_arr,
2351 PetscReal ***faceY_arr,
2352 PetscReal ***faceZ_arr,
2353 UserCtx *user)
2354{
2355 PetscErrorCode ierr;
2356 DMDALocalInfo info;
2357 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2358
2359 // Determine owned-cell ranges based on corner-node ownership
2360 PetscInt xs, xm, ys, ym, zs, zm;
2361 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2362 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2363 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2364
2365 // Global exclusive end indices for cells
2366 PetscInt xe = xs + xm;
2367 PetscInt ye = ys + ym;
2368 PetscInt ze = zs + zm;
2369
2370 // --- X‐faces: loops k=zs..ze-1, j=ys..ye-1, i=xs..xe (xm+1 faces per row) ---
2371 for (PetscInt k = zs; k < ze; ++k) {
2372 PetscInt k_loc = k - zs;
2373 for (PetscInt j = ys; j < ye; ++j) {
2374 PetscInt j_loc = j - ys;
2375 for (PetscInt i = xs; i <= xe; ++i) {
2376 PetscInt i_loc = i - xs; // 0..xm
2377 // Average the four corners of the Y-Z face at X = i
2378 PetscReal sum = corner_arr[k ][j ][i]
2379 + corner_arr[k+1][j ][i]
2380 + corner_arr[k ][j+1][i]
2381 + corner_arr[k+1][j+1][i];
2382 faceX_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2383 }
2384 }
2385 }
2386
2387 // --- Y‐faces: loops k=zs..ze-1, j=ys..ye (ym+1 faces), i=xs..xe-1 ---
2388 for (PetscInt k = zs; k < ze; ++k) {
2389 PetscInt k_loc = k - zs;
2390 for (PetscInt j = ys; j <= ye; ++j) {
2391 PetscInt j_loc = j - ys; // 0..ym
2392 for (PetscInt i = xs; i < xe; ++i) {
2393 PetscInt i_loc = i - xs;
2394 // Average the four corners of the X-Z face at Y = j
2395 PetscReal sum = corner_arr[k ][j][i ]
2396 + corner_arr[k+1][j][i ]
2397 + corner_arr[k ][j][i+1]
2398 + corner_arr[k+1][j][i+1];
2399 faceY_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2400 }
2401 }
2402 }
2403
2404 // --- Z‐faces: loops k=zs..ze (zm+1), j=ys..ye-1, i=xs..xe-1 ---
2405 for (PetscInt k = zs; k <= ze; ++k) {
2406 PetscInt k_loc = k - zs;
2407 for (PetscInt j = ys; j < ye; ++j) {
2408 PetscInt j_loc = j - ys;
2409 for (PetscInt i = xs; i < xe; ++i) {
2410 PetscInt i_loc = i - xs;
2411 // Average the four corners of the X-Y face at Z = k
2412 PetscReal sum = corner_arr[k][j ][i ]
2413 + corner_arr[k][j ][i+1]
2414 + corner_arr[k][j+1][i ]
2415 + corner_arr[k][j+1][i+1];
2416 faceZ_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2417 }
2418 }
2419 }
2420
2421 return 0;
2422}
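/*
 * Example (editorial sketch): allocating the three face arrays with the sizes
 * documented above and filling them from a ghosted corner array.
 * Allocate3DArray / Deallocate3DArray and GetOwnedCellRange are the project
 * helpers used elsewhere in this file; corner_arr is assumed to come from
 * DMDAVecGetArrayRead on a Vec associated with user->fda.
 *
 *   PetscInt xs, xm, ys, ym, zs, zm;
 *   DMDALocalInfo info;
 *   ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
 *   ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
 *   ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
 *   ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
 *
 *   PetscReal ***faceX, ***faceY, ***faceZ;
 *   ierr = Allocate3DArray(&faceX, zm,     ym,     xm + 1); CHKERRQ(ierr); // X-faces
 *   ierr = Allocate3DArray(&faceY, zm,     ym + 1, xm    ); CHKERRQ(ierr); // Y-faces
 *   ierr = Allocate3DArray(&faceZ, zm + 1, ym,     xm    ); CHKERRQ(ierr); // Z-faces
 *
 *   ierr = InterpolateCornerToFaceCenter_Scalar(corner_arr, faceX, faceY, faceZ, user); CHKERRQ(ierr);
 *
 *   ierr = Deallocate3DArray(faceX, zm,     ym    ); CHKERRQ(ierr);
 *   ierr = Deallocate3DArray(faceY, zm,     ym + 1); CHKERRQ(ierr);
 *   ierr = Deallocate3DArray(faceZ, zm + 1, ym    ); CHKERRQ(ierr);
 */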
2423
2424/**
2425 * @brief Interpolates a vector field from corner nodes to all face centers.
2426 *
2427 * Identical to the scalar version, except it averages each component of the
2428 * Cmpnts struct at the four corner-nodes per face.
2429 *
2430 * @param[in] corner_arr Ghosted 3-component array (global node indices).
2431 * @param[out] faceX_arr Local array of Cmpnts for X-faces sized [zm][ym][xm+1].
2432 * @param[out] faceY_arr Local array of Cmpnts for Y-faces sized [zm][ym+1][xm].
2433 * @param[out] faceZ_arr Local array of Cmpnts for Z-faces sized [zm+1][ym][xm].
2434 * @param[in] user User context containing DMDA 'fda'.
2435 *
2436 * @return PetscErrorCode 0 on success.
2437 */
2438PetscErrorCode InterpolateCornerToFaceCenter_Vector(
 2439 Cmpnts ***corner_arr,
2440 Cmpnts ***faceX_arr,
2441 Cmpnts ***faceY_arr,
2442 Cmpnts ***faceZ_arr,
2443 UserCtx *user)
2444{
2445 PetscErrorCode ierr;
2446 DMDALocalInfo info;
2447 PetscMPIInt rank;
2448
2449 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
 2450 LOG_ALLOW(LOCAL, LOG_DEBUG,
 2451 "Rank %d starting InterpolateCornerToFaceCenter_Vector.\n", rank);
2452
2453 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2454
2455 PetscInt xs, xm, ys, ym, zs, zm;
2456 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2457 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2458 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2459
2460 PetscInt xe = xs + xm;
2461 PetscInt ye = ys + ym;
2462 PetscInt ze = zs + zm;
2463
2464 // X-faces
2465 for (PetscInt k = zs; k < ze; ++k) {
2466 PetscInt k_loc = k - zs;
2467 for (PetscInt j = ys; j < ye; ++j) {
2468 PetscInt j_loc = j - ys;
2469 for (PetscInt i = xs; i <= xe; ++i) {
2470 PetscInt i_loc = i - xs;
2471 Cmpnts sum = {0,0,0};
2472 sum.x = corner_arr[k ][j ][i].x + corner_arr[k+1][j ][i].x
2473 + corner_arr[k ][j+1][i].x + corner_arr[k+1][j+1][i].x;
2474 sum.y = corner_arr[k ][j ][i].y + corner_arr[k+1][j ][i].y
2475 + corner_arr[k ][j+1][i].y + corner_arr[k+1][j+1][i].y;
2476 sum.z = corner_arr[k ][j ][i].z + corner_arr[k+1][j ][i].z
2477 + corner_arr[k ][j+1][i].z + corner_arr[k+1][j+1][i].z;
2478 faceX_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2479 faceX_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2480 faceX_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2481 }
2482 }
2483 }
2484
 2485 LOG_ALLOW(LOCAL, LOG_DEBUG,
 2486 "Rank %d x-face interpolation complete.\n", rank);
2487
2488 // Y-faces
2489 for (PetscInt k = zs; k < ze; ++k) {
2490 PetscInt k_loc = k - zs;
2491 for (PetscInt j = ys; j <= ye; ++j) {
2492 PetscInt j_loc = j - ys;
2493 for (PetscInt i = xs; i < xe; ++i) {
2494 PetscInt i_loc = i - xs;
2495 Cmpnts sum = {0,0,0};
2496 sum.x = corner_arr[k ][j][i ].x + corner_arr[k+1][j][i ].x
2497 + corner_arr[k ][j][i+1].x + corner_arr[k+1][j][i+1].x;
2498 sum.y = corner_arr[k ][j][i ].y + corner_arr[k+1][j][i ].y
2499 + corner_arr[k ][j][i+1].y + corner_arr[k+1][j][i+1].y;
2500 sum.z = corner_arr[k ][j][i ].z + corner_arr[k+1][j][i ].z
2501 + corner_arr[k ][j][i+1].z + corner_arr[k+1][j][i+1].z;
2502 faceY_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2503 faceY_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2504 faceY_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2505 }
2506 }
2507 }
2508
 2509 LOG_ALLOW(LOCAL, LOG_DEBUG,
 2510 "Rank %d y-face interpolation complete.\n", rank);
2511
2512 // Z-faces
2513 for (PetscInt k = zs; k <= ze; ++k) {
2514 PetscInt k_loc = k - zs;
2515 for (PetscInt j = ys; j < ye; ++j) {
2516 PetscInt j_loc = j - ys;
2517 for (PetscInt i = xs; i < xe; ++i) {
2518 PetscInt i_loc = i - xs;
2519 Cmpnts sum = {0,0,0};
2520 sum.x = corner_arr[k][j ][i ].x + corner_arr[k][j ][i+1].x
2521 + corner_arr[k][j+1][i ].x + corner_arr[k][j+1][i+1].x;
2522 sum.y = corner_arr[k][j ][i ].y + corner_arr[k][j ][i+1].y
2523 + corner_arr[k][j+1][i ].y + corner_arr[k][j+1][i+1].y;
2524 sum.z = corner_arr[k][j ][i ].z + corner_arr[k][j ][i+1].z
2525 + corner_arr[k][j+1][i ].z + corner_arr[k][j+1][i+1].z;
2526 faceZ_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2527 faceZ_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2528 faceZ_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2529 }
2530 }
2531 }
2532
 2533 LOG_ALLOW(LOCAL, LOG_DEBUG,
 2534 "Rank %d z-face interpolation complete.\n", rank);
2535
2536 return 0;
2537}
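
The routine above reads a ghosted corner-node array obtained from user->fda and writes into caller-allocated face arrays. The following is a minimal usage sketch, not part of interpolation.c: the wrapper name ExampleInterpolateCornerFieldToFaces and the allocation helpers Allocate3DArrayCmpnts / Deallocate3DArrayCmpnts are hypothetical placeholders for whatever utilities the code base provides, while the PETSc calls and GetOwnedCellRange are used with the signatures documented in this file.

#include "interpolation.h"   /* UserCtx, Cmpnts, prototypes, PETSc headers */

/* Allocate3DArrayCmpnts / Deallocate3DArrayCmpnts are hypothetical 3D-array helpers. */
static PetscErrorCode ExampleInterpolateCornerFieldToFaces(UserCtx *user, Vec cornerFieldGlobal)
{
  PetscErrorCode ierr;
  Vec            lCorner;
  Cmpnts      ***corner_arr, ***faceX, ***faceY, ***faceZ;
  DMDALocalInfo  info;
  PetscInt       xs, xm, ys, ym, zs, zm;

  PetscFunctionBeginUser;

  /* Ghosted local copy of the corner-node field so the +1 node accesses in the routine are valid. */
  ierr = DMGetLocalVector(user->fda, &lCorner); CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(user->fda, cornerFieldGlobal, INSERT_VALUES, lCorner); CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(user->fda, cornerFieldGlobal, INSERT_VALUES, lCorner); CHKERRQ(ierr);
  ierr = DMDAVecGetArrayRead(user->fda, lCorner, &corner_arr); CHKERRQ(ierr);

  /* Owned-cell counts define the face-array sizes documented above. */
  ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
  ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
  ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
  ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);

  /* X-faces: [zm][ym][xm+1], Y-faces: [zm][ym+1][xm], Z-faces: [zm+1][ym][xm]. */
  ierr = Allocate3DArrayCmpnts(zm,     ym,     xm + 1, &faceX); CHKERRQ(ierr);
  ierr = Allocate3DArrayCmpnts(zm,     ym + 1, xm,     &faceY); CHKERRQ(ierr);
  ierr = Allocate3DArrayCmpnts(zm + 1, ym,     xm,     &faceZ); CHKERRQ(ierr);

  ierr = InterpolateCornerToFaceCenter_Vector(corner_arr, faceX, faceY, faceZ, user); CHKERRQ(ierr);

  /* ... use faceX / faceY / faceZ here ... */

  ierr = Deallocate3DArrayCmpnts(faceX); CHKERRQ(ierr);
  ierr = Deallocate3DArrayCmpnts(faceY); CHKERRQ(ierr);
  ierr = Deallocate3DArrayCmpnts(faceZ); CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(user->fda, lCorner, &corner_arr); CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(user->fda, &lCorner); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

Sizing the face arrays from GetOwnedCellRange keeps the allocation consistent with the 0-based local indices (i_loc, j_loc, k_loc) used inside the interpolation loops.
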
PetscErrorCode GetScatterTargetInfo(UserCtx *user, const char *particleFieldName, DM *targetDM, PetscInt *expected_dof)
Determines the target Eulerian DM and expected DOF for scattering a given particle field.
PetscErrorCode NormalizeGridVectorByCount(DM countDM, Vec countVec, DM dataDM, Vec sumVec, Vec avgVec)
Normalizes a grid vector of sums by a grid vector of counts to produce an average.
PetscErrorCode AccumulateParticleField(DM swarm, const char *particleFieldName, DM gridSumDM, Vec gridSumVec)
Accumulates a particle field (scalar or vector) into a target grid sum vector.
static PetscErrorCode ScatterParticleFieldToEulerField_Internal(UserCtx *user, const char *particleFieldName, DM targetDM, PetscInt expected_dof, Vec eulerFieldAverageVec)
Internal helper function to orchestrate the scatter operation (accumulate + normalize); a minimal sketch of this sequence appears after this reference list.
PetscErrorCode CalculateParticleCountPerCell(UserCtx *user)
Counts particles in each cell of the DMDA 'da' and stores the result in user->ParticleCount.
PetscErrorCode ScatterAllParticleFieldsToEulerFields(UserCtx *user)
Scatters a predefined set of particle fields to their corresponding Eulerian fields.
PetscErrorCode ScatterParticleFieldToEulerField(UserCtx *user, const char *particleFieldName, Vec eulerFieldAverageVec)
Scatters a particle field (scalar or vector) to the corresponding Eulerian field average.
PetscErrorCode InterpolateFieldFromCornerToCenter_Vector(Cmpnts ***field_arr, Cmpnts ***centfield_arr, UserCtx *user)
Interpolates a vector field from corner nodes to cell centers.
PetscErrorCode InterpolateCornerToFaceCenter_Vector(Cmpnts ***corner_arr, Cmpnts ***faceX_arr, Cmpnts ***faceY_arr, Cmpnts ***faceZ_arr, UserCtx *user)
Interpolates a vector field from corner nodes to all face centers.
PetscErrorCode InterpolateFieldFromCornerToCenter_Scalar(PetscReal ***field_arr, PetscReal ***centfield_arr, UserCtx *user)
Interpolates a scalar field from corner nodes to cell centers.
PetscErrorCode InterpolateAllFieldsToSwarm(UserCtx *user)
Interpolates all relevant fields from the DMDA to the DMSwarm.
PetscErrorCode TrilinearInterpolation_Vector(const char *fieldName, Cmpnts ***fieldVec, PetscInt i, PetscInt j, PetscInt k, PetscReal a1, PetscReal a2, PetscReal a3, Cmpnts *vec)
Computes the trilinearly interpolated vector (e.g., velocity) at a given point.
PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar(PetscReal ***centfield_arr, PetscReal ***field_arr, UserCtx *user)
Interpolates a scalar field from cell centers to corner nodes.
static PetscErrorCode InterpolateEulerFieldToSwarmForParticle(const char *fieldName, void *fieldPtr, PetscInt iCell, PetscInt jCell, PetscInt kCell, PetscReal a1, PetscReal a2, PetscReal a3, void *swarmOut, PetscInt p, PetscInt blockSize)
Interpolates a single field (scalar or vector) for one particle, storing the result in a swarm array.
PetscErrorCode InterpolateFieldFromCenterToCorner_Vector_Petsc(Cmpnts ***centfield_arr, Cmpnts ***corner_arr, UserCtx *user)
Interpolates a vector field from cell centers to corner nodes.
PetscErrorCode TrilinearInterpolation_Scalar(const char *fieldName, PetscReal ***fieldScal, PetscInt i, PetscInt j, PetscInt k, PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *val)
Computes the trilinearly interpolated scalar at a given point.
PetscErrorCode PieceWiseLinearInterpolation_Scalar(const char *fieldName, PetscReal ***fieldScal, PetscInt iCell, PetscInt jCell, PetscInt kCell, PetscReal *val)
Retrieves the scalar value at the cell (iCell, jCell, kCell).
#define ERROR_MSG_BUFFER_SIZE
PetscErrorCode InterpolateFieldFromCenterToCorner_Vector(Cmpnts ***centfield_arr, Cmpnts ***field_arr, UserCtx *user)
Interpolates a vector field from cell centers to corner nodes.
static void ComputeTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w)
Computes the trilinear interpolation weights from the interpolation coefficients; a weight-formula sketch appears after this reference list.
PetscErrorCode InterpolateCornerToFaceCenter_Scalar(PetscReal ***corner_arr, PetscReal ***faceX_arr, PetscReal ***faceY_arr, PetscReal ***faceZ_arr, UserCtx *user)
Interpolates a scalar field from corner nodes to all face centers.
PetscErrorCode InterpolateEulerFieldToSwarm(UserCtx *user, Vec fieldLocal_cellCentered, const char *fieldName, const char *swarmOutFieldName)
Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles, using a robust,...
PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar_Petsc(PetscReal ***centfield_arr, PetscReal ***corner_arr, UserCtx *user)
Interpolates a scalar field from cell centers to corner nodes.
PetscErrorCode PieceWiseLinearInterpolation_Vector(const char *fieldName, Cmpnts ***fieldVec, PetscInt iCell, PetscInt jCell, PetscInt kCell, Cmpnts *vec)
Retrieves the vector (Cmpnts) at the cell (iCell, jCell, kCell).
#define TrilinearInterpolation(fieldName, fieldPtr, i, j, k, a1, a2, a3, outPtr)
Macro that calls either the scalar or vector trilinear interpolation function based on the type of th...
#define InterpolateFieldFromCenterToCorner(blockSize, centfield_ptr, corner_ptr, user_ctx)
Macro to dispatch to the correct scalar or vector center-to-corner function based on a runtime block ...
PetscBool is_function_allowed(const char *functionName)
Checks if a given function is in the allow-list.
Definition logging.c:159
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Definition logging.h:274
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:44
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:45
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:207
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:767
LogLevel get_log_level()
Retrieves the current logging level from the environment variable LOG_LEVEL.
Definition logging.c:49
#define LOG_LOOP_ALLOW_EXACT(scope, level, var, val, fmt,...)
Logs a custom message if a variable equals a specific value.
Definition logging.h:355
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:29
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:32
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:30
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:33
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:758
PetscErrorCode GetOwnedCellRange(const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global, PetscInt *xm_cell_local)
Determines the global starting index and number of CELLS owned by the current processor in a specifie...
Definition setup.c:1344
PetscInt KM
Definition variables.h:639
PetscScalar x
Definition variables.h:100
PetscScalar z
Definition variables.h:100
Vec ParticleCount
Definition variables.h:697
PetscInt JM
Definition variables.h:639
Vec lUcat
Definition variables.h:657
PetscScalar y
Definition variables.h:100
PetscInt IM
Definition variables.h:639
Vec Psi
Definition variables.h:698
A 3D point or vector with PetscScalar components.
Definition variables.h:99
User-defined context containing data specific to a single computational grid level.
Definition variables.h:630
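
The particle-to-grid entries above describe an accumulate-and-normalize pipeline. The sketch below only illustrates that sequence; the public ScatterParticleFieldToEulerField() already wraps it, and the member names user->swarm and user->da are assumptions about UserCtx (user->ParticleCount and the routine signatures are taken from the briefs above).

#include "interpolation.h"   /* UserCtx, scatter prototypes, PETSc headers */

/* Illustrative only; not the library's implementation. */
static PetscErrorCode ExampleScatterOneField(UserCtx *user, const char *particleFieldName, Vec avgVec)
{
  PetscErrorCode ierr;
  DM             targetDM;
  PetscInt       expected_dof;
  Vec            sumVec;

  PetscFunctionBeginUser;

  /* 1. Per-cell particle counts (stored in user->ParticleCount). */
  ierr = CalculateParticleCountPerCell(user); CHKERRQ(ierr);

  /* 2. Determine which Eulerian DM / DOF this particle field scatters to. */
  ierr = GetScatterTargetInfo(user, particleFieldName, &targetDM, &expected_dof); CHKERRQ(ierr);

  /* 3. Accumulate per-cell sums of the particle field on the target grid. */
  ierr = DMGetGlobalVector(targetDM, &sumVec); CHKERRQ(ierr);
  ierr = VecSet(sumVec, 0.0); CHKERRQ(ierr);
  ierr = AccumulateParticleField(user->swarm, particleFieldName, targetDM, sumVec); CHKERRQ(ierr);

  /* 4. Divide sums by counts to obtain the Eulerian average. */
  ierr = NormalizeGridVectorByCount(user->da, user->ParticleCount, targetDM, sumVec, avgVec); CHKERRQ(ierr);

  ierr = DMRestoreGlobalVector(targetDM, &sumVec); CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
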
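
The ComputeTrilinearWeights entry refers to the standard tensor-product weights built from the interpolation coefficients (a1, a2, a3). The sketch below shows only the textbook formula; the corner ordering of w[0..7] used by the actual static routine in interpolation.c is not reproduced here, and ExampleTrilinearWeights is a placeholder name.

#include <petscsys.h>   /* PetscReal */

/* Textbook trilinear weights for local coefficients a1, a2, a3 in [0,1].
 * The eight weights sum to 1; each is a product of (a or 1-a) factors
 * selecting the low/high corner in each direction. Corner ordering is
 * illustrative and may differ from ComputeTrilinearWeights(). */
static void ExampleTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal w[8])
{
  const PetscReal b1 = 1.0 - a1, b2 = 1.0 - a2, b3 = 1.0 - a3;
  w[0] = b1 * b2 * b3;  /* corner (i  , j  , k  ) */
  w[1] = a1 * b2 * b3;  /* corner (i+1, j  , k  ) */
  w[2] = b1 * a2 * b3;  /* corner (i  , j+1, k  ) */
  w[3] = a1 * a2 * b3;  /* corner (i+1, j+1, k  ) */
  w[4] = b1 * b2 * a3;  /* corner (i  , j  , k+1) */
  w[5] = a1 * b2 * a3;  /* corner (i+1, j  , k+1) */
  w[6] = b1 * a2 * a3;  /* corner (i  , j+1, k+1) */
  w[7] = a1 * a2 * a3;  /* corner (i+1, j+1, k+1) */
}

The interpolated value is then the sum over the eight corners of weight times corner value, which is what TrilinearInterpolation_Scalar / TrilinearInterpolation_Vector evaluate for a particle at (a1, a2, a3) inside cell (i, j, k).
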