PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
interpolation.c
1/**
2 * @file interpolation.c
3 * @brief Interpolation routines for corner/center field transfers and DMSwarm-based field sampling using the fdf-curvIB method.
4 *
5 * Provides routines for interpolation between corner-based and center-based
6 * fields in the cell-centered DM (fda), plus partial usage examples for
7 * DMSwarm-based field sampling.
8 */
9
10#include "interpolation.h"
11
12// Number of weights used in certain trilinear interpolation examples
13#define NUM_WEIGHTS 8
14// Define a buffer size for error messages if not already available
15#ifndef ERROR_MSG_BUFFER_SIZE
16#define ERROR_MSG_BUFFER_SIZE 256 // Or use PETSC_MAX_PATH_LEN if appropriate
17#endif
18
19#undef __FUNCT__
20#define __FUNCT__ "InterpolateFieldFromCornerToCenter_Vector"
21/**
22 * @brief Interpolates a vector field from corner nodes to cell centers.
23 *
24 * This function estimates the value of a vector field at the center of each
25 * computational cell (whose origin node is owned by this rank) by averaging
26 * the values from the 8 corner nodes defining that cell.
27 *
28 * @param[in] field_arr Input: 3D array view (ghosted) of vector data at nodes,
29 * obtained from a Vec associated with user->fda. Accessed via GLOBAL node indices.
30 * Ghost values must be up-to-date.
31 * @param[out] centfield_arr Output: 3D local array (0-indexed) where interpolated cell center values
32 * are stored. Sized [zm_cell_local][ym_cell_local][xm_cell_local]
33 * by the caller. centfield_arr[k_loc][j_loc][i_loc] stores the value for
34 * the cell corresponding to the (i_loc, j_loc, k_loc)-th owned cell origin.
35 * @param[in] user User context containing DMDA information (fda, IM, JM, KM).
36 *
37 * @return PetscErrorCode 0 on success.
38 */
39PetscErrorCode InterpolateFieldFromCornerToCenter_Vector(
40    Cmpnts ***field_arr, /* Input: Ghosted local array view from user->fda (global node indices) */
41 Cmpnts ***centfield_arr, /* Output: Local 0-indexed array */
42 UserCtx *user)
43{
44 PetscErrorCode ierr;
45 PetscMPIInt rank;
46 PetscFunctionBeginUser;
48
49 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
50  LOG_ALLOW(LOCAL, LOG_DEBUG, /* scope/level assumed */
51            "Rank %d starting InterpolateFieldFromCornerToCenter_Vector.\n", rank);
52
53 // Get local info based on the NODE-based DMDA (user->fda)
54 // This DM defines the ownership of the nodes from which we interpolate,
55 // and thus defines the cells whose centers we will compute.
56 DMDALocalInfo info_fda;
57 ierr = DMDAGetLocalInfo(user->fda, &info_fda); CHKERRQ(ierr);
58
59 // Determine owned CELL ranges based on owning their origin NODES (from user->fda)
60 PetscInt xs_cell_global_i, xm_cell_local_i; // Global start cell index, local count of owned cells
61 PetscInt ys_cell_global_j, ym_cell_local_j;
62 PetscInt zs_cell_global_k, zm_cell_local_k;
63
64
65 ierr = GetOwnedCellRange(&info_fda, 0, &xs_cell_global_i, &xm_cell_local_i); CHKERRQ(ierr);
66 ierr = GetOwnedCellRange(&info_fda, 1, &ys_cell_global_j, &ym_cell_local_j); CHKERRQ(ierr);
67 ierr = GetOwnedCellRange(&info_fda, 2, &zs_cell_global_k, &zm_cell_local_k); CHKERRQ(ierr);
68
69 // Exclusive end global cell indices
70 PetscInt xe_cell_global_i_excl = xs_cell_global_i + xm_cell_local_i;
71 PetscInt ye_cell_global_j_excl = ys_cell_global_j + ym_cell_local_j;
72 PetscInt ze_cell_global_k_excl = zs_cell_global_k + zm_cell_local_k;
73
74 LOG_ALLOW(LOCAL, LOG_TRACE, "Rank %d : Processing Owned Cells (global indices) k=%d..%d (count %d), j=%d..%d (count %d), i=%d..%d (count %d)\n",
75 rank,
76 zs_cell_global_k, ze_cell_global_k_excl -1, zm_cell_local_k,
77 ys_cell_global_j, ye_cell_global_j_excl -1, ym_cell_local_j,
78 xs_cell_global_i, xe_cell_global_i_excl -1, xm_cell_local_i);
79
80 // Loop over the GLOBAL indices of the CELLS whose origin nodes are owned by this processor
81 // (according to user->fda partitioning)
82 for (PetscInt k_glob_cell = zs_cell_global_k; k_glob_cell < ze_cell_global_k_excl; k_glob_cell++) {
83 PetscInt k_local = k_glob_cell - zs_cell_global_k; // 0-based local index for centfield_arr
84
85 for (PetscInt j_glob_cell = ys_cell_global_j; j_glob_cell < ye_cell_global_j_excl; j_glob_cell++) {
86 PetscInt j_local = j_glob_cell - ys_cell_global_j; // 0-based local index
87
88 for (PetscInt i_glob_cell = xs_cell_global_i; i_glob_cell < xe_cell_global_i_excl; i_glob_cell++) {
89 PetscInt i_local = i_glob_cell - xs_cell_global_i; // 0-based local index
90
91 Cmpnts sum = {0.0, 0.0, 0.0};
92 // Count is always 8 for a hexahedral cell
93 // PetscInt count = 0; // Not strictly needed if we assume 8 corners
94
95 // Loop over the 8 corner NODES that define cell C(i_glob_cell, j_glob_cell, k_glob_cell)
96 for (PetscInt dk = 0; dk < 2; dk++) {
97 for (PetscInt dj = 0; dj < 2; dj++) {
98 for (PetscInt di = 0; di < 2; di++) {
99 PetscInt ni_glob = i_glob_cell + di; // Global NODE index i or i+1
100 PetscInt nj_glob = j_glob_cell + dj; // Global NODE index j or j+1
101 PetscInt nk_glob = k_glob_cell + dk; // Global NODE index k or k+1
102
103 // Access the input 'field_arr' (node values) using GLOBAL node indices.
104 // Ghosts in field_arr must be valid.
105 // DMDAVecGetArrayRead handles mapping global indices to local memory.
106 sum.x += field_arr[nk_glob][nj_glob][ni_glob].x;
107 sum.y += field_arr[nk_glob][nj_glob][ni_glob].y;
108 sum.z += field_arr[nk_glob][nj_glob][ni_glob].z;
109 // count++;
110 }
111 }
112 }
113
114 // Calculate average value representing the cell center
115 Cmpnts center_value;
116 center_value.x = sum.x / 8.0;
117 center_value.y = sum.y / 8.0;
118 center_value.z = sum.z / 8.0;
119
120 // Store result into the local, 0-indexed output array 'centfield_arr'
121 // Defensive check (should not be strictly necessary if caller allocated centfield_arr
122 // with dimensions xm_cell_local_i, ym_cell_local_j, zm_cell_local_k)
123 if (i_local < 0 || i_local >= xm_cell_local_i ||
124 j_local < 0 || j_local >= ym_cell_local_j ||
125 k_local < 0 || k_local >= zm_cell_local_k) {
126 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Local index out of bounds for centfield_arr write!");
127 }
128 centfield_arr[k_local][j_local][i_local] = center_value;
129
130 } // End loop i_glob_cell
131 } // End loop j_glob_cell
132 } // End loop k_glob_cell
133
134  LOG_ALLOW(LOCAL, LOG_DEBUG, /* scope/level assumed */
135            "Rank %d completed InterpolateFieldFromCornerToCenter_Vector.\n", rank);
136
138
139 PetscFunctionReturn(0);
140}
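/*
 * Usage sketch for the routine above (a minimal, illustrative example; the ghosted
 * input Vec `user->lUcat` and the manual PetscMalloc1-based allocation of the
 * 0-indexed output array are assumptions of this sketch, not requirements of the API):
 *
 *   DMDALocalInfo info;
 *   PetscInt      xs, xm, ys, ym, zs, zm;
 *   Cmpnts      ***nodes, ***centers;
 *
 *   ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
 *   ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
 *   ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
 *   ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
 *
 *   // Allocate centers[zm][ym][xm], addressed as centers[k_loc][j_loc][i_loc]
 *   ierr = PetscMalloc1(zm, &centers); CHKERRQ(ierr);
 *   for (PetscInt k = 0; k < zm; k++) {
 *     ierr = PetscMalloc1(ym, &centers[k]); CHKERRQ(ierr);
 *     for (PetscInt j = 0; j < ym; j++) { ierr = PetscMalloc1(xm, &centers[k][j]); CHKERRQ(ierr); }
 *   }
 *
 *   ierr = DMDAVecGetArrayRead(user->fda, user->lUcat, &nodes); CHKERRQ(ierr);   // ghosts must be current
 *   ierr = InterpolateFieldFromCornerToCenter_Vector(nodes, centers, user); CHKERRQ(ierr);
 *   ierr = DMDAVecRestoreArrayRead(user->fda, user->lUcat, &nodes); CHKERRQ(ierr);
 *   // ... use centers[k][j][i], then free the three allocation levels in reverse order.
 */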
141
142#undef __FUNCT__
143#define __FUNCT__ "InterpolateFieldFromCornerToCenter_Scalar"
144// -----------------------------------------------------------------------------
145// Interpolation: Corner -> Center (scalar)
146// -----------------------------------------------------------------------------
147
148/**
149 * @brief Interpolates a scalar field from corner nodes to cell centers.
150 *
151 * This function estimates the value of a scalar field at the center of each
152 * computational cell (whose origin node is owned by this rank) by averaging
153 * the values from the 8 corner nodes defining that cell.
154 *
155 * @param[in] field_arr Input: 3D array view (ghosted) of scalar data at nodes,
156 * obtained from a Vec associated with user->da. Accessed via GLOBAL node indices.
157 * Ghost values must be up-to-date.
158 * @param[out] centfield_arr Output: 3D local array (0-indexed) where interpolated cell center values
159 * are stored. Sized [zm_cell_local][ym_cell_local][xm_cell_local]
160 * by the caller. centfield_arr[k_loc][j_loc][i_loc] stores the value for
161 * the cell corresponding to the (i_loc, j_loc, k_loc)-th owned cell origin.
162 * @param[in] user User context containing DMDA information (da, IM, JM, KM).
163 *
164 * @return PetscErrorCode 0 on success.
165 */
166PetscErrorCode InterpolateFieldFromCornerToCenter_Scalar(
167    PetscReal ***field_arr, /* Input: Ghosted local array view from user->da (global node indices) */
168 PetscReal ***centfield_arr, /* Output: Local 0-indexed array for cell centers */
169 UserCtx *user)
170{
171 PetscErrorCode ierr;
172 PetscMPIInt rank;
173 PetscFunctionBeginUser;
175 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
176  LOG_ALLOW(LOCAL, LOG_DEBUG, /* scope/level assumed */
177            "Rank %d starting InterpolateFieldFromCornerToCenter_Scalar.\n", rank);
178
179 // Get local info based on user->da (which is node-based but defines cell origins)
180 DMDALocalInfo info_da_nodes;
181 ierr = DMDAGetLocalInfo(user->da, &info_da_nodes); CHKERRQ(ierr);
182
183 // Determine owned CELL ranges based on owning their origin NODES (from user->da)
184 PetscInt xs_cell_global_i, xm_cell_local_i; // Global start cell index, local count of owned cells
185 PetscInt ys_cell_global_j, ym_cell_local_j;
186 PetscInt zs_cell_global_k, zm_cell_local_k;
187
188 ierr = GetOwnedCellRange(&info_da_nodes, 0, &xs_cell_global_i, &xm_cell_local_i); CHKERRQ(ierr);
189 ierr = GetOwnedCellRange(&info_da_nodes, 1, &ys_cell_global_j, &ym_cell_local_j); CHKERRQ(ierr);
190 ierr = GetOwnedCellRange(&info_da_nodes, 2, &zs_cell_global_k, &zm_cell_local_k); CHKERRQ(ierr);
191
192 // Exclusive end global cell indices
193 PetscInt xe_cell_global_i_excl = xs_cell_global_i + xm_cell_local_i;
194 PetscInt ye_cell_global_j_excl = ys_cell_global_j + ym_cell_local_j;
195 PetscInt ze_cell_global_k_excl = zs_cell_global_k + zm_cell_local_k;
196
197 LOG_ALLOW(LOCAL, LOG_TRACE, "Rank %d: Processing Owned Cells (global indices) k=%d..%d (count %d), j=%d..%d (count %d), i=%d..%d (count %d)\n",
198 rank,
199 zs_cell_global_k, ze_cell_global_k_excl -1, zm_cell_local_k,
200 ys_cell_global_j, ye_cell_global_j_excl -1, ym_cell_local_j,
201 xs_cell_global_i, xe_cell_global_i_excl -1, xm_cell_local_i);
202
203 // Loop over the GLOBAL indices of the CELLS whose origin nodes are owned by this processor
204 // (according to user->da partitioning)
205 for (PetscInt k_glob_cell = zs_cell_global_k; k_glob_cell < ze_cell_global_k_excl; k_glob_cell++) {
206 PetscInt k_local = k_glob_cell - zs_cell_global_k; // 0-based local index for centfield_arr
207
208 for (PetscInt j_glob_cell = ys_cell_global_j; j_glob_cell < ye_cell_global_j_excl; j_glob_cell++) {
209 PetscInt j_local = j_glob_cell - ys_cell_global_j; // 0-based local index
210
211 for (PetscInt i_glob_cell = xs_cell_global_i; i_glob_cell < xe_cell_global_i_excl; i_glob_cell++) {
212 PetscInt i_local = i_glob_cell - xs_cell_global_i; // 0-based local index
213
214 PetscReal sum = 0.0;
215 // Count is always 8 for a hexahedral cell if all nodes are accessible
216 // PetscInt count = 0;
217
218 // Loop over the 8 NODE indices that define cell C(i_glob_cell, j_glob_cell, k_glob_cell)
219 for (PetscInt dk = 0; dk < 2; dk++) {
220 for (PetscInt dj = 0; dj < 2; dj++) {
221 for (PetscInt di = 0; di < 2; di++) {
222 PetscInt ni_glob = i_glob_cell + di; // Global NODE index for corner
223 PetscInt nj_glob = j_glob_cell + dj; // Global NODE index for corner
224 PetscInt nk_glob = k_glob_cell + dk; // Global NODE index for corner
225
226 // Access the input 'field_arr' (node values) using GLOBAL node indices.
227 sum += field_arr[nk_glob][nj_glob][ni_glob];
228 // count++;
229 }
230 }
231 }
232
233 PetscReal center_value = sum / 8.0;
234
235 // Store the calculated cell center value into the output array 'centfield_arr'
236 // using the LOCAL 0-based cell index.
237 // Defensive check (should not be strictly necessary if caller allocated centfield_arr correctly)
238 if (i_local < 0 || i_local >= xm_cell_local_i ||
239 j_local < 0 || j_local >= ym_cell_local_j ||
240 k_local < 0 || k_local >= zm_cell_local_k) {
241 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Local index out of bounds for centfield_arr write in Scalar version!");
242 }
243 centfield_arr[k_local][j_local][i_local] = center_value;
244
245 } // End loop i_glob_cell
246 } // End loop j_glob_cell
247 } // End loop k_glob_cell
248
249  LOG_ALLOW(LOCAL, LOG_DEBUG, /* scope/level assumed */
250            "Rank %d completed InterpolateFieldFromCornerToCenter_Scalar.\n", rank);
251
253
254 PetscFunctionReturn(0);
255}
256
257#undef __FUNCT__
258#define __FUNCT__ "InterpolateFieldFromCenterToCorner_Vector_Petsc"
259/**
260 * @brief Interpolates a vector field from cell centers to corner nodes.
261 *
262 * This version writes directly into an array obtained from DMDAVecGetArray() on a global Vec, using GLOBAL node indices for the owned portion.
263 * @param[in] centfield_arr Input: 3D array (ghosted) of cell-centered data, accessed via GLOBAL indices.
264 * @param[out] corner_arr Output: 3D array where interpolated node values are stored,
265 * also accessed via GLOBAL indices for the owned part.
266 * @param[in] user User context containing DMDA information.
267 *
268 * @return PetscErrorCode 0 on success.
269 */
270PetscErrorCode InterpolateFieldFromCenterToCorner_Vector_Petsc(
271    Cmpnts ***centfield_arr, /* Input: Ghosted local array from Vec (read) */
272 Cmpnts ***corner_arr, /* Output: global array from Vec (write) */
273 UserCtx *user)
274{
275 PetscErrorCode ierr;
276 DMDALocalInfo info;
277 PetscMPIInt rank;
279 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
280 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
281
282 // Node ownership range (GLOBAL indices)
283 PetscInt xs_node = info.xs, xm_node = info.xm, xe_node = xs_node + xm_node;
284 PetscInt ys_node = info.ys, ym_node = info.ym, ye_node = ys_node + ym_node;
285 PetscInt zs_node = info.zs, zm_node = info.zm, ze_node = zs_node + zm_node;
286
287 PetscInt nCellsX = info.mx - 2; // Number of cells in x-direction
288 PetscInt nCellsY = info.my - 2; // Number of cells in y-direction
289 PetscInt nCellsZ = info.mz - 2; // Number of cells in z-direction
290
291
292 // Global grid dimensions (used for valid cell check)
293 PetscInt IM = info.mx - 1; // Total nodes in i-direction
294 PetscInt JM = info.my - 1; // Total nodes in j-direction
295 PetscInt KM = info.mz - 1; // Total nodes in k-direction
296
297 // Ghosted array starting indices (from the same info struct)
298 PetscInt gxs = info.gxs;
299 PetscInt gys = info.gys;
300 PetscInt gzs = info.gzs;
301
302 PetscInt xs = info.xs;
303 PetscInt ys = info.ys;
304 PetscInt zs = info.zs;
305
306  LOG_ALLOW(LOCAL, LOG_DEBUG, /* scope/level assumed */
307            "[Rank %d] Starting InterpolateFieldFromCenterToCorner_Vector_Petsc: Node ownership k=%d..%d, j=%d..%d, i=%d..%d\n",
308 rank, zs_node, ze_node-1, ys_node, ye_node-1, xs_node, xe_node-1);
309
310 // Loop over the GLOBAL indices of the NODES owned by this processor
311 for (PetscInt k = zs_node; k < ze_node; k++) {
312 for (PetscInt j = ys_node; j < ye_node; j++) {
313 for (PetscInt i = xs_node; i < xe_node; i++) {
314 Cmpnts sum = {0.0, 0.0, 0.0};
315 PetscInt count = 0;
316
317 // DEBUG 1 TEST
318 /*
319 if(rank == 1 && i == 24 && j == 12 && k == 49){
320 PetscInt i_cell_A = i - 1;
321 PetscInt i_cell_B = i;
322
323 Cmpnts ucat_A = centfield_arr[k][j][i_cell_A]; // 23
324 Cmpnts ucat_B = centfield_arr[k][j][i_cell_B]; // 24 (out-of-bounds if IM=25)
325
326 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Read { Valid Cell }Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f) and {Out-of-Bounds Cell}Ucat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
327 rank, k, j, i,
328 k, j, i_cell_A, ucat_A.x, ucat_A.y, ucat_A.z,
329 k, j, i_cell_B, ucat_B.x, ucat_B.y, ucat_B.z);
330
331 }
332 */
333
334 // Skip processing the unused last node in each dimension.
335 if(i >= IM || j >= JM || k >= KM){
336 continue;
337 }
338 // Loop over the 8 potential cells surrounding node N(k,j,i) and accumulate values.
339 // Each offset pair selects one of the (up to) 8 cells that share node N(k,j,i) as a corner; the shifted cell-centered indices of those cells are accumulated below.
340 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
341 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
342 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
343
344 // These are still GLOBAL cell indices
345 PetscInt global_cell_k = k + dk_offset;
346 PetscInt global_cell_j = j + dj_offset;
347 PetscInt global_cell_i = i + di_offset;
348
349 // Check if this corresponds to a valid GLOBAL cell index
350 if (global_cell_i >= 0 && global_cell_i < nCellsX &&
351 global_cell_j >= 0 && global_cell_j < nCellsY &&
352 global_cell_k >= 0 && global_cell_k < nCellsZ)
353 {
354 Cmpnts cell_val = centfield_arr[global_cell_k + 1][global_cell_j + 1][global_cell_i + 1];
355
356 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,49,"[Rank %d] successful read from [%d][%d][%d] -> (%.2f, %.2f, %.2f)\n",
357 rank,global_cell_k,global_cell_j,global_cell_i,cell_val.x, cell_val.y, cell_val.z);
358
359 sum.x += cell_val.x;
360 sum.y += cell_val.y;
361 sum.z += cell_val.z;
362 count++;
363 }
364 }
365 }
366 }
367
368 PetscInt i_global_write = i; // Global index in GLOBAL array.
369 PetscInt j_global_write = j;
370 PetscInt k_global_write = k;
371
372 // We write directly into the array using the global loop indices.
373 if (count > 0) {
374 corner_arr[k_global_write][j_global_write][i_global_write].x = sum.x / (PetscReal)count;
375 corner_arr[k_global_write][j_global_write][i_global_write].y = sum.y / (PetscReal)count;
376 corner_arr[k_global_write][j_global_write][i_global_write].z = sum.z / (PetscReal)count;
377 } else {
378 // This case should ideally not happen for a valid owned node, but as a failsafe:
379 corner_arr[k_global_write][j_global_write][i_global_write] = (Cmpnts){0.0, 0.0, 0.0};
380 }
381
382 // DEBUG 2
383 /*
384 if(rank == 1){
385 if(i == 11 && j == 11 && k == 49){
386 Cmpnts ucat_node = corner_arr[k][j][i];
387 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
388 rank, k, j, i,
389 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
390 }
391 }
392
393 if(rank == 0 && i == 24 && j == 12 && k == 0){
394 Cmpnts ucat_node = corner_arr[k][j][i];
395 PetscPrintf(PETSC_COMM_WORLD,"[Rank %d] DEBUG TEST at Node(k,j,i)=%d,%d,%d: Wrote CornerUcat[%d][%d][%d]=(%.2f,%.2f,%.2f)\n",
396 rank, k, j, i,
397 k, j, i, ucat_node.x, ucat_node.y, ucat_node.z);
398 }
399 */
400 // LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_VERBOSE,k,48,"[Rank %d] Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
401 }
402 }
403 }
405 return 0;
406}
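/*
 * Note: in this file the routine above is normally reached through the
 * InterpolateFieldFromCenterToCorner(bs, ...) dispatch inside
 * InterpolateEulerFieldToSwarm(); a condensed view of that call pattern
 * (variable names follow that function) is:
 *
 *   ierr = DMDAVecGetArrayRead(user->fda, fieldLocal_cellCentered, &cellPtr);  CHKERRQ(ierr);
 *   ierr = DMDAVecGetArray(user->fda, cornerGlobal, &cornerPtr);               CHKERRQ(ierr);
 *   ierr = InterpolateFieldFromCenterToCorner(bs, cellPtr, cornerPtr, user);   CHKERRQ(ierr);
 *   ierr = DMDAVecRestoreArray(user->fda, cornerGlobal, &cornerPtr);           CHKERRQ(ierr);
 *   ierr = DMDAVecRestoreArrayRead(user->fda, fieldLocal_cellCentered, &cellPtr); CHKERRQ(ierr);
 *   // then fill the ghost regions of the corner data:
 *   ierr = DMGlobalToLocalBegin(user->fda, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
 *   ierr = DMGlobalToLocalEnd(user->fda, cornerGlobal, INSERT_VALUES, cornerLocal);   CHKERRQ(ierr);
 */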
407
408
409#undef __FUNCT__
410#define __FUNCT__ "InterpolateFieldFromCenterToCorner_Scalar_Petsc"
411/**
412 * @brief Interpolates a scalar field from cell centers to corner nodes.
413 *
414 * This version is adapted to write directly into a ghosted local array obtained from DMDAVecGetArray(),
415 * which allows using GLOBAL indices for writing to the OWNED portion of the array.
416 *
417 * @param[in] centfield_arr Input: 3D array (ghosted) of scalar data at cell centers, accessed via GLOBAL indices.
418 * @param[out] corner_arr Output: 3D array (ghosted) where interpolated node values are stored,
419 * also accessed via GLOBAL indices for the owned part.
420 * @param[in] user User context containing DMDA information.
421 *
422 * @return PetscErrorCode 0 on success.
423 */
424PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar_Petsc(
425    PetscReal ***centfield_arr, /* Input: Ghosted local array from Vec (read) */
426 PetscReal ***corner_arr, /* Output: Ghosted local array from Vec (write) */
427 UserCtx *user)
428{
429 PetscErrorCode ierr;
430 DMDALocalInfo info;
432 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
433
434 // Node ownership range (GLOBAL indices)
435 PetscInt xs_node = info.xs, xe_node = info.xs + info.xm;
436 PetscInt ys_node = info.ys, ye_node = info.ys + info.ym;
437 PetscInt zs_node = info.zs, ze_node = info.zs + info.zm;
438
439 // Global grid dimensions (number of nodes)
440 PetscInt IM = info.mx-1;
441 PetscInt JM = info.my-1;
442 PetscInt KM = info.mz-1;
443
444 // Loop over the GLOBAL indices of the NODES owned by this processor
445 for (PetscInt k = zs_node; k < ze_node; k++) {
446 for (PetscInt j = ys_node; j < ye_node; j++) {
447 for (PetscInt i = xs_node; i < xe_node; i++) {
448 PetscReal sum = 0.0;
449 PetscInt count = 0;
450
451 // Loop over the 8 potential cells surrounding node N(k,j,i)
452 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
453 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
454 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
455 // This is the global index of the cell-centered data point
456 PetscInt cell_idx_k = k + dk_offset;
457 PetscInt cell_idx_j = j + dj_offset;
458 PetscInt cell_idx_i = i + di_offset;
459
460 // Check if this corresponds to a valid GLOBAL cell index.
461 // Cell indices run from 0 to NumberOfNodes-2.
462 if (cell_idx_i >= 0 && cell_idx_i < IM - 1 &&
463 cell_idx_j >= 0 && cell_idx_j < JM - 1 &&
464 cell_idx_k >= 0 && cell_idx_k < KM - 1)
465 {
466 sum += centfield_arr[cell_idx_k][cell_idx_j][cell_idx_i];
467 count++;
468 }
469 }
470 }
471 }
472
473 // Calculate average and write to the output array using GLOBAL indices
474 if (count > 0) {
475 corner_arr[k][j][i] = sum / (PetscReal)count;
476 } else {
477 corner_arr[k][j][i] = 0.0; // Failsafe
478 }
479 }
480 }
481 }
483 return 0;
484}
485
486
487#undef __FUNCT__
488#define __FUNCT__ "PiecWiseLinearInterpolation_Scalar"
489/**
490 * @brief Retrieves the scalar value at the cell (iCell, jCell, kCell).
491 *
492 * This function does a simple piecewise (zeroth‐order) interpolation
493 * by returning fieldScal[kCell][jCell][iCell], ignoring any fractional coordinates.
494 *
495 * @param[in] fieldName A string identifying the field (e.g. "temperature").
496 * @param[in] fieldScal 3D array of PetscReal (indexed as [k][j][i]).
497 * @param[in] iCell, jCell, kCell Integral cell indices to sample.
498 * @param[out] val Pointer to a PetscReal that receives the sampled scalar.
499 *
500 * @return PetscErrorCode 0 on success
501 */
502PetscErrorCode PiecWiseLinearInterpolation_Scalar(
503    const char *fieldName,
504 PetscReal ***fieldScal,
505 PetscInt iCell,
506 PetscInt jCell,
507 PetscInt kCell,
508 PetscReal *val)
509{
510 PetscFunctionBegin;
511 *val = fieldScal[kCell][jCell][iCell];
512
513 // Optional logging
514  LOG_ALLOW(LOCAL, LOG_VERBOSE, /* scope/level assumed */
515            "Field '%s' at (i=%d, j=%d, k=%d) => val=%.6f\n",
516 fieldName, iCell, jCell, kCell, *val);
517
518 PetscFunctionReturn(0);
519}
520
521#undef __FUNCT__
522#define __FUNCT__ "PiecWiseLinearInterpolation_Vector"
523/**
524 * @brief Retrieves the vector (Cmpnts) at the cell (iCell, jCell, kCell).
525 *
526 * This function simply sets:
527 * vec->x = fieldVec[kCell][jCell][iCell].x
528 * vec->y = fieldVec[kCell][jCell][iCell].y
529 * vec->z = fieldVec[kCell][jCell][iCell].z
530 * effectively a nearest‐cell or piecewise approach.
531 *
532 * @param[in] fieldName String identifying the field (e.g. "velocity").
533 * @param[in] fieldVec 3D array of Cmpnts (indexed as [k][j][i]).
534 * @param[in] iCell, jCell, kCell Integral cell indices to sample.
535 * @param[out] vec Pointer to a Cmpnts struct that receives the sampled vector.
536 *
537 * @return PetscErrorCode 0 on success
538 */
539PetscErrorCode PiecWiseLinearInterpolation_Vector(
540    const char *fieldName,
541 Cmpnts ***fieldVec,
542 PetscInt iCell,
543 PetscInt jCell,
544 PetscInt kCell,
545 Cmpnts *vec)
546{
547 PetscFunctionBegin;
548 vec->x = fieldVec[kCell][jCell][iCell].x;
549 vec->y = fieldVec[kCell][jCell][iCell].y;
550 vec->z = fieldVec[kCell][jCell][iCell].z;
551
552 // Optional logging
553  LOG_ALLOW(LOCAL, LOG_VERBOSE, /* scope/level assumed */
554            "Field '%s' at (i=%d, j=%d, k=%d) => (x=%.6f, y=%.6f, z=%.6f)\n",
555 fieldName, iCell, jCell, kCell, vec->x, vec->y, vec->z);
556
557 PetscFunctionReturn(0);
558}
559
560
561#undef __FUNCT__
562#define __FUNCT__ "ComputeTrilinearWeights"
563
564/**
565 * @brief Computes the trilinear interpolation weights from the interpolation coefficients.
566 *
567 * This function computes the weights for trilinear interpolation at the eight corners of a cell
568 * using the interpolation coefficients provided along the x, y, and z directions.
569 *
570 * @param[in] a1 Interpolation coefficient along the x-direction (normalized coordinate within the cell).
571 * @param[in] a2 Interpolation coefficient along the y-direction (normalized coordinate within the cell).
572 * @param[in] a3 Interpolation coefficient along the z-direction (normalized coordinate within the cell).
573 * @param[out] w Array of 8 weights, each corresponding to one corner of the cell.
574 *
575 * @note
576 * - The coefficients `a1`, `a2`, and `a3` should be in the range [0, 1].
577 * - The order of weights corresponds to the eight corners of a hexahedral cell.
578 */
579static inline void ComputeTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w) {
580 LOG_ALLOW(GLOBAL, LOG_VERBOSE, "Computing weights for a1=%f, a2=%f, a3=%f.\n", a1, a2, a3);
581
582 // Ensure a1, a2, a3 are within [0,1]
583 a1 = PetscMax(0.0, PetscMin(1.0, a1));
584 a2 = PetscMax(0.0, PetscMin(1.0, a2));
585 a3 = PetscMax(0.0, PetscMin(1.0, a3));
586
587 const PetscReal oa1 = 1.0 - a1;
588 const PetscReal oa2 = 1.0 - a2;
589 const PetscReal oa3 = 1.0 - a3;
590
591 w[0] = oa1 * oa2 * oa3; /* cornerOffsets[0] => (0,0,0) */
592 w[1] = a1 * oa2 * oa3; /* cornerOffsets[1] => (1,0,0) */
593 w[2] = oa1 * a2 * oa3; /* cornerOffsets[2] => (0,1,0) */
594 w[3] = a1 * a2 * oa3; /* cornerOffsets[3] => (1,1,0) */
595 w[4] = oa1 * oa2 * a3; /* cornerOffsets[4] => (0,0,1) */
596 w[5] = a1 * oa2 * a3; /* cornerOffsets[5] => (1,0,1) */
597 w[6] = oa1 * a2 * a3; /* cornerOffsets[6] => (0,1,1) */
598 w[7] = a1 * a2 * a3; /* cornerOffsets[7] => (1,1,1) */
599
600 // Log the computed weights for debugging
601 LOG_ALLOW(LOCAL,LOG_VERBOSE, "Weights computed - "
602 "w0=%f, w1=%f, w2=%f, w3=%f, w4=%f, w5=%f, w6=%f, w7=%f. \n",
603 w[0], w[1], w[2], w[3], w[4], w[5], w[6], w[7]);
604}
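/*
 * Worked example (values follow directly from the formulas above):
 *
 *   PetscReal w[8];
 *   ComputeTrilinearWeights(0.25, 0.5, 0.5, w);
 *   // w = {0.1875, 0.0625, 0.1875, 0.0625, 0.1875, 0.0625, 0.1875, 0.0625}
 *   // The weights always sum to 1, since (a1 + (1-a1)) * (a2 + (1-a2)) * (a3 + (1-a3)) = 1.
 */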
605
606#undef __FUNCT__
607#define __FUNCT__ "TrilinearInterpolation_Scalar"
608
609/**
610 * @brief Computes the trilinearly interpolated scalar at a given point.
611 *
612 * @param[in] fieldName A string representing the name of the scalar field (e.g., "temperature").
613 * @param[in] fieldScal 3D array of the field from a DMDA (indexed as [k][j][i]),
614 * each cell a PetscReal.
615 * @param[in] i, j, k Integral cell indices (the "lower" corner in each dimension).
616 * @param[in] a1, a2, a3 Normalized coordinates within the cell ([0,1] range).
617 * @param[out] val      Pointer to a PetscReal that will store the interpolated scalar.
618 *
619 * This function uses the standard 8-corner trilinear formula via `ComputeTrilinearWeights()`.
620 * If a different scheme is desired, implement a new function with the same interface.
621 */
622PetscErrorCode TrilinearInterpolation_Scalar(
623    const char *fieldName,
624 PetscReal ***fieldScal,
625 PetscInt i,
626 PetscInt j,
627 PetscInt k,
628 PetscReal a1,
629 PetscReal a2,
630 PetscReal a3,
631 PetscReal *val)
632{
633 PetscFunctionBegin; // PETSc macro for error/stack tracing
634
635 // Compute the 8 corner weights
636 PetscReal wcorner[8];
637 ComputeTrilinearWeights(a1, a2, a3, wcorner);
638
639 // Offsets for cell corners
640 PetscInt i1 = i + 1;
641 PetscInt j1 = j + 1;
642 PetscInt k1 = k + 1;
643
644 // Initialize the output scalar
645 PetscReal sum = 0.0;
646
647 // Corner 0 => (i, j, k)
648 sum += wcorner[0] * fieldScal[k ][j ][i ];
649 // Corner 1 => (i+1, j, k)
650 sum += wcorner[1] * fieldScal[k ][j ][i1];
651 // Corner 2 => (i, j+1, k)
652 sum += wcorner[2] * fieldScal[k ][j1][i ];
653 // Corner 3 => (i+1, j+1, k)
654 sum += wcorner[3] * fieldScal[k ][j1][i1];
655 // Corner 4 => (i, j, k+1)
656 sum += wcorner[4] * fieldScal[k1][j ][i ];
657 // Corner 5 => (i+1, j, k+1)
658 sum += wcorner[5] * fieldScal[k1][j ][i1];
659 // Corner 6 => (i, j+1, k+1)
660 sum += wcorner[6] * fieldScal[k1][j1][i ];
661 // Corner 7 => (i+1, j+1, k+1)
662 sum += wcorner[7] * fieldScal[k1][j1][i1];
663
664 *val = sum;
665
666 // Logging (optional)
667  LOG_ALLOW(LOCAL, LOG_VERBOSE, /* scope/level assumed */
668            "Field '%s' at (i=%d, j=%d, k=%d), "
669 "a1=%.6f, a2=%.6f, a3=%.6f -> val=%.6f.\n",
670 fieldName, i, j, k, a1, a2, a3, *val);
671
672 // LOG_ALLOW_SYNC(GLOBAL, LOG_INFO,
673 // "TrilinearInterpolation_Scalar: Completed interpolation for field '%s' across local cells.\n",
674 // fieldName);
675
676 PetscFunctionReturn(0);
677}
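/*
 * The corner sum above is the standard trilinear formula; in compact form, with f the
 * nodal field and (a1,a2,a3) the normalized coordinates within the cell,
 *
 *   val = sum over (di,dj,dk) in {0,1}^3 of
 *         [a1^di (1-a1)^(1-di)] * [a2^dj (1-a2)^(1-dj)] * [a3^dk (1-a3)^(1-dk)] * f[k+dk][j+dj][i+di]
 *
 * which reduces to successive linear interpolation along each of the three directions.
 */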
678
679
680#undef __FUNCT__
681#define __FUNCT__ "TrilinearInterpolation_Vector"
682/**
683 * @brief Computes the trilinearly interpolated vector (e.g., velocity) at a given point.
684 *
685 * @param[in] fieldName A string representing the name of the vector field (e.g., "velocity").
686 * @param[in] fieldVec 3D array of the field from a DMDA (indexed as [k][j][i]),
687 * each cell of type Cmpnts.
688 * @param[in] i, j, k Integral cell indices (the "lower" corner in each dimension).
689 * @param[in] a1, a2, a3 Normalized coordinates within the cell ([0,1] range).
690 * @param[out] vec      Pointer to a Cmpnts struct that will store the interpolated vector (x, y, z).
691 *
692 * This function uses the standard 8-corner trilinear formula via `ComputeTrilinearWeights()`.
693 * If a different scheme is desired, implement a new function with the same interface.
694 */
695/**
696 * @brief Computes the trilinearly interpolated vector at a given point, with partial weighting if corners go out of range.
697 *
698 * If any of the 8 corners (i or i+1, j or j+1, k or k+1) is out of [0..mx), [0..my), [0..mz),
699 * that corner is skipped and the corresponding weight is omitted. The total is normalized
700 * by the sum of used weights, so we get a partial interpolation near boundaries.
701 */
702PetscErrorCode TrilinearInterpolation_Vector(
703    const char *fieldName,
704 Cmpnts ***fieldVec, /* 3D array [k][j][i], dimension [mz][my][mx] */
705 PetscInt i, // local cell index i
706 PetscInt j, // local cell index j
707 PetscInt k, // local cell index k
708 PetscReal a1,
709 PetscReal a2,
710 PetscReal a3,
711 Cmpnts *vec)
712{
713 PetscFunctionBegin; // PETSc macro for error/stack tracing
714
715 // Compute the 8 corner weights
716 PetscErrorCode ierr;
717 PetscReal wcorner[8];
718 PetscMPIInt rank;
719
720 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
721
722 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] Computing Trilinear Weights.\n",rank);
723 ComputeTrilinearWeights(a1, a2, a3, wcorner);
724
725 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] Trilinear weights computed for local cell %d,%d,%d.\n",rank,i,j,k);
726
727 // For partial interpolation, we'll keep track of how many corners are valid
728 // and how much sum of weights is used. Then we do a final normalization.
729 PetscReal sumW = 0.0;
730 Cmpnts accum = {0.0, 0.0, 0.0};
731
732 // The eight corner indices, with their weights:
733 // corners: (i,j,k), (i+1,j,k), (i,j+1,k), (i+1,j+1,k), etc.
734 // We store them in an array to iterate cleanly.
735 const PetscInt cornerOffsets[8][3] = {
736 {0, 0, 0},
737 {1, 0, 0},
738 {0, 1, 0},
739 {1, 1, 0},
740 {0, 0, 1},
741 {1, 0, 1},
742 {0, 1, 1},
743 {1, 1, 1}
744 };
745
746 // Weighted partial sum
747 for (PetscInt c = 0; c < 8; c++) {
748 const PetscInt di = cornerOffsets[c][0];
749 const PetscInt dj = cornerOffsets[c][1];
750 const PetscInt dk = cornerOffsets[c][2];
751 PetscInt iC = i + di;
752 PetscInt jC = j + dj;
753 PetscInt kC = k + dk;
754
755 /*
756 // skip if out of domain
757 // (Assuming you know global domain is [0..mx), [0..my), [0..mz).)
758 if (iC < 0 || iC >= (PetscInt)userGlobalMx ||
759 jC < 0 || jC >= (PetscInt)userGlobalMy ||
760 kC < 0 || kC >= (PetscInt)userGlobalMz)
761 {
762 // skip this corner
763 continue;
764 }
765
766 */
767
768 LOG_ALLOW(LOCAL,LOG_VERBOSE,"[Rank %d] %s[%d][%d][%d] = (%.4f,%.4f,%.4f).\n",rank,fieldName,kC,jC,iC,fieldVec[kC][jC][iC].x,fieldVec[kC][jC][iC].y,fieldVec[kC][jC][iC].z);
769
770 // Otherwise, accumulate
771 accum.x += wcorner[c] * fieldVec[kC][jC][iC].x;
772 accum.y += wcorner[c] * fieldVec[kC][jC][iC].y;
773 accum.z += wcorner[c] * fieldVec[kC][jC][iC].z;
774 sumW += wcorner[c];
775 }
776
777 // If sumW=0 => out-of-range or zero weighting => set (0,0,0)
778 if (sumW > 1.0e-14) {
779 vec->x = accum.x / sumW;
780 vec->y = accum.y / sumW;
781 vec->z = accum.z / sumW;
782 } else {
783 vec->x = 0.0; vec->y = 0.0; vec->z = 0.0;
784 }
785
786 PetscFunctionReturn(0);
787}
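/*
 * Normalization note: with the out-of-range skip above left commented out, all 8 corners
 * contribute, sumW equals 1 (the weights from ComputeTrilinearWeights sum to 1), and the
 * division is a no-op, i.e. the result is the standard trilinear value:
 *
 *   vec = ( sum_c w_c * fieldVec[corner_c] ) / ( sum_c w_c )
 *
 * Re-enabling the skip turns this into a boundary-safe partial interpolation, where the
 * sums run only over the in-range corners.
 */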
788
789
790#undef __FUNCT__
791#define __FUNCT__ "InterpolateEulerFieldToSwarmForParticle"
792/**
793 * @brief Interpolates a single field (scalar or vector) for one particle, storing the result in a swarm array.
794 *
795 * This helper function is used inside a larger loop over local particles.
796 *
797 * @param[in] fieldName A string identifying the field (e.g. "velocity", "temperature", etc.).
798 * @param[in] fieldPtr A Pointer to the local DMDA array for the field:
799 * - (PetscReal ***) if scalar (blockSize = 1)
800 * - (Cmpnts ***) if vector (blockSize = 3)
801 * @param[in] iCell, jCell, kCell The cell indices for this particle (already clamped if needed).
802 * @param[in] a1, a2, a3 Interpolation coefficients in [0..1].
803 * If using PiecewiseLinearInterpolation, these are currently ignored.
804 * @param[out] swarmOut A Pointer to the DMSwarm output array:
805 * - (PetscReal*) if scalar dof=1
806 * - (PetscReal*) if vector dof=3 (storing x,y,z in consecutive reals)
807 * - or (Cmpnts*) if you store the result as a Cmpnts struct
808 * @param[in] p The local particle index (used to compute the correct offset in swarmOut).
809 * @param[in] blockSize The number of degrees of freedom (1 => scalar, 3 => vector).
810 *
811 * This routine demonstrates a switch between:
812 * - PiecewiseLinearInterpolation (zeroth order / nearest cell)
813 * - TrilinearInterpolation (8-corner weighted).
814 *
815 * By default, TrilinearInterpolation is active, while the PiecewiseLinearInterpolation calls are commented out.
816 * To switch to the piecewise scheme, simply comment/uncomment appropriately.
817 *
818 * Logging calls are provided (LOG_ALLOW) that you can adapt to your existing logging system.
819 *
820 * @return PetscErrorCode 0 on success, non-zero on error.
821 */
822static inline PetscErrorCode InterpolateEulerFieldToSwarmForParticle(
823 const char *fieldName,
824 void *fieldPtr, /* typed Pointer => either (PetscReal***) or (Cmpnts***) */
825 PetscInt iCell,
826 PetscInt jCell,
827 PetscInt kCell,
828 PetscReal a1,
829 PetscReal a2,
830 PetscReal a3,
831 void *swarmOut, /* typed Pointer => (PetscReal*) or (Cmpnts*) or dof=3 array */
832 PetscInt p, /* particle index */
833 PetscInt blockSize) /* dof=1 => scalar, dof=3 => vector */
834{
835 PetscErrorCode ierr;
836 PetscFunctionBegin;
837
839
840 // Optional logging at start
841  LOG_ALLOW(LOCAL, LOG_VERBOSE, /* scope/level assumed */
842            "field='%s', blockSize=%d, "
843 "cell IDs=(%d,%d,%d), weights=(%.4f,%.4f,%.4f)\n",
844 fieldName, blockSize, iCell, jCell, kCell, a1, a2, a3);
845
846 /*
847     If blockSize=1, we interpret the fieldPtr as a 3D array of PetscReal (scalar).
848     If blockSize=3, we interpret the fieldPtr as a 3D array of Cmpnts (vector).
849 */
850 if (blockSize == 1) {
851 /* Scalar field: Cast fieldPtr to (PetscReal ***). */
852 PetscReal ***fieldScal = (PetscReal ***) fieldPtr;
853 PetscReal val;
854
855 // Currently using trilinear.
856 ierr = TrilinearInterpolation(fieldName, fieldScal,
857 iCell, jCell, kCell,
858 a1, a2, a3,
859 &val);
860 CHKERRQ(ierr);
861
862 // Alternative (commented) call to PiecewiseLinearInterpolation (zeroth order) :
863 // PetscErrorCode ierr = PiecewiseLinearInterpolation(fieldName,
864 // fieldScal,
865 // iCell, jCell, kCell,
866 // &val);
867 // CHKERRQ(ierr);
868
869 // Write the scalar result to the swarm output at index [p].
870 ((PetscReal*)swarmOut)[p] = val;
871
872    LOG_ALLOW(LOCAL, LOG_VERBOSE, /* scope/level assumed */
873              "field='%s', result=%.6f "
874 "stored at swarmOut index p=%d.\n", fieldName, val, (PetscInt)p);
875 }
876 else if (blockSize == 3) {
877 /* Vector field: Cast fieldPtr to (Cmpnts ***). */
878 Cmpnts ***fieldVec = (Cmpnts ***) fieldPtr;
879 Cmpnts vec;
880
881
882
883
884    // Alternative (commented) call to PiecewiseLinearInterpolation (zeroth order):
885 // PetscErrorCode ierr = PieceWiseLinearInterpolation(fieldName,
886 // fieldVec,
887 // iCell, jCell, kCell,
888 // &vec);
889 // CHKERRQ(ierr);
890
891    // Currently using trilinear.
892 ierr = TrilinearInterpolation(fieldName, fieldVec,
893 iCell, jCell, kCell,
894 a1, a2, a3,
895 &vec);
896 CHKERRQ(ierr);
897
898 // If swarmOut is an array of 3 reals per particle:
899 ((PetscReal*)swarmOut)[3*p + 0] = vec.x;
900 ((PetscReal*)swarmOut)[3*p + 1] = vec.y;
901 ((PetscReal*)swarmOut)[3*p + 2] = vec.z;
902
903    LOG_ALLOW(LOCAL, LOG_VERBOSE, /* scope/level assumed */
904              "field='%s', result=(%.6f,%.6f,%.6f) "
905 "stored at swarmOut[3p..3p+2], p=%d.\n",
906 fieldName, vec.x, vec.y, vec.z, (PetscInt)p);
907
908 /*
909 If you store the vector result as a Cmpnts in the swarm, do instead:
910 ((Cmpnts*)swarmOut)[p] = vec;
911 but ensure your DMSwarm field is sized for a Cmpnts struct.
912 */
913 }
914 else {
915 /* If blockSize isn't 1 or 3, we raise an error. */
916 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP,
917 "InterpolateEulerFieldToSwarmForParticle: only blockSize=1 or 3 supported, got %d.",
918 (PetscInt)blockSize);
919 }
920
922 PetscFunctionReturn(0);
923}
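/*
 * Usage sketch: inside InterpolateEulerFieldToSwarm() this helper is called once per
 * local particle, after the cell indices and weights have been read from the swarm
 * (variable names below follow that function):
 *
 *   ierr = InterpolateEulerFieldToSwarmForParticle(
 *              fieldName,                        // e.g. "Ucat"
 *              cornerPtr_read_with_ghosts,       // ghosted corner-node data
 *              iCell_global, jCell_global, kCell_global,
 *              a1, a2, a3,                       // trilinear coefficients in [0,1]
 *              swarmOut, p, bs);                 // output array, particle index, dof
 *   CHKERRQ(ierr);
 */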
924
925#undef __FUNCT__
926#define __FUNCT__ "InterpolateEulerFieldToSwarm"
927
928/**
929 * @brief Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles,
930 * using a robust, PETSc-idiomatic two-stage process.
931 *
932 * This function first converts the cell-centered input data to corner-node data,
933 * storing this intermediate result in a PETSc Vec to correctly handle the communication
934 * of ghost-point information across parallel ranks. It then performs a final trilinear
935 * interpolation from the ghosted corner data to each particle's location.
936 *
937 * Workflow:
938 * 1. Create temporary PETSc Vecs (`cornerGlobal`, `cornerLocal`) to manage the
939 * intermediate corner-node data, using the existing nodal DMDA (`user->fda`).
940 * 2. Call a dispatch macro that uses the runtime block size (`bs`) to select the
941 * correct underlying center-to-corner function, writing results into `cornerGlobal`.
942 * 3. Perform a ghost-point exchange (`DMGlobalToLocal`) to transfer the boundary
943 * data from `cornerGlobal` into the ghost regions of `cornerLocal`.
944 * 4. Loop over all local particles. For each particle:
945 * a. Convert its global cell index to a local index relative to the ghosted array.
946 * b. Check if the particle's interpolation stencil is fully contained within
947 * the owned+ghost region. If not, log a warning and set the result to zero.
948 * c. Perform the final trilinear interpolation using the ghosted `cornerLocal` data.
949 * 5. Restore all PETSc objects to prevent memory leaks.
950 *
951 * @param[in] user User context with DMDA, DMSwarm, etc.
952 * @param[in] fieldLocal_cellCentered Ghosted local Vec (from fda) with cell-centered data (e.g., Ucat).
953 * @param[in] fieldName Human-readable field name for logging (e.g., "Ucat").
954 * @param[in] swarmOutFieldName Name of the DMSwarm field where interpolation results go.
955 *
956 * @return PetscErrorCode 0 on success.
957 */
958PetscErrorCode InterpolateEulerFieldToSwarm(
959    UserCtx *user,
960 Vec fieldLocal_cellCentered,
961 const char *fieldName,
962 const char *swarmOutFieldName)
963{
964 PetscErrorCode ierr;
965 DM fda = user->fda;
966 DM swarm = user->swarm;
967 PetscInt bs;
968 DMDALocalInfo info;
969 PetscMPIInt rank;
970
971 // Generic pointers to the raw data arrays
972 void *cellCenterPtr_read;
973 void *cornerPtr_read_with_ghosts;
974
975 // Swarm-related pointers
976 PetscInt *cellIDs = NULL;
977 PetscReal *weights = NULL;
978 PetscInt *pids = NULL; // For logging particle IDs in warnings
979 void *swarmOut = NULL;
980 PetscInt nLocal;
981
982 PetscFunctionBegin;
984 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
985 ierr = DMDAGetLocalInfo(fda, &info); CHKERRQ(ierr);
986 ierr = VecGetBlockSize(fieldLocal_cellCentered, &bs); CHKERRQ(ierr);
987 if (bs != 1 && bs != 3) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "BlockSize must be 1 or 3.");
988
989 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting for field '%s'... \n", fieldName);
990
991 /* STAGE 1: Center-to-Corner Calculation with Communication */
992
993 // (A) Create the corner vectors.
994 if(user->CellFieldAtCorner == NULL){
995 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Creating new global corner vector for field '%s'.\n", rank, fieldName);
996 ierr = DMCreateGlobalVector(fda, &user->CellFieldAtCorner); CHKERRQ(ierr);
997 }else{
998 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Reusing existing global corner vector for field '%s'.\n", rank, fieldName);
999 }
1000
1001 ierr = VecSet(user->CellFieldAtCorner,0.0); CHKERRQ(ierr);
1002 Vec cornerGlobal = user->CellFieldAtCorner;
1003
1004 if(user->lCellFieldAtCorner == NULL){
1005 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Creating new local corner vector for field '%s'.\n", rank, fieldName);
1006 ierr = DMCreateLocalVector(fda, &user->lCellFieldAtCorner); CHKERRQ(ierr);
1007 }else{
1008 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Reusing existing local corner vector for field '%s'.\n", rank, fieldName);
1009 }
1010
1011 ierr = VecSet(user->lCellFieldAtCorner,0.0); CHKERRQ(ierr);
1012 Vec cornerLocal = user->lCellFieldAtCorner;
1013
1014 // (B) Get a read-only array from the input cell-centered vector
1015 ierr = DMDAVecGetArrayRead(fda, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);
1016
1017 PetscInt size;
1018 ierr = VecGetSize(cornerGlobal, &size); CHKERRQ(ierr);
1019 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Corner global vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);
1020
1021 size = 0;
1022
1023 ierr = VecGetSize(fieldLocal_cellCentered, &size); CHKERRQ(ierr);
1024 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Cell-centered local vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);
1025
1026 size = 0;
1027
1028 ierr = VecGetLocalSize(cornerGlobal, &size); CHKERRQ(ierr);
1029 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Corner global vector LOCAL size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);
1030
1031 size = 0;
1032
1033 ierr = VecGetLocalSize(fieldLocal_cellCentered, &size); CHKERRQ(ierr);
1034 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Cell-centered local vector LOCAL size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);
1035
1036 size = 0;
1037
1038 ierr = VecGetSize(cornerLocal, &size); CHKERRQ(ierr);
1039 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Corner local vector size for field '%s': %d.\n", rank, fieldName, (PetscInt)size);
1040
1041 PetscInt xs,ys,zs,gxs,gys,gzs;
1042 ierr = DMDAGetCorners(fda,&xs,&ys,&zs,NULL,NULL,NULL); CHKERRQ(ierr);
1043 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] DMDAGetCorners for field '%s': xs=%d, ys=%d, zs=%d.\n", rank, fieldName, (PetscInt)xs, (PetscInt)ys, (PetscInt)zs);
1044
1045 ierr = DMDAGetGhostCorners(fda,&gxs,&gys,&gzs,NULL,NULL,NULL); CHKERRQ(ierr);
1046 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] DMDAGetGhostCorners for field '%s': gxs=%d, gys=%d, gzs=%d.\n", rank, fieldName, (PetscInt)gxs, (PetscInt)gys, (PetscInt)gzs);
1047
1048 // DEBUG: Inspect Ucat ghost cells before interpolation
1049 /*
1050 if (bs == 3) {
1051 Cmpnts ***ucat_array = (Cmpnts***)cellCenterPtr_read;
1052 DMDALocalInfo info_debug;
1053 ierr = DMDAGetLocalInfo(fda, &info_debug); CHKERRQ(ierr);
1054
1055 // Only print on rank 0 to avoid clutter
1056 if (rank == 0) {
1057 PetscPrintf(PETSC_COMM_SELF, "\nDEBUG: Inspecting Ucat Ghost Cells...\n");
1058 PetscPrintf(PETSC_COMM_SELF, "--------------------------------------------------\n");
1059
1060 // --- Check the MINIMUM-SIDE (-Xi) ---
1061 // The first physical cell is at index info.xs (local index).
1062 // The first ghost cell is at info.xs - 1.
1063 PetscInt i_first_phys = info_debug.xs;
1064 PetscInt i_first_ghost = info_debug.xs - 1;
1065 PetscInt j_mid = info_debug.ys + info_debug.ym / 2;
1066 PetscInt k_mid = info_debug.zs + info_debug.zm / 2;
1067
1068 PetscPrintf(PETSC_COMM_SELF, "MIN-SIDE (-Xi):\n");
1069 PetscPrintf(PETSC_COMM_SELF, " Ghost Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
1070 k_mid, j_mid, i_first_ghost,
1071 ucat_array[k_mid][j_mid][i_first_ghost].x,
1072 ucat_array[k_mid][j_mid][i_first_ghost].y,
1073 ucat_array[k_mid][j_mid][i_first_ghost].z);
1074 PetscPrintf(PETSC_COMM_SELF, " First Phys Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
1075 k_mid, j_mid, i_first_phys,
1076 ucat_array[k_mid][j_mid][i_first_phys].x,
1077 ucat_array[k_mid][j_mid][i_first_phys].y,
1078 ucat_array[k_mid][j_mid][i_first_phys].z);
1079
1080
1081 // --- Check the MAXIMUM-SIDE (+Xi) ---
1082 // The last physical cell is at index info.xs + info.xm - 1.
1083 // The first ghost cell on the max side is at info.xs + info.xm.
1084 PetscInt i_last_phys = info_debug.xs + info_debug.xm - 1;
1085 PetscInt i_last_ghost = info_debug.xs + info_debug.xm;
1086
1087 PetscPrintf(PETSC_COMM_SELF, "MAX-SIDE (+Xi):\n");
1088 PetscPrintf(PETSC_COMM_SELF, " Last Phys Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
1089 k_mid, j_mid, i_last_phys,
1090 ucat_array[k_mid][j_mid][i_last_phys].x,
1091 ucat_array[k_mid][j_mid][i_last_phys].y,
1092 ucat_array[k_mid][j_mid][i_last_phys].z);
1093 PetscPrintf(PETSC_COMM_SELF, " Ghost Cell Ucat[%d][%d][%d] = (% .6f, % .6f, % .6f)\n",
1094 k_mid, j_mid, i_last_ghost,
1095 ucat_array[k_mid][j_mid][i_last_ghost].x,
1096 ucat_array[k_mid][j_mid][i_last_ghost].y,
1097 ucat_array[k_mid][j_mid][i_last_ghost].z);
1098 PetscPrintf(PETSC_COMM_SELF, "--------------------------------------------------\n\n");
1099 }
1100 }
1101 */
1102 // DEBUG
1103
1104 // (C) Perform the center-to-corner interpolation directly into the global corner vector
1105 void *cornerPtr_write = NULL;
1106 ierr = DMDAVecGetArray(fda, cornerGlobal, &cornerPtr_write); CHKERRQ(ierr);
1107
1108 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Starting center-to-corner interpolation for '%s'.\n", rank, fieldName);
1109
1110 // SINGLE, CLEAN CALL SITE: The macro handles the runtime dispatch based on 'bs'.
1111 ierr = InterpolateFieldFromCenterToCorner(bs, cellCenterPtr_read, cornerPtr_write, user); CHKERRQ(ierr);
1112
1113 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Finished center-to-corner interpolation for '%s'.\n", rank, fieldName);
1114 ierr = DMDAVecRestoreArray(fda, cornerGlobal, &cornerPtr_write); CHKERRQ(ierr);
1115
1116 ierr = DMDAVecRestoreArrayRead(fda, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);
1117
1118 ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
1119
1120 //////////// DEBUG
1121 /*
1122 // Log a synchronized header message from all ranks.
1123 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerGlobal BEFORE ghost exchange...\n");
1124
1125 // VecView prints to a PETSc viewer. PETSC_VIEWER_STDOUT_WORLD is a global, synchronized viewer.
1126 // We only need to call it if logging is active for this function.
1127 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1128 ierr = VecView(cornerGlobal, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
1129 }
1130
1131 // Log a synchronized footer message.
1132 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerGlobal.\n");
1133 */
1134 ////////// DEBUG
1135
1136 // ierr = PetscBarrier((PetscObject)cornerGlobal); CHKERRQ(ierr);
1137
1138 // (D) CRITICAL STEP: Communicate the newly computed corner data to fill ghost regions
1139 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Beginning ghost exchange for corner data...\n", rank);
1140
1141 ierr = DMGlobalToLocalBegin(fda, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
1142 ierr = DMGlobalToLocalEnd(fda, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
1143
1144 LOG_ALLOW(LOCAL, LOG_TRACE, "[Rank %d] Ghost exchange for corner data complete.\n", rank);
1145
1146 LOG_ALLOW_SYNC(GLOBAL, LOG_VERBOSE, "getting array from FDA on all ranks.\n");
1147 if (is_function_allowed(__func__) && (int)(LOG_VERBOSE) <= (int)get_log_level()) {
1148 // DEBUG: Inspect cornerLocal after ghost exchange
1149
1150 ierr = LOG_FIELD_ANATOMY(user,"CornerField", "After Corner Velocity Interpolated"); CHKERRQ(ierr);
1151
1152 // DEBUG
1153 //ierr = DMView(user->fda, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
1154 }
1155
1156 /////// DEBUG
1157 /*
1158 // Log a synchronized header message from all ranks.
1159 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerLocal AFTER ghost exchange...\n");
1160
1161 // Here, we want each rank to print its own local vector.
1162 // PETSC_VIEWER_STDOUT_SELF is the correct tool for this. The LOG_ALLOW_SYNC
1163 // wrapper will ensure the output from each rank is printed sequentially.
1164 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1165 PetscMPIInt rank_d;
1166 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank_d); CHKERRQ(ierr);
1167
1168 // Use a synchronized print to create a clean header for each rank's output
1169 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\n--- cornerLocal (Rank %d) ---\n", rank_d);
1170 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
1171
1172 // Print the local vector's contents for this rank
1173 ierr = VecView(cornerLocal, PETSC_VIEWER_STDOUT_SELF); CHKERRQ(ierr);
1174
1175 // Use a synchronized print to create a clean footer
1176 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "--- End cornerLocal (Rank %d) ---\n", rank_d);
1177 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
1178 }
1179
1180 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerLocal.\n");
1181 */
1182 ///// DEBUG
1183
1184 /* STAGE 2: Particle Interpolation using Ghosted Corner Data */
1185
1186 // (E) Get the local, GHOSTED array of corner data for the final interpolation step
1187 ierr = DMDAVecGetArrayRead(fda, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);
1188
1189 // (F) Retrieve swarm fields for the particle loop
1190 ierr = DMSwarmGetLocalSize(swarm, &nLocal); CHKERRQ(ierr);
1191 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1192 ierr = DMSwarmGetField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
1193 ierr = DMSwarmGetField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1194 ierr = DMSwarmGetField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1195
1196 LOG_ALLOW(LOCAL,LOG_TRACE," Rank %d holds data upto & including %d,%d,%d.\n",rank,info.gxs + info.gxm,info.gys+info.gym,info.gzs+info.gzm);
1197
1198 // (G) Loop over each local particle
1199 for (PetscInt p = 0; p < nLocal; p++) {
1200 PetscInt iCell_global = cellIDs[3 * p + 0];
1201 PetscInt jCell_global = cellIDs[3 * p + 1];
1202 PetscInt kCell_global = cellIDs[3 * p + 2];
1203
1204
1205        LOG_ALLOW(LOCAL, LOG_VERBOSE, /* scope/level assumed */
1206                  "[Rank %d] Particle PID %lld: global cell=(%d,%d,%d), weights=(%.4f,%.4f,%.4f)\n",
1207 rank, (long long)pids[p], iCell_global, jCell_global, kCell_global,
1208 weights[3 * p + 0], weights[3 * p + 1], weights[3 * p + 2]);
1209 // Safety check: Ensure the entire 8-node stencil is within the valid memory
1210 // region (owned + ghosts) of the local array.
1211
1212 if (iCell_global < info.gxs || iCell_global >= info.gxs + info.gxm - 1 ||
1213            jCell_global < info.gys || jCell_global >= info.gys + info.gym - 1 ||
1214            kCell_global < info.gzs || kCell_global >= info.gzs + info.gzm - 1)
1215 {
1216            LOG_ALLOW(LOCAL, LOG_DEBUG, /* scope/level assumed */
1217                      "[Rank %d] Particle PID %lld in global cell (%d,%d,%d) is in an un-interpolatable region (requires ghosts of ghosts or is out of bounds). Zeroing field '%s'.\n",
1218 rank, (long long)pids[p], iCell_global, jCell_global, kCell_global, fieldName);
1219
1220 if (bs == 3) {
1221 ((PetscReal*)swarmOut)[3*p + 0] = 0.0;
1222 ((PetscReal*)swarmOut)[3*p + 1] = 0.0;
1223 ((PetscReal*)swarmOut)[3*p + 2] = 0.0;
1224 } else {
1225 ((PetscReal*)swarmOut)[p] = 0.0;
1226 }
1227 continue;
1228 }
1229
1230 PetscReal a1 = weights[3 * p + 0];
1231 PetscReal a2 = weights[3 * p + 1];
1232 PetscReal a3 = weights[3 * p + 2];
1233
1234        ierr = InterpolateEulerFieldToSwarmForParticle(
1235                  fieldName,
1236 cornerPtr_read_with_ghosts,
1237 iCell_global, jCell_global, kCell_global,
1238 a1, a2, a3,
1239 swarmOut, p, bs);
1240 CHKERRQ(ierr);
1241 }
1242
1243 /* (H) Restore all retrieved arrays and vectors */
1244 ierr = DMDAVecRestoreArrayRead(fda, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);
1245 ierr = DMSwarmRestoreField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1246 ierr = DMSwarmRestoreField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1247 ierr = DMSwarmRestoreField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
1248 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1249
1250 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished for field '%s'.\n", fieldName);
1252 PetscFunctionReturn(0);
1253}
1254
1255#undef __FUNCT__
1256#define __FUNCT__ "InterpolateAllFieldsToSwarm"
1257 /**
1258 * @brief Interpolates all relevant fields from the DMDA to the DMSwarm.
1259 *
1260 * Currently, it interpolates:
1261 * - user->Ucat (vector field) into the DMSwarm field "velocity".
1262 *
1263 * To add more fields, duplicate the call to InterpolateEulerFieldToSwarm and provide:
1264 * - The global Vec for that field (e.g. user->Tcat for temperature),
1265 * - A human-readable field name (for logging),
1266 * - A DMSwarm output field name (e.g. "swarmTemperature").
1267 *
1268 * @param[in,out] user Pointer to a UserCtx containing:
1269 * - user->da (DM for the grid),
1270 * - user->swarm (DMSwarm for particles),
1271 * - user->Ucat (Vec for the vector field),
1272 * - possibly more fields like user->Tcat, user->Pcat, etc.
1273 *
1274 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1275 */
1276PetscErrorCode InterpolateAllFieldsToSwarm(UserCtx *user)
1277{
1278 PetscErrorCode ierr;
1279 PetscMPIInt rank;
1280 PetscFunctionBegin;
1281
1283
1284 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);
1285
1286 /*
1287 1) Interpolate the 'velocity' field (user->Ucat) into DMSwarm's "swarmVelocity".
1288     - The function InterpolateEulerFieldToSwarm() loops over all local particles,
1289 retrieves iCell, jCell, kCell, (a1,a2,a3) from the DMSwarm,
1290 and calls the appropriate trilinear interpolation routine.
1291
1292     - "Ucat"     => just a label used in logging or debugging.
1293     - "velocity" => the DMSwarm field name where we store the result
1294 (assume dof=3 if user->Ucat has blockSize=3).
1295 */
1296
1297  LOG_ALLOW(LOCAL, LOG_DEBUG, /* scope/level assumed */
1298            " Interpolation of ucat to velocity begins on rank %d.\n",rank);
1299  // Pass the ghosted *LOCAL* Vec (user->lUcat) to the function below; it reads ghost cells.
1300 ierr = InterpolateEulerFieldToSwarm(user, user->lUcat,
1301 "Ucat",
1302 "velocity"); CHKERRQ(ierr);
1303 /*
1304     2) (OPTIONAL) If you have more fields, you can interpolate them similarly.
1305
1306 For example, if user->Tcat is a scalar Vec for "temperature" and the DMSwarm
1307 has a field "Temperature" (with dof=1), you could do:
1308
1309     ierr = InterpolateEulerFieldToSwarm(user, user->Tcat,
1310 "temperature",
1311 "swarmTemperature"); CHKERRQ(ierr);
1312
1313 For pressure:
1314     ierr = InterpolateEulerFieldToSwarm(user, user->Pcat,
1315 "pressure",
1316 "swarmPressure"); CHKERRQ(ierr);
1317
1318 ...and so forth.
1319 */
1320
1321 /*
1322 3) Optionally, synchronize or log that all fields are done
1323 */
1324 ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
1325  LOG_ALLOW(LOCAL, LOG_DEBUG, /* scope/level assumed */
1326            "[rank %d] Completed interpolating all fields to the swarm.\n",rank);
1327
1329
1330 PetscFunctionReturn(0);
1331}
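/*
 * Sketch of adding a second field, following the same pattern as Ucat above (the ghosted
 * local Vec `user->lTcat` and the swarm field name "Temperature" are illustrative
 * placeholders; they must exist in UserCtx and be registered in the DMSwarm beforehand):
 *
 *   ierr = InterpolateEulerFieldToSwarm(user, user->lTcat,
 *                                       "Tcat",
 *                                       "Temperature"); CHKERRQ(ierr);
 */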
1332
1333/////////////////////// Scatter from particles to euler fields
1334
1335/**
1336 * @brief Functions for scattering particle data (scalar or vector) onto
1337 * Eulerian grid fields by averaging contributions within each cell.
1338 *
1339 * This file provides a modular set of functions to perform particle-to-grid
1340 * projection, specifically calculating cell-averaged quantities from particle properties.
1341 * It assumes a PETSc environment using DMDA for the grids and DMSwarm for particles.
1342 *
1343 * Key Features:
1344 * - Handles both scalar (DOF=1) and vector (DOF=3) particle fields.
1345 * - Uses a pre-calculated particle count vector (`ParticleCount`) for normalization.
1346 * - Implicitly determines the target Eulerian DM (typically `user->da` for scalars,
1347 * `user->fda` for vectors) based on standard field names ("P", "Nvert", "Ucat", "Ucont","Psi").
1348 * - Modifies existing Eulerian field vectors (e.g., `user->P`, `user->Ucat`, `user->Psi`) in place.
1349 * - Provides a high-level wrapper function (`ScatterAllParticleFieldsToEulerFields`)
1350 * to easily scatter a standard set of fields.
1351 * - Uses only the base `SETERRQ` macro for error reporting to maximize compiler compatibility.
1352 *
1353 * Dependencies:
1354 * - PETSc library (DMDA, DMSwarm, Vec, IS, PetscLog, etc.)
1355 * - A `UserCtx` struct (defined elsewhere, e.g., "userctx.h") containing pointers
1356 * to relevant DMs (`da`, `fda`), Vecs (`ParticleCount`, `P`, `Nvert`, `Ucat`, etc.),
1357 * and the `DMSwarm` object (`swarm`).
1358 * - A custom DMSwarm field named `"DMSwarm_CellID"` (blockSize=3, type=PETSC_INT)
1359 * must be registered and populated with the local cell indices for each particle.
1360 * - Logging infrastructure (`LOG_ALLOW`, etc.) assumed to be defined elsewhere.
1361 *
1362 * @defgroup scatter_module Particle-to-Grid Scattering
1363 * @{
1364 */
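/*
 * The cell-average scatter described above follows an accumulate-then-normalize pattern;
 * a minimal sketch (the array names `sum_arr`, `cnt_arr`, `prop` and the loop bounds are
 * illustrative only; the routines below add DOF dispatch, DM selection and error checks):
 *
 *   // 1) accumulate each particle's property into its host cell
 *   for (PetscInt p = 0; p < nLocalParticles; p++) {
 *     PetscInt i = cellIDs[3*p + 0], j = cellIDs[3*p + 1], k = cellIDs[3*p + 2];
 *     sum_arr[k][j][i] += prop[p];
 *   }
 *   // 2) normalize by the pre-computed particle count to get the cell average
 *   for (PetscInt k = zs; k < zs + zm; k++)
 *     for (PetscInt j = ys; j < ys + ym; j++)
 *       for (PetscInt i = xs; i < xs + xm; i++)
 *         if (cnt_arr[k][j][i] > 0.0) sum_arr[k][j][i] /= cnt_arr[k][j][i];
 */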
1365
1366//-----------------------------------------------------------------------------
1367// Internal Helper Modules (Lower-level building blocks)
1368//-----------------------------------------------------------------------------
1369/**
1370 * @defgroup scatter_module_internal Internal Scattering Helpers
1371 * @ingroup scatter_module
1372 * @brief Lower-level functions used by the main scattering routines.
1373 * @{
1374 */
1375
1376#undef __FUNCT__
1377#define __FUNCT__ "GetScatterTargetInfo"
1378/**
1379 * @brief Determines the target Eulerian DM and expected DOF for scattering a given particle field.
1380 *
1381 * Based on hardcoded rules mapping particle field names ("P", "Nvert", "Ucat", "Ucont")
1382 * to user context DMs (`user->da` or `user->fda`). This function encapsulates the
1383 * policy of where different fields should be scattered. Modify this function to
1384 * add rules for custom fields.
1385 *
1386 * @param[in] user Pointer to the UserCtx containing required DMs (`da`, `fda`).
1387 * @param[in] particleFieldName Name of the particle field.
1388 * @param[out] targetDM Pointer to store the determined target DM (`user->da` or `user->fda`).
1389 * @param[out] expected_dof Pointer to store the expected DOF (1 or 3) for this field.
1390 *
1391 * @return PetscErrorCode Returns 0 on success. Error codes:
1392 * - `PETSC_ERR_ARG_NULL` if required inputs are NULL.
1393 * - `PETSC_ERR_ARG_WRONG` if `particleFieldName` is not recognized.
1394 */
1395PetscErrorCode GetScatterTargetInfo(UserCtx *user, const char *particleFieldName,
1396 DM *targetDM, PetscInt *expected_dof)
1397{
1398 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1399 PetscFunctionBeginUser;
1400
1402
1403 // --- Input Validation ---
1404 // Check for NULL pointers in essential inputs
1405 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL.");
1406 if (!user->da) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->da is NULL.");
1407 if (!user->fda) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->fda is NULL."); // Needed for vector fields
1408 if (!particleFieldName) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "particleFieldName is NULL.");
1409 if (!targetDM) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output targetDM pointer is NULL.");
1410 if (!expected_dof) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output expected_dof pointer is NULL.");
1411
1412 // --- Determine Target DM and DOF based on Field Name ---
1413 // Compare the input field name with known scalar fields targeting 'da'
1414 if (strcmp(particleFieldName, "Psi") == 0 || strcmp(particleFieldName, "Nvert") == 0) {
1415 *expected_dof = 1; // Scalar fields have DOF 1
1416 *targetDM = user->da; // Target the primary scalar DMDA
1417 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Field '%s' targets DM 'da' (DOF=1).\n", particleFieldName);
1418 }
1419 // Compare with known vector fields targeting 'fda'
1420 else if (strcmp(particleFieldName, "Ucat") == 0 || strcmp(particleFieldName, "Ucont") == 0) {
1421 *expected_dof = 3; // Vector fields have DOF 3
1422 *targetDM = user->fda; // Target the vector DMDA (often node-based)
1423 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Field '%s' targets DM 'fda' (DOF=3).\n", particleFieldName);
1424 }
1425 // --- Add rules for other fields here ---
1426 // else if (strcmp(particleFieldName, "SomeOtherScalar") == 0) { *expected_dof = 1; *targetDM = user->da; }
1427 // else if (strcmp(particleFieldName, "SomeOtherVector") == 0) { *expected_dof = 3; *targetDM = user->someOtherDM; }
1428 else {
1429 // The provided field name doesn't match any known rules
1430 *targetDM = NULL; // Indicate failure
1431 *expected_dof = 0;
1432 // Format the error message manually
1433 PetscSNPrintf(msg, sizeof(msg), "Field name '%s' is not recognized for automatic DM selection.", particleFieldName);
1434 // Use SETERRQ with the formatted message and appropriate error code
1435 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); // Use WRONG argument error code
1436 }
1437
1439
1440 PetscFunctionReturn(0);
1441}
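/*
 * Example query (hedged sketch): for the recognized vector field "Ucat" this helper is
 * expected to return targetDM == user->fda and expected_dof == 3; an unrecognized name
 * triggers PETSC_ERR_ARG_WRONG instead.
 *
 * DM targetDM = NULL;
 * PetscInt dof = 0;
 * ierr = GetScatterTargetInfo(user, "Ucat", &targetDM, &dof); CHKERRQ(ierr);
 */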
1442
1443#undef __FUNCT__
1444#define __FUNCT__ "AccumulateParticleField"
1445
1446/**
1447 * @brief Accumulates a particle field (scalar or vector) into a target grid sum vector.
1448 *
1449 * This function iterates through local particles, identifies their cell using the
1450 * `"DMSwarm_CellID"` field, and adds the particle's field value (`particleFieldName`)
1451 * to the corresponding cell location in the `gridSumVec`. It handles both scalar
1452 * (DOF=1) and vector (DOF=3) fields automatically based on the DOF of `gridSumDM`.
1453 *
1454 * IMPORTANT: The caller must ensure `gridSumVec` is zeroed before calling this
1455 * function if a fresh sum calculation is desired.
1456 *
1457 * @param[in] swarm The DMSwarm containing particles.
1458 * @param[in] particleFieldName Name of the field on the particles (must match DOF of `gridSumDM`).
1459 * @param[in] gridSumDM The DMDA associated with `gridSumVec`. Its DOF determines
1460 * how many components are accumulated.
1461 * @param[in,out] gridSumVec The Vec (associated with `gridSumDM`) to accumulate sums into.
1462 *
1463 * @return PetscErrorCode 0 on success. Errors if fields don't exist, DMs are incompatible,
1464 * or memory access fails.
1465 */
1466PetscErrorCode AccumulateParticleField(DM swarm, const char *particleFieldName,
1467 DM gridSumDM, Vec gridSumVec)
1468{
1469 PetscErrorCode ierr;
1470 PetscInt dof; // DOF determined from gridSumDM
1471 PetscInt nlocal, p; // Local particle count and loop index
1472 const PetscReal *particle_arr = NULL; // Pointer to particle field data array (assuming Real)
1473 const PetscInt *cell_id_arr = NULL; // Pointer to particle cell ID array ("DMSwarm_CellID", Int)
1474 PetscScalar *sum_arr_ptr = NULL; // Pointer to grid sum vector data array (Scalar)
1475 PetscInt gxs, gys, gzs; // Global start indices of the local ghosted patch (not used for indexing below)
1476 PetscInt gxm, gym, gzm; // Dimensions of the local ghosted patch (including ghosts)
1477 PetscMPIInt rank; // MPI rank for logging
1478 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1479
1480 PetscFunctionBeginUser;
1481
1483
1484 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1485
1486 // --- Get DOF from the target grid DM ---
1487 ierr = DMDAGetInfo(gridSumDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1488 // Validate that the DOF is supported (currently 1 or 3)
1489 if (dof != 1 && dof != 3) {
1490 PetscSNPrintf(msg, sizeof(msg), "gridSumDM DOF must be 1 or 3, got %d.", dof);
1491 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg);
1492 }
1493
1494 // --- Get Particle Data Arrays ---
1495 // DMSwarmGetField will return an error if the field doesn't exist, caught by CHKERRQ.
1496 ierr = DMSwarmGetField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
1497 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);
1498
1499 // Get number of local particles *after* successfully getting field pointers
1500 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1501
1502 // --- Get Grid Sum Vector Array & Dimensions ---
1503 ierr = VecGetArray(gridSumVec, &sum_arr_ptr); CHKERRQ(ierr);
1504 // Get dimensions needed for calculating flat index within the local ghosted array
1505 ierr = DMDAGetGhostCorners(gridSumDM, &gxs, &gys, &gzs, &gxm, &gym, &gzm); CHKERRQ(ierr);
1506
1507 // --- Accumulate Locally ---
1508 LOG_ALLOW(LOCAL, LOG_DEBUG, "(Rank %d): Accumulating '%s' (DOF=%d) from %d particles using CellID field 'DMSwarm_CellID'.\n", rank, particleFieldName, dof, nlocal);
1509 // Loop over all particles owned by this process
1510 for (p = 0; p < nlocal; ++p) {
1511 // Extract local cell indices (relative to start of ghosted patch, [0..gxm-1], etc.)
1512 // Assumes DMSwarm_CellID stores (i, j, k) contiguously for each particle.
1513 PetscInt pidx_geom = cell_id_arr[p * 3 + 0]; // Local i-index
1514 PetscInt pidy_geom = cell_id_arr[p * 3 + 1]; // Local j-index
1515 PetscInt pidz_geom = cell_id_arr[p * 3 + 2]; // Local k-index
1516
1517 // Apply the index shift to convert from geometric to cell-centered field indexing
1518 PetscInt pidx_field = pidx_geom + 1;
1519 PetscInt pidy_field = pidy_geom + 1;
1520 PetscInt pidz_field = pidz_geom + 1;
1521
1522 // Bounds Check: Ensure the particle's cell index is within the valid local ghosted region
1523 if (pidx_field >= 0 && pidx_field < gxm && pidy_field >= 0 && pidy_field < gym && pidz_field >= 0 && pidz_field < gzm)
1524 {
1525 // Calculate the flat 1D index for this cell within the linear ghosted array
1526 // Uses PETSc's standard C-style row-major ordering
1527 // (k slowest, j middle, i fastest)
1528 PetscInt cell_flat_idx = (pidz_field * gym + pidy_field) * gxm + pidx_field;
1529
1530 // Calculate the base index for this particle's data in particle_arr
1531 PetscInt particle_base_idx = p * dof;
1532 // Calculate the base index for this cell's data in sum_arr_ptr
1533 PetscInt grid_base_idx = cell_flat_idx * dof;
1534
1535 // Add particle components to the grid sum vector components
1536 for (PetscInt c = 0; c < dof; ++c) {
1537 sum_arr_ptr[grid_base_idx + c] += particle_arr[particle_base_idx + c];
1538 }
1539 } else {
1540 // Log a warning if a particle's CellID is outside the expected local region.
1541 // This might indicate particles needing migration or boundary issues.
1542 LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank %d): Particle %d (field '%s') has out-of-bounds CellID (%d, %d, %d). Ghosted dims: %dx%dx%d. Skipping.\n",
1543 rank, p, particleFieldName, pidx_field, pidy_field, pidz_field, gxm, gym, gzm);
1544 }
1545 } // End of particle loop
1546
1547 // --- Restore Access to Arrays ---
1548 ierr = VecRestoreArray(gridSumVec, &sum_arr_ptr); CHKERRQ(ierr);
1549 ierr = DMSwarmRestoreField(swarm, particleFieldName, NULL, NULL, (void **)&particle_arr); CHKERRQ(ierr);
1550 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void **)&cell_id_arr); CHKERRQ(ierr);
1551
1552 // --- Assemble Global Sum Vector ---
1553 // Finalize the assembly state of the sum vector after the direct local additions above.
1554 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Assembling global sum vector for '%s'.\n", particleFieldName);
1555 ierr = VecAssemblyBegin(gridSumVec); CHKERRQ(ierr);
1556 ierr = VecAssemblyEnd(gridSumVec); CHKERRQ(ierr);
1557
1559
1560 PetscFunctionReturn(0);
1561}
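/*
 * Minimal sketch (hypothetical helper, not called by the solver): mirrors the flat-index
 * computation used in AccumulateParticleField() above for a cell (i,j,k) in a ghosted patch
 * of local dimensions gxm x gym x gzm, stored k-slowest / i-fastest. For example, gxm=4,
 * gym=3 and (i,j,k)=(2,1,1) give (1*3 + 1)*4 + 2 = 18.
 */
static inline PetscInt ExampleFlatCellIndex(PetscInt i, PetscInt j, PetscInt k,
                                            PetscInt gxm, PetscInt gym)
{
  return (k * gym + j) * gxm + i; /* same ordering as cell_flat_idx above */
}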
1562
1563
1564#undef __FUNCT__
1565#define __FUNCT__ "NormalizeGridVectorByCount"
1566
1567/**
1568 * @brief Normalizes a grid vector of sums by a grid vector of counts to produce an average.
1569 *
1570 * Calculates `avgVec[i] = sumVec[i] / countVec[i]` for each component of each
1571 * cell owned by the current process, provided `countVec[i] > 0`. Otherwise, sets `avgVec[i] = 0`.
1572 * Handles both scalar (DOF=1) and vector (DOF=3) data fields based on the DOF of `dataDM`.
1573 * Uses DMDA multi-dimensional array accessors (`DMDAVecGetArray...`) for safe and convenient indexing.
1574 *
1575 * @param[in] countDM The DMDA associated with `countVec` (must have DOF=1).
1576 * @param[in] countVec The Vec containing particle counts per cell (read-only).
1577 * @param[in] dataDM The DMDA associated with `sumVec` and `avgVec` (must have DOF=1 or DOF=3).
1578 * @param[in] sumVec The Vec containing the accumulated sums per cell (read-only).
1579 * @param[in,out] avgVec The Vec where the calculated averages will be stored (overwritten). Must be
1580 * associated with `dataDM`.
1581 *
1582 * @return PetscErrorCode 0 on success. Errors on incompatible DMs or memory access failure.
1583 */
1584PetscErrorCode NormalizeGridVectorByCount(DM countDM, Vec countVec,
1585 DM dataDM, Vec sumVec, Vec avgVec)
1586{
1587 PetscErrorCode ierr;
1588 PetscInt data_dof;
1589 PetscInt count_dof;
1590 PetscMPIInt rank;
1591 char msg[ERROR_MSG_BUFFER_SIZE];
1592
1593 // Pointers for DMDA array accessors - declare specific types
1594 PetscScalar ***count_arr_3d = NULL; // For DOF=1 count vector (3D DMDA)
1595 PetscScalar ***sum_arr_scalar = NULL; // For DOF=1 sum vector (3D DMDA)
1596 PetscScalar ***avg_arr_scalar = NULL; // For DOF=1 avg vector (3D DMDA)
1597 PetscScalar ****sum_arr_vector = NULL; // For DOF=3 sum vector (3D DMDA + DOF, indexed [k][j][i][c])
1598 PetscScalar ****avg_arr_vector = NULL; // For DOF=3 avg vector (3D DMDA + DOF, indexed [k][j][i][c])
1599
1600
1601 PetscFunctionBeginUser;
1602
1604
1605 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1606
1607 // --- Validation ---
1608 ierr = DMDAGetInfo(countDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &count_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1609 ierr = DMDAGetInfo(dataDM, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &data_dof, NULL, NULL, NULL, NULL, NULL); CHKERRQ(ierr);
1610 if (count_dof != 1) { PetscSNPrintf(msg, sizeof(msg), "countDM must have DOF=1, got %d.", count_dof); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); }
1611 if (data_dof != 1 && data_dof != 3) { PetscSNPrintf(msg, sizeof(msg), "dataDM DOF must be 1 or 3, got %d.", data_dof); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, msg); }
1612
1613 // --- Get Array Access using appropriate DMDA accessors ---
1614 ierr = DMDAVecGetArrayRead(countDM, countVec, &count_arr_3d); CHKERRQ(ierr);
1615
1616 if (data_dof == 1) {
1617 ierr = DMDAVecGetArrayRead(dataDM, sumVec, &sum_arr_scalar); CHKERRQ(ierr);
1618 ierr = DMDAVecGetArray(dataDM, avgVec, &avg_arr_scalar); CHKERRQ(ierr);
1619 } else { // data_dof == 3
1620 ierr = DMDAVecGetArrayDOFRead(dataDM, sumVec, &sum_arr_vector); CHKERRQ(ierr);
1621 ierr = DMDAVecGetArrayDOF(dataDM, avgVec, &avg_arr_vector); CHKERRQ(ierr);
1622 }
1623
1624 // Get the corners (global start indices) and dimensions of the *local owned* region
1625 PetscInt xs, ys, zs, xm, ym, zm;
1626 ierr = DMDAGetCorners(countDM, &xs, &ys, &zs, &xm, &ym, &zm); CHKERRQ(ierr);
1627
1628 // --- Normalize Over Owned Cells ---
1629 LOG_ALLOW(LOCAL, LOG_DEBUG, "(Rank %d): Normalizing DOF=%d data over owned range [%d:%d, %d:%d, %d:%d].\n",
1630 rank, data_dof, xs, xs+xm, ys, ys+ym, zs, zs+zm);
1631
1632 // Loop using GLOBAL indices (i, j, k) over the range owned by this process
1633 for (PetscInt k = zs; k < zs + zm; ++k) {
1634 for (PetscInt j = ys; j < ys + ym; ++j) {
1635 for (PetscInt i = xs; i < xs + xm; ++i) {
1636
1637 // Access the count using standard 3D indexing
1638 PetscScalar count = count_arr_3d[k][j][i];
1639
1640 if (PetscRealPart(count) > 0.5) { // Use tolerance for float comparison
1641 if (data_dof == 1) {
1642 // Access scalar sum/avg using standard 3D indexing
1643 avg_arr_scalar[k][j][i] = sum_arr_scalar[k][j][i] / count;
1644 } else { // data_dof == 3
1645 // Access vector components using DOF indexing on the last dimension
1646 for (PetscInt c = 0; c < data_dof; ++c) {
1647 avg_arr_vector[k][j][i][c] = sum_arr_vector[k][j][i][c] / count;
1648 }
1649 }
1650 } else { // count is zero or negative
1651 // Set average to zero
1652 if (data_dof == 1) {
1653 avg_arr_scalar[k][j][i] = 0.0;
1654 } else { // data_dof == 3
1655 for (PetscInt c = 0; c < data_dof; ++c) {
1656 avg_arr_vector[k][j][i][c] = 0.0;
1657 }
1658 }
1659 } // end if count > 0.5
1660 } // end i loop
1661 } // end j loop
1662 } // end k loop
1663
1664 // --- Restore Arrays using appropriate functions ---
1665 ierr = DMDAVecRestoreArrayRead(countDM, countVec, &count_arr_3d); CHKERRQ(ierr);
1666 if (data_dof == 1) {
1667 ierr = DMDAVecRestoreArrayRead(dataDM, sumVec, &sum_arr_scalar); CHKERRQ(ierr);
1668 ierr = DMDAVecRestoreArray(dataDM, avgVec, &avg_arr_scalar); CHKERRQ(ierr);
1669 } else { // data_dof == 3
1670 ierr = DMDAVecRestoreArrayDOFRead(dataDM, sumVec, &sum_arr_vector); CHKERRQ(ierr);
1671 ierr = DMDAVecRestoreArrayDOF(dataDM, avgVec, &avg_arr_vector); CHKERRQ(ierr);
1672 }
1673
1674 // --- Assemble Final Average Vector ---
1675 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Assembling final average vector (DOF=%d).\n", data_dof);
1676 ierr = VecAssemblyBegin(avgVec); CHKERRQ(ierr);
1677 ierr = VecAssemblyEnd(avgVec); CHKERRQ(ierr);
1678
1679
1681
1682 PetscFunctionReturn(0);
1683}
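/*
 * Minimal sketch (hypothetical helper, not called by the solver): the per-cell rule applied
 * by NormalizeGridVectorByCount() above. A cell with at least one particle receives sum/count;
 * an empty cell is set to zero instead of dividing by zero.
 */
static inline PetscScalar ExampleCellAverage(PetscScalar sum, PetscScalar count)
{
  return (PetscRealPart(count) > 0.5) ? sum / count : (PetscScalar)0.0;
}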
1684
1685/** @} */ // End of scatter_module_internal group
1686
1687//-----------------------------------------------------------------------------
1688// User-Facing API
1689//-----------------------------------------------------------------------------
1690//-----------------------------------------------------------------------------
1691// Internal Scatter Orchestration Helper
1692//-----------------------------------------------------------------------------
1693
1694#undef __FUNCT__
1695#define __FUNCT__ "ScatterParticleFieldToEulerField_Internal"
1696/**
1697 * @brief Internal helper function to orchestrate the scatter operation (accumulate + normalize).
1698 * @ingroup scatter_module_internal
1699 *
1700 * Manages the temporary sum vector and calls the accumulation and normalization
1701 * functions. Assumes caller determined target DM and DOF. Checks for particle field existence.
1702 * NOTE: If DMSwarmGetField fails, the subsequent SETERRQ will overwrite the original error.
1703 *
1704 * @param[in] user Pointer to UserCtx containing swarm, ParticleCount, da.
1705 * @param[in] particleFieldName Name of the field in the DMSwarm.
1706 * @param[in] targetDM The DMDA where the final average and intermediate sum reside.
1707 * @param[in] expected_dof The expected DOF (1 or 3) for the targetDM and field.
1708 * @param[in,out] eulerFieldAverageVec The pre-created Vec associated with targetDM to store the result.
1709 *
1710 * @return PetscErrorCode 0 on success. Errors if particle field doesn't exist or
1711 * underlying helpers fail.
1712 */
1713static PetscErrorCode ScatterParticleFieldToEulerField_Internal(UserCtx *user,
1714 const char *particleFieldName,
1715 DM targetDM,
1716 PetscInt expected_dof,
1717 Vec eulerFieldAverageVec)
1718{
1719 PetscErrorCode ierr;
1720 Vec sumVec = NULL;
1721 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1722
1723 PetscFunctionBeginUser;
1724
1726
1727 if (!user || !user->swarm || !user->ParticleCount || !particleFieldName || !targetDM || !eulerFieldAverageVec)
1728 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "NULL input provided to ScatterParticleFieldToEulerField_Internal.");
1729
1730 // --- Check if Particle Field Exists ---
1731 // Attempt a GetField call; if it fails, the field doesn't exist.
1732 // We let CHKERRQ handle the error directly if the field doesn't exist OR
1733 // we catch it specifically to provide a more tailored message.
1734
1735 /*
1736 LOG_ALLOW(GLOBAL,LOG_DEBUG,"Field %s being accessed to check existence \n",particleFieldName);
1737 ierr = DMSwarmGetField(user->swarm, particleFieldName, NULL, NULL, NULL);
1738 if (ierr) { // If GetField returns an error
1739 PetscSNPrintf(msg, sizeof(msg), "Particle field '%s' not found in DMSwarm for scattering.", particleFieldName);
1740 // Directly set the error, overwriting the one from GetField
1741 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, msg);
1742 }
1743 ierr = DMSwarmRestoreField(user->swarm, particleFieldName, NULL, NULL, NULL);
1744 */
1745
1746 // --- Setup Temporary Sum Vector ---
1747 ierr = VecDuplicate(eulerFieldAverageVec, &sumVec); CHKERRQ(ierr);
1748 ierr = VecSet(sumVec, 0.0); CHKERRQ(ierr);
1749 ierr = PetscSNPrintf(msg, sizeof(msg), "TempSum_%s", particleFieldName); CHKERRQ(ierr);
1750 ierr = PetscObjectSetName((PetscObject)sumVec, msg); CHKERRQ(ierr);
1751
1752 // --- Accumulate ---
1753 // If the particle field does not exist in the swarm, the DMSwarmGetField call inside
1754 // AccumulateParticleField will fail and the error is caught here by CHKERRQ.
1757 ierr = AccumulateParticleField(user->swarm, particleFieldName, targetDM, sumVec); CHKERRQ(ierr);
1758
1759 // Calculate the number of particles per cell.
1760 ierr = CalculateParticleCountPerCell(user); CHKERRQ(ierr);
1761 // --- Normalize ---
1762 ierr = NormalizeGridVectorByCount(user->da, user->ParticleCount, targetDM, sumVec, eulerFieldAverageVec); CHKERRQ(ierr);
1763
1764 // --- Cleanup ---
1765 ierr = VecDestroy(&sumVec); CHKERRQ(ierr);
1766
1767
1769
1770 PetscFunctionReturn(0);
1771}
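/*
 * Orchestration summary (restating the steps above for reference):
 *   sumVec = duplicate(eulerFieldAverageVec), zeroed
 *   sumVec += particle values, binned by "DMSwarm_CellID"        (AccumulateParticleField)
 *   ParticleCount = particles per cell                           (CalculateParticleCountPerCell)
 *   eulerFieldAverageVec = sumVec / ParticleCount, 0 where empty (NormalizeGridVectorByCount)
 */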
1772
1773#undef __FUNCT__
1774#define __FUNCT__ "ScatterParticleFieldToEulerField"
1775
1776/**
1777 * @brief Scatters a particle field (scalar or vector) to the corresponding Eulerian field average.
1778 * @ingroup scatter_module
1779 *
1780 * This is the primary user-facing function for scattering a single field.
1781 * It determines the target Eulerian DM (`user->da` or `user->fda`) based on the
1782 * `particleFieldName`, validates that the provided `eulerFieldAverageVec` is compatible
1783 * with that target DM, and then orchestrates the scatter operation by calling
1784 * an internal helper function. The final averaged result is stored IN-PLACE in
1785 * the `eulerFieldAverageVec`.
1786 *
1787 * @param[in] user Pointer to UserCtx containing `da`, `fda`, `swarm`, `ParticleCount`.
1788 * @param[in] particleFieldName Name of the field in the DMSwarm (e.g., "Psi", "Ucat").
1789 * @param[in,out] eulerFieldAverageVec Pre-created Vec associated with the correct target DM
1790 * (implicitly `da` or `fda`). Result stored here.
1791 * The result overwrites the previous contents; zeroing it
1792 * beforehand (e.g., with `VecSet`) is optional. The wrapper
1793 * `ScatterAllParticleFieldsToEulerFields` calls this per field.
1794 *
1795 * @return PetscErrorCode 0 on success. Errors on NULL input, unrecognized field name,
1796 * incompatible target vector, or if the particle field doesn't exist.
1797 */
1798PetscErrorCode ScatterParticleFieldToEulerField(UserCtx *user,
1799 const char *particleFieldName,
1800 Vec eulerFieldAverageVec)
1801{
1802 PetscErrorCode ierr;
1803 DM targetDM = NULL; // Will point to user->da or user->fda
1804 PetscInt expected_dof = 0; // Will be 1 or 3
1805 char msg[ERROR_MSG_BUFFER_SIZE]; // Buffer for formatted error messages
1806
1807 PetscFunctionBeginUser;
1808
1810
1811 // --- Essential Input Validation ---
1812 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL.");
1813 if (!user->swarm) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->swarm is NULL.");
1814 if (!user->ParticleCount) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx->ParticleCount is NULL.");
1815 if (!eulerFieldAverageVec) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output eulerFieldAverageVec is NULL.");
1816 // particleFieldName validity checked within GetScatterTargetInfo
1817
1818 // --- Determine Target DM & DOF using the helper function ---
1819 ierr = GetScatterTargetInfo(user, particleFieldName, &targetDM, &expected_dof); CHKERRQ(ierr);
1820 // If GetScatterTargetInfo failed (e.g., unknown name), CHKERRQ would have returned.
1821
1822 // --- Validate the provided Target Vec's Compatibility ---
1823 DM vec_dm;
1824 PetscInt vec_dof;
1825 // Check that the provided average vector has a DM associated with it
1826 ierr = VecGetDM(eulerFieldAverageVec, &vec_dm); CHKERRQ(ierr);
1827 if (!vec_dm) {
1828 PetscSNPrintf(msg, sizeof(msg), "Provided eulerFieldAverageVec for field '%s' does not have an associated DM.", particleFieldName);
1829 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, msg);
1830 }
1831 // Get the block size (DOF) of the provided vector
1832 ierr = VecGetBlockSize(eulerFieldAverageVec, &vec_dof); CHKERRQ(ierr);
1833 // Compare the vector's associated DM with the one determined by the field name
1834 if (vec_dm != targetDM) {
1835 const char *target_dm_name = "targetDM", *vec_dm_name = "vec_dm";
1836 // Get actual names if possible for a more informative error message
1837 PetscObjectGetName((PetscObject)targetDM, &target_dm_name);
1838 PetscObjectGetName((PetscObject)vec_dm, &vec_dm_name);
1839 PetscSNPrintf(msg, sizeof(msg), "Provided eulerFieldAverageVec associated with DM '%s', but field '%s' requires scatter to DM '%s'.", vec_dm_name, particleFieldName, target_dm_name);
1840 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, msg);
1841 }
1842 // Compare the vector's DOF with the one expected for the field name
1843 if (vec_dof != expected_dof) {
1844 PetscSNPrintf(msg, sizeof(msg), "Field '%s' requires DOF %d, but provided eulerFieldAverageVec has DOF %d.", particleFieldName, expected_dof, vec_dof);
1845 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, msg);
1846 }
1847
1848 // --- Perform Scatter using Internal Helper ---
1849 // Log intent before calling the core logic
1850 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering field '%s' (DOF=%d).\n", particleFieldName, expected_dof);
1851 ierr = ScatterParticleFieldToEulerField_Internal(user, // Pass user context
1852 particleFieldName, // Name of particle field
1853 targetDM, // Determined target DM (da or fda)
1854 expected_dof, // Determined DOF (1 or 3)
1855 eulerFieldAverageVec); // The output vector
1856 CHKERRQ(ierr); // Handle potential errors from the internal function
1857
1858 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully scattered field '%s'.\n", particleFieldName);
1859
1861
1862 PetscFunctionReturn(0);
1863}
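/*
 * Usage sketch (hedged; assumes the swarm carries a DOF=3 field named "Ucat" and that
 * user->Ucat is a global Vec created from user->fda):
 *
 * ierr = VecSet(user->Ucat, 0.0); CHKERRQ(ierr); // optional; the result is overwritten anyway
 * ierr = ScatterParticleFieldToEulerField(user, "Ucat", user->Ucat); CHKERRQ(ierr);
 */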
1864
1865#undef __FUNCT__
1866#define __FUNCT__ "ScatterAllParticleFieldsToEulerFields"
1867/**
1868 * @brief Scatters a predefined set of particle fields to their corresponding Eulerian fields.
1869 * @ingroup scatter_module
1870 *
1871 * This convenience function calls the unified `ScatterParticleFieldToEulerField`
1872 * for a standard set of fields (currently just "Psi", others commented out). It assumes
1873 * the target Eulerian Vec objects (e.g., `user->Psi`, `user->Ucat`) exist in the
1874 * UserCtx structure and are correctly associated with their respective DMs (`user->da`
1875 * or `user->fda`).
1876 *
1877 * **IMPORTANT:** The target Vecs are overwritten in place. Ensure `user->ParticleCount`
1878 * has been computed accurately *before* calling this function, reflecting the current
1879 * particle distribution.
1880 *
1881 * @param[in,out] user Pointer to the UserCtx structure containing all required DMs,
1882 * Vecs (`ParticleCount`, target Eulerian fields like `P`, `Ucat`), and `swarm`.
1883 *
1884 * @return PetscErrorCode 0 on success. Errors if prerequisites (like ParticleCount)
1885 * are missing or if underlying scatter calls fail (e.g., particle field missing).
1886 */
1887PetscErrorCode ScatterAllParticleFieldsToEulerFields(UserCtx *user)
1888{
1889 PetscErrorCode ierr;
1890 PetscFunctionBeginUser;
1892
1893 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting scattering of specified particle fields to Eulerian grids.\n");
1894
1895 // --- Pre-computation Check: Ensure Particle Counts are Ready ---
1896 if (!user->ParticleCount) {
1897 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "UserCtx->ParticleCount is NULL. Compute counts before calling ScatterAllParticleFieldsToEulerFields.");
1898 }
1899
1900 // --- Scatter Particle Field "Psi" -> Eulerian Field user->Psi (on da) ---
1901 // Check if the target Eulerian vector 'user->Psi' exists.
1902 if (user->Psi) {
1903
1904 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Psi' to user->Psi.\n");
1905 // Zero the target vector before accumulating the new average for this step/call.
1906
1907 // Debug Verification ------------------------------------------------
1908 Vec swarm_Psi;
1909 PetscReal Avg_Psi,Avg_swarm_Psi;
1910
1911 ierr = VecMean(user->Psi, &Avg_Psi); CHKERRQ(ierr);
1912 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Scalar(Psi) before scatter: %.4f.\n",Avg_Psi);
1913
1914 ierr = DMSwarmCreateGlobalVectorFromField(user->swarm, "Psi", &swarm_Psi); CHKERRQ(ierr);
1915 ierr = VecMean(swarm_Psi, &Avg_swarm_Psi); CHKERRQ(ierr);
1916
1917 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Particle Scalar(Psi): %.4f.\n",Avg_swarm_Psi);
1918
1919 ierr = DMSwarmDestroyGlobalVectorFromField(user->swarm, "Psi", &swarm_Psi); CHKERRQ(ierr);
1920 // Debug----------------------------------------------------------------
1921
1922 //ierr = VecSet(user->Psi, 0.0); CHKERRQ(ierr);
1923 // Call the unified scatter function. It will handle DM determination and validation.
1924 // It will also error out if the *particle* field "Psi" doesn't exist in the swarm.
1925 ierr = ScatterParticleFieldToEulerField(user, "Psi", user->Psi); CHKERRQ(ierr);
1926 ierr = VecMean(user->Psi, &Avg_Psi); CHKERRQ(ierr);
1927
1928 LOG_ALLOW(GLOBAL,LOG_DEBUG," Average of Scalar(Psi) after scatter: %.4f.\n",Avg_Psi);
1929 } else {
1930 // Only log a warning if the target Eulerian field is missing in the context.
1931 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Psi': UserCtx->Psi is NULL.\n");
1932 }
1933
1934 // --- (Commented Out) Scatter Other Fields ---
1935 // To enable scattering for other fields, uncomment the relevant block
1936 // AND ensure:
1937 // 1. The corresponding particle field (e.g., "Nvert", "Ucat") exists in user->swarm.
1938 // 2. The corresponding target Eulerian Vec (e.g., user->Nvert, user->Ucat) exists in the UserCtx.
1939 // 3. The target Eulerian Vec is associated with the correct DM (da for Nvert, fda for Ucat).
1940
1941 /*
1942 // Scatter Particle Field "Nvert" -> Eulerian Field user->Nvert (on da)
1943 if (user->Nvert) {
1944 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Nvert' to user->Nvert.\n");
1945 ierr = VecSet(user->Nvert, 0.0); CHKERRQ(ierr);
1946 ierr = ScatterParticleFieldToEulerField(user, "Nvert", user->Nvert); CHKERRQ(ierr);
1947 } else {
1948 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Nvert': UserCtx->Nvert is NULL.\n");
1949 }
1950 */
1951
1952 /*
1953 // Scatter Particle Field "Ucat" -> Eulerian Field user->Ucat (on fda)
1954 if (user->Ucat) {
1955 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Ucat' to user->Ucat.\n");
1956 ierr = VecSet(user->Ucat, 0.0); CHKERRQ(ierr);
1957 ierr = ScatterParticleFieldToEulerField(user, "Ucat", user->Ucat); CHKERRQ(ierr);
1958 } else {
1959 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Ucat': UserCtx->Ucat is NULL.\n");
1960 }
1961 */
1962
1963 /*
1964 // Scatter Particle Field "Ucont" -> Eulerian Field user->Ucont (on fda)
1965 if (user->Ucont) {
1966 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Scattering particle field 'Ucont' to user->Ucont.\n");
1967 ierr = VecSet(user->Ucont, 0.0); CHKERRQ(ierr);
1968 ierr = ScatterParticleFieldToEulerField(user, "Ucont", user->Ucont); CHKERRQ(ierr);
1969 } else {
1970 LOG_ALLOW(GLOBAL, LOG_WARNING, "Skipping scatter for 'Ucont': UserCtx->Ucont is NULL.\n");
1971 }
1972 */
1973
1974 // Add more fields as needed following the pattern above...
1975
1976 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished scattering specified particle fields.\n");
1978 PetscFunctionReturn(0);
1979}
1980
1981/** @} */ // End of scatter_module group
1982
1983
1984#undef __FUNCT__
1985#define __FUNCT__ "InterpolateCornerToFaceCenter_Scalar"
1986
1987/**
1988 * @brief Interpolates a scalar field from corner nodes to all face centers.
1989 *
1990 * This routine computes the average of the four corner-node values
1991 * defining each face of a hexahedral cell:
1992 * - X-faces (perpendicular to X): face between (i-1,i) in X-dir
1993 * - Y-faces (perpendicular to Y): face between (j-1,j) in Y-dir
1994 * - Z-faces (perpendicular to Z): face between (k-1,k) in Z-dir
1995 *
1996 * @param[in] corner_arr Ghosted node-centered array (global indexing) from user->fda.
1997 * @param[out] faceX_arr Local array for X-faces sized [zm][ym][xm+1].
1998 * @param[out] faceY_arr Local array for Y-faces sized [zm][ym+1][xm].
1999 * @param[out] faceZ_arr Local array for Z-faces sized [zm+1][ym][xm].
2000 * @param[in] user User context containing the node-based DMDA (`fda`) used to define owned cells.
2001 *
2002 * @return PetscErrorCode 0 on success, non-zero on failure.
2003 */
2004PetscErrorCode InterpolateCornerToFaceCenter_Scalar(
2005 PetscReal ***corner_arr,
2006 PetscReal ***faceX_arr,
2007 PetscReal ***faceY_arr,
2008 PetscReal ***faceZ_arr,
2009 UserCtx *user)
2010{
2011 PetscErrorCode ierr;
2012 DMDALocalInfo info;
2013
2014 PetscFunctionBeginUser;
2015
2017
2018 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2019
2020 // Determine owned-cell ranges based on corner-node ownership
2021 PetscInt xs, xm, ys, ym, zs, zm;
2022 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2023 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2024 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2025
2026 // Global exclusive end indices for cells
2027 PetscInt xe = xs + xm;
2028 PetscInt ye = ys + ym;
2029 PetscInt ze = zs + zm;
2030
2031 // --- X‐faces: loops k=zs..ze-1, j=ys..ye-1, i=xs..xe (xm+1 faces per row) ---
2032 for (PetscInt k = zs; k < ze; ++k) {
2033 PetscInt k_loc = k - zs;
2034 for (PetscInt j = ys; j < ye; ++j) {
2035 PetscInt j_loc = j - ys;
2036 for (PetscInt i = xs; i <= xe; ++i) {
2037 PetscInt i_loc = i - xs; // 0..xm
2038 // Average the four corners of the Y-Z face at X = i
2039 PetscReal sum = corner_arr[k ][j ][i]
2040 + corner_arr[k+1][j ][i]
2041 + corner_arr[k ][j+1][i]
2042 + corner_arr[k+1][j+1][i];
2043 faceX_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2044 }
2045 }
2046 }
2047
2048 // --- Y‐faces: loops k=zs..ze-1, j=ys..ye (ym+1 faces), i=xs..xe-1 ---
2049 for (PetscInt k = zs; k < ze; ++k) {
2050 PetscInt k_loc = k - zs;
2051 for (PetscInt j = ys; j <= ye; ++j) {
2052 PetscInt j_loc = j - ys; // 0..ym
2053 for (PetscInt i = xs; i < xe; ++i) {
2054 PetscInt i_loc = i - xs;
2055 // Average the four corners of the X-Z face at Y = j
2056 PetscReal sum = corner_arr[k ][j][i ]
2057 + corner_arr[k+1][j][i ]
2058 + corner_arr[k ][j][i+1]
2059 + corner_arr[k+1][j][i+1];
2060 faceY_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2061 }
2062 }
2063 }
2064
2065 // --- Z‐faces: loops k=zs..ze (zm+1), j=ys..ye-1, i=xs..xe-1 ---
2066 for (PetscInt k = zs; k <= ze; ++k) {
2067 PetscInt k_loc = k - zs;
2068 for (PetscInt j = ys; j < ye; ++j) {
2069 PetscInt j_loc = j - ys;
2070 for (PetscInt i = xs; i < xe; ++i) {
2071 PetscInt i_loc = i - xs;
2072 // Average the four corners of the X-Y face at Z = k
2073 PetscReal sum = corner_arr[k][j ][i ]
2074 + corner_arr[k][j ][i+1]
2075 + corner_arr[k][j+1][i ]
2076 + corner_arr[k][j+1][i+1];
2077 faceZ_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2078 }
2079 }
2080 }
2081
2083
2084 PetscFunctionReturn(0);
2085}
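/*
 * Sizing note for callers (a minimal sketch; the helper below is hypothetical and only
 * restates the documented output-array shapes): a block of xm x ym x zm owned cells has
 * (xm+1)*ym*zm X-faces, xm*(ym+1)*zm Y-faces and xm*ym*(zm+1) Z-faces, e.g. 80 of each
 * for xm = ym = zm = 4.
 */
static inline void ExampleFaceCounts(PetscInt xm, PetscInt ym, PetscInt zm,
                                     PetscInt *nFaceX, PetscInt *nFaceY, PetscInt *nFaceZ)
{
  *nFaceX = (xm + 1) * ym * zm; /* X-faces: faceX_arr[zm][ym][xm+1] */
  *nFaceY = xm * (ym + 1) * zm; /* Y-faces: faceY_arr[zm][ym+1][xm] */
  *nFaceZ = xm * ym * (zm + 1); /* Z-faces: faceZ_arr[zm+1][ym][xm] */
}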
2086
2087#undef __FUNCT__
2088#define __FUNCT__ "InterpolateCornerToFaceCenter_Vector"
2089
2090/**
2091 * @brief Interpolates a vector field from corner nodes to all face centers.
2092 *
2093 * Identical to the scalar version, except it averages each component of the
2094 * Cmpnts struct at the four corner-nodes per face.
2095 *
2096 * @param[in] corner_arr Ghosted 3-component array (global node indices).
2097 * @param[out] faceX_arr Local array of Cmpnts for X-faces sized [zm][ym][xm+1].
2098 * @param[out] faceY_arr Local array of Cmpnts for Y-faces sized [zm][ym+1][xm].
2099 * @param[out] faceZ_arr Local array of Cmpnts for Z-faces sized [zm+1][ym][xm].
2100 * @param[in] user User context containing DMDA 'fda'.
2101 *
2102 * @return PetscErrorCode 0 on success.
2103 */
2104PetscErrorCode InterpolateCornerToFaceCenter_Vector(
2105 Cmpnts ***corner_arr,
2106 Cmpnts ***faceX_arr,
2107 Cmpnts ***faceY_arr,
2108 Cmpnts ***faceZ_arr,
2109 UserCtx *user)
2110{
2111 PetscErrorCode ierr;
2112 DMDALocalInfo info;
2113 PetscMPIInt rank;
2114
2115 PetscFunctionBeginUser;
2116
2118
2119 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2121 "Rank %d starting InterpolateFieldFromCornerToFaceCenter_Vector.\n", rank);
2122
2123 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2124
2125 PetscInt xs, xm, ys, ym, zs, zm;
2126 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2127 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2128 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2129
2130 PetscInt xe = xs + xm;
2131 PetscInt ye = ys + ym;
2132 PetscInt ze = zs + zm;
2133
2134 // X-faces
2135 for (PetscInt k = zs; k < ze; ++k) {
2136 PetscInt k_loc = k - zs;
2137 for (PetscInt j = ys; j < ye; ++j) {
2138 PetscInt j_loc = j - ys;
2139 for (PetscInt i = xs; i <= xe; ++i) {
2140 PetscInt i_loc = i - xs;
2141 Cmpnts sum = {0,0,0};
2142 sum.x = corner_arr[k ][j ][i].x + corner_arr[k+1][j ][i].x
2143 + corner_arr[k ][j+1][i].x + corner_arr[k+1][j+1][i].x;
2144 sum.y = corner_arr[k ][j ][i].y + corner_arr[k+1][j ][i].y
2145 + corner_arr[k ][j+1][i].y + corner_arr[k+1][j+1][i].y;
2146 sum.z = corner_arr[k ][j ][i].z + corner_arr[k+1][j ][i].z
2147 + corner_arr[k ][j+1][i].z + corner_arr[k+1][j+1][i].z;
2148 faceX_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2149 faceX_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2150 faceX_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2151 }
2152 }
2153 }
2154
2156 "Rank %d x-face Interpolation complete.\n", rank);
2157
2158 // Y-faces
2159 for (PetscInt k = zs; k < ze; ++k) {
2160 PetscInt k_loc = k - zs;
2161 for (PetscInt j = ys; j <= ye; ++j) {
2162 PetscInt j_loc = j - ys;
2163 for (PetscInt i = xs; i < xe; ++i) {
2164 PetscInt i_loc = i - xs;
2165 Cmpnts sum = {0,0,0};
2166 sum.x = corner_arr[k ][j][i ].x + corner_arr[k+1][j][i ].x
2167 + corner_arr[k ][j][i+1].x + corner_arr[k+1][j][i+1].x;
2168 sum.y = corner_arr[k ][j][i ].y + corner_arr[k+1][j][i ].y
2169 + corner_arr[k ][j][i+1].y + corner_arr[k+1][j][i+1].y;
2170 sum.z = corner_arr[k ][j][i ].z + corner_arr[k+1][j][i ].z
2171 + corner_arr[k ][j][i+1].z + corner_arr[k+1][j][i+1].z;
2172 faceY_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2173 faceY_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2174 faceY_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2175 }
2176 }
2177 }
2178
2180 "Rank %d y-face Interpolation complete.\n", rank);
2181
2182 // Z-faces
2183 for (PetscInt k = zs; k <= ze; ++k) {
2184 PetscInt k_loc = k - zs;
2185 for (PetscInt j = ys; j < ye; ++j) {
2186 PetscInt j_loc = j - ys;
2187 for (PetscInt i = xs; i < xe; ++i) {
2188 PetscInt i_loc = i - xs;
2189 Cmpnts sum = {0,0,0};
2190 sum.x = corner_arr[k][j ][i ].x + corner_arr[k][j ][i+1].x
2191 + corner_arr[k][j+1][i ].x + corner_arr[k][j+1][i+1].x;
2192 sum.y = corner_arr[k][j ][i ].y + corner_arr[k][j ][i+1].y
2193 + corner_arr[k][j+1][i ].y + corner_arr[k][j+1][i+1].y;
2194 sum.z = corner_arr[k][j ][i ].z + corner_arr[k][j ][i+1].z
2195 + corner_arr[k][j+1][i ].z + corner_arr[k][j+1][i+1].z;
2196 faceZ_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2197 faceZ_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2198 faceZ_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2199 }
2200 }
2201 }
2202
2204 "Rank %d z-face Interpolation complete.\n", rank);
2205
2207 PetscFunctionReturn(0);
2208}