PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
interpolation.c File Reference
#include "interpolation.h"

Go to the source code of this file.

Macros

#define NUM_WEIGHTS   8
 
#define ERROR_MSG_BUFFER_SIZE   256
 
#define __FUNCT__   "InterpolateAllFieldsToSwarm"
 Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles, converting the cell-center data to corner data first, then looping over particles.
 
#define __FUNCT__   "ScatterAllParticleFieldsToEulerFields"
 Scatters a predefined set of particle fields to their corresponding Eulerian fields.
 

Functions

PetscErrorCode InterpolateFieldFromCornerToCenter_Vector (Cmpnts ***field_arr, Cmpnts ***centfield_arr, UserCtx *user)
 Interpolates a vector field from corner nodes to cell centers.
 
PetscErrorCode InterpolateFieldFromCornerToCenter_Scalar (PetscReal ***field_arr, PetscReal ***centfield_arr, UserCtx *user)
 Interpolates a scalar field from corner nodes to cell centers.
 
PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar (PetscReal ***centfield_arr, PetscReal ***field_arr, UserCtx *user)
 Interpolates a scalar field from cell centers to corner nodes.
 
PetscErrorCode InterpolateFieldFromCenterToCorner_Vector (Cmpnts ***centfield_arr, Cmpnts ***field_arr, UserCtx *user)
 Interpolates a vector field from cell centers to corner nodes.
 
PetscErrorCode InterpolateFieldFromCenterToCorner_Vector_Petsc (Cmpnts ***centfield_arr, Cmpnts ***corner_arr, UserCtx *user)
 Interpolates a vector field from cell centers to corner nodes.
 
PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar_Petsc (PetscReal ***centfield_arr, PetscReal ***corner_arr, UserCtx *user)
 Interpolates a scalar field from cell centers to corner nodes.
 
PetscErrorCode PieceWiseLinearInterpolation_Scalar (const char *fieldName, PetscReal ***fieldScal, PetscInt iCell, PetscInt jCell, PetscInt kCell, PetscReal *val)
 Retrieves the scalar value at the cell (iCell, jCell, kCell).
 
PetscErrorCode PieceWiseLinearInterpolation_Vector (const char *fieldName, Cmpnts ***fieldVec, PetscInt iCell, PetscInt jCell, PetscInt kCell, Cmpnts *vec)
 Retrieves the vector (Cmpnts) at the cell (iCell, jCell, kCell).
 
static void ComputeTrilinearWeights (PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w)
 Computes the trilinear interpolation weights from the interpolation coefficients.
 
PetscErrorCode TrilinearInterpolation_Scalar (const char *fieldName, PetscReal ***fieldScal, PetscInt i, PetscInt j, PetscInt k, PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *val)
 Computes the trilinearly interpolated scalar at a given point.
 
PetscErrorCode TrilinearInterpolation_Vector (const char *fieldName, Cmpnts ***fieldVec, PetscInt i, PetscInt j, PetscInt k, PetscReal a1, PetscReal a2, PetscReal a3, Cmpnts *vec)
 Computes the trilinearly interpolated vector (e.g., velocity) at a given point.
 
static PetscErrorCode InterpolateEulerFieldToSwarmForParticle (const char *fieldName, void *fieldPtr, PetscInt iCell, PetscInt jCell, PetscInt kCell, PetscReal a1, PetscReal a2, PetscReal a3, void *swarmOut, PetscInt p, PetscInt blockSize)
 Interpolates a single field (scalar or vector) for one particle, storing the result in a swarm array.
 
PetscErrorCode InterpolateEulerFieldToSwarm (UserCtx *user, Vec fieldLocal_cellCentered, const char *fieldName, const char *swarmOutFieldName)
 Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles, using a robust, PETSc-idiomatic two-stage process.
 
PetscErrorCode InterpolateAllFieldsToSwarm (UserCtx *user)
 Interpolates all relevant fields from the DMDA to the DMSwarm.
 
PetscErrorCode GetScatterTargetInfo (UserCtx *user, const char *particleFieldName, DM *targetDM, PetscInt *expected_dof)
 Determines the target Eulerian DM and expected DOF for scattering a given particle field.
 
PetscErrorCode AccumulateParticleField (DM swarm, const char *particleFieldName, DM gridSumDM, Vec gridSumVec)
 Accumulates a particle field (scalar or vector) into a target grid sum vector.
 
PetscErrorCode NormalizeGridVectorByCount (DM countDM, Vec countVec, DM dataDM, Vec sumVec, Vec avgVec)
 Normalizes a grid vector of sums by a grid vector of counts to produce an average.
 
static PetscErrorCode ScatterParticleFieldToEulerField_Internal (UserCtx *user, const char *particleFieldName, DM targetDM, PetscInt expected_dof, Vec eulerFieldAverageVec)
 Internal helper function to orchestrate the scatter operation (accumulate + normalize).
 
PetscErrorCode ScatterParticleFieldToEulerField (UserCtx *user, const char *particleFieldName, Vec eulerFieldAverageVec)
 Scatters a particle field (scalar or vector) to the corresponding Eulerian field average.
 
PetscErrorCode ScatterAllParticleFieldsToEulerFields (UserCtx *user)
 Scatters a predefined set of particle fields to their corresponding Eulerian fields.
 
PetscErrorCode InterpolateCornerToFaceCenter_Scalar (PetscReal ***corner_arr, PetscReal ***faceX_arr, PetscReal ***faceY_arr, PetscReal ***faceZ_arr, UserCtx *user)
 Interpolates a scalar field from corner nodes to all face centers.
 
PetscErrorCode InterpolateCornerToFaceCenter_Vector (Cmpnts ***corner_arr, Cmpnts ***faceX_arr, Cmpnts ***faceY_arr, Cmpnts ***faceZ_arr, UserCtx *user)
 Interpolates a vector field from corner nodes to all face centers.
 

Macro Definition Documentation

◆ NUM_WEIGHTS

#define NUM_WEIGHTS   8

Definition at line 13 of file interpolation.c.

◆ ERROR_MSG_BUFFER_SIZE

#define ERROR_MSG_BUFFER_SIZE   256

Definition at line 16 of file interpolation.c.

Function Documentation

◆ InterpolateFieldFromCornerToCenter_Vector()

PetscErrorCode InterpolateFieldFromCornerToCenter_Vector ( Cmpnts ***  field_arr,
Cmpnts ***  centfield_arr,
UserCtx user 
)

Interpolates a vector field from corner nodes to cell centers.

Safely interpolate a vector field from corner nodes (from the coordinate DM) to cell centers (from the cell-centered DM) using the provided UserCtx.

This function estimates the value of a vector field at the center of each computational cell (whose origin node is owned by this rank) by averaging the values from the 8 corner nodes defining that cell.

Parameters
[in]  field_arr      Input: 3D array view (ghosted) of vector data at nodes, obtained from a Vec associated with user->fda. Accessed via GLOBAL node indices. Ghost values must be up-to-date.
[out] centfield_arr  Output: 3D local array (0-indexed) where interpolated cell center values are stored. Sized [zm_cell_local][ym_cell_local][xm_cell_local] by the caller. centfield_arr[k_loc][j_loc][i_loc] stores the value for the cell corresponding to the (i_loc, j_loc, k_loc)-th owned cell origin.
[in]  user           User context containing DMDA information (fda, IM, JM, KM).
Returns
PetscErrorCode 0 on success.
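
A minimal calling sketch (not taken from this file) is shown below. It assumes ucatLocal is a ghosted local Vec on user->fda with up-to-date ghost values, and it uses a hypothetical helper Allocate3DArrayCmpnts to create the caller-owned output buffer; substitute whatever allocation routine the code base actually provides.

PetscErrorCode ierr;
DMDALocalInfo  info;
PetscInt       xs, xm, ys, ym, zs, zm;
Cmpnts      ***nodeArr, ***centLocal;

ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);   /* owned-cell start/count per direction */
ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);

ierr = Allocate3DArrayCmpnts(zm, ym, xm, &centLocal); CHKERRQ(ierr);         /* hypothetical helper */
ierr = DMDAVecGetArrayRead(user->fda, ucatLocal, &nodeArr); CHKERRQ(ierr);   /* nodal data, ghosts current (assumed) */
ierr = InterpolateFieldFromCornerToCenter_Vector(nodeArr, centLocal, user); CHKERRQ(ierr);
ierr = DMDAVecRestoreArrayRead(user->fda, ucatLocal, &nodeArr); CHKERRQ(ierr);
/* ... use centLocal[k_loc][j_loc][i_loc] for the owned cells, then free the buffer ... */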

Definition at line 38 of file interpolation.c.

42{
43 PetscErrorCode ierr;
44 PetscMPIInt rank;
45 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
47 "Rank %d starting InterpolateFieldFromCornerToCenter_Vector.\n", rank);
48
49 // Get local info based on the NODE-based DMDA (user->fda)
50 // This DM defines the ownership of the nodes from which we interpolate,
51 // and thus defines the cells whose centers we will compute.
52 DMDALocalInfo info_nodes_fda;
53 ierr = DMDAGetLocalInfo(user->fda, &info_nodes_fda); CHKERRQ(ierr);
54
55 // Determine owned CELL ranges based on owning their origin NODES (from user->fda)
56 PetscInt xs_cell_global_i, xm_cell_local_i; // Global start cell index, local count of owned cells
57 PetscInt ys_cell_global_j, ym_cell_local_j;
58 PetscInt zs_cell_global_k, zm_cell_local_k;
59
60
61 ierr = GetOwnedCellRange(&info_nodes_fda, 0, &xs_cell_global_i, &xm_cell_local_i); CHKERRQ(ierr);
62 ierr = GetOwnedCellRange(&info_nodes_fda, 1, &ys_cell_global_j, &ym_cell_local_j); CHKERRQ(ierr);
63 ierr = GetOwnedCellRange(&info_nodes_fda, 2, &zs_cell_global_k, &zm_cell_local_k); CHKERRQ(ierr);
64
65 // Exclusive end global cell indices
66 PetscInt xe_cell_global_i_excl = xs_cell_global_i + xm_cell_local_i;
67 PetscInt ye_cell_global_j_excl = ys_cell_global_j + ym_cell_local_j;
68 PetscInt ze_cell_global_k_excl = zs_cell_global_k + zm_cell_local_k;
69
70 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d : Processing Owned Cells (global indices) k=%d..%d (count %d), j=%d..%d (count %d), i=%d..%d (count %d)\n",
71 rank,
72 zs_cell_global_k, ze_cell_global_k_excl -1, zm_cell_local_k,
73 ys_cell_global_j, ye_cell_global_j_excl -1, ym_cell_local_j,
74 xs_cell_global_i, xe_cell_global_i_excl -1, xm_cell_local_i);
75
76 // Loop over the GLOBAL indices of the CELLS whose origin nodes are owned by this processor
77 // (according to user->fda partitioning)
78 for (PetscInt k_glob_cell = zs_cell_global_k; k_glob_cell < ze_cell_global_k_excl; k_glob_cell++) {
79 PetscInt k_local = k_glob_cell - zs_cell_global_k; // 0-based local index for centfield_arr
80
81 for (PetscInt j_glob_cell = ys_cell_global_j; j_glob_cell < ye_cell_global_j_excl; j_glob_cell++) {
82 PetscInt j_local = j_glob_cell - ys_cell_global_j; // 0-based local index
83
84 for (PetscInt i_glob_cell = xs_cell_global_i; i_glob_cell < xe_cell_global_i_excl; i_glob_cell++) {
85 PetscInt i_local = i_glob_cell - xs_cell_global_i; // 0-based local index
86
87 Cmpnts sum = {0.0, 0.0, 0.0};
88 // Count is always 8 for a hexahedral cell
89 // PetscInt count = 0; // Not strictly needed if we assume 8 corners
90
91 // Loop over the 8 corner NODES that define cell C(i_glob_cell, j_glob_cell, k_glob_cell)
92 for (PetscInt dk = 0; dk < 2; dk++) {
93 for (PetscInt dj = 0; dj < 2; dj++) {
94 for (PetscInt di = 0; di < 2; di++) {
95 PetscInt ni_glob = i_glob_cell + di; // Global NODE index i or i+1
96 PetscInt nj_glob = j_glob_cell + dj; // Global NODE index j or j+1
97 PetscInt nk_glob = k_glob_cell + dk; // Global NODE index k or k+1
98
99 // Access the input 'field_arr' (node values) using GLOBAL node indices.
100 // Ghosts in field_arr must be valid.
101 // DMDAVecGetArrayRead handles mapping global indices to local memory.
102 sum.x += field_arr[nk_glob][nj_glob][ni_glob].x;
103 sum.y += field_arr[nk_glob][nj_glob][ni_glob].y;
104 sum.z += field_arr[nk_glob][nj_glob][ni_glob].z;
105 // count++;
106 }
107 }
108 }
109
110 // Calculate average value representing the cell center
111 Cmpnts center_value;
112 center_value.x = sum.x / 8.0;
113 center_value.y = sum.y / 8.0;
114 center_value.z = sum.z / 8.0;
115
116 // Store result into the local, 0-indexed output array 'centfield_arr'
117 // Defensive check (should not be strictly necessary if caller allocated centfield_arr
118 // with dimensions xm_cell_local_i, ym_cell_local_j, zm_cell_local_k)
119 if (i_local < 0 || i_local >= xm_cell_local_i ||
120 j_local < 0 || j_local >= ym_cell_local_j ||
121 k_local < 0 || k_local >= zm_cell_local_k) {
122 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Local index out of bounds for centfield_arr write!");
123 }
124 centfield_arr[k_local][j_local][i_local] = center_value;
125
126 } // End loop i_glob_cell
127 } // End loop j_glob_cell
128 } // End loop k_glob_cell
129
131 "Rank %d completed InterpolateFieldFromCornerToCenter_Vector.\n", rank);
132 return 0;
133}
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Definition logging.h:274
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:44
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:45
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-function list.
Definition logging.h:207
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:33
PetscErrorCode GetOwnedCellRange(const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global, PetscInt *xm_cell_local)
Determines the global starting index and number of CELLS owned by the current processor in a specified dimension.
Definition setup.c:1344
PetscScalar x
Definition variables.h:100
PetscScalar z
Definition variables.h:100
PetscScalar y
Definition variables.h:100
A 3D point or vector with PetscScalar components.
Definition variables.h:99

◆ InterpolateFieldFromCornerToCenter_Scalar()

PetscErrorCode InterpolateFieldFromCornerToCenter_Scalar ( PetscReal ***  field_arr,
PetscReal ***  centfield_arr,
UserCtx user 
)

Interpolates a scalar field from corner nodes to cell centers.

Safely interpolate a scalar field from corner nodes (from the coordinate DM) to cell centers (from the cell-centered DM) using the provided UserCtx.

This function estimates the value of a scalar field at the center of each computational cell (whose origin node is owned by this rank) by averaging the values from the 8 corner nodes defining that cell.

Parameters
[in]  field_arr      Input: 3D array view (ghosted) of scalar data at nodes, obtained from a Vec associated with user->da. Accessed via GLOBAL node indices. Ghost values must be up-to-date.
[out] centfield_arr  Output: 3D local array (0-indexed) where interpolated cell center values are stored. Sized [zm_cell_local][ym_cell_local][xm_cell_local] by the caller. centfield_arr[k_loc][j_loc][i_loc] stores the value for the cell corresponding to the (i_loc, j_loc, k_loc)-th owned cell origin.
[in]  user           User context containing DMDA information (da, IM, JM, KM).
Returns
PetscErrorCode 0 on success.

Definition at line 157 of file interpolation.c.

161{
162 PetscErrorCode ierr;
163 PetscMPIInt rank;
164 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
166 "Rank %d starting InterpolateFieldFromCornerToCenter_Scalar.\n", rank);
167
168 // Get local info based on user->da (which is node-based but defines cell origins)
169 DMDALocalInfo info_da_nodes;
170 ierr = DMDAGetLocalInfo(user->da, &info_da_nodes); CHKERRQ(ierr);
171
172 // Determine owned CELL ranges based on owning their origin NODES (from user->da)
173 PetscInt xs_cell_global_i, xm_cell_local_i; // Global start cell index, local count of owned cells
174 PetscInt ys_cell_global_j, ym_cell_local_j;
175 PetscInt zs_cell_global_k, zm_cell_local_k;
176
177 ierr = GetOwnedCellRange(&info_da_nodes, 0, &xs_cell_global_i, &xm_cell_local_i); CHKERRQ(ierr);
178 ierr = GetOwnedCellRange(&info_da_nodes, 1, &ys_cell_global_j, &ym_cell_local_j); CHKERRQ(ierr);
179 ierr = GetOwnedCellRange(&info_da_nodes, 2, &zs_cell_global_k, &zm_cell_local_k); CHKERRQ(ierr);
180
181 // Exclusive end global cell indices
182 PetscInt xe_cell_global_i_excl = xs_cell_global_i + xm_cell_local_i;
183 PetscInt ye_cell_global_j_excl = ys_cell_global_j + ym_cell_local_j;
184 PetscInt ze_cell_global_k_excl = zs_cell_global_k + zm_cell_local_k;
185
186 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d (IFCTCS): Processing Owned Cells (global indices) k=%d..%d (count %d), j=%d..%d (count %d), i=%d..%d (count %d)\n",
187 rank,
188 zs_cell_global_k, ze_cell_global_k_excl -1, zm_cell_local_k,
189 ys_cell_global_j, ye_cell_global_j_excl -1, ym_cell_local_j,
190 xs_cell_global_i, xe_cell_global_i_excl -1, xm_cell_local_i);
191
192 // Loop over the GLOBAL indices of the CELLS whose origin nodes are owned by this processor
193 // (according to user->da partitioning)
194 for (PetscInt k_glob_cell = zs_cell_global_k; k_glob_cell < ze_cell_global_k_excl; k_glob_cell++) {
195 PetscInt k_local = k_glob_cell - zs_cell_global_k; // 0-based local index for centfield_arr
196
197 for (PetscInt j_glob_cell = ys_cell_global_j; j_glob_cell < ye_cell_global_j_excl; j_glob_cell++) {
198 PetscInt j_local = j_glob_cell - ys_cell_global_j; // 0-based local index
199
200 for (PetscInt i_glob_cell = xs_cell_global_i; i_glob_cell < xe_cell_global_i_excl; i_glob_cell++) {
201 PetscInt i_local = i_glob_cell - xs_cell_global_i; // 0-based local index
202
203 PetscReal sum = 0.0;
204 // Count is always 8 for a hexahedral cell if all nodes are accessible
205 // PetscInt count = 0;
206
207 // Loop over the 8 NODE indices that define cell C(i_glob_cell, j_glob_cell, k_glob_cell)
208 for (PetscInt dk = 0; dk < 2; dk++) {
209 for (PetscInt dj = 0; dj < 2; dj++) {
210 for (PetscInt di = 0; di < 2; di++) {
211 PetscInt ni_glob = i_glob_cell + di; // Global NODE index for corner
212 PetscInt nj_glob = j_glob_cell + dj; // Global NODE index for corner
213 PetscInt nk_glob = k_glob_cell + dk; // Global NODE index for corner
214
215 // Access the input 'field_arr' (node values) using GLOBAL node indices.
216 sum += field_arr[nk_glob][nj_glob][ni_glob];
217 // count++;
218 }
219 }
220 }
221
222 PetscReal center_value = sum / 8.0;
223
224 // Store the calculated cell center value into the output array 'centfield_arr'
225 // using the LOCAL 0-based cell index.
226 // Defensive check (should not be strictly necessary if caller allocated centfield_arr correctly)
227 if (i_local < 0 || i_local >= xm_cell_local_i ||
228 j_local < 0 || j_local >= ym_cell_local_j ||
229 k_local < 0 || k_local >= zm_cell_local_k) {
230 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Local index out of bounds for centfield_arr write in Scalar version!");
231 }
232 centfield_arr[k_local][j_local][i_local] = center_value;
233
234 } // End loop i_glob_cell
235 } // End loop j_glob_cell
236 } // End loop k_glob_cell
237
239 "Rank %d completed InterpolateFieldFromCornerToCenter_Scalar.\n", rank);
240 return 0;
241}

◆ InterpolateFieldFromCenterToCorner_Scalar()

PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar ( PetscReal ***  centfield_arr,
PetscReal ***  field_arr,
UserCtx user 
)

Interpolates a scalar field from cell centers to corner nodes.

This function estimates the value of a scalar field at each grid node by averaging the values from the cell centers of the cells surrounding that node (up to 8). It handles physical boundaries by averaging only the available adjacent cells.

Assumes input centfield_arr is from a ghosted local vector associated with user->da (DOF=1, s=2) and output field_arr is from a ghosted local vector also associated with user->da (DOF=1, s=2). Input array uses GLOBAL cell indices, output array uses GLOBAL node indices.

Parameters
[in]  centfield_arr  Input: 3D array (ghosted) of scalar data at cell centers, accessed via GLOBAL cell indices (k=0..KM-1, j=0..JM-1, i=0..IM-1).
[out] field_arr      Output: 3D array (ghosted) where interpolated node values are stored, accessed via GLOBAL node indices (k=0..KM, j=0..JM, i=0..IM).
[in]  user           User context containing DMDA information (da).
Returns
PetscErrorCode 0 on success.

Definition at line 265 of file interpolation.c.

269{
270 PetscErrorCode ierr;
271 PetscMPIInt rank;
272 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
274 "Rank %d starting interpolation.\n", rank);
275
276 // Get local info based on the DMDA (da). This info primarily describes owned nodes.
277 DMDALocalInfo info;
278 ierr = DMDAGetLocalInfo(user->da, &info); CHKERRQ(ierr);
279
280 // Define the range of NODES owned by this processor using GLOBAL indices
281 PetscInt xs_node = info.xs;
282 PetscInt xm_node = info.xm;
283 PetscInt xe_node = xs_node + xm_node;
284 PetscInt ys_node = info.ys;
285 PetscInt ym_node = info.ym;
286 PetscInt ye_node = ys_node + ym_node;
287 PetscInt zs_node = info.zs;
288 PetscInt zm_node = info.zm;
289 PetscInt ze_node = zs_node + zm_node;
290
291 // Get Global dimensions (number of cells IM, JM, KM)
292 PetscInt IM = info.mx - 1;
293 PetscInt JM = info.my - 1;
294 PetscInt KM = info.mz - 1;
295
296
297 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Rank %d: Interpolating for owned NODES k=%d..%d, j=%d..%d, i=%d..%d\n",
298 rank, zs_node, ze_node, ys_node, ye_node, xs_node, xe_node);
299
300 // Loop over the GLOBAL indices of the NODES owned by this processor
301 for (PetscInt k = zs_node; k < ze_node; k++) { // Global NODE index k (0..KM)
302 for (PetscInt j = ys_node; j < ye_node; j++) { // Global NODE index j (0..JM)
303 for (PetscInt i = xs_node; i < xe_node; i++) { // Global NODE index i (0..IM)
304
305 PetscReal sum = 0.0;
306 PetscInt count = 0;
307
308 // Loop over the potential 8 CELL indices surrounding node (i,j,k)
309 // Cell indices are (i & i-1), (j & j-1), (k & k-1)
310 for (PetscInt dk = -1; dk <= 0; dk++) { // Relative cell k-index offset
311 for (PetscInt dj = -1; dj <= 0; dj++) { // Relative cell j-index offset
312 for (PetscInt di = -1; di <= 0; di++) { // Relative cell i-index offset
313
314 PetscInt ci = i + di; // Global CELL index of neighbor
315 PetscInt cj = j + dj; // Global CELL index of neighbor
316 PetscInt ck = k + dk; // Global CELL index of neighbor
317
318 // Check if this CELL index is within the valid global cell range (0..IM-1, etc.)
319 if (ci >= 0 && ci <= IM-1 &&
320 cj >= 0 && cj <= JM-1 &&
321 ck >= 0 && ck <= KM-1)
322 {
323 // Access the input 'centfield_arr' using GLOBAL cell indices.
324 // Relies on centfield_arr being from a ghosted local vector.
325 sum += centfield_arr[ck][cj][ci];
326 count++;
327 // Optional Debug Log
328 // LOG_LOOP_ALLOW(GLOBAL, LOG_DEBUG, i*j*k, 10000,
329 // " Rank %d | Node(i,j,k)=%d,%d,%d | Using Cell(ci,cj,ck)=%d,%d,%d | Value=%.3f | Count=%d\n",
330 // rank, i, j, k, ci, cj, ck, centfield_arr[ck][cj][ci], count);
331 }
332 } // end di loop
333 } // end dj loop
334 } // end dk loop
335
336 // Calculate average and store in the output 'field_arr' at the NODE index (i,j,k)
337 if (count > 0) {
338 // Store the result using the GLOBAL node index [k][j][i]
339 field_arr[k][j][i] = sum / (PetscReal)count;
340 } else {
341 // This indicates an issue - a node should always be adjacent to at least one cell
343 "Rank %d: Node (i=%d,j=%d,k=%d) had count=0 surrounding cells! Check logic/ghosting.\n", rank, i, j, k);
344 // Assign a default value or handle error
345 field_arr[k][j][i] = 0.0; // Defaulting to zero might hide issues
346 }
347 } // End loop i (nodes)
348 } // End loop j (nodes)
349 } // End loop k (nodes)
350
352 "Rank %d completed interpolation.\n", rank);
353 return 0;
354}
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:29

◆ InterpolateFieldFromCenterToCorner_Vector()

PetscErrorCode InterpolateFieldFromCenterToCorner_Vector ( Cmpnts ***  centfield_arr,
Cmpnts ***  field_arr,
UserCtx user 
)

Interpolates a vector field from cell centers to corner nodes.

This function estimates the value of a vector field at each grid node by averaging the vector values from the cell centers of the cells surrounding that node (up to 8). It handles physical boundaries by averaging only the available adjacent cells.

Assumes input centfield_arr is from a ghosted local vector (e.g., representing ucat, stored using node-indexing convention) and output field_arr is a ghosted local vector associated with user->fda (DOF=3, s=2), accessed using global node indices.

Parameters
[in]  centfield_arr  Input: 3D array (ghosted) of vector data conceptually at cell centers, accessed via GLOBAL indices respecting the storage convention (e.g., ucat[k][j][i] uses node index i but represents cell C(i,j,k) for interior).
[out] field_arr      Output: 3D array (ghosted) where interpolated node values are stored, accessed via GLOBAL node indices (k=0..KM, j=0..JM, i=0..IM).
[in]  user           User context containing DMDA information (da and fda).
Returns
PetscErrorCode 0 on success.

Definition at line 381 of file interpolation.c.

385{
386 PetscErrorCode ierr;
387 PetscMPIInt rank;
388 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
389
390 DMDALocalInfo info_nodes;
391 ierr = DMDAGetLocalInfo(user->fda, &info_nodes); CHKERRQ(ierr);
392
393 // Node ownership range (GLOBAL indices)
394 PetscInt xs_node = info_nodes.xs, xm_node = info_nodes.xm, xe_node = xs_node + xm_node;
395 PetscInt ys_node = info_nodes.ys, ym_node = info_nodes.ym, ye_node = ys_node + ym_node;
396 PetscInt zs_node = info_nodes.zs, zm_node = info_nodes.zm, ze_node = zs_node + zm_node;
397
398 // Global grid dimensions (NODES) - Used for global cell index check
399 PetscInt MX_node = user->IM; //info_nodes.mx;
400 PetscInt MY_node = user->JM; //info_nodes.my;
401 PetscInt MZ_node = user->KM; //info_nodes.mz;
402 PetscInt IM = MX_node - 1; // Max cell index i
403 PetscInt JM = MY_node - 1; // Max cell index j
404 PetscInt KM = MZ_node - 1; // Max cell index k
405
406
407 // Valid range for accessing the INPUT ghosted array (using NODE indices)
408 PetscInt gxs_node = info_nodes.gxs, gxm_node = info_nodes.gxm, gxe_node = gxs_node + gxm_node;
409 PetscInt gys_node = info_nodes.gys, gym_node = info_nodes.gym, gye_node = gys_node + gym_node;
410 PetscInt gzs_node = info_nodes.gzs, gzm_node = info_nodes.gzm, gze_node = gzs_node + gzm_node;
411
412 // Log only if this function is allowed by the list set in main()
413 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Rank %d: Interpolating for owned NODES k=%d..%d, j=%d..%d, i=%d..%d\n",
414 rank, zs_node, ze_node-1, ys_node, ye_node-1, xs_node, xe_node-1);
415
416 LOG_ALLOW(GLOBAL,LOG_DEBUG, "[%d] Dumping DM state (user->fda) BEFORE GetArray:\n", rank);
417
418
419 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)){
420 DMView(user->fda, PETSC_VIEWER_STDOUT_SELF);
421 // Inside InterpolateFieldFromCenterToCorner_Vector, before the loops:
422
423 PetscMPIInt rank_check;
424 MPI_Comm_rank(PETSC_COMM_WORLD, &rank_check);
425 if (rank_check == 1) { // Only for Rank 1
426 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] Attempting test read of OWNED INTERIOR centfield_arr[3][1][1]\n");
427 Cmpnts test_val_owned_interior = centfield_arr[7][1][1];
428 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] SUCCESS reading owned interior: x=%f\n", test_val_owned_interior.x);
429
430 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] Attempting test read of OWNED BOUNDARY centfield_arr[3][0][0]\n");
431 Cmpnts test_val_owned_boundary = centfield_arr[7][0][0];
432 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] SUCCESS reading owned boundary: x=%f\n", test_val_owned_boundary.x);
433
434 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] Attempting test read of GHOST centfield_arr[2][0][0]\n");
435 Cmpnts test_val_ghost = centfield_arr[7][0][0]; // This is the line that likely crashes
436 LOG_ALLOW(LOCAL,LOG_DEBUG, "[1] SUCCESS reading ghost: x=%f\n", test_val_ghost.x);
437
438 }
439 }
440
441 ierr = PetscBarrier(NULL);
442
443 // Proceed with the original loops...
444 // Loop over the GLOBAL indices of the NODES owned by this processor (k, j, i)
445 for (PetscInt k = zs_node; k < ze_node; k++) {
446 for (PetscInt j = ys_node; j < ye_node; j++) {
447 for (PetscInt i = xs_node; i < xe_node; i++) {
448 /*
449 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)){
450 // --- TEMPORARY TEST --- WORKS ONLY WHEN IM,JM,KM=5 !!!
451 if (rank == 1 && k == 3 && j == 0 && i == 0) {
452 LOG_ALLOW(GLOBAL,LOG_DEBUG, "[1] Test read inside loop for (3,0,0): Accessing centfield_arr[2][0][0]\n");
453 Cmpnts test_val_loop = centfield_arr[2][0][0]; // The crashing access
454 LOG_ALLOW(GLOBAL,LOG_DEBUG, "[1] Test read inside loop SUCCESS: x=%f\n", test_val_loop.x);
455 }
456 // --- END TEMPORARY TEST ---
457 }
458 */
459
460 Cmpnts sum = {0.0, 0.0, 0.0};
461 PetscInt count = 0;
462 PetscBool attempted_read = PETSC_FALSE; // Flag to track if read was tried
463
464 // Loop over the 8 potential cells surrounding node N(k,j,i)
465 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
466 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
467 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
468
469 // Calculate the NODE index where the relevant cell's data is stored
470 PetscInt node_idx_k = k + dk_offset;
471 PetscInt node_idx_j = j + dj_offset;
472 PetscInt node_idx_i = i + di_offset;
473
474
475 // Check if this NODE index corresponds to a valid GLOBAL cell index
476 PetscInt cell_idx_k = node_idx_k; // Cell index is same as node index for storage
477 PetscInt cell_idx_j = node_idx_j;
478 PetscInt cell_idx_i = node_idx_i;
479
480
481
482 if (cell_idx_i >= 0 && cell_idx_i <= MX_node && // Cell index check
483 cell_idx_j >= 0 && cell_idx_j <= MY_node &&
484 cell_idx_k >= 0 && cell_idx_k <= MZ_node)
485 {
486
487 /*
488 // Check if this NODE index is valid within the GLOBAL node domain (0..Mx-1)
489 // This implicitly checks if the corresponding cell is valid (0..Mx-2)
490 if (node_idx_i >= 0 && node_idx_i < MX_node -1 &&
491 node_idx_j >= 0 && node_idx_j < MY_node -1 &&
492 node_idx_k >= 0 && node_idx_k < MZ_node -1)
493 {
494 */
495
496 // Check if the NODE index is within the accessible LOCAL+GHOST range of centfield_arr
497 if (node_idx_i >= gxs_node && node_idx_i < gxe_node &&
498 node_idx_j >= gys_node && node_idx_j < gye_node &&
499 node_idx_k >= gzs_node && node_idx_k < gze_node)
500 {
501 // Log attempt just before read
502 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"PRE-READ: Rank %d targeting Node(k,j,i)=%d,%d,%d. Reading input centfield_arr[%d][%d][%d] (for cell C(%d,%d,%d))\n",
503 rank,
504 k,j,i,
505 node_idx_k,node_idx_j,node_idx_i,
506 cell_idx_k, cell_idx_j, cell_idx_i);
507
508 attempted_read = PETSC_TRUE; // Mark that we are attempting a read
509
510 // Convert GLOBAL neighbor index to LOCAL index for reading from the ghosted array
511 PetscInt k_local_read = node_idx_k - gzs_node;
512 PetscInt j_local_read = node_idx_j - gys_node;
513 PetscInt i_local_read = node_idx_i - gxs_node;
514 // ---> READ <---
515 Cmpnts cell_val = centfield_arr[k_local_read][j_local_read][i_local_read];
516// Cmpnts cell_val = centfield_arr[node_idx_k][node_idx_j][node_idx_i];
517
518 // Log success immediately after read
519 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"POST-READ: Rank %d successful read from [%d][%d][%d] -> (%.2f, %.2f, %.2f)\n",
520 rank,
521 node_idx_k,node_idx_j,node_idx_i,
522 cell_val.x, cell_val.y, cell_val.z);
523
524 sum.x += cell_val.x;
525 sum.y += cell_val.y;
526 sum.z += cell_val.z;
527 count++;
528
529 } else {
530 LOG_ALLOW(GLOBAL, LOG_WARNING, /* ... Ghost range warning ... */);
531 }
532 } // end global cell check
533 } // end di_offset
534 } // end dj_offset
535 } // end dk_offset
536
537 // ---> Convert GLOBAL node indices (k,j,i) to LOCAL indices for field_arr <---
538 // field_arr is dimensioned nx * ny * nz (local node dimensions)
539 // Global indices (k,j,i) range from (zs,ys,xs) to (ze-1, ye-1, xe-1)
540 // Local indices range from 0 to (zm-1, ym-1, xm-1)
541 PetscInt k_local = k - zs_node; // Offset by the starting global index
542 PetscInt j_local = j - ys_node;
543 PetscInt i_local = i - xs_node;
544 // Calculate average and write to output node (k,j,i)
545 if (count > 0) {
546 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"PRE-WRITE: Rank %d targeting Node(k,j,i)=%d,%d,%d. Writing avg (count=%d)\n", rank,k,j,i, count);
547
548 // --- Defensive check (optional but recommended) ---
549 if (k_local < 0 || k_local >= info_nodes.zm ||
550 j_local < 0 || j_local >= info_nodes.ym ||
551 i_local < 0 || i_local >= info_nodes.xm) {
552 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Calculated local write index out of bounds!");
553 }
554 // --- End check ---
555
556 // ---> Write using LOCAL indices <---
557 field_arr[k_local][j_local][i_local].x = sum.x / (PetscReal)count;
558 field_arr[k_local][j_local][i_local].y = sum.y / (PetscReal)count;
559 field_arr[k_local][j_local][i_local].z = sum.z / (PetscReal)count;
560
561 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"POST-WRITE: Rank %d successful write to field_arr[%d][%d][%d] (local)\n", rank, k_local,j_local,i_local); // Log local indices
562
563
564
565 } else {
566 LOG_ALLOW(GLOBAL, LOG_WARNING, // Use WARNING or ERROR
567 "Rank %d: Node (i=%d,j=%d,k=%d) had count=0 surrounding valid cells! Check logic/ghosting. Writing zero.\n", rank, i, j, k);
568 field_arr[k_local][j_local][i_local] = (Cmpnts){0.0, 0.0, 0.0};
569 }
570 // Add a log entry even if count=0 or no read was attempted for this node
571 if (!attempted_read && count==0) {
572 LOG_ALLOW(LOCAL, LOG_DEBUG,"NODE-COMPLETE: Rank %d Node(k,j,i)=%d,%d,%d finished loops, no valid cells/reads attempted.\n", rank, k, j, i);
573 } else if (count == 0 && attempted_read) {
574 // This case should ideally not happen if the ghost region check is correct
575 LOG_ALLOW(LOCAL, LOG_ERROR,"NODE-COMPLETE: Rank %d Node(k,j,i)=%d,%d,%d finished loops, attempted reads but count=0!\n", rank, k, j, i);
576 } else {
577 // This is the normal completion case after writing
578 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"NODE-COMPLETE: Rank %d Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
579 }
580
581 } // End loop node i
582 } // End loop node j
583 } // End loop node k
584
585 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, // Use INFO for completion
586 "Rank %d completed interpolation function.\n", rank); // Changed message slightly
587 return 0;
588}
PetscBool is_function_allowed(const char *functionName)
Checks if a given function is in the allow-list.
Definition logging.c:159
LogLevel get_log_level()
Retrieves the current logging level from the environment variable LOG_LEVEL.
Definition logging.c:49
#define LOG_LOOP_ALLOW_EXACT(scope, level, var, val, fmt,...)
Logs a custom message if a variable equals a specific value.
Definition logging.h:355
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:32
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:30
PetscInt KM
Definition variables.h:639
PetscInt JM
Definition variables.h:639
PetscInt IM
Definition variables.h:639

◆ InterpolateFieldFromCenterToCorner_Vector_Petsc()

PetscErrorCode InterpolateFieldFromCenterToCorner_Vector_Petsc ( Cmpnts ***  centfield_arr,
Cmpnts ***  corner_arr,
UserCtx user 
)

Interpolates a vector field from cell centers to corner nodes.

This version is adapted to write directly into a ghosted local array obtained from DMDAVecGetArray(), which allows using GLOBAL indices for writing to the OWNED portion of the array.

Parameters
[in]  centfield_arr  Input: 3D array (ghosted) of cell-centered data, accessed via GLOBAL indices.
[out] corner_arr     Output: 3D array (ghosted) where interpolated node values are stored, also accessed via GLOBAL indices for the owned part.
[in]  user           User context containing DMDA information.
Returns
PetscErrorCode 0 on success.
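
A sketch of the intended calling pattern follows, under the assumption that ucatLocal holds ghosted cell-centered data on user->fda; it mirrors what InterpolateEulerFieldToSwarm() (documented below) does internally and is not a verbatim excerpt from this file.

PetscErrorCode ierr;
Vec     cornerLocal, cornerGlobal;
Cmpnts  ***centArr, ***cornArr;

ierr = DMGetLocalVector(user->fda, &cornerLocal);   CHKERRQ(ierr);
ierr = DMGetGlobalVector(user->fda, &cornerGlobal); CHKERRQ(ierr);

ierr = DMDAVecGetArrayRead(user->fda, ucatLocal, &centArr); CHKERRQ(ierr);  /* ghosted cell-centered data (assumed) */
ierr = DMDAVecGetArray(user->fda, cornerLocal, &cornArr);   CHKERRQ(ierr);  /* written with GLOBAL indices */
ierr = InterpolateFieldFromCenterToCorner_Vector_Petsc(centArr, cornArr, user); CHKERRQ(ierr);
ierr = DMDAVecRestoreArray(user->fda, cornerLocal, &cornArr);   CHKERRQ(ierr);
ierr = DMDAVecRestoreArrayRead(user->fda, ucatLocal, &centArr); CHKERRQ(ierr);

/* Assemble owned corner values into a global Vec before any subsequent ghost exchange. */
ierr = DMLocalToGlobalBegin(user->fda, cornerLocal, INSERT_VALUES, cornerGlobal); CHKERRQ(ierr);
ierr = DMLocalToGlobalEnd(user->fda, cornerLocal, INSERT_VALUES, cornerGlobal);   CHKERRQ(ierr);

ierr = DMRestoreGlobalVector(user->fda, &cornerGlobal); CHKERRQ(ierr);
ierr = DMRestoreLocalVector(user->fda, &cornerLocal);   CHKERRQ(ierr);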

Definition at line 603 of file interpolation.c.

607{
608 PetscErrorCode ierr;
609 DMDALocalInfo info_nodes;
610 PetscMPIInt rank;
611 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
612 ierr = DMDAGetLocalInfo(user->fda, &info_nodes); CHKERRQ(ierr);
613
614 // Node ownership range (GLOBAL indices)
615 PetscInt xs_node = info_nodes.xs, xm_node = info_nodes.xm, xe_node = xs_node + xm_node;
616 PetscInt ys_node = info_nodes.ys, ym_node = info_nodes.ym, ye_node = ys_node + ym_node;
617 PetscInt zs_node = info_nodes.zs, zm_node = info_nodes.zm, ze_node = zs_node + zm_node;
618
619 // Global grid dimensions (used for valid cell check)
620 PetscInt IM = info_nodes.mx; // Total nodes in i-direction
621 PetscInt JM = info_nodes.my; // Total nodes in j-direction
622 PetscInt KM = info_nodes.mz; // Total nodes in k-direction
623
624 // Ghosted array starting indices (from the same info struct)
625 PetscInt gxs = info_nodes.gxs;
626 PetscInt gys = info_nodes.gys;
627 PetscInt gzs = info_nodes.gzs;
628
629 // Loop over the GLOBAL indices of the NODES owned by this processor
630 for (PetscInt k = zs_node; k < ze_node; k++) {
631 for (PetscInt j = ys_node; j < ye_node; j++) {
632 for (PetscInt i = xs_node; i < xe_node; i++) {
633 Cmpnts sum = {0.0, 0.0, 0.0};
634 PetscInt count = 0;
635
636 // Loop over the 8 potential cells surrounding node N(k,j,i)
637 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
638 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
639 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
640
641 // These are still GLOBAL cell indices
642 PetscInt global_cell_k = k + dk_offset;
643 PetscInt global_cell_j = j + dj_offset;
644 PetscInt global_cell_i = i + di_offset;
645
646 // Check if this corresponds to a valid GLOBAL cell index
647 if (global_cell_i >= 0 && global_cell_i < IM - 1 &&
648 global_cell_j >= 0 && global_cell_j < JM - 1 &&
649 global_cell_k >= 0 && global_cell_k < KM - 1)
650 {
651
652 // Now, read from the local array using the local index
653 Cmpnts cell_val = centfield_arr[global_cell_k][global_cell_j][global_cell_i];
654
655 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"[Rank %d] successful read from [%d][%d][%d] -> (%.2f, %.2f, %.2f)\n",
656 rank,global_cell_k,global_cell_j,global_cell_i,cell_val.x, cell_val.y, cell_val.z);
657
658 sum.x += cell_val.x;
659 sum.y += cell_val.y;
660 sum.z += cell_val.z;
661 count++;
662 }
663 }
664 }
665 }
666
667 // --- MODIFICATION: Write using GLOBAL indices ---
668 // The calculation of i_local, j_local, k_local is removed.
669 // We write directly into the array using the global loop indices.
670 if (count > 0) {
671 corner_arr[k][j][i].x = sum.x / (PetscReal)count;
672 corner_arr[k][j][i].y = sum.y / (PetscReal)count;
673 corner_arr[k][j][i].z = sum.z / (PetscReal)count;
674 } else {
675 // This case should ideally not happen for a valid owned node, but as a failsafe:
676 corner_arr[k][j][i] = (Cmpnts){0.0, 0.0, 0.0};
677 }
678
679 LOG_LOOP_ALLOW_EXACT(LOCAL, LOG_DEBUG,k,6,"[Rank %d] Node(k,j,i)=%d,%d,%d finished loops and write.\n", rank, k, j, i);
680 }
681 }
682 }
683 return 0;
684}

◆ InterpolateFieldFromCenterToCorner_Scalar_Petsc()

PetscErrorCode InterpolateFieldFromCenterToCorner_Scalar_Petsc ( PetscReal ***  centfield_arr,
PetscReal ***  corner_arr,
UserCtx user 
)

Interpolates a scalar field from cell centers to corner nodes.

This version is adapted to write directly into a ghosted local array obtained from DMDAVecGetArray(), which allows using GLOBAL indices for writing to the OWNED portion of the array.

Parameters
[in]  centfield_arr  Input: 3D array (ghosted) of scalar data at cell centers, accessed via GLOBAL indices.
[out] corner_arr     Output: 3D array (ghosted) where interpolated node values are stored, also accessed via GLOBAL indices for the owned part.
[in]  user           User context containing DMDA information.
Returns
PetscErrorCode 0 on success.

Definition at line 699 of file interpolation.c.

703{
704 PetscErrorCode ierr;
705 DMDALocalInfo info_nodes;
706 ierr = DMDAGetLocalInfo(user->fda, &info_nodes); CHKERRQ(ierr);
707
708 // Node ownership range (GLOBAL indices)
709 PetscInt xs_node = info_nodes.xs, xe_node = info_nodes.xs + info_nodes.xm;
710 PetscInt ys_node = info_nodes.ys, ye_node = info_nodes.ys + info_nodes.ym;
711 PetscInt zs_node = info_nodes.zs, ze_node = info_nodes.zs + info_nodes.zm;
712
713 // Global grid dimensions (number of nodes)
714 PetscInt IM = info_nodes.mx;
715 PetscInt JM = info_nodes.my;
716 PetscInt KM = info_nodes.mz;
717
718 // Loop over the GLOBAL indices of the NODES owned by this processor
719 for (PetscInt k = zs_node; k < ze_node; k++) {
720 for (PetscInt j = ys_node; j < ye_node; j++) {
721 for (PetscInt i = xs_node; i < xe_node; i++) {
722 PetscReal sum = 0.0;
723 PetscInt count = 0;
724
725 // Loop over the 8 potential cells surrounding node N(k,j,i)
726 for (PetscInt dk_offset = -1; dk_offset <= 0; dk_offset++) {
727 for (PetscInt dj_offset = -1; dj_offset <= 0; dj_offset++) {
728 for (PetscInt di_offset = -1; di_offset <= 0; di_offset++) {
729 // This is the global index of the cell-centered data point
730 PetscInt cell_idx_k = k + dk_offset;
731 PetscInt cell_idx_j = j + dj_offset;
732 PetscInt cell_idx_i = i + di_offset;
733
734 // Check if this corresponds to a valid GLOBAL cell index.
735 // Cell indices run from 0 to NumberOfNodes-2.
736 if (cell_idx_i >= 0 && cell_idx_i < IM - 1 &&
737 cell_idx_j >= 0 && cell_idx_j < JM - 1 &&
738 cell_idx_k >= 0 && cell_idx_k < KM - 1)
739 {
740 sum += centfield_arr[cell_idx_k][cell_idx_j][cell_idx_i];
741 count++;
742 }
743 }
744 }
745 }
746
747 // Calculate average and write to the output array using GLOBAL indices
748 if (count > 0) {
749 corner_arr[k][j][i] = sum / (PetscReal)count;
750 } else {
751 corner_arr[k][j][i] = 0.0; // Failsafe
752 }
753 }
754 }
755 }
756 return 0;
757}

◆ PieceWiseLinearInterpolation_Scalar()

PetscErrorCode PieceWiseLinearInterpolation_Scalar ( const char *  fieldName,
PetscReal ***  fieldScal,
PetscInt  iCell,
PetscInt  jCell,
PetscInt  kCell,
PetscReal *  val 
)

Retrieves the scalar value at the cell (iCell, jCell, kCell).

This function does a simple piecewise (zeroth‐order) interpolation by returning fieldScal[kCell][jCell][iCell], ignoring any fractional coordinates.

Parameters
[in]  fieldName            A string identifying the field (e.g. "temperature").
[in]  fieldScal            3D array of PetscReal (indexed as [k][j][i]).
[in]  iCell, jCell, kCell  Integral cell indices to sample.
[out] val                  Pointer to a PetscReal that receives the sampled scalar.
Returns
PetscErrorCode 0 on success

Definition at line 772 of file interpolation.c.

779{
780 PetscFunctionBegin;
781 *val = fieldScal[kCell][jCell][iCell];
782
783 // Optional logging
785 "PieceWiseLinearInterpolation_Scalar: Field '%s' at (i=%d, j=%d, k=%d) => val=%.6f\n",
786 fieldName, iCell, jCell, kCell, *val);
787
788 PetscFunctionReturn(0);
789}

◆ PieceWiseLinearInterpolation_Vector()

PetscErrorCode PieceWiseLinearInterpolation_Vector ( const char *  fieldName,
Cmpnts ***  fieldVec,
PetscInt  iCell,
PetscInt  jCell,
PetscInt  kCell,
Cmpnts vec 
)

Retrieves the vector (Cmpnts) at the cell (iCell, jCell, kCell).

This function simply sets: vec->x = fieldVec[kCell][jCell][iCell].x vec->y = fieldVec[kCell][jCell][iCell].y vec->z = fieldVec[kCell][jCell][iCell].z effectively a nearest‐cell or piecewise approach.

Parameters
[in]  fieldName            String identifying the field (e.g. "velocity").
[in]  fieldVec             3D array of Cmpnts (indexed as [k][j][i]).
[in]  iCell, jCell, kCell  Integral cell indices to sample.
[out] vec                  Pointer to a Cmpnts struct that receives the sampled vector.
Returns
PetscErrorCode 0 on success

Definition at line 808 of file interpolation.c.

815{
816 PetscFunctionBegin;
817 vec->x = fieldVec[kCell][jCell][iCell].x;
818 vec->y = fieldVec[kCell][jCell][iCell].y;
819 vec->z = fieldVec[kCell][jCell][iCell].z;
820
821 // Optional logging
823 "PieceWiseLinearInterpolation_Vector: Field '%s' at (i=%d, j=%d, k=%d) => (x=%.6f, y=%.6f, z=%.6f)\n",
824 fieldName, iCell, jCell, kCell, vec->x, vec->y, vec->z);
825
826 PetscFunctionReturn(0);
827}

◆ ComputeTrilinearWeights()

static void ComputeTrilinearWeights ( PetscReal  a1,
PetscReal  a2,
PetscReal  a3,
PetscReal *  w 
)
inline static

Computes the trilinear interpolation weights from the interpolation coefficients.

This function computes the weights for trilinear interpolation at the eight corners of a cell using the interpolation coefficients provided along the x, y, and z directions.

Parameters
[in]  a1  Interpolation coefficient along the x-direction (normalized coordinate within the cell).
[in]  a2  Interpolation coefficient along the y-direction (normalized coordinate within the cell).
[in]  a3  Interpolation coefficient along the z-direction (normalized coordinate within the cell).
[out] w   Array of 8 weights, each corresponding to one corner of the cell.
Note
  • The coefficients a1, a2, and a3 should be in the range [0, 1].
  • The order of weights corresponds to the eight corners of a hexahedral cell.
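
Equivalently, the eight weights computed in the listing below are products of one-dimensional linear factors (a LaTeX restatement of the code, not an additional assumption):

\[
  w_{(d_1,d_2,d_3)} \;=\; \prod_{m=1}^{3}\bigl[\, d_m\, a_m + (1-d_m)(1-a_m) \,\bigr], \qquad d_m \in \{0,1\},
\]

so that, for example, \(w_{(0,0,0)} = (1-a_1)(1-a_2)(1-a_3)\), \(w_{(1,1,1)} = a_1 a_2 a_3\), and the eight weights sum to 1.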

Definition at line 844 of file interpolation.c.

844 {
845 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Computing weights for a1=%f, a2=%f, a3=%f.\n", a1, a2, a3);
846
847 // Ensure a1, a2, a3 are within [0,1]
848 a1 = PetscMax(0.0, PetscMin(1.0, a1));
849 a2 = PetscMax(0.0, PetscMin(1.0, a2));
850 a3 = PetscMax(0.0, PetscMin(1.0, a3));
851
852 const PetscReal oa1 = 1.0 - a1;
853 const PetscReal oa2 = 1.0 - a2;
854 const PetscReal oa3 = 1.0 - a3;
855
856 w[0] = oa1 * oa2 * oa3; /* cornerOffsets[0] => (0,0,0) */
857 w[1] = a1 * oa2 * oa3; /* cornerOffsets[1] => (1,0,0) */
858 w[2] = oa1 * a2 * oa3; /* cornerOffsets[2] => (0,1,0) */
859 w[3] = a1 * a2 * oa3; /* cornerOffsets[3] => (1,1,0) */
860 w[4] = oa1 * oa2 * a3; /* cornerOffsets[4] => (0,0,1) */
861 w[5] = a1 * oa2 * a3; /* cornerOffsets[5] => (1,0,1) */
862 w[6] = oa1 * a2 * a3; /* cornerOffsets[6] => (0,1,1) */
863 w[7] = a1 * a2 * a3; /* cornerOffsets[7] => (1,1,1) */
864
865 // Log the computed weights for debugging
866 LOG_ALLOW(LOCAL,LOG_DEBUG, "Weights computed - "
867 "w0=%f, w1=%f, w2=%f, w3=%f, w4=%f, w5=%f, w6=%f, w7=%f. \n",
868 w[0], w[1], w[2], w[3], w[4], w[5], w[6], w[7]);
869}

◆ TrilinearInterpolation_Scalar()

PetscErrorCode TrilinearInterpolation_Scalar ( const char *  fieldName,
PetscReal ***  fieldScal,
PetscInt  i,
PetscInt  j,
PetscInt  k,
PetscReal  a1,
PetscReal  a2,
PetscReal  a3,
PetscReal *  val 
)

Computes the trilinearly interpolated scalar at a given point.

Computes the trilinear interpolated scalar at a given point.

Parameters
[in]  fieldName   A string representing the name of the scalar field (e.g., "temperature").
[in]  fieldScal   3D array of the field from a DMDA (indexed as [k][j][i]), each cell a PetscReal.
[in]  i, j, k     Integral cell indices (the "lower" corner in each dimension).
[in]  a1, a2, a3  Normalized coordinates within the cell ([0,1] range).
[out] val         Pointer to a PetscReal that will store the interpolated scalar.

This function uses the standard 8-corner trilinear formula via ComputeTrilinearWeights(). If a different scheme is desired, implement a new function with the same interface.
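
In formula form, matching the 8-corner sum performed in the listing below:

\[
  \mathrm{val} \;=\; \sum_{c=0}^{7} w_c \, f\bigl[k+\delta k_c\bigr]\bigl[j+\delta j_c\bigr]\bigl[i+\delta i_c\bigr],
\]

where \((\delta i_c, \delta j_c, \delta k_c) \in \{0,1\}^3\) are the corner offsets and \(w_c\) are the weights returned by ComputeTrilinearWeights().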

Definition at line 884 of file interpolation.c.

894{
895 PetscFunctionBegin; // PETSc macro for error/stack tracing
896
897 // Compute the 8 corner weights
898 PetscReal wcorner[8];
899 ComputeTrilinearWeights(a1, a2, a3, wcorner);
900
901 // Offsets for cell corners
902 PetscInt i1 = i + 1;
903 PetscInt j1 = j + 1;
904 PetscInt k1 = k + 1;
905
906 // Initialize the output scalar
907 PetscReal sum = 0.0;
908
909 // Corner 0 => (i, j, k)
910 sum += wcorner[0] * fieldScal[k ][j ][i ];
911 // Corner 1 => (i+1, j, k)
912 sum += wcorner[1] * fieldScal[k ][j ][i1];
913 // Corner 2 => (i, j+1, k)
914 sum += wcorner[2] * fieldScal[k ][j1][i ];
915 // Corner 3 => (i+1, j+1, k)
916 sum += wcorner[3] * fieldScal[k ][j1][i1];
917 // Corner 4 => (i, j, k+1)
918 sum += wcorner[4] * fieldScal[k1][j ][i ];
919 // Corner 5 => (i+1, j, k+1)
920 sum += wcorner[5] * fieldScal[k1][j ][i1];
921 // Corner 6 => (i, j+1, k+1)
922 sum += wcorner[6] * fieldScal[k1][j1][i ];
923 // Corner 7 => (i+1, j+1, k+1)
924 sum += wcorner[7] * fieldScal[k1][j1][i1];
925
926 *val = sum;
927
928 // Logging (optional)
930 "TrilinearInterpolation_Scalar: Field '%s' at (i=%d, j=%d, k=%d), "
931 "a1=%.6f, a2=%.6f, a3=%.6f -> val=%.6f.\n",
932 fieldName, i, j, k, a1, a2, a3, *val);
933
934 // LOG_ALLOW_SYNC(GLOBAL, LOG_INFO,
935 // "TrilinearInterpolation_Scalar: Completed Interpolatetion for field '%s' across local cells.\n",
936 // fieldName);
937
938 PetscFunctionReturn(0);
939}
static void ComputeTrilinearWeights(PetscReal a1, PetscReal a2, PetscReal a3, PetscReal *w)
Computes the trilinear interpolation weights from the interpolation coefficients.

◆ TrilinearInterpolation_Vector()

PetscErrorCode TrilinearInterpolation_Vector ( const char *  fieldName,
Cmpnts ***  fieldVec,
PetscInt  i,
PetscInt  j,
PetscInt  k,
PetscReal  a1,
PetscReal  a2,
PetscReal  a3,
Cmpnts vec 
)

Computes the trilinearly interpolated vector (e.g., velocity) at a given point.

Computes the trilinear interpolated vector (e.g., velocity) at a given point.

Parameters
[in]  fieldName   A string representing the name of the vector field (e.g., "velocity").
[in]  fieldVec    3D array of the field from a DMDA (indexed as [k][j][i]), each cell of type Cmpnts.
[in]  i, j, k     Integral cell indices (the "lower" corner in each dimension).
[in]  a1, a2, a3  Normalized coordinates within the cell ([0,1] range).
[out] vec         Pointer to a Cmpnts struct that will store the interpolated vector (x, y, z).

This function uses the standard 8-corner trilinear formula via ComputeTrilinearWeights(). If a different scheme is desired, implement a new function with the same interface.

Computes the trilinearly interpolated vector at a given point, with partial weighting if corners go out of range.

If any of the 8 corners (i or i+1, j or j+1, k or k+1) is out of [0..mx), [0..my), [0..mz), that corner is skipped and the corresponding weight is omitted. The total is normalized by the sum of used weights, so we get a partial interpolation near boundaries.
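
With \(\mathcal{V}\) denoting the set of corners whose indices pass the range check, the returned vector is the renormalized partial sum

\[
  \mathbf{vec} \;=\; \frac{\sum_{c \in \mathcal{V}} w_c\, \mathbf{f}_c}{\sum_{c \in \mathcal{V}} w_c},
\]

and (0,0,0) is returned if the denominator is numerically zero. (In the listing below the out-of-range skip is currently commented out, so all eight corners contribute and the denominator is 1.)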

Definition at line 962 of file interpolation.c.

972{
973 PetscFunctionBegin; // PETSc macro for error/stack tracing
974
975 // Compute the 8 corner weights
976 PetscErrorCode ierr;
977 PetscReal wcorner[8];
978 PetscMPIInt rank;
979
980 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
981
982 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] Computing Trilinear Weights.\n",rank);
983 ComputeTrilinearWeights(a1, a2, a3, wcorner);
984
985 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] Trilinear weights computed for local cell %d,%d,%d.\n",rank,i,j,k);
986
 987 // For partial interpolation, we'll keep track of how many corners are valid
988 // and how much sum of weights is used. Then we do a final normalization.
989 PetscReal sumW = 0.0;
990 Cmpnts accum = {0.0, 0.0, 0.0};
991
992 // The eight corner indices, with their weights:
993 // corners: (i,j,k), (i+1,j,k), (i,j+1,k), (i+1,j+1,k), etc.
994 // We store them in an array to iterate cleanly.
995 const PetscInt cornerOffsets[8][3] = {
996 {0, 0, 0},
997 {1, 0, 0},
998 {0, 1, 0},
999 {1, 1, 0},
1000 {0, 0, 1},
1001 {1, 0, 1},
1002 {0, 1, 1},
1003 {1, 1, 1}
1004 };
1005
1006 // Weighted partial sum
1007 for (PetscInt c = 0; c < 8; c++) {
1008 const PetscInt di = cornerOffsets[c][0];
1009 const PetscInt dj = cornerOffsets[c][1];
1010 const PetscInt dk = cornerOffsets[c][2];
1011 PetscInt iC = i + di;
1012 PetscInt jC = j + dj;
1013 PetscInt kC = k + dk;
1014
1015 /*
1016 // skip if out of domain
1017 // (Assuming you know global domain is [0..mx), [0..my), [0..mz).)
1018 if (iC < 0 || iC >= (PetscInt)userGlobalMx ||
1019 jC < 0 || jC >= (PetscInt)userGlobalMy ||
1020 kC < 0 || kC >= (PetscInt)userGlobalMz)
1021 {
1022 // skip this corner
1023 continue;
1024 }
1025
1026 */
1027
1028 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] %s[%d][%d][%d] = (%.4f,%.4f,%.4f).\n",rank,fieldName,kC,jC,iC,fieldVec[kC][jC][iC].x,fieldVec[kC][jC][iC].y,fieldVec[kC][jC][iC].z);
1029
1030 // Otherwise, accumulate
1031 accum.x += wcorner[c] * fieldVec[kC][jC][iC].x;
1032 accum.y += wcorner[c] * fieldVec[kC][jC][iC].y;
1033 accum.z += wcorner[c] * fieldVec[kC][jC][iC].z;
1034 sumW += wcorner[c];
1035 }
1036
1037 // If sumW=0 => out-of-range or zero weighting => set (0,0,0)
1038 if (sumW > 1.0e-14) {
1039 vec->x = accum.x / sumW;
1040 vec->y = accum.y / sumW;
1041 vec->z = accum.z / sumW;
1042 } else {
1043 vec->x = 0.0; vec->y = 0.0; vec->z = 0.0;
1044 }
1045
1046 PetscFunctionReturn(0);
1047}

◆ InterpolateEulerFieldToSwarmForParticle()

static PetscErrorCode InterpolateEulerFieldToSwarmForParticle ( const char *  fieldName,
void *  fieldPtr,
PetscInt  iCell,
PetscInt  jCell,
PetscInt  kCell,
PetscReal  a1,
PetscReal  a2,
PetscReal  a3,
void *  swarmOut,
PetscInt  p,
PetscInt  blockSize 
)
inline static

Interpolates a single field (scalar or vector) for one particle, storing the result in a swarm array.

This helper function is used inside a larger loop over local particles.

Parameters
[in]  fieldName            A string identifying the field (e.g. "velocity", "temperature", etc.).
[in]  fieldPtr             A pointer to the local DMDA array for the field:
  • (PetscReal ***) if scalar (blockSize = 1)
  • (Cmpnts ***) if vector (blockSize = 3)
[in]  iCell, jCell, kCell  The cell indices for this particle (already clamped if needed).
[in]  a1, a2, a3           Interpolation coefficients in [0..1]. If using PiecewiseLinearInterpolation, these are currently ignored.
[out] swarmOut             A pointer to the DMSwarm output array:
  • (PetscReal*) if scalar dof=1
  • (PetscReal*) if vector dof=3 (storing x,y,z in consecutive reals)
  • or (Cmpnts*) if you store the result as a Cmpnts struct
[in]  p                    The local particle index (used to compute the correct offset in swarmOut).
[in]  blockSize            The number of degrees of freedom (1 => scalar, 3 => vector).

This routine demonstrates a switch between:

  • PiecewiseLinearInterpolation (zeroth order / nearest cell)
  • TrilinearInterpolation (8-corner weighted).

By default, PiecewiseLinearInterpolation is active, while the TrilinearInterpolation calls are commented out. To switch to trilinear, simply comment/uncomment appropriately.

Logging calls are provided (LOG_ALLOW) that you can adapt to your existing logging system.

Returns
PetscErrorCode 0 on success, non-zero on error.
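
Since this is a static helper, a typical driver loop looks roughly like the sketch below. The swarm field names ("DMSwarm_CellID", "weight", "velocity") and the ghosted corner array cornArr are assumptions for illustration; the actual driver is InterpolateEulerFieldToSwarm(), documented later on this page.

PetscErrorCode ierr;
PetscInt   nLocal, p;
PetscInt  *cellIDs = NULL;
PetscReal *weights = NULL, *uOut = NULL;

ierr = DMSwarmGetLocalSize(user->swarm, &nLocal); CHKERRQ(ierr);
ierr = DMSwarmGetField(user->swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
ierr = DMSwarmGetField(user->swarm, "weight",         NULL, NULL, (void**)&weights); CHKERRQ(ierr);
ierr = DMSwarmGetField(user->swarm, "velocity",       NULL, NULL, (void**)&uOut);    CHKERRQ(ierr);

for (p = 0; p < nLocal; p++) {
  PetscInt  iC = cellIDs[3*p], jC = cellIDs[3*p+1], kC = cellIDs[3*p+2];   /* host cell of particle p */
  PetscReal a1 = weights[3*p], a2 = weights[3*p+1], a3 = weights[3*p+2];   /* intra-cell coordinates  */
  ierr = InterpolateEulerFieldToSwarmForParticle("Ucat", (void*)cornArr,
                                                 iC, jC, kC, a1, a2, a3,
                                                 (void*)uOut, p, 3); CHKERRQ(ierr);  /* blockSize=3 => vector */
}

ierr = DMSwarmRestoreField(user->swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
ierr = DMSwarmRestoreField(user->swarm, "weight",         NULL, NULL, (void**)&weights); CHKERRQ(ierr);
ierr = DMSwarmRestoreField(user->swarm, "velocity",       NULL, NULL, (void**)&uOut);    CHKERRQ(ierr);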

Definition at line 1080 of file interpolation.c.

1092{
1093 PetscErrorCode ierr;
1094 PetscFunctionBegin;
1095
1096 // Optional logging at start
1098 "InterpolateEulerFieldToSwarmForParticle: field='%s', blockSize=%d, "
1099 "cell IDs=(%d,%d,%d), weights=(%.4f,%.4f,%.4f)\n",
1100 fieldName, blockSize, iCell, jCell, kCell, a1, a2, a3);
1101
1102 /*
 1103 If blockSize=1, we interpret the fieldPtr as a 3D array of PetscReal (scalar).
 1104 If blockSize=3, we interpret the fieldPtr as a 3D array of Cmpnts (vector).
1105 */
1106 if (blockSize == 1) {
1107 /* Scalar field: Cast fieldPtr to (PetscReal ***). */
1108 PetscReal ***fieldScal = (PetscReal ***) fieldPtr;
1109 PetscReal val;
1110
1111 // Currently using trilinear.
1112 ierr = TrilinearInterpolation(fieldName, fieldScal,
1113 iCell, jCell, kCell,
1114 a1, a2, a3,
1115 &val);
1116 CHKERRQ(ierr);
1117
1118 // Alternative (commented) call to PiecewiseLinearInterpolation (zeroth order) :
1119 // PetscErrorCode ierr = PiecewiseLinearInterpolation(fieldName,
1120 // fieldScal,
1121 // iCell, jCell, kCell,
1122 // &val);
1123 // CHKERRQ(ierr);
1124
1125 // Write the scalar result to the swarm output at index [p].
1126 ((PetscReal*)swarmOut)[p] = val;
1127
1129 "InterpolateEulerFieldToSwarmForParticle [Scalar]: field='%s', result=%.6f "
1130 "stored at swarmOut index p=%d.\n", fieldName, val, (PetscInt)p);
1131 }
1132 else if (blockSize == 3) {
1133 /* Vector field: Cast fieldPtr to (Cmpnts ***). */
1134 Cmpnts ***fieldVec = (Cmpnts ***) fieldPtr;
1135 Cmpnts vec;
1136
1137
1138
1139
 1140 // Piecewise interpolation (zeroth order).
1141 // PetscErrorCode ierr = PieceWiseLinearInterpolation(fieldName,
1142 // fieldVec,
1143 // iCell, jCell, kCell,
1144 // &vec);
1145 // CHKERRQ(ierr);
1146
1147 // Alternative (commented) call to trilinear:
1148 ierr = TrilinearInterpolation(fieldName, fieldVec,
1149 iCell, jCell, kCell,
1150 a1, a2, a3,
1151 &vec);
1152 CHKERRQ(ierr);
1153
1154 // If swarmOut is an array of 3 reals per particle:
1155 ((PetscReal*)swarmOut)[3*p + 0] = vec.x;
1156 ((PetscReal*)swarmOut)[3*p + 1] = vec.y;
1157 ((PetscReal*)swarmOut)[3*p + 2] = vec.z;
1158
1160 "InterpolateEulerFieldToSwarmForParticle [Vector]: field='%s', result=(%.6f,%.6f,%.6f) "
1161 "stored at swarmOut[3p..3p+2], p=%d.\n",
1162 fieldName, vec.x, vec.y, vec.z, (PetscInt)p);
1163
1164 /*
1165 If you store the vector result as a Cmpnts in the swarm, do instead:
1166 ((Cmpnts*)swarmOut)[p] = vec;
1167 but ensure your DMSwarm field is sized for a Cmpnts struct.
1168 */
1169 }
1170 else {
1171 /* If blockSize isn't 1 or 3, we raise an error. */
1172 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP,
1173 "InterpolateEulerFieldToSwarmForParticle: only blockSize=1 or 3 supported, got %d.",
1174 (PetscInt)blockSize);
1175 }
1176
1177 PetscFunctionReturn(0);
1178}
#define TrilinearInterpolation(fieldName, fieldPtr, i, j, k, a1, a2, a3, outPtr)
Macro that calls either the scalar or vector trilinear interpolation function based on the type of the field pointer.

◆ InterpolateEulerFieldToSwarm()

PetscErrorCode InterpolateEulerFieldToSwarm ( UserCtx user,
Vec  fieldLocal_cellCentered,
const char *  fieldName,
const char *  swarmOutFieldName 
)

Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles, using a robust, PETSc-idiomatic two-stage process.

This function first converts the cell-centered input data to corner-node data, storing this intermediate result in a PETSc Vec to correctly handle the communication of ghost-point information across parallel ranks. It then performs a final trilinear interpolation from the ghosted corner data to each particle's location.

Workflow:

  1. Create temporary PETSc Vecs (cornerGlobal, cornerLocal) to manage the intermediate corner-node data, using the existing nodal DMDA (user->fda).
  2. Call a dispatch macro that uses the runtime block size (bs) to select the correct underlying center-to-corner function, writing results into the owned portion of cornerLocal.
  3. Assemble the corner data into cornerGlobal (DMLocalToGlobal), then perform a ghost-point exchange (DMGlobalToLocal) to fill the ghost regions of cornerLocal with boundary data from neighbouring ranks.
  4. Loop over all local particles. For each particle: (a) use its global cell index to address the ghosted corner array; (b) check that the particle's interpolation stencil is fully contained within the owned+ghost region, logging a warning and zeroing the result if it is not; (c) perform the final trilinear interpolation using the ghosted cornerLocal data.
  5. Restore all PETSc objects to prevent memory leaks.
Parameters
[in] user User context with DMDA, DMSwarm, etc.
[in] fieldLocal_cellCentered Ghosted local Vec (from fda) with cell-centered data (e.g., Ucat).
[in] fieldName Human-readable field name for logging (e.g., "Ucat").
[in] swarmOutFieldName Name of the DMSwarm field where interpolation results go.
Returns
PetscErrorCode 0 on success.
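A minimal calling sketch may help; the names user->lP and "P_particle" below are hypothetical stand-ins for a ghosted, cell-centered scalar Vec and an already-registered dof=1 DMSwarm field, and are not defined in this file:

/* Hypothetical usage sketch: interpolate a ghosted cell-centered scalar field
 * onto the swarm. "user->lP" and "P_particle" are illustrative names only. */
PetscErrorCode InterpolatePressureToSwarm(UserCtx *user)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* The input Vec must already hold up-to-date ghost values
     (e.g. via DMGlobalToLocalBegin/End on the cell-centered field). */
  ierr = InterpolateEulerFieldToSwarm(user, user->lP,   /* ghosted cell-centered scalar (bs=1) */
                                      "P",              /* label used in log messages */
                                      "P_particle");    /* dof=1 DMSwarm output field */
  CHKERRQ(ierr);
  PetscFunctionReturn(0);
}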

Definition at line 1210 of file interpolation.c.

1215{
1216 PetscErrorCode ierr;
1217 DM fda = user->fda;
1218 DM swarm = user->swarm;
1219 PetscInt bs;
1220 DMDALocalInfo info;
1221 PetscMPIInt rank;
1222
1223 // Intermediate vectors to manage corner data and its ghosts
1224 Vec cornerGlobal, cornerLocal;
1225
1226 // Generic pointers to the raw data arrays
1227 void *cellCenterPtr_read;
1228 void *cornerPtr_read_with_ghosts;
1229
1230 // Swarm-related pointers
1231 PetscInt *cellIDs = NULL;
1232 PetscReal *weights = NULL;
1233 PetscInt *pids = NULL; // For logging particle IDs in warnings
1234 void *swarmOut = NULL;
1235 PetscInt nLocal;
1236
1237 PetscFunctionBegin;
1238 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1239 ierr = DMDAGetLocalInfo(fda, &info); CHKERRQ(ierr);
1240 ierr = VecGetBlockSize(fieldLocal_cellCentered, &bs); CHKERRQ(ierr);
1241 if (bs != 1 && bs != 3) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "BlockSize must be 1 or 3.");
1242
1243 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting for field '%s'... \n", fieldName);
1244
1245 /* STAGE 1: Center-to-Corner Calculation with Communication */
1246
1247 // (A) Get temporary Vecs from the DMDA pool to manage corner data
1248 ierr = DMGetGlobalVector(fda, &cornerGlobal); CHKERRQ(ierr);
1249 ierr = DMGetLocalVector(fda, &cornerLocal); CHKERRQ(ierr);
1250
1251 ierr = VecSet(cornerGlobal,0.0); CHKERRQ(ierr);
1252 ierr = VecSet(cornerLocal,0.0); CHKERRQ(ierr);
1253
1254 // (B) Get a read-only array from the input cell-centered vector
1255 ierr = DMDAVecGetArrayRead(fda, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);
1256
1257 // (C) Perform the center-to-corner interpolation into the owned portion of the LOCAL corner vector
1258 void *cornerPtr_write = NULL;
1259 ierr = DMDAVecGetArray(fda, cornerLocal, &cornerPtr_write); CHKERRQ(ierr);
1260
1261 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Starting center-to-corner interpolation for '%s'.\n", rank, fieldName);
1262
1263 // SINGLE, CLEAN CALL SITE: The macro handles the runtime dispatch based on 'bs'.
1264 ierr = InterpolateFieldFromCenterToCorner(bs, cellCenterPtr_read, cornerPtr_write, user); CHKERRQ(ierr);
1265
1266 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Finished center-to-corner interpolation for '%s'.\n", rank, fieldName);
1267 ierr = DMDAVecRestoreArray(fda, cornerLocal, &cornerPtr_write); CHKERRQ(ierr);
1268
1269 ierr = DMDAVecRestoreArrayRead(fda, fieldLocal_cellCentered, &cellCenterPtr_read); CHKERRQ(ierr);
1270
1271 //////////// DEBUG
1272 /*
1273 // Log a synchronized header message from all ranks.
1274 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerGlobal BEFORE ghost exchange...\n");
1275
1276 // VecView prints to a PETSc viewer. PETSC_VIEWER_STDOUT_WORLD is a global, synchronized viewer.
1277 // We only need to call it if logging is active for this function.
1278 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1279 ierr = VecView(cornerGlobal, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
1280 }
1281
1282 // Log a synchronized footer message.
1283 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerGlobal.\n");
1284 */
1285 ////////// DEBUG
1286
1287 // ierr = PetscBarrier((PetscObject)cornerGlobal); CHKERRQ(ierr);
1288
1289 // (D) CRITICAL STEP: Communicate the newly computed corner data to fill ghost regions
1290 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Beginning ghost exchange for corner data...\n", rank);
1291 ierr = DMLocalToGlobalBegin(fda, cornerLocal, INSERT_VALUES, cornerGlobal); CHKERRQ(ierr);
1292 ierr = DMLocalToGlobalEnd(fda, cornerLocal, INSERT_VALUES, cornerGlobal); CHKERRQ(ierr);
1293
1294 ierr = DMGlobalToLocalBegin(fda, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
1295 ierr = DMGlobalToLocalEnd(fda, cornerGlobal, INSERT_VALUES, cornerLocal); CHKERRQ(ierr);
1296
1297 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Ghost exchange for corner data complete.\n", rank);
1298
1299 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: About to get array from FDA on all ranks.\n");
1300 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1301 ierr = DMView(user->fda, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
1302 }
1303 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished DMView.\n");
1304
1305 /////// DEBUG
1306 /*
1307 // Log a synchronized header message from all ranks.
1308 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Dumping cornerLocal AFTER ghost exchange...\n");
1309
1310 // Here, we want each rank to print its own local vector.
1311 // PETSC_VIEWER_STDOUT_SELF is the correct tool for this. The LOG_ALLOW_SYNC
1312 // wrapper will ensure the output from each rank is printed sequentially.
1313 if (is_function_allowed(__func__) && (int)(LOG_DEBUG) <= (int)get_log_level()) {
1314 PetscMPIInt rank_d;
1315 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank_d); CHKERRQ(ierr);
1316
1317 // Use a synchronized print to create a clean header for each rank's output
1318 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\n--- cornerLocal (Rank %d) ---\n", rank_d);
1319 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
1320
1321 // Print the local vector's contents for this rank
1322 ierr = VecView(cornerLocal, PETSC_VIEWER_STDOUT_SELF); CHKERRQ(ierr);
1323
1324 // Use a synchronized print to create a clean footer
1325 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "--- End cornerLocal (Rank %d) ---\n", rank_d);
1326 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
1327 }
1328
1329 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "DEBUG: Finished dumping cornerLocal.\n");
1330 */
1331 ///// DEBUG
1332
1333 /* STAGE 2: Particle Interpolation using Ghosted Corner Data */
1334
1335 // (E) Get the local, GHOSTED array of corner data for the final interpolation step
1336 ierr = DMDAVecGetArrayRead(fda, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);
1337
1338 // (F) Retrieve swarm fields for the particle loop
1339 ierr = DMSwarmGetLocalSize(swarm, &nLocal); CHKERRQ(ierr);
1340 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1341 ierr = DMSwarmGetField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
1342 ierr = DMSwarmGetField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1343 ierr = DMSwarmGetField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1344
1345 // (G) Loop over each local particle
1346 for (PetscInt p = 0; p < nLocal; p++) {
1347 PetscInt iCell_global = cellIDs[3 * p + 0];
1348 PetscInt jCell_global = cellIDs[3 * p + 1];
1349 PetscInt kCell_global = cellIDs[3 * p + 2];
1350
1351 // The ghosted corner array returned by DMDAVecGetArrayRead is indexed with GLOBAL indices,
1352 PetscInt iCell_local = iCell_global; // so the global cell index is used directly (shift by info.gxs disabled)
1353 PetscInt jCell_local = jCell_global; // (likewise info.gys)
1354 PetscInt kCell_local = kCell_global; // (and info.gzs)
1355
1356 // Safety check: Ensure the entire 8-node stencil is within the valid memory
1357 // region (owned + ghosts) of the local array.
1358
1359 LOG_ALLOW(LOCAL, LOG_DEBUG, "The ghosted upper bounds on rank %d are: %d, %d, %d.\n", rank, info.gxs + info.gxm, info.gys + info.gym, info.gzs + info.gzm);
1360
1361 if (iCell_local < 0 || iCell_local >= info.gxs + info.gxm - 1 ||
1362 jCell_local < 0 || jCell_local >= info.gys + info.gym - 1 ||
1363 kCell_local < 0 || kCell_local >= info.gzs + info.gzm - 1)
1364 {
1365 LOG_ALLOW(LOCAL, LOG_WARNING,
1366 "[Rank %d] Particle PID %lld in global cell (%d,%d,%d) is in an un-interpolatable region (requires ghosts of ghosts or is out of bounds). Zeroing field '%s'.\n",
1367 rank, (long long)pids[p], iCell_global, jCell_global, kCell_global, fieldName);
1368
1369 if (bs == 3) {
1370 ((PetscReal*)swarmOut)[3*p + 0] = 0.0;
1371 ((PetscReal*)swarmOut)[3*p + 1] = 0.0;
1372 ((PetscReal*)swarmOut)[3*p + 2] = 0.0;
1373 } else {
1374 ((PetscReal*)swarmOut)[p] = 0.0;
1375 }
1376 continue;
1377 }
1378
1379 PetscReal a1 = weights[3 * p + 0];
1380 PetscReal a2 = weights[3 * p + 1];
1381 PetscReal a3 = weights[3 * p + 2];
1382
1383 ierr = InterpolateEulerFieldToSwarmForParticle(
1384 fieldName,
1385 cornerPtr_read_with_ghosts,
1386 iCell_local, jCell_local, kCell_local,
1387 a1, a2, a3,
1388 swarmOut, p, bs);
1389 CHKERRQ(ierr);
1390 }
1391
1392 /* (H) Restore all retrieved arrays and vectors */
1393 ierr = DMDAVecRestoreArrayRead(fda, cornerLocal, &cornerPtr_read_with_ghosts); CHKERRQ(ierr);
1394 ierr = DMSwarmRestoreField(swarm, swarmOutFieldName, NULL, NULL, &swarmOut); CHKERRQ(ierr);
1395 ierr = DMSwarmRestoreField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
1396 ierr = DMSwarmRestoreField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&pids); CHKERRQ(ierr);
1397 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
1398
1399 ierr = DMRestoreLocalVector(fda, &cornerLocal); CHKERRQ(ierr);
1400 ierr = DMRestoreGlobalVector(fda, &cornerGlobal); CHKERRQ(ierr);
1401
1402 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished for field '%s'.\n", fieldName);
1403 PetscFunctionReturn(0);
1404}
static PetscErrorCode InterpolateEulerFieldToSwarmForParticle(const char *fieldName, void *fieldPtr, PetscInt iCell, PetscInt jCell, PetscInt kCell, PetscReal a1, PetscReal a2, PetscReal a3, void *swarmOut, PetscInt p, PetscInt blockSize)
Interpolates a single field (scalar or vector) for one particle, storing the result in a swarm array.
#define InterpolateFieldFromCenterToCorner(blockSize, centfield_ptr, corner_ptr, user_ctx)
Macro to dispatch to the correct scalar or vector center-to-corner function based on a runtime block size.

◆ InterpolateAllFieldsToSwarm()

PetscErrorCode InterpolateAllFieldsToSwarm ( UserCtx *  user)

Interpolates all relevant fields from the DMDA to the DMSwarm.

Currently, it interpolates:

  • user->Ucat (vector field) into the DMSwarm field "velocity".

To add more fields, duplicate the call to InterpolateEulerFieldToSwarm and provide (a sketch follows this list):

  • The global Vec for that field (e.g. user->Tcat for temperature),
  • A human-readable field name (for logging),
  • A DMSwarm output field name (e.g. "swarmTemperature").
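As a sketch of the above, assuming user->lTcat is a ghosted, cell-centered scalar Vec and "temperature" is the chosen dof=1 swarm field (both names are illustrative, not members guaranteed to exist in UserCtx):

/* (1) During swarm setup, before DMSwarmFinalizeFieldRegister: */
ierr = DMSwarmRegisterPetscDatatypeField(user->swarm, "temperature", 1, PETSC_REAL); CHKERRQ(ierr);

/* (2) Inside InterpolateAllFieldsToSwarm, alongside the Ucat call: */
ierr = InterpolateEulerFieldToSwarm(user, user->lTcat,   /* hypothetical ghosted scalar Vec */
                                    "Tcat",              /* logging label */
                                    "temperature");      /* dof=1 DMSwarm field */
CHKERRQ(ierr);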
Parameters
[in,out] user Pointer to a UserCtx containing:
  • user->da (DM for the grid),
  • user->swarm (DMSwarm for particles),
  • user->Ucat (Vec for the vector field),
  • possibly more fields like user->Tcat, user->Pcat, etc.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1667 of file interpolation.c.

1668{
1669 PetscErrorCode ierr;
1670 PetscMPIInt rank;
1671 PetscFunctionBegin;
1672
1673 PROFILE_FUNCTION_BEGIN;
1674
1675 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);
1676
1677 /*
1678 1) Interpolate the velocity field (user->Ucat) into DMSwarm's "velocity" field.
1679 - The function InterpolateEulerFieldToSwarm() loops over all local particles,
1680 retrieves iCell, jCell, kCell, (a1,a2,a3) from the DMSwarm,
1681 and calls the appropriate trilinear interpolation routine.
1682
1683 - "Ucat" => just a label used in logging or debugging.
1684 - "velocity" => the DMSwarm field name where we store the result
1685 (dof=3, since user->Ucat has blockSize=3).
1686 */
1687
1688 LOG_ALLOW(GLOBAL, LOG_INFO,
1689 "InterpolateAllFieldsToSwarm: Interpolation of Ucat to velocity begins on rank %d.\n", rank);
1690 // Make sure to pass the ghosted *LOCAL* vector (user->lUcat) to the function below!
1691 ierr = InterpolateEulerFieldToSwarm(user, user->lUcat,
1692 "Ucat",
1693 "velocity"); CHKERRQ(ierr);
1694 /*
1695 2) (OPTIONAL) If you have more fields, you can interpolate them similarly.
1696
1697 For example, if user->Tcat is a scalar Vec for "temperature" and the DMSwarm
1698 has a field "Temperature" (with dof=1), you could do:
1699
1700 ierr = InterpolateEulerFieldToSwarm(user, user->Tcat,
1701 "temperature",
1702 "swarmTemperature"); CHKERRQ(ierr);
1703
1704 For pressure:
1705 ierr = InterpolateEulerFieldToSwarm(user, user->Pcat,
1706 "pressure",
1707 "swarmPressure"); CHKERRQ(ierr);
1708
1709 ...and so forth.
1710 */
1711
1712 /*
1713 3) Optionally, synchronize or log that all fields are done
1714 */
1715 ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
1717 "[rank %d]Completed Interpolateting all fields to the swarm.\n",rank);
1718
1720
1721 PetscFunctionReturn(0);
1722}
PetscErrorCode InterpolateEulerFieldToSwarm(UserCtx *user, Vec fieldLocal_cellCentered, const char *fieldName, const char *swarmOutFieldName)
Interpolates a cell-centered field (scalar or vector) onto DMSwarm particles, using a robust, PETSc-idiomatic two-stage process.
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:767
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:758
Vec lUcat
Definition variables.h:657

◆ InterpolateCornerToFaceCenter_Scalar()

PetscErrorCode InterpolateCornerToFaceCenter_Scalar ( PetscReal ***  corner_arr,
PetscReal ***  faceX_arr,
PetscReal ***  faceY_arr,
PetscReal ***  faceZ_arr,
UserCtx *  user 
)

Interpolates a scalar field from corner nodes to all face centers.

This routine computes the average of the four corner-node values defining each face of a hexahedral cell (the X-face case is written out after the list below):

  • X-faces (perpendicular to X): face between (i-1,i) in X-dir
  • Y-faces (perpendicular to Y): face between (j-1,j) in Y-dir
  • Z-faces (perpendicular to Z): face between (k-1,k) in Z-dir
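In index form, the X-face average at node index i, with the ghosted corner array indexed as corner_arr[k][j][i] exactly as in the loops below, is

\[
\phi^{X}_{k,j,i} \;=\; \tfrac{1}{4}\left(\phi_{k,j,i} + \phi_{k+1,j,i} + \phi_{k,j+1,i} + \phi_{k+1,j+1,i}\right),
\]

and the Y- and Z-face averages are computed analogously, averaging over the two index directions tangent to the face.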
Parameters
[in] corner_arr Ghosted node-centered array (global indexing) from user->fda.
[out] faceX_arr Local array for X-faces sized [zm][ym][xm+1].
[out] faceY_arr Local array for Y-faces sized [zm][ym+1][xm].
[out] faceZ_arr Local array for Z-faces sized [zm+1][ym][xm].
[in] user User context containing DMDA 'fda' and GetOwnedCellRange.
Returns
PetscErrorCode 0 on success, non-zero on failure.

Definition at line 2348 of file interpolation.c.

2354{
2355 PetscErrorCode ierr;
2356 DMDALocalInfo info;
2357 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2358
2359 // Determine owned-cell ranges based on corner-node ownership
2360 PetscInt xs, xm, ys, ym, zs, zm;
2361 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2362 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2363 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2364
2365 // Global exclusive end indices for cells
2366 PetscInt xe = xs + xm;
2367 PetscInt ye = ys + ym;
2368 PetscInt ze = zs + zm;
2369
2370 // --- X‐faces: loops k=zs..ze-1, j=ys..ye-1, i=xs..xe (xm+1 faces per row) ---
2371 for (PetscInt k = zs; k < ze; ++k) {
2372 PetscInt k_loc = k - zs;
2373 for (PetscInt j = ys; j < ye; ++j) {
2374 PetscInt j_loc = j - ys;
2375 for (PetscInt i = xs; i <= xe; ++i) {
2376 PetscInt i_loc = i - xs; // 0..xm
2377 // Average the four corners of the Y-Z face at X = i
2378 PetscReal sum = corner_arr[k ][j ][i]
2379 + corner_arr[k+1][j ][i]
2380 + corner_arr[k ][j+1][i]
2381 + corner_arr[k+1][j+1][i];
2382 faceX_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2383 }
2384 }
2385 }
2386
2387 // --- Y‐faces: loops k=zs..ze-1, j=ys..ye (ym+1 faces), i=xs..xe-1 ---
2388 for (PetscInt k = zs; k < ze; ++k) {
2389 PetscInt k_loc = k - zs;
2390 for (PetscInt j = ys; j <= ye; ++j) {
2391 PetscInt j_loc = j - ys; // 0..ym
2392 for (PetscInt i = xs; i < xe; ++i) {
2393 PetscInt i_loc = i - xs;
2394 // Average the four corners of the X-Z face at Y = j
2395 PetscReal sum = corner_arr[k ][j][i ]
2396 + corner_arr[k+1][j][i ]
2397 + corner_arr[k ][j][i+1]
2398 + corner_arr[k+1][j][i+1];
2399 faceY_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2400 }
2401 }
2402 }
2403
2404 // --- Z‐faces: loops k=zs..ze (zm+1), j=ys..ye-1, i=xs..xe-1 ---
2405 for (PetscInt k = zs; k <= ze; ++k) {
2406 PetscInt k_loc = k - zs;
2407 for (PetscInt j = ys; j < ye; ++j) {
2408 PetscInt j_loc = j - ys;
2409 for (PetscInt i = xs; i < xe; ++i) {
2410 PetscInt i_loc = i - xs;
2411 // Average the four corners of the X-Y face at Z = k
2412 PetscReal sum = corner_arr[k][j ][i ]
2413 + corner_arr[k][j ][i+1]
2414 + corner_arr[k][j+1][i ]
2415 + corner_arr[k][j+1][i+1];
2416 faceZ_arr[k_loc][j_loc][i_loc] = sum * 0.25;
2417 }
2418 }
2419 }
2420
2421 return 0;
2422}

◆ InterpolateCornerToFaceCenter_Vector()

PetscErrorCode InterpolateCornerToFaceCenter_Vector ( Cmpnts ***  corner_arr,
Cmpnts ***  faceX_arr,
Cmpnts ***  faceY_arr,
Cmpnts ***  faceZ_arr,
UserCtx *  user 
)

Interpolates a vector field from corner nodes to all face centers.

Identical to the scalar version, except it averages each component of the Cmpnts struct at the four corner-nodes per face.

Parameters
[in] corner_arr Ghosted 3-component array (global node indices).
[out] faceX_arr Local array of Cmpnts for X-faces sized [zm][ym][xm+1].
[out] faceY_arr Local array of Cmpnts for Y-faces sized [zm][ym+1][xm].
[out] faceZ_arr Local array of Cmpnts for Z-faces sized [zm+1][ym][xm].
[in] user User context containing DMDA 'fda'.
Returns
PetscErrorCode 0 on success.

Definition at line 2438 of file interpolation.c.

2444{
2445 PetscErrorCode ierr;
2446 DMDALocalInfo info;
2447 PetscMPIInt rank;
2448
2449 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2451 "Rank %d starting InterpolateFieldFromCornerToFaceCenter_Vector.\n", rank);
2452
2453 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2454
2455 PetscInt xs, xm, ys, ym, zs, zm;
2456 ierr = GetOwnedCellRange(&info, 0, &xs, &xm); CHKERRQ(ierr);
2457 ierr = GetOwnedCellRange(&info, 1, &ys, &ym); CHKERRQ(ierr);
2458 ierr = GetOwnedCellRange(&info, 2, &zs, &zm); CHKERRQ(ierr);
2459
2460 PetscInt xe = xs + xm;
2461 PetscInt ye = ys + ym;
2462 PetscInt ze = zs + zm;
2463
2464 // X-faces
2465 for (PetscInt k = zs; k < ze; ++k) {
2466 PetscInt k_loc = k - zs;
2467 for (PetscInt j = ys; j < ye; ++j) {
2468 PetscInt j_loc = j - ys;
2469 for (PetscInt i = xs; i <= xe; ++i) {
2470 PetscInt i_loc = i - xs;
2471 Cmpnts sum = {0,0,0};
2472 sum.x = corner_arr[k ][j ][i].x + corner_arr[k+1][j ][i].x
2473 + corner_arr[k ][j+1][i].x + corner_arr[k+1][j+1][i].x;
2474 sum.y = corner_arr[k ][j ][i].y + corner_arr[k+1][j ][i].y
2475 + corner_arr[k ][j+1][i].y + corner_arr[k+1][j+1][i].y;
2476 sum.z = corner_arr[k ][j ][i].z + corner_arr[k+1][j ][i].z
2477 + corner_arr[k ][j+1][i].z + corner_arr[k+1][j+1][i].z;
2478 faceX_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2479 faceX_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2480 faceX_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2481 }
2482 }
2483 }
2484
2486 "Rank %d x-face Interpolation complete.\n", rank);
2487
2488 // Y-faces
2489 for (PetscInt k = zs; k < ze; ++k) {
2490 PetscInt k_loc = k - zs;
2491 for (PetscInt j = ys; j <= ye; ++j) {
2492 PetscInt j_loc = j - ys;
2493 for (PetscInt i = xs; i < xe; ++i) {
2494 PetscInt i_loc = i - xs;
2495 Cmpnts sum = {0,0,0};
2496 sum.x = corner_arr[k ][j][i ].x + corner_arr[k+1][j][i ].x
2497 + corner_arr[k ][j][i+1].x + corner_arr[k+1][j][i+1].x;
2498 sum.y = corner_arr[k ][j][i ].y + corner_arr[k+1][j][i ].y
2499 + corner_arr[k ][j][i+1].y + corner_arr[k+1][j][i+1].y;
2500 sum.z = corner_arr[k ][j][i ].z + corner_arr[k+1][j][i ].z
2501 + corner_arr[k ][j][i+1].z + corner_arr[k+1][j][i+1].z;
2502 faceY_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2503 faceY_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2504 faceY_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2505 }
2506 }
2507 }
2508
2510 "Rank %d y-face Interpolation complete.\n", rank);
2511
2512 // Z-faces
2513 for (PetscInt k = zs; k <= ze; ++k) {
2514 PetscInt k_loc = k - zs;
2515 for (PetscInt j = ys; j < ye; ++j) {
2516 PetscInt j_loc = j - ys;
2517 for (PetscInt i = xs; i < xe; ++i) {
2518 PetscInt i_loc = i - xs;
2519 Cmpnts sum = {0,0,0};
2520 sum.x = corner_arr[k][j ][i ].x + corner_arr[k][j ][i+1].x
2521 + corner_arr[k][j+1][i ].x + corner_arr[k][j+1][i+1].x;
2522 sum.y = corner_arr[k][j ][i ].y + corner_arr[k][j ][i+1].y
2523 + corner_arr[k][j+1][i ].y + corner_arr[k][j+1][i+1].y;
2524 sum.z = corner_arr[k][j ][i ].z + corner_arr[k][j ][i+1].z
2525 + corner_arr[k][j+1][i ].z + corner_arr[k][j+1][i+1].z;
2526 faceZ_arr[k_loc][j_loc][i_loc].x = sum.x * 0.25;
2527 faceZ_arr[k_loc][j_loc][i_loc].y = sum.y * 0.25;
2528 faceZ_arr[k_loc][j_loc][i_loc].z = sum.z * 0.25;
2529 }
2530 }
2531 }
2532
2534 "Rank %d z-face Interpolation complete.\n", rank);
2535
2536 return 0;
2537}