PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
Functions
io.h File Reference

Public interface for data input/output routines. More...

#include "variables.h"
#include "logging.h"
#include "Boundaries.h"
Include dependency graph for io.h:
This graph shows which files directly or indirectly include this file:

Go to the source code of this file.

Functions

PetscErrorCode ReadGridGenerationInputs (UserCtx *user)
 Parses command-line options for a programmatically generated grid for a SINGLE block.
 
PetscErrorCode PopulateFinestUserGridResolutionFromOptions (UserCtx *finest_users, PetscInt nblk)
 Parses grid resolution arrays (-im, -jm, -km) once and applies them to all finest-grid blocks.
 
PetscErrorCode ReadGridFile (UserCtx *user)
 Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
 
PetscErrorCode VerifyPathExistence (const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
 A parallel-safe helper to verify the existence of a generic file or directory path.
 
PetscErrorCode ReadSimulationFields (UserCtx *user, PetscInt ti)
 Reads binary field data for velocity, pressure, and other required vectors.
 
PetscErrorCode ReadFieldData (UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
 Reads data for a specific field from a file into the provided vector.
 
PetscErrorCode ReadStatisticalFields (UserCtx *user, PetscInt ti)
 Reads statistical fields used for time-averaged simulations.
 
PetscErrorCode ReadLESFields (UserCtx *user, PetscInt ti)
 Reads LES-related fields used in turbulence modeling.
 
PetscErrorCode ReadRANSFields (UserCtx *user, PetscInt ti)
 Reads RANS-related fields for turbulence modeling.
 
PetscErrorCode WriteFieldData (UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
 Writes data from a specific PETSc vector to a file.
 
PetscErrorCode WriteSimulationFields (UserCtx *user)
 Writes simulation fields to files.
 
PetscErrorCode WriteStatisticalFields (UserCtx *user)
 Writes statistical fields for averaging purposes.
 
PetscErrorCode WriteLESFields (UserCtx *user)
 Writes LES-related fields.
 
PetscErrorCode WriteRANSFields (UserCtx *user)
 Writes RANS-related fields.
 
PetscErrorCode WriteSwarmField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Writes data from a specific field in a PETSc Swarm to a file.
 
PetscErrorCode WriteSwarmIntField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Writes integer data from a specific PETSc Swarm field to a file.
 
PetscErrorCode WriteAllSwarmFields (UserCtx *user)
 Writes a predefined set of PETSc Swarm fields to files.
 
PetscInt ReadDataFileToArray (const char *filename, double **data_out, PetscInt *Nout, MPI_Comm comm)
 
PetscInt CreateVTKFileFromMetadata (const char *filename, const VTKMetaData *meta, MPI_Comm comm)
 
PetscErrorCode VecToArrayOnRank0 (Vec inVec, PetscInt *N, double **arrayOut)
 Gathers the contents of a distributed PETSc Vec into a single array on rank 0.
 
PetscErrorCode SwarmFieldToArrayOnRank0 (DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
 Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
 
PetscErrorCode ReadSwarmField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Reads data from a file into a specified field of a PETSc DMSwarm.
 
PetscErrorCode ReadSwarmIntField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Reads integer swarm data by using ReadFieldData and casting the result.
 
PetscErrorCode ReadAllSwarmFields (UserCtx *user, PetscInt ti)
 Reads multiple fields (positions, velocity, CellID, and weight) into a DMSwarm.
 
PetscErrorCode ReadPositionsFromFile (PetscInt timeIndex, UserCtx *user, double **coordsArray, PetscInt *Ncoords)
 Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
 
PetscErrorCode ReadFieldDataToRank0 (PetscInt timeIndex, const char *fieldName, UserCtx *user, double **scalarArray, PetscInt *Nscalars)
 Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
 
PetscErrorCode DisplayBanner (SimCtx *simCtx)
 Displays a structured banner summarizing the simulation configuration.
 
PetscErrorCode StringToBCFace (const char *str, BCFace *face_out)
 Converts a string representation of a face to a BCFace enum.
 
PetscErrorCode StringToBCType (const char *str, BCType *type_out)
 Converts a string representation of a BC type to a BCType enum.
 
PetscErrorCode StringToBCHandlerType (const char *str, BCHandlerType *handler_out)
 Converts a string representation of a handler to a BCHandlerType enum.
 
PetscErrorCode ValidateBCHandlerForBCType (BCType type, BCHandlerType handler)
 Validates that a specific handler is compatible with a general BC type.
 
void FreeBC_ParamList (BC_Param *head)
 Frees the memory allocated for a linked list of BC_Param structs.
 
PetscErrorCode GetBCParamReal (BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
 Searches a BC_Param linked list for a key and returns its value as a double.
 
PetscErrorCode GetBCParamBool (BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found)
 Searches a BC_Param linked list for a key and returns its value as a bool.
 
PetscErrorCode ParseAllBoundaryConditions (UserCtx *user, const char *bcs_input_filename)
 Parses the boundary conditions file to configure the type, handler, and any associated parameters for all 6 global faces of the domain.
 
PetscErrorCode DeterminePeriodicity (SimCtx *simCtx)
 Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.
 
void TrimWhitespace (char *str)
 Helper function to trim leading/trailing whitespace from a string.
 
PetscErrorCode ParsePostProcessingSettings (SimCtx *simCtx)
 Initializes post-processing settings from a config file and command-line overrides.
 
PetscErrorCode ParseScalingInformation (SimCtx *simCtx)
 Parses physical scaling parameters from command-line options.
 

Detailed Description

Public interface for data input/output routines.

This header declares functions responsible for parsing grid geometry information, either from command-line options for programmatically generated grids or by reading the header of a grid definition file.

Definition in file io.h.

Function Documentation

◆ ReadGridGenerationInputs()

PetscErrorCode ReadGridGenerationInputs ( UserCtx *user)

Parses command-line options for a programmatically generated grid for a SINGLE block.

This function reads all per-block array options related to grid geometry, such as dimensions (-im), domain bounds (-xMins), and stretching ratios (-rxs). It then populates the fields of the provided UserCtx struct using its internal block index user->_this.

Parameters
userPointer to the UserCtx for a specific block. The function will populate the geometric fields (IM, Min_X, rx, etc.) within this struct.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

This function is responsible for reading all per-block array options related to grid geometry, such as dimensions (-im), domain bounds (-xMins, -xMaxs), and stretching ratios (-rxs). It reads the entire array for each option, then uses the block index stored in user->_this to select the correct value and populate the fields of the provided UserCtx struct.

Parameters
userPointer to the UserCtx for a specific block. The function will populate the geometric fields (IM, Min_X, rx, etc.) within this struct.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

Definition at line 77 of file io.c.

78{
79 PetscErrorCode ierr;
80 SimCtx *simCtx = user->simCtx;
81 PetscInt nblk = simCtx->block_number;
82 PetscInt block_index = user->_this;
83 PetscBool found;
84
85 // Temporary arrays to hold the parsed values for ALL blocks
86 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL, *cgrids = NULL;
87 PetscReal *xMins = NULL, *xMaxs = NULL, *rxs = NULL;
88 PetscReal *yMins = NULL, *yMaxs = NULL, *rys = NULL;
89 PetscReal *zMins = NULL, *zMaxs = NULL, *rzs = NULL;
90
91 PetscFunctionBeginUser;
93
94 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Reading generated grid inputs for block %d.\n", simCtx->rank, block_index);
95
96 if (block_index >= nblk) {
97 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block index %d is out of range for nblk=%d", block_index, nblk);
98 }
99
100 // --- Allocate temporary storage for all array options ---
101 ierr = PetscMalloc4(nblk, &IMs, nblk, &JMs, nblk, &KMs, nblk, &cgrids); CHKERRQ(ierr);
102 ierr = PetscMalloc6(nblk, &xMins, nblk, &xMaxs, nblk, &rxs, nblk, &yMins, nblk, &yMaxs, nblk, &rys); CHKERRQ(ierr);
103 ierr = PetscMalloc3(nblk, &zMins, nblk, &zMaxs, nblk, &rzs); CHKERRQ(ierr);
104
105 // --- Set default values for the temporary arrays ---
106 for (PetscInt i = 0; i < nblk; ++i) {
107 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10; cgrids[i] = 0;
108 xMins[i] = 0.0; xMaxs[i] = 1.0; rxs[i] = 1.0;
109 yMins[i] = 0.0; yMaxs[i] = 1.0; rys[i] = 1.0;
110 zMins[i] = 0.0; zMaxs[i] = 1.0; rzs[i] = 1.0;
111 }
112
113 // --- Parse the array options from the command line / control file ---
114 PetscInt count;
115 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
116 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
117 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
118 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMins", xMins, &count, &found); CHKERRQ(ierr);
119 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMaxs", xMaxs, &count, &found); CHKERRQ(ierr);
120 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rxs", rxs, &count, &found); CHKERRQ(ierr);
121 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMins", yMins, &count, &found); CHKERRQ(ierr);
122 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMaxs", yMaxs, &count, &found); CHKERRQ(ierr);
123 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rys", rys, &count, &found); CHKERRQ(ierr);
124 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMins", zMins, &count, &found); CHKERRQ(ierr);
125 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMaxs", zMaxs, &count, &found); CHKERRQ(ierr);
126 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rzs", rzs, &count, &found); CHKERRQ(ierr);
127 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-cgrids", cgrids, &count, &found); CHKERRQ(ierr);
128
129 // --- Assign the parsed values to the specific UserCtx struct passed in ---
130 user->IM = IMs[block_index];
131 user->JM = JMs[block_index];
132 user->KM = KMs[block_index];
133 user->Min_X = xMins[block_index];
134 user->Max_X = xMaxs[block_index];
135 user->rx = rxs[block_index];
136 user->Min_Y = yMins[block_index];
137 user->Max_Y = yMaxs[block_index];
138 user->ry = rys[block_index];
139 user->Min_Z = zMins[block_index];
140 user->Max_Z = zMaxs[block_index];
141 user->rz = rzs[block_index];
142 user->cgrid = cgrids[block_index];
143
144 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d grid generation inputs set: IM=%d, JM=%d, KM=%d\n",
145 simCtx->rank, block_index, user->IM, user->JM, user->KM);
146 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d bounds: X=[%.2f, %.2f], Y=[%.2f, %.2f], Z=[%.2f, %.2f]\n",
147 simCtx->rank, block_index, user->Min_X, user->Max_X, user->Min_Y, user->Max_Y, user->Min_Z, user->Max_Z);
148
149 // --- Clean up temporary storage ---
150 ierr = PetscFree4(IMs, JMs, KMs, cgrids); CHKERRQ(ierr);
151 ierr = PetscFree6(xMins, xMaxs, rxs, yMins, yMaxs, rys); CHKERRQ(ierr);
152 ierr = PetscFree3(zMins, zMaxs, rzs); CHKERRQ(ierr);
153
155 PetscFunctionReturn(0);
156}
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:45
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:200
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:746
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:32
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:737
PetscMPIInt rank
Definition variables.h:592
PetscInt cgrid
Definition variables.h:748
PetscInt block_number
Definition variables.h:654
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:736
PetscReal Min_X
Definition variables.h:743
PetscInt KM
Definition variables.h:742
PetscInt _this
Definition variables.h:746
PetscReal ry
Definition variables.h:747
PetscReal Max_Y
Definition variables.h:743
PetscReal rz
Definition variables.h:747
PetscInt JM
Definition variables.h:742
PetscReal Min_Z
Definition variables.h:743
PetscReal Max_X
Definition variables.h:743
PetscReal Min_Y
Definition variables.h:743
PetscInt IM
Definition variables.h:742
PetscReal rx
Definition variables.h:747
PetscReal Max_Z
Definition variables.h:743
The master context for the entire simulation.
Definition variables.h:589
Here is the caller graph for this function:

◆ PopulateFinestUserGridResolutionFromOptions()

PetscErrorCode PopulateFinestUserGridResolutionFromOptions ( UserCtx *finest_users,
PetscInt  nblk 
)

Parses grid resolution arrays (-im, -jm, -km) once and applies them to all finest-grid blocks.

This helper centralizes one-time resolution ingestion for analytical grid setup. It fills IM/JM/KM in each element of the finest-level UserCtx array.

Parameters
finest_usersPointer to the finest-level UserCtx array (length nblk).
nblkNumber of blocks in the finest-level array.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

Definition at line 161 of file io.c.

162{
163 PetscErrorCode ierr;
164 PetscBool found;
165 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL;
166 SimCtx *simCtx = NULL;
167
168 PetscFunctionBeginUser;
169
170 if (!finest_users) {
171 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "finest_users cannot be NULL.");
172 }
173 if (nblk <= 0) {
174 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nblk must be positive. Got %d.", nblk);
175 }
176 simCtx = finest_users[0].simCtx;
177
178 ierr = PetscMalloc3(nblk, &IMs, nblk, &JMs, nblk, &KMs); CHKERRQ(ierr);
179 for (PetscInt i = 0; i < nblk; ++i) {
180 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10;
181 }
182
183 PetscInt count;
184 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
185 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
186 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
187
188 for (PetscInt bi = 0; bi < nblk; ++bi) {
189 finest_users[bi].IM = IMs[bi];
190 finest_users[bi].JM = JMs[bi];
191 finest_users[bi].KM = KMs[bi];
192 if (simCtx) {
194 "Rank %d: Preloaded analytical grid resolution for block %d: IM=%d, JM=%d, KM=%d\n",
195 simCtx->rank, bi, finest_users[bi].IM, finest_users[bi].JM, finest_users[bi].KM);
196 }
197 }
198
199 ierr = PetscFree3(IMs, JMs, KMs); CHKERRQ(ierr);
200 PetscFunctionReturn(0);
201}
Here is the caller graph for this function:

◆ ReadGridFile()

PetscErrorCode ReadGridFile ( UserCtx *user)

Sets grid dimensions from a file for a SINGLE block using a one-time read cache.

This function uses a static-variable pattern to ensure the grid file header is read only once, collectively, by all processes on the first call. Subsequent calls simply retrieve the pre-loaded and broadcasted data for the specified block.

Parameters
userPointer to the UserCtx for a specific block. This function will populate the IM, JM, and KM fields.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

This function uses a static-variable pattern to ensure the grid file header is read only once, collectively, by all processes. The first time any process calls this function, it triggers a collective operation where rank 0 reads the file and broadcasts the dimensions for all blocks. This data is stored in static "cached" arrays.

On every call (including the first), the function retrieves the dimensions for the specific block (identified by user->_this) from the cached arrays and populates the IM, JM, and KM fields of the provided UserCtx.

Parameters
userPointer to the UserCtx for a specific block.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

Definition at line 222 of file io.c.

223{
224 PetscErrorCode ierr;
225 SimCtx *simCtx = user->simCtx;
226 PetscInt block_index = user->_this;
227
228 PetscFunctionBeginUser;
230
231 // --- One-Time Read and Broadcast Logic ---
233 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "First call to ReadGridFile. Reading and broadcasting grid file header from '%s'...\n", simCtx->grid_file);
234 PetscMPIInt rank = simCtx->rank;
235 PetscInt nblk = simCtx->block_number;
236
237 if (rank == 0) {
238 FILE *fd = fopen(simCtx->grid_file, "r");
239 if (!fd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open file: %s", simCtx->grid_file);
240
241 fscanf(fd, "%d\n", &g_nblk_from_file);
242 // ---- Read first token; decide whether it is the header or nblk ----
243 char firstTok[32] = {0};
244 if (fscanf(fd, "%31s", firstTok) != 1)
245 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Empty grid file: %s", simCtx->grid_file);
246
247 if (strcmp(firstTok, "PICGRID") == 0) {
248 // Header is present – read nblk from the next line
249 if (fscanf(fd, "%d", &g_nblk_from_file) != 1)
250 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected number of blocks after \"PICGRID\" in %s", simCtx->grid_file);
251 } else {
252 // No header – the token we just read is actually nblk
253 g_nblk_from_file = (PetscInt)strtol(firstTok, NULL, 10);
254 }
255 if (g_nblk_from_file != nblk) {
256 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Mismatch: -nblk is %d but grid file specifies %d blocks.", nblk, g_nblk_from_file);
257 }
258
259 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
260 for (PetscInt i = 0; i < nblk; ++i) {
261 if (fscanf(fd, "%d %d %d\n", &g_IMs_from_file[i], &g_JMs_from_file[i], &g_KMs_from_file[i]) != 3) {
262 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected 3 integers for block %d in %s", i, simCtx->grid_file);
263 }
264 }
265 fclose(fd);
266 }
267
268 // Broadcast nblk to verify (optional, good practice)
269 ierr = MPI_Bcast(&g_nblk_from_file, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
270
271 // Allocate on other ranks before receiving the broadcast
272 if (rank != 0) {
273 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
274 }
275
276 // Broadcast the data arrays
277 ierr = MPI_Bcast(g_IMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
278 ierr = MPI_Bcast(g_JMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
279 ierr = MPI_Bcast(g_KMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
280
281 g_file_has_been_read = PETSC_TRUE;
282 LOG_ALLOW(GLOBAL, LOG_INFO, "Grid file header read and broadcast complete.\n");
283 }
284
285 // --- Per-Block Assignment Logic (runs on every call) ---
286 user->IM = g_IMs_from_file[block_index];
287 user->JM = g_JMs_from_file[block_index];
288 user->KM = g_KMs_from_file[block_index];
289
290 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Set file inputs for Block %d: IM=%d, JM=%d, KM=%d\n",
291 simCtx->rank, block_index, user->IM, user->JM, user->KM);
292
294 PetscFunctionReturn(0);
295}
static PetscInt * g_IMs_from_file
Caches the IM dimensions for all blocks read from the grid file.
Definition io.c:19
static PetscInt g_nblk_from_file
Stores the number of blocks read from the grid file.
Definition io.c:17
static PetscBool g_file_has_been_read
A flag to ensure the grid file is read only once.
Definition io.c:25
static PetscInt * g_KMs_from_file
Caches the KM dimensions for all blocks read from the grid file.
Definition io.c:23
static PetscInt * g_JMs_from_file
Caches the JM dimensions for all blocks read from the grid file.
Definition io.c:21
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Synchronized variant of LOG_ALLOW that coordinates log output across processes.
Definition logging.h:267
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:46
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:31
char grid_file[PETSC_MAX_PATH_LEN]
Definition variables.h:659
Here is the caller graph for this function:

◆ VerifyPathExistence()

PetscErrorCode VerifyPathExistence ( const char *  path,
PetscBool  is_dir,
PetscBool  is_optional,
const char *  description,
PetscBool *  exists 
)

A parallel-safe helper to verify the existence of a generic file or directory path.

This function centralizes the logic for checking arbitrary paths. Only Rank 0 performs the filesystem check, and the result is broadcast to all other processes. This ensures collective and synchronized decision-making across all ranks. It is intended for configuration files, source directories, etc., where the path is known completely.

Parameters
[in]pathThe full path to the file or directory to check.
[in]is_dirPETSC_TRUE if checking for a directory, PETSC_FALSE for a file.
[in]is_optionalPETSC_TRUE if the path is optional (results in a warning), PETSC_FALSE if mandatory (results in an error).
[in]descriptionA user-friendly description of the path for logging (e.g., "Grid file").
[out]existsThe result of the check (identical on all ranks).
Returns
PetscErrorCode

Definition at line 807 of file io.c.

808{
809 PetscErrorCode ierr;
810 PetscMPIInt rank;
811 MPI_Comm comm = PETSC_COMM_WORLD;
812
813 PetscFunctionBeginUser;
814 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
815
816 if (rank == 0) {
817 if (is_dir) {
818 ierr = PetscTestDirectory(path, 'r', exists); CHKERRQ(ierr);
819 } else {
820 ierr = PetscTestFile(path, 'r', exists); CHKERRQ(ierr);
821 }
822
823 if (!(*exists)) {
824 if (is_optional) {
825 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s not found at: %s (using defaults/ignoring).\n", description, path);
826 } else {
827 LOG_ALLOW(GLOBAL, LOG_ERROR, "Mandatory %s not found at: %s\n", description, path);
828 }
829 } else {
830 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found %s: %s\n", description, path);
831 }
832 }
833
834 // Broadcast the result from Rank 0
835 PetscMPIInt exists_int = (rank == 0) ? (PetscMPIInt)(*exists) : 0;
836 ierr = MPI_Bcast(&exists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
837 *exists = (PetscBool)exists_int;
838
839 // Collective error for mandatory files
840 if (!(*exists) && !is_optional) {
841 SETERRQ(comm, PETSC_ERR_FILE_OPEN, "Mandatory %s not found. Rank 0 expected it at '%s'. Check path and permissions.", description, path);
842 }
843
844 PetscFunctionReturn(0);
845}
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:28
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
Here is the caller graph for this function:

◆ ReadSimulationFields()

PetscErrorCode ReadSimulationFields ( UserCtx *user,
PetscInt  ti 
)

Reads binary field data for velocity, pressure, and other required vectors.

Reads contravariant velocity (Ucont) from vfield, Cartesian velocity (Ucat) from ufield, pressure (P), node state (Nvert_o), and optionally statistical quantities, LES, and RANS fields from binary files. Logs missing files but continues execution.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiThe timestep at which the simulation field data needs to be read.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads binary field data for velocity, pressure, and other required vectors.

This function reads contravariant velocity, Cartesian velocity, pressure, and node state fields from their respective binary files. It also conditionally reads LES, RANS, and statistical fields if they are enabled.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1276 of file io.c.

1277{
1278 PetscErrorCode ierr;
1279
1280 SimCtx *simCtx = user->simCtx;
1281 const char *source_path = NULL;
1282 const char *eulerian_ext = "dat";
1283
1284 if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
1285 source_path = simCtx->pps->source_dir;
1286 if (simCtx->pps->eulerianExt[0] != '\0') {
1287 eulerian_ext = simCtx->pps->eulerianExt;
1288 }
1289 } else if(simCtx->exec_mode == EXEC_MODE_SOLVER){
1290 source_path = simCtx->restart_dir;
1291 } else{
1292 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1293 }
1294
1295 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read simulation fields.\n");
1296
1297 // Set the current I/O directory context
1298 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer), "%s/%s",source_path,simCtx->euler_subdir); CHKERRQ(ierr);
1299 simCtx->current_io_directory = simCtx->_io_context_buffer;
1300
1301 // Read Cartesian velocity field
1302 ierr = ReadFieldData(user, "ufield", user->Ucat, ti, eulerian_ext); CHKERRQ(ierr);
1303
1304 // Read contravariant velocity field
1305 ierr = ReadFieldData(user, "vfield", user->Ucont, ti, eulerian_ext); CHKERRQ(ierr);
1306
1307 // Read pressure field
1308 ierr = ReadFieldData(user, "pfield", user->P, ti, eulerian_ext); CHKERRQ(ierr);
1309
1310 // Read node state field (nvert)
1311 ierr = ReadFieldData(user, "nvfield", user->Nvert, ti, eulerian_ext); CHKERRQ(ierr);
1312
1313 LOG_ALLOW(GLOBAL,LOG_INFO,"Successfully read all mandatory fields. \n");
1314
1315 if(simCtx->np>0){
1316 // Read Particle Count field
1317 if(!user->ParticleCount){
1318 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ParticleCount Vec is NULL but np>0");
1319 }
1320 ierr = ReadOptionalField(user, "ParticleCount", "Particle Count", user->ParticleCount, ti, eulerian_ext); CHKERRQ(ierr);
1321
1322 ierr = ReadOptionalField(user, "psifield", "Scalar Psi Field", user->Psi, ti, eulerian_ext); CHKERRQ(ierr);
1323 }
1324 else{
1325 LOG_ALLOW(GLOBAL, LOG_INFO, "No particles in simulation, skipping Particle fields reading.\n");
1326 }
1327 // Process LES fields if enabled
1328 if (simCtx->les) {
1329 ierr = ReadLESFields(user,ti); CHKERRQ(ierr);
1330 }
1331
1332 // Process RANS fields if enabled
1333 if (simCtx->rans) {
1334 ierr = ReadRANSFields(user,ti); CHKERRQ(ierr);
1335 }
1336
1337 // Process statistical fields if averaging is enabled
1338 if (simCtx->averaging) {
1339 ierr = ReadStatisticalFields(user,ti); CHKERRQ(ierr);
1340 }
1341
1342 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1343
1344 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading simulation fields.\n");
1345
1346 return 0;
1347}
PetscErrorCode ReadStatisticalFields(UserCtx *user, PetscInt ti)
Reads statistical fields for averaging purposes.
Definition io.c:1360
PetscErrorCode ReadRANSFields(UserCtx *user, PetscInt ti)
Reads RANS-related fields.
Definition io.c:1426
PetscErrorCode ReadLESFields(UserCtx *user, PetscInt ti)
Reads LES-related fields.
Definition io.c:1391
PetscErrorCode ReadFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Reads data for a specific field from a file into the provided vector.
Definition io.c:1040
static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
Checks for and reads an optional Eulerian field from a file into a Vec.
Definition io.c:920
char euler_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:611
PetscInt rans
Definition variables.h:674
PetscInt np
Definition variables.h:681
PetscBool averaging
Definition variables.h:678
Vec Ucont
Definition variables.h:760
char * current_io_directory
Definition variables.h:615
char eulerianExt[8]
Definition variables.h:524
char source_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:501
Vec Ucat
Definition variables.h:760
Vec ParticleCount
Definition variables.h:805
PostProcessParams * pps
Definition variables.h:720
@ EXEC_MODE_SOLVER
Definition variables.h:562
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:563
char _io_context_buffer[PETSC_MAX_PATH_LEN]
Definition variables.h:614
PetscInt les
Definition variables.h:674
Vec Nvert
Definition variables.h:760
ExecutionMode exec_mode
Definition variables.h:607
char restart_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:609
Vec Psi
Definition variables.h:806
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadFieldData()

PetscErrorCode ReadFieldData ( UserCtx *user,
const char *  field_name,
Vec  field_vec,
PetscInt  ti,
const char *  ext 
)

Reads data for a specific field from a file into the provided vector.

This function uses the field name to construct the file path and reads the data from the corresponding file into the provided PETSc vector.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
[in]field_nameName of the field (e.g., "ufield", "vfield", "pfield").
[out]field_vecPETSc vector to store the field data.
[in]tiTime index for constructing the file name.
[in]extFile extension (e.g., "dat").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1040 of file io.c.

1045{
1046 PetscErrorCode ierr;
1047 char filename[PETSC_MAX_PATH_LEN];
1048 MPI_Comm comm;
1049 PetscMPIInt rank,size;
1050 SimCtx *simCtx = user->simCtx;
1051
1052
1053 PetscFunctionBeginUser;
1055
1056 if(!simCtx->current_io_directory){
1057 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling ReadFieldData().");
1058 }
1059
1060
1061 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1062 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1063 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1064
1065 const char *source_path = NULL;
1066 source_path = simCtx->current_io_directory;
1067
1068 if(!source_path){
1069 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "source_path was not set for the current execution mode.");
1070 }
1071 /* ---------------------------------------------------------------------
1072 * Compose <path-to-file>/<field_name><step with 5 digits>_0.<ext>
1073 * (all restart files are written by rank-0 with that naming scheme).
1074 * ------------------------------------------------------------------ */
1075 ierr = PetscSNPrintf(filename,sizeof(filename),
1076 "%s/%s%05" PetscInt_FMT "_0.%s",
1077 source_path,field_name,ti,ext);CHKERRQ(ierr);
1078
1080 "Attempting to read <%s> on rank %d/%d\n",
1081 filename,(int)rank,(int)size);
1082
1083 /* ======================================================================
1084 * 1. SERIAL JOB – just hand the Vec to VecLoad()
1085 * ==================================================================== */
1086 if(size==1)
1087 {
1088 PetscViewer viewer;
1089 PetscBool found;
1090 Vec temp_vec;
1091 PetscInt expectedSize,loadedSize;
1092
1093 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
1094 if(!found) SETERRQ(comm,PETSC_ERR_FILE_OPEN,
1095 "Restart/Source file not found: %s",filename);
1096
1097 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,FILE_MODE_READ,&viewer);CHKERRQ(ierr);
1098// ---- START MODIFICATION ----
1099 // DO NOT load directly into field_vec, as this can resize it, which is
1100 // illegal for DMSwarm "view" vectors. Instead, load into a temporary vector.
1101 ierr = VecCreate(PETSC_COMM_SELF, &temp_vec); CHKERRQ(ierr);
1102 ierr = VecLoad(temp_vec,viewer);CHKERRQ(ierr);
1103 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1104
1105 // Sanity check: ensure the file size matches the expected vector size.
1106 ierr = VecGetSize(field_vec, &expectedSize);CHKERRQ(ierr);
1107 ierr = VecGetSize(temp_vec, &loadedSize);CHKERRQ(ierr);
1108 if (loadedSize != expectedSize) {
1109 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
1110 "File %s holds %d entries – expected %d for field '%s'",
1111 filename, loadedSize, expectedSize, field_name);
1112 }
1113
1114 // Now, safely copy the data from the temporary vector to the final destination.
1115 ierr = VecCopy(temp_vec, field_vec);CHKERRQ(ierr);
1116
1117 // Clean up the temporary vector.
1118 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1119
1120 // ---- END MODIFICATION ----
1121
1122 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
1123 /*
1124 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
1125 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
1126
1127 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1128 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
1129
1130 ierr = VecLoad(field_vec,viewer);CHKERRQ(ierr);
1131 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1132 */
1134 "Loaded <%s> (serial path)\n",filename);
1135
1137 PetscFunctionReturn(0);
1138 }
1139
1140 /* ======================================================================
1141 * 2. PARALLEL JOB
1142 * ==================================================================== */
1143 PetscInt globalSize;
1144 ierr = VecGetSize(field_vec,&globalSize);CHKERRQ(ierr);
1145
1146 DM dm = NULL;
1147 const char *dmtype = NULL;
1148 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1149
1150 /* -------------------- rank-0 : read the sequential file -------------- */
1151 Vec seq_vec = NULL; /* only valid on rank-0 */
1152 const PetscScalar *seqArray = NULL; /* borrowed pointer on rank-0 only */
1153
1154 if(rank==0)
1155 {
1156 PetscViewer viewer;
1157 PetscBool found;
1158
1159 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
1160 if(!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,
1161 "Restart file not found: %s",filename);
1162
1163 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
1164 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
1165 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
1166
1167 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1168 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
1169 ierr = VecLoad(seq_vec,viewer);CHKERRQ(ierr);
1170 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1171
1172 /* size sanity-check */
1173 PetscInt loaded;
1174 ierr = VecGetSize(seq_vec,&loaded);CHKERRQ(ierr);
1175 if(loaded != globalSize)
1176 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
1177 "File %s holds %d entries – expected %d",
1178 filename,loaded,globalSize);
1179
1180 /* borrow array for later Bcast */
1181 ierr = VecGetArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);
1182
1184 "Rank 0 successfully loaded <%s>\n",filename);
1185 }
1186
1187 /* -------------------- Check if this is a DMDA vector ----------------- */
1188 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1189 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1190
1191 if (dmtype && !strcmp(dmtype, DMDA)) {
1192 /* ==================================================================
1193 * DMDA PATH: File is in natural ordering, need to convert to global
1194 * ================================================================== */
1195
1196 /* Create natural vector */
1197 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1198
1199 /* Scatter from rank 0's seq_vec to all ranks' natural vector */
1200 VecScatter scatter;
1201 Vec nat_seq = NULL; /* Sequential natural vector on rank 0 */
1202
1203 ierr = VecScatterCreateToZero(nat, &scatter, &nat_seq); CHKERRQ(ierr);
1204
1205 /* Reverse scatter: from rank 0 to all ranks */
1206 ierr = VecScatterBegin(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
1207 INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
1208 ierr = VecScatterEnd(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
1209 INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
1210
1211 /* Convert natural → global ordering */
1212 ierr = DMDANaturalToGlobalBegin(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
1213 ierr = DMDANaturalToGlobalEnd(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
1214
1215 /* Cleanup */
1216 ierr = VecScatterDestroy(&scatter); CHKERRQ(ierr);
1217 ierr = VecDestroy(&nat_seq); CHKERRQ(ierr);
1218 ierr = VecDestroy(&nat); CHKERRQ(ierr);
1219
1220 } else {
1221 /* ==================================================================
1222 * NON-DMDA PATH: Use broadcast and direct copy (assumes global ordering)
1223 * ================================================================== */
1224
1225 PetscScalar *buffer = NULL;
1226 if (rank == 0) {
1227 buffer = (PetscScalar *)seqArray;
1228 } else {
1229 ierr = PetscMalloc1(globalSize, &buffer); CHKERRQ(ierr);
1230 }
1231
1232 ierr = MPI_Bcast(buffer, (int)globalSize, MPIU_SCALAR, 0, comm); CHKERRQ(ierr);
1233
1234 /* Copy slice based on ownership range */
1235 PetscInt rstart, rend, loc;
1236 PetscScalar *locArray;
1237
1238 ierr = VecGetOwnershipRange(field_vec, &rstart, &rend); CHKERRQ(ierr);
1239 loc = rend - rstart;
1240
1241 ierr = VecGetArray(field_vec, &locArray); CHKERRQ(ierr);
1242 ierr = PetscMemcpy(locArray, buffer + rstart, loc * sizeof(PetscScalar)); CHKERRQ(ierr);
1243 ierr = VecRestoreArray(field_vec, &locArray); CHKERRQ(ierr);
1244
1245 if (rank != 0) {
1246 ierr = PetscFree(buffer); CHKERRQ(ierr);
1247 }
1248 }
1249
1250 /* -------------------- tidy up ---------------------------------------- */
1251 if (rank == 0) {
1252 ierr = VecRestoreArrayRead(seq_vec, &seqArray); CHKERRQ(ierr);
1253 ierr = VecDestroy(&seq_vec); CHKERRQ(ierr);
1254 }
1255
1257 "Loaded <%s> (parallel path)\n",filename);
1258
1260 PetscFunctionReturn(0);
1261}
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:33
Here is the caller graph for this function:

◆ ReadStatisticalFields()

PetscErrorCode ReadStatisticalFields ( UserCtx user,
PetscInt  ti 
)

Reads statistical fields used for time-averaged simulations.

Reads statistical quantities such as velocity sums and pressure sums. Logs missing files and initializes fields to zero if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiThe timestep at which the simulation field data needs to be read.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads statistical fields used for time-averaged simulations.

This function reads data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum, and P_sum, used for statistical analysis during simulation.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1360 of file io.c.

1361{
1362 PetscErrorCode ierr;
1363 const char *eulerian_ext = "dat";
1364 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1365 eulerian_ext = user->simCtx->pps->eulerianExt;
1366 }
1367
1368 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read statistical fields.\n");
1369
1370 ierr = ReadOptionalField(user, "su0", "Velocity Sum", user->Ucat_sum, ti, eulerian_ext); CHKERRQ(ierr);
1371 ierr = ReadOptionalField(user, "su1", "Velocity Cross Sum", user->Ucat_cross_sum, ti, eulerian_ext); CHKERRQ(ierr);
1372 ierr = ReadOptionalField(user, "su2", "Velocity Square Sum",user->Ucat_square_sum, ti, eulerian_ext); CHKERRQ(ierr);
1373 ierr = ReadOptionalField(user, "sp", "Pressure Sum", user->P_sum, ti, eulerian_ext); CHKERRQ(ierr);
1374
1375 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading statistical fields.\n");
1376
1377 return 0;
1378}
Vec Ucat_square_sum
Definition variables.h:791
Vec P_sum
Definition variables.h:791
Vec Ucat_sum
Definition variables.h:791
Vec Ucat_cross_sum
Definition variables.h:791
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadLESFields()

PetscErrorCode ReadLESFields ( UserCtx user,
PetscInt  ti 
)

Reads LES-related fields used in turbulence modeling.

Reads the Smagorinsky constant (Cs) and transfers data to local vectors. Logs missing files and initializes fields to zero if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads LES-related fields used in turbulence modeling.

This function reads LES-related fields such as Cs (Smagorinsky constant) into their respective PETSc vectors.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1391 of file io.c.

1392{
1393 PetscErrorCode ierr;
1394 const char *eulerian_ext = "dat";
1395 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1396 eulerian_ext = user->simCtx->pps->eulerianExt;
1397 }
1398
1399 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read LES fields.\n");
1400
1401 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, eulerian_ext); CHKERRQ(ierr);
1402 ierr = ReadOptionalField(user, "cs", "Smagorinsky Constant (Cs)", user->CS, ti, eulerian_ext); CHKERRQ(ierr);
1403
1404 DMGlobalToLocalBegin(user->da, user->CS, INSERT_VALUES, user->lCs);
1405 DMGlobalToLocalEnd(user->da, user->CS, INSERT_VALUES, user->lCs);
1406
1407 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1408 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1409
1410 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading LES fields.\n");
1411
1412 return 0;
1413}
Vec lCs
Definition variables.h:788
Vec lNu_t
Definition variables.h:788
Vec Nu_t
Definition variables.h:788
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadRANSFields()

PetscErrorCode ReadRANSFields ( UserCtx user,
PetscInt  ti 
)

Reads RANS-related fields for turbulence modeling.

Reads K_Omega fields (used in RANS modeling) and initializes them if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads RANS-related fields for turbulence modeling.

This function reads RANS-related fields such as K_Omega into their respective PETSc vectors.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1426 of file io.c.

1427{
1428 PetscErrorCode ierr;
1429 const char *eulerian_ext = "dat";
1430 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1431 eulerian_ext = user->simCtx->pps->eulerianExt;
1432 }
1433
1434 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read RANS fields.\n");
1435
1436 ierr = ReadOptionalField(user, "kfield", "K-Omega RANS", user->K_Omega, ti, eulerian_ext); CHKERRQ(ierr);
1437 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, eulerian_ext); CHKERRQ(ierr);
1438
1439 VecCopy(user->K_Omega, user->K_Omega_o);
1440
1441 DMGlobalToLocalBegin(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1442 DMGlobalToLocalEnd(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1443
1444 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1445 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1446
1447 DMGlobalToLocalBegin(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1448 DMGlobalToLocalEnd(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1449
1450 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading RANS fields.\n");
1451
1452 return 0;
1453}
Vec K_Omega_o
Definition variables.h:788
Vec K_Omega
Definition variables.h:788
Vec lK_Omega_o
Definition variables.h:788
Vec lK_Omega
Definition variables.h:788
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteFieldData()

PetscErrorCode WriteFieldData ( UserCtx user,
const char *  field_name,
Vec  field_vec,
PetscInt  ti,
const char *  ext 
)

Writes data from a specific PETSc vector to a file.

This function uses the field name to construct the file path and writes the data from the provided PETSc vector to the corresponding file.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
[in]field_nameName of the field (e.g., "ufield", "vfield", "pfield").
[in]field_vecPETSc vector containing the field data to write.
[in]tiTime index for constructing the file name.
[in]extFile extension (e.g., "dat").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes data from a specific PETSc vector to a file.

This function is now parallel-safe.

  • In PARALLEL: All processes send their local data to Rank 0. Rank 0 assembles the data into a temporary sequential vector and writes it to a single file.
  • In SERIAL: It performs a direct, simple write.

This ensures the output file is always in a simple, portable format.

Parameters
[in]userPointer to the UserCtx structure.
[in]field_nameName of the field (e.g., "position").
[in]field_vecThe parallel PETSc vector containing the data to write.
[in]tiTime index for constructing the file name.
[in]extFile extension (e.g., "dat").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Dump a distributed PETSc Vec to the single sequential file format used by our restart / post-processing tools.

The companion of ReadFieldData(): it always produces one file (e.g. results/ufield00006_0.dat) regardless of how many MPI ranks are running.

Behaviour

# MPI ranks Strategy
1 Direct VecView() into the file.
> 1 VecScatterCreateToZero() gathers the distributed Vec onto rank 0.
Rank 0 writes the sequential Vec; all other ranks allocate no storage.

The routine never alters or destroys the parallel Vec passed in; the gather buffer is created and freed internally.

Parameters
[in]userSimulation context (used only for logging).
[in]field_nameLogical field name (forms part of the filename).
[in]field_vecDistributed PETSc Vec to write.
[in]tiTimestep index used in the filename.
[in]extFile extension ("dat" in our workflow).
Returns
0 on success or a PETSc error code.

Definition at line 1701 of file io.c.

1706{
1707 PetscErrorCode ierr;
1708 MPI_Comm comm;
1709 PetscMPIInt rank, size;
1710
1711 const PetscInt placeholder_int = 0; /* keep legacy name */
1712 char filename[PETSC_MAX_PATH_LEN];
1713 SimCtx *simCtx=user->simCtx;
1714
1715 PetscFunctionBeginUser;
1717
1718 if(!simCtx->output_dir){
1719 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Output directory was not set before calling WriteFieldData().");
1720 }
1721
1722 /* ------------------------------------------------------------ */
1723 /* Basic communicator information */
1724 /* ------------------------------------------------------------ */
1725 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1726 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1727 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1728
1729 ierr = PetscSNPrintf(filename,sizeof(filename),
1730 "%s/%s%05" PetscInt_FMT "_%d.%s",
1731 simCtx->current_io_directory,field_name,ti,placeholder_int,ext);CHKERRQ(ierr);
1732
1734 " Preparing to write <%s> on rank %d/%d\n",
1735 filename,rank,size);
1736
1737 /* ------------------------------------------------------------ */
1738 /* 1. Serial path */
1739 /* ------------------------------------------------------------ */
1740 if (size == 1) {
1741 PetscViewer viewer;
1742
1743 ierr = PetscViewerBinaryOpen(comm,filename,
1744 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1745 ierr = VecView(field_vec,viewer);CHKERRQ(ierr);
1746 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1747
1749 " Wrote <%s> (serial path)\n",filename);
1750 PetscFunctionReturn(0);
1751 }
1752
1753 /* ------------------------------------------------------------ */
1754 /* 2. Parallel path */
1755 /* ------------------------------------------------------------ */
1756 VecScatter scatter;
1757 Vec seq_vec=NULL; /* created by PETSc, lives only on rank 0 */
1758 DM dm = NULL;
1759 const char *dmtype = NULL;
1760 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1761
1762 /* 2.1 Check if vector has DMDA and convert to natural ordering if needed */
1763 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1764 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1765
1766 if (dmtype && !strcmp(dmtype, DMDA)) {
1767 /* DMDA path: convert to natural ordering first */
1768 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1769 ierr = DMDAGlobalToNaturalBegin(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1770 ierr = DMDAGlobalToNaturalEnd(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1771
1772 /* Gather the natural-ordered vector */
1773 ierr = VecScatterCreateToZero(nat, &scatter, &seq_vec); CHKERRQ(ierr);
1774 } else {
1775 /* Non-DMDA path: direct gather in global ordering */
1776 ierr = VecScatterCreateToZero(field_vec, &scatter, &seq_vec); CHKERRQ(ierr);
1777 }
1778
1779 /* 2.2 Gather distributed → sequential (on rank 0) */
1780 ierr = VecScatterBegin(scatter, (nat ? nat : field_vec), seq_vec,
1781 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1782 ierr = VecScatterEnd(scatter, (nat ? nat : field_vec), seq_vec,
1783 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1784
1785 /* 2.3 Rank 0 writes the file */
1786 if (rank == 0) {
1787 PetscViewer viewer;
1788
1789 /* (optional) value diagnostics */
1790 PetscReal vmin,vmax;
1791 ierr = VecMin(seq_vec,NULL,&vmin);CHKERRQ(ierr);
1792 ierr = VecMax(seq_vec,NULL,&vmax);CHKERRQ(ierr);
1794 " <%s> range = [%.4e … %.4e]\n",
1795 field_name,(double)vmin,(double)vmax);
1796
1797 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1798 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1799 ierr = VecView(seq_vec,viewer);CHKERRQ(ierr);
1800 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1801
1803 " Wrote <%s> (parallel path)\n",filename);
1804 }
1805
1806 /* 2.4 Cleanup */
1807 ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
1808 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
1809 if (nat) { ierr = VecDestroy(&nat); CHKERRQ(ierr); }
1810
1812 PetscFunctionReturn(0);
1813}
char output_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:610
Here is the caller graph for this function:

◆ WriteSimulationFields()

PetscErrorCode WriteSimulationFields ( UserCtx user)

Writes simulation fields to files.

This function writes contravariant velocity, Cartesian velocity, pressure, and node state fields to their respective binary files. It also conditionally writes LES, RANS, and statistical fields if they are enabled.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1826 of file io.c.

1827{
1828 PetscErrorCode ierr;
1829
1830 SimCtx *simCtx = user->simCtx;
1831
1832 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write simulation fields.\n");
1833
1834 // Set the current IO directory
1835 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1836 "%s/%s", simCtx->output_dir, simCtx->euler_subdir);CHKERRQ(ierr);
1837 simCtx->current_io_directory = simCtx->_io_context_buffer;
1838
1839 // Write contravariant velocity field
1840 ierr = WriteFieldData(user, "vfield", user->Ucont, simCtx->step, "dat"); CHKERRQ(ierr);
1841
1842 // Write Cartesian velocity field
1843 ierr = WriteFieldData(user, "ufield", user->Ucat, simCtx->step, "dat"); CHKERRQ(ierr);
1844
1845 // Write pressure field
1846 ierr = WriteFieldData(user, "pfield", user->P, simCtx->step, "dat"); CHKERRQ(ierr);
1847
1848 // Write node state field (nvert)
1849 ierr = WriteFieldData(user, "nvfield", user->Nvert, simCtx->step, "dat"); CHKERRQ(ierr);
1850
1851 // Write ParticleCountPerCell if enabled.
1852 if(simCtx->np>0){
1853 ierr = WriteFieldData(user, "ParticleCount",user->ParticleCount,simCtx->step,"dat"); CHKERRQ(ierr);
1854 ierr = WriteFieldData(user, "psifield", user->Psi, simCtx->step, "dat"); CHKERRQ(ierr);
1855 }
1856
1857 // Write LES fields if enabled
1858 if (simCtx->les) {
1859 ierr = WriteLESFields(user); CHKERRQ(ierr);
1860 }
1861
1862 // Write RANS fields if enabled
1863 if (simCtx->rans) {
1864 ierr = WriteRANSFields(user); CHKERRQ(ierr);
1865 }
1866
1867 // Write statistical fields if averaging is enabled
1868 if (simCtx->averaging) {
1869 ierr = WriteStatisticalFields(user); CHKERRQ(ierr);
1870 }
1871
1872 simCtx->current_io_directory = NULL;
1873
1874 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing simulation fields.\n");
1875
1876 return 0;
1877}
PetscErrorCode WriteStatisticalFields(UserCtx *user)
Writes statistical fields for averaging purposes.
Definition io.c:1889
PetscErrorCode WriteFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Writes data from a specific PETSc vector to a single, sequential file.
Definition io.c:1701
PetscErrorCode WriteRANSFields(UserCtx *user)
Writes RANS-related fields.
Definition io.c:1951
PetscErrorCode WriteLESFields(UserCtx *user)
Writes LES-related fields.
Definition io.c:1917
PetscInt step
Definition variables.h:597
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteStatisticalFields()

PetscErrorCode WriteStatisticalFields ( UserCtx user)

Writes statistical fields for averaging purposes.

This function writes data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum, and P_sum to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1889 of file io.c.

1890{
1891 PetscErrorCode ierr;
1892
1893 SimCtx *simCtx = user->simCtx;
1894
1895 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write statistical fields.\n");
1896
1897 ierr = WriteFieldData(user, "su0", user->Ucat_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1898 ierr = WriteFieldData(user, "su1", user->Ucat_cross_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1899 ierr = WriteFieldData(user, "su2", user->Ucat_square_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1900 ierr = WriteFieldData(user, "sp", user->P_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1901
1902 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing statistical fields.\n");
1903
1904 return 0;
1905}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteLESFields()

PetscErrorCode WriteLESFields ( UserCtx user)

Writes LES-related fields.

This function writes LES-related fields such as Cs (Smagorinsky constant) to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1917 of file io.c.

1918{
1919 PetscErrorCode ierr;
1920
1921 SimCtx *simCtx = user->simCtx;
1922
1923 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write LES fields.\n");
1924
1925
1926 DMLocalToGlobalBegin(user->da, user->lCs, INSERT_VALUES, user->CS);
1927 DMLocalToGlobalEnd(user->da, user->lCs, INSERT_VALUES, user->CS);
1928
1929 DMLocalToGlobalBegin(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1930 DMLocalToGlobalEnd(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1931
1932 ierr = WriteFieldData(user, "Nu_t", user->Nu_t, simCtx->step, "dat"); CHKERRQ(ierr);
1933 ierr = WriteFieldData(user, "cs", user->CS, simCtx->step, "dat"); CHKERRQ(ierr);
1934
1935
1936 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing LES fields.\n");
1937
1938 return 0;
1939}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteRANSFields()

PetscErrorCode WriteRANSFields ( UserCtx user)

Writes RANS-related fields.

This function writes RANS-related fields such as K_Omega to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1951 of file io.c.

1952{
1953 PetscErrorCode ierr;
1954
1955 SimCtx *simCtx = user->simCtx;
1956
1957 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write RANS fields.\n");
1958
1959 ierr = WriteFieldData(user, "kfield", user->K_Omega, simCtx->step, "dat"); CHKERRQ(ierr);
1960
1961 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing RANS fields.\n");
1962
1963 return 0;
1964}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteSwarmField()

PetscErrorCode WriteSwarmField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Writes data from a specific field in a PETSc Swarm to a file.

This function retrieves the Swarm from the UserCtx (i.e., user->swarm) and creates a global PETSc vector from the specified Swarm field. It then calls the existing WriteFieldData() function to handle the actual I/O operation. After writing the data, the function destroys the temporary global vector to avoid memory leaks.

Parameters
[in]userPointer to the UserCtx structure containing simulation context and the PetscSwarm (as user->swarm).
[in]field_nameName of the Swarm field to be written (e.g., "my_field").
[in]tiTime index used to construct the output file name.
[in]extFile extension (e.g., "dat", "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.
Note
Compatible with PETSc 3.14.4.

This function retrieves the Swarm from the UserCtx (i.e., user->swarm) and creates a global PETSc vector from the specified Swarm field. It then calls the existing WriteFieldData() function to handle the actual I/O operation. After writing the data, the function destroys the temporary global vector to avoid memory leaks.

Parameters
[in]userPointer to the UserCtx structure containing simulation context and the PetscSwarm (as user->swarm).
[in]field_nameName of the Swarm field to be written (e.g., "my_field").
[in]tiTime index used to construct the output file name.
[in]extFile extension (e.g., "dat", "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.
Note
Compatible with PETSc 3.14.4 and newer.

Definition at line 1985 of file io.c.

1986{
1987 PetscErrorCode ierr;
1988 Vec fieldVec;
1989 DM swarm;
1990
1991 PetscFunctionBeginUser; /* PETSc macro indicating start of function */
1992
1993 /*
1994 * 1) Retrieve the PetscSwarm from the user context.
1995 * Ensure user->swarm is initialized and not NULL.
1996 */
1997 swarm = user->swarm;
1998
1999 /*
2000 * 2) Create a global vector from the specified swarm field.
2001 * This function is available in PETSc 3.14.4.
2002 * It provides a read/write "view" of the swarm field as a global Vec.
2003 */
2005 "Attempting to create global vector from field: %s\n",
2006 field_name);
2007 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
2008
2009 /*
2010 * 3) Use your existing WriteFieldData() to write the global vector to a file.
2011 * The field name, time index, and extension are passed along for naming.
2012 */
2014 "Calling WriteFieldData for field: %s\n",
2015 field_name);
2016 ierr = WriteFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
2017
2018 /*
2019 * 4) Destroy the global vector once the data is successfully written.
2020 * This step is crucial for avoiding memory leaks.
2021 * DMSwarmDestroyGlobalVectorFromField() is also available in PETSc 3.14.4.
2022 */
2024 "Destroying the global vector for field: %s\n",
2025 field_name);
2026 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
2027
2028 /* Log and return success. */
2030 "Successfully wrote swarm data for field: %s\n",
2031 field_name);
2032
2033 PetscFunctionReturn(0); /* PETSc macro indicating end of function */
2034}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteSwarmIntField()

PetscErrorCode WriteSwarmIntField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Writes integer data from a specific PETSc Swarm field to a file.

This function is designed for swarm fields that store integer data (e.g., DMSwarm_CellID), which cannot be converted to a standard PETSc Vec of PetscScalars. It accesses the raw data pointer for the field on each rank using DMSwarmGetField(), writes the local data to a rank-specific binary file, and then restores the field access.

Parameters
[in]userPointer to the UserCtx structure containing the PetscSwarm.
[in]field_nameName of the integer Swarm field to be written.
[in]tiTime index used to construct the output file name.
[in]extFile extension (e.g., "dat", "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes integer data from a specific PETSc Swarm field to a file.

This function provides a bridge to write integer-based swarm fields (like DMSwarm_CellID) using the existing Vec-based I/O routine (WriteFieldData). It works by:

  1. Creating a temporary parallel Vec with the same layout as other swarm fields.
  2. Accessing the local integer data from the swarm field.
  3. Populating the temporary Vec by casting each integer to a PetscScalar.
  4. Calling the standard WriteFieldData() function with the temporary Vec.
  5. Destroying the temporary Vec.
Parameters
[in]userPointer to the UserCtx structure.
[in]field_nameName of the integer Swarm field to be written.
[in]tiTime index for the output file.
[in]extFile extension.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 2054 of file io.c.

2055{
2056 PetscErrorCode ierr;
2057 DM swarm = user->swarm;
2058 Vec temp_vec; // Temporary Vec to hold casted data
2059 PetscInt nlocal, nglobal,bs,i;
2060 void *field_array_void;
2061 PetscScalar *scalar_array; // Pointer to the temporary Vec's scalar data
2062
2063 PetscFunctionBeginUser;
2064
2065 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Casting '%s' to Vec for writing.\n", field_name);
2066
2067 // Get the swarm field properties
2068 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
2069 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
2070 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
2071
 2072 // Create a temporary parallel Vec with the CORRECT layout
2073 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
2074 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
2075 ierr = VecSetSizes(temp_vec, nlocal*bs, nglobal*bs); CHKERRQ(ierr);
2076 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
2077
 2078 // Define the swarm's vector field to be the mandatory field 'position'
2079 DMSwarmVectorDefineField(swarm,"position");
2080
2081 ierr = VecGetArray(temp_vec, &scalar_array); CHKERRQ(ierr);
2082
2083 if(strcasecmp(field_name,"DMSwarm_pid") == 0){
2084 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
2085 // Perform the cast from PetscInt64 to PetscScalar
2086 for (i = 0; i < nlocal*bs; i++) {
2087 scalar_array[i] = (PetscScalar)int64_array[i];
2088 }
2089 }else{
2090 PetscInt *int_array = (PetscInt *)field_array_void;
2091 //Perform the cast from PetscInt to PetscScalar
2092 for (i = 0; i < nlocal*bs; i++) {
2093 scalar_array[i] = (PetscScalar)int_array[i];
2094 }
2095 }
2096
2097 // Restore access to both arrays
2098 ierr = VecRestoreArray(temp_vec, &scalar_array); CHKERRQ(ierr);
2099 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
2100
2101 // Call your existing writer with the temporary, populated Vec
2102 ierr = WriteFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
2103
2104 // Clean up
2105 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
2106
2107 PetscFunctionReturn(0);
2108}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteAllSwarmFields()

PetscErrorCode WriteAllSwarmFields ( UserCtx user)

Writes a predefined set of PETSc Swarm fields to files.

This function iterates through a hardcoded list of common swarm fields (position, velocity, etc.) and calls the WriteSwarmField() helper function for each one. This provides a straightforward way to output essential particle data at a given simulation step.

This function will only execute if particles are enabled in the simulation (i.e., user->simCtx->np > 0 and user->swarm is not NULL).

Parameters
[in]userPointer to the UserCtx structure containing the simulation context and the PetscSwarm.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 2126 of file io.c.

2127{
2128 PetscErrorCode ierr;
2129 SimCtx *simCtx = user->simCtx;
2130
2131 PetscFunctionBeginUser;
2132
2133 // If no swarm is configured or there are no particles, do nothing and return.
2134 if (!user->swarm || simCtx->np <= 0) {
2135 PetscFunctionReturn(0);
2136 }
2137
2138 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write swarm fields.\n");
2139
2140 // Ensure the current IO directory is set
2141 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
2142 "%s/%s", simCtx->output_dir, simCtx->particle_subdir);CHKERRQ(ierr);
2143 simCtx->current_io_directory = simCtx->_io_context_buffer;
2144
2145 // Write particle position field
2146 ierr = WriteSwarmField(user, "position", simCtx->step, "dat"); CHKERRQ(ierr);
2147
2148 // Write particle velocity field
2149 ierr = WriteSwarmField(user, "velocity", simCtx->step, "dat"); CHKERRQ(ierr);
2150
2151 // Write particle weight field
2152 ierr = WriteSwarmField(user, "weight", simCtx->step, "dat"); CHKERRQ(ierr);
2153
2154 // Write custom particle field "Psi"
2155 ierr = WriteSwarmField(user, "Psi", simCtx->step, "dat"); CHKERRQ(ierr);
2156
2157 // Integer fields require special handling
2158
2159 // Write the background mesh cell ID for each particle
2160 ierr = WriteSwarmIntField(user, "DMSwarm_CellID", simCtx->step, "dat"); CHKERRQ(ierr);
2161
2162 // Write the particle location status (e.g., inside or outside the domain)
2163 ierr = WriteSwarmIntField(user, "DMSwarm_location_status", simCtx->step, "dat"); CHKERRQ(ierr);
2164
2165 // Write the unique particle ID
2166 ierr = WriteSwarmIntField(user, "DMSwarm_pid", simCtx->step, "dat"); CHKERRQ(ierr);
2167
2168 simCtx->current_io_directory = NULL;
2169
2170 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing swarm fields.\n");
2171
2172 PetscFunctionReturn(0);
2173}
PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Writes data from a specific field in a PETSc Swarm to a file.
Definition io.c:1985
PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Writes integer swarm data by casting it to a temporary Vec and using WriteFieldData.
Definition io.c:2054
char particle_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:612
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadDataFileToArray()

PetscInt ReadDataFileToArray ( const char *  filename,
double **  data_out,
PetscInt *  Nout,
MPI_Comm  comm 
)

Definition at line 2727 of file io.c.

2731{
2732 /* STEP 0: Prepare local variables & log function entry */
2733 PetscMPIInt rank, size;
2734 PetscErrorCode ierr;
2735 FILE *fp = NULL;
2736 PetscInt N = 0; /* number of lines/values read on rank 0 */
2737 double *array = NULL; /* pointer to local array on each rank */
2738 PetscInt fileExistsFlag = 0; /* 0 = doesn't exist, 1 = does exist */
2739
2741 "Start reading from file: %s\n",
2742 filename);
2743
2744 /* Basic error checking: data_out, Nout must be non-null. */
2745 if (!filename || !data_out || !Nout) {
2747 "Null pointer argument provided.\n");
2748 return 1;
2749 }
2750
2751 /* Determine rank/size for coordinating I/O. */
2752 MPI_Comm_rank(comm, &rank);
2753 MPI_Comm_size(comm, &size);
2754
2755 /* STEP 1: On rank 0, check if file can be opened. */
2756 if (!rank) {
2757 fp = fopen(filename, "r");
2758 if (fp) {
2759 fileExistsFlag = 1;
2760 fclose(fp);
2761 }
2762 }
2763
2764 /* STEP 2: Broadcast file existence to all ranks. */
2765 // In ReadDataFileToArray:
2766 ierr = MPI_Bcast(&fileExistsFlag, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2767
2768 if (!fileExistsFlag) {
2769 /* If file does not exist, log & return. */
2770 if (!rank) {
2772 "File '%s' not found.\n",
2773 filename);
2774 }
2775 return 2;
2776 }
2777
2778 /* STEP 3: Rank 0 re-opens and reads the file, counting lines, etc. */
2779 if (!rank) {
2780 fp = fopen(filename, "r");
2781 if (!fp) {
2783 "File '%s' could not be opened for reading.\n",
2784 filename);
2785 return 3;
2786 }
2787
2788 /* (3a) Count lines first. */
2789 {
2790 char line[256];
2791 while (fgets(line, sizeof(line), fp)) {
2792 N++;
2793 }
2794 }
2795
2797 "File '%s' has %d lines.\n",
2798 filename, N);
2799
2800 /* (3b) Allocate array on rank 0. */
2801 array = (double*)malloc(N * sizeof(double));
2802 if (!array) {
2803 fclose(fp);
2805 "malloc failed for array.\n");
2806 return 4;
2807 }
2808
2809 /* (3c) Rewind & read values into array. */
2810 rewind(fp);
2811 {
2812 PetscInt i = 0;
2813 char line[256];
2814 while (fgets(line, sizeof(line), fp)) {
2815 double val;
2816 if (sscanf(line, "%lf", &val) == 1) {
2817 array[i++] = val;
2818 }
2819 }
2820 }
2821 fclose(fp);
2822
2824 "Successfully read %d values from '%s'.\n",
2825 N, filename);
2826 }
2827
2828 /* STEP 4: Broadcast the integer N to all ranks. */
2829 ierr = MPI_Bcast(&N, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2830
2831 /* STEP 5: Each rank allocates an array to receive the broadcast if rank>0. */
2832 if (rank) {
2833 array = (double*)malloc(N * sizeof(double));
2834 if (!array) {
2836 "malloc failed on rank %d.\n",
2837 rank);
2838 return 5;
2839 }
2840 }
2841
2842 /* STEP 6: Broadcast the actual data from rank 0 to all. */
2843 ierr = MPI_Bcast(array, N, MPI_DOUBLE, 0, comm); CHKERRQ(ierr);
2844
2845 /* STEP 7: Assign outputs on all ranks. */
2846 *data_out = array;
2847 *Nout = N;
2848
2850 "Done. Provided array of length=%d to all ranks.\n",
2851 N);
2852 return 0; /* success */
2853}

◆ CreateVTKFileFromMetadata()

PetscInt CreateVTKFileFromMetadata ( const char *  filename,
const VTKMetaData meta,
MPI_Comm  comm 
)

Definition at line 128 of file vtk_io.c.

129{
130 PetscMPIInt rank;
131 MPI_Comm_rank(comm, &rank);
132 PetscErrorCode ierr = 0;
133
134 if (!rank) {
135 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank 0 writing combined VTK file '%s'.\n", filename);
136 FILE *fp = fopen(filename, "wb");
137 if (!fp) {
138 LOG_ALLOW(GLOBAL, LOG_ERROR, "fopen failed for %s.\n", filename);
139 return PETSC_ERR_FILE_OPEN;
140 }
141
142 PetscInt boffset = 0;
143
144 ierr = WriteVTKFileHeader(fp, meta, &boffset);
145 if(ierr) { fclose(fp); return ierr; }
146
147 if (meta->coords) {
148 ierr = WriteVTKAppendedBlock(fp, meta->coords, 3 * meta->npoints, sizeof(PetscScalar));
149 if(ierr) { fclose(fp); return ierr; }
150 }
151 /*
152 // ======== DEBUG: Dump first few values of Ucat_nodal if present ========
153 if (!rank) {
154 for (PetscInt i = 0; i < meta->num_point_data_fields; i++) {
155 const VTKFieldInfo* f = &meta->point_data_fields[i];
156 if (strcasecmp(f->name, "Ucat_nodal") == 0) {
157 const PetscScalar *a = f->data;
158 const PetscInt npts = meta->npoints;
159 const PetscInt nshow = (npts < 5) ? npts : 5;
160
161 LOG_ALLOW(GLOBAL, LOG_INFO,
162 "DBG (writer) Ucat_nodal: first %d of %" PetscInt_FMT " tuples:\n",
163 (int)nshow, npts);
164 for (PetscInt t = 0; t < nshow; ++t) {
165 LOG_ALLOW(GLOBAL, LOG_INFO,
166 " Ucat_nodal[%3" PetscInt_FMT "] = (%g, %g, %g)\n",
167 t, (double)a[3*t+0], (double)a[3*t+1], (double)a[3*t+2]);
168 }
169 }
170 }
171 }
172 */
173 // ======== END DEBUG ========
174
175
176 for (PetscInt i = 0; i < meta->num_point_data_fields; i++) {
177 const VTKFieldInfo* field = &meta->point_data_fields[i];
178 if (field->data) {
179 ierr = WriteVTKAppendedBlock(fp, field->data, field->num_components * meta->npoints, sizeof(PetscScalar));
180 if(ierr) { fclose(fp); return ierr; }
181 }
182 }
183 if (meta->fileType == VTK_POLYDATA) {
184 if (meta->connectivity) {
185 ierr = WriteVTKAppendedBlock(fp, meta->connectivity, meta->npoints, sizeof(PetscInt));
186 if(ierr) { fclose(fp); return ierr; }
187 }
188 if (meta->offsets) {
189 ierr = WriteVTKAppendedBlock(fp, meta->offsets, meta->npoints, sizeof(PetscInt));
190 if(ierr) { fclose(fp); return ierr; }
191 }
192 }
193 ierr = WriteVTKFileFooter(fp, meta);
194 if(ierr) { fclose(fp); return ierr; }
195
196 fclose(fp);
197 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank 0 finished writing VTK file '%s'.\n", filename);
198 }
199 return 0;
200}
PetscInt npoints
Definition variables.h:550
PetscInt num_components
Definition variables.h:537
PetscInt num_point_data_fields
Definition variables.h:553
PetscInt * connectivity
Definition variables.h:554
PetscInt * offsets
Definition variables.h:555
VTKFileType fileType
Definition variables.h:548
PetscScalar * data
Definition variables.h:538
PetscScalar * coords
Definition variables.h:551
VTKFieldInfo point_data_fields[20]
Definition variables.h:552
@ VTK_POLYDATA
Definition variables.h:544
Stores all necessary information for a single data array in a VTK file.
Definition variables.h:535
static PetscErrorCode WriteVTKFileFooter(FILE *fp, const VTKMetaData *meta)
Definition vtk_io.c:116
static PetscErrorCode WriteVTKAppendedBlock(FILE *fp, const void *data, PetscInt num_elements, size_t element_size)
Definition vtk_io.c:17
static PetscErrorCode WriteVTKFileHeader(FILE *fp, const VTKMetaData *meta, PetscInt *boffset)
Definition vtk_io.c:106
Here is the call graph for this function:
Here is the caller graph for this function:

◆ VecToArrayOnRank0()

PetscErrorCode VecToArrayOnRank0 ( Vec  inVec,
PetscInt *  N,
double **  arrayOut 
)

Gathers the contents of a distributed PETSc Vec into a single array on rank 0.

Parameters
[in]inVecThe input (possibly distributed) Vec.
[out]NThe global size of the vector.
[out]arrayOutOn rank 0, points to the newly allocated array holding all data. On other ranks, it is set to NULL.
Returns
PetscErrorCode Return 0 on success, nonzero on failure.

Gathers the contents of a distributed PETSc Vec into a single array on rank 0.

If the Vec has a DMDA attached, the gather is performed in DMDA "natural" ordering.

Parameters
[in]inVecThe PETSc Vec (may be distributed).
[out]NGlobal length of the Vec (includes dof).
[out]arrayOutOn rank 0, newly allocated buffer with the gathered values (PetscScalar-sized). On other ranks, set to NULL.

Definition at line 2184 of file io.c.

2185{
2186 PetscErrorCode ierr;
2187 MPI_Comm comm;
2188 PetscMPIInt rank;
2189 PetscInt globalSize;
2190 DM dm = NULL;
2191 const char *dmtype = NULL;
2192
2193 /* For DMDA path */
2194 Vec nat = NULL, seqNat = NULL;
2195 VecScatter scatNat = NULL;
2196 const PetscScalar *nar = NULL;
2197 PetscScalar *buf = NULL;
2198
2199 /* For generic (no DM) path */
2200 Vec seq = NULL;
2201 VecScatter scat = NULL;
2202 const PetscScalar *sar = NULL;
2203
2204 PetscFunctionBeginUser;
2205
2206 ierr = PetscObjectGetComm((PetscObject)inVec, &comm); CHKERRQ(ierr);
2207 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
2208 ierr = VecGetSize(inVec, &globalSize); CHKERRQ(ierr);
2209 *N = globalSize;
2210 *arrayOut = NULL;
2211
2212 ierr = VecGetDM(inVec, &dm); CHKERRQ(ierr);
2213 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
2214
2215 if (dmtype && !strcmp(dmtype, DMDA)) {
2216 /* --- DMDA path: go to NATURAL ordering, then gather to rank 0 --- */
2217 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
2218 ierr = DMDAGlobalToNaturalBegin(dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
2219 ierr = DMDAGlobalToNaturalEnd (dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
2220
2221 ierr = VecScatterCreateToZero(nat, &scatNat, &seqNat); CHKERRQ(ierr);
2222 ierr = VecScatterBegin(scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2223 ierr = VecScatterEnd (scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2224
2225 if (rank == 0) {
2226 PetscInt nseq;
2227 ierr = VecGetLocalSize(seqNat, &nseq); CHKERRQ(ierr);
2228 ierr = VecGetArrayRead(seqNat, &nar); CHKERRQ(ierr);
2229
2230 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
2231 ierr = PetscMemcpy(buf, nar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
2232
2233 ierr = VecRestoreArrayRead(seqNat, &nar); CHKERRQ(ierr);
2234 *arrayOut = (double*)buf; /* hand back as double* for drop-in compatibility */
2235 }
2236
2237 ierr = VecScatterDestroy(&scatNat); CHKERRQ(ierr);
2238 ierr = VecDestroy(&seqNat); CHKERRQ(ierr);
2239 ierr = VecDestroy(&nat); CHKERRQ(ierr);
2240 } else {
2241 /* --- No DM attached: plain gather in Vec’s global (parallel) layout order --- */
2242 ierr = VecScatterCreateToZero(inVec, &scat, &seq); CHKERRQ(ierr);
2243 ierr = VecScatterBegin(scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2244 ierr = VecScatterEnd (scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2245
2246 if (rank == 0) {
2247 PetscInt nseq;
2248 ierr = VecGetLocalSize(seq, &nseq); CHKERRQ(ierr);
2249 ierr = VecGetArrayRead(seq, &sar); CHKERRQ(ierr);
2250
2251 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
2252 ierr = PetscMemcpy(buf, sar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
2253
2254 ierr = VecRestoreArrayRead(seq, &sar); CHKERRQ(ierr);
2255 *arrayOut = (double*)buf;
2256 }
2257
2258 ierr = VecScatterDestroy(&scat); CHKERRQ(ierr);
2259 ierr = VecDestroy(&seq); CHKERRQ(ierr);
2260 }
2261
2262 PetscFunctionReturn(0);
2263}
Here is the caller graph for this function:

◆ SwarmFieldToArrayOnRank0()

PetscErrorCode SwarmFieldToArrayOnRank0 ( DM  swarm,
const char *  field_name,
PetscInt *  n_total_particles,
PetscInt *  n_components,
void **  gathered_array 
)

Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.

This is a generic, type-aware generalization of VecToArrayOnRank0 for swarm fields. It is a COLLECTIVE operation.

Parameters
[in]swarmThe DMSwarm to gather from.
[in]field_nameThe name of the field to gather.
[out]n_total_particles(Output on rank 0) Total number of particles in the global swarm.
[out]n_components(Output on rank 0) Number of components for the field.
[out]gathered_array(Output on rank 0) A newly allocated array containing the full, gathered data. The caller is responsible for freeing this memory and for casting it to the correct type.
Returns
PetscErrorCode

Definition at line 2279 of file io.c.

2280{
2281 PetscErrorCode ierr;
2282 PetscMPIInt rank, size;
2283 PetscInt nlocal, nglobal, bs;
2284 void *local_array_void;
2285 size_t element_size = 0;
2286 MPI_Datatype mpi_type = MPI_BYTE; // We'll send raw bytes
2287
2288 PetscFunctionBeginUser;
2289
2290 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2291 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
2292
2293 // All ranks get swarm properties to determine send/receive counts
2294 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
2295 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
2296 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2297
2298 // Determine the size of one element of the field's data type
2299 if (strcasecmp(field_name, "DMSwarm_pid") == 0) {
2300 element_size = sizeof(PetscInt64);
2301 } else if (strcasecmp(field_name, "DMSwarm_CellID") == 0 || strcasecmp(field_name, "DMSwarm_location_status") == 0) {
2302 element_size = sizeof(PetscInt);
2303 } else {
2304 element_size = sizeof(PetscScalar);
2305 }
2306
2307 if (rank == 0) {
2308 *n_total_particles = nglobal;
2309 *n_components = bs;
2310 *gathered_array = NULL; // Initialize output
2311 }
2312
2313 if (size == 1) { // Serial case is a simple copy
2314 if (rank == 0) {
2315 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2316 ierr = PetscMemcpy(*gathered_array, local_array_void, nglobal * bs * element_size); CHKERRQ(ierr);
2317 }
2318 } else { // Parallel case: use MPI_Gatherv
2319 PetscInt *recvcounts = NULL, *displs = NULL;
2320 if (rank == 0) {
2321 ierr = PetscMalloc1(size, &recvcounts); CHKERRQ(ierr);
2322 ierr = PetscMalloc1(size, &displs); CHKERRQ(ierr);
2323 }
2324 PetscInt sendcount = nlocal * bs;
2325
2326 // Gather the number of elements (not bytes) from each rank
2327 ierr = MPI_Gather(&sendcount, 1, MPIU_INT, recvcounts, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2328
2329 if (rank == 0) {
2330 displs[0] = 0;
2331 // Convert counts and calculate displacements in terms of BYTES
2332 for (PetscMPIInt i = 0; i < size; i++) recvcounts[i] *= element_size;
2333 for (PetscMPIInt i = 1; i < size; i++) displs[i] = displs[i-1] + recvcounts[i-1];
2334
2335 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2336 }
2337
2338 // Use Gatherv with MPI_BYTE to handle any data type generically
2339 ierr = MPI_Gatherv(local_array_void, nlocal * bs * element_size, MPI_BYTE,
2340 *gathered_array, recvcounts, displs, MPI_BYTE,
2341 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2342
2343 if (rank == 0) {
2344 ierr = PetscFree(recvcounts); CHKERRQ(ierr);
2345 ierr = PetscFree(displs); CHKERRQ(ierr);
2346 }
2347 }
2348
2349 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2350
2351 PetscFunctionReturn(0);
2352}
Here is the caller graph for this function:

◆ ReadSwarmField()

PetscErrorCode ReadSwarmField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Reads data from a file into a specified field of a PETSc DMSwarm.

This function is the counterpart to WriteSwarmField(). It creates a global PETSc vector that references the specified DMSwarm field, uses ReadFieldData() to read the data from a file, and then destroys the global vector reference.

Parameters
[in]userPointer to the UserCtx structure (containing user->swarm).
[in]field_nameName of the DMSwarm field to read into (must be previously declared/allocated).
[in]tiTime index used to construct the input file name.
[in]extFile extension (e.g., "dat" or "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.
Note
Compatible with PETSc 3.14.x.

Definition at line 1472 of file io.c.

1473{
1474 PetscErrorCode ierr;
1475 DM swarm;
1476 Vec fieldVec;
1477
1478 PetscFunctionBegin;
1479
1480 swarm = user->swarm;
1481
1482 LOG_ALLOW(GLOBAL,LOG_DEBUG," ReadSwarmField Begins \n");
1483
1484 /* 2) Create a global vector that references the specified Swarm field. */
1485 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1486
1487 LOG_ALLOW(GLOBAL,LOG_DEBUG," Vector created from Field \n");
1488
1489 /* 3) Use the ReadFieldData() function to read data into fieldVec. */
1490 ierr = ReadFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1491
1492 /* 4) Destroy the global vector reference. */
1493 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1494
1495 PetscFunctionReturn(0);
1496}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadSwarmIntField()

PetscErrorCode ReadSwarmIntField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Reads integer swarm data by using ReadFieldData and casting the result.

This function is the counterpart to WriteSwarmIntField. It reads a file containing floating-point data (that was originally integer) into a temporary Vec and then casts it back to the integer swarm field. It works by:

  1. Creating a temporary parallel Vec.
  2. Calling the standard ReadFieldData() to populate this Vec.
  3. Accessing the local data of both the Vec and the swarm field.
  4. Populating the swarm's integer field by casting each PetscScalar back to a PetscInt.
  5. Destroying the temporary Vec.
Parameters
[in]userPointer to the UserCtx structure.
[in]field_nameName of the integer Swarm field to be read.
[in]tiTime index for the input file.
[in]extFile extension.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1517 of file io.c.

1518{
1519 PetscErrorCode ierr;
1520 DM swarm = user->swarm;
1521 Vec temp_vec;
1522 PetscInt nlocal, nglobal, bs, i;
1523 const PetscScalar *scalar_array; // Read-only pointer from the temp Vec
1524 void *field_array_void;
1525
1526
1527 PetscFunctionBeginUser;
1528
1529 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading '%s' via temporary Vec.\n", field_name);
1530
1531 // Get the properties of the swarm field to determine the expected layout
1532 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1533 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1534 // We get the block size but not the data pointer yet
1535 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1536 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1537
1538 // Create a temporary Vec with the CORRECT layout to receive the data
1539 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1540 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1541 ierr = VecSetSizes(temp_vec, nlocal * bs, nglobal * bs); CHKERRQ(ierr);
1542 ierr = VecSetBlockSize(temp_vec, bs); CHKERRQ(ierr);
1543 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1544
1545 // Call your existing reader to populate the temporary Vec
1546 ierr = ReadFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1547
1548 // Get local pointers
1549 ierr = VecGetArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1550 ierr = DMSwarmGetField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1551
1552 // Perform the cast back, using the correct loop size (nlocal * bs)
1553 if (strcmp(field_name, "DMSwarm_pid") == 0) {
1554 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1555 for (i = 0; i < nlocal * bs; i++) {
1556 int64_array[i] = (PetscInt64)scalar_array[i];
1557 }
1558 } else {
1559 PetscInt *int_array = (PetscInt *)field_array_void;
1560 for (i = 0; i < nlocal * bs; i++) {
1561 int_array[i] = (PetscInt)scalar_array[i];
1562 }
1563 }
1564
1565 // Restore access
1566 ierr = DMSwarmRestoreField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1567 ierr = VecRestoreArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1568
1569 // 6. Clean up
1570 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1571
1572 PetscFunctionReturn(0);
1573}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadAllSwarmFields()

PetscErrorCode ReadAllSwarmFields ( UserCtx user,
PetscInt  ti 
)

Reads multiple fields (positions, velocity, CellID, and weight) into a DMSwarm.

This function is analogous to ReadSimulationFields() but targets a DMSwarm. Each Swarm field is read from a separate file using ReadSwarmField().

Parameters
[in,out]userPointer to the UserCtx structure containing the DMSwarm (user->swarm).
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads multiple fields (positions, velocity, CellID, and weight) into a DMSwarm.

This function reads all necessary and optional fields for a DMSwarm for a given timestep. It assumes the swarm has already been resized to match the particle count in the input files.

The 'position' field is considered MANDATORY. If its file is missing or corrupt, the function will return a fatal error.

All other fields (velocity, CellID, weight, etc.) are OPTIONAL. If their corresponding files are not found, a warning is logged, and the function continues without error. If an optional file IS found but is corrupt or has a size mismatch, it is treated as a fatal error.

Parameters
[in,out]userPointer to the UserCtx structure containing the DMSwarm (user->swarm).
[in]tiTime index for constructing the file names.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1595 of file io.c.

1596{
1597 PetscErrorCode ierr;
1598 PetscInt nGlobal;
1599 SimCtx *simCtx = user->simCtx;
1600 const char *source_path = NULL;
1601 const char *particle_ext = "dat";
1602
1603 PetscFunctionBeginUser;
1604 ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
1605 LOG_ALLOW(GLOBAL, LOG_INFO, "Reading DMSwarm fields for timestep %d (swarm size is %d).\n", ti, nGlobal);
1606
1607 if (nGlobal == 0) {
1608 LOG_ALLOW(GLOBAL, LOG_INFO, "Swarm is empty for timestep %d. Nothing to read.\n", ti);
1609 PetscFunctionReturn(0);
1610 }
1611
1612 // First, determine the top-level source directory based on the execution mode.
1613 if (simCtx->exec_mode == EXEC_MODE_SOLVER) {
1614 source_path = simCtx->restart_dir;
1615 } else if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
1616 source_path = simCtx->pps->source_dir;
1617 if (simCtx->pps->particleExt[0] != '\0') {
1618 particle_ext = simCtx->pps->particleExt;
1619 }
1620 } else {
1621 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1622 }
1623
1624 // Set the current I/O directory context
1625 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1626 "%s/%s", source_path, simCtx->particle_subdir); CHKERRQ(ierr);
1627
1628 simCtx->current_io_directory = simCtx->_io_context_buffer;
1629
1630 /* 1) Read positions (REQUIRED) */
1631 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading mandatory position field...\n");
1632 ierr = ReadSwarmField(user, "position", ti, particle_ext);
1633 if (ierr) {
1634 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read MANDATORY 'position' field for step %d. Cannot continue.", ti);
1635 }
1636 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read mandatory position field for step %d.\n", ti);
1637
1638 /* 2) Read all OPTIONAL fields using the helper function. */
1639 /* The helper will print a warning and continue if a file is not found. */
1640 ierr = ReadOptionalSwarmField(user, "velocity", "Velocity", ti, particle_ext); CHKERRQ(ierr);
1641 ierr = ReadOptionalSwarmField(user, "DMSwarm_pid", "Particle ID", ti, particle_ext); CHKERRQ(ierr);
1642 ierr = ReadOptionalSwarmField(user, "DMSwarm_CellID", "Cell ID", ti, particle_ext); CHKERRQ(ierr);
1643 ierr = ReadOptionalSwarmField(user, "weight", "Particle Weight", ti, particle_ext); CHKERRQ(ierr);
1644 ierr = ReadOptionalSwarmField(user, "Psi", "Scalar Psi", ti, particle_ext); CHKERRQ(ierr);
1645 ierr = ReadOptionalSwarmField(user, "DMSwarm_location_status", "Migration Status", ti, particle_ext); CHKERRQ(ierr);
1646
1647 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1648
1649 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading DMSwarm fields for timestep %d.\n", ti);
1650 PetscFunctionReturn(0);
1651}
PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Reads data from a file into a specified field of a PETSc DMSwarm.
Definition io.c:1472
static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
Checks for and reads an optional DMSwarm field from a file.
Definition io.c:970
char particleExt[8]
Definition variables.h:525
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadPositionsFromFile()

PetscErrorCode ReadPositionsFromFile ( PetscInt  timeIndex,
UserCtx user,
double **  coordsArray,
PetscInt *  Ncoords 
)

Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.

This function uses ReadFieldData to fill a PETSc Vec with coordinate data, then leverages VecToArrayOnRank0 to gather that data into a contiguous array (valid on rank 0 only).

Parameters
[in]timeIndexThe time index used to construct file names.
[in]userPointer to the user context.
[out]coordsArrayOn rank 0, will point to a newly allocated array holding the coordinates.
[out]NcoordsOn rank 0, the length of coordsArray. On other ranks, 0.
Returns
PetscErrorCode Returns 0 on success, or non-zero on failures.

Definition at line 2869 of file io.c.

2873{
2874 PetscFunctionBeginUser;
2875
2876 PetscErrorCode ierr;
2877 Vec coordsVec;
2878
2879 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating coords Vec.\n");
2880 ierr = VecCreate(PETSC_COMM_WORLD, &coordsVec);CHKERRQ(ierr);
2881 ierr = VecSetFromOptions(coordsVec);CHKERRQ(ierr);
2882
2883 // For example: "position" is the name of the coordinate data
2884 ierr = ReadFieldData(user, "position", coordsVec, timeIndex, "dat");
2885 if (ierr) {
2887 "Error reading position data (ti=%d).\n",
2888 timeIndex);
2889 PetscFunctionReturn(ierr);
2890 }
2891
2892 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositions - Gathering coords Vec to rank 0.\n");
2893 ierr = VecToArrayOnRank0(coordsVec, Ncoords, coordsArray);CHKERRQ(ierr);
2894
2895 ierr = VecDestroy(&coordsVec);CHKERRQ(ierr);
2896
2898 "Successfully gathered coordinates. Ncoords=%d.\n", *Ncoords);
2899 PetscFunctionReturn(0);
2900}
PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
Gather a (possibly distributed) PETSc Vec onto rank 0 as a contiguous C array.
Definition io.c:2184
Here is the call graph for this function:

◆ ReadFieldDataToRank0()

PetscErrorCode ReadFieldDataToRank0 ( PetscInt  timeIndex,
const char *  fieldName,
UserCtx user,
double **  scalarArray,
PetscInt *  Nscalars 
)

Reads a named field from file into a PETSc Vec, then gathers it to rank 0.

This function wraps ReadFieldData and VecToArrayOnRank0 into a single step. The gathered data is stored in scalarArray on rank 0, with its length in Nscalars.

Parameters
[in]timeIndexThe time index used to construct file names.
[in]fieldNameName of the field to be read (e.g., "velocity").
[in]userPointer to the user context.
[out]scalarArrayOn rank 0, a newly allocated array holding the field data.
[out]NscalarsOn rank 0, length of scalarArray. On other ranks, 0.
Returns
PetscErrorCode Returns 0 on success, or non-zero on failures.

Definition at line 2917 of file io.c.

2922{
2923 PetscFunctionBeginUser;
2924
2925 PetscErrorCode ierr;
2926 Vec fieldVec;
2927
2928 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating field Vec.\n");
2929 ierr = VecCreate(PETSC_COMM_WORLD, &fieldVec);CHKERRQ(ierr);
2930 ierr = VecSetFromOptions(fieldVec);CHKERRQ(ierr);
2931
2932 ierr = ReadFieldData(user, fieldName, fieldVec, timeIndex, "dat");
2933 if (ierr) {
2935 "Error reading field '%s' (ti=%d).\n",
2936 fieldName, timeIndex);
2937 PetscFunctionReturn(ierr);
2938 }
2939
2940 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Gathering field Vec to rank 0.\n");
2941 ierr = VecToArrayOnRank0(fieldVec, Nscalars, scalarArray);CHKERRQ(ierr);
2942
2943 ierr = VecDestroy(&fieldVec);CHKERRQ(ierr);
2944
2946 "Successfully gathered field '%s'. Nscalars=%d.\n",
2947 fieldName, *Nscalars);
2948 PetscFunctionReturn(0);
2949}
Here is the call graph for this function:

◆ DisplayBanner()

PetscErrorCode DisplayBanner ( SimCtx simCtx)

Displays a structured banner summarizing the simulation configuration.

This function prints key simulation parameters to standard output. It is intended to be called ONLY by MPI rank 0. It retrieves global domain bounds from user->global_domain_bbox and boundary conditions for all faces from user->face_bc_types.

Parameters
[in]simCtxPointer to the master SimCtx structure. Rank 0 reads StartTime, StartStep, StepsToRun, np, size, dt, bboxlist, and the finest-level UserCtx (grid dimensions and boundary-face configuration) from it.
Returns
PetscErrorCode Returns 0 on success.

Definition at line 2372 of file io.c.

2373{
2374 PetscErrorCode ierr;
2375 PetscMPIInt rank;
2376 Cmpnts global_min_coords, global_max_coords;
2377 PetscReal StartTime;
2378 PetscInt StartStep,StepsToRun,total_num_particles;
2379 PetscMPIInt num_mpi_procs;
2380
2381 // SimCtx *simCtx = user->simCtx;
2382 UserCtx *user = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
2383 num_mpi_procs = simCtx->size;
2384 StartTime = simCtx->StartTime;
2385 StartStep = simCtx->StartStep;
2386 StepsToRun = simCtx->StepsToRun;
2387 total_num_particles = simCtx->np;
2388 BoundingBox *bboxlist_on_rank0 = simCtx->bboxlist;
2389
2390
2391 PetscFunctionBeginUser;
2392
2393 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "DisplayBanner - UserCtx pointer is NULL.");
2394 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2395
2396 if (rank == 0) {
2397 // If global_domain_bbox is not pre-populated in UserCtx, compute it here from bboxlist_on_rank0
2398 // This assumes bboxlist_on_rank0 is valid and contains all local bounding boxes on rank 0.
2399 if (bboxlist_on_rank0 && num_mpi_procs > 0) {
2400 global_min_coords = bboxlist_on_rank0[0].min_coords;
2401 global_max_coords = bboxlist_on_rank0[0].max_coords;
2402 for (PetscMPIInt p = 1; p < num_mpi_procs; ++p) {
2403 global_min_coords.x = PetscMin(global_min_coords.x, bboxlist_on_rank0[p].min_coords.x);
2404 global_min_coords.y = PetscMin(global_min_coords.y, bboxlist_on_rank0[p].min_coords.y);
2405 global_min_coords.z = PetscMin(global_min_coords.z, bboxlist_on_rank0[p].min_coords.z);
2406 global_max_coords.x = PetscMax(global_max_coords.x, bboxlist_on_rank0[p].max_coords.x);
2407 global_max_coords.y = PetscMax(global_max_coords.y, bboxlist_on_rank0[p].max_coords.y);
2408 global_max_coords.z = PetscMax(global_max_coords.z, bboxlist_on_rank0[p].max_coords.z);
2409 }
2410 // Optionally store this in user->global_domain_bbox if it's useful elsewhere
2411 // user->global_domain_bbox.min_coords = global_min_coords;
2412 // user->global_domain_bbox.max_coords = global_max_coords;
2413 } else {
2414 // Fallback or warning if bboxlist is not available for global calculation
2415 LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank 0) - bboxlist not provided or num_mpi_procs <=0; using user->bbox for domain bounds.\n");
2416 // global_min_coords = user->bbox.min_coords; // Use local bbox of rank 0 as fallback
2417 // global_max_coords = user->bbox.max_coords;
2418 }
2419
2420 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2421 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2422 ierr = PetscPrintf(PETSC_COMM_SELF, " CASE SUMMARY \n"); CHKERRQ(ierr);
2423 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2424 ierr = PetscPrintf(PETSC_COMM_SELF, " Grid Points : %d X %d X %d\n", user->IM, user->JM, user->KM); CHKERRQ(ierr);
2425 ierr = PetscPrintf(PETSC_COMM_SELF, " Cells : %d X %d X %d\n", user->IM - 1, user->JM - 1, user->KM - 1); CHKERRQ(ierr);
2426 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (X) : %.6f to %.6f\n", (double)global_min_coords.x, (double)global_max_coords.x); CHKERRQ(ierr);
2427 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Y) : %.6f to %.6f\n", (double)global_min_coords.y, (double)global_max_coords.y); CHKERRQ(ierr);
2428 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Z) : %.6f to %.6f\n", (double)global_min_coords.z, (double)global_max_coords.z); CHKERRQ(ierr);
2429 if(strcmp(simCtx->eulerianSource,"load")==0 || strcmp(simCtx->eulerianSource,"solve")==0){
2430 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------- Boundary Conditions --------------------\n"); CHKERRQ(ierr);
2431 const int face_name_width = 17; // Adjusted for longer names (Zeta,Eta,Xi)
2432 for (PetscInt i_face = 0; i_face < 6; ++i_face) {
2433 BCFace current_face = (BCFace)i_face;
2434 // The BCFaceToString will now return the Xi, Eta, Zeta versions
2435 const char* face_str = BCFaceToString(current_face);
2436 const char* bc_type_str = BCTypeToString(user->boundary_faces[current_face].mathematical_type);
2437 const char* bc_handler_type_str = BCHandlerTypeToString(user->boundary_faces[current_face].handler_type);
2438 if(user->boundary_faces[current_face].mathematical_type == INLET){
2440 Cmpnts inlet_velocity = {0.0,0.0,0.0};
2441 PetscBool found;
2442 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vx",&inlet_velocity.x,&found); CHKERRQ(ierr);
2443 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vy",&inlet_velocity.y,&found); CHKERRQ(ierr);
2444 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vz",&inlet_velocity.z,&found); CHKERRQ(ierr);
2445 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f,%.4f,%.4f]\n",
2446 face_name_width, face_str, bc_type_str, bc_handler_type_str,inlet_velocity.x,inlet_velocity.y,inlet_velocity.z); CHKERRQ(ierr);
2447 } else if(user->boundary_faces[current_face].handler_type == BC_HANDLER_INLET_PARABOLIC){
2448 PetscReal v_max = 0.0;
2449 PetscBool found;
2450 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"v_max",&v_max,&found); CHKERRQ(ierr);
2451 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - v_max=%.4f\n",
2452 face_name_width, face_str, bc_type_str, bc_handler_type_str, v_max); CHKERRQ(ierr);
2453 }
2455 PetscReal flux;
2456 PetscBool trimflag,foundflux,foundtrimflag;
2457 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"target_flux",&flux,&foundflux); CHKERRQ(ierr);
2458 ierr = GetBCParamBool(user->boundary_faces[current_face].params,"apply_trim",&trimflag,&foundtrimflag); CHKERRQ(ierr);
2459 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f] - %s\n",
2460 face_name_width, face_str, bc_type_str, bc_handler_type_str,flux,trimflag?"Trim Ucont":"No Trim"); CHKERRQ(ierr);
2461 } else{
2462 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s\n",
2463 face_name_width, face_str, bc_type_str,bc_handler_type_str); CHKERRQ(ierr);
2464 }
2465 }
2466 }
2467 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------------------------------------------------\n"); CHKERRQ(ierr);
2468 ierr = PetscPrintf(PETSC_COMM_SELF, " Start Time : %.4f\n", (double)StartTime); CHKERRQ(ierr);
2469 ierr = PetscPrintf(PETSC_COMM_SELF, " Timestep Size : %.4f\n", (double)simCtx->dt); CHKERRQ(ierr);
2470 ierr = PetscPrintf(PETSC_COMM_SELF, " Starting Step : %d\n", StartStep); CHKERRQ(ierr);
2471 ierr = PetscPrintf(PETSC_COMM_SELF, " Total Steps to Run : %d\n", StepsToRun); CHKERRQ(ierr);
2472 ierr = PetscPrintf(PETSC_COMM_SELF, " Number of MPI Processes : %d\n", num_mpi_procs); CHKERRQ(ierr);
2473 ierr = PetscPrintf(PETSC_COMM_WORLD," Number of Particles : %d\n", total_num_particles); CHKERRQ(ierr);
2474 if(strcmp(simCtx->eulerianSource,"solve")==0 || strcmp(simCtx->eulerianSource,"load")==0){
2475 const char* field_init_str = FieldInitializationToString(simCtx->FieldInitialization);
2476 const char* particle_init_str = ParticleInitializationToString(simCtx->ParticleInitialization);
2477 ierr = PetscPrintf(PETSC_COMM_WORLD," Reynolds Number : %le\n", simCtx->ren); CHKERRQ(ierr);
2478 //ierr = PetscPrintf(PETSC_COMM_WORLD," Von-Neumann Number : %le\n", simCtx->vnn); CHKERRQ(ierr);
2479 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Initialization Mode: %s\n", particle_init_str); CHKERRQ(ierr);
2480 if(strcmp(simCtx->eulerianSource,"solve")==0){
2481 //ierr = PetscPrintf(PETSC_COMM_WORLD," Stanton Number : %le\n", simCtx->st); CHKERRQ(ierr);
2482 ierr = PetscPrintf(PETSC_COMM_WORLD," Momentum Equation Solver : %s\n", MomentumSolverTypeToString(simCtx->mom_solver_type)); CHKERRQ(ierr);
2483 ierr = PetscPrintf(PETSC_COMM_WORLD," Initial Pseudo-CFL : %le\n", simCtx->pseudo_cfl); CHKERRQ(ierr);
2484 ierr = PetscPrintf(PETSC_COMM_WORLD," Large Eddy Simulation Model : %s\n", LESModelToString(simCtx->les)); CHKERRQ(ierr);
2485 }
2488 if (user->inletFaceDefined) {
2489 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : %s (Enum Val: %d)\n", BCFaceToString(user->identifiedInletBCFace), user->identifiedInletBCFace); CHKERRQ(ierr);
2490 } else {
2491 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : --- (No INLET face identified)\n"); CHKERRQ(ierr);
2492 }
2493 }
2494
2495 ierr = PetscPrintf(PETSC_COMM_SELF, " Field Initialization Mode : %s\n", field_init_str); CHKERRQ(ierr);
2496 if (simCtx->FieldInitialization == 1) {
2497 ierr = PetscPrintf(PETSC_COMM_SELF, " Constant Velocity : x - %.4f, y - %.4f, z - %.4f \n", (double)simCtx->InitialConstantContra.x,(double)simCtx->InitialConstantContra.y,(double)simCtx->InitialConstantContra.z ); CHKERRQ(ierr);
2498 }
2499 } else if(strcmp(simCtx->eulerianSource,"analytical")==0){
2500 ierr = PetscPrintf(PETSC_COMM_WORLD," Analytical Solution Type : %s\n", simCtx->AnalyticalSolutionType); CHKERRQ(ierr);
2501 }
2502 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2503 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2504 }
2505 PetscFunctionReturn(0);
2506}
PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
Searches a BC_Param linked list for a key and returns its value as a double.
Definition io.c:405
PetscErrorCode GetBCParamBool(BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found)
Searches a BC_Param linked list for a key and returns its value as a bool.
Definition io.c:430
const char * BCHandlerTypeToString(BCHandlerType handler_type)
Converts a BCHandlerType enum to its string representation.
Definition logging.c:748
const char * FieldInitializationToString(PetscInt FieldInitialization)
Helper function to convert FieldInitialization to a string representation.
Definition logging.c:660
const char * BCFaceToString(BCFace face)
Helper function to convert BCFace enum to a string representation.
Definition logging.c:643
const char * BCTypeToString(BCType type)
Helper function to convert BCType enum to a string representation.
Definition logging.c:722
const char * LESModelToString(LESModelType LESFlag)
Helper function to convert LES Flag to a string representation.
Definition logging.c:691
const char * MomentumSolverTypeToString(MomentumSolverType SolverFlag)
Helper function to convert Momentum Solver flag to a string representation.
Definition logging.c:706
const char * ParticleInitializationToString(ParticleInitializationType ParticleInitialization)
Helper function to convert ParticleInitialization to a string representation.
Definition logging.c:675
@ INLET
Definition variables.h:217
UserCtx * user
Definition variables.h:474
PetscBool inletFaceDefined
Definition variables.h:752
BoundaryFaceConfig boundary_faces[6]
Definition variables.h:751
BCFace identifiedInletBCFace
Definition variables.h:753
@ PARTICLE_INIT_SURFACE_RANDOM
Random placement on the inlet face.
Definition variables.h:466
@ PARTICLE_INIT_SURFACE_EDGES
Deterministic placement at inlet face edges.
Definition variables.h:469
PetscReal StartTime
Definition variables.h:602
UserMG usermg
Definition variables.h:703
@ BC_HANDLER_INLET_PARABOLIC
Definition variables.h:236
@ BC_HANDLER_INLET_CONSTANT_VELOCITY
Definition variables.h:235
@ BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX
Definition variables.h:245
PetscInt FieldInitialization
Definition variables.h:641
PetscReal ren
Definition variables.h:636
BCHandlerType handler_type
Definition variables.h:296
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
PetscReal dt
Definition variables.h:603
PetscInt StepsToRun
Definition variables.h:600
PetscInt StartStep
Definition variables.h:599
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
PetscScalar x
Definition variables.h:101
BoundingBox * bboxlist
Definition variables.h:684
BC_Param * params
Definition variables.h:297
char eulerianSource[PETSC_MAX_PATH_LEN]
Definition variables.h:608
ParticleInitializationType ParticleInitialization
Definition variables.h:685
PetscScalar z
Definition variables.h:101
PetscInt mglevels
Definition variables.h:481
char AnalyticalSolutionType[PETSC_MAX_PATH_LEN]
Definition variables.h:621
Cmpnts InitialConstantContra
Definition variables.h:642
PetscScalar y
Definition variables.h:101
PetscMPIInt size
Definition variables.h:593
MGCtx * mgctx
Definition variables.h:484
BCType mathematical_type
Definition variables.h:295
MomentumSolverType mom_solver_type
Definition variables.h:628
PetscReal pseudo_cfl
Definition variables.h:636
BCFace
Identifies the six logical faces of a structured computational block.
Definition variables.h:203
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
A 3D point or vector with PetscScalar components.
Definition variables.h:100
User-defined context containing data specific to a single computational grid level.
Definition variables.h:733
Here is the call graph for this function:
Here is the caller graph for this function:

◆ StringToBCFace()

PetscErrorCode StringToBCFace ( const char *  str,
BCFace face_out 
)

Converts a string representation of a face to a BCFace enum.

Parameters
strThe input string (e.g., "-Xi", "+Zeta"). Case-insensitive.
[out]face_outThe resulting BCFace enum.
Returns
0 on success.

Definition at line 325 of file io.c.

325 {
326 if (strcasecmp(str, "-Xi") == 0) *face_out = BC_FACE_NEG_X;
327 else if (strcasecmp(str, "+Xi") == 0) *face_out = BC_FACE_POS_X;
328 else if (strcasecmp(str, "-Eta") == 0) *face_out = BC_FACE_NEG_Y;
329 else if (strcasecmp(str, "+Eta") == 0) *face_out = BC_FACE_POS_Y;
330 else if (strcasecmp(str, "-Zeta") == 0) *face_out = BC_FACE_NEG_Z;
331 else if (strcasecmp(str, "+Zeta") == 0) *face_out = BC_FACE_POS_Z;
332 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown face specifier: %s", str);
333 return 0;
334}
@ BC_FACE_NEG_X
Definition variables.h:204
@ BC_FACE_POS_Z
Definition variables.h:206
@ BC_FACE_POS_Y
Definition variables.h:205
@ BC_FACE_NEG_Z
Definition variables.h:206
@ BC_FACE_POS_X
Definition variables.h:204
@ BC_FACE_NEG_Y
Definition variables.h:205
Here is the caller graph for this function:

◆ StringToBCType()

PetscErrorCode StringToBCType ( const char *  str,
BCType type_out 
)

Converts a string representation of a BC type to a BCType enum.

Parameters
strThe input string (e.g., "WALL", "INLET"). Case-insensitive.
[out]type_outThe resulting BCType enum.
Returns
0 on success.

Definition at line 342 of file io.c.

342 {
343 if (strcasecmp(str, "WALL") == 0) *type_out = WALL;
344 else if (strcasecmp(str, "SYMMETRY") == 0) *type_out = SYMMETRY;
345 else if (strcasecmp(str, "INLET") == 0) *type_out = INLET;
346 else if (strcasecmp(str, "OUTLET") == 0) *type_out = OUTLET;
347 else if (strcasecmp(str, "PERIODIC") == 0) *type_out = PERIODIC;
348 // ... add other BCTypes here ...
349 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Type string: %s", str);
350 return 0;
351}
@ SYMMETRY
Definition variables.h:215
@ OUTLET
Definition variables.h:216
@ PERIODIC
Definition variables.h:219
@ WALL
Definition variables.h:213
Here is the caller graph for this function:

◆ StringToBCHandlerType()

PetscErrorCode StringToBCHandlerType ( const char *  str,
BCHandlerType handler_out 
)

Converts a string representation of a handler to a BCHandlerType enum.

Parameters
strThe input string (e.g., "noslip", "constant_velocity"). Case-insensitive.
[out]handler_outThe resulting BCHandlerType enum.
Returns
0 on success.

Definition at line 359 of file io.c.

359 {
360 if (strcasecmp(str, "noslip") == 0) *handler_out = BC_HANDLER_WALL_NOSLIP;
361 else if (strcasecmp(str, "constant_velocity") == 0) *handler_out = BC_HANDLER_INLET_CONSTANT_VELOCITY;
362 else if (strcasecmp(str, "conservation") == 0) *handler_out = BC_HANDLER_OUTLET_CONSERVATION;
363 else if (strcasecmp(str, "parabolic") == 0) *handler_out = BC_HANDLER_INLET_PARABOLIC;
364 else if (strcasecmp(str,"geometric") == 0) *handler_out = BC_HANDLER_PERIODIC_GEOMETRIC;
365 else if (strcasecmp(str,"constant_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX;
366 else if (strcasecmp(str,"initial_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX;
367 // ... add other BCHandlerTypes here ...
368 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Handler string: %s", str);
369 return 0;
370}
@ BC_HANDLER_PERIODIC_GEOMETRIC
Definition variables.h:243
@ BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX
Definition variables.h:246
@ BC_HANDLER_WALL_NOSLIP
Definition variables.h:232
@ BC_HANDLER_OUTLET_CONSERVATION
Definition variables.h:241
Here is the caller graph for this function:

◆ ValidateBCHandlerForBCType()

PetscErrorCode ValidateBCHandlerForBCType ( BCType  type,
BCHandlerType  handler 
)

Validates that a specific handler is compatible with a general BC type.

Parameters
typeThe general BCType.
handlerThe specific BCHandlerType.
Returns
0 if compatible, error code otherwise.

Definition at line 378 of file io.c.

378 {
379 switch (type) {
380 case OUTLET:
381 if(handler != BC_HANDLER_OUTLET_CONSERVATION) return PETSC_ERR_ARG_WRONG;
382 break;
383 case WALL:
384 if (handler != BC_HANDLER_WALL_NOSLIP && handler != BC_HANDLER_WALL_MOVING) return PETSC_ERR_ARG_WRONG;
385 break;
386 case INLET:
387 if (handler != BC_HANDLER_INLET_CONSTANT_VELOCITY && handler != BC_HANDLER_INLET_PARABOLIC) return PETSC_ERR_ARG_WRONG;
388 break;
389 case PERIODIC:
390 if(handler != BC_HANDLER_PERIODIC_GEOMETRIC && handler != BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX && handler != BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX) return PETSC_ERR_ARG_WRONG;
391 // ... add other validation cases here ...
392 default: break;
393 }
394 return 0; // Combination is valid
395}
@ BC_HANDLER_WALL_MOVING
Definition variables.h:233
Here is the caller graph for this function:

◆ FreeBC_ParamList()

void FreeBC_ParamList ( BC_Param head)

Frees the memory allocated for a linked list of BC_Param structs.

Parameters
headA pointer to the head of the linked list to be freed.

Definition at line 308 of file io.c.

308 {
309 BC_Param *current = head;
310 while (current != NULL) {
311 BC_Param *next = current->next;
312 PetscFree(current->key);
313 PetscFree(current->value);
314 PetscFree(current);
315 current = next;
316 }
317}
struct BC_Param_s * next
Definition variables.h:266
char * key
Definition variables.h:264
char * value
Definition variables.h:265
A node in a linked list for storing key-value parameters from the bcs.dat file.
Definition variables.h:263
Here is the caller graph for this function:

◆ GetBCParamReal()

PetscErrorCode GetBCParamReal ( BC_Param params,
const char *  key,
PetscReal *  value_out,
PetscBool *  found 
)

Searches a BC_Param linked list for a key and returns its value as a double.

Parameters
paramsThe head of the BC_Param linked list.
keyThe key to search for (case-insensitive).
[out]value_outThe found value, converted to a PetscReal.
[out]foundSet to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
Returns
0 on success.

Definition at line 405 of file io.c.

405 {
406 *found = PETSC_FALSE;
407 *value_out = 0.0;
408 if (!key) return 0; // No key to search for
409
410 BC_Param *current = params;
411 while (current) {
412 if (strcasecmp(current->key, key) == 0) {
413 *value_out = atof(current->value);
414 *found = PETSC_TRUE;
415 return 0; // Found it, we're done
416 }
417 current = current->next;
418 }
419 return 0; // It's not an error to not find the key.
420}
Here is the caller graph for this function:

◆ GetBCParamBool()

PetscErrorCode GetBCParamBool ( BC_Param params,
const char *  key,
PetscBool *  value_out,
PetscBool *  found 
)

Searches a BC_Param linked list for a key and returns its value as a bool.

Parameters
paramsThe head of the BC_Param linked list.
keyThe key to search for (case-insensitive).
[out]value_outThe found value, converted to a PetscBool.
[out]foundSet to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
Returns
0 on success.

Definition at line 430 of file io.c.

430 {
431 *found = PETSC_FALSE;
432 *value_out = PETSC_FALSE;
433 if (!key) return 0; // No key to search for
434
435 BC_Param *current = params;
436 while (current) {
437 if (strcasecmp(current->key, key) == 0) {
438 // Key was found.
439 *found = PETSC_TRUE;
440
441 // Check the value string. Default to FALSE if the value is NULL or doesn't match a "true" string.
442 if (current->value &&
443 (strcasecmp(current->value, "true") == 0 ||
444 strcmp(current->value, "1") == 0 ||
445 strcasecmp(current->value, "yes") == 0))
446 {
447 *value_out = PETSC_TRUE;
448 } else {
449 *value_out = PETSC_FALSE;
450 }
451 return 0; // Found it, we're done
452 }
453 current = current->next;
454 }
455 return 0; // It's not an error to not find the key.
456}
Here is the caller graph for this function:

◆ ParseAllBoundaryConditions()

PetscErrorCode ParseAllBoundaryConditions ( UserCtx user,
const char *  bcs_input_filename 
)

Parses the boundary conditions file to configure the type, handler, and any associated parameters for all 6 global faces of the domain.

This function performs the following steps:

  1. On MPI rank 0, it reads the specified configuration file line-by-line.
  2. It parses each line for <Face> <Type> <Handler> [param=value]... format.
  3. It validates the parsed strings and stores the configuration, including a linked list of parameters, in a temporary array.
  4. It then serializes this configuration and broadcasts it to all other MPI ranks.
  5. All ranks (including rank 0) then deserialize the broadcasted data to populate their local user->boundary_faces array identically.
  6. It also sets legacy fields in UserCtx for compatibility with other modules.
Parameters
[in,out]userThe main UserCtx struct where the final configuration for all ranks will be stored.
[in]bcs_input_filenameThe path to the boundary conditions configuration file.
Returns
PetscErrorCode 0 on success, error code on failure.

Definition at line 484 of file io.c.

485{
486 PetscErrorCode ierr;
487 PetscMPIInt rank;
488
489 // Temporary storage for rank 0 to build the configuration before broadcasting.
490 BoundaryFaceConfig configs_rank0[6];
491
492 PetscFunctionBeginUser;
494 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
495
496 if (rank == 0) {
497 FILE *file;
498 char line_buffer[1024];
499
500 // Initialize the temporary config array with safe defaults on rank 0.
501 for (int i = 0; i < 6; i++) {
502 configs_rank0[i].face_id = (BCFace)i;
503 configs_rank0[i].mathematical_type = WALL;
504 configs_rank0[i].handler_type = BC_HANDLER_WALL_NOSLIP;
505 configs_rank0[i].params = NULL;
506 configs_rank0[i].handler = NULL; // Handler object is not created here.
507 }
508
509 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing BC configuration from '%s' on rank 0... \n", bcs_input_filename);
510 file = fopen(bcs_input_filename, "r");
511 if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_input_filename);
512
513 while (fgets(line_buffer, sizeof(line_buffer), file)) {
514 char *current_pos = line_buffer;
515 while (isspace((unsigned char)*current_pos)) current_pos++; // Skip leading whitespace
516 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n' || *current_pos == '\r') continue;
517
518 char *face_str = strtok(current_pos, " \t\n\r");
519 char *type_str = strtok(NULL, " \t\n\r");
520 char *handler_str = strtok(NULL, " \t\n\r");
521
522 if (!face_str || !type_str || !handler_str) {
523 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed line in bcs.dat, skipping: %s \n", line_buffer);
524 continue;
525 }
526
527 BCFace face_enum;
528 BCType type_enum;
529 BCHandlerType handler_enum;
530 const char* handler_name_for_log;
531
532 // --- Convert strings to enums and validate ---
533 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
534 ierr = StringToBCType(type_str, &type_enum); CHKERRQ(ierr);
535 ierr = StringToBCHandlerType(handler_str, &handler_enum); CHKERRQ(ierr);
536 ierr = ValidateBCHandlerForBCType(type_enum, handler_enum);
537 if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Validation failed: Handler '%s' is not valid for Type '%s' on Face '%s'.\n", handler_str, type_str, face_str);
538
539 // Store the core types for the corresponding face
540 configs_rank0[face_enum].mathematical_type = type_enum;
541 configs_rank0[face_enum].handler_type = handler_enum;
542 handler_name_for_log = BCHandlerTypeToString(handler_enum); // Assumes this utility exists
543 LOG_ALLOW(GLOBAL, LOG_DEBUG, " Parsed Face '%s': Type=%s, Handler=%s \n", face_str, type_str, handler_name_for_log);
544
545 // --- Parse optional key=value parameters for this face ---
546 FreeBC_ParamList(configs_rank0[face_enum].params); // Clear any previous (default) params
547 configs_rank0[face_enum].params = NULL;
548 BC_Param **param_next_ptr = &configs_rank0[face_enum].params; // Pointer to the 'next' pointer to build the list
549
550 char* token;
551 while ((token = strtok(NULL, " \t\n\r")) != NULL) {
552 char* equals_ptr = strchr(token, '=');
553 if (!equals_ptr) {
554 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed parameter '%s' on face '%s', skipping. \n", token, face_str);
555 continue;
556 }
557
558 *equals_ptr = '\0'; // Temporarily split the string at '=' to separate key and value
559 char* key_str = token;
560 char* value_str = equals_ptr + 1;
561
562 BC_Param *new_param;
563 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
564 ierr = PetscStrallocpy(key_str, &new_param->key); CHKERRQ(ierr);
565 ierr = PetscStrallocpy(value_str, &new_param->value); CHKERRQ(ierr);
566 new_param->next = NULL;
567
568 *param_next_ptr = new_param;
569 param_next_ptr = &new_param->next;
570 LOG_ALLOW(GLOBAL, LOG_TRACE, " - Found param: [%s] = [%s] \n", new_param->key, new_param->value);
571 }
572 }
573 fclose(file);
574 }
575
576 // =========================================================================
577 // BROADCASTING THE CONFIGURATION FROM RANK 0
578 // =========================================================================
579 // This is a critical step to ensure all processes have the same configuration.
580
581 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d broadcasting/receiving BC configuration.\n", rank);
582
583 for (int i = 0; i < 6; i++) {
584 // --- Broadcast simple enums ---
585 if (rank == 0) {
586 user->boundary_faces[i] = configs_rank0[i]; // Rank 0 populates its final struct
587 }
588 ierr = MPI_Bcast(&user->boundary_faces[i].mathematical_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
589 ierr = MPI_Bcast(&user->boundary_faces[i].handler_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
590
591 // --- Serialize and Broadcast the parameter linked list ---
592 PetscInt n_params = 0;
593 if (rank == 0) { // On rank 0, count the number of parameters to send
594 for (BC_Param *p = user->boundary_faces[i].params; p; p = p->next) n_params++;
595 }
596 ierr = MPI_Bcast(&n_params, 1, MPI_INT, 0, PETSC_COMM_WORLD);CHKERRQ(ierr);
597
598 if (rank != 0) { // Non-root ranks need to receive and build the list
599 FreeBC_ParamList(user->boundary_faces[i].params); // Ensure list is empty before building
600 user->boundary_faces[i].params = NULL;
601 }
602
603 BC_Param **param_next_ptr = &user->boundary_faces[i].params;
604
605 for (int j = 0; j < n_params; j++) {
606 char key_buf[256] = {0}, val_buf[256] = {0};
607 if (rank == 0) {
608 // On rank 0, navigate to the j-th param and copy its data to buffers
609 BC_Param *p = user->boundary_faces[i].params;
610 for (int k = 0; k < j; k++) p = p->next;
611 strncpy(key_buf, p->key, 255);
612 strncpy(val_buf, p->value, 255);
613 }
614
615 ierr = MPI_Bcast(key_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
616 ierr = MPI_Bcast(val_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
617
618 if (rank != 0) {
619 // On non-root ranks, deserialize: create a new node and append it
620 BC_Param *new_param;
621 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
622 ierr = PetscStrallocpy(key_buf, &new_param->key); CHKERRQ(ierr);
623 ierr = PetscStrallocpy(val_buf, &new_param->value); CHKERRQ(ierr);
624 new_param->next = NULL;
625 *param_next_ptr = new_param;
626 param_next_ptr = &new_param->next;
627 } else {
628 // On rank 0, just advance the pointer for the next iteration
629 param_next_ptr = &((*param_next_ptr)->next);
630 }
631 }
632 user->boundary_faces[i].face_id = (BCFace)i; // Ensure face_id is set on all ranks
633 }
634
635 // --- Set legacy fields for compatibility with particle system ---
636 user->inletFaceDefined = PETSC_FALSE;
637 for (int i=0; i<6; i++) {
638
639 if (user->boundary_faces[i].mathematical_type == INLET && !user->inletFaceDefined) {
640 user->inletFaceDefined = PETSC_TRUE;
641 user->identifiedInletBCFace = (BCFace)i;
642 LOG_ALLOW(GLOBAL, LOG_INFO, "Inlet face for particle initialization identified as Face %d.\n", i);
643 break; // Found the first one, stop looking
644 }
645 }
646
647
648 if (rank == 0) {
649 // Rank 0 can now free the linked lists it created for the temporary storage.
650 // As written, user->boundary_faces was populated directly on rank 0, so no extra free is needed.
651 // for(int i=0; i<6; i++) FreeBC_ParamList(configs_rank0[i].params); // This would be needed if we used configs_rank0 exclusively
652 }
653
655 PetscFunctionReturn(0);
656}
PetscErrorCode StringToBCHandlerType(const char *str, BCHandlerType *handler_out)
Converts a string representation of a handler to a BCHandlerType enum.
Definition io.c:359
PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler)
Validates that a specific handler is compatible with a general BC type.
Definition io.c:378
PetscErrorCode StringToBCFace(const char *str, BCFace *face_out)
Converts a string representation of a face to a BCFace enum.
Definition io.c:325
void FreeBC_ParamList(BC_Param *head)
Frees the memory allocated for a linked list of BC_Param structs.
Definition io.c:308
PetscErrorCode StringToBCType(const char *str, BCType *type_out)
Converts a string representation of a BC type to a BCType enum.
Definition io.c:342
BCType
Defines the general mathematical/physical Category of a boundary.
Definition variables.h:210
BCHandlerType
Defines the specific computational "strategy" for a boundary handler.
Definition variables.h:230
BoundaryCondition * handler
Definition variables.h:298
Holds the complete configuration for one of the six boundary faces.
Definition variables.h:293
Here is the call graph for this function:
Here is the caller graph for this function:

◆ DeterminePeriodicity()

PetscErrorCode DeterminePeriodicity ( SimCtx simCtx)

Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.

This is a lightweight pre-parser intended to be called before DMDA creation. It ensures that the periodicity setting is consistent across all blocks, which is a physical requirement for the domain.

  1. It collectively verifies that the mandatory BCS file for each block exists.
  2. On MPI rank 0, it then iterates through the files.
  3. It parses each line to extract the first (face) and second (type) tokens, mimicking the main BC parser's tokenization logic.
  4. It performs a direct, case-insensitive comparison of the type token against "PERIODIC" and, on a match, flags the corresponding face.
  5. Any other type token (e.g., "WALL", "INLET") is simply ignored, as it is not relevant to periodicity.
  6. It validates consistency (e.g., -Xi and +Xi match) and ensures all block files specify the same global periodicity.
  7. It broadcasts the final three flags (as integers 0 or 1) to all MPI ranks.
  8. All ranks update the i_periodic, j_periodic, and k_periodic fields in their SimCtx.
Parameters
[in,out]simCtxThe master SimCtx struct, containing the bcs_files list and where the final periodicity flags will be stored.
Returns
PetscErrorCode 0 on success, error code on failure.

Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.

This is a lightweight pre-parser intended to be called before DMDA creation. It ensures that the periodicity setting is consistent across all blocks, which is a physical requirement for the domain.

  1. It collectively verifies that the mandatory BCS file for each block exists.
  2. On MPI rank 0, it then iterates through the files.
  3. It parses each line to extract the first (face) and second (type) tokens, mimicking the main BC parser's tokenization logic.
  4. It performs a direct, case-insensitive string comparison on the "type" token to check if it is "PERIODIC". Other types are silently ignored.
  5. It validates consistency (e.g., -Xi and +Xi match) and ensures all block files specify the same global periodicity.
  6. It broadcasts the final three flags (as integers 0 or 1) to all MPI ranks.
  7. All ranks update the i_periodic, j_periodic, and k_periodic fields in their SimCtx.
Parameters
[in,out]simCtxThe master SimCtx struct, containing the bcs_files list and where the final periodicity flags will be stored.
Returns
PetscErrorCode 0 on success, error code on failure.

Definition at line 690 of file io.c.

691{
692 PetscErrorCode ierr;
693 PetscMPIInt rank;
694 PetscInt periodic_flags[3] = {0, 0, 0}; // Index 0:I, 1:J, 2:K
695
696 PetscFunctionBeginUser;
697 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
698
699 // --- Part 1: Collectively verify all BCS files exist before proceeding ---
700 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
701 const char *bcs_filename = simCtx->bcs_files[bi];
702 if (!bcs_filename) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "BCS filename for block %d is not set in SimCtx.", bi);
703 char desc_buf[256];
704 PetscBool file_exists;
705 snprintf(desc_buf, sizeof(desc_buf), "BCS file for block %d", bi);
706 ierr = VerifyPathExistence(bcs_filename, PETSC_FALSE, PETSC_FALSE, desc_buf, &file_exists); CHKERRQ(ierr);
707 }
708
709 // --- Part 2: Rank 0 does the parsing, since we know all files exist ---
710 if (rank == 0) {
711 PetscBool global_is_periodic[3] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};
712 PetscBool is_set = PETSC_FALSE;
713
714 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
715 const char *bcs_filename = simCtx->bcs_files[bi];
716 FILE *file = fopen(bcs_filename, "r");
717
718 PetscBool face_is_periodic[6] = {PETSC_FALSE};
719 char line_buffer[1024];
720
721 while (fgets(line_buffer, sizeof(line_buffer), file)) {
722 char *current_pos = line_buffer;
723 while (isspace((unsigned char)*current_pos)) current_pos++;
724 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n') continue;
725
726 // --- Tokenize the line exactly like the main parser ---
727 char *face_str = strtok(current_pos, " \t\n\r");
728 char *type_str = strtok(NULL, " \t\n\r");
729
730 // If the line doesn't have at least two tokens, we can't determine the type.
731 if (!face_str || !type_str) continue;
732
733 // --- Perform a direct, non-erroring check on the mathematical type string ---
734 if (strcasecmp(type_str, "PERIODIC") == 0) {
735 BCFace face_enum;
736 // A malformed face string on a periodic line IS a fatal error.
737 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
738 face_is_periodic[face_enum] = PETSC_TRUE;
739 }
740 // Any other type_str (e.g., "WALL", "INLET") is correctly and silently ignored.
741 }
742 fclose(file);
743
744 // --- Validate consistency within this file ---
745 if (face_is_periodic[BC_FACE_NEG_X] != face_is_periodic[BC_FACE_POS_X])
746 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent X-periodicity in file '%s' for block %d. Both -Xi and +Xi must be periodic or neither.", bcs_filename, bi);
747 if (face_is_periodic[BC_FACE_NEG_Y] != face_is_periodic[BC_FACE_POS_Y])
748 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Y-periodicity in file '%s' for block %d. Both -Eta and +Eta must be periodic or neither.", bcs_filename, bi);
749 if (face_is_periodic[BC_FACE_NEG_Z] != face_is_periodic[BC_FACE_POS_Z])
750 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Z-periodicity in file '%s' for block %d. Both -Zeta and +Zeta must be periodic or neither.", bcs_filename, bi);
751
752 PetscBool local_is_periodic[3] = {face_is_periodic[BC_FACE_NEG_X], face_is_periodic[BC_FACE_NEG_Y], face_is_periodic[BC_FACE_NEG_Z]};
753
754 // --- Validate consistency across block files ---
755 if (!is_set) {
756 global_is_periodic[0] = local_is_periodic[0];
757 global_is_periodic[1] = local_is_periodic[1];
758 global_is_periodic[2] = local_is_periodic[2];
759 is_set = PETSC_TRUE;
760 } else {
761 if (global_is_periodic[0] != local_is_periodic[0] || global_is_periodic[1] != local_is_periodic[1] || global_is_periodic[2] != local_is_periodic[2]) {
762 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
763 "Periodicity mismatch between blocks. Block 0 requires (I:%d, J:%d, K:%d), but block %d (file '%s') has (I:%d, J:%d, K:%d).",
764 (int)global_is_periodic[0], (int)global_is_periodic[1], (int)global_is_periodic[2],
765 bi, bcs_filename,
766 (int)local_is_periodic[0], (int)local_is_periodic[1], (int)local_is_periodic[2]);
767 }
768 }
769 } // end loop over blocks
770
771 periodic_flags[0] = (global_is_periodic[0]) ? 1 : 0;
772 periodic_flags[1] = (global_is_periodic[1]) ? 1 : 0;
773 periodic_flags[2] = (global_is_periodic[2]) ? 1 : 0;
774
775 LOG_ALLOW(GLOBAL, LOG_INFO, "Global periodicity determined: I-periodic=%d, J-periodic=%d, K-periodic=%d\n",
776 periodic_flags[0], periodic_flags[1], periodic_flags[2]);
777 }
778
779 // --- Part 3: Broadcast the final flags from rank 0 to all other ranks ---
780 ierr = MPI_Bcast(periodic_flags, 3, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
781
782 // --- All ranks now update their SimCtx ---
783 simCtx->i_periodic = periodic_flags[0];
784 simCtx->j_periodic = periodic_flags[1];
785 simCtx->k_periodic = periodic_flags[2];
786
787 PetscFunctionReturn(0);
788}
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
A parallel-safe helper to verify the existence of a generic file or directory path.
Definition io.c:807
char ** bcs_files
Definition variables.h:662
PetscInt k_periodic
Definition variables.h:655
PetscInt i_periodic
Definition variables.h:655
PetscInt j_periodic
Definition variables.h:655
Here is the call graph for this function:
Here is the caller graph for this function:

◆ TrimWhitespace()

void TrimWhitespace ( char *  str)

Helper function to trim leading/trailing whitespace from a string.

Parameters
strThe string to trim in-place.

Helper function to trim leading/trailing whitespace from a string.

Parameters
strThe string to be trimmed.

Definition at line 36 of file io.c.

/**
 * Trims leading and trailing whitespace from a string in-place.
 *
 * A NULL pointer is accepted and ignored. A string that is empty or consists
 * entirely of whitespace becomes the empty string.
 *
 * @param str The string to trim in-place (must be writable if non-NULL).
 */
void TrimWhitespace(char *str)
{
    if (!str) return;

    /* Guard the empty string: without this, `str + strlen(str) - 1` below
     * would form a pointer before the start of the array, which is UB. */
    if (str[0] == '\0') return;

    char *start = str;
    /* Find the first non-whitespace character. */
    while (isspace((unsigned char)*start)) {
        start++;
    }

    /* Entirely whitespace: collapse to the empty string. */
    if (*start == '\0') {
        str[0] = '\0';
        return;
    }

    /* Move backwards from the end to find the last non-whitespace character. */
    char *end = str + strlen(str) - 1;
    while (end > start && isspace((unsigned char)*end)) {
        end--;
    }

    /* Null-terminate after the last non-whitespace character. */
    *(end + 1) = '\0';

    /* If there was leading whitespace, shift the string to the left.
     * +2 includes the new null terminator; memmove handles the overlap. */
    if (str != start) {
        memmove(str, start, (size_t)(end - start) + 2);
    }
}
Here is the caller graph for this function:

◆ ParsePostProcessingSettings()

PetscErrorCode ParsePostProcessingSettings ( SimCtx simCtx)

Initializes post-processing settings from a config file and command-line overrides.

This function establishes the configuration for a post-processing run by:

  1. Setting hardcoded default values in the PostProcessParams struct.
  2. Reading a configuration file to override the defaults.
  3. Parsing command-line options (-startTime, -endTime, etc.) which can override both the defaults and the file settings.
Parameters
simCtxThe pointer to the simulation context that contains the postprocessing file and struct.
Returns
PetscErrorCode

This function establishes the configuration for a post-processing run by:

  1. Setting hardcoded default values in the PostProcessParams struct.
  2. Reading a configuration file to override the defaults.
  3. Parsing command-line options (-startTime, -endTime, etc.) which can override both the defaults and the file settings.
Parameters
configFileThe path to the configuration file to parse.
ppsPointer to the PostProcessParams struct to be populated.
Returns
PetscErrorCode

Definition at line 2523 of file io.c.

// NOTE(review): Doxygen-extracted listing; inserted review comments carry no source
// line numbers. Purpose: populate simCtx->pps in three layers — hardcoded defaults,
// then key=value overrides from simCtx->PostprocessingControlFile, then command-line
// overrides (-startTime, -endTime, -timeStep, -output_particles).
2524{
2525 FILE *file;
2526 char line[1024];
// Set by PetscOptionsGetInt below to report whether each option was explicitly
// supplied on the command line; read only after those calls.
2527 PetscBool startTimeSet, endTimeSet, timeStepSet;
2528
2529 PetscFunctionBeginUser;
2531
2532 if (!simCtx || !simCtx->pps) {
2533 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "SimCtx or its pps member is NULL in ParsePostProcessingSettings.");
2534 }
2535
2536 char *configFile = simCtx->PostprocessingControlFile;
2537 PostProcessParams *pps = simCtx->pps;
2538
2539
2540 // --- 1. Set Sane Defaults First ---
2541 pps->startTime = 0;
2542 pps->endTime = 0;
2543 pps->timeStep = 1;
2544 pps->outputParticles = PETSC_FALSE;
2545 pps->particle_output_freq = simCtx->LoggingFrequency; // Default to logging frequency;
2546 strcpy(pps->process_pipeline, "");
2547 strcpy(pps->output_fields_instantaneous, "Ucat,P");
2548 strcpy(pps->output_fields_averaged, "");
2549 strcpy(pps->output_prefix, "Field");
2550 strcpy(pps->particle_output_prefix,"Particle");
2551 strcpy(pps->particle_fields,"velocity,CellID,weight,pid");
2552 strcpy(pps->particle_pipeline,"");
2553 strncpy(pps->statistics_pipeline, "", MAX_PIPELINE_LENGTH - 1);
2554 strncpy(pps->statistics_output_prefix, "Stats", MAX_FILENAME_LENGTH - 1);
2555 strcpy(pps->particleExt,"dat"); // The input file format for particles.
2556 strcpy(pps->eulerianExt,"dat"); // The input file format for Eulerian fields.
2557 pps->reference[0] = pps->reference[1] = pps->reference[2] = 1;
2558 strncpy(pps->source_dir, simCtx->output_dir, sizeof(pps->source_dir) - 1);
2559 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0'; // Ensure null-termination
2560
2561 // --- 2. Parse the Configuration File (overrides defaults) ---
// Missing file is non-fatal: defaults plus command-line overrides are used instead.
2562 file = fopen(configFile, "r");
2563 if (file) {
2564 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing post-processing config file: %s\n", configFile);
2565 while (fgets(line, sizeof(line), file)) {
2566 char *key, *value, *comment;
// Strip '#' comments, trim, skip blank lines, then split on the first '='.
2567 comment = strchr(line, '#'); if (comment) *comment = '\0';
2568 TrimWhitespace(line); if (strlen(line) == 0) continue;
2569 key = strtok(line, "="); value = strtok(NULL, "=");
2570 if (key && value) {
2571 TrimWhitespace(key); TrimWhitespace(value);
// NOTE(review): atoi() yields 0 on malformed input with no error report;
// strtol() would allow detecting bad values.
2572 if (strcmp(key, "startTime") == 0) pps->startTime = atoi(value);
2573 else if (strcmp(key, "endTime") == 0) pps->endTime = atoi(value);
2574 else if (strcmp(key, "timeStep") == 0) pps->timeStep = atoi(value);
2575 else if (strcmp(key, "output_particles") == 0) {
// Only "true" (case-insensitive) enables; any other value leaves the default.
2576 if (strcasecmp(value, "true") == 0) pps->outputParticles = PETSC_TRUE;
2577 }
2578 else if (strcasecmp(key, "process_pipeline") == 0) {
2579 strncpy(pps->process_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2580 pps->process_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0'; // Ensure null-termination
2581 } else if (strcasecmp(key, "output_fields_instantaneous") == 0) {
2582 strncpy(pps->output_fields_instantaneous, value, MAX_FIELD_LIST_LENGTH - 1);
// NOTE(review): source lines 2583/2586/2592/2603/2606 are absent from this extracted
// listing — presumably the null-termination statements for the buffers above/below;
// confirm against io.c before assuming strncpy leaves them unterminated.
2584 } else if (strcasecmp(key, "output_fields_averaged") == 0) {
2585 strncpy(pps->output_fields_averaged, value, MAX_FIELD_LIST_LENGTH - 1);
2587 } else if (strcasecmp(key, "output_prefix") == 0) {
2588 strncpy(pps->output_prefix, value, MAX_FILENAME_LENGTH - 1);
2589 pps->output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
2590 } else if (strcasecmp(key, "particle_output_prefix") == 0) {
2591 strncpy(pps->particle_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2593 } else if (strcasecmp(key, "particle_fields_instantaneous") == 0) {
2594 strncpy(pps->particle_fields, value, MAX_FIELD_LIST_LENGTH - 1);
2595 pps->particle_fields[MAX_FIELD_LIST_LENGTH - 1] = '\0';
2596 } else if (strcasecmp(key, "particle_pipeline") == 0) {
2597 strncpy(pps->particle_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2598 pps->particle_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0';
2599 } else if (strcasecmp(key, "particle_output_freq") == 0) {
2600 pps->particle_output_freq = atoi(value);
2601 } else if (strcasecmp(key, "statistics_pipeline") == 0) {
2602 strncpy(pps->statistics_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2604 } else if (strcasecmp(key, "statistics_output_prefix") == 0) {
2605 strncpy(pps->statistics_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2607 } else if (strcasecmp(key, "particleExt") == 0) {
2608 strncpy(pps->particleExt, value, sizeof(pps->particleExt) - 1);
2609 pps->particleExt[sizeof(pps->particleExt) - 1] = '\0';
2610 } else if (strcasecmp(key, "eulerianExt") == 0) {
2611 strncpy(pps->eulerianExt, value, sizeof(pps->eulerianExt) - 1);
2612 pps->eulerianExt[sizeof(pps->eulerianExt) - 1] = '\0';
2613 } else if (strcmp(key, "reference_ip") == 0) {pps->reference[0] = atoi(value);
2614 } else if (strcmp(key, "reference_jp") == 0) {pps->reference[1] = atoi(value);
2615 } else if (strcmp(key, "reference_kp") == 0) {pps->reference[2] = atoi(value);
2616 } else if (strcasecmp(key, "source_directory") == 0) {
2617 strncpy(pps->source_dir, value, sizeof(pps->source_dir) - 1);
2618 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0';
2619 } else {
2620 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown key '%s' in post-processing config file. Ignoring.\n", key);
2621 }
2622 // Add parsing for pipeline, fields, etc. in later phases
2623 }
2624 }
2625 fclose(file);
2626 } else {
2627 LOG_ALLOW(GLOBAL, LOG_WARNING, "Could not open post-processing config file '%s'. Using defaults and command-line overrides.\n", configFile);
2628 }
2629
2630 // --- 3. Parse Command-Line Options (overrides file settings and defaults) ---
// NOTE(review): return codes of these PetscOptionsGet* calls are not CHKERRQ-checked,
// unlike the error-handling style elsewhere in this file.
2631 PetscOptionsGetInt(NULL, NULL, "-startTime", &pps->startTime, &startTimeSet);
2632 PetscOptionsGetInt(NULL, NULL, "-endTime", &pps->endTime, &endTimeSet);
2633 PetscOptionsGetInt(NULL, NULL, "-timeStep", &pps->timeStep, &timeStepSet);
2634 PetscOptionsGetBool(NULL, NULL, "-output_particles", &pps->outputParticles, NULL);
2635
// endTime == -1 can only arise from the file or the command line (default is 0);
// it is a sentinel meaning "run through all available steps".
2636 if(pps->endTime==-1){
2637 pps->endTime = simCtx->StartStep + simCtx->StepsToRun; // Total steps if endTime is set to -1.
2638 }
2639
2640 // If only startTime is given on command line, run for a single step
2641 if (startTimeSet && !endTimeSet) {
2642 pps->endTime = pps->startTime;
2643 }
2644
2645 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-processing configured to run from t=%d to t=%d with step %d. Particle output: %s.\n",
2646 pps->startTime, pps->endTime, pps->timeStep, pps->outputParticles ? "TRUE" : "FALSE");
2647
2648 LOG_ALLOW(GLOBAL, LOG_INFO, "Process Pipeline: %s\n", pps->process_pipeline);
2649 LOG_ALLOW(GLOBAL, LOG_INFO, "Instantaneous Output Fields: %s\n", pps->output_fields_instantaneous);
2650 LOG_ALLOW(GLOBAL, LOG_INFO, "Output Prefix: %s\n", pps->output_prefix);
2651 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Prefix: %s\n", pps->particle_output_prefix);
2652 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Fields: %s\n", pps->particle_fields);
2653 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Pipeline: %s\n", pps->particle_pipeline);
2654 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Frequency: %d\n", pps->particle_output_freq);
2655 LOG_ALLOW(GLOBAL, LOG_INFO, "Averaged Output Fields (reserved): %s\n", pps->output_fields_averaged);
2656 LOG_ALLOW(GLOBAL, LOG_INFO, "Post input extensions: Eulerian='.%s', Particle='.%s'\n", pps->eulerianExt, pps->particleExt);
2657
2659 PetscFunctionReturn(0);
2660}
void TrimWhitespace(char *str)
Trims leading and trailing whitespace from a string in-place.
Definition io.c:36
char statistics_output_prefix[256]
basename for CSV output, e.g.
Definition variables.h:521
char particle_output_prefix[256]
Definition variables.h:516
char output_prefix[256]
Definition variables.h:513
#define MAX_FIELD_LIST_LENGTH
Definition variables.h:490
char output_fields_averaged[1024]
Definition variables.h:512
#define MAX_PIPELINE_LENGTH
Definition variables.h:489
PetscInt reference[3]
Definition variables.h:528
PetscInt timeStep
Definition variables.h:506
#define MAX_FILENAME_LENGTH
Definition variables.h:491
char statistics_pipeline[1024]
e.g.
Definition variables.h:520
char output_fields_instantaneous[1024]
Definition variables.h:511
char particle_pipeline[1024]
Definition variables.h:514
char process_pipeline[1024]
Definition variables.h:510
PetscInt particle_output_freq
Definition variables.h:517
char particle_fields[1024]
Definition variables.h:515
PetscBool outputParticles
Definition variables.h:507
PetscInt startTime
Definition variables.h:504
char PostprocessingControlFile[PETSC_MAX_PATH_LEN]
Definition variables.h:719
PetscInt LoggingFrequency
Definition variables.h:708
Holds all configuration parameters for a post-processing run.
Definition variables.h:499
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ParseScalingInformation()

PetscErrorCode ParseScalingInformation ( SimCtx simCtx)

Parses physical scaling parameters from command-line options.

This function reads the reference length, velocity, and density from the PETSc options database (provided via -scaling_L_ref, etc.). It populates the simCtx->scaling struct and calculates the derived reference pressure. It sets default values of 1.0 for a fully non-dimensional case if the options are not provided.

Parameters
[in,out]simCtxThe simulation context whose 'scaling' member will be populated.
Returns
PetscErrorCode

Definition at line 2677 of file io.c.

2678{
2679 PetscErrorCode ierr;
2680 PetscBool flg;
2681
2682 PetscFunctionBeginUser;
2684
2685 if (!simCtx) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "SimCtx is NULL in ParseScalingInformation");
2686
2687 // --- 1. Set default values to 1.0 ---
2688 // This represents a purely non-dimensional run if no scaling is provided.
2689 simCtx->scaling.L_ref = 1.0;
2690 simCtx->scaling.U_ref = 1.0;
2691 simCtx->scaling.rho_ref = 1.0;
2692
2693 // --- 2. Read overrides from the command line / control file ---
2694 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_L_ref", &simCtx->scaling.L_ref, &flg); CHKERRQ(ierr);
2695 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_U_ref", &simCtx->scaling.U_ref, &flg); CHKERRQ(ierr);
2696 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_rho_ref", &simCtx->scaling.rho_ref, &flg); CHKERRQ(ierr);
2697
2698 // --- 3. Calculate derived scaling factors ---
2699 // Check for division by zero to be safe, though U_ref should be positive.
2700 if (simCtx->scaling.U_ref <= 0.0) {
2701 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Reference velocity U_ref must be positive. Got %g", (double)simCtx->scaling.U_ref);
2702 }
2703 simCtx->scaling.P_ref = simCtx->scaling.rho_ref * simCtx->scaling.U_ref * simCtx->scaling.U_ref;
2704
2705 // --- 4. Log the final, effective scales for verification ---
2706 LOG(GLOBAL, LOG_INFO, "---------------- Physical Scales Initialized -----------------\n");
2707 LOG(GLOBAL, LOG_INFO, " L_ref: %.4f, U_ref: %.4f, rho_ref: %.4f, P_ref: %.4f\n",
2708 simCtx->scaling.L_ref, simCtx->scaling.U_ref, simCtx->scaling.rho_ref, simCtx->scaling.P_ref);
2709 LOG(GLOBAL, LOG_INFO, "--------------------------------------------------------------\n");
2710
2712 PetscFunctionReturn(0);
2713}
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:84
PetscReal L_ref
Definition variables.h:571
ScalingCtx scaling
Definition variables.h:649
PetscReal P_ref
Definition variables.h:574
PetscReal rho_ref
Definition variables.h:573
PetscReal U_ref
Definition variables.h:572
Here is the caller graph for this function: