PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
Functions
io.h File Reference

Public interface for data input/output routines. More...

#include "variables.h"
#include "logging.h"
#include "Boundaries.h"
Include dependency graph for io.h:
This graph shows which files directly or indirectly include this file:

Go to the source code of this file.

Functions

PetscErrorCode ReadGridGenerationInputs (UserCtx *user)
 Parses command-line options for a programmatically generated grid for a SINGLE block.
 
PetscErrorCode PopulateFinestUserGridResolutionFromOptions (UserCtx *finest_users, PetscInt nblk)
 Parses grid resolution arrays (-im, -jm, -km) once and applies them to all finest-grid blocks.
 
PetscErrorCode ReadGridFile (UserCtx *user)
 Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
 
PetscErrorCode VerifyPathExistence (const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
 A parallel-safe helper to verify the existence of a generic file or directory path.
 
PetscBool ShouldWriteDataOutput (const SimCtx *simCtx, PetscInt completed_step)
 Returns whether full field/restart output should be written for the completed timestep.
 
PetscErrorCode ReadSimulationFields (UserCtx *user, PetscInt ti)
 Reads binary field data for velocity, pressure, and other required vectors.
 
PetscErrorCode ReadFieldData (UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
 Reads data for a specific field from a file into the provided vector.
 
PetscErrorCode ReadStatisticalFields (UserCtx *user, PetscInt ti)
 Reads statistical fields used for time-averaged simulations.
 
PetscErrorCode ReadLESFields (UserCtx *user, PetscInt ti)
 Reads LES-related fields used in turbulence modeling.
 
PetscErrorCode ReadRANSFields (UserCtx *user, PetscInt ti)
 Reads RANS-related fields for turbulence modeling.
 
PetscErrorCode WriteFieldData (UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
 Writes data from a specific PETSc vector to a file.
 
PetscErrorCode WriteSimulationFields (UserCtx *user)
 Writes simulation fields to files.
 
PetscErrorCode WriteStatisticalFields (UserCtx *user)
 Writes statistical fields for averaging purposes.
 
PetscErrorCode WriteLESFields (UserCtx *user)
 Writes LES-related fields.
 
PetscErrorCode WriteRANSFields (UserCtx *user)
 Writes RANS-related fields.
 
PetscErrorCode WriteSwarmField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Writes data from a specific field in a PETSc Swarm to a file.
 
PetscErrorCode WriteSwarmIntField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Writes integer data from a specific PETSc Swarm field to a file.
 
PetscErrorCode WriteAllSwarmFields (UserCtx *user)
 Writes a predefined set of PETSc Swarm fields to files.
 
PetscInt ReadDataFileToArray (const char *filename, double **data_out, PetscInt *Nout, MPI_Comm comm)
 Reads a simple ASCII data file containing one numeric value per line.
 
PetscInt CreateVTKFileFromMetadata (const char *filename, const VTKMetaData *meta, MPI_Comm comm)
 Creates a VTK file from prepared metadata and field payloads.
 
PetscErrorCode VecToArrayOnRank0 (Vec inVec, PetscInt *N, double **arrayOut)
 Gathers the contents of a distributed PETSc Vec into a single array on rank 0.
 
PetscErrorCode SwarmFieldToArrayOnRank0 (DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
 Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
 
PetscErrorCode ReadSwarmField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Reads data from a file into a specified field of a PETSc DMSwarm.
 
PetscErrorCode ReadSwarmIntField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Reads integer swarm data by using ReadFieldData and casting the result.
 
PetscErrorCode ReadAllSwarmFields (UserCtx *user, PetscInt ti)
 Reads multiple fields (positions, velocity, CellID, and weight) into a DMSwarm.
 
PetscErrorCode ReadPositionsFromFile (PetscInt timeIndex, UserCtx *user, double **coordsArray, PetscInt *Ncoords)
 Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
 
PetscErrorCode ReadFieldDataToRank0 (PetscInt timeIndex, const char *fieldName, UserCtx *user, double **scalarArray, PetscInt *Nscalars)
 Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
 
PetscErrorCode DisplayBanner (SimCtx *simCtx)
 Displays a structured banner summarizing the simulation configuration.
 
PetscErrorCode StringToBCFace (const char *str, BCFace *face_out)
 Converts a face-token string (e.g., "-Xi", "+Eta") to the internal BCFace enum.
 
PetscErrorCode StringToBCType (const char *str, BCType *type_out)
 Converts a mathematical BC type string (e.g., "PERIODIC", "WALL") to BCType.
 
PetscErrorCode StringToBCHandlerType (const char *str, BCHandlerType *handler_out)
 Converts a BC handler token (implementation strategy) to BCHandlerType.
 
PetscErrorCode ValidateBCHandlerForBCType (BCType type, BCHandlerType handler)
 Validates that a selected handler is compatible with a mathematical BC type.
 
void FreeBC_ParamList (BC_Param *head)
 Frees an entire linked list of boundary-condition parameters.
 
PetscErrorCode GetBCParamReal (BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
 Searches a BC_Param linked list for a key and returns its value as a double.
 
PetscErrorCode GetBCParamBool (BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found)
 Searches a BC_Param linked list for a key and returns its value as a bool.
 
PetscErrorCode ParseAllBoundaryConditions (UserCtx *user, const char *bcs_input_filename)
 Parses the boundary conditions file to configure the type, handler, and any associated parameters for all 6 global faces of the domain.
 
PetscErrorCode DeterminePeriodicity (SimCtx *simCtx)
 Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.
 
void TrimWhitespace (char *str)
 Helper function to trim leading/trailing whitespace from a string.
 
PetscErrorCode ParsePostProcessingSettings (SimCtx *simCtx)
 Initializes post-processing settings from a config file and command-line overrides.
 
PetscErrorCode ParseScalingInformation (SimCtx *simCtx)
 Parses physical scaling parameters from command-line options.
 

Detailed Description

Public interface for data input/output routines.

This header declares functions responsible for parsing grid geometry information, either from command-line options for programmatically generated grids or by reading the header of a grid definition file.

Definition in file io.h.

Function Documentation

◆ ReadGridGenerationInputs()

PetscErrorCode ReadGridGenerationInputs ( UserCtx *user)

Parses command-line options for a programmatically generated grid for a SINGLE block.

This function reads all per-block array options related to grid geometry, such as dimensions (-im), domain bounds (-xMins), and stretching ratios (-rxs). It then populates the fields of the provided UserCtx struct using its internal block index user->_this.

Parameters
userPointer to the UserCtx for a specific block. The function will populate the geometric fields (IM, Min_X, rx, etc.) within this struct.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

Parses command-line options for a programmatically generated grid for a SINGLE block.

Local to this translation unit.

Definition at line 85 of file io.c.

86{
87 PetscErrorCode ierr;
88 SimCtx *simCtx = user->simCtx;
89 PetscInt nblk = simCtx->block_number;
90 PetscInt block_index = user->_this;
91 PetscBool found;
92
93 // Temporary arrays to hold the parsed values for ALL blocks
94 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL, *cgrids = NULL;
95 PetscReal *xMins = NULL, *xMaxs = NULL, *rxs = NULL;
96 PetscReal *yMins = NULL, *yMaxs = NULL, *rys = NULL;
97 PetscReal *zMins = NULL, *zMaxs = NULL, *rzs = NULL;
98
99 PetscFunctionBeginUser;
101
102 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Reading generated grid inputs for block %d.\n", simCtx->rank, block_index);
103
104 if (block_index >= nblk) {
105 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block index %d is out of range for nblk=%d", block_index, nblk);
106 }
107
108 // --- Allocate temporary storage for all array options ---
109 ierr = PetscMalloc4(nblk, &IMs, nblk, &JMs, nblk, &KMs, nblk, &cgrids); CHKERRQ(ierr);
110 ierr = PetscMalloc6(nblk, &xMins, nblk, &xMaxs, nblk, &rxs, nblk, &yMins, nblk, &yMaxs, nblk, &rys); CHKERRQ(ierr);
111 ierr = PetscMalloc3(nblk, &zMins, nblk, &zMaxs, nblk, &rzs); CHKERRQ(ierr);
112
113 // --- Set default values for the temporary arrays ---
114 for (PetscInt i = 0; i < nblk; ++i) {
115 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10; cgrids[i] = 0;
116 xMins[i] = 0.0; xMaxs[i] = 1.0; rxs[i] = 1.0;
117 yMins[i] = 0.0; yMaxs[i] = 1.0; rys[i] = 1.0;
118 zMins[i] = 0.0; zMaxs[i] = 1.0; rzs[i] = 1.0;
119 }
120
121 // --- Parse the array options from the command line / control file ---
122 PetscInt count;
123 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
124 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
125 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
126 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMins", xMins, &count, &found); CHKERRQ(ierr);
127 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMaxs", xMaxs, &count, &found); CHKERRQ(ierr);
128 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rxs", rxs, &count, &found); CHKERRQ(ierr);
129 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMins", yMins, &count, &found); CHKERRQ(ierr);
130 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMaxs", yMaxs, &count, &found); CHKERRQ(ierr);
131 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rys", rys, &count, &found); CHKERRQ(ierr);
132 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMins", zMins, &count, &found); CHKERRQ(ierr);
133 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMaxs", zMaxs, &count, &found); CHKERRQ(ierr);
134 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rzs", rzs, &count, &found); CHKERRQ(ierr);
135 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-cgrids", cgrids, &count, &found); CHKERRQ(ierr);
136
137 // --- Assign the parsed values to the specific UserCtx struct passed in ---
138 user->IM = IMs[block_index];
139 user->JM = JMs[block_index];
140 user->KM = KMs[block_index];
141 user->Min_X = xMins[block_index];
142 user->Max_X = xMaxs[block_index];
143 user->rx = rxs[block_index];
144 user->Min_Y = yMins[block_index];
145 user->Max_Y = yMaxs[block_index];
146 user->ry = rys[block_index];
147 user->Min_Z = zMins[block_index];
148 user->Max_Z = zMaxs[block_index];
149 user->rz = rzs[block_index];
150 user->cgrid = cgrids[block_index];
151
152 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d grid generation inputs set: IM=%d, JM=%d, KM=%d\n",
153 simCtx->rank, block_index, user->IM, user->JM, user->KM);
154 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d bounds: X=[%.2f, %.2f], Y=[%.2f, %.2f], Z=[%.2f, %.2f]\n",
155 simCtx->rank, block_index, user->Min_X, user->Max_X, user->Min_Y, user->Max_Y, user->Min_Z, user->Max_Z);
156
157 // --- Clean up temporary storage ---
158 ierr = PetscFree4(IMs, JMs, KMs, cgrids); CHKERRQ(ierr);
159 ierr = PetscFree6(xMins, xMaxs, rxs, yMins, yMaxs, rys); CHKERRQ(ierr);
160 ierr = PetscFree3(zMins, zMaxs, rzs); CHKERRQ(ierr);
161
163 PetscFunctionReturn(0);
164}
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:44
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:199
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:779
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:31
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:770
PetscMPIInt rank
Definition variables.h:646
PetscInt cgrid
Definition variables.h:826
PetscInt block_number
Definition variables.h:712
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:814
PetscReal Min_X
Definition variables.h:821
PetscInt KM
Definition variables.h:820
PetscInt _this
Definition variables.h:824
PetscReal ry
Definition variables.h:825
PetscReal Max_Y
Definition variables.h:821
PetscReal rz
Definition variables.h:825
PetscInt JM
Definition variables.h:820
PetscReal Min_Z
Definition variables.h:821
PetscReal Max_X
Definition variables.h:821
PetscReal Min_Y
Definition variables.h:821
PetscInt IM
Definition variables.h:820
PetscReal rx
Definition variables.h:825
PetscReal Max_Z
Definition variables.h:821
The master context for the entire simulation.
Definition variables.h:643
Here is the caller graph for this function:

◆ PopulateFinestUserGridResolutionFromOptions()

PetscErrorCode PopulateFinestUserGridResolutionFromOptions ( UserCtx *finest_users,
PetscInt  nblk 
)

Parses grid resolution arrays (-im, -jm, -km) once and applies them to all finest-grid blocks.

This helper centralizes one-time resolution ingestion for analytical grid setup. It fills IM/JM/KM in each element of the finest-level UserCtx array.

Parameters
finest_usersPointer to the finest-level UserCtx array (length nblk).
nblkNumber of blocks in the finest-level array.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

Parses grid resolution arrays (-im, -jm, -km) once and applies them to all finest-grid blocks.

Local to this translation unit.

Definition at line 170 of file io.c.

171{
172 PetscErrorCode ierr;
173 PetscBool found;
174 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL;
175 SimCtx *simCtx = NULL;
176
177 PetscFunctionBeginUser;
178
179 if (!finest_users) {
180 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "finest_users cannot be NULL.");
181 }
182 if (nblk <= 0) {
183 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nblk must be positive. Got %d.", nblk);
184 }
185 simCtx = finest_users[0].simCtx;
186
187 ierr = PetscMalloc3(nblk, &IMs, nblk, &JMs, nblk, &KMs); CHKERRQ(ierr);
188 for (PetscInt i = 0; i < nblk; ++i) {
189 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10;
190 }
191
192 PetscInt count;
193 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
194 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
195 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
196
197 for (PetscInt bi = 0; bi < nblk; ++bi) {
198 finest_users[bi].IM = IMs[bi];
199 finest_users[bi].JM = JMs[bi];
200 finest_users[bi].KM = KMs[bi];
201 if (simCtx) {
203 "Rank %d: Preloaded analytical grid resolution for block %d: IM=%d, JM=%d, KM=%d\n",
204 simCtx->rank, bi, finest_users[bi].IM, finest_users[bi].JM, finest_users[bi].KM);
205 }
206 }
207
208 ierr = PetscFree3(IMs, JMs, KMs); CHKERRQ(ierr);
209 PetscFunctionReturn(0);
210}
Here is the caller graph for this function:

◆ ReadGridFile()

PetscErrorCode ReadGridFile ( UserCtx *user)

Sets grid dimensions from a file for a SINGLE block using a one-time read cache.

This function uses a static-variable pattern to ensure the grid file header is read only once, collectively, by all processes on the first call. Subsequent calls simply retrieve the pre-loaded and broadcasted data for the specified block.

Parameters
userPointer to the UserCtx for a specific block. This function will populate the IM, JM, and KM fields.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

Sets grid dimensions from a file for a SINGLE block using a one-time read cache.

Local to this translation unit.

Definition at line 219 of file io.c.

220{
221 PetscErrorCode ierr;
222 SimCtx *simCtx = user->simCtx;
223 PetscInt block_index = user->_this;
224
225 PetscFunctionBeginUser;
227
228 // --- One-Time Read and Broadcast Logic ---
230 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "First call to ReadGridFile. Reading and broadcasting grid file header from '%s'...\n", simCtx->grid_file);
231 PetscMPIInt rank = simCtx->rank;
232 PetscInt nblk = simCtx->block_number;
233
234 if (rank == 0) {
235 FILE *fd = fopen(simCtx->grid_file, "r");
236 if (!fd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open file: %s", simCtx->grid_file);
237
238 // Read and validate the canonical PICGRID header.
239 char firstTok[32] = {0};
240 if (fscanf(fd, "%31s", firstTok) != 1)
241 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Empty grid file: %s", simCtx->grid_file);
242 if (strcmp(firstTok, "PICGRID") != 0)
243 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ,
244 "Grid file %s must begin with the canonical PICGRID header.", simCtx->grid_file);
245 if (fscanf(fd, "%d", &g_nblk_from_file) != 1)
246 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected number of blocks after \"PICGRID\" in %s", simCtx->grid_file);
247 if (g_nblk_from_file != nblk) {
248 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Mismatch: -nblk is %d but grid file specifies %d blocks.", nblk, g_nblk_from_file);
249 }
250
251 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
252 for (PetscInt i = 0; i < nblk; ++i) {
253 if (fscanf(fd, "%d %d %d\n", &g_IMs_from_file[i], &g_JMs_from_file[i], &g_KMs_from_file[i]) != 3) {
254 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected 3 integers for block %d in %s", i, simCtx->grid_file);
255 }
256 }
257 fclose(fd);
258 }
259
260 // Broadcast nblk to verify (optional, good practice)
261 ierr = MPI_Bcast(&g_nblk_from_file, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
262
263 // Allocate on other ranks before receiving the broadcast
264 if (rank != 0) {
265 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
266 }
267
268 // Broadcast the data arrays
269 ierr = MPI_Bcast(g_IMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
270 ierr = MPI_Bcast(g_JMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
271 ierr = MPI_Bcast(g_KMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
272
273 g_file_has_been_read = PETSC_TRUE;
274 LOG_ALLOW(GLOBAL, LOG_INFO, "Grid file header read and broadcast complete.\n");
275 }
276
277 // --- Per-Block Assignment Logic (runs on every call) ---
278 user->IM = g_IMs_from_file[block_index];
279 user->JM = g_JMs_from_file[block_index];
280 user->KM = g_KMs_from_file[block_index];
281
282 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Set file inputs for Block %d: IM=%d, JM=%d, KM=%d\n",
283 simCtx->rank, block_index, user->IM, user->JM, user->KM);
284
286 PetscFunctionReturn(0);
287}
static PetscInt * g_IMs_from_file
Caches the IM dimensions for all blocks read from the grid file.
Definition io.c:19
static PetscInt g_nblk_from_file
Stores the number of blocks read from the grid file.
Definition io.c:17
static PetscBool g_file_has_been_read
A flag to ensure the grid file is read only once.
Definition io.c:25
static PetscInt * g_KMs_from_file
Caches the KM dimensions for all blocks read from the grid file.
Definition io.c:23
static PetscInt * g_JMs_from_file
Caches the JM dimensions for all blocks read from the grid file.
Definition io.c:21
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Synchronized logging macro that checks both the log level and whether the calling function is in the ...
Definition logging.h:252
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:45
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:30
char grid_file[PETSC_MAX_PATH_LEN]
Definition variables.h:717
Here is the caller graph for this function:

◆ VerifyPathExistence()

PetscErrorCode VerifyPathExistence ( const char *  path,
PetscBool  is_dir,
PetscBool  is_optional,
const char *  description,
PetscBool *  exists 
)

A parallel-safe helper to verify the existence of a generic file or directory path.

This function centralizes the logic for checking arbitrary paths. Only Rank 0 performs the filesystem check, and the result is broadcast to all other processes. This ensures collective and synchronized decision-making across all ranks. It is intended for configuration files, source directories, etc., where the path is known completely.

Parameters
[in]pathThe full path to the file or directory to check.
[in]is_dirPETSC_TRUE if checking for a directory, PETSC_FALSE for a file.
[in]is_optionalPETSC_TRUE if the path is optional (results in a warning), PETSC_FALSE if mandatory (results in an error).
[in]descriptionA user-friendly description of the path for logging (e.g., "Grid file").
[out]existsThe result of the check (identical on all ranks).
Returns
PetscErrorCode

A parallel-safe helper to verify the existence of a generic file or directory path.

Local to this translation unit.

Definition at line 738 of file io.c.

739{
740 PetscErrorCode ierr;
741 PetscMPIInt rank;
742 MPI_Comm comm = PETSC_COMM_WORLD;
743
744 PetscFunctionBeginUser;
745 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
746
747 if (rank == 0) {
748 if (is_dir) {
749 ierr = PetscTestDirectory(path, 'r', exists); CHKERRQ(ierr);
750 } else {
751 ierr = PetscTestFile(path, 'r', exists); CHKERRQ(ierr);
752 }
753
754 if (!(*exists)) {
755 if (is_optional) {
756 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s not found at: %s (using defaults/ignoring).\n", description, path);
757 } else {
758 LOG_ALLOW(GLOBAL, LOG_ERROR, "Mandatory %s not found at: %s\n", description, path);
759 }
760 } else {
761 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found %s: %s\n", description, path);
762 }
763 }
764
765 // Broadcast the result from Rank 0
766 PetscMPIInt exists_int = (rank == 0) ? (PetscMPIInt)(*exists) : 0;
767 ierr = MPI_Bcast(&exists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
768 *exists = (PetscBool)exists_int;
769
770 // Collective error for mandatory files
771 if (!(*exists) && !is_optional) {
772 SETERRQ(comm, PETSC_ERR_FILE_OPEN, "Mandatory %s not found. Rank 0 expected it at '%s'. Check path and permissions.", description, path);
773 }
774
775 PetscFunctionReturn(0);
776}
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:28
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
Here is the caller graph for this function:

◆ ShouldWriteDataOutput()

PetscBool ShouldWriteDataOutput ( const SimCtx *simCtx,
PetscInt  completed_step 
)

Returns whether full field/restart output should be written for the completed timestep.

Parameters
simCtxSimulation context controlling the operation.
completed_stepCompleted step index used by the decision helper.
Returns
PetscBool indicating the result of ShouldWriteDataOutput().

Returns whether full field/restart output should be written for the completed timestep.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
ShouldWriteDataOutput()

Definition at line 70 of file io.c.

71{
72 if (!simCtx) {
73 return PETSC_FALSE;
74 }
75 return (PetscBool)(simCtx->tiout > 0 && completed_step > 0 && completed_step % simCtx->tiout == 0);
76}
PetscInt tiout
Definition variables.h:655
Here is the caller graph for this function:

◆ ReadSimulationFields()

PetscErrorCode ReadSimulationFields ( UserCtx *user,
PetscInt  ti 
)

Reads binary field data for velocity, pressure, and other required vectors.

Reads contravariant velocity (Ucont) from vfield, Cartesian velocity (Ucat) from ufield, pressure (P), node state (Nvert_o), and optionally statistical quantities, LES, and RANS fields from binary files. Logs missing files but continues execution.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiTimestep index used to construct restart file names.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads binary field data for velocity, pressure, and other required vectors.

Local to this translation unit.

Definition at line 1126 of file io.c.

1127{
1128 PetscErrorCode ierr;
1129
1130 SimCtx *simCtx = user->simCtx;
1131 const char *source_path = NULL;
1132 const char *eulerian_ext = "dat";
1133
1134 if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
1135 source_path = simCtx->pps->source_dir;
1136 if (simCtx->pps->eulerianExt[0] != '\0') {
1137 eulerian_ext = simCtx->pps->eulerianExt;
1138 }
1139 } else if(simCtx->exec_mode == EXEC_MODE_SOLVER){
1140 source_path = simCtx->restart_dir;
1141 } else{
1142 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1143 }
1144
1145 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read simulation fields.\n");
1146
1147 // Set the current I/O directory context
1148 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer), "%s/%s",source_path,simCtx->euler_subdir); CHKERRQ(ierr);
1149 simCtx->current_io_directory = simCtx->_io_context_buffer;
1150
1151 // Read Cartesian velocity field
1152 ierr = ReadFieldData(user, "ufield", user->Ucat, ti, eulerian_ext); CHKERRQ(ierr);
1153
1154 // Read contravariant velocity field
1155 ierr = ReadFieldData(user, "vfield", user->Ucont, ti, eulerian_ext); CHKERRQ(ierr);
1156
1157 // Read pressure field
1158 ierr = ReadFieldData(user, "pfield", user->P, ti, eulerian_ext); CHKERRQ(ierr);
1159
1160 // Read node state field (nvert)
1161 ierr = ReadFieldData(user, "nvfield", user->Nvert, ti, eulerian_ext); CHKERRQ(ierr);
1162
1163 LOG_ALLOW(GLOBAL,LOG_INFO,"Successfully read all mandatory fields. \n");
1164
1165 if(simCtx->np>0){
1166 // Read Particle Count field
1167 if(!user->ParticleCount){
1168 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ParticleCount Vec is NULL but np>0");
1169 }
1170 ierr = ReadOptionalField(user, "ParticleCount", "Particle Count", user->ParticleCount, ti, eulerian_ext); CHKERRQ(ierr);
1171
1172 ierr = ReadOptionalField(user, "psifield", "Scalar Psi Field", user->Psi, ti, eulerian_ext); CHKERRQ(ierr);
1173 }
1174 else{
1175 LOG_ALLOW(GLOBAL, LOG_INFO, "No particles in simulation, skipping Particle fields reading.\n");
1176 }
1177 // Process LES fields if enabled
1178 if (simCtx->les) {
1179 ierr = ReadLESFields(user,ti); CHKERRQ(ierr);
1180 }
1181
1182 // Process RANS fields if enabled
1183 if (simCtx->rans) {
1184 ierr = ReadRANSFields(user,ti); CHKERRQ(ierr);
1185 }
1186
1187 // Process statistical fields if averaging is enabled
1188 if (simCtx->averaging) {
1189 ierr = ReadStatisticalFields(user,ti); CHKERRQ(ierr);
1190 }
1191
1192 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1193
1194 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading simulation fields.\n");
1195
1196 return 0;
1197}
PetscErrorCode ReadStatisticalFields(UserCtx *user, PetscInt ti)
Implementation of ReadStatisticalFields().
Definition io.c:1205
PetscErrorCode ReadRANSFields(UserCtx *user, PetscInt ti)
Internal helper implementation: ReadRANSFields().
Definition io.c:1257
PetscErrorCode ReadLESFields(UserCtx *user, PetscInt ti)
Internal helper implementation: ReadLESFields().
Definition io.c:1229
PetscErrorCode ReadFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Internal helper implementation: ReadFieldData().
Definition io.c:898
static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
Internal helper implementation: ReadOptionalField().
Definition io.c:822
char euler_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:666
PetscInt rans
Definition variables.h:732
PetscInt np
Definition variables.h:739
PetscBool averaging
Definition variables.h:736
Vec Ucont
Definition variables.h:837
char * current_io_directory
Definition variables.h:670
char eulerianExt[8]
Definition variables.h:578
char source_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:555
Vec Ucat
Definition variables.h:837
Vec ParticleCount
Definition variables.h:882
PostProcessParams * pps
Definition variables.h:798
@ EXEC_MODE_SOLVER
Definition variables.h:616
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:617
char _io_context_buffer[PETSC_MAX_PATH_LEN]
Definition variables.h:669
PetscInt les
Definition variables.h:732
Vec Nvert
Definition variables.h:837
ExecutionMode exec_mode
Definition variables.h:662
char restart_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:664
Vec Psi
Definition variables.h:883
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadFieldData()

PetscErrorCode ReadFieldData ( UserCtx *user,
const char *  field_name,
Vec  field_vec,
PetscInt  ti,
const char *  ext 
)

Reads data for a specific field from a file into the provided vector.

This function uses the field name to construct the file path and reads the data from the corresponding file into the provided PETSc vector.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
[in]field_nameName of the field (e.g., "ufield", "vfield", "pfield").
[out]field_vecPETSc vector to store the field data.
[in]tiTime index for constructing the file name.
[in]extFile extension (e.g., "dat").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads data for a specific field from a file into the provided vector.

Local to this translation unit.

Definition at line 898 of file io.c.

/* NOTE(review): Doxygen-extracted listing. The leading numbers are io.c source
 * line numbers, not code. The generator dropped a few interior lines (912, 937,
 * 991, 994, 1041, 1114, 1117) — presumably LOG_ALLOW(...) heads and profiling
 * macros whose continuation arguments are still visible below. Verify against
 * io.c before reusing this listing. */
903{
904 PetscErrorCode ierr;
905 char filename[PETSC_MAX_PATH_LEN];
906 MPI_Comm comm;
907 PetscMPIInt rank,size;
908 SimCtx *simCtx = user->simCtx;
909
910
911 PetscFunctionBeginUser;
913
/* The caller must establish the I/O context directory (see WriteSimulationFields
 * for the writing analogue) before any field read; fail loudly otherwise. */
914 if(!simCtx->current_io_directory){
915 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling ReadFieldData().");
916 }
917
918
919 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
920 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
921 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
922
923 const char *source_path = NULL;
924 source_path = simCtx->current_io_directory;
925
926 if(!source_path){
927 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "source_path was not set for the current execution mode.");
928 }
929 /* ---------------------------------------------------------------------
930 * Compose <path-to-file>/<field_name><step with 5 digits>_0.<ext>
931 * (all restart files are written by rank-0 with that naming scheme).
932 * ------------------------------------------------------------------ */
933 ierr = PetscSNPrintf(filename,sizeof(filename),
934 "%s/%s%05" PetscInt_FMT "_0.%s",
935 source_path,field_name,ti,ext);CHKERRQ(ierr);
936
938 "Attempting to read <%s> on rank %d/%d\n",
939 filename,(int)rank,(int)size);
940
941 /* ======================================================================
942 * 1. SERIAL JOB – just hand the Vec to VecLoad()
943 * ==================================================================== */
944 if(size==1)
945 {
946 PetscViewer viewer;
947 PetscBool found;
948 Vec temp_vec;
949 PetscInt expectedSize,loadedSize;
950
951 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
952 if(!found) SETERRQ(comm,PETSC_ERR_FILE_OPEN,
953 "Restart/Source file not found: %s",filename);
954
955 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,FILE_MODE_READ,&viewer);CHKERRQ(ierr);
956// ---- START MODIFICATION ----
957 // DO NOT load directly into field_vec, as this can resize it, which is
958 // illegal for DMSwarm "view" vectors. Instead, load into a temporary vector.
959 ierr = VecCreate(PETSC_COMM_SELF, &temp_vec); CHKERRQ(ierr);
960 ierr = VecLoad(temp_vec,viewer);CHKERRQ(ierr);
961 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
962
963 // Sanity check: ensure the file size matches the expected vector size.
964 ierr = VecGetSize(field_vec, &expectedSize);CHKERRQ(ierr);
965 ierr = VecGetSize(temp_vec, &loadedSize);CHKERRQ(ierr);
966 if (loadedSize != expectedSize) {
967 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
968 "File %s holds %d entries – expected %d for field '%s'",
969 filename, loadedSize, expectedSize, field_name);
970 }
971
972 // Now, safely copy the data from the temporary vector to the final destination.
973 ierr = VecCopy(temp_vec, field_vec);CHKERRQ(ierr);
974
975 // Clean up the temporary vector.
976 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
977
978 // ---- END MODIFICATION ----
979
/* NOTE(review): the block below is dead, commented-out code superseded by the
 * temp_vec path above — candidate for deletion in io.c. */
980 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
981 /*
982 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
983 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
984
985 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
986 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
987
988 ierr = VecLoad(field_vec,viewer);CHKERRQ(ierr);
989 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
990 */
992 "Loaded <%s> (serial path)\n",filename);
993
995 PetscFunctionReturn(0);
996 }
997
998 /* ======================================================================
999 * 2. PARALLEL JOB
1000 * ==================================================================== */
1001 PetscInt globalSize;
1002 ierr = VecGetSize(field_vec,&globalSize);CHKERRQ(ierr);
1003
1004 DM dm = NULL;
1005 const char *dmtype = NULL;
1006 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1007
1008 /* -------------------- rank-0 : read the sequential file -------------- */
1009 Vec seq_vec = NULL; /* only valid on rank-0 */
1010 const PetscScalar *seqArray = NULL; /* borrowed pointer on rank-0 only */
1011
1012 if(rank==0)
1013 {
1014 PetscViewer viewer;
1015 PetscBool found;
1016
1017 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
1018 if(!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,
1019 "Restart file not found: %s",filename);
1020
1021 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
1022 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
1023 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
1024
1025 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1026 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
1027 ierr = VecLoad(seq_vec,viewer);CHKERRQ(ierr);
1028 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1029
1030 /* size sanity-check */
1031 PetscInt loaded;
1032 ierr = VecGetSize(seq_vec,&loaded);CHKERRQ(ierr);
1033 if(loaded != globalSize)
1034 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
1035 "File %s holds %d entries – expected %d",
1036 filename,loaded,globalSize);
1037
1038 /* borrow array for later Bcast */
1039 ierr = VecGetArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);
1040
1042 "Rank 0 successfully loaded <%s>\n",filename);
1043 }
1044
1045 /* -------------------- Check if this is a DMDA vector ----------------- */
1046 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1047 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1048
1049 if (dmtype && !strcmp(dmtype, DMDA)) {
1050 /* ==================================================================
1051 * DMDA PATH: File is in natural ordering, need to convert to global
1052 * ================================================================== */
1053
1054 /* Create natural vector */
1055 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1056
1057 /* Scatter from rank 0's seq_vec to all ranks' natural vector */
1058 VecScatter scatter;
1059 Vec nat_seq = NULL; /* Sequential natural vector on rank 0 */
1060
1061 ierr = VecScatterCreateToZero(nat, &scatter, &nat_seq); CHKERRQ(ierr);
1062
1063 /* Reverse scatter: from rank 0 to all ranks */
1064 ierr = VecScatterBegin(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
1065 INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
1066 ierr = VecScatterEnd(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
1067 INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
1068
1069 /* Convert natural → global ordering */
1070 ierr = DMDANaturalToGlobalBegin(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
1071 ierr = DMDANaturalToGlobalEnd(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
1072
1073 /* Cleanup */
1074 ierr = VecScatterDestroy(&scatter); CHKERRQ(ierr);
1075 ierr = VecDestroy(&nat_seq); CHKERRQ(ierr);
1076 ierr = VecDestroy(&nat); CHKERRQ(ierr);
1077
1078 } else {
1079 /* ==================================================================
1080 * NON-DMDA PATH: Use broadcast and direct copy (assumes global ordering)
1081 * ================================================================== */
1082
/* NOTE(review): every non-root rank allocates and receives the FULL global
 * vector here; memory/traffic scale with global size. Acceptable for restart
 * I/O, but confirm for very large fields. */
1083 PetscScalar *buffer = NULL;
1084 if (rank == 0) {
1085 buffer = (PetscScalar *)seqArray;
1086 } else {
1087 ierr = PetscMalloc1(globalSize, &buffer); CHKERRQ(ierr);
1088 }
1089
1090 ierr = MPI_Bcast(buffer, (int)globalSize, MPIU_SCALAR, 0, comm); CHKERRQ(ierr);
1091
1092 /* Copy slice based on ownership range */
1093 PetscInt rstart, rend, loc;
1094 PetscScalar *locArray;
1095
1096 ierr = VecGetOwnershipRange(field_vec, &rstart, &rend); CHKERRQ(ierr);
1097 loc = rend - rstart;
1098
1099 ierr = VecGetArray(field_vec, &locArray); CHKERRQ(ierr);
1100 ierr = PetscMemcpy(locArray, buffer + rstart, loc * sizeof(PetscScalar)); CHKERRQ(ierr);
1101 ierr = VecRestoreArray(field_vec, &locArray); CHKERRQ(ierr);
1102
1103 if (rank != 0) {
1104 ierr = PetscFree(buffer); CHKERRQ(ierr);
1105 }
1106 }
1107
1108 /* -------------------- tidy up ---------------------------------------- */
1109 if (rank == 0) {
1110 ierr = VecRestoreArrayRead(seq_vec, &seqArray); CHKERRQ(ierr);
1111 ierr = VecDestroy(&seq_vec); CHKERRQ(ierr);
1112 }
1113
1115 "Loaded <%s> (parallel path)\n",filename);
1116
1118 PetscFunctionReturn(0);
1119}
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:32
Here is the caller graph for this function:

◆ ReadStatisticalFields()

PetscErrorCode ReadStatisticalFields ( UserCtx user,
PetscInt  ti 
)

Reads statistical fields used for time-averaged simulations.

Reads statistical quantities such as velocity sums and pressure sums. Logs missing files and initializes fields to zero if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiTimestep index used to construct statistics file names.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads statistical fields used for time-averaged simulations.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
ReadStatisticalFields()

Definition at line 1205 of file io.c.

1206{
1207 PetscErrorCode ierr;
1208 const char *eulerian_ext = "dat";
1209 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1210 eulerian_ext = user->simCtx->pps->eulerianExt;
1211 }
1212
1213 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read statistical fields.\n");
1214
1215 ierr = ReadOptionalField(user, "su0", "Velocity Sum", user->Ucat_sum, ti, eulerian_ext); CHKERRQ(ierr);
1216 ierr = ReadOptionalField(user, "su1", "Velocity Cross Sum", user->Ucat_cross_sum, ti, eulerian_ext); CHKERRQ(ierr);
1217 ierr = ReadOptionalField(user, "su2", "Velocity Square Sum",user->Ucat_square_sum, ti, eulerian_ext); CHKERRQ(ierr);
1218 ierr = ReadOptionalField(user, "sp", "Pressure Sum", user->P_sum, ti, eulerian_ext); CHKERRQ(ierr);
1219
1220 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading statistical fields.\n");
1221
1222 return 0;
1223}
Vec Ucat_square_sum
Definition variables.h:868
Vec P_sum
Definition variables.h:868
Vec Ucat_sum
Definition variables.h:868
Vec Ucat_cross_sum
Definition variables.h:868
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadLESFields()

PetscErrorCode ReadLESFields ( UserCtx user,
PetscInt  ti 
)

Reads LES-related fields used in turbulence modeling.

Reads the Smagorinsky constant (Cs) and transfers data to local vectors. Logs missing files and initializes fields to zero if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads LES-related fields used in turbulence modeling.

Local to this translation unit.

Definition at line 1229 of file io.c.

1230{
1231 PetscErrorCode ierr;
1232 const char *eulerian_ext = "dat";
1233 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1234 eulerian_ext = user->simCtx->pps->eulerianExt;
1235 }
1236
1237 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read LES fields.\n");
1238
1239 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, eulerian_ext); CHKERRQ(ierr);
1240 ierr = ReadOptionalField(user, "cs", "Smagorinsky Constant (Cs)", user->CS, ti, eulerian_ext); CHKERRQ(ierr);
1241
1242 DMGlobalToLocalBegin(user->da, user->CS, INSERT_VALUES, user->lCs);
1243 DMGlobalToLocalEnd(user->da, user->CS, INSERT_VALUES, user->lCs);
1244
1245 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1246 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1247
1248 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading LES fields.\n");
1249
1250 return 0;
1251}
Vec lCs
Definition variables.h:865
Vec lNu_t
Definition variables.h:865
Vec Nu_t
Definition variables.h:865
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadRANSFields()

PetscErrorCode ReadRANSFields ( UserCtx user,
PetscInt  ti 
)

Reads RANS-related fields for turbulence modeling.

Reads K_Omega fields (used in RANS modeling) and initializes them if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads RANS-related fields for turbulence modeling.

Local to this translation unit.

Definition at line 1257 of file io.c.

1258{
1259 PetscErrorCode ierr;
1260 const char *eulerian_ext = "dat";
1261 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1262 eulerian_ext = user->simCtx->pps->eulerianExt;
1263 }
1264
1265 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read RANS fields.\n");
1266
1267 ierr = ReadOptionalField(user, "kfield", "K-Omega RANS", user->K_Omega, ti, eulerian_ext); CHKERRQ(ierr);
1268 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, eulerian_ext); CHKERRQ(ierr);
1269
1270 VecCopy(user->K_Omega, user->K_Omega_o);
1271
1272 DMGlobalToLocalBegin(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1273 DMGlobalToLocalEnd(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1274
1275 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1276 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1277
1278 DMGlobalToLocalBegin(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1279 DMGlobalToLocalEnd(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1280
1281 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading RANS fields.\n");
1282
1283 return 0;
1284}
Vec K_Omega_o
Definition variables.h:865
Vec K_Omega
Definition variables.h:865
Vec lK_Omega_o
Definition variables.h:865
Vec lK_Omega
Definition variables.h:865
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteFieldData()

PetscErrorCode WriteFieldData ( UserCtx user,
const char *  field_name,
Vec  field_vec,
PetscInt  ti,
const char *  ext 
)

Writes data from a specific PETSc vector to a file.

This function uses the field name to construct the file path and writes the data from the provided PETSc vector to the corresponding file.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
[in]field_nameName of the field (e.g., "ufield", "vfield", "pfield").
[in]field_vecPETSc vector containing the field data to write.
[in]tiTime index for constructing the file name.
[in]extFile extension (e.g., "dat").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes data from a specific PETSc vector to a file.

Local to this translation unit.

Definition at line 1448 of file io.c.

/* NOTE(review): Doxygen-extracted listing. The leading numbers are io.c source
 * line numbers, not code. The generator dropped interior lines (1463, 1480,
 * 1495, 1540, 1549, 1558) — presumably LOG_ALLOW(...) heads and profiling
 * macros whose continuation arguments are still visible below. */
1453{
1454 PetscErrorCode ierr;
1455 MPI_Comm comm;
1456 PetscMPIInt rank, size;
1457
1458 const PetscInt placeholder_int = 0; /* keep legacy name */
1459 char filename[PETSC_MAX_PATH_LEN];
1460 SimCtx *simCtx=user->simCtx;
1461
1462 PetscFunctionBeginUser;
1464
/* The caller must set the I/O context directory first (mirrors the guard in
 * ReadFieldData). */
1465 if(!simCtx->current_io_directory){
1466 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling WriteFieldData().");
1467 }
1468
1469 /* ------------------------------------------------------------ */
1470 /* Basic communicator information */
1471 /* ------------------------------------------------------------ */
1472 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1473 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1474 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1475
/* File name: <dir>/<field><step 5 digits>_0.<ext> — matches the naming scheme
 * ReadFieldData expects on load. */
1476 ierr = PetscSNPrintf(filename,sizeof(filename),
1477 "%s/%s%05" PetscInt_FMT "_%d.%s",
1478 simCtx->current_io_directory,field_name,ti,placeholder_int,ext);CHKERRQ(ierr);
1479
1481 " Preparing to write <%s> on rank %d/%d\n",
1482 filename,rank,size);
1483
1484 /* ------------------------------------------------------------ */
1485 /* 1. Serial path */
1486 /* ------------------------------------------------------------ */
1487 if (size == 1) {
1488 PetscViewer viewer;
1489
1490 ierr = PetscViewerBinaryOpen(comm,filename,
1491 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1492 ierr = VecView(field_vec,viewer);CHKERRQ(ierr);
1493 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1494
1496 " Wrote <%s> (serial path)\n",filename);
1497 PetscFunctionReturn(0);
1498 }
1499
1500 /* ------------------------------------------------------------ */
1501 /* 2. Parallel path */
1502 /* ------------------------------------------------------------ */
1503 VecScatter scatter;
1504 Vec seq_vec=NULL; /* created by PETSc, lives only on rank 0 */
1505 DM dm = NULL;
1506 const char *dmtype = NULL;
1507 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1508
1509 /* 2.1 Check if vector has DMDA and convert to natural ordering if needed */
1510 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1511 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1512
1513 if (dmtype && !strcmp(dmtype, DMDA)) {
/* DMDA vectors are written in NATURAL ordering so the on-disk layout is
 * decomposition-independent; ReadFieldData performs the inverse conversion. */
1514 /* DMDA path: convert to natural ordering first */
1515 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1516 ierr = DMDAGlobalToNaturalBegin(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1517 ierr = DMDAGlobalToNaturalEnd(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1518
1519 /* Gather the natural-ordered vector */
1520 ierr = VecScatterCreateToZero(nat, &scatter, &seq_vec); CHKERRQ(ierr);
1521 } else {
1522 /* Non-DMDA path: direct gather in global ordering */
1523 ierr = VecScatterCreateToZero(field_vec, &scatter, &seq_vec); CHKERRQ(ierr);
1524 }
1525
1526 /* 2.2 Gather distributed → sequential (on rank 0) */
1527 ierr = VecScatterBegin(scatter, (nat ? nat : field_vec), seq_vec,
1528 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1529 ierr = VecScatterEnd(scatter, (nat ? nat : field_vec), seq_vec,
1530 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1531
1532 /* 2.3 Rank 0 writes the file */
1533 if (rank == 0) {
1534 PetscViewer viewer;
1535
1536 /* (optional) value diagnostics */
1537 PetscReal vmin,vmax;
1538 ierr = VecMin(seq_vec,NULL,&vmin);CHKERRQ(ierr);
1539 ierr = VecMax(seq_vec,NULL,&vmax);CHKERRQ(ierr);
1541 " <%s> range = [%.4e … %.4e]\n",
1542 field_name,(double)vmin,(double)vmax);
1543
1544 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1545 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1546 ierr = VecView(seq_vec,viewer);CHKERRQ(ierr);
1547 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1548
1550 " Wrote <%s> (parallel path)\n",filename);
1551 }
1552
1553 /* 2.4 Cleanup */
1554 ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
1555 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
1556 if (nat) { ierr = VecDestroy(&nat); CHKERRQ(ierr); }
1557
1559 PetscFunctionReturn(0);
1560}
Here is the caller graph for this function:

◆ WriteSimulationFields()

PetscErrorCode WriteSimulationFields ( UserCtx user)

Writes simulation fields to files.

This function writes contravariant velocity, Cartesian velocity, pressure, and node state fields to their respective binary files. It also conditionally writes LES, RANS, and statistical fields if they are enabled.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes simulation fields to files.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
WriteSimulationFields()

Definition at line 1568 of file io.c.

1569{
1570 PetscErrorCode ierr;
1571
1572 SimCtx *simCtx = user->simCtx;
1573
1574 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write simulation fields.\n");
1575
1576 // Set the current IO directory
1577 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1578 "%s/%s", simCtx->output_dir, simCtx->euler_subdir);CHKERRQ(ierr);
1579 simCtx->current_io_directory = simCtx->_io_context_buffer;
1580
1581 // Write contravariant velocity field
1582 ierr = WriteFieldData(user, "vfield", user->Ucont, simCtx->step, "dat"); CHKERRQ(ierr);
1583
1584 // Write Cartesian velocity field
1585 ierr = WriteFieldData(user, "ufield", user->Ucat, simCtx->step, "dat"); CHKERRQ(ierr);
1586
1587 // Write pressure field
1588 ierr = WriteFieldData(user, "pfield", user->P, simCtx->step, "dat"); CHKERRQ(ierr);
1589
1590 // Write node state field (nvert)
1591 ierr = WriteFieldData(user, "nvfield", user->Nvert, simCtx->step, "dat"); CHKERRQ(ierr);
1592
1593 // Write ParticleCountPerCell if enabled.
1594 if(simCtx->np>0){
1595 ierr = WriteFieldData(user, "ParticleCount",user->ParticleCount,simCtx->step,"dat"); CHKERRQ(ierr);
1596 ierr = WriteFieldData(user, "psifield", user->Psi, simCtx->step, "dat"); CHKERRQ(ierr);
1597 }
1598
1599 // Write LES fields if enabled
1600 if (simCtx->les) {
1601 ierr = WriteLESFields(user); CHKERRQ(ierr);
1602 }
1603
1604 // Write RANS fields if enabled
1605 if (simCtx->rans) {
1606 ierr = WriteRANSFields(user); CHKERRQ(ierr);
1607 }
1608
1609 // Write statistical fields if averaging is enabled
1610 if (simCtx->averaging) {
1611 ierr = WriteStatisticalFields(user); CHKERRQ(ierr);
1612 }
1613
1614 simCtx->current_io_directory = NULL;
1615
1616 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing simulation fields.\n");
1617
1618 return 0;
1619}
PetscErrorCode WriteStatisticalFields(UserCtx *user)
Implementation of WriteStatisticalFields().
Definition io.c:1627
PetscErrorCode WriteFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Internal helper implementation: WriteFieldData().
Definition io.c:1448
PetscErrorCode WriteRANSFields(UserCtx *user)
Implementation of WriteRANSFields().
Definition io.c:1679
PetscErrorCode WriteLESFields(UserCtx *user)
Internal helper implementation: WriteLESFields().
Definition io.c:1649
char output_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:665
PetscInt step
Definition variables.h:651
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteStatisticalFields()

PetscErrorCode WriteStatisticalFields ( UserCtx user)

Writes statistical fields for averaging purposes.

This function writes data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum, and P_sum to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes statistical fields for averaging purposes.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
WriteStatisticalFields()

Definition at line 1627 of file io.c.

1628{
1629 PetscErrorCode ierr;
1630
1631 SimCtx *simCtx = user->simCtx;
1632
1633 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write statistical fields.\n");
1634
1635 ierr = WriteFieldData(user, "su0", user->Ucat_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1636 ierr = WriteFieldData(user, "su1", user->Ucat_cross_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1637 ierr = WriteFieldData(user, "su2", user->Ucat_square_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1638 ierr = WriteFieldData(user, "sp", user->P_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1639
1640 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing statistical fields.\n");
1641
1642 return 0;
1643}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteLESFields()

PetscErrorCode WriteLESFields ( UserCtx user)

Writes LES-related fields.

This function writes LES-related fields such as Cs (Smagorinsky constant) to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes LES-related fields.

Local to this translation unit.

Definition at line 1649 of file io.c.

1650{
1651 PetscErrorCode ierr;
1652
1653 SimCtx *simCtx = user->simCtx;
1654
1655 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write LES fields.\n");
1656
1657
1658 DMLocalToGlobalBegin(user->da, user->lCs, INSERT_VALUES, user->CS);
1659 DMLocalToGlobalEnd(user->da, user->lCs, INSERT_VALUES, user->CS);
1660
1661 DMLocalToGlobalBegin(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1662 DMLocalToGlobalEnd(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1663
1664 ierr = WriteFieldData(user, "Nu_t", user->Nu_t, simCtx->step, "dat"); CHKERRQ(ierr);
1665 ierr = WriteFieldData(user, "cs", user->CS, simCtx->step, "dat"); CHKERRQ(ierr);
1666
1667
1668 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing LES fields.\n");
1669
1670 return 0;
1671}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteRANSFields()

PetscErrorCode WriteRANSFields ( UserCtx user)

Writes RANS-related fields.

This function writes RANS-related fields such as K_Omega to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes RANS-related fields.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
WriteRANSFields()

Definition at line 1679 of file io.c.

1680{
1681 PetscErrorCode ierr;
1682
1683 SimCtx *simCtx = user->simCtx;
1684
1685 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write RANS fields.\n");
1686
1687 ierr = WriteFieldData(user, "kfield", user->K_Omega, simCtx->step, "dat"); CHKERRQ(ierr);
1688
1689 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing RANS fields.\n");
1690
1691 return 0;
1692}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteSwarmField()

PetscErrorCode WriteSwarmField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Writes data from a specific field in a PETSc Swarm to a file.

This function retrieves the Swarm from the UserCtx (i.e., user->swarm) and creates a global PETSc vector from the specified Swarm field. It then calls the existing WriteFieldData() function to handle the actual I/O operation. After writing the data, the function destroys the temporary global vector to avoid memory leaks.

Parameters
[in]userPointer to the UserCtx structure containing simulation context and the PetscSwarm (as user->swarm).
[in]field_nameName of the Swarm field to be written (e.g., "my_field").
[in]tiTime index used to construct the output file name.
[in]extFile extension (e.g., "dat", "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.
Note
Compatible with PETSc 3.14.4.

Writes data from a specific field in a PETSc Swarm to a file.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
WriteSwarmField()

Definition at line 1700 of file io.c.

/* NOTE(review): Doxygen-extracted listing. The leading numbers are io.c source
 * line numbers, not code. The generator dropped the LOG_ALLOW(...) head lines
 * (1719, 1728, 1738, 1744); their continuation arguments remain visible. */
1701{
1702 PetscErrorCode ierr;
1703 Vec fieldVec;
1704 DM swarm;
1705
1706 PetscFunctionBeginUser; /* PETSc macro indicating start of function */
1707
1708 /*
1709 * 1) Retrieve the PetscSwarm from the user context.
1710 * Ensure user->swarm is initialized and not NULL.
1711 */
1712 swarm = user->swarm;
1713
1714 /*
1715 * 2) Create a global vector from the specified swarm field.
1716 * This function is available in PETSc 3.14.4.
1717 * It provides a read/write "view" of the swarm field as a global Vec.
1718 */
1720 "Attempting to create global vector from field: %s\n",
1721 field_name);
1722 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1723
1724 /*
1725 * 3) Use your existing WriteFieldData() to write the global vector to a file.
1726 * The field name, time index, and extension are passed along for naming.
1727 */
1729 "Calling WriteFieldData for field: %s\n",
1730 field_name);
1731 ierr = WriteFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1732
1733 /*
1734 * 4) Destroy the global vector once the data is successfully written.
1735 * This step is crucial for avoiding memory leaks.
1736 * DMSwarmDestroyGlobalVectorFromField() is also available in PETSc 3.14.4.
1737 */
/* NOTE(review): if WriteFieldData fails above, CHKERRQ returns before this
 * destroy — the swarm field view is then never returned. Consider goto-style
 * cleanup in io.c. */
1739 "Destroying the global vector for field: %s\n",
1740 field_name);
1741 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1742
1743 /* Log and return success. */
1745 "Successfully wrote swarm data for field: %s\n",
1746 field_name);
1747
1748 PetscFunctionReturn(0); /* PETSc macro indicating end of function */
1749}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteSwarmIntField()

PetscErrorCode WriteSwarmIntField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Writes integer data from a specific PETSc Swarm field to a file.

This function is designed for swarm fields that store integer data (e.g., DMSwarm_CellID or DMSwarm_pid), which cannot be written directly by the scalar-based writer. It accesses the raw integer data for the field with DMSwarmGetField(), casts each value into a temporary parallel Vec of PetscScalars, writes that Vec via WriteFieldData(), and then restores field access and destroys the temporary Vec.

Parameters
[in]userPointer to the UserCtx structure containing the PetscSwarm.
[in]field_nameName of the integer Swarm field to be written.
[in]tiTime index used to construct the output file name.
[in]extFile extension (e.g., "dat", "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes integer data from a specific PETSc Swarm field to a file.

Local to this translation unit.

Definition at line 1755 of file io.c.

1756{
1757 PetscErrorCode ierr;
1758 DM swarm = user->swarm;
1759 Vec temp_vec; // Temporary Vec to hold casted data
1760 PetscInt nlocal, nglobal,bs,i;
1761 void *field_array_void;
1762 PetscScalar *scalar_array; // Pointer to the temporary Vec's scalar data
1763
1764 PetscFunctionBeginUser;
1765
1766 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Casting '%s' to Vec for writing.\n", field_name);
1767
1768 // Get the swarm field properties
1769 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1770 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1771 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
1772
1773 // Create temporary parallel Vec with the CORRECT layout
1774 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1775 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1776 ierr = VecSetSizes(temp_vec, nlocal*bs, nglobal*bs); CHKERRQ(ierr);
1777 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1778
1779 // Define the swarm's vector field using the mandatory 'position' field
1780 DMSwarmVectorDefineField(swarm,"position");
1781
1782 ierr = VecGetArray(temp_vec, &scalar_array); CHKERRQ(ierr);
1783
1784 if(strcasecmp(field_name,"DMSwarm_pid") == 0){
1785 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1786 // Perform the cast from PetscInt64 to PetscScalar
1787 for (i = 0; i < nlocal*bs; i++) {
1788 scalar_array[i] = (PetscScalar)int64_array[i];
1789 }
1790 }else{
1791 PetscInt *int_array = (PetscInt *)field_array_void;
1792 //Perform the cast from PetscInt to PetscScalar
1793 for (i = 0; i < nlocal*bs; i++) {
1794 scalar_array[i] = (PetscScalar)int_array[i];
1795 }
1796 }
1797
1798 // Restore access to both arrays
1799 ierr = VecRestoreArray(temp_vec, &scalar_array); CHKERRQ(ierr);
1800 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
1801
1802 // Call your existing writer with the temporary, populated Vec
1803 ierr = WriteFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1804
1805 // Clean up
1806 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1807
1808 PetscFunctionReturn(0);
1809}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteAllSwarmFields()

PetscErrorCode WriteAllSwarmFields ( UserCtx user)

Writes a predefined set of PETSc Swarm fields to files.

This function iterates through a hardcoded list of common swarm fields (position, velocity, etc.) and calls the WriteSwarmField() helper function for each one. This provides a straightforward way to output essential particle data at a given simulation step.

This function will only execute if particles are enabled in the simulation (i.e., user->simCtx->np > 0 and user->swarm is not NULL).

Parameters
[in]userPointer to the UserCtx structure containing the simulation context and the PetscSwarm.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes a predefined set of PETSc Swarm fields to files.

Local to this translation unit.

Definition at line 1815 of file io.c.

1816{
1817 PetscErrorCode ierr;
1818 SimCtx *simCtx = user->simCtx;
1819
1820 PetscFunctionBeginUser;
1821
1822 // If no swarm is configured or there are no particles, do nothing and return.
1823 if (!user->swarm || simCtx->np <= 0) {
1824 PetscFunctionReturn(0);
1825 }
1826
1827 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write swarm fields.\n");
1828
1829 // Ensure the current IO directory is set
1830 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1831 "%s/%s", simCtx->output_dir, simCtx->particle_subdir);CHKERRQ(ierr);
1832 simCtx->current_io_directory = simCtx->_io_context_buffer;
1833
1834 // Write particle position field
1835 ierr = WriteSwarmField(user, "position", simCtx->step, "dat"); CHKERRQ(ierr);
1836
1837 // Write particle velocity field
1838 ierr = WriteSwarmField(user, "velocity", simCtx->step, "dat"); CHKERRQ(ierr);
1839
1840 // Write particle weight field
1841 ierr = WriteSwarmField(user, "weight", simCtx->step, "dat"); CHKERRQ(ierr);
1842
1843 // Write custom particle field "Psi"
1844 ierr = WriteSwarmField(user, "Psi", simCtx->step, "dat"); CHKERRQ(ierr);
1845
1846 // Integer fields require special handling
1847
1848 // Write the background mesh cell ID for each particle
1849 ierr = WriteSwarmIntField(user, "DMSwarm_CellID", simCtx->step, "dat"); CHKERRQ(ierr);
1850
1851 // Write the particle location status (e.g., inside or outside the domain)
1852 ierr = WriteSwarmIntField(user, "DMSwarm_location_status", simCtx->step, "dat"); CHKERRQ(ierr);
1853
1854 // Write the unique particle ID
1855 ierr = WriteSwarmIntField(user, "DMSwarm_pid", simCtx->step, "dat"); CHKERRQ(ierr);
1856
1857 simCtx->current_io_directory = NULL;
1858
1859 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing swarm fields.\n");
1860
1861 PetscFunctionReturn(0);
1862}
PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Implementation of WriteSwarmField().
Definition io.c:1700
PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Internal helper implementation: WriteSwarmIntField().
Definition io.c:1755
char particle_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:667
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadDataFileToArray()

PetscInt ReadDataFileToArray ( const char *  filename,
double **  data_out,
PetscInt *  Nout,
MPI_Comm  comm 
)

Reads a simple ASCII data file containing one numeric value per line.

This helper performs rank-0 file I/O, broadcasts the parsed result to the rest of the communicator, and returns a replicated array on every rank.

Parameters
filenamePath to the input data file.
data_outOutput pointer to the allocated scalar array.
NoutOutput pointer storing the number of values read.
commMPI communicator used for the coordinated read/broadcast sequence.
Returns
0 on success; otherwise a non-zero code (1-5) identifying the failure stage: null argument, missing file, open failure, or allocation failure on rank 0 or a receiving rank.

Reads a simple ASCII data file containing one numeric value per line.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
ReadDataFileToArray()

Definition at line 2401 of file io.c.

2405{
2406 /* STEP 0: Prepare local variables & log function entry */
2407 PetscMPIInt rank, size;
2408 PetscErrorCode ierr;
2409 FILE *fp = NULL;
2410 PetscInt N = 0; /* number of lines/values read on rank 0 */
2411 double *array = NULL; /* pointer to local array on each rank */
2412 PetscInt fileExistsFlag = 0; /* 0 = doesn't exist, 1 = does exist */
2413
2414 LOG_ALLOW(GLOBAL, LOG_DEBUG,
2415 "Start reading from file: %s\n",
2416 filename);
2417
2418 /* Basic error checking: data_out, Nout must be non-null. */
2419 if (!filename || !data_out || !Nout) {
2420 LOG_ALLOW(GLOBAL, LOG_ERROR,
2421 "Null pointer argument provided.\n");
2422 return 1;
2423 }
2424
2425 /* Determine rank/size for coordinating I/O. */
2426 MPI_Comm_rank(comm, &rank);
2427 MPI_Comm_size(comm, &size);
2428
2429 /* STEP 1: On rank 0, check if file can be opened. */
2430 if (!rank) {
2431 fp = fopen(filename, "r");
2432 if (fp) {
2433 fileExistsFlag = 1;
2434 fclose(fp);
2435 }
2436 }
2437
2438 /* STEP 2: Broadcast file existence to all ranks. */
2439 // In ReadDataFileToArray:
2440 ierr = MPI_Bcast(&fileExistsFlag, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2441
2442 if (!fileExistsFlag) {
2443 /* If file does not exist, log & return. */
2444 if (!rank) {
2445 LOG_ALLOW(GLOBAL, LOG_ERROR,
2446 "File '%s' not found.\n",
2447 filename);
2448 }
2449 return 2;
2450 }
2451
2452 /* STEP 3: Rank 0 re-opens and reads the file, counting lines, etc. */
2453 if (!rank) {
2454 fp = fopen(filename, "r");
2455 if (!fp) {
2456 LOG_ALLOW(GLOBAL, LOG_ERROR,
2457 "File '%s' could not be opened for reading.\n",
2458 filename);
2459 return 3;
2460 }
2461
2462 /* (3a) Count lines first. */
2463 {
2464 char line[256];
2465 while (fgets(line, sizeof(line), fp)) {
2466 N++;
2467 }
2468 }
2469
2470 LOG_ALLOW(GLOBAL, LOG_DEBUG,
2471 "File '%s' has %d lines.\n",
2472 filename, N);
2473
2474 /* (3b) Allocate array on rank 0. */
2475 array = (double*)malloc(N * sizeof(double));
2476 if (!array) {
2477 fclose(fp);
2478 LOG_ALLOW(GLOBAL, LOG_ERROR,
2479 "malloc failed for array.\n");
2480 return 4;
2481 }
2482
2483 /* (3c) Rewind & read values into array. */
2484 rewind(fp);
2485 {
2486 PetscInt i = 0;
2487 char line[256];
2488 while (fgets(line, sizeof(line), fp)) {
2489 double val;
2490 if (sscanf(line, "%lf", &val) == 1) {
2491 array[i++] = val;
2492 }
2493 }
2494 }
2495 fclose(fp);
2496
2497 LOG_ALLOW(GLOBAL, LOG_INFO,
2498 "Successfully read %d values from '%s'.\n",
2499 N, filename);
2500 }
2501
2502 /* STEP 4: Broadcast the integer N to all ranks. */
2503 ierr = MPI_Bcast(&N, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2504
2505 /* STEP 5: Each rank allocates an array to receive the broadcast if rank>0. */
2506 if (rank) {
2507 array = (double*)malloc(N * sizeof(double));
2508 if (!array) {
2509 LOG_ALLOW(GLOBAL, LOG_ERROR,
2510 "malloc failed on rank %d.\n",
2511 rank);
2512 return 5;
2513 }
2514 }
2515
2516 /* STEP 6: Broadcast the actual data from rank 0 to all. */
2517 ierr = MPI_Bcast(array, N, MPI_DOUBLE, 0, comm); CHKERRQ(ierr);
2518
2519 /* STEP 7: Assign outputs on all ranks. */
2520 *data_out = array;
2521 *Nout = N;
2522
2523 LOG_ALLOW(GLOBAL, LOG_INFO,
2524 "Done. Provided array of length=%d to all ranks.\n",
2525 N);
2526 return 0; /* success */
2527}

◆ CreateVTKFileFromMetadata()

PetscInt CreateVTKFileFromMetadata ( const char *  filename,
const VTKMetaData meta,
MPI_Comm  comm 
)

Creates a VTK file from prepared metadata and field payloads.

This helper dispatches to the structured-grid or polydata writer based on the metadata contents and emits the assembled VTK file on the requested communicator.

Parameters
filenamePath to the output VTK file.
metaVTK metadata describing the output geometry and field payloads.
commMPI communicator used by the write operation.
Returns
0 on success; otherwise a PETSc error code (e.g., PETSC_ERR_FILE_OPEN if the temporary file cannot be created, or PETSC_ERR_FILE_WRITE if closing or renaming the output fails).

Creates a VTK file from prepared metadata and field payloads.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/vtk_io.h.

See also
CreateVTKFileFromMetadata()

Definition at line 153 of file vtk_io.c.

154{
155 PetscMPIInt rank;
156 MPI_Comm_rank(comm, &rank);
157 PetscErrorCode ierr = 0;
158
159 if (!rank) {
160 char tmp_filename[PETSC_MAX_PATH_LEN];
161 FILE *fp = NULL;
162
163 ierr = PetscSNPrintf(tmp_filename, sizeof(tmp_filename), "%s.tmp", filename);CHKERRQ(ierr);
164 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank 0 writing combined VTK file '%s'.\n", filename);
165 fp = fopen(tmp_filename, "wb");
166 if (!fp) {
167 LOG_ALLOW(GLOBAL, LOG_ERROR, "fopen failed for %s.\n", tmp_filename);
168 return PETSC_ERR_FILE_OPEN;
169 }
170
171 PetscInt boffset = 0;
172
173 ierr = WriteVTKFileHeader(fp, meta, &boffset);
174 if (ierr) {
175 fclose(fp);
176 remove(tmp_filename);
177 return ierr;
178 }
179
180 if (meta->coords) {
181 ierr = WriteVTKAppendedBlock(fp, meta->coords, 3 * meta->npoints, sizeof(PetscScalar));
182 if (ierr) {
183 fclose(fp);
184 remove(tmp_filename);
185 return ierr;
186 }
187 }
188
189 for (PetscInt i = 0; i < meta->num_point_data_fields; i++) {
190 const VTKFieldInfo *field = &meta->point_data_fields[i];
191 if (field->data) {
192 ierr = WriteVTKAppendedBlock(fp, field->data, field->num_components * meta->npoints, sizeof(PetscScalar));
193 if (ierr) {
194 fclose(fp);
195 remove(tmp_filename);
196 return ierr;
197 }
198 }
199 }
200 if (meta->fileType == VTK_POLYDATA) {
201 if (meta->connectivity) {
202 ierr = WriteVTKAppendedBlock(fp, meta->connectivity, meta->npoints, sizeof(PetscInt));
203 if (ierr) {
204 fclose(fp);
205 remove(tmp_filename);
206 return ierr;
207 }
208 }
209 if (meta->offsets) {
210 ierr = WriteVTKAppendedBlock(fp, meta->offsets, meta->npoints, sizeof(PetscInt));
211 if (ierr) {
212 fclose(fp);
213 remove(tmp_filename);
214 return ierr;
215 }
216 }
217 }
218 ierr = WriteVTKFileFooter(fp, meta);
219 if (ierr) {
220 fclose(fp);
221 remove(tmp_filename);
222 return ierr;
223 }
224 if (fclose(fp) != 0) {
225 remove(tmp_filename);
226 return PETSC_ERR_FILE_WRITE;
227 }
228 if (rename(tmp_filename, filename) != 0) {
229 remove(tmp_filename);
230 return PETSC_ERR_FILE_WRITE;
231 }
232 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank 0 finished writing VTK file '%s'.\n", filename);
233 }
234 return 0;
235}
PetscInt npoints
Definition variables.h:604
PetscInt num_components
Definition variables.h:591
PetscInt num_point_data_fields
Definition variables.h:607
PetscInt * connectivity
Definition variables.h:608
PetscInt * offsets
Definition variables.h:609
VTKFileType fileType
Definition variables.h:602
PetscScalar * data
Definition variables.h:592
PetscScalar * coords
Definition variables.h:605
VTKFieldInfo point_data_fields[20]
Definition variables.h:606
@ VTK_POLYDATA
Definition variables.h:598
Stores all necessary information for a single data array in a VTK file.
Definition variables.h:589
static PetscErrorCode WriteVTKFileFooter(FILE *fp, const VTKMetaData *meta)
Internal helper implementation: WriteVTKFileFooter().
Definition vtk_io.c:136
static PetscErrorCode WriteVTKAppendedBlock(FILE *fp, const void *data, PetscInt num_elements, size_t element_size)
Internal helper implementation: WriteVTKAppendedBlock().
Definition vtk_io.c:21
static PetscErrorCode WriteVTKFileHeader(FILE *fp, const VTKMetaData *meta, PetscInt *boffset)
Internal helper implementation: WriteVTKFileHeader().
Definition vtk_io.c:122
Here is the call graph for this function:
Here is the caller graph for this function:

◆ VecToArrayOnRank0()

PetscErrorCode VecToArrayOnRank0 ( Vec  inVec,
PetscInt *  N,
double **  arrayOut 
)

Gathers the contents of a distributed PETSc Vec into a single array on rank 0.

Parameters
[in]inVecThe input (possibly distributed) Vec.
[out]NThe global size of the vector.
[out]arrayOutOn rank 0, points to the newly allocated array holding all data. On other ranks, it is set to NULL.
Returns
PetscErrorCode Return 0 on success, nonzero on failure.

Gathers the contents of a distributed PETSc Vec into a single array on rank 0.

Local to this translation unit.

Definition at line 1868 of file io.c.

1869{
1870 PetscErrorCode ierr;
1871 MPI_Comm comm;
1872 PetscMPIInt rank;
1873 PetscInt globalSize;
1874 DM dm = NULL;
1875 const char *dmtype = NULL;
1876
1877 /* For DMDA path */
1878 Vec nat = NULL, seqNat = NULL;
1879 VecScatter scatNat = NULL;
1880 const PetscScalar *nar = NULL;
1881 PetscScalar *buf = NULL;
1882
1883 /* For generic (no DM) path */
1884 Vec seq = NULL;
1885 VecScatter scat = NULL;
1886 const PetscScalar *sar = NULL;
1887
1888 PetscFunctionBeginUser;
1889
1890 ierr = PetscObjectGetComm((PetscObject)inVec, &comm); CHKERRQ(ierr);
1891 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
1892 ierr = VecGetSize(inVec, &globalSize); CHKERRQ(ierr);
1893 *N = globalSize;
1894 *arrayOut = NULL;
1895
1896 ierr = VecGetDM(inVec, &dm); CHKERRQ(ierr);
1897 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1898
1899 if (dmtype && !strcmp(dmtype, DMDA)) {
1900 /* --- DMDA path: go to NATURAL ordering, then gather to rank 0 --- */
1901 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1902 ierr = DMDAGlobalToNaturalBegin(dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
1903 ierr = DMDAGlobalToNaturalEnd (dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
1904
1905 ierr = VecScatterCreateToZero(nat, &scatNat, &seqNat); CHKERRQ(ierr);
1906 ierr = VecScatterBegin(scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1907 ierr = VecScatterEnd (scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1908
1909 if (rank == 0) {
1910 PetscInt nseq;
1911 ierr = VecGetLocalSize(seqNat, &nseq); CHKERRQ(ierr);
1912 ierr = VecGetArrayRead(seqNat, &nar); CHKERRQ(ierr);
1913
1914 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
1915 ierr = PetscMemcpy(buf, nar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
1916
1917 ierr = VecRestoreArrayRead(seqNat, &nar); CHKERRQ(ierr);
1918 *arrayOut = (double*)buf; /* hand back as double* to match the helper signature */
1919 }
1920
1921 ierr = VecScatterDestroy(&scatNat); CHKERRQ(ierr);
1922 ierr = VecDestroy(&seqNat); CHKERRQ(ierr);
1923 ierr = VecDestroy(&nat); CHKERRQ(ierr);
1924 } else {
1925 /* --- No DM attached: plain gather in Vec’s global (parallel) layout order --- */
1926 ierr = VecScatterCreateToZero(inVec, &scat, &seq); CHKERRQ(ierr);
1927 ierr = VecScatterBegin(scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1928 ierr = VecScatterEnd (scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1929
1930 if (rank == 0) {
1931 PetscInt nseq;
1932 ierr = VecGetLocalSize(seq, &nseq); CHKERRQ(ierr);
1933 ierr = VecGetArrayRead(seq, &sar); CHKERRQ(ierr);
1934
1935 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
1936 ierr = PetscMemcpy(buf, sar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
1937
1938 ierr = VecRestoreArrayRead(seq, &sar); CHKERRQ(ierr);
1939 *arrayOut = (double*)buf;
1940 }
1941
1942 ierr = VecScatterDestroy(&scat); CHKERRQ(ierr);
1943 ierr = VecDestroy(&seq); CHKERRQ(ierr);
1944 }
1945
1946 PetscFunctionReturn(0);
1947}
Here is the caller graph for this function:

◆ SwarmFieldToArrayOnRank0()

PetscErrorCode SwarmFieldToArrayOnRank0 ( DM  swarm,
const char *  field_name,
PetscInt *  n_total_particles,
PetscInt *  n_components,
void **  gathered_array 
)

Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.

This is a generic, type-aware gather routine: it selects the element size from the field name (PetscInt64 for DMSwarm_pid, PetscInt for DMSwarm_CellID and DMSwarm_location_status, PetscScalar otherwise) so integer-valued and scalar fields are both handled. It is a COLLECTIVE operation.

Parameters
[in]swarmThe DMSwarm to gather from.
[in]field_nameThe name of the field to gather.
[out]n_total_particles(Output on rank 0) Total number of particles in the global swarm.
[out]n_components(Output on rank 0) Number of components for the field.
[out]gathered_array(Output on rank 0) A newly allocated array containing the full, gathered data. The caller is responsible for freeing this memory and for casting it to the correct type.
Returns
PetscErrorCode

Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.

Local to this translation unit.

Definition at line 1953 of file io.c.

1954{
1955 PetscErrorCode ierr;
1956 PetscMPIInt rank, size;
1957 PetscInt nlocal, nglobal, bs;
1958 void *local_array_void;
1959 size_t element_size = 0;
1960
1961 PetscFunctionBeginUser;
1962
1963 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1964 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
1965
1966 // All ranks get swarm properties to determine send/receive counts
1967 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1968 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1969 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
1970
1971 // Determine the size of one element of the field's data type
1972 if (strcasecmp(field_name, "DMSwarm_pid") == 0) {
1973 element_size = sizeof(PetscInt64);
1974 } else if (strcasecmp(field_name, "DMSwarm_CellID") == 0 || strcasecmp(field_name, "DMSwarm_location_status") == 0) {
1975 element_size = sizeof(PetscInt);
1976 } else {
1977 element_size = sizeof(PetscScalar);
1978 }
1979
1980 if (rank == 0) {
1981 *n_total_particles = nglobal;
1982 *n_components = bs;
1983 *gathered_array = NULL; // Initialize output
1984 }
1985
1986 if (size == 1) { // Serial case is a simple copy
1987 if (rank == 0) {
1988 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
1989 ierr = PetscMemcpy(*gathered_array, local_array_void, nglobal * bs * element_size); CHKERRQ(ierr);
1990 }
1991 } else { // Parallel case: use MPI_Gatherv
1992 PetscInt *recvcounts = NULL, *displs = NULL;
1993 if (rank == 0) {
1994 ierr = PetscMalloc1(size, &recvcounts); CHKERRQ(ierr);
1995 ierr = PetscMalloc1(size, &displs); CHKERRQ(ierr);
1996 }
1997 PetscInt sendcount = nlocal * bs;
1998
1999 // Gather the number of elements (not bytes) from each rank
2000 ierr = MPI_Gather(&sendcount, 1, MPIU_INT, recvcounts, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2001
2002 if (rank == 0) {
2003 displs[0] = 0;
2004 // Convert counts and calculate displacements in terms of BYTES
2005 for (PetscMPIInt i = 0; i < size; i++) recvcounts[i] *= element_size;
2006 for (PetscMPIInt i = 1; i < size; i++) displs[i] = displs[i-1] + recvcounts[i-1];
2007
2008 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2009 }
2010
2011 // Use Gatherv with MPI_BYTE to handle any data type generically
2012 ierr = MPI_Gatherv(local_array_void, nlocal * bs * element_size, MPI_BYTE,
2013 *gathered_array, recvcounts, displs, MPI_BYTE,
2014 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2015
2016 if (rank == 0) {
2017 ierr = PetscFree(recvcounts); CHKERRQ(ierr);
2018 ierr = PetscFree(displs); CHKERRQ(ierr);
2019 }
2020 }
2021
2022 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2023
2024 PetscFunctionReturn(0);
2025}
Here is the caller graph for this function:

◆ ReadSwarmField()

PetscErrorCode ReadSwarmField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Reads data from a file into a specified field of a PETSc DMSwarm.

This function is the counterpart to WriteSwarmField(). It creates a global PETSc vector that references the specified DMSwarm field, uses ReadFieldData() to read the data from a file, and then destroys the global vector reference.

Parameters
[in]userPointer to the UserCtx structure (containing user->swarm).
[in]field_nameName of the DMSwarm field to read into (must be previously declared/allocated).
[in]tiTime index used to construct the input file name.
[in]extFile extension (e.g., "dat" or "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.
Note
Compatible with PETSc 3.14.x.

Reads data from a file into a specified field of a PETSc DMSwarm.

Local to this translation unit.

Definition at line 1291 of file io.c.

1292{
1293 PetscErrorCode ierr;
1294 DM swarm;
1295 Vec fieldVec;
1296
1297 PetscFunctionBegin;
1298
1299 swarm = user->swarm;
1300
1301 LOG_ALLOW(GLOBAL,LOG_DEBUG," ReadSwarmField Begins \n");
1302
1303 /* 2) Create a global vector that references the specified Swarm field. */
1304 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1305
1306 LOG_ALLOW(GLOBAL,LOG_DEBUG," Vector created from Field \n");
1307
1308 /* 3) Use the ReadFieldData() function to read data into fieldVec. */
1309 ierr = ReadFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1310
1311 /* 4) Destroy the global vector reference. */
1312 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1313
1314 PetscFunctionReturn(0);
1315}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadSwarmIntField()

PetscErrorCode ReadSwarmIntField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Reads integer swarm data by using ReadFieldData and casting the result.

This function is the counterpart to WriteSwarmIntField. It reads a file containing floating-point data (that was originally integer) into a temporary Vec and then casts it back to the integer swarm field. It works by:

  1. Creating a temporary parallel Vec.
  2. Calling the standard ReadFieldData() to populate this Vec.
  3. Accessing the local data of both the Vec and the swarm field.
  4. Populating the swarm's integer field by casting each PetscScalar back to a PetscInt.
  5. Destroying the temporary Vec.
Parameters
[in]userPointer to the UserCtx structure.
[in]field_nameName of the integer Swarm field to be read.
[in]tiTime index for the input file.
[in]extFile extension.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads integer swarm data by using ReadFieldData and casting the result.

Local to this translation unit.

Definition at line 1321 of file io.c.

1322{
1323 PetscErrorCode ierr;
1324 DM swarm = user->swarm;
1325 Vec temp_vec;
1326 PetscInt nlocal, nglobal, bs, i;
1327 const PetscScalar *scalar_array; // Read-only pointer from the temp Vec
1328 void *field_array_void;
1329
1330
1331 PetscFunctionBeginUser;
1332
1333 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading '%s' via temporary Vec.\n", field_name);
1334
1335 // Get the properties of the swarm field to determine the expected layout
1336 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1337 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1338 // We get the block size but not the data pointer yet
1339 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1340 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1341
1342 // Create a temporary Vec with the CORRECT layout to receive the data
1343 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1344 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1345 ierr = VecSetSizes(temp_vec, nlocal * bs, nglobal * bs); CHKERRQ(ierr);
1346 ierr = VecSetBlockSize(temp_vec, bs); CHKERRQ(ierr);
1347 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1348
1349 // Call your existing reader to populate the temporary Vec
1350 ierr = ReadFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1351
1352 // Get local pointers
1353 ierr = VecGetArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1354 ierr = DMSwarmGetField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1355
1356 // Perform the cast back, using the correct loop size (nlocal * bs)
1357 if (strcmp(field_name, "DMSwarm_pid") == 0) {
1358 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1359 for (i = 0; i < nlocal * bs; i++) {
1360 int64_array[i] = (PetscInt64)scalar_array[i];
1361 }
1362 } else {
1363 PetscInt *int_array = (PetscInt *)field_array_void;
1364 for (i = 0; i < nlocal * bs; i++) {
1365 int_array[i] = (PetscInt)scalar_array[i];
1366 }
1367 }
1368
1369 // Restore access
1370 ierr = DMSwarmRestoreField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1371 ierr = VecRestoreArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1372
1373 // 6. Clean up
1374 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1375
1376 PetscFunctionReturn(0);
1377}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadAllSwarmFields()

PetscErrorCode ReadAllSwarmFields ( UserCtx user,
PetscInt  ti 
)

Reads multiple fields (positions, velocity, CellID, and weight) into a DMSwarm.

This function is analogous to ReadSimulationFields() but targets a DMSwarm. Each Swarm field is read from a separate file using ReadSwarmField().

Parameters
[in,out]userPointer to the UserCtx structure containing the DMSwarm (user->swarm).
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads multiple fields (positions, velocity, CellID, and weight) into a DMSwarm.

Local to this translation unit.

Definition at line 1383 of file io.c.

1384{
1385 PetscErrorCode ierr;
1386 PetscInt nGlobal;
1387 SimCtx *simCtx = user->simCtx;
1388 const char *source_path = NULL;
1389 const char *particle_ext = "dat";
1390
1391 PetscFunctionBeginUser;
1392 ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
1393 LOG_ALLOW(GLOBAL, LOG_INFO, "Reading DMSwarm fields for timestep %d (swarm size is %d).\n", ti, nGlobal);
1394
1395 if (nGlobal == 0) {
1396 LOG_ALLOW(GLOBAL, LOG_INFO, "Swarm is empty for timestep %d. Nothing to read.\n", ti);
1397 PetscFunctionReturn(0);
1398 }
1399
1400 // First, determine the top-level source directory based on the execution mode.
1401 if (simCtx->exec_mode == EXEC_MODE_SOLVER) {
1402 source_path = simCtx->restart_dir;
1403 } else if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
1404 source_path = simCtx->pps->source_dir;
1405 if (simCtx->pps->particleExt[0] != '\0') {
1406 particle_ext = simCtx->pps->particleExt;
1407 }
1408 } else {
1409 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1410 }
1411
1412 // Set the current I/O directory context
1413 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1414 "%s/%s", source_path, simCtx->particle_subdir); CHKERRQ(ierr);
1415
1416 simCtx->current_io_directory = simCtx->_io_context_buffer;
1417
1418 /* 1) Read positions (REQUIRED) */
1419 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading mandatory position field...\n");
1420 ierr = ReadSwarmField(user, "position", ti, particle_ext);
1421 if (ierr) {
1422 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read MANDATORY 'position' field for step %d. Cannot continue.", ti);
1423 }
1424 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read mandatory position field for step %d.\n", ti);
1425
1426 /* 2) Read all OPTIONAL fields using the helper function. */
1427 /* The helper will print a warning and continue if a file is not found. */
1428 ierr = ReadOptionalSwarmField(user, "velocity", "Velocity", ti, particle_ext); CHKERRQ(ierr);
1429 ierr = ReadOptionalSwarmField(user, "DMSwarm_pid", "Particle ID", ti, particle_ext); CHKERRQ(ierr);
1430 ierr = ReadOptionalSwarmField(user, "DMSwarm_CellID", "Cell ID", ti, particle_ext); CHKERRQ(ierr);
1431 ierr = ReadOptionalSwarmField(user, "weight", "Particle Weight", ti, particle_ext); CHKERRQ(ierr);
1432 ierr = ReadOptionalSwarmField(user, "Psi", "Scalar Psi", ti, particle_ext); CHKERRQ(ierr);
1433 ierr = ReadOptionalSwarmField(user, "DMSwarm_location_status", "Migration Status", ti, particle_ext); CHKERRQ(ierr);
1434
1435 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1436
1437 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading DMSwarm fields for timestep %d.\n", ti);
1438 PetscFunctionReturn(0);
1439}
PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Internal helper implementation: ReadSwarmField().
Definition io.c:1291
static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
Internal helper implementation: ReadOptionalSwarmField().
Definition io.c:856
char particleExt[8]
Definition variables.h:579
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadPositionsFromFile()

PetscErrorCode ReadPositionsFromFile ( PetscInt  timeIndex,
UserCtx *  user,
double **  coordsArray,
PetscInt *  Ncoords 
)

Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.

This function uses ReadFieldData to fill a PETSc Vec with coordinate data, then leverages VecToArrayOnRank0 to gather that data into a contiguous array (valid on rank 0 only).

Parameters
[in] timeIndex — The time index used to construct file names.
[in] user — Pointer to the user context.
[out] coordsArray — On rank 0, will point to a newly allocated array holding the coordinates.
[out] Ncoords — On rank 0, the length of coordsArray. On other ranks, 0.
Returns
PetscErrorCode Returns 0 on success, or non-zero on failure.

Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.

Local to this translation unit.

Definition at line 2533 of file io.c.

2537{
2538 PetscFunctionBeginUser;
2539
2540 PetscErrorCode ierr;
2541 Vec coordsVec;
2542
2543 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating coords Vec.\n");
2544 ierr = VecCreate(PETSC_COMM_WORLD, &coordsVec);CHKERRQ(ierr);
2545 ierr = VecSetFromOptions(coordsVec);CHKERRQ(ierr);
2546
2547 // For example: "position" is the name of the coordinate data
2548 ierr = ReadFieldData(user, "position", coordsVec, timeIndex, "dat");
2549 if (ierr) {
2551 "Error reading position data (ti=%d).\n",
2552 timeIndex);
2553 PetscFunctionReturn(ierr);
2554 }
2555
2556 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositions - Gathering coords Vec to rank 0.\n");
2557 ierr = VecToArrayOnRank0(coordsVec, Ncoords, coordsArray);CHKERRQ(ierr);
2558
2559 ierr = VecDestroy(&coordsVec);CHKERRQ(ierr);
2560
2562 "Successfully gathered coordinates. Ncoords=%d.\n", *Ncoords);
2563 PetscFunctionReturn(0);
2564}
PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
Internal helper implementation: VecToArrayOnRank0().
Definition io.c:1868
Here is the call graph for this function:

◆ ReadFieldDataToRank0()

PetscErrorCode ReadFieldDataToRank0 ( PetscInt  timeIndex,
const char *  fieldName,
UserCtx *  user,
double **  scalarArray,
PetscInt *  Nscalars 
)

Reads a named field from file into a PETSc Vec, then gathers it to rank 0.

This function wraps ReadFieldData and VecToArrayOnRank0 into a single step. The gathered data is stored in scalarArray on rank 0, with its length in Nscalars.

Parameters
[in] timeIndex — The time index used to construct file names.
[in] fieldName — Name of the field to be read (e.g., "velocity").
[in] user — Pointer to the user context.
[out] scalarArray — On rank 0, a newly allocated array holding the field data.
[out] Nscalars — On rank 0, length of scalarArray. On other ranks, 0.
Returns
PetscErrorCode Returns 0 on success, or non-zero on failure.

Reads a named field from file into a PETSc Vec, then gathers it to rank 0.

Local to this translation unit.

Definition at line 2571 of file io.c.

2576{
2577 PetscFunctionBeginUser;
2578
2579 PetscErrorCode ierr;
2580 Vec fieldVec;
2581
2582 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating field Vec.\n");
2583 ierr = VecCreate(PETSC_COMM_WORLD, &fieldVec);CHKERRQ(ierr);
2584 ierr = VecSetFromOptions(fieldVec);CHKERRQ(ierr);
2585
2586 ierr = ReadFieldData(user, fieldName, fieldVec, timeIndex, "dat");
2587 if (ierr) {
2589 "Error reading field '%s' (ti=%d).\n",
2590 fieldName, timeIndex);
2591 PetscFunctionReturn(ierr);
2592 }
2593
2594 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Gathering field Vec to rank 0.\n");
2595 ierr = VecToArrayOnRank0(fieldVec, Nscalars, scalarArray);CHKERRQ(ierr);
2596
2597 ierr = VecDestroy(&fieldVec);CHKERRQ(ierr);
2598
2600 "Successfully gathered field '%s'. Nscalars=%d.\n",
2601 fieldName, *Nscalars);
2602 PetscFunctionReturn(0);
2603}
Here is the call graph for this function:

◆ DisplayBanner()

PetscErrorCode DisplayBanner ( SimCtx *  simCtx)

Displays a structured banner summarizing the simulation configuration.

This function prints key simulation parameters to standard output. It is intended to be called ONLY by MPI rank 0. It retrieves global domain bounds and block metadata from simCtx.

Parameters
[in] simCtx — Pointer to the master simulation context.
Returns
PetscErrorCode Returns 0 on success.

Displays a structured banner summarizing the simulation configuration.

Local to this translation unit.

Definition at line 2031 of file io.c.

2032{
2033 PetscErrorCode ierr;
2034 PetscMPIInt rank;
2035 Cmpnts global_min_coords, global_max_coords;
2036 PetscReal StartTime;
2037 PetscInt StartStep,StepsToRun,total_num_particles;
2038 PetscMPIInt num_mpi_procs;
2039
2040 // SimCtx *simCtx = user->simCtx;
2041 UserCtx *user = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
2042 num_mpi_procs = simCtx->size;
2043 StartTime = simCtx->StartTime;
2044 StartStep = simCtx->StartStep;
2045 StepsToRun = simCtx->StepsToRun;
2046 total_num_particles = simCtx->np;
2047 BoundingBox *bboxlist_on_rank0 = simCtx->bboxlist;
2048
2049
2050 PetscFunctionBeginUser;
2051
2052 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "DisplayBanner - UserCtx pointer is NULL.");
2053 global_min_coords = user->bbox.min_coords;
2054 global_max_coords = user->bbox.max_coords;
2055 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2056
2057 if (rank == 0) {
2058 // If global_domain_bbox is not pre-populated in UserCtx, compute it here from bboxlist_on_rank0
2059 // This assumes bboxlist_on_rank0 is valid and contains all local bounding boxes on rank 0.
2060 if (bboxlist_on_rank0 && num_mpi_procs > 0) {
2061 global_min_coords = bboxlist_on_rank0[0].min_coords;
2062 global_max_coords = bboxlist_on_rank0[0].max_coords;
2063 for (PetscMPIInt p = 1; p < num_mpi_procs; ++p) {
2064 global_min_coords.x = PetscMin(global_min_coords.x, bboxlist_on_rank0[p].min_coords.x);
2065 global_min_coords.y = PetscMin(global_min_coords.y, bboxlist_on_rank0[p].min_coords.y);
2066 global_min_coords.z = PetscMin(global_min_coords.z, bboxlist_on_rank0[p].min_coords.z);
2067 global_max_coords.x = PetscMax(global_max_coords.x, bboxlist_on_rank0[p].max_coords.x);
2068 global_max_coords.y = PetscMax(global_max_coords.y, bboxlist_on_rank0[p].max_coords.y);
2069 global_max_coords.z = PetscMax(global_max_coords.z, bboxlist_on_rank0[p].max_coords.z);
2070 }
2071 // Optionally store this in user->global_domain_bbox if it's useful elsewhere
2072 // user->global_domain_bbox.min_coords = global_min_coords;
2073 // user->global_domain_bbox.max_coords = global_max_coords;
2074 } else {
2075 // Fallback or warning if bboxlist is not available for global calculation
2076 LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank 0) - bboxlist not provided or num_mpi_procs <=0; using user->bbox for domain bounds.\n");
2077 // global_min_coords = user->bbox.min_coords; // Use local bbox of rank 0 as fallback
2078 // global_max_coords = user->bbox.max_coords;
2079 }
2080
2081 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2082 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2083 ierr = PetscPrintf(PETSC_COMM_SELF, " CASE SUMMARY \n"); CHKERRQ(ierr);
2084 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2085 ierr = PetscPrintf(PETSC_COMM_SELF, " Grid Points : %d X %d X %d\n", user->IM, user->JM, user->KM); CHKERRQ(ierr);
2086 ierr = PetscPrintf(PETSC_COMM_SELF, " Cells : %d X %d X %d\n", user->IM - 1, user->JM - 1, user->KM - 1); CHKERRQ(ierr);
2087 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (X) : %.6f to %.6f\n", (double)global_min_coords.x, (double)global_max_coords.x); CHKERRQ(ierr);
2088 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Y) : %.6f to %.6f\n", (double)global_min_coords.y, (double)global_max_coords.y); CHKERRQ(ierr);
2089 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Z) : %.6f to %.6f\n", (double)global_min_coords.z, (double)global_max_coords.z); CHKERRQ(ierr);
2090 if(strcmp(simCtx->eulerianSource,"load")==0 || strcmp(simCtx->eulerianSource,"solve")==0){
2091 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------- Boundary Conditions --------------------\n"); CHKERRQ(ierr);
2092 const int face_name_width = 17; // Adjusted for longer names (Zeta,Eta,Xi)
2093 for (PetscInt i_face = 0; i_face < 6; ++i_face) {
2094 BCFace current_face = (BCFace)i_face;
2095 // The BCFaceToString will now return the Xi, Eta, Zeta versions
2096 const char* face_str = BCFaceToString(current_face);
2097 const char* bc_type_str = BCTypeToString(user->boundary_faces[current_face].mathematical_type);
2098 const char* bc_handler_type_str = BCHandlerTypeToString(user->boundary_faces[current_face].handler_type);
2099 if(user->boundary_faces[current_face].mathematical_type == INLET){
2101 Cmpnts inlet_velocity = {0.0,0.0,0.0};
2102 PetscBool found;
2103 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vx",&inlet_velocity.x,&found); CHKERRQ(ierr);
2104 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vy",&inlet_velocity.y,&found); CHKERRQ(ierr);
2105 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vz",&inlet_velocity.z,&found); CHKERRQ(ierr);
2106 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f,%.4f,%.4f]\n",
2107 face_name_width, face_str, bc_type_str, bc_handler_type_str,inlet_velocity.x,inlet_velocity.y,inlet_velocity.z); CHKERRQ(ierr);
2108 } else if(user->boundary_faces[current_face].handler_type == BC_HANDLER_INLET_PARABOLIC){
2109 PetscReal v_max = 0.0;
2110 PetscBool found;
2111 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"v_max",&v_max,&found); CHKERRQ(ierr);
2112 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - v_max=%.4f\n",
2113 face_name_width, face_str, bc_type_str, bc_handler_type_str, v_max); CHKERRQ(ierr);
2114 }
2116 PetscReal flux;
2117 PetscBool trimflag,foundflux,foundtrimflag;
2118 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"target_flux",&flux,&foundflux); CHKERRQ(ierr);
2119 ierr = GetBCParamBool(user->boundary_faces[current_face].params,"apply_trim",&trimflag,&foundtrimflag); CHKERRQ(ierr);
2120 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f] - %s\n",
2121 face_name_width, face_str, bc_type_str, bc_handler_type_str,flux,trimflag?"Trim Ucont":"No Trim"); CHKERRQ(ierr);
2122 } else{
2123 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s\n",
2124 face_name_width, face_str, bc_type_str,bc_handler_type_str); CHKERRQ(ierr);
2125 }
2126 }
2127 }
2128 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------------------------------------------------\n"); CHKERRQ(ierr);
2129 ierr = PetscPrintf(PETSC_COMM_SELF, " Run Mode : %s\n", simCtx->OnlySetup ? "SETUP ONLY" : "Full Simulation"); CHKERRQ(ierr);
2130 ierr = PetscPrintf(PETSC_COMM_SELF, " Start Time : %.4f\n", (double)StartTime); CHKERRQ(ierr);
2131 ierr = PetscPrintf(PETSC_COMM_SELF, " Timestep Size : %.4f\n", (double)simCtx->dt); CHKERRQ(ierr);
2132 ierr = PetscPrintf(PETSC_COMM_SELF, " Starting Step : %d\n", StartStep); CHKERRQ(ierr);
2133 ierr = PetscPrintf(PETSC_COMM_SELF, " Total Steps to Run : %d\n", StepsToRun); CHKERRQ(ierr);
2134 ierr = PetscPrintf(PETSC_COMM_SELF, " Ending Step : %d\n", StartStep + StepsToRun); CHKERRQ(ierr);
2135 if (simCtx->tiout > 0) {
2136 ierr = PetscPrintf(PETSC_COMM_SELF, " Field/Restart Cadence : every %d step(s)\n", simCtx->tiout); CHKERRQ(ierr);
2137 } else {
2138 ierr = PetscPrintf(PETSC_COMM_SELF, " Field/Restart Cadence : DISABLED\n"); CHKERRQ(ierr);
2139 }
2140 ierr = PetscPrintf(PETSC_COMM_SELF, " Immersed Boundary : %s\n", simCtx->immersed ? "ENABLED" : "DISABLED"); CHKERRQ(ierr);
2141 if (simCtx->walltimeGuardEnabled) {
2142 ierr = PetscPrintf(
2143 PETSC_COMM_SELF,
2144 " Runtime Walltime Guard : %s (warmup=%d, multiplier=%.2f, min=%.1f s, alpha=%.2f)\n",
2145 simCtx->walltimeGuardActive ? "ENABLED" : "CONFIGURED BUT INACTIVE",
2146 simCtx->walltimeGuardWarmupSteps,
2147 (double)simCtx->walltimeGuardMultiplier,
2148 (double)simCtx->walltimeGuardMinSeconds,
2149 (double)simCtx->walltimeGuardEstimatorAlpha
2150 ); CHKERRQ(ierr);
2151 } else {
2152 ierr = PetscPrintf(PETSC_COMM_SELF, " Runtime Walltime Guard : DISABLED\n"); CHKERRQ(ierr);
2153 }
2154 ierr = PetscPrintf(PETSC_COMM_SELF, " Number of MPI Processes : %d\n", num_mpi_procs); CHKERRQ(ierr);
2155 ierr = PetscPrintf(PETSC_COMM_WORLD," Number of Particles : %d\n", total_num_particles); CHKERRQ(ierr);
2156 if (simCtx->np > 0) {
2157 const char *particle_init_str = ParticleInitializationToString(simCtx->ParticleInitialization);
2158
2159 if (simCtx->particleConsoleOutputFreq > 0) {
2160 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Console Cadence : every %d step(s)\n", simCtx->particleConsoleOutputFreq); CHKERRQ(ierr);
2161 } else {
2162 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Console Cadence : DISABLED\n"); CHKERRQ(ierr);
2163 }
2164 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Log Row Sampling : every %d particle(s)\n", simCtx->LoggingFrequency); CHKERRQ(ierr);
2165 if (simCtx->StartStep > 0) {
2166 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Restart Mode : %s\n", simCtx->particleRestartMode); CHKERRQ(ierr);
2167 }
2168 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Initialization Mode: %s\n", particle_init_str); CHKERRQ(ierr);
2169 ierr = PetscPrintf(PETSC_COMM_SELF, " Interpolation Method : %s\n",
2170 simCtx->interpolationMethod == INTERP_TRILINEAR ? "Trilinear (direct cell-center)" : "CornerAveraged (legacy)"); CHKERRQ(ierr);
2173 if (user->inletFaceDefined) {
2174 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : %s (Enum Val: %d)\n", BCFaceToString(user->identifiedInletBCFace), user->identifiedInletBCFace); CHKERRQ(ierr);
2175 } else {
2176 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : --- (No INLET face identified)\n"); CHKERRQ(ierr);
2177 }
2178 }
2179 }
2180 if(strcmp(simCtx->eulerianSource,"solve")==0 || strcmp(simCtx->eulerianSource,"load")==0){
2181 const char* field_init_str = FieldInitializationToString(simCtx->FieldInitialization);
2182 ierr = PetscPrintf(PETSC_COMM_WORLD," Reynolds Number : %le\n", simCtx->ren); CHKERRQ(ierr);
2183 //ierr = PetscPrintf(PETSC_COMM_WORLD," Von-Neumann Number : %le\n", simCtx->vnn); CHKERRQ(ierr);
2184 if(strcmp(simCtx->eulerianSource,"solve")==0){
2185 //ierr = PetscPrintf(PETSC_COMM_WORLD," Stanton Number : %le\n", simCtx->st); CHKERRQ(ierr);
2186 ierr = PetscPrintf(PETSC_COMM_WORLD," Momentum Equation Solver : %s\n", MomentumSolverTypeToString(simCtx->mom_solver_type)); CHKERRQ(ierr);
2187 ierr = PetscPrintf(PETSC_COMM_WORLD," Initial Pseudo-CFL : %le\n", simCtx->pseudo_cfl); CHKERRQ(ierr);
2188 ierr = PetscPrintf(PETSC_COMM_WORLD," Large Eddy Simulation Model : %s\n", LESModelToString(simCtx->les)); CHKERRQ(ierr);
2189 }
2190 ierr = PetscPrintf(PETSC_COMM_SELF, " Field Initialization Mode : %s\n", field_init_str); CHKERRQ(ierr);
2191 if (simCtx->FieldInitialization == 1) {
2192 ierr = PetscPrintf(PETSC_COMM_SELF, " Constant Velocity : x - %.4f, y - %.4f, z - %.4f \n", (double)simCtx->InitialConstantContra.x,(double)simCtx->InitialConstantContra.y,(double)simCtx->InitialConstantContra.z ); CHKERRQ(ierr);
2193 }
2194 } else if(strcmp(simCtx->eulerianSource,"analytical")==0){
2195 ierr = PetscPrintf(PETSC_COMM_WORLD," Analytical Solution Type : %s\n", simCtx->AnalyticalSolutionType); CHKERRQ(ierr);
2196 }
2197 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2198 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2199 }
2200 PetscFunctionReturn(0);
2201}
PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
Internal helper implementation: GetBCParamReal().
Definition io.c:387
PetscErrorCode GetBCParamBool(BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found)
Internal helper implementation: GetBCParamBool().
Definition io.c:408
const char * BCHandlerTypeToString(BCHandlerType handler_type)
Converts a BCHandlerType enum to its string representation.
Definition logging.c:771
const char * FieldInitializationToString(PetscInt FieldInitialization)
Helper function to convert FieldInitialization to a string representation.
Definition logging.c:687
const char * BCFaceToString(BCFace face)
Helper function to convert BCFace enum to a string representation.
Definition logging.c:669
const char * BCTypeToString(BCType type)
Helper function to convert BCType enum to a string representation.
Definition logging.c:751
const char * LESModelToString(LESModelType LESFlag)
Helper function to convert LES Flag to a string representation.
Definition logging.c:720
const char * MomentumSolverTypeToString(MomentumSolverType SolverFlag)
Helper function to convert Momentum Solver flag to a string representation.
Definition logging.c:736
const char * ParticleInitializationToString(ParticleInitializationType ParticleInitialization)
Helper function to convert ParticleInitialization to a string representation.
Definition logging.c:703
@ INLET
Definition variables.h:258
UserCtx * user
Definition variables.h:528
PetscBool inletFaceDefined
Definition variables.h:830
BoundaryFaceConfig boundary_faces[6]
Definition variables.h:829
BCFace identifiedInletBCFace
Definition variables.h:831
PetscBool walltimeGuardActive
Definition variables.h:782
@ PARTICLE_INIT_SURFACE_RANDOM
Random placement on the inlet face.
Definition variables.h:509
@ PARTICLE_INIT_SURFACE_EDGES
Deterministic placement at inlet face edges.
Definition variables.h:512
PetscReal StartTime
Definition variables.h:657
UserMG usermg
Definition variables.h:764
@ BC_HANDLER_INLET_PARABOLIC
Definition variables.h:277
@ BC_HANDLER_INLET_CONSTANT_VELOCITY
Definition variables.h:276
@ BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX
Definition variables.h:286
PetscInt FieldInitialization
Definition variables.h:696
PetscReal ren
Definition variables.h:691
BCHandlerType handler_type
Definition variables.h:337
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
PetscReal dt
Definition variables.h:658
PetscInt StepsToRun
Definition variables.h:654
PetscInt StartStep
Definition variables.h:653
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
PetscBool OnlySetup
Definition variables.h:659
PetscScalar x
Definition variables.h:101
InterpolationMethod interpolationMethod
Definition variables.h:744
char particleRestartMode[16]
Definition variables.h:745
BoundingBox * bboxlist
Definition variables.h:742
BC_Param * params
Definition variables.h:338
char eulerianSource[PETSC_MAX_PATH_LEN]
Definition variables.h:663
PetscBool walltimeGuardEnabled
Definition variables.h:781
ParticleInitializationType ParticleInitialization
Definition variables.h:743
PetscScalar z
Definition variables.h:101
@ INTERP_TRILINEAR
Definition variables.h:522
PetscInt mglevels
Definition variables.h:535
char AnalyticalSolutionType[PETSC_MAX_PATH_LEN]
Definition variables.h:676
PetscInt particleConsoleOutputFreq
Definition variables.h:656
Cmpnts InitialConstantContra
Definition variables.h:697
PetscScalar y
Definition variables.h:101
PetscMPIInt size
Definition variables.h:647
MGCtx * mgctx
Definition variables.h:538
BCType mathematical_type
Definition variables.h:336
BoundingBox bbox
Definition variables.h:822
MomentumSolverType mom_solver_type
Definition variables.h:683
PetscInt immersed
Definition variables.h:673
PetscReal pseudo_cfl
Definition variables.h:691
PetscInt LoggingFrequency
Definition variables.h:769
BCFace
Identifies the six logical faces of a structured computational block.
Definition variables.h:244
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
A 3D point or vector with PetscScalar components.
Definition variables.h:100
User-defined context containing data specific to a single computational grid level.
Definition variables.h:811
Here is the call graph for this function:
Here is the caller graph for this function:

◆ StringToBCFace()

PetscErrorCode StringToBCFace ( const char *  str,
BCFace *  face_out 
)

Converts a face-token string (e.g., "-Xi", "+Eta") to the internal BCFace enum.

Parameters
[in] str — Input token from configuration.
[out] face_out — Parsed enum value on success.
Returns
PetscErrorCode 0 on success, non-zero for invalid tokens or null pointers.

Converts a face-token string (e.g., "-Xi", "+Eta") to the internal BCFace enum.

Local to this translation unit.

Definition at line 317 of file io.c.

317 {
318 if (strcasecmp(str, "-Xi") == 0) *face_out = BC_FACE_NEG_X;
319 else if (strcasecmp(str, "+Xi") == 0) *face_out = BC_FACE_POS_X;
320 else if (strcasecmp(str, "-Eta") == 0) *face_out = BC_FACE_NEG_Y;
321 else if (strcasecmp(str, "+Eta") == 0) *face_out = BC_FACE_POS_Y;
322 else if (strcasecmp(str, "-Zeta") == 0) *face_out = BC_FACE_NEG_Z;
323 else if (strcasecmp(str, "+Zeta") == 0) *face_out = BC_FACE_POS_Z;
324 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown face specifier: %s", str);
325 return 0;
326}
@ BC_FACE_NEG_X
Definition variables.h:245
@ BC_FACE_POS_Z
Definition variables.h:247
@ BC_FACE_POS_Y
Definition variables.h:246
@ BC_FACE_NEG_Z
Definition variables.h:247
@ BC_FACE_POS_X
Definition variables.h:245
@ BC_FACE_NEG_Y
Definition variables.h:246
Here is the caller graph for this function:

◆ StringToBCType()

PetscErrorCode StringToBCType ( const char *  str,
BCType *  type_out 
)

Converts a mathematical BC type string (e.g., "PERIODIC", "WALL") to BCType.

Parameters
[in] str — Input token from configuration.
[out] type_out — Parsed enum value on success.
Returns
PetscErrorCode 0 on success, non-zero for invalid tokens or null pointers.

Converts a mathematical BC type string (e.g., "PERIODIC", "WALL") to BCType.

Local to this translation unit.

Definition at line 332 of file io.c.

332 {
333 if (strcasecmp(str, "WALL") == 0) *type_out = WALL;
334 else if (strcasecmp(str, "SYMMETRY") == 0) *type_out = SYMMETRY;
335 else if (strcasecmp(str, "INLET") == 0) *type_out = INLET;
336 else if (strcasecmp(str, "OUTLET") == 0) *type_out = OUTLET;
337 else if (strcasecmp(str, "PERIODIC") == 0) *type_out = PERIODIC;
338 // ... add other BCTypes here ...
339 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Type string: %s", str);
340 return 0;
341}
@ SYMMETRY
Definition variables.h:256
@ OUTLET
Definition variables.h:257
@ PERIODIC
Definition variables.h:260
@ WALL
Definition variables.h:254
Here is the caller graph for this function:

◆ StringToBCHandlerType()

PetscErrorCode StringToBCHandlerType ( const char *  str,
BCHandlerType *  handler_out 
)

Converts a BC handler token (implementation strategy) to BCHandlerType.

Parameters
[in] str — Input handler token from configuration.
[out] handler_out — Parsed enum value on success.
Returns
PetscErrorCode 0 on success, non-zero for invalid tokens or null pointers.

Converts a BC handler token (implementation strategy) to BCHandlerType.

Local to this translation unit.

Definition at line 347 of file io.c.

347 {
348 if (strcasecmp(str, "noslip") == 0) *handler_out = BC_HANDLER_WALL_NOSLIP;
349 else if (strcasecmp(str, "constant_velocity") == 0) *handler_out = BC_HANDLER_INLET_CONSTANT_VELOCITY;
350 else if (strcasecmp(str, "conservation") == 0) *handler_out = BC_HANDLER_OUTLET_CONSERVATION;
351 else if (strcasecmp(str, "parabolic") == 0) *handler_out = BC_HANDLER_INLET_PARABOLIC;
352 else if (strcasecmp(str,"geometric") == 0) *handler_out = BC_HANDLER_PERIODIC_GEOMETRIC;
353 else if (strcasecmp(str,"constant_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX;
354 else if (strcasecmp(str,"initial_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX;
355 // ... add other BCHandlerTypes here ...
356 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Handler string: %s", str);
357 return 0;
358}
@ BC_HANDLER_PERIODIC_GEOMETRIC
Definition variables.h:284
@ BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX
Definition variables.h:287
@ BC_HANDLER_WALL_NOSLIP
Definition variables.h:273
@ BC_HANDLER_OUTLET_CONSERVATION
Definition variables.h:282
Here is the caller graph for this function:

◆ ValidateBCHandlerForBCType()

PetscErrorCode ValidateBCHandlerForBCType ( BCType  type,
BCHandlerType  handler 
)

Validates that a selected handler is compatible with a mathematical BC type.

Parameters
[in] type — Mathematical BC type (e.g., WALL, PERIODIC).
[in] handler — Selected handler implementation enum.
Returns
PetscErrorCode 0 if compatible, non-zero if the combination is invalid.

Validates that a selected handler is compatible with a mathematical BC type.

Local to this translation unit.

Definition at line 364 of file io.c.

364 {
365 switch (type) {
366 case OUTLET:
367 if(handler != BC_HANDLER_OUTLET_CONSERVATION) return PETSC_ERR_ARG_WRONG;
368 break;
369 case WALL:
370 if (handler != BC_HANDLER_WALL_NOSLIP && handler != BC_HANDLER_WALL_MOVING) return PETSC_ERR_ARG_WRONG;
371 break;
372 case INLET:
373 if (handler != BC_HANDLER_INLET_CONSTANT_VELOCITY && handler != BC_HANDLER_INLET_PARABOLIC) return PETSC_ERR_ARG_WRONG;
374 break;
375 case PERIODIC:
376 if(handler != BC_HANDLER_PERIODIC_GEOMETRIC && handler != BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX && handler != BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX) return PETSC_ERR_ARG_WRONG;
377 // ... add other validation cases here ...
378 default: break;
379 }
380 return 0; // Combination is valid
381}
@ BC_HANDLER_WALL_MOVING
Definition variables.h:274
Here is the caller graph for this function:

◆ FreeBC_ParamList()

void FreeBC_ParamList ( BC_Param *  head)

Frees an entire linked list of boundary-condition parameters.

Parameters
[in,out] head — Head pointer of the BC_Param list to destroy.

Frees an entire linked list of boundary-condition parameters.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
FreeBC_ParamList()

Definition at line 302 of file io.c.

302 {
303 BC_Param *current = head;
304 while (current != NULL) {
305 BC_Param *next = current->next;
306 PetscFree(current->key);
307 PetscFree(current->value);
308 PetscFree(current);
309 current = next;
310 }
311}
struct BC_Param_s * next
Definition variables.h:307
char * key
Definition variables.h:305
char * value
Definition variables.h:306
A node in a linked list for storing key-value parameters from the bcs.dat file.
Definition variables.h:304
Here is the caller graph for this function:

◆ GetBCParamReal()

PetscErrorCode GetBCParamReal ( BC_Param *  params,
const char *  key,
PetscReal *  value_out,
PetscBool *  found 
)

Searches a BC_Param linked list for a key and returns its value as a double.

Parameters
[in] params — The head of the BC_Param linked list.
[in] key — The key to search for (case-insensitive).
[out] value_out — The found value, converted to a PetscReal.
[out] found — Set to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
Returns
0 on success.

Searches a BC_Param linked list for a key and returns its value as a double.

Local to this translation unit.

Definition at line 387 of file io.c.

387 {
388 *found = PETSC_FALSE;
389 *value_out = 0.0;
390 if (!key) return 0; // No key to search for
391
392 BC_Param *current = params;
393 while (current) {
394 if (strcasecmp(current->key, key) == 0) {
395 *value_out = atof(current->value);
396 *found = PETSC_TRUE;
397 return 0; // Found it, we're done
398 }
399 current = current->next;
400 }
401 return 0; // It's not an error to not find the key.
402}
Here is the caller graph for this function:

◆ GetBCParamBool()

PetscErrorCode GetBCParamBool ( BC_Param *  params,
const char *  key,
PetscBool *  value_out,
PetscBool *  found 
)

Searches a BC_Param linked list for a key and returns its value as a bool.

Parameters
[in] params — The head of the BC_Param linked list.
[in] key — The key to search for (case-insensitive).
[out] value_out — The found value, converted to a PetscBool.
[out] found — Set to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
Returns
0 on success.

Searches a BC_Param linked list for a key and returns its value as a bool.

Local to this translation unit.

Definition at line 408 of file io.c.

408 {
409 *found = PETSC_FALSE;
410 *value_out = PETSC_FALSE;
411 if (!key) return 0; // No key to search for
412
413 BC_Param *current = params;
414 while (current) {
415 if (strcasecmp(current->key, key) == 0) {
416 // Key was found.
417 *found = PETSC_TRUE;
418
419 // Check the value string. Default to FALSE if the value is NULL or doesn't match a "true" string.
420 if (current->value &&
421 (strcasecmp(current->value, "true") == 0 ||
422 strcmp(current->value, "1") == 0 ||
423 strcasecmp(current->value, "yes") == 0))
424 {
425 *value_out = PETSC_TRUE;
426 } else {
427 *value_out = PETSC_FALSE;
428 }
429 return 0; // Found it, we're done
430 }
431 current = current->next;
432 }
433 return 0; // It's not an error to not find the key.
434}
Here is the caller graph for this function:

◆ ParseAllBoundaryConditions()

PetscErrorCode ParseAllBoundaryConditions ( UserCtx *  user,
const char *  bcs_input_filename 
)

Parses the boundary conditions file to configure the type, handler, and any associated parameters for all 6 global faces of the domain.

This function performs the following steps:

  1. On MPI rank 0, it reads the specified configuration file line-by-line.
  2. It parses each line for <Face> <Type> <Handler> [param=value]... format.
  3. It validates the parsed strings and stores the configuration, including a linked list of parameters, in a temporary array.
  4. It then serializes this configuration and broadcasts it to all other MPI ranks.
  5. All ranks (including rank 0) then deserialize the broadcasted data to populate their local user->boundary_faces array identically.
  6. It also sets the particle inlet lookup fields in UserCtx.
Parameters
[in,out]userThe main UserCtx struct where the final configuration for all ranks will be stored.
[in]bcs_input_filenameThe path to the boundary conditions configuration file.
Returns
PetscErrorCode 0 on success, error code on failure.

Parses the boundary conditions file to configure the type, handler, and any associated parameters for all 6 global faces of the domain.

Local to this translation unit.

Definition at line 447 of file io.c.

// Body of ParseAllBoundaryConditions() (io.c:447). Rank 0 reads and parses the
// BC configuration file, then the per-face configuration (two enums plus a
// key=value parameter linked list) is serialized over MPI_Bcast so that every
// rank ends up with an identical user->boundary_faces[] array.
448{
449 PetscErrorCode ierr;
450 PetscMPIInt rank;
451
452 // Temporary storage for rank 0 to build the configuration before broadcasting.
453 BoundaryFaceConfig configs_rank0[6];
454
455 PetscFunctionBeginUser;
457 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
458
459 if (rank == 0) {
460 FILE *file;
461 char line_buffer[1024];
462
463 // Initialize the temporary config array with safe defaults on rank 0.
464 for (int i = 0; i < 6; i++) {
465 configs_rank0[i].face_id = (BCFace)i;
466 configs_rank0[i].mathematical_type = WALL;
467 configs_rank0[i].handler_type = BC_HANDLER_WALL_NOSLIP;
468 configs_rank0[i].params = NULL;
469 configs_rank0[i].handler = NULL; // Handler object is not created here.
470 }
471
472 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing BC configuration from '%s' on rank 0... \n", bcs_input_filename);
473 file = fopen(bcs_input_filename, "r");
474 if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_input_filename);
475
476 while (fgets(line_buffer, sizeof(line_buffer), file)) {
477 char *current_pos = line_buffer;
478 while (isspace((unsigned char)*current_pos)) current_pos++; // Skip leading whitespace
479 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n' || *current_pos == '\r') continue;
480
481 char *face_str = strtok(current_pos, " \t\n\r");
482 char *type_str = strtok(NULL, " \t\n\r");
483 char *handler_str = strtok(NULL, " \t\n\r");
484
485 if (!face_str || !type_str || !handler_str) {
486 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed line in bcs.dat, skipping: %s \n", line_buffer);
487 continue;
488 }
489
490 BCFace face_enum;
491 BCType type_enum;
492 BCHandlerType handler_enum;
493 const char* handler_name_for_log;
494
495 // --- Convert strings to enums and validate ---
496 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
497 ierr = StringToBCType(type_str, &type_enum); CHKERRQ(ierr);
498 ierr = StringToBCHandlerType(handler_str, &handler_enum); CHKERRQ(ierr);
499 ierr = ValidateBCHandlerForBCType(type_enum, handler_enum);
500 if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Validation failed: Handler '%s' is not valid for Type '%s' on Face '%s'.\n", handler_str, type_str, face_str);
501
502 // Store the core types for the corresponding face
503 configs_rank0[face_enum].mathematical_type = type_enum;
504 configs_rank0[face_enum].handler_type = handler_enum;
505 handler_name_for_log = BCHandlerTypeToString(handler_enum); // Assumes this utility exists
506 LOG_ALLOW(GLOBAL, LOG_DEBUG, " Parsed Face '%s': Type=%s, Handler=%s \n", face_str, type_str, handler_name_for_log);
507
508 // --- Parse optional key=value parameters for this face ---
509 FreeBC_ParamList(configs_rank0[face_enum].params); // Clear any previous (default) params
510 configs_rank0[face_enum].params = NULL;
511 BC_Param **param_next_ptr = &configs_rank0[face_enum].params; // Pointer to the 'next' pointer to build the list
512
513 char* token;
514 while ((token = strtok(NULL, " \t\n\r")) != NULL) {
515 char* equals_ptr = strchr(token, '=');
516 if (!equals_ptr) {
517 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed parameter '%s' on face '%s', skipping. \n", token, face_str);
518 continue;
519 }
520
521 *equals_ptr = '\0'; // Temporarily split the string at '=' to separate key and value
522 char* key_str = token;
523 char* value_str = equals_ptr + 1;
524
525 BC_Param *new_param;
526 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
527 ierr = PetscStrallocpy(key_str, &new_param->key); CHKERRQ(ierr);
528 ierr = PetscStrallocpy(value_str, &new_param->value); CHKERRQ(ierr);
529 new_param->next = NULL;
530
531 *param_next_ptr = new_param;
532 param_next_ptr = &new_param->next;
533 LOG_ALLOW(GLOBAL, LOG_TRACE, " - Found param: [%s] = [%s] \n", new_param->key, new_param->value);
534 }
535 }
536 fclose(file);
537 }
538
539 // =========================================================================
540 // BROADCASTING THE CONFIGURATION FROM RANK 0
541 // =========================================================================
542 // This is a critical step to ensure all processes have the same configuration.
543
544 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d broadcasting/receiving BC configuration.\n", rank);
545
546 for (int i = 0; i < 6; i++) {
547 // --- Broadcast simple enums ---
548 if (rank == 0) {
549 user->boundary_faces[i] = configs_rank0[i]; // Rank 0 populates its final struct
550 }
// NOTE(review): the enum fields below are broadcast as MPI_INT — this assumes
// the enums are int-sized, which is typical for C; confirm on unusual ABIs.
551 ierr = MPI_Bcast(&user->boundary_faces[i].mathematical_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
552 ierr = MPI_Bcast(&user->boundary_faces[i].handler_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
553
554 // --- Serialize and Broadcast the parameter linked list ---
555 PetscInt n_params = 0;
556 if (rank == 0) { // On rank 0, count the number of parameters to send
557 for (BC_Param *p = user->boundary_faces[i].params; p; p = p->next) n_params++;
558 }
// NOTE(review): n_params is a PetscInt but is broadcast as MPI_INT; with a
// 64-bit-index PETSc build the datatype sizes differ. MPIU_INT (as used for
// the broadcast in DeterminePeriodicity) would always match — confirm.
559 ierr = MPI_Bcast(&n_params, 1, MPI_INT, 0, PETSC_COMM_WORLD);CHKERRQ(ierr);
560
561 if (rank != 0) { // Non-root ranks need to receive and build the list
562 FreeBC_ParamList(user->boundary_faces[i].params); // Ensure list is empty before building
563 user->boundary_faces[i].params = NULL;
564 }
565
566 BC_Param **param_next_ptr = &user->boundary_faces[i].params;
567
568 for (int j = 0; j < n_params; j++) {
569 char key_buf[256] = {0}, val_buf[256] = {0};
570 if (rank == 0) {
571 // On rank 0, navigate to the j-th param and copy its data to buffers
// (O(j) walk per parameter — fine for the short per-face lists expected here;
// the zero-initialized 256-byte buffers plus strncpy(...,255) guarantee NUL
// termination of both strings.)
572 BC_Param *p = user->boundary_faces[i].params;
573 for (int k = 0; k < j; k++) p = p->next;
574 strncpy(key_buf, p->key, 255);
575 strncpy(val_buf, p->value, 255);
576 }
577
578 ierr = MPI_Bcast(key_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
579 ierr = MPI_Bcast(val_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
580
581 if (rank != 0) {
582 // On non-root ranks, deserialize: create a new node and append it
583 BC_Param *new_param;
584 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
585 ierr = PetscStrallocpy(key_buf, &new_param->key); CHKERRQ(ierr);
586 ierr = PetscStrallocpy(val_buf, &new_param->value); CHKERRQ(ierr);
587 new_param->next = NULL;
588 *param_next_ptr = new_param;
589 param_next_ptr = &new_param->next;
590 } else {
591 // On rank 0, just advance the pointer for the next iteration
592 param_next_ptr = &((*param_next_ptr)->next);
593 }
594 }
595 user->boundary_faces[i].face_id = (BCFace)i; // Ensure face_id is set on all ranks
596 }
597
598 // --- Set particle inlet lookup fields used by the particle system ---
599 user->inletFaceDefined = PETSC_FALSE;
600 for (int i=0; i<6; i++) {
601
602 if (user->boundary_faces[i].mathematical_type == INLET && !user->inletFaceDefined) {
603 user->inletFaceDefined = PETSC_TRUE;
604 user->identifiedInletBCFace = (BCFace)i;
605 LOG_ALLOW(GLOBAL, LOG_INFO, "Inlet face for particle initialization identified as Face %d.\n", i);
606 break; // Found the first one, stop looking
607 }
608 }
609
610
611 if (rank == 0) {
612 // Rank 0 can now free the linked lists it created for the temporary storage.
613 // As written, user->boundary_faces was populated directly on rank 0, so no extra free is needed.
614 // for(int i=0; i<6; i++) FreeBC_ParamList(configs_rank0[i].params); // This would be needed if we used configs_rank0 exclusively
615 }
616
618 PetscFunctionReturn(0);
619}
PetscErrorCode StringToBCHandlerType(const char *str, BCHandlerType *handler_out)
Internal helper implementation: StringToBCHandlerType().
Definition io.c:347
PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler)
Internal helper implementation: ValidateBCHandlerForBCType().
Definition io.c:364
PetscErrorCode StringToBCFace(const char *str, BCFace *face_out)
Internal helper implementation: StringToBCFace().
Definition io.c:317
void FreeBC_ParamList(BC_Param *head)
Implementation of FreeBC_ParamList().
Definition io.c:302
PetscErrorCode StringToBCType(const char *str, BCType *type_out)
Internal helper implementation: StringToBCType().
Definition io.c:332
BCType
Defines the general mathematical/physical Category of a boundary.
Definition variables.h:251
BCHandlerType
Defines the specific computational "strategy" for a boundary handler.
Definition variables.h:271
BoundaryCondition * handler
Definition variables.h:339
Holds the complete configuration for one of the six boundary faces.
Definition variables.h:334
Here is the call graph for this function:
Here is the caller graph for this function:

◆ DeterminePeriodicity()

PetscErrorCode DeterminePeriodicity ( SimCtx *  simCtx)

Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.

This is a lightweight pre-parser intended to be called before DMDA creation. It ensures that the periodicity setting is consistent across all blocks, which is a physical requirement for the domain.

  1. It collectively verifies that the mandatory BCS file for each block exists.
  2. On MPI rank 0, it then iterates through the files.
  3. For each line, it attempts to convert the type string to a BCType enum using the standard StringToBCType helper.
  4. If the conversion is successful AND the type is PERIODIC, it flags the corresponding face.
  5. If the conversion fails (e.g., for "WALL", "INLET", etc.), the error is cleared and the line is simply ignored, as it's not relevant to periodicity.
  6. It validates consistency (e.g., -Xi and +Xi match) and ensures all block files specify the same global periodicity.
  7. It broadcasts the final three flags (as integers 0 or 1) to all MPI ranks.
  8. All ranks update the i_periodic, j_periodic, and k_periodic fields in their SimCtx.
Parameters
[in,out]simCtxThe master SimCtx struct, containing the bcs_files list and where the final periodicity flags will be stored.
Returns
PetscErrorCode 0 on success, error code on failure.

Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.

Local to this translation unit.

Definition at line 634 of file io.c.

635{
636 PetscErrorCode ierr;
637 PetscMPIInt rank;
638 PetscInt periodic_flags[3] = {0, 0, 0}; // Index 0:I, 1:J, 2:K
639
640 PetscFunctionBeginUser;
641 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
642
643 // --- Part 1: Collectively verify all BCS files exist before proceeding ---
644 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
645 const char *bcs_filename = simCtx->bcs_files[bi];
646 if (!bcs_filename) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "BCS filename for block %d is not set in SimCtx.", bi);
647 char desc_buf[256];
648 PetscBool file_exists;
649 snprintf(desc_buf, sizeof(desc_buf), "BCS file for block %d", bi);
650 ierr = VerifyPathExistence(bcs_filename, PETSC_FALSE, PETSC_FALSE, desc_buf, &file_exists); CHKERRQ(ierr);
651 }
652
653 // --- Part 2: Rank 0 does the parsing, since we know all files exist ---
654 if (rank == 0) {
655 PetscBool global_is_periodic[3] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};
656 PetscBool is_set = PETSC_FALSE;
657
658 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
659 const char *bcs_filename = simCtx->bcs_files[bi];
660 FILE *file = fopen(bcs_filename, "r");
661
662 PetscBool face_is_periodic[6] = {PETSC_FALSE};
663 char line_buffer[1024];
664
665 while (fgets(line_buffer, sizeof(line_buffer), file)) {
666 char *current_pos = line_buffer;
667 while (isspace((unsigned char)*current_pos)) current_pos++;
668 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n') continue;
669
670 // --- Tokenize the line exactly like the main parser ---
671 char *face_str = strtok(current_pos, " \t\n\r");
672 char *type_str = strtok(NULL, " \t\n\r");
673
674 // If the line doesn't have at least two tokens, we can't determine the type.
675 if (!face_str || !type_str) continue;
676
677 // --- Perform a direct, non-erroring check on the mathematical type string ---
678 if (strcasecmp(type_str, "PERIODIC") == 0) {
679 BCFace face_enum;
680 // A malformed face string on a periodic line IS a fatal error.
681 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
682 face_is_periodic[face_enum] = PETSC_TRUE;
683 }
684 // Any other type_str (e.g., "WALL", "INLET") is correctly and silently ignored.
685 }
686 fclose(file);
687
688 // --- Validate consistency within this file ---
689 if (face_is_periodic[BC_FACE_NEG_X] != face_is_periodic[BC_FACE_POS_X])
690 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent X-periodicity in file '%s' for block %d. Both -Xi and +Xi must be periodic or neither.", bcs_filename, bi);
691 if (face_is_periodic[BC_FACE_NEG_Y] != face_is_periodic[BC_FACE_POS_Y])
692 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Y-periodicity in file '%s' for block %d. Both -Eta and +Eta must be periodic or neither.", bcs_filename, bi);
693 if (face_is_periodic[BC_FACE_NEG_Z] != face_is_periodic[BC_FACE_POS_Z])
694 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Z-periodicity in file '%s' for block %d. Both -Zeta and +Zeta must be periodic or neither.", bcs_filename, bi);
695
696 PetscBool local_is_periodic[3] = {face_is_periodic[BC_FACE_NEG_X], face_is_periodic[BC_FACE_NEG_Y], face_is_periodic[BC_FACE_NEG_Z]};
697
698 // --- Validate consistency across block files ---
699 if (!is_set) {
700 global_is_periodic[0] = local_is_periodic[0];
701 global_is_periodic[1] = local_is_periodic[1];
702 global_is_periodic[2] = local_is_periodic[2];
703 is_set = PETSC_TRUE;
704 } else {
705 if (global_is_periodic[0] != local_is_periodic[0] || global_is_periodic[1] != local_is_periodic[1] || global_is_periodic[2] != local_is_periodic[2]) {
706 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
707 "Periodicity mismatch between blocks. Block 0 requires (I:%d, J:%d, K:%d), but block %d (file '%s') has (I:%d, J:%d, K:%d).",
708 (int)global_is_periodic[0], (int)global_is_periodic[1], (int)global_is_periodic[2],
709 bi, bcs_filename,
710 (int)local_is_periodic[0], (int)local_is_periodic[1], (int)local_is_periodic[2]);
711 }
712 }
713 } // end loop over blocks
714
715 periodic_flags[0] = (global_is_periodic[0]) ? 1 : 0;
716 periodic_flags[1] = (global_is_periodic[1]) ? 1 : 0;
717 periodic_flags[2] = (global_is_periodic[2]) ? 1 : 0;
718
719 LOG_ALLOW(GLOBAL, LOG_INFO, "Global periodicity determined: I-periodic=%d, J-periodic=%d, K-periodic=%d\n",
720 periodic_flags[0], periodic_flags[1], periodic_flags[2]);
721 }
722
723 // --- Part 3: Broadcast the final flags from rank 0 to all other ranks ---
724 ierr = MPI_Bcast(periodic_flags, 3, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
725
726 // --- All ranks now update their SimCtx ---
727 simCtx->i_periodic = periodic_flags[0];
728 simCtx->j_periodic = periodic_flags[1];
729 simCtx->k_periodic = periodic_flags[2];
730
731 PetscFunctionReturn(0);
732}
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
Internal helper implementation: VerifyPathExistence().
Definition io.c:738
char ** bcs_files
Definition variables.h:720
PetscInt k_periodic
Definition variables.h:713
PetscInt i_periodic
Definition variables.h:713
PetscInt j_periodic
Definition variables.h:713
Here is the call graph for this function:
Here is the caller graph for this function:

◆ TrimWhitespace()

void TrimWhitespace ( char *  str)

Helper function to trim leading/trailing whitespace from a string.

Parameters
strThe string to trim in-place.

Helper function to trim leading/trailing whitespace from a string.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
TrimWhitespace()

Definition at line 38 of file io.c.

/**
 * Trims leading and trailing whitespace from a string, in place.
 *
 * @param str  NUL-terminated string to trim; NULL is a safe no-op.
 *
 * Fix over previous version: for an empty input string the old code computed
 * `str + strlen(str) - 1`, i.e. a pointer one *before* the start of the
 * array, which is undefined behavior. The length-based rewrite below never
 * forms an out-of-bounds pointer and handles empty and all-whitespace
 * strings correctly (both become "").
 */
void TrimWhitespace(char *str) {
    if (!str) return;

    // Advance past any leading whitespace.
    char *start = str;
    while (isspace((unsigned char)*start)) {
        start++;
    }

    // Shrink the length until the last remaining character is non-whitespace.
    size_t len = strlen(start);
    while (len > 0 && isspace((unsigned char)start[len - 1])) {
        len--;
    }
    start[len] = '\0';

    // If there was leading whitespace, slide the trimmed text to the front.
    if (start != str) {
        memmove(str, start, len + 1); // +1 copies the NUL terminator
    }
}
Here is the caller graph for this function:

◆ ParsePostProcessingSettings()

PetscErrorCode ParsePostProcessingSettings ( SimCtx *  simCtx)

Initializes post-processing settings from a config file and command-line overrides.

This function establishes the configuration for a post-processing run by:

  1. Setting hardcoded default values in the PostProcessParams struct.
  2. Reading a configuration file to override the defaults.
  3. Parsing command-line options (-startTime, -endTime, etc.) which can override both the defaults and the file settings.
Parameters
simCtxThe pointer to the simulation context that contains the postprocessing file and struct.
Returns
PetscErrorCode

Initializes post-processing settings from a config file and command-line overrides.

Local to this translation unit.

Definition at line 2209 of file io.c.

// Body of ParsePostProcessingSettings() (io.c:2209). Establishes the
// post-processing configuration in three layers: hard-coded defaults, then a
// key=value config file, then command-line options — each layer overriding
// the previous one.
2210{
2211 FILE *file;
2212 char line[1024];
2213 PetscBool startTimeSet, endTimeSet, timeStepSet;
2214
2215 PetscFunctionBeginUser;
2217
2218 if (!simCtx || !simCtx->pps) {
2219 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "SimCtx or its pps member is NULL in ParsePostProcessingSettings.");
2220 }
2221
2222 char *configFile = simCtx->PostprocessingControlFile;
2223 PostProcessParams *pps = simCtx->pps;
2224
2225
2226 // --- 1. Set Sane Defaults First ---
2227 pps->startTime = 0;
2228 pps->endTime = 0;
2229 pps->timeStep = 1;
2230 pps->outputParticles = PETSC_FALSE;
2231 pps->particle_output_freq = simCtx->LoggingFrequency; // Default to logging frequency;
2232 strcpy(pps->process_pipeline, "");
2233 strcpy(pps->output_fields_instantaneous, "Ucat,P");
2234 strcpy(pps->output_fields_averaged, "");
2235 strcpy(pps->output_prefix, "Field");
2236 strcpy(pps->particle_output_prefix,"Particle");
2237 strcpy(pps->particle_fields,"velocity,CellID,weight,pid");
2238 strcpy(pps->particle_pipeline,"");
2239 strncpy(pps->statistics_pipeline, "", MAX_PIPELINE_LENGTH - 1);
2240 strncpy(pps->statistics_output_prefix, "Stats", MAX_FILENAME_LENGTH - 1);
2241 strcpy(pps->particleExt,"dat"); // The input file format for particles.
2242 strcpy(pps->eulerianExt,"dat"); // The input file format for Eulerian fields.
2243 pps->reference[0] = pps->reference[1] = pps->reference[2] = 1;
2244 strncpy(pps->source_dir, simCtx->output_dir, sizeof(pps->source_dir) - 1);
2245 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0'; // Ensure null-termination
2246
2247 // --- 2. Parse the Configuration File (overrides defaults) ---
// A missing file is non-fatal: defaults plus command-line overrides are used.
2248 file = fopen(configFile, "r");
2249 if (file) {
2250 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing post-processing config file: %s\n", configFile);
2251 while (fgets(line, sizeof(line), file)) {
2252 char *key, *value, *comment;
2253 comment = strchr(line, '#'); if (comment) *comment = '\0';
2254 TrimWhitespace(line); if (strlen(line) == 0) continue;
2255 key = strtok(line, "="); value = strtok(NULL, "=");
2256 if (key && value) {
2257 TrimWhitespace(key); TrimWhitespace(value);
2258 if (strcmp(key, "startTime") == 0) pps->startTime = atoi(value);
2259 else if (strcmp(key, "endTime") == 0) pps->endTime = atoi(value);
2260 else if (strcmp(key, "timeStep") == 0) pps->timeStep = atoi(value);
2261 else if (strcmp(key, "output_particles") == 0) {
2262 if (strcasecmp(value, "true") == 0) pps->outputParticles = PETSC_TRUE;
2263 }
2264 else if (strcasecmp(key, "process_pipeline") == 0) {
2265 strncpy(pps->process_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2266 pps->process_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0'; // Ensure null-termination
2267 } else if (strcasecmp(key, "output_fields_instantaneous") == 0) {
2268 strncpy(pps->output_fields_instantaneous, value, MAX_FIELD_LIST_LENGTH - 1);
2270 } else if (strcasecmp(key, "output_fields_averaged") == 0) {
2271 strncpy(pps->output_fields_averaged, value, MAX_FIELD_LIST_LENGTH - 1);
2273 } else if (strcasecmp(key, "output_prefix") == 0) {
2274 strncpy(pps->output_prefix, value, MAX_FILENAME_LENGTH - 1);
2275 pps->output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
2276 } else if (strcasecmp(key, "particle_output_prefix") == 0) {
2277 strncpy(pps->particle_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2279 } else if (strcasecmp(key, "particle_fields_instantaneous") == 0) {
2280 strncpy(pps->particle_fields, value, MAX_FIELD_LIST_LENGTH - 1);
2281 pps->particle_fields[MAX_FIELD_LIST_LENGTH - 1] = '\0';
2282 } else if (strcasecmp(key, "particle_pipeline") == 0) {
2283 strncpy(pps->particle_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2284 pps->particle_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0';
2285 } else if (strcasecmp(key, "particle_output_freq") == 0) {
2286 pps->particle_output_freq = atoi(value);
2287 } else if (strcasecmp(key, "statistics_pipeline") == 0) {
2288 strncpy(pps->statistics_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2290 } else if (strcasecmp(key, "statistics_output_prefix") == 0) {
2291 strncpy(pps->statistics_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2293 } else if (strcasecmp(key, "particleExt") == 0) {
2294 strncpy(pps->particleExt, value, sizeof(pps->particleExt) - 1);
2295 pps->particleExt[sizeof(pps->particleExt) - 1] = '\0';
2296 } else if (strcasecmp(key, "eulerianExt") == 0) {
2297 strncpy(pps->eulerianExt, value, sizeof(pps->eulerianExt) - 1);
2298 pps->eulerianExt[sizeof(pps->eulerianExt) - 1] = '\0';
2299 } else if (strcmp(key, "reference_ip") == 0) {pps->reference[0] = atoi(value);
2300 } else if (strcmp(key, "reference_jp") == 0) {pps->reference[1] = atoi(value);
2301 } else if (strcmp(key, "reference_kp") == 0) {pps->reference[2] = atoi(value);
2302 } else if (strcasecmp(key, "source_directory") == 0) {
2303 strncpy(pps->source_dir, value, sizeof(pps->source_dir) - 1);
2304 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0';
2305 } else {
2306 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown key '%s' in post-processing config file. Ignoring.\n", key);
2307 }
2308 // Add parsing for pipeline, fields, etc. in later phases
2309 }
2310 }
2311 fclose(file);
2312 } else {
2313 LOG_ALLOW(GLOBAL, LOG_WARNING, "Could not open post-processing config file '%s'. Using defaults and command-line overrides.\n", configFile);
2314 }
2315
2316 // --- 3. Parse Command-Line Options (overrides file settings and defaults) ---
// NOTE(review): the PetscErrorCode returned by each PetscOptionsGet* call
// below is discarded (no ierr/CHKERRQ), unlike elsewhere in this file —
// consider checking them.
2317 PetscOptionsGetInt(NULL, NULL, "-startTime", &pps->startTime, &startTimeSet);
2318 PetscOptionsGetInt(NULL, NULL, "-endTime", &pps->endTime, &endTimeSet);
2319 PetscOptionsGetInt(NULL, NULL, "-timeStep", &pps->timeStep, &timeStepSet);
2320 PetscOptionsGetBool(NULL, NULL, "-output_particles", &pps->outputParticles, NULL);
2321
// NOTE(review): since the default endTime above is 0, this -1 sentinel only
// takes effect when the user explicitly passes '-endTime -1' (or sets it in
// the config file) — confirm that is the intended contract.
2322 if(pps->endTime==-1){
2323 pps->endTime = simCtx->StartStep + simCtx->StepsToRun; // Total steps if endTime is set to -1.
2324 }
2325
2326 // If only startTime is given on command line, run for a single step
2327 if (startTimeSet && !endTimeSet) {
2328 pps->endTime = pps->startTime;
2329 }
2330
2331 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-processing configured to run from t=%d to t=%d with step %d. Particle output: %s.\n",
2332 pps->startTime, pps->endTime, pps->timeStep, pps->outputParticles ? "TRUE" : "FALSE");
2333
2334 LOG_ALLOW(GLOBAL, LOG_INFO, "Process Pipeline: %s\n", pps->process_pipeline);
2335 LOG_ALLOW(GLOBAL, LOG_INFO, "Instantaneous Output Fields: %s\n", pps->output_fields_instantaneous);
2336 LOG_ALLOW(GLOBAL, LOG_INFO, "Output Prefix: %s\n", pps->output_prefix);
2337 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Prefix: %s\n", pps->particle_output_prefix);
2338 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Fields: %s\n", pps->particle_fields);
2339 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Pipeline: %s\n", pps->particle_pipeline);
2340 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Frequency: %d\n", pps->particle_output_freq);
2341 LOG_ALLOW(GLOBAL, LOG_INFO, "Averaged Output Fields (reserved): %s\n", pps->output_fields_averaged);
2342 LOG_ALLOW(GLOBAL, LOG_INFO, "Post input extensions: Eulerian='.%s', Particle='.%s'\n", pps->eulerianExt, pps->particleExt);
2343
2345 PetscFunctionReturn(0);
2346}
void TrimWhitespace(char *str)
Implementation of TrimWhitespace().
Definition io.c:38
char statistics_output_prefix[256]
basename for CSV output, e.g.
Definition variables.h:575
char particle_output_prefix[256]
Definition variables.h:570
char output_prefix[256]
Definition variables.h:567
#define MAX_FIELD_LIST_LENGTH
Definition variables.h:544
char output_fields_averaged[1024]
Definition variables.h:566
#define MAX_PIPELINE_LENGTH
Definition variables.h:543
PetscInt reference[3]
Definition variables.h:582
PetscInt timeStep
Definition variables.h:560
#define MAX_FILENAME_LENGTH
Definition variables.h:545
char statistics_pipeline[1024]
e.g.
Definition variables.h:574
char output_fields_instantaneous[1024]
Definition variables.h:565
char particle_pipeline[1024]
Definition variables.h:568
char process_pipeline[1024]
Definition variables.h:564
PetscInt particle_output_freq
Definition variables.h:571
char particle_fields[1024]
Definition variables.h:569
PetscBool outputParticles
Definition variables.h:561
PetscInt startTime
Definition variables.h:558
char PostprocessingControlFile[PETSC_MAX_PATH_LEN]
Definition variables.h:797
Holds all configuration parameters for a post-processing run.
Definition variables.h:553
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ParseScalingInformation()

PetscErrorCode ParseScalingInformation ( SimCtx *  simCtx)

Parses physical scaling parameters from command-line options.

This function reads the reference length, velocity, and density from the PETSc options database (provided via -scaling_L_ref, etc.). It populates the simCtx->scaling struct and calculates the derived reference pressure. It sets default values of 1.0 for a fully non-dimensional case if the options are not provided.

Parameters
[in,out]simCtxThe simulation context whose 'scaling' member will be populated.
Returns
PetscErrorCode

Parses physical scaling parameters from command-line options.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/io.h.

See also
ParseScalingInformation()

Definition at line 2357 of file io.c.

// Body of ParseScalingInformation() (io.c:2357). Populates simCtx->scaling
// with reference length/velocity/density (defaulting to 1.0 for a fully
// non-dimensional run) and derives the reference pressure P_ref = rho*U^2.
2358{
2359 PetscErrorCode ierr;
2360 PetscBool flg;
2361
2362 PetscFunctionBeginUser;
2364
2365 if (!simCtx) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "SimCtx is NULL in ParseScalingInformation");
2366
2367 // --- 1. Set default values to 1.0 ---
2368 // This represents a purely non-dimensional run if no scaling is provided.
2369 simCtx->scaling.L_ref = 1.0;
2370 simCtx->scaling.U_ref = 1.0;
2371 simCtx->scaling.rho_ref = 1.0;
2372
2373 // --- 2. Read overrides from the command line / control file ---
// NOTE(review): 'flg' is overwritten by each call and never read; the
// defaults above already cover the absent-option case, so this appears
// intentional — confirm before removing the variable.
2374 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_L_ref", &simCtx->scaling.L_ref, &flg); CHKERRQ(ierr);
2375 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_U_ref", &simCtx->scaling.U_ref, &flg); CHKERRQ(ierr);
2376 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_rho_ref", &simCtx->scaling.rho_ref, &flg); CHKERRQ(ierr);
2377
2378 // --- 3. Calculate derived scaling factors ---
2379 // Check for division by zero to be safe, though U_ref should be positive.
2380 if (simCtx->scaling.U_ref <= 0.0) {
2381 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Reference velocity U_ref must be positive. Got %g", (double)simCtx->scaling.U_ref);
2382 }
2383 simCtx->scaling.P_ref = simCtx->scaling.rho_ref * simCtx->scaling.U_ref * simCtx->scaling.U_ref;
2384
2385 // --- 4. Log the final, effective scales for verification ---
2386 LOG(GLOBAL, LOG_INFO, "---------------- Physical Scales Initialized -----------------\n");
2387 LOG(GLOBAL, LOG_INFO, " L_ref: %.4f, U_ref: %.4f, rho_ref: %.4f, P_ref: %.4f\n",
2388 simCtx->scaling.L_ref, simCtx->scaling.U_ref, simCtx->scaling.rho_ref, simCtx->scaling.P_ref);
2389 LOG(GLOBAL, LOG_INFO, "--------------------------------------------------------------\n");
2390
2392 PetscFunctionReturn(0);
2393}
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:83
PetscReal L_ref
Definition variables.h:625
ScalingCtx scaling
Definition variables.h:707
PetscReal P_ref
Definition variables.h:628
PetscReal rho_ref
Definition variables.h:627
PetscReal U_ref
Definition variables.h:626
Here is the caller graph for this function: