PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
io.h File Reference

Public interface for data input/output routines. More...

#include "variables.h"
#include "logging.h"
#include "Boundaries.h"
Include dependency graph for io.h:
This graph shows which files directly or indirectly include this file:

Go to the source code of this file.

Functions

PetscErrorCode ReadGridGenerationInputs (UserCtx *user)
 Parses command-line options for a programmatically generated grid for a SINGLE block.
 
PetscErrorCode ReadGridFile (UserCtx *user)
 Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
 
PetscErrorCode VerifyPathExistence (const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
 A parallel-safe helper to verify the existence of a generic file or directory path.
 
PetscErrorCode ReadSimulationFields (UserCtx *user, PetscInt ti)
 Reads binary field data for velocity, pressure, and other required vectors.
 
PetscErrorCode ReadFieldData (UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
 Reads data for a specific field from a file into the provided vector.
 
PetscErrorCode ReadStatisticalFields (UserCtx *user, PetscInt ti)
 Reads statistical fields used for time-averaged simulations.
 
PetscErrorCode ReadLESFields (UserCtx *user, PetscInt ti)
 Reads LES-related fields used in turbulence modeling.
 
PetscErrorCode ReadRANSFields (UserCtx *user, PetscInt ti)
 Reads RANS-related fields for turbulence modeling.
 
PetscErrorCode WriteFieldData (UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
 Writes data from a specific PETSc vector to a file.
 
PetscErrorCode WriteSimulationFields (UserCtx *user)
 Writes simulation fields to files.
 
PetscErrorCode WriteStatisticalFields (UserCtx *user)
 Writes statistical fields for averaging purposes.
 
PetscErrorCode WriteLESFields (UserCtx *user)
 Writes LES-related fields.
 
PetscErrorCode WriteRANSFields (UserCtx *user)
 Writes RANS-related fields.
 
PetscErrorCode WriteSwarmField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Writes data from a specific field in a PETSc Swarm to a file.
 
PetscErrorCode WriteSwarmIntField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Writes integer data from a specific PETSc Swarm field to a file.
 
PetscErrorCode WriteAllSwarmFields (UserCtx *user)
 Writes a predefined set of PETSc Swarm fields to files.
 
PetscInt ReadDataFileToArray (const char *filename, double **data_out, PetscInt *Nout, MPI_Comm comm)
 
PetscInt CreateVTKFileFromMetadata (const char *filename, const VTKMetaData *meta, MPI_Comm comm)
 
PetscErrorCode VecToArrayOnRank0 (Vec inVec, PetscInt *N, double **arrayOut)
 Gathers the contents of a distributed PETSc Vec into a single array on rank 0.
 
PetscErrorCode SwarmFieldToArrayOnRank0 (DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
 Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
 
PetscErrorCode ReadSwarmField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Reads data from a file into a specified field of a PETSc DMSwarm.
 
PetscErrorCode ReadSwarmIntField (UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
 Reads integer swarm data by using ReadFieldData and casting the result.
 
PetscErrorCode ReadAllSwarmFields (UserCtx *user, PetscInt ti)
 Reads multiple fields (positions, velocity, CellID, and weight) into a DMSwarm.
 
PetscErrorCode ReadPositionsFromFile (PetscInt timeIndex, UserCtx *user, double **coordsArray, PetscInt *Ncoords)
 Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
 
PetscErrorCode ReadFieldDataToRank0 (PetscInt timeIndex, const char *fieldName, UserCtx *user, double **scalarArray, PetscInt *Nscalars)
 Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
 
PetscErrorCode DisplayBanner (SimCtx *simCtx)
 Displays a structured banner summarizing the simulation configuration.
 
PetscErrorCode StringToBCFace (const char *str, BCFace *face_out)
 Converts a string representation of a face to a BCFace enum.
 
PetscErrorCode StringToBCType (const char *str, BCType *type_out)
 Converts a string representation of a BC type to a BCType enum.
 
PetscErrorCode StringToBCHandlerType (const char *str, BCHandlerType *handler_out)
 Converts a string representation of a handler to a BCHandlerType enum.
 
PetscErrorCode ValidateBCHandlerForBCType (BCType type, BCHandlerType handler)
 Validates that a specific handler is compatible with a general BC type.
 
void FreeBC_ParamList (BC_Param *head)
 Frees the memory allocated for a linked list of BC_Param structs.
 
PetscErrorCode GetBCParamReal (BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
 Searches a BC_Param linked list for a key and returns its value as a double.
 
PetscErrorCode GetBCParamBool (BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found)
 Searches a BC_Param linked list for a key and returns its value as a bool.
 
PetscErrorCode ParseAllBoundaryConditions (UserCtx *user, const char *bcs_input_filename)
 Parses the boundary conditions file to configure the type, handler, and any associated parameters for all 6 global faces of the domain.
 
PetscErrorCode DeterminePeriodicity (SimCtx *simCtx)
 Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.
 
void TrimWhitespace (char *str)
 Helper function to trim leading/trailing whitespace from a string.
 
PetscErrorCode ParsePostProcessingSettings (SimCtx *simCtx)
 Initializes post-processing settings from a config file and command-line overrides.
 
PetscErrorCode ParseScalingInformation (SimCtx *simCtx)
 Parses physical scaling parameters from command-line options.
 

Detailed Description

Public interface for data input/output routines.

This header declares functions responsible for parsing grid geometry information, either from command-line options for programmatically generated grids or by reading the header of a grid definition file.

Definition in file io.h.

Function Documentation

◆ ReadGridGenerationInputs()

PetscErrorCode ReadGridGenerationInputs (UserCtx *user)

Parses command-line options for a programmatically generated grid for a SINGLE block.

This function reads all per-block array options related to grid geometry, such as dimensions (-im), domain bounds (-xMins), and stretching ratios (-rxs). It then populates the fields of the provided UserCtx struct using its internal block index user->_this.

Parameters
user — Pointer to the UserCtx for a specific block. The function will populate the geometric fields (IM, Min_X, rx, etc.) within this struct.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

This function is responsible for reading all per-block array options related to grid geometry, such as dimensions (-im), domain bounds (-xMins, -xMaxs), and stretching ratios (-rxs). It reads the entire array for each option, then uses the block index stored in user->_this to select the correct value and populate the fields of the provided UserCtx struct.

Parameters
user — Pointer to the UserCtx for a specific block. The function will populate the geometric fields (IM, Min_X, rx, etc.) within this struct.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

Definition at line 77 of file io.c.

78{
79 PetscErrorCode ierr;
80 SimCtx *simCtx = user->simCtx;
81 PetscInt nblk = simCtx->block_number;
82 PetscInt block_index = user->_this;
83 PetscBool found;
84
85 // Temporary arrays to hold the parsed values for ALL blocks
86 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL, *cgrids = NULL;
87 PetscReal *xMins = NULL, *xMaxs = NULL, *rxs = NULL;
88 PetscReal *yMins = NULL, *yMaxs = NULL, *rys = NULL;
89 PetscReal *zMins = NULL, *zMaxs = NULL, *rzs = NULL;
90
91 PetscFunctionBeginUser;
93
94 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Reading generated grid inputs for block %d.\n", simCtx->rank, block_index);
95
96 if (block_index >= nblk) {
97 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block index %d is out of range for nblk=%d", block_index, nblk);
98 }
99
100 // --- Allocate temporary storage for all array options ---
101 ierr = PetscMalloc4(nblk, &IMs, nblk, &JMs, nblk, &KMs, nblk, &cgrids); CHKERRQ(ierr);
102 ierr = PetscMalloc6(nblk, &xMins, nblk, &xMaxs, nblk, &rxs, nblk, &yMins, nblk, &yMaxs, nblk, &rys); CHKERRQ(ierr);
103 ierr = PetscMalloc3(nblk, &zMins, nblk, &zMaxs, nblk, &rzs); CHKERRQ(ierr);
104
105 // --- Set default values for the temporary arrays ---
106 for (PetscInt i = 0; i < nblk; ++i) {
107 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10; cgrids[i] = 0;
108 xMins[i] = 0.0; xMaxs[i] = 1.0; rxs[i] = 1.0;
109 yMins[i] = 0.0; yMaxs[i] = 1.0; rys[i] = 1.0;
110 zMins[i] = 0.0; zMaxs[i] = 1.0; rzs[i] = 1.0;
111 }
112
113 // --- Parse the array options from the command line / control file ---
114 PetscInt count;
115 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
116 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
117 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
118 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMins", xMins, &count, &found); CHKERRQ(ierr);
119 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMaxs", xMaxs, &count, &found); CHKERRQ(ierr);
120 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rxs", rxs, &count, &found); CHKERRQ(ierr);
121 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMins", yMins, &count, &found); CHKERRQ(ierr);
122 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMaxs", yMaxs, &count, &found); CHKERRQ(ierr);
123 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rys", rys, &count, &found); CHKERRQ(ierr);
124 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMins", zMins, &count, &found); CHKERRQ(ierr);
125 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMaxs", zMaxs, &count, &found); CHKERRQ(ierr);
126 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rzs", rzs, &count, &found); CHKERRQ(ierr);
127 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-cgrids", cgrids, &count, &found); CHKERRQ(ierr);
128
129 // --- Assign the parsed values to the specific UserCtx struct passed in ---
130 user->IM = IMs[block_index];
131 user->JM = JMs[block_index];
132 user->KM = KMs[block_index];
133 user->Min_X = xMins[block_index];
134 user->Max_X = xMaxs[block_index];
135 user->rx = rxs[block_index];
136 user->Min_Y = yMins[block_index];
137 user->Max_Y = yMaxs[block_index];
138 user->ry = rys[block_index];
139 user->Min_Z = zMins[block_index];
140 user->Max_Z = zMaxs[block_index];
141 user->rz = rzs[block_index];
142 user->cgrid = cgrids[block_index];
143
144 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d grid generation inputs set: IM=%d, JM=%d, KM=%d\n",
145 simCtx->rank, block_index, user->IM, user->JM, user->KM);
146 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d bounds: X=[%.2f, %.2f], Y=[%.2f, %.2f], Z=[%.2f, %.2f]\n",
147 simCtx->rank, block_index, user->Min_X, user->Max_X, user->Min_Y, user->Max_Y, user->Min_Z, user->Max_Z);
148
149 // --- Clean up temporary storage ---
150 ierr = PetscFree4(IMs, JMs, KMs, cgrids); CHKERRQ(ierr);
151 ierr = PetscFree6(xMins, xMaxs, rxs, yMins, yMaxs, rys); CHKERRQ(ierr);
152 ierr = PetscFree3(zMins, zMaxs, rzs); CHKERRQ(ierr);
153
155 PetscFunctionReturn(0);
156}
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:45
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:200
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:746
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:32
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:737
PetscMPIInt rank
Definition variables.h:588
PetscInt cgrid
Definition variables.h:743
PetscInt block_number
Definition variables.h:649
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:731
PetscReal Min_X
Definition variables.h:738
PetscInt KM
Definition variables.h:737
PetscInt _this
Definition variables.h:741
PetscReal ry
Definition variables.h:742
PetscReal Max_Y
Definition variables.h:738
PetscReal rz
Definition variables.h:742
PetscInt JM
Definition variables.h:737
PetscReal Min_Z
Definition variables.h:738
PetscReal Max_X
Definition variables.h:738
PetscReal Min_Y
Definition variables.h:738
PetscInt IM
Definition variables.h:737
PetscReal rx
Definition variables.h:742
PetscReal Max_Z
Definition variables.h:738
The master context for the entire simulation.
Definition variables.h:585
Here is the caller graph for this function:

◆ ReadGridFile()

PetscErrorCode ReadGridFile (UserCtx *user)

Sets grid dimensions from a file for a SINGLE block using a one-time read cache.

This function uses a static-variable pattern to ensure the grid file header is read only once, collectively, by all processes on the first call. Subsequent calls simply retrieve the pre-loaded and broadcasted data for the specified block.

Parameters
user — Pointer to the UserCtx for a specific block. This function will populate the IM, JM, and KM fields.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

This function uses a static-variable pattern to ensure the grid file header is read only once, collectively, by all processes. The first time any process calls this function, it triggers a collective operation where rank 0 reads the file and broadcasts the dimensions for all blocks. This data is stored in static "cached" arrays.

On every call (including the first), the function retrieves the dimensions for the specific block (identified by user->_this) from the cached arrays and populates the IM, JM, and KM fields of the provided UserCtx.

Parameters
user — Pointer to the UserCtx for a specific block.
Returns
PetscErrorCode 0 on success, or a PETSc error code on failure.

Definition at line 177 of file io.c.

178{
179 PetscErrorCode ierr;
180 SimCtx *simCtx = user->simCtx;
181 PetscInt block_index = user->_this;
182
183 PetscFunctionBeginUser;
185
186 // --- One-Time Read and Broadcast Logic ---
188 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "First call to ReadGridFile. Reading and broadcasting grid file header from '%s'...\n", simCtx->grid_file);
189 PetscMPIInt rank = simCtx->rank;
190 PetscInt nblk = simCtx->block_number;
191
192 if (rank == 0) {
193 FILE *fd = fopen(simCtx->grid_file, "r");
194 if (!fd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open file: %s", simCtx->grid_file);
195
196 fscanf(fd, "%d\n", &g_nblk_from_file);
197 // ---- Read first token; decide whether it is the header or nblk ----
198 char firstTok[32] = {0};
199 if (fscanf(fd, "%31s", firstTok) != 1)
200 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Empty grid file: %s", simCtx->grid_file);
201
202 if (strcmp(firstTok, "PICGRID") == 0) {
203 // Header is present – read nblk from the next line
204 if (fscanf(fd, "%d", &g_nblk_from_file) != 1)
205 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected number of blocks after \"PICGRID\" in %s", simCtx->grid_file);
206 } else {
207 // No header – the token we just read is actually nblk
208 g_nblk_from_file = (PetscInt)strtol(firstTok, NULL, 10);
209 }
210 if (g_nblk_from_file != nblk) {
211 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Mismatch: -nblk is %d but grid file specifies %d blocks.", nblk, g_nblk_from_file);
212 }
213
214 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
215 for (PetscInt i = 0; i < nblk; ++i) {
216 if (fscanf(fd, "%d %d %d\n", &g_IMs_from_file[i], &g_JMs_from_file[i], &g_KMs_from_file[i]) != 3) {
217 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected 3 integers for block %d in %s", i, simCtx->grid_file);
218 }
219 }
220 fclose(fd);
221 }
222
223 // Broadcast nblk to verify (optional, good practice)
224 ierr = MPI_Bcast(&g_nblk_from_file, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
225
226 // Allocate on other ranks before receiving the broadcast
227 if (rank != 0) {
228 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
229 }
230
231 // Broadcast the data arrays
232 ierr = MPI_Bcast(g_IMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
233 ierr = MPI_Bcast(g_JMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
234 ierr = MPI_Bcast(g_KMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
235
236 g_file_has_been_read = PETSC_TRUE;
237 LOG_ALLOW(GLOBAL, LOG_INFO, "Grid file header read and broadcast complete.\n");
238 }
239
240 // --- Per-Block Assignment Logic (runs on every call) ---
241 user->IM = g_IMs_from_file[block_index];
242 user->JM = g_JMs_from_file[block_index];
243 user->KM = g_KMs_from_file[block_index];
244
245 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Set file inputs for Block %d: IM=%d, JM=%d, KM=%d\n",
246 simCtx->rank, block_index, user->IM, user->JM, user->KM);
247
249 PetscFunctionReturn(0);
250}
static PetscInt * g_IMs_from_file
Caches the IM dimensions for all blocks read from the grid file.
Definition io.c:19
static PetscInt g_nblk_from_file
Stores the number of blocks read from the grid file.
Definition io.c:17
static PetscBool g_file_has_been_read
A flag to ensure the grid file is read only once.
Definition io.c:25
static PetscInt * g_KMs_from_file
Caches the KM dimensions for all blocks read from the grid file.
Definition io.c:23
static PetscInt * g_JMs_from_file
Caches the JM dimensions for all blocks read from the grid file.
Definition io.c:21
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
----— DEBUG ---------------------------------------— #define LOG_ALLOW(scope, level,...
Definition logging.h:267
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:46
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:31
char grid_file[PETSC_MAX_PATH_LEN]
Definition variables.h:654
Here is the caller graph for this function:

◆ VerifyPathExistence()

PetscErrorCode VerifyPathExistence ( const char *  path,
PetscBool  is_dir,
PetscBool  is_optional,
const char *  description,
PetscBool *  exists 
)

A parallel-safe helper to verify the existence of a generic file or directory path.

This function centralizes the logic for checking arbitrary paths. Only Rank 0 performs the filesystem check, and the result is broadcast to all other processes. This ensures collective and synchronized decision-making across all ranks. It is intended for configuration files, source directories, etc., where the path is known completely.

Parameters
[in] path — The full path to the file or directory to check.
[in] is_dir — PETSC_TRUE if checking for a directory, PETSC_FALSE for a file.
[in] is_optional — PETSC_TRUE if the path is optional (results in a warning), PETSC_FALSE if mandatory (results in an error).
[in] description — A user-friendly description of the path for logging (e.g., "Grid file").
[out] exists — The result of the check (identical on all ranks).
Returns
PetscErrorCode

Definition at line 762 of file io.c.

763{
764 PetscErrorCode ierr;
765 PetscMPIInt rank;
766 MPI_Comm comm = PETSC_COMM_WORLD;
767
768 PetscFunctionBeginUser;
769 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
770
771 if (rank == 0) {
772 if (is_dir) {
773 ierr = PetscTestDirectory(path, 'r', exists); CHKERRQ(ierr);
774 } else {
775 ierr = PetscTestFile(path, 'r', exists); CHKERRQ(ierr);
776 }
777
778 if (!(*exists)) {
779 if (is_optional) {
780 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s not found at: %s (using defaults/ignoring).\n", description, path);
781 } else {
782 LOG_ALLOW(GLOBAL, LOG_ERROR, "Mandatory %s not found at: %s\n", description, path);
783 }
784 } else {
785 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found %s: %s\n", description, path);
786 }
787 }
788
789 // Broadcast the result from Rank 0
790 PetscMPIInt exists_int = (rank == 0) ? (PetscMPIInt)(*exists) : 0;
791 ierr = MPI_Bcast(&exists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
792 *exists = (PetscBool)exists_int;
793
794 // Collective error for mandatory files
795 if (!(*exists) && !is_optional) {
796 SETERRQ(comm, PETSC_ERR_FILE_OPEN, "Mandatory %s not found. Rank 0 expected it at '%s'. Check path and permissions.", description, path);
797 }
798
799 PetscFunctionReturn(0);
800}
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:28
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
Here is the caller graph for this function:

◆ ReadSimulationFields()

PetscErrorCode ReadSimulationFields ( UserCtx *user,
PetscInt  ti 
)

Reads binary field data for velocity, pressure, and other required vectors.

Reads contravariant velocity (Ucont) from vfield, Cartesian velocity (Ucat) from ufield, pressure (P), node state (Nvert_o), and optionally statistical quantities, LES, and RANS fields from binary files. Logs missing files but continues execution.

Parameters
[in,out] user — Pointer to the UserCtx structure containing the simulation context.
[in] ti — The timestep at which the simulation field data needs to be read.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads binary field data for velocity, pressure, and other required vectors.

This function reads contravariant velocity, Cartesian velocity, pressure, and node state fields from their respective binary files. It also conditionally reads LES, RANS, and statistical fields if they are enabled.

Parameters
[in,out] user — Pointer to the UserCtx structure containing simulation context.
[in] ti — Time index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1231 of file io.c.

1232{
1233 PetscErrorCode ierr;
1234
1235 SimCtx *simCtx = user->simCtx;
1236 const char *source_path = NULL;
1237
1238 if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
1239 source_path = simCtx->pps->source_dir;
1240 } else if(simCtx->exec_mode == EXEC_MODE_SOLVER){
1241 source_path = simCtx->restart_dir;
1242 } else{
1243 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1244 }
1245
1246 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read simulation fields.\n");
1247
1248 // Set the current I/O directory context
1249 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer), "%s/%s",source_path,simCtx->euler_subdir); CHKERRQ(ierr);
1250 simCtx->current_io_directory = simCtx->_io_context_buffer;
1251
1252 // Read Cartesian velocity field
1253 ierr = ReadFieldData(user, "ufield", user->Ucat, ti, "dat"); CHKERRQ(ierr);
1254
1255 // Read contravariant velocity field
1256 ierr = ReadFieldData(user, "vfield", user->Ucont, ti, "dat"); CHKERRQ(ierr);
1257
1258 // Read pressure field
1259 ierr = ReadFieldData(user, "pfield", user->P, ti, "dat"); CHKERRQ(ierr);
1260
1261 // Read node state field (nvert)
1262 ierr = ReadFieldData(user, "nvfield", user->Nvert, ti, "dat"); CHKERRQ(ierr);
1263
1264 LOG_ALLOW(GLOBAL,LOG_INFO,"Successfully read all mandatory fields. \n");
1265
1266 if(simCtx->np>0){
1267 // Read Particle Count field
1268 if(!user->ParticleCount){
1269 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ParticleCount Vec is NULL but np>0");
1270 }
1271 ierr = ReadOptionalField(user, "ParticleCount", "Particle Count", user->ParticleCount, ti, "dat"); CHKERRQ(ierr);
1272
1273 ierr = ReadOptionalField(user, "psifield", "Scalar Psi Field", user->Psi, ti, "dat"); CHKERRQ(ierr);
1274 }
1275 else{
1276 LOG_ALLOW(GLOBAL, LOG_INFO, "No particles in simulation, skipping Particle fields reading.\n");
1277 }
1278 // Process LES fields if enabled
1279 if (simCtx->les) {
1280 ierr = ReadLESFields(user,ti); CHKERRQ(ierr);
1281 }
1282
1283 // Process RANS fields if enabled
1284 if (simCtx->rans) {
1285 ierr = ReadRANSFields(user,ti); CHKERRQ(ierr);
1286 }
1287
1288 // Process statistical fields if averaging is enabled
1289 if (simCtx->averaging) {
1290 ierr = ReadStatisticalFields(user,ti); CHKERRQ(ierr);
1291 }
1292
1293 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1294
1295 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading simulation fields.\n");
1296
1297 return 0;
1298}
PetscErrorCode ReadStatisticalFields(UserCtx *user, PetscInt ti)
Reads statistical fields for averaging purposes.
Definition io.c:1311
PetscErrorCode ReadRANSFields(UserCtx *user, PetscInt ti)
Reads RANS-related fields.
Definition io.c:1369
PetscErrorCode ReadLESFields(UserCtx *user, PetscInt ti)
Reads LES-related fields.
Definition io.c:1338
PetscErrorCode ReadFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Reads data for a specific field from a file into the provided vector.
Definition io.c:995
static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
Checks for and reads an optional Eulerian field from a file into a Vec.
Definition io.c:875
char euler_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:607
PetscInt rans
Definition variables.h:669
PetscInt np
Definition variables.h:676
PetscBool averaging
Definition variables.h:673
Vec Ucont
Definition variables.h:755
char * current_io_directory
Definition variables.h:611
char source_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:501
Vec Ucat
Definition variables.h:755
Vec ParticleCount
Definition variables.h:800
PostProcessParams * pps
Definition variables.h:715
@ EXEC_MODE_SOLVER
Definition variables.h:558
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:559
char _io_context_buffer[PETSC_MAX_PATH_LEN]
Definition variables.h:610
PetscInt les
Definition variables.h:669
Vec Nvert
Definition variables.h:755
ExecutionMode exec_mode
Definition variables.h:603
char restart_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:605
Vec Psi
Definition variables.h:801
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadFieldData()

PetscErrorCode ReadFieldData ( UserCtx *user,
const char *  field_name,
Vec  field_vec,
PetscInt  ti,
const char *  ext 
)

Reads data for a specific field from a file into the provided vector.

This function uses the field name to construct the file path and reads the data from the corresponding file into the provided PETSc vector.

Parameters
[in] user — Pointer to the UserCtx structure containing simulation context.
[in] field_name — Name of the field (e.g., "ufield", "vfield", "pfield").
[out] field_vec — PETSc vector to store the field data.
[in] ti — Time index for constructing the file name.
[in] ext — File extension (e.g., "dat").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 995 of file io.c.

1000{
1001 PetscErrorCode ierr;
1002 char filename[PETSC_MAX_PATH_LEN];
1003 MPI_Comm comm;
1004 PetscMPIInt rank,size;
1005 SimCtx *simCtx = user->simCtx;
1006
1007
1008 PetscFunctionBeginUser;
1010
1011 if(!simCtx->current_io_directory){
1012 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling ReadFieldData().");
1013 }
1014
1015
1016 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1017 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1018 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1019
1020 const char *source_path = NULL;
1021 source_path = simCtx->current_io_directory;
1022
1023 if(!source_path){
1024 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "source_path was not set for the current execution mode.");
1025 }
1026 /* ---------------------------------------------------------------------
1027 * Compose <path-to-file>/<field_name><step with 5 digits>_0.<ext>
1028 * (all restart files are written by rank-0 with that naming scheme).
1029 * ------------------------------------------------------------------ */
1030 ierr = PetscSNPrintf(filename,sizeof(filename),
1031 "%s/%s%05" PetscInt_FMT "_0.%s",
1032 source_path,field_name,ti,ext);CHKERRQ(ierr);
1033
1035 "Attempting to read <%s> on rank %d/%d\n",
1036 filename,(int)rank,(int)size);
1037
1038 /* ======================================================================
1039 * 1. SERIAL JOB – just hand the Vec to VecLoad()
1040 * ==================================================================== */
1041 if(size==1)
1042 {
1043 PetscViewer viewer;
1044 PetscBool found;
1045 Vec temp_vec;
1046 PetscInt expectedSize,loadedSize;
1047
1048 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
1049 if(!found) SETERRQ(comm,PETSC_ERR_FILE_OPEN,
1050 "Restart/Source file not found: %s",filename);
1051
1052 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,FILE_MODE_READ,&viewer);CHKERRQ(ierr);
1053// ---- START MODIFICATION ----
1054 // DO NOT load directly into field_vec, as this can resize it, which is
1055 // illegal for DMSwarm "view" vectors. Instead, load into a temporary vector.
1056 ierr = VecCreate(PETSC_COMM_SELF, &temp_vec); CHKERRQ(ierr);
1057 ierr = VecLoad(temp_vec,viewer);CHKERRQ(ierr);
1058 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1059
1060 // Sanity check: ensure the file size matches the expected vector size.
1061 ierr = VecGetSize(field_vec, &expectedSize);CHKERRQ(ierr);
1062 ierr = VecGetSize(temp_vec, &loadedSize);CHKERRQ(ierr);
1063 if (loadedSize != expectedSize) {
1064 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
1065 "File %s holds %d entries – expected %d for field '%s'",
1066 filename, loadedSize, expectedSize, field_name);
1067 }
1068
1069 // Now, safely copy the data from the temporary vector to the final destination.
1070 ierr = VecCopy(temp_vec, field_vec);CHKERRQ(ierr);
1071
1072 // Clean up the temporary vector.
1073 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1074
1075 // ---- END MODIFICATION ----
1076
1077 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
1078 /*
1079 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
1080 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
1081
1082 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1083 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
1084
1085 ierr = VecLoad(field_vec,viewer);CHKERRQ(ierr);
1086 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1087 */
1089 "Loaded <%s> (serial path)\n",filename);
1090
1092 PetscFunctionReturn(0);
1093 }
1094
1095 /* ======================================================================
1096 * 2. PARALLEL JOB
1097 * ==================================================================== */
1098 PetscInt globalSize;
1099 ierr = VecGetSize(field_vec,&globalSize);CHKERRQ(ierr);
1100
1101 DM dm = NULL;
1102 const char *dmtype = NULL;
1103 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1104
1105 /* -------------------- rank-0 : read the sequential file -------------- */
1106 Vec seq_vec = NULL; /* only valid on rank-0 */
1107 const PetscScalar *seqArray = NULL; /* borrowed pointer on rank-0 only */
1108
1109 if(rank==0)
1110 {
1111 PetscViewer viewer;
1112 PetscBool found;
1113
1114 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
1115 if(!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,
1116 "Restart file not found: %s",filename);
1117
1118 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
1119 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
1120 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
1121
1122 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1123 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
1124 ierr = VecLoad(seq_vec,viewer);CHKERRQ(ierr);
1125 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1126
1127 /* size sanity-check */
1128 PetscInt loaded;
1129 ierr = VecGetSize(seq_vec,&loaded);CHKERRQ(ierr);
1130 if(loaded != globalSize)
1131 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
1132 "File %s holds %d entries – expected %d",
1133 filename,loaded,globalSize);
1134
1135 /* borrow array for later Bcast */
1136 ierr = VecGetArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);
1137
1139 "Rank 0 successfully loaded <%s>\n",filename);
1140 }
1141
1142 /* -------------------- Check if this is a DMDA vector ----------------- */
1143 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1144 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1145
1146 if (dmtype && !strcmp(dmtype, DMDA)) {
1147 /* ==================================================================
1148 * DMDA PATH: File is in natural ordering, need to convert to global
1149 * ================================================================== */
1150
1151 /* Create natural vector */
1152 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1153
1154 /* Scatter from rank 0's seq_vec to all ranks' natural vector */
1155 VecScatter scatter;
1156 Vec nat_seq = NULL; /* Sequential natural vector on rank 0 */
1157
1158 ierr = VecScatterCreateToZero(nat, &scatter, &nat_seq); CHKERRQ(ierr);
1159
1160 /* Reverse scatter: from rank 0 to all ranks */
1161 ierr = VecScatterBegin(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
1162 INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
1163 ierr = VecScatterEnd(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
1164 INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
1165
1166 /* Convert natural → global ordering */
1167 ierr = DMDANaturalToGlobalBegin(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
1168 ierr = DMDANaturalToGlobalEnd(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
1169
1170 /* Cleanup */
1171 ierr = VecScatterDestroy(&scatter); CHKERRQ(ierr);
1172 ierr = VecDestroy(&nat_seq); CHKERRQ(ierr);
1173 ierr = VecDestroy(&nat); CHKERRQ(ierr);
1174
1175 } else {
1176 /* ==================================================================
1177 * NON-DMDA PATH: Use broadcast and direct copy (assumes global ordering)
1178 * ================================================================== */
1179
1180 PetscScalar *buffer = NULL;
1181 if (rank == 0) {
1182 buffer = (PetscScalar *)seqArray;
1183 } else {
1184 ierr = PetscMalloc1(globalSize, &buffer); CHKERRQ(ierr);
1185 }
1186
1187 ierr = MPI_Bcast(buffer, (int)globalSize, MPIU_SCALAR, 0, comm); CHKERRQ(ierr);
1188
1189 /* Copy slice based on ownership range */
1190 PetscInt rstart, rend, loc;
1191 PetscScalar *locArray;
1192
1193 ierr = VecGetOwnershipRange(field_vec, &rstart, &rend); CHKERRQ(ierr);
1194 loc = rend - rstart;
1195
1196 ierr = VecGetArray(field_vec, &locArray); CHKERRQ(ierr);
1197 ierr = PetscMemcpy(locArray, buffer + rstart, loc * sizeof(PetscScalar)); CHKERRQ(ierr);
1198 ierr = VecRestoreArray(field_vec, &locArray); CHKERRQ(ierr);
1199
1200 if (rank != 0) {
1201 ierr = PetscFree(buffer); CHKERRQ(ierr);
1202 }
1203 }
1204
1205 /* -------------------- tidy up ---------------------------------------- */
1206 if (rank == 0) {
1207 ierr = VecRestoreArrayRead(seq_vec, &seqArray); CHKERRQ(ierr);
1208 ierr = VecDestroy(&seq_vec); CHKERRQ(ierr);
1209 }
1210
1212 "Loaded <%s> (parallel path)\n",filename);
1213
1215 PetscFunctionReturn(0);
1216}
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:33
Here is the caller graph for this function:

◆ ReadStatisticalFields()

PetscErrorCode ReadStatisticalFields ( UserCtx user,
PetscInt  ti 
)

Reads statistical fields used for time-averaged simulations.

Reads statistical quantities such as velocity sums and pressure sums. Logs missing files and initializes fields to zero if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiThe timestep at which the simulation field data needs to be read.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads statistical fields used for time-averaged simulations.

This function reads data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum, and P_sum, used for statistical analysis during simulation.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1311 of file io.c.

1312{
1313 PetscErrorCode ierr;
1314
1315 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read statistical fields.\n");
1316
1317 ierr = ReadOptionalField(user, "su0", "Velocity Sum", user->Ucat_sum, ti, "dat"); CHKERRQ(ierr);
1318 ierr = ReadOptionalField(user, "su1", "Velocity Cross Sum", user->Ucat_cross_sum, ti, "dat"); CHKERRQ(ierr);
1319 ierr = ReadOptionalField(user, "su2", "Velocity Square Sum",user->Ucat_square_sum, ti, "dat"); CHKERRQ(ierr);
1320 ierr = ReadOptionalField(user, "sp", "Pressure Sum", user->P_sum, ti, "dat"); CHKERRQ(ierr);
1321
1322 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading statistical fields.\n");
1323
1324 return 0;
1325}
Vec Ucat_square_sum
Definition variables.h:786
Vec P_sum
Definition variables.h:786
Vec Ucat_sum
Definition variables.h:786
Vec Ucat_cross_sum
Definition variables.h:786
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadLESFields()

PetscErrorCode ReadLESFields ( UserCtx user,
PetscInt  ti 
)

Reads LES-related fields used in turbulence modeling.

Reads the Smagorinsky constant (Cs) and transfers data to local vectors. Logs missing files and initializes fields to zero if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads LES-related fields used in turbulence modeling.

This function reads LES-related fields such as Cs (Smagorinsky constant) into their respective PETSc vectors.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1338 of file io.c.

1339{
1340 PetscErrorCode ierr;
1341
1342 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read LES fields.\n");
1343
1344 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, "dat"); CHKERRQ(ierr);
1345 ierr = ReadOptionalField(user, "cs", "Smagorinsky Constant (Cs)", user->CS, ti, "dat"); CHKERRQ(ierr);
1346
1347 DMGlobalToLocalBegin(user->da, user->CS, INSERT_VALUES, user->lCs);
1348 DMGlobalToLocalEnd(user->da, user->CS, INSERT_VALUES, user->lCs);
1349
1350 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1351 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1352
1353 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading LES fields.\n");
1354
1355 return 0;
1356}
Vec lCs
Definition variables.h:783
Vec lNu_t
Definition variables.h:783
Vec Nu_t
Definition variables.h:783
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadRANSFields()

PetscErrorCode ReadRANSFields ( UserCtx user,
PetscInt  ti 
)

Reads RANS-related fields for turbulence modeling.

Reads K_Omega fields (used in RANS modeling) and initializes them if files are unavailable.

Parameters
[in,out]userPointer to the UserCtx structure containing the simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Reads RANS-related fields for turbulence modeling.

This function reads RANS-related fields such as K_Omega into their respective PETSc vectors.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[in]tiTime index for constructing the file name.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1369 of file io.c.

1370{
1371 PetscErrorCode ierr;
1372
1373 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read RANS fields.\n");
1374
1375 ierr = ReadOptionalField(user, "kfield", "K-Omega RANS", user->K_Omega, ti, "dat"); CHKERRQ(ierr);
1376 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, "dat"); CHKERRQ(ierr);
1377
1378 VecCopy(user->K_Omega, user->K_Omega_o);
1379
1380 DMGlobalToLocalBegin(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1381 DMGlobalToLocalEnd(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1382
1383 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1384 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1385
1386 DMGlobalToLocalBegin(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1387 DMGlobalToLocalEnd(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1388
1389 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading RANS fields.\n");
1390
1391 return 0;
1392}
Vec K_Omega_o
Definition variables.h:783
Vec K_Omega
Definition variables.h:783
Vec lK_Omega_o
Definition variables.h:783
Vec lK_Omega
Definition variables.h:783
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteFieldData()

PetscErrorCode WriteFieldData ( UserCtx user,
const char *  field_name,
Vec  field_vec,
PetscInt  ti,
const char *  ext 
)

Writes data from a specific PETSc vector to a file.

This function uses the field name to construct the file path and writes the data from the provided PETSc vector to the corresponding file.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
[in]field_nameName of the field (e.g., "ufield", "vfield", "pfield").
[in]field_vecPETSc vector containing the field data to write.
[in]tiTime index for constructing the file name.
[in]extFile extension (e.g., "dat").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes data from a specific PETSc vector to a file.

This function is now parallel-safe.

  • In PARALLEL: All processes send their local data to Rank 0. Rank 0 assembles the data into a temporary sequential vector and writes it to a single file.
  • In SERIAL: It performs a direct, simple write.

This ensures the output file is always in a simple, portable format.

Parameters
[in]userPointer to the UserCtx structure.
[in]field_nameName of the field (e.g., "position").
[in]field_vecThe parallel PETSc vector containing the data to write.
[in]tiTime index for constructing the file name.
[in]extFile extension (e.g., "dat").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Dump a distributed PETSc Vec to the single sequential file format used by our restart / post-processing tools.

The companion of ReadFieldData(): it always produces one file (e.g. results/ufield00006_0.dat) regardless of how many MPI ranks are running.

Behaviour

# MPI ranks Strategy
1 Direct VecView() into the file.
> 1 VecScatterCreateToZero() gathers the distributed Vec onto rank 0.
Rank 0 writes the sequential Vec; all other ranks allocate no storage.

The routine never alters or destroys the parallel Vec passed in; the gather buffer is created and freed internally.

Parameters
[in]userSimulation context (used only for logging).
[in]field_nameLogical field name (forms part of the filename).
[in]field_vecDistributed PETSc Vec to write.
[in]tiTimestep index used in the filename.
[in]extFile extension ("dat" in our workflow).
Returns
0 on success or a PETSc error code.

Definition at line 1636 of file io.c.

/* Gathers a distributed Vec onto rank 0 and writes ONE sequential binary
 * file (<dir>/<field_name><ti>_0.<ext>). DMDA vectors are first converted
 * to natural ordering so the file layout is rank-count independent.
 * NOTE(review): this doc listing dropped original lines 1651, 1668, 1683,
 * 1728, 1737 and 1746 -- presumably the multi-line LOG_ALLOW(...) openings
 * for the orphan format-string fragments below; confirm against io.c. */
1641{
1642 PetscErrorCode ierr;
1643 MPI_Comm comm;
1644 PetscMPIInt rank, size;
1645
1646 const PetscInt placeholder_int = 0; /* keep legacy name */
1647 char filename[PETSC_MAX_PATH_LEN];
1648 SimCtx *simCtx=user->simCtx;
1649
1650 PetscFunctionBeginUser;
1652
1653 if(!simCtx->output_dir){
1654 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Output directory was not set before calling WriteFieldData().");
1655 }
1656
1657 /* ------------------------------------------------------------ */
1658 /* Basic communicator information */
1659 /* ------------------------------------------------------------ */
1660 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1661 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1662 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1663
/* Build <current_io_directory>/<field_name><05d ti>_0.<ext>. */
1664 ierr = PetscSNPrintf(filename,sizeof(filename),
1665 "%s/%s%05" PetscInt_FMT "_%d.%s",
1666 simCtx->current_io_directory,field_name,ti,placeholder_int,ext);CHKERRQ(ierr);
1667
1669 " Preparing to write <%s> on rank %d/%d\n",
1670 filename,rank,size);
1671
1672 /* ------------------------------------------------------------ */
1673 /* 1. Serial path */
1674 /* ------------------------------------------------------------ */
/* Single rank: write directly; nothing from the parallel path has been
 * allocated yet, so the early return leaks nothing. */
1675 if (size == 1) {
1676 PetscViewer viewer;
1677
1678 ierr = PetscViewerBinaryOpen(comm,filename,
1679 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1680 ierr = VecView(field_vec,viewer);CHKERRQ(ierr);
1681 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1682
1684 " Wrote <%s> (serial path)\n",filename);
1685 PetscFunctionReturn(0);
1686 }
1687
1688 /* ------------------------------------------------------------ */
1689 /* 2. Parallel path */
1690 /* ------------------------------------------------------------ */
1691 VecScatter scatter;
1692 Vec seq_vec=NULL; /* created by PETSc, lives only on rank 0 */
1693 DM dm = NULL;
1694 const char *dmtype = NULL;
1695 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1696
1697 /* 2.1 Check if vector has DMDA and convert to natural ordering if needed */
1698 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1699 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1700
1701 if (dmtype && !strcmp(dmtype, DMDA)) {
1702 /* DMDA path: convert to natural ordering first */
1703 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1704 ierr = DMDAGlobalToNaturalBegin(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1705 ierr = DMDAGlobalToNaturalEnd(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1706
1707 /* Gather the natural-ordered vector */
1708 ierr = VecScatterCreateToZero(nat, &scatter, &seq_vec); CHKERRQ(ierr);
1709 } else {
1710 /* Non-DMDA path: direct gather in global ordering */
1711 ierr = VecScatterCreateToZero(field_vec, &scatter, &seq_vec); CHKERRQ(ierr);
1712 }
1713
1714 /* 2.2 Gather distributed → sequential (on rank 0) */
1715 ierr = VecScatterBegin(scatter, (nat ? nat : field_vec), seq_vec,
1716 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1717 ierr = VecScatterEnd(scatter, (nat ? nat : field_vec), seq_vec,
1718 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1719
1720 /* 2.3 Rank 0 writes the file */
1721 if (rank == 0) {
1722 PetscViewer viewer;
1723
1724 /* (optional) value diagnostics */
1725 PetscReal vmin,vmax;
1726 ierr = VecMin(seq_vec,NULL,&vmin);CHKERRQ(ierr);
1727 ierr = VecMax(seq_vec,NULL,&vmax);CHKERRQ(ierr);
1729 " <%s> range = [%.4e … %.4e]\n",
1730 field_name,(double)vmin,(double)vmax);
1731
/* PETSC_COMM_SELF: only rank 0 touches the file. */
1732 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1733 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1734 ierr = VecView(seq_vec,viewer);CHKERRQ(ierr);
1735 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1736
1738 " Wrote <%s> (parallel path)\n",filename);
1739 }
1740
1741 /* 2.4 Cleanup */
/* seq_vec/scatter are destroyed on ALL ranks (collective-safe); the input
 * field_vec is never modified or destroyed here. */
1742 ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
1743 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
1744 if (nat) { ierr = VecDestroy(&nat); CHKERRQ(ierr); }
1745
1747 PetscFunctionReturn(0);
1748}
char output_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:606
Here is the caller graph for this function:

◆ WriteSimulationFields()

PetscErrorCode WriteSimulationFields ( UserCtx user)

Writes simulation fields to files.

This function writes contravariant velocity, Cartesian velocity, pressure, and node state fields to their respective binary files. It also conditionally writes LES, RANS, and statistical fields if they are enabled.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1761 of file io.c.

1762{
1763 PetscErrorCode ierr;
1764
1765 SimCtx *simCtx = user->simCtx;
1766
1767 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write simulation fields.\n");
1768
1769 // Set the current IO directory
1770 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1771 "%s/%s", simCtx->output_dir, simCtx->euler_subdir);CHKERRQ(ierr);
1772 simCtx->current_io_directory = simCtx->_io_context_buffer;
1773
1774 // Write contravariant velocity field
1775 ierr = WriteFieldData(user, "vfield", user->Ucont, simCtx->step, "dat"); CHKERRQ(ierr);
1776
1777 // Write Cartesian velocity field
1778 ierr = WriteFieldData(user, "ufield", user->Ucat, simCtx->step, "dat"); CHKERRQ(ierr);
1779
1780 // Write pressure field
1781 ierr = WriteFieldData(user, "pfield", user->P, simCtx->step, "dat"); CHKERRQ(ierr);
1782
1783 // Write node state field (nvert)
1784 ierr = WriteFieldData(user, "nvfield", user->Nvert, simCtx->step, "dat"); CHKERRQ(ierr);
1785
1786 // Write ParticleCountPerCell if enabled.
1787 if(simCtx->np>0){
1788 ierr = WriteFieldData(user, "ParticleCount",user->ParticleCount,simCtx->step,"dat"); CHKERRQ(ierr);
1789 ierr = WriteFieldData(user, "psifield", user->Psi, simCtx->step, "dat"); CHKERRQ(ierr);
1790 }
1791
1792 // Write LES fields if enabled
1793 if (simCtx->les) {
1794 ierr = WriteLESFields(user); CHKERRQ(ierr);
1795 }
1796
1797 // Write RANS fields if enabled
1798 if (simCtx->rans) {
1799 ierr = WriteRANSFields(user); CHKERRQ(ierr);
1800 }
1801
1802 // Write statistical fields if averaging is enabled
1803 if (simCtx->averaging) {
1804 ierr = WriteStatisticalFields(user); CHKERRQ(ierr);
1805 }
1806
1807 simCtx->current_io_directory = NULL;
1808
1809 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing simulation fields.\n");
1810
1811 return 0;
1812}
PetscErrorCode WriteStatisticalFields(UserCtx *user)
Writes statistical fields for averaging purposes.
Definition io.c:1824
PetscErrorCode WriteFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Writes data from a specific PETSc vector to a single, sequential file.
Definition io.c:1636
PetscErrorCode WriteRANSFields(UserCtx *user)
Writes RANS-related fields.
Definition io.c:1886
PetscErrorCode WriteLESFields(UserCtx *user)
Writes LES-related fields.
Definition io.c:1852
PetscInt step
Definition variables.h:593
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteStatisticalFields()

PetscErrorCode WriteStatisticalFields ( UserCtx user)

Writes statistical fields for averaging purposes.

This function writes data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum, and P_sum to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1824 of file io.c.

1825{
1826 PetscErrorCode ierr;
1827
1828 SimCtx *simCtx = user->simCtx;
1829
1830 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write statistical fields.\n");
1831
1832 ierr = WriteFieldData(user, "su0", user->Ucat_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1833 ierr = WriteFieldData(user, "su1", user->Ucat_cross_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1834 ierr = WriteFieldData(user, "su2", user->Ucat_square_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1835 ierr = WriteFieldData(user, "sp", user->P_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1836
1837 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing statistical fields.\n");
1838
1839 return 0;
1840}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteLESFields()

PetscErrorCode WriteLESFields ( UserCtx user)

Writes LES-related fields.

This function writes LES-related fields such as Cs (Smagorinsky constant) to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1852 of file io.c.

1853{
1854 PetscErrorCode ierr;
1855
1856 SimCtx *simCtx = user->simCtx;
1857
1858 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write LES fields.\n");
1859
1860
1861 DMLocalToGlobalBegin(user->da, user->lCs, INSERT_VALUES, user->CS);
1862 DMLocalToGlobalEnd(user->da, user->lCs, INSERT_VALUES, user->CS);
1863
1864 DMLocalToGlobalBegin(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1865 DMLocalToGlobalEnd(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1866
1867 ierr = WriteFieldData(user, "Nu_t", user->Nu_t, simCtx->step, "dat"); CHKERRQ(ierr);
1868 ierr = WriteFieldData(user, "cs", user->CS, simCtx->step, "dat"); CHKERRQ(ierr);
1869
1870
1871 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing LES fields.\n");
1872
1873 return 0;
1874}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteRANSFields()

PetscErrorCode WriteRANSFields ( UserCtx user)

Writes RANS-related fields.

This function writes RANS-related fields such as K_Omega to their respective binary files.

Parameters
[in]userPointer to the UserCtx structure containing simulation context.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1886 of file io.c.

1887{
1888 PetscErrorCode ierr;
1889
1890 SimCtx *simCtx = user->simCtx;
1891
1892 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write RANS fields.\n");
1893
1894 ierr = WriteFieldData(user, "kfield", user->K_Omega, simCtx->step, "dat"); CHKERRQ(ierr);
1895
1896 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing RANS fields.\n");
1897
1898 return 0;
1899}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteSwarmField()

PetscErrorCode WriteSwarmField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Writes data from a specific field in a PETSc Swarm to a file.

This function retrieves the Swarm from the UserCtx (i.e., user->swarm) and creates a global PETSc vector from the specified Swarm field. It then calls the existing WriteFieldData() function to handle the actual I/O operation. After writing the data, the function destroys the temporary global vector to avoid memory leaks.

Parameters
[in]userPointer to the UserCtx structure containing simulation context and the PetscSwarm (as user->swarm).
[in]field_nameName of the Swarm field to be written (e.g., "my_field").
[in]tiTime index used to construct the output file name.
[in]extFile extension (e.g., "dat", "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.
Note
Compatible with PETSc 3.14.4.

This function retrieves the Swarm from the UserCtx (i.e., user->swarm) and creates a global PETSc vector from the specified Swarm field. It then calls the existing WriteFieldData() function to handle the actual I/O operation. After writing the data, the function destroys the temporary global vector to avoid memory leaks.

Parameters
[in]userPointer to the UserCtx structure containing simulation context and the PetscSwarm (as user->swarm).
[in]field_nameName of the Swarm field to be written (e.g., "my_field").
[in]tiTime index used to construct the output file name.
[in]extFile extension (e.g., "dat", "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.
Note
Compatible with PETSc 3.14.4 and newer.

Definition at line 1920 of file io.c.

/* Writes one named swarm field to a file: wraps the field in a temporary
 * global Vec view, delegates the I/O to WriteFieldData(), then releases
 * the view (required to avoid leaking the swarm's internal buffer).
 * NOTE(review): this doc listing dropped original lines 1939, 1948, 1958
 * and 1964 -- presumably the LOG_ALLOW(...) openings for the orphan
 * format-string fragments below; confirm against io.c. */
1921{
1922 PetscErrorCode ierr;
1923 Vec fieldVec;
1924 DM swarm;
1925
1926 PetscFunctionBeginUser; /* PETSc macro indicating start of function */
1927
1928 /*
1929 * 1) Retrieve the PetscSwarm from the user context.
1930 * Ensure user->swarm is initialized and not NULL.
1931 */
1932 swarm = user->swarm;
1933
1934 /*
1935 * 2) Create a global vector from the specified swarm field.
1936 * This function is available in PETSc 3.14.4.
1937 * It provides a read/write "view" of the swarm field as a global Vec.
1938 */
1940 "Attempting to create global vector from field: %s\n",
1941 field_name);
1942 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1943
1944 /*
1945 * 3) Use your existing WriteFieldData() to write the global vector to a file.
1946 * The field name, time index, and extension are passed along for naming.
1947 */
1949 "Calling WriteFieldData for field: %s\n",
1950 field_name);
1951 ierr = WriteFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1952
1953 /*
1954 * 4) Destroy the global vector once the data is successfully written.
1955 * This step is crucial for avoiding memory leaks.
1956 * DMSwarmDestroyGlobalVectorFromField() is also available in PETSc 3.14.4.
1957 */
1959 "Destroying the global vector for field: %s\n",
1960 field_name);
1961 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1962
1963 /* Log and return success. */
1965 "Successfully wrote swarm data for field: %s\n",
1966 field_name);
1967
1968 PetscFunctionReturn(0); /* PETSc macro indicating end of function */
1969}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteSwarmIntField()

PetscErrorCode WriteSwarmIntField ( UserCtx user,
const char *  field_name,
PetscInt  ti,
const char *  ext 
)

Writes integer data from a specific PETSc Swarm field to a file.

This function is designed for swarm fields that store integer data (e.g., DMSwarm_CellID), which cannot be converted to a standard PETSc Vec of PetscScalars. It accesses the raw data pointer for the field on each rank using DMSwarmGetField(), writes the local data to a rank-specific binary file, and then restores the field access.

Parameters
[in]userPointer to the UserCtx structure containing the PetscSwarm.
[in]field_nameName of the integer Swarm field to be written.
[in]tiTime index used to construct the output file name.
[in]extFile extension (e.g., "dat", "bin").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Writes integer data from a specific PETSc Swarm field to a file.

This function provides a bridge to write integer-based swarm fields (like DMSwarm_CellID) using the existing Vec-based I/O routine (WriteFieldData). It works by:

  1. Creating a temporary parallel Vec with the same layout as other swarm fields.
  2. Accessing the local integer data from the swarm field.
  3. Populating the temporary Vec by casting each integer to a PetscScalar.
  4. Calling the standard WriteFieldData() function with the temporary Vec.
  5. Destroying the temporary Vec.
Parameters
[in]userPointer to the UserCtx structure.
[in]field_nameName of the integer Swarm field to be written.
[in]tiTime index for the output file.
[in]extFile extension.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1989 of file io.c.

1990{
1991 PetscErrorCode ierr;
1992 DM swarm = user->swarm;
1993 Vec temp_vec; // Temporary Vec to hold casted data
1994 PetscInt nlocal, nglobal,bs,i;
1995 void *field_array_void;
1996 PetscScalar *scalar_array; // Pointer to the temporary Vec's scalar data
1997
1998 PetscFunctionBeginUser;
1999
2000 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Casting '%s' to Vec for writing.\n", field_name);
2001
2002 // Get the swarm field properties
2003 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
2004 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
2005 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
2006
2007 // Create Temporary parallel Vec wit the CORRECT layout
2008 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
2009 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
2010 ierr = VecSetSizes(temp_vec, nlocal*bs, nglobal*bs); CHKERRQ(ierr);
2011 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
2012
2013 // Defining Vector field to mandatory field 'position'
2014 DMSwarmVectorDefineField(swarm,"position");
2015
2016 ierr = VecGetArray(temp_vec, &scalar_array); CHKERRQ(ierr);
2017
2018 if(strcasecmp(field_name,"DMSwarm_pid") == 0){
2019 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
2020 // Perform the cast from PetscInt64 to PetscScalar
2021 for (i = 0; i < nlocal*bs; i++) {
2022 scalar_array[i] = (PetscScalar)int64_array[i];
2023 }
2024 }else{
2025 PetscInt *int_array = (PetscInt *)field_array_void;
2026 //Perform the cast from PetscInt to PetscScalar
2027 for (i = 0; i < nlocal*bs; i++) {
2028 scalar_array[i] = (PetscScalar)int_array[i];
2029 }
2030 }
2031
2032 // Restore access to both arrays
2033 ierr = VecRestoreArray(temp_vec, &scalar_array); CHKERRQ(ierr);
2034 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
2035
2036 // Call your existing writer with the temporary, populated Vec
2037 ierr = WriteFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
2038
2039 // Clean up
2040 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
2041
2042 PetscFunctionReturn(0);
2043}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteAllSwarmFields()

PetscErrorCode WriteAllSwarmFields ( UserCtx user)

Writes a predefined set of PETSc Swarm fields to files.

This function iterates through a hardcoded list of common swarm fields (position, velocity, etc.) and calls the WriteSwarmField() helper function for each one. This provides a straightforward way to output essential particle data at a given simulation step.

This function will only execute if particles are enabled in the simulation (i.e., user->simCtx->np > 0 and user->swarm is not NULL).

Parameters
[in]userPointer to the UserCtx structure containing the simulation context and the PetscSwarm.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 2061 of file io.c.

2062{
2063 PetscErrorCode ierr;
2064 SimCtx *simCtx = user->simCtx;
2065
2066 PetscFunctionBeginUser;
2067
2068 // If no swarm is configured or there are no particles, do nothing and return.
2069 if (!user->swarm || simCtx->np <= 0) {
2070 PetscFunctionReturn(0);
2071 }
2072
2073 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write swarm fields.\n");
2074
2075 // Ensure the current IO directory is set
2076 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
2077 "%s/%s", simCtx->output_dir, simCtx->particle_subdir);CHKERRQ(ierr);
2078 simCtx->current_io_directory = simCtx->_io_context_buffer;
2079
2080 // Write particle position field
2081 ierr = WriteSwarmField(user, "position", simCtx->step, "dat"); CHKERRQ(ierr);
2082
2083 // Write particle velocity field
2084 ierr = WriteSwarmField(user, "velocity", simCtx->step, "dat"); CHKERRQ(ierr);
2085
2086 // Write particle weight field
2087 ierr = WriteSwarmField(user, "weight", simCtx->step, "dat"); CHKERRQ(ierr);
2088
2089 // Write custom particle field "Psi"
2090 ierr = WriteSwarmField(user, "Psi", simCtx->step, "dat"); CHKERRQ(ierr);
2091
2092 // Integer fields require special handling
2093
2094 // Write the background mesh cell ID for each particle
2095 ierr = WriteSwarmIntField(user, "DMSwarm_CellID", simCtx->step, "dat"); CHKERRQ(ierr);
2096
2097 // Write the particle location status (e.g., inside or outside the domain)
2098 ierr = WriteSwarmIntField(user, "DMSwarm_location_status", simCtx->step, "dat"); CHKERRQ(ierr);
2099
2100 // Write the unique particle ID
2101 ierr = WriteSwarmIntField(user, "DMSwarm_pid", simCtx->step, "dat"); CHKERRQ(ierr);
2102
2103 simCtx->current_io_directory = NULL;
2104
2105 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing swarm fields.\n");
2106
2107 PetscFunctionReturn(0);
2108}
PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Writes data from a specific field in a PETSc Swarm to a file.
Definition io.c:1920
PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Writes integer swarm data by casting it to a temporary Vec and using WriteFieldData.
Definition io.c:1989
char particle_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:608
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadDataFileToArray()

PetscInt ReadDataFileToArray ( const char *  filename,
double **  data_out,
PetscInt *  Nout,
MPI_Comm  comm 
)

Definition at line 2643 of file io.c.

/* Reads a column of double values (one per line) from a text file on rank 0
 * and broadcasts both the count and the data to every rank in `comm`.
 * On success every rank owns a malloc'd copy in *data_out (caller frees).
 * Returns 0 on success or a small positive stage code (1-5) -- these are
 * NOT PETSc error codes.
 * NOTE(review): this doc listing dropped several original lines (2656, 2662,
 * 2687, 2698, 2712, 2720, 2739, 2751, 2765) -- presumably the LOG_ALLOW(...)
 * openings for the orphan format-string fragments below; confirm against io.c. */
2647{
2648 /* STEP 0: Prepare local variables & log function entry */
2649 PetscMPIInt rank, size;
2650 PetscErrorCode ierr;
2651 FILE *fp = NULL;
2652 PetscInt N = 0; /* number of lines/values read on rank 0 */
2653 double *array = NULL; /* pointer to local array on each rank */
2654 PetscInt fileExistsFlag = 0; /* 0 = doesn't exist, 1 = does exist */
2655
2657 "Start reading from file: %s\n",
2658 filename);
2659
2660 /* Basic error checking: data_out, Nout must be non-null. */
2661 if (!filename || !data_out || !Nout) {
2663 "Null pointer argument provided.\n");
2664 return 1;
2665 }
2666
2667 /* Determine rank/size for coordinating I/O. */
2668 MPI_Comm_rank(comm, &rank);
2669 MPI_Comm_size(comm, &size);
2670
2671 /* STEP 1: On rank 0, check if file can be opened. */
2672 if (!rank) {
2673 fp = fopen(filename, "r");
2674 if (fp) {
2675 fileExistsFlag = 1;
2676 fclose(fp);
2677 }
2678 }
2679
2680 /* STEP 2: Broadcast file existence to all ranks. */
2681 // In ReadDataFileToArray:
/* NOTE(review): fileExistsFlag is a PetscInt but is broadcast as MPI_INT;
 * with a 64-bit-index PETSc build these sizes differ -- MPIU_INT would be
 * the matching datatype. Same concern for N at the STEP 4 broadcast. */
2682 ierr = MPI_Bcast(&fileExistsFlag, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2683
2684 if (!fileExistsFlag) {
2685 /* If file does not exist, log & return. */
2686 if (!rank) {
2688 "File '%s' not found.\n",
2689 filename);
2690 }
2691 return 2;
2692 }
2693
2694 /* STEP 3: Rank 0 re-opens and reads the file, counting lines, etc. */
2695 if (!rank) {
2696 fp = fopen(filename, "r");
2697 if (!fp) {
2699 "File '%s' could not be opened for reading.\n",
2700 filename);
2701 return 3;
2702 }
2703
2704 /* (3a) Count lines first. */
2705 {
2706 char line[256];
2707 while (fgets(line, sizeof(line), fp)) {
2708 N++;
2709 }
2710 }
2711
2713 "File '%s' has %d lines.\n",
2714 filename, N);
2715
2716 /* (3b) Allocate array on rank 0. */
2717 array = (double*)malloc(N * sizeof(double));
2718 if (!array) {
2719 fclose(fp);
2721 "malloc failed for array.\n");
2722 return 4;
2723 }
2724
2725 /* (3c) Rewind & read values into array. */
2726 rewind(fp);
2727 {
2728 PetscInt i = 0;
2729 char line[256];
/* NOTE(review): lines that fail sscanf() are counted in N (step 3a) but not
 * stored here, leaving trailing array entries uninitialized while N still
 * reports the full line count -- verify input files contain only numbers. */
2730 while (fgets(line, sizeof(line), fp)) {
2731 double val;
2732 if (sscanf(line, "%lf", &val) == 1) {
2733 array[i++] = val;
2734 }
2735 }
2736 }
2737 fclose(fp);
2738
2740 "Successfully read %d values from '%s'.\n",
2741 N, filename);
2742 }
2743
2744 /* STEP 4: Broadcast the integer N to all ranks. */
2745 ierr = MPI_Bcast(&N, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2746
2747 /* STEP 5: Each rank allocates an array to receive the broadcast if rank>0. */
2748 if (rank) {
2749 array = (double*)malloc(N * sizeof(double));
2750 if (!array) {
2752 "malloc failed on rank %d.\n",
2753 rank);
2754 return 5;
2755 }
2756 }
2757
2758 /* STEP 6: Broadcast the actual data from rank 0 to all. */
2759 ierr = MPI_Bcast(array, N, MPI_DOUBLE, 0, comm); CHKERRQ(ierr);
2760
2761 /* STEP 7: Assign outputs on all ranks. */
2762 *data_out = array;
2763 *Nout = N;
2764
2766 "Done. Provided array of length=%d to all ranks.\n",
2767 N);
2768 return 0; /* success */
2769}

◆ CreateVTKFileFromMetadata()

/**
 * @brief Writes a combined VTK file (header + appended binary blocks + footer) on rank 0.
 *
 * Only rank 0 performs any I/O; all other ranks return 0 immediately. The file is
 * written in the VTK "appended" binary layout: an XML header that declares each data
 * array and its byte offset, followed by the raw blocks (coordinates, point-data
 * fields, and — for VTK_POLYDATA — connectivity and offsets), then the footer.
 *
 * Improvements over the previous version:
 *  - The five duplicated `fclose(fp); return ierr;` error paths are collapsed into a
 *    single goto-based cleanup, so the FILE* cannot leak on any exit.
 *  - The large commented-out debug dump of Ucat_nodal has been removed (dead code).
 *
 * @param[in] filename  Path of the VTK file to create (opened with "wb").
 * @param[in] meta      Metadata describing geometry and data arrays to emit.
 * @param[in] comm      Communicator used only to determine the local rank.
 *
 * @return 0 on success; PETSC_ERR_FILE_OPEN if fopen fails; otherwise the first
 *         nonzero code returned by a WriteVTK* helper. Non-root ranks always return 0.
 */
PetscInt CreateVTKFileFromMetadata(const char *filename,
                                   const VTKMetaData *meta,
                                   MPI_Comm comm)
{
    PetscMPIInt rank;
    MPI_Comm_rank(comm, &rank);

    /* Only rank 0 writes; everyone else is done immediately. */
    if (rank) return 0;

    LOG_ALLOW(GLOBAL, LOG_INFO, "Rank 0 writing combined VTK file '%s'.\n", filename);

    FILE *fp = fopen(filename, "wb");
    if (!fp) {
        LOG_ALLOW(GLOBAL, LOG_ERROR, "fopen failed for %s.\n", filename);
        return PETSC_ERR_FILE_OPEN;
    }

    PetscErrorCode ierr    = 0;
    PetscInt       boffset = 0; /* running byte offset of the appended data section */

    ierr = WriteVTKFileHeader(fp, meta, &boffset);
    if (ierr) goto cleanup;

    /* Coordinates block (3 scalars per point), if present. */
    if (meta->coords) {
        ierr = WriteVTKAppendedBlock(fp, meta->coords, 3 * meta->npoints, sizeof(PetscScalar));
        if (ierr) goto cleanup;
    }

    /* One appended block per registered point-data field. */
    for (PetscInt i = 0; i < meta->num_point_data_fields; i++) {
        const VTKFieldInfo *field = &meta->point_data_fields[i];
        if (field->data) {
            ierr = WriteVTKAppendedBlock(fp, field->data, field->num_components * meta->npoints, sizeof(PetscScalar));
            if (ierr) goto cleanup;
        }
    }

    /* Polydata additionally carries vertex connectivity and cell offsets. */
    if (meta->fileType == VTK_POLYDATA) {
        if (meta->connectivity) {
            ierr = WriteVTKAppendedBlock(fp, meta->connectivity, meta->npoints, sizeof(PetscInt));
            if (ierr) goto cleanup;
        }
        if (meta->offsets) {
            ierr = WriteVTKAppendedBlock(fp, meta->offsets, meta->npoints, sizeof(PetscInt));
            if (ierr) goto cleanup;
        }
    }

    ierr = WriteVTKFileFooter(fp, meta);

cleanup:
    /* Single exit point: the file is always closed exactly once. */
    fclose(fp);
    if (!ierr) {
        LOG_ALLOW(GLOBAL, LOG_INFO, "Rank 0 finished writing VTK file '%s'.\n", filename);
    }
    return ierr;
}
PetscInt npoints
Definition variables.h:546
PetscInt num_components
Definition variables.h:533
PetscInt num_point_data_fields
Definition variables.h:549
PetscInt * connectivity
Definition variables.h:550
PetscInt * offsets
Definition variables.h:551
VTKFileType fileType
Definition variables.h:544
PetscScalar * data
Definition variables.h:534
PetscScalar * coords
Definition variables.h:547
VTKFieldInfo point_data_fields[20]
Definition variables.h:548
@ VTK_POLYDATA
Definition variables.h:540
Stores all necessary information for a single data array in a VTK file.
Definition variables.h:531
static PetscErrorCode WriteVTKFileFooter(FILE *fp, const VTKMetaData *meta)
Definition vtk_io.c:116
static PetscErrorCode WriteVTKAppendedBlock(FILE *fp, const void *data, PetscInt num_elements, size_t element_size)
Definition vtk_io.c:17
static PetscErrorCode WriteVTKFileHeader(FILE *fp, const VTKMetaData *meta, PetscInt *boffset)
Definition vtk_io.c:106
Here is the call graph for this function:
Here is the caller graph for this function:

◆ VecToArrayOnRank0()

PetscErrorCode VecToArrayOnRank0 ( Vec  inVec,
PetscInt *  N,
double **  arrayOut 
)

Gathers the contents of a distributed PETSc Vec into a single array on rank 0.

Parameters
[in]inVecThe input (possibly distributed) Vec.
[out]NThe global size of the vector.
[out]arrayOutOn rank 0, points to the newly allocated array holding all data. On other ranks, it is set to NULL.
Returns
PetscErrorCode Return 0 on success, nonzero on failure.

Gathers the contents of a distributed PETSc Vec into a single array on rank 0.

If the Vec has a DMDA attached, the gather is performed in DMDA "natural" ordering.

Parameters
[in]inVecThe PETSc Vec (may be distributed).
[out]NGlobal length of the Vec (includes dof).
[out]arrayOutOn rank 0, newly allocated buffer with the gathered values (PetscScalar-sized). On other ranks, set to NULL.

Definition at line 2119 of file io.c.

2120{
2121 PetscErrorCode ierr;
2122 MPI_Comm comm;
2123 PetscMPIInt rank;
2124 PetscInt globalSize;
2125 DM dm = NULL;
2126 const char *dmtype = NULL;
2127
2128 /* For DMDA path */
2129 Vec nat = NULL, seqNat = NULL;
2130 VecScatter scatNat = NULL;
2131 const PetscScalar *nar = NULL;
2132 PetscScalar *buf = NULL;
2133
2134 /* For generic (no DM) path */
2135 Vec seq = NULL;
2136 VecScatter scat = NULL;
2137 const PetscScalar *sar = NULL;
2138
2139 PetscFunctionBeginUser;
2140
2141 ierr = PetscObjectGetComm((PetscObject)inVec, &comm); CHKERRQ(ierr);
2142 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
2143 ierr = VecGetSize(inVec, &globalSize); CHKERRQ(ierr);
2144 *N = globalSize;
2145 *arrayOut = NULL;
2146
2147 ierr = VecGetDM(inVec, &dm); CHKERRQ(ierr);
2148 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
2149
2150 if (dmtype && !strcmp(dmtype, DMDA)) {
2151 /* --- DMDA path: go to NATURAL ordering, then gather to rank 0 --- */
2152 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
2153 ierr = DMDAGlobalToNaturalBegin(dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
2154 ierr = DMDAGlobalToNaturalEnd (dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
2155
2156 ierr = VecScatterCreateToZero(nat, &scatNat, &seqNat); CHKERRQ(ierr);
2157 ierr = VecScatterBegin(scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2158 ierr = VecScatterEnd (scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2159
2160 if (rank == 0) {
2161 PetscInt nseq;
2162 ierr = VecGetLocalSize(seqNat, &nseq); CHKERRQ(ierr);
2163 ierr = VecGetArrayRead(seqNat, &nar); CHKERRQ(ierr);
2164
2165 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
2166 ierr = PetscMemcpy(buf, nar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
2167
2168 ierr = VecRestoreArrayRead(seqNat, &nar); CHKERRQ(ierr);
2169 *arrayOut = (double*)buf; /* hand back as double* for drop-in compatibility */
2170 }
2171
2172 ierr = VecScatterDestroy(&scatNat); CHKERRQ(ierr);
2173 ierr = VecDestroy(&seqNat); CHKERRQ(ierr);
2174 ierr = VecDestroy(&nat); CHKERRQ(ierr);
2175 } else {
2176 /* --- No DM attached: plain gather in Vec’s global (parallel) layout order --- */
2177 ierr = VecScatterCreateToZero(inVec, &scat, &seq); CHKERRQ(ierr);
2178 ierr = VecScatterBegin(scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2179 ierr = VecScatterEnd (scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2180
2181 if (rank == 0) {
2182 PetscInt nseq;
2183 ierr = VecGetLocalSize(seq, &nseq); CHKERRQ(ierr);
2184 ierr = VecGetArrayRead(seq, &sar); CHKERRQ(ierr);
2185
2186 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
2187 ierr = PetscMemcpy(buf, sar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
2188
2189 ierr = VecRestoreArrayRead(seq, &sar); CHKERRQ(ierr);
2190 *arrayOut = (double*)buf;
2191 }
2192
2193 ierr = VecScatterDestroy(&scat); CHKERRQ(ierr);
2194 ierr = VecDestroy(&seq); CHKERRQ(ierr);
2195 }
2196
2197 PetscFunctionReturn(0);
2198}
Here is the caller graph for this function:

◆ SwarmFieldToArrayOnRank0()

/**
 * @brief Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
 *
 * This is a generic, type-aware gather routine for DMSwarm fields. It is a
 * COLLECTIVE operation: every rank must call it with valid output pointers, since
 * all ranks participate in MPI_Gather/MPI_Gatherv.
 *
 * Fixes over the previous version:
 *  - MPI_Gatherv requires `int` (PetscMPIInt) count/displacement arrays; the old
 *    code passed PetscInt* buffers, which is wrong on 64-bit-index PETSc builds.
 *  - *gathered_array is now NULL-initialized on every rank, so non-root ranks no
 *    longer pass an indeterminate recv-buffer pointer to MPI_Gatherv.
 *
 * @param[in]  swarm              The DMSwarm to gather from.
 * @param[in]  field_name         Name of the field to gather.
 * @param[out] n_total_particles  (rank 0) Total number of particles in the swarm.
 * @param[out] n_components       (rank 0) Number of components of the field.
 * @param[out] gathered_array     (rank 0) Newly allocated buffer with the full data;
 *                                caller frees and casts to the proper type. NULL on
 *                                all other ranks.
 *
 * @return PetscErrorCode
 */
PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
{
    PetscErrorCode ierr;
    PetscMPIInt    rank, size;
    PetscInt       nlocal, nglobal, bs;
    void          *local_array_void;
    size_t         element_size;

    PetscFunctionBeginUser;

    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
    ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);

    /* All ranks query the swarm layout so send/receive counts are consistent. */
    ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
    ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
    ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);

    /* Storage type of one element; the well-known integer fields are special-cased,
       everything else is assumed to be PetscScalar. */
    if (strcasecmp(field_name, "DMSwarm_pid") == 0) {
        element_size = sizeof(PetscInt64);
    } else if (strcasecmp(field_name, "DMSwarm_CellID") == 0 ||
               strcasecmp(field_name, "DMSwarm_location_status") == 0) {
        element_size = sizeof(PetscInt);
    } else {
        element_size = sizeof(PetscScalar);
    }

    /* BUGFIX: initialize the output buffer pointer on EVERY rank so the
       MPI_Gatherv call below never reads an indeterminate value on non-roots. */
    *gathered_array = NULL;
    if (rank == 0) {
        *n_total_particles = nglobal;
        *n_components      = bs;
    }

    if (size == 1) { /* Serial case is a simple copy. */
        ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
        ierr = PetscMemcpy(*gathered_array, local_array_void, nglobal * bs * element_size); CHKERRQ(ierr);
    } else { /* Parallel case: byte-wise MPI_Gatherv handles any element type. */
        /* BUGFIX: counts/displacements MUST be PetscMPIInt (int) for MPI_Gatherv.
           NOTE(review): byte counts could overflow 'int' for very large local
           payloads (> 2 GiB per rank) — confirm expected problem sizes. */
        PetscMPIInt *recvcounts = NULL, *displs = NULL;
        PetscMPIInt  sendbytes  = (PetscMPIInt)(nlocal * bs * element_size);

        if (rank == 0) {
            ierr = PetscMalloc1(size, &recvcounts); CHKERRQ(ierr);
            ierr = PetscMalloc1(size, &displs); CHKERRQ(ierr);
        }

        /* Gather each rank's contribution size in BYTES. */
        ierr = MPI_Gather(&sendbytes, 1, MPI_INT, recvcounts, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

        if (rank == 0) {
            displs[0] = 0;
            for (PetscMPIInt i = 1; i < size; i++) displs[i] = displs[i-1] + recvcounts[i-1];
            ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
        }

        ierr = MPI_Gatherv(local_array_void, sendbytes, MPI_BYTE,
                           *gathered_array, recvcounts, displs, MPI_BYTE,
                           0, PETSC_COMM_WORLD); CHKERRQ(ierr);

        if (rank == 0) {
            ierr = PetscFree(recvcounts); CHKERRQ(ierr);
            ierr = PetscFree(displs); CHKERRQ(ierr);
        }
    }

    ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);

    PetscFunctionReturn(0);
}
Here is the caller graph for this function:

◆ ReadSwarmField()

/**
 * @brief Reads data from a file into a specified field of a PETSc DMSwarm.
 *
 * Counterpart to WriteSwarmField(): the swarm field is temporarily exposed as a
 * global PETSc Vec, ReadFieldData() fills it from the on-disk file for the given
 * time index, and the Vec view is then released back to the swarm.
 *
 * @param[in] user        UserCtx holding the target swarm (user->swarm).
 * @param[in] field_name  Name of the (already registered) DMSwarm field to fill.
 * @param[in] ti          Time index used to construct the input file name.
 * @param[in] ext         File extension (e.g. "dat" or "bin").
 *
 * @return 0 on success, non-zero PETSc error code on failure.
 *
 * @note Compatible with PETSc 3.14.x.
 */
PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
{
    PetscErrorCode ierr;
    Vec            view;

    PetscFunctionBegin;

    LOG_ALLOW(GLOBAL,LOG_DEBUG," ReadSwarmField Begins \n");

    /* Expose the swarm field as a global Vec, read into it, then release it. */
    ierr = DMSwarmCreateGlobalVectorFromField(user->swarm, field_name, &view);CHKERRQ(ierr);

    LOG_ALLOW(GLOBAL,LOG_DEBUG," Vector created from Field \n");

    ierr = ReadFieldData(user, field_name, view, ti, ext);CHKERRQ(ierr);

    ierr = DMSwarmDestroyGlobalVectorFromField(user->swarm, field_name, &view);CHKERRQ(ierr);

    PetscFunctionReturn(0);
}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadSwarmIntField()

/**
 * @brief Reads integer swarm data by using ReadFieldData and casting the result.
 *
 * Counterpart to WriteSwarmIntField. The on-disk file stores the values as
 * floating point, so they are first read into a staging Vec with the same
 * parallel layout as the swarm field, then cast element-by-element back into
 * the integer field ("DMSwarm_pid" is 64-bit; every other integer field is
 * PetscInt).
 *
 * @param[in] user        UserCtx holding the target swarm.
 * @param[in] field_name  Name of the integer swarm field to fill.
 * @param[in] ti          Time index for the input file.
 * @param[in] ext         File extension.
 *
 * @return 0 on success, non-zero PETSc error code on failure.
 */
PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
{
    PetscErrorCode     ierr;
    DM                 swarm = user->swarm;
    Vec                stage;               /* scalar staging vector for the file data */
    PetscInt           nlocal, nglobal, bs, k, count;
    const PetscScalar *src;                 /* read-only view of the staging vector */
    void              *dst_raw;             /* untyped pointer to the swarm field storage */

    PetscFunctionBeginUser;

    LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading '%s' via temporary Vec.\n", field_name);

    /* Query the swarm layout (sizes and block size) without holding the data. */
    ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
    ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
    ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
    ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);

    /* Build a staging Vec whose layout matches the swarm field exactly. */
    ierr = VecCreate(PETSC_COMM_WORLD, &stage); CHKERRQ(ierr);
    ierr = VecSetType(stage, VECMPI); CHKERRQ(ierr);
    ierr = VecSetSizes(stage, nlocal * bs, nglobal * bs); CHKERRQ(ierr);
    ierr = VecSetBlockSize(stage, bs); CHKERRQ(ierr);
    ierr = VecSetUp(stage); CHKERRQ(ierr);

    /* Populate the staging Vec from the file. */
    ierr = ReadFieldData(user, field_name, stage, ti, ext); CHKERRQ(ierr);

    /* Cast the scalars back into the integer field, element by element. */
    ierr = VecGetArrayRead(stage, &src); CHKERRQ(ierr);
    ierr = DMSwarmGetField(swarm, field_name, NULL, NULL, &dst_raw); CHKERRQ(ierr);

    count = nlocal * bs;
    if (strcmp(field_name, "DMSwarm_pid") == 0) {
        PetscInt64 *dst = (PetscInt64 *)dst_raw;
        for (k = 0; k < count; k++) dst[k] = (PetscInt64)src[k];
    } else {
        PetscInt *dst = (PetscInt *)dst_raw;
        for (k = 0; k < count; k++) dst[k] = (PetscInt)src[k];
    }

    ierr = DMSwarmRestoreField(swarm, field_name, NULL, NULL, &dst_raw); CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(stage, &src); CHKERRQ(ierr);

    ierr = VecDestroy(&stage); CHKERRQ(ierr);

    PetscFunctionReturn(0);
}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadAllSwarmFields()

/**
 * @brief Reads all DMSwarm fields (position plus optional fields) for one timestep.
 *
 * Assumes the swarm has already been resized to match the particle count in the
 * input files. The 'position' field is MANDATORY: a missing or corrupt file is a
 * fatal error. All other fields (velocity, CellID, weight, Psi, status, ...) are
 * OPTIONAL: a missing file logs a warning and reading continues; a present but
 * corrupt/mismatched file is fatal inside ReadOptionalSwarmField.
 *
 * Fix over the previous version: on the mandatory-read failure path, the shared
 * simCtx->current_io_directory context is cleared before SETERRQ, instead of being
 * left pointing at the context buffer.
 *
 * @param[in,out] user  UserCtx containing the DMSwarm (user->swarm) and SimCtx.
 * @param[in]     ti    Time index used to construct the file names.
 *
 * @return 0 on success, non-zero PETSc error code on failure.
 */
PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
{
    PetscErrorCode ierr;
    PetscInt       nGlobal;
    SimCtx        *simCtx      = user->simCtx;
    const char    *source_path = NULL;

    PetscFunctionBeginUser;
    ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
    LOG_ALLOW(GLOBAL, LOG_INFO, "Reading DMSwarm fields for timestep %d (swarm size is %d).\n", ti, nGlobal);

    /* Empty swarm: nothing to read for this step. */
    if (nGlobal == 0) {
        LOG_ALLOW(GLOBAL, LOG_INFO, "Swarm is empty for timestep %d. Nothing to read.\n", ti);
        PetscFunctionReturn(0);
    }

    /* Pick the top-level source directory based on the execution mode. */
    if (simCtx->exec_mode == EXEC_MODE_SOLVER) {
        source_path = simCtx->restart_dir;
    } else if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
        source_path = simCtx->pps->source_dir;
    } else {
        SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
    }

    /* Point the shared I/O context at <source>/<particle_subdir> for the reads below. */
    ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
                         "%s/%s", source_path, simCtx->particle_subdir); CHKERRQ(ierr);
    simCtx->current_io_directory = simCtx->_io_context_buffer;

    /* 1) Positions are REQUIRED; failure is fatal. */
    LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading mandatory position field...\n");
    ierr = ReadSwarmField(user, "position", ti, "dat");
    if (ierr) {
        /* BUGFIX: clear the shared I/O context before bailing out so a failed
           read does not leave stale global state behind for later callers. */
        simCtx->current_io_directory = NULL;
        SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read MANDATORY 'position' field for step %d. Cannot continue.", ti);
    }
    LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read mandatory position field for step %d.\n", ti);

    /* 2) Optional fields: the helper warns and continues if a file is absent. */
    ierr = ReadOptionalSwarmField(user, "velocity",                "Velocity",         ti, "dat"); CHKERRQ(ierr);
    ierr = ReadOptionalSwarmField(user, "DMSwarm_pid",             "Particle ID",      ti, "dat"); CHKERRQ(ierr);
    ierr = ReadOptionalSwarmField(user, "DMSwarm_CellID",          "Cell ID",          ti, "dat"); CHKERRQ(ierr);
    ierr = ReadOptionalSwarmField(user, "weight",                  "Particle Weight",  ti, "dat"); CHKERRQ(ierr);
    ierr = ReadOptionalSwarmField(user, "Psi",                     "Scalar Psi",       ti, "dat"); CHKERRQ(ierr);
    ierr = ReadOptionalSwarmField(user, "DMSwarm_location_status", "Migration Status", ti, "dat"); CHKERRQ(ierr);

    simCtx->current_io_directory = NULL; /* clear the I/O context after reading */

    LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading DMSwarm fields for timestep %d.\n", ti);
    PetscFunctionReturn(0);
}
PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Reads data from a file into a specified field of a PETSc DMSwarm.
Definition io.c:1411
static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
Checks for and reads an optional DMSwarm field from a file.
Definition io.c:925
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadPositionsFromFile()

/**
 * @brief Reads particle coordinate data from file and gathers it to rank 0.
 *
 * Fills a PETSc Vec via ReadFieldData("position", ...), then uses
 * VecToArrayOnRank0 to collect the data into a contiguous array that is valid
 * on rank 0 only.
 *
 * Fix over the previous version: the Vec is destroyed on the read-failure path,
 * which previously leaked it.
 *
 * @param[in]  timeIndex    Time index used to construct the file name.
 * @param[in]  user         Pointer to the user context.
 * @param[out] coordsArray  On rank 0, newly allocated array of coordinates
 *                          (caller frees); NULL elsewhere.
 * @param[out] Ncoords      On rank 0, length of coordsArray.
 *
 * @return 0 on success, non-zero on failure.
 */
PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex,
                                     UserCtx *user,
                                     double **coordsArray,
                                     PetscInt *Ncoords)
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;
    Vec coordsVec;

    LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating coords Vec.\n");
    ierr = VecCreate(PETSC_COMM_WORLD, &coordsVec);CHKERRQ(ierr);
    ierr = VecSetFromOptions(coordsVec);CHKERRQ(ierr);

    /* "position" is the on-disk name of the particle coordinate data. */
    ierr = ReadFieldData(user, "position", coordsVec, timeIndex, "dat");
    if (ierr) {
        LOG_ALLOW(GLOBAL, LOG_ERROR,
                  "Error reading position data (ti=%d).\n",
                  timeIndex);
        /* BUGFIX: release the Vec before returning; the old error path leaked it. */
        PetscErrorCode ierr2 = VecDestroy(&coordsVec); CHKERRQ(ierr2);
        PetscFunctionReturn(ierr);
    }

    LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositions - Gathering coords Vec to rank 0.\n");
    ierr = VecToArrayOnRank0(coordsVec, Ncoords, coordsArray);CHKERRQ(ierr);

    ierr = VecDestroy(&coordsVec);CHKERRQ(ierr);

    /* NOTE(review): the LOG_ALLOW opener for the lines below was missing from the
       available source; level reconstructed from sibling functions — confirm. */
    LOG_ALLOW(GLOBAL, LOG_INFO,
              "Successfully gathered coordinates. Ncoords=%d.\n", *Ncoords);
    PetscFunctionReturn(0);
}
PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
Gather a (possibly distributed) PETSc Vec onto rank 0 as a contiguous C array.
Definition io.c:2119
Here is the call graph for this function:

◆ ReadFieldDataToRank0()

/**
 * @brief Reads a named field from file and gathers it to rank 0.
 *
 * Wraps ReadFieldData + VecToArrayOnRank0 into a single step: the field is read
 * into a PETSc Vec, then collected into scalarArray (valid on rank 0 only) with
 * its length in Nscalars.
 *
 * Fix over the previous version: the Vec is destroyed on the read-failure path,
 * which previously leaked it.
 *
 * @param[in]  timeIndex    Time index used to construct the file name.
 * @param[in]  fieldName    Name of the field to read (e.g. "velocity").
 * @param[in]  user         Pointer to the user context.
 * @param[out] scalarArray  On rank 0, newly allocated array with the field data
 *                          (caller frees); NULL elsewhere.
 * @param[out] Nscalars     On rank 0, length of scalarArray.
 *
 * @return 0 on success, non-zero on failure.
 */
PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex,
                                    const char *fieldName,
                                    UserCtx *user,
                                    double **scalarArray,
                                    PetscInt *Nscalars)
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;
    Vec fieldVec;

    LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating field Vec.\n");
    ierr = VecCreate(PETSC_COMM_WORLD, &fieldVec);CHKERRQ(ierr);
    ierr = VecSetFromOptions(fieldVec);CHKERRQ(ierr);

    ierr = ReadFieldData(user, fieldName, fieldVec, timeIndex, "dat");
    if (ierr) {
        LOG_ALLOW(GLOBAL, LOG_ERROR,
                  "Error reading field '%s' (ti=%d).\n",
                  fieldName, timeIndex);
        /* BUGFIX: release the Vec before returning; the old error path leaked it. */
        PetscErrorCode ierr2 = VecDestroy(&fieldVec); CHKERRQ(ierr2);
        PetscFunctionReturn(ierr);
    }

    LOG_ALLOW(GLOBAL, LOG_DEBUG, "Gathering field Vec to rank 0.\n");
    ierr = VecToArrayOnRank0(fieldVec, Nscalars, scalarArray);CHKERRQ(ierr);

    ierr = VecDestroy(&fieldVec);CHKERRQ(ierr);

    /* NOTE(review): the LOG_ALLOW opener for the lines below was missing from the
       available source; level reconstructed from sibling functions — confirm. */
    LOG_ALLOW(GLOBAL, LOG_INFO,
              "Successfully gathered field '%s'. Nscalars=%d.\n",
              fieldName, *Nscalars);
    PetscFunctionReturn(0);
}
Here is the call graph for this function:

◆ DisplayBanner()

PetscErrorCode DisplayBanner ( SimCtx simCtx)

Displays a structured banner summarizing the simulation configuration.

This function prints key simulation parameters to standard output. It is intended to be called ONLY by MPI rank 0. It retrieves global domain bounds from user->global_domain_bbox and boundary conditions for all faces from user->face_bc_types.

Parameters
[in]simCtxPointer to the SimCtx structure; grid sizes, boundary conditions, and the per-rank bounding-box list (simCtx->bboxlist) are read from it on rank 0.
Returns
PetscErrorCode Returns 0 on success.

This function prints key simulation parameters to standard output. It is intended to be called ONLY by MPI rank 0. It retrieves global domain bounds from user->global_domain_bbox and boundary conditions for all faces from user->face_bc_types.

Parameters
[in]userPointer to UserCtx structure.
[in]StartTimeInitial simulation time.
[in]StartStepStarting timestep index.
[in]StepsToRunTotal number of timesteps to run.
[in]num_mpi_procsTotal number of MPI processes.
[in]total_num_particlesTotal number of particles.
[in]bboxlist(If rank 0 needed to compute global_domain_bbox here, otherwise NULL)
Returns
PetscErrorCode Returns 0 on success.

Definition at line 2307 of file io.c.

2308{
2309 PetscErrorCode ierr;
2310 PetscMPIInt rank;
2311 Cmpnts global_min_coords, global_max_coords;
2312 PetscReal StartTime;
2313 PetscInt StartStep,StepsToRun,total_num_particles;
2314 PetscMPIInt num_mpi_procs;
2315
2316 // SimCtx *simCtx = user->simCtx;
2317 UserCtx *user = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
2318 num_mpi_procs = simCtx->size;
2319 StartTime = simCtx->StartTime;
2320 StartStep = simCtx->StartStep;
2321 StepsToRun = simCtx->StepsToRun;
2322 total_num_particles = simCtx->np;
2323 BoundingBox *bboxlist_on_rank0 = simCtx->bboxlist;
2324
2325
2326 PetscFunctionBeginUser;
2327
2328 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "DisplayBanner - UserCtx pointer is NULL.");
2329 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2330
2331 if (rank == 0) {
2332 // If global_domain_bbox is not pre-populated in UserCtx, compute it here from bboxlist_on_rank0
2333 // This assumes bboxlist_on_rank0 is valid and contains all local bounding boxes on rank 0.
2334 if (bboxlist_on_rank0 && num_mpi_procs > 0) {
2335 global_min_coords = bboxlist_on_rank0[0].min_coords;
2336 global_max_coords = bboxlist_on_rank0[0].max_coords;
2337 for (PetscMPIInt p = 1; p < num_mpi_procs; ++p) {
2338 global_min_coords.x = PetscMin(global_min_coords.x, bboxlist_on_rank0[p].min_coords.x);
2339 global_min_coords.y = PetscMin(global_min_coords.y, bboxlist_on_rank0[p].min_coords.y);
2340 global_min_coords.z = PetscMin(global_min_coords.z, bboxlist_on_rank0[p].min_coords.z);
2341 global_max_coords.x = PetscMax(global_max_coords.x, bboxlist_on_rank0[p].max_coords.x);
2342 global_max_coords.y = PetscMax(global_max_coords.y, bboxlist_on_rank0[p].max_coords.y);
2343 global_max_coords.z = PetscMax(global_max_coords.z, bboxlist_on_rank0[p].max_coords.z);
2344 }
2345 // Optionally store this in user->global_domain_bbox if it's useful elsewhere
2346 // user->global_domain_bbox.min_coords = global_min_coords;
2347 // user->global_domain_bbox.max_coords = global_max_coords;
2348 } else {
2349 // Fallback or warning if bboxlist is not available for global calculation
2350 LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank 0) - bboxlist not provided or num_mpi_procs <=0; using user->bbox for domain bounds.\n");
2351 // global_min_coords = user->bbox.min_coords; // Use local bbox of rank 0 as fallback
2352 // global_max_coords = user->bbox.max_coords;
2353 }
2354
2355 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2356 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2357 ierr = PetscPrintf(PETSC_COMM_SELF, " CASE SUMMARY \n"); CHKERRQ(ierr);
2358 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2359 ierr = PetscPrintf(PETSC_COMM_SELF, " Grid Points : %d X %d X %d\n", user->IM, user->JM, user->KM); CHKERRQ(ierr);
2360 ierr = PetscPrintf(PETSC_COMM_SELF, " Cells : %d X %d X %d\n", user->IM - 1, user->JM - 1, user->KM - 1); CHKERRQ(ierr);
2361 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (X) : %.6f to %.6f\n", (double)global_min_coords.x, (double)global_max_coords.x); CHKERRQ(ierr);
2362 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Y) : %.6f to %.6f\n", (double)global_min_coords.y, (double)global_max_coords.y); CHKERRQ(ierr);
2363 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Z) : %.6f to %.6f\n", (double)global_min_coords.z, (double)global_max_coords.z); CHKERRQ(ierr);
2364 if(strcmp(simCtx->eulerianSource,"load")==0 || strcmp(simCtx->eulerianSource,"solve")==0){
2365 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------- Boundary Conditions --------------------\n"); CHKERRQ(ierr);
2366 const int face_name_width = 17; // Adjusted for longer names (Zeta,Eta,Xi)
2367 for (PetscInt i_face = 0; i_face < 6; ++i_face) {
2368 BCFace current_face = (BCFace)i_face;
2369 // The BCFaceToString will now return the Xi, Eta, Zeta versions
2370 const char* face_str = BCFaceToString(current_face);
2371 const char* bc_type_str = BCTypeToString(user->boundary_faces[current_face].mathematical_type);
2372 const char* bc_handler_type_str = BCHandlerTypeToString(user->boundary_faces[current_face].handler_type);
2373 if(user->boundary_faces[current_face].mathematical_type == INLET){
2375 Cmpnts inlet_velocity = {0.0,0.0,0.0};
2376 PetscBool found;
2377 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vx",&inlet_velocity.x,&found); CHKERRQ(ierr);
2378 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vy",&inlet_velocity.y,&found); CHKERRQ(ierr);
2379 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vz",&inlet_velocity.z,&found); CHKERRQ(ierr);
2380 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f,%.4f,%.4f]\n",
2381 face_name_width, face_str, bc_type_str, bc_handler_type_str,inlet_velocity.x,inlet_velocity.y,inlet_velocity.z); CHKERRQ(ierr);
2382 } else if(user->boundary_faces[current_face].handler_type == BC_HANDLER_INLET_PARABOLIC){
2383 PetscReal v_max = 0.0;
2384 PetscBool found;
2385 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"v_max",&v_max,&found); CHKERRQ(ierr);
2386 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - v_max=%.4f\n",
2387 face_name_width, face_str, bc_type_str, bc_handler_type_str, v_max); CHKERRQ(ierr);
2388 }
2390 PetscReal flux;
2391 PetscBool trimflag,foundflux,foundtrimflag;
2392 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"target_flux",&flux,&foundflux); CHKERRQ(ierr);
2393 ierr = GetBCParamBool(user->boundary_faces[current_face].params,"apply_trim",&trimflag,&foundtrimflag); CHKERRQ(ierr);
2394 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f] - %s\n",
2395 face_name_width, face_str, bc_type_str, bc_handler_type_str,flux,trimflag?"Trim Ucont":"No Trim"); CHKERRQ(ierr);
2396 } else{
2397 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s\n",
2398 face_name_width, face_str, bc_type_str,bc_handler_type_str); CHKERRQ(ierr);
2399 }
2400 }
2401 }
2402 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------------------------------------------------\n"); CHKERRQ(ierr);
2403 ierr = PetscPrintf(PETSC_COMM_SELF, " Start Time : %.4f\n", (double)StartTime); CHKERRQ(ierr);
2404 ierr = PetscPrintf(PETSC_COMM_SELF, " Timestep Size : %.4f\n", (double)simCtx->dt); CHKERRQ(ierr);
2405 ierr = PetscPrintf(PETSC_COMM_SELF, " Starting Step : %d\n", StartStep); CHKERRQ(ierr);
2406 ierr = PetscPrintf(PETSC_COMM_SELF, " Total Steps to Run : %d\n", StepsToRun); CHKERRQ(ierr);
2407 ierr = PetscPrintf(PETSC_COMM_SELF, " Number of MPI Processes : %d\n", num_mpi_procs); CHKERRQ(ierr);
2408 ierr = PetscPrintf(PETSC_COMM_WORLD," Number of Particles : %d\n", total_num_particles); CHKERRQ(ierr);
2409 if(strcmp(simCtx->eulerianSource,"solve")==0 || strcmp(simCtx->eulerianSource,"load")==0){
2410 const char* field_init_str = FieldInitializationToString(simCtx->FieldInitialization);
2411 const char* particle_init_str = ParticleInitializationToString(simCtx->ParticleInitialization);
2412 ierr = PetscPrintf(PETSC_COMM_WORLD," Reynolds Number : %le\n", simCtx->ren); CHKERRQ(ierr);
2413 //ierr = PetscPrintf(PETSC_COMM_WORLD," Von-Neumann Number : %le\n", simCtx->vnn); CHKERRQ(ierr);
2414 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Initialization Mode: %s\n", particle_init_str); CHKERRQ(ierr);
2415 if(strcmp(simCtx->eulerianSource,"solve")==0){
2416 //ierr = PetscPrintf(PETSC_COMM_WORLD," Stanton Number : %le\n", simCtx->st); CHKERRQ(ierr);
2417 ierr = PetscPrintf(PETSC_COMM_WORLD," Momentum Equation Solver : %s\n", MomentumSolverTypeToString(simCtx->mom_solver_type)); CHKERRQ(ierr);
2418 ierr = PetscPrintf(PETSC_COMM_WORLD," Initial Pseudo-CFL : %le\n", simCtx->pseudo_cfl); CHKERRQ(ierr);
2419 ierr = PetscPrintf(PETSC_COMM_WORLD," Large Eddy Simulation Model : %s\n", LESModelToString(simCtx->les)); CHKERRQ(ierr);
2420 }
2423 if (user->inletFaceDefined) {
2424 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : %s (Enum Val: %d)\n", BCFaceToString(user->identifiedInletBCFace), user->identifiedInletBCFace); CHKERRQ(ierr);
2425 } else {
2426 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : --- (No INLET face identified)\n"); CHKERRQ(ierr);
2427 }
2428 }
2429
2430 ierr = PetscPrintf(PETSC_COMM_SELF, " Field Initialization Mode : %s\n", field_init_str); CHKERRQ(ierr);
2431 if (simCtx->FieldInitialization == 1) {
2432 ierr = PetscPrintf(PETSC_COMM_SELF, " Constant Velocity : x - %.4f, y - %.4f, z - %.4f \n", (double)simCtx->InitialConstantContra.x,(double)simCtx->InitialConstantContra.y,(double)simCtx->InitialConstantContra.z ); CHKERRQ(ierr);
2433 }
2434 } else if(strcmp(simCtx->eulerianSource,"analytical")==0){
2435 ierr = PetscPrintf(PETSC_COMM_WORLD," Analytical Solution Type : %s\n", simCtx->AnalyticalSolutionType); CHKERRQ(ierr);
2436 }
2437 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2438 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2439 }
2440 PetscFunctionReturn(0);
2441}
PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
Searches a BC_Param linked list for a key and returns its value as a double.
Definition io.c:360
PetscErrorCode GetBCParamBool(BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found)
Searches a BC_Param linked list for a key and returns its value as a bool.
Definition io.c:385
const char * BCHandlerTypeToString(BCHandlerType handler_type)
Converts a BCHandlerType enum to its string representation.
Definition logging.c:748
const char * FieldInitializationToString(PetscInt FieldInitialization)
Helper function to convert FieldInitialization to a string representation.
Definition logging.c:660
const char * BCFaceToString(BCFace face)
Helper function to convert BCFace enum to a string representation.
Definition logging.c:643
const char * BCTypeToString(BCType type)
Helper function to convert BCType enum to a string representation.
Definition logging.c:722
const char * LESModelToString(LESModelType LESFlag)
Helper function to convert LES Flag to a string representation.
Definition logging.c:691
const char * MomentumSolverTypeToString(MomentumSolverType SolverFlag)
Helper function to convert Momentum Solver flag to a string representation.
Definition logging.c:706
const char * ParticleInitializationToString(ParticleInitializationType ParticleInitialization)
Helper function to convert ParticleInitialization to a string representation.
Definition logging.c:675
@ INLET
Definition variables.h:217
UserCtx * user
Definition variables.h:474
PetscBool inletFaceDefined
Definition variables.h:747
BoundaryFaceConfig boundary_faces[6]
Definition variables.h:746
BCFace identifiedInletBCFace
Definition variables.h:748
@ PARTICLE_INIT_SURFACE_RANDOM
Random placement on the inlet face.
Definition variables.h:466
@ PARTICLE_INIT_SURFACE_EDGES
Deterministic placement at inlet face edges.
Definition variables.h:469
PetscReal StartTime
Definition variables.h:598
UserMG usermg
Definition variables.h:698
@ BC_HANDLER_INLET_PARABOLIC
Definition variables.h:236
@ BC_HANDLER_INLET_CONSTANT_VELOCITY
Definition variables.h:235
@ BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX
Definition variables.h:245
PetscInt FieldInitialization
Definition variables.h:636
PetscReal ren
Definition variables.h:632
BCHandlerType handler_type
Definition variables.h:296
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
PetscReal dt
Definition variables.h:599
PetscInt StepsToRun
Definition variables.h:596
PetscInt StartStep
Definition variables.h:595
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
PetscScalar x
Definition variables.h:101
BoundingBox * bboxlist
Definition variables.h:679
BC_Param * params
Definition variables.h:297
char eulerianSource[PETSC_MAX_PATH_LEN]
Definition variables.h:604
ParticleInitializationType ParticleInitialization
Definition variables.h:680
PetscScalar z
Definition variables.h:101
PetscInt mglevels
Definition variables.h:481
char AnalyticalSolutionType[PETSC_MAX_PATH_LEN]
Definition variables.h:617
Cmpnts InitialConstantContra
Definition variables.h:637
PetscScalar y
Definition variables.h:101
PetscMPIInt size
Definition variables.h:589
MGCtx * mgctx
Definition variables.h:484
BCType mathematical_type
Definition variables.h:295
MomentumSolverType mom_solver_type
Definition variables.h:624
PetscReal pseudo_cfl
Definition variables.h:632
BCFace
Identifies the six logical faces of a structured computational block.
Definition variables.h:203
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
A 3D point or vector with PetscScalar components.
Definition variables.h:100
User-defined context containing data specific to a single computational grid level.
Definition variables.h:728
Here is the call graph for this function:
Here is the caller graph for this function:

◆ StringToBCFace()

PetscErrorCode StringToBCFace ( const char *  str,
BCFace face_out 
)

Converts a string representation of a face to a BCFace enum.

Parameters
strThe input string (e.g., "-Xi", "+Zeta"). Case-insensitive.
[out]face_outThe resulting BCFace enum.
Returns
0 on success.

Definition at line 280 of file io.c.

280 {
281 if (strcasecmp(str, "-Xi") == 0) *face_out = BC_FACE_NEG_X;
282 else if (strcasecmp(str, "+Xi") == 0) *face_out = BC_FACE_POS_X;
283 else if (strcasecmp(str, "-Eta") == 0) *face_out = BC_FACE_NEG_Y;
284 else if (strcasecmp(str, "+Eta") == 0) *face_out = BC_FACE_POS_Y;
285 else if (strcasecmp(str, "-Zeta") == 0) *face_out = BC_FACE_NEG_Z;
286 else if (strcasecmp(str, "+Zeta") == 0) *face_out = BC_FACE_POS_Z;
287 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown face specifier: %s", str);
288 return 0;
289}
@ BC_FACE_NEG_X
Definition variables.h:204
@ BC_FACE_POS_Z
Definition variables.h:206
@ BC_FACE_POS_Y
Definition variables.h:205
@ BC_FACE_NEG_Z
Definition variables.h:206
@ BC_FACE_POS_X
Definition variables.h:204
@ BC_FACE_NEG_Y
Definition variables.h:205
Here is the caller graph for this function:

◆ StringToBCType()

PetscErrorCode StringToBCType ( const char *  str,
BCType type_out 
)

Converts a string representation of a BC type to a BCType enum.

Parameters
strThe input string (e.g., "WALL", "INLET"). Case-insensitive.
[out]type_outThe resulting BCType enum.
Returns
0 on success.

Definition at line 297 of file io.c.

297 {
298 if (strcasecmp(str, "WALL") == 0) *type_out = WALL;
299 else if (strcasecmp(str, "SYMMETRY") == 0) *type_out = SYMMETRY;
300 else if (strcasecmp(str, "INLET") == 0) *type_out = INLET;
301 else if (strcasecmp(str, "OUTLET") == 0) *type_out = OUTLET;
302 else if (strcasecmp(str, "PERIODIC") == 0) *type_out = PERIODIC;
303 // ... add other BCTypes here ...
304 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Type string: %s", str);
305 return 0;
306}
@ SYMMETRY
Definition variables.h:215
@ OUTLET
Definition variables.h:216
@ PERIODIC
Definition variables.h:219
@ WALL
Definition variables.h:213
Here is the caller graph for this function:

◆ StringToBCHandlerType()

PetscErrorCode StringToBCHandlerType ( const char *  str,
BCHandlerType handler_out 
)

Converts a string representation of a handler to a BCHandlerType enum.

Parameters
strThe input string (e.g., "noslip", "constant_velocity"). Case-insensitive.
[out]handler_outThe resulting BCHandlerType enum.
Returns
0 on success.

Definition at line 314 of file io.c.

314 {
315 if (strcasecmp(str, "noslip") == 0) *handler_out = BC_HANDLER_WALL_NOSLIP;
316 else if (strcasecmp(str, "constant_velocity") == 0) *handler_out = BC_HANDLER_INLET_CONSTANT_VELOCITY;
317 else if (strcasecmp(str, "conservation") == 0) *handler_out = BC_HANDLER_OUTLET_CONSERVATION;
318 else if (strcasecmp(str, "parabolic") == 0) *handler_out = BC_HANDLER_INLET_PARABOLIC;
319 else if (strcasecmp(str,"geometric") == 0) *handler_out = BC_HANDLER_PERIODIC_GEOMETRIC;
320 else if (strcasecmp(str,"constant_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX;
321 else if (strcasecmp(str,"initial_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX;
322 // ... add other BCHandlerTypes here ...
323 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Handler string: %s", str);
324 return 0;
325}
@ BC_HANDLER_PERIODIC_GEOMETRIC
Definition variables.h:243
@ BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX
Definition variables.h:246
@ BC_HANDLER_WALL_NOSLIP
Definition variables.h:232
@ BC_HANDLER_OUTLET_CONSERVATION
Definition variables.h:241
Here is the caller graph for this function:

◆ ValidateBCHandlerForBCType()

PetscErrorCode ValidateBCHandlerForBCType ( BCType  type,
BCHandlerType  handler 
)

Validates that a specific handler is compatible with a general BC type.

Parameters
typeThe general BCType.
handlerThe specific BCHandlerType.
Returns
0 if compatible, error code otherwise.

Definition at line 333 of file io.c.

333 {
334 switch (type) {
335 case OUTLET:
336 if(handler != BC_HANDLER_OUTLET_CONSERVATION) return PETSC_ERR_ARG_WRONG;
337 break;
338 case WALL:
339 if (handler != BC_HANDLER_WALL_NOSLIP && handler != BC_HANDLER_WALL_MOVING) return PETSC_ERR_ARG_WRONG;
340 break;
341 case INLET:
342 if (handler != BC_HANDLER_INLET_CONSTANT_VELOCITY && handler != BC_HANDLER_INLET_PARABOLIC) return PETSC_ERR_ARG_WRONG;
343 break;
344 case PERIODIC:
345 if(handler != BC_HANDLER_PERIODIC_GEOMETRIC && handler != BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX && !handler != BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX) return PETSC_ERR_ARG_WRONG;
346 // ... add other validation cases here ...
347 default: break;
348 }
349 return 0; // Combination is valid
350}
@ BC_HANDLER_WALL_MOVING
Definition variables.h:233
Here is the caller graph for this function:

◆ FreeBC_ParamList()

void FreeBC_ParamList ( BC_Param head)

Frees the memory allocated for a linked list of BC_Param structs.

Parameters
headA pointer to the head of the linked list to be freed.

Definition at line 263 of file io.c.

263 {
264 BC_Param *current = head;
265 while (current != NULL) {
266 BC_Param *next = current->next;
267 PetscFree(current->key);
268 PetscFree(current->value);
269 PetscFree(current);
270 current = next;
271 }
272}
struct BC_Param_s * next
Definition variables.h:266
char * key
Definition variables.h:264
char * value
Definition variables.h:265
A node in a linked list for storing key-value parameters from the bcs.dat file.
Definition variables.h:263
Here is the caller graph for this function:

◆ GetBCParamReal()

PetscErrorCode GetBCParamReal ( BC_Param params,
const char *  key,
PetscReal *  value_out,
PetscBool *  found 
)

Searches a BC_Param linked list for a key and returns its value as a double.

Parameters
paramsThe head of the BC_Param linked list.
keyThe key to search for (case-insensitive).
[out]value_outThe found value, converted to a PetscReal.
[out]foundSet to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
Returns
0 on success.

Definition at line 360 of file io.c.

360 {
361 *found = PETSC_FALSE;
362 *value_out = 0.0;
363 if (!key) return 0; // No key to search for
364
365 BC_Param *current = params;
366 while (current) {
367 if (strcasecmp(current->key, key) == 0) {
368 *value_out = atof(current->value);
369 *found = PETSC_TRUE;
370 return 0; // Found it, we're done
371 }
372 current = current->next;
373 }
374 return 0; // It's not an error to not find the key.
375}
Here is the caller graph for this function:

◆ GetBCParamBool()

PetscErrorCode GetBCParamBool ( BC_Param params,
const char *  key,
PetscBool *  value_out,
PetscBool *  found 
)

Searches a BC_Param linked list for a key and returns its value as a bool.

Parameters
paramsThe head of the BC_Param linked list.
keyThe key to search for (case-insensitive).
[out]value_outThe found value, converted to a PetscBool.
[out]foundSet to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
Returns
0 on success.

Definition at line 385 of file io.c.

385 {
386 *found = PETSC_FALSE;
387 *value_out = PETSC_FALSE;
388 if (!key) return 0; // No key to search for
389
390 BC_Param *current = params;
391 while (current) {
392 if (strcasecmp(current->key, key) == 0) {
393 // Key was found.
394 *found = PETSC_TRUE;
395
396 // Check the value string. Default to FALSE if the value is NULL or doesn't match a "true" string.
397 if (current->value &&
398 (strcasecmp(current->value, "true") == 0 ||
399 strcmp(current->value, "1") == 0 ||
400 strcasecmp(current->value, "yes") == 0))
401 {
402 *value_out = PETSC_TRUE;
403 } else {
404 *value_out = PETSC_FALSE;
405 }
406 return 0; // Found it, we're done
407 }
408 current = current->next;
409 }
410 return 0; // It's not an error to not find the key.
411}
Here is the caller graph for this function:

◆ ParseAllBoundaryConditions()

PetscErrorCode ParseAllBoundaryConditions ( UserCtx user,
const char *  bcs_input_filename 
)

Parses the boundary conditions file to configure the type, handler, and any associated parameters for all 6 global faces of the domain.

This function performs the following steps:

  1. On MPI rank 0, it reads the specified configuration file line-by-line.
  2. It parses each line for <Face> <Type> <Handler> [param=value]... format.
  3. It validates the parsed strings and stores the configuration, including a linked list of parameters, in a temporary array.
  4. It then serializes this configuration and broadcasts it to all other MPI ranks.
  5. All ranks (including rank 0) then deserialize the broadcasted data to populate their local user->boundary_faces array identically.
  6. It also sets legacy fields in UserCtx for compatibility with other modules.
Parameters
[in,out]userThe main UserCtx struct where the final configuration for all ranks will be stored.
[in]bcs_input_filenameThe path to the boundary conditions configuration file.
Returns
PetscErrorCode 0 on success, error code on failure.

Definition at line 439 of file io.c.

// Parses the BC configuration file on rank 0 and broadcasts the resulting
// per-face configuration (math type, handler type, key=value params) so that
// every rank ends up with an identical user->boundary_faces[6] array.
440{
441 PetscErrorCode ierr;
442 PetscMPIInt rank;
443
444 // Temporary storage for rank 0 to build the configuration before broadcasting.
445 BoundaryFaceConfig configs_rank0[6];
446
447 PetscFunctionBeginUser;
449 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
450
451 if (rank == 0) {
452 FILE *file;
453 char line_buffer[1024];
454
455 // Initialize the temporary config array with safe defaults on rank 0.
// Default is a no-slip WALL on every face; the file only needs to list
// faces that differ from this default.
456 for (int i = 0; i < 6; i++) {
457 configs_rank0[i].face_id = (BCFace)i;
458 configs_rank0[i].mathematical_type = WALL;
459 configs_rank0[i].handler_type = BC_HANDLER_WALL_NOSLIP;
460 configs_rank0[i].params = NULL;
461 configs_rank0[i].handler = NULL; // Handler object is not created here.
462 }
463
464 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing BC configuration from '%s' on rank 0... \n", bcs_input_filename);
465 file = fopen(bcs_input_filename, "r");
466 if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_input_filename);
467
// Expected line format: <Face> <Type> <Handler> [key=value]...
468 while (fgets(line_buffer, sizeof(line_buffer), file)) {
469 char *current_pos = line_buffer;
470 while (isspace((unsigned char)*current_pos)) current_pos++; // Skip leading whitespace
471 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n' || *current_pos == '\r') continue;
472
473 char *face_str = strtok(current_pos, " \t\n\r");
474 char *type_str = strtok(NULL, " \t\n\r");
475 char *handler_str = strtok(NULL, " \t\n\r");
476
477 if (!face_str || !type_str || !handler_str) {
// NOTE: line_buffer has already been mutated by strtok here, so this log
// prints only the first token of the malformed line, not the whole line.
478 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed line in bcs.dat, skipping: %s \n", line_buffer);
479 continue;
480 }
481
482 BCFace face_enum;
483 BCType type_enum;
484 BCHandlerType handler_enum;
485 const char* handler_name_for_log;
486
487 // --- Convert strings to enums and validate ---
488 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
489 ierr = StringToBCType(type_str, &type_enum); CHKERRQ(ierr);
490 ierr = StringToBCHandlerType(handler_str, &handler_enum); CHKERRQ(ierr);
491 ierr = ValidateBCHandlerForBCType(type_enum, handler_enum);
492 if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Validation failed: Handler '%s' is not valid for Type '%s' on Face '%s'.\n", handler_str, type_str, face_str);
493
494 // Store the core types for the corresponding face
495 configs_rank0[face_enum].mathematical_type = type_enum;
496 configs_rank0[face_enum].handler_type = handler_enum;
497 handler_name_for_log = BCHandlerTypeToString(handler_enum); // Assumes this utility exists
498 LOG_ALLOW(GLOBAL, LOG_DEBUG, " Parsed Face '%s': Type=%s, Handler=%s \n", face_str, type_str, handler_name_for_log);
499
500 // --- Parse optional key=value parameters for this face ---
501 FreeBC_ParamList(configs_rank0[face_enum].params); // Clear any previous (default) params
502 configs_rank0[face_enum].params = NULL;
503 BC_Param **param_next_ptr = &configs_rank0[face_enum].params; // Pointer to the 'next' pointer to build the list
504
505 char* token;
506 while ((token = strtok(NULL, " \t\n\r")) != NULL) {
507 char* equals_ptr = strchr(token, '=');
508 if (!equals_ptr) {
509 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed parameter '%s' on face '%s', skipping. \n", token, face_str);
510 continue;
511 }
512
513 *equals_ptr = '\0'; // Temporarily split the string at '=' to separate key and value
514 char* key_str = token;
515 char* value_str = equals_ptr + 1;
516
517 BC_Param *new_param;
518 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
519 ierr = PetscStrallocpy(key_str, &new_param->key); CHKERRQ(ierr);
520 ierr = PetscStrallocpy(value_str, &new_param->value); CHKERRQ(ierr);
521 new_param->next = NULL;
522
523 *param_next_ptr = new_param;
524 param_next_ptr = &new_param->next;
525 LOG_ALLOW(GLOBAL, LOG_TRACE, " - Found param: [%s] = [%s] \n", new_param->key, new_param->value);
526 }
527 }
528 fclose(file);
529 }
530
531 // =========================================================================
532 // BROADCASTING THE CONFIGURATION FROM RANK 0
533 // =========================================================================
534 // This is a critical step to ensure all processes have the same configuration.
535
536 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d broadcasting/receiving BC configuration.\n", rank);
537
538 for (int i = 0; i < 6; i++) {
539 // --- Broadcast simple enums ---
540 if (rank == 0) {
// NOTE(review): this is a shallow struct copy — rank 0's
// user->boundary_faces[i].params now aliases the list built in
// configs_rank0[i], which is why no separate free of configs_rank0
// is performed at the end of this function.
541 user->boundary_faces[i] = configs_rank0[i]; // Rank 0 populates its final struct
542 }
// NOTE(review): the enum fields are broadcast as MPI_INT — this assumes
// the enum types are int-sized on every target compiler; confirm.
543 ierr = MPI_Bcast(&user->boundary_faces[i].mathematical_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
544 ierr = MPI_Bcast(&user->boundary_faces[i].handler_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
545
546 // --- Serialize and Broadcast the parameter linked list ---
547 PetscInt n_params = 0;
548 if (rank == 0) { // On rank 0, count the number of parameters to send
549 for (BC_Param *p = user->boundary_faces[i].params; p; p = p->next) n_params++;
550 }
// NOTE(review): n_params is a PetscInt but is broadcast as MPI_INT —
// mismatched sizes if PETSc is built with 64-bit indices; MPIU_INT
// (as used in DeterminePeriodicity) would be the safe datatype. Verify.
551 ierr = MPI_Bcast(&n_params, 1, MPI_INT, 0, PETSC_COMM_WORLD);CHKERRQ(ierr);
552
553 if (rank != 0) { // Non-root ranks need to receive and build the list
554 FreeBC_ParamList(user->boundary_faces[i].params); // Ensure list is empty before building
555 user->boundary_faces[i].params = NULL;
556 }
557
558 BC_Param **param_next_ptr = &user->boundary_faces[i].params;
559
560 for (int j = 0; j < n_params; j++) {
// Buffers are zero-initialized, so the strncpy below always leaves a
// terminating NUL at index 255 even for max-length strings.
561 char key_buf[256] = {0}, val_buf[256] = {0};
562 if (rank == 0) {
563 // On rank 0, navigate to the j-th param and copy its data to buffers
564 BC_Param *p = user->boundary_faces[i].params;
565 for (int k = 0; k < j; k++) p = p->next;
566 strncpy(key_buf, p->key, 255);
567 strncpy(val_buf, p->value, 255);
568 }
569
570 ierr = MPI_Bcast(key_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
571 ierr = MPI_Bcast(val_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
572
573 if (rank != 0) {
574 // On non-root ranks, deserialize: create a new node and append it
575 BC_Param *new_param;
576 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
577 ierr = PetscStrallocpy(key_buf, &new_param->key); CHKERRQ(ierr);
578 ierr = PetscStrallocpy(val_buf, &new_param->value); CHKERRQ(ierr);
579 new_param->next = NULL;
580 *param_next_ptr = new_param;
581 param_next_ptr = &new_param->next;
582 } else {
583 // On rank 0, just advance the pointer for the next iteration
584 param_next_ptr = &((*param_next_ptr)->next);
585 }
586 }
587 user->boundary_faces[i].face_id = (BCFace)i; // Ensure face_id is set on all ranks
588 }
589
590 // --- Set legacy fields for compatibility with particle system ---
// The particle subsystem only supports one inlet; record the FIRST face
// whose mathematical type is INLET.
591 user->inletFaceDefined = PETSC_FALSE;
592 for (int i=0; i<6; i++) {
593
594 if (user->boundary_faces[i].mathematical_type == INLET && !user->inletFaceDefined) {
595 user->inletFaceDefined = PETSC_TRUE;
596 user->identifiedInletBCFace = (BCFace)i;
597 LOG_ALLOW(GLOBAL, LOG_INFO, "Inlet face for particle initialization identified as Face %d.\n", i);
598 break; // Found the first one, stop looking
599 }
600 }
601
602
603 if (rank == 0) {
604 // Rank 0 can now free the linked lists it created for the temporary storage.
605 // As written, user->boundary_faces was populated directly on rank 0, so no extra free is needed.
606 // for(int i=0; i<6; i++) FreeBC_ParamList(configs_rank0[i].params); // This would be needed if we used configs_rank0 exclusively
607 }
608
610 PetscFunctionReturn(0);
611}
PetscErrorCode StringToBCHandlerType(const char *str, BCHandlerType *handler_out)
Converts a string representation of a handler to a BCHandlerType enum.
Definition io.c:314
PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler)
Validates that a specific handler is compatible with a general BC type.
Definition io.c:333
PetscErrorCode StringToBCFace(const char *str, BCFace *face_out)
Converts a string representation of a face to a BCFace enum.
Definition io.c:280
void FreeBC_ParamList(BC_Param *head)
Frees the memory allocated for a linked list of BC_Param structs.
Definition io.c:263
PetscErrorCode StringToBCType(const char *str, BCType *type_out)
Converts a string representation of a BC type to a BCType enum.
Definition io.c:297
BCType
Defines the general mathematical/physical Category of a boundary.
Definition variables.h:210
BCHandlerType
Defines the specific computational "strategy" for a boundary handler.
Definition variables.h:230
BoundaryCondition * handler
Definition variables.h:298
Holds the complete configuration for one of the six boundary faces.
Definition variables.h:293
Here is the call graph for this function:
Here is the caller graph for this function:

◆ DeterminePeriodicity()

PetscErrorCode DeterminePeriodicity ( SimCtx simCtx)

Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.

This is a lightweight pre-parser intended to be called before DMDA creation. It ensures that the periodicity setting is consistent across all blocks, which is a physical requirement for the domain.

  1. It collectively verifies that the mandatory BCS file for each block exists.
  2. On MPI rank 0, it then iterates through the files.
  3. For each line, it attempts to convert the type string to a BCType enum using the standard StringToBCType helper.
  4. If the conversion is successful AND the type is PERIODIC, it flags the corresponding face.
  5. If the conversion fails (e.g., for "WALL", "INLET", etc.), the error is cleared and the line is simply ignored, as it's not relevant to periodicity.
  6. It validates consistency (e.g., -Xi and +Xi match) and ensures all block files specify the same global periodicity.
  7. It broadcasts the final three flags (as integers 0 or 1) to all MPI ranks.
  8. All ranks update the i_periodic, j_periodic, and k_periodic fields in their SimCtx.
Parameters
[in,out]simCtxThe master SimCtx struct, containing the bcs_files list and where the final periodicity flags will be stored.
Returns
PetscErrorCode 0 on success, error code on failure.

Scans all block-specific boundary condition files to determine a globally consistent periodicity for each dimension, reusing the core type parser.

This is a lightweight pre-parser intended to be called before DMDA creation. It ensures that the periodicity setting is consistent across all blocks, which is a physical requirement for the domain.

  1. It collectively verifies that the mandatory BCS file for each block exists.
  2. On MPI rank 0, it then iterates through the files.
  3. It parses each line to extract the first (face) and second (type) tokens, mimicking the main BC parser's tokenization logic.
  4. It performs a direct, case-insensitive string comparison on the "type" token to check if it is "PERIODIC". Other types are silently ignored.
  5. It validates consistency (e.g., -Xi and +Xi match) and ensures all block files specify the same global periodicity.
  6. It broadcasts the final three flags (as integers 0 or 1) to all MPI ranks.
  7. All ranks update the i_periodic, j_periodic, and k_periodic fields in their SimCtx.
Parameters
[in,out]simCtxThe master SimCtx struct, containing the bcs_files list and where the final periodicity flags will be stored.
Returns
PetscErrorCode 0 on success, error code on failure.

Definition at line 645 of file io.c.

646{
647 PetscErrorCode ierr;
648 PetscMPIInt rank;
649 PetscInt periodic_flags[3] = {0, 0, 0}; // Index 0:I, 1:J, 2:K
650
651 PetscFunctionBeginUser;
652 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
653
654 // --- Part 1: Collectively verify all BCS files exist before proceeding ---
655 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
656 const char *bcs_filename = simCtx->bcs_files[bi];
657 if (!bcs_filename) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "BCS filename for block %d is not set in SimCtx.", bi);
658 char desc_buf[256];
659 PetscBool file_exists;
660 snprintf(desc_buf, sizeof(desc_buf), "BCS file for block %d", bi);
661 ierr = VerifyPathExistence(bcs_filename, PETSC_FALSE, PETSC_FALSE, desc_buf, &file_exists); CHKERRQ(ierr);
662 }
663
664 // --- Part 2: Rank 0 does the parsing, since we know all files exist ---
665 if (rank == 0) {
666 PetscBool global_is_periodic[3] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};
667 PetscBool is_set = PETSC_FALSE;
668
669 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
670 const char *bcs_filename = simCtx->bcs_files[bi];
671 FILE *file = fopen(bcs_filename, "r");
672
673 PetscBool face_is_periodic[6] = {PETSC_FALSE};
674 char line_buffer[1024];
675
676 while (fgets(line_buffer, sizeof(line_buffer), file)) {
677 char *current_pos = line_buffer;
678 while (isspace((unsigned char)*current_pos)) current_pos++;
679 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n') continue;
680
681 // --- Tokenize the line exactly like the main parser ---
682 char *face_str = strtok(current_pos, " \t\n\r");
683 char *type_str = strtok(NULL, " \t\n\r");
684
685 // If the line doesn't have at least two tokens, we can't determine the type.
686 if (!face_str || !type_str) continue;
687
688 // --- Perform a direct, non-erroring check on the mathematical type string ---
689 if (strcasecmp(type_str, "PERIODIC") == 0) {
690 BCFace face_enum;
691 // A malformed face string on a periodic line IS a fatal error.
692 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
693 face_is_periodic[face_enum] = PETSC_TRUE;
694 }
695 // Any other type_str (e.g., "WALL", "INLET") is correctly and silently ignored.
696 }
697 fclose(file);
698
699 // --- Validate consistency within this file ---
700 if (face_is_periodic[BC_FACE_NEG_X] != face_is_periodic[BC_FACE_POS_X])
701 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent X-periodicity in file '%s' for block %d. Both -Xi and +Xi must be periodic or neither.", bcs_filename, bi);
702 if (face_is_periodic[BC_FACE_NEG_Y] != face_is_periodic[BC_FACE_POS_Y])
703 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Y-periodicity in file '%s' for block %d. Both -Eta and +Eta must be periodic or neither.", bcs_filename, bi);
704 if (face_is_periodic[BC_FACE_NEG_Z] != face_is_periodic[BC_FACE_POS_Z])
705 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Z-periodicity in file '%s' for block %d. Both -Zeta and +Zeta must be periodic or neither.", bcs_filename, bi);
706
707 PetscBool local_is_periodic[3] = {face_is_periodic[BC_FACE_NEG_X], face_is_periodic[BC_FACE_NEG_Y], face_is_periodic[BC_FACE_NEG_Z]};
708
709 // --- Validate consistency across block files ---
710 if (!is_set) {
711 global_is_periodic[0] = local_is_periodic[0];
712 global_is_periodic[1] = local_is_periodic[1];
713 global_is_periodic[2] = local_is_periodic[2];
714 is_set = PETSC_TRUE;
715 } else {
716 if (global_is_periodic[0] != local_is_periodic[0] || global_is_periodic[1] != local_is_periodic[1] || global_is_periodic[2] != local_is_periodic[2]) {
717 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
718 "Periodicity mismatch between blocks. Block 0 requires (I:%d, J:%d, K:%d), but block %d (file '%s') has (I:%d, J:%d, K:%d).",
719 (int)global_is_periodic[0], (int)global_is_periodic[1], (int)global_is_periodic[2],
720 bi, bcs_filename,
721 (int)local_is_periodic[0], (int)local_is_periodic[1], (int)local_is_periodic[2]);
722 }
723 }
724 } // end loop over blocks
725
726 periodic_flags[0] = (global_is_periodic[0]) ? 1 : 0;
727 periodic_flags[1] = (global_is_periodic[1]) ? 1 : 0;
728 periodic_flags[2] = (global_is_periodic[2]) ? 1 : 0;
729
730 LOG_ALLOW(GLOBAL, LOG_INFO, "Global periodicity determined: I-periodic=%d, J-periodic=%d, K-periodic=%d\n",
731 periodic_flags[0], periodic_flags[1], periodic_flags[2]);
732 }
733
734 // --- Part 3: Broadcast the final flags from rank 0 to all other ranks ---
735 ierr = MPI_Bcast(periodic_flags, 3, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
736
737 // --- All ranks now update their SimCtx ---
738 simCtx->i_periodic = periodic_flags[0];
739 simCtx->j_periodic = periodic_flags[1];
740 simCtx->k_periodic = periodic_flags[2];
741
742 PetscFunctionReturn(0);
743}
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
A parallel-safe helper to verify the existence of a generic file or directory path.
Definition io.c:762
char ** bcs_files
Definition variables.h:657
PetscInt k_periodic
Definition variables.h:650
PetscInt i_periodic
Definition variables.h:650
PetscInt j_periodic
Definition variables.h:650
Here is the call graph for this function:
Here is the caller graph for this function:

◆ TrimWhitespace()

void TrimWhitespace ( char *  str)

Helper function to trim leading/trailing whitespace from a string.

Parameters
str — The string to trim in-place.

Helper function to trim leading/trailing whitespace from a string.

Parameters
str — The string to be trimmed.

Definition at line 36 of file io.c.

/**
 * @brief Trims leading and trailing whitespace from a string in-place.
 *
 * @param str The string to trim. A NULL pointer is tolerated (no-op).
 *
 * Fix: the original computed `str + strlen(str) - 1` unconditionally, which
 * for a zero-length string forms a pointer one element *before* the array —
 * undefined behavior per the C standard. The rewrite handles the empty /
 * all-whitespace case before computing the tail pointer.
 */
void TrimWhitespace(char *str)
{
    if (!str) return;

    /* Skip leading whitespace. Cast to unsigned char: passing a negative
       plain char to isspace() is undefined behavior. */
    char *start = str;
    while (isspace((unsigned char)*start)) {
        start++;
    }

    size_t len = strlen(start);
    if (len == 0) {
        /* Input was empty or entirely whitespace. */
        str[0] = '\0';
        return;
    }

    /* Walk back from the last character over trailing whitespace. */
    char *end = start + len - 1;
    while (end > start && isspace((unsigned char)*end)) {
        end--;
    }

    /* Null-terminate after the last non-whitespace character. */
    end[1] = '\0';

    /* If there was leading whitespace, shift the trimmed content (plus its
       terminator, hence +2) to the front of the buffer. */
    if (start != str) {
        memmove(str, start, (size_t)(end - start) + 2);
    }
}
Here is the caller graph for this function:

◆ ParsePostProcessingSettings()

PetscErrorCode ParsePostProcessingSettings ( SimCtx simCtx)

Initializes post-processing settings from a config file and command-line overrides.

This function establishes the configuration for a post-processing run by:

  1. Setting hardcoded default values in the PostProcessParams struct.
  2. Reading a configuration file to override the defaults.
  3. Parsing command-line options (-startTime, -endTime, etc.) which can override both the defaults and the file settings.
Parameters
simCtx — The pointer to the simulation context that contains the postprocessing file and struct.
Returns
PetscErrorCode

This function establishes the configuration for a post-processing run by:

  1. Setting hardcoded default values in the PostProcessParams struct.
  2. Reading a configuration file to override the defaults.
  3. Parsing command-line options (-startTime, -endTime, etc.) which can override both the defaults and the file settings.
Parameters
configFile — The path to the configuration file to parse.
pps — Pointer to the PostProcessParams struct to be populated.
Returns
PetscErrorCode

Definition at line 2458 of file io.c.

2459{
2460 FILE *file;
2461 char line[1024];
2462 PetscBool startTimeSet, endTimeSet, timeStepSet;
2463
2464 PetscFunctionBeginUser;
2466
2467 if (!simCtx || !simCtx->pps) {
2468 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "SimCtx or its pps member is NULL in ParsePostProcessingSettings.");
2469 }
2470
2471 char *configFile = simCtx->PostprocessingControlFile;
2472 PostProcessParams *pps = simCtx->pps;
2473
2474
2475 // --- 1. Set Sane Defaults First ---
2476 pps->startTime = 0;
2477 pps->endTime = 0;
2478 pps->timeStep = 1;
2479 pps->outputParticles = PETSC_FALSE;
2480 pps->particle_output_freq = simCtx->LoggingFrequency; // Default to logging frequency;
2481 strcpy(pps->process_pipeline, "");
2482 strcpy(pps->output_fields_instantaneous, "Ucat,P");
2483 strcpy(pps->output_fields_averaged, "");
2484 strcpy(pps->output_prefix, "Field");
2485 strcpy(pps->particle_output_prefix,"Particle");
2486 strcpy(pps->particle_fields,"velocity,CellID,weight,pid");
2487 strcpy(pps->particle_pipeline,"");
2488 strcpy(pps->particleExt,"dat"); // The input file format for particles.
2489 strcpy(pps->eulerianExt,"dat"); // The input file format for Eulerian fields.
2490 pps->reference[0] = pps->reference[1] = pps->reference[2] = 1;
2491 strncpy(pps->source_dir, simCtx->output_dir, sizeof(pps->source_dir) - 1);
2492 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0'; // Ensure null-termination
2493
2494 // --- 2. Parse the Configuration File (overrides defaults) ---
2495 file = fopen(configFile, "r");
2496 if (file) {
2497 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing post-processing config file: %s\n", configFile);
2498 while (fgets(line, sizeof(line), file)) {
2499 char *key, *value, *comment;
2500 comment = strchr(line, '#'); if (comment) *comment = '\0';
2501 TrimWhitespace(line); if (strlen(line) == 0) continue;
2502 key = strtok(line, "="); value = strtok(NULL, "=");
2503 if (key && value) {
2504 TrimWhitespace(key); TrimWhitespace(value);
2505 if (strcmp(key, "startTime") == 0) pps->startTime = atoi(value);
2506 else if (strcmp(key, "endTime") == 0) pps->endTime = atoi(value);
2507 else if (strcmp(key, "timeStep") == 0) pps->timeStep = atoi(value);
2508 else if (strcmp(key, "output_particles") == 0) {
2509 if (strcasecmp(value, "true") == 0) pps->outputParticles = PETSC_TRUE;
2510 }
2511 else if (strcasecmp(key, "process_pipeline") == 0) {
2512 strncpy(pps->process_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2513 pps->process_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0'; // Ensure null-termination
2514 } else if (strcasecmp(key, "output_fields_instantaneous") == 0) {
2515 strncpy(pps->output_fields_instantaneous, value, MAX_FIELD_LIST_LENGTH - 1);
2517 } else if (strcasecmp(key, "output_prefix") == 0) {
2518 strncpy(pps->output_prefix, value, MAX_FILENAME_LENGTH - 1);
2519 pps->output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
2520 } else if (strcasecmp(key, "particle_output_prefix") == 0) {
2521 strncpy(pps->particle_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2523 } else if (strcasecmp(key, "particle_fields_instantaneous") == 0) {
2524 strncpy(pps->particle_fields, value, MAX_FIELD_LIST_LENGTH - 1);
2525 pps->particle_fields[MAX_FIELD_LIST_LENGTH - 1] = '\0';
2526 } else if (strcasecmp(key, "particle_pipeline") == 0) {
2527 strncpy(pps->particle_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2528 pps->particle_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0';
2529 } else if (strcasecmp(key, "particle_output_freq") == 0) {
2530 pps->particle_output_freq = atoi(value);
2531 } else if (strcmp(key, "reference_ip") == 0) {pps->reference[0] = atoi(value);
2532 } else if (strcmp(key, "reference_jp") == 0) {pps->reference[1] = atoi(value);
2533 } else if (strcmp(key, "reference_kp") == 0) {pps->reference[2] = atoi(value);
2534 } else if (strcasecmp(key, "source_directory") == 0) {
2535 strncpy(pps->source_dir, value, sizeof(pps->source_dir) - 1);
2536 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0';
2537 } else {
2538 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown key '%s' in post-processing config file. Ignoring.\n", key);
2539 }
2540 // Add parsing for pipeline, fields, etc. in later phases
2541 }
2542 }
2543 fclose(file);
2544 } else {
2545 LOG_ALLOW(GLOBAL, LOG_WARNING, "Could not open post-processing config file '%s'. Using defaults and command-line overrides.\n", configFile);
2546 }
2547
2548 // --- 3. Parse Command-Line Options (overrides file settings and defaults) ---
2549 PetscOptionsGetInt(NULL, NULL, "-startTime", &pps->startTime, &startTimeSet);
2550 PetscOptionsGetInt(NULL, NULL, "-endTime", &pps->endTime, &endTimeSet);
2551 PetscOptionsGetInt(NULL, NULL, "-timeStep", &pps->timeStep, &timeStepSet);
2552 PetscOptionsGetBool(NULL, NULL, "-output_particles", &pps->outputParticles, NULL);
2553
2554 if(pps->endTime==-1){
2555 pps->endTime = simCtx->StartStep + simCtx->StepsToRun; // Total steps if endTime is set to -1.
2556 }
2557
2558 // If only startTime is given on command line, run for a single step
2559 if (startTimeSet && !endTimeSet) {
2560 pps->endTime = pps->startTime;
2561 }
2562
2563 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-processing configured to run from t=%d to t=%d with step %d. Particle output: %s.\n",
2564 pps->startTime, pps->endTime, pps->timeStep, pps->outputParticles ? "TRUE" : "FALSE");
2565
2566 LOG_ALLOW(GLOBAL, LOG_INFO, "Process Pipeline: %s\n", pps->process_pipeline);
2567 LOG_ALLOW(GLOBAL, LOG_INFO, "Instantaneous Output Fields: %s\n", pps->output_fields_instantaneous);
2568 LOG_ALLOW(GLOBAL, LOG_INFO, "Output Prefix: %s\n", pps->output_prefix);
2569 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Prefix: %s\n", pps->particle_output_prefix);
2570 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Fields: %s\n", pps->particle_fields);
2571 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Pipeline: %s\n", pps->particle_pipeline);
2572 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Frequency: %d\n", pps->particle_output_freq);
2573
2575 PetscFunctionReturn(0);
2576}
void TrimWhitespace(char *str)
Trims leading and trailing whitespace from a string in-place.
Definition io.c:36
char particle_output_prefix[256]
Definition variables.h:516
char output_prefix[256]
Definition variables.h:513
#define MAX_FIELD_LIST_LENGTH
Definition variables.h:490
char output_fields_averaged[1024]
Definition variables.h:512
#define MAX_PIPELINE_LENGTH
Definition variables.h:489
PetscInt reference[3]
Definition variables.h:524
PetscInt timeStep
Definition variables.h:506
#define MAX_FILENAME_LENGTH
Definition variables.h:491
char output_fields_instantaneous[1024]
Definition variables.h:511
char eulerianExt[8]
Definition variables.h:520
char particle_pipeline[1024]
Definition variables.h:514
char process_pipeline[1024]
Definition variables.h:510
PetscInt particle_output_freq
Definition variables.h:517
char particle_fields[1024]
Definition variables.h:515
PetscBool outputParticles
Definition variables.h:507
char particleExt[8]
Definition variables.h:521
PetscInt startTime
Definition variables.h:504
char PostprocessingControlFile[PETSC_MAX_PATH_LEN]
Definition variables.h:714
PetscInt LoggingFrequency
Definition variables.h:703
Holds all configuration parameters for a post-processing run.
Definition variables.h:499
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ParseScalingInformation()

PetscErrorCode ParseScalingInformation ( SimCtx simCtx)

Parses physical scaling parameters from command-line options.

This function reads the reference length, velocity, and density from the PETSc options database (provided via -scaling_L_ref, etc.). It populates the simCtx->scaling struct and calculates the derived reference pressure. It sets default values of 1.0 for a fully non-dimensional case if the options are not provided.

Parameters
[in,out] simCtx — The simulation context whose 'scaling' member will be populated.
Returns
PetscErrorCode

Definition at line 2593 of file io.c.

2594{
2595 PetscErrorCode ierr;
2596 PetscBool flg;
2597
2598 PetscFunctionBeginUser;
2600
2601 if (!simCtx) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "SimCtx is NULL in ParseScalingInformation");
2602
2603 // --- 1. Set default values to 1.0 ---
2604 // This represents a purely non-dimensional run if no scaling is provided.
2605 simCtx->scaling.L_ref = 1.0;
2606 simCtx->scaling.U_ref = 1.0;
2607 simCtx->scaling.rho_ref = 1.0;
2608
2609 // --- 2. Read overrides from the command line / control file ---
2610 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_L_ref", &simCtx->scaling.L_ref, &flg); CHKERRQ(ierr);
2611 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_U_ref", &simCtx->scaling.U_ref, &flg); CHKERRQ(ierr);
2612 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_rho_ref", &simCtx->scaling.rho_ref, &flg); CHKERRQ(ierr);
2613
2614 // --- 3. Calculate derived scaling factors ---
2615 // Check for division by zero to be safe, though U_ref should be positive.
2616 if (simCtx->scaling.U_ref <= 0.0) {
2617 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Reference velocity U_ref must be positive. Got %g", (double)simCtx->scaling.U_ref);
2618 }
2619 simCtx->scaling.P_ref = simCtx->scaling.rho_ref * simCtx->scaling.U_ref * simCtx->scaling.U_ref;
2620
2621 // --- 4. Log the final, effective scales for verification ---
2622 LOG(GLOBAL, LOG_INFO, "---------------- Physical Scales Initialized -----------------\n");
2623 LOG(GLOBAL, LOG_INFO, " L_ref: %.4f, U_ref: %.4f, rho_ref: %.4f, P_ref: %.4f\n",
2624 simCtx->scaling.L_ref, simCtx->scaling.U_ref, simCtx->scaling.rho_ref, simCtx->scaling.P_ref);
2625 LOG(GLOBAL, LOG_INFO, "--------------------------------------------------------------\n");
2626
2628 PetscFunctionReturn(0);
2629}
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:84
PetscReal L_ref
Definition variables.h:567
ScalingCtx scaling
Definition variables.h:644
PetscReal P_ref
Definition variables.h:570
PetscReal rho_ref
Definition variables.h:569
PetscReal U_ref
Definition variables.h:568
Here is the caller graph for this function: