PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
io.c
1/**
2 * @file io.c
3 * @brief Implementation of data input/output routines, focusing on grid configuration.
4 *
5 * This module provides functions to parse grid geometry information, either from
6 * command-line options for programmatically generated grids or by reading the
7 * header of a grid definition file.
8 */
9
10#include "io.h"
11
12// =============================================================================
13// STATIC (PRIVATE) VARIABLES FOR ONE-TIME FILE READ
14// =============================================================================
15
16/** @brief Stores the number of blocks read from the grid file. */
17static PetscInt g_nblk_from_file = 0;
18/** @brief Caches the IM dimensions for all blocks read from the grid file. */
19static PetscInt* g_IMs_from_file = NULL;
20/** @brief Caches the JM dimensions for all blocks read from the grid file. */
21static PetscInt* g_JMs_from_file = NULL;
22/** @brief Caches the KM dimensions for all blocks read from the grid file. */
23static PetscInt* g_KMs_from_file = NULL;
24/** @brief A flag to ensure the grid file is read only once. */
25static PetscBool g_file_has_been_read = PETSC_FALSE;
26
27
28// =============================================================================
29// PUBLIC FUNCTION IMPLEMENTATIONS
30// =============================================================================
31
32#undef __FUNCT__
33#define __FUNCT__ "ReadGridGenerationInputs"
34/**
35 * @brief Parses command-line options for a programmatically generated grid for a SINGLE block.
36 *
37 * This function is responsible for reading all per-block array options related
38 * to grid geometry, such as dimensions (`-im`), domain bounds (`-xMins`, `-xMaxs`),
39 * and stretching ratios (`-rxs`). It reads the entire array for each option, then
40 * uses the block index stored in `user->_this` to select the correct value and
41 * populate the fields of the provided `UserCtx` struct.
42 *
43 * @param user Pointer to the `UserCtx` for a specific block. The function will
44 * populate the geometric fields (`IM`, `Min_X`, `rx`, etc.) within this struct.
45 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
46 */
47PetscErrorCode ReadGridGenerationInputs(UserCtx *user)
48{
49 PetscErrorCode ierr;
50 SimCtx *simCtx = user->simCtx;
51 PetscInt nblk = simCtx->block_number;
52 PetscInt block_index = user->_this;
53 PetscBool found;
54
55 // Temporary arrays to hold the parsed values for ALL blocks
56 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL, *cgrids = NULL;
57 PetscReal *xMins = NULL, *xMaxs = NULL, *rxs = NULL;
58 PetscReal *yMins = NULL, *yMaxs = NULL, *rys = NULL;
59 PetscReal *zMins = NULL, *zMaxs = NULL, *rzs = NULL;
60
61 PetscFunctionBeginUser;
62
63 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Reading generated grid inputs for block %d.\n", simCtx->rank, block_index);
64
65 if (block_index >= nblk) {
66 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block index %d is out of range for nblk=%d", block_index, nblk);
67 }
68
69 // --- Allocate temporary storage for all array options ---
70 ierr = PetscMalloc4(nblk, &IMs, nblk, &JMs, nblk, &KMs, nblk, &cgrids); CHKERRQ(ierr);
71 ierr = PetscMalloc6(nblk, &xMins, nblk, &xMaxs, nblk, &rxs, nblk, &yMins, nblk, &yMaxs, nblk, &rys); CHKERRQ(ierr);
72 ierr = PetscMalloc3(nblk, &zMins, nblk, &zMaxs, nblk, &rzs); CHKERRQ(ierr);
73
74 // --- Set default values for the temporary arrays ---
75 for (PetscInt i = 0; i < nblk; ++i) {
76 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10; cgrids[i] = 0;
77 xMins[i] = 0.0; xMaxs[i] = 1.0; rxs[i] = 1.0;
78 yMins[i] = 0.0; yMaxs[i] = 1.0; rys[i] = 1.0;
79 zMins[i] = 0.0; zMaxs[i] = 1.0; rzs[i] = 1.0;
80 }
81
82 // --- Parse the array options from the command line / control file ---
83 PetscInt count;
84 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
85 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
86 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
87 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMins", xMins, &count, &found); CHKERRQ(ierr);
88 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMaxs", xMaxs, &count, &found); CHKERRQ(ierr);
89 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rxs", rxs, &count, &found); CHKERRQ(ierr);
90 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMins", yMins, &count, &found); CHKERRQ(ierr);
91 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMaxs", yMaxs, &count, &found); CHKERRQ(ierr);
92 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rys", rys, &count, &found); CHKERRQ(ierr);
93 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMins", zMins, &count, &found); CHKERRQ(ierr);
94 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMaxs", zMaxs, &count, &found); CHKERRQ(ierr);
95 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rzs", rzs, &count, &found); CHKERRQ(ierr);
96 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-cgrids", cgrids, &count, &found); CHKERRQ(ierr);
97
98 // --- Assign the parsed values to the specific UserCtx struct passed in ---
99 user->IM = IMs[block_index];
100 user->JM = JMs[block_index];
101 user->KM = KMs[block_index];
102 user->Min_X = xMins[block_index];
103 user->Max_X = xMaxs[block_index];
104 user->rx = rxs[block_index];
105 user->Min_Y = yMins[block_index];
106 user->Max_Y = yMaxs[block_index];
107 user->ry = rys[block_index];
108 user->Min_Z = zMins[block_index];
109 user->Max_Z = zMaxs[block_index];
110 user->rz = rzs[block_index];
111 user->cgrid = cgrids[block_index];
112
113 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d grid generation inputs set: IM=%d, JM=%d, KM=%d\n",
114 simCtx->rank, block_index, user->IM, user->JM, user->KM);
115 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d bounds: X=[%.2f, %.2f], Y=[%.2f, %.2f], Z=[%.2f, %.2f]\n",
116 simCtx->rank, block_index, user->Min_X, user->Max_X, user->Min_Y, user->Max_Y, user->Min_Z, user->Max_Z);
117
118 // --- Clean up temporary storage ---
119 ierr = PetscFree4(IMs, JMs, KMs, cgrids); CHKERRQ(ierr);
120 ierr = PetscFree6(xMins, xMaxs, rxs, yMins, yMaxs, rys); CHKERRQ(ierr);
121 ierr = PetscFree3(zMins, zMaxs, rzs); CHKERRQ(ierr);
122
123 PetscFunctionReturn(0);
124}
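/*
 * Illustrative example (not from the original sources): the per-block array
 * options read above are standard PETSc comma-separated array options, so a
 * hypothetical two-block run (nblk = 2) might be configured as
 *
 *   -im 64,32 -jm 64,32 -km 128,64
 *   -xMins 0.0,1.0 -xMaxs 1.0,2.0 -rxs 1.0,1.05
 *   -yMins 0.0,0.0 -yMaxs 1.0,1.0 -rys 1.0,1.0
 *   -zMins 0.0,0.0 -zMaxs 1.0,1.0 -rzs 1.0,1.0
 *   -cgrids 0,0
 *
 * Each list carries one entry per block; the function above selects element
 * user->_this for the block whose UserCtx was passed in. All numeric values
 * here are made up purely for illustration.
 */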
125
126
127#undef __FUNCT__
128#define __FUNCT__ "ReadGridFile"
129/**
130 * @brief Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
131 *
132 * This function uses a static-variable pattern to ensure the grid file header
133 * is read only once, collectively, by all processes. The first time any process
134 * calls this function, it triggers a collective operation where rank 0 reads the
135 * file and broadcasts the dimensions for all blocks. This data is stored in
136 * static "cached" arrays.
137 *
138 * On every call (including the first), the function retrieves the dimensions for the
139 * specific block (identified by `user->_this`) from the cached arrays and populates
140 * the `IM`, `JM`, and `KM` fields of the provided `UserCtx`.
141 *
142 * @param user Pointer to the `UserCtx` for a specific block.
143 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
144 */
145PetscErrorCode ReadGridFile(UserCtx *user)
146{
147 PetscErrorCode ierr;
148 SimCtx *simCtx = user->simCtx;
149 PetscInt block_index = user->_this;
150
151 PetscFunctionBeginUser;
152
 153 // --- One-Time Read and Broadcast Logic ---
 154 if (!g_file_has_been_read) {
 155 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "First call to ReadGridFile. Reading and broadcasting grid file header from '%s'...\n", simCtx->grid_file);
156 PetscMPIInt rank = simCtx->rank;
157 PetscInt nblk = simCtx->block_number;
158
159 if (rank == 0) {
160 FILE *fd = fopen(simCtx->grid_file, "r");
161 if (!fd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open file: %s", simCtx->grid_file);
162
164 // ---- Read first token; decide whether it is the header or nblk ----
165 char firstTok[32] = {0};
166 if (fscanf(fd, "%31s", firstTok) != 1)
167 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Empty grid file: %s", simCtx->grid_file);
168
169 if (strcmp(firstTok, "PICGRID") == 0) {
170 // Header is present – read nblk from the next line
171 if (fscanf(fd, "%d", &g_nblk_from_file) != 1)
172 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected number of blocks after \"PICGRID\" in %s", simCtx->grid_file);
173 } else {
174 // No header – the token we just read is actually nblk
175 g_nblk_from_file = (PetscInt)strtol(firstTok, NULL, 10);
176 }
177 if (g_nblk_from_file != nblk) {
178 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Mismatch: -nblk is %d but grid file specifies %d blocks.", nblk, g_nblk_from_file);
179 }
180
181 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
182 for (PetscInt i = 0; i < nblk; ++i) {
183 if (fscanf(fd, "%d %d %d\n", &g_IMs_from_file[i], &g_JMs_from_file[i], &g_KMs_from_file[i]) != 3) {
184 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected 3 integers for block %d in %s", i, simCtx->grid_file);
185 }
186 }
187 fclose(fd);
188 }
189
190 // Broadcast nblk to verify (optional, good practice)
191 ierr = MPI_Bcast(&g_nblk_from_file, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
192
193 // Allocate on other ranks before receiving the broadcast
194 if (rank != 0) {
195 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
196 }
197
198 // Broadcast the data arrays
199 ierr = MPI_Bcast(g_IMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
200 ierr = MPI_Bcast(g_JMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
201 ierr = MPI_Bcast(g_KMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
202
203 g_file_has_been_read = PETSC_TRUE;
204 LOG_ALLOW(GLOBAL, LOG_INFO, "Grid file header read and broadcast complete.\n");
205 }
206
207 // --- Per-Block Assignment Logic (runs on every call) ---
208 user->IM = g_IMs_from_file[block_index];
209 user->JM = g_JMs_from_file[block_index];
210 user->KM = g_KMs_from_file[block_index];
211
212 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Set file inputs for Block %d: IM=%d, JM=%d, KM=%d\n",
213 simCtx->rank, block_index, user->IM, user->JM, user->KM);
214
215 PetscFunctionReturn(0);
216}
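/*
 * Sketch of the grid-file header format implied by the parsing above (the
 * dimensions shown are invented). The "PICGRID" keyword is optional; when it
 * is absent, the first token is interpreted as the block count:
 *
 *   PICGRID
 *   2
 *   65 65 65
 *   33 33 129
 *
 * i.e. the number of blocks followed by one "IM JM KM" line per block. Only
 * this header is consumed by the routine above.
 */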
217
218
219//================================================================================
220//
221// PRIVATE HELPER FUNCTIONS
222//
223//================================================================================
224
225/**
226 * @brief Frees the memory allocated for a linked list of BC_Param structs.
227 * @param head A pointer to the head of the linked list to be freed.
228 */
 229 void FreeBC_ParamList(BC_Param *head) {
 230 BC_Param *current = head;
231 while (current != NULL) {
232 BC_Param *next = current->next;
233 PetscFree(current->key);
234 PetscFree(current->value);
235 PetscFree(current);
236 current = next;
237 }
238}
239
240/**
241 * @brief Converts a string representation of a face to a BCFace enum.
242 * @param str The input string (e.g., "-Xi", "+Zeta"). Case-insensitive.
243 * @param[out] face_out The resulting BCFace enum.
244 * @return 0 on success.
245 */
246PetscErrorCode StringToBCFace(const char* str, BCFace* face_out) {
247 if (strcasecmp(str, "-Xi") == 0) *face_out = BC_FACE_NEG_X;
248 else if (strcasecmp(str, "+Xi") == 0) *face_out = BC_FACE_POS_X;
249 else if (strcasecmp(str, "-Eta") == 0) *face_out = BC_FACE_NEG_Y;
250 else if (strcasecmp(str, "+Eta") == 0) *face_out = BC_FACE_POS_Y;
251 else if (strcasecmp(str, "-Zeta") == 0) *face_out = BC_FACE_NEG_Z;
252 else if (strcasecmp(str, "+Zeta") == 0) *face_out = BC_FACE_POS_Z;
253 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown face specifier: %s", str);
254 return 0;
255}
256
257/**
258 * @brief Converts a string representation of a BC type to a BCType enum.
259 * @param str The input string (e.g., "WALL", "INLET"). Case-insensitive.
260 * @param[out] type_out The resulting BCType enum.
261 * @return 0 on success.
262 */
263PetscErrorCode StringToBCType(const char* str, BCType* type_out) {
264 if (strcasecmp(str, "WALL") == 0) *type_out = WALL;
265 else if (strcasecmp(str, "SYMMETRY") == 0) *type_out = SYMMETRY;
266 else if (strcasecmp(str, "INLET") == 0) *type_out = INLET;
267 else if (strcasecmp(str, "OUTLET") == 0) *type_out = OUTLET;
268 else if (strcasecmp(str, "NOGRAD") == 0) *type_out = NOGRAD;
269 // ... add other BCTypes here ...
270 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Type string: %s", str);
271 return 0;
272}
273
274/**
275 * @brief Converts a string representation of a handler to a BCHandlerType enum.
276 * @param str The input string (e.g., "noslip", "constant_velocity"). Case-insensitive.
277 * @param[out] handler_out The resulting BCHandlerType enum.
278 * @return 0 on success.
279 */
280PetscErrorCode StringToBCHandlerType(const char* str, BCHandlerType* handler_out) {
281 if (strcasecmp(str, "noslip") == 0) *handler_out = BC_HANDLER_WALL_NOSLIP;
282 else if (strcasecmp(str, "constant_velocity") == 0) *handler_out = BC_HANDLER_INLET_CONSTANT_VELOCITY;
283 else if (strcasecmp(str, "conservation") == 0) *handler_out = BC_HANDLER_OUTLET_CONSERVATION;
284 else if (strcasecmp(str, "allcopy") == 0) *handler_out = BC_HANDLER_NOGRAD_COPY_GHOST;
285 else if (strcasecmp(str, "parabolic") == 0) *handler_out = BC_HANDLER_INLET_PARABOLIC;
286 // ... add other BCHandlerTypes here ...
287 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Handler string: %s", str);
288 return 0;
289}
290
291/**
292 * @brief Validates that a specific handler is compatible with a general BC type.
293 * @param type The general BCType.
294 * @param handler The specific BCHandlerType.
295 * @return 0 if compatible, error code otherwise.
296 */
297PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler) {
298 switch (type) {
299 case NOGRAD:
300 if(handler != BC_HANDLER_NOGRAD_COPY_GHOST) return PETSC_ERR_ARG_WRONG;
301 break;
302 case WALL:
303 if (handler != BC_HANDLER_WALL_NOSLIP && handler != BC_HANDLER_WALL_MOVING) return PETSC_ERR_ARG_WRONG;
304 break;
305 case INLET:
306 if (handler != BC_HANDLER_INLET_CONSTANT_VELOCITY && handler != BC_HANDLER_INLET_PARABOLIC) return PETSC_ERR_ARG_WRONG;
307 break;
308 // ... add other validation cases here ...
309 default: break;
310 }
311 return 0; // Combination is valid
312}
313
314//================================================================================
315//
316// PUBLIC PARSING FUNCTION
317//
318//================================================================================
319
320/**
321 * @brief Parses the boundary conditions file to configure the type, handler, and
322 * any associated parameters for all 6 global faces of the domain.
323 *
324 * This function performs the following steps:
325 * 1. On MPI rank 0, it reads the specified configuration file line-by-line.
326 * 2. It parses each line for `<Face> <Type> <Handler> [param=value]...` format.
327 * 3. It validates the parsed strings and stores the configuration, including a
328 * linked list of parameters, in a temporary array.
329 * 4. It then serializes this configuration and broadcasts it to all other MPI ranks.
330 * 5. All ranks (including rank 0) then deserialize the broadcasted data to populate
331 * their local `user->boundary_faces` array identically.
332 * 6. It also sets legacy fields in UserCtx for compatibility with other modules.
333 *
334 * @param[in,out] user The main UserCtx struct where the final configuration
335 * for all ranks will be stored.
336 * @param[in] bcs_input_filename The path to the boundary conditions configuration file.
337 * @return PetscErrorCode 0 on success, error code on failure.
338 */
339PetscErrorCode ParseAllBoundaryConditions(UserCtx *user, const char *bcs_input_filename)
340{
341 PetscErrorCode ierr;
342 PetscMPIInt rank;
343
344 // Temporary storage for rank 0 to build the configuration before broadcasting.
345 BoundaryFaceConfig configs_rank0[6];
346
347 PetscFunctionBeginUser;
348 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
349
350 if (rank == 0) {
351 FILE *file;
352 char line_buffer[1024];
353
354 // Initialize the temporary config array with safe defaults on rank 0.
355 for (int i = 0; i < 6; i++) {
356 configs_rank0[i].face_id = (BCFace)i;
357 configs_rank0[i].mathematical_type = WALL;
358 configs_rank0[i].handler_type = BC_HANDLER_WALL_NOSLIP;
359 configs_rank0[i].params = NULL;
360 configs_rank0[i].handler = NULL; // Handler object is not created here.
361 }
362
363 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing BC configuration from '%s' on rank 0... \n", bcs_input_filename);
364 file = fopen(bcs_input_filename, "r");
365 if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_input_filename);
366
367 while (fgets(line_buffer, sizeof(line_buffer), file)) {
368 char *current_pos = line_buffer;
369 while (isspace((unsigned char)*current_pos)) current_pos++; // Skip leading whitespace
370 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n' || *current_pos == '\r') continue;
371
372 char *face_str = strtok(current_pos, " \t\n\r");
373 char *type_str = strtok(NULL, " \t\n\r");
374 char *handler_str = strtok(NULL, " \t\n\r");
375
376 if (!face_str || !type_str || !handler_str) {
377 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed line in bcs.dat, skipping: %s \n", line_buffer);
378 continue;
379 }
380
381 BCFace face_enum;
382 BCType type_enum;
383 BCHandlerType handler_enum;
384 const char* handler_name_for_log;
385
386 // --- Convert strings to enums and validate ---
387 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
388 ierr = StringToBCType(type_str, &type_enum); CHKERRQ(ierr);
389 ierr = StringToBCHandlerType(handler_str, &handler_enum); CHKERRQ(ierr);
390 ierr = ValidateBCHandlerForBCType(type_enum, handler_enum);
391 if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Validation failed: Handler '%s' is not valid for Type '%s' on Face '%s'.\n", handler_str, type_str, face_str);
392
393 // Store the core types for the corresponding face
394 configs_rank0[face_enum].mathematical_type = type_enum;
395 configs_rank0[face_enum].handler_type = handler_enum;
396 handler_name_for_log = BCHandlerTypeToString(handler_enum); // Assumes this utility exists
397 LOG_ALLOW(GLOBAL, LOG_DEBUG, " Parsed Face '%s': Type=%s, Handler=%s \n", face_str, type_str, handler_name_for_log);
398
399 // --- Parse optional key=value parameters for this face ---
400 FreeBC_ParamList(configs_rank0[face_enum].params); // Clear any previous (default) params
401 configs_rank0[face_enum].params = NULL;
402 BC_Param **param_next_ptr = &configs_rank0[face_enum].params; // Pointer to the 'next' pointer to build the list
403
404 char* token;
405 while ((token = strtok(NULL, " \t\n\r")) != NULL) {
406 char* equals_ptr = strchr(token, '=');
407 if (!equals_ptr) {
408 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed parameter '%s' on face '%s', skipping. \n", token, face_str);
409 continue;
410 }
411
412 *equals_ptr = '\0'; // Temporarily split the string at '=' to separate key and value
413 char* key_str = token;
414 char* value_str = equals_ptr + 1;
415
416 BC_Param *new_param;
417 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
418 ierr = PetscStrallocpy(key_str, &new_param->key); CHKERRQ(ierr);
419 ierr = PetscStrallocpy(value_str, &new_param->value); CHKERRQ(ierr);
420 new_param->next = NULL;
421
422 *param_next_ptr = new_param;
423 param_next_ptr = &new_param->next;
424 LOG_ALLOW(GLOBAL, LOG_DEBUG, " - Found param: [%s] = [%s] \n", new_param->key, new_param->value);
425 }
426 }
427 fclose(file);
428 }
429
430 // =========================================================================
431 // BROADCASTING THE CONFIGURATION FROM RANK 0
432 // =========================================================================
433 // This is a critical step to ensure all processes have the same configuration.
434
435 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d broadcasting/receiving BC configuration.\n", rank);
436
437 for (int i = 0; i < 6; i++) {
438 // --- Broadcast simple enums ---
439 if (rank == 0) {
440 user->boundary_faces[i] = configs_rank0[i]; // Rank 0 populates its final struct
441 }
442 ierr = MPI_Bcast(&user->boundary_faces[i].mathematical_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
443 ierr = MPI_Bcast(&user->boundary_faces[i].handler_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
444
445 // --- Serialize and Broadcast the parameter linked list ---
446 PetscInt n_params = 0;
447 if (rank == 0) { // On rank 0, count the number of parameters to send
448 for (BC_Param *p = user->boundary_faces[i].params; p; p = p->next) n_params++;
449 }
450 ierr = MPI_Bcast(&n_params, 1, MPI_INT, 0, PETSC_COMM_WORLD);CHKERRQ(ierr);
451
452 if (rank != 0) { // Non-root ranks need to receive and build the list
453 FreeBC_ParamList(user->boundary_faces[i].params); // Ensure list is empty before building
454 user->boundary_faces[i].params = NULL;
455 }
456
457 BC_Param **param_next_ptr = &user->boundary_faces[i].params;
458
459 for (int j = 0; j < n_params; j++) {
460 char key_buf[256] = {0}, val_buf[256] = {0};
461 if (rank == 0) {
462 // On rank 0, navigate to the j-th param and copy its data to buffers
463 BC_Param *p = user->boundary_faces[i].params;
464 for (int k = 0; k < j; k++) p = p->next;
465 strncpy(key_buf, p->key, 255);
466 strncpy(val_buf, p->value, 255);
467 }
468
469 ierr = MPI_Bcast(key_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
470 ierr = MPI_Bcast(val_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
471
472 if (rank != 0) {
473 // On non-root ranks, deserialize: create a new node and append it
474 BC_Param *new_param;
475 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
476 ierr = PetscStrallocpy(key_buf, &new_param->key); CHKERRQ(ierr);
477 ierr = PetscStrallocpy(val_buf, &new_param->value); CHKERRQ(ierr);
478 new_param->next = NULL;
479 *param_next_ptr = new_param;
480 param_next_ptr = &new_param->next;
481 } else {
482 // On rank 0, just advance the pointer for the next iteration
483 param_next_ptr = &((*param_next_ptr)->next);
484 }
485 }
486 user->boundary_faces[i].face_id = (BCFace)i; // Ensure face_id is set on all ranks
487 }
488
489 // --- Set legacy fields for compatibility with particle system ---
490 user->inletFaceDefined = PETSC_FALSE;
491 for (int i=0; i<6; i++) {
492 if (user->boundary_faces[i].mathematical_type == INLET) {
493 user->inletFaceDefined = PETSC_TRUE;
494 user->identifiedInletBCFace = (BCFace)i;
495 LOG_ALLOW(GLOBAL, LOG_INFO, "Inlet face for particle initialization identified as Face %d.\n", i);
496 break; // Found the first one, stop looking
497 }
498 }
499
500 if (rank == 0) {
501 // Rank 0 can now free the linked lists it created, as they have been broadcast.
502 // Or, if user->boundary_faces was used directly, this is not needed.
503 }
504
505 PetscFunctionReturn(0);
506}
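/*
 * Example of the bcs.dat format parsed above, "<Face> <Type> <Handler>
 * [key=value] ...". The Face/Type/Handler strings come from the converter
 * helpers earlier in this file; the parameter names and values below (e.g.
 * "vz") are purely hypothetical and depend on what each handler looks up:
 *
 *   # face    type     handler              optional parameters
 *   -Zeta     INLET    constant_velocity    vz=1.0
 *   +Zeta     OUTLET   conservation
 *   -Xi       WALL     noslip
 *   +Xi       WALL     noslip
 *   -Eta      WALL     noslip
 *   +Eta      WALL     noslip
 *
 * Lines starting with '#' are skipped, and any face not listed keeps the
 * rank-0 default of WALL / noslip.
 */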
507
508/**
509 * @brief Checks for a data file's existence in a parallel-safe manner.
510 *
511 * Only Rank 0 checks for the file on disk. The result (true or false) is
512 * then broadcast to all other processes in the communicator. This ensures all
513 * processes make a collective, synchronized decision.
514 *
515 * @param ti The time index of the file.
516 * @param fieldName The name of the field.
517 * @param ext The file extension.
518 * @param fileExists [out] The result, which will be identical on all ranks.
519 * @return PetscErrorCode
520 */
521static PetscErrorCode CheckDataFile(PetscInt ti, const char *fieldName, const char *ext, PetscBool *fileExists)
522{
523 PetscErrorCode ierr;
524 PetscMPIInt rank;
525 MPI_Comm comm = PETSC_COMM_WORLD;
526 PetscInt placeholder_int = 0;
527
528 PetscFunctionBeginUser;
529 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
530
531 if (rank == 0) {
532 char filename[PETSC_MAX_PATH_LEN];
533 // Use the same standardized, rank-independent filename format
534
 535 ierr = PetscSNPrintf(filename, sizeof(filename), "results/%s%05"PetscInt_FMT"_%d.%s", fieldName, ti, placeholder_int, ext); CHKERRQ(ierr);
536 ierr = PetscTestFile(filename, 'r', fileExists); CHKERRQ(ierr);
537 if (!(*fileExists)) {
538 LOG_ALLOW(GLOBAL, LOG_WARNING, "CheckDataFile (Rank 0) - Optional data file '%s' not found.\n", filename);
539 }
540 }
541
542 // Broadcast the result from Rank 0 to all other ranks.
543 // We cast the PetscBool to a PetscMPIInt for MPI_Bcast.
544 PetscMPIInt fileExists_int = (rank == 0) ? (PetscMPIInt)(*fileExists) : 0;
545 ierr = MPI_Bcast(&fileExists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
546 *fileExists = (PetscBool)fileExists_int;
547
548 PetscFunctionReturn(0);
549}
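/*
 * For reference, the path probed above follows the same rank-independent
 * naming scheme used by ReadFieldData/WriteFieldData, e.g. for a
 * hypothetical step 10:
 *
 *   results/position00010_0.dat
 *
 * i.e. results/<fieldName><5-digit step>_0.<ext>.
 */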
550
551/**
552 * @brief Checks for and reads an optional DMSwarm field from a file.
553 *
554 * This helper function first checks if the corresponding data file exists.
555 * - If the file does not exist, it logs a warning and returns success (0),
556 * gracefully skipping the field.
557 * - If the file exists, it calls ReadSwarmField() to read the data. Any
558 * error during this read operation is considered fatal (e.g., file is
559 * corrupt, permissions issue, or size mismatch), as the presence of the
560 * file implies an intent to load it.
561 *
562 * @param[in] user Pointer to the UserCtx structure.
563 * @param[in] field_name Internal name of the DMSwarm field (e.g., "DMSwarm_CellID").
564 * @param[in] field_label A user-friendly name for logging (e.g., "Cell ID").
565 * @param[in] ti Time index for the file name.
566 * @param[in] ext File extension.
567 *
568 * @return PetscErrorCode Returns 0 on success or if the optional file was not found.
569 * Returns a non-zero error code on a fatal read error.
570 */
571static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
572{
573 PetscErrorCode ierr;
574 PetscBool fileExists;
575
576 PetscFunctionBeginUser;
577
578 /* Check if the data file for this optional field exists. */
579 ierr = CheckDataFile(ti, field_name, ext, &fileExists); CHKERRQ(ierr);
580
581 if (fileExists) {
582 /* File exists, so we MUST be able to read it. */
583 LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
584 if(strcasecmp(field_name,"DMSwarm_CellID") == 0 || strcasecmp(field_name,"DMSwarm_pid")== 0 || strcasecmp(field_name,"DMSwarm_location_status")== 0 ) {
585 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading integer swarm field '%s'.\n", field_name);
586 ierr = ReadSwarmIntField(user,field_name,ti,ext);
587 }
588 else{
589 ierr = ReadSwarmField(user, field_name, ti, ext);
590 }
591
592 if (ierr) {
593 /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
594 permissions issue. Other errors could be size mismatch or corruption. */
595 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
596 } else {
597 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
598 }
599 } else {
600 /* File does not exist, which is acceptable for an optional field. */
601 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
602 }
603
604 PetscFunctionReturn(0);
605}
606
607/************************************************************************************************
608* @file io.c
609* @brief Utility for (re-)loading PETSc Vec-based fields written by rank-0 in a previous run
610* – works in both serial and MPI executions.
611*
612* The restart files are always written as **one** sequential Vec per field
613* ( `<results>/<field_name><step>_0.<ext>` ). In parallel we therefore
614* have to:
615*
616* • let rank-0 read that sequential file into a temporary Vec,
617* • broadcast the raw array to every rank, and
618* • copy the local slice into the distributed Vec supplied by the caller.
619*
620* The function purposely avoids `VecScatterCreateToAll()` because PETSc
621* chooses an even block-cyclic layout that seldom matches an existing
622* application layout – that mismatch is the root–cause of the previous
623* “Scatter sizes of ix and iy don’t match” / “incompatible local lengths”
624* crashes. A plain `MPI_Bcast` plus a `memcpy()` into the already
625* allocated local array is bullet-proof and costs one extra buffer only
626* on the non-root ranks.
627*
628* Logging helper used below:
629* LOG_ALLOW(scope,level,fmt,…) – your existing macro
630*
631* @param[in] user – application context (unused here but part of original API)
632* @param[in] field_name – base name of the field (“position”, “ufield”, …)
633* @param[out] field_vec – **distributed** Vec we have to fill
634* @param[in] ti – time-step index encoded in the filename
635* @param[in] ext – file-name extension (“dat”, “bin”, …)
636*
637* @returns 0 on success or a PETSc error code.
638************************************************************************************************/
639PetscErrorCode ReadFieldData(UserCtx *user,
640 const char *field_name,
641 Vec field_vec,
642 PetscInt ti,
643 const char *ext)
644{
645 PetscErrorCode ierr;
646 char filename[PETSC_MAX_PATH_LEN];
647 MPI_Comm comm;
648 PetscMPIInt rank,size;
649
650 PetscFunctionBeginUser;
651
652 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
653 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
654 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
655
656 /* ---------------------------------------------------------------------
657 * Compose <results>/<field_name><step with 5 digits>_0.<ext>
658 * (all restart files are written by rank-0 with that naming scheme).
659 * ------------------------------------------------------------------ */
660 ierr = PetscSNPrintf(filename,sizeof(filename),
661 "results/%s%05" PetscInt_FMT "_0.%s",
662 field_name,ti,ext);CHKERRQ(ierr);
663
665 "Attempting to read <%s> on rank %d/%d\n",
666 filename,(int)rank,(int)size);
667
668 /* ======================================================================
669 * 1. SERIAL JOB – just hand the Vec to VecLoad()
670 * ==================================================================== */
671 if(size==1)
672 {
673 PetscViewer viewer;
674 PetscBool found;
675 Vec temp_vec;
676 PetscInt expectedSize,loadedSize;
677
678 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
679 if(!found) SETERRQ(comm,PETSC_ERR_FILE_OPEN,
680 "Restart file not found: %s",filename);
681
682 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,FILE_MODE_READ,&viewer);CHKERRQ(ierr);
683// ---- START MODIFICATION ----
684 // DO NOT load directly into field_vec, as this can resize it, which is
685 // illegal for DMSwarm "view" vectors. Instead, load into a temporary vector.
686 ierr = VecCreate(PETSC_COMM_SELF, &temp_vec); CHKERRQ(ierr);
687 ierr = VecLoad(temp_vec,viewer);CHKERRQ(ierr);
688 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
689
690 // Sanity check: ensure the file size matches the expected vector size.
691 ierr = VecGetSize(field_vec, &expectedSize);CHKERRQ(ierr);
692 ierr = VecGetSize(temp_vec, &loadedSize);CHKERRQ(ierr);
693 if (loadedSize != expectedSize) {
694 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
695 "File %s holds %d entries – expected %d for field '%s'",
696 filename, loadedSize, expectedSize, field_name);
697 }
698
699 // Now, safely copy the data from the temporary vector to the final destination.
700 ierr = VecCopy(temp_vec, field_vec);CHKERRQ(ierr);
701
702 // Clean up the temporary vector.
703 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
704
705 // ---- END MODIFICATION ----
706
707 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
708 /*
709 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
710 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
711
712 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
713 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
714
715 ierr = VecLoad(field_vec,viewer);CHKERRQ(ierr);
716 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
717 */
719 "Loaded <%s> (serial path)\n",filename);
720 PetscFunctionReturn(0);
721 }
722
723 /* ======================================================================
724 * 2. PARALLEL JOB
725 * ==================================================================== */
726 PetscInt globalSize;
727 ierr = VecGetSize(field_vec,&globalSize);CHKERRQ(ierr);
728
729 /* -------------------- rank-0 : read the sequential file -------------- */
730 Vec seq_vec = NULL; /* only valid on rank-0 */
731 const PetscScalar *seqArray = NULL; /* borrowed pointer on rank-0 only */
732
733 if(rank==0)
734 {
735 PetscViewer viewer;
736 PetscBool found;
737
738 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
739 if(!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,
740 "Restart file not found: %s",filename);
741
742 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
743 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
744 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
745
746 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
747 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
748 ierr = VecLoad(seq_vec,viewer);CHKERRQ(ierr);
749 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
750
751 /* size sanity-check */
752 PetscInt loaded;
753 ierr = VecGetSize(seq_vec,&loaded);CHKERRQ(ierr);
754 if(loaded != globalSize)
755 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
756 "File %s holds %d entries – expected %d",
757 filename,loaded,globalSize);
758
759 /* borrow array for later Bcast */
760 ierr = VecGetArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);
761
763 "ReadFieldData - Rank 0 successfully loaded <%s>\n",filename);
764 }
765
766 /* -------------------- everybody : broadcast raw data ----------------- */
767 PetscScalar *buffer = NULL; /* receives the full field */
768 if(rank==0)
769 {
770 /* shallow-copy: const-cast is safe, we do not modify the data */
771 buffer = (PetscScalar *)seqArray;
772 }
773 else
774 { /* non-root ranks allocate a receive buffer */
775 ierr = PetscMalloc1(globalSize,&buffer);CHKERRQ(ierr);
776 }
777
778 ierr = MPI_Bcast(buffer, (int)globalSize, MPIU_SCALAR, 0, comm);CHKERRQ(ierr);
779
780 /* -------------------- copy my slice into field_vec ------------------- */
781 PetscInt rstart,rend,loc;
782 PetscScalar *locArray;
783
784 ierr = VecGetOwnershipRange(field_vec,&rstart,&rend);CHKERRQ(ierr);
785 loc = rend - rstart; /* local length */
786
787 ierr = VecGetArray(field_vec,&locArray);CHKERRQ(ierr);
788 ierr = PetscMemcpy(locArray,
789 buffer + rstart,
790 loc*sizeof(PetscScalar));CHKERRQ(ierr);
791 ierr = VecRestoreArray(field_vec,&locArray);CHKERRQ(ierr);
792
793 /* -------------------- tidy up ---------------------------------------- */
794 if(rank==0)
795 {
796 ierr = VecRestoreArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);
797 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
798 }
799 else
800 {
801 ierr = PetscFree(buffer);CHKERRQ(ierr);
802 }
803
805 "ReadFieldData - Loaded <%s> (parallel path)\n",filename);
806
807 PetscFunctionReturn(0);
808}
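/*
 * Usage sketch (mirrors the calls issued by ReadSimulationFields below):
 *
 *   ierr = ReadFieldData(user, "pfield", user->P, ti, "dat"); CHKERRQ(ierr);
 *
 * The destination Vec defines the layout: in parallel, every rank receives
 * the full broadcast buffer and copies only its own [rstart, rend) ownership
 * slice into the local array, so no scatter context is required.
 */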
809
810
811/**
812 * @brief Reads simulation fields from files into their respective PETSc vectors.
813 *
814 * This function reads contravariant velocity, Cartesian velocity, pressure, and node state
815 * fields from their respective binary files. It also conditionally reads LES, RANS, and
816 * statistical fields if they are enabled.
817 *
818 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
819 * @param[in] ti Time index for constructing the file name.
820 *
821 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
822 */
823PetscErrorCode ReadSimulationFields(UserCtx *user,PetscInt ti)
824{
825 PetscErrorCode ierr;
826
827 SimCtx *simCtx = user->simCtx;
828
829 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read simulation fields.\n");
830
831 // Read Cartesian velocity field
832 ierr = ReadFieldData(user, "ufield", user->Ucat, ti, "dat"); CHKERRQ(ierr);
833
834 // Read contravariant velocity field
835 ierr = ReadFieldData(user, "vfield", user->Ucont, ti, "dat"); CHKERRQ(ierr);
836
837 // Read pressure field
838 ierr = ReadFieldData(user, "pfield", user->P, ti, "dat"); CHKERRQ(ierr);
839
840 // Read node state field (nvert)
841 ierr = ReadFieldData(user, "nvfield", user->Nvert, ti, "dat"); CHKERRQ(ierr);
842
843 if(simCtx->np>0){
844 // Read Particle Count field
845 if(!user->ParticleCount){
846 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ParticleCount Vec is NULL but np>0");
847 }
848 ierr = ReadFieldData(user, "ParticleCount", user->ParticleCount, ti, "dat"); CHKERRQ(ierr);
849 }
850 else{
851 LOG_ALLOW(GLOBAL, LOG_INFO, "No particles in simulation, skipping ParticleCount field read.\n");
852 }
853 // Process LES fields if enabled
854 if (simCtx->les) {
855 ierr = ReadLESFields(user,ti); CHKERRQ(ierr);
856 }
857
858 // Process RANS fields if enabled
859 if (simCtx->rans) {
860 ierr = ReadRANSFields(user,ti); CHKERRQ(ierr);
861 }
862
863 // Process statistical fields if averaging is enabled
864 if (simCtx->averaging) {
865 ierr = ReadStatisticalFields(user,ti); CHKERRQ(ierr);
866 }
867
868 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading simulation fields.\n");
869
870 return 0;
871}
872
873/**
874 * @brief Reads statistical fields for averaging purposes.
875 *
876 * This function reads data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum,
877 * and P_sum, used for statistical analysis during simulation.
878 *
879 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
880 * @param[in] ti Time index for constructing the file name.
881 *
882 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
883 */
884PetscErrorCode ReadStatisticalFields(UserCtx *user,PetscInt ti)
885{
886 PetscErrorCode ierr;
887
888 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read statistical fields.\n");
889
890 ierr = ReadFieldData(user, "su0", user->Ucat_sum, ti, "dat"); CHKERRQ(ierr);
891 ierr = ReadFieldData(user, "su1", user->Ucat_cross_sum, ti, "dat"); CHKERRQ(ierr);
892 ierr = ReadFieldData(user, "su2", user->Ucat_square_sum, ti, "dat"); CHKERRQ(ierr);
893 ierr = ReadFieldData(user, "sp", user->P_sum, ti, "dat"); CHKERRQ(ierr);
894
895 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading statistical fields.\n");
896
897 return 0;
898}
899
900/**
901 * @brief Reads LES-related fields.
902 *
903 * This function reads LES-related fields such as Cs (Smagorinsky constant)
904 * into their respective PETSc vectors.
905 *
906 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
907 * @param[in] ti Time index for constructing the file name.
908 *
909 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
910 */
911PetscErrorCode ReadLESFields(UserCtx *user,PetscInt ti)
912{
913 PetscErrorCode ierr;
914 Vec Cs;
915
916 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read LES fields.\n");
917
918 VecDuplicate(user->P, &Cs);
919 ierr = ReadFieldData(user, "cs", Cs, ti, "dat"); CHKERRQ(ierr);
920 DMGlobalToLocalBegin(user->da, Cs, INSERT_VALUES, user->lCs);
921 DMGlobalToLocalEnd(user->da, Cs, INSERT_VALUES, user->lCs);
922 VecDestroy(&Cs);
923
924 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading LES fields.\n");
925
926 return 0;
927}
928
929/**
930 * @brief Reads RANS-related fields.
931 *
932 * This function reads RANS-related fields such as K_Omega into their respective
933 * PETSc vectors.
934 *
935 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
936 * @param[in] ti Time index for constructing the file name.
937 *
938 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
939 */
940PetscErrorCode ReadRANSFields(UserCtx *user,PetscInt ti)
941{
942 PetscErrorCode ierr;
943
944 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read RANS fields.\n");
945
946 ierr = ReadFieldData(user, "kfield", user->K_Omega, ti, "dat"); CHKERRQ(ierr);
947 VecCopy(user->K_Omega, user->K_Omega_o);
948
949 DMGlobalToLocalBegin(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
950 DMGlobalToLocalEnd(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
951
952 DMGlobalToLocalBegin(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
953 DMGlobalToLocalEnd(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
954
955 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading RANS fields.\n");
956
957 return 0;
958}
959
960
961/**
962 * @brief Reads data from a file into a specified field of a PETSc DMSwarm.
963 *
964 * This function is the counterpart to WriteSwarmField(). It creates a global PETSc vector
965 * that references the specified DMSwarm field, uses ReadFieldData() to read the data from
966 * a file, and then destroys the global vector reference.
967 *
968 * @param[in] user Pointer to the UserCtx structure (containing `user->swarm`).
969 * @param[in] field_name Name of the DMSwarm field to read into (must be previously declared/allocated).
970 * @param[in] ti Time index used to construct the input file name.
971 * @param[in] ext File extension (e.g., "dat" or "bin").
972 *
973 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
974 *
975 * @note Compatible with PETSc 3.14.x.
976 */
977PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
978{
979 PetscErrorCode ierr;
980 DM swarm;
981 Vec fieldVec;
982
983 PetscFunctionBegin;
984
985 swarm = user->swarm;
986
987 LOG_ALLOW(GLOBAL,LOG_DEBUG," ReadSwarmField Begins \n");
988
989 /* 2) Create a global vector that references the specified Swarm field. */
990 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
991
992 LOG_ALLOW(GLOBAL,LOG_DEBUG," Vector created from Field \n");
993
994 /* 3) Use the ReadFieldData() function to read data into fieldVec. */
995 ierr = ReadFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
996
997 /* 4) Destroy the global vector reference. */
998 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
999
1000 PetscFunctionReturn(0);
1001}
1002
1003/**
1004 * @brief Reads integer swarm data by using ReadFieldData and casting the result.
1005 *
1006 * This function is the counterpart to WriteSwarmIntField. It reads a file
1007 * containing floating-point data (that was originally integer) into a temporary
1008 * Vec and then casts it back to the integer swarm field. It works by:
1009 * 1. Creating a temporary parallel Vec.
1010 * 2. Calling the standard ReadFieldData() to populate this Vec.
1011 * 3. Accessing the local data of both the Vec and the swarm field.
1012 * 4. Populating the swarm's integer field by casting each PetscScalar back to a PetscInt.
1013 * 5. Destroying the temporary Vec.
1014 *
1015 * @param[in] user Pointer to the UserCtx structure.
1016 * @param[in] field_name Name of the integer Swarm field to be read.
1017 * @param[in] ti Time index for the input file.
1018 * @param[in] ext File extension.
1019 *
1020 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1021 */
1022PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1023{
1024 PetscErrorCode ierr;
1025 DM swarm = user->swarm;
1026 Vec temp_vec;
1027 PetscInt nlocal, nglobal, bs, i;
1028 const PetscScalar *scalar_array; // Read-only pointer from the temp Vec
1029 void *field_array_void;
1030
1031
1032 PetscFunctionBeginUser;
1033
1034 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadSwarmIntField - Reading '%s' via temporary Vec.\n", field_name);
1035
1036 // Get the properties of the swarm field to determine the expected layout
1037 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1038 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1039 // We get the block size but not the data pointer yet
1040 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1041 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1042
1043 // Create a temporary Vec with the CORRECT layout to receive the data
1044 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1045 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1046 ierr = VecSetSizes(temp_vec, nlocal * bs, nglobal * bs); CHKERRQ(ierr);
1047 ierr = VecSetBlockSize(temp_vec, bs); CHKERRQ(ierr);
1048 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1049
1050 // Call your existing reader to populate the temporary Vec
1051 ierr = ReadFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1052
1053 // Get local pointers
1054 ierr = VecGetArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1055 ierr = DMSwarmGetField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1056
1057 // Perform the cast back, using the correct loop size (nlocal * bs)
1058 if (strcmp(field_name, "DMSwarm_pid") == 0) {
1059 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1060 for (i = 0; i < nlocal * bs; i++) {
1061 int64_array[i] = (PetscInt64)scalar_array[i];
1062 }
1063 } else {
1064 PetscInt *int_array = (PetscInt *)field_array_void;
1065 for (i = 0; i < nlocal * bs; i++) {
1066 int_array[i] = (PetscInt)scalar_array[i];
1067 }
1068 }
1069
1070 // Restore access
1071 ierr = DMSwarmRestoreField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1072 ierr = VecRestoreArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1073
1074 // 6. Clean up
1075 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1076
1077 PetscFunctionReturn(0);
1078}
1079
1080/**
1081 * @brief Reads multiple fields into a DMSwarm, gracefully handling missing optional files.
1082 *
1083 * This function reads all necessary and optional fields for a DMSwarm for a given
1084 * timestep. It assumes the swarm has already been resized to match the particle
1085 * count in the input files.
1086 *
1087 * The 'position' field is considered MANDATORY. If its file is missing or corrupt,
1088 * the function will return a fatal error.
1089 *
1090 * All other fields (velocity, CellID, weight, etc.) are OPTIONAL. If their
1091 * corresponding files are not found, a warning is logged, and the function
1092 * continues without error. If an optional file IS found but is corrupt or has a
1093 * size mismatch, it is treated as a fatal error.
1094 *
1095 * @param[in,out] user Pointer to the UserCtx structure containing the DMSwarm (user->swarm).
1096 * @param[in] ti Time index for constructing the file names.
1097 *
1098 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1099 */
1100PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
1101{
1102 PetscErrorCode ierr;
1103 PetscInt nGlobal;
1104
1105 PetscFunctionBeginUser;
1106 ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
1107 LOG_ALLOW(GLOBAL, LOG_INFO, "Reading DMSwarm fields for timestep %d (swarm size is %d).\n", ti, nGlobal);
1108
1109 if (nGlobal == 0) {
1110 LOG_ALLOW(GLOBAL, LOG_INFO, "Swarm is empty for timestep %d. Nothing to read.\n", ti);
1111 PetscFunctionReturn(0);
1112 }
1113
1114 /* 1) Read positions (REQUIRED) */
1115 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading mandatory position field...\n");
1116 ierr = ReadSwarmField(user, "position", ti, "dat");
1117 if (ierr) {
1118 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read MANDATORY 'position' field for step %d. Cannot continue.", ti);
1119 }
1120 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read mandatory position field for step %d.\n", ti);
1121
1122 /* 2) Read all OPTIONAL fields using the helper function. */
1123 /* The helper will print a warning and continue if a file is not found. */
1124 ierr = ReadOptionalSwarmField(user, "velocity", "Velocity", ti, "dat"); CHKERRQ(ierr);
1125 ierr = ReadOptionalSwarmField(user, "DMSwarm_pid", "Particle ID", ti, "dat"); CHKERRQ(ierr);
1126 ierr = ReadOptionalSwarmField(user, "DMSwarm_CellID", "Cell ID", ti, "dat"); CHKERRQ(ierr);
1127 ierr = ReadOptionalSwarmField(user, "weight", "Particle Weight", ti, "dat"); CHKERRQ(ierr);
1128 ierr = ReadOptionalSwarmField(user, "Psi", "Scalar Psi", ti, "dat"); CHKERRQ(ierr);
1129 ierr = ReadOptionalSwarmField(user, "DMSwarm_location_status", "Migration Status", ti, "dat"); CHKERRQ(ierr);
1130
1131 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading DMSwarm fields for timestep %d.\n", ti);
1132 PetscFunctionReturn(0);
1133}
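/*
 * Minimal restart-read sketch (assumes the swarm has already been resized to
 * the particle count stored in the files, as required above; "ti" is a
 * hypothetical timestep index):
 *
 *   ierr = ReadAllSwarmFields(user, ti); CHKERRQ(ierr);
 *
 * A missing "position" file aborts with an error, while missing optional
 * fields (velocity, CellID, weight, ...) only generate warnings.
 */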
1134
1135 /**
1136 * @brief Writes data from a specific PETSc vector to a single, sequential file.
1137 *
1138 * This function is now parallel-safe.
1139 * - In PARALLEL: All processes send their local data to Rank 0. Rank 0 assembles
1140 * the data into a temporary sequential vector and writes it to a single file.
1141 * - In SERIAL: It performs a direct, simple write.
1142 *
1143 * This ensures the output file is always in a simple, portable format.
1144 *
1145 * @param[in] user Pointer to the UserCtx structure.
1146 * @param[in] field_name Name of the field (e.g., "position").
1147 * @param[in] field_vec The parallel PETSc vector containing the data to write.
1148 * @param[in] ti Time index for constructing the file name.
1149 * @param[in] ext File extension (e.g., "dat").
1150 *
1151 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1152 */
1153/**
1154 * @brief Dump a **distributed** PETSc Vec to the *single* sequential file
1155 * format used by our restart / post-processing tools.
1156 *
1157 * The companion of ReadFieldData(): it always produces **one** file
1158 * (e.g. `results/ufield00006_0.dat`) regardless of how many MPI ranks
1159 * are running.
1160 *
1161 * **Behaviour**
1162 *
1163 * | # MPI ranks | Strategy |
1164 * |-------------|---------------------------------------------------------------------------|
1165 * | 1 | Direct `VecView()` into the file. |
1166 * | > 1 | `VecScatterCreateToZero()` gathers the distributed Vec onto **rank 0**. |
1167 * | | Rank 0 writes the sequential Vec; all other ranks allocate no storage. |
1168 *
1169 * The routine never alters or destroys the parallel Vec passed in; the
1170 * gather buffer is created and freed internally.
1171 *
1172 * @param[in] user Simulation context (used only for logging).
1173 * @param[in] field_name Logical field name (forms part of the filename).
1174 * @param[in] field_vec Distributed PETSc Vec to write.
1175 * @param[in] ti Timestep index used in the filename.
1176 * @param[in] ext File extension (`"dat"` in our workflow).
1177 *
1178 * @return `0` on success or a PETSc error code.
1179 */
1180PetscErrorCode WriteFieldData(UserCtx *user,
1181 const char *field_name,
1182 Vec field_vec,
1183 PetscInt ti,
1184 const char *ext)
1185{
1186 PetscErrorCode ierr;
1187 MPI_Comm comm;
1188 PetscMPIInt rank, size;
1189
1190 const PetscInt placeholder_int = 0; /* keep legacy name */
1191 char filename[PETSC_MAX_PATH_LEN];
1192
1193 PetscFunctionBeginUser;
1194
1195 /* ------------------------------------------------------------ */
1196 /* Basic communicator information */
1197 /* ------------------------------------------------------------ */
1198 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1199 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1200 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1201
1202 ierr = PetscSNPrintf(filename,sizeof(filename),
1203 "results/%s%05" PetscInt_FMT "_%d.%s",
1204 field_name,ti,placeholder_int,ext);CHKERRQ(ierr);
1205
1207 "WriteFieldData - Preparing to write <%s> on rank %d/%d\n",
1208 filename,rank,size);
1209
1210 /* ------------------------------------------------------------ */
1211 /* 1. Serial path */
1212 /* ------------------------------------------------------------ */
1213 if (size == 1) {
1214 PetscViewer viewer;
1215
1216 ierr = PetscViewerBinaryOpen(comm,filename,
1217 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1218 ierr = VecView(field_vec,viewer);CHKERRQ(ierr);
1219 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1220
1222 "WriteFieldData - Wrote <%s> (serial path)\n",filename);
1223 PetscFunctionReturn(0);
1224 }
1225
1226 /* ------------------------------------------------------------ */
1227 /* 2. Parallel path */
1228 /* ------------------------------------------------------------ */
1229 VecScatter scatter;
1230 Vec seq_vec=NULL; /* created by PETSc, lives only on rank 0 */
1231
1232 /* 2.1 Create gather context and buffer */
1233 ierr = VecScatterCreateToZero(field_vec,&scatter,&seq_vec);CHKERRQ(ierr);
1234
1235 /* 2.2 Gather distributed → sequential (on rank 0) */
1236 ierr = VecScatterBegin(scatter,field_vec,seq_vec,
1237 INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1238 ierr = VecScatterEnd (scatter,field_vec,seq_vec,
1239 INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1240
1241 /* 2.3 Rank 0 writes the file */
1242 if (rank == 0) {
1243 PetscViewer viewer;
1244
1245 /* (optional) value diagnostics */
1246 PetscReal vmin,vmax;
1247 ierr = VecMin(seq_vec,NULL,&vmin);CHKERRQ(ierr);
1248 ierr = VecMax(seq_vec,NULL,&vmax);CHKERRQ(ierr);
1250 "WriteFieldData - <%s> range = [%.4e … %.4e]\n",
1251 field_name,(double)vmin,(double)vmax);
1252
1253 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1254 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1255 ierr = VecView(seq_vec,viewer);CHKERRQ(ierr);
1256 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1257
1259 "WriteFieldData - Wrote <%s> (parallel path)\n",filename);
1260 }
1261
1262 /* 2.4 Cleanup */
1263 ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
1264 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
1265
1266 PetscFunctionReturn(0);
1267}
1268
1269/**
1270 * @brief Writes simulation fields to files.
1271 *
1272 * This function writes contravariant velocity, Cartesian velocity, pressure, and node state
1273 * fields to their respective binary files. It also conditionally writes LES, RANS, and
1274 * statistical fields if they are enabled.
1275 *
1276 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1277 *
1278 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1279 */
1280PetscErrorCode WriteSimulationFields(UserCtx *user)
1281{
1282 PetscErrorCode ierr;
1283
1284 SimCtx *simCtx = user->simCtx;
1285
1286 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteSimulationFields - Starting to write simulation fields.\n");
1287
1288 // Write contravariant velocity field
1289 ierr = WriteFieldData(user, "vfield", user->Ucont, simCtx->step, "dat"); CHKERRQ(ierr);
1290
1291 // Write Cartesian velocity field
1292 ierr = WriteFieldData(user, "ufield", user->Ucat, simCtx->step, "dat"); CHKERRQ(ierr);
1293
1294 // Write pressure field
1295 ierr = WriteFieldData(user, "pfield", user->P, simCtx->step, "dat"); CHKERRQ(ierr);
1296
1297 // Write node state field (nvert)
1298 ierr = WriteFieldData(user, "nvfield", user->Nvert, simCtx->step, "dat"); CHKERRQ(ierr);
1299
1300 // Write ParticleCountPerCell if enabled.
1301 if(simCtx->np>0){
1302 ierr = WriteFieldData(user, "ParticleCount",user->ParticleCount,simCtx->step,"dat"); CHKERRQ(ierr);
1303 }
1304
1305 // Write LES fields if enabled
1306 if (simCtx->les) {
1307 ierr = WriteLESFields(user); CHKERRQ(ierr);
1308 }
1309
1310 // Write RANS fields if enabled
1311 if (simCtx->rans) {
1312 ierr = WriteRANSFields(user); CHKERRQ(ierr);
1313 }
1314
1315 // Write statistical fields if averaging is enabled
1316 if (simCtx->averaging) {
1317 ierr = WriteStatisticalFields(user); CHKERRQ(ierr);
1318 }
1319
1320 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteSimulationFields - Finished writing simulation fields.\n");
1321
1322 return 0;
1323}
1324
1325/**
1326 * @brief Writes statistical fields for averaging purposes.
1327 *
1328 * This function writes data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum,
1329 * and P_sum to their respective binary files.
1330 *
1331 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1332 *
1333 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1334 */
1335PetscErrorCode WriteStatisticalFields(UserCtx *user)
1336{
1337 PetscErrorCode ierr;
1338
1339 SimCtx *simCtx = user->simCtx;
1340
1341 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteStatisticalFields - Starting to write statistical fields.\n");
1342
1343 ierr = WriteFieldData(user, "su0", user->Ucat_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1344 ierr = WriteFieldData(user, "su1", user->Ucat_cross_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1345 ierr = WriteFieldData(user, "su2", user->Ucat_square_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1346 ierr = WriteFieldData(user, "sp", user->P_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1347
1348 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteStatisticalFields - Finished writing statistical fields.\n");
1349
1350 return 0;
1351}
1352
1353/**
1354 * @brief Writes LES-related fields.
1355 *
1356 * This function writes LES-related fields such as Cs (Smagorinsky constant)
1357 * to their respective binary files.
1358 *
1359 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1360 *
1361 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1362 */
1363PetscErrorCode WriteLESFields(UserCtx *user)
1364{
1365 PetscErrorCode ierr;
1366 Vec Cs;
1367
1368 SimCtx *simCtx = user->simCtx;
1369
1370 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteLESFields - Starting to write LES fields.\n");
1371
1372 ierr = VecDuplicate(user->P, &Cs); CHKERRQ(ierr);
1373 ierr = DMLocalToGlobalBegin(user->da, user->lCs, INSERT_VALUES, Cs); CHKERRQ(ierr);
1374 ierr = DMLocalToGlobalEnd(user->da, user->lCs, INSERT_VALUES, Cs); CHKERRQ(ierr);
1375 ierr = WriteFieldData(user, "cs", Cs, simCtx->step, "dat"); CHKERRQ(ierr);
1376 ierr = VecDestroy(&Cs); CHKERRQ(ierr);
1377
1378 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteLESFields - Finished writing LES fields.\n");
1379
1380 return 0;
1381}
1382
1383/**
1384 * @brief Writes RANS-related fields.
1385 *
1386 * This function writes RANS-related fields such as K_Omega to their respective
1387 * binary files.
1388 *
1389 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1390 *
1391 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1392 */
1393PetscErrorCode WriteRANSFields(UserCtx *user)
1394{
1395 PetscErrorCode ierr;
1396
1397 SimCtx *simCtx = user->simCtx;
1398
1399 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteRANSFields - Starting to write RANS fields.\n");
1400
1401 ierr = WriteFieldData(user, "kfield", user->K_Omega, simCtx->step, "dat"); CHKERRQ(ierr);
1402
1403 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteRANSFields - Finished writing RANS fields.\n");
1404
1405 return 0;
1406}
1407
1408/**
1409 * @brief Writes data from a specific field in a PETSc Swarm to a file.
1410 *
1411 * This function retrieves the Swarm from the UserCtx (i.e., `user->swarm`) and
1412 * creates a global PETSc vector from the specified Swarm field. It then calls
1413 * the existing WriteFieldData() function to handle the actual I/O operation.
1414 * After writing the data, the function destroys the temporary global vector
1415 * to avoid memory leaks.
1416 *
1417 * @param[in] user Pointer to the UserCtx structure containing simulation context
1418 * and the PetscSwarm (as `user->swarm`).
1419 * @param[in] field_name Name of the Swarm field to be written (e.g., "my_field").
1420 * @param[in] ti Time index used to construct the output file name.
1421 * @param[in] ext File extension (e.g., "dat", "bin").
1422 *
1423 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1424 *
1425 * @note Compatible with PETSc 3.14.4 and newer.
1426 */
1427PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1428{
1429 PetscErrorCode ierr;
1430 Vec fieldVec;
1431 DM swarm;
1432
1433 PetscFunctionBeginUser; /* PETSc macro indicating start of function */
1434
1435 /*
1436 * 1) Retrieve the PetscSwarm from the user context.
1437 * Ensure user->swarm is initialized and not NULL.
1438 */
1439 swarm = user->swarm;
1440
1441 /*
1442 * 2) Create a global vector from the specified swarm field.
1443 * This function is available in PETSc 3.14.4.
1444 * It provides a read/write "view" of the swarm field as a global Vec.
1445 */
1447 "WriteSwarmField - Attempting to create global vector from field: %s\n",
1448 field_name);
1449 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1450
1451 /*
1452 * 3) Use your existing WriteFieldData() to write the global vector to a file.
1453 * The field name, time index, and extension are passed along for naming.
1454 */
1456 "WriteSwarmField - Calling WriteFieldData for field: %s\n",
1457 field_name);
1458 ierr = WriteFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1459
1460 /*
1461 * 4) Destroy the global vector once the data is successfully written.
1462 * This step is crucial for avoiding memory leaks.
1463 * DMSwarmDestroyGlobalVectorFromField() is also available in PETSc 3.14.4.
1464 */
1466 "WriteSwarmField - Destroying the global vector for field: %s\n",
1467 field_name);
1468 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1469
1470 /* Log and return success. */
1472 "WriteSwarmField - Successfully wrote swarm data for field: %s\n",
1473 field_name);
1474
1475 PetscFunctionReturn(0); /* PETSc macro indicating end of function */
1476}
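
Usage sketch (illustrative, not part of io.c): writing one named swarm field on its own. "Psi" is a field this file also writes from WriteAllSwarmFields; it is assumed to be registered on user->swarm before the call.

static PetscErrorCode ExampleWritePsi(UserCtx *user)
{
  PetscErrorCode ierr;
  PetscFunctionBeginUser;
  ierr = WriteSwarmField(user, "Psi", user->simCtx->step, "dat"); CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
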
1477
1478/**
1479 * @brief Writes integer swarm data by casting it to a temporary Vec and using WriteFieldData.
1480 *
1481 * This function provides a bridge to write integer-based swarm fields (like DMSwarm_CellID)
1482 * using the existing Vec-based I/O routine (WriteFieldData). It works by:
1483 * 1. Creating a temporary parallel Vec with the same layout as other swarm fields.
1484 * 2. Accessing the local integer data from the swarm field.
1485 * 3. Populating the temporary Vec by casting each integer to a PetscScalar.
1486 * 4. Calling the standard WriteFieldData() function with the temporary Vec.
1487 * 5. Destroying the temporary Vec.
1488 *
1489 * @param[in] user Pointer to the UserCtx structure.
1490 * @param[in] field_name Name of the integer Swarm field to be written.
1491 * @param[in] ti Time index for the output file.
1492 * @param[in] ext File extension.
1493 *
1494 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1495 */
1496PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1497{
1498 PetscErrorCode ierr;
1499 DM swarm = user->swarm;
1500 Vec temp_vec; // Temporary Vec to hold casted data
1501 PetscInt nlocal, nglobal,bs,i;
1502 void *field_array_void;
1503 PetscScalar *scalar_array; // Pointer to the temporary Vec's scalar data
1504
1505 PetscFunctionBeginUser;
1506
1507 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Casting '%s' to Vec for writing.\n", field_name);
1508
1509 // Get the swarm field properties
1510 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1511 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1512 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
1513
1514 // Create a temporary parallel Vec with the CORRECT layout
1515 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1516 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1517 ierr = VecSetSizes(temp_vec, nlocal*bs, nglobal*bs); CHKERRQ(ierr);
1518 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1519
1520 // Define the swarm's vector field via the mandatory 'position' field
1521 ierr = DMSwarmVectorDefineField(swarm,"position"); CHKERRQ(ierr);
1522
1523 ierr = VecGetArray(temp_vec, &scalar_array); CHKERRQ(ierr);
1524
1525 if(strcasecmp(field_name,"DMSwarm_pid") == 0){
1526 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1527 // Perform the cast from PetscInt64 to PetscScalar
1528 for (i = 0; i < nlocal*bs; i++) {
1529 scalar_array[i] = (PetscScalar)int64_array[i];
1530 }
1531 }else{
1532 PetscInt *int_array = (PetscInt *)field_array_void;
1533 //Perform the cast from PetscInt to PetscScalar
1534 for (i = 0; i < nlocal*bs; i++) {
1535 scalar_array[i] = (PetscScalar)int_array[i];
1536 }
1537 }
1538
1539 // Restore access to both arrays
1540 ierr = VecRestoreArray(temp_vec, &scalar_array); CHKERRQ(ierr);
1541 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
1542
1543 // Call your existing writer with the temporary, populated Vec
1544 ierr = WriteFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1545
1546 // Clean up
1547 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1548
1549 PetscFunctionReturn(0);
1550}
1551
1552/**
1553 * @brief Writes a predefined set of PETSc Swarm fields to files.
1554 *
1555 * This function iterates through a hardcoded list of common swarm fields
1556 * (position, velocity, etc.) and calls the WriteSwarmField() helper function
1557 * for each one. This provides a straightforward way to output essential particle
1558 * data at a given simulation step.
1559 *
1560 * This function will only execute if particles are enabled in the simulation
1561 * (i.e., `user->simCtx->np > 0` and `user->swarm` is not NULL).
1562 *
1563 * @param[in] user Pointer to the UserCtx structure containing the simulation context
1564 * and the PetscSwarm.
1565 *
1566 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1567 */
1568PetscErrorCode WriteAllSwarmFields(UserCtx *user)
1569{
1570 PetscErrorCode ierr;
1571 SimCtx *simCtx = user->simCtx;
1572
1573 PetscFunctionBeginUser;
1574
1575 // If no swarm is configured or there are no particles, do nothing and return.
1576 if (!user->swarm || simCtx->np <= 0) {
1577 PetscFunctionReturn(0);
1578 }
1579
1580 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteAllSwarmFields - Starting to write swarm fields.\n");
1581
1582 // Write particle position field
1583 ierr = WriteSwarmField(user, "position", simCtx->step, "dat"); CHKERRQ(ierr);
1584
1585 // Write particle velocity field
1586 ierr = WriteSwarmField(user, "velocity", simCtx->step, "dat"); CHKERRQ(ierr);
1587
1588 // Write particle weight field
1589 ierr = WriteSwarmField(user, "weight", simCtx->step, "dat"); CHKERRQ(ierr);
1590
1591 // Write custom particle field "Psi"
1592 ierr = WriteSwarmField(user, "Psi", simCtx->step, "dat"); CHKERRQ(ierr);
1593
1594 // Integer fields require special handling
1595
1596 // Write the background mesh cell ID for each particle
1597 ierr = WriteSwarmIntField(user, "DMSwarm_CellID", simCtx->step, "dat"); CHKERRQ(ierr);
1598
1599 // Write the particle location status (e.g., inside or outside the domain)
1600 ierr = WriteSwarmIntField(user, "DMSwarm_location_status", simCtx->step, "dat"); CHKERRQ(ierr);
1601
1602 // Write the unique particle ID
1603 ierr = WriteSwarmIntField(user, "DMSwarm_pid", simCtx->step, "dat"); CHKERRQ(ierr);
1604
1605 LOG_ALLOW(GLOBAL, LOG_INFO, "WriteAllSwarmFields - Finished writing swarm fields.\n");
1606
1607 PetscFunctionReturn(0);
1608}
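
Restart sketch (illustrative, not part of io.c): a run restarted from step ti would read back what the writers above produced. ReadSimulationFields and ReadAllSwarmFields are the read-side counterparts defined earlier in this file; the wrapper itself is an assumption.

static PetscErrorCode ExampleRestartFromStep(UserCtx *user, PetscInt ti)
{
  PetscErrorCode ierr;
  PetscFunctionBeginUser;
  ierr = ReadSimulationFields(user, ti); CHKERRQ(ierr);  /* Eulerian fields */
  ierr = ReadAllSwarmFields(user, ti);   CHKERRQ(ierr);  /* particle fields */
  PetscFunctionReturn(0);
}
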
1609
1610/**
1611 * @brief Gather a (possibly distributed) PETSc Vec onto rank 0 as a contiguous C array.
1612 * If the Vec has a DMDA attached, the gather is performed in DMDA "natural" ordering.
1613 *
1614 * @param[in] inVec The PETSc Vec (may be distributed).
1615 * @param[out] N Global length of the Vec (includes dof).
1616 * @param[out] arrayOut On rank 0, newly allocated buffer with the gathered values (PetscScalar-sized).
1617 * On other ranks, set to NULL.
1618 */
1619PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
1620{
1621 PetscErrorCode ierr;
1622 MPI_Comm comm;
1623 PetscMPIInt rank;
1624 PetscInt globalSize;
1625 DM dm = NULL;
1626 const char *dmtype = NULL;
1627
1628 /* For DMDA path */
1629 Vec nat = NULL, seqNat = NULL;
1630 VecScatter scatNat = NULL;
1631 const PetscScalar *nar = NULL;
1632 PetscScalar *buf = NULL;
1633
1634 /* For generic (no DM) path */
1635 Vec seq = NULL;
1636 VecScatter scat = NULL;
1637 const PetscScalar *sar = NULL;
1638
1639 PetscFunctionBeginUser;
1640
1641 ierr = PetscObjectGetComm((PetscObject)inVec, &comm); CHKERRQ(ierr);
1642 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
1643 ierr = VecGetSize(inVec, &globalSize); CHKERRQ(ierr);
1644 *N = globalSize;
1645 *arrayOut = NULL;
1646
1647 ierr = VecGetDM(inVec, &dm); CHKERRQ(ierr);
1648 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1649
1650 if (dmtype && !strcmp(dmtype, DMDA)) {
1651 /* --- DMDA path: go to NATURAL ordering, then gather to rank 0 --- */
1652 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1653 ierr = DMDAGlobalToNaturalBegin(dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
1654 ierr = DMDAGlobalToNaturalEnd (dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
1655
1656 ierr = VecScatterCreateToZero(nat, &scatNat, &seqNat); CHKERRQ(ierr);
1657 ierr = VecScatterBegin(scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1658 ierr = VecScatterEnd (scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1659
1660 if (rank == 0) {
1661 PetscInt nseq;
1662 ierr = VecGetLocalSize(seqNat, &nseq); CHKERRQ(ierr);
1663 ierr = VecGetArrayRead(seqNat, &nar); CHKERRQ(ierr);
1664
1665 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
1666 ierr = PetscMemcpy(buf, nar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
1667
1668 ierr = VecRestoreArrayRead(seqNat, &nar); CHKERRQ(ierr);
1669 *arrayOut = (double*)buf; /* hand back as double* for drop-in compatibility */
1670 }
1671
1672 ierr = VecScatterDestroy(&scatNat); CHKERRQ(ierr);
1673 ierr = VecDestroy(&seqNat); CHKERRQ(ierr);
1674 ierr = VecDestroy(&nat); CHKERRQ(ierr);
1675 } else {
1676 /* --- No DM attached: plain gather in Vec’s global (parallel) layout order --- */
1677 ierr = VecScatterCreateToZero(inVec, &scat, &seq); CHKERRQ(ierr);
1678 ierr = VecScatterBegin(scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1679 ierr = VecScatterEnd (scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1680
1681 if (rank == 0) {
1682 PetscInt nseq;
1683 ierr = VecGetLocalSize(seq, &nseq); CHKERRQ(ierr);
1684 ierr = VecGetArrayRead(seq, &sar); CHKERRQ(ierr);
1685
1686 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
1687 ierr = PetscMemcpy(buf, sar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
1688
1689 ierr = VecRestoreArrayRead(seq, &sar); CHKERRQ(ierr);
1690 *arrayOut = (double*)buf;
1691 }
1692
1693 ierr = VecScatterDestroy(&scat); CHKERRQ(ierr);
1694 ierr = VecDestroy(&seq); CHKERRQ(ierr);
1695 }
1696
1697 PetscFunctionReturn(0);
1698}
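
Usage sketch (illustrative, not part of io.c): gathering a distributed field to rank 0 and releasing the buffer. The wrapper is an assumption; the buffer is allocated with PetscMalloc1 inside the gather, so PetscFree is the matching release.

static PetscErrorCode ExampleGatherUcat(UserCtx *user)
{
  PetscErrorCode ierr;
  PetscInt       N    = 0;
  double        *vals = NULL;
  PetscFunctionBeginUser;
  ierr = VecToArrayOnRank0(user->Ucat, &N, &vals); CHKERRQ(ierr);
  if (vals) {                 /* non-NULL only on rank 0 */
    /* ... consume the N values (DMDA natural ordering for DMDA-attached Vecs) ... */
    ierr = PetscFree(vals); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
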
1699
1700/**
1701 * @brief Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
1702 *
1703 * This is a generic, type-aware routine for gathering a DMSwarm field onto rank 0.
1704 * It is a COLLECTIVE operation.
1705 *
1706 * @param[in] swarm The DMSwarm to gather from.
1707 * @param[in] field_name The name of the field to gather.
1708 * @param[out] n_total_particles (Output on rank 0) Total number of particles in the global swarm.
1709 * @param[out] n_components (Output on rank 0) Number of components for the field.
1710 * @param[out] gathered_array (Output on rank 0) A newly allocated array containing the full, gathered data.
1711 * The caller is responsible for freeing this memory and for casting it to the correct type.
1712 * @return PetscErrorCode
1713 */
1714PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
1715{
1716 PetscErrorCode ierr;
1717 PetscMPIInt rank, size;
1718 PetscInt nlocal, nglobal, bs;
1719 void *local_array_void;
1720 size_t element_size = 0;
1721 /* The data is transferred as raw bytes (MPI_BYTE), so any field type can be handled generically. */
1722
1723 PetscFunctionBeginUser;
1724
1725 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1726 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
1727
1728 // All ranks get swarm properties to determine send/receive counts
1729 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1730 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1731 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
1732
1733 // Determine the size of one element of the field's data type
1734 if (strcasecmp(field_name, "DMSwarm_pid") == 0) {
1735 element_size = sizeof(PetscInt64);
1736 } else if (strcasecmp(field_name, "DMSwarm_CellID") == 0 || strcasecmp(field_name, "DMSwarm_location_status") == 0) {
1737 element_size = sizeof(PetscInt);
1738 } else {
1739 element_size = sizeof(PetscScalar);
1740 }
1741
1742 if (rank == 0) {
1743 *n_total_particles = nglobal;
1744 *n_components = bs;
1745 *gathered_array = NULL; // Initialize output
1746 }
1747
1748 if (size == 1) { // Serial case is a simple copy
1749 if (rank == 0) {
1750 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
1751 ierr = PetscMemcpy(*gathered_array, local_array_void, nglobal * bs * element_size); CHKERRQ(ierr);
1752 }
1753 } else { // Parallel case: use MPI_Gatherv
1754 PetscMPIInt *recvcounts = NULL, *displs = NULL; /* MPI_Gatherv expects int-sized counts/displacements */
1755 if (rank == 0) {
1756 ierr = PetscMalloc1(size, &recvcounts); CHKERRQ(ierr);
1757 ierr = PetscMalloc1(size, &displs); CHKERRQ(ierr);
1758 }
1759 PetscMPIInt sendcount = (PetscMPIInt)(nlocal * bs);
1760
1761 // Gather the number of elements (not bytes) from each rank
1762 ierr = MPI_Gather(&sendcount, 1, MPI_INT, recvcounts, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1763
1764 if (rank == 0) {
1765 displs[0] = 0;
1766 // Convert counts and calculate displacements in terms of BYTES
1767 for (PetscMPIInt i = 0; i < size; i++) recvcounts[i] *= element_size;
1768 for (PetscMPIInt i = 1; i < size; i++) displs[i] = displs[i-1] + recvcounts[i-1];
1769
1770 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
1771 }
1772
1773 // Use Gatherv with MPI_BYTE to handle any data type generically
1774 ierr = MPI_Gatherv(local_array_void, nlocal * bs * element_size, MPI_BYTE,
1775 *gathered_array, recvcounts, displs, MPI_BYTE,
1776 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1777
1778 if (rank == 0) {
1779 ierr = PetscFree(recvcounts); CHKERRQ(ierr);
1780 ierr = PetscFree(displs); CHKERRQ(ierr);
1781 }
1782 }
1783
1784 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
1785
1786 PetscFunctionReturn(0);
1787}
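
Usage sketch (illustrative, not part of io.c): gathering an integer swarm field on rank 0 and casting the generic byte buffer back to its native type, following the element-size rules used above (DMSwarm_pid is stored as PetscInt64). The wrapper is an assumption.

static PetscErrorCode ExampleGatherParticleIDs(UserCtx *user)
{
  PetscErrorCode ierr;
  PetscInt       n_total = 0, n_comp = 0;
  void          *raw     = NULL;
  PetscFunctionBeginUser;
  ierr = SwarmFieldToArrayOnRank0(user->swarm, "DMSwarm_pid", &n_total, &n_comp, &raw); CHKERRQ(ierr);
  if (raw) {                              /* the gathered data exists only on rank 0 */
    PetscInt64 *pids = (PetscInt64*)raw;
    /* ... use pids[0 .. n_total*n_comp - 1] ... */
    ierr = PetscFree(pids); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
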
1788
1789/**
1790 * @brief Displays a structured banner summarizing the simulation configuration.
1791 *
1792 * This function prints key simulation parameters to standard output.
1793 * It is intended to be called ONLY by MPI rank 0.
1794 * It retrieves global domain bounds from `user->global_domain_bbox` and boundary
1795 * conditions for all faces from `user->face_bc_types`.
1796 *
1797 * @param[in] simCtx Pointer to the `SimCtx` structure. The start time, starting step,
1798 * steps to run, MPI size, particle count, and the rank-0 bounding-box
1799 * list (`simCtx->bboxlist`) are all taken from this context.
1804 *
1805 * @return PetscErrorCode Returns `0` on success.
1806 */
1807PetscErrorCode DisplayBanner(SimCtx *simCtx) // simCtx->bboxlist is only valid on rank 0
1808{
1809 PetscErrorCode ierr;
1810 PetscMPIInt rank;
1811 Cmpnts global_min_coords, global_max_coords;
1812 PetscReal StartTime;
1813 PetscInt StartStep,StepsToRun,total_num_particles;
1814 PetscMPIInt num_mpi_procs;
1815
1816 // SimCtx *simCtx = user->simCtx;
1817 UserCtx *user = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
1818 num_mpi_procs = simCtx->size;
1819 StartTime = simCtx->StartTime;
1820 StartStep = simCtx->StartStep;
1821 StepsToRun = simCtx->StepsToRun;
1822 total_num_particles = simCtx->np;
1823 BoundingBox *bboxlist_on_rank0 = simCtx->bboxlist;
1824
1825
1826 PetscFunctionBeginUser;
1827
1828 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "DisplayBanner - UserCtx pointer is NULL.");
1829 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1830
1831 if (rank == 0) {
1832 // If global_domain_bbox is not pre-populated in UserCtx, compute it here from bboxlist_on_rank0
1833 // This assumes bboxlist_on_rank0 is valid and contains all local bounding boxes on rank 0.
1834 if (bboxlist_on_rank0 && num_mpi_procs > 0) {
1835 global_min_coords = bboxlist_on_rank0[0].min_coords;
1836 global_max_coords = bboxlist_on_rank0[0].max_coords;
1837 for (PetscMPIInt p = 1; p < num_mpi_procs; ++p) {
1838 global_min_coords.x = PetscMin(global_min_coords.x, bboxlist_on_rank0[p].min_coords.x);
1839 global_min_coords.y = PetscMin(global_min_coords.y, bboxlist_on_rank0[p].min_coords.y);
1840 global_min_coords.z = PetscMin(global_min_coords.z, bboxlist_on_rank0[p].min_coords.z);
1841 global_max_coords.x = PetscMax(global_max_coords.x, bboxlist_on_rank0[p].max_coords.x);
1842 global_max_coords.y = PetscMax(global_max_coords.y, bboxlist_on_rank0[p].max_coords.y);
1843 global_max_coords.z = PetscMax(global_max_coords.z, bboxlist_on_rank0[p].max_coords.z);
1844 }
1845 // Optionally store this in user->global_domain_bbox if it's useful elsewhere
1846 // user->global_domain_bbox.min_coords = global_min_coords;
1847 // user->global_domain_bbox.max_coords = global_max_coords;
1848 } else {
1849 // Fallback or warning if bboxlist is not available for global calculation
1850 LOG_ALLOW(LOCAL, LOG_WARNING, "DisplayBanner (Rank 0) - bboxlist not provided or num_mpi_procs <= 0; using user->bbox for domain bounds.\n");
1851 // global_min_coords = user->bbox.min_coords; // Use local bbox of rank 0 as fallback
1852 // global_max_coords = user->bbox.max_coords;
1853 }
1854
1855
1856 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
1857 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
1858 ierr = PetscPrintf(PETSC_COMM_SELF, " CASE SUMMARY \n"); CHKERRQ(ierr);
1859 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
1860 ierr = PetscPrintf(PETSC_COMM_SELF, " Grid Points : %d X %d X %d\n", user->IM, user->JM, user->KM); CHKERRQ(ierr);
1861 ierr = PetscPrintf(PETSC_COMM_SELF, " Cells : %d X %d X %d\n", user->IM - 1, user->JM - 1, user->KM - 1); CHKERRQ(ierr);
1862 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (X) : %.6f to %.6f\n", (double)global_min_coords.x, (double)global_max_coords.x); CHKERRQ(ierr);
1863 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Y) : %.6f to %.6f\n", (double)global_min_coords.y, (double)global_max_coords.y); CHKERRQ(ierr);
1864 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Z) : %.6f to %.6f\n", (double)global_min_coords.z, (double)global_max_coords.z); CHKERRQ(ierr);
1865 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------- Boundary Conditions --------------------\n"); CHKERRQ(ierr);
1866 const int face_name_width = 17; // Adjusted for longer names (Zeta,Eta,Xi)
1867 for (PetscInt i_face = 0; i_face < 6; ++i_face) {
1868 BCFace current_face = (BCFace)i_face;
1869 // The BCFaceToString will now return the Xi, Eta, Zeta versions
1870 const char* face_str = BCFaceToString(current_face);
1871 const char* bc_type_str = BCTypeToString(user->boundary_faces[current_face].mathematical_type);
1872
1873 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s\n",
1874 face_name_width, face_str, bc_type_str); CHKERRQ(ierr);
1875 }
1876 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------------------------------------------------\n"); CHKERRQ(ierr);
1877 ierr = PetscPrintf(PETSC_COMM_SELF, " Start Time : %.4f\n", (double)StartTime); CHKERRQ(ierr);
1878 ierr = PetscPrintf(PETSC_COMM_SELF, " Timestep Size : %.4f\n", (double)simCtx->dt); CHKERRQ(ierr);
1879 ierr = PetscPrintf(PETSC_COMM_SELF, " Starting Step : %d\n", StartStep); CHKERRQ(ierr);
1880 ierr = PetscPrintf(PETSC_COMM_SELF, " Total Steps to Run : %d\n", StepsToRun); CHKERRQ(ierr);
1881 ierr = PetscPrintf(PETSC_COMM_SELF, " Number of MPI Processes : %d\n", num_mpi_procs); CHKERRQ(ierr);
1882 ierr = PetscPrintf(PETSC_COMM_WORLD," Number of Particles : %d\n", total_num_particles); CHKERRQ(ierr);
1883 ierr = PetscPrintf(PETSC_COMM_WORLD," Reynolds Number : %le\n", simCtx->ren); CHKERRQ(ierr);
1884 ierr = PetscPrintf(PETSC_COMM_WORLD," Stanton Number : %le\n", simCtx->st); CHKERRQ(ierr);
1885 ierr = PetscPrintf(PETSC_COMM_WORLD," CFL Number : %le\n", simCtx->cfl); CHKERRQ(ierr);
1886 ierr = PetscPrintf(PETSC_COMM_WORLD," Von-Neumann Number : %le\n", simCtx->vnn); CHKERRQ(ierr);
1887 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Initialization Mode: %d\n", simCtx->ParticleInitialization); CHKERRQ(ierr);
1888 if (simCtx->ParticleInitialization == 0) {
1889 if (user->inletFaceDefined) {
1890 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : %s (Enum Val: %d)\n", BCFaceToString(user->identifiedInletBCFace), user->identifiedInletBCFace); CHKERRQ(ierr);
1891 } else {
1892 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : --- (No INLET face identified)\n"); CHKERRQ(ierr);
1893 }
1894 }
1895 ierr = PetscPrintf(PETSC_COMM_SELF, " Field Initialization Mode : %d\n", simCtx->FieldInitialization); CHKERRQ(ierr);
1896 if (simCtx->FieldInitialization == 1) {
1897 ierr = PetscPrintf(PETSC_COMM_SELF, " Constant Velocity : x - %.4f, y - %.4f, z - %.4f \n", (double)simCtx->InitialConstantContra.x,(double)simCtx->InitialConstantContra.y,(double)simCtx->InitialConstantContra.z ); CHKERRQ(ierr);
1898 }
1899
1900 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
1901 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
1902 }
1903 PetscFunctionReturn(0);
1904}
1905
1906/**
1907 * @brief Helper function to trim leading/trailing whitespace from a string.
1908 * @param str The string to trim in-place.
1909 */
1910void TrimWhitespace(char *str) {
1911 char *start = str, *end;
1912 // Trim leading space
1913 while(isspace((unsigned char)*start)) start++;
1914 if(*start == 0) { *str = '\0'; return; } // All spaces?
1915
1916 // Trim trailing space, then shift so the caller's buffer is trimmed in-place
1917 end = start + strlen(start) - 1;
1918 while(end > start && isspace((unsigned char)*end)) end--;
1919 end[1] = '\0'; memmove(str, start, strlen(start) + 1);
1920}
1921
1922/**
1923 * @brief Initializes post-processing settings from a config file and command-line overrides.
1924 *
1925 * This function establishes the configuration for a post-processing run by:
1926 * 1. Setting hardcoded default values in the PostProcessParams struct.
1927 * 2. Reading a configuration file to override the defaults.
1928 * 3. Parsing command-line options (-startTime, -endTime, etc.) which can override
1929 * both the defaults and the file settings.
1930 *
1931 * @param configFile The path to the configuration file to parse.
1932 * @param pps Pointer to the PostProcessParams struct to be populated.
1933 * @return PetscErrorCode
1934 */
1935PetscErrorCode ParsePostProcessingSettings(SimCtx *simCtx)
1936{
1937 FILE *file;
1938 char line[1024];
1939 PetscBool startTimeSet, endTimeSet, timeStepSet;
1940
1941 PetscFunctionBeginUser;
1942
1943 if (!simCtx || !simCtx->pps) {
1944 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "SimCtx or its pps member is NULL in ParsePostProcessingSettings.");
1945 }
1946
1947 char *configFile = simCtx->PostprocessingControlFile;
1948 PostProcessParams *pps = simCtx->pps;
1949
1950
1951 // --- 1. Set Sane Defaults First ---
1952 pps->startTime = 0;
1953 pps->endTime = 0;
1954 pps->timeStep = 1;
1955 pps->outputParticles = PETSC_FALSE;
1956 pps->particle_output_freq = simCtx->LoggingFrequency; // Default to logging frequency;
1957 strcpy(pps->process_pipeline, "");
1958 strcpy(pps->output_fields_instantaneous, "Ucat,P");
1959 strcpy(pps->output_fields_averaged, "");
1960 strcpy(pps->output_prefix, "results/viz");
1961 strcpy(pps->particle_output_prefix,"results/viz");
1962 strcpy(pps->particle_fields,"velocity,CellID,weight,pid");
1963 strcpy(pps->particle_pipeline,"");
1964 strcpy(pps->particleExt,"dat"); // The input file format for particles.
1965 strcpy(pps->eulerianExt,"dat"); // The input file format for Eulerian fields.
1966
1967 // --- 2. Parse the Configuration File (overrides defaults) ---
1968 file = fopen(configFile, "r");
1969 if (file) {
1970 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing post-processing config file: %s\n", configFile);
1971 while (fgets(line, sizeof(line), file)) {
1972 char *key, *value, *comment;
1973 comment = strchr(line, '#'); if (comment) *comment = '\0';
1974 TrimWhitespace(line); if (strlen(line) == 0) continue;
1975 key = strtok(line, "="); value = strtok(NULL, "=");
1976 if (key && value) {
1977 TrimWhitespace(key); TrimWhitespace(value);
1978 if (strcmp(key, "startTime") == 0) pps->startTime = atoi(value);
1979 else if (strcmp(key, "endTime") == 0) pps->endTime = atoi(value);
1980 else if (strcmp(key, "timeStep") == 0) pps->timeStep = atoi(value);
1981 else if (strcmp(key, "output_particles") == 0) {
1982 if (strcasecmp(value, "true") == 0) pps->outputParticles = PETSC_TRUE;
1983 }
1984 else if (strcasecmp(key, "process_pipeline") == 0) {
1985 strncpy(pps->process_pipeline, value, MAX_PIPELINE_LENGTH - 1);
1986 pps->process_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0'; // Ensure null-termination
1987 } else if (strcasecmp(key, "output_fields_instantaneous") == 0) {
1988 strncpy(pps->output_fields_instantaneous, value, MAX_FIELD_LIST_LENGTH - 1);
1990 } else if (strcasecmp(key, "output_prefix") == 0) {
1991 strncpy(pps->output_prefix, value, MAX_FILENAME_LENGTH - 1);
1992 pps->output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
1993 } else if (strcasecmp(key, "particle_output_prefix") == 0) {
1994 strncpy(pps->particle_output_prefix, value, MAX_FILENAME_LENGTH - 1);
1995 pps->particle_output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
1996 } else if (strcasecmp(key, "particle_fields_instantaneous") == 0) {
1997 strncpy(pps->particle_fields, value, MAX_FIELD_LIST_LENGTH - 1);
1998 pps->particle_fields[MAX_FIELD_LIST_LENGTH - 1] = '\0';
1999 } else if (strcasecmp(key, "particle_pipeline") == 0) {
2000 strncpy(pps->particle_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2001 pps->particle_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0';
2002 } else if (strcasecmp(key, "particle_output_freq") == 0) {
2003 pps->particle_output_freq = atoi(value);
2004 } else {
2005 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown key '%s' in post-processing config file. Ignoring.\n", key);
2006 }
2007 // Add parsing for pipeline, fields, etc. in later phases
2008 }
2009 }
2010 fclose(file);
2011 } else {
2012 LOG_ALLOW(GLOBAL, LOG_WARNING, "Could not open post-processing config file '%s'. Using defaults and command-line overrides.\n", configFile);
2013 }
2014
2015 // --- 3. Parse Command-Line Options (overrides file settings and defaults) ---
2016 PetscOptionsGetInt(NULL, NULL, "-startTime", &pps->startTime, &startTimeSet);
2017 PetscOptionsGetInt(NULL, NULL, "-endTime", &pps->endTime, &endTimeSet);
2018 PetscOptionsGetInt(NULL, NULL, "-timeStep", &pps->timeStep, &timeStepSet);
2019 PetscOptionsGetBool(NULL, NULL, "-output_particles", &pps->outputParticles, NULL);
2020
2021 // If only startTime is given on command line, run for a single step
2022 if (startTimeSet && !endTimeSet) {
2023 pps->endTime = pps->startTime;
2024 }
2025
2026 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-processing configured to run from t=%d to t=%d with step %d. Particle output: %s.\n",
2027 pps->startTime, pps->endTime, pps->timeStep, pps->outputParticles ? "TRUE" : "FALSE");
2028
2029 LOG_ALLOW(GLOBAL, LOG_INFO, "Process Pipeline: %s\n", pps->process_pipeline);
2030 LOG_ALLOW(GLOBAL, LOG_INFO, "Instantaneous Output Fields: %s\n", pps->output_fields_instantaneous);
2031 LOG_ALLOW(GLOBAL, LOG_INFO, "Output Prefix: %s\n", pps->output_prefix);
2032 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Prefix: %s\n", pps->particle_output_prefix);
2033 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Fields: %s\n", pps->particle_fields);
2034 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Pipeline: %s\n", pps->particle_pipeline);
2035 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Frequency: %d\n", pps->particle_output_freq);
2036 PetscFunctionReturn(0);
2037}
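
For reference, a sketch of a control file the parser above accepts. The keys and the '#' comment and '=' separator syntax come directly from the parsing code; the values shown are placeholders, not recommendations.

# illustrative post-processing control file (placeholder values)
startTime        = 0
endTime          = 100
timeStep         = 10
output_particles = true
output_fields_instantaneous  = Ucat,P
output_prefix                = results/viz
particle_output_prefix       = results/viz
particle_fields_instantaneous = velocity,CellID,weight,pid
particle_output_freq         = 10
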
2038
2039// ---------------------------------------------------------------------
2040// UTILITY FUNCTIONS NOT USED IN THE MAIN CODE BUT MAY BE USEFUL
2041// ---------------------------------------------------------------------
2042
2043//=====================================================================
2044// 1) ReadDataFileToArray
2045
2046// See the function-level comments in io.h for a summary.
2047// This reads one value per line from an ASCII file. Rank 0 does the I/O
2048// and broadcasts the data to all ranks.
2049//=====================================================================
2050PetscInt ReadDataFileToArray(const char *filename,
2051 double **data_out,
2052 PetscInt *Nout,
2053 MPI_Comm comm)
2054{
2055 /* STEP 0: Prepare local variables & log function entry */
2056 PetscMPIInt rank, size;
2057 PetscErrorCode ierr;
2058 FILE *fp = NULL;
2059 PetscInt N = 0; /* number of lines/values read on rank 0 */
2060 double *array = NULL; /* pointer to local array on each rank */
2061 PetscInt fileExistsFlag = 0; /* 0 = doesn't exist, 1 = does exist */
2062
2064 "ReadDataFileToArray - Start reading from file: %s\n",
2065 filename);
2066
2067 /* Basic error checking: data_out, Nout must be non-null. */
2068 if (!filename || !data_out || !Nout) {
2070 "ReadDataFileToArray - Null pointer argument provided.\n");
2071 return 1;
2072 }
2073
2074 /* Determine rank/size for coordinating I/O. */
2075 MPI_Comm_rank(comm, &rank);
2076 MPI_Comm_size(comm, &size);
2077
2078 /* STEP 1: On rank 0, check if file can be opened. */
2079 if (!rank) {
2080 fp = fopen(filename, "r");
2081 if (fp) {
2082 fileExistsFlag = 1;
2083 fclose(fp);
2084 }
2085 }
2086
2087 /* STEP 2: Broadcast file existence to all ranks. */
2088 // In ReadDataFileToArray:
2089 ierr = MPI_Bcast(&fileExistsFlag, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2090
2091 if (!fileExistsFlag) {
2092 /* If file does not exist, log & return. */
2093 if (!rank) {
2095 "ReadDataFileToArray - File '%s' not found.\n",
2096 filename);
2097 }
2098 return 2;
2099 }
2100
2101 /* STEP 3: Rank 0 re-opens and reads the file, counting lines, etc. */
2102 if (!rank) {
2103 fp = fopen(filename, "r");
2104 if (!fp) {
2106 "ReadDataFileToArray - File '%s' could not be opened for reading.\n",
2107 filename);
2108 return 3;
2109 }
2110
2111 /* (3a) Count lines first. */
2112 {
2113 char line[256];
2114 while (fgets(line, sizeof(line), fp)) {
2115 N++;
2116 }
2117 }
2118
2120 "ReadDataFileToArray - File '%s' has %d lines.\n",
2121 filename, N);
2122
2123 /* (3b) Allocate array on rank 0. */
2124 array = (double*)malloc(N * sizeof(double));
2125 if (!array) {
2126 fclose(fp);
2128 "ReadDataFileToArray - malloc failed for array.\n");
2129 return 4;
2130 }
2131
2132 /* (3c) Rewind & read values into array. */
2133 rewind(fp);
2134 {
2135 PetscInt i = 0;
2136 char line[256];
2137 while (fgets(line, sizeof(line), fp)) {
2138 double val;
2139 if (sscanf(line, "%lf", &val) == 1) {
2140 array[i++] = val;
2141 }
2142 }
2143 }
2144 fclose(fp);
2145
2147 "ReadDataFileToArray - Successfully read %d values from '%s'.\n",
2148 N, filename);
2149 }
2150
2151 /* STEP 4: Broadcast the integer N to all ranks. */
2152 ierr = MPI_Bcast(&N, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2153
2154 /* STEP 5: Each rank allocates an array to receive the broadcast if rank>0. */
2155 if (rank) {
2156 array = (double*)malloc(N * sizeof(double));
2157 if (!array) {
2159 "ReadDataFileToArray - malloc failed on rank %d.\n",
2160 rank);
2161 return 5;
2162 }
2163 }
2164
2165 /* STEP 6: Broadcast the actual data from rank 0 to all. */
2166 ierr = MPI_Bcast(array, N, MPI_DOUBLE, 0, comm); CHKERRQ(ierr);
2167
2168 /* STEP 7: Assign outputs on all ranks. */
2169 *data_out = array;
2170 *Nout = N;
2171
2173 "ReadDataFileToArray - Done. Provided array of length=%d to all ranks.\n",
2174 N);
2175 return 0; /* success */
2176}
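
Usage sketch (illustrative, not part of io.c): reading a one-value-per-line ASCII file on every rank. "weights.dat" and the wrapper are assumptions; note that this routine returns its own small integer codes rather than going through CHKERRQ, and the buffer comes from malloc(), so it is released with free().

static int ExampleReadWeights(MPI_Comm comm)
{
  double   *vals  = NULL;
  PetscInt  nvals = 0;
  if (ReadDataFileToArray("weights.dat", &vals, &nvals, comm) != 0) return 1;
  /* every rank now holds the same nvals broadcast values */
  free(vals);
  return 0;
}
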
2177
2178/**
2179 * @brief Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
2180 *
2181 * This function uses \c ReadFieldData to fill a PETSc Vec with coordinate data,
2182 * then leverages \c VecToArrayOnRank0 to gather that data into a contiguous array
2183 * (valid on rank 0 only).
2184 *
2185 * @param[in] timeIndex The time index used to construct file names.
2186 * @param[in] user Pointer to the user context.
2187 * @param[out] coordsArray On rank 0, will point to a newly allocated array holding the coordinates.
2188 * @param[out] Ncoords On rank 0, the length of \p coordsArray. On other ranks, 0.
2189 *
2190 * @return PetscErrorCode Returns 0 on success, or non-zero on failures.
2191 */
2192PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex,
2193 UserCtx *user,
2194 double **coordsArray,
2195 PetscInt *Ncoords)
2196{
2197 PetscFunctionBeginUser;
2198
2199 PetscErrorCode ierr;
2200 Vec coordsVec;
2201
2202 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositionsFromFile - Creating coords Vec.\n");
2203 ierr = VecCreate(PETSC_COMM_WORLD, &coordsVec);CHKERRQ(ierr);
2204 ierr = VecSetFromOptions(coordsVec);CHKERRQ(ierr);
2205
2206 // For example: "position" is the name of the coordinate data
2207 ierr = ReadFieldData(user, "position", coordsVec, timeIndex, "dat");
2208 if (ierr) {
2210 "ReadPositionsFromFile - Error reading position data (ti=%d).\n",
2211 timeIndex);
2212 PetscFunctionReturn(ierr);
2213 }
2214
2215 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositions - Gathering coords Vec to rank 0.\n");
2216 ierr = VecToArrayOnRank0(coordsVec, Ncoords, coordsArray);CHKERRQ(ierr);
2217
2218 ierr = VecDestroy(&coordsVec);CHKERRQ(ierr);
2219
2221 "ReadPositionsFromFile - Successfully gathered coordinates. Ncoords=%d.\n", *Ncoords);
2222 PetscFunctionReturn(0);
2223}
2224
2225
2226/**
2227 * @brief Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
2228 *
2229 * This function wraps \c ReadFieldData and \c VecToArrayOnRank0 into a single step.
2230 * The gathered data is stored in \p scalarArray on rank 0, with its length in \p Nscalars.
2231 *
2232 * @param[in] timeIndex The time index used to construct file names.
2233 * @param[in] fieldName Name of the field to be read (e.g., "velocity").
2234 * @param[in] user Pointer to the user context.
2235 * @param[out] scalarArray On rank 0, a newly allocated array holding the field data.
2236 * @param[out] Nscalars On rank 0, length of \p scalarArray. On other ranks, 0.
2237 *
2238 * @return PetscErrorCode Returns 0 on success, or non-zero on failures.
2239 */
2240PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex,
2241 const char *fieldName,
2242 UserCtx *user,
2243 double **scalarArray,
2244 PetscInt *Nscalars)
2245{
2246 PetscFunctionBeginUser;
2247
2248 PetscErrorCode ierr;
2249 Vec fieldVec;
2250
2251 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadFieldDataWrapper - Creating field Vec.\n");
2252 ierr = VecCreate(PETSC_COMM_WORLD, &fieldVec);CHKERRQ(ierr);
2253 ierr = VecSetFromOptions(fieldVec);CHKERRQ(ierr);
2254
2255 ierr = ReadFieldData(user, fieldName, fieldVec, timeIndex, "dat");
2256 if (ierr) {
2258 "ReadFieldDataWrapper - Error reading field '%s' (ti=%d).\n",
2259 fieldName, timeIndex);
2260 PetscFunctionReturn(ierr);
2261 }
2262
2263 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadFieldDataWrapper - Gathering field Vec to rank 0.\n");
2264 ierr = VecToArrayOnRank0(fieldVec, Nscalars, scalarArray);CHKERRQ(ierr);
2265
2266 ierr = VecDestroy(&fieldVec);CHKERRQ(ierr);
2267
2269 "ReadFieldDataWrapper - Successfully gathered field '%s'. Nscalars=%d.\n",
2270 fieldName, *Nscalars);
2271 PetscFunctionReturn(0);
2272}
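
Usage sketch (illustrative, not part of io.c): gathering a previously written field onto rank 0 for post-processing. The wrapper and the time index are assumptions; "pfield" is the pressure prefix written by WriteSimulationFields above, and the gathered buffer is released with PetscFree.

static PetscErrorCode ExampleGatherPressure(UserCtx *user, PetscInt timeIndex)
{
  PetscErrorCode ierr;
  PetscInt       n = 0;
  double        *p = NULL;
  PetscFunctionBeginUser;
  ierr = ReadFieldDataToRank0(timeIndex, "pfield", user, &p, &n); CHKERRQ(ierr);
  if (p) {                     /* rank 0 only */
    /* ... post-process the n gathered values ... */
    ierr = PetscFree(p); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
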