PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
io.c
Go to the documentation of this file.
1/**
2 * @file io.c
3 * @brief Implementation of data input/output routines, focusing on grid configuration.
4 *
5 * This module provides functions to parse grid geometry information, either from
6 * command-line options for programmatically generated grids or by reading the
7 * header of a grid definition file.
8 */
9
10#include "io.h"
11
12// =============================================================================
13// STATIC (PRIVATE) VARIABLES FOR ONE-TIME FILE READ
14// =============================================================================
15
16/** @brief Stores the number of blocks read from the grid file. */
17static PetscInt g_nblk_from_file = 0;
18/** @brief Caches the IM dimensions for all blocks read from the grid file. */
19static PetscInt* g_IMs_from_file = NULL;
20/** @brief Caches the JM dimensions for all blocks read from the grid file. */
21static PetscInt* g_JMs_from_file = NULL;
22/** @brief Caches the KM dimensions for all blocks read from the grid file. */
23static PetscInt* g_KMs_from_file = NULL;
24/** @brief A flag to ensure the grid file is read only once. */
25static PetscBool g_file_has_been_read = PETSC_FALSE;
26
27
28// =============================================================================
29// PUBLIC FUNCTION IMPLEMENTATIONS
30// =============================================================================
31
/**
 * @brief Trims leading and trailing whitespace from a string in-place.
 *
 * After the call, @p str holds the trimmed content shifted to the start of
 * the buffer and remains NUL-terminated. A NULL pointer and an empty string
 * are both safe no-ops.
 *
 * BUG FIX: the previous version computed `str + strlen(str) - 1` before
 * checking for an empty string, which forms a pointer one element BEFORE
 * the array — undefined behavior per the C standard. The length is now
 * checked first, and the all-whitespace case is handled explicitly.
 *
 * @param str The string to be trimmed (modified in place). May be NULL.
 */
void TrimWhitespace(char *str) {
    if (!str) return;

    size_t len = strlen(str);
    if (len == 0) return; /* avoid forming str - 1 below (UB on empty input) */

    // Find the first non-whitespace character.
    char *start = str;
    while (isspace((unsigned char)*start)) {
        start++;
    }

    // All-whitespace input: collapse to the empty string and stop.
    if (*start == '\0') {
        str[0] = '\0';
        return;
    }

    // Walk backwards from the last character to the last non-whitespace one.
    char *end = str + len - 1;
    while (end > start && isspace((unsigned char)*end)) {
        end--;
    }

    // Null-terminate just after the last non-whitespace character.
    *(end + 1) = '\0';

    // If there was leading whitespace, shift the content to the buffer start.
    if (str != start) {
        memmove(str, start, (size_t)(end - start) + 2); // +2 includes the new NUL
    }
}
60
61
#undef __FUNCT__
#define __FUNCT__ "ReadGridGenerationInputs"
/**
 * @brief Parses command-line options for a programmatically generated grid for a SINGLE block.
 *
 * This function is responsible for reading all per-block array options related
 * to grid geometry, such as dimensions (`-im`), domain bounds (`-xMins`, `-xMaxs`),
 * and stretching ratios (`-rxs`). It reads the entire array for each option, then
 * uses the block index stored in `user->_this` to select the correct value and
 * populate the fields of the provided `UserCtx` struct.
 *
 * All temporary per-block arrays are allocated, filled with defaults
 * (10x10x10 cells on the unit cube, uniform stretching), optionally
 * overridden from the options database, and freed before returning.
 *
 * @param user Pointer to the `UserCtx` for a specific block. The function will
 *             populate the geometric fields (`IM`, `Min_X`, `rx`, etc.) within this struct.
 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
 */
PetscErrorCode ReadGridGenerationInputs(UserCtx *user)
{
    PetscErrorCode ierr;
    SimCtx *simCtx = user->simCtx;
    PetscInt nblk = simCtx->block_number;
    PetscInt block_index = user->_this;
    PetscBool found;

    // Temporary arrays to hold the parsed values for ALL blocks
    PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL, *cgrids = NULL;
    PetscReal *xMins = NULL, *xMaxs = NULL, *rxs = NULL;
    PetscReal *yMins = NULL, *yMaxs = NULL, *rys = NULL;
    PetscReal *zMins = NULL, *zMaxs = NULL, *rzs = NULL;

    PetscFunctionBeginUser;

    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Reading generated grid inputs for block %d.\n", simCtx->rank, block_index);

    // Guard against a block index beyond the configured block count.
    // NOTE(review): the "%d" specifiers assume PetscInt is 32-bit; if the
    // library is built with 64-bit indices, PetscInt_FMT would be needed — confirm.
    if (block_index >= nblk) {
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block index %d is out of range for nblk=%d", block_index, nblk);
    }

    // --- Allocate temporary storage for all array options ---
    ierr = PetscMalloc4(nblk, &IMs, nblk, &JMs, nblk, &KMs, nblk, &cgrids); CHKERRQ(ierr);
    ierr = PetscMalloc6(nblk, &xMins, nblk, &xMaxs, nblk, &rxs, nblk, &yMins, nblk, &yMaxs, nblk, &rys); CHKERRQ(ierr);
    ierr = PetscMalloc3(nblk, &zMins, nblk, &zMaxs, nblk, &rzs); CHKERRQ(ierr);

    // --- Set default values for the temporary arrays ---
    // Defaults: 10 cells per direction, unit-cube bounds, uniform (rx=1) stretching.
    for (PetscInt i = 0; i < nblk; ++i) {
        IMs[i] = 10; JMs[i] = 10; KMs[i] = 10; cgrids[i] = 0;
        xMins[i] = 0.0; xMaxs[i] = 1.0; rxs[i] = 1.0;
        yMins[i] = 0.0; yMaxs[i] = 1.0; rys[i] = 1.0;
        zMins[i] = 0.0; zMaxs[i] = 1.0; rzs[i] = 1.0;
    }

    // --- Parse the array options from the command line / control file ---
    // `count` is reset before each query because PetscOptionsGet*Array uses it
    // as both the input capacity and the output count.
    // NOTE(review): `found` is overwritten by every query and never checked —
    // absent options silently keep the defaults above; confirm this is intended.
    PetscInt count;
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMins", xMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMaxs", xMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rxs", rxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMins", yMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMaxs", yMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rys", rys, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMins", zMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMaxs", zMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rzs", rzs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-cgrids", cgrids, &count, &found); CHKERRQ(ierr);

    // --- Assign the parsed values to the specific UserCtx struct passed in ---
    user->IM = IMs[block_index];
    user->JM = JMs[block_index];
    user->KM = KMs[block_index];
    user->Min_X = xMins[block_index];
    user->Max_X = xMaxs[block_index];
    user->rx = rxs[block_index];
    user->Min_Y = yMins[block_index];
    user->Max_Y = yMaxs[block_index];
    user->ry = rys[block_index];
    user->Min_Z = zMins[block_index];
    user->Max_Z = zMaxs[block_index];
    user->rz = rzs[block_index];
    user->cgrid = cgrids[block_index];

    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d grid generation inputs set: IM=%d, JM=%d, KM=%d\n",
              simCtx->rank, block_index, user->IM, user->JM, user->KM);
    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d bounds: X=[%.2f, %.2f], Y=[%.2f, %.2f], Z=[%.2f, %.2f]\n",
              simCtx->rank, block_index, user->Min_X, user->Max_X, user->Min_Y, user->Max_Y, user->Min_Z, user->Max_Z);

    // --- Clean up temporary storage ---
    ierr = PetscFree4(IMs, JMs, KMs, cgrids); CHKERRQ(ierr);
    ierr = PetscFree6(xMins, xMaxs, rxs, yMins, yMaxs, rys); CHKERRQ(ierr);
    ierr = PetscFree3(zMins, zMaxs, rzs); CHKERRQ(ierr);

    PetscFunctionReturn(0);
}
157
158/**
159 * @brief Parses grid resolution arrays (`-im`, `-jm`, `-km`) once and applies them to all finest-grid blocks.
160 */
161PetscErrorCode PopulateFinestUserGridResolutionFromOptions(UserCtx *finest_users, PetscInt nblk)
162{
163 PetscErrorCode ierr;
164 PetscBool found;
165 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL;
166 SimCtx *simCtx = NULL;
167
168 PetscFunctionBeginUser;
169
170 if (!finest_users) {
171 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "finest_users cannot be NULL.");
172 }
173 if (nblk <= 0) {
174 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nblk must be positive. Got %d.", nblk);
175 }
176 simCtx = finest_users[0].simCtx;
177
178 ierr = PetscMalloc3(nblk, &IMs, nblk, &JMs, nblk, &KMs); CHKERRQ(ierr);
179 for (PetscInt i = 0; i < nblk; ++i) {
180 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10;
181 }
182
183 PetscInt count;
184 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
185 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
186 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
187
188 for (PetscInt bi = 0; bi < nblk; ++bi) {
189 finest_users[bi].IM = IMs[bi];
190 finest_users[bi].JM = JMs[bi];
191 finest_users[bi].KM = KMs[bi];
192 if (simCtx) {
194 "Rank %d: Preloaded analytical grid resolution for block %d: IM=%d, JM=%d, KM=%d\n",
195 simCtx->rank, bi, finest_users[bi].IM, finest_users[bi].JM, finest_users[bi].KM);
196 }
197 }
198
199 ierr = PetscFree3(IMs, JMs, KMs); CHKERRQ(ierr);
200 PetscFunctionReturn(0);
201}
202
203
204#undef __FUNCT__
205#define __FUNCT__ "ReadGridFile"
206/**
207 * @brief Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
208 *
209 * This function uses a static-variable pattern to ensure the grid file header
210 * is read only once, collectively, by all processes. The first time any process
211 * calls this function, it triggers a collective operation where rank 0 reads the
212 * file and broadcasts the dimensions for all blocks. This data is stored in
213 * static "cached" arrays.
214 *
215 * On every call (including the first), the function retrieves the dimensions for the
216 * specific block (identified by `user->_this`) from the cached arrays and populates
217 * the `IM`, `JM`, and `KM` fields of the provided `UserCtx`.
218 *
219 * @param user Pointer to the `UserCtx` for a specific block.
220 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
221 */
222PetscErrorCode ReadGridFile(UserCtx *user)
223{
224 PetscErrorCode ierr;
225 SimCtx *simCtx = user->simCtx;
226 PetscInt block_index = user->_this;
227
228 PetscFunctionBeginUser;
230
231 // --- One-Time Read and Broadcast Logic ---
233 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "First call to ReadGridFile. Reading and broadcasting grid file header from '%s'...\n", simCtx->grid_file);
234 PetscMPIInt rank = simCtx->rank;
235 PetscInt nblk = simCtx->block_number;
236
237 if (rank == 0) {
238 FILE *fd = fopen(simCtx->grid_file, "r");
239 if (!fd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open file: %s", simCtx->grid_file);
240
241 fscanf(fd, "%d\n", &g_nblk_from_file);
242 // ---- Read first token; decide whether it is the header or nblk ----
243 char firstTok[32] = {0};
244 if (fscanf(fd, "%31s", firstTok) != 1)
245 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Empty grid file: %s", simCtx->grid_file);
246
247 if (strcmp(firstTok, "PICGRID") == 0) {
248 // Header is present – read nblk from the next line
249 if (fscanf(fd, "%d", &g_nblk_from_file) != 1)
250 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected number of blocks after \"PICGRID\" in %s", simCtx->grid_file);
251 } else {
252 // No header – the token we just read is actually nblk
253 g_nblk_from_file = (PetscInt)strtol(firstTok, NULL, 10);
254 }
255 if (g_nblk_from_file != nblk) {
256 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Mismatch: -nblk is %d but grid file specifies %d blocks.", nblk, g_nblk_from_file);
257 }
258
259 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
260 for (PetscInt i = 0; i < nblk; ++i) {
261 if (fscanf(fd, "%d %d %d\n", &g_IMs_from_file[i], &g_JMs_from_file[i], &g_KMs_from_file[i]) != 3) {
262 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected 3 integers for block %d in %s", i, simCtx->grid_file);
263 }
264 }
265 fclose(fd);
266 }
267
268 // Broadcast nblk to verify (optional, good practice)
269 ierr = MPI_Bcast(&g_nblk_from_file, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
270
271 // Allocate on other ranks before receiving the broadcast
272 if (rank != 0) {
273 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
274 }
275
276 // Broadcast the data arrays
277 ierr = MPI_Bcast(g_IMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
278 ierr = MPI_Bcast(g_JMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
279 ierr = MPI_Bcast(g_KMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
280
281 g_file_has_been_read = PETSC_TRUE;
282 LOG_ALLOW(GLOBAL, LOG_INFO, "Grid file header read and broadcast complete.\n");
283 }
284
285 // --- Per-Block Assignment Logic (runs on every call) ---
286 user->IM = g_IMs_from_file[block_index];
287 user->JM = g_JMs_from_file[block_index];
288 user->KM = g_KMs_from_file[block_index];
289
290 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Set file inputs for Block %d: IM=%d, JM=%d, KM=%d\n",
291 simCtx->rank, block_index, user->IM, user->JM, user->KM);
292
294 PetscFunctionReturn(0);
295}
296
297
298//================================================================================
299//
300// PRIVATE HELPER FUNCTIONS
301//
302//================================================================================
303
304/**
305 * @brief Frees the memory allocated for a linked list of BC_Param structs.
306 * @param head A pointer to the head of the linked list to be freed.
307 */
309 BC_Param *current = head;
310 while (current != NULL) {
311 BC_Param *next = current->next;
312 PetscFree(current->key);
313 PetscFree(current->value);
314 PetscFree(current);
315 current = next;
316 }
317}
318
319/**
320 * @brief Converts a string representation of a face to a BCFace enum.
321 * @param str The input string (e.g., "-Xi", "+Zeta"). Case-insensitive.
322 * @param[out] face_out The resulting BCFace enum.
323 * @return 0 on success.
324 */
325PetscErrorCode StringToBCFace(const char* str, BCFace* face_out) {
326 if (strcasecmp(str, "-Xi") == 0) *face_out = BC_FACE_NEG_X;
327 else if (strcasecmp(str, "+Xi") == 0) *face_out = BC_FACE_POS_X;
328 else if (strcasecmp(str, "-Eta") == 0) *face_out = BC_FACE_NEG_Y;
329 else if (strcasecmp(str, "+Eta") == 0) *face_out = BC_FACE_POS_Y;
330 else if (strcasecmp(str, "-Zeta") == 0) *face_out = BC_FACE_NEG_Z;
331 else if (strcasecmp(str, "+Zeta") == 0) *face_out = BC_FACE_POS_Z;
332 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown face specifier: %s", str);
333 return 0;
334}
335
336/**
337 * @brief Converts a string representation of a BC type to a BCType enum.
338 * @param str The input string (e.g., "WALL", "INLET"). Case-insensitive.
339 * @param[out] type_out The resulting BCType enum.
340 * @return 0 on success.
341 */
342PetscErrorCode StringToBCType(const char* str, BCType* type_out) {
343 if (strcasecmp(str, "WALL") == 0) *type_out = WALL;
344 else if (strcasecmp(str, "SYMMETRY") == 0) *type_out = SYMMETRY;
345 else if (strcasecmp(str, "INLET") == 0) *type_out = INLET;
346 else if (strcasecmp(str, "OUTLET") == 0) *type_out = OUTLET;
347 else if (strcasecmp(str, "PERIODIC") == 0) *type_out = PERIODIC;
348 // ... add other BCTypes here ...
349 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Type string: %s", str);
350 return 0;
351}
352
353/**
354 * @brief Converts a string representation of a handler to a BCHandlerType enum.
355 * @param str The input string (e.g., "noslip", "constant_velocity"). Case-insensitive.
356 * @param[out] handler_out The resulting BCHandlerType enum.
357 * @return 0 on success.
358 */
359PetscErrorCode StringToBCHandlerType(const char* str, BCHandlerType* handler_out) {
360 if (strcasecmp(str, "noslip") == 0) *handler_out = BC_HANDLER_WALL_NOSLIP;
361 else if (strcasecmp(str, "constant_velocity") == 0) *handler_out = BC_HANDLER_INLET_CONSTANT_VELOCITY;
362 else if (strcasecmp(str, "conservation") == 0) *handler_out = BC_HANDLER_OUTLET_CONSERVATION;
363 else if (strcasecmp(str, "parabolic") == 0) *handler_out = BC_HANDLER_INLET_PARABOLIC;
364 else if (strcasecmp(str,"geometric") == 0) *handler_out = BC_HANDLER_PERIODIC_GEOMETRIC;
365 else if (strcasecmp(str,"constant_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX;
366 else if (strcasecmp(str,"initial_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX;
367 // ... add other BCHandlerTypes here ...
368 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Handler string: %s", str);
369 return 0;
370}
371
372/**
373 * @brief Validates that a specific handler is compatible with a general BC type.
374 * @param type The general BCType.
375 * @param handler The specific BCHandlerType.
376 * @return 0 if compatible, error code otherwise.
377 */
378PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler) {
379 switch (type) {
380 case OUTLET:
381 if(handler != BC_HANDLER_OUTLET_CONSERVATION) return PETSC_ERR_ARG_WRONG;
382 break;
383 case WALL:
384 if (handler != BC_HANDLER_WALL_NOSLIP && handler != BC_HANDLER_WALL_MOVING) return PETSC_ERR_ARG_WRONG;
385 break;
386 case INLET:
387 if (handler != BC_HANDLER_INLET_CONSTANT_VELOCITY && handler != BC_HANDLER_INLET_PARABOLIC) return PETSC_ERR_ARG_WRONG;
388 break;
389 case PERIODIC:
390 if(handler != BC_HANDLER_PERIODIC_GEOMETRIC && handler != BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX && handler != BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX) return PETSC_ERR_ARG_WRONG;
391 // ... add other validation cases here ...
392 default: break;
393 }
394 return 0; // Combination is valid
395}
396
397/**
398 * @brief Searches a BC_Param linked list for a key and returns its value as a double.
399 * @param params The head of the BC_Param linked list.
400 * @param key The key to search for (case-insensitive).
401 * @param[out] value_out The found value, converted to a PetscReal.
402 * @param[out] found Set to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
403 * @return 0 on success.
404 */
405PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found) {
406 *found = PETSC_FALSE;
407 *value_out = 0.0;
408 if (!key) return 0; // No key to search for
409
410 BC_Param *current = params;
411 while (current) {
412 if (strcasecmp(current->key, key) == 0) {
413 *value_out = atof(current->value);
414 *found = PETSC_TRUE;
415 return 0; // Found it, we're done
416 }
417 current = current->next;
418 }
419 return 0; // It's not an error to not find the key.
420}
421
422/**
423 * @brief Searches a BC_Param linked list for a key and returns its value as a bool.
424 * @param params The head of the BC_Param linked list.
425 * @param key The key to search for (case-insensitive).
426 * @param[out] value_out The found value, converted to a PetscBool.
427 * @param[out] found Set to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
428 * @return 0 on success.
429 */
430PetscErrorCode GetBCParamBool(BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found) {
431 *found = PETSC_FALSE;
432 *value_out = PETSC_FALSE;
433 if (!key) return 0; // No key to search for
434
435 BC_Param *current = params;
436 while (current) {
437 if (strcasecmp(current->key, key) == 0) {
438 // Key was found.
439 *found = PETSC_TRUE;
440
441 // Check the value string. Default to FALSE if the value is NULL or doesn't match a "true" string.
442 if (current->value &&
443 (strcasecmp(current->value, "true") == 0 ||
444 strcmp(current->value, "1") == 0 ||
445 strcasecmp(current->value, "yes") == 0))
446 {
447 *value_out = PETSC_TRUE;
448 } else {
449 *value_out = PETSC_FALSE;
450 }
451 return 0; // Found it, we're done
452 }
453 current = current->next;
454 }
455 return 0; // It's not an error to not find the key.
456}
457
458//================================================================================
459//
460// PUBLIC PARSING FUNCTION
461//
462//================================================================================
#undef __FUNCT__
#define __FUNCT__ "ParseAllBoundaryConditions"
/**
 * @brief Parses the boundary conditions file to configure the type, handler, and
 *        any associated parameters for all 6 global faces of the domain.
 *
 * This function performs the following steps:
 *   1. On MPI rank 0, it reads the specified configuration file line-by-line.
 *   2. It parses each line for `<Face> <Type> <Handler> [param=value]...` format.
 *   3. It validates the parsed strings and stores the configuration, including a
 *      linked list of parameters, in a temporary array.
 *   4. It then serializes this configuration and broadcasts it to all other MPI ranks.
 *   5. All ranks (including rank 0) then deserialize the broadcasted data to populate
 *      their local `user->boundary_faces` array identically.
 *   6. It also sets legacy fields in UserCtx for compatibility with other modules.
 *
 * Implementation notes (why the code is shaped this way):
 *   - The `strtok` calls for the param loop deliberately continue the SAME
 *     tokenization sequence started for face/type/handler — do not insert an
 *     unrelated strtok between them.
 *   - The linked lists are built via a pointer-to-pointer (`param_next_ptr`)
 *     so append is O(1) without a tail special-case.
 *   - Parameters are broadcast one key/value pair at a time in fixed 256-byte
 *     buffers; keys/values longer than 255 chars are silently truncated.
 *
 * @param[in,out] user The main UserCtx struct where the final configuration
 *                     for all ranks will be stored.
 * @param[in]     bcs_input_filename The path to the boundary conditions configuration file.
 * @return PetscErrorCode 0 on success, error code on failure.
 */
PetscErrorCode ParseAllBoundaryConditions(UserCtx *user, const char *bcs_input_filename)
{
    PetscErrorCode ierr;
    PetscMPIInt rank;

    // Temporary storage for rank 0 to build the configuration before broadcasting.
    BoundaryFaceConfig configs_rank0[6];

    PetscFunctionBeginUser;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);

    if (rank == 0) {
        FILE *file;
        char line_buffer[1024];

        // Initialize the temporary config array with safe defaults on rank 0.
        // Default: every face is a no-slip wall with no parameters.
        for (int i = 0; i < 6; i++) {
            configs_rank0[i].face_id = (BCFace)i;
            configs_rank0[i].mathematical_type = WALL;
            configs_rank0[i].handler_type = BC_HANDLER_WALL_NOSLIP;
            configs_rank0[i].params = NULL;
            configs_rank0[i].handler = NULL; // Handler object is not created here.
        }

        LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing BC configuration from '%s' on rank 0... \n", bcs_input_filename);
        file = fopen(bcs_input_filename, "r");
        if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_input_filename);

        while (fgets(line_buffer, sizeof(line_buffer), file)) {
            char *current_pos = line_buffer;
            while (isspace((unsigned char)*current_pos)) current_pos++; // Skip leading whitespace
            // Skip comments and blank lines.
            if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n' || *current_pos == '\r') continue;

            // Tokenize the three mandatory columns: <Face> <Type> <Handler>.
            char *face_str = strtok(current_pos, " \t\n\r");
            char *type_str = strtok(NULL, " \t\n\r");
            char *handler_str = strtok(NULL, " \t\n\r");

            if (!face_str || !type_str || !handler_str) {
                LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed line in bcs.dat, skipping: %s \n", line_buffer);
                continue;
            }

            BCFace face_enum;
            BCType type_enum;
            BCHandlerType handler_enum;
            const char* handler_name_for_log;

            // --- Convert strings to enums and validate ---
            ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
            ierr = StringToBCType(type_str, &type_enum); CHKERRQ(ierr);
            ierr = StringToBCHandlerType(handler_str, &handler_enum); CHKERRQ(ierr);
            ierr = ValidateBCHandlerForBCType(type_enum, handler_enum);
            if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Validation failed: Handler '%s' is not valid for Type '%s' on Face '%s'.\n", handler_str, type_str, face_str);

            // Store the core types for the corresponding face
            configs_rank0[face_enum].mathematical_type = type_enum;
            configs_rank0[face_enum].handler_type = handler_enum;
            handler_name_for_log = BCHandlerTypeToString(handler_enum); // Assumes this utility exists
            LOG_ALLOW(GLOBAL, LOG_DEBUG, "  Parsed Face '%s': Type=%s, Handler=%s \n", face_str, type_str, handler_name_for_log);

            // --- Parse optional key=value parameters for this face ---
            // A later line for the same face replaces any earlier params list.
            FreeBC_ParamList(configs_rank0[face_enum].params); // Clear any previous (default) params
            configs_rank0[face_enum].params = NULL;
            BC_Param **param_next_ptr = &configs_rank0[face_enum].params; // Pointer to the 'next' pointer to build the list

            char* token;
            // Continues the strtok sequence started above — consumes the rest of the line.
            while ((token = strtok(NULL, " \t\n\r")) != NULL) {
                char* equals_ptr = strchr(token, '=');
                if (!equals_ptr) {
                    LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed parameter '%s' on face '%s', skipping. \n", token, face_str);
                    continue;
                }

                *equals_ptr = '\0'; // Temporarily split the string at '=' to separate key and value
                char* key_str = token;
                char* value_str = equals_ptr + 1;

                // Append a new node with heap-owned copies of key and value.
                BC_Param *new_param;
                ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
                ierr = PetscStrallocpy(key_str, &new_param->key); CHKERRQ(ierr);
                ierr = PetscStrallocpy(value_str, &new_param->value); CHKERRQ(ierr);
                new_param->next = NULL;

                *param_next_ptr = new_param;
                param_next_ptr = &new_param->next;
                LOG_ALLOW(GLOBAL, LOG_TRACE, "    - Found param: [%s] = [%s] \n", new_param->key, new_param->value);
            }
        }
        fclose(file);
    }

    // =========================================================================
    //                 BROADCASTING THE CONFIGURATION FROM RANK 0
    // =========================================================================
    // This is a critical step to ensure all processes have the same configuration.

    LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d broadcasting/receiving BC configuration.\n", rank);

    for (int i = 0; i < 6; i++) {
        // --- Broadcast simple enums ---
        if (rank == 0) {
            user->boundary_faces[i] = configs_rank0[i]; // Rank 0 populates its final struct
        }
        // NOTE(review): enums are broadcast as MPI_INT — assumes the enum
        // representation is int-sized on all ranks (true for homogeneous clusters).
        ierr = MPI_Bcast(&user->boundary_faces[i].mathematical_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
        ierr = MPI_Bcast(&user->boundary_faces[i].handler_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

        // --- Serialize and Broadcast the parameter linked list ---
        PetscInt n_params = 0;
        if (rank == 0) { // On rank 0, count the number of parameters to send
            for (BC_Param *p = user->boundary_faces[i].params; p; p = p->next) n_params++;
        }
        ierr = MPI_Bcast(&n_params, 1, MPI_INT, 0, PETSC_COMM_WORLD);CHKERRQ(ierr);

        if (rank != 0) { // Non-root ranks need to receive and build the list
            FreeBC_ParamList(user->boundary_faces[i].params); // Ensure list is empty before building
            user->boundary_faces[i].params = NULL;
        }

        BC_Param **param_next_ptr = &user->boundary_faces[i].params;

        // Ship each param as a fixed-size key/value buffer pair (255-char max each).
        for (int j = 0; j < n_params; j++) {
            char key_buf[256] = {0}, val_buf[256] = {0};
            if (rank == 0) {
                // On rank 0, navigate to the j-th param and copy its data to buffers.
                // O(n^2) over the list, but n_params is tiny in practice.
                BC_Param *p = user->boundary_faces[i].params;
                for (int k = 0; k < j; k++) p = p->next;
                strncpy(key_buf, p->key, 255);
                strncpy(val_buf, p->value, 255);
            }

            ierr = MPI_Bcast(key_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
            ierr = MPI_Bcast(val_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

            if (rank != 0) {
                // On non-root ranks, deserialize: create a new node and append it
                BC_Param *new_param;
                ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
                ierr = PetscStrallocpy(key_buf, &new_param->key); CHKERRQ(ierr);
                ierr = PetscStrallocpy(val_buf, &new_param->value); CHKERRQ(ierr);
                new_param->next = NULL;
                *param_next_ptr = new_param;
                param_next_ptr = &new_param->next;
            } else {
                // On rank 0, just advance the pointer for the next iteration
                param_next_ptr = &((*param_next_ptr)->next);
            }
        }
        user->boundary_faces[i].face_id = (BCFace)i; // Ensure face_id is set on all ranks
    }

    // --- Set legacy fields for compatibility with particle system ---
    // The FIRST face declared INLET becomes the particle-initialization face.
    user->inletFaceDefined = PETSC_FALSE;
    for (int i=0; i<6; i++) {

        if (user->boundary_faces[i].mathematical_type == INLET && !user->inletFaceDefined) {
            user->inletFaceDefined = PETSC_TRUE;
            user->identifiedInletBCFace = (BCFace)i;
            LOG_ALLOW(GLOBAL, LOG_INFO, "Inlet face for particle initialization identified as Face %d.\n", i);
            break; // Found the first one, stop looking
        }
    }


    if (rank == 0) {
        // Rank 0 can now free the linked lists it created for the temporary storage.
        // As written, user->boundary_faces was populated directly on rank 0 (shallow
        // struct copy shares the params pointers), so no extra free is needed.
        // for(int i=0; i<6; i++) FreeBC_ParamList(configs_rank0[i].params); // This would be needed if we used configs_rank0 exclusively
    }

    PetscFunctionReturn(0);
}
657
658//================================================================================
659//
660// PRIVATE HELPER FUNCTIONS
661//
662//================================================================================
663
664// ... (existing helper functions like FreeBC_ParamList, StringToBCFace, etc.) ...
665#undef __FUNCT__
666#define __FUNCT__ "DeterminePeriodicity"
667/**
668 * @brief Scans all block-specific boundary condition files to determine a globally
669 * consistent periodicity for each dimension.
670 *
671 * This is a lightweight pre-parser intended to be called before DMDA creation.
672 * It ensures that the periodicity setting is consistent across all blocks, which is a
673 * physical requirement for the domain.
674 *
675 * 1. It collectively verifies that the mandatory BCS file for each block exists.
676 * 2. On MPI rank 0, it then iterates through the files.
677 * 3. It parses each line to extract the first (face) and second (type) tokens,
678 * mimicking the main BC parser's tokenization logic.
679 * 4. It performs a direct, case-insensitive string comparison on the "type" token
680 * to check if it is "PERIODIC". Other types are silently ignored.
681 * 5. It validates consistency (e.g., -Xi and +Xi match) and ensures all block files
682 * specify the same global periodicity.
683 * 6. It broadcasts the final three flags (as integers 0 or 1) to all MPI ranks.
684 * 7. All ranks update the i_periodic, j_periodic, and k_periodic fields in their SimCtx.
685 *
686 * @param[in,out] simCtx The master SimCtx struct, containing the bcs_files list and
687 * where the final periodicity flags will be stored.
688 * @return PetscErrorCode 0 on success, error code on failure.
689 */
690PetscErrorCode DeterminePeriodicity(SimCtx *simCtx)
691{
692 PetscErrorCode ierr;
693 PetscMPIInt rank;
694 PetscInt periodic_flags[3] = {0, 0, 0}; // Index 0:I, 1:J, 2:K
695
696 PetscFunctionBeginUser;
697 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
698
699 // --- Part 1: Collectively verify all BCS files exist before proceeding ---
700 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
701 const char *bcs_filename = simCtx->bcs_files[bi];
702 if (!bcs_filename) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "BCS filename for block %d is not set in SimCtx.", bi);
703 char desc_buf[256];
704 PetscBool file_exists;
705 snprintf(desc_buf, sizeof(desc_buf), "BCS file for block %d", bi);
706 ierr = VerifyPathExistence(bcs_filename, PETSC_FALSE, PETSC_FALSE, desc_buf, &file_exists); CHKERRQ(ierr);
707 }
708
709 // --- Part 2: Rank 0 does the parsing, since we know all files exist ---
710 if (rank == 0) {
711 PetscBool global_is_periodic[3] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};
712 PetscBool is_set = PETSC_FALSE;
713
714 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
715 const char *bcs_filename = simCtx->bcs_files[bi];
716 FILE *file = fopen(bcs_filename, "r");
717
718 PetscBool face_is_periodic[6] = {PETSC_FALSE};
719 char line_buffer[1024];
720
721 while (fgets(line_buffer, sizeof(line_buffer), file)) {
722 char *current_pos = line_buffer;
723 while (isspace((unsigned char)*current_pos)) current_pos++;
724 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n') continue;
725
726 // --- Tokenize the line exactly like the main parser ---
727 char *face_str = strtok(current_pos, " \t\n\r");
728 char *type_str = strtok(NULL, " \t\n\r");
729
730 // If the line doesn't have at least two tokens, we can't determine the type.
731 if (!face_str || !type_str) continue;
732
733 // --- Perform a direct, non-erroring check on the mathematical type string ---
734 if (strcasecmp(type_str, "PERIODIC") == 0) {
735 BCFace face_enum;
736 // A malformed face string on a periodic line IS a fatal error.
737 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
738 face_is_periodic[face_enum] = PETSC_TRUE;
739 }
740 // Any other type_str (e.g., "WALL", "INLET") is correctly and silently ignored.
741 }
742 fclose(file);
743
744 // --- Validate consistency within this file ---
745 if (face_is_periodic[BC_FACE_NEG_X] != face_is_periodic[BC_FACE_POS_X])
746 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent X-periodicity in file '%s' for block %d. Both -Xi and +Xi must be periodic or neither.", bcs_filename, bi);
747 if (face_is_periodic[BC_FACE_NEG_Y] != face_is_periodic[BC_FACE_POS_Y])
748 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Y-periodicity in file '%s' for block %d. Both -Eta and +Eta must be periodic or neither.", bcs_filename, bi);
749 if (face_is_periodic[BC_FACE_NEG_Z] != face_is_periodic[BC_FACE_POS_Z])
750 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Z-periodicity in file '%s' for block %d. Both -Zeta and +Zeta must be periodic or neither.", bcs_filename, bi);
751
752 PetscBool local_is_periodic[3] = {face_is_periodic[BC_FACE_NEG_X], face_is_periodic[BC_FACE_NEG_Y], face_is_periodic[BC_FACE_NEG_Z]};
753
754 // --- Validate consistency across block files ---
755 if (!is_set) {
756 global_is_periodic[0] = local_is_periodic[0];
757 global_is_periodic[1] = local_is_periodic[1];
758 global_is_periodic[2] = local_is_periodic[2];
759 is_set = PETSC_TRUE;
760 } else {
761 if (global_is_periodic[0] != local_is_periodic[0] || global_is_periodic[1] != local_is_periodic[1] || global_is_periodic[2] != local_is_periodic[2]) {
762 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
763 "Periodicity mismatch between blocks. Block 0 requires (I:%d, J:%d, K:%d), but block %d (file '%s') has (I:%d, J:%d, K:%d).",
764 (int)global_is_periodic[0], (int)global_is_periodic[1], (int)global_is_periodic[2],
765 bi, bcs_filename,
766 (int)local_is_periodic[0], (int)local_is_periodic[1], (int)local_is_periodic[2]);
767 }
768 }
769 } // end loop over blocks
770
771 periodic_flags[0] = (global_is_periodic[0]) ? 1 : 0;
772 periodic_flags[1] = (global_is_periodic[1]) ? 1 : 0;
773 periodic_flags[2] = (global_is_periodic[2]) ? 1 : 0;
774
775 LOG_ALLOW(GLOBAL, LOG_INFO, "Global periodicity determined: I-periodic=%d, J-periodic=%d, K-periodic=%d\n",
776 periodic_flags[0], periodic_flags[1], periodic_flags[2]);
777 }
778
779 // --- Part 3: Broadcast the final flags from rank 0 to all other ranks ---
780 ierr = MPI_Bcast(periodic_flags, 3, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
781
782 // --- All ranks now update their SimCtx ---
783 simCtx->i_periodic = periodic_flags[0];
784 simCtx->j_periodic = periodic_flags[1];
785 simCtx->k_periodic = periodic_flags[2];
786
787 PetscFunctionReturn(0);
788}
789
790/**
791 * @brief A parallel-safe helper to verify the existence of a generic file or directory path.
792 *
793 * This function centralizes the logic for checking arbitrary paths. Only Rank 0 performs the
794 * filesystem check, and the result is broadcast to all other processes. This ensures
795 * collective and synchronized decision-making across all ranks. It is intended for
796 * configuration files, source directories, etc., where the path is known completely.
797 *
798 * @param[in] path The full path to the file or directory to check.
799 * @param[in] is_dir PETSC_TRUE if checking for a directory, PETSC_FALSE for a file.
800 * @param[in] is_optional PETSC_TRUE if the path is optional (results in a warning),
801 * PETSC_FALSE if mandatory (results in an error).
802 * @param[in] description A user-friendly description of the path for logging (e.g., "Grid file").
803 * @param[out] exists The result of the check (identical on all ranks).
804 *
805 * @return PetscErrorCode
806 */
807PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
808{
809 PetscErrorCode ierr;
810 PetscMPIInt rank;
811 MPI_Comm comm = PETSC_COMM_WORLD;
812
813 PetscFunctionBeginUser;
814 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
815
816 if (rank == 0) {
817 if (is_dir) {
818 ierr = PetscTestDirectory(path, 'r', exists); CHKERRQ(ierr);
819 } else {
820 ierr = PetscTestFile(path, 'r', exists); CHKERRQ(ierr);
821 }
822
823 if (!(*exists)) {
824 if (is_optional) {
825 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s not found at: %s (using defaults/ignoring).\n", description, path);
826 } else {
827 LOG_ALLOW(GLOBAL, LOG_ERROR, "Mandatory %s not found at: %s\n", description, path);
828 }
829 } else {
830 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found %s: %s\n", description, path);
831 }
832 }
833
834 // Broadcast the result from Rank 0
835 PetscMPIInt exists_int = (rank == 0) ? (PetscMPIInt)(*exists) : 0;
836 ierr = MPI_Bcast(&exists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
837 *exists = (PetscBool)exists_int;
838
839 // Collective error for mandatory files
840 if (!(*exists) && !is_optional) {
841 SETERRQ(comm, PETSC_ERR_FILE_OPEN, "Mandatory %s not found. Rank 0 expected it at '%s'. Check path and permissions.", description, path);
842 }
843
844 PetscFunctionReturn(0);
845}
846
847/**
848 * @brief Checks for a data file's existence in a parallel-safe manner.
849 *
850 * This function's signature and behavior have been updated to use the
851 * "Directory Context Pointer" pattern. The high-level calling function (e.g.,
852 * ReadSimulationFields) must set the `simCtx->current_io_directory` context
853 * before this function is called.
854 *
855 * @param[in] user Pointer to the UserCtx structure to access SimCtx.
856 * @param ti The time index of the file.
857 * @param fieldName The name of the field.
858 * @param ext The file extension.
859 * @param fileExists [out] The result, which will be identical on all ranks.
860 * @return PetscErrorCode
861 */
862static PetscErrorCode CheckDataFile(UserCtx *user, PetscInt ti, const char *fieldName, const char *ext, PetscBool *fileExists)
863{
864 PetscErrorCode ierr;
865 PetscMPIInt rank;
866 MPI_Comm comm = PETSC_COMM_WORLD;
867 PetscInt placeholder_int = 0;
868 SimCtx *simCtx = user->simCtx;
869
870 PetscFunctionBeginUser;
871
872 if(!simCtx->current_io_directory) {
873 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "I/O context directory is NULL. Ensure it is set before calling CheckDataFile().");
874 }
875
876 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
877
878 if (rank == 0) {
879 char filename[PETSC_MAX_PATH_LEN];
880 // Use the same standardized, rank-independent filename format
881
882 ierr = PetscSNPrintf(filename, sizeof(filename), "%s/%s%05"PetscInt_FMT"_%d.%s",simCtx->current_io_directory,fieldName, ti, placeholder_int, ext);
883 ierr = PetscTestFile(filename, 'r', fileExists); CHKERRQ(ierr);
884 if (!(*fileExists)) {
885 LOG_ALLOW(GLOBAL, LOG_WARNING, "(Rank 0) - Optional data file '%s' not found.\n", filename);
886 }
887 }
888
889 // Broadcast the result from Rank 0 to all other ranks.
890 // We cast the PetscBool to a PetscMPIInt for MPI_Bcast.
891 PetscMPIInt fileExists_int = (rank == 0) ? (PetscMPIInt)(*fileExists) : 0;
892 ierr = MPI_Bcast(&fileExists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
893 *fileExists = (PetscBool)fileExists_int;
894
895 PetscFunctionReturn(0);
896}
897
898/**
899 * @brief Checks for and reads an optional Eulerian field from a file into a Vec.
900 *
901 * This helper function first checks if the corresponding data file exists in a
902 * parallel-safe manner using CheckDataFile().
903 * - If the file does not exist, it logs a warning and returns success (0),
904 * gracefully skipping the field. The target Vec is not modified.
905 * - If the file exists, it calls ReadFieldData() to read the data into the
906 * provided Vec. Any error during this read operation is considered fatal
907 * (e.g., file is corrupt, permissions issue, or size mismatch), as the
908 * presence of the file implies an intent to load it.
909 *
910 * @param[in] user Pointer to the UserCtx structure.
911 * @param[in] field_name Internal name of the field for the filename (e.g., "cs").
912 * @param[in] field_label A user-friendly name for logging (e.g., "Smagorinsky Constant").
913 * @param[in,out] field_vec The PETSc Vec to load the data into.
914 * @param[in] ti Time index for the file name.
915 * @param[in] ext File extension.
916 *
917 * @return PetscErrorCode Returns 0 on success or if the optional file was not found.
918 * Returns a non-zero error code on a fatal read error.
919 */
920static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
921{
922 PetscErrorCode ierr;
923 PetscBool fileExists;
924
925 PetscFunctionBeginUser;
926
927 /* Check if the data file for this optional field exists. */
928 ierr = CheckDataFile(user,ti, field_name, ext, &fileExists); CHKERRQ(ierr);
929
930 if (fileExists) {
931 /* File exists, so we MUST be able to read it. */
932 LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
933 ierr = ReadFieldData(user, field_name, field_vec, ti, ext);
934
935 if (ierr) {
936 /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
937 permissions issue. Other errors could be size mismatch or corruption. */
938 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
939 } else {
940 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
941 }
942 } else {
943 /* File does not exist, which is acceptable for an optional field. */
944 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
945 }
946
947 PetscFunctionReturn(0);
948}
949
950/**
951 * @brief Checks for and reads an optional DMSwarm field from a file.
952 *
953 * This helper function first checks if the corresponding data file exists.
954 * - If the file does not exist, it logs a warning and returns success (0),
955 * gracefully skipping the field.
956 * - If the file exists, it calls ReadSwarmField() to read the data. Any
957 * error during this read operation is considered fatal (e.g., file is
958 * corrupt, permissions issue, or size mismatch), as the presence of the
959 * file implies an intent to load it.
960 *
961 * @param[in] user Pointer to the UserCtx structure.
962 * @param[in] field_name Internal name of the DMSwarm field (e.g., "DMSwarm_CellID").
963 * @param[in] field_label A user-friendly name for logging (e.g., "Cell ID").
964 * @param[in] ti Time index for the file name.
965 * @param[in] ext File extension.
966 *
967 * @return PetscErrorCode Returns 0 on success or if the optional file was not found.
968 * Returns a non-zero error code on a fatal read error.
969 */
970static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
971{
972 PetscErrorCode ierr;
973 PetscBool fileExists;
974
975 PetscFunctionBeginUser;
976
977 /* Check if the data file for this optional field exists. */
978 ierr = CheckDataFile(user,ti, field_name, ext, &fileExists); CHKERRQ(ierr);
979
980 if (fileExists) {
981 /* File exists, so we MUST be able to read it. */
982 LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
983 if(strcasecmp(field_name,"DMSwarm_CellID") == 0 || strcasecmp(field_name,"DMSwarm_pid")== 0 || strcasecmp(field_name,"DMSwarm_location_status")== 0 ) {
984 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading integer swarm field '%s'.\n", field_name);
985 ierr = ReadSwarmIntField(user,field_name,ti,ext);
986 }
987 else{
988 ierr = ReadSwarmField(user, field_name, ti, ext);
989 }
990
991 if (ierr) {
992 /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
993 permissions issue. Other errors could be size mismatch or corruption. */
994 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
995 } else {
996 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
997 }
998 } else {
999 /* File does not exist, which is acceptable for an optional field. */
1000 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
1001 }
1002
1003 PetscFunctionReturn(0);
1004}
1005
1006#undef __FUNCT__
1007#define __FUNCT__ "ReadFieldData"
1008/************************************************************************************************
1009* @file io.c
1010* @brief Utility for (re-)loading PETSc Vec-based fields written by rank-0 in a previous run
1011* – works in both serial and MPI executions.
1012*
1013* The restart files are always written as **one** sequential Vec per field
1014* ( `<path-to-file>/<field_name><step>_0.<ext>` ). In parallel we therefore
1015* have to:
1016*
1017* • let rank-0 read that sequential file into a temporary Vec,
1018* • broadcast the raw array to every rank, and
1019* • copy the local slice into the distributed Vec supplied by the caller.
1020*
1021* The function purposely avoids `VecScatterCreateToAll()` because PETSc
1022* chooses an even block-cyclic layout that seldom matches an existing
1023* application layout – that mismatch is the root–cause of the previous
1024* “Scatter sizes of ix and iy don’t match” / “incompatible local lengths”
1025* crashes. A plain `MPI_Bcast` plus a `memcpy()` into the already
1026* allocated local array is bullet-proof and costs one extra buffer only
1027* on the non-root ranks.
1028*
1029* Logging helper used below:
1030* LOG_ALLOW(scope,level,fmt,…) – your existing macro
1031*
1032* @param[in] user – application context (unused here but part of original API)
1033* @param[in] field_name – base name of the field (“position”, “ufield”, …)
1034* @param[out] field_vec – **distributed** Vec we have to fill
1035* @param[in] ti – time-step index encoded in the filename
1036* @param[in] ext – file-name extension (“dat”, “bin”, …)
1037*
1038* @returns 0 on success or a PETSc error code.
1039************************************************************************************************/
PetscErrorCode ReadFieldData(UserCtx *user,
                             const char *field_name,
                             Vec field_vec,
                             PetscInt ti,
                             const char *ext)
{
    PetscErrorCode ierr;
    char filename[PETSC_MAX_PATH_LEN];
    MPI_Comm comm;
    PetscMPIInt rank,size;
    SimCtx *simCtx = user->simCtx;   // provides the I/O directory context set by the caller


    PetscFunctionBeginUser;

    // The high-level caller (e.g. ReadSimulationFields) must have set the
    // directory context before entering this routine.
    if(!simCtx->current_io_directory){
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling ReadFieldData().");
    }


    ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

    const char *source_path = NULL;
    source_path = simCtx->current_io_directory;

    // NOTE(review): this second NULL check is redundant with the one above
    // (source_path is a straight copy of current_io_directory) — harmless.
    if(!source_path){
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "source_path was not set for the current execution mode.");
    }
    /* ---------------------------------------------------------------------
     * Compose <path-to-file>/<field_name><step with 5 digits>_0.<ext>
     * (all restart files are written by rank-0 with that naming scheme).
     * ------------------------------------------------------------------ */
    ierr = PetscSNPrintf(filename,sizeof(filename),
                         "%s/%s%05" PetscInt_FMT "_0.%s",
                         source_path,field_name,ti,ext);CHKERRQ(ierr);

    /* NOTE(review): the LOG_ALLOW(...) opening line for the two argument
     * lines below appears to be missing in this rendering of the file —
     * confirm against the repository copy before editing this region. */
        "Attempting to read <%s> on rank %d/%d\n",
        filename,(int)rank,(int)size);

    /* ======================================================================
     * 1. SERIAL JOB – just hand the Vec to VecLoad()
     * ==================================================================== */
    if(size==1)
    {
        PetscViewer viewer;
        PetscBool found;
        Vec temp_vec;
        PetscInt expectedSize,loadedSize;

        ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
        if(!found) SETERRQ(comm,PETSC_ERR_FILE_OPEN,
                           "Restart/Source file not found: %s",filename);

        ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,FILE_MODE_READ,&viewer);CHKERRQ(ierr);
// ---- START MODIFICATION ----
        // DO NOT load directly into field_vec, as this can resize it, which is
        // illegal for DMSwarm "view" vectors. Instead, load into a temporary vector.
        ierr = VecCreate(PETSC_COMM_SELF, &temp_vec); CHKERRQ(ierr);
        ierr = VecLoad(temp_vec,viewer);CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);

        // Sanity check: ensure the file size matches the expected vector size.
        ierr = VecGetSize(field_vec, &expectedSize);CHKERRQ(ierr);
        ierr = VecGetSize(temp_vec, &loadedSize);CHKERRQ(ierr);
        if (loadedSize != expectedSize) {
            SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
                    "File %s holds %d entries – expected %d for field '%s'",
                    filename, loadedSize, expectedSize, field_name);
        }

        // Now, safely copy the data from the temporary vector to the final destination.
        ierr = VecCopy(temp_vec, field_vec);CHKERRQ(ierr);

        // Clean up the temporary vector.
        ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);

// ---- END MODIFICATION ----

        /* create EMPTY sequential Vec – VecLoad() will size it correctly */
        /*
        ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
        ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);

        ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
                                     FILE_MODE_READ,&viewer);CHKERRQ(ierr);

        ierr = VecLoad(field_vec,viewer);CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        */
        "Loaded <%s> (serial path)\n",filename);

        PetscFunctionReturn(0);
    }

    /* ======================================================================
     * 2. PARALLEL JOB
     * ==================================================================== */
    PetscInt globalSize;
    ierr = VecGetSize(field_vec,&globalSize);CHKERRQ(ierr);

    DM dm = NULL;
    const char *dmtype = NULL;
    Vec nat = NULL; /* Natural-ordered vector for DMDA */

    /* -------------------- rank-0 : read the sequential file -------------- */
    Vec seq_vec = NULL; /* only valid on rank-0 */
    const PetscScalar *seqArray = NULL; /* borrowed pointer on rank-0 only */

    if(rank==0)
    {
        PetscViewer viewer;
        PetscBool found;

        ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
        if(!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,
                           "Restart file not found: %s",filename);

        /* create EMPTY sequential Vec – VecLoad() will size it correctly */
        ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
        ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);

        ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
                                     FILE_MODE_READ,&viewer);CHKERRQ(ierr);
        ierr = VecLoad(seq_vec,viewer);CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);

        /* size sanity-check */
        PetscInt loaded;
        ierr = VecGetSize(seq_vec,&loaded);CHKERRQ(ierr);
        if(loaded != globalSize)
            SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
                    "File %s holds %d entries – expected %d",
                    filename,loaded,globalSize);

        /* borrow array for later Bcast */
        ierr = VecGetArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);

        "Rank 0 successfully loaded <%s>\n",filename);
    }

    /* -------------------- Check if this is a DMDA vector ----------------- */
    ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
    if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }

    if (dmtype && !strcmp(dmtype, DMDA)) {
        /* ==================================================================
         * DMDA PATH: File is in natural ordering, need to convert to global
         * ================================================================== */

        /* Create natural vector */
        ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);

        /* Scatter from rank 0's seq_vec to all ranks' natural vector */
        VecScatter scatter;
        Vec nat_seq = NULL; /* Sequential natural vector on rank 0 */

        ierr = VecScatterCreateToZero(nat, &scatter, &nat_seq); CHKERRQ(ierr);

        /* Reverse scatter: from rank 0 to all ranks.
         * On rank 0 the freshly-loaded seq_vec is the source; elsewhere the
         * (empty) nat_seq placeholder is passed so the call signature matches. */
        ierr = VecScatterBegin(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
                               INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
        ierr = VecScatterEnd(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
                             INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);

        /* Convert natural → global ordering */
        ierr = DMDANaturalToGlobalBegin(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
        ierr = DMDANaturalToGlobalEnd(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);

        /* Cleanup */
        ierr = VecScatterDestroy(&scatter); CHKERRQ(ierr);
        ierr = VecDestroy(&nat_seq); CHKERRQ(ierr);
        ierr = VecDestroy(&nat); CHKERRQ(ierr);

    } else {
        /* ==================================================================
         * NON-DMDA PATH: Use broadcast and direct copy (assumes global ordering)
         * ================================================================== */

        /* Rank 0 broadcasts its borrowed array; other ranks receive into a
         * freshly allocated buffer of the full global length. */
        PetscScalar *buffer = NULL;
        if (rank == 0) {
            buffer = (PetscScalar *)seqArray;
        } else {
            ierr = PetscMalloc1(globalSize, &buffer); CHKERRQ(ierr);
        }

        ierr = MPI_Bcast(buffer, (int)globalSize, MPIU_SCALAR, 0, comm); CHKERRQ(ierr);

        /* Copy slice based on ownership range */
        PetscInt rstart, rend, loc;
        PetscScalar *locArray;

        ierr = VecGetOwnershipRange(field_vec, &rstart, &rend); CHKERRQ(ierr);
        loc = rend - rstart;

        ierr = VecGetArray(field_vec, &locArray); CHKERRQ(ierr);
        ierr = PetscMemcpy(locArray, buffer + rstart, loc * sizeof(PetscScalar)); CHKERRQ(ierr);
        ierr = VecRestoreArray(field_vec, &locArray); CHKERRQ(ierr);

        if (rank != 0) {
            ierr = PetscFree(buffer); CHKERRQ(ierr);
        }
    }

    /* -------------------- tidy up ---------------------------------------- */
    if (rank == 0) {
        ierr = VecRestoreArrayRead(seq_vec, &seqArray); CHKERRQ(ierr);
        ierr = VecDestroy(&seq_vec); CHKERRQ(ierr);
    }

        "Loaded <%s> (parallel path)\n",filename);

    PetscFunctionReturn(0);
}
1262
1263
1264/**
1265 * @brief Reads simulation fields from files into their respective PETSc vectors.
1266 *
1267 * This function reads contravariant velocity, Cartesian velocity, pressure, and node state
1268 * fields from their respective binary files. It also conditionally reads LES, RANS, and
1269 * statistical fields if they are enabled.
1270 *
1271 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1272 * @param[in] ti Time index for constructing the file name.
1273 *
1274 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1275 */
1276PetscErrorCode ReadSimulationFields(UserCtx *user,PetscInt ti)
1277{
1278 PetscErrorCode ierr;
1279
1280 SimCtx *simCtx = user->simCtx;
1281 const char *source_path = NULL;
1282 const char *eulerian_ext = "dat";
1283
1284 if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
1285 source_path = simCtx->pps->source_dir;
1286 if (simCtx->pps->eulerianExt[0] != '\0') {
1287 eulerian_ext = simCtx->pps->eulerianExt;
1288 }
1289 } else if(simCtx->exec_mode == EXEC_MODE_SOLVER){
1290 source_path = simCtx->restart_dir;
1291 } else{
1292 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1293 }
1294
1295 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read simulation fields.\n");
1296
1297 // Set the current I/O directory context
1298 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer), "%s/%s",source_path,simCtx->euler_subdir); CHKERRQ(ierr);
1299 simCtx->current_io_directory = simCtx->_io_context_buffer;
1300
1301 // Read Cartesian velocity field
1302 ierr = ReadFieldData(user, "ufield", user->Ucat, ti, eulerian_ext); CHKERRQ(ierr);
1303
1304 // Read contravariant velocity field
1305 ierr = ReadFieldData(user, "vfield", user->Ucont, ti, eulerian_ext); CHKERRQ(ierr);
1306
1307 // Read pressure field
1308 ierr = ReadFieldData(user, "pfield", user->P, ti, eulerian_ext); CHKERRQ(ierr);
1309
1310 // Read node state field (nvert)
1311 ierr = ReadFieldData(user, "nvfield", user->Nvert, ti, eulerian_ext); CHKERRQ(ierr);
1312
1313 LOG_ALLOW(GLOBAL,LOG_INFO,"Successfully read all mandatory fields. \n");
1314
1315 if(simCtx->np>0){
1316 // Read Particle Count field
1317 if(!user->ParticleCount){
1318 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ParticleCount Vec is NULL but np>0");
1319 }
1320 ierr = ReadOptionalField(user, "ParticleCount", "Particle Count", user->ParticleCount, ti, eulerian_ext); CHKERRQ(ierr);
1321
1322 ierr = ReadOptionalField(user, "psifield", "Scalar Psi Field", user->Psi, ti, eulerian_ext); CHKERRQ(ierr);
1323 }
1324 else{
1325 LOG_ALLOW(GLOBAL, LOG_INFO, "No particles in simulation, skipping Particle fields reading.\n");
1326 }
1327 // Process LES fields if enabled
1328 if (simCtx->les) {
1329 ierr = ReadLESFields(user,ti); CHKERRQ(ierr);
1330 }
1331
1332 // Process RANS fields if enabled
1333 if (simCtx->rans) {
1334 ierr = ReadRANSFields(user,ti); CHKERRQ(ierr);
1335 }
1336
1337 // Process statistical fields if averaging is enabled
1338 if (simCtx->averaging) {
1339 ierr = ReadStatisticalFields(user,ti); CHKERRQ(ierr);
1340 }
1341
1342 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1343
1344 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading simulation fields.\n");
1345
1346 return 0;
1347}
1348
1349/**
1350 * @brief Reads statistical fields for averaging purposes.
1351 *
1352 * This function reads data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum,
1353 * and P_sum, used for statistical analysis during simulation.
1354 *
1355 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1356 * @param[in] ti Time index for constructing the file name.
1357 *
1358 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1359 */
1360PetscErrorCode ReadStatisticalFields(UserCtx *user,PetscInt ti)
1361{
1362 PetscErrorCode ierr;
1363 const char *eulerian_ext = "dat";
1364 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1365 eulerian_ext = user->simCtx->pps->eulerianExt;
1366 }
1367
1368 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read statistical fields.\n");
1369
1370 ierr = ReadOptionalField(user, "su0", "Velocity Sum", user->Ucat_sum, ti, eulerian_ext); CHKERRQ(ierr);
1371 ierr = ReadOptionalField(user, "su1", "Velocity Cross Sum", user->Ucat_cross_sum, ti, eulerian_ext); CHKERRQ(ierr);
1372 ierr = ReadOptionalField(user, "su2", "Velocity Square Sum",user->Ucat_square_sum, ti, eulerian_ext); CHKERRQ(ierr);
1373 ierr = ReadOptionalField(user, "sp", "Pressure Sum", user->P_sum, ti, eulerian_ext); CHKERRQ(ierr);
1374
1375 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading statistical fields.\n");
1376
1377 return 0;
1378}
1379
1380/**
1381 * @brief Reads LES-related fields.
1382 *
1383 * This function reads LES-related fields such as Cs (Smagorinsky constant)
1384 * into their respective PETSc vectors.
1385 *
1386 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1387 * @param[in] ti Time index for constructing the file name.
1388 *
1389 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1390 */
1391PetscErrorCode ReadLESFields(UserCtx *user,PetscInt ti)
1392{
1393 PetscErrorCode ierr;
1394 const char *eulerian_ext = "dat";
1395 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1396 eulerian_ext = user->simCtx->pps->eulerianExt;
1397 }
1398
1399 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read LES fields.\n");
1400
1401 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, eulerian_ext); CHKERRQ(ierr);
1402 ierr = ReadOptionalField(user, "cs", "Smagorinsky Constant (Cs)", user->CS, ti, eulerian_ext); CHKERRQ(ierr);
1403
1404 DMGlobalToLocalBegin(user->da, user->CS, INSERT_VALUES, user->lCs);
1405 DMGlobalToLocalEnd(user->da, user->CS, INSERT_VALUES, user->lCs);
1406
1407 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1408 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1409
1410 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading LES fields.\n");
1411
1412 return 0;
1413}
1414
1415/**
1416 * @brief Reads RANS-related fields.
1417 *
1418 * This function reads RANS-related fields such as K_Omega into their respective
1419 * PETSc vectors.
1420 *
1421 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1422 * @param[in] ti Time index for constructing the file name.
1423 *
1424 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1425 */
1426PetscErrorCode ReadRANSFields(UserCtx *user,PetscInt ti)
1427{
1428 PetscErrorCode ierr;
1429 const char *eulerian_ext = "dat";
1430 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1431 eulerian_ext = user->simCtx->pps->eulerianExt;
1432 }
1433
1434 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read RANS fields.\n");
1435
1436 ierr = ReadOptionalField(user, "kfield", "K-Omega RANS", user->K_Omega, ti, eulerian_ext); CHKERRQ(ierr);
1437 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, eulerian_ext); CHKERRQ(ierr);
1438
1439 VecCopy(user->K_Omega, user->K_Omega_o);
1440
1441 DMGlobalToLocalBegin(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1442 DMGlobalToLocalEnd(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1443
1444 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1445 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1446
1447 DMGlobalToLocalBegin(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1448 DMGlobalToLocalEnd(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1449
1450 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading RANS fields.\n");
1451
1452 return 0;
1453}
1454
1455
1456/**
1457 * @brief Reads data from a file into a specified field of a PETSc DMSwarm.
1458 *
1459 * This function is the counterpart to WriteSwarmField(). It creates a global PETSc vector
1460 * that references the specified DMSwarm field, uses ReadFieldData() to read the data from
1461 * a file, and then destroys the global vector reference.
1462 *
1463 * @param[in] user Pointer to the UserCtx structure (containing `user->swarm`).
1464 * @param[in] field_name Name of the DMSwarm field to read into (must be previously declared/allocated).
1465 * @param[in] ti Time index used to construct the input file name.
1466 * @param[in] ext File extension (e.g., "dat" or "bin").
1467 *
1468 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1469 *
1470 * @note Compatible with PETSc 3.14.x.
1471 */
1472PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1473{
1474 PetscErrorCode ierr;
1475 DM swarm;
1476 Vec fieldVec;
1477
1478 PetscFunctionBegin;
1479
1480 swarm = user->swarm;
1481
1482 LOG_ALLOW(GLOBAL,LOG_DEBUG," ReadSwarmField Begins \n");
1483
1484 /* 2) Create a global vector that references the specified Swarm field. */
1485 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1486
1487 LOG_ALLOW(GLOBAL,LOG_DEBUG," Vector created from Field \n");
1488
1489 /* 3) Use the ReadFieldData() function to read data into fieldVec. */
1490 ierr = ReadFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1491
1492 /* 4) Destroy the global vector reference. */
1493 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1494
1495 PetscFunctionReturn(0);
1496}
1497
1498/**
1499 * @brief Reads integer swarm data by using ReadFieldData and casting the result.
1500 *
1501 * This function is the counterpart to WriteSwarmIntField. It reads a file
1502 * containing floating-point data (that was originally integer) into a temporary
1503 * Vec and then casts it back to the integer swarm field. It works by:
1504 * 1. Creating a temporary parallel Vec.
1505 * 2. Calling the standard ReadFieldData() to populate this Vec.
1506 * 3. Accessing the local data of both the Vec and the swarm field.
1507 * 4. Populating the swarm's integer field by casting each PetscScalar back to a PetscInt.
1508 * 5. Destroying the temporary Vec.
1509 *
1510 * @param[in] user Pointer to the UserCtx structure.
1511 * @param[in] field_name Name of the integer Swarm field to be read.
1512 * @param[in] ti Time index for the input file.
1513 * @param[in] ext File extension.
1514 *
1515 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1516 */
1517PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1518{
1519 PetscErrorCode ierr;
1520 DM swarm = user->swarm;
1521 Vec temp_vec;
1522 PetscInt nlocal, nglobal, bs, i;
1523 const PetscScalar *scalar_array; // Read-only pointer from the temp Vec
1524 void *field_array_void;
1525
1526
1527 PetscFunctionBeginUser;
1528
1529 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading '%s' via temporary Vec.\n", field_name);
1530
1531 // Get the properties of the swarm field to determine the expected layout
1532 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1533 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1534 // We get the block size but not the data pointer yet
1535 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1536 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1537
1538 // Create a temporary Vec with the CORRECT layout to receive the data
1539 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1540 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1541 ierr = VecSetSizes(temp_vec, nlocal * bs, nglobal * bs); CHKERRQ(ierr);
1542 ierr = VecSetBlockSize(temp_vec, bs); CHKERRQ(ierr);
1543 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1544
1545 // Call your existing reader to populate the temporary Vec
1546 ierr = ReadFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1547
1548 // Get local pointers
1549 ierr = VecGetArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1550 ierr = DMSwarmGetField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1551
1552 // Perform the cast back, using the correct loop size (nlocal * bs)
1553 if (strcmp(field_name, "DMSwarm_pid") == 0) {
1554 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1555 for (i = 0; i < nlocal * bs; i++) {
1556 int64_array[i] = (PetscInt64)scalar_array[i];
1557 }
1558 } else {
1559 PetscInt *int_array = (PetscInt *)field_array_void;
1560 for (i = 0; i < nlocal * bs; i++) {
1561 int_array[i] = (PetscInt)scalar_array[i];
1562 }
1563 }
1564
1565 // Restore access
1566 ierr = DMSwarmRestoreField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1567 ierr = VecRestoreArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1568
1569 // 6. Clean up
1570 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1571
1572 PetscFunctionReturn(0);
1573}
1574
1575/**
1576 * @brief Reads multiple fields into a DMSwarm, gracefully handling missing optional files.
1577 *
1578 * This function reads all necessary and optional fields for a DMSwarm for a given
1579 * timestep. It assumes the swarm has already been resized to match the particle
1580 * count in the input files.
1581 *
1582 * The 'position' field is considered MANDATORY. If its file is missing or corrupt,
1583 * the function will return a fatal error.
1584 *
1585 * All other fields (velocity, CellID, weight, etc.) are OPTIONAL. If their
1586 * corresponding files are not found, a warning is logged, and the function
1587 * continues without error. If an optional file IS found but is corrupt or has a
1588 * size mismatch, it is treated as a fatal error.
1589 *
1590 * @param[in,out] user Pointer to the UserCtx structure containing the DMSwarm (user->swarm).
1591 * @param[in] ti Time index for constructing the file names.
1592 *
1593 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1594 */
1595PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
1596{
1597 PetscErrorCode ierr;
1598 PetscInt nGlobal;
1599 SimCtx *simCtx = user->simCtx;
1600 const char *source_path = NULL;
1601 const char *particle_ext = "dat";
1602
1603 PetscFunctionBeginUser;
1604 ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
1605 LOG_ALLOW(GLOBAL, LOG_INFO, "Reading DMSwarm fields for timestep %d (swarm size is %d).\n", ti, nGlobal);
1606
1607 if (nGlobal == 0) {
1608 LOG_ALLOW(GLOBAL, LOG_INFO, "Swarm is empty for timestep %d. Nothing to read.\n", ti);
1609 PetscFunctionReturn(0);
1610 }
1611
1612 // First, determine the top-level source directory based on the execution mode.
1613 if (simCtx->exec_mode == EXEC_MODE_SOLVER) {
1614 source_path = simCtx->restart_dir;
1615 } else if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
1616 source_path = simCtx->pps->source_dir;
1617 if (simCtx->pps->particleExt[0] != '\0') {
1618 particle_ext = simCtx->pps->particleExt;
1619 }
1620 } else {
1621 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1622 }
1623
1624 // Set the current I/O directory context
1625 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1626 "%s/%s", source_path, simCtx->particle_subdir); CHKERRQ(ierr);
1627
1628 simCtx->current_io_directory = simCtx->_io_context_buffer;
1629
1630 /* 1) Read positions (REQUIRED) */
1631 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading mandatory position field...\n");
1632 ierr = ReadSwarmField(user, "position", ti, particle_ext);
1633 if (ierr) {
1634 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read MANDATORY 'position' field for step %d. Cannot continue.", ti);
1635 }
1636 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read mandatory position field for step %d.\n", ti);
1637
1638 /* 2) Read all OPTIONAL fields using the helper function. */
1639 /* The helper will print a warning and continue if a file is not found. */
1640 ierr = ReadOptionalSwarmField(user, "velocity", "Velocity", ti, particle_ext); CHKERRQ(ierr);
1641 ierr = ReadOptionalSwarmField(user, "DMSwarm_pid", "Particle ID", ti, particle_ext); CHKERRQ(ierr);
1642 ierr = ReadOptionalSwarmField(user, "DMSwarm_CellID", "Cell ID", ti, particle_ext); CHKERRQ(ierr);
1643 ierr = ReadOptionalSwarmField(user, "weight", "Particle Weight", ti, particle_ext); CHKERRQ(ierr);
1644 ierr = ReadOptionalSwarmField(user, "Psi", "Scalar Psi", ti, particle_ext); CHKERRQ(ierr);
1645 ierr = ReadOptionalSwarmField(user, "DMSwarm_location_status", "Migration Status", ti, particle_ext); CHKERRQ(ierr);
1646
1647 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1648
1649 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading DMSwarm fields for timestep %d.\n", ti);
1650 PetscFunctionReturn(0);
1651}
1652
1653
1654#undef __FUNCT__
1655#define __FUNCT__ "WriteFieldData"
1656 /**
1657 * @brief Writes data from a specific PETSc vector to a single, sequential file.
1658 *
1659 * This function is now parallel-safe.
1660 * - In PARALLEL: All processes send their local data to Rank 0. Rank 0 assembles
1661 * the data into a temporary sequential vector and writes it to a single file.
1662 * - In SERIAL: It performs a direct, simple write.
1663 *
1664 * This ensures the output file is always in a simple, portable format.
1665 *
1666 * @param[in] user Pointer to the UserCtx structure.
1667 * @param[in] field_name Name of the field (e.g., "position").
1668 * @param[in] field_vec The parallel PETSc vector containing the data to write.
1669 * @param[in] ti Time index for constructing the file name.
1670 * @param[in] ext File extension (e.g., "dat").
1671 *
1672 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1673 */
1674/**
1675 * @brief Dump a **distributed** PETSc Vec to the *single* sequential file
1676 * format used by our restart / post-processing tools.
1677 *
1678 * The companion of ReadFieldData(): it always produces **one** file
1679 * (e.g. `results/ufield00006_0.dat`) regardless of how many MPI ranks
1680 * are running.
1681 *
1682 * **Behaviour**
1683 *
1684 * | # MPI ranks | Strategy |
1685 * |-------------|---------------------------------------------------------------------------|
1686 * | 1 | Direct `VecView()` into the file. |
1687 * | > 1 | `VecScatterCreateToZero()` gathers the distributed Vec onto **rank 0**. |
1688 * | | Rank 0 writes the sequential Vec; all other ranks allocate no storage. |
1689 *
1690 * The routine never alters or destroys the parallel Vec passed in; the
1691 * gather buffer is created and freed internally.
1692 *
1693 * @param[in] user Simulation context (used only for logging).
1694 * @param[in] field_name Logical field name (forms part of the filename).
1695 * @param[in] field_vec Distributed PETSc Vec to write.
1696 * @param[in] ti Timestep index used in the filename.
1697 * @param[in] ext File extension (`"dat"` in our workflow).
1698 *
1699 * @return `0` on success or a PETSc error code.
1700 */
1701PetscErrorCode WriteFieldData(UserCtx *user,
1702 const char *field_name,
1703 Vec field_vec,
1704 PetscInt ti,
1705 const char *ext)
1706{
1707 PetscErrorCode ierr;
1708 MPI_Comm comm;
1709 PetscMPIInt rank, size;
1710
1711 const PetscInt placeholder_int = 0; /* keep legacy name */
1712 char filename[PETSC_MAX_PATH_LEN];
1713 SimCtx *simCtx=user->simCtx;
1714
1715 PetscFunctionBeginUser;
1717
1718 if(!simCtx->output_dir){
1719 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Output directory was not set before calling WriteFieldData().");
1720 }
1721
1722 /* ------------------------------------------------------------ */
1723 /* Basic communicator information */
1724 /* ------------------------------------------------------------ */
1725 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1726 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1727 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1728
1729 ierr = PetscSNPrintf(filename,sizeof(filename),
1730 "%s/%s%05" PetscInt_FMT "_%d.%s",
1731 simCtx->current_io_directory,field_name,ti,placeholder_int,ext);CHKERRQ(ierr);
1732
1734 " Preparing to write <%s> on rank %d/%d\n",
1735 filename,rank,size);
1736
1737 /* ------------------------------------------------------------ */
1738 /* 1. Serial path */
1739 /* ------------------------------------------------------------ */
1740 if (size == 1) {
1741 PetscViewer viewer;
1742
1743 ierr = PetscViewerBinaryOpen(comm,filename,
1744 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1745 ierr = VecView(field_vec,viewer);CHKERRQ(ierr);
1746 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1747
1749 " Wrote <%s> (serial path)\n",filename);
1750 PetscFunctionReturn(0);
1751 }
1752
1753 /* ------------------------------------------------------------ */
1754 /* 2. Parallel path */
1755 /* ------------------------------------------------------------ */
1756 VecScatter scatter;
1757 Vec seq_vec=NULL; /* created by PETSc, lives only on rank 0 */
1758 DM dm = NULL;
1759 const char *dmtype = NULL;
1760 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1761
1762 /* 2.1 Check if vector has DMDA and convert to natural ordering if needed */
1763 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1764 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1765
1766 if (dmtype && !strcmp(dmtype, DMDA)) {
1767 /* DMDA path: convert to natural ordering first */
1768 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1769 ierr = DMDAGlobalToNaturalBegin(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1770 ierr = DMDAGlobalToNaturalEnd(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1771
1772 /* Gather the natural-ordered vector */
1773 ierr = VecScatterCreateToZero(nat, &scatter, &seq_vec); CHKERRQ(ierr);
1774 } else {
1775 /* Non-DMDA path: direct gather in global ordering */
1776 ierr = VecScatterCreateToZero(field_vec, &scatter, &seq_vec); CHKERRQ(ierr);
1777 }
1778
1779 /* 2.2 Gather distributed → sequential (on rank 0) */
1780 ierr = VecScatterBegin(scatter, (nat ? nat : field_vec), seq_vec,
1781 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1782 ierr = VecScatterEnd(scatter, (nat ? nat : field_vec), seq_vec,
1783 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1784
1785 /* 2.3 Rank 0 writes the file */
1786 if (rank == 0) {
1787 PetscViewer viewer;
1788
1789 /* (optional) value diagnostics */
1790 PetscReal vmin,vmax;
1791 ierr = VecMin(seq_vec,NULL,&vmin);CHKERRQ(ierr);
1792 ierr = VecMax(seq_vec,NULL,&vmax);CHKERRQ(ierr);
1794 " <%s> range = [%.4e … %.4e]\n",
1795 field_name,(double)vmin,(double)vmax);
1796
1797 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1798 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1799 ierr = VecView(seq_vec,viewer);CHKERRQ(ierr);
1800 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1801
1803 " Wrote <%s> (parallel path)\n",filename);
1804 }
1805
1806 /* 2.4 Cleanup */
1807 ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
1808 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
1809 if (nat) { ierr = VecDestroy(&nat); CHKERRQ(ierr); }
1810
1812 PetscFunctionReturn(0);
1813}
1814
1815/**
1816 * @brief Writes simulation fields to files.
1817 *
1818 * This function writes contravariant velocity, Cartesian velocity, pressure, and node state
1819 * fields to their respective binary files. It also conditionally writes LES, RANS, and
1820 * statistical fields if they are enabled.
1821 *
1822 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1823 *
1824 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1825 */
1826PetscErrorCode WriteSimulationFields(UserCtx *user)
1827{
1828 PetscErrorCode ierr;
1829
1830 SimCtx *simCtx = user->simCtx;
1831
1832 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write simulation fields.\n");
1833
1834 // Set the current IO directory
1835 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1836 "%s/%s", simCtx->output_dir, simCtx->euler_subdir);CHKERRQ(ierr);
1837 simCtx->current_io_directory = simCtx->_io_context_buffer;
1838
1839 // Write contravariant velocity field
1840 ierr = WriteFieldData(user, "vfield", user->Ucont, simCtx->step, "dat"); CHKERRQ(ierr);
1841
1842 // Write Cartesian velocity field
1843 ierr = WriteFieldData(user, "ufield", user->Ucat, simCtx->step, "dat"); CHKERRQ(ierr);
1844
1845 // Write pressure field
1846 ierr = WriteFieldData(user, "pfield", user->P, simCtx->step, "dat"); CHKERRQ(ierr);
1847
1848 // Write node state field (nvert)
1849 ierr = WriteFieldData(user, "nvfield", user->Nvert, simCtx->step, "dat"); CHKERRQ(ierr);
1850
1851 // Write ParticleCountPerCell if enabled.
1852 if(simCtx->np>0){
1853 ierr = WriteFieldData(user, "ParticleCount",user->ParticleCount,simCtx->step,"dat"); CHKERRQ(ierr);
1854 ierr = WriteFieldData(user, "psifield", user->Psi, simCtx->step, "dat"); CHKERRQ(ierr);
1855 }
1856
1857 // Write LES fields if enabled
1858 if (simCtx->les) {
1859 ierr = WriteLESFields(user); CHKERRQ(ierr);
1860 }
1861
1862 // Write RANS fields if enabled
1863 if (simCtx->rans) {
1864 ierr = WriteRANSFields(user); CHKERRQ(ierr);
1865 }
1866
1867 // Write statistical fields if averaging is enabled
1868 if (simCtx->averaging) {
1869 ierr = WriteStatisticalFields(user); CHKERRQ(ierr);
1870 }
1871
1872 simCtx->current_io_directory = NULL;
1873
1874 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing simulation fields.\n");
1875
1876 return 0;
1877}
1878
1879/**
1880 * @brief Writes statistical fields for averaging purposes.
1881 *
1882 * This function writes data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum,
1883 * and P_sum to their respective binary files.
1884 *
1885 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1886 *
1887 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1888 */
1889PetscErrorCode WriteStatisticalFields(UserCtx *user)
1890{
1891 PetscErrorCode ierr;
1892
1893 SimCtx *simCtx = user->simCtx;
1894
1895 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write statistical fields.\n");
1896
1897 ierr = WriteFieldData(user, "su0", user->Ucat_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1898 ierr = WriteFieldData(user, "su1", user->Ucat_cross_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1899 ierr = WriteFieldData(user, "su2", user->Ucat_square_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1900 ierr = WriteFieldData(user, "sp", user->P_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1901
1902 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing statistical fields.\n");
1903
1904 return 0;
1905}
1906
1907/**
1908 * @brief Writes LES-related fields.
1909 *
1910 * This function writes LES-related fields such as Cs (Smagorinsky constant)
1911 * to their respective binary files.
1912 *
1913 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1914 *
1915 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1916 */
1917PetscErrorCode WriteLESFields(UserCtx *user)
1918{
1919 PetscErrorCode ierr;
1920
1921 SimCtx *simCtx = user->simCtx;
1922
1923 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write LES fields.\n");
1924
1925
1926 DMLocalToGlobalBegin(user->da, user->lCs, INSERT_VALUES, user->CS);
1927 DMLocalToGlobalEnd(user->da, user->lCs, INSERT_VALUES, user->CS);
1928
1929 DMLocalToGlobalBegin(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1930 DMLocalToGlobalEnd(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1931
1932 ierr = WriteFieldData(user, "Nu_t", user->Nu_t, simCtx->step, "dat"); CHKERRQ(ierr);
1933 ierr = WriteFieldData(user, "cs", user->CS, simCtx->step, "dat"); CHKERRQ(ierr);
1934
1935
1936 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing LES fields.\n");
1937
1938 return 0;
1939}
1940
1941/**
1942 * @brief Writes RANS-related fields.
1943 *
1944 * This function writes RANS-related fields such as K_Omega to their respective
1945 * binary files.
1946 *
1947 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1948 *
1949 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1950 */
1951PetscErrorCode WriteRANSFields(UserCtx *user)
1952{
1953 PetscErrorCode ierr;
1954
1955 SimCtx *simCtx = user->simCtx;
1956
1957 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write RANS fields.\n");
1958
1959 ierr = WriteFieldData(user, "kfield", user->K_Omega, simCtx->step, "dat"); CHKERRQ(ierr);
1960
1961 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing RANS fields.\n");
1962
1963 return 0;
1964}
1965
1966/**
1967 * @brief Writes data from a specific field in a PETSc Swarm to a file.
1968 *
1969 * This function retrieves the Swarm from the UserCtx (i.e., `user->swarm`) and
1970 * creates a global PETSc vector from the specified Swarm field. It then calls
1971 * the existing WriteFieldData() function to handle the actual I/O operation.
1972 * After writing the data, the function destroys the temporary global vector
1973 * to avoid memory leaks.
1974 *
1975 * @param[in] user Pointer to the UserCtx structure containing simulation context
1976 * and the PetscSwarm (as `user->swarm`).
1977 * @param[in] field_name Name of the Swarm field to be written (e.g., "my_field").
1978 * @param[in] ti Time index used to construct the output file name.
1979 * @param[in] ext File extension (e.g., "dat", "bin").
1980 *
1981 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1982 *
1983 * @note Compatible with PETSc 3.14.4 and newer.
1984 */
1985PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1986{
1987 PetscErrorCode ierr;
1988 Vec fieldVec;
1989 DM swarm;
1990
1991 PetscFunctionBeginUser; /* PETSc macro indicating start of function */
1992
1993 /*
1994 * 1) Retrieve the PetscSwarm from the user context.
1995 * Ensure user->swarm is initialized and not NULL.
1996 */
1997 swarm = user->swarm;
1998
1999 /*
2000 * 2) Create a global vector from the specified swarm field.
2001 * This function is available in PETSc 3.14.4.
2002 * It provides a read/write "view" of the swarm field as a global Vec.
2003 */
2005 "Attempting to create global vector from field: %s\n",
2006 field_name);
2007 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
2008
2009 /*
2010 * 3) Use your existing WriteFieldData() to write the global vector to a file.
2011 * The field name, time index, and extension are passed along for naming.
2012 */
2014 "Calling WriteFieldData for field: %s\n",
2015 field_name);
2016 ierr = WriteFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
2017
2018 /*
2019 * 4) Destroy the global vector once the data is successfully written.
2020 * This step is crucial for avoiding memory leaks.
2021 * DMSwarmDestroyGlobalVectorFromField() is also available in PETSc 3.14.4.
2022 */
2024 "Destroying the global vector for field: %s\n",
2025 field_name);
2026 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
2027
2028 /* Log and return success. */
2030 "Successfully wrote swarm data for field: %s\n",
2031 field_name);
2032
2033 PetscFunctionReturn(0); /* PETSc macro indicating end of function */
2034}
2035
2036/**
2037 * @brief Writes integer swarm data by casting it to a temporary Vec and using WriteFieldData.
2038 *
2039 * This function provides a bridge to write integer-based swarm fields (like DMSwarm_CellID)
2040 * using the existing Vec-based I/O routine (WriteFieldData). It works by:
2041 * 1. Creating a temporary parallel Vec with the same layout as other swarm fields.
2042 * 2. Accessing the local integer data from the swarm field.
2043 * 3. Populating the temporary Vec by casting each integer to a PetscScalar.
2044 * 4. Calling the standard WriteFieldData() function with the temporary Vec.
2045 * 5. Destroying the temporary Vec.
2046 *
2047 * @param[in] user Pointer to the UserCtx structure.
2048 * @param[in] field_name Name of the integer Swarm field to be written.
2049 * @param[in] ti Time index for the output file.
2050 * @param[in] ext File extension.
2051 *
2052 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
2053 */
2054PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
2055{
2056 PetscErrorCode ierr;
2057 DM swarm = user->swarm;
2058 Vec temp_vec; // Temporary Vec to hold casted data
2059 PetscInt nlocal, nglobal,bs,i;
2060 void *field_array_void;
2061 PetscScalar *scalar_array; // Pointer to the temporary Vec's scalar data
2062
2063 PetscFunctionBeginUser;
2064
2065 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Casting '%s' to Vec for writing.\n", field_name);
2066
2067 // Get the swarm field properties
2068 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
2069 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
2070 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
2071
2072 // Create Temporary parallel Vec wit the CORRECT layout
2073 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
2074 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
2075 ierr = VecSetSizes(temp_vec, nlocal*bs, nglobal*bs); CHKERRQ(ierr);
2076 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
2077
2078 // Defining Vector field to mandatory field 'position'
2079 DMSwarmVectorDefineField(swarm,"position");
2080
2081 ierr = VecGetArray(temp_vec, &scalar_array); CHKERRQ(ierr);
2082
2083 if(strcasecmp(field_name,"DMSwarm_pid") == 0){
2084 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
2085 // Perform the cast from PetscInt64 to PetscScalar
2086 for (i = 0; i < nlocal*bs; i++) {
2087 scalar_array[i] = (PetscScalar)int64_array[i];
2088 }
2089 }else{
2090 PetscInt *int_array = (PetscInt *)field_array_void;
2091 //Perform the cast from PetscInt to PetscScalar
2092 for (i = 0; i < nlocal*bs; i++) {
2093 scalar_array[i] = (PetscScalar)int_array[i];
2094 }
2095 }
2096
2097 // Restore access to both arrays
2098 ierr = VecRestoreArray(temp_vec, &scalar_array); CHKERRQ(ierr);
2099 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
2100
2101 // Call your existing writer with the temporary, populated Vec
2102 ierr = WriteFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
2103
2104 // Clean up
2105 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
2106
2107 PetscFunctionReturn(0);
2108}
2109
2110/**
2111 * @brief Writes a predefined set of PETSc Swarm fields to files.
2112 *
2113 * This function iterates through a hardcoded list of common swarm fields
2114 * (position, velocity, etc.) and calls the WriteSwarmField() helper function
2115 * for each one. This provides a straightforward way to output essential particle
2116 * data at a given simulation step.
2117 *
2118 * This function will only execute if particles are enabled in the simulation
2119 * (i.e., `user->simCtx->np > 0` and `user->swarm` is not NULL).
2120 *
2121 * @param[in] user Pointer to the UserCtx structure containing the simulation context
2122 * and the PetscSwarm.
2123 *
2124 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
2125 */
2126PetscErrorCode WriteAllSwarmFields(UserCtx *user)
2127{
2128 PetscErrorCode ierr;
2129 SimCtx *simCtx = user->simCtx;
2130
2131 PetscFunctionBeginUser;
2132
2133 // If no swarm is configured or there are no particles, do nothing and return.
2134 if (!user->swarm || simCtx->np <= 0) {
2135 PetscFunctionReturn(0);
2136 }
2137
2138 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write swarm fields.\n");
2139
2140 // Ensure the current IO directory is set
2141 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
2142 "%s/%s", simCtx->output_dir, simCtx->particle_subdir);CHKERRQ(ierr);
2143 simCtx->current_io_directory = simCtx->_io_context_buffer;
2144
2145 // Write particle position field
2146 ierr = WriteSwarmField(user, "position", simCtx->step, "dat"); CHKERRQ(ierr);
2147
2148 // Write particle velocity field
2149 ierr = WriteSwarmField(user, "velocity", simCtx->step, "dat"); CHKERRQ(ierr);
2150
2151 // Write particle weight field
2152 ierr = WriteSwarmField(user, "weight", simCtx->step, "dat"); CHKERRQ(ierr);
2153
2154 // Write custom particle field "Psi"
2155 ierr = WriteSwarmField(user, "Psi", simCtx->step, "dat"); CHKERRQ(ierr);
2156
2157 // Integer fields require special handling
2158
2159 // Write the background mesh cell ID for each particle
2160 ierr = WriteSwarmIntField(user, "DMSwarm_CellID", simCtx->step, "dat"); CHKERRQ(ierr);
2161
2162 // Write the particle location status (e.g., inside or outside the domain)
2163 ierr = WriteSwarmIntField(user, "DMSwarm_location_status", simCtx->step, "dat"); CHKERRQ(ierr);
2164
2165 // Write the unique particle ID
2166 ierr = WriteSwarmIntField(user, "DMSwarm_pid", simCtx->step, "dat"); CHKERRQ(ierr);
2167
2168 simCtx->current_io_directory = NULL;
2169
2170 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing swarm fields.\n");
2171
2172 PetscFunctionReturn(0);
2173}
2174
/**
 * @brief Gather a (possibly distributed) PETSc Vec onto rank 0 as a contiguous C array.
 *        If the Vec has a DMDA attached, the gather is performed in DMDA "natural" ordering.
 *
 * Collective on the Vec's communicator. On rank 0 the buffer is allocated with
 * PetscMalloc1, so the caller is responsible for freeing it (PetscFree).
 *
 * @param[in]  inVec    The PETSc Vec (may be distributed).
 * @param[out] N        Global length of the Vec (includes dof); set on every rank.
 * @param[out] arrayOut On rank 0, newly allocated buffer with the gathered values (PetscScalar-sized).
 *                      On other ranks, set to NULL.
 *                      NOTE(review): returned as double* — assumes PETSc was built with
 *                      real, double-precision scalars (PetscScalar == double); confirm
 *                      for complex/single builds.
 */
PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
{
    PetscErrorCode ierr;
    MPI_Comm comm;
    PetscMPIInt rank;
    PetscInt globalSize;
    DM dm = NULL;
    const char *dmtype = NULL;

    /* For DMDA path */
    Vec nat = NULL, seqNat = NULL;
    VecScatter scatNat = NULL;
    const PetscScalar *nar = NULL;
    PetscScalar *buf = NULL;

    /* For generic (no DM) path */
    Vec seq = NULL;
    VecScatter scat = NULL;
    const PetscScalar *sar = NULL;

    PetscFunctionBeginUser;

    ierr = PetscObjectGetComm((PetscObject)inVec, &comm); CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
    ierr = VecGetSize(inVec, &globalSize); CHKERRQ(ierr);
    *N = globalSize;        /* every rank learns the global length */
    *arrayOut = NULL;       /* non-root ranks keep NULL */

    /* The gather strategy depends on whether a DMDA is attached. */
    ierr = VecGetDM(inVec, &dm); CHKERRQ(ierr);
    if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }

    if (dmtype && !strcmp(dmtype, DMDA)) {
        /* --- DMDA path: go to NATURAL ordering, then gather to rank 0 --- */
        /* Natural ordering makes the result independent of the parallel
           decomposition (i,j,k lexicographic instead of per-rank chunks). */
        ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
        ierr = DMDAGlobalToNaturalBegin(dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
        ierr = DMDAGlobalToNaturalEnd (dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);

        /* seqNat is created by PETSc with nonzero length only on rank 0. */
        ierr = VecScatterCreateToZero(nat, &scatNat, &seqNat); CHKERRQ(ierr);
        ierr = VecScatterBegin(scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
        ierr = VecScatterEnd  (scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);

        if (rank == 0) {
            PetscInt nseq;
            ierr = VecGetLocalSize(seqNat, &nseq); CHKERRQ(ierr);
            ierr = VecGetArrayRead(seqNat, &nar); CHKERRQ(ierr);

            /* Copy out of the Vec so the Vec can be destroyed before return. */
            ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
            ierr = PetscMemcpy(buf, nar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);

            ierr = VecRestoreArrayRead(seqNat, &nar); CHKERRQ(ierr);
            *arrayOut = (double*)buf; /* hand back as double* for drop-in compatibility */
        }

        ierr = VecScatterDestroy(&scatNat); CHKERRQ(ierr);
        ierr = VecDestroy(&seqNat); CHKERRQ(ierr);
        ierr = VecDestroy(&nat); CHKERRQ(ierr);
    } else {
        /* --- No DM attached: plain gather in Vec’s global (parallel) layout order --- */
        ierr = VecScatterCreateToZero(inVec, &scat, &seq); CHKERRQ(ierr);
        ierr = VecScatterBegin(scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
        ierr = VecScatterEnd  (scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);

        if (rank == 0) {
            PetscInt nseq;
            ierr = VecGetLocalSize(seq, &nseq); CHKERRQ(ierr);
            ierr = VecGetArrayRead(seq, &sar); CHKERRQ(ierr);

            /* Copy out of the Vec so the Vec can be destroyed before return. */
            ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
            ierr = PetscMemcpy(buf, sar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);

            ierr = VecRestoreArrayRead(seq, &sar); CHKERRQ(ierr);
            *arrayOut = (double*)buf;
        }

        ierr = VecScatterDestroy(&scat); CHKERRQ(ierr);
        ierr = VecDestroy(&seq); CHKERRQ(ierr);
    }

    PetscFunctionReturn(0);
}
2264
2265/**
2266 * @brief Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
2267 *
2268 * This is a generic, type-aware version of SwarmFieldToArrayOnRank0.
2269 * It is a COLLECTIVE operation.
2270 *
2271 * @param[in] swarm The DMSwarm to gather from.
2272 * @param[in] field_name The name of the field to gather.
2273 * @param[out] n_total_particles (Output on rank 0) Total number of particles in the global swarm.
2274 * @param[out] n_components (Output on rank 0) Number of components for the field.
2275 * @param[out] gathered_array (Output on rank 0) A newly allocated array containing the full, gathered data.
2276 * The caller is responsible for freeing this memory and for casting it to the correct type.
2277 * @return PetscErrorCode
2278 */
2279PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
2280{
2281 PetscErrorCode ierr;
2282 PetscMPIInt rank, size;
2283 PetscInt nlocal, nglobal, bs;
2284 void *local_array_void;
2285 size_t element_size = 0;
2286 MPI_Datatype mpi_type = MPI_BYTE; // We'll send raw bytes
2287
2288 PetscFunctionBeginUser;
2289
2290 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2291 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
2292
2293 // All ranks get swarm properties to determine send/receive counts
2294 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
2295 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
2296 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2297
2298 // Determine the size of one element of the field's data type
2299 if (strcasecmp(field_name, "DMSwarm_pid") == 0) {
2300 element_size = sizeof(PetscInt64);
2301 } else if (strcasecmp(field_name, "DMSwarm_CellID") == 0 || strcasecmp(field_name, "DMSwarm_location_status") == 0) {
2302 element_size = sizeof(PetscInt);
2303 } else {
2304 element_size = sizeof(PetscScalar);
2305 }
2306
2307 if (rank == 0) {
2308 *n_total_particles = nglobal;
2309 *n_components = bs;
2310 *gathered_array = NULL; // Initialize output
2311 }
2312
2313 if (size == 1) { // Serial case is a simple copy
2314 if (rank == 0) {
2315 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2316 ierr = PetscMemcpy(*gathered_array, local_array_void, nglobal * bs * element_size); CHKERRQ(ierr);
2317 }
2318 } else { // Parallel case: use MPI_Gatherv
2319 PetscInt *recvcounts = NULL, *displs = NULL;
2320 if (rank == 0) {
2321 ierr = PetscMalloc1(size, &recvcounts); CHKERRQ(ierr);
2322 ierr = PetscMalloc1(size, &displs); CHKERRQ(ierr);
2323 }
2324 PetscInt sendcount = nlocal * bs;
2325
2326 // Gather the number of elements (not bytes) from each rank
2327 ierr = MPI_Gather(&sendcount, 1, MPIU_INT, recvcounts, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2328
2329 if (rank == 0) {
2330 displs[0] = 0;
2331 // Convert counts and calculate displacements in terms of BYTES
2332 for (PetscMPIInt i = 0; i < size; i++) recvcounts[i] *= element_size;
2333 for (PetscMPIInt i = 1; i < size; i++) displs[i] = displs[i-1] + recvcounts[i-1];
2334
2335 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2336 }
2337
2338 // Use Gatherv with MPI_BYTE to handle any data type generically
2339 ierr = MPI_Gatherv(local_array_void, nlocal * bs * element_size, MPI_BYTE,
2340 *gathered_array, recvcounts, displs, MPI_BYTE,
2341 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2342
2343 if (rank == 0) {
2344 ierr = PetscFree(recvcounts); CHKERRQ(ierr);
2345 ierr = PetscFree(displs); CHKERRQ(ierr);
2346 }
2347 }
2348
2349 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2350
2351 PetscFunctionReturn(0);
2352}
2353
/**
 * @brief Displays a structured banner summarizing the simulation configuration.
 *
 * Prints grid dimensions, global domain bounds, boundary-condition settings,
 * timing parameters, and initialization modes to standard output. All printing
 * is guarded so that only MPI rank 0 emits output.
 *
 * Global domain bounds are reduced on rank 0 from simCtx->bboxlist, the list of
 * per-rank bounding boxes (valid on rank 0 only).
 *
 * @param[in] simCtx Simulation context; provides the finest-level UserCtx, the
 *                   run parameters (StartTime, StartStep, StepsToRun, np, size),
 *                   and the rank-0 bounding-box list.
 *
 * @return PetscErrorCode Returns `0` on success.
 */
PetscErrorCode DisplayBanner(SimCtx *simCtx) // bboxlist is only valid on rank 0
{
  PetscErrorCode ierr;
  PetscMPIInt rank;
  Cmpnts global_min_coords, global_max_coords;
  PetscReal StartTime;
  PetscInt StartStep,StepsToRun,total_num_particles;
  PetscMPIInt num_mpi_procs;

  // Pull the finest multigrid level's UserCtx and the run parameters out of the
  // simulation context so the print statements below stay short.
  // SimCtx *simCtx = user->simCtx;
  UserCtx *user = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
  num_mpi_procs = simCtx->size;
  StartTime = simCtx->StartTime;
  StartStep = simCtx->StartStep;
  StepsToRun = simCtx->StepsToRun;
  total_num_particles = simCtx->np;
  BoundingBox *bboxlist_on_rank0 = simCtx->bboxlist;

  // NOTE(review): PetscFunctionBeginUser appears after executable statements
  // above; PETSc convention places it before them — confirm against project style.
  PetscFunctionBeginUser;

  if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "DisplayBanner - UserCtx pointer is NULL.");
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);

  if (rank == 0) {
    // If global_domain_bbox is not pre-populated in UserCtx, compute it here from bboxlist_on_rank0
    // This assumes bboxlist_on_rank0 is valid and contains all local bounding boxes on rank 0.
    if (bboxlist_on_rank0 && num_mpi_procs > 0) {
      // Reduce the per-rank boxes into a single global axis-aligned bounding box.
      global_min_coords = bboxlist_on_rank0[0].min_coords;
      global_max_coords = bboxlist_on_rank0[0].max_coords;
      for (PetscMPIInt p = 1; p < num_mpi_procs; ++p) {
        global_min_coords.x = PetscMin(global_min_coords.x, bboxlist_on_rank0[p].min_coords.x);
        global_min_coords.y = PetscMin(global_min_coords.y, bboxlist_on_rank0[p].min_coords.y);
        global_min_coords.z = PetscMin(global_min_coords.z, bboxlist_on_rank0[p].min_coords.z);
        global_max_coords.x = PetscMax(global_max_coords.x, bboxlist_on_rank0[p].max_coords.x);
        global_max_coords.y = PetscMax(global_max_coords.y, bboxlist_on_rank0[p].max_coords.y);
        global_max_coords.z = PetscMax(global_max_coords.z, bboxlist_on_rank0[p].max_coords.z);
      }
      // Optionally store this in user->global_domain_bbox if it's useful elsewhere
      // user->global_domain_bbox.min_coords = global_min_coords;
      // user->global_domain_bbox.max_coords = global_max_coords;
    } else {
      // Fallback or warning if bboxlist is not available for global calculation.
      // NOTE(review): in this branch global_min/max_coords remain uninitialized
      // but are still printed below — verify against the repository source.
      LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank 0) - bboxlist not provided or num_mpi_procs <=0; using user->bbox for domain bounds.\n");
      // global_min_coords = user->bbox.min_coords; // Use local bbox of rank 0 as fallback
      // global_max_coords = user->bbox.max_coords;
    }

    ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, "                          CASE SUMMARY                       \n"); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Grid Points              : %d X %d X %d\n", user->IM, user->JM, user->KM); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Cells                    : %d X %d X %d\n", user->IM - 1, user->JM - 1, user->KM - 1); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (X) : %.6f to %.6f\n", (double)global_min_coords.x, (double)global_max_coords.x); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Y) : %.6f to %.6f\n", (double)global_min_coords.y, (double)global_max_coords.y); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Z) : %.6f to %.6f\n", (double)global_min_coords.z, (double)global_max_coords.z); CHKERRQ(ierr);
    // Boundary conditions are only meaningful when an Eulerian field is loaded or solved.
    if(strcmp(simCtx->eulerianSource,"load")==0 || strcmp(simCtx->eulerianSource,"solve")==0){
    ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------- Boundary Conditions --------------------\n"); CHKERRQ(ierr);
    const int face_name_width = 17; // Adjusted for longer names (Zeta,Eta,Xi)
    for (PetscInt i_face = 0; i_face < 6; ++i_face) {
      BCFace current_face = (BCFace)i_face;
      // The BCFaceToString will now return the Xi, Eta, Zeta versions
      const char* face_str = BCFaceToString(current_face);
      const char* bc_type_str = BCTypeToString(user->boundary_faces[current_face].mathematical_type);
      const char* bc_handler_type_str = BCHandlerTypeToString(user->boundary_faces[current_face].handler_type);
      if(user->boundary_faces[current_face].mathematical_type == INLET){
        // Constant-velocity inlet: report the prescribed (vx,vy,vz) components.
        Cmpnts inlet_velocity = {0.0,0.0,0.0};
        PetscBool found;
        ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vx",&inlet_velocity.x,&found); CHKERRQ(ierr);
        ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vy",&inlet_velocity.y,&found); CHKERRQ(ierr);
        ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vz",&inlet_velocity.z,&found); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f,%.4f,%.4f]\n",
                           face_name_width, face_str, bc_type_str, bc_handler_type_str,inlet_velocity.x,inlet_velocity.y,inlet_velocity.z); CHKERRQ(ierr);
      } else if(user->boundary_faces[current_face].handler_type == BC_HANDLER_INLET_PARABOLIC){
        // Parabolic inlet profile: report the peak velocity.
        PetscReal v_max = 0.0;
        PetscBool found;
        ierr = GetBCParamReal(user->boundary_faces[current_face].params,"v_max",&v_max,&found); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - v_max=%.4f\n",
                           face_name_width, face_str, bc_type_str, bc_handler_type_str, v_max); CHKERRQ(ierr);
      }
      // NOTE(review): an opening `else if (...) {` appears to be missing here in
      // this listing (the block below prints flux-type BCs and is closed by the
      // `} else{` that follows) — verify against the repository source.
        PetscReal flux;
        PetscBool trimflag,foundflux,foundtrimflag;
        ierr = GetBCParamReal(user->boundary_faces[current_face].params,"target_flux",&flux,&foundflux); CHKERRQ(ierr);
        ierr = GetBCParamBool(user->boundary_faces[current_face].params,"apply_trim",&trimflag,&foundtrimflag); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f] - %s\n",
                           face_name_width, face_str, bc_type_str, bc_handler_type_str,flux,trimflag?"Trim Ucont":"No Trim"); CHKERRQ(ierr);
      } else{
        // Generic face: just the mathematical type and handler.
        ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s\n",
                           face_name_width, face_str, bc_type_str,bc_handler_type_str); CHKERRQ(ierr);
      }
    }
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------------------------------------------------\n"); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Start Time               : %.4f\n", (double)StartTime); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Timestep Size            : %.4f\n", (double)simCtx->dt); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Starting Step            : %d\n", StartStep); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Total Steps to Run       : %d\n", StepsToRun); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " Number of MPI Processes  : %d\n", num_mpi_procs); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD," Number of Particles      : %d\n", total_num_particles); CHKERRQ(ierr);
    if(strcmp(simCtx->eulerianSource,"solve")==0 || strcmp(simCtx->eulerianSource,"load")==0){
      const char* field_init_str = FieldInitializationToString(simCtx->FieldInitialization);
      const char* particle_init_str = ParticleInitializationToString(simCtx->ParticleInitialization);
      ierr = PetscPrintf(PETSC_COMM_WORLD," Reynolds Number          : %le\n", simCtx->ren); CHKERRQ(ierr);
      //ierr = PetscPrintf(PETSC_COMM_WORLD," Von-Neumann Number       : %le\n", simCtx->vnn); CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Initialization Mode: %s\n", particle_init_str); CHKERRQ(ierr);
      if(strcmp(simCtx->eulerianSource,"solve")==0){
        //ierr = PetscPrintf(PETSC_COMM_WORLD," Stanton Number           : %le\n", simCtx->st); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_WORLD," Momentum Equation Solver : %s\n", MomentumSolverTypeToString(simCtx->mom_solver_type)); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_WORLD," Initial Pseudo-CFL       : %le\n", simCtx->pseudo_cfl); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_WORLD," Large Eddy Simulation Model : %s\n", LESModelToString(simCtx->les)); CHKERRQ(ierr);
      }
      // NOTE(review): an opening `if (...) {` appears to be missing here in this
      // listing (the closing brace after the inlet-face block below pairs with
      // it) — verify against the repository source.
      if (user->inletFaceDefined) {
        ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At  : %s (Enum Val: %d)\n", BCFaceToString(user->identifiedInletBCFace), user->identifiedInletBCFace); CHKERRQ(ierr);
      } else {
        ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At  : --- (No INLET face identified)\n"); CHKERRQ(ierr);
      }
      }

      ierr = PetscPrintf(PETSC_COMM_SELF, " Field Initialization Mode : %s\n", field_init_str); CHKERRQ(ierr);
      if (simCtx->FieldInitialization == 1) {
        ierr = PetscPrintf(PETSC_COMM_SELF, " Constant Velocity         : x - %.4f, y - %.4f, z - %.4f \n", (double)simCtx->InitialConstantContra.x,(double)simCtx->InitialConstantContra.y,(double)simCtx->InitialConstantContra.z ); CHKERRQ(ierr);
      }
    } else if(strcmp(simCtx->eulerianSource,"analytical")==0){
      ierr = PetscPrintf(PETSC_COMM_WORLD," Analytical Solution Type  : %s\n", simCtx->AnalyticalSolutionType); CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2507
2508#undef __FUNCT__
2509#define __FUNCT__ "ParsePostProcessingSettings"
2510/**
2511 * @brief Initializes post-processing settings from a config file and command-line overrides.
2512 *
2513 * This function establishes the configuration for a post-processing run by:
2514 * 1. Setting hardcoded default values in the PostProcessParams struct.
2515 * 2. Reading a configuration file to override the defaults.
2516 * 3. Parsing command-line options (-startTime, -endTime, etc.) which can override
2517 * both the defaults and the file settings.
2518 *
2519 * @param configFile The path to the configuration file to parse.
2520 * @param pps Pointer to the PostProcessParams struct to be populated.
2521 * @return PetscErrorCode
2522 */
2524{
2525 FILE *file;
2526 char line[1024];
2527 PetscBool startTimeSet, endTimeSet, timeStepSet;
2528
2529 PetscFunctionBeginUser;
2531
2532 if (!simCtx || !simCtx->pps) {
2533 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "SimCtx or its pps member is NULL in ParsePostProcessingSettings.");
2534 }
2535
2536 char *configFile = simCtx->PostprocessingControlFile;
2537 PostProcessParams *pps = simCtx->pps;
2538
2539
2540 // --- 1. Set Sane Defaults First ---
2541 pps->startTime = 0;
2542 pps->endTime = 0;
2543 pps->timeStep = 1;
2544 pps->outputParticles = PETSC_FALSE;
2545 pps->particle_output_freq = simCtx->LoggingFrequency; // Default to logging frequency;
2546 strcpy(pps->process_pipeline, "");
2547 strcpy(pps->output_fields_instantaneous, "Ucat,P");
2548 strcpy(pps->output_fields_averaged, "");
2549 strcpy(pps->output_prefix, "Field");
2550 strcpy(pps->particle_output_prefix,"Particle");
2551 strcpy(pps->particle_fields,"velocity,CellID,weight,pid");
2552 strcpy(pps->particle_pipeline,"");
2553 strncpy(pps->statistics_pipeline, "", MAX_PIPELINE_LENGTH - 1);
2554 strncpy(pps->statistics_output_prefix, "Stats", MAX_FILENAME_LENGTH - 1);
2555 strcpy(pps->particleExt,"dat"); // The input file format for particles.
2556 strcpy(pps->eulerianExt,"dat"); // The input file format for Eulerian fields.
2557 pps->reference[0] = pps->reference[1] = pps->reference[2] = 1;
2558 strncpy(pps->source_dir, simCtx->output_dir, sizeof(pps->source_dir) - 1);
2559 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0'; // Ensure null-termination
2560
2561 // --- 2. Parse the Configuration File (overrides defaults) ---
2562 file = fopen(configFile, "r");
2563 if (file) {
2564 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing post-processing config file: %s\n", configFile);
2565 while (fgets(line, sizeof(line), file)) {
2566 char *key, *value, *comment;
2567 comment = strchr(line, '#'); if (comment) *comment = '\0';
2568 TrimWhitespace(line); if (strlen(line) == 0) continue;
2569 key = strtok(line, "="); value = strtok(NULL, "=");
2570 if (key && value) {
2571 TrimWhitespace(key); TrimWhitespace(value);
2572 if (strcmp(key, "startTime") == 0) pps->startTime = atoi(value);
2573 else if (strcmp(key, "endTime") == 0) pps->endTime = atoi(value);
2574 else if (strcmp(key, "timeStep") == 0) pps->timeStep = atoi(value);
2575 else if (strcmp(key, "output_particles") == 0) {
2576 if (strcasecmp(value, "true") == 0) pps->outputParticles = PETSC_TRUE;
2577 }
2578 else if (strcasecmp(key, "process_pipeline") == 0) {
2579 strncpy(pps->process_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2580 pps->process_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0'; // Ensure null-termination
2581 } else if (strcasecmp(key, "output_fields_instantaneous") == 0) {
2582 strncpy(pps->output_fields_instantaneous, value, MAX_FIELD_LIST_LENGTH - 1);
2584 } else if (strcasecmp(key, "output_fields_averaged") == 0) {
2585 strncpy(pps->output_fields_averaged, value, MAX_FIELD_LIST_LENGTH - 1);
2587 } else if (strcasecmp(key, "output_prefix") == 0) {
2588 strncpy(pps->output_prefix, value, MAX_FILENAME_LENGTH - 1);
2589 pps->output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
2590 } else if (strcasecmp(key, "particle_output_prefix") == 0) {
2591 strncpy(pps->particle_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2593 } else if (strcasecmp(key, "particle_fields_instantaneous") == 0) {
2594 strncpy(pps->particle_fields, value, MAX_FIELD_LIST_LENGTH - 1);
2595 pps->particle_fields[MAX_FIELD_LIST_LENGTH - 1] = '\0';
2596 } else if (strcasecmp(key, "particle_pipeline") == 0) {
2597 strncpy(pps->particle_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2598 pps->particle_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0';
2599 } else if (strcasecmp(key, "particle_output_freq") == 0) {
2600 pps->particle_output_freq = atoi(value);
2601 } else if (strcasecmp(key, "statistics_pipeline") == 0) {
2602 strncpy(pps->statistics_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2604 } else if (strcasecmp(key, "statistics_output_prefix") == 0) {
2605 strncpy(pps->statistics_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2607 } else if (strcasecmp(key, "particleExt") == 0) {
2608 strncpy(pps->particleExt, value, sizeof(pps->particleExt) - 1);
2609 pps->particleExt[sizeof(pps->particleExt) - 1] = '\0';
2610 } else if (strcasecmp(key, "eulerianExt") == 0) {
2611 strncpy(pps->eulerianExt, value, sizeof(pps->eulerianExt) - 1);
2612 pps->eulerianExt[sizeof(pps->eulerianExt) - 1] = '\0';
2613 } else if (strcmp(key, "reference_ip") == 0) {pps->reference[0] = atoi(value);
2614 } else if (strcmp(key, "reference_jp") == 0) {pps->reference[1] = atoi(value);
2615 } else if (strcmp(key, "reference_kp") == 0) {pps->reference[2] = atoi(value);
2616 } else if (strcasecmp(key, "source_directory") == 0) {
2617 strncpy(pps->source_dir, value, sizeof(pps->source_dir) - 1);
2618 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0';
2619 } else {
2620 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown key '%s' in post-processing config file. Ignoring.\n", key);
2621 }
2622 // Add parsing for pipeline, fields, etc. in later phases
2623 }
2624 }
2625 fclose(file);
2626 } else {
2627 LOG_ALLOW(GLOBAL, LOG_WARNING, "Could not open post-processing config file '%s'. Using defaults and command-line overrides.\n", configFile);
2628 }
2629
2630 // --- 3. Parse Command-Line Options (overrides file settings and defaults) ---
2631 PetscOptionsGetInt(NULL, NULL, "-startTime", &pps->startTime, &startTimeSet);
2632 PetscOptionsGetInt(NULL, NULL, "-endTime", &pps->endTime, &endTimeSet);
2633 PetscOptionsGetInt(NULL, NULL, "-timeStep", &pps->timeStep, &timeStepSet);
2634 PetscOptionsGetBool(NULL, NULL, "-output_particles", &pps->outputParticles, NULL);
2635
2636 if(pps->endTime==-1){
2637 pps->endTime = simCtx->StartStep + simCtx->StepsToRun; // Total steps if endTime is set to -1.
2638 }
2639
2640 // If only startTime is given on command line, run for a single step
2641 if (startTimeSet && !endTimeSet) {
2642 pps->endTime = pps->startTime;
2643 }
2644
2645 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-processing configured to run from t=%d to t=%d with step %d. Particle output: %s.\n",
2646 pps->startTime, pps->endTime, pps->timeStep, pps->outputParticles ? "TRUE" : "FALSE");
2647
2648 LOG_ALLOW(GLOBAL, LOG_INFO, "Process Pipeline: %s\n", pps->process_pipeline);
2649 LOG_ALLOW(GLOBAL, LOG_INFO, "Instantaneous Output Fields: %s\n", pps->output_fields_instantaneous);
2650 LOG_ALLOW(GLOBAL, LOG_INFO, "Output Prefix: %s\n", pps->output_prefix);
2651 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Prefix: %s\n", pps->particle_output_prefix);
2652 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Fields: %s\n", pps->particle_fields);
2653 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Pipeline: %s\n", pps->particle_pipeline);
2654 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Frequency: %d\n", pps->particle_output_freq);
2655 LOG_ALLOW(GLOBAL, LOG_INFO, "Averaged Output Fields (reserved): %s\n", pps->output_fields_averaged);
2656 LOG_ALLOW(GLOBAL, LOG_INFO, "Post input extensions: Eulerian='.%s', Particle='.%s'\n", pps->eulerianExt, pps->particleExt);
2657
2659 PetscFunctionReturn(0);
2660}
2661
2662
2663#undef __FUNCT__
2664#define __FUNCT__ "ParseScalingInformation"
2665/**
2666 * @brief Parses physical scaling parameters from command-line options.
2667 *
2668 * This function reads the reference length, velocity, and density from the
2669 * PETSc options database (provided via -scaling_L_ref, etc.). It populates
2670 * the simCtx->scaling struct and calculates the derived reference pressure.
2671 * It sets default values of 1.0 for a fully non-dimensional case if the
2672 * options are not provided.
2673 *
2674 * @param[in,out] simCtx The simulation context whose 'scaling' member will be populated.
2675 * @return PetscErrorCode
2676 */
2677PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
2678{
2679 PetscErrorCode ierr;
2680 PetscBool flg;
2681
2682 PetscFunctionBeginUser;
2684
2685 if (!simCtx) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "SimCtx is NULL in ParseScalingInformation");
2686
2687 // --- 1. Set default values to 1.0 ---
2688 // This represents a purely non-dimensional run if no scaling is provided.
2689 simCtx->scaling.L_ref = 1.0;
2690 simCtx->scaling.U_ref = 1.0;
2691 simCtx->scaling.rho_ref = 1.0;
2692
2693 // --- 2. Read overrides from the command line / control file ---
2694 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_L_ref", &simCtx->scaling.L_ref, &flg); CHKERRQ(ierr);
2695 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_U_ref", &simCtx->scaling.U_ref, &flg); CHKERRQ(ierr);
2696 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_rho_ref", &simCtx->scaling.rho_ref, &flg); CHKERRQ(ierr);
2697
2698 // --- 3. Calculate derived scaling factors ---
2699 // Check for division by zero to be safe, though U_ref should be positive.
2700 if (simCtx->scaling.U_ref <= 0.0) {
2701 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Reference velocity U_ref must be positive. Got %g", (double)simCtx->scaling.U_ref);
2702 }
2703 simCtx->scaling.P_ref = simCtx->scaling.rho_ref * simCtx->scaling.U_ref * simCtx->scaling.U_ref;
2704
2705 // --- 4. Log the final, effective scales for verification ---
2706 LOG(GLOBAL, LOG_INFO, "---------------- Physical Scales Initialized -----------------\n");
2707 LOG(GLOBAL, LOG_INFO, " L_ref: %.4f, U_ref: %.4f, rho_ref: %.4f, P_ref: %.4f\n",
2708 simCtx->scaling.L_ref, simCtx->scaling.U_ref, simCtx->scaling.rho_ref, simCtx->scaling.P_ref);
2709 LOG(GLOBAL, LOG_INFO, "--------------------------------------------------------------\n");
2710
2712 PetscFunctionReturn(0);
2713}
2714
2715
2716// ---------------------------------------------------------------------
2717// UTILITY FUNCTIONS NOT USED IN THE MAIN CODE ( NOT SUPPORTED ANYMORE,INCLUDED FOR BACKWARD COMPATIBILITY)
2718// ---------------------------------------------------------------------
2719
2720//=====================================================================
2721// 1) ReadDataFileToArray
2722
2723// See the function-level comments in io.h for a summary.
2724// This reads one value per line from an ASCII file. Rank 0 does I/O,
2725// broadcasts the data to all ranks.
2726//===================================================================== */
2727PetscInt ReadDataFileToArray(const char *filename,
2728 double **data_out,
2729 PetscInt *Nout,
2730 MPI_Comm comm)
2731{
2732 /* STEP 0: Prepare local variables & log function entry */
2733 PetscMPIInt rank, size;
2734 PetscErrorCode ierr;
2735 FILE *fp = NULL;
2736 PetscInt N = 0; /* number of lines/values read on rank 0 */
2737 double *array = NULL; /* pointer to local array on each rank */
2738 PetscInt fileExistsFlag = 0; /* 0 = doesn't exist, 1 = does exist */
2739
2741 "Start reading from file: %s\n",
2742 filename);
2743
2744 /* Basic error checking: data_out, Nout must be non-null. */
2745 if (!filename || !data_out || !Nout) {
2747 "Null pointer argument provided.\n");
2748 return 1;
2749 }
2750
2751 /* Determine rank/size for coordinating I/O. */
2752 MPI_Comm_rank(comm, &rank);
2753 MPI_Comm_size(comm, &size);
2754
2755 /* STEP 1: On rank 0, check if file can be opened. */
2756 if (!rank) {
2757 fp = fopen(filename, "r");
2758 if (fp) {
2759 fileExistsFlag = 1;
2760 fclose(fp);
2761 }
2762 }
2763
2764 /* STEP 2: Broadcast file existence to all ranks. */
2765 // In ReadDataFileToArray:
2766 ierr = MPI_Bcast(&fileExistsFlag, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2767
2768 if (!fileExistsFlag) {
2769 /* If file does not exist, log & return. */
2770 if (!rank) {
2772 "File '%s' not found.\n",
2773 filename);
2774 }
2775 return 2;
2776 }
2777
2778 /* STEP 3: Rank 0 re-opens and reads the file, counting lines, etc. */
2779 if (!rank) {
2780 fp = fopen(filename, "r");
2781 if (!fp) {
2783 "File '%s' could not be opened for reading.\n",
2784 filename);
2785 return 3;
2786 }
2787
2788 /* (3a) Count lines first. */
2789 {
2790 char line[256];
2791 while (fgets(line, sizeof(line), fp)) {
2792 N++;
2793 }
2794 }
2795
2797 "File '%s' has %d lines.\n",
2798 filename, N);
2799
2800 /* (3b) Allocate array on rank 0. */
2801 array = (double*)malloc(N * sizeof(double));
2802 if (!array) {
2803 fclose(fp);
2805 "malloc failed for array.\n");
2806 return 4;
2807 }
2808
2809 /* (3c) Rewind & read values into array. */
2810 rewind(fp);
2811 {
2812 PetscInt i = 0;
2813 char line[256];
2814 while (fgets(line, sizeof(line), fp)) {
2815 double val;
2816 if (sscanf(line, "%lf", &val) == 1) {
2817 array[i++] = val;
2818 }
2819 }
2820 }
2821 fclose(fp);
2822
2824 "Successfully read %d values from '%s'.\n",
2825 N, filename);
2826 }
2827
2828 /* STEP 4: Broadcast the integer N to all ranks. */
2829 ierr = MPI_Bcast(&N, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2830
2831 /* STEP 5: Each rank allocates an array to receive the broadcast if rank>0. */
2832 if (rank) {
2833 array = (double*)malloc(N * sizeof(double));
2834 if (!array) {
2836 "malloc failed on rank %d.\n",
2837 rank);
2838 return 5;
2839 }
2840 }
2841
2842 /* STEP 6: Broadcast the actual data from rank 0 to all. */
2843 ierr = MPI_Bcast(array, N, MPI_DOUBLE, 0, comm); CHKERRQ(ierr);
2844
2845 /* STEP 7: Assign outputs on all ranks. */
2846 *data_out = array;
2847 *Nout = N;
2848
2850 "Done. Provided array of length=%d to all ranks.\n",
2851 N);
2852 return 0; /* success */
2853}
2854
2855/**
2856 * @brief Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
2857 *
2858 * This function uses \c ReadFieldData to fill a PETSc Vec with coordinate data,
2859 * then leverages \c VecToArrayOnRank0 to gather that data into a contiguous array
2860 * (valid on rank 0 only).
2861 *
2862 * @param[in] timeIndex The time index used to construct file names.
2863 * @param[in] user Pointer to the user context.
2864 * @param[out] coordsArray On rank 0, will point to a newly allocated array holding the coordinates.
2865 * @param[out] Ncoords On rank 0, the length of \p coordsArray. On other ranks, 0.
2866 *
2867 * @return PetscErrorCode Returns 0 on success, or non-zero on failures.
2868 */
2869PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex,
2870 UserCtx *user,
2871 double **coordsArray,
2872 PetscInt *Ncoords)
2873{
2874 PetscFunctionBeginUser;
2875
2876 PetscErrorCode ierr;
2877 Vec coordsVec;
2878
2879 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating coords Vec.\n");
2880 ierr = VecCreate(PETSC_COMM_WORLD, &coordsVec);CHKERRQ(ierr);
2881 ierr = VecSetFromOptions(coordsVec);CHKERRQ(ierr);
2882
2883 // For example: "position" is the name of the coordinate data
2884 ierr = ReadFieldData(user, "position", coordsVec, timeIndex, "dat");
2885 if (ierr) {
2887 "Error reading position data (ti=%d).\n",
2888 timeIndex);
2889 PetscFunctionReturn(ierr);
2890 }
2891
2892 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositions - Gathering coords Vec to rank 0.\n");
2893 ierr = VecToArrayOnRank0(coordsVec, Ncoords, coordsArray);CHKERRQ(ierr);
2894
2895 ierr = VecDestroy(&coordsVec);CHKERRQ(ierr);
2896
2898 "Successfully gathered coordinates. Ncoords=%d.\n", *Ncoords);
2899 PetscFunctionReturn(0);
2900}
2901
2902
2903/**
2904 * @brief Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
2905 *
2906 * This function wraps \c ReadFieldData and \c VecToArrayOnRank0 into a single step.
2907 * The gathered data is stored in \p scalarArray on rank 0, with its length in \p Nscalars.
2908 *
2909 * @param[in] timeIndex The time index used to construct file names.
2910 * @param[in] fieldName Name of the field to be read (e.g., "velocity").
2911 * @param[in] user Pointer to the user context.
2912 * @param[out] scalarArray On rank 0, a newly allocated array holding the field data.
2913 * @param[out] Nscalars On rank 0, length of \p scalarArray. On other ranks, 0.
2914 *
2915 * @return PetscErrorCode Returns 0 on success, or non-zero on failures.
2916 */
2917PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex,
2918 const char *fieldName,
2919 UserCtx *user,
2920 double **scalarArray,
2921 PetscInt *Nscalars)
2922{
2923 PetscFunctionBeginUser;
2924
2925 PetscErrorCode ierr;
2926 Vec fieldVec;
2927
2928 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating field Vec.\n");
2929 ierr = VecCreate(PETSC_COMM_WORLD, &fieldVec);CHKERRQ(ierr);
2930 ierr = VecSetFromOptions(fieldVec);CHKERRQ(ierr);
2931
2932 ierr = ReadFieldData(user, fieldName, fieldVec, timeIndex, "dat");
2933 if (ierr) {
2935 "Error reading field '%s' (ti=%d).\n",
2936 fieldName, timeIndex);
2937 PetscFunctionReturn(ierr);
2938 }
2939
2940 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Gathering field Vec to rank 0.\n");
2941 ierr = VecToArrayOnRank0(fieldVec, Nscalars, scalarArray);CHKERRQ(ierr);
2942
2943 ierr = VecDestroy(&fieldVec);CHKERRQ(ierr);
2944
2946 "Successfully gathered field '%s'. Nscalars=%d.\n",
2947 fieldName, *Nscalars);
2948 PetscFunctionReturn(0);
2949}
PetscErrorCode ReadStatisticalFields(UserCtx *user, PetscInt ti)
Reads statistical fields for averaging purposes.
Definition io.c:1360
PetscErrorCode ParsePostProcessingSettings(SimCtx *simCtx)
Initializes post-processing settings from a config file and command-line overrides.
Definition io.c:2523
PetscErrorCode WriteAllSwarmFields(UserCtx *user)
Writes a predefined set of PETSc Swarm fields to files.
Definition io.c:2126
PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
Definition io.c:2279
PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
Parses physical scaling parameters from command-line options.
Definition io.c:2677
PetscInt ReadDataFileToArray(const char *filename, double **data_out, PetscInt *Nout, MPI_Comm comm)
Definition io.c:2727
PetscErrorCode ReadGridFile(UserCtx *user)
Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
Definition io.c:222
PetscErrorCode StringToBCHandlerType(const char *str, BCHandlerType *handler_out)
Converts a string representation of a handler to a BCHandlerType enum.
Definition io.c:359
PetscErrorCode ReadRANSFields(UserCtx *user, PetscInt ti)
Reads RANS-related fields.
Definition io.c:1426
PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
Searches a BC_Param linked list for a key and returns its value as a double.
Definition io.c:405
PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Reads integer swarm data by using ReadFieldData and casting the result.
Definition io.c:1517
PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Writes data from a specific field in a PETSc Swarm to a file.
Definition io.c:1985
static PetscInt * g_IMs_from_file
Caches the IM dimensions for all blocks read from the grid file.
Definition io.c:19
PetscErrorCode WriteStatisticalFields(UserCtx *user)
Writes statistical fields for averaging purposes.
Definition io.c:1889
PetscErrorCode ReadSimulationFields(UserCtx *user, PetscInt ti)
Reads simulation fields from files into their respective PETSc vectors.
Definition io.c:1276
static PetscInt g_nblk_from_file
Stores the number of blocks read from the grid file.
Definition io.c:17
PetscErrorCode WriteFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Writes data from a specific PETSc vector to a single, sequential file.
Definition io.c:1701
PetscErrorCode ReadLESFields(UserCtx *user, PetscInt ti)
Reads LES-related fields.
Definition io.c:1391
PetscErrorCode ReadFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Reads data for a specific field from a file into the provided vector.
Definition io.c:1040
PetscErrorCode ParseAllBoundaryConditions(UserCtx *user, const char *bcs_input_filename)
Parses the boundary conditions file to configure the type, handler, and any associated parameters for...
Definition io.c:484
PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler)
Validates that a specific handler is compatible with a general BC type.
Definition io.c:378
void TrimWhitespace(char *str)
Trims leading and trailing whitespace from a string in-place.
Definition io.c:36
PetscErrorCode WriteSimulationFields(UserCtx *user)
Writes simulation fields to files.
Definition io.c:1826
PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
Reads multiple fields into a DMSwarm, gracefully handling missing optional files.
Definition io.c:1595
static PetscBool g_file_has_been_read
A flag to ensure the grid file is read only once.
Definition io.c:25
PetscErrorCode ReadGridGenerationInputs(UserCtx *user)
Parses command-line options for a programmatically generated grid for a SINGLE block.
Definition io.c:77
PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Reads data from a file into a specified field of a PETSc DMSwarm.
Definition io.c:1472
PetscErrorCode WriteRANSFields(UserCtx *user)
Writes RANS-related fields.
Definition io.c:1951
PetscErrorCode PopulateFinestUserGridResolutionFromOptions(UserCtx *finest_users, PetscInt nblk)
Parses grid resolution arrays (-im, -jm, -km) once and applies them to all finest-grid blocks.
Definition io.c:161
PetscErrorCode DeterminePeriodicity(SimCtx *simCtx)
Scans all block-specific boundary condition files to determine a globally consistent periodicity for ...
Definition io.c:690
PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex, const char *fieldName, UserCtx *user, double **scalarArray, PetscInt *Nscalars)
Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
Definition io.c:2917
PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
Gather a (possibly distributed) PETSc Vec onto rank 0 as a contiguous C array.
Definition io.c:2184
PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Writes integer swarm data by casting it to a temporary Vec and using WriteFieldData.
Definition io.c:2054
PetscErrorCode WriteLESFields(UserCtx *user)
Writes LES-related fields.
Definition io.c:1917
static PetscInt * g_KMs_from_file
Caches the KM dimensions for all blocks read from the grid file.
Definition io.c:23
static PetscInt * g_JMs_from_file
Caches the JM dimensions for all blocks read from the grid file.
Definition io.c:21
PetscErrorCode StringToBCFace(const char *str, BCFace *face_out)
Converts a string representation of a face to a BCFace enum.
Definition io.c:325
PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex, UserCtx *user, double **coordsArray, PetscInt *Ncoords)
Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
Definition io.c:2869
static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
Checks for and reads an optional Eulerian field from a file into a Vec.
Definition io.c:920
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
A parallel-safe helper to verify the existence of a generic file or directory path.
Definition io.c:807
PetscErrorCode GetBCParamBool(BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found)
Searches a BC_Param linked list for a key and returns its value as a bool.
Definition io.c:430
void FreeBC_ParamList(BC_Param *head)
Frees the memory allocated for a linked list of BC_Param structs.
Definition io.c:308
PetscErrorCode StringToBCType(const char *str, BCType *type_out)
Converts a string representation of a BC type to a BCType enum.
Definition io.c:342
PetscErrorCode DisplayBanner(SimCtx *simCtx)
Displays a structured banner summarizing the simulation configuration.
Definition io.c:2372
static PetscErrorCode CheckDataFile(UserCtx *user, PetscInt ti, const char *fieldName, const char *ext, PetscBool *fileExists)
Checks for a data file's existence in a parallel-safe manner.
Definition io.c:862
static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
Checks for and reads an optional DMSwarm field from a file.
Definition io.c:970
Public interface for data input/output routines.
const char * BCHandlerTypeToString(BCHandlerType handler_type)
Converts a BCHandlerType enum to its string representation.
Definition logging.c:748
const char * FieldInitializationToString(PetscInt FieldInitialization)
Helper function to convert FieldInitialization to a string representation.
Definition logging.c:660
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
----— DEBUG ---------------------------------------— #define LOG_ALLOW(scope, level,...
Definition logging.h:267
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:45
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:46
const char * BCFaceToString(BCFace face)
Helper function to convert BCFace enum to a string representation.
Definition logging.c:643
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:200
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:746
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:84
const char * BCTypeToString(BCType type)
Helper function to convert BCType enum to a string representation.
Definition logging.c:722
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:28
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:33
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:31
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:32
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:737
const char * LESModelToString(LESModelType LESFlag)
Helper function to convert LES Flag to a string representation.
Definition logging.c:691
const char * MomentumSolverTypeToString(MomentumSolverType SolverFlag)
Helper function to convert Momentum Solver flag to a string representation.
Definition logging.c:706
const char * ParticleInitializationToString(ParticleInitializationType ParticleInitialization)
Helper function to convert ParticleInitialization to a string representation.
Definition logging.c:675
char statistics_output_prefix[256]
basename for CSV output, e.g.
Definition variables.h:521
BCType
Defines the general mathematical/physical Category of a boundary.
Definition variables.h:210
@ INLET
Definition variables.h:217
@ SYMMETRY
Definition variables.h:215
@ OUTLET
Definition variables.h:216
@ PERIODIC
Definition variables.h:219
@ WALL
Definition variables.h:213
UserCtx * user
Definition variables.h:474
PetscBool inletFaceDefined
Definition variables.h:752
char particle_output_prefix[256]
Definition variables.h:516
PetscMPIInt rank
Definition variables.h:592
PetscInt cgrid
Definition variables.h:748
BoundaryFaceConfig boundary_faces[6]
Definition variables.h:751
PetscInt block_number
Definition variables.h:654
BCFace identifiedInletBCFace
Definition variables.h:753
char euler_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:611
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:736
PetscInt rans
Definition variables.h:674
@ PARTICLE_INIT_SURFACE_RANDOM
Random placement on the inlet face.
Definition variables.h:466
@ PARTICLE_INIT_SURFACE_EDGES
Deterministic placement at inlet face edges.
Definition variables.h:469
PetscReal StartTime
Definition variables.h:602
Vec K_Omega_o
Definition variables.h:788
char output_prefix[256]
Definition variables.h:513
char ** bcs_files
Definition variables.h:662
struct BC_Param_s * next
Definition variables.h:266
PetscReal Min_X
Definition variables.h:743
char * key
Definition variables.h:264
PetscInt KM
Definition variables.h:742
UserMG usermg
Definition variables.h:703
#define MAX_FIELD_LIST_LENGTH
Definition variables.h:490
PetscReal L_ref
Definition variables.h:571
Vec K_Omega
Definition variables.h:788
BCHandlerType
Defines the specific computational "strategy" for a boundary handler.
Definition variables.h:230
@ BC_HANDLER_PERIODIC_GEOMETRIC
Definition variables.h:243
@ BC_HANDLER_INLET_PARABOLIC
Definition variables.h:236
@ BC_HANDLER_INLET_CONSTANT_VELOCITY
Definition variables.h:235
@ BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX
Definition variables.h:246
@ BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX
Definition variables.h:245
@ BC_HANDLER_WALL_MOVING
Definition variables.h:233
@ BC_HANDLER_WALL_NOSLIP
Definition variables.h:232
@ BC_HANDLER_OUTLET_CONSERVATION
Definition variables.h:241
char output_fields_averaged[1024]
Definition variables.h:512
PetscInt FieldInitialization
Definition variables.h:641
#define MAX_PIPELINE_LENGTH
Definition variables.h:489
PetscReal ren
Definition variables.h:636
BCHandlerType handler_type
Definition variables.h:296
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
PetscInt _this
Definition variables.h:746
PetscInt reference[3]
Definition variables.h:528
char output_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:610
PetscReal dt
Definition variables.h:603
PetscReal ry
Definition variables.h:747
PetscInt StepsToRun
Definition variables.h:600
PetscInt k_periodic
Definition variables.h:655
PetscInt timeStep
Definition variables.h:506
PetscInt np
Definition variables.h:681
PetscReal Max_Y
Definition variables.h:743
PetscBool averaging
Definition variables.h:678
char * value
Definition variables.h:265
Vec lCs
Definition variables.h:788
#define MAX_FILENAME_LENGTH
Definition variables.h:491
Vec Ucont
Definition variables.h:760
PetscInt StartStep
Definition variables.h:599
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
PetscScalar x
Definition variables.h:101
Vec Ucat_square_sum
Definition variables.h:791
char * current_io_directory
Definition variables.h:615
Vec P_sum
Definition variables.h:791
char grid_file[PETSC_MAX_PATH_LEN]
Definition variables.h:659
char statistics_pipeline[1024]
e.g.
Definition variables.h:520
char output_fields_instantaneous[1024]
Definition variables.h:511
char eulerianExt[8]
Definition variables.h:524
char particle_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:612
BoundingBox * bboxlist
Definition variables.h:684
Vec lNu_t
Definition variables.h:788
Vec Nu_t
Definition variables.h:788
PetscReal rz
Definition variables.h:747
char source_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:501
char particle_pipeline[1024]
Definition variables.h:514
BC_Param * params
Definition variables.h:297
char eulerianSource[PETSC_MAX_PATH_LEN]
Definition variables.h:608
ParticleInitializationType ParticleInitialization
Definition variables.h:685
PetscScalar z
Definition variables.h:101
Vec lK_Omega_o
Definition variables.h:788
Vec Ucat
Definition variables.h:760
Vec ParticleCount
Definition variables.h:805
ScalingCtx scaling
Definition variables.h:649
Vec lK_Omega
Definition variables.h:788
PetscInt JM
Definition variables.h:742
PetscInt mglevels
Definition variables.h:481
PetscReal Min_Z
Definition variables.h:743
Vec Ucat_sum
Definition variables.h:791
char process_pipeline[1024]
Definition variables.h:510
PetscInt particle_output_freq
Definition variables.h:517
char particle_fields[1024]
Definition variables.h:515
PetscBool outputParticles
Definition variables.h:507
char AnalyticalSolutionType[PETSC_MAX_PATH_LEN]
Definition variables.h:621
PetscReal Max_X
Definition variables.h:743
Cmpnts InitialConstantContra
Definition variables.h:642
PetscReal Min_Y
Definition variables.h:743
PetscInt i_periodic
Definition variables.h:655
PetscInt step
Definition variables.h:597
PostProcessParams * pps
Definition variables.h:720
PetscScalar y
Definition variables.h:101
PetscMPIInt size
Definition variables.h:593
@ EXEC_MODE_SOLVER
Definition variables.h:562
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:563
PetscInt IM
Definition variables.h:742
char _io_context_buffer[PETSC_MAX_PATH_LEN]
Definition variables.h:614
char particleExt[8]
Definition variables.h:525
Vec Ucat_cross_sum
Definition variables.h:791
PetscInt les
Definition variables.h:674
Vec Nvert
Definition variables.h:760
MGCtx * mgctx
Definition variables.h:484
BCType mathematical_type
Definition variables.h:295
PetscReal P_ref
Definition variables.h:574
PetscReal rx
Definition variables.h:747
ExecutionMode exec_mode
Definition variables.h:607
PetscInt startTime
Definition variables.h:504
PetscReal rho_ref
Definition variables.h:573
PetscReal Max_Z
Definition variables.h:743
PetscReal U_ref
Definition variables.h:572
MomentumSolverType mom_solver_type
Definition variables.h:628
char PostprocessingControlFile[PETSC_MAX_PATH_LEN]
Definition variables.h:719
char restart_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:609
PetscReal pseudo_cfl
Definition variables.h:636
PetscInt LoggingFrequency
Definition variables.h:708
Vec Psi
Definition variables.h:806
BCFace
Identifies the six logical faces of a structured computational block.
Definition variables.h:203
@ BC_FACE_NEG_X
Definition variables.h:204
@ BC_FACE_POS_Z
Definition variables.h:206
@ BC_FACE_POS_Y
Definition variables.h:205
@ BC_FACE_NEG_Z
Definition variables.h:206
@ BC_FACE_POS_X
Definition variables.h:204
@ BC_FACE_NEG_Y
Definition variables.h:205
PetscInt j_periodic
Definition variables.h:655
BoundaryCondition * handler
Definition variables.h:298
A node in a linked list for storing key-value parameters from the bcs.dat file.
Definition variables.h:263
Holds the complete configuration for one of the six boundary faces.
Definition variables.h:293
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
A 3D point or vector with PetscScalar components.
Definition variables.h:100
Holds all configuration parameters for a post-processing run.
Definition variables.h:499
The master context for the entire simulation.
Definition variables.h:589
User-defined context containing data specific to a single computational grid level.
Definition variables.h:733