PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
io.c
Go to the documentation of this file.
1/**
2 * @file io.c
3 * @brief Implementation of data input/output routines, focusing on grid configuration.
4 *
5 * This module provides functions to parse grid geometry information, either from
6 * command-line options for programmatically generated grids or by reading the
7 * header of a grid definition file.
8 */
9
10#include "io.h"
11
12// =============================================================================
13// STATIC (PRIVATE) VARIABLES FOR ONE-TIME FILE READ
14// =============================================================================
15
16/** @brief Stores the number of blocks read from the grid file. */
17static PetscInt g_nblk_from_file = 0;
18/** @brief Caches the IM dimensions for all blocks read from the grid file. */
19static PetscInt* g_IMs_from_file = NULL;
20/** @brief Caches the JM dimensions for all blocks read from the grid file. */
21static PetscInt* g_JMs_from_file = NULL;
22/** @brief Caches the KM dimensions for all blocks read from the grid file. */
23static PetscInt* g_KMs_from_file = NULL;
24/** @brief A flag to ensure the grid file is read only once. */
25static PetscBool g_file_has_been_read = PETSC_FALSE;
26
27
28// =============================================================================
29// PUBLIC FUNCTION IMPLEMENTATIONS
30// =============================================================================
31
/**
 * @brief Implementation of \ref TrimWhitespace().
 * @details Removes leading and trailing whitespace from `str` in place.
 *          Full API contract (arguments, ownership, side effects) is documented with
 *          the header declaration in `include/io.h`.
 * @param[in,out] str NUL-terminated string to trim in place; NULL is a no-op.
 * @see TrimWhitespace()
 */
void TrimWhitespace(char *str) {
    if (!str) return;

    // BUGFIX: for an empty string, `str + strlen(str) - 1` formed a pointer
    // one element BEFORE the buffer, which is undefined behavior. Bail early.
    size_t len = strlen(str);
    if (len == 0) return;

    char *start = str;
    // Find the first non-whitespace character.
    // Cast to unsigned char: passing a negative char to isspace() is UB.
    while (isspace((unsigned char)*start)) {
        start++;
    }

    // Find the last non-whitespace character, scanning backwards.
    char *end = str + len - 1;
    while (end > start && isspace((unsigned char)*end)) {
        end--;
    }

    // Null-terminate after the last non-whitespace character.
    *(end + 1) = '\0';

    // If there was leading whitespace, shift the trimmed content to the left.
    if (str != start) {
        memmove(str, start, (size_t)(end - start) + 2); // +2 includes the new NUL
    }
}
62
63/**
64 * @brief Implementation of \ref ShouldWriteDataOutput().
65 * @details Full API contract (arguments, ownership, side effects) is documented with
66 * the header declaration in `include/io.h`.
67 * @see ShouldWriteDataOutput()
68 */
69
70PetscBool ShouldWriteDataOutput(const SimCtx *simCtx, PetscInt completed_step)
71{
72 if (!simCtx) {
73 return PETSC_FALSE;
74 }
75 return (PetscBool)(simCtx->tiout > 0 && completed_step > 0 && completed_step % simCtx->tiout == 0);
76}
77
78
#undef __FUNCT__
#define __FUNCT__ "ReadGridGenerationInputs"
/**
 * @brief Internal helper implementation: `ReadGridGenerationInputs()`.
 * @details Parses per-block grid-generation options (`-im`, `-jm`, `-km`,
 *          domain bounds `-xMins`/`-xMaxs`/..., stretching ratios `-rxs`/...,
 *          and `-cgrids`) for ALL blocks into temporary arrays, then copies
 *          the entries for block `user->_this` into the supplied UserCtx.
 *          Defaults (10^3 cells on the unit cube, uniform stretching) are
 *          applied for any option not present.
 *          Local to this translation unit.
 * @param[in,out] user UserCtx to populate (IM/JM/KM, Min/Max bounds, rx/ry/rz,
 *                     cgrid). `user->simCtx` must already be set.
 * @return 0 on success, or a PETSc error code (block index out of range,
 *         allocation failure, option-parsing failure).
 */
PetscErrorCode ReadGridGenerationInputs(UserCtx *user)
{
    PetscErrorCode ierr;
    SimCtx *simCtx = user->simCtx;
    PetscInt nblk = simCtx->block_number;
    PetscInt block_index = user->_this;   // index of the block this UserCtx represents
    PetscBool found;

    // Temporary arrays to hold the parsed values for ALL blocks
    PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL, *cgrids = NULL;
    PetscReal *xMins = NULL, *xMaxs = NULL, *rxs = NULL;
    PetscReal *yMins = NULL, *yMaxs = NULL, *rys = NULL;
    PetscReal *zMins = NULL, *zMaxs = NULL, *rzs = NULL;

    PetscFunctionBeginUser;

    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Reading generated grid inputs for block %d.\n", simCtx->rank, block_index);

    // Validate before allocating so no cleanup is needed on this error path.
    if (block_index >= nblk) {
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block index %d is out of range for nblk=%d", block_index, nblk);
    }

    // --- Allocate temporary storage for all array options ---
    ierr = PetscMalloc4(nblk, &IMs, nblk, &JMs, nblk, &KMs, nblk, &cgrids); CHKERRQ(ierr);
    ierr = PetscMalloc6(nblk, &xMins, nblk, &xMaxs, nblk, &rxs, nblk, &yMins, nblk, &yMaxs, nblk, &rys); CHKERRQ(ierr);
    ierr = PetscMalloc3(nblk, &zMins, nblk, &zMaxs, nblk, &rzs); CHKERRQ(ierr);

    // --- Set default values for the temporary arrays ---
    for (PetscInt i = 0; i < nblk; ++i) {
        IMs[i] = 10; JMs[i] = 10; KMs[i] = 10; cgrids[i] = 0;
        xMins[i] = 0.0; xMaxs[i] = 1.0; rxs[i] = 1.0;
        yMins[i] = 0.0; yMaxs[i] = 1.0; rys[i] = 1.0;
        zMins[i] = 0.0; zMaxs[i] = 1.0; rzs[i] = 1.0;
    }

    // --- Parse the array options from the command line / control file ---
    // `count` is in/out for PetscOptionsGet*Array: reset to nblk before each
    // call. `found` is intentionally ignored — defaults remain if unset.
    PetscInt count;
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMins", xMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMaxs", xMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rxs", rxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMins", yMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMaxs", yMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rys", rys, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMins", zMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMaxs", zMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rzs", rzs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-cgrids", cgrids, &count, &found); CHKERRQ(ierr);

    // --- Assign the parsed values to the specific UserCtx struct passed in ---
    user->IM = IMs[block_index];
    user->JM = JMs[block_index];
    user->KM = KMs[block_index];
    user->Min_X = xMins[block_index];
    user->Max_X = xMaxs[block_index];
    user->rx = rxs[block_index];
    user->Min_Y = yMins[block_index];
    user->Max_Y = yMaxs[block_index];
    user->ry = rys[block_index];
    user->Min_Z = zMins[block_index];
    user->Max_Z = zMaxs[block_index];
    user->rz = rzs[block_index];
    user->cgrid = cgrids[block_index];

    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d grid generation inputs set: IM=%d, JM=%d, KM=%d\n",
              simCtx->rank, block_index, user->IM, user->JM, user->KM);
    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d bounds: X=[%.2f, %.2f], Y=[%.2f, %.2f], Z=[%.2f, %.2f]\n",
              simCtx->rank, block_index, user->Min_X, user->Max_X, user->Min_Y, user->Max_Y, user->Min_Z, user->Max_Z);

    // --- Clean up temporary storage ---
    ierr = PetscFree4(IMs, JMs, KMs, cgrids); CHKERRQ(ierr);
    ierr = PetscFree6(xMins, xMaxs, rxs, yMins, yMaxs, rys); CHKERRQ(ierr);
    ierr = PetscFree3(zMins, zMaxs, rzs); CHKERRQ(ierr);

    PetscFunctionReturn(0);
}
165
166/**
167 * @brief Internal helper implementation: `PopulateFinestUserGridResolutionFromOptions()`.
168 * @details Local to this translation unit.
169 */
170PetscErrorCode PopulateFinestUserGridResolutionFromOptions(UserCtx *finest_users, PetscInt nblk)
171{
172 PetscErrorCode ierr;
173 PetscBool found;
174 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL;
175 SimCtx *simCtx = NULL;
176
177 PetscFunctionBeginUser;
178
179 if (!finest_users) {
180 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "finest_users cannot be NULL.");
181 }
182 if (nblk <= 0) {
183 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nblk must be positive. Got %d.", nblk);
184 }
185 simCtx = finest_users[0].simCtx;
186
187 ierr = PetscMalloc3(nblk, &IMs, nblk, &JMs, nblk, &KMs); CHKERRQ(ierr);
188 for (PetscInt i = 0; i < nblk; ++i) {
189 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10;
190 }
191
192 PetscInt count;
193 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
194 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
195 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
196
197 for (PetscInt bi = 0; bi < nblk; ++bi) {
198 finest_users[bi].IM = IMs[bi];
199 finest_users[bi].JM = JMs[bi];
200 finest_users[bi].KM = KMs[bi];
201 if (simCtx) {
203 "Rank %d: Preloaded analytical grid resolution for block %d: IM=%d, JM=%d, KM=%d\n",
204 simCtx->rank, bi, finest_users[bi].IM, finest_users[bi].JM, finest_users[bi].KM);
205 }
206 }
207
208 ierr = PetscFree3(IMs, JMs, KMs); CHKERRQ(ierr);
209 PetscFunctionReturn(0);
210}
211
212
213#undef __FUNCT__
214#define __FUNCT__ "ReadGridFile"
215/**
216 * @brief Internal helper implementation: `ReadGridFile()`.
217 * @details Local to this translation unit.
218 */
219PetscErrorCode ReadGridFile(UserCtx *user)
220{
221 PetscErrorCode ierr;
222 SimCtx *simCtx = user->simCtx;
223 PetscInt block_index = user->_this;
224
225 PetscFunctionBeginUser;
227
228 // --- One-Time Read and Broadcast Logic ---
230 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "First call to ReadGridFile. Reading and broadcasting grid file header from '%s'...\n", simCtx->grid_file);
231 PetscMPIInt rank = simCtx->rank;
232 PetscInt nblk = simCtx->block_number;
233
234 if (rank == 0) {
235 FILE *fd = fopen(simCtx->grid_file, "r");
236 if (!fd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open file: %s", simCtx->grid_file);
237
238 // Read and validate the canonical PICGRID header.
239 char firstTok[32] = {0};
240 if (fscanf(fd, "%31s", firstTok) != 1)
241 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Empty grid file: %s", simCtx->grid_file);
242 if (strcmp(firstTok, "PICGRID") != 0)
243 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ,
244 "Grid file %s must begin with the canonical PICGRID header.", simCtx->grid_file);
245 if (fscanf(fd, "%d", &g_nblk_from_file) != 1)
246 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected number of blocks after \"PICGRID\" in %s", simCtx->grid_file);
247 if (g_nblk_from_file != nblk) {
248 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Mismatch: -nblk is %d but grid file specifies %d blocks.", nblk, g_nblk_from_file);
249 }
250
251 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
252 for (PetscInt i = 0; i < nblk; ++i) {
253 if (fscanf(fd, "%d %d %d\n", &g_IMs_from_file[i], &g_JMs_from_file[i], &g_KMs_from_file[i]) != 3) {
254 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected 3 integers for block %d in %s", i, simCtx->grid_file);
255 }
256 }
257 fclose(fd);
258 }
259
260 // Broadcast nblk to verify (optional, good practice)
261 ierr = MPI_Bcast(&g_nblk_from_file, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
262
263 // Allocate on other ranks before receiving the broadcast
264 if (rank != 0) {
265 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
266 }
267
268 // Broadcast the data arrays
269 ierr = MPI_Bcast(g_IMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
270 ierr = MPI_Bcast(g_JMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
271 ierr = MPI_Bcast(g_KMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
272
273 g_file_has_been_read = PETSC_TRUE;
274 LOG_ALLOW(GLOBAL, LOG_INFO, "Grid file header read and broadcast complete.\n");
275 }
276
277 // --- Per-Block Assignment Logic (runs on every call) ---
278 user->IM = g_IMs_from_file[block_index];
279 user->JM = g_JMs_from_file[block_index];
280 user->KM = g_KMs_from_file[block_index];
281
282 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Set file inputs for Block %d: IM=%d, JM=%d, KM=%d\n",
283 simCtx->rank, block_index, user->IM, user->JM, user->KM);
284
286 PetscFunctionReturn(0);
287}
288
289
290//================================================================================
291//
292// PRIVATE HELPER FUNCTIONS
293//
294//================================================================================
295
296/**
297 * @brief Implementation of \ref FreeBC_ParamList().
298 * @details Full API contract (arguments, ownership, side effects) is documented with
299 * the header declaration in `include/io.h`.
300 * @see FreeBC_ParamList()
301 */
303 BC_Param *current = head;
304 while (current != NULL) {
305 BC_Param *next = current->next;
306 PetscFree(current->key);
307 PetscFree(current->value);
308 PetscFree(current);
309 current = next;
310 }
311}
312
313/**
314 * @brief Internal helper implementation: `StringToBCFace()`.
315 * @details Local to this translation unit.
316 */
317PetscErrorCode StringToBCFace(const char* str, BCFace* face_out) {
318 if (strcasecmp(str, "-Xi") == 0) *face_out = BC_FACE_NEG_X;
319 else if (strcasecmp(str, "+Xi") == 0) *face_out = BC_FACE_POS_X;
320 else if (strcasecmp(str, "-Eta") == 0) *face_out = BC_FACE_NEG_Y;
321 else if (strcasecmp(str, "+Eta") == 0) *face_out = BC_FACE_POS_Y;
322 else if (strcasecmp(str, "-Zeta") == 0) *face_out = BC_FACE_NEG_Z;
323 else if (strcasecmp(str, "+Zeta") == 0) *face_out = BC_FACE_POS_Z;
324 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown face specifier: %s", str);
325 return 0;
326}
327
328/**
329 * @brief Internal helper implementation: `StringToBCType()`.
330 * @details Local to this translation unit.
331 */
332PetscErrorCode StringToBCType(const char* str, BCType* type_out) {
333 if (strcasecmp(str, "WALL") == 0) *type_out = WALL;
334 else if (strcasecmp(str, "SYMMETRY") == 0) *type_out = SYMMETRY;
335 else if (strcasecmp(str, "INLET") == 0) *type_out = INLET;
336 else if (strcasecmp(str, "OUTLET") == 0) *type_out = OUTLET;
337 else if (strcasecmp(str, "PERIODIC") == 0) *type_out = PERIODIC;
338 // ... add other BCTypes here ...
339 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Type string: %s", str);
340 return 0;
341}
342
343/**
344 * @brief Internal helper implementation: `StringToBCHandlerType()`.
345 * @details Local to this translation unit.
346 */
347PetscErrorCode StringToBCHandlerType(const char* str, BCHandlerType* handler_out) {
348 if (strcasecmp(str, "noslip") == 0) *handler_out = BC_HANDLER_WALL_NOSLIP;
349 else if (strcasecmp(str, "constant_velocity") == 0) *handler_out = BC_HANDLER_INLET_CONSTANT_VELOCITY;
350 else if (strcasecmp(str, "conservation") == 0) *handler_out = BC_HANDLER_OUTLET_CONSERVATION;
351 else if (strcasecmp(str, "parabolic") == 0) *handler_out = BC_HANDLER_INLET_PARABOLIC;
352 else if (strcasecmp(str,"geometric") == 0) *handler_out = BC_HANDLER_PERIODIC_GEOMETRIC;
353 else if (strcasecmp(str,"constant_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX;
354 else if (strcasecmp(str,"initial_flux") == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX;
355 // ... add other BCHandlerTypes here ...
356 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Handler string: %s", str);
357 return 0;
358}
359
360/**
361 * @brief Internal helper implementation: `ValidateBCHandlerForBCType()`.
362 * @details Local to this translation unit.
363 */
364PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler) {
365 switch (type) {
366 case OUTLET:
367 if(handler != BC_HANDLER_OUTLET_CONSERVATION) return PETSC_ERR_ARG_WRONG;
368 break;
369 case WALL:
370 if (handler != BC_HANDLER_WALL_NOSLIP && handler != BC_HANDLER_WALL_MOVING) return PETSC_ERR_ARG_WRONG;
371 break;
372 case INLET:
373 if (handler != BC_HANDLER_INLET_CONSTANT_VELOCITY && handler != BC_HANDLER_INLET_PARABOLIC) return PETSC_ERR_ARG_WRONG;
374 break;
375 case PERIODIC:
376 if(handler != BC_HANDLER_PERIODIC_GEOMETRIC && handler != BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX && handler != BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX) return PETSC_ERR_ARG_WRONG;
377 // ... add other validation cases here ...
378 default: break;
379 }
380 return 0; // Combination is valid
381}
382
383/**
384 * @brief Internal helper implementation: `GetBCParamReal()`.
385 * @details Local to this translation unit.
386 */
387PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found) {
388 *found = PETSC_FALSE;
389 *value_out = 0.0;
390 if (!key) return 0; // No key to search for
391
392 BC_Param *current = params;
393 while (current) {
394 if (strcasecmp(current->key, key) == 0) {
395 *value_out = atof(current->value);
396 *found = PETSC_TRUE;
397 return 0; // Found it, we're done
398 }
399 current = current->next;
400 }
401 return 0; // It's not an error to not find the key.
402}
403
404/**
405 * @brief Internal helper implementation: `GetBCParamBool()`.
406 * @details Local to this translation unit.
407 */
408PetscErrorCode GetBCParamBool(BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found) {
409 *found = PETSC_FALSE;
410 *value_out = PETSC_FALSE;
411 if (!key) return 0; // No key to search for
412
413 BC_Param *current = params;
414 while (current) {
415 if (strcasecmp(current->key, key) == 0) {
416 // Key was found.
417 *found = PETSC_TRUE;
418
419 // Check the value string. Default to FALSE if the value is NULL or doesn't match a "true" string.
420 if (current->value &&
421 (strcasecmp(current->value, "true") == 0 ||
422 strcmp(current->value, "1") == 0 ||
423 strcasecmp(current->value, "yes") == 0))
424 {
425 *value_out = PETSC_TRUE;
426 } else {
427 *value_out = PETSC_FALSE;
428 }
429 return 0; // Found it, we're done
430 }
431 current = current->next;
432 }
433 return 0; // It's not an error to not find the key.
434}
435
436//================================================================================
437//
438// PUBLIC PARSING FUNCTION
439//
440//================================================================================
#undef __FUNCT__
#define __FUNCT__ "ParseAllBoundaryConditions"
/**
 * @brief Internal helper implementation: `ParseAllBoundaryConditions()`.
 * @details Rank 0 parses the boundary-condition file (one line per face:
 *          "<face> <type> <handler> [key=value ...]") into a temporary
 *          6-entry config array, then broadcasts each face's enums and a
 *          serialized copy of its key/value parameter list to all ranks.
 *          Finally every rank records the first INLET face for particle
 *          initialization. Local to this translation unit.
 * @param[in,out] user               UserCtx whose boundary_faces[6],
 *                                   inletFaceDefined and identifiedInletBCFace
 *                                   are populated on all ranks.
 * @param[in]     bcs_input_filename Path to the BC file (read on rank 0 only).
 * @return 0 on success, or a PETSc error code (open failure, unknown
 *         face/type/handler string, invalid type/handler combination).
 */
PetscErrorCode ParseAllBoundaryConditions(UserCtx *user, const char *bcs_input_filename)
{
    PetscErrorCode ierr;
    PetscMPIInt rank;

    // Temporary storage for rank 0 to build the configuration before broadcasting.
    BoundaryFaceConfig configs_rank0[6];

    PetscFunctionBeginUser;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);

    if (rank == 0) {
        FILE *file;
        char line_buffer[1024];

        // Initialize the temporary config array with safe defaults on rank 0.
        for (int i = 0; i < 6; i++) {
            configs_rank0[i].face_id = (BCFace)i;
            configs_rank0[i].mathematical_type = WALL;
            configs_rank0[i].handler_type = BC_HANDLER_WALL_NOSLIP;
            configs_rank0[i].params = NULL;
            configs_rank0[i].handler = NULL; // Handler object is not created here.
        }

        LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing BC configuration from '%s' on rank 0... \n", bcs_input_filename);
        file = fopen(bcs_input_filename, "r");
        if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_input_filename);

        while (fgets(line_buffer, sizeof(line_buffer), file)) {
            char *current_pos = line_buffer;
            while (isspace((unsigned char)*current_pos)) current_pos++; // Skip leading whitespace
            // Skip comment lines and blank lines.
            if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n' || *current_pos == '\r') continue;

            // First three whitespace-separated tokens: face, type, handler.
            char *face_str = strtok(current_pos, " \t\n\r");
            char *type_str = strtok(NULL, " \t\n\r");
            char *handler_str = strtok(NULL, " \t\n\r");

            if (!face_str || !type_str || !handler_str) {
                LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed line in bcs.dat, skipping: %s \n", line_buffer);
                continue;
            }

            BCFace face_enum;
            BCType type_enum;
            BCHandlerType handler_enum;
            const char* handler_name_for_log;

            // --- Convert strings to enums and validate ---
            ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
            ierr = StringToBCType(type_str, &type_enum); CHKERRQ(ierr);
            ierr = StringToBCHandlerType(handler_str, &handler_enum); CHKERRQ(ierr);
            ierr = ValidateBCHandlerForBCType(type_enum, handler_enum);
            if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Validation failed: Handler '%s' is not valid for Type '%s' on Face '%s'.\n", handler_str, type_str, face_str);

            // Store the core types for the corresponding face
            configs_rank0[face_enum].mathematical_type = type_enum;
            configs_rank0[face_enum].handler_type = handler_enum;
            handler_name_for_log = BCHandlerTypeToString(handler_enum); // Assumes this utility exists
            LOG_ALLOW(GLOBAL, LOG_DEBUG, "  Parsed Face '%s': Type=%s, Handler=%s \n", face_str, type_str, handler_name_for_log);

            // --- Parse optional key=value parameters for this face ---
            FreeBC_ParamList(configs_rank0[face_enum].params); // Clear any previous (default) params
            configs_rank0[face_enum].params = NULL;
            BC_Param **param_next_ptr = &configs_rank0[face_enum].params; // Pointer to the 'next' pointer to build the list

            // strtok(NULL, ...) continues tokenizing the same line started above.
            char* token;
            while ((token = strtok(NULL, " \t\n\r")) != NULL) {
                char* equals_ptr = strchr(token, '=');
                if (!equals_ptr) {
                    LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed parameter '%s' on face '%s', skipping. \n", token, face_str);
                    continue;
                }

                *equals_ptr = '\0'; // Temporarily split the string at '=' to separate key and value
                char* key_str = token;
                char* value_str = equals_ptr + 1;

                // Append a deep-copied key/value node to the face's list.
                BC_Param *new_param;
                ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
                ierr = PetscStrallocpy(key_str, &new_param->key); CHKERRQ(ierr);
                ierr = PetscStrallocpy(value_str, &new_param->value); CHKERRQ(ierr);
                new_param->next = NULL;

                *param_next_ptr = new_param;
                param_next_ptr = &new_param->next;
                LOG_ALLOW(GLOBAL, LOG_TRACE, "    - Found param: [%s] = [%s] \n", new_param->key, new_param->value);
            }
        }
        fclose(file);
    }

    // =========================================================================
    //                  BROADCASTING THE CONFIGURATION FROM RANK 0
    // =========================================================================
    // This is a critical step to ensure all processes have the same configuration.

    LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d broadcasting/receiving BC configuration.\n", rank);

    for (int i = 0; i < 6; i++) {
        // --- Broadcast simple enums ---
        if (rank == 0) {
            user->boundary_faces[i] = configs_rank0[i]; // Rank 0 populates its final struct
        }
        ierr = MPI_Bcast(&user->boundary_faces[i].mathematical_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
        ierr = MPI_Bcast(&user->boundary_faces[i].handler_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

        // --- Serialize and Broadcast the parameter linked list ---
        PetscInt n_params = 0;
        if (rank == 0) { // On rank 0, count the number of parameters to send
            for (BC_Param *p = user->boundary_faces[i].params; p; p = p->next) n_params++;
        }
        ierr = MPI_Bcast(&n_params, 1, MPI_INT, 0, PETSC_COMM_WORLD);CHKERRQ(ierr);

        if (rank != 0) { // Non-root ranks need to receive and build the list
            FreeBC_ParamList(user->boundary_faces[i].params); // Ensure list is empty before building
            user->boundary_faces[i].params = NULL;
        }

        BC_Param **param_next_ptr = &user->boundary_faces[i].params;

        // Each parameter is shipped as a fixed 256-byte key buffer plus a
        // fixed 256-byte value buffer (keys/values longer than 255 truncate).
        for (int j = 0; j < n_params; j++) {
            char key_buf[256] = {0}, val_buf[256] = {0};
            if (rank == 0) {
                // On rank 0, navigate to the j-th param and copy its data to buffers
                BC_Param *p = user->boundary_faces[i].params;
                for (int k = 0; k < j; k++) p = p->next;
                strncpy(key_buf, p->key, 255);
                strncpy(val_buf, p->value, 255);
            }

            ierr = MPI_Bcast(key_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
            ierr = MPI_Bcast(val_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

            if (rank != 0) {
                // On non-root ranks, deserialize: create a new node and append it
                BC_Param *new_param;
                ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
                ierr = PetscStrallocpy(key_buf, &new_param->key); CHKERRQ(ierr);
                ierr = PetscStrallocpy(val_buf, &new_param->value); CHKERRQ(ierr);
                new_param->next = NULL;
                *param_next_ptr = new_param;
                param_next_ptr = &new_param->next;
            } else {
                // On rank 0, just advance the pointer for the next iteration
                param_next_ptr = &((*param_next_ptr)->next);
            }
        }
        user->boundary_faces[i].face_id = (BCFace)i; // Ensure face_id is set on all ranks
    }

    // --- Set particle inlet lookup fields used by the particle system ---
    user->inletFaceDefined = PETSC_FALSE;
    for (int i=0; i<6; i++) {

        if (user->boundary_faces[i].mathematical_type == INLET && !user->inletFaceDefined) {
            user->inletFaceDefined = PETSC_TRUE;
            user->identifiedInletBCFace = (BCFace)i;
            LOG_ALLOW(GLOBAL, LOG_INFO, "Inlet face for particle initialization identified as Face %d.\n", i);
            break; // Found the first one, stop looking
        }
    }


    if (rank == 0) {
        // Rank 0 can now free the linked lists it created for the temporary storage.
        // As written, user->boundary_faces was populated directly on rank 0, so no extra free is needed.
        // for(int i=0; i<6; i++) FreeBC_ParamList(configs_rank0[i].params); // This would be needed if we used configs_rank0 exclusively
    }

    PetscFunctionReturn(0);
}
620
621//================================================================================
622//
623// PRIVATE HELPER FUNCTIONS
624//
625//================================================================================
626
627// ... (existing helper functions like FreeBC_ParamList, StringToBCFace, etc.) ...
628#undef __FUNCT__
629#define __FUNCT__ "DeterminePeriodicity"
630/**
631 * @brief Internal helper implementation: `DeterminePeriodicity()`.
632 * @details Local to this translation unit.
633 */
634PetscErrorCode DeterminePeriodicity(SimCtx *simCtx)
635{
636 PetscErrorCode ierr;
637 PetscMPIInt rank;
638 PetscInt periodic_flags[3] = {0, 0, 0}; // Index 0:I, 1:J, 2:K
639
640 PetscFunctionBeginUser;
641 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
642
643 // --- Part 1: Collectively verify all BCS files exist before proceeding ---
644 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
645 const char *bcs_filename = simCtx->bcs_files[bi];
646 if (!bcs_filename) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "BCS filename for block %d is not set in SimCtx.", bi);
647 char desc_buf[256];
648 PetscBool file_exists;
649 snprintf(desc_buf, sizeof(desc_buf), "BCS file for block %d", bi);
650 ierr = VerifyPathExistence(bcs_filename, PETSC_FALSE, PETSC_FALSE, desc_buf, &file_exists); CHKERRQ(ierr);
651 }
652
653 // --- Part 2: Rank 0 does the parsing, since we know all files exist ---
654 if (rank == 0) {
655 PetscBool global_is_periodic[3] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};
656 PetscBool is_set = PETSC_FALSE;
657
658 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
659 const char *bcs_filename = simCtx->bcs_files[bi];
660 FILE *file = fopen(bcs_filename, "r");
661
662 PetscBool face_is_periodic[6] = {PETSC_FALSE};
663 char line_buffer[1024];
664
665 while (fgets(line_buffer, sizeof(line_buffer), file)) {
666 char *current_pos = line_buffer;
667 while (isspace((unsigned char)*current_pos)) current_pos++;
668 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n') continue;
669
670 // --- Tokenize the line exactly like the main parser ---
671 char *face_str = strtok(current_pos, " \t\n\r");
672 char *type_str = strtok(NULL, " \t\n\r");
673
674 // If the line doesn't have at least two tokens, we can't determine the type.
675 if (!face_str || !type_str) continue;
676
677 // --- Perform a direct, non-erroring check on the mathematical type string ---
678 if (strcasecmp(type_str, "PERIODIC") == 0) {
679 BCFace face_enum;
680 // A malformed face string on a periodic line IS a fatal error.
681 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
682 face_is_periodic[face_enum] = PETSC_TRUE;
683 }
684 // Any other type_str (e.g., "WALL", "INLET") is correctly and silently ignored.
685 }
686 fclose(file);
687
688 // --- Validate consistency within this file ---
689 if (face_is_periodic[BC_FACE_NEG_X] != face_is_periodic[BC_FACE_POS_X])
690 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent X-periodicity in file '%s' for block %d. Both -Xi and +Xi must be periodic or neither.", bcs_filename, bi);
691 if (face_is_periodic[BC_FACE_NEG_Y] != face_is_periodic[BC_FACE_POS_Y])
692 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Y-periodicity in file '%s' for block %d. Both -Eta and +Eta must be periodic or neither.", bcs_filename, bi);
693 if (face_is_periodic[BC_FACE_NEG_Z] != face_is_periodic[BC_FACE_POS_Z])
694 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Z-periodicity in file '%s' for block %d. Both -Zeta and +Zeta must be periodic or neither.", bcs_filename, bi);
695
696 PetscBool local_is_periodic[3] = {face_is_periodic[BC_FACE_NEG_X], face_is_periodic[BC_FACE_NEG_Y], face_is_periodic[BC_FACE_NEG_Z]};
697
698 // --- Validate consistency across block files ---
699 if (!is_set) {
700 global_is_periodic[0] = local_is_periodic[0];
701 global_is_periodic[1] = local_is_periodic[1];
702 global_is_periodic[2] = local_is_periodic[2];
703 is_set = PETSC_TRUE;
704 } else {
705 if (global_is_periodic[0] != local_is_periodic[0] || global_is_periodic[1] != local_is_periodic[1] || global_is_periodic[2] != local_is_periodic[2]) {
706 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
707 "Periodicity mismatch between blocks. Block 0 requires (I:%d, J:%d, K:%d), but block %d (file '%s') has (I:%d, J:%d, K:%d).",
708 (int)global_is_periodic[0], (int)global_is_periodic[1], (int)global_is_periodic[2],
709 bi, bcs_filename,
710 (int)local_is_periodic[0], (int)local_is_periodic[1], (int)local_is_periodic[2]);
711 }
712 }
713 } // end loop over blocks
714
715 periodic_flags[0] = (global_is_periodic[0]) ? 1 : 0;
716 periodic_flags[1] = (global_is_periodic[1]) ? 1 : 0;
717 periodic_flags[2] = (global_is_periodic[2]) ? 1 : 0;
718
719 LOG_ALLOW(GLOBAL, LOG_INFO, "Global periodicity determined: I-periodic=%d, J-periodic=%d, K-periodic=%d\n",
720 periodic_flags[0], periodic_flags[1], periodic_flags[2]);
721 }
722
723 // --- Part 3: Broadcast the final flags from rank 0 to all other ranks ---
724 ierr = MPI_Bcast(periodic_flags, 3, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
725
726 // --- All ranks now update their SimCtx ---
727 simCtx->i_periodic = periodic_flags[0];
728 simCtx->j_periodic = periodic_flags[1];
729 simCtx->k_periodic = periodic_flags[2];
730
731 PetscFunctionReturn(0);
732}
733
734/**
735 * @brief Internal helper implementation: `VerifyPathExistence()`.
736 * @details Local to this translation unit.
737 */
738PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
739{
740 PetscErrorCode ierr;
741 PetscMPIInt rank;
742 MPI_Comm comm = PETSC_COMM_WORLD;
743
744 PetscFunctionBeginUser;
745 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
746
747 if (rank == 0) {
748 if (is_dir) {
749 ierr = PetscTestDirectory(path, 'r', exists); CHKERRQ(ierr);
750 } else {
751 ierr = PetscTestFile(path, 'r', exists); CHKERRQ(ierr);
752 }
753
754 if (!(*exists)) {
755 if (is_optional) {
756 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s not found at: %s (using defaults/ignoring).\n", description, path);
757 } else {
758 LOG_ALLOW(GLOBAL, LOG_ERROR, "Mandatory %s not found at: %s\n", description, path);
759 }
760 } else {
761 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found %s: %s\n", description, path);
762 }
763 }
764
765 // Broadcast the result from Rank 0
766 PetscMPIInt exists_int = (rank == 0) ? (PetscMPIInt)(*exists) : 0;
767 ierr = MPI_Bcast(&exists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
768 *exists = (PetscBool)exists_int;
769
770 // Collective error for mandatory files
771 if (!(*exists) && !is_optional) {
772 SETERRQ(comm, PETSC_ERR_FILE_OPEN, "Mandatory %s not found. Rank 0 expected it at '%s'. Check path and permissions.", description, path);
773 }
774
775 PetscFunctionReturn(0);
776}
777
778/**
779 * @brief Internal helper implementation: `CheckDataFile()`.
780 * @details Local to this translation unit.
781 */
782static PetscErrorCode CheckDataFile(UserCtx *user, PetscInt ti, const char *fieldName, const char *ext, PetscBool *fileExists)
783{
784 PetscErrorCode ierr;
785 PetscMPIInt rank;
786 MPI_Comm comm = PETSC_COMM_WORLD;
787 PetscInt placeholder_int = 0;
788 SimCtx *simCtx = user->simCtx;
789
790 PetscFunctionBeginUser;
791
792 if(!simCtx->current_io_directory) {
793 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "I/O context directory is NULL. Ensure it is set before calling CheckDataFile().");
794 }
795
796 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
797
798 if (rank == 0) {
799 char filename[PETSC_MAX_PATH_LEN];
800 // Use the same standardized, rank-independent filename format
801
802 ierr = PetscSNPrintf(filename, sizeof(filename), "%s/%s%05"PetscInt_FMT"_%d.%s",simCtx->current_io_directory,fieldName, ti, placeholder_int, ext);
803 ierr = PetscTestFile(filename, 'r', fileExists); CHKERRQ(ierr);
804 if (!(*fileExists)) {
805 LOG_ALLOW(GLOBAL, LOG_WARNING, "(Rank 0) - Optional data file '%s' not found.\n", filename);
806 }
807 }
808
809 // Broadcast the result from Rank 0 to all other ranks.
810 // We cast the PetscBool to a PetscMPIInt for MPI_Bcast.
811 PetscMPIInt fileExists_int = (rank == 0) ? (PetscMPIInt)(*fileExists) : 0;
812 ierr = MPI_Bcast(&fileExists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
813 *fileExists = (PetscBool)fileExists_int;
814
815 PetscFunctionReturn(0);
816}
817
818/**
819 * @brief Internal helper implementation: `ReadOptionalField()`.
820 * @details Local to this translation unit.
821 */
822static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
823{
824 PetscErrorCode ierr;
825 PetscBool fileExists;
826
827 PetscFunctionBeginUser;
828
829 /* Check if the data file for this optional field exists. */
830 ierr = CheckDataFile(user,ti, field_name, ext, &fileExists); CHKERRQ(ierr);
831
832 if (fileExists) {
833 /* File exists, so we MUST be able to read it. */
834 LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
835 ierr = ReadFieldData(user, field_name, field_vec, ti, ext);
836
837 if (ierr) {
838 /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
839 permissions issue. Other errors could be size mismatch or corruption. */
840 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
841 } else {
842 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
843 }
844 } else {
845 /* File does not exist, which is acceptable for an optional field. */
846 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
847 }
848
849 PetscFunctionReturn(0);
850}
851
852/**
853 * @brief Internal helper implementation: `ReadOptionalSwarmField()`.
854 * @details Local to this translation unit.
855 */
856static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
857{
858 PetscErrorCode ierr;
859 PetscBool fileExists;
860
861 PetscFunctionBeginUser;
862
863 /* Check if the data file for this optional field exists. */
864 ierr = CheckDataFile(user,ti, field_name, ext, &fileExists); CHKERRQ(ierr);
865
866 if (fileExists) {
867 /* File exists, so we MUST be able to read it. */
868 LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
869 if(strcasecmp(field_name,"DMSwarm_CellID") == 0 || strcasecmp(field_name,"DMSwarm_pid")== 0 || strcasecmp(field_name,"DMSwarm_location_status")== 0 ) {
870 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading integer swarm field '%s'.\n", field_name);
871 ierr = ReadSwarmIntField(user,field_name,ti,ext);
872 }
873 else{
874 ierr = ReadSwarmField(user, field_name, ti, ext);
875 }
876
877 if (ierr) {
878 /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
879 permissions issue. Other errors could be size mismatch or corruption. */
880 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
881 } else {
882 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
883 }
884 } else {
885 /* File does not exist, which is acceptable for an optional field. */
886 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
887 }
888
889 PetscFunctionReturn(0);
890}
891
892#undef __FUNCT__
893#define __FUNCT__ "ReadFieldData"
/**
 * @brief Implementation of \ref ReadFieldData().
 * @details Externally-linked (non-static) reader used by both the field and
 *          swarm I/O paths. Rank 0 reads the binary file written by the
 *          corresponding writer; data is redistributed to all ranks.
 * @see ReadFieldData()
 */
898PetscErrorCode ReadFieldData(UserCtx *user,
899 const char *field_name,
900 Vec field_vec,
901 PetscInt ti,
902 const char *ext)
903{
904 PetscErrorCode ierr;
905 char filename[PETSC_MAX_PATH_LEN];
906 MPI_Comm comm;
907 PetscMPIInt rank,size;
908 SimCtx *simCtx = user->simCtx;
909
910
911 PetscFunctionBeginUser;
913
914 if(!simCtx->current_io_directory){
915 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling ReadFieldData().");
916 }
917
918
919 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
920 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
921 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
922
923 const char *source_path = NULL;
924 source_path = simCtx->current_io_directory;
925
926 if(!source_path){
927 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "source_path was not set for the current execution mode.");
928 }
929 /* ---------------------------------------------------------------------
930 * Compose <path-to-file>/<field_name><step with 5 digits>_0.<ext>
931 * (all restart files are written by rank-0 with that naming scheme).
932 * ------------------------------------------------------------------ */
933 ierr = PetscSNPrintf(filename,sizeof(filename),
934 "%s/%s%05" PetscInt_FMT "_0.%s",
935 source_path,field_name,ti,ext);CHKERRQ(ierr);
936
938 "Attempting to read <%s> on rank %d/%d\n",
939 filename,(int)rank,(int)size);
940
941 /* ======================================================================
942 * 1. SERIAL JOB – just hand the Vec to VecLoad()
943 * ==================================================================== */
944 if(size==1)
945 {
946 PetscViewer viewer;
947 PetscBool found;
948 Vec temp_vec;
949 PetscInt expectedSize,loadedSize;
950
951 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
952 if(!found) SETERRQ(comm,PETSC_ERR_FILE_OPEN,
953 "Restart/Source file not found: %s",filename);
954
955 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,FILE_MODE_READ,&viewer);CHKERRQ(ierr);
956// ---- START MODIFICATION ----
957 // DO NOT load directly into field_vec, as this can resize it, which is
958 // illegal for DMSwarm "view" vectors. Instead, load into a temporary vector.
959 ierr = VecCreate(PETSC_COMM_SELF, &temp_vec); CHKERRQ(ierr);
960 ierr = VecLoad(temp_vec,viewer);CHKERRQ(ierr);
961 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
962
963 // Sanity check: ensure the file size matches the expected vector size.
964 ierr = VecGetSize(field_vec, &expectedSize);CHKERRQ(ierr);
965 ierr = VecGetSize(temp_vec, &loadedSize);CHKERRQ(ierr);
966 if (loadedSize != expectedSize) {
967 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
968 "File %s holds %d entries – expected %d for field '%s'",
969 filename, loadedSize, expectedSize, field_name);
970 }
971
972 // Now, safely copy the data from the temporary vector to the final destination.
973 ierr = VecCopy(temp_vec, field_vec);CHKERRQ(ierr);
974
975 // Clean up the temporary vector.
976 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
977
978 // ---- END MODIFICATION ----
979
980 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
981 /*
982 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
983 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
984
985 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
986 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
987
988 ierr = VecLoad(field_vec,viewer);CHKERRQ(ierr);
989 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
990 */
992 "Loaded <%s> (serial path)\n",filename);
993
995 PetscFunctionReturn(0);
996 }
997
998 /* ======================================================================
999 * 2. PARALLEL JOB
1000 * ==================================================================== */
1001 PetscInt globalSize;
1002 ierr = VecGetSize(field_vec,&globalSize);CHKERRQ(ierr);
1003
1004 DM dm = NULL;
1005 const char *dmtype = NULL;
1006 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1007
1008 /* -------------------- rank-0 : read the sequential file -------------- */
1009 Vec seq_vec = NULL; /* only valid on rank-0 */
1010 const PetscScalar *seqArray = NULL; /* borrowed pointer on rank-0 only */
1011
1012 if(rank==0)
1013 {
1014 PetscViewer viewer;
1015 PetscBool found;
1016
1017 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
1018 if(!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,
1019 "Restart file not found: %s",filename);
1020
1021 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
1022 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
1023 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
1024
1025 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1026 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
1027 ierr = VecLoad(seq_vec,viewer);CHKERRQ(ierr);
1028 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1029
1030 /* size sanity-check */
1031 PetscInt loaded;
1032 ierr = VecGetSize(seq_vec,&loaded);CHKERRQ(ierr);
1033 if(loaded != globalSize)
1034 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
1035 "File %s holds %d entries – expected %d",
1036 filename,loaded,globalSize);
1037
1038 /* borrow array for later Bcast */
1039 ierr = VecGetArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);
1040
1042 "Rank 0 successfully loaded <%s>\n",filename);
1043 }
1044
1045 /* -------------------- Check if this is a DMDA vector ----------------- */
1046 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1047 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1048
1049 if (dmtype && !strcmp(dmtype, DMDA)) {
1050 /* ==================================================================
1051 * DMDA PATH: File is in natural ordering, need to convert to global
1052 * ================================================================== */
1053
1054 /* Create natural vector */
1055 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1056
1057 /* Scatter from rank 0's seq_vec to all ranks' natural vector */
1058 VecScatter scatter;
1059 Vec nat_seq = NULL; /* Sequential natural vector on rank 0 */
1060
1061 ierr = VecScatterCreateToZero(nat, &scatter, &nat_seq); CHKERRQ(ierr);
1062
1063 /* Reverse scatter: from rank 0 to all ranks */
1064 ierr = VecScatterBegin(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
1065 INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
1066 ierr = VecScatterEnd(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
1067 INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
1068
1069 /* Convert natural → global ordering */
1070 ierr = DMDANaturalToGlobalBegin(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
1071 ierr = DMDANaturalToGlobalEnd(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
1072
1073 /* Cleanup */
1074 ierr = VecScatterDestroy(&scatter); CHKERRQ(ierr);
1075 ierr = VecDestroy(&nat_seq); CHKERRQ(ierr);
1076 ierr = VecDestroy(&nat); CHKERRQ(ierr);
1077
1078 } else {
1079 /* ==================================================================
1080 * NON-DMDA PATH: Use broadcast and direct copy (assumes global ordering)
1081 * ================================================================== */
1082
1083 PetscScalar *buffer = NULL;
1084 if (rank == 0) {
1085 buffer = (PetscScalar *)seqArray;
1086 } else {
1087 ierr = PetscMalloc1(globalSize, &buffer); CHKERRQ(ierr);
1088 }
1089
1090 ierr = MPI_Bcast(buffer, (int)globalSize, MPIU_SCALAR, 0, comm); CHKERRQ(ierr);
1091
1092 /* Copy slice based on ownership range */
1093 PetscInt rstart, rend, loc;
1094 PetscScalar *locArray;
1095
1096 ierr = VecGetOwnershipRange(field_vec, &rstart, &rend); CHKERRQ(ierr);
1097 loc = rend - rstart;
1098
1099 ierr = VecGetArray(field_vec, &locArray); CHKERRQ(ierr);
1100 ierr = PetscMemcpy(locArray, buffer + rstart, loc * sizeof(PetscScalar)); CHKERRQ(ierr);
1101 ierr = VecRestoreArray(field_vec, &locArray); CHKERRQ(ierr);
1102
1103 if (rank != 0) {
1104 ierr = PetscFree(buffer); CHKERRQ(ierr);
1105 }
1106 }
1107
1108 /* -------------------- tidy up ---------------------------------------- */
1109 if (rank == 0) {
1110 ierr = VecRestoreArrayRead(seq_vec, &seqArray); CHKERRQ(ierr);
1111 ierr = VecDestroy(&seq_vec); CHKERRQ(ierr);
1112 }
1113
1115 "Loaded <%s> (parallel path)\n",filename);
1116
1118 PetscFunctionReturn(0);
1119}
1120
1121
1122/**
1123 * @brief Internal helper implementation: `ReadSimulationFields()`.
1124 * @details Local to this translation unit.
1125 */
1126PetscErrorCode ReadSimulationFields(UserCtx *user,PetscInt ti)
1127{
1128 PetscErrorCode ierr;
1129
1130 SimCtx *simCtx = user->simCtx;
1131 const char *source_path = NULL;
1132 const char *eulerian_ext = "dat";
1133
1134 if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
1135 source_path = simCtx->pps->source_dir;
1136 if (simCtx->pps->eulerianExt[0] != '\0') {
1137 eulerian_ext = simCtx->pps->eulerianExt;
1138 }
1139 } else if(simCtx->exec_mode == EXEC_MODE_SOLVER){
1140 source_path = simCtx->restart_dir;
1141 } else{
1142 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1143 }
1144
1145 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read simulation fields.\n");
1146
1147 // Set the current I/O directory context
1148 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer), "%s/%s",source_path,simCtx->euler_subdir); CHKERRQ(ierr);
1149 simCtx->current_io_directory = simCtx->_io_context_buffer;
1150
1151 // Read Cartesian velocity field
1152 ierr = ReadFieldData(user, "ufield", user->Ucat, ti, eulerian_ext); CHKERRQ(ierr);
1153
1154 // Read contravariant velocity field
1155 ierr = ReadFieldData(user, "vfield", user->Ucont, ti, eulerian_ext); CHKERRQ(ierr);
1156
1157 // Read pressure field
1158 ierr = ReadFieldData(user, "pfield", user->P, ti, eulerian_ext); CHKERRQ(ierr);
1159
1160 // Read node state field (nvert)
1161 ierr = ReadFieldData(user, "nvfield", user->Nvert, ti, eulerian_ext); CHKERRQ(ierr);
1162
1163 LOG_ALLOW(GLOBAL,LOG_INFO,"Successfully read all mandatory fields. \n");
1164
1165 if(simCtx->np>0){
1166 // Read Particle Count field
1167 if(!user->ParticleCount){
1168 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ParticleCount Vec is NULL but np>0");
1169 }
1170 ierr = ReadOptionalField(user, "ParticleCount", "Particle Count", user->ParticleCount, ti, eulerian_ext); CHKERRQ(ierr);
1171
1172 ierr = ReadOptionalField(user, "psifield", "Scalar Psi Field", user->Psi, ti, eulerian_ext); CHKERRQ(ierr);
1173 }
1174 else{
1175 LOG_ALLOW(GLOBAL, LOG_INFO, "No particles in simulation, skipping Particle fields reading.\n");
1176 }
1177 // Process LES fields if enabled
1178 if (simCtx->les) {
1179 ierr = ReadLESFields(user,ti); CHKERRQ(ierr);
1180 }
1181
1182 // Process RANS fields if enabled
1183 if (simCtx->rans) {
1184 ierr = ReadRANSFields(user,ti); CHKERRQ(ierr);
1185 }
1186
1187 // Process statistical fields if averaging is enabled
1188 if (simCtx->averaging) {
1189 ierr = ReadStatisticalFields(user,ti); CHKERRQ(ierr);
1190 }
1191
1192 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1193
1194 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading simulation fields.\n");
1195
1196 return 0;
1197}
1198
1199/**
1200 * @brief Implementation of \ref ReadStatisticalFields().
1201 * @details Full API contract (arguments, ownership, side effects) is documented with
1202 * the header declaration in `include/io.h`.
1203 * @see ReadStatisticalFields()
1204 */
1205PetscErrorCode ReadStatisticalFields(UserCtx *user,PetscInt ti)
1206{
1207 PetscErrorCode ierr;
1208 const char *eulerian_ext = "dat";
1209 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1210 eulerian_ext = user->simCtx->pps->eulerianExt;
1211 }
1212
1213 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read statistical fields.\n");
1214
1215 ierr = ReadOptionalField(user, "su0", "Velocity Sum", user->Ucat_sum, ti, eulerian_ext); CHKERRQ(ierr);
1216 ierr = ReadOptionalField(user, "su1", "Velocity Cross Sum", user->Ucat_cross_sum, ti, eulerian_ext); CHKERRQ(ierr);
1217 ierr = ReadOptionalField(user, "su2", "Velocity Square Sum",user->Ucat_square_sum, ti, eulerian_ext); CHKERRQ(ierr);
1218 ierr = ReadOptionalField(user, "sp", "Pressure Sum", user->P_sum, ti, eulerian_ext); CHKERRQ(ierr);
1219
1220 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading statistical fields.\n");
1221
1222 return 0;
1223}
1224
1225/**
1226 * @brief Internal helper implementation: `ReadLESFields()`.
1227 * @details Local to this translation unit.
1228 */
1229PetscErrorCode ReadLESFields(UserCtx *user,PetscInt ti)
1230{
1231 PetscErrorCode ierr;
1232 const char *eulerian_ext = "dat";
1233 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1234 eulerian_ext = user->simCtx->pps->eulerianExt;
1235 }
1236
1237 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read LES fields.\n");
1238
1239 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, eulerian_ext); CHKERRQ(ierr);
1240 ierr = ReadOptionalField(user, "cs", "Smagorinsky Constant (Cs)", user->CS, ti, eulerian_ext); CHKERRQ(ierr);
1241
1242 DMGlobalToLocalBegin(user->da, user->CS, INSERT_VALUES, user->lCs);
1243 DMGlobalToLocalEnd(user->da, user->CS, INSERT_VALUES, user->lCs);
1244
1245 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1246 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1247
1248 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading LES fields.\n");
1249
1250 return 0;
1251}
1252
1253/**
1254 * @brief Internal helper implementation: `ReadRANSFields()`.
1255 * @details Local to this translation unit.
1256 */
1257PetscErrorCode ReadRANSFields(UserCtx *user,PetscInt ti)
1258{
1259 PetscErrorCode ierr;
1260 const char *eulerian_ext = "dat";
1261 if (user->simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR && user->simCtx->pps->eulerianExt[0] != '\0') {
1262 eulerian_ext = user->simCtx->pps->eulerianExt;
1263 }
1264
1265 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read RANS fields.\n");
1266
1267 ierr = ReadOptionalField(user, "kfield", "K-Omega RANS", user->K_Omega, ti, eulerian_ext); CHKERRQ(ierr);
1268 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, eulerian_ext); CHKERRQ(ierr);
1269
1270 VecCopy(user->K_Omega, user->K_Omega_o);
1271
1272 DMGlobalToLocalBegin(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1273 DMGlobalToLocalEnd(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega);
1274
1275 DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1276 DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t);
1277
1278 DMGlobalToLocalBegin(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1279 DMGlobalToLocalEnd(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o);
1280
1281 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading RANS fields.\n");
1282
1283 return 0;
1284}
1285
1286
1287/**
1288 * @brief Internal helper implementation: `ReadSwarmField()`.
1289 * @details Local to this translation unit.
1290 */
1291PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1292{
1293 PetscErrorCode ierr;
1294 DM swarm;
1295 Vec fieldVec;
1296
1297 PetscFunctionBegin;
1298
1299 swarm = user->swarm;
1300
1301 LOG_ALLOW(GLOBAL,LOG_DEBUG," ReadSwarmField Begins \n");
1302
1303 /* 2) Create a global vector that references the specified Swarm field. */
1304 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1305
1306 LOG_ALLOW(GLOBAL,LOG_DEBUG," Vector created from Field \n");
1307
1308 /* 3) Use the ReadFieldData() function to read data into fieldVec. */
1309 ierr = ReadFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1310
1311 /* 4) Destroy the global vector reference. */
1312 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1313
1314 PetscFunctionReturn(0);
1315}
1316
1317/**
1318 * @brief Internal helper implementation: `ReadSwarmIntField()`.
1319 * @details Local to this translation unit.
1320 */
PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
{
    PetscErrorCode ierr;
    DM swarm = user->swarm;
    Vec temp_vec;                       // scratch Vec sized to match the swarm field layout
    PetscInt nlocal, nglobal, bs, i;
    const PetscScalar *scalar_array; // Read-only pointer from the temp Vec
    void *field_array_void;          // raw pointer to the swarm field's integer storage


    PetscFunctionBeginUser;

    LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading '%s' via temporary Vec.\n", field_name);

    // Get the properties of the swarm field to determine the expected layout.
    ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
    ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
    // Query only the block size here; the field pointer is taken later, after
    // the scalar file has been loaded (Get/Restore must bracket each access).
    ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
    ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);

    // Create a temporary Vec with the same local/global layout as the swarm
    // field so ReadFieldData()'s size checks and ownership slicing line up.
    ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
    ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
    ierr = VecSetSizes(temp_vec, nlocal * bs, nglobal * bs); CHKERRQ(ierr);
    ierr = VecSetBlockSize(temp_vec, bs); CHKERRQ(ierr);
    ierr = VecSetUp(temp_vec); CHKERRQ(ierr);

    // Populate the temporary Vec from disk; the file stores scalars even for
    // integer-valued fields, hence the conversion loop below.
    ierr = ReadFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);

    // Borrow the local arrays: read-only scalars, writable swarm storage.
    ierr = VecGetArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
    ierr = DMSwarmGetField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);

    // Cast scalars back to the field's integer type, covering all nlocal*bs
    // local entries. "DMSwarm_pid" is stored as PetscInt64; the other integer
    // fields use PetscInt.
    if (strcmp(field_name, "DMSwarm_pid") == 0) {
        PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
        for (i = 0; i < nlocal * bs; i++) {
            int64_array[i] = (PetscInt64)scalar_array[i];
        }
    } else {
        PetscInt *int_array = (PetscInt *)field_array_void;
        for (i = 0; i < nlocal * bs; i++) {
            int_array[i] = (PetscInt)scalar_array[i];
        }
    }

    // Restore access in reverse order of acquisition.
    ierr = DMSwarmRestoreField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);

    // Clean up the scratch Vec.
    ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);

    PetscFunctionReturn(0);
}
1378
1379/**
1380 * @brief Internal helper implementation: `ReadAllSwarmFields()`.
1381 * @details Local to this translation unit.
1382 */
1383PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
1384{
1385 PetscErrorCode ierr;
1386 PetscInt nGlobal;
1387 SimCtx *simCtx = user->simCtx;
1388 const char *source_path = NULL;
1389 const char *particle_ext = "dat";
1390
1391 PetscFunctionBeginUser;
1392 ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
1393 LOG_ALLOW(GLOBAL, LOG_INFO, "Reading DMSwarm fields for timestep %d (swarm size is %d).\n", ti, nGlobal);
1394
1395 if (nGlobal == 0) {
1396 LOG_ALLOW(GLOBAL, LOG_INFO, "Swarm is empty for timestep %d. Nothing to read.\n", ti);
1397 PetscFunctionReturn(0);
1398 }
1399
1400 // First, determine the top-level source directory based on the execution mode.
1401 if (simCtx->exec_mode == EXEC_MODE_SOLVER) {
1402 source_path = simCtx->restart_dir;
1403 } else if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
1404 source_path = simCtx->pps->source_dir;
1405 if (simCtx->pps->particleExt[0] != '\0') {
1406 particle_ext = simCtx->pps->particleExt;
1407 }
1408 } else {
1409 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1410 }
1411
1412 // Set the current I/O directory context
1413 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1414 "%s/%s", source_path, simCtx->particle_subdir); CHKERRQ(ierr);
1415
1416 simCtx->current_io_directory = simCtx->_io_context_buffer;
1417
1418 /* 1) Read positions (REQUIRED) */
1419 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading mandatory position field...\n");
1420 ierr = ReadSwarmField(user, "position", ti, particle_ext);
1421 if (ierr) {
1422 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read MANDATORY 'position' field for step %d. Cannot continue.", ti);
1423 }
1424 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read mandatory position field for step %d.\n", ti);
1425
1426 /* 2) Read all OPTIONAL fields using the helper function. */
1427 /* The helper will print a warning and continue if a file is not found. */
1428 ierr = ReadOptionalSwarmField(user, "velocity", "Velocity", ti, particle_ext); CHKERRQ(ierr);
1429 ierr = ReadOptionalSwarmField(user, "DMSwarm_pid", "Particle ID", ti, particle_ext); CHKERRQ(ierr);
1430 ierr = ReadOptionalSwarmField(user, "DMSwarm_CellID", "Cell ID", ti, particle_ext); CHKERRQ(ierr);
1431 ierr = ReadOptionalSwarmField(user, "weight", "Particle Weight", ti, particle_ext); CHKERRQ(ierr);
1432 ierr = ReadOptionalSwarmField(user, "Psi", "Scalar Psi", ti, particle_ext); CHKERRQ(ierr);
1433 ierr = ReadOptionalSwarmField(user, "DMSwarm_location_status", "Migration Status", ti, particle_ext); CHKERRQ(ierr);
1434
1435 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1436
1437 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading DMSwarm fields for timestep %d.\n", ti);
1438 PetscFunctionReturn(0);
1439}
1440
1441
1442#undef __FUNCT__
1443#define __FUNCT__ "WriteFieldData"
1444/**
1445 * @brief Internal helper implementation: `WriteFieldData()`.
1446 * @details Local to this translation unit.
1447 */
1448PetscErrorCode WriteFieldData(UserCtx *user,
1449 const char *field_name,
1450 Vec field_vec,
1451 PetscInt ti,
1452 const char *ext)
1453{
1454 PetscErrorCode ierr;
1455 MPI_Comm comm;
1456 PetscMPIInt rank, size;
1457
1458 const PetscInt placeholder_int = 0; /* keep legacy name */
1459 char filename[PETSC_MAX_PATH_LEN];
1460 SimCtx *simCtx=user->simCtx;
1461
1462 PetscFunctionBeginUser;
1464
1465 if(!simCtx->current_io_directory){
1466 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling WriteFieldData().");
1467 }
1468
1469 /* ------------------------------------------------------------ */
1470 /* Basic communicator information */
1471 /* ------------------------------------------------------------ */
1472 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1473 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1474 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1475
1476 ierr = PetscSNPrintf(filename,sizeof(filename),
1477 "%s/%s%05" PetscInt_FMT "_%d.%s",
1478 simCtx->current_io_directory,field_name,ti,placeholder_int,ext);CHKERRQ(ierr);
1479
1481 " Preparing to write <%s> on rank %d/%d\n",
1482 filename,rank,size);
1483
1484 /* ------------------------------------------------------------ */
1485 /* 1. Serial path */
1486 /* ------------------------------------------------------------ */
1487 if (size == 1) {
1488 PetscViewer viewer;
1489
1490 ierr = PetscViewerBinaryOpen(comm,filename,
1491 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1492 ierr = VecView(field_vec,viewer);CHKERRQ(ierr);
1493 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1494
1496 " Wrote <%s> (serial path)\n",filename);
1497 PetscFunctionReturn(0);
1498 }
1499
1500 /* ------------------------------------------------------------ */
1501 /* 2. Parallel path */
1502 /* ------------------------------------------------------------ */
1503 VecScatter scatter;
1504 Vec seq_vec=NULL; /* created by PETSc, lives only on rank 0 */
1505 DM dm = NULL;
1506 const char *dmtype = NULL;
1507 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1508
1509 /* 2.1 Check if vector has DMDA and convert to natural ordering if needed */
1510 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1511 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1512
1513 if (dmtype && !strcmp(dmtype, DMDA)) {
1514 /* DMDA path: convert to natural ordering first */
1515 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1516 ierr = DMDAGlobalToNaturalBegin(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1517 ierr = DMDAGlobalToNaturalEnd(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1518
1519 /* Gather the natural-ordered vector */
1520 ierr = VecScatterCreateToZero(nat, &scatter, &seq_vec); CHKERRQ(ierr);
1521 } else {
1522 /* Non-DMDA path: direct gather in global ordering */
1523 ierr = VecScatterCreateToZero(field_vec, &scatter, &seq_vec); CHKERRQ(ierr);
1524 }
1525
1526 /* 2.2 Gather distributed → sequential (on rank 0) */
1527 ierr = VecScatterBegin(scatter, (nat ? nat : field_vec), seq_vec,
1528 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1529 ierr = VecScatterEnd(scatter, (nat ? nat : field_vec), seq_vec,
1530 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1531
1532 /* 2.3 Rank 0 writes the file */
1533 if (rank == 0) {
1534 PetscViewer viewer;
1535
1536 /* (optional) value diagnostics */
1537 PetscReal vmin,vmax;
1538 ierr = VecMin(seq_vec,NULL,&vmin);CHKERRQ(ierr);
1539 ierr = VecMax(seq_vec,NULL,&vmax);CHKERRQ(ierr);
1541 " <%s> range = [%.4e … %.4e]\n",
1542 field_name,(double)vmin,(double)vmax);
1543
1544 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1545 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1546 ierr = VecView(seq_vec,viewer);CHKERRQ(ierr);
1547 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1548
1550 " Wrote <%s> (parallel path)\n",filename);
1551 }
1552
1553 /* 2.4 Cleanup */
1554 ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
1555 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
1556 if (nat) { ierr = VecDestroy(&nat); CHKERRQ(ierr); }
1557
1559 PetscFunctionReturn(0);
1560}
1561
1562/**
1563 * @brief Implementation of \ref WriteSimulationFields().
1564 * @details Full API contract (arguments, ownership, side effects) is documented with
1565 * the header declaration in `include/io.h`.
1566 * @see WriteSimulationFields()
1567 */
1568PetscErrorCode WriteSimulationFields(UserCtx *user)
1569{
1570 PetscErrorCode ierr;
1571
1572 SimCtx *simCtx = user->simCtx;
1573
1574 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write simulation fields.\n");
1575
1576 // Set the current IO directory
1577 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1578 "%s/%s", simCtx->output_dir, simCtx->euler_subdir);CHKERRQ(ierr);
1579 simCtx->current_io_directory = simCtx->_io_context_buffer;
1580
1581 // Write contravariant velocity field
1582 ierr = WriteFieldData(user, "vfield", user->Ucont, simCtx->step, "dat"); CHKERRQ(ierr);
1583
1584 // Write Cartesian velocity field
1585 ierr = WriteFieldData(user, "ufield", user->Ucat, simCtx->step, "dat"); CHKERRQ(ierr);
1586
1587 // Write pressure field
1588 ierr = WriteFieldData(user, "pfield", user->P, simCtx->step, "dat"); CHKERRQ(ierr);
1589
1590 // Write node state field (nvert)
1591 ierr = WriteFieldData(user, "nvfield", user->Nvert, simCtx->step, "dat"); CHKERRQ(ierr);
1592
1593 // Write ParticleCountPerCell if enabled.
1594 if(simCtx->np>0){
1595 ierr = WriteFieldData(user, "ParticleCount",user->ParticleCount,simCtx->step,"dat"); CHKERRQ(ierr);
1596 ierr = WriteFieldData(user, "psifield", user->Psi, simCtx->step, "dat"); CHKERRQ(ierr);
1597 }
1598
1599 // Write LES fields if enabled
1600 if (simCtx->les) {
1601 ierr = WriteLESFields(user); CHKERRQ(ierr);
1602 }
1603
1604 // Write RANS fields if enabled
1605 if (simCtx->rans) {
1606 ierr = WriteRANSFields(user); CHKERRQ(ierr);
1607 }
1608
1609 // Write statistical fields if averaging is enabled
1610 if (simCtx->averaging) {
1611 ierr = WriteStatisticalFields(user); CHKERRQ(ierr);
1612 }
1613
1614 simCtx->current_io_directory = NULL;
1615
1616 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing simulation fields.\n");
1617
1618 return 0;
1619}
1620
1621/**
1622 * @brief Implementation of \ref WriteStatisticalFields().
1623 * @details Full API contract (arguments, ownership, side effects) is documented with
1624 * the header declaration in `include/io.h`.
1625 * @see WriteStatisticalFields()
1626 */
1627PetscErrorCode WriteStatisticalFields(UserCtx *user)
1628{
1629 PetscErrorCode ierr;
1630
1631 SimCtx *simCtx = user->simCtx;
1632
1633 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write statistical fields.\n");
1634
1635 ierr = WriteFieldData(user, "su0", user->Ucat_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1636 ierr = WriteFieldData(user, "su1", user->Ucat_cross_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1637 ierr = WriteFieldData(user, "su2", user->Ucat_square_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1638 ierr = WriteFieldData(user, "sp", user->P_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1639
1640 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing statistical fields.\n");
1641
1642 return 0;
1643}
1644
1645/**
1646 * @brief Internal helper implementation: `WriteLESFields()`.
1647 * @details Local to this translation unit.
1648 */
1649PetscErrorCode WriteLESFields(UserCtx *user)
1650{
1651 PetscErrorCode ierr;
1652
1653 SimCtx *simCtx = user->simCtx;
1654
1655 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write LES fields.\n");
1656
1657
1658 DMLocalToGlobalBegin(user->da, user->lCs, INSERT_VALUES, user->CS);
1659 DMLocalToGlobalEnd(user->da, user->lCs, INSERT_VALUES, user->CS);
1660
1661 DMLocalToGlobalBegin(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1662 DMLocalToGlobalEnd(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t);
1663
1664 ierr = WriteFieldData(user, "Nu_t", user->Nu_t, simCtx->step, "dat"); CHKERRQ(ierr);
1665 ierr = WriteFieldData(user, "cs", user->CS, simCtx->step, "dat"); CHKERRQ(ierr);
1666
1667
1668 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing LES fields.\n");
1669
1670 return 0;
1671}
1672
1673/**
1674 * @brief Implementation of \ref WriteRANSFields().
1675 * @details Full API contract (arguments, ownership, side effects) is documented with
1676 * the header declaration in `include/io.h`.
1677 * @see WriteRANSFields()
1678 */
1679PetscErrorCode WriteRANSFields(UserCtx *user)
1680{
1681 PetscErrorCode ierr;
1682
1683 SimCtx *simCtx = user->simCtx;
1684
1685 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write RANS fields.\n");
1686
1687 ierr = WriteFieldData(user, "kfield", user->K_Omega, simCtx->step, "dat"); CHKERRQ(ierr);
1688
1689 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing RANS fields.\n");
1690
1691 return 0;
1692}
1693
1694/**
1695 * @brief Implementation of \ref WriteSwarmField().
1696 * @details Full API contract (arguments, ownership, side effects) is documented with
1697 * the header declaration in `include/io.h`.
1698 * @see WriteSwarmField()
1699 */
PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
{
    PetscErrorCode ierr;
    Vec fieldVec;   /* transient global "view" of the swarm field; never owned */
    DM swarm;

    PetscFunctionBeginUser; /* PETSc macro indicating start of function */

    /*
     * 1) Retrieve the PetscSwarm from the user context.
     *    Ensure user->swarm is initialized and not NULL.
     */
    swarm = user->swarm;

    /*
     * 2) Create a global vector from the specified swarm field.
     *    This function is available in PETSc 3.14.4.
     *    It provides a read/write "view" of the swarm field as a global Vec.
     */
    /* NOTE(review): the LOG_ALLOW(...) opener for this debug message was lost
     * from this listing; the string arguments below belong to it — confirm
     * against the repository. */
              "Attempting to create global vector from field: %s\n",
              field_name);
    ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);

    /*
     * 3) Use your existing WriteFieldData() to write the global vector to a file.
     *    The field name, time index, and extension are passed along for naming.
     */
              "Calling WriteFieldData for field: %s\n",
              field_name);
    ierr = WriteFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);

    /*
     * 4) Destroy the global vector once the data is successfully written.
     *    This step is crucial for avoiding memory leaks.
     *    DMSwarmDestroyGlobalVectorFromField() is also available in PETSc 3.14.4.
     *    It must be paired with the Create call above — the Vec is a view, not a copy.
     */
              "Destroying the global vector for field: %s\n",
              field_name);
    ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);

    /* Log and return success. */
              "Successfully wrote swarm data for field: %s\n",
              field_name);

    PetscFunctionReturn(0); /* PETSc macro indicating end of function */
}
1750
1751/**
1752 * @brief Internal helper implementation: `WriteSwarmIntField()`.
1753 * @details Local to this translation unit.
1754 */
1755PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1756{
1757 PetscErrorCode ierr;
1758 DM swarm = user->swarm;
1759 Vec temp_vec; // Temporary Vec to hold casted data
1760 PetscInt nlocal, nglobal,bs,i;
1761 void *field_array_void;
1762 PetscScalar *scalar_array; // Pointer to the temporary Vec's scalar data
1763
1764 PetscFunctionBeginUser;
1765
1766 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Casting '%s' to Vec for writing.\n", field_name);
1767
1768 // Get the swarm field properties
1769 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1770 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1771 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
1772
1773 // Create Temporary parallel Vec wit the CORRECT layout
1774 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1775 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1776 ierr = VecSetSizes(temp_vec, nlocal*bs, nglobal*bs); CHKERRQ(ierr);
1777 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1778
1779 // Defining Vector field to mandatory field 'position'
1780 DMSwarmVectorDefineField(swarm,"position");
1781
1782 ierr = VecGetArray(temp_vec, &scalar_array); CHKERRQ(ierr);
1783
1784 if(strcasecmp(field_name,"DMSwarm_pid") == 0){
1785 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1786 // Perform the cast from PetscInt64 to PetscScalar
1787 for (i = 0; i < nlocal*bs; i++) {
1788 scalar_array[i] = (PetscScalar)int64_array[i];
1789 }
1790 }else{
1791 PetscInt *int_array = (PetscInt *)field_array_void;
1792 //Perform the cast from PetscInt to PetscScalar
1793 for (i = 0; i < nlocal*bs; i++) {
1794 scalar_array[i] = (PetscScalar)int_array[i];
1795 }
1796 }
1797
1798 // Restore access to both arrays
1799 ierr = VecRestoreArray(temp_vec, &scalar_array); CHKERRQ(ierr);
1800 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
1801
1802 // Call your existing writer with the temporary, populated Vec
1803 ierr = WriteFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1804
1805 // Clean up
1806 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1807
1808 PetscFunctionReturn(0);
1809}
1810
1811/**
1812 * @brief Internal helper implementation: `WriteAllSwarmFields()`.
1813 * @details Local to this translation unit.
1814 */
1815PetscErrorCode WriteAllSwarmFields(UserCtx *user)
1816{
1817 PetscErrorCode ierr;
1818 SimCtx *simCtx = user->simCtx;
1819
1820 PetscFunctionBeginUser;
1821
1822 // If no swarm is configured or there are no particles, do nothing and return.
1823 if (!user->swarm || simCtx->np <= 0) {
1824 PetscFunctionReturn(0);
1825 }
1826
1827 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write swarm fields.\n");
1828
1829 // Ensure the current IO directory is set
1830 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1831 "%s/%s", simCtx->output_dir, simCtx->particle_subdir);CHKERRQ(ierr);
1832 simCtx->current_io_directory = simCtx->_io_context_buffer;
1833
1834 // Write particle position field
1835 ierr = WriteSwarmField(user, "position", simCtx->step, "dat"); CHKERRQ(ierr);
1836
1837 // Write particle velocity field
1838 ierr = WriteSwarmField(user, "velocity", simCtx->step, "dat"); CHKERRQ(ierr);
1839
1840 // Write particle weight field
1841 ierr = WriteSwarmField(user, "weight", simCtx->step, "dat"); CHKERRQ(ierr);
1842
1843 // Write custom particle field "Psi"
1844 ierr = WriteSwarmField(user, "Psi", simCtx->step, "dat"); CHKERRQ(ierr);
1845
1846 // Integer fields require special handling
1847
1848 // Write the background mesh cell ID for each particle
1849 ierr = WriteSwarmIntField(user, "DMSwarm_CellID", simCtx->step, "dat"); CHKERRQ(ierr);
1850
1851 // Write the particle location status (e.g., inside or outside the domain)
1852 ierr = WriteSwarmIntField(user, "DMSwarm_location_status", simCtx->step, "dat"); CHKERRQ(ierr);
1853
1854 // Write the unique particle ID
1855 ierr = WriteSwarmIntField(user, "DMSwarm_pid", simCtx->step, "dat"); CHKERRQ(ierr);
1856
1857 simCtx->current_io_directory = NULL;
1858
1859 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing swarm fields.\n");
1860
1861 PetscFunctionReturn(0);
1862}
1863
1864/**
1865 * @brief Internal helper implementation: `VecToArrayOnRank0()`.
1866 * @details Local to this translation unit.
1867 */
PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
{
    PetscErrorCode ierr;
    MPI_Comm comm;
    PetscMPIInt rank;
    PetscInt globalSize;
    DM dm = NULL;
    const char *dmtype = NULL;

    /* For DMDA path: natural-ordering intermediate and its gather machinery. */
    Vec nat = NULL, seqNat = NULL;
    VecScatter scatNat = NULL;
    const PetscScalar *nar = NULL;
    PetscScalar *buf = NULL;

    /* For generic (no DM) path */
    Vec seq = NULL;
    VecScatter scat = NULL;
    const PetscScalar *sar = NULL;

    PetscFunctionBeginUser;

    /* N is set on every rank; arrayOut is non-NULL only on rank 0.
     * Ownership: *arrayOut is allocated with PetscMalloc1 — the caller on
     * rank 0 is responsible for freeing it with PetscFree. */
    ierr = PetscObjectGetComm((PetscObject)inVec, &comm); CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
    ierr = VecGetSize(inVec, &globalSize); CHKERRQ(ierr);
    *N = globalSize;
    *arrayOut = NULL;

    ierr = VecGetDM(inVec, &dm); CHKERRQ(ierr);
    if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }

    if (dmtype && !strcmp(dmtype, DMDA)) {
        /* --- DMDA path: go to NATURAL ordering, then gather to rank 0 ---
         * Natural ordering makes the gathered array index like (i,j,k) in
         * lexicographic grid order, independent of the parallel decomposition. */
        ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
        ierr = DMDAGlobalToNaturalBegin(dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
        ierr = DMDAGlobalToNaturalEnd  (dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);

        ierr = VecScatterCreateToZero(nat, &scatNat, &seqNat); CHKERRQ(ierr);
        ierr = VecScatterBegin(scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
        ierr = VecScatterEnd  (scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);

        if (rank == 0) {
            PetscInt nseq;
            /* Copy out of the sequential Vec so the Vec can be destroyed below. */
            ierr = VecGetLocalSize(seqNat, &nseq); CHKERRQ(ierr);
            ierr = VecGetArrayRead(seqNat, &nar); CHKERRQ(ierr);

            ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
            ierr = PetscMemcpy(buf, nar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);

            ierr = VecRestoreArrayRead(seqNat, &nar); CHKERRQ(ierr);
            *arrayOut = (double*)buf; /* hand back as double* to match the helper signature */
        }

        ierr = VecScatterDestroy(&scatNat); CHKERRQ(ierr);
        ierr = VecDestroy(&seqNat); CHKERRQ(ierr);
        ierr = VecDestroy(&nat); CHKERRQ(ierr);
    } else {
        /* --- No DM attached: plain gather in Vec’s global (parallel) layout order --- */
        ierr = VecScatterCreateToZero(inVec, &scat, &seq); CHKERRQ(ierr);
        ierr = VecScatterBegin(scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
        ierr = VecScatterEnd  (scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);

        if (rank == 0) {
            PetscInt nseq;
            ierr = VecGetLocalSize(seq, &nseq); CHKERRQ(ierr);
            ierr = VecGetArrayRead(seq, &sar); CHKERRQ(ierr);

            ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
            ierr = PetscMemcpy(buf, sar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);

            ierr = VecRestoreArrayRead(seq, &sar); CHKERRQ(ierr);
            *arrayOut = (double*)buf;
        }

        ierr = VecScatterDestroy(&scat); CHKERRQ(ierr);
        ierr = VecDestroy(&seq); CHKERRQ(ierr);
    }

    PetscFunctionReturn(0);
}
1948
1949/**
1950 * @brief Internal helper implementation: `SwarmFieldToArrayOnRank0()`.
1951 * @details Local to this translation unit.
1952 */
1953PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
1954{
1955 PetscErrorCode ierr;
1956 PetscMPIInt rank, size;
1957 PetscInt nlocal, nglobal, bs;
1958 void *local_array_void;
1959 size_t element_size = 0;
1960
1961 PetscFunctionBeginUser;
1962
1963 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1964 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
1965
1966 // All ranks get swarm properties to determine send/receive counts
1967 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1968 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1969 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
1970
1971 // Determine the size of one element of the field's data type
1972 if (strcasecmp(field_name, "DMSwarm_pid") == 0) {
1973 element_size = sizeof(PetscInt64);
1974 } else if (strcasecmp(field_name, "DMSwarm_CellID") == 0 || strcasecmp(field_name, "DMSwarm_location_status") == 0) {
1975 element_size = sizeof(PetscInt);
1976 } else {
1977 element_size = sizeof(PetscScalar);
1978 }
1979
1980 if (rank == 0) {
1981 *n_total_particles = nglobal;
1982 *n_components = bs;
1983 *gathered_array = NULL; // Initialize output
1984 }
1985
1986 if (size == 1) { // Serial case is a simple copy
1987 if (rank == 0) {
1988 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
1989 ierr = PetscMemcpy(*gathered_array, local_array_void, nglobal * bs * element_size); CHKERRQ(ierr);
1990 }
1991 } else { // Parallel case: use MPI_Gatherv
1992 PetscInt *recvcounts = NULL, *displs = NULL;
1993 if (rank == 0) {
1994 ierr = PetscMalloc1(size, &recvcounts); CHKERRQ(ierr);
1995 ierr = PetscMalloc1(size, &displs); CHKERRQ(ierr);
1996 }
1997 PetscInt sendcount = nlocal * bs;
1998
1999 // Gather the number of elements (not bytes) from each rank
2000 ierr = MPI_Gather(&sendcount, 1, MPIU_INT, recvcounts, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2001
2002 if (rank == 0) {
2003 displs[0] = 0;
2004 // Convert counts and calculate displacements in terms of BYTES
2005 for (PetscMPIInt i = 0; i < size; i++) recvcounts[i] *= element_size;
2006 for (PetscMPIInt i = 1; i < size; i++) displs[i] = displs[i-1] + recvcounts[i-1];
2007
2008 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2009 }
2010
2011 // Use Gatherv with MPI_BYTE to handle any data type generically
2012 ierr = MPI_Gatherv(local_array_void, nlocal * bs * element_size, MPI_BYTE,
2013 *gathered_array, recvcounts, displs, MPI_BYTE,
2014 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2015
2016 if (rank == 0) {
2017 ierr = PetscFree(recvcounts); CHKERRQ(ierr);
2018 ierr = PetscFree(displs); CHKERRQ(ierr);
2019 }
2020 }
2021
2022 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2023
2024 PetscFunctionReturn(0);
2025}
2026
2027/**
2028 * @brief Internal helper implementation: `DisplayBanner()`.
2029 * @details Local to this translation unit.
2030 */
PetscErrorCode DisplayBanner(SimCtx *simCtx) // bboxlist is only valid on rank 0
{
    PetscErrorCode ierr;
    PetscMPIInt rank;
    Cmpnts global_min_coords, global_max_coords;
    PetscReal StartTime;
    PetscInt StartStep,StepsToRun,total_num_particles;
    PetscMPIInt num_mpi_procs;

    /* Pull everything the banner needs out of the simulation context;
     * UserCtx comes from the finest multigrid level. */
    // SimCtx *simCtx = user->simCtx;
    UserCtx *user = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
    num_mpi_procs = simCtx->size;
    StartTime = simCtx->StartTime;
    StartStep = simCtx->StartStep;
    StepsToRun = simCtx->StepsToRun;
    total_num_particles = simCtx->np;
    BoundingBox *bboxlist_on_rank0 = simCtx->bboxlist;


    PetscFunctionBeginUser;

    if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "DisplayBanner - UserCtx pointer is NULL.");
    /* Fallback bounds: rank 0's own bbox, possibly replaced below. */
    global_min_coords = user->bbox.min_coords;
    global_max_coords = user->bbox.max_coords;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);

    /* Only rank 0 prints; everything below is console output plus the
     * reduction of per-rank bounding boxes into global domain bounds. */
    if (rank == 0) {
        // If global_domain_bbox is not pre-populated in UserCtx, compute it here from bboxlist_on_rank0
        // This assumes bboxlist_on_rank0 is valid and contains all local bounding boxes on rank 0.
        if (bboxlist_on_rank0 && num_mpi_procs > 0) {
            global_min_coords = bboxlist_on_rank0[0].min_coords;
            global_max_coords = bboxlist_on_rank0[0].max_coords;
            for (PetscMPIInt p = 1; p < num_mpi_procs; ++p) {
                global_min_coords.x = PetscMin(global_min_coords.x, bboxlist_on_rank0[p].min_coords.x);
                global_min_coords.y = PetscMin(global_min_coords.y, bboxlist_on_rank0[p].min_coords.y);
                global_min_coords.z = PetscMin(global_min_coords.z, bboxlist_on_rank0[p].min_coords.z);
                global_max_coords.x = PetscMax(global_max_coords.x, bboxlist_on_rank0[p].max_coords.x);
                global_max_coords.y = PetscMax(global_max_coords.y, bboxlist_on_rank0[p].max_coords.y);
                global_max_coords.z = PetscMax(global_max_coords.z, bboxlist_on_rank0[p].max_coords.z);
            }
            // Optionally store this in user->global_domain_bbox if it's useful elsewhere
            // user->global_domain_bbox.min_coords = global_min_coords;
            // user->global_domain_bbox.max_coords = global_max_coords;
        } else {
            // Fallback or warning if bboxlist is not available for global calculation
            LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank 0) - bboxlist not provided or num_mpi_procs <=0; using user->bbox for domain bounds.\n");
            // global_min_coords = user->bbox.min_coords; // Use local bbox of rank 0 as fallback
            // global_max_coords = user->bbox.max_coords;
        }

        ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, "                          CASE SUMMARY                       \n"); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Grid Points              : %d X %d X %d\n", user->IM, user->JM, user->KM); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Cells                    : %d X %d X %d\n", user->IM - 1, user->JM - 1, user->KM - 1); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (X) : %.6f to %.6f\n", (double)global_min_coords.x, (double)global_max_coords.x); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Y) : %.6f to %.6f\n", (double)global_min_coords.y, (double)global_max_coords.y); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Z) : %.6f to %.6f\n", (double)global_min_coords.z, (double)global_max_coords.z); CHKERRQ(ierr);
        /* Boundary-condition table is only meaningful when an Eulerian field
         * is loaded or solved. */
        if(strcmp(simCtx->eulerianSource,"load")==0 || strcmp(simCtx->eulerianSource,"solve")==0){
        ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------- Boundary Conditions --------------------\n"); CHKERRQ(ierr);
        const int face_name_width = 17; // Adjusted for longer names (Zeta,Eta,Xi)
        for (PetscInt i_face = 0; i_face < 6; ++i_face) {
            BCFace current_face = (BCFace)i_face;
            // The BCFaceToString will now return the Xi, Eta, Zeta versions
            const char* face_str = BCFaceToString(current_face);
            const char* bc_type_str = BCTypeToString(user->boundary_faces[current_face].mathematical_type);
            const char* bc_handler_type_str = BCHandlerTypeToString(user->boundary_faces[current_face].handler_type);
            if(user->boundary_faces[current_face].mathematical_type == INLET){
                Cmpnts inlet_velocity = {0.0,0.0,0.0};
                PetscBool found;
                ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vx",&inlet_velocity.x,&found); CHKERRQ(ierr);
                ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vy",&inlet_velocity.y,&found); CHKERRQ(ierr);
                ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vz",&inlet_velocity.z,&found); CHKERRQ(ierr);
                ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f,%.4f,%.4f]\n",
                                   face_name_width, face_str, bc_type_str, bc_handler_type_str,inlet_velocity.x,inlet_velocity.y,inlet_velocity.z); CHKERRQ(ierr);
            } else if(user->boundary_faces[current_face].handler_type == BC_HANDLER_INLET_PARABOLIC){
                PetscReal v_max = 0.0;
                PetscBool found;
                ierr = GetBCParamReal(user->boundary_faces[current_face].params,"v_max",&v_max,&found); CHKERRQ(ierr);
                ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - v_max=%.4f\n",
                                   face_name_width, face_str, bc_type_str, bc_handler_type_str, v_max); CHKERRQ(ierr);
            }
            /* NOTE(review): an `else if (...) {` header for the flux branch
             * below is missing from this listing — confirm against the
             * repository before editing this region. */
                PetscReal flux;
                PetscBool trimflag,foundflux,foundtrimflag;
                ierr = GetBCParamReal(user->boundary_faces[current_face].params,"target_flux",&flux,&foundflux); CHKERRQ(ierr);
                ierr = GetBCParamBool(user->boundary_faces[current_face].params,"apply_trim",&trimflag,&foundtrimflag); CHKERRQ(ierr);
                ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f] - %s\n",
                                   face_name_width, face_str, bc_type_str, bc_handler_type_str,flux,trimflag?"Trim Ucont":"No Trim"); CHKERRQ(ierr);
            } else{
                ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s\n",
                                   face_name_width, face_str, bc_type_str,bc_handler_type_str); CHKERRQ(ierr);
            }
        }
        }
        ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------------------------------------------------\n"); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Run Mode                 : %s\n", simCtx->OnlySetup ? "SETUP ONLY" : "Full Simulation"); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Start Time               : %.4f\n", (double)StartTime); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Timestep Size            : %.4f\n", (double)simCtx->dt); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Starting Step            : %d\n", StartStep); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Total Steps to Run       : %d\n", StepsToRun); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_SELF, " Ending Step              : %d\n", StartStep + StepsToRun); CHKERRQ(ierr);
        if (simCtx->tiout > 0) {
            ierr = PetscPrintf(PETSC_COMM_SELF, " Field/Restart Cadence    : every %d step(s)\n", simCtx->tiout); CHKERRQ(ierr);
        } else {
            ierr = PetscPrintf(PETSC_COMM_SELF, " Field/Restart Cadence    : DISABLED\n"); CHKERRQ(ierr);
        }
        ierr = PetscPrintf(PETSC_COMM_SELF, " Immersed Boundary        : %s\n", simCtx->immersed ? "ENABLED" : "DISABLED"); CHKERRQ(ierr);
        if (simCtx->walltimeGuardEnabled) {
            ierr = PetscPrintf(
                PETSC_COMM_SELF,
                " Runtime Walltime Guard  : %s (warmup=%d, multiplier=%.2f, min=%.1f s, alpha=%.2f)\n",
                simCtx->walltimeGuardActive ? "ENABLED" : "CONFIGURED BUT INACTIVE",
                /* NOTE(review): the warmup argument line appears missing from
                 * this listing (format expects warmup=%d) — confirm. */
                (double)simCtx->walltimeGuardMultiplier,
                (double)simCtx->walltimeGuardMinSeconds,
                (double)simCtx->walltimeGuardEstimatorAlpha
            ); CHKERRQ(ierr);
        } else {
            ierr = PetscPrintf(PETSC_COMM_SELF, " Runtime Walltime Guard  : DISABLED\n"); CHKERRQ(ierr);
        }
        ierr = PetscPrintf(PETSC_COMM_SELF, " Number of MPI Processes  : %d\n", num_mpi_procs); CHKERRQ(ierr);
        ierr = PetscPrintf(PETSC_COMM_WORLD," Number of Particles      : %d\n", total_num_particles); CHKERRQ(ierr);
        /* Particle-specific settings, shown only when particles exist. */
        if (simCtx->np > 0) {
            const char *particle_init_str = ParticleInitializationToString(simCtx->ParticleInitialization);

            if (simCtx->particleConsoleOutputFreq > 0) {
                ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Console Cadence : every %d step(s)\n", simCtx->particleConsoleOutputFreq); CHKERRQ(ierr);
            } else {
                ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Console Cadence : DISABLED\n"); CHKERRQ(ierr);
            }
            ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Log Row Sampling : every %d particle(s)\n", simCtx->LoggingFrequency); CHKERRQ(ierr);
            if (simCtx->StartStep > 0) {
                ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Restart Mode    : %s\n", simCtx->particleRestartMode); CHKERRQ(ierr);
            }
            ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Initialization Mode: %s\n", particle_init_str); CHKERRQ(ierr);
            ierr = PetscPrintf(PETSC_COMM_SELF, " Interpolation Method     : %s\n",
                               simCtx->interpolationMethod == INTERP_TRILINEAR ? "Trilinear (direct cell-center)" : "CornerAveraged (legacy)"); CHKERRQ(ierr);
            if (user->inletFaceDefined) {
                ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : %s (Enum Val: %d)\n", BCFaceToString(user->identifiedInletBCFace), user->identifiedInletBCFace); CHKERRQ(ierr);
            } else {
                ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : --- (No INLET face identified)\n"); CHKERRQ(ierr);
            }
        }
    }
    /* Solver / field-initialization summary (still rank-0-only for the
     * PETSC_COMM_SELF prints; PETSC_COMM_WORLD prints appear once globally). */
    if(strcmp(simCtx->eulerianSource,"solve")==0 || strcmp(simCtx->eulerianSource,"load")==0){
        const char* field_init_str = FieldInitializationToString(simCtx->FieldInitialization);
        ierr = PetscPrintf(PETSC_COMM_WORLD," Reynolds Number          : %le\n", simCtx->ren); CHKERRQ(ierr);
        //ierr = PetscPrintf(PETSC_COMM_WORLD," Von-Neumann Number       : %le\n", simCtx->vnn); CHKERRQ(ierr);
        if(strcmp(simCtx->eulerianSource,"solve")==0){
            //ierr = PetscPrintf(PETSC_COMM_WORLD," Stanton Number           : %le\n", simCtx->st); CHKERRQ(ierr);
            ierr = PetscPrintf(PETSC_COMM_WORLD," Momentum Equation Solver : %s\n", MomentumSolverTypeToString(simCtx->mom_solver_type)); CHKERRQ(ierr);
            ierr = PetscPrintf(PETSC_COMM_WORLD," Initial Pseudo-CFL       : %le\n", simCtx->pseudo_cfl); CHKERRQ(ierr);
            ierr = PetscPrintf(PETSC_COMM_WORLD," Large Eddy Simulation Model : %s\n", LESModelToString(simCtx->les)); CHKERRQ(ierr);
        }
        ierr = PetscPrintf(PETSC_COMM_SELF, " Field Initialization Mode : %s\n", field_init_str); CHKERRQ(ierr);
        if (simCtx->FieldInitialization == 1) {
            ierr = PetscPrintf(PETSC_COMM_SELF, " Constant Velocity        : x - %.4f, y - %.4f, z - %.4f \n", (double)simCtx->InitialConstantContra.x,(double)simCtx->InitialConstantContra.y,(double)simCtx->InitialConstantContra.z ); CHKERRQ(ierr);
        }
    } else if(strcmp(simCtx->eulerianSource,"analytical")==0){
        ierr = PetscPrintf(PETSC_COMM_WORLD," Analytical Solution Type : %s\n", simCtx->AnalyticalSolutionType); CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
}
2202
2203#undef __FUNCT__
2204#define __FUNCT__ "ParsePostProcessingSettings"
2205/**
2206 * @brief Internal helper implementation: `ParsePostProcessingSettings()`.
2207 * @details Local to this translation unit.
2208 */
2210{
2211 FILE *file;
2212 char line[1024];
2213 PetscBool startTimeSet, endTimeSet, timeStepSet;
2214
2215 PetscFunctionBeginUser;
2217
2218 if (!simCtx || !simCtx->pps) {
2219 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "SimCtx or its pps member is NULL in ParsePostProcessingSettings.");
2220 }
2221
2222 char *configFile = simCtx->PostprocessingControlFile;
2223 PostProcessParams *pps = simCtx->pps;
2224
2225
2226 // --- 1. Set Sane Defaults First ---
2227 pps->startTime = 0;
2228 pps->endTime = 0;
2229 pps->timeStep = 1;
2230 pps->outputParticles = PETSC_FALSE;
2231 pps->particle_output_freq = simCtx->LoggingFrequency; // Default to logging frequency;
2232 strcpy(pps->process_pipeline, "");
2233 strcpy(pps->output_fields_instantaneous, "Ucat,P");
2234 strcpy(pps->output_fields_averaged, "");
2235 strcpy(pps->output_prefix, "Field");
2236 strcpy(pps->particle_output_prefix,"Particle");
2237 strcpy(pps->particle_fields,"velocity,CellID,weight,pid");
2238 strcpy(pps->particle_pipeline,"");
2239 strncpy(pps->statistics_pipeline, "", MAX_PIPELINE_LENGTH - 1);
2240 strncpy(pps->statistics_output_prefix, "Stats", MAX_FILENAME_LENGTH - 1);
2241 strcpy(pps->particleExt,"dat"); // The input file format for particles.
2242 strcpy(pps->eulerianExt,"dat"); // The input file format for Eulerian fields.
2243 pps->reference[0] = pps->reference[1] = pps->reference[2] = 1;
2244 strncpy(pps->source_dir, simCtx->output_dir, sizeof(pps->source_dir) - 1);
2245 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0'; // Ensure null-termination
2246
2247 // --- 2. Parse the Configuration File (overrides defaults) ---
2248 file = fopen(configFile, "r");
2249 if (file) {
2250 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing post-processing config file: %s\n", configFile);
2251 while (fgets(line, sizeof(line), file)) {
2252 char *key, *value, *comment;
2253 comment = strchr(line, '#'); if (comment) *comment = '\0';
2254 TrimWhitespace(line); if (strlen(line) == 0) continue;
2255 key = strtok(line, "="); value = strtok(NULL, "=");
2256 if (key && value) {
2257 TrimWhitespace(key); TrimWhitespace(value);
2258 if (strcmp(key, "startTime") == 0) pps->startTime = atoi(value);
2259 else if (strcmp(key, "endTime") == 0) pps->endTime = atoi(value);
2260 else if (strcmp(key, "timeStep") == 0) pps->timeStep = atoi(value);
2261 else if (strcmp(key, "output_particles") == 0) {
2262 if (strcasecmp(value, "true") == 0) pps->outputParticles = PETSC_TRUE;
2263 }
2264 else if (strcasecmp(key, "process_pipeline") == 0) {
2265 strncpy(pps->process_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2266 pps->process_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0'; // Ensure null-termination
2267 } else if (strcasecmp(key, "output_fields_instantaneous") == 0) {
2268 strncpy(pps->output_fields_instantaneous, value, MAX_FIELD_LIST_LENGTH - 1);
2270 } else if (strcasecmp(key, "output_fields_averaged") == 0) {
2271 strncpy(pps->output_fields_averaged, value, MAX_FIELD_LIST_LENGTH - 1);
2273 } else if (strcasecmp(key, "output_prefix") == 0) {
2274 strncpy(pps->output_prefix, value, MAX_FILENAME_LENGTH - 1);
2275 pps->output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
2276 } else if (strcasecmp(key, "particle_output_prefix") == 0) {
2277 strncpy(pps->particle_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2279 } else if (strcasecmp(key, "particle_fields_instantaneous") == 0) {
2280 strncpy(pps->particle_fields, value, MAX_FIELD_LIST_LENGTH - 1);
2281 pps->particle_fields[MAX_FIELD_LIST_LENGTH - 1] = '\0';
2282 } else if (strcasecmp(key, "particle_pipeline") == 0) {
2283 strncpy(pps->particle_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2284 pps->particle_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0';
2285 } else if (strcasecmp(key, "particle_output_freq") == 0) {
2286 pps->particle_output_freq = atoi(value);
2287 } else if (strcasecmp(key, "statistics_pipeline") == 0) {
2288 strncpy(pps->statistics_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2290 } else if (strcasecmp(key, "statistics_output_prefix") == 0) {
2291 strncpy(pps->statistics_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2293 } else if (strcasecmp(key, "particleExt") == 0) {
2294 strncpy(pps->particleExt, value, sizeof(pps->particleExt) - 1);
2295 pps->particleExt[sizeof(pps->particleExt) - 1] = '\0';
2296 } else if (strcasecmp(key, "eulerianExt") == 0) {
2297 strncpy(pps->eulerianExt, value, sizeof(pps->eulerianExt) - 1);
2298 pps->eulerianExt[sizeof(pps->eulerianExt) - 1] = '\0';
2299 } else if (strcmp(key, "reference_ip") == 0) {pps->reference[0] = atoi(value);
2300 } else if (strcmp(key, "reference_jp") == 0) {pps->reference[1] = atoi(value);
2301 } else if (strcmp(key, "reference_kp") == 0) {pps->reference[2] = atoi(value);
2302 } else if (strcasecmp(key, "source_directory") == 0) {
2303 strncpy(pps->source_dir, value, sizeof(pps->source_dir) - 1);
2304 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0';
2305 } else {
2306 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown key '%s' in post-processing config file. Ignoring.\n", key);
2307 }
2308 // Add parsing for pipeline, fields, etc. in later phases
2309 }
2310 }
2311 fclose(file);
2312 } else {
2313 LOG_ALLOW(GLOBAL, LOG_WARNING, "Could not open post-processing config file '%s'. Using defaults and command-line overrides.\n", configFile);
2314 }
2315
2316 // --- 3. Parse Command-Line Options (overrides file settings and defaults) ---
2317 PetscOptionsGetInt(NULL, NULL, "-startTime", &pps->startTime, &startTimeSet);
2318 PetscOptionsGetInt(NULL, NULL, "-endTime", &pps->endTime, &endTimeSet);
2319 PetscOptionsGetInt(NULL, NULL, "-timeStep", &pps->timeStep, &timeStepSet);
2320 PetscOptionsGetBool(NULL, NULL, "-output_particles", &pps->outputParticles, NULL);
2321
2322 if(pps->endTime==-1){
2323 pps->endTime = simCtx->StartStep + simCtx->StepsToRun; // Total steps if endTime is set to -1.
2324 }
2325
2326 // If only startTime is given on command line, run for a single step
2327 if (startTimeSet && !endTimeSet) {
2328 pps->endTime = pps->startTime;
2329 }
2330
2331 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-processing configured to run from t=%d to t=%d with step %d. Particle output: %s.\n",
2332 pps->startTime, pps->endTime, pps->timeStep, pps->outputParticles ? "TRUE" : "FALSE");
2333
2334 LOG_ALLOW(GLOBAL, LOG_INFO, "Process Pipeline: %s\n", pps->process_pipeline);
2335 LOG_ALLOW(GLOBAL, LOG_INFO, "Instantaneous Output Fields: %s\n", pps->output_fields_instantaneous);
2336 LOG_ALLOW(GLOBAL, LOG_INFO, "Output Prefix: %s\n", pps->output_prefix);
2337 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Prefix: %s\n", pps->particle_output_prefix);
2338 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Fields: %s\n", pps->particle_fields);
2339 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Pipeline: %s\n", pps->particle_pipeline);
2340 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Frequency: %d\n", pps->particle_output_freq);
2341 LOG_ALLOW(GLOBAL, LOG_INFO, "Averaged Output Fields (reserved): %s\n", pps->output_fields_averaged);
2342 LOG_ALLOW(GLOBAL, LOG_INFO, "Post input extensions: Eulerian='.%s', Particle='.%s'\n", pps->eulerianExt, pps->particleExt);
2343
2345 PetscFunctionReturn(0);
2346}
2347
2348
2349#undef __FUNCT__
2350#define __FUNCT__ "ParseScalingInformation"
2351/**
2352 * @brief Implementation of \ref ParseScalingInformation().
2353 * @details Full API contract (arguments, ownership, side effects) is documented with
2354 * the header declaration in `include/io.h`.
2355 * @see ParseScalingInformation()
2356 */
2357PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
2358{
2359 PetscErrorCode ierr;
2360 PetscBool flg;
2361
2362 PetscFunctionBeginUser;
2364
2365 if (!simCtx) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "SimCtx is NULL in ParseScalingInformation");
2366
2367 // --- 1. Set default values to 1.0 ---
2368 // This represents a purely non-dimensional run if no scaling is provided.
2369 simCtx->scaling.L_ref = 1.0;
2370 simCtx->scaling.U_ref = 1.0;
2371 simCtx->scaling.rho_ref = 1.0;
2372
2373 // --- 2. Read overrides from the command line / control file ---
2374 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_L_ref", &simCtx->scaling.L_ref, &flg); CHKERRQ(ierr);
2375 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_U_ref", &simCtx->scaling.U_ref, &flg); CHKERRQ(ierr);
2376 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_rho_ref", &simCtx->scaling.rho_ref, &flg); CHKERRQ(ierr);
2377
2378 // --- 3. Calculate derived scaling factors ---
2379 // Check for division by zero to be safe, though U_ref should be positive.
2380 if (simCtx->scaling.U_ref <= 0.0) {
2381 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Reference velocity U_ref must be positive. Got %g", (double)simCtx->scaling.U_ref);
2382 }
2383 simCtx->scaling.P_ref = simCtx->scaling.rho_ref * simCtx->scaling.U_ref * simCtx->scaling.U_ref;
2384
2385 // --- 4. Log the final, effective scales for verification ---
2386 LOG(GLOBAL, LOG_INFO, "---------------- Physical Scales Initialized -----------------\n");
2387 LOG(GLOBAL, LOG_INFO, " L_ref: %.4f, U_ref: %.4f, rho_ref: %.4f, P_ref: %.4f\n",
2388 simCtx->scaling.L_ref, simCtx->scaling.U_ref, simCtx->scaling.rho_ref, simCtx->scaling.P_ref);
2389 LOG(GLOBAL, LOG_INFO, "--------------------------------------------------------------\n");
2390
2392 PetscFunctionReturn(0);
2393}
2394
2395/**
2396 * @brief Implementation of \ref ReadDataFileToArray().
2397 * @details Full API contract (arguments, ownership, side effects) is documented with
2398 * the header declaration in `include/io.h`.
2399 * @see ReadDataFileToArray()
2400 */
2401PetscInt ReadDataFileToArray(const char *filename,
2402 double **data_out,
2403 PetscInt *Nout,
2404 MPI_Comm comm)
2405{
2406 /* STEP 0: Prepare local variables & log function entry */
2407 PetscMPIInt rank, size;
2408 PetscErrorCode ierr;
2409 FILE *fp = NULL;
2410 PetscInt N = 0; /* number of lines/values read on rank 0 */
2411 double *array = NULL; /* pointer to local array on each rank */
2412 PetscInt fileExistsFlag = 0; /* 0 = doesn't exist, 1 = does exist */
2413
2415 "Start reading from file: %s\n",
2416 filename);
2417
2418 /* Basic error checking: data_out, Nout must be non-null. */
2419 if (!filename || !data_out || !Nout) {
2421 "Null pointer argument provided.\n");
2422 return 1;
2423 }
2424
2425 /* Determine rank/size for coordinating I/O. */
2426 MPI_Comm_rank(comm, &rank);
2427 MPI_Comm_size(comm, &size);
2428
2429 /* STEP 1: On rank 0, check if file can be opened. */
2430 if (!rank) {
2431 fp = fopen(filename, "r");
2432 if (fp) {
2433 fileExistsFlag = 1;
2434 fclose(fp);
2435 }
2436 }
2437
2438 /* STEP 2: Broadcast file existence to all ranks. */
2439 // In ReadDataFileToArray:
2440 ierr = MPI_Bcast(&fileExistsFlag, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2441
2442 if (!fileExistsFlag) {
2443 /* If file does not exist, log & return. */
2444 if (!rank) {
2446 "File '%s' not found.\n",
2447 filename);
2448 }
2449 return 2;
2450 }
2451
2452 /* STEP 3: Rank 0 re-opens and reads the file, counting lines, etc. */
2453 if (!rank) {
2454 fp = fopen(filename, "r");
2455 if (!fp) {
2457 "File '%s' could not be opened for reading.\n",
2458 filename);
2459 return 3;
2460 }
2461
2462 /* (3a) Count lines first. */
2463 {
2464 char line[256];
2465 while (fgets(line, sizeof(line), fp)) {
2466 N++;
2467 }
2468 }
2469
2471 "File '%s' has %d lines.\n",
2472 filename, N);
2473
2474 /* (3b) Allocate array on rank 0. */
2475 array = (double*)malloc(N * sizeof(double));
2476 if (!array) {
2477 fclose(fp);
2479 "malloc failed for array.\n");
2480 return 4;
2481 }
2482
2483 /* (3c) Rewind & read values into array. */
2484 rewind(fp);
2485 {
2486 PetscInt i = 0;
2487 char line[256];
2488 while (fgets(line, sizeof(line), fp)) {
2489 double val;
2490 if (sscanf(line, "%lf", &val) == 1) {
2491 array[i++] = val;
2492 }
2493 }
2494 }
2495 fclose(fp);
2496
2498 "Successfully read %d values from '%s'.\n",
2499 N, filename);
2500 }
2501
2502 /* STEP 4: Broadcast the integer N to all ranks. */
2503 ierr = MPI_Bcast(&N, 1, MPI_INT, 0, comm); CHKERRQ(ierr);
2504
2505 /* STEP 5: Each rank allocates an array to receive the broadcast if rank>0. */
2506 if (rank) {
2507 array = (double*)malloc(N * sizeof(double));
2508 if (!array) {
2510 "malloc failed on rank %d.\n",
2511 rank);
2512 return 5;
2513 }
2514 }
2515
2516 /* STEP 6: Broadcast the actual data from rank 0 to all. */
2517 ierr = MPI_Bcast(array, N, MPI_DOUBLE, 0, comm); CHKERRQ(ierr);
2518
2519 /* STEP 7: Assign outputs on all ranks. */
2520 *data_out = array;
2521 *Nout = N;
2522
2524 "Done. Provided array of length=%d to all ranks.\n",
2525 N);
2526 return 0; /* success */
2527}
2528
2529/**
2530 * @brief Internal helper implementation: `ReadPositionsFromFile()`.
2531 * @details Local to this translation unit.
2532 */
2533PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex,
2534 UserCtx *user,
2535 double **coordsArray,
2536 PetscInt *Ncoords)
2537{
2538 PetscFunctionBeginUser;
2539
2540 PetscErrorCode ierr;
2541 Vec coordsVec;
2542
2543 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating coords Vec.\n");
2544 ierr = VecCreate(PETSC_COMM_WORLD, &coordsVec);CHKERRQ(ierr);
2545 ierr = VecSetFromOptions(coordsVec);CHKERRQ(ierr);
2546
2547 // For example: "position" is the name of the coordinate data
2548 ierr = ReadFieldData(user, "position", coordsVec, timeIndex, "dat");
2549 if (ierr) {
2551 "Error reading position data (ti=%d).\n",
2552 timeIndex);
2553 PetscFunctionReturn(ierr);
2554 }
2555
2556 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositions - Gathering coords Vec to rank 0.\n");
2557 ierr = VecToArrayOnRank0(coordsVec, Ncoords, coordsArray);CHKERRQ(ierr);
2558
2559 ierr = VecDestroy(&coordsVec);CHKERRQ(ierr);
2560
2562 "Successfully gathered coordinates. Ncoords=%d.\n", *Ncoords);
2563 PetscFunctionReturn(0);
2564}
2565
2566
2567/**
2568 * @brief Internal helper implementation: `ReadFieldDataToRank0()`.
2569 * @details Local to this translation unit.
2570 */
2571PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex,
2572 const char *fieldName,
2573 UserCtx *user,
2574 double **scalarArray,
2575 PetscInt *Nscalars)
2576{
2577 PetscFunctionBeginUser;
2578
2579 PetscErrorCode ierr;
2580 Vec fieldVec;
2581
2582 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating field Vec.\n");
2583 ierr = VecCreate(PETSC_COMM_WORLD, &fieldVec);CHKERRQ(ierr);
2584 ierr = VecSetFromOptions(fieldVec);CHKERRQ(ierr);
2585
2586 ierr = ReadFieldData(user, fieldName, fieldVec, timeIndex, "dat");
2587 if (ierr) {
2589 "Error reading field '%s' (ti=%d).\n",
2590 fieldName, timeIndex);
2591 PetscFunctionReturn(ierr);
2592 }
2593
2594 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Gathering field Vec to rank 0.\n");
2595 ierr = VecToArrayOnRank0(fieldVec, Nscalars, scalarArray);CHKERRQ(ierr);
2596
2597 ierr = VecDestroy(&fieldVec);CHKERRQ(ierr);
2598
2600 "Successfully gathered field '%s'. Nscalars=%d.\n",
2601 fieldName, *Nscalars);
2602 PetscFunctionReturn(0);
2603}
PetscErrorCode ReadStatisticalFields(UserCtx *user, PetscInt ti)
Implementation of ReadStatisticalFields().
Definition io.c:1205
PetscErrorCode ParsePostProcessingSettings(SimCtx *simCtx)
Internal helper implementation: ParsePostProcessingSettings().
Definition io.c:2209
PetscErrorCode WriteAllSwarmFields(UserCtx *user)
Internal helper implementation: WriteAllSwarmFields().
Definition io.c:1815
PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
Internal helper implementation: SwarmFieldToArrayOnRank0().
Definition io.c:1953
PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
Implementation of ParseScalingInformation().
Definition io.c:2357
PetscInt ReadDataFileToArray(const char *filename, double **data_out, PetscInt *Nout, MPI_Comm comm)
Implementation of ReadDataFileToArray().
Definition io.c:2401
PetscErrorCode ReadGridFile(UserCtx *user)
Internal helper implementation: ReadGridFile().
Definition io.c:219
PetscErrorCode StringToBCHandlerType(const char *str, BCHandlerType *handler_out)
Internal helper implementation: StringToBCHandlerType().
Definition io.c:347
PetscErrorCode ReadRANSFields(UserCtx *user, PetscInt ti)
Internal helper implementation: ReadRANSFields().
Definition io.c:1257
PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
Internal helper implementation: GetBCParamReal().
Definition io.c:387
PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Internal helper implementation: ReadSwarmIntField().
Definition io.c:1321
PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Implementation of WriteSwarmField().
Definition io.c:1700
static PetscInt * g_IMs_from_file
Caches the IM dimensions for all blocks read from the grid file.
Definition io.c:19
PetscErrorCode WriteStatisticalFields(UserCtx *user)
Implementation of WriteStatisticalFields().
Definition io.c:1627
PetscErrorCode ReadSimulationFields(UserCtx *user, PetscInt ti)
Internal helper implementation: ReadSimulationFields().
Definition io.c:1126
static PetscInt g_nblk_from_file
Stores the number of blocks read from the grid file.
Definition io.c:17
PetscBool ShouldWriteDataOutput(const SimCtx *simCtx, PetscInt completed_step)
Implementation of ShouldWriteDataOutput().
Definition io.c:70
PetscErrorCode WriteFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Internal helper implementation: WriteFieldData().
Definition io.c:1448
PetscErrorCode ReadLESFields(UserCtx *user, PetscInt ti)
Internal helper implementation: ReadLESFields().
Definition io.c:1229
PetscErrorCode ReadFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Internal helper implementation: ReadFieldData().
Definition io.c:898
PetscErrorCode ParseAllBoundaryConditions(UserCtx *user, const char *bcs_input_filename)
Internal helper implementation: ParseAllBoundaryConditions().
Definition io.c:447
PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler)
Internal helper implementation: ValidateBCHandlerForBCType().
Definition io.c:364
void TrimWhitespace(char *str)
Implementation of TrimWhitespace().
Definition io.c:38
PetscErrorCode WriteSimulationFields(UserCtx *user)
Implementation of WriteSimulationFields().
Definition io.c:1568
PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
Internal helper implementation: ReadAllSwarmFields().
Definition io.c:1383
static PetscBool g_file_has_been_read
A flag to ensure the grid file is read only once.
Definition io.c:25
PetscErrorCode ReadGridGenerationInputs(UserCtx *user)
Internal helper implementation: ReadGridGenerationInputs().
Definition io.c:85
PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Internal helper implementation: ReadSwarmField().
Definition io.c:1291
PetscErrorCode WriteRANSFields(UserCtx *user)
Implementation of WriteRANSFields().
Definition io.c:1679
PetscErrorCode PopulateFinestUserGridResolutionFromOptions(UserCtx *finest_users, PetscInt nblk)
Internal helper implementation: PopulateFinestUserGridResolutionFromOptions().
Definition io.c:170
PetscErrorCode DeterminePeriodicity(SimCtx *simCtx)
Internal helper implementation: DeterminePeriodicity().
Definition io.c:634
PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex, const char *fieldName, UserCtx *user, double **scalarArray, PetscInt *Nscalars)
Internal helper implementation: ReadFieldDataToRank0().
Definition io.c:2571
PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
Internal helper implementation: VecToArrayOnRank0().
Definition io.c:1868
PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Internal helper implementation: WriteSwarmIntField().
Definition io.c:1755
PetscErrorCode WriteLESFields(UserCtx *user)
Internal helper implementation: WriteLESFields().
Definition io.c:1649
static PetscInt * g_KMs_from_file
Caches the KM dimensions for all blocks read from the grid file.
Definition io.c:23
static PetscInt * g_JMs_from_file
Caches the JM dimensions for all blocks read from the grid file.
Definition io.c:21
PetscErrorCode StringToBCFace(const char *str, BCFace *face_out)
Internal helper implementation: StringToBCFace().
Definition io.c:317
PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex, UserCtx *user, double **coordsArray, PetscInt *Ncoords)
Internal helper implementation: ReadPositionsFromFile().
Definition io.c:2533
static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
Internal helper implementation: ReadOptionalField().
Definition io.c:822
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
Internal helper implementation: VerifyPathExistence().
Definition io.c:738
PetscErrorCode GetBCParamBool(BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found)
Internal helper implementation: GetBCParamBool().
Definition io.c:408
void FreeBC_ParamList(BC_Param *head)
Implementation of FreeBC_ParamList().
Definition io.c:302
PetscErrorCode StringToBCType(const char *str, BCType *type_out)
Internal helper implementation: StringToBCType().
Definition io.c:332
PetscErrorCode DisplayBanner(SimCtx *simCtx)
Internal helper implementation: DisplayBanner().
Definition io.c:2031
static PetscErrorCode CheckDataFile(UserCtx *user, PetscInt ti, const char *fieldName, const char *ext, PetscBool *fileExists)
Internal helper implementation: CheckDataFile().
Definition io.c:782
static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
Internal helper implementation: ReadOptionalSwarmField().
Definition io.c:856
Public interface for data input/output routines.
const char * BCHandlerTypeToString(BCHandlerType handler_type)
Converts a BCHandlerType enum to its string representation.
Definition logging.c:771
const char * FieldInitializationToString(PetscInt FieldInitialization)
Helper function to convert FieldInitialization to a string representation.
Definition logging.c:687
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Synchronized logging macro that checks both the log level and whether the calling function is in the ...
Definition logging.h:252
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:44
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:45
const char * BCFaceToString(BCFace face)
Helper function to convert BCFace enum to a string representation.
Definition logging.c:669
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:199
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:779
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:83
const char * BCTypeToString(BCType type)
Helper function to convert BCType enum to a string representation.
Definition logging.c:751
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:28
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:32
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:30
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:31
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:770
const char * LESModelToString(LESModelType LESFlag)
Helper function to convert LES Flag to a string representation.
Definition logging.c:720
const char * MomentumSolverTypeToString(MomentumSolverType SolverFlag)
Helper function to convert Momentum Solver flag to a string representation.
Definition logging.c:736
const char * ParticleInitializationToString(ParticleInitializationType ParticleInitialization)
Helper function to convert ParticleInitialization to a string representation.
Definition logging.c:703
char statistics_output_prefix[256]
basename for CSV output, e.g.
Definition variables.h:575
BCType
Defines the general mathematical/physical Category of a boundary.
Definition variables.h:251
@ INLET
Definition variables.h:258
@ SYMMETRY
Definition variables.h:256
@ OUTLET
Definition variables.h:257
@ PERIODIC
Definition variables.h:260
@ WALL
Definition variables.h:254
UserCtx * user
Definition variables.h:528
PetscBool inletFaceDefined
Definition variables.h:830
char particle_output_prefix[256]
Definition variables.h:570
PetscMPIInt rank
Definition variables.h:646
PetscInt cgrid
Definition variables.h:826
BoundaryFaceConfig boundary_faces[6]
Definition variables.h:829
PetscInt block_number
Definition variables.h:712
BCFace identifiedInletBCFace
Definition variables.h:831
PetscBool walltimeGuardActive
Definition variables.h:782
char euler_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:666
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:814
PetscInt rans
Definition variables.h:732
@ PARTICLE_INIT_SURFACE_RANDOM
Random placement on the inlet face.
Definition variables.h:509
@ PARTICLE_INIT_SURFACE_EDGES
Deterministic placement at inlet face edges.
Definition variables.h:512
PetscReal StartTime
Definition variables.h:657
Vec K_Omega_o
Definition variables.h:865
char output_prefix[256]
Definition variables.h:567
char ** bcs_files
Definition variables.h:720
struct BC_Param_s * next
Definition variables.h:307
PetscReal Min_X
Definition variables.h:821
char * key
Definition variables.h:305
PetscInt KM
Definition variables.h:820
PetscInt tiout
Definition variables.h:655
UserMG usermg
Definition variables.h:764
#define MAX_FIELD_LIST_LENGTH
Definition variables.h:544
PetscReal walltimeGuardMinSeconds
Definition variables.h:785
PetscReal L_ref
Definition variables.h:625
Vec K_Omega
Definition variables.h:865
BCHandlerType
Defines the specific computational "strategy" for a boundary handler.
Definition variables.h:271
@ BC_HANDLER_PERIODIC_GEOMETRIC
Definition variables.h:284
@ BC_HANDLER_INLET_PARABOLIC
Definition variables.h:277
@ BC_HANDLER_INLET_CONSTANT_VELOCITY
Definition variables.h:276
@ BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX
Definition variables.h:287
@ BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX
Definition variables.h:286
@ BC_HANDLER_WALL_MOVING
Definition variables.h:274
@ BC_HANDLER_WALL_NOSLIP
Definition variables.h:273
@ BC_HANDLER_OUTLET_CONSERVATION
Definition variables.h:282
char output_fields_averaged[1024]
Definition variables.h:566
PetscInt FieldInitialization
Definition variables.h:696
#define MAX_PIPELINE_LENGTH
Definition variables.h:543
PetscReal ren
Definition variables.h:691
BCHandlerType handler_type
Definition variables.h:337
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
PetscInt _this
Definition variables.h:824
PetscInt reference[3]
Definition variables.h:582
char output_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:665
PetscReal dt
Definition variables.h:658
PetscReal ry
Definition variables.h:825
PetscInt StepsToRun
Definition variables.h:654
PetscInt k_periodic
Definition variables.h:713
PetscInt timeStep
Definition variables.h:560
PetscInt np
Definition variables.h:739
PetscReal Max_Y
Definition variables.h:821
PetscBool averaging
Definition variables.h:736
char * value
Definition variables.h:306
Vec lCs
Definition variables.h:865
#define MAX_FILENAME_LENGTH
Definition variables.h:545
Vec Ucont
Definition variables.h:837
PetscInt StartStep
Definition variables.h:653
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
PetscBool OnlySetup
Definition variables.h:659
PetscScalar x
Definition variables.h:101
Vec Ucat_square_sum
Definition variables.h:868
char * current_io_directory
Definition variables.h:670
Vec P_sum
Definition variables.h:868
char grid_file[PETSC_MAX_PATH_LEN]
Definition variables.h:717
char statistics_pipeline[1024]
e.g.
Definition variables.h:574
InterpolationMethod interpolationMethod
Definition variables.h:744
char output_fields_instantaneous[1024]
Definition variables.h:565
char particleRestartMode[16]
Definition variables.h:745
char eulerianExt[8]
Definition variables.h:578
char particle_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:667
BoundingBox * bboxlist
Definition variables.h:742
Vec lNu_t
Definition variables.h:865
Vec Nu_t
Definition variables.h:865
PetscReal rz
Definition variables.h:825
char source_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:555
char particle_pipeline[1024]
Definition variables.h:568
BC_Param * params
Definition variables.h:338
char eulerianSource[PETSC_MAX_PATH_LEN]
Definition variables.h:663
PetscBool walltimeGuardEnabled
Definition variables.h:781
PetscInt walltimeGuardWarmupSteps
Definition variables.h:783
ParticleInitializationType ParticleInitialization
Definition variables.h:743
PetscScalar z
Definition variables.h:101
@ INTERP_TRILINEAR
Definition variables.h:522
Vec lK_Omega_o
Definition variables.h:865
Vec Ucat
Definition variables.h:837
Vec ParticleCount
Definition variables.h:882
ScalingCtx scaling
Definition variables.h:707
Vec lK_Omega
Definition variables.h:865
PetscInt JM
Definition variables.h:820
PetscInt mglevels
Definition variables.h:535
PetscReal Min_Z
Definition variables.h:821
Vec Ucat_sum
Definition variables.h:868
char process_pipeline[1024]
Definition variables.h:564
PetscInt particle_output_freq
Definition variables.h:571
char particle_fields[1024]
Definition variables.h:569
PetscBool outputParticles
Definition variables.h:561
char AnalyticalSolutionType[PETSC_MAX_PATH_LEN]
Definition variables.h:676
PetscReal Max_X
Definition variables.h:821
PetscInt particleConsoleOutputFreq
Definition variables.h:656
Cmpnts InitialConstantContra
Definition variables.h:697
PetscReal Min_Y
Definition variables.h:821
PetscInt i_periodic
Definition variables.h:713
PetscInt step
Definition variables.h:651
PostProcessParams * pps
Definition variables.h:798
PetscScalar y
Definition variables.h:101
PetscMPIInt size
Definition variables.h:647
@ EXEC_MODE_SOLVER
Definition variables.h:616
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:617
PetscInt IM
Definition variables.h:820
char _io_context_buffer[PETSC_MAX_PATH_LEN]
Definition variables.h:669
char particleExt[8]
Definition variables.h:579
Vec Ucat_cross_sum
Definition variables.h:868
PetscReal walltimeGuardEstimatorAlpha
Definition variables.h:786
PetscInt les
Definition variables.h:732
Vec Nvert
Definition variables.h:837
MGCtx * mgctx
Definition variables.h:538
BCType mathematical_type
Definition variables.h:336
PetscReal P_ref
Definition variables.h:628
PetscReal rx
Definition variables.h:825
ExecutionMode exec_mode
Definition variables.h:662
PetscInt startTime
Definition variables.h:558
PetscReal rho_ref
Definition variables.h:627
BoundingBox bbox
Definition variables.h:822
PetscReal walltimeGuardMultiplier
Definition variables.h:784
PetscReal Max_Z
Definition variables.h:821
PetscReal U_ref
Definition variables.h:626
MomentumSolverType mom_solver_type
Definition variables.h:683
PetscInt immersed
Definition variables.h:673
char PostprocessingControlFile[PETSC_MAX_PATH_LEN]
Definition variables.h:797
char restart_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:664
PetscReal pseudo_cfl
Definition variables.h:691
PetscInt LoggingFrequency
Definition variables.h:769
Vec Psi
Definition variables.h:883
BCFace
Identifies the six logical faces of a structured computational block.
Definition variables.h:244
@ BC_FACE_NEG_X
Definition variables.h:245
@ BC_FACE_POS_Z
Definition variables.h:247
@ BC_FACE_POS_Y
Definition variables.h:246
@ BC_FACE_NEG_Z
Definition variables.h:247
@ BC_FACE_POS_X
Definition variables.h:245
@ BC_FACE_NEG_Y
Definition variables.h:246
PetscInt j_periodic
Definition variables.h:713
BoundaryCondition * handler
Definition variables.h:339
A node in a linked list for storing key-value parameters from the bcs.dat file.
Definition variables.h:304
Holds the complete configuration for one of the six boundary faces.
Definition variables.h:334
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
A 3D point or vector with PetscScalar components.
Definition variables.h:100
Holds all configuration parameters for a post-processing run.
Definition variables.h:553
The master context for the entire simulation.
Definition variables.h:643
User-defined context containing data specific to a single computational grid level.
Definition variables.h:811