PICurv 0.1.0 - A Parallel Particle-In-Cell Solver for Curvilinear LES
io.c
/**
 * @file io.c
 * @brief Implementation of data input/output routines, focusing on grid configuration.
 *
 * This module provides functions to parse grid geometry information, either from
 * command-line options for programmatically generated grids or by reading the
 * header of a grid definition file.
 */

#include "io.h"

// =============================================================================
// STATIC (PRIVATE) VARIABLES FOR ONE-TIME FILE READ
// =============================================================================

/** @brief Stores the number of blocks read from the grid file. */
static PetscInt g_nblk_from_file = 0;
/** @brief Caches the IM dimensions for all blocks read from the grid file. */
static PetscInt* g_IMs_from_file = NULL;
/** @brief Caches the JM dimensions for all blocks read from the grid file. */
static PetscInt* g_JMs_from_file = NULL;
/** @brief Caches the KM dimensions for all blocks read from the grid file. */
static PetscInt* g_KMs_from_file = NULL;
/** @brief A flag to ensure the grid file is read only once. */
static PetscBool g_file_has_been_read = PETSC_FALSE;


// =============================================================================
// PUBLIC FUNCTION IMPLEMENTATIONS
// =============================================================================

/**
 * @brief Trims leading and trailing whitespace from a string in-place.
 * @param str The string to be trimmed.
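 *
 * @par Example
 * A minimal illustrative sketch (buffer contents are hypothetical):
 * @code
 *   char buf[] = "  hello \n";
 *   TrimWhitespace(buf);   // buf now holds "hello"
 * @endcode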
 */
void TrimWhitespace(char *str) {
    if (!str) return;

    char *start = str;
    // Find the first non-whitespace character
    while (isspace((unsigned char)*start)) {
        start++;
    }

    // Find the end of the string
    char *end = str + strlen(str) - 1;
    // Move backwards from the end to find the last non-whitespace character
    while (end > start && isspace((unsigned char)*end)) {
        end--;
    }

    // Null-terminate after the last non-whitespace character
    *(end + 1) = '\0';

    // If there was leading whitespace, shift the string to the left
    if (str != start) {
        memmove(str, start, (end - start) + 2); // +2 to include the new null terminator
    }
}


#undef __FUNCT__
#define __FUNCT__ "ReadGridGenerationInputs"
/**
 * @brief Parses command-line options for a programmatically generated grid for a SINGLE block.
 *
 * This function is responsible for reading all per-block array options related
 * to grid geometry, such as dimensions (`-im`), domain bounds (`-xMins`, `-xMaxs`),
 * and stretching ratios (`-rxs`). It reads the entire array for each option, then
 * uses the block index stored in `user->_this` to select the correct value and
 * populate the fields of the provided `UserCtx` struct.
 *
 * @param user Pointer to the `UserCtx` for a specific block. The function will
 *             populate the geometric fields (`IM`, `Min_X`, `rx`, etc.) within this struct.
 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
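 *
 * @par Example
 * Illustrative option values for a hypothetical two-block run (option names
 * are the ones parsed below; the values are made up):
 * @code
 *   -im 64,32 -jm 64,32 -km 64,32
 *   -xMins 0.0,0.0 -xMaxs 1.0,2.0 -rxs 1.0,1.05
 * @endcode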
 */
PetscErrorCode ReadGridGenerationInputs(UserCtx *user)
{
    PetscErrorCode ierr;
    SimCtx *simCtx = user->simCtx;
    PetscInt nblk = simCtx->block_number;
    PetscInt block_index = user->_this;
    PetscBool found;

    // Temporary arrays to hold the parsed values for ALL blocks
    PetscInt  *IMs = NULL, *JMs = NULL, *KMs = NULL, *cgrids = NULL;
    PetscReal *xMins = NULL, *xMaxs = NULL, *rxs = NULL;
    PetscReal *yMins = NULL, *yMaxs = NULL, *rys = NULL;
    PetscReal *zMins = NULL, *zMaxs = NULL, *rzs = NULL;

    PetscFunctionBeginUser;

    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Reading generated grid inputs for block %d.\n", simCtx->rank, block_index);

    if (block_index >= nblk) {
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block index %d is out of range for nblk=%d", block_index, nblk);
    }

    // --- Allocate temporary storage for all array options ---
    ierr = PetscMalloc4(nblk, &IMs, nblk, &JMs, nblk, &KMs, nblk, &cgrids); CHKERRQ(ierr);
    ierr = PetscMalloc6(nblk, &xMins, nblk, &xMaxs, nblk, &rxs, nblk, &yMins, nblk, &yMaxs, nblk, &rys); CHKERRQ(ierr);
    ierr = PetscMalloc3(nblk, &zMins, nblk, &zMaxs, nblk, &rzs); CHKERRQ(ierr);

    // --- Set default values for the temporary arrays ---
    for (PetscInt i = 0; i < nblk; ++i) {
        IMs[i] = 10; JMs[i] = 10; KMs[i] = 10; cgrids[i] = 0;
        xMins[i] = 0.0; xMaxs[i] = 1.0; rxs[i] = 1.0;
        yMins[i] = 0.0; yMaxs[i] = 1.0; rys[i] = 1.0;
        zMins[i] = 0.0; zMaxs[i] = 1.0; rzs[i] = 1.0;
    }

    // --- Parse the array options from the command line / control file ---
    PetscInt count;
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMins", xMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMaxs", xMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rxs", rxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMins", yMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMaxs", yMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rys", rys, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMins", zMins, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMaxs", zMaxs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rzs", rzs, &count, &found); CHKERRQ(ierr);
    count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-cgrids", cgrids, &count, &found); CHKERRQ(ierr);

    // --- Assign the parsed values to the specific UserCtx struct passed in ---
    user->IM    = IMs[block_index];
    user->JM    = JMs[block_index];
    user->KM    = KMs[block_index];
    user->Min_X = xMins[block_index];
    user->Max_X = xMaxs[block_index];
    user->rx    = rxs[block_index];
    user->Min_Y = yMins[block_index];
    user->Max_Y = yMaxs[block_index];
    user->ry    = rys[block_index];
    user->Min_Z = zMins[block_index];
    user->Max_Z = zMaxs[block_index];
    user->rz    = rzs[block_index];
    user->cgrid = cgrids[block_index];

    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d grid generation inputs set: IM=%d, JM=%d, KM=%d\n",
              simCtx->rank, block_index, user->IM, user->JM, user->KM);
    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d bounds: X=[%.2f, %.2f], Y=[%.2f, %.2f], Z=[%.2f, %.2f]\n",
              simCtx->rank, block_index, user->Min_X, user->Max_X, user->Min_Y, user->Max_Y, user->Min_Z, user->Max_Z);

    // --- Clean up temporary storage ---
    ierr = PetscFree4(IMs, JMs, KMs, cgrids); CHKERRQ(ierr);
    ierr = PetscFree6(xMins, xMaxs, rxs, yMins, yMaxs, rys); CHKERRQ(ierr);
    ierr = PetscFree3(zMins, zMaxs, rzs); CHKERRQ(ierr);

    PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "ReadGridFile"
/**
 * @brief Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
 *
 * This function uses a static-variable pattern to ensure the grid file header
 * is read only once, collectively, by all processes. The first time any process
 * calls this function, it triggers a collective operation where rank 0 reads the
 * file and broadcasts the dimensions for all blocks. This data is stored in
 * static "cached" arrays.
 *
 * On every call (including the first), the function retrieves the dimensions for the
 * specific block (identified by `user->_this`) from the cached arrays and populates
 * the `IM`, `JM`, and `KM` fields of the provided `UserCtx`.
 *
 * @param user Pointer to the `UserCtx` for a specific block.
 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
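 *
 * @par Example
 * An illustrative grid file header for two blocks, matching the format the
 * parser below accepts (the "PICGRID" header line is optional):
 * @code
 *   PICGRID
 *   2
 *   64 64 64
 *   32 32 32
 * @endcode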
 */
PetscErrorCode ReadGridFile(UserCtx *user)
{
    PetscErrorCode ierr;
    SimCtx *simCtx = user->simCtx;
    PetscInt block_index = user->_this;

    PetscFunctionBeginUser;

    // --- One-Time Read and Broadcast Logic ---
    if (!g_file_has_been_read) {
        LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "First call to ReadGridFile. Reading and broadcasting grid file header from '%s'...\n", simCtx->grid_file);
        PetscMPIInt rank = simCtx->rank;
        PetscInt nblk = simCtx->block_number;

        if (rank == 0) {
            FILE *fd = fopen(simCtx->grid_file, "r");
            if (!fd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open file: %s", simCtx->grid_file);

            // ---- Read the first token; decide whether it is the header or nblk ----
            char firstTok[32] = {0};
            if (fscanf(fd, "%31s", firstTok) != 1)
                SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Empty grid file: %s", simCtx->grid_file);

            if (strcmp(firstTok, "PICGRID") == 0) {
                // Header is present - read nblk from the next line
                if (fscanf(fd, "%d", &g_nblk_from_file) != 1)
                    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected number of blocks after \"PICGRID\" in %s", simCtx->grid_file);
            } else {
                // No header - the token we just read is actually nblk
                g_nblk_from_file = (PetscInt)strtol(firstTok, NULL, 10);
            }
            if (g_nblk_from_file != nblk) {
                SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Mismatch: -nblk is %d but grid file specifies %d blocks.", nblk, g_nblk_from_file);
            }

            ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
            for (PetscInt i = 0; i < nblk; ++i) {
                if (fscanf(fd, "%d %d %d\n", &g_IMs_from_file[i], &g_JMs_from_file[i], &g_KMs_from_file[i]) != 3) {
                    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected 3 integers for block %d in %s", i, simCtx->grid_file);
                }
            }
            fclose(fd);
        }

        // Broadcast nblk to verify (optional, good practice)
        ierr = MPI_Bcast(&g_nblk_from_file, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

        // Allocate on other ranks before receiving the broadcast
        if (rank != 0) {
            ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
        }

        // Broadcast the data arrays
        ierr = MPI_Bcast(g_IMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
        ierr = MPI_Bcast(g_JMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
        ierr = MPI_Bcast(g_KMs_from_file, nblk, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

        g_file_has_been_read = PETSC_TRUE;
        LOG_ALLOW(GLOBAL, LOG_INFO, "Grid file header read and broadcast complete.\n");
    }

    // --- Per-Block Assignment Logic (runs on every call) ---
    user->IM = g_IMs_from_file[block_index];
    user->JM = g_JMs_from_file[block_index];
    user->KM = g_KMs_from_file[block_index];

    LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Set file inputs for Block %d: IM=%d, JM=%d, KM=%d\n",
              simCtx->rank, block_index, user->IM, user->JM, user->KM);

    PetscFunctionReturn(0);
}


//================================================================================
//
//                        PRIVATE HELPER FUNCTIONS
//
//================================================================================

/**
 * @brief Frees the memory allocated for a linked list of BC_Param structs.
 * @param head A pointer to the head of the linked list to be freed.
 */
void FreeBC_ParamList(BC_Param *head) {
    BC_Param *current = head;
    while (current != NULL) {
        BC_Param *next = current->next;
        PetscFree(current->key);
        PetscFree(current->value);
        PetscFree(current);
        current = next;
    }
}

/**
 * @brief Converts a string representation of a face to a BCFace enum.
 * @param str The input string (e.g., "-Xi", "+Zeta"). Case-insensitive.
 * @param[out] face_out The resulting BCFace enum.
 * @return 0 on success.
 */
PetscErrorCode StringToBCFace(const char* str, BCFace* face_out) {
    if      (strcasecmp(str, "-Xi")   == 0) *face_out = BC_FACE_NEG_X;
    else if (strcasecmp(str, "+Xi")   == 0) *face_out = BC_FACE_POS_X;
    else if (strcasecmp(str, "-Eta")  == 0) *face_out = BC_FACE_NEG_Y;
    else if (strcasecmp(str, "+Eta")  == 0) *face_out = BC_FACE_POS_Y;
    else if (strcasecmp(str, "-Zeta") == 0) *face_out = BC_FACE_NEG_Z;
    else if (strcasecmp(str, "+Zeta") == 0) *face_out = BC_FACE_POS_Z;
    else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown face specifier: %s", str);
    return 0;
}

/**
 * @brief Converts a string representation of a BC type to a BCType enum.
 * @param str The input string (e.g., "WALL", "INLET"). Case-insensitive.
 * @param[out] type_out The resulting BCType enum.
 * @return 0 on success.
 */
PetscErrorCode StringToBCType(const char* str, BCType* type_out) {
    if      (strcasecmp(str, "WALL")     == 0) *type_out = WALL;
    else if (strcasecmp(str, "SYMMETRY") == 0) *type_out = SYMMETRY;
    else if (strcasecmp(str, "INLET")    == 0) *type_out = INLET;
    else if (strcasecmp(str, "OUTLET")   == 0) *type_out = OUTLET;
    else if (strcasecmp(str, "PERIODIC") == 0) *type_out = PERIODIC;
    // ... add other BCTypes here ...
    else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Type string: %s", str);
    return 0;
}

/**
 * @brief Converts a string representation of a handler to a BCHandlerType enum.
 * @param str The input string (e.g., "noslip", "constant_velocity"). Case-insensitive.
 * @param[out] handler_out The resulting BCHandlerType enum.
 * @return 0 on success.
 */
PetscErrorCode StringToBCHandlerType(const char* str, BCHandlerType* handler_out) {
    if      (strcasecmp(str, "noslip")            == 0) *handler_out = BC_HANDLER_WALL_NOSLIP;
    else if (strcasecmp(str, "constant_velocity") == 0) *handler_out = BC_HANDLER_INLET_CONSTANT_VELOCITY;
    else if (strcasecmp(str, "conservation")      == 0) *handler_out = BC_HANDLER_OUTLET_CONSERVATION;
    else if (strcasecmp(str, "parabolic")         == 0) *handler_out = BC_HANDLER_INLET_PARABOLIC;
    else if (strcasecmp(str, "geometric")         == 0) *handler_out = BC_HANDLER_PERIODIC_GEOMETRIC;
    else if (strcasecmp(str, "constant_flux")     == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX;
    else if (strcasecmp(str, "initial_flux")      == 0) *handler_out = BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX;
    // ... add other BCHandlerTypes here ...
    else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Handler string: %s", str);
    return 0;
}

/**
 * @brief Validates that a specific handler is compatible with a general BC type.
 * @param type The general BCType.
 * @param handler The specific BCHandlerType.
 * @return 0 if compatible, error code otherwise.
 */
PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler) {
    switch (type) {
        case OUTLET:
            if (handler != BC_HANDLER_OUTLET_CONSERVATION) return PETSC_ERR_ARG_WRONG;
            break;
        case WALL:
            if (handler != BC_HANDLER_WALL_NOSLIP && handler != BC_HANDLER_WALL_MOVING) return PETSC_ERR_ARG_WRONG;
            break;
        case INLET:
            if (handler != BC_HANDLER_INLET_CONSTANT_VELOCITY && handler != BC_HANDLER_INLET_PARABOLIC) return PETSC_ERR_ARG_WRONG;
            break;
        case PERIODIC:
            if (handler != BC_HANDLER_PERIODIC_GEOMETRIC && handler != BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX && handler != BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX) return PETSC_ERR_ARG_WRONG;
            break;
        // ... add other validation cases here ...
        default: break;
    }
    return 0; // Combination is valid
}

/**
 * @brief Searches a BC_Param linked list for a key and returns its value as a double.
 * @param params The head of the BC_Param linked list.
 * @param key The key to search for (case-insensitive).
 * @param[out] value_out The found value, converted to a PetscReal.
 * @param[out] found Set to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
 * @return 0 on success.
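 *
 * @par Example
 * An illustrative lookup on a parsed parameter list (the key name "vx" is
 * hypothetical):
 * @code
 *   PetscReal vx;
 *   PetscBool found;
 *   ierr = GetBCParamReal(user->boundary_faces[BC_FACE_NEG_X].params, "vx", &vx, &found); CHKERRQ(ierr);
 *   if (!found) vx = 0.0; // absence of the key is not an error
 * @endcode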
 */
PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found) {
    *found = PETSC_FALSE;
    *value_out = 0.0;
    if (!key) return 0; // No key to search for

    BC_Param *current = params;
    while (current) {
        if (strcasecmp(current->key, key) == 0) {
            *value_out = atof(current->value);
            *found = PETSC_TRUE;
            return 0; // Found it, we're done
        }
        current = current->next;
    }
    return 0; // It's not an error to not find the key.
}

/**
 * @brief Searches a BC_Param linked list for a key and returns its value as a bool.
 * @param params The head of the BC_Param linked list.
 * @param key The key to search for (case-insensitive).
 * @param[out] value_out The found value, converted to a PetscBool.
 * @param[out] found Set to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
 * @return 0 on success.
 */
PetscErrorCode GetBCParamBool(BC_Param *params, const char *key, PetscBool *value_out, PetscBool *found) {
    *found = PETSC_FALSE;
    *value_out = PETSC_FALSE;
    if (!key) return 0; // No key to search for

    BC_Param *current = params;
    while (current) {
        if (strcasecmp(current->key, key) == 0) {
            // Key was found.
            *found = PETSC_TRUE;

            // Check the value string. Default to FALSE if the value is NULL or doesn't match a "true" string.
            if (current->value &&
                (strcasecmp(current->value, "true") == 0 ||
                 strcmp(current->value, "1") == 0 ||
                 strcasecmp(current->value, "yes") == 0))
            {
                *value_out = PETSC_TRUE;
            } else {
                *value_out = PETSC_FALSE;
            }
            return 0; // Found it, we're done
        }
        current = current->next;
    }
    return 0; // It's not an error to not find the key.
}

//================================================================================
//
//                         PUBLIC PARSING FUNCTION
//
//================================================================================
#undef __FUNCT__
#define __FUNCT__ "ParseAllBoundaryConditions"
/**
 * @brief Parses the boundary conditions file to configure the type, handler, and
 *        any associated parameters for all 6 global faces of the domain.
 *
 * This function performs the following steps:
 * 1. On MPI rank 0, it reads the specified configuration file line-by-line.
 * 2. It parses each line for `<Face> <Type> <Handler> [param=value]...` format.
 * 3. It validates the parsed strings and stores the configuration, including a
 *    linked list of parameters, in a temporary array.
 * 4. It then serializes this configuration and broadcasts it to all other MPI ranks.
 * 5. All ranks (including rank 0) then deserialize the broadcasted data to populate
 *    their local `user->boundary_faces` array identically.
 * 6. It also sets legacy fields in UserCtx for compatibility with other modules.
 *
 * @param[in,out] user               The main UserCtx struct where the final configuration
 *                                   for all ranks will be stored.
 * @param[in]     bcs_input_filename The path to the boundary conditions configuration file.
 * @return PetscErrorCode 0 on success, error code on failure.
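 *
 * @par Example
 * Illustrative lines for a bcs file; the face, type, and handler strings are
 * the ones accepted by the converters above, while the parameter names are
 * hypothetical:
 * @code
 *   # Face   Type      Handler             [param=value]...
 *   -Xi      INLET     constant_velocity   vx=1.0 vy=0.0 vz=0.0
 *   +Xi      OUTLET    conservation
 *   -Zeta    PERIODIC  geometric
 *   +Zeta    PERIODIC  geometric
 * @endcode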
 */
PetscErrorCode ParseAllBoundaryConditions(UserCtx *user, const char *bcs_input_filename)
{
    PetscErrorCode ierr;
    PetscMPIInt rank;

    // Temporary storage for rank 0 to build the configuration before broadcasting.
    BoundaryFaceConfig configs_rank0[6];

    PetscFunctionBeginUser;

    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);

    if (rank == 0) {
        FILE *file;
        char line_buffer[1024];

        // Initialize the temporary config array with safe defaults on rank 0.
        for (int i = 0; i < 6; i++) {
            configs_rank0[i].face_id = (BCFace)i;
            configs_rank0[i].mathematical_type = WALL;
            configs_rank0[i].handler_type = BC_HANDLER_WALL_NOSLIP;
            configs_rank0[i].params = NULL;
            configs_rank0[i].handler = NULL; // Handler object is not created here.
        }

        LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing BC configuration from '%s' on rank 0...\n", bcs_input_filename);
        file = fopen(bcs_input_filename, "r");
        if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_input_filename);

        while (fgets(line_buffer, sizeof(line_buffer), file)) {
            char *current_pos = line_buffer;
            while (isspace((unsigned char)*current_pos)) current_pos++; // Skip leading whitespace
            if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n' || *current_pos == '\r') continue;

            char *face_str    = strtok(current_pos, " \t\n\r");
            char *type_str    = strtok(NULL, " \t\n\r");
            char *handler_str = strtok(NULL, " \t\n\r");

            if (!face_str || !type_str || !handler_str) {
                LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed line in bcs.dat, skipping: %s\n", line_buffer);
                continue;
            }

            BCFace face_enum;
            BCType type_enum;
            BCHandlerType handler_enum;
            const char* handler_name_for_log;

            // --- Convert strings to enums and validate ---
            ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
            ierr = StringToBCType(type_str, &type_enum); CHKERRQ(ierr);
            ierr = StringToBCHandlerType(handler_str, &handler_enum); CHKERRQ(ierr);
            ierr = ValidateBCHandlerForBCType(type_enum, handler_enum);
            if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Validation failed: Handler '%s' is not valid for Type '%s' on Face '%s'.", handler_str, type_str, face_str);

            // Store the core types for the corresponding face
            configs_rank0[face_enum].mathematical_type = type_enum;
            configs_rank0[face_enum].handler_type = handler_enum;
            handler_name_for_log = BCHandlerTypeToString(handler_enum); // Assumes this utility exists
            LOG_ALLOW(GLOBAL, LOG_DEBUG, "  Parsed Face '%s': Type=%s, Handler=%s\n", face_str, type_str, handler_name_for_log);

            // --- Parse optional key=value parameters for this face ---
            FreeBC_ParamList(configs_rank0[face_enum].params); // Clear any previous (default) params
            configs_rank0[face_enum].params = NULL;
            BC_Param **param_next_ptr = &configs_rank0[face_enum].params; // Pointer to the 'next' pointer to build the list

            char* token;
            while ((token = strtok(NULL, " \t\n\r")) != NULL) {
                char* equals_ptr = strchr(token, '=');
                if (!equals_ptr) {
                    LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed parameter '%s' on face '%s', skipping.\n", token, face_str);
                    continue;
                }

                *equals_ptr = '\0'; // Temporarily split the string at '=' to separate key and value
                char* key_str = token;
                char* value_str = equals_ptr + 1;

                BC_Param *new_param;
                ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
                ierr = PetscStrallocpy(key_str, &new_param->key); CHKERRQ(ierr);
                ierr = PetscStrallocpy(value_str, &new_param->value); CHKERRQ(ierr);
                new_param->next = NULL;

                *param_next_ptr = new_param;
                param_next_ptr = &new_param->next;
                LOG_ALLOW(GLOBAL, LOG_TRACE, "    - Found param: [%s] = [%s]\n", new_param->key, new_param->value);
            }
        }
        fclose(file);
    }

    // =========================================================================
    //              BROADCASTING THE CONFIGURATION FROM RANK 0
    // =========================================================================
    // This is a critical step to ensure all processes have the same configuration.

    LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d broadcasting/receiving BC configuration.\n", rank);

    for (int i = 0; i < 6; i++) {
        // --- Broadcast simple enums ---
        if (rank == 0) {
            user->boundary_faces[i] = configs_rank0[i]; // Rank 0 populates its final struct
        }
        ierr = MPI_Bcast(&user->boundary_faces[i].mathematical_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
        ierr = MPI_Bcast(&user->boundary_faces[i].handler_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

        // --- Serialize and Broadcast the parameter linked list ---
        PetscInt n_params = 0;
        if (rank == 0) { // On rank 0, count the number of parameters to send
            for (BC_Param *p = user->boundary_faces[i].params; p; p = p->next) n_params++;
        }
        ierr = MPI_Bcast(&n_params, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

        if (rank != 0) { // Non-root ranks need to receive and build the list
            FreeBC_ParamList(user->boundary_faces[i].params); // Ensure list is empty before building
            user->boundary_faces[i].params = NULL;
        }

        BC_Param **param_next_ptr = &user->boundary_faces[i].params;

        for (int j = 0; j < n_params; j++) {
            char key_buf[256] = {0}, val_buf[256] = {0};
            if (rank == 0) {
                // On rank 0, navigate to the j-th param and copy its data to buffers
                BC_Param *p = user->boundary_faces[i].params;
                for (int k = 0; k < j; k++) p = p->next;
                strncpy(key_buf, p->key, 255);
                strncpy(val_buf, p->value, 255);
            }

            ierr = MPI_Bcast(key_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
            ierr = MPI_Bcast(val_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

            if (rank != 0) {
                // On non-root ranks, deserialize: create a new node and append it
                BC_Param *new_param;
                ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
                ierr = PetscStrallocpy(key_buf, &new_param->key); CHKERRQ(ierr);
                ierr = PetscStrallocpy(val_buf, &new_param->value); CHKERRQ(ierr);
                new_param->next = NULL;
                *param_next_ptr = new_param;
                param_next_ptr = &new_param->next;
            } else {
                // On rank 0, just advance the pointer for the next iteration
                param_next_ptr = &((*param_next_ptr)->next);
            }
        }
        user->boundary_faces[i].face_id = (BCFace)i; // Ensure face_id is set on all ranks
    }

    // --- Set legacy fields for compatibility with particle system ---
    user->inletFaceDefined = PETSC_FALSE;
    for (int i = 0; i < 6; i++) {
        if (user->boundary_faces[i].mathematical_type == INLET && !user->inletFaceDefined) {
            user->inletFaceDefined = PETSC_TRUE;
            user->identifiedInletBCFace = (BCFace)i;
            LOG_ALLOW(GLOBAL, LOG_INFO, "Inlet face for particle initialization identified as Face %d.\n", i);
            break; // Found the first one, stop looking
        }
    }

    // Cleanup note: rank 0 populated user->boundary_faces directly, so the
    // parameter lists reachable from configs_rank0 are the very same lists now
    // owned by user->boundary_faces. They must NOT be freed here; doing so
    // would leave dangling pointers in the final configuration.

    PetscFunctionReturn(0);
}

//================================================================================
//
//                        PRIVATE HELPER FUNCTIONS
//
//================================================================================

// ... (existing helper functions like FreeBC_ParamList, StringToBCFace, etc.) ...
#undef __FUNCT__
#define __FUNCT__ "DeterminePeriodicity"
/**
 * @brief Scans all block-specific boundary condition files to determine a globally
 *        consistent periodicity for each dimension.
 *
 * This is a lightweight pre-parser intended to be called before DMDA creation.
 * It ensures that the periodicity setting is consistent across all blocks, which is a
 * physical requirement for the domain.
 *
 * 1. It collectively verifies that the mandatory BCS file for each block exists.
 * 2. On MPI rank 0, it then iterates through the files.
 * 3. It parses each line to extract the first (face) and second (type) tokens,
 *    mimicking the main BC parser's tokenization logic.
 * 4. It performs a direct, case-insensitive string comparison on the "type" token
 *    to check if it is "PERIODIC". Other types are silently ignored.
 * 5. It validates consistency (e.g., -Xi and +Xi match) and ensures all block files
 *    specify the same global periodicity.
 * 6. It broadcasts the final three flags (as integers 0 or 1) to all MPI ranks.
 * 7. All ranks update the i_periodic, j_periodic, and k_periodic fields in their SimCtx.
 *
 * @param[in,out] simCtx The master SimCtx struct, containing the bcs_files list and
 *                       where the final periodicity flags will be stored.
 * @return PetscErrorCode 0 on success, error code on failure.
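 *
 * @par Example
 * A bcs file that makes the K (Zeta) direction periodic would contain,
 * illustratively:
 * @code
 *   -Zeta PERIODIC geometric
 *   +Zeta PERIODIC geometric
 * @endcode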
 */
PetscErrorCode DeterminePeriodicity(SimCtx *simCtx)
{
    PetscErrorCode ierr;
    PetscMPIInt rank;
    PetscInt periodic_flags[3] = {0, 0, 0}; // Index 0:I, 1:J, 2:K

    PetscFunctionBeginUser;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);

    // --- Part 1: Collectively verify all BCS files exist before proceeding ---
    for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
        const char *bcs_filename = simCtx->bcs_files[bi];
        if (!bcs_filename) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "BCS filename for block %d is not set in SimCtx.", bi);
        char desc_buf[256];
        PetscBool file_exists;
        snprintf(desc_buf, sizeof(desc_buf), "BCS file for block %d", bi);
        ierr = VerifyPathExistence(bcs_filename, PETSC_FALSE, PETSC_FALSE, desc_buf, &file_exists); CHKERRQ(ierr);
    }

    // --- Part 2: Rank 0 does the parsing, since we know all files exist ---
    if (rank == 0) {
        PetscBool global_is_periodic[3] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};
        PetscBool is_set = PETSC_FALSE;

        for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
            const char *bcs_filename = simCtx->bcs_files[bi];
            FILE *file = fopen(bcs_filename, "r");
            if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_filename);

            PetscBool face_is_periodic[6] = {PETSC_FALSE};
            char line_buffer[1024];

            while (fgets(line_buffer, sizeof(line_buffer), file)) {
                char *current_pos = line_buffer;
                while (isspace((unsigned char)*current_pos)) current_pos++;
                if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n') continue;

                // --- Tokenize the line exactly like the main parser ---
                char *face_str = strtok(current_pos, " \t\n\r");
                char *type_str = strtok(NULL, " \t\n\r");

                // If the line doesn't have at least two tokens, we can't determine the type.
                if (!face_str || !type_str) continue;

                // --- Perform a direct, non-erroring check on the mathematical type string ---
                if (strcasecmp(type_str, "PERIODIC") == 0) {
                    BCFace face_enum;
                    // A malformed face string on a periodic line IS a fatal error.
                    ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
                    face_is_periodic[face_enum] = PETSC_TRUE;
                }
                // Any other type_str (e.g., "WALL", "INLET") is correctly and silently ignored.
            }
            fclose(file);

            // --- Validate consistency within this file ---
            if (face_is_periodic[BC_FACE_NEG_X] != face_is_periodic[BC_FACE_POS_X])
                SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent X-periodicity in file '%s' for block %d. Both -Xi and +Xi must be periodic or neither.", bcs_filename, bi);
            if (face_is_periodic[BC_FACE_NEG_Y] != face_is_periodic[BC_FACE_POS_Y])
                SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Y-periodicity in file '%s' for block %d. Both -Eta and +Eta must be periodic or neither.", bcs_filename, bi);
            if (face_is_periodic[BC_FACE_NEG_Z] != face_is_periodic[BC_FACE_POS_Z])
                SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Inconsistent Z-periodicity in file '%s' for block %d. Both -Zeta and +Zeta must be periodic or neither.", bcs_filename, bi);

            PetscBool local_is_periodic[3] = {face_is_periodic[BC_FACE_NEG_X], face_is_periodic[BC_FACE_NEG_Y], face_is_periodic[BC_FACE_NEG_Z]};

            // --- Validate consistency across block files ---
            if (!is_set) {
                global_is_periodic[0] = local_is_periodic[0];
                global_is_periodic[1] = local_is_periodic[1];
                global_is_periodic[2] = local_is_periodic[2];
                is_set = PETSC_TRUE;
            } else {
                if (global_is_periodic[0] != local_is_periodic[0] || global_is_periodic[1] != local_is_periodic[1] || global_is_periodic[2] != local_is_periodic[2]) {
                    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
                            "Periodicity mismatch between blocks. Block 0 requires (I:%d, J:%d, K:%d), but block %d (file '%s') has (I:%d, J:%d, K:%d).",
                            (int)global_is_periodic[0], (int)global_is_periodic[1], (int)global_is_periodic[2],
                            bi, bcs_filename,
                            (int)local_is_periodic[0], (int)local_is_periodic[1], (int)local_is_periodic[2]);
                }
            }
        } // end loop over blocks

        periodic_flags[0] = (global_is_periodic[0]) ? 1 : 0;
        periodic_flags[1] = (global_is_periodic[1]) ? 1 : 0;
        periodic_flags[2] = (global_is_periodic[2]) ? 1 : 0;

        LOG_ALLOW(GLOBAL, LOG_INFO, "Global periodicity determined: I-periodic=%d, J-periodic=%d, K-periodic=%d\n",
                  periodic_flags[0], periodic_flags[1], periodic_flags[2]);
    }

    // --- Part 3: Broadcast the final flags from rank 0 to all other ranks ---
    ierr = MPI_Bcast(periodic_flags, 3, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);

    // --- All ranks now update their SimCtx ---
    simCtx->i_periodic = periodic_flags[0];
    simCtx->j_periodic = periodic_flags[1];
    simCtx->k_periodic = periodic_flags[2];

    PetscFunctionReturn(0);
}

/**
 * @brief A parallel-safe helper to verify the existence of a generic file or directory path.
 *
 * This function centralizes the logic for checking arbitrary paths. Only Rank 0 performs the
 * filesystem check, and the result is broadcast to all other processes. This ensures
 * collective and synchronized decision-making across all ranks. It is intended for
 * configuration files, source directories, etc., where the path is known completely.
 *
 * @param[in]  path        The full path to the file or directory to check.
 * @param[in]  is_dir      PETSC_TRUE if checking for a directory, PETSC_FALSE for a file.
 * @param[in]  is_optional PETSC_TRUE if the path is optional (results in a warning),
 *                         PETSC_FALSE if mandatory (results in an error).
 * @param[in]  description A user-friendly description of the path for logging (e.g., "Grid file").
 * @param[out] exists      The result of the check (identical on all ranks).
 *
 * @return PetscErrorCode
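 *
 * @par Example
 * Illustrative usage for a mandatory grid file (the path is hypothetical):
 * @code
 *   PetscBool exists;
 *   ierr = VerifyPathExistence("config/grid.dat", PETSC_FALSE, PETSC_FALSE,
 *                              "Grid file", &exists); CHKERRQ(ierr);
 * @endcode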
 */
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
{
    PetscErrorCode ierr;
    PetscMPIInt rank;
    MPI_Comm comm = PETSC_COMM_WORLD;

    PetscFunctionBeginUser;
    ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);

    if (rank == 0) {
        if (is_dir) {
            ierr = PetscTestDirectory(path, 'r', exists); CHKERRQ(ierr);
        } else {
            ierr = PetscTestFile(path, 'r', exists); CHKERRQ(ierr);
        }

        if (!(*exists)) {
            if (is_optional) {
                LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s not found at: %s (using defaults/ignoring).\n", description, path);
            } else {
                LOG_ALLOW(GLOBAL, LOG_ERROR, "Mandatory %s not found at: %s\n", description, path);
            }
        } else {
            LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found %s: %s\n", description, path);
        }
    }

    // Broadcast the result from Rank 0
    PetscMPIInt exists_int = (rank == 0) ? (PetscMPIInt)(*exists) : 0;
    ierr = MPI_Bcast(&exists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
    *exists = (PetscBool)exists_int;

    // Collective error for mandatory files
    if (!(*exists) && !is_optional) {
        SETERRQ(comm, PETSC_ERR_FILE_OPEN, "Mandatory %s not found. Rank 0 expected it at '%s'. Check path and permissions.", description, path);
    }

    PetscFunctionReturn(0);
}

/**
 * @brief Checks for a data file's existence in a parallel-safe manner.
 *
 * This function's signature and behavior have been updated to use the
 * "Directory Context Pointer" pattern. The high-level calling function (e.g.,
 * ReadSimulationFields) must set the `simCtx->current_io_directory` context
 * before this function is called.
 *
 * @param[in]  user       Pointer to the UserCtx structure to access SimCtx.
 * @param      ti         The time index of the file.
 * @param      fieldName  The name of the field.
 * @param      ext        The file extension.
 * @param[out] fileExists The result, which will be identical on all ranks.
 * @return PetscErrorCode
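 *
 * @par Expected filename pattern
 * As composed below (the trailing `_0` is a fixed placeholder); the directory
 * in this example is hypothetical:
 * @code
 *   <current_io_directory>/<fieldName><ti:%05d>_0.<ext>   e.g.  results/ufield00010_0.dat
 * @endcode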
 */
static PetscErrorCode CheckDataFile(UserCtx *user, PetscInt ti, const char *fieldName, const char *ext, PetscBool *fileExists)
{
    PetscErrorCode ierr;
    PetscMPIInt rank;
    MPI_Comm comm = PETSC_COMM_WORLD;
    PetscInt placeholder_int = 0;
    SimCtx *simCtx = user->simCtx;

    PetscFunctionBeginUser;

    if (!simCtx->current_io_directory) {
        SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "I/O context directory is NULL. Ensure it is set before calling CheckDataFile().");
    }

    ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);

    if (rank == 0) {
        char filename[PETSC_MAX_PATH_LEN];
        // Use the same standardized, rank-independent filename format
        ierr = PetscSNPrintf(filename, sizeof(filename), "%s/%s%05" PetscInt_FMT "_%d.%s", simCtx->current_io_directory, fieldName, ti, placeholder_int, ext); CHKERRQ(ierr);
        ierr = PetscTestFile(filename, 'r', fileExists); CHKERRQ(ierr);
        if (!(*fileExists)) {
            LOG_ALLOW(GLOBAL, LOG_WARNING, "(Rank 0) - Optional data file '%s' not found.\n", filename);
        }
    }

    // Broadcast the result from Rank 0 to all other ranks.
    // We cast the PetscBool to a PetscMPIInt for MPI_Bcast.
    PetscMPIInt fileExists_int = (rank == 0) ? (PetscMPIInt)(*fileExists) : 0;
    ierr = MPI_Bcast(&fileExists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
    *fileExists = (PetscBool)fileExists_int;

    PetscFunctionReturn(0);
}

/**
 * @brief Checks for and reads an optional Eulerian field from a file into a Vec.
 *
 * This helper function first checks if the corresponding data file exists in a
 * parallel-safe manner using CheckDataFile().
 * - If the file does not exist, it logs a warning and returns success (0),
 *   gracefully skipping the field. The target Vec is not modified.
 * - If the file exists, it calls ReadFieldData() to read the data into the
 *   provided Vec. Any error during this read operation is considered fatal
 *   (e.g., file is corrupt, permissions issue, or size mismatch), as the
 *   presence of the file implies an intent to load it.
 *
 * @param[in]     user        Pointer to the UserCtx structure.
 * @param[in]     field_name  Internal name of the field for the filename (e.g., "cs").
 * @param[in]     field_label A user-friendly name for logging (e.g., "Smagorinsky Constant").
 * @param[in,out] field_vec   The PETSc Vec to load the data into.
 * @param[in]     ti          Time index for the file name.
 * @param[in]     ext         File extension.
 *
 * @return PetscErrorCode Returns 0 on success or if the optional file was not found.
 *         Returns a non-zero error code on a fatal read error.
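 *
 * @par Example
 * Usage mirroring the call in ReadLESFields():
 * @code
 *   ierr = ReadOptionalField(user, "cs", "Smagorinsky Constant (Cs)", user->CS, ti, "dat"); CHKERRQ(ierr);
 * @endcode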
 */
static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
{
    PetscErrorCode ierr;
    PetscBool fileExists;

    PetscFunctionBeginUser;

    /* Check if the data file for this optional field exists. */
    ierr = CheckDataFile(user, ti, field_name, ext, &fileExists); CHKERRQ(ierr);

    if (fileExists) {
        /* File exists, so we MUST be able to read it. */
        LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
        ierr = ReadFieldData(user, field_name, field_vec, ti, ext);

        if (ierr) {
            /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
               permissions issue. Other errors could be size mismatch or corruption. */
            SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
        } else {
            LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
        }
    } else {
        /* File does not exist, which is acceptable for an optional field. */
        LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
    }

    PetscFunctionReturn(0);
}

/**
 * @brief Checks for and reads an optional DMSwarm field from a file.
 *
 * This helper function first checks if the corresponding data file exists.
 * - If the file does not exist, it logs a warning and returns success (0),
 *   gracefully skipping the field.
 * - If the file exists, it calls ReadSwarmField() to read the data. Any
 *   error during this read operation is considered fatal (e.g., file is
 *   corrupt, permissions issue, or size mismatch), as the presence of the
 *   file implies an intent to load it.
 *
 * @param[in] user        Pointer to the UserCtx structure.
 * @param[in] field_name  Internal name of the DMSwarm field (e.g., "DMSwarm_CellID").
 * @param[in] field_label A user-friendly name for logging (e.g., "Cell ID").
 * @param[in] ti          Time index for the file name.
 * @param[in] ext         File extension.
 *
 * @return PetscErrorCode Returns 0 on success or if the optional file was not found.
 *         Returns a non-zero error code on a fatal read error.
 */
static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
{
    PetscErrorCode ierr;
    PetscBool fileExists;

    PetscFunctionBeginUser;

    /* Check if the data file for this optional field exists. */
    ierr = CheckDataFile(user, ti, field_name, ext, &fileExists); CHKERRQ(ierr);

    if (fileExists) {
        /* File exists, so we MUST be able to read it. */
        LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
        if (strcasecmp(field_name, "DMSwarm_CellID") == 0 || strcasecmp(field_name, "DMSwarm_pid") == 0 || strcasecmp(field_name, "DMSwarm_location_status") == 0) {
            LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading integer swarm field '%s'.\n", field_name);
            ierr = ReadSwarmIntField(user, field_name, ti, ext);
        } else {
            ierr = ReadSwarmField(user, field_name, ti, ext);
        }

        if (ierr) {
            /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
               permissions issue. Other errors could be size mismatch or corruption. */
            SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
        } else {
            LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
        }
    } else {
        /* File does not exist, which is acceptable for an optional field. */
        LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
    }

    PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ReadFieldData"
/************************************************************************************************
* @file  io.c
* @brief Utility for (re-)loading PETSc Vec-based fields written by rank-0 in a previous run
*        – works in both serial and MPI executions.
*
* The restart files are always written as **one** sequential Vec per field
* ( `<path-to-file>/<field_name><step>_0.<ext>` ). In parallel we therefore
* have to:
*
*   • let rank-0 read that sequential file into a temporary Vec,
*   • broadcast the raw array to every rank, and
*   • copy the local slice into the distributed Vec supplied by the caller.
*
* For Vecs attached to a DMDA, the file holds the field in natural ordering;
* those are instead scattered from rank 0 and converted with
* DMDANaturalToGlobal (see the DMDA path below).
*
* The function purposely avoids `VecScatterCreateToAll()` because PETSc
* chooses an even block-cyclic layout that seldom matches an existing
* application layout – that mismatch is the root cause of the previous
* "Scatter sizes of ix and iy don't match" / "incompatible local lengths"
* crashes. A plain `MPI_Bcast` plus a `memcpy()` into the already
* allocated local array is bullet-proof and costs one extra buffer only
* on the non-root ranks.
*
* Logging helper used below:
*   LOG_ALLOW(scope,level,fmt,…) – your existing macro
*
* @param[in]  user       – application context (unused here but part of original API)
* @param[in]  field_name – base name of the field ("position", "ufield", …)
* @param[out] field_vec  – **distributed** Vec we have to fill
* @param[in]  ti         – time-step index encoded in the filename
* @param[in]  ext        – file-name extension ("dat", "bin", …)
*
* @returns 0 on success or a PETSc error code.
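*
* Example usage (mirroring the mandatory reads in ReadSimulationFields()):
*
* @code
*   ierr = ReadFieldData(user, "ufield", user->Ucat, ti, "dat"); CHKERRQ(ierr);
* @endcode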
************************************************************************************************/
PetscErrorCode ReadFieldData(UserCtx *user,
                             const char *field_name,
                             Vec field_vec,
                             PetscInt ti,
                             const char *ext)
{
    PetscErrorCode ierr;
    char filename[PETSC_MAX_PATH_LEN];
    MPI_Comm comm;
    PetscMPIInt rank, size;
    SimCtx *simCtx = user->simCtx;

    PetscFunctionBeginUser;

    if (!simCtx->current_io_directory) {
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling ReadFieldData().");
    }

    ierr = PetscObjectGetComm((PetscObject)field_vec, &comm); CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm, &size); CHKERRQ(ierr);

    const char *source_path = simCtx->current_io_directory;

    /* ---------------------------------------------------------------------
     * Compose <path-to-file>/<field_name><step with 5 digits>_0.<ext>
     * (all restart files are written by rank-0 with that naming scheme).
     * ------------------------------------------------------------------ */
    ierr = PetscSNPrintf(filename, sizeof(filename),
                         "%s/%s%05" PetscInt_FMT "_0.%s",
                         source_path, field_name, ti, ext); CHKERRQ(ierr);

    LOG_ALLOW(GLOBAL, LOG_DEBUG,
              "Attempting to read <%s> on rank %d/%d\n",
              filename, (int)rank, (int)size);

    /* ======================================================================
     * 1. SERIAL JOB – just hand the Vec to VecLoad()
     * ==================================================================== */
    if (size == 1)
    {
        PetscViewer viewer;
        PetscBool   found;
        Vec         temp_vec;
        PetscInt    expectedSize, loadedSize;

        ierr = PetscTestFile(filename, 'r', &found); CHKERRQ(ierr);
        if (!found) SETERRQ(comm, PETSC_ERR_FILE_OPEN,
                            "Restart/Source file not found: %s", filename);

        ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF, filename, FILE_MODE_READ, &viewer); CHKERRQ(ierr);
        /* Do NOT load directly into field_vec: VecLoad() can resize it, which is
         * illegal for DMSwarm "view" vectors. Load into a temporary vector instead. */
        ierr = VecCreate(PETSC_COMM_SELF, &temp_vec); CHKERRQ(ierr);
        ierr = VecLoad(temp_vec, viewer); CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr);

        /* Sanity check: ensure the file size matches the expected vector size. */
        ierr = VecGetSize(field_vec, &expectedSize); CHKERRQ(ierr);
        ierr = VecGetSize(temp_vec, &loadedSize); CHKERRQ(ierr);
        if (loadedSize != expectedSize) {
            SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED,
                    "File %s holds %d entries – expected %d for field '%s'",
                    filename, loadedSize, expectedSize, field_name);
        }

        /* Safely copy the data from the temporary vector to the final destination,
         * then clean up. */
        ierr = VecCopy(temp_vec, field_vec); CHKERRQ(ierr);
        ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);

        LOG_ALLOW(GLOBAL, LOG_INFO,
                  "Loaded <%s> (serial path)\n", filename);

        PetscFunctionReturn(0);
    }

    /* ======================================================================
     * 2. PARALLEL JOB
     * ==================================================================== */
    PetscInt globalSize;
    ierr = VecGetSize(field_vec, &globalSize); CHKERRQ(ierr);

    DM dm = NULL;
    const char *dmtype = NULL;
    Vec nat = NULL; /* Natural-ordered vector for DMDA */

    /* -------------------- rank-0 : read the sequential file -------------- */
    Vec seq_vec = NULL;                  /* only valid on rank-0            */
    const PetscScalar *seqArray = NULL;  /* borrowed pointer on rank-0 only */

    if (rank == 0)
    {
        PetscViewer viewer;
        PetscBool found;

        ierr = PetscTestFile(filename, 'r', &found); CHKERRQ(ierr);
        if (!found) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN,
                            "Restart file not found: %s", filename);

        /* create EMPTY sequential Vec – VecLoad() will size it correctly */
        ierr = VecCreate(PETSC_COMM_SELF, &seq_vec); CHKERRQ(ierr);
        ierr = VecSetType(seq_vec, VECSEQ); CHKERRQ(ierr);

        ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF, filename,
                                     FILE_MODE_READ, &viewer); CHKERRQ(ierr);
        ierr = VecLoad(seq_vec, viewer); CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr);

        /* size sanity-check */
        PetscInt loaded;
        ierr = VecGetSize(seq_vec, &loaded); CHKERRQ(ierr);
        if (loaded != globalSize)
            SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED,
                    "File %s holds %d entries – expected %d",
                    filename, loaded, globalSize);

        /* borrow array for later Bcast */
        ierr = VecGetArrayRead(seq_vec, &seqArray); CHKERRQ(ierr);

        LOG_ALLOW(GLOBAL, LOG_DEBUG,
                  "Rank 0 successfully loaded <%s>\n", filename);
    }

    /* -------------------- Check if this is a DMDA vector ----------------- */
    ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
    if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }

    if (dmtype && !strcmp(dmtype, DMDA)) {
        /* ==================================================================
         * DMDA PATH: File is in natural ordering, need to convert to global
         * ================================================================== */

        /* Create natural vector */
        ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);

        /* Scatter from rank 0's seq_vec to all ranks' natural vector */
        VecScatter scatter;
        Vec nat_seq = NULL; /* Sequential natural vector on rank 0 */

        ierr = VecScatterCreateToZero(nat, &scatter, &nat_seq); CHKERRQ(ierr);

        /* Reverse scatter: from rank 0 to all ranks */
        ierr = VecScatterBegin(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
                               INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);
        ierr = VecScatterEnd(scatter, (rank == 0 ? seq_vec : nat_seq), nat,
                             INSERT_VALUES, SCATTER_REVERSE); CHKERRQ(ierr);

        /* Convert natural → global ordering */
        ierr = DMDANaturalToGlobalBegin(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);
        ierr = DMDANaturalToGlobalEnd(dm, nat, INSERT_VALUES, field_vec); CHKERRQ(ierr);

        /* Cleanup */
        ierr = VecScatterDestroy(&scatter); CHKERRQ(ierr);
        ierr = VecDestroy(&nat_seq); CHKERRQ(ierr);
        ierr = VecDestroy(&nat); CHKERRQ(ierr);

    } else {
        /* ==================================================================
         * NON-DMDA PATH: Use broadcast and direct copy (assumes global ordering)
         * ================================================================== */

        PetscScalar *buffer = NULL;
        if (rank == 0) {
            buffer = (PetscScalar *)seqArray;
        } else {
            ierr = PetscMalloc1(globalSize, &buffer); CHKERRQ(ierr);
        }

        ierr = MPI_Bcast(buffer, (int)globalSize, MPIU_SCALAR, 0, comm); CHKERRQ(ierr);

        /* Copy slice based on ownership range */
        PetscInt rstart, rend, loc;
        PetscScalar *locArray;

        ierr = VecGetOwnershipRange(field_vec, &rstart, &rend); CHKERRQ(ierr);
        loc = rend - rstart;

        ierr = VecGetArray(field_vec, &locArray); CHKERRQ(ierr);
        ierr = PetscMemcpy(locArray, buffer + rstart, loc * sizeof(PetscScalar)); CHKERRQ(ierr);
        ierr = VecRestoreArray(field_vec, &locArray); CHKERRQ(ierr);

        if (rank != 0) {
            ierr = PetscFree(buffer); CHKERRQ(ierr);
        }
    }

    /* -------------------- tidy up ---------------------------------------- */
    if (rank == 0) {
        ierr = VecRestoreArrayRead(seq_vec, &seqArray); CHKERRQ(ierr);
        ierr = VecDestroy(&seq_vec); CHKERRQ(ierr);
    }

    LOG_ALLOW(GLOBAL, LOG_INFO,
              "Loaded <%s> (parallel path)\n", filename);

    PetscFunctionReturn(0);
}


/**
 * @brief Reads simulation fields from files into their respective PETSc vectors.
 *
 * This function reads contravariant velocity, Cartesian velocity, pressure, and node state
 * fields from their respective binary files. It also conditionally reads LES, RANS, and
 * statistical fields if they are enabled.
 *
 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
 * @param[in]     ti   Time index for constructing the file name.
 *
 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
 */
PetscErrorCode ReadSimulationFields(UserCtx *user, PetscInt ti)
{
    PetscErrorCode ierr;

    SimCtx *simCtx = user->simCtx;
    const char *source_path = NULL;

    if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
        source_path = simCtx->pps->source_dir;
    } else if (simCtx->exec_mode == EXEC_MODE_SOLVER) {
        source_path = simCtx->restart_dir;
    } else {
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
    }

    LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read simulation fields.\n");

    // Set the current I/O directory context
    ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer), "%s/%s", source_path, simCtx->euler_subdir); CHKERRQ(ierr);
    simCtx->current_io_directory = simCtx->_io_context_buffer;

    // Read Cartesian velocity field
    ierr = ReadFieldData(user, "ufield", user->Ucat, ti, "dat"); CHKERRQ(ierr);

    // Read contravariant velocity field
    ierr = ReadFieldData(user, "vfield", user->Ucont, ti, "dat"); CHKERRQ(ierr);

    // Read pressure field
    ierr = ReadFieldData(user, "pfield", user->P, ti, "dat"); CHKERRQ(ierr);

    // Read node state field (nvert)
    ierr = ReadFieldData(user, "nvfield", user->Nvert, ti, "dat"); CHKERRQ(ierr);

    LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read all mandatory fields.\n");

    if (simCtx->np > 0) {
        // Read Particle Count field
        if (!user->ParticleCount) {
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ParticleCount Vec is NULL but np>0");
        }
        ierr = ReadOptionalField(user, "ParticleCount", "Particle Count", user->ParticleCount, ti, "dat"); CHKERRQ(ierr);

        ierr = ReadOptionalField(user, "psifield", "Scalar Psi Field", user->Psi, ti, "dat"); CHKERRQ(ierr);
    } else {
        LOG_ALLOW(GLOBAL, LOG_INFO, "No particles in simulation, skipping Particle fields reading.\n");
    }

    // Process LES fields if enabled
    if (simCtx->les) {
        ierr = ReadLESFields(user, ti); CHKERRQ(ierr);
    }

    // Process RANS fields if enabled
    if (simCtx->rans) {
        ierr = ReadRANSFields(user, ti); CHKERRQ(ierr);
    }

    // Process statistical fields if averaging is enabled
    if (simCtx->averaging) {
        ierr = ReadStatisticalFields(user, ti); CHKERRQ(ierr);
    }

    simCtx->current_io_directory = NULL; // Clear the I/O context after reading

    LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading simulation fields.\n");

    return 0;
}

/**
 * @brief Reads statistical fields for averaging purposes.
 *
 * This function reads data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum,
 * and P_sum, used for statistical analysis during simulation.
 *
 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
 * @param[in]     ti   Time index for constructing the file name.
 *
 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
 */
PetscErrorCode ReadStatisticalFields(UserCtx *user, PetscInt ti)
{
    PetscErrorCode ierr;

    LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read statistical fields.\n");

    ierr = ReadOptionalField(user, "su0", "Velocity Sum",        user->Ucat_sum,        ti, "dat"); CHKERRQ(ierr);
    ierr = ReadOptionalField(user, "su1", "Velocity Cross Sum",  user->Ucat_cross_sum,  ti, "dat"); CHKERRQ(ierr);
    ierr = ReadOptionalField(user, "su2", "Velocity Square Sum", user->Ucat_square_sum, ti, "dat"); CHKERRQ(ierr);
    ierr = ReadOptionalField(user, "sp",  "Pressure Sum",        user->P_sum,           ti, "dat"); CHKERRQ(ierr);

    LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading statistical fields.\n");

    return 0;
}
1326
1327/**
1328 * @brief Reads LES-related fields.
1329 *
1330 * This function reads LES-related fields such as Cs (Smagorinsky constant)
1331 * into their respective PETSc vectors.
1332 *
1333 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1334 * @param[in] ti Time index for constructing the file name.
1335 *
1336 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1337 */
1338PetscErrorCode ReadLESFields(UserCtx *user,PetscInt ti)
1339{
1340 PetscErrorCode ierr;
1341
1342 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read LES fields.\n");
1343
1344 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, "dat"); CHKERRQ(ierr);
1345 ierr = ReadOptionalField(user, "cs", "Smagorinsky Constant (Cs)", user->CS, ti, "dat"); CHKERRQ(ierr);
1346
1347    ierr = DMGlobalToLocalBegin(user->da, user->CS, INSERT_VALUES, user->lCs); CHKERRQ(ierr);
1348    ierr = DMGlobalToLocalEnd(user->da, user->CS, INSERT_VALUES, user->lCs); CHKERRQ(ierr);
1349
1350    ierr = DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t); CHKERRQ(ierr);
1351    ierr = DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t); CHKERRQ(ierr);
1352
1353 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading LES fields.\n");
1354
1355 return 0;
1356}
1357
1358/**
1359 * @brief Reads RANS-related fields.
1360 *
1361 * This function reads RANS-related fields such as K_Omega into their respective
1362 * PETSc vectors.
1363 *
1364 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1365 * @param[in] ti Time index for constructing the file name.
1366 *
1367 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1368 */
1369PetscErrorCode ReadRANSFields(UserCtx *user,PetscInt ti)
1370{
1371 PetscErrorCode ierr;
1372
1373 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read RANS fields.\n");
1374
1375 ierr = ReadOptionalField(user, "kfield", "K-Omega RANS", user->K_Omega, ti, "dat"); CHKERRQ(ierr);
1376 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, "dat"); CHKERRQ(ierr);
1377
1378    ierr = VecCopy(user->K_Omega, user->K_Omega_o); CHKERRQ(ierr);
1379
1380    ierr = DMGlobalToLocalBegin(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega); CHKERRQ(ierr);
1381    ierr = DMGlobalToLocalEnd(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega); CHKERRQ(ierr);
1382
1383    ierr = DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t); CHKERRQ(ierr);
1384    ierr = DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t); CHKERRQ(ierr);
1385
1386    ierr = DMGlobalToLocalBegin(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o); CHKERRQ(ierr);
1387    ierr = DMGlobalToLocalEnd(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o); CHKERRQ(ierr);
1388
1389 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading RANS fields.\n");
1390
1391 return 0;
1392}
1393
1394
1395/**
1396 * @brief Reads data from a file into a specified field of a PETSc DMSwarm.
1397 *
1398 * This function is the counterpart to WriteSwarmField(). It creates a global PETSc vector
1399 * that references the specified DMSwarm field, uses ReadFieldData() to read the data from
1400 * a file, and then destroys the global vector reference.
1401 *
1402 * @param[in] user Pointer to the UserCtx structure (containing `user->swarm`).
1403 * @param[in] field_name Name of the DMSwarm field to read into (must be previously declared/allocated).
1404 * @param[in] ti Time index used to construct the input file name.
1405 * @param[in] ext File extension (e.g., "dat" or "bin").
1406 *
1407 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1408 *
1409 * @note Compatible with PETSc 3.14.x.
1410 */
1411PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1412{
1413 PetscErrorCode ierr;
1414 DM swarm;
1415 Vec fieldVec;
1416
1417 PetscFunctionBegin;
1418
1419 swarm = user->swarm;
1420
1421 LOG_ALLOW(GLOBAL,LOG_DEBUG," ReadSwarmField Begins \n");
1422
1423 /* 2) Create a global vector that references the specified Swarm field. */
1424 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1425
1426 LOG_ALLOW(GLOBAL,LOG_DEBUG," Vector created from Field \n");
1427
1428 /* 3) Use the ReadFieldData() function to read data into fieldVec. */
1429 ierr = ReadFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1430
1431 /* 4) Destroy the global vector reference. */
1432 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1433
1434 PetscFunctionReturn(0);
1435}
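/* ------------------------------------------------------------------
 * Illustrative usage (sketch): reading individual swarm fields directly.
 * Assumes the swarm already holds the correct number of particles for
 * timestep `ti`, and that `simCtx->current_io_directory` has been set
 * (as ReadAllSwarmFields() does) so ReadFieldData() can build the path.
 *
 *   ierr = ReadSwarmField(user, "position", ti, "dat"); CHKERRQ(ierr);
 *   ierr = ReadSwarmField(user, "velocity", ti, "dat"); CHKERRQ(ierr);
 * ------------------------------------------------------------------ */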
1436
1437/**
1438 * @brief Reads integer swarm data by using ReadFieldData and casting the result.
1439 *
1440 * This function is the counterpart to WriteSwarmIntField. It reads a file
1441 * containing floating-point data (that was originally integer) into a temporary
1442 * Vec and then casts it back to the integer swarm field. It works by:
1443 * 1. Creating a temporary parallel Vec.
1444 * 2. Calling the standard ReadFieldData() to populate this Vec.
1445 * 3. Accessing the local data of both the Vec and the swarm field.
1446 * 4. Populating the swarm's integer field by casting each PetscScalar back to a PetscInt.
1447 * 5. Destroying the temporary Vec.
1448 *
1449 * @param[in] user Pointer to the UserCtx structure.
1450 * @param[in] field_name Name of the integer Swarm field to be read.
1451 * @param[in] ti Time index for the input file.
1452 * @param[in] ext File extension.
1453 *
1454 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1455 */
1456PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1457{
1458 PetscErrorCode ierr;
1459 DM swarm = user->swarm;
1460 Vec temp_vec;
1461 PetscInt nlocal, nglobal, bs, i;
1462 const PetscScalar *scalar_array; // Read-only pointer from the temp Vec
1463 void *field_array_void;
1464
1465
1466 PetscFunctionBeginUser;
1467
1468 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading '%s' via temporary Vec.\n", field_name);
1469
1470 // Get the properties of the swarm field to determine the expected layout
1471 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1472 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1473 // We get the block size but not the data pointer yet
1474 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1475 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1476
1477 // Create a temporary Vec with the CORRECT layout to receive the data
1478 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1479 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1480 ierr = VecSetSizes(temp_vec, nlocal * bs, nglobal * bs); CHKERRQ(ierr);
1481 ierr = VecSetBlockSize(temp_vec, bs); CHKERRQ(ierr);
1482 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1483
1484 // Call your existing reader to populate the temporary Vec
1485 ierr = ReadFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1486
1487 // Get local pointers
1488 ierr = VecGetArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1489 ierr = DMSwarmGetField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1490
1491 // Perform the cast back, using the correct loop size (nlocal * bs)
1492 if (strcmp(field_name, "DMSwarm_pid") == 0) {
1493 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1494 for (i = 0; i < nlocal * bs; i++) {
1495 int64_array[i] = (PetscInt64)scalar_array[i];
1496 }
1497 } else {
1498 PetscInt *int_array = (PetscInt *)field_array_void;
1499 for (i = 0; i < nlocal * bs; i++) {
1500 int_array[i] = (PetscInt)scalar_array[i];
1501 }
1502 }
1503
1504 // Restore access
1505 ierr = DMSwarmRestoreField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1506 ierr = VecRestoreArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1507
1508 // 6. Clean up
1509 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1510
1511 PetscFunctionReturn(0);
1512}
1513
1514/**
1515 * @brief Reads multiple fields into a DMSwarm, gracefully handling missing optional files.
1516 *
1517 * This function reads all necessary and optional fields for a DMSwarm for a given
1518 * timestep. It assumes the swarm has already been resized to match the particle
1519 * count in the input files.
1520 *
1521 * The 'position' field is considered MANDATORY. If its file is missing or corrupt,
1522 * the function will return a fatal error.
1523 *
1524 * All other fields (velocity, CellID, weight, etc.) are OPTIONAL. If their
1525 * corresponding files are not found, a warning is logged, and the function
1526 * continues without error. If an optional file IS found but is corrupt or has a
1527 * size mismatch, it is treated as a fatal error.
1528 *
1529 * @param[in,out] user Pointer to the UserCtx structure containing the DMSwarm (user->swarm).
1530 * @param[in] ti Time index for constructing the file names.
1531 *
1532 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1533 */
1534PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
1535{
1536 PetscErrorCode ierr;
1537 PetscInt nGlobal;
1538 SimCtx *simCtx = user->simCtx;
1539 const char *source_path = NULL;
1540
1541 PetscFunctionBeginUser;
1542 ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
1543 LOG_ALLOW(GLOBAL, LOG_INFO, "Reading DMSwarm fields for timestep %d (swarm size is %d).\n", ti, nGlobal);
1544
1545 if (nGlobal == 0) {
1546 LOG_ALLOW(GLOBAL, LOG_INFO, "Swarm is empty for timestep %d. Nothing to read.\n", ti);
1547 PetscFunctionReturn(0);
1548 }
1549
1550 // First, determine the top-level source directory based on the execution mode.
1551 if (simCtx->exec_mode == EXEC_MODE_SOLVER) {
1552 source_path = simCtx->restart_dir;
1553 } else if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
1554 source_path = simCtx->pps->source_dir;
1555 } else {
1556 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1557 }
1558
1559 // Set the current I/O directory context
1560 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1561 "%s/%s", source_path, simCtx->particle_subdir); CHKERRQ(ierr);
1562
1563 simCtx->current_io_directory = simCtx->_io_context_buffer;
1564
1565 /* 1) Read positions (REQUIRED) */
1566 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading mandatory position field...\n");
1567 ierr = ReadSwarmField(user, "position", ti, "dat");
1568 if (ierr) {
1569 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read MANDATORY 'position' field for step %d. Cannot continue.", ti);
1570 }
1571 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read mandatory position field for step %d.\n", ti);
1572
1573 /* 2) Read all OPTIONAL fields using the helper function. */
1574 /* The helper will print a warning and continue if a file is not found. */
1575 ierr = ReadOptionalSwarmField(user, "velocity", "Velocity", ti, "dat"); CHKERRQ(ierr);
1576 ierr = ReadOptionalSwarmField(user, "DMSwarm_pid", "Particle ID", ti, "dat"); CHKERRQ(ierr);
1577 ierr = ReadOptionalSwarmField(user, "DMSwarm_CellID", "Cell ID", ti, "dat"); CHKERRQ(ierr);
1578 ierr = ReadOptionalSwarmField(user, "weight", "Particle Weight", ti, "dat"); CHKERRQ(ierr);
1579 ierr = ReadOptionalSwarmField(user, "Psi", "Scalar Psi", ti, "dat"); CHKERRQ(ierr);
1580 ierr = ReadOptionalSwarmField(user, "DMSwarm_location_status", "Migration Status", ti, "dat"); CHKERRQ(ierr);
1581
1582 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1583
1584 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading DMSwarm fields for timestep %d.\n", ti);
1585 PetscFunctionReturn(0);
1586}
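/* ------------------------------------------------------------------
 * Illustrative pre-read sketch: ReadAllSwarmFields() assumes the swarm has
 * already been resized to the particle count stored in the input files.
 * A minimal (hypothetical) preparation could look like the following, where
 * `n_file` is the particle count recorded for this step, determined by
 * whatever metadata mechanism the caller uses (outside this module):
 *
 *   ierr = DMSwarmSetLocalSizes(user->swarm, n_file, 4); CHKERRQ(ierr);
 *   ierr = ReadAllSwarmFields(user, ti); CHKERRQ(ierr);
 * ------------------------------------------------------------------ */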
1587
1588
1589#undef __FUNCT__
1590#define __FUNCT__ "WriteFieldData"
1609/**
1610 * @brief Dump a **distributed** PETSc Vec to the *single* sequential file
1611 * format used by our restart / post-processing tools.
1612 *
1613 * The companion of ReadFieldData(): it always produces **one** file
1614 * (e.g. `results/ufield00006_0.dat`) regardless of how many MPI ranks
1615 * are running.
1616 *
1617 * **Behaviour**
1618 *
1619 * | # MPI ranks | Strategy |
1620 * |-------------|---------------------------------------------------------------------------|
1621 * | 1 | Direct `VecView()` into the file. |
1622 * | > 1 | `VecScatterCreateToZero()` gathers the distributed Vec onto **rank 0**. |
1623 * | | Rank 0 writes the sequential Vec; all other ranks allocate no storage. |
1624 *
1625 * The routine never alters or destroys the parallel Vec passed in; the
1626 * gather buffer is created and freed internally.
1627 *
1628 * @param[in] user Simulation context (used only for logging).
1629 * @param[in] field_name Logical field name (forms part of the filename).
1630 * @param[in] field_vec Distributed PETSc Vec to write.
1631 * @param[in] ti Timestep index used in the filename.
1632 * @param[in] ext File extension (`"dat"` in our workflow).
1633 *
1634 * @return `0` on success or a PETSc error code.
1635 */
1636PetscErrorCode WriteFieldData(UserCtx *user,
1637 const char *field_name,
1638 Vec field_vec,
1639 PetscInt ti,
1640 const char *ext)
1641{
1642 PetscErrorCode ierr;
1643 MPI_Comm comm;
1644 PetscMPIInt rank, size;
1645
1646 const PetscInt placeholder_int = 0; /* keep legacy name */
1647 char filename[PETSC_MAX_PATH_LEN];
1648 SimCtx *simCtx=user->simCtx;
1649
1650 PetscFunctionBeginUser;
1652
1653 if(!simCtx->output_dir){
1654 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Output directory was not set before calling WriteFieldData().");
1655 }
1656
1657 /* ------------------------------------------------------------ */
1658 /* Basic communicator information */
1659 /* ------------------------------------------------------------ */
1660 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1661 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1662 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1663
1664 ierr = PetscSNPrintf(filename,sizeof(filename),
1665 "%s/%s%05" PetscInt_FMT "_%d.%s",
1666 simCtx->current_io_directory,field_name,ti,placeholder_int,ext);CHKERRQ(ierr);
1667
1668        LOG_ALLOW(GLOBAL, LOG_DEBUG,
1669                  " Preparing to write <%s> on rank %d/%d\n",
1670 filename,rank,size);
1671
1672 /* ------------------------------------------------------------ */
1673 /* 1. Serial path */
1674 /* ------------------------------------------------------------ */
1675 if (size == 1) {
1676 PetscViewer viewer;
1677
1678 ierr = PetscViewerBinaryOpen(comm,filename,
1679 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1680 ierr = VecView(field_vec,viewer);CHKERRQ(ierr);
1681 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1682
1683        LOG_ALLOW(GLOBAL, LOG_DEBUG,
1684                  " Wrote <%s> (serial path)\n",filename);
1685 PetscFunctionReturn(0);
1686 }
1687
1688 /* ------------------------------------------------------------ */
1689 /* 2. Parallel path */
1690 /* ------------------------------------------------------------ */
1691 VecScatter scatter;
1692 Vec seq_vec=NULL; /* created by PETSc, lives only on rank 0 */
1693 DM dm = NULL;
1694 const char *dmtype = NULL;
1695 Vec nat = NULL; /* Natural-ordered vector for DMDA */
1696
1697 /* 2.1 Check if vector has DMDA and convert to natural ordering if needed */
1698 ierr = VecGetDM(field_vec, &dm); CHKERRQ(ierr);
1699 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1700
1701 if (dmtype && !strcmp(dmtype, DMDA)) {
1702 /* DMDA path: convert to natural ordering first */
1703 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1704 ierr = DMDAGlobalToNaturalBegin(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1705 ierr = DMDAGlobalToNaturalEnd(dm, field_vec, INSERT_VALUES, nat); CHKERRQ(ierr);
1706
1707 /* Gather the natural-ordered vector */
1708 ierr = VecScatterCreateToZero(nat, &scatter, &seq_vec); CHKERRQ(ierr);
1709 } else {
1710 /* Non-DMDA path: direct gather in global ordering */
1711 ierr = VecScatterCreateToZero(field_vec, &scatter, &seq_vec); CHKERRQ(ierr);
1712 }
1713
1714 /* 2.2 Gather distributed → sequential (on rank 0) */
1715 ierr = VecScatterBegin(scatter, (nat ? nat : field_vec), seq_vec,
1716 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1717 ierr = VecScatterEnd(scatter, (nat ? nat : field_vec), seq_vec,
1718 INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1719
1720 /* 2.3 Rank 0 writes the file */
1721 if (rank == 0) {
1722 PetscViewer viewer;
1723
1724 /* (optional) value diagnostics */
1725 PetscReal vmin,vmax;
1726 ierr = VecMin(seq_vec,NULL,&vmin);CHKERRQ(ierr);
1727 ierr = VecMax(seq_vec,NULL,&vmax);CHKERRQ(ierr);
1728        LOG_ALLOW(GLOBAL, LOG_DEBUG,
1729                  " <%s> range = [%.4e … %.4e]\n",
1730 field_name,(double)vmin,(double)vmax);
1731
1732 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1733 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1734 ierr = VecView(seq_vec,viewer);CHKERRQ(ierr);
1735 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1736
1737        LOG_ALLOW(GLOBAL, LOG_DEBUG,
1738                  " Wrote <%s> (parallel path)\n",filename);
1739 }
1740
1741 /* 2.4 Cleanup */
1742 ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
1743 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
1744 if (nat) { ierr = VecDestroy(&nat); CHKERRQ(ierr); }
1745
1747 PetscFunctionReturn(0);
1748}
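/* ------------------------------------------------------------------
 * Illustrative usage (sketch): a direct call and the resulting filename.
 * Assumes `simCtx->current_io_directory` has already been set (as
 * WriteSimulationFields() does). With field_name = "ufield", ti = 6,
 * ext = "dat" and an I/O context of "results", the PetscSNPrintf()
 * pattern above produces
 *
 *   results/ufield00006_0.dat
 *
 * where the trailing "_0" is the legacy placeholder index kept for
 * backward compatibility with older tooling.
 *
 *   ierr = WriteFieldData(user, "ufield", user->Ucat, 6, "dat"); CHKERRQ(ierr);
 * ------------------------------------------------------------------ */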
1749
1750/**
1751 * @brief Writes simulation fields to files.
1752 *
1753 * This function writes contravariant velocity, Cartesian velocity, pressure, and node state
1754 * fields to their respective binary files. It also conditionally writes LES, RANS, and
1755 * statistical fields if they are enabled.
1756 *
1757 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1758 *
1759 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1760 */
1761PetscErrorCode WriteSimulationFields(UserCtx *user)
1762{
1763 PetscErrorCode ierr;
1764
1765 SimCtx *simCtx = user->simCtx;
1766
1767 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write simulation fields.\n");
1768
1769 // Set the current IO directory
1770 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1771 "%s/%s", simCtx->output_dir, simCtx->euler_subdir);CHKERRQ(ierr);
1772 simCtx->current_io_directory = simCtx->_io_context_buffer;
1773
1774 // Write contravariant velocity field
1775 ierr = WriteFieldData(user, "vfield", user->Ucont, simCtx->step, "dat"); CHKERRQ(ierr);
1776
1777 // Write Cartesian velocity field
1778 ierr = WriteFieldData(user, "ufield", user->Ucat, simCtx->step, "dat"); CHKERRQ(ierr);
1779
1780 // Write pressure field
1781 ierr = WriteFieldData(user, "pfield", user->P, simCtx->step, "dat"); CHKERRQ(ierr);
1782
1783 // Write node state field (nvert)
1784 ierr = WriteFieldData(user, "nvfield", user->Nvert, simCtx->step, "dat"); CHKERRQ(ierr);
1785
1786 // Write ParticleCountPerCell if enabled.
1787 if(simCtx->np>0){
1788 ierr = WriteFieldData(user, "ParticleCount",user->ParticleCount,simCtx->step,"dat"); CHKERRQ(ierr);
1789 ierr = WriteFieldData(user, "psifield", user->Psi, simCtx->step, "dat"); CHKERRQ(ierr);
1790 }
1791
1792 // Write LES fields if enabled
1793 if (simCtx->les) {
1794 ierr = WriteLESFields(user); CHKERRQ(ierr);
1795 }
1796
1797 // Write RANS fields if enabled
1798 if (simCtx->rans) {
1799 ierr = WriteRANSFields(user); CHKERRQ(ierr);
1800 }
1801
1802 // Write statistical fields if averaging is enabled
1803 if (simCtx->averaging) {
1804 ierr = WriteStatisticalFields(user); CHKERRQ(ierr);
1805 }
1806
1807 simCtx->current_io_directory = NULL;
1808
1809 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing simulation fields.\n");
1810
1811 return 0;
1812}
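/* ------------------------------------------------------------------
 * Illustrative usage (sketch): writing the Eulerian state at a fixed
 * cadence inside the time loop. The `output_interval` variable is
 * hypothetical; the solver's actual output frequency is configured
 * elsewhere in the driver.
 *
 *   if (simCtx->step % output_interval == 0) {
 *       ierr = WriteSimulationFields(user); CHKERRQ(ierr);
 *   }
 * ------------------------------------------------------------------ */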
1813
1814/**
1815 * @brief Writes statistical fields for averaging purposes.
1816 *
1817 * This function writes data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum,
1818 * and P_sum to their respective binary files.
1819 *
1820 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1821 *
1822 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1823 */
1824PetscErrorCode WriteStatisticalFields(UserCtx *user)
1825{
1826 PetscErrorCode ierr;
1827
1828 SimCtx *simCtx = user->simCtx;
1829
1830 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write statistical fields.\n");
1831
1832 ierr = WriteFieldData(user, "su0", user->Ucat_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1833 ierr = WriteFieldData(user, "su1", user->Ucat_cross_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1834 ierr = WriteFieldData(user, "su2", user->Ucat_square_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1835 ierr = WriteFieldData(user, "sp", user->P_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1836
1837 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing statistical fields.\n");
1838
1839 return 0;
1840}
1841
1842/**
1843 * @brief Writes LES-related fields.
1844 *
1845 * This function writes LES-related fields such as Cs (Smagorinsky constant)
1846 * to their respective binary files.
1847 *
1848 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1849 *
1850 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1851 */
1852PetscErrorCode WriteLESFields(UserCtx *user)
1853{
1854 PetscErrorCode ierr;
1855
1856 SimCtx *simCtx = user->simCtx;
1857
1858 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write LES fields.\n");
1859
1860
1861    ierr = DMLocalToGlobalBegin(user->da, user->lCs, INSERT_VALUES, user->CS); CHKERRQ(ierr);
1862    ierr = DMLocalToGlobalEnd(user->da, user->lCs, INSERT_VALUES, user->CS); CHKERRQ(ierr);
1863
1864    ierr = DMLocalToGlobalBegin(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t); CHKERRQ(ierr);
1865    ierr = DMLocalToGlobalEnd(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t); CHKERRQ(ierr);
1866
1867 ierr = WriteFieldData(user, "Nu_t", user->Nu_t, simCtx->step, "dat"); CHKERRQ(ierr);
1868 ierr = WriteFieldData(user, "cs", user->CS, simCtx->step, "dat"); CHKERRQ(ierr);
1869
1870
1871 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing LES fields.\n");
1872
1873 return 0;
1874}
1875
1876/**
1877 * @brief Writes RANS-related fields.
1878 *
1879 * This function writes RANS-related fields such as K_Omega to their respective
1880 * binary files.
1881 *
1882 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1883 *
1884 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1885 */
1886PetscErrorCode WriteRANSFields(UserCtx *user)
1887{
1888 PetscErrorCode ierr;
1889
1890 SimCtx *simCtx = user->simCtx;
1891
1892 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write RANS fields.\n");
1893
1894 ierr = WriteFieldData(user, "kfield", user->K_Omega, simCtx->step, "dat"); CHKERRQ(ierr);
1895
1896 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing RANS fields.\n");
1897
1898 return 0;
1899}
1900
1901/**
1902 * @brief Writes data from a specific field in a PETSc Swarm to a file.
1903 *
1904 * This function retrieves the Swarm from the UserCtx (i.e., `user->swarm`) and
1905 * creates a global PETSc vector from the specified Swarm field. It then calls
1906 * the existing WriteFieldData() function to handle the actual I/O operation.
1907 * After writing the data, the function destroys the temporary global vector
1908 * to avoid memory leaks.
1909 *
1910 * @param[in] user Pointer to the UserCtx structure containing simulation context
1911 * and the PetscSwarm (as `user->swarm`).
1912 * @param[in] field_name Name of the Swarm field to be written (e.g., "my_field").
1913 * @param[in] ti Time index used to construct the output file name.
1914 * @param[in] ext File extension (e.g., "dat", "bin").
1915 *
1916 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1917 *
1918 * @note Compatible with PETSc 3.14.4 and newer.
1919 */
1920PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1921{
1922 PetscErrorCode ierr;
1923 Vec fieldVec;
1924 DM swarm;
1925
1926 PetscFunctionBeginUser; /* PETSc macro indicating start of function */
1927
1928 /*
1929 * 1) Retrieve the PetscSwarm from the user context.
1930 * Ensure user->swarm is initialized and not NULL.
1931 */
1932 swarm = user->swarm;
1933
1934 /*
1935 * 2) Create a global vector from the specified swarm field.
1936 * This function is available in PETSc 3.14.4.
1937 * It provides a read/write "view" of the swarm field as a global Vec.
1938 */
1939    LOG_ALLOW(GLOBAL, LOG_DEBUG,
1940              "Attempting to create global vector from field: %s\n",
1941 field_name);
1942 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1943
1944 /*
1945 * 3) Use your existing WriteFieldData() to write the global vector to a file.
1946 * The field name, time index, and extension are passed along for naming.
1947 */
1948    LOG_ALLOW(GLOBAL, LOG_DEBUG,
1949              "Calling WriteFieldData for field: %s\n",
1950 field_name);
1951 ierr = WriteFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1952
1953 /*
1954 * 4) Destroy the global vector once the data is successfully written.
1955 * This step is crucial for avoiding memory leaks.
1956 * DMSwarmDestroyGlobalVectorFromField() is also available in PETSc 3.14.4.
1957 */
1958    LOG_ALLOW(GLOBAL, LOG_DEBUG,
1959              "Destroying the global vector for field: %s\n",
1960 field_name);
1961 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1962
1963 /* Log and return success. */
1964    LOG_ALLOW(GLOBAL, LOG_INFO,
1965              "Successfully wrote swarm data for field: %s\n",
1966 field_name);
1967
1968 PetscFunctionReturn(0); /* PETSc macro indicating end of function */
1969}
1970
1971/**
1972 * @brief Writes integer swarm data by casting it to a temporary Vec and using WriteFieldData.
1973 *
1974 * This function provides a bridge to write integer-based swarm fields (like DMSwarm_CellID)
1975 * using the existing Vec-based I/O routine (WriteFieldData). It works by:
1976 * 1. Creating a temporary parallel Vec with the same layout as other swarm fields.
1977 * 2. Accessing the local integer data from the swarm field.
1978 * 3. Populating the temporary Vec by casting each integer to a PetscScalar.
1979 * 4. Calling the standard WriteFieldData() function with the temporary Vec.
1980 * 5. Destroying the temporary Vec.
1981 *
1982 * @param[in] user Pointer to the UserCtx structure.
1983 * @param[in] field_name Name of the integer Swarm field to be written.
1984 * @param[in] ti Time index for the output file.
1985 * @param[in] ext File extension.
1986 *
1987 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1988 */
1989PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1990{
1991 PetscErrorCode ierr;
1992 DM swarm = user->swarm;
1993 Vec temp_vec; // Temporary Vec to hold casted data
1994 PetscInt nlocal, nglobal,bs,i;
1995 void *field_array_void;
1996 PetscScalar *scalar_array; // Pointer to the temporary Vec's scalar data
1997
1998 PetscFunctionBeginUser;
1999
2000 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Casting '%s' to Vec for writing.\n", field_name);
2001
2002 // Get the swarm field properties
2003 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
2004 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
2005 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
2006
2007    // Create a temporary parallel Vec with the correct layout
2008 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
2009 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
2010 ierr = VecSetSizes(temp_vec, nlocal*bs, nglobal*bs); CHKERRQ(ierr);
2011 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
2012
2013    // Associate the swarm's vector layout with the mandatory 'position' field
2014    ierr = DMSwarmVectorDefineField(swarm, "position"); CHKERRQ(ierr);
2015
2016 ierr = VecGetArray(temp_vec, &scalar_array); CHKERRQ(ierr);
2017
2018 if(strcasecmp(field_name,"DMSwarm_pid") == 0){
2019 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
2020 // Perform the cast from PetscInt64 to PetscScalar
2021 for (i = 0; i < nlocal*bs; i++) {
2022 scalar_array[i] = (PetscScalar)int64_array[i];
2023 }
2024 }else{
2025 PetscInt *int_array = (PetscInt *)field_array_void;
2026 //Perform the cast from PetscInt to PetscScalar
2027 for (i = 0; i < nlocal*bs; i++) {
2028 scalar_array[i] = (PetscScalar)int_array[i];
2029 }
2030 }
2031
2032 // Restore access to both arrays
2033 ierr = VecRestoreArray(temp_vec, &scalar_array); CHKERRQ(ierr);
2034 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
2035
2036 // Call your existing writer with the temporary, populated Vec
2037 ierr = WriteFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
2038
2039 // Clean up
2040 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
2041
2042 PetscFunctionReturn(0);
2043}
2044
2045/**
2046 * @brief Writes a predefined set of PETSc Swarm fields to files.
2047 *
2048 * This function iterates through a hardcoded list of common swarm fields
2049 * (position, velocity, etc.) and calls the WriteSwarmField() helper function
2050 * for each one. This provides a straightforward way to output essential particle
2051 * data at a given simulation step.
2052 *
2053 * This function will only execute if particles are enabled in the simulation
2054 * (i.e., `user->simCtx->np > 0` and `user->swarm` is not NULL).
2055 *
2056 * @param[in] user Pointer to the UserCtx structure containing the simulation context
2057 * and the PetscSwarm.
2058 *
2059 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
2060 */
2061PetscErrorCode WriteAllSwarmFields(UserCtx *user)
2062{
2063 PetscErrorCode ierr;
2064 SimCtx *simCtx = user->simCtx;
2065
2066 PetscFunctionBeginUser;
2067
2068 // If no swarm is configured or there are no particles, do nothing and return.
2069 if (!user->swarm || simCtx->np <= 0) {
2070 PetscFunctionReturn(0);
2071 }
2072
2073 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write swarm fields.\n");
2074
2075 // Ensure the current IO directory is set
2076 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
2077 "%s/%s", simCtx->output_dir, simCtx->particle_subdir);CHKERRQ(ierr);
2078 simCtx->current_io_directory = simCtx->_io_context_buffer;
2079
2080 // Write particle position field
2081 ierr = WriteSwarmField(user, "position", simCtx->step, "dat"); CHKERRQ(ierr);
2082
2083 // Write particle velocity field
2084 ierr = WriteSwarmField(user, "velocity", simCtx->step, "dat"); CHKERRQ(ierr);
2085
2086 // Write particle weight field
2087 ierr = WriteSwarmField(user, "weight", simCtx->step, "dat"); CHKERRQ(ierr);
2088
2089 // Write custom particle field "Psi"
2090 ierr = WriteSwarmField(user, "Psi", simCtx->step, "dat"); CHKERRQ(ierr);
2091
2092 // Integer fields require special handling
2093
2094 // Write the background mesh cell ID for each particle
2095 ierr = WriteSwarmIntField(user, "DMSwarm_CellID", simCtx->step, "dat"); CHKERRQ(ierr);
2096
2097 // Write the particle location status (e.g., inside or outside the domain)
2098 ierr = WriteSwarmIntField(user, "DMSwarm_location_status", simCtx->step, "dat"); CHKERRQ(ierr);
2099
2100 // Write the unique particle ID
2101 ierr = WriteSwarmIntField(user, "DMSwarm_pid", simCtx->step, "dat"); CHKERRQ(ierr);
2102
2103 simCtx->current_io_directory = NULL;
2104
2105 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing swarm fields.\n");
2106
2107 PetscFunctionReturn(0);
2108}
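/* ------------------------------------------------------------------
 * Illustrative checkpoint sketch: a full state dump combines the Eulerian
 * and Lagrangian writers. The wrapper name is hypothetical.
 *
 *   PetscErrorCode WriteCheckpoint(UserCtx *user)
 *   {
 *       PetscErrorCode ierr;
 *       ierr = WriteSimulationFields(user); CHKERRQ(ierr);  // grid-based fields
 *       ierr = WriteAllSwarmFields(user);   CHKERRQ(ierr);  // particle fields (no-op if np == 0)
 *       return 0;
 *   }
 * ------------------------------------------------------------------ */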
2109
2110/**
2111 * @brief Gather a (possibly distributed) PETSc Vec onto rank 0 as a contiguous C array.
2112 * If the Vec has a DMDA attached, the gather is performed in DMDA "natural" ordering.
2113 *
2114 * @param[in] inVec The PETSc Vec (may be distributed).
2115 * @param[out] N Global length of the Vec (includes dof).
2116 * @param[out] arrayOut On rank 0, newly allocated buffer with the gathered values (PetscScalar-sized).
2117 * On other ranks, set to NULL.
2118 */
2119PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
2120{
2121 PetscErrorCode ierr;
2122 MPI_Comm comm;
2123 PetscMPIInt rank;
2124 PetscInt globalSize;
2125 DM dm = NULL;
2126 const char *dmtype = NULL;
2127
2128 /* For DMDA path */
2129 Vec nat = NULL, seqNat = NULL;
2130 VecScatter scatNat = NULL;
2131 const PetscScalar *nar = NULL;
2132 PetscScalar *buf = NULL;
2133
2134 /* For generic (no DM) path */
2135 Vec seq = NULL;
2136 VecScatter scat = NULL;
2137 const PetscScalar *sar = NULL;
2138
2139 PetscFunctionBeginUser;
2140
2141 ierr = PetscObjectGetComm((PetscObject)inVec, &comm); CHKERRQ(ierr);
2142 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
2143 ierr = VecGetSize(inVec, &globalSize); CHKERRQ(ierr);
2144 *N = globalSize;
2145 *arrayOut = NULL;
2146
2147 ierr = VecGetDM(inVec, &dm); CHKERRQ(ierr);
2148 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
2149
2150 if (dmtype && !strcmp(dmtype, DMDA)) {
2151 /* --- DMDA path: go to NATURAL ordering, then gather to rank 0 --- */
2152 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
2153 ierr = DMDAGlobalToNaturalBegin(dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
2154 ierr = DMDAGlobalToNaturalEnd (dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
2155
2156 ierr = VecScatterCreateToZero(nat, &scatNat, &seqNat); CHKERRQ(ierr);
2157 ierr = VecScatterBegin(scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2158 ierr = VecScatterEnd (scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2159
2160 if (rank == 0) {
2161 PetscInt nseq;
2162 ierr = VecGetLocalSize(seqNat, &nseq); CHKERRQ(ierr);
2163 ierr = VecGetArrayRead(seqNat, &nar); CHKERRQ(ierr);
2164
2165 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
2166 ierr = PetscMemcpy(buf, nar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
2167
2168 ierr = VecRestoreArrayRead(seqNat, &nar); CHKERRQ(ierr);
2169 *arrayOut = (double*)buf; /* hand back as double* for drop-in compatibility */
2170 }
2171
2172 ierr = VecScatterDestroy(&scatNat); CHKERRQ(ierr);
2173 ierr = VecDestroy(&seqNat); CHKERRQ(ierr);
2174 ierr = VecDestroy(&nat); CHKERRQ(ierr);
2175 } else {
2176 /* --- No DM attached: plain gather in Vec’s global (parallel) layout order --- */
2177 ierr = VecScatterCreateToZero(inVec, &scat, &seq); CHKERRQ(ierr);
2178 ierr = VecScatterBegin(scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2179 ierr = VecScatterEnd (scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
2180
2181 if (rank == 0) {
2182 PetscInt nseq;
2183 ierr = VecGetLocalSize(seq, &nseq); CHKERRQ(ierr);
2184 ierr = VecGetArrayRead(seq, &sar); CHKERRQ(ierr);
2185
2186 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
2187 ierr = PetscMemcpy(buf, sar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
2188
2189 ierr = VecRestoreArrayRead(seq, &sar); CHKERRQ(ierr);
2190 *arrayOut = (double*)buf;
2191 }
2192
2193 ierr = VecScatterDestroy(&scat); CHKERRQ(ierr);
2194 ierr = VecDestroy(&seq); CHKERRQ(ierr);
2195 }
2196
2197 PetscFunctionReturn(0);
2198}
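/* ------------------------------------------------------------------
 * Illustrative usage (sketch): gathering a distributed Vec onto rank 0.
 * The buffer is allocated only on rank 0 and must be freed by the caller.
 *
 *   PetscInt N;
 *   double  *vals = NULL;
 *   ierr = VecToArrayOnRank0(user->P, &N, &vals); CHKERRQ(ierr);
 *   if (vals) {                       // non-NULL only on rank 0
 *       // ... consume the N gathered values ...
 *       ierr = PetscFree(vals); CHKERRQ(ierr);
 *   }
 * ------------------------------------------------------------------ */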
2199
2200/**
2201 * @brief Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
2202 *
2203 * This is a generic, type-aware gather routine that works for both integer-valued
 * and scalar-valued DMSwarm fields.
2204 * It is a COLLECTIVE operation.
2205 *
2206 * @param[in] swarm The DMSwarm to gather from.
2207 * @param[in] field_name The name of the field to gather.
2208 * @param[out] n_total_particles (Output on rank 0) Total number of particles in the global swarm.
2209 * @param[out] n_components (Output on rank 0) Number of components for the field.
2210 * @param[out] gathered_array (Output on rank 0) A newly allocated array containing the full, gathered data.
2211 * The caller is responsible for freeing this memory and for casting it to the correct type.
2212 * @return PetscErrorCode
2213 */
2214PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
2215{
2216 PetscErrorCode ierr;
2217 PetscMPIInt rank, size;
2218 PetscInt nlocal, nglobal, bs;
2219 void *local_array_void;
2220 size_t element_size = 0;
2221 MPI_Datatype mpi_type = MPI_BYTE; // We'll send raw bytes
2222
2223 PetscFunctionBeginUser;
2224
2225 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2226 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
2227
2228 // All ranks get swarm properties to determine send/receive counts
2229 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
2230 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
2231 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2232
2233 // Determine the size of one element of the field's data type
2234 if (strcasecmp(field_name, "DMSwarm_pid") == 0) {
2235 element_size = sizeof(PetscInt64);
2236 } else if (strcasecmp(field_name, "DMSwarm_CellID") == 0 || strcasecmp(field_name, "DMSwarm_location_status") == 0) {
2237 element_size = sizeof(PetscInt);
2238 } else {
2239 element_size = sizeof(PetscScalar);
2240 }
2241
2242 if (rank == 0) {
2243 *n_total_particles = nglobal;
2244 *n_components = bs;
2245 *gathered_array = NULL; // Initialize output
2246 }
2247
2248 if (size == 1) { // Serial case is a simple copy
2249 if (rank == 0) {
2250 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2251 ierr = PetscMemcpy(*gathered_array, local_array_void, nglobal * bs * element_size); CHKERRQ(ierr);
2252 }
2253 } else { // Parallel case: use MPI_Gatherv
2254 PetscInt *recvcounts = NULL, *displs = NULL;
2255 if (rank == 0) {
2256 ierr = PetscMalloc1(size, &recvcounts); CHKERRQ(ierr);
2257 ierr = PetscMalloc1(size, &displs); CHKERRQ(ierr);
2258 }
2259 PetscInt sendcount = nlocal * bs;
2260
2261 // Gather the number of elements (not bytes) from each rank
2262 ierr = MPI_Gather(&sendcount, 1, MPIU_INT, recvcounts, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2263
2264 if (rank == 0) {
2265 displs[0] = 0;
2266 // Convert counts and calculate displacements in terms of BYTES
2267 for (PetscMPIInt i = 0; i < size; i++) recvcounts[i] *= element_size;
2268 for (PetscMPIInt i = 1; i < size; i++) displs[i] = displs[i-1] + recvcounts[i-1];
2269
2270 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2271 }
2272
2273 // Use Gatherv with MPI_BYTE to handle any data type generically
2274 ierr = MPI_Gatherv(local_array_void, nlocal * bs * element_size, MPI_BYTE,
2275 *gathered_array, recvcounts, displs, MPI_BYTE,
2276 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2277
2278 if (rank == 0) {
2279 ierr = PetscFree(recvcounts); CHKERRQ(ierr);
2280 ierr = PetscFree(displs); CHKERRQ(ierr);
2281 }
2282 }
2283
2284 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2285
2286 PetscFunctionReturn(0);
2287}
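/* ------------------------------------------------------------------
 * Illustrative usage (sketch): gathering particle positions on rank 0.
 * The routine hands back raw bytes; the caller casts according to the
 * field's type (PetscScalar here) and frees the buffer when done.
 *
 *   PetscInt  n_total, n_comp;
 *   void     *raw = NULL;
 *   ierr = SwarmFieldToArrayOnRank0(user->swarm, "position",
 *                                   &n_total, &n_comp, &raw); CHKERRQ(ierr);
 *   if (raw) {                                  // rank 0 only
 *       const PetscScalar *pos = (const PetscScalar*)raw;
 *       // pos[p*n_comp + d] is component d of particle p
 *       ierr = PetscFree(raw); CHKERRQ(ierr);
 *   }
 * ------------------------------------------------------------------ */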
2288
2289/**
2290 * @brief Displays a structured banner summarizing the simulation configuration.
2291 *
2292 * This function prints key simulation parameters to standard output. It may be
2293 * called collectively, but only MPI rank 0 prints the banner.
2294 * The global domain bounds are computed from the per-rank bounding boxes stored in
2295 * `simCtx->bboxlist`, and the boundary conditions are reported from `user->boundary_faces`.
2296 *
2297 * @param[in] simCtx Pointer to the SimCtx structure holding the full simulation
2298 *                   configuration (time controls, particle count, MPI size, BCs, etc.).
2304 *
2305 * @return PetscErrorCode Returns `0` on success.
2306 */
2307PetscErrorCode DisplayBanner(SimCtx *simCtx) // bboxlist is only valid on rank 0
2308{
2309 PetscErrorCode ierr;
2310 PetscMPIInt rank;
2311 Cmpnts global_min_coords, global_max_coords;
2312 PetscReal StartTime;
2313 PetscInt StartStep,StepsToRun,total_num_particles;
2314 PetscMPIInt num_mpi_procs;
2315
2316 // SimCtx *simCtx = user->simCtx;
2317 UserCtx *user = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
2318 num_mpi_procs = simCtx->size;
2319 StartTime = simCtx->StartTime;
2320 StartStep = simCtx->StartStep;
2321 StepsToRun = simCtx->StepsToRun;
2322 total_num_particles = simCtx->np;
2323 BoundingBox *bboxlist_on_rank0 = simCtx->bboxlist;
2324
2325
2326 PetscFunctionBeginUser;
2327
2328 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "DisplayBanner - UserCtx pointer is NULL.");
2329 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2330
2331 if (rank == 0) {
2332 // If global_domain_bbox is not pre-populated in UserCtx, compute it here from bboxlist_on_rank0
2333 // This assumes bboxlist_on_rank0 is valid and contains all local bounding boxes on rank 0.
2334 if (bboxlist_on_rank0 && num_mpi_procs > 0) {
2335 global_min_coords = bboxlist_on_rank0[0].min_coords;
2336 global_max_coords = bboxlist_on_rank0[0].max_coords;
2337 for (PetscMPIInt p = 1; p < num_mpi_procs; ++p) {
2338 global_min_coords.x = PetscMin(global_min_coords.x, bboxlist_on_rank0[p].min_coords.x);
2339 global_min_coords.y = PetscMin(global_min_coords.y, bboxlist_on_rank0[p].min_coords.y);
2340 global_min_coords.z = PetscMin(global_min_coords.z, bboxlist_on_rank0[p].min_coords.z);
2341 global_max_coords.x = PetscMax(global_max_coords.x, bboxlist_on_rank0[p].max_coords.x);
2342 global_max_coords.y = PetscMax(global_max_coords.y, bboxlist_on_rank0[p].max_coords.y);
2343 global_max_coords.z = PetscMax(global_max_coords.z, bboxlist_on_rank0[p].max_coords.z);
2344 }
2345 // Optionally store this in user->global_domain_bbox if it's useful elsewhere
2346 // user->global_domain_bbox.min_coords = global_min_coords;
2347 // user->global_domain_bbox.max_coords = global_max_coords;
2348 } else {
2349 // Fallback or warning if bboxlist is not available for global calculation
2350 LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank 0) - bboxlist not provided or num_mpi_procs <=0; using user->bbox for domain bounds.\n");
2351 // global_min_coords = user->bbox.min_coords; // Use local bbox of rank 0 as fallback
2352 // global_max_coords = user->bbox.max_coords;
2353 }
2354
2355 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2356 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2357 ierr = PetscPrintf(PETSC_COMM_SELF, " CASE SUMMARY \n"); CHKERRQ(ierr);
2358 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2359 ierr = PetscPrintf(PETSC_COMM_SELF, " Grid Points : %d X %d X %d\n", user->IM, user->JM, user->KM); CHKERRQ(ierr);
2360 ierr = PetscPrintf(PETSC_COMM_SELF, " Cells : %d X %d X %d\n", user->IM - 1, user->JM - 1, user->KM - 1); CHKERRQ(ierr);
2361 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (X) : %.6f to %.6f\n", (double)global_min_coords.x, (double)global_max_coords.x); CHKERRQ(ierr);
2362 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Y) : %.6f to %.6f\n", (double)global_min_coords.y, (double)global_max_coords.y); CHKERRQ(ierr);
2363 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Z) : %.6f to %.6f\n", (double)global_min_coords.z, (double)global_max_coords.z); CHKERRQ(ierr);
2364 if(strcmp(simCtx->eulerianSource,"load")==0 || strcmp(simCtx->eulerianSource,"solve")==0){
2365 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------- Boundary Conditions --------------------\n"); CHKERRQ(ierr);
2366 const int face_name_width = 17; // Adjusted for longer names (Zeta,Eta,Xi)
2367 for (PetscInt i_face = 0; i_face < 6; ++i_face) {
2368 BCFace current_face = (BCFace)i_face;
2369 // The BCFaceToString will now return the Xi, Eta, Zeta versions
2370 const char* face_str = BCFaceToString(current_face);
2371 const char* bc_type_str = BCTypeToString(user->boundary_faces[current_face].mathematical_type);
2372 const char* bc_handler_type_str = BCHandlerTypeToString(user->boundary_faces[current_face].handler_type);
2373 if(user->boundary_faces[current_face].mathematical_type == INLET){
2375 Cmpnts inlet_velocity = {0.0,0.0,0.0};
2376 PetscBool found;
2377 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vx",&inlet_velocity.x,&found); CHKERRQ(ierr);
2378 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vy",&inlet_velocity.y,&found); CHKERRQ(ierr);
2379 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"vz",&inlet_velocity.z,&found); CHKERRQ(ierr);
2380 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f,%.4f,%.4f]\n",
2381 face_name_width, face_str, bc_type_str, bc_handler_type_str,inlet_velocity.x,inlet_velocity.y,inlet_velocity.z); CHKERRQ(ierr);
2382 } else if(user->boundary_faces[current_face].handler_type == BC_HANDLER_INLET_PARABOLIC){
2383 PetscReal v_max = 0.0;
2384 PetscBool found;
2385 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"v_max",&v_max,&found); CHKERRQ(ierr);
2386 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - v_max=%.4f\n",
2387 face_name_width, face_str, bc_type_str, bc_handler_type_str, v_max); CHKERRQ(ierr);
2388 }
2390 PetscReal flux;
2391 PetscBool trimflag,foundflux,foundtrimflag;
2392 ierr = GetBCParamReal(user->boundary_faces[current_face].params,"target_flux",&flux,&foundflux); CHKERRQ(ierr);
2393 ierr = GetBCParamBool(user->boundary_faces[current_face].params,"apply_trim",&trimflag,&foundtrimflag); CHKERRQ(ierr);
2394 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s - [%.4f] - %s\n",
2395 face_name_width, face_str, bc_type_str, bc_handler_type_str,flux,trimflag?"Trim Ucont":"No Trim"); CHKERRQ(ierr);
2396 } else{
2397 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s - %s\n",
2398 face_name_width, face_str, bc_type_str,bc_handler_type_str); CHKERRQ(ierr);
2399 }
2400 }
2401 }
2402 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------------------------------------------------\n"); CHKERRQ(ierr);
2403 ierr = PetscPrintf(PETSC_COMM_SELF, " Start Time : %.4f\n", (double)StartTime); CHKERRQ(ierr);
2404 ierr = PetscPrintf(PETSC_COMM_SELF, " Timestep Size : %.4f\n", (double)simCtx->dt); CHKERRQ(ierr);
2405 ierr = PetscPrintf(PETSC_COMM_SELF, " Starting Step : %d\n", StartStep); CHKERRQ(ierr);
2406 ierr = PetscPrintf(PETSC_COMM_SELF, " Total Steps to Run : %d\n", StepsToRun); CHKERRQ(ierr);
2407 ierr = PetscPrintf(PETSC_COMM_SELF, " Number of MPI Processes : %d\n", num_mpi_procs); CHKERRQ(ierr);
2408 ierr = PetscPrintf(PETSC_COMM_WORLD," Number of Particles : %d\n", total_num_particles); CHKERRQ(ierr);
2409 if(strcmp(simCtx->eulerianSource,"solve")==0 || strcmp(simCtx->eulerianSource,"load")==0){
2410 const char* field_init_str = FieldInitializationToString(simCtx->FieldInitialization);
2411 const char* particle_init_str = ParticleInitializationToString(simCtx->ParticleInitialization);
2412 ierr = PetscPrintf(PETSC_COMM_WORLD," Reynolds Number : %le\n", simCtx->ren); CHKERRQ(ierr);
2413 //ierr = PetscPrintf(PETSC_COMM_WORLD," Von-Neumann Number : %le\n", simCtx->vnn); CHKERRQ(ierr);
2414 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Initialization Mode: %s\n", particle_init_str); CHKERRQ(ierr);
2415 if(strcmp(simCtx->eulerianSource,"solve")==0){
2416 //ierr = PetscPrintf(PETSC_COMM_WORLD," Stanton Number : %le\n", simCtx->st); CHKERRQ(ierr);
2417 ierr = PetscPrintf(PETSC_COMM_WORLD," Momentum Equation Solver : %s\n", MomentumSolverTypeToString(simCtx->mom_solver_type)); CHKERRQ(ierr);
2418 ierr = PetscPrintf(PETSC_COMM_WORLD," Initial Pseudo-CFL : %le\n", simCtx->pseudo_cfl); CHKERRQ(ierr);
2419 ierr = PetscPrintf(PETSC_COMM_WORLD," Large Eddy Simulation Model : %s\n", LESModelToString(simCtx->les)); CHKERRQ(ierr);
2420 }
2423 if (user->inletFaceDefined) {
2424 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : %s (Enum Val: %d)\n", BCFaceToString(user->identifiedInletBCFace), user->identifiedInletBCFace); CHKERRQ(ierr);
2425 } else {
2426 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : --- (No INLET face identified)\n"); CHKERRQ(ierr);
2427 }
2428 }
2429
2430 ierr = PetscPrintf(PETSC_COMM_SELF, " Field Initialization Mode : %s\n", field_init_str); CHKERRQ(ierr);
2431 if (simCtx->FieldInitialization == 1) {
2432 ierr = PetscPrintf(PETSC_COMM_SELF, " Constant Velocity : x - %.4f, y - %.4f, z - %.4f \n", (double)simCtx->InitialConstantContra.x,(double)simCtx->InitialConstantContra.y,(double)simCtx->InitialConstantContra.z ); CHKERRQ(ierr);
2433 }
2434 } else if(strcmp(simCtx->eulerianSource,"analytical")==0){
2435 ierr = PetscPrintf(PETSC_COMM_WORLD," Analytical Solution Type : %s\n", simCtx->AnalyticalSolutionType); CHKERRQ(ierr);
2436 }
2437 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2438 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2439 }
2440 PetscFunctionReturn(0);
2441}
2442
2443#undef __FUNCT__
2444#define __FUNCT__ "ParsePostProcessingSettings"
2445/**
2446 * @brief Initializes post-processing settings from a config file and command-line overrides.
2447 *
2448 * This function establishes the configuration for a post-processing run by:
2449 * 1. Setting hardcoded default values in the PostProcessParams struct.
2450 * 2. Reading a configuration file to override the defaults.
2451 * 3. Parsing command-line options (-startTime, -endTime, etc.) which can override
2452 * both the defaults and the file settings.
2453 *
2454 * @param[in,out] simCtx Pointer to the SimCtx whose `pps` member (PostProcessParams) is
2455 *                       populated; the config file path comes from `simCtx->PostprocessingControlFile`.
2456 * @return PetscErrorCode
2457 */
2458PetscErrorCode ParsePostProcessingSettings(SimCtx *simCtx)
2459{
2460 FILE *file;
2461 char line[1024];
2462 PetscBool startTimeSet, endTimeSet, timeStepSet;
2463
2464 PetscFunctionBeginUser;
2466
2467 if (!simCtx || !simCtx->pps) {
2468 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "SimCtx or its pps member is NULL in ParsePostProcessingSettings.");
2469 }
2470
2471 char *configFile = simCtx->PostprocessingControlFile;
2472 PostProcessParams *pps = simCtx->pps;
2473
2474
2475 // --- 1. Set Sane Defaults First ---
2476 pps->startTime = 0;
2477 pps->endTime = 0;
2478 pps->timeStep = 1;
2479 pps->outputParticles = PETSC_FALSE;
2480 pps->particle_output_freq = simCtx->LoggingFrequency; // Default to logging frequency;
2481 strcpy(pps->process_pipeline, "");
2482 strcpy(pps->output_fields_instantaneous, "Ucat,P");
2483 strcpy(pps->output_fields_averaged, "");
2484 strcpy(pps->output_prefix, "Field");
2485 strcpy(pps->particle_output_prefix,"Particle");
2486 strcpy(pps->particle_fields,"velocity,CellID,weight,pid");
2487 strcpy(pps->particle_pipeline,"");
2488 strcpy(pps->particleExt,"dat"); // The input file format for particles.
2489 strcpy(pps->eulerianExt,"dat"); // The input file format for Eulerian fields.
2490 pps->reference[0] = pps->reference[1] = pps->reference[2] = 1;
2491 strncpy(pps->source_dir, simCtx->output_dir, sizeof(pps->source_dir) - 1);
2492 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0'; // Ensure null-termination
2493
2494 // --- 2. Parse the Configuration File (overrides defaults) ---
2495 file = fopen(configFile, "r");
2496 if (file) {
2497 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing post-processing config file: %s\n", configFile);
2498 while (fgets(line, sizeof(line), file)) {
2499 char *key, *value, *comment;
2500 comment = strchr(line, '#'); if (comment) *comment = '\0';
2501 TrimWhitespace(line); if (strlen(line) == 0) continue;
2502 key = strtok(line, "="); value = strtok(NULL, "=");
2503 if (key && value) {
2504 TrimWhitespace(key); TrimWhitespace(value);
2505 if (strcmp(key, "startTime") == 0) pps->startTime = atoi(value);
2506 else if (strcmp(key, "endTime") == 0) pps->endTime = atoi(value);
2507 else if (strcmp(key, "timeStep") == 0) pps->timeStep = atoi(value);
2508 else if (strcmp(key, "output_particles") == 0) {
2509 if (strcasecmp(value, "true") == 0) pps->outputParticles = PETSC_TRUE;
2510 }
2511 else if (strcasecmp(key, "process_pipeline") == 0) {
2512 strncpy(pps->process_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2513 pps->process_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0'; // Ensure null-termination
2514 } else if (strcasecmp(key, "output_fields_instantaneous") == 0) {
2515                strncpy(pps->output_fields_instantaneous, value, MAX_FIELD_LIST_LENGTH - 1);
2516                pps->output_fields_instantaneous[MAX_FIELD_LIST_LENGTH - 1] = '\0'; // Ensure null-termination
2517 } else if (strcasecmp(key, "output_prefix") == 0) {
2518 strncpy(pps->output_prefix, value, MAX_FILENAME_LENGTH - 1);
2519 pps->output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
2520 } else if (strcasecmp(key, "particle_output_prefix") == 0) {
2521                strncpy(pps->particle_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2522                pps->particle_output_prefix[MAX_FILENAME_LENGTH - 1] = '\0'; // Ensure null-termination
2523 } else if (strcasecmp(key, "particle_fields_instantaneous") == 0) {
2524 strncpy(pps->particle_fields, value, MAX_FIELD_LIST_LENGTH - 1);
2525 pps->particle_fields[MAX_FIELD_LIST_LENGTH - 1] = '\0';
2526 } else if (strcasecmp(key, "particle_pipeline") == 0) {
2527 strncpy(pps->particle_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2528 pps->particle_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0';
2529 } else if (strcasecmp(key, "particle_output_freq") == 0) {
2530 pps->particle_output_freq = atoi(value);
2531 } else if (strcmp(key, "reference_ip") == 0) {pps->reference[0] = atoi(value);
2532 } else if (strcmp(key, "reference_jp") == 0) {pps->reference[1] = atoi(value);
2533 } else if (strcmp(key, "reference_kp") == 0) {pps->reference[2] = atoi(value);
2534 } else if (strcasecmp(key, "source_directory") == 0) {
2535 strncpy(pps->source_dir, value, sizeof(pps->source_dir) - 1);
2536 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0';
2537 } else {
2538 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown key '%s' in post-processing config file. Ignoring.\n", key);
2539 }
2540 // Add parsing for pipeline, fields, etc. in later phases
2541 }
2542 }
2543 fclose(file);
2544 } else {
2545 LOG_ALLOW(GLOBAL, LOG_WARNING, "Could not open post-processing config file '%s'. Using defaults and command-line overrides.\n", configFile);
2546 }
2547
2548 // --- 3. Parse Command-Line Options (overrides file settings and defaults) ---
2549 PetscOptionsGetInt(NULL, NULL, "-startTime", &pps->startTime, &startTimeSet);
2550 PetscOptionsGetInt(NULL, NULL, "-endTime", &pps->endTime, &endTimeSet);
2551 PetscOptionsGetInt(NULL, NULL, "-timeStep", &pps->timeStep, &timeStepSet);
2552 PetscOptionsGetBool(NULL, NULL, "-output_particles", &pps->outputParticles, NULL);
2553
2554 if(pps->endTime==-1){
2555 pps->endTime = simCtx->StartStep + simCtx->StepsToRun; // Total steps if endTime is set to -1.
2556 }
2557
2558 // If only startTime is given on command line, run for a single step
2559 if (startTimeSet && !endTimeSet) {
2560 pps->endTime = pps->startTime;
2561 }
2562
2563 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-processing configured to run from t=%d to t=%d with step %d. Particle output: %s.\n",
2564 pps->startTime, pps->endTime, pps->timeStep, pps->outputParticles ? "TRUE" : "FALSE");
2565
2566 LOG_ALLOW(GLOBAL, LOG_INFO, "Process Pipeline: %s\n", pps->process_pipeline);
2567 LOG_ALLOW(GLOBAL, LOG_INFO, "Instantaneous Output Fields: %s\n", pps->output_fields_instantaneous);
2568 LOG_ALLOW(GLOBAL, LOG_INFO, "Output Prefix: %s\n", pps->output_prefix);
2569 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Prefix: %s\n", pps->particle_output_prefix);
2570 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Fields: %s\n", pps->particle_fields);
2571 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Pipeline: %s\n", pps->particle_pipeline);
2572 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Frequency: %d\n", pps->particle_output_freq);
2573
2575 PetscFunctionReturn(0);
2576}
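/* ------------------------------------------------------------------
 * Example post-processing control file (illustrative values only; the keys
 * are the ones recognized by the parser above, and '#' starts a comment):
 *
 *   # time window
 *   startTime = 0
 *   endTime   = 100
 *   timeStep  = 10
 *
 *   # Eulerian output
 *   output_fields_instantaneous = Ucat,P
 *   output_prefix               = Field
 *
 *   # particle output
 *   output_particles              = true
 *   particle_fields_instantaneous = velocity,CellID,weight,pid
 *   particle_output_freq          = 10
 *
 *   # location of the solver output to post-process
 *   source_directory = results
 * ------------------------------------------------------------------ */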
2577
2578
2579#undef __FUNCT__
2580#define __FUNCT__ "ParseScalingInformation"
2581/**
2582 * @brief Parses physical scaling parameters from command-line options.
2583 *
2584 * This function reads the reference length, velocity, and density from the
2585 * PETSc options database (provided via -scaling_L_ref, etc.). It populates
2586 * the simCtx->scaling struct and calculates the derived reference pressure.
2587 * It sets default values of 1.0 for a fully non-dimensional case if the
2588 * options are not provided.
2589 *
2590 * @param[in,out] simCtx The simulation context whose 'scaling' member will be populated.
2591 * @return PetscErrorCode
2592 */
2593PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
2594{
2595 PetscErrorCode ierr;
2596 PetscBool flg;
2597
2598 PetscFunctionBeginUser;
2600
2601 if (!simCtx) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "SimCtx is NULL in ParseScalingInformation");
2602
2603 // --- 1. Set default values to 1.0 ---
2604 // This represents a purely non-dimensional run if no scaling is provided.
2605 simCtx->scaling.L_ref = 1.0;
2606 simCtx->scaling.U_ref = 1.0;
2607 simCtx->scaling.rho_ref = 1.0;
2608
2609 // --- 2. Read overrides from the command line / control file ---
2610 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_L_ref", &simCtx->scaling.L_ref, &flg); CHKERRQ(ierr);
2611 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_U_ref", &simCtx->scaling.U_ref, &flg); CHKERRQ(ierr);
2612 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_rho_ref", &simCtx->scaling.rho_ref, &flg); CHKERRQ(ierr);
2613
2614 // --- 3. Calculate derived scaling factors ---
2615 // Check for division by zero to be safe, though U_ref should be positive.
2616 if (simCtx->scaling.U_ref <= 0.0) {
2617 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Reference velocity U_ref must be positive. Got %g", (double)simCtx->scaling.U_ref);
2618 }
2619 simCtx->scaling.P_ref = simCtx->scaling.rho_ref * simCtx->scaling.U_ref * simCtx->scaling.U_ref;
2620
2621 // --- 4. Log the final, effective scales for verification ---
2622 LOG(GLOBAL, LOG_INFO, "---------------- Physical Scales Initialized -----------------\n");
2623 LOG(GLOBAL, LOG_INFO, " L_ref: %.4f, U_ref: %.4f, rho_ref: %.4f, P_ref: %.4f\n",
2624 simCtx->scaling.L_ref, simCtx->scaling.U_ref, simCtx->scaling.rho_ref, simCtx->scaling.P_ref);
2625 LOG(GLOBAL, LOG_INFO, "--------------------------------------------------------------\n");
2626
2628 PetscFunctionReturn(0);
2629}
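/*
 * Worked example (illustrative, not part of io.c): if a run provides
 *
 *     -scaling_L_ref 0.1 -scaling_U_ref 2.0 -scaling_rho_ref 1000.0
 *
 * then the derived dynamic-pressure scale computed above is
 *
 *     P_ref = rho_ref * U_ref^2 = 1000.0 * 2.0 * 2.0 = 4000.0
 *
 * so a non-dimensional pressure of 1 corresponds to 4000 in the same unit
 * system as the inputs (e.g., Pa if SI units are used). With no options
 * given, all scales default to 1.0 and the run is purely non-dimensional.
 */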
2630
2631
2632// ---------------------------------------------------------------------
2633 // UTILITY FUNCTIONS NOT USED IN THE MAIN CODE (NO LONGER SUPPORTED; KEPT FOR BACKWARD COMPATIBILITY)
2634// ---------------------------------------------------------------------
2635
2636//=====================================================================
2637// 1) ReadDataFileToArray
2638
2639// See the function-level comments in io.h for a summary.
2640 // This reads one value per line from an ASCII file. Rank 0 performs the I/O
2641 // and broadcasts the data to all ranks.
2642 //=====================================================================
2643PetscInt ReadDataFileToArray(const char *filename,
2644 double **data_out,
2645 PetscInt *Nout,
2646 MPI_Comm comm)
2647{
2648 /* STEP 0: Prepare local variables & log function entry */
2649 PetscMPIInt rank, size;
2650 PetscErrorCode ierr;
2651 FILE *fp = NULL;
2652 PetscInt N = 0; /* number of lines/values read on rank 0 */
2653 double *array = NULL; /* pointer to local array on each rank */
2654 PetscInt fileExistsFlag = 0; /* 0 = doesn't exist, 1 = does exist */
2655
2656 LOG_ALLOW(GLOBAL, LOG_DEBUG,
2657 "Start reading from file: %s\n",
2658 filename);
2659
2660 /* Basic error checking: filename, data_out, and Nout must be non-null. */
2661 if (!filename || !data_out || !Nout) {
2662 LOG_ALLOW(GLOBAL, LOG_ERROR,
2663 "Null pointer argument provided.\n");
2664 return 1;
2665 }
2666
2667 /* Determine rank/size for coordinating I/O. */
2668 MPI_Comm_rank(comm, &rank);
2669 MPI_Comm_size(comm, &size);
2670
2671 /* STEP 1: On rank 0, check if file can be opened. */
2672 if (!rank) {
2673 fp = fopen(filename, "r");
2674 if (fp) {
2675 fileExistsFlag = 1;
2676 fclose(fp);
2677 }
2678 }
2679
2680 /* STEP 2: Broadcast file existence to all ranks. */
2681 // In ReadDataFileToArray:
2682 ierr = MPI_Bcast(&fileExistsFlag, 1, MPIU_INT, 0, comm); CHKERRQ(ierr); /* MPIU_INT matches PetscInt width */
2683
2684 if (!fileExistsFlag) {
2685 /* If file does not exist, log & return. */
2686 if (!rank) {
2687 LOG_ALLOW(GLOBAL, LOG_ERROR,
2688 "File '%s' not found.\n",
2689 filename);
2690 }
2691 return 2;
2692 }
2693
2694 /* STEP 3: Rank 0 re-opens and reads the file, counting lines, etc. */
2695 if (!rank) {
2696 fp = fopen(filename, "r");
2697 if (!fp) {
2698 LOG_ALLOW(GLOBAL, LOG_ERROR,
2699 "File '%s' could not be opened for reading.\n",
2700 filename);
2701 return 3;
2702 }
2703
2704 /* (3a) Count lines first. */
2705 {
2706 char line[256];
2707 while (fgets(line, sizeof(line), fp)) {
2708 N++;
2709 }
2710 }
2711
2712 LOG_ALLOW(GLOBAL, LOG_DEBUG,
2713 "File '%s' has %d lines.\n",
2714 filename, N);
2715
2716 /* (3b) Allocate array on rank 0. */
2717 array = (double*)malloc(N * sizeof(double));
2718 if (!array) {
2719 fclose(fp);
2720 LOG_ALLOW(GLOBAL, LOG_ERROR,
2721 "malloc failed for array.\n");
2722 return 4;
2723 }
2724
2725 /* (3c) Rewind & read values into array. */
2726 rewind(fp);
2727 {
2728 PetscInt i = 0;
2729 char line[256];
2730 while (fgets(line, sizeof(line), fp)) {
2731 double val;
2732 if (sscanf(line, "%lf", &val) == 1) {
2733 array[i++] = val;
2734 }
2735 }
2736 }
2737 fclose(fp);
2738
2739 LOG_ALLOW(GLOBAL, LOG_DEBUG,
2740 "Successfully read %d values from '%s'.\n",
2741 N, filename);
2742 }
2743
2744 /* STEP 4: Broadcast the integer N to all ranks. */
2745 ierr = MPI_Bcast(&N, 1, MPIU_INT, 0, comm); CHKERRQ(ierr); /* MPIU_INT matches PetscInt width */
2746
2747 /* STEP 5: Each rank allocates an array to receive the broadcast if rank>0. */
2748 if (rank) {
2749 array = (double*)malloc(N * sizeof(double));
2750 if (!array) {
2751 LOG_ALLOW(GLOBAL, LOG_ERROR,
2752 "malloc failed on rank %d.\n",
2753 rank);
2754 return 5;
2755 }
2756 }
2757
2758 /* STEP 6: Broadcast the actual data from rank 0 to all. */
2759 ierr = MPI_Bcast(array, N, MPI_DOUBLE, 0, comm); CHKERRQ(ierr);
2760
2761 /* STEP 7: Assign outputs on all ranks. */
2762 *data_out = array;
2763 *Nout = N;
2764
2765 LOG_ALLOW(GLOBAL, LOG_DEBUG,
2766 "Done. Provided array of length=%d to all ranks.\n",
2767 N);
2768 return 0; /* success */
2769}
2770
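/*
 * Usage sketch (illustrative, not part of io.c): calling ReadDataFileToArray
 * on an ASCII file containing one value per line, e.g.
 *
 *     0.0
 *     0.5
 *     1.0
 *
 * The guard macro, file name, and caller below are hypothetical; the caller
 * frees the buffer that ReadDataFileToArray allocates on every rank.
 */
#ifdef PICURV_IO_EXAMPLES
static PetscErrorCode ExampleReadProbeTimes(MPI_Comm comm)
{
    double   *vals  = NULL;  /* allocated inside ReadDataFileToArray */
    PetscInt  nvals = 0;

    PetscFunctionBeginUser;
    if (ReadDataFileToArray("probe_times.dat", &vals, &nvals, comm) != 0) {
        SETERRQ(comm, PETSC_ERR_FILE_OPEN, "Could not read probe_times.dat");
    }
    /* vals[0..nvals-1] now holds the same data on every rank in comm. */
    free(vals);
    PetscFunctionReturn(0);
}
#endif /* PICURV_IO_EXAMPLES */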
2771/**
2772 * @brief Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
2773 *
2774 * This function uses \c ReadFieldData to fill a PETSc Vec with coordinate data,
2775 * then leverages \c VecToArrayOnRank0 to gather that data into a contiguous array
2776 * (valid on rank 0 only).
2777 *
2778 * @param[in] timeIndex The time index used to construct file names.
2779 * @param[in] user Pointer to the user context.
2780 * @param[out] coordsArray On rank 0, will point to a newly allocated array holding the coordinates.
2781 * @param[out] Ncoords On rank 0, the length of \p coordsArray. On other ranks, 0.
2782 *
2783 * @return PetscErrorCode Returns 0 on success, or non-zero on failures.
2784 */
2785PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex,
2786 UserCtx *user,
2787 double **coordsArray,
2788 PetscInt *Ncoords)
2789{
2790 PetscFunctionBeginUser;
2791
2792 PetscErrorCode ierr;
2793 Vec coordsVec;
2794
2795 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating coords Vec.\n");
2796 ierr = VecCreate(PETSC_COMM_WORLD, &coordsVec);CHKERRQ(ierr);
2797 ierr = VecSetFromOptions(coordsVec);CHKERRQ(ierr);
2798
2799 // For example: "position" is the name of the coordinate data
2800 ierr = ReadFieldData(user, "position", coordsVec, timeIndex, "dat");
2801 if (ierr) {
2802 LOG_ALLOW(GLOBAL, LOG_ERROR,
2803 "Error reading position data (ti=%d).\n",
2804 timeIndex);
2805 PetscFunctionReturn(ierr);
2806 }
2807
2808 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositions - Gathering coords Vec to rank 0.\n");
2809 ierr = VecToArrayOnRank0(coordsVec, Ncoords, coordsArray);CHKERRQ(ierr);
2810
2811 ierr = VecDestroy(&coordsVec);CHKERRQ(ierr);
2812
2813 LOG_ALLOW(GLOBAL, LOG_DEBUG,
2814 "Successfully gathered coordinates. Ncoords=%d.\n", *Ncoords);
2815 PetscFunctionReturn(0);
2816}
2817
2818
2819/**
2820 * @brief Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
2821 *
2822 * This function wraps \c ReadFieldData and \c VecToArrayOnRank0 into a single step.
2823 * The gathered data is stored in \p scalarArray on rank 0, with its length in \p Nscalars.
2824 *
2825 * @param[in] timeIndex The time index used to construct file names.
2826 * @param[in] fieldName Name of the field to be read (e.g., "velocity").
2827 * @param[in] user Pointer to the user context.
2828 * @param[out] scalarArray On rank 0, a newly allocated array holding the field data.
2829 * @param[out] Nscalars On rank 0, length of \p scalarArray. On other ranks, 0.
2830 *
2831 * @return PetscErrorCode Returns 0 on success, or non-zero on failures.
2832 */
2833PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex,
2834 const char *fieldName,
2835 UserCtx *user,
2836 double **scalarArray,
2837 PetscInt *Nscalars)
2838{
2839 PetscFunctionBeginUser;
2840
2841 PetscErrorCode ierr;
2842 Vec fieldVec;
2843
2844 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating field Vec.\n");
2845 ierr = VecCreate(PETSC_COMM_WORLD, &fieldVec);CHKERRQ(ierr);
2846 ierr = VecSetFromOptions(fieldVec);CHKERRQ(ierr);
2847
2848 ierr = ReadFieldData(user, fieldName, fieldVec, timeIndex, "dat");
2849 if (ierr) {
2850 LOG_ALLOW(GLOBAL, LOG_ERROR,
2851 "Error reading field '%s' (ti=%d).\n",
2852 fieldName, timeIndex);
2853 PetscFunctionReturn(ierr);
2854 }
2855
2856 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Gathering field Vec to rank 0.\n");
2857 ierr = VecToArrayOnRank0(fieldVec, Nscalars, scalarArray);CHKERRQ(ierr);
2858
2859 ierr = VecDestroy(&fieldVec);CHKERRQ(ierr);
2860
2861 LOG_ALLOW(GLOBAL, LOG_DEBUG,
2862 "Successfully gathered field '%s'. Nscalars=%d.\n",
2863 fieldName, *Nscalars);
2864 PetscFunctionReturn(0);
2865}
2866
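/*
 * Usage sketch (illustrative, not part of io.c): gathering particle positions
 * and one field on rank 0 for serial post-processing. The guard macro, time
 * index, and field name are hypothetical; the gathered arrays are only
 * guaranteed to be valid on rank 0, which also frees them.
 */
#ifdef PICURV_IO_EXAMPLES
static PetscErrorCode ExampleGatherParticlesOnRank0(UserCtx *user, PetscInt ti)
{
    PetscErrorCode ierr;
    PetscMPIInt    rank;
    double        *coords = NULL, *vel = NULL;
    PetscInt       ncoords = 0, nvel = 0;

    PetscFunctionBeginUser;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank);CHKERRQ(ierr);

    ierr = ReadPositionsFromFile(ti, user, &coords, &ncoords);CHKERRQ(ierr);
    ierr = ReadFieldDataToRank0(ti, "velocity", user, &vel, &nvel);CHKERRQ(ierr);

    if (!rank) {
        /* coords (length ncoords) and vel (length nvel) hold the gathered data. */
        free(coords);
        free(vel);
    }
    PetscFunctionReturn(0);
}
#endif /* PICURV_IO_EXAMPLES */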
2867