PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
io.c
1/**
2 * @file io.c
3 * @brief Implementation of data input/output routines, focusing on grid configuration.
4 *
5 * This module provides functions to parse grid geometry information, either from
6 * command-line options for programmatically generated grids or by reading the
7 * header of a grid definition file.
8 */
9
10#include "io.h"
11
12// =============================================================================
13// STATIC (PRIVATE) VARIABLES FOR ONE-TIME FILE READ
14// =============================================================================
15
16/** @brief Stores the number of blocks read from the grid file. */
17static PetscInt g_nblk_from_file = 0;
18/** @brief Caches the IM dimensions for all blocks read from the grid file. */
19static PetscInt* g_IMs_from_file = NULL;
20/** @brief Caches the JM dimensions for all blocks read from the grid file. */
21static PetscInt* g_JMs_from_file = NULL;
22/** @brief Caches the KM dimensions for all blocks read from the grid file. */
23static PetscInt* g_KMs_from_file = NULL;
24/** @brief A flag to ensure the grid file is read only once. */
25static PetscBool g_file_has_been_read = PETSC_FALSE;
26
27
28// =============================================================================
29// PUBLIC FUNCTION IMPLEMENTATIONS
30// =============================================================================
31
32/**
33 * @brief Trims leading and trailing whitespace from a string in-place.
34 * @param str The string to be trimmed.
35 */
36void TrimWhitespace(char *str) {
37 if (!str) return;
38
39 char *start = str;
40 // Find the first non-whitespace character
41 while (isspace((unsigned char)*start)) {
42 start++;
43 }
44
45 // Find the end of the string
46 char *end = str + strlen(str) - 1;
47 // Move backwards from the end to find the last non-whitespace character
48 while (end > start && isspace((unsigned char)*end)) {
49 end--;
50 }
51
52 // Null-terminate after the last non-whitespace character
53 *(end + 1) = '\0';
54
55 // If there was leading whitespace, shift the string to the left
56 if (str != start) {
57 memmove(str, start, (end - start) + 2); // +2 to include the new null terminator
58 }
59}
60
61
62#undef __FUNCT__
63#define __FUNCT__ "ReadGridGenerationInputs"
64/**
65 * @brief Parses command-line options for a programmatically generated grid for a SINGLE block.
66 *
67 * This function is responsible for reading all per-block array options related
68 * to grid geometry, such as dimensions (`-im`), domain bounds (`-xMins`, `-xMaxs`),
69 * and stretching ratios (`-rxs`). It reads the entire array for each option, then
70 * uses the block index stored in `user->_this` to select the correct value and
71 * populate the fields of the provided `UserCtx` struct.
72 *
73 * @param user Pointer to the `UserCtx` for a specific block. The function will
74 * populate the geometric fields (`IM`, `Min_X`, `rx`, etc.) within this struct.
75 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
76 */
77PetscErrorCode ReadGridGenerationInputs(UserCtx *user)
78{
79 PetscErrorCode ierr;
80 SimCtx *simCtx = user->simCtx;
81 PetscInt nblk = simCtx->block_number;
82 PetscInt block_index = user->_this;
83 PetscBool found;
84
85 // Temporary arrays to hold the parsed values for ALL blocks
86 PetscInt *IMs = NULL, *JMs = NULL, *KMs = NULL, *cgrids = NULL;
87 PetscReal *xMins = NULL, *xMaxs = NULL, *rxs = NULL;
88 PetscReal *yMins = NULL, *yMaxs = NULL, *rys = NULL;
89 PetscReal *zMins = NULL, *zMaxs = NULL, *rzs = NULL;
90
91 PetscFunctionBeginUser;
93
94 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Reading generated grid inputs for block %d.\n", simCtx->rank, block_index);
95
96 if (block_index >= nblk) {
97 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block index %d is out of range for nblk=%d", block_index, nblk);
98 }
99
100 // --- Allocate temporary storage for all array options ---
101 ierr = PetscMalloc4(nblk, &IMs, nblk, &JMs, nblk, &KMs, nblk, &cgrids); CHKERRQ(ierr);
102 ierr = PetscMalloc6(nblk, &xMins, nblk, &xMaxs, nblk, &rxs, nblk, &yMins, nblk, &yMaxs, nblk, &rys); CHKERRQ(ierr);
103 ierr = PetscMalloc3(nblk, &zMins, nblk, &zMaxs, nblk, &rzs); CHKERRQ(ierr);
104
105 // --- Set default values for the temporary arrays ---
106 for (PetscInt i = 0; i < nblk; ++i) {
107 IMs[i] = 10; JMs[i] = 10; KMs[i] = 10; cgrids[i] = 0;
108 xMins[i] = 0.0; xMaxs[i] = 1.0; rxs[i] = 1.0;
109 yMins[i] = 0.0; yMaxs[i] = 1.0; rys[i] = 1.0;
110 zMins[i] = 0.0; zMaxs[i] = 1.0; rzs[i] = 1.0;
111 }
112
113 // --- Parse the array options from the command line / control file ---
114 PetscInt count;
115 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-im", IMs, &count, &found); CHKERRQ(ierr);
116 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-jm", JMs, &count, &found); CHKERRQ(ierr);
117 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-km", KMs, &count, &found); CHKERRQ(ierr);
118 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMins", xMins, &count, &found); CHKERRQ(ierr);
119 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-xMaxs", xMaxs, &count, &found); CHKERRQ(ierr);
120 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rxs", rxs, &count, &found); CHKERRQ(ierr);
121 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMins", yMins, &count, &found); CHKERRQ(ierr);
122 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-yMaxs", yMaxs, &count, &found); CHKERRQ(ierr);
123 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rys", rys, &count, &found); CHKERRQ(ierr);
124 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMins", zMins, &count, &found); CHKERRQ(ierr);
125 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-zMaxs", zMaxs, &count, &found); CHKERRQ(ierr);
126 count = nblk; ierr = PetscOptionsGetRealArray(NULL, NULL, "-rzs", rzs, &count, &found); CHKERRQ(ierr);
127 count = nblk; ierr = PetscOptionsGetIntArray(NULL, NULL, "-cgrids", cgrids, &count, &found); CHKERRQ(ierr);
128
129 // --- Assign the parsed values to the specific UserCtx struct passed in ---
130 user->IM = IMs[block_index];
131 user->JM = JMs[block_index];
132 user->KM = KMs[block_index];
133 user->Min_X = xMins[block_index];
134 user->Max_X = xMaxs[block_index];
135 user->rx = rxs[block_index];
136 user->Min_Y = yMins[block_index];
137 user->Max_Y = yMaxs[block_index];
138 user->ry = rys[block_index];
139 user->Min_Z = zMins[block_index];
140 user->Max_Z = zMaxs[block_index];
141 user->rz = rzs[block_index];
142 user->cgrid = cgrids[block_index];
143
144 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d grid generation inputs set: IM=%d, JM=%d, KM=%d\n",
145 simCtx->rank, block_index, user->IM, user->JM, user->KM);
146 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d bounds: X=[%.2f, %.2f], Y=[%.2f, %.2f], Z=[%.2f, %.2f]\n",
147 simCtx->rank, block_index, user->Min_X, user->Max_X, user->Min_Y, user->Max_Y, user->Min_Z, user->Max_Z);
148
149 // --- Clean up temporary storage ---
150 ierr = PetscFree4(IMs, JMs, KMs, cgrids); CHKERRQ(ierr);
151 ierr = PetscFree6(xMins, xMaxs, rxs, yMins, yMaxs, rys); CHKERRQ(ierr);
152 ierr = PetscFree3(zMins, zMaxs, rzs); CHKERRQ(ierr);
153
155 PetscFunctionReturn(0);
156}
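/*
 * Usage sketch (values and executable name are illustrative assumptions, not
 * taken from this file): a two-block programmatically generated grid could be
 * configured on the command line or in a PETSc options file as
 *
 *   mpiexec -n 4 ./picsolver -nblk 2 \
 *       -im 32,64 -jm 32,32 -km 16,16 \
 *       -xMins 0.0,1.0 -xMaxs 1.0,2.0 -rxs 1.0,1.05 \
 *       -yMins 0.0,0.0 -yMaxs 1.0,1.0 -rys 1.0,1.0 \
 *       -zMins 0.0,0.0 -zMaxs 0.5,0.5 -rzs 1.0,1.0 \
 *       -cgrids 0,0
 *
 * Each comma-separated list supplies one entry per block; this function then
 * selects the entry at index user->_this for the block it was called on.
 */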
157
158
159#undef __FUNCT__
160#define __FUNCT__ "ReadGridFile"
161/**
162 * @brief Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
163 *
164 * This function uses a static-variable pattern to ensure the grid file header
165 * is read only once, collectively, by all processes. The first time any process
166 * calls this function, it triggers a collective operation where rank 0 reads the
167 * file and broadcasts the dimensions for all blocks. This data is stored in
168 * static "cached" arrays.
169 *
170 * On every call (including the first), the function retrieves the dimensions for the
171 * specific block (identified by `user->_this`) from the cached arrays and populates
172 * the `IM`, `JM`, and `KM` fields of the provided `UserCtx`.
173 *
174 * @param user Pointer to the `UserCtx` for a specific block.
175 * @return PetscErrorCode 0 on success, or a PETSc error code on failure.
176 */
177PetscErrorCode ReadGridFile(UserCtx *user)
178{
179 PetscErrorCode ierr;
180 SimCtx *simCtx = user->simCtx;
181 PetscInt block_index = user->_this;
182
183 PetscFunctionBeginUser;
185
186 // --- One-Time Read and Broadcast Logic ---
187 if (!g_file_has_been_read) {
188 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "First call to ReadGridFile. Reading and broadcasting grid file header from '%s'...\n", simCtx->grid_file);
189 PetscMPIInt rank = simCtx->rank;
190 PetscInt nblk = simCtx->block_number;
191
192 if (rank == 0) {
193 FILE *fd = fopen(simCtx->grid_file, "r");
194 if (!fd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open file: %s", simCtx->grid_file);
195
197 // ---- Read first token; decide whether it is the header or nblk ----
198 char firstTok[32] = {0};
199 if (fscanf(fd, "%31s", firstTok) != 1)
200 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Empty grid file: %s", simCtx->grid_file);
201
202 if (strcmp(firstTok, "PICGRID") == 0) {
203 // Header is present – read nblk from the next line
204 if (fscanf(fd, "%d", &g_nblk_from_file) != 1)
205 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected number of blocks after \"PICGRID\" in %s", simCtx->grid_file);
206 } else {
207 // No header – the token we just read is actually nblk
208 g_nblk_from_file = (PetscInt)strtol(firstTok, NULL, 10);
209 }
210 if (g_nblk_from_file != nblk) {
211 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Mismatch: -nblk is %d but grid file specifies %d blocks.", nblk, g_nblk_from_file);
212 }
213
214 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
215 for (PetscInt i = 0; i < nblk; ++i) {
216 if (fscanf(fd, "%d %d %d\n", &g_IMs_from_file[i], &g_JMs_from_file[i], &g_KMs_from_file[i]) != 3) {
217 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_READ, "Expected 3 integers for block %d in %s", i, simCtx->grid_file);
218 }
219 }
220 fclose(fd);
221 }
222
223 // Broadcast nblk to verify (optional, good practice)
 224 ierr = MPI_Bcast(&g_nblk_from_file, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
225
226 // Allocate on other ranks before receiving the broadcast
227 if (rank != 0) {
228 ierr = PetscMalloc3(nblk, &g_IMs_from_file, nblk, &g_JMs_from_file, nblk, &g_KMs_from_file); CHKERRQ(ierr);
229 }
230
231 // Broadcast the data arrays
 232 ierr = MPI_Bcast(g_IMs_from_file, nblk, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
 233 ierr = MPI_Bcast(g_JMs_from_file, nblk, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
 234 ierr = MPI_Bcast(g_KMs_from_file, nblk, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
235
236 g_file_has_been_read = PETSC_TRUE;
237 LOG_ALLOW(GLOBAL, LOG_INFO, "Grid file header read and broadcast complete.\n");
238 }
239
240 // --- Per-Block Assignment Logic (runs on every call) ---
241 user->IM = g_IMs_from_file[block_index];
242 user->JM = g_JMs_from_file[block_index];
243 user->KM = g_KMs_from_file[block_index];
244
245 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Set file inputs for Block %d: IM=%d, JM=%d, KM=%d\n",
246 simCtx->rank, block_index, user->IM, user->JM, user->KM);
247
249 PetscFunctionReturn(0);
250}
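/*
 * Example grid file header accepted by the logic above (dimensions are
 * illustrative). The "PICGRID" banner is optional; both layouts below are
 * parsed identically for a two-block case:
 *
 *   With header:        Without header:
 *
 *   PICGRID             2
 *   2                   32 32 32
 *   32 32 32            64 64 64
 *   64 64 64
 *
 * i.e. the number of blocks first, then one "IM JM KM" line per block.
 */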
251
252
253//================================================================================
254//
255// PRIVATE HELPER FUNCTIONS
256//
257//================================================================================
258
259/**
260 * @brief Frees the memory allocated for a linked list of BC_Param structs.
261 * @param head A pointer to the head of the linked list to be freed.
262 */
263void FreeBC_ParamList(BC_Param *head) {
264 BC_Param *current = head;
265 while (current != NULL) {
266 BC_Param *next = current->next;
267 PetscFree(current->key);
268 PetscFree(current->value);
269 PetscFree(current);
270 current = next;
271 }
272}
273
274/**
275 * @brief Converts a string representation of a face to a BCFace enum.
276 * @param str The input string (e.g., "-Xi", "+Zeta"). Case-insensitive.
277 * @param[out] face_out The resulting BCFace enum.
278 * @return 0 on success.
279 */
280PetscErrorCode StringToBCFace(const char* str, BCFace* face_out) {
281 if (strcasecmp(str, "-Xi") == 0) *face_out = BC_FACE_NEG_X;
282 else if (strcasecmp(str, "+Xi") == 0) *face_out = BC_FACE_POS_X;
283 else if (strcasecmp(str, "-Eta") == 0) *face_out = BC_FACE_NEG_Y;
284 else if (strcasecmp(str, "+Eta") == 0) *face_out = BC_FACE_POS_Y;
285 else if (strcasecmp(str, "-Zeta") == 0) *face_out = BC_FACE_NEG_Z;
286 else if (strcasecmp(str, "+Zeta") == 0) *face_out = BC_FACE_POS_Z;
287 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown face specifier: %s", str);
288 return 0;
289}
290
291/**
292 * @brief Converts a string representation of a BC type to a BCType enum.
293 * @param str The input string (e.g., "WALL", "INLET"). Case-insensitive.
294 * @param[out] type_out The resulting BCType enum.
295 * @return 0 on success.
296 */
297PetscErrorCode StringToBCType(const char* str, BCType* type_out) {
298 if (strcasecmp(str, "WALL") == 0) *type_out = WALL;
299 else if (strcasecmp(str, "SYMMETRY") == 0) *type_out = SYMMETRY;
300 else if (strcasecmp(str, "INLET") == 0) *type_out = INLET;
301 else if (strcasecmp(str, "OUTLET") == 0) *type_out = OUTLET;
302 else if (strcasecmp(str, "NOGRAD") == 0) *type_out = NOGRAD;
303 // ... add other BCTypes here ...
304 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Type string: %s", str);
305 return 0;
306}
307
308/**
309 * @brief Converts a string representation of a handler to a BCHandlerType enum.
310 * @param str The input string (e.g., "noslip", "constant_velocity"). Case-insensitive.
311 * @param[out] handler_out The resulting BCHandlerType enum.
312 * @return 0 on success.
313 */
314PetscErrorCode StringToBCHandlerType(const char* str, BCHandlerType* handler_out) {
315 if (strcasecmp(str, "noslip") == 0) *handler_out = BC_HANDLER_WALL_NOSLIP;
316 else if (strcasecmp(str, "constant_velocity") == 0) *handler_out = BC_HANDLER_INLET_CONSTANT_VELOCITY;
317 else if (strcasecmp(str, "conservation") == 0) *handler_out = BC_HANDLER_OUTLET_CONSERVATION;
318 else if (strcasecmp(str, "allcopy") == 0) *handler_out = BC_HANDLER_NOGRAD_COPY_GHOST;
319 else if (strcasecmp(str, "parabolic") == 0) *handler_out = BC_HANDLER_INLET_PARABOLIC;
320 // ... add other BCHandlerTypes here ...
321 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown BC Handler string: %s", str);
322 return 0;
323}
324
325/**
326 * @brief Validates that a specific handler is compatible with a general BC type.
327 * @param type The general BCType.
328 * @param handler The specific BCHandlerType.
329 * @return 0 if compatible, error code otherwise.
330 */
331PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler) {
332 switch (type) {
333 case NOGRAD:
334 if(handler != BC_HANDLER_NOGRAD_COPY_GHOST) return PETSC_ERR_ARG_WRONG;
335 break;
336 case WALL:
337 if (handler != BC_HANDLER_WALL_NOSLIP && handler != BC_HANDLER_WALL_MOVING) return PETSC_ERR_ARG_WRONG;
338 break;
339 case INLET:
340 if (handler != BC_HANDLER_INLET_CONSTANT_VELOCITY && handler != BC_HANDLER_INLET_PARABOLIC) return PETSC_ERR_ARG_WRONG;
341 break;
342 // ... add other validation cases here ...
343 default: break;
344 }
345 return 0; // Combination is valid
346}
347
348/**
349 * @brief Searches a BC_Param linked list for a key and returns its value as a double.
350 * @param params The head of the BC_Param linked list.
351 * @param key The key to search for (case-insensitive).
352 * @param[out] value_out The found value, converted to a PetscReal.
353 * @param[out] found Set to PETSC_TRUE if the key was found, PETSC_FALSE otherwise.
354 * @return 0 on success.
355 */
356PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found) {
357 *found = PETSC_FALSE;
358 *value_out = 0.0;
359 if (!key) return 0; // No key to search for
360
361 BC_Param *current = params;
362 while (current) {
363 if (strcasecmp(current->key, key) == 0) {
364 *value_out = atof(current->value);
365 *found = PETSC_TRUE;
366 return 0; // Found it, we're done
367 }
368 current = current->next;
369 }
370 return 0; // It's not an error to not find the key.
371}
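/*
 * Minimal usage sketch (hypothetical handler code, not part of this file):
 * querying an optional parameter with a fallback default. The key name "vz"
 * is an assumption for illustration only.
 *
 *   PetscReal vz = 0.0;             // default used when the key is absent
 *   PetscBool found = PETSC_FALSE;
 *   ierr = GetBCParamReal(user->boundary_faces[BC_FACE_NEG_Z].params,
 *                         "vz", &vz, &found); CHKERRQ(ierr);
 *   if (!found) LOG_ALLOW(GLOBAL, LOG_DEBUG, "vz not specified; using default %.2f\n", vz);
 *
 * Because a missing key is not an error, handlers can degrade gracefully to
 * sensible defaults.
 */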
372
373//================================================================================
374//
375// PUBLIC PARSING FUNCTION
376//
377//================================================================================
378#undef __FUNCT__
379#define __FUNCT__ "ParseAllBoundaryConditions"
380/**
381 * @brief Parses the boundary conditions file to configure the type, handler, and
382 * any associated parameters for all 6 global faces of the domain.
383 *
384 * This function performs the following steps:
385 * 1. On MPI rank 0, it reads the specified configuration file line-by-line.
386 * 2. It parses each line for `<Face> <Type> <Handler> [param=value]...` format.
387 * 3. It validates the parsed strings and stores the configuration, including a
388 * linked list of parameters, in a temporary array.
389 * 4. It then serializes this configuration and broadcasts it to all other MPI ranks.
390 * 5. All ranks (including rank 0) then deserialize the broadcasted data to populate
391 * their local `user->boundary_faces` array identically.
392 * 6. It also sets legacy fields in UserCtx for compatibility with other modules.
393 *
394 * @param[in,out] user The main UserCtx struct where the final configuration
395 * for all ranks will be stored.
396 * @param[in] bcs_input_filename The path to the boundary conditions configuration file.
397 * @return PetscErrorCode 0 on success, error code on failure.
398 */
399PetscErrorCode ParseAllBoundaryConditions(UserCtx *user, const char *bcs_input_filename)
400{
401 PetscErrorCode ierr;
402 PetscMPIInt rank;
403
404 // Temporary storage for rank 0 to build the configuration before broadcasting.
405 BoundaryFaceConfig configs_rank0[6];
406
407 PetscFunctionBeginUser;
409 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
410
411 if (rank == 0) {
412 FILE *file;
413 char line_buffer[1024];
414
415 // Initialize the temporary config array with safe defaults on rank 0.
416 for (int i = 0; i < 6; i++) {
417 configs_rank0[i].face_id = (BCFace)i;
418 configs_rank0[i].mathematical_type = WALL;
419 configs_rank0[i].handler_type = BC_HANDLER_WALL_NOSLIP;
420 configs_rank0[i].params = NULL;
421 configs_rank0[i].handler = NULL; // Handler object is not created here.
422 }
423
424 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing BC configuration from '%s' on rank 0... \n", bcs_input_filename);
425 file = fopen(bcs_input_filename, "r");
426 if (!file) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open BCs file '%s'.", bcs_input_filename);
427
428 while (fgets(line_buffer, sizeof(line_buffer), file)) {
429 char *current_pos = line_buffer;
430 while (isspace((unsigned char)*current_pos)) current_pos++; // Skip leading whitespace
431 if (*current_pos == '#' || *current_pos == '\0' || *current_pos == '\n' || *current_pos == '\r') continue;
432
433 char *face_str = strtok(current_pos, " \t\n\r");
434 char *type_str = strtok(NULL, " \t\n\r");
435 char *handler_str = strtok(NULL, " \t\n\r");
436
437 if (!face_str || !type_str || !handler_str) {
438 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed line in bcs.dat, skipping: %s \n", line_buffer);
439 continue;
440 }
441
442 BCFace face_enum;
443 BCType type_enum;
444 BCHandlerType handler_enum;
445 const char* handler_name_for_log;
446
447 // --- Convert strings to enums and validate ---
448 ierr = StringToBCFace(face_str, &face_enum); CHKERRQ(ierr);
449 ierr = StringToBCType(type_str, &type_enum); CHKERRQ(ierr);
450 ierr = StringToBCHandlerType(handler_str, &handler_enum); CHKERRQ(ierr);
451 ierr = ValidateBCHandlerForBCType(type_enum, handler_enum);
452 if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Validation failed: Handler '%s' is not valid for Type '%s' on Face '%s'.\n", handler_str, type_str, face_str);
453
454 // Store the core types for the corresponding face
455 configs_rank0[face_enum].mathematical_type = type_enum;
456 configs_rank0[face_enum].handler_type = handler_enum;
457 handler_name_for_log = BCHandlerTypeToString(handler_enum); // Assumes this utility exists
458 LOG_ALLOW(GLOBAL, LOG_DEBUG, " Parsed Face '%s': Type=%s, Handler=%s \n", face_str, type_str, handler_name_for_log);
459
460 // --- Parse optional key=value parameters for this face ---
461 FreeBC_ParamList(configs_rank0[face_enum].params); // Clear any previous (default) params
462 configs_rank0[face_enum].params = NULL;
463 BC_Param **param_next_ptr = &configs_rank0[face_enum].params; // Pointer to the 'next' pointer to build the list
464
465 char* token;
466 while ((token = strtok(NULL, " \t\n\r")) != NULL) {
467 char* equals_ptr = strchr(token, '=');
468 if (!equals_ptr) {
469 LOG_ALLOW(GLOBAL, LOG_WARNING, "Malformed parameter '%s' on face '%s', skipping. \n", token, face_str);
470 continue;
471 }
472
473 *equals_ptr = '\0'; // Temporarily split the string at '=' to separate key and value
474 char* key_str = token;
475 char* value_str = equals_ptr + 1;
476
477 BC_Param *new_param;
478 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
479 ierr = PetscStrallocpy(key_str, &new_param->key); CHKERRQ(ierr);
480 ierr = PetscStrallocpy(value_str, &new_param->value); CHKERRQ(ierr);
481 new_param->next = NULL;
482
483 *param_next_ptr = new_param;
484 param_next_ptr = &new_param->next;
485 LOG_ALLOW(GLOBAL, LOG_TRACE, " - Found param: [%s] = [%s] \n", new_param->key, new_param->value);
486 }
487 }
488 fclose(file);
489 }
490
491 // =========================================================================
492 // BROADCASTING THE CONFIGURATION FROM RANK 0
493 // =========================================================================
494 // This is a critical step to ensure all processes have the same configuration.
495
496 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d broadcasting/receiving BC configuration.\n", rank);
497
498 for (int i = 0; i < 6; i++) {
499 // --- Broadcast simple enums ---
500 if (rank == 0) {
501 user->boundary_faces[i] = configs_rank0[i]; // Rank 0 populates its final struct
502 }
503 ierr = MPI_Bcast(&user->boundary_faces[i].mathematical_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
504 ierr = MPI_Bcast(&user->boundary_faces[i].handler_type, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
505
506 // --- Serialize and Broadcast the parameter linked list ---
507 PetscInt n_params = 0;
508 if (rank == 0) { // On rank 0, count the number of parameters to send
509 for (BC_Param *p = user->boundary_faces[i].params; p; p = p->next) n_params++;
510 }
 511 ierr = MPI_Bcast(&n_params, 1, MPIU_INT, 0, PETSC_COMM_WORLD);CHKERRQ(ierr);
512
513 if (rank != 0) { // Non-root ranks need to receive and build the list
514 FreeBC_ParamList(user->boundary_faces[i].params); // Ensure list is empty before building
515 user->boundary_faces[i].params = NULL;
516 }
517
518 BC_Param **param_next_ptr = &user->boundary_faces[i].params;
519
520 for (int j = 0; j < n_params; j++) {
521 char key_buf[256] = {0}, val_buf[256] = {0};
522 if (rank == 0) {
523 // On rank 0, navigate to the j-th param and copy its data to buffers
524 BC_Param *p = user->boundary_faces[i].params;
525 for (int k = 0; k < j; k++) p = p->next;
526 strncpy(key_buf, p->key, 255);
527 strncpy(val_buf, p->value, 255);
528 }
529
530 ierr = MPI_Bcast(key_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
531 ierr = MPI_Bcast(val_buf, 256, MPI_CHAR, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
532
533 if (rank != 0) {
534 // On non-root ranks, deserialize: create a new node and append it
535 BC_Param *new_param;
536 ierr = PetscMalloc1(1, &new_param); CHKERRQ(ierr);
537 ierr = PetscStrallocpy(key_buf, &new_param->key); CHKERRQ(ierr);
538 ierr = PetscStrallocpy(val_buf, &new_param->value); CHKERRQ(ierr);
539 new_param->next = NULL;
540 *param_next_ptr = new_param;
541 param_next_ptr = &new_param->next;
542 } else {
543 // On rank 0, just advance the pointer for the next iteration
544 param_next_ptr = &((*param_next_ptr)->next);
545 }
546 }
547 user->boundary_faces[i].face_id = (BCFace)i; // Ensure face_id is set on all ranks
548 }
549
550 // --- Set legacy fields for compatibility with particle system ---
551 user->inletFaceDefined = PETSC_FALSE;
552 for (int i=0; i<6; i++) {
553
554 if (user->boundary_faces[i].mathematical_type == INLET && !user->inletFaceDefined) {
555 user->inletFaceDefined = PETSC_TRUE;
556 user->identifiedInletBCFace = (BCFace)i;
557 LOG_ALLOW(GLOBAL, LOG_INFO, "Inlet face for particle initialization identified as Face %d.\n", i);
558 break; // Found the first one, stop looking
559 }
560 }
561
562
563 if (rank == 0) {
564 // Rank 0 can now free the linked lists it created for the temporary storage.
565 // As written, user->boundary_faces was populated directly on rank 0, so no extra free is needed.
566 // for(int i=0; i<6; i++) FreeBC_ParamList(configs_rank0[i].params); // This would be needed if we used configs_rank0 exclusively
567 }
568
570 PetscFunctionReturn(0);
571}
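/*
 * Example bcs.dat accepted by the parser above. The face/type/handler tokens
 * come from the String* converters earlier in this file; the parameter keys
 * (vx, vy, vz) are illustrative assumptions, since the actual keys are defined
 * by the individual handlers:
 *
 *   # Face    Type     Handler             [key=value ...]
 *   -Zeta     INLET    constant_velocity   vx=0.0 vy=0.0 vz=1.0
 *   +Zeta     OUTLET   conservation
 *   -Xi       WALL     noslip
 *   +Xi       WALL     noslip
 *
 * Faces not listed keep the defaults initialized on rank 0: WALL with the
 * noslip handler.
 */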
572
573/**
574 * @brief A parallel-safe helper to verify the existence of a generic file or directory path.
575 *
576 * This function centralizes the logic for checking arbitrary paths. Only Rank 0 performs the
577 * filesystem check, and the result is broadcast to all other processes. This ensures
578 * collective and synchronized decision-making across all ranks. It is intended for
579 * configuration files, source directories, etc., where the path is known completely.
580 *
581 * @param[in] path The full path to the file or directory to check.
582 * @param[in] is_dir PETSC_TRUE if checking for a directory, PETSC_FALSE for a file.
583 * @param[in] is_optional PETSC_TRUE if the path is optional (results in a warning),
584 * PETSC_FALSE if mandatory (results in an error).
585 * @param[in] description A user-friendly description of the path for logging (e.g., "Grid file").
586 * @param[out] exists The result of the check (identical on all ranks).
587 *
588 * @return PetscErrorCode
589 */
590PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
591{
592 PetscErrorCode ierr;
593 PetscMPIInt rank;
594 MPI_Comm comm = PETSC_COMM_WORLD;
595
596 PetscFunctionBeginUser;
597 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
598
599 if (rank == 0) {
600 if (is_dir) {
601 ierr = PetscTestDirectory(path, 'r', exists); CHKERRQ(ierr);
602 } else {
603 ierr = PetscTestFile(path, 'r', exists); CHKERRQ(ierr);
604 }
605
606 if (!(*exists)) {
607 if (is_optional) {
608 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s not found at: %s (using defaults/ignoring).\n", description, path);
609 } else {
610 LOG_ALLOW(GLOBAL, LOG_ERROR, "Mandatory %s not found at: %s\n", description, path);
611 }
612 } else {
613 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found %s: %s\n", description, path);
614 }
615 }
616
617 // Broadcast the result from Rank 0
618 PetscMPIInt exists_int = (rank == 0) ? (PetscMPIInt)(*exists) : 0;
619 ierr = MPI_Bcast(&exists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
620 *exists = (PetscBool)exists_int;
621
622 // Collective error for mandatory files
623 if (!(*exists) && !is_optional) {
624 SETERRQ(comm, PETSC_ERR_FILE_OPEN, "Mandatory %s not found. Rank 0 expected it at '%s'. Check path and permissions.", description, path);
625 }
626
627 PetscFunctionReturn(0);
628}
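/*
 * Usage sketch (hypothetical call site): checking a mandatory grid file before
 * any rank tries to open it. Because the result is broadcast, every rank sees
 * the same value and the subsequent control flow remains collective:
 *
 *   PetscBool exists;
 *   ierr = VerifyPathExistence(simCtx->grid_file, PETSC_FALSE, PETSC_FALSE,
 *                              "Grid file", &exists); CHKERRQ(ierr);
 *   // Reaching this point implies exists == PETSC_TRUE on all ranks,
 *   // since a missing mandatory path raises a collective error above.
 */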
629
630/**
631 * @brief Checks for a data file's existence in a parallel-safe manner.
632 *
633 * This function's signature and behavior have been updated to use the
634 * "Directory Context Pointer" pattern. The high-level calling function (e.g.,
635 * ReadSimulationFields) must set the `simCtx->current_io_directory` context
636 * before this function is called.
637 *
638 * @param[in] user Pointer to the UserCtx structure to access SimCtx.
639 * @param ti The time index of the file.
640 * @param fieldName The name of the field.
641 * @param ext The file extension.
642 * @param fileExists [out] The result, which will be identical on all ranks.
643 * @return PetscErrorCode
644 */
645static PetscErrorCode CheckDataFile(UserCtx *user, PetscInt ti, const char *fieldName, const char *ext, PetscBool *fileExists)
646{
647 PetscErrorCode ierr;
648 PetscMPIInt rank;
649 MPI_Comm comm = PETSC_COMM_WORLD;
650 PetscInt placeholder_int = 0;
651 SimCtx *simCtx = user->simCtx;
652
653 PetscFunctionBeginUser;
654
655 if(!simCtx->current_io_directory) {
656 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "I/O context directory is NULL. Ensure it is set before calling CheckDataFile().");
657 }
658
659 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
660
661 if (rank == 0) {
662 char filename[PETSC_MAX_PATH_LEN];
663 // Use the same standardized, rank-independent filename format
664
 665 ierr = PetscSNPrintf(filename, sizeof(filename), "%s/%s%05"PetscInt_FMT"_%d.%s", simCtx->current_io_directory, fieldName, ti, placeholder_int, ext); CHKERRQ(ierr);
666 ierr = PetscTestFile(filename, 'r', fileExists); CHKERRQ(ierr);
667 if (!(*fileExists)) {
668 LOG_ALLOW(GLOBAL, LOG_WARNING, "(Rank 0) - Optional data file '%s' not found.\n", filename);
669 }
670 }
671
672 // Broadcast the result from Rank 0 to all other ranks.
673 // We cast the PetscBool to a PetscMPIInt for MPI_Bcast.
674 PetscMPIInt fileExists_int = (rank == 0) ? (PetscMPIInt)(*fileExists) : 0;
675 ierr = MPI_Bcast(&fileExists_int, 1, MPI_INT, 0, comm); CHKERRMPI(ierr);
676 *fileExists = (PetscBool)fileExists_int;
677
678 PetscFunctionReturn(0);
679}
680
681/**
682 * @brief Checks for and reads an optional Eulerian field from a file into a Vec.
683 *
684 * This helper function first checks if the corresponding data file exists in a
685 * parallel-safe manner using CheckDataFile().
686 * - If the file does not exist, it logs a warning and returns success (0),
687 * gracefully skipping the field. The target Vec is not modified.
688 * - If the file exists, it calls ReadFieldData() to read the data into the
689 * provided Vec. Any error during this read operation is considered fatal
690 * (e.g., file is corrupt, permissions issue, or size mismatch), as the
691 * presence of the file implies an intent to load it.
692 *
693 * @param[in] user Pointer to the UserCtx structure.
694 * @param[in] field_name Internal name of the field for the filename (e.g., "cs").
695 * @param[in] field_label A user-friendly name for logging (e.g., "Smagorinsky Constant").
696 * @param[in,out] field_vec The PETSc Vec to load the data into.
697 * @param[in] ti Time index for the file name.
698 * @param[in] ext File extension.
699 *
700 * @return PetscErrorCode Returns 0 on success or if the optional file was not found.
701 * Returns a non-zero error code on a fatal read error.
702 */
703static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
704{
705 PetscErrorCode ierr;
706 PetscBool fileExists;
707
708 PetscFunctionBeginUser;
709
710 /* Check if the data file for this optional field exists. */
711 ierr = CheckDataFile(user,ti, field_name, ext, &fileExists); CHKERRQ(ierr);
712
713 if (fileExists) {
714 /* File exists, so we MUST be able to read it. */
715 LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
716 ierr = ReadFieldData(user, field_name, field_vec, ti, ext);
717
718 if (ierr) {
719 /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
720 permissions issue. Other errors could be size mismatch or corruption. */
721 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
722 } else {
723 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
724 }
725 } else {
726 /* File does not exist, which is acceptable for an optional field. */
727 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
728 }
729
730 PetscFunctionReturn(0);
731}
732
733/**
734 * @brief Checks for and reads an optional DMSwarm field from a file.
735 *
736 * This helper function first checks if the corresponding data file exists.
737 * - If the file does not exist, it logs a warning and returns success (0),
738 * gracefully skipping the field.
739 * - If the file exists, it calls ReadSwarmField() to read the data. Any
740 * error during this read operation is considered fatal (e.g., file is
741 * corrupt, permissions issue, or size mismatch), as the presence of the
742 * file implies an intent to load it.
743 *
744 * @param[in] user Pointer to the UserCtx structure.
745 * @param[in] field_name Internal name of the DMSwarm field (e.g., "DMSwarm_CellID").
746 * @param[in] field_label A user-friendly name for logging (e.g., "Cell ID").
747 * @param[in] ti Time index for the file name.
748 * @param[in] ext File extension.
749 *
750 * @return PetscErrorCode Returns 0 on success or if the optional file was not found.
751 * Returns a non-zero error code on a fatal read error.
752 */
753static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
754{
755 PetscErrorCode ierr;
756 PetscBool fileExists;
757
758 PetscFunctionBeginUser;
759
760 /* Check if the data file for this optional field exists. */
761 ierr = CheckDataFile(user,ti, field_name, ext, &fileExists); CHKERRQ(ierr);
762
763 if (fileExists) {
764 /* File exists, so we MUST be able to read it. */
765 LOG_ALLOW(GLOBAL, LOG_DEBUG, "File for %s found, attempting to read...\n", field_label);
766 if(strcasecmp(field_name,"DMSwarm_CellID") == 0 || strcasecmp(field_name,"DMSwarm_pid")== 0 || strcasecmp(field_name,"DMSwarm_location_status")== 0 ) {
767 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading integer swarm field '%s'.\n", field_name);
768 ierr = ReadSwarmIntField(user,field_name,ti,ext);
769 }
770 else{
771 ierr = ReadSwarmField(user, field_name, ti, ext);
772 }
773
774 if (ierr) {
775 /* Any error here is fatal. A PETSC_ERR_FILE_OPEN would mean a race condition or
776 permissions issue. Other errors could be size mismatch or corruption. */
777 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read data for %s from existing file for step %d. The file may be corrupt or have an incorrect size.", field_label, ti);
778 } else {
779 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read %s field for step %d.\n", field_label, ti);
780 }
781 } else {
782 /* File does not exist, which is acceptable for an optional field. */
783 LOG_ALLOW(GLOBAL, LOG_WARNING, "Optional %s file for step %d not found. Skipping.\n", field_label, ti);
784 }
785
786 PetscFunctionReturn(0);
787}
788
789#undef __FUNCT__
790#define __FUNCT__ "ReadFieldData"
791/************************************************************************************************
792* @file io.c
793* @brief Utility for (re-)loading PETSc Vec-based fields written by rank-0 in a previous run
794* – works in both serial and MPI executions.
795*
796* The restart files are always written as **one** sequential Vec per field
797* ( `<path-to-file>/<field_name><step>_0.<ext>` ). In parallel we therefore
798* have to:
799*
800* • let rank-0 read that sequential file into a temporary Vec,
801* • broadcast the raw array to every rank, and
802* • copy the local slice into the distributed Vec supplied by the caller.
803*
804* The function purposely avoids `VecScatterCreateToAll()` because PETSc
805* chooses an even block-cyclic layout that seldom matches an existing
806* application layout – that mismatch is the root cause of the previous
807* “Scatter sizes of ix and iy don’t match” / “incompatible local lengths”
808* crashes. A plain `MPI_Bcast` plus a `memcpy()` into the already
809* allocated local array is bullet-proof and costs one extra buffer only
810* on the non-root ranks.
811*
812* Logging helper used below:
813* LOG_ALLOW(scope,level,fmt,…) – your existing macro
814*
815* @param[in] user – application context (unused here but part of original API)
816* @param[in] field_name – base name of the field (“position”, “ufield”, …)
817* @param[out] field_vec – **distributed** Vec we have to fill
818* @param[in] ti – time-step index encoded in the filename
819* @param[in] ext – file-name extension (“dat”, “bin”, …)
820*
821* @returns 0 on success or a PETSc error code.
822************************************************************************************************/
823PetscErrorCode ReadFieldData(UserCtx *user,
824 const char *field_name,
825 Vec field_vec,
826 PetscInt ti,
827 const char *ext)
828{
829 PetscErrorCode ierr;
830 char filename[PETSC_MAX_PATH_LEN];
831 MPI_Comm comm;
832 PetscMPIInt rank,size;
833 SimCtx *simCtx = user->simCtx;
834
835
836 PetscFunctionBeginUser;
838
839 if(!simCtx->current_io_directory){
840 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "I/O context directory was not set before calling ReadFieldData().");
841 }
842
843
844 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
845 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
846 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
847
848 const char *source_path = NULL;
849 source_path = simCtx->current_io_directory;
850
851 if(!source_path){
852 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "source_path was not set for the current execution mode.");
853 }
854 /* ---------------------------------------------------------------------
855 * Compose <path-to-file>/<field_name><step with 5 digits>_0.<ext>
856 * (all restart files are written by rank-0 with that naming scheme).
857 * ------------------------------------------------------------------ */
858 ierr = PetscSNPrintf(filename,sizeof(filename),
859 "%s/%s%05" PetscInt_FMT "_0.%s",
860 source_path,field_name,ti,ext);CHKERRQ(ierr);
861
863 "Attempting to read <%s> on rank %d/%d\n",
864 filename,(int)rank,(int)size);
865
866 /* ======================================================================
867 * 1. SERIAL JOB – just hand the Vec to VecLoad()
868 * ==================================================================== */
869 if(size==1)
870 {
871 PetscViewer viewer;
872 PetscBool found;
873 Vec temp_vec;
874 PetscInt expectedSize,loadedSize;
875
876 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
877 if(!found) SETERRQ(comm,PETSC_ERR_FILE_OPEN,
878 "Restart/Source file not found: %s",filename);
879
880 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,FILE_MODE_READ,&viewer);CHKERRQ(ierr);
881// ---- START MODIFICATION ----
882 // DO NOT load directly into field_vec, as this can resize it, which is
883 // illegal for DMSwarm "view" vectors. Instead, load into a temporary vector.
884 ierr = VecCreate(PETSC_COMM_SELF, &temp_vec); CHKERRQ(ierr);
885 ierr = VecLoad(temp_vec,viewer);CHKERRQ(ierr);
886 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
887
888 // Sanity check: ensure the file size matches the expected vector size.
889 ierr = VecGetSize(field_vec, &expectedSize);CHKERRQ(ierr);
890 ierr = VecGetSize(temp_vec, &loadedSize);CHKERRQ(ierr);
891 if (loadedSize != expectedSize) {
892 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
893 "File %s holds %d entries – expected %d for field '%s'",
894 filename, loadedSize, expectedSize, field_name);
895 }
896
897 // Now, safely copy the data from the temporary vector to the final destination.
898 ierr = VecCopy(temp_vec, field_vec);CHKERRQ(ierr);
899
900 // Clean up the temporary vector.
901 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
902
903 // ---- END MODIFICATION ----
904
905 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
906 /*
907 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
908 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
909
910 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
911 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
912
913 ierr = VecLoad(field_vec,viewer);CHKERRQ(ierr);
914 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
915 */
917 "Loaded <%s> (serial path)\n",filename);
918
920 PetscFunctionReturn(0);
921 }
922
923 /* ======================================================================
924 * 2. PARALLEL JOB
925 * ==================================================================== */
926 PetscInt globalSize;
927 ierr = VecGetSize(field_vec,&globalSize);CHKERRQ(ierr);
928
929 /* -------------------- rank-0 : read the sequential file -------------- */
930 Vec seq_vec = NULL; /* only valid on rank-0 */
931 const PetscScalar *seqArray = NULL; /* borrowed pointer on rank-0 only */
932
933 if(rank==0)
934 {
935 PetscViewer viewer;
936 PetscBool found;
937
938 ierr = PetscTestFile(filename,'r',&found);CHKERRQ(ierr);
939 if(!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,
940 "Restart file not found: %s",filename);
941
942 /* create EMPTY sequential Vec – VecLoad() will size it correctly */
943 ierr = VecCreate(PETSC_COMM_SELF,&seq_vec);CHKERRQ(ierr);
944 ierr = VecSetType(seq_vec,VECSEQ);CHKERRQ(ierr);
945
946 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
947 FILE_MODE_READ,&viewer);CHKERRQ(ierr);
948 ierr = VecLoad(seq_vec,viewer);CHKERRQ(ierr);
949 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
950
951 /* size sanity-check */
952 PetscInt loaded;
953 ierr = VecGetSize(seq_vec,&loaded);CHKERRQ(ierr);
954 if(loaded != globalSize)
955 SETERRQ(comm,PETSC_ERR_FILE_UNEXPECTED,
956 "File %s holds %d entries – expected %d",
957 filename,loaded,globalSize);
958
959 /* borrow array for later Bcast */
960 ierr = VecGetArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);
961
963 "Rank 0 successfully loaded <%s>\n",filename);
964 }
965
966 /* -------------------- everybody : broadcast raw data ----------------- */
967 PetscScalar *buffer = NULL; /* receives the full field */
968 if(rank==0)
969 {
970 /* shallow-copy: const-cast is safe, we do not modify the data */
971 buffer = (PetscScalar *)seqArray;
972 }
973 else
974 { /* non-root ranks allocate a receive buffer */
975 ierr = PetscMalloc1(globalSize,&buffer);CHKERRQ(ierr);
976 }
977
978 ierr = MPI_Bcast(buffer, (int)globalSize, MPIU_SCALAR, 0, comm);CHKERRQ(ierr);
979
980 /* -------------------- copy my slice into field_vec ------------------- */
981 PetscInt rstart,rend,loc;
982 PetscScalar *locArray;
983
984 ierr = VecGetOwnershipRange(field_vec,&rstart,&rend);CHKERRQ(ierr);
985 loc = rend - rstart; /* local length */
986
987 ierr = VecGetArray(field_vec,&locArray);CHKERRQ(ierr);
988 ierr = PetscMemcpy(locArray,
989 buffer + rstart,
990 loc*sizeof(PetscScalar));CHKERRQ(ierr);
991 ierr = VecRestoreArray(field_vec,&locArray);CHKERRQ(ierr);
992
993 /* -------------------- tidy up ---------------------------------------- */
994 if(rank==0)
995 {
996 ierr = VecRestoreArrayRead(seq_vec,&seqArray);CHKERRQ(ierr);
997 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
998 }
999 else
1000 {
1001 ierr = PetscFree(buffer);CHKERRQ(ierr);
1002 }
1003
1005 "Loaded <%s> (parallel path)\n",filename);
1006
1008 PetscFunctionReturn(0);
1009}
1010
1011
1012/**
1013 * @brief Reads simulation fields from files into their respective PETSc vectors.
1014 *
1015 * This function reads contravariant velocity, Cartesian velocity, pressure, and node state
1016 * fields from their respective binary files. It also conditionally reads LES, RANS, and
1017 * statistical fields if they are enabled.
1018 *
1019 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1020 * @param[in] ti Time index for constructing the file name.
1021 *
1022 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1023 */
1024PetscErrorCode ReadSimulationFields(UserCtx *user,PetscInt ti)
1025{
1026 PetscErrorCode ierr;
1027
1028 SimCtx *simCtx = user->simCtx;
1029 const char *source_path = NULL;
1030
1031 if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
1032 source_path = simCtx->pps->source_dir;
1033 } else if(simCtx->exec_mode == EXEC_MODE_SOLVER){
1034 source_path = simCtx->restart_dir;
1035 } else{
1036 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1037 }
1038
1039 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read simulation fields.\n");
1040
1041 // Set the current I/O directory context
1042 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer), "%s/%s",source_path,simCtx->euler_subdir); CHKERRQ(ierr);
1043 simCtx->current_io_directory = simCtx->_io_context_buffer;
1044
1045 // Read Cartesian velocity field
1046 ierr = ReadFieldData(user, "ufield", user->Ucat, ti, "dat"); CHKERRQ(ierr);
1047
1048 // Read contravariant velocity field
1049 ierr = ReadFieldData(user, "vfield", user->Ucont, ti, "dat"); CHKERRQ(ierr);
1050
1051 // Read pressure field
1052 ierr = ReadFieldData(user, "pfield", user->P, ti, "dat"); CHKERRQ(ierr);
1053
1054 // Read node state field (nvert)
1055 ierr = ReadFieldData(user, "nvfield", user->Nvert, ti, "dat"); CHKERRQ(ierr);
1056
1057 LOG_ALLOW(GLOBAL,LOG_INFO,"Successfully read all mandatory fields. \n");
1058
1059 if(simCtx->np>0){
1060 // Read Particle Count field
1061 if(!user->ParticleCount){
1062 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ParticleCount Vec is NULL but np>0");
1063 }
1064 ierr = ReadOptionalField(user, "ParticleCount", "Particle Count", user->ParticleCount, ti, "dat"); CHKERRQ(ierr);
1065
1066 ierr = ReadOptionalField(user, "psifield", "Scalar Psi Field", user->Psi, ti, "dat"); CHKERRQ(ierr);
1067 }
1068 else{
1069 LOG_ALLOW(GLOBAL, LOG_INFO, "No particles in simulation, skipping Particle fields reading.\n");
1070 }
1071 // Process LES fields if enabled
1072 if (simCtx->les) {
1073 ierr = ReadLESFields(user,ti); CHKERRQ(ierr);
1074 }
1075
1076 // Process RANS fields if enabled
1077 if (simCtx->rans) {
1078 ierr = ReadRANSFields(user,ti); CHKERRQ(ierr);
1079 }
1080
1081 // Process statistical fields if averaging is enabled
1082 if (simCtx->averaging) {
1083 ierr = ReadStatisticalFields(user,ti); CHKERRQ(ierr);
1084 }
1085
1086 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1087
1088 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading simulation fields.\n");
1089
1090 return 0;
1091}
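/*
 * Expected on-disk layout (a sketch; the actual directory names come from
 * simCtx->restart_dir and simCtx->euler_subdir at runtime). For ti = 6 the
 * mandatory reads above resolve to files of the form
 *
 *   <restart_dir>/<euler_subdir>/ufield00006_0.dat
 *   <restart_dir>/<euler_subdir>/vfield00006_0.dat
 *   <restart_dir>/<euler_subdir>/pfield00006_0.dat
 *   <restart_dir>/<euler_subdir>/nvfield00006_0.dat
 *
 * following the "<field_name><5-digit step>_0.<ext>" convention documented in
 * ReadFieldData().
 */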
1092
1093/**
1094 * @brief Reads statistical fields for averaging purposes.
1095 *
1096 * This function reads data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum,
1097 * and P_sum, used for statistical analysis during simulation.
1098 *
1099 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1100 * @param[in] ti Time index for constructing the file name.
1101 *
1102 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1103 */
1104PetscErrorCode ReadStatisticalFields(UserCtx *user,PetscInt ti)
1105{
1106 PetscErrorCode ierr;
1107
1108 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read statistical fields.\n");
1109
1110 ierr = ReadOptionalField(user, "su0", "Velocity Sum", user->Ucat_sum, ti, "dat"); CHKERRQ(ierr);
1111 ierr = ReadOptionalField(user, "su1", "Velocity Cross Sum", user->Ucat_cross_sum, ti, "dat"); CHKERRQ(ierr);
1112 ierr = ReadOptionalField(user, "su2", "Velocity Square Sum",user->Ucat_square_sum, ti, "dat"); CHKERRQ(ierr);
1113 ierr = ReadOptionalField(user, "sp", "Pressure Sum", user->P_sum, ti, "dat"); CHKERRQ(ierr);
1114
1115 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading statistical fields.\n");
1116
1117 return 0;
1118}
1119
1120/**
1121 * @brief Reads LES-related fields.
1122 *
1123 * This function reads LES-related fields such as Cs (Smagorinsky constant)
1124 * into their respective PETSc vectors.
1125 *
1126 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1127 * @param[in] ti Time index for constructing the file name.
1128 *
1129 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1130 */
1131PetscErrorCode ReadLESFields(UserCtx *user,PetscInt ti)
1132{
1133 PetscErrorCode ierr;
1134
1135 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read LES fields.\n");
1136
1137 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, "dat"); CHKERRQ(ierr);
1138 ierr = ReadOptionalField(user, "cs", "Smagorinsky Constant (Cs)", user->CS, ti, "dat"); CHKERRQ(ierr);
1139
 1140 ierr = DMGlobalToLocalBegin(user->da, user->CS, INSERT_VALUES, user->lCs); CHKERRQ(ierr);
 1141 ierr = DMGlobalToLocalEnd(user->da, user->CS, INSERT_VALUES, user->lCs); CHKERRQ(ierr);
 1142
 1143 ierr = DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t); CHKERRQ(ierr);
 1144 ierr = DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t); CHKERRQ(ierr);
1145
1146 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading LES fields.\n");
1147
1148 return 0;
1149}
1150
1151/**
1152 * @brief Reads RANS-related fields.
1153 *
1154 * This function reads RANS-related fields such as K_Omega into their respective
1155 * PETSc vectors.
1156 *
1157 * @param[in,out] user Pointer to the UserCtx structure containing simulation context.
1158 * @param[in] ti Time index for constructing the file name.
1159 *
1160 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1161 */
1162PetscErrorCode ReadRANSFields(UserCtx *user,PetscInt ti)
1163{
1164 PetscErrorCode ierr;
1165
1166 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to read RANS fields.\n");
1167
1168 ierr = ReadOptionalField(user, "kfield", "K-Omega RANS", user->K_Omega, ti, "dat"); CHKERRQ(ierr);
1169 ierr = ReadOptionalField(user, "Nu_t", "Turbulent Viscosity", user->Nu_t, ti, "dat"); CHKERRQ(ierr);
1170
 1171 ierr = VecCopy(user->K_Omega, user->K_Omega_o); CHKERRQ(ierr);
 1172
 1173 ierr = DMGlobalToLocalBegin(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega); CHKERRQ(ierr);
 1174 ierr = DMGlobalToLocalEnd(user->fda2, user->K_Omega, INSERT_VALUES, user->lK_Omega); CHKERRQ(ierr);
 1175
 1176 ierr = DMGlobalToLocalBegin(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t); CHKERRQ(ierr);
 1177 ierr = DMGlobalToLocalEnd(user->da, user->Nu_t, INSERT_VALUES, user->lNu_t); CHKERRQ(ierr);
 1178
 1179 ierr = DMGlobalToLocalBegin(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o); CHKERRQ(ierr);
 1180 ierr = DMGlobalToLocalEnd(user->fda2, user->K_Omega_o, INSERT_VALUES, user->lK_Omega_o); CHKERRQ(ierr);
1181
1182 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading RANS fields.\n");
1183
1184 return 0;
1185}
1186
1187
1188/**
1189 * @brief Reads data from a file into a specified field of a PETSc DMSwarm.
1190 *
1191 * This function is the counterpart to WriteSwarmField(). It creates a global PETSc vector
1192 * that references the specified DMSwarm field, uses ReadFieldData() to read the data from
1193 * a file, and then destroys the global vector reference.
1194 *
1195 * @param[in] user Pointer to the UserCtx structure (containing `user->swarm`).
1196 * @param[in] field_name Name of the DMSwarm field to read into (must be previously declared/allocated).
1197 * @param[in] ti Time index used to construct the input file name.
1198 * @param[in] ext File extension (e.g., "dat" or "bin").
1199 *
1200 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1201 *
1202 * @note Compatible with PETSc 3.14.x.
1203 */
1204PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1205{
1206 PetscErrorCode ierr;
1207 DM swarm;
1208 Vec fieldVec;
1209
1210 PetscFunctionBegin;
1211
1212 swarm = user->swarm;
1213
1214 LOG_ALLOW(GLOBAL,LOG_DEBUG," ReadSwarmField Begins \n");
1215
1216 /* 2) Create a global vector that references the specified Swarm field. */
1217 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1218
1219 LOG_ALLOW(GLOBAL,LOG_DEBUG," Vector created from Field \n");
1220
1221 /* 3) Use the ReadFieldData() function to read data into fieldVec. */
1222 ierr = ReadFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1223
1224 /* 4) Destroy the global vector reference. */
1225 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1226
1227 PetscFunctionReturn(0);
1228}
1229
1230/**
1231 * @brief Reads integer swarm data by using ReadFieldData and casting the result.
1232 *
1233 * This function is the counterpart to WriteSwarmIntField. It reads a file
1234 * containing floating-point data (that was originally integer) into a temporary
1235 * Vec and then casts it back to the integer swarm field. It works by:
1236 * 1. Creating a temporary parallel Vec.
1237 * 2. Calling the standard ReadFieldData() to populate this Vec.
1238 * 3. Accessing the local data of both the Vec and the swarm field.
1239 * 4. Populating the swarm's integer field by casting each PetscScalar back to a PetscInt.
1240 * 5. Destroying the temporary Vec.
1241 *
1242 * @param[in] user Pointer to the UserCtx structure.
1243 * @param[in] field_name Name of the integer Swarm field to be read.
1244 * @param[in] ti Time index for the input file.
1245 * @param[in] ext File extension.
1246 *
1247 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1248 */
1249PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1250{
1251 PetscErrorCode ierr;
1252 DM swarm = user->swarm;
1253 Vec temp_vec;
1254 PetscInt nlocal, nglobal, bs, i;
1255 const PetscScalar *scalar_array; // Read-only pointer from the temp Vec
1256 void *field_array_void;
1257
1258
1259 PetscFunctionBeginUser;
1260
1261 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading '%s' via temporary Vec.\n", field_name);
1262
1263 // Get the properties of the swarm field to determine the expected layout
1264 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1265 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1266 // We get the block size but not the data pointer yet
1267 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1268 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, NULL); CHKERRQ(ierr);
1269
1270 // Create a temporary Vec with the CORRECT layout to receive the data
1271 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1272 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1273 ierr = VecSetSizes(temp_vec, nlocal * bs, nglobal * bs); CHKERRQ(ierr);
1274 ierr = VecSetBlockSize(temp_vec, bs); CHKERRQ(ierr);
1275 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1276
1277 // Call your existing reader to populate the temporary Vec
1278 ierr = ReadFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1279
1280 // Get local pointers
1281 ierr = VecGetArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1282 ierr = DMSwarmGetField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1283
1284 // Perform the cast back, using the correct loop size (nlocal * bs)
1285 if (strcmp(field_name, "DMSwarm_pid") == 0) {
1286 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1287 for (i = 0; i < nlocal * bs; i++) {
1288 int64_array[i] = (PetscInt64)scalar_array[i];
1289 }
1290 } else {
1291 PetscInt *int_array = (PetscInt *)field_array_void;
1292 for (i = 0; i < nlocal * bs; i++) {
1293 int_array[i] = (PetscInt)scalar_array[i];
1294 }
1295 }
1296
1297 // Restore access
1298 ierr = DMSwarmRestoreField(swarm, field_name, NULL, NULL, &field_array_void); CHKERRQ(ierr);
1299 ierr = VecRestoreArrayRead(temp_vec, &scalar_array); CHKERRQ(ierr);
1300
1301 // 6. Clean up
1302 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1303
1304 PetscFunctionReturn(0);
1305}
1306
1307/**
1308 * @brief Reads multiple fields into a DMSwarm, gracefully handling missing optional files.
1309 *
1310 * This function reads all necessary and optional fields for a DMSwarm for a given
1311 * timestep. It assumes the swarm has already been resized to match the particle
1312 * count in the input files.
1313 *
1314 * The 'position' field is considered MANDATORY. If its file is missing or corrupt,
1315 * the function will return a fatal error.
1316 *
1317 * All other fields (velocity, CellID, weight, etc.) are OPTIONAL. If their
1318 * corresponding files are not found, a warning is logged, and the function
1319 * continues without error. If an optional file IS found but is corrupt or has a
1320 * size mismatch, it is treated as a fatal error.
1321 *
1322 * @param[in,out] user Pointer to the UserCtx structure containing the DMSwarm (user->swarm).
1323 * @param[in] ti Time index for constructing the file names.
1324 *
1325 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1326 */
1327PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
1328{
1329 PetscErrorCode ierr;
1330 PetscInt nGlobal;
1331 SimCtx *simCtx = user->simCtx;
1332 const char *source_path = NULL;
1333
1334 PetscFunctionBeginUser;
1335 ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
1336 LOG_ALLOW(GLOBAL, LOG_INFO, "Reading DMSwarm fields for timestep %d (swarm size is %d).\n", ti, nGlobal);
1337
1338 if (nGlobal == 0) {
1339 LOG_ALLOW(GLOBAL, LOG_INFO, "Swarm is empty for timestep %d. Nothing to read.\n", ti);
1340 PetscFunctionReturn(0);
1341 }
1342
1343 // First, determine the top-level source directory based on the execution mode.
1344 if (simCtx->exec_mode == EXEC_MODE_SOLVER) {
1345 source_path = simCtx->restart_dir;
1346 } else if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
1347 source_path = simCtx->pps->source_dir;
1348 } else {
1349 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "Invalid execution mode for reading simulation fields.");
1350 }
1351
1352 // Set the current I/O directory context
1353 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1354 "%s/%s", source_path, simCtx->particle_subdir); CHKERRQ(ierr);
1355
1356 simCtx->current_io_directory = simCtx->_io_context_buffer;
1357
1358 /* 1) Read positions (REQUIRED) */
1359 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Reading mandatory position field...\n");
1360 ierr = ReadSwarmField(user, "position", ti, "dat");
1361 if (ierr) {
1362 SETERRQ(PETSC_COMM_WORLD, ierr, "Failed to read MANDATORY 'position' field for step %d. Cannot continue.", ti);
1363 }
1364 LOG_ALLOW(GLOBAL, LOG_INFO, "Successfully read mandatory position field for step %d.\n", ti);
1365
1366 /* 2) Read all OPTIONAL fields using the helper function. */
1367 /* The helper will print a warning and continue if a file is not found. */
1368 ierr = ReadOptionalSwarmField(user, "velocity", "Velocity", ti, "dat"); CHKERRQ(ierr);
1369 ierr = ReadOptionalSwarmField(user, "DMSwarm_pid", "Particle ID", ti, "dat"); CHKERRQ(ierr);
1370 ierr = ReadOptionalSwarmField(user, "DMSwarm_CellID", "Cell ID", ti, "dat"); CHKERRQ(ierr);
1371 ierr = ReadOptionalSwarmField(user, "weight", "Particle Weight", ti, "dat"); CHKERRQ(ierr);
1372 ierr = ReadOptionalSwarmField(user, "Psi", "Scalar Psi", ti, "dat"); CHKERRQ(ierr);
1373 ierr = ReadOptionalSwarmField(user, "DMSwarm_location_status", "Migration Status", ti, "dat"); CHKERRQ(ierr);
1374
1375 simCtx->current_io_directory = NULL; // Clear the I/O context after reading
1376
1377 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished reading DMSwarm fields for timestep %d.\n", ti);
1378 PetscFunctionReturn(0);
1379}
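/* [Editor's note] A minimal usage sketch, not part of the original io.c: restoring
 * particle state for a restart, assuming the swarm has already been resized to the
 * particle count recorded in the input files. The helper name is illustrative. */
static PetscErrorCode ExampleRestartParticles(UserCtx *user, PetscInt ti)
{
    PetscErrorCode ierr;
    PetscFunctionBeginUser;
    /* Mandatory 'position' plus any optional fields present on disk. */
    ierr = ReadAllSwarmFields(user, ti); CHKERRQ(ierr);
    PetscFunctionReturn(0);
}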
1380
1381
1382#undef __FUNCT__
1383#define __FUNCT__ "WriteFieldData"
1402/**
1403 * @brief Dump a **distributed** PETSc Vec to the *single* sequential file
1404 * format used by our restart / post-processing tools.
1405 *
1406 * The companion of ReadFieldData(): it always produces **one** file
1407 * (e.g. `results/ufield00006_0.dat`) regardless of how many MPI ranks
1408 * are running.
1409 *
1410 * **Behaviour**
1411 *
1412 * | # MPI ranks | Strategy |
1413 * |-------------|---------------------------------------------------------------------------|
1414 * | 1 | Direct `VecView()` into the file. |
1415 * | > 1 | `VecScatterCreateToZero()` gathers the distributed Vec onto **rank 0**. |
1416 * | | Rank 0 writes the sequential Vec; all other ranks allocate no storage. |
1417 *
1418 * The routine never alters or destroys the parallel Vec passed in; the
1419 * gather buffer is created and freed internally.
1420 *
1421 * @param[in] user Simulation context (used only for logging).
1422 * @param[in] field_name Logical field name (forms part of the filename).
1423 * @param[in] field_vec Distributed PETSc Vec to write.
1424 * @param[in] ti Timestep index used in the filename.
1425 * @param[in] ext File extension (`"dat"` in our workflow).
1426 *
1427 * @return `0` on success or a PETSc error code.
1428 */
1429PetscErrorCode WriteFieldData(UserCtx *user,
1430 const char *field_name,
1431 Vec field_vec,
1432 PetscInt ti,
1433 const char *ext)
1434{
1435 PetscErrorCode ierr;
1436 MPI_Comm comm;
1437 PetscMPIInt rank, size;
1438
1439 const PetscInt placeholder_int = 0; /* keep legacy name */
1440 char filename[PETSC_MAX_PATH_LEN];
1441 SimCtx *simCtx=user->simCtx;
1442
1443 PetscFunctionBeginUser;
1445
1446 if(!simCtx->output_dir){
1447 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Output directory was not set before calling WriteFieldData().");
1448 }
1449
1450 /* ------------------------------------------------------------ */
1451 /* Basic communicator information */
1452 /* ------------------------------------------------------------ */
1453 ierr = PetscObjectGetComm((PetscObject)field_vec,&comm);CHKERRQ(ierr);
1454 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1455 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1456
1457 ierr = PetscSNPrintf(filename,sizeof(filename),
1458 "%s/%s%05" PetscInt_FMT "_%d.%s",
1459 simCtx->current_io_directory,field_name,ti,placeholder_int,ext);CHKERRQ(ierr);
1460
1461 LOG_ALLOW(GLOBAL,LOG_DEBUG,
1462 " Preparing to write <%s> on rank %d/%d\n",
1463 filename,rank,size);
1464
1465 /* ------------------------------------------------------------ */
1466 /* 1. Serial path */
1467 /* ------------------------------------------------------------ */
1468 if (size == 1) {
1469 PetscViewer viewer;
1470
1471 ierr = PetscViewerBinaryOpen(comm,filename,
1472 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1473 ierr = VecView(field_vec,viewer);CHKERRQ(ierr);
1474 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1475
1476 LOG_ALLOW(GLOBAL,LOG_INFO,
1477 " Wrote <%s> (serial path)\n",filename);
1478 PetscFunctionReturn(0);
1479 }
1480
1481 /* ------------------------------------------------------------ */
1482 /* 2. Parallel path */
1483 /* ------------------------------------------------------------ */
1484 VecScatter scatter;
1485 Vec seq_vec=NULL; /* created by PETSc, lives only on rank 0 */
1486
1487 /* 2.1 Create gather context and buffer */
1488 ierr = VecScatterCreateToZero(field_vec,&scatter,&seq_vec);CHKERRQ(ierr);
1489
1490 /* 2.2 Gather distributed → sequential (on rank 0) */
1491 ierr = VecScatterBegin(scatter,field_vec,seq_vec,
1492 INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1493 ierr = VecScatterEnd (scatter,field_vec,seq_vec,
1494 INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1495
1496 /* 2.3 Rank 0 writes the file */
1497 if (rank == 0) {
1498 PetscViewer viewer;
1499
1500 /* (optional) value diagnostics */
1501 PetscReal vmin,vmax;
1502 ierr = VecMin(seq_vec,NULL,&vmin);CHKERRQ(ierr);
1503 ierr = VecMax(seq_vec,NULL,&vmax);CHKERRQ(ierr);
1504 LOG_ALLOW(GLOBAL,LOG_DEBUG,
1505 " <%s> range = [%.4e … %.4e]\n",
1506 field_name,(double)vmin,(double)vmax);
1507
1508 ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,filename,
1509 FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
1510 ierr = VecView(seq_vec,viewer);CHKERRQ(ierr);
1511 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
1512
1513 LOG_ALLOW(GLOBAL,LOG_INFO,
1514 " Wrote <%s> (parallel path)\n",filename);
1515 }
1516
1517 /* 2.4 Cleanup */
1518 ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
1519 ierr = VecDestroy(&seq_vec);CHKERRQ(ierr);
1520
1522 PetscFunctionReturn(0);
1523}
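/* [Editor's note] Worked example of the filename composition above: with
 * current_io_directory = "results", field_name = "ufield", ti = 6 and ext = "dat",
 * the format string "%s/%s%05<PetscInt_FMT>_%d.%s" yields "results/ufield00006_0.dat",
 * matching the single-file layout that ReadFieldData() expects. */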
1524
1525/**
1526 * @brief Writes simulation fields to files.
1527 *
1528 * This function writes contravariant velocity, Cartesian velocity, pressure, and node state
1529 * fields to their respective binary files. It also conditionally writes LES, RANS, and
1530 * statistical fields if they are enabled.
1531 *
1532 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1533 *
1534 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1535 */
1536PetscErrorCode WriteSimulationFields(UserCtx *user)
1537{
1538 PetscErrorCode ierr;
1539
1540 SimCtx *simCtx = user->simCtx;
1541
1542 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write simulation fields.\n");
1543
1544 // Set the current IO directory
1545 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1546 "%s/%s", simCtx->output_dir, simCtx->euler_subdir);CHKERRQ(ierr);
1547 simCtx->current_io_directory = simCtx->_io_context_buffer;
1548
1549 // Write contravariant velocity field
1550 ierr = WriteFieldData(user, "vfield", user->Ucont, simCtx->step, "dat"); CHKERRQ(ierr);
1551
1552 // Write Cartesian velocity field
1553 ierr = WriteFieldData(user, "ufield", user->Ucat, simCtx->step, "dat"); CHKERRQ(ierr);
1554
1555 // Write pressure field
1556 ierr = WriteFieldData(user, "pfield", user->P, simCtx->step, "dat"); CHKERRQ(ierr);
1557
1558 // Write node state field (nvert)
1559 ierr = WriteFieldData(user, "nvfield", user->Nvert, simCtx->step, "dat"); CHKERRQ(ierr);
1560
1561 // Write the particle count and Psi fields if particles are present.
1562 if(simCtx->np>0){
1563 ierr = WriteFieldData(user, "ParticleCount",user->ParticleCount,simCtx->step,"dat"); CHKERRQ(ierr);
1564 ierr = WriteFieldData(user, "psifield", user->Psi, simCtx->step, "dat"); CHKERRQ(ierr);
1565 }
1566
1567 // Write LES fields if enabled
1568 if (simCtx->les) {
1569 ierr = WriteLESFields(user); CHKERRQ(ierr);
1570 }
1571
1572 // Write RANS fields if enabled
1573 if (simCtx->rans) {
1574 ierr = WriteRANSFields(user); CHKERRQ(ierr);
1575 }
1576
1577 // Write statistical fields if averaging is enabled
1578 if (simCtx->averaging) {
1579 ierr = WriteStatisticalFields(user); CHKERRQ(ierr);
1580 }
1581
1582 simCtx->current_io_directory = NULL;
1583
1584 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing simulation fields.\n");
1585
1586 return 0;
1587}
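/* [Editor's note] Usage sketch (editorial, illustrative helper name): a typical
 * end-of-step output sequence pairing the Eulerian writer above with the particle
 * writer defined later in this file. */
static PetscErrorCode ExampleEndOfStepOutput(UserCtx *user)
{
    PetscErrorCode ierr;
    PetscFunctionBeginUser;
    ierr = WriteSimulationFields(user); CHKERRQ(ierr); /* Eulerian fields   */
    ierr = WriteAllSwarmFields(user);   CHKERRQ(ierr); /* Lagrangian fields */
    PetscFunctionReturn(0);
}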
1588
1589/**
1590 * @brief Writes statistical fields for averaging purposes.
1591 *
1592 * This function writes data for fields such as Ucat_sum, Ucat_cross_sum, Ucat_square_sum,
1593 * and P_sum to their respective binary files.
1594 *
1595 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1596 *
1597 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1598 */
1599PetscErrorCode WriteStatisticalFields(UserCtx *user)
1600{
1601 PetscErrorCode ierr;
1602
1603 SimCtx *simCtx = user->simCtx;
1604
1605 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write statistical fields.\n");
1606
1607 ierr = WriteFieldData(user, "su0", user->Ucat_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1608 ierr = WriteFieldData(user, "su1", user->Ucat_cross_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1609 ierr = WriteFieldData(user, "su2", user->Ucat_square_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1610 ierr = WriteFieldData(user, "sp", user->P_sum, simCtx->step, "dat"); CHKERRQ(ierr);
1611
1612 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing statistical fields.\n");
1613
1614 return 0;
1615}
1616
1617/**
1618 * @brief Writes LES-related fields.
1619 *
1620 * This function writes LES-related fields such as Cs (Smagorinsky constant)
1621 * to their respective binary files.
1622 *
1623 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1624 *
1625 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1626 */
1627PetscErrorCode WriteLESFields(UserCtx *user)
1628{
1629 PetscErrorCode ierr;
1630
1631 SimCtx *simCtx = user->simCtx;
1632
1633 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write LES fields.\n");
1634
1635
1636 ierr = DMLocalToGlobalBegin(user->da, user->lCs, INSERT_VALUES, user->CS); CHKERRQ(ierr);
1637 ierr = DMLocalToGlobalEnd(user->da, user->lCs, INSERT_VALUES, user->CS); CHKERRQ(ierr);
1638
1639 ierr = DMLocalToGlobalBegin(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t); CHKERRQ(ierr);
1640 ierr = DMLocalToGlobalEnd(user->da, user->lNu_t, INSERT_VALUES, user->Nu_t); CHKERRQ(ierr);
1641
1642 ierr = WriteFieldData(user, "Nu_t", user->Nu_t, simCtx->step, "dat"); CHKERRQ(ierr);
1643 ierr = WriteFieldData(user, "cs", user->CS, simCtx->step, "dat"); CHKERRQ(ierr);
1644
1645
1646 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing LES fields.\n");
1647
1648 return 0;
1649}
1650
1651/**
1652 * @brief Writes RANS-related fields.
1653 *
1654 * This function writes RANS-related fields such as K_Omega to their respective
1655 * binary files.
1656 *
1657 * @param[in] user Pointer to the UserCtx structure containing simulation context.
1658 *
1659 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1660 */
1661PetscErrorCode WriteRANSFields(UserCtx *user)
1662{
1663 PetscErrorCode ierr;
1664
1665 SimCtx *simCtx = user->simCtx;
1666
1667 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write RANS fields.\n");
1668
1669 ierr = WriteFieldData(user, "kfield", user->K_Omega, simCtx->step, "dat"); CHKERRQ(ierr);
1670
1671 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing RANS fields.\n");
1672
1673 return 0;
1674}
1675
1676/**
1677 * @brief Writes data from a specific field in a PETSc Swarm to a file.
1678 *
1679 * This function retrieves the Swarm from the UserCtx (i.e., `user->swarm`) and
1680 * creates a global PETSc vector from the specified Swarm field. It then calls
1681 * the existing WriteFieldData() function to handle the actual I/O operation.
1682 * After writing the data, the function destroys the temporary global vector
1683 * to avoid memory leaks.
1684 *
1685 * @param[in] user Pointer to the UserCtx structure containing simulation context
1686 * and the PetscSwarm (as `user->swarm`).
1687 * @param[in] field_name Name of the Swarm field to be written (e.g., "my_field").
1688 * @param[in] ti Time index used to construct the output file name.
1689 * @param[in] ext File extension (e.g., "dat", "bin").
1690 *
1691 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1692 *
1693 * @note Compatible with PETSc 3.14.4 and newer.
1694 */
1695PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1696{
1697 PetscErrorCode ierr;
1698 Vec fieldVec;
1699 DM swarm;
1700
1701 PetscFunctionBeginUser; /* PETSc macro indicating start of function */
1702
1703 /*
1704 * 1) Retrieve the PetscSwarm from the user context.
1705 * Ensure user->swarm is initialized and not NULL.
1706 */
1707 swarm = user->swarm;
1708
1709 /*
1710 * 2) Create a global vector from the specified swarm field.
1711 * This function is available in PETSc 3.14.4.
1712 * It provides a read/write "view" of the swarm field as a global Vec.
1713 */
1714 LOG_ALLOW(GLOBAL,LOG_DEBUG,
1715 "Attempting to create global vector from field: %s\n",
1716 field_name);
1717 ierr = DMSwarmCreateGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1718
1719 /*
1720 * 3) Use the existing WriteFieldData() to write the global vector to a file.
1721 * The field name, time index, and extension are passed along for naming.
1722 */
1723 LOG_ALLOW(GLOBAL,LOG_DEBUG,
1724 "Calling WriteFieldData for field: %s\n",
1725 field_name);
1726 ierr = WriteFieldData(user, field_name, fieldVec, ti, ext);CHKERRQ(ierr);
1727
1728 /*
1729 * 4) Destroy the global vector once the data is successfully written.
1730 * This step is crucial for avoiding memory leaks.
1731 * DMSwarmDestroyGlobalVectorFromField() is also available in PETSc 3.14.4.
1732 */
1733 LOG_ALLOW(GLOBAL,LOG_DEBUG,
1734 "Destroying the global vector for field: %s\n",
1735 field_name);
1736 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, field_name, &fieldVec);CHKERRQ(ierr);
1737
1738 /* Log and return success. */
1739 LOG_ALLOW(GLOBAL,LOG_INFO,
1740 "Successfully wrote swarm data for field: %s\n",
1741 field_name);
1742
1743 PetscFunctionReturn(0); /* PETSc macro indicating end of function */
1744}
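/* [Editor's note] The create/destroy pair above is the standard DMSwarm idiom:
 * the Vec produced by DMSwarmCreateGlobalVectorFromField() is a view of the
 * field's storage rather than a copy, so the matching destroy call must be made
 * before the field is next accessed directly (e.g. via DMSwarmGetField()). */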
1745
1746/**
1747 * @brief Writes integer swarm data by casting it to a temporary Vec and using WriteFieldData.
1748 *
1749 * This function provides a bridge to write integer-based swarm fields (like DMSwarm_CellID)
1750 * using the existing Vec-based I/O routine (WriteFieldData). It works by:
1751 * 1. Creating a temporary parallel Vec with the same layout as other swarm fields.
1752 * 2. Accessing the local integer data from the swarm field.
1753 * 3. Populating the temporary Vec by casting each integer to a PetscScalar.
1754 * 4. Calling the standard WriteFieldData() function with the temporary Vec.
1755 * 5. Destroying the temporary Vec.
1756 *
1757 * @param[in] user Pointer to the UserCtx structure.
1758 * @param[in] field_name Name of the integer Swarm field to be written.
1759 * @param[in] ti Time index for the output file.
1760 * @param[in] ext File extension.
1761 *
1762 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1763 */
1764PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
1765{
1766 PetscErrorCode ierr;
1767 DM swarm = user->swarm;
1768 Vec temp_vec; // Temporary Vec to hold casted data
1769 PetscInt nlocal, nglobal,bs,i;
1770 void *field_array_void;
1771 PetscScalar *scalar_array; // Pointer to the temporary Vec's scalar data
1772
1773 PetscFunctionBeginUser;
1774
1775 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Casting '%s' to Vec for writing.\n", field_name);
1776
1777 // Get the swarm field properties
1778 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
1779 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
1780 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
1781
1782 // Create a temporary parallel Vec with the CORRECT layout
1783 ierr = VecCreate(PETSC_COMM_WORLD, &temp_vec); CHKERRQ(ierr);
1784 ierr = VecSetType(temp_vec, VECMPI); CHKERRQ(ierr);
1785 ierr = VecSetSizes(temp_vec, nlocal*bs, nglobal*bs); CHKERRQ(ierr);
1786 ierr = VecSetUp(temp_vec); CHKERRQ(ierr);
1787
1788 // Define the swarm's vector field from the mandatory 'position' field
1789 ierr = DMSwarmVectorDefineField(swarm,"position"); CHKERRQ(ierr);
1790
1791 ierr = VecGetArray(temp_vec, &scalar_array); CHKERRQ(ierr);
1792
1793 if(strcasecmp(field_name,"DMSwarm_pid") == 0){
1794 PetscInt64 *int64_array = (PetscInt64 *)field_array_void;
1795 // Perform the cast from PetscInt64 to PetscScalar
1796 for (i = 0; i < nlocal*bs; i++) {
1797 scalar_array[i] = (PetscScalar)int64_array[i];
1798 }
1799 }else{
1800 PetscInt *int_array = (PetscInt *)field_array_void;
1801 //Perform the cast from PetscInt to PetscScalar
1802 for (i = 0; i < nlocal*bs; i++) {
1803 scalar_array[i] = (PetscScalar)int_array[i];
1804 }
1805 }
1806
1807 // Restore access to both arrays
1808 ierr = VecRestoreArray(temp_vec, &scalar_array); CHKERRQ(ierr);
1809 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &field_array_void); CHKERRQ(ierr);
1810
1811 // Call the existing writer (WriteFieldData) with the temporary, populated Vec
1812 ierr = WriteFieldData(user, field_name, temp_vec, ti, ext); CHKERRQ(ierr);
1813
1814 // Clean up
1815 ierr = VecDestroy(&temp_vec); CHKERRQ(ierr);
1816
1817 PetscFunctionReturn(0);
1818}
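/* [Editor's note] Design caveat on the cast-based bridge above: a PetscInt64
 * survives the round trip through PetscScalar (double) exactly only while its
 * magnitude stays below 2^53, the width of the double mantissa; particle IDs
 * beyond that limit would be silently corrupted by this scalar I/O path. */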
1819
1820/**
1821 * @brief Writes a predefined set of PETSc Swarm fields to files.
1822 *
1823 * This function iterates through a hardcoded list of common swarm fields
1824 * (position, velocity, etc.) and calls the WriteSwarmField() helper function
1825 * for each one. This provides a straightforward way to output essential particle
1826 * data at a given simulation step.
1827 *
1828 * This function will only execute if particles are enabled in the simulation
1829 * (i.e., `user->simCtx->np > 0` and `user->swarm` is not NULL).
1830 *
1831 * @param[in] user Pointer to the UserCtx structure containing the simulation context
1832 * and the PetscSwarm.
1833 *
1834 * @return PetscErrorCode Returns 0 on success, non-zero on failure.
1835 */
1836PetscErrorCode WriteAllSwarmFields(UserCtx *user)
1837{
1838 PetscErrorCode ierr;
1839 SimCtx *simCtx = user->simCtx;
1840
1841 PetscFunctionBeginUser;
1842
1843 // If no swarm is configured or there are no particles, do nothing and return.
1844 if (!user->swarm || simCtx->np <= 0) {
1845 PetscFunctionReturn(0);
1846 }
1847
1848 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting to write swarm fields.\n");
1849
1850 // Ensure the current IO directory is set
1851 ierr = PetscSNPrintf(simCtx->_io_context_buffer, sizeof(simCtx->_io_context_buffer),
1852 "%s/%s", simCtx->output_dir, simCtx->particle_subdir);CHKERRQ(ierr);
1853 simCtx->current_io_directory = simCtx->_io_context_buffer;
1854
1855 // Write particle position field
1856 ierr = WriteSwarmField(user, "position", simCtx->step, "dat"); CHKERRQ(ierr);
1857
1858 // Write particle velocity field
1859 ierr = WriteSwarmField(user, "velocity", simCtx->step, "dat"); CHKERRQ(ierr);
1860
1861 // Write particle weight field
1862 ierr = WriteSwarmField(user, "weight", simCtx->step, "dat"); CHKERRQ(ierr);
1863
1864 // Write custom particle field "Psi"
1865 ierr = WriteSwarmField(user, "Psi", simCtx->step, "dat"); CHKERRQ(ierr);
1866
1867 // Integer fields require special handling
1868
1869 // Write the background mesh cell ID for each particle
1870 ierr = WriteSwarmIntField(user, "DMSwarm_CellID", simCtx->step, "dat"); CHKERRQ(ierr);
1871
1872 // Write the particle location status (e.g., inside or outside the domain)
1873 ierr = WriteSwarmIntField(user, "DMSwarm_location_status", simCtx->step, "dat"); CHKERRQ(ierr);
1874
1875 // Write the unique particle ID
1876 ierr = WriteSwarmIntField(user, "DMSwarm_pid", simCtx->step, "dat"); CHKERRQ(ierr);
1877
1878 simCtx->current_io_directory = NULL;
1879
1880 LOG_ALLOW(GLOBAL, LOG_INFO, "Finished writing swarm fields.\n");
1881
1882 PetscFunctionReturn(0);
1883}
1884
1885/**
1886 * @brief Gather a (possibly distributed) PETSc Vec onto rank 0 as a contiguous C array.
1887 * If the Vec has a DMDA attached, the gather is performed in DMDA "natural" ordering.
1888 *
1889 * @param[in] inVec The PETSc Vec (may be distributed).
1890 * @param[out] N Global length of the Vec (includes dof).
1891 * @param[out] arrayOut On rank 0, newly allocated buffer with the gathered values (PetscScalar-sized).
1892 * On other ranks, set to NULL.
1893 */
1894PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
1895{
1896 PetscErrorCode ierr;
1897 MPI_Comm comm;
1898 PetscMPIInt rank;
1899 PetscInt globalSize;
1900 DM dm = NULL;
1901 const char *dmtype = NULL;
1902
1903 /* For DMDA path */
1904 Vec nat = NULL, seqNat = NULL;
1905 VecScatter scatNat = NULL;
1906 const PetscScalar *nar = NULL;
1907 PetscScalar *buf = NULL;
1908
1909 /* For generic (no DM) path */
1910 Vec seq = NULL;
1911 VecScatter scat = NULL;
1912 const PetscScalar *sar = NULL;
1913
1914 PetscFunctionBeginUser;
1915
1916 ierr = PetscObjectGetComm((PetscObject)inVec, &comm); CHKERRQ(ierr);
1917 ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
1918 ierr = VecGetSize(inVec, &globalSize); CHKERRQ(ierr);
1919 *N = globalSize;
1920 *arrayOut = NULL;
1921
1922 ierr = VecGetDM(inVec, &dm); CHKERRQ(ierr);
1923 if (dm) { ierr = DMGetType(dm, &dmtype); CHKERRQ(ierr); }
1924
1925 if (dmtype && !strcmp(dmtype, DMDA)) {
1926 /* --- DMDA path: go to NATURAL ordering, then gather to rank 0 --- */
1927 ierr = DMDACreateNaturalVector(dm, &nat); CHKERRQ(ierr);
1928 ierr = DMDAGlobalToNaturalBegin(dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
1929 ierr = DMDAGlobalToNaturalEnd (dm, inVec, INSERT_VALUES, nat); CHKERRQ(ierr);
1930
1931 ierr = VecScatterCreateToZero(nat, &scatNat, &seqNat); CHKERRQ(ierr);
1932 ierr = VecScatterBegin(scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1933 ierr = VecScatterEnd (scatNat, nat, seqNat, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1934
1935 if (rank == 0) {
1936 PetscInt nseq;
1937 ierr = VecGetLocalSize(seqNat, &nseq); CHKERRQ(ierr);
1938 ierr = VecGetArrayRead(seqNat, &nar); CHKERRQ(ierr);
1939
1940 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
1941 ierr = PetscMemcpy(buf, nar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
1942
1943 ierr = VecRestoreArrayRead(seqNat, &nar); CHKERRQ(ierr);
1944 *arrayOut = (double*)buf; /* hand back as double* for drop-in compatibility */
1945 }
1946
1947 ierr = VecScatterDestroy(&scatNat); CHKERRQ(ierr);
1948 ierr = VecDestroy(&seqNat); CHKERRQ(ierr);
1949 ierr = VecDestroy(&nat); CHKERRQ(ierr);
1950 } else {
1951 /* --- No DM attached: plain gather in Vec’s global (parallel) layout order --- */
1952 ierr = VecScatterCreateToZero(inVec, &scat, &seq); CHKERRQ(ierr);
1953 ierr = VecScatterBegin(scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1954 ierr = VecScatterEnd (scat, inVec, seq, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1955
1956 if (rank == 0) {
1957 PetscInt nseq;
1958 ierr = VecGetLocalSize(seq, &nseq); CHKERRQ(ierr);
1959 ierr = VecGetArrayRead(seq, &sar); CHKERRQ(ierr);
1960
1961 ierr = PetscMalloc1(nseq, &buf); CHKERRQ(ierr);
1962 ierr = PetscMemcpy(buf, sar, (size_t)nseq * sizeof(PetscScalar)); CHKERRQ(ierr);
1963
1964 ierr = VecRestoreArrayRead(seq, &sar); CHKERRQ(ierr);
1965 *arrayOut = (double*)buf;
1966 }
1967
1968 ierr = VecScatterDestroy(&scat); CHKERRQ(ierr);
1969 ierr = VecDestroy(&seq); CHKERRQ(ierr);
1970 }
1971
1972 PetscFunctionReturn(0);
1973}
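/* [Editor's note] Usage sketch (editorial, illustrative helper name): the gather
 * is collective, so every rank must call it, but only rank 0 receives a buffer,
 * which it must release with PetscFree(). */
static PetscErrorCode ExampleGatherAndInspect(Vec v)
{
    PetscErrorCode ierr;
    PetscMPIInt    rank;
    PetscInt       N    = 0;
    double        *vals = NULL;

    PetscFunctionBeginUser;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
    ierr = VecToArrayOnRank0(v, &N, &vals); CHKERRQ(ierr);
    if (rank == 0) {
        /* ... consume vals[0 .. N-1] ... */
        ierr = PetscFree(vals); CHKERRQ(ierr); /* buffer came from PetscMalloc1 */
    }
    PetscFunctionReturn(0);
}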
1974
1975/**
1976 * @brief Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
1977 *
1978 * This is a generic, type-aware version of SwarmFieldToArrayOnRank0.
1979 * It is a COLLECTIVE operation.
1980 *
1981 * @param[in] swarm The DMSwarm to gather from.
1982 * @param[in] field_name The name of the field to gather.
1983 * @param[out] n_total_particles (Output on rank 0) Total number of particles in the global swarm.
1984 * @param[out] n_components (Output on rank 0) Number of components for the field.
1985 * @param[out] gathered_array (Output on rank 0) A newly allocated array containing the full, gathered data.
1986 * The caller is responsible for freeing this memory and for casting it to the correct type.
1987 * @return PetscErrorCode
1988 */
1989PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
1990{
1991 PetscErrorCode ierr;
1992 PetscMPIInt rank, size;
1993 PetscInt nlocal, nglobal, bs;
1994 void *local_array_void;
1995 size_t element_size = 0;
1996 MPI_Datatype mpi_type = MPI_BYTE; // We'll send raw bytes
1997
1998 PetscFunctionBeginUser;
1999
2000 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2001 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
2002
2003 // All ranks get swarm properties to determine send/receive counts
2004 ierr = DMSwarmGetLocalSize(swarm, &nlocal); CHKERRQ(ierr);
2005 ierr = DMSwarmGetSize(swarm, &nglobal); CHKERRQ(ierr);
2006 ierr = DMSwarmGetField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2007
2008 // Determine the size of one element of the field's data type
2009 if (strcasecmp(field_name, "DMSwarm_pid") == 0) {
2010 element_size = sizeof(PetscInt64);
2011 } else if (strcasecmp(field_name, "DMSwarm_CellID") == 0 || strcasecmp(field_name, "DMSwarm_location_status") == 0) {
2012 element_size = sizeof(PetscInt);
2013 } else {
2014 element_size = sizeof(PetscScalar);
2015 }
2016
2017 if (rank == 0) {
2018 *n_total_particles = nglobal;
2019 *n_components = bs;
2020 *gathered_array = NULL; // Initialize output
2021 }
2022
2023 if (size == 1) { // Serial case is a simple copy
2024 if (rank == 0) {
2025 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2026 ierr = PetscMemcpy(*gathered_array, local_array_void, nglobal * bs * element_size); CHKERRQ(ierr);
2027 }
2028 } else { // Parallel case: use MPI_Gatherv
2029 PetscMPIInt *recvcounts = NULL, *displs = NULL;
2030 if (rank == 0) {
2031 ierr = PetscMalloc1(size, &recvcounts); CHKERRQ(ierr);
2032 ierr = PetscMalloc1(size, &displs); CHKERRQ(ierr);
2033 }
2034 PetscMPIInt sendcount = (PetscMPIInt)(nlocal * bs);
2035
2036 // Gather the number of elements (not bytes) from each rank
2037 ierr = MPI_Gather(&sendcount, 1, MPI_INT, recvcounts, 1, MPI_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2038
2039 if (rank == 0) {
2040 displs[0] = 0;
2041 // Convert counts and calculate displacements in terms of BYTES
2042 for (PetscMPIInt i = 0; i < size; i++) recvcounts[i] *= (PetscMPIInt)element_size;
2043 for (PetscMPIInt i = 1; i < size; i++) displs[i] = displs[i-1] + recvcounts[i-1];
2044
2045 ierr = PetscMalloc(nglobal * bs * element_size, gathered_array); CHKERRQ(ierr);
2046 }
2047
2048 // Use Gatherv with MPI_BYTE to handle any data type generically (counts are in bytes)
2049 ierr = MPI_Gatherv(local_array_void, (PetscMPIInt)(nlocal * bs * element_size), MPI_BYTE,
2050 *gathered_array, recvcounts, displs, MPI_BYTE,
2051 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
2052
2053 if (rank == 0) {
2054 ierr = PetscFree(recvcounts); CHKERRQ(ierr);
2055 ierr = PetscFree(displs); CHKERRQ(ierr);
2056 }
2057 }
2058
2059 ierr = DMSwarmRestoreField(swarm, field_name, &bs, NULL, &local_array_void); CHKERRQ(ierr);
2060
2061 PetscFunctionReturn(0);
2062}
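/* [Editor's note] Usage sketch (editorial, illustrative helper name): the caller
 * owns both the cast to the field's true type and the PetscFree() of the
 * gathered buffer. */
static PetscErrorCode ExampleGatherCellIDs(DM swarm)
{
    PetscErrorCode ierr;
    PetscMPIInt    rank;
    PetscInt       np = 0, bs = 0;
    void          *raw = NULL;

    PetscFunctionBeginUser;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
    /* Collective: every rank participates in the gather. */
    ierr = SwarmFieldToArrayOnRank0(swarm, "DMSwarm_CellID", &np, &bs, &raw); CHKERRQ(ierr);
    if (rank == 0) {
        PetscInt *cellIDs = (PetscInt *)raw; /* this field stores PetscInt */
        (void)cellIDs; /* ... consume np * bs entries ... */
        ierr = PetscFree(raw); CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
}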
2063
2064/**
2065 * @brief Displays a structured banner summarizing the simulation configuration.
2066 *
2067 * This function prints key simulation parameters to standard output.
2068 * It is intended to be called ONLY by MPI rank 0.
2069 * It retrieves global domain bounds from `user->global_domain_bbox` and boundary
2070 * conditions for all faces from `user->face_bc_types`.
2071 *
2072 * @param[in] simCtx Pointer to the `SimCtx` structure; it supplies the finest-level
2073 * `UserCtx`, the run parameters (start time/step, steps to run,
2074 * particle count), and the per-rank bounding-box list, which is
2075 * only valid on rank 0.
2079 *
2080 * @return PetscErrorCode Returns `0` on success.
2081 */
2082PetscErrorCode DisplayBanner(SimCtx *simCtx) // simCtx->bboxlist is only valid on rank 0
2083{
2084 PetscErrorCode ierr;
2085 PetscMPIInt rank;
2086 Cmpnts global_min_coords, global_max_coords;
2087 PetscReal StartTime;
2088 PetscInt StartStep,StepsToRun,total_num_particles;
2089 PetscMPIInt num_mpi_procs;
2090
2091 // SimCtx *simCtx = user->simCtx;
2092 UserCtx *user = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
2093 num_mpi_procs = simCtx->size;
2094 StartTime = simCtx->StartTime;
2095 StartStep = simCtx->StartStep;
2096 StepsToRun = simCtx->StepsToRun;
2097 total_num_particles = simCtx->np;
2098 BoundingBox *bboxlist_on_rank0 = simCtx->bboxlist;
2099
2100
2101 PetscFunctionBeginUser;
2102
2103 if (!user) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "DisplayBanner - UserCtx pointer is NULL.");
2104 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2105
2106 if (rank == 0) {
2107 // If global_domain_bbox is not pre-populated in UserCtx, compute it here from bboxlist_on_rank0
2108 // This assumes bboxlist_on_rank0 is valid and contains all local bounding boxes on rank 0.
2109 if (bboxlist_on_rank0 && num_mpi_procs > 0) {
2110 global_min_coords = bboxlist_on_rank0[0].min_coords;
2111 global_max_coords = bboxlist_on_rank0[0].max_coords;
2112 for (PetscMPIInt p = 1; p < num_mpi_procs; ++p) {
2113 global_min_coords.x = PetscMin(global_min_coords.x, bboxlist_on_rank0[p].min_coords.x);
2114 global_min_coords.y = PetscMin(global_min_coords.y, bboxlist_on_rank0[p].min_coords.y);
2115 global_min_coords.z = PetscMin(global_min_coords.z, bboxlist_on_rank0[p].min_coords.z);
2116 global_max_coords.x = PetscMax(global_max_coords.x, bboxlist_on_rank0[p].max_coords.x);
2117 global_max_coords.y = PetscMax(global_max_coords.y, bboxlist_on_rank0[p].max_coords.y);
2118 global_max_coords.z = PetscMax(global_max_coords.z, bboxlist_on_rank0[p].max_coords.z);
2119 }
2120 // Optionally store this in user->global_domain_bbox if it's useful elsewhere
2121 // user->global_domain_bbox.min_coords = global_min_coords;
2122 // user->global_domain_bbox.max_coords = global_max_coords;
2123 } else {
2124 // Fallback or warning if bboxlist is not available for global calculation
2125 LOG_ALLOW(LOCAL, LOG_WARNING, "(Rank 0) - bboxlist not provided or num_mpi_procs <= 0; using user->bbox for domain bounds.\n");
2126 // global_min_coords = user->bbox.min_coords; // Use local bbox of rank 0 as fallback
2127 // global_max_coords = user->bbox.max_coords;
2128 }
2129
2130
2131 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2132 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2133 ierr = PetscPrintf(PETSC_COMM_SELF, " CASE SUMMARY \n"); CHKERRQ(ierr);
2134 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2135 ierr = PetscPrintf(PETSC_COMM_SELF, " Grid Points : %d X %d X %d\n", user->IM, user->JM, user->KM); CHKERRQ(ierr);
2136 ierr = PetscPrintf(PETSC_COMM_SELF, " Cells : %d X %d X %d\n", user->IM - 1, user->JM - 1, user->KM - 1); CHKERRQ(ierr);
2137 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (X) : %.6f to %.6f\n", (double)global_min_coords.x, (double)global_max_coords.x); CHKERRQ(ierr);
2138 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Y) : %.6f to %.6f\n", (double)global_min_coords.y, (double)global_max_coords.y); CHKERRQ(ierr);
2139 ierr = PetscPrintf(PETSC_COMM_SELF, " Global Domain Bounds (Z) : %.6f to %.6f\n", (double)global_min_coords.z, (double)global_max_coords.z); CHKERRQ(ierr);
2140 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------- Boundary Conditions --------------------\n"); CHKERRQ(ierr);
2141 const int face_name_width = 17; // Adjusted for longer names (Zeta,Eta,Xi)
2142 const char* field_init_str = FieldInitializationToString(simCtx->FieldInitialization);
2143 const char* particle_init_str = ParticleInitializationToString(simCtx->ParticleInitialization);
2144 for (PetscInt i_face = 0; i_face < 6; ++i_face) {
2145 BCFace current_face = (BCFace)i_face;
2146 // The BCFaceToString will now return the Xi, Eta, Zeta versions
2147 const char* face_str = BCFaceToString(current_face);
2148 const char* bc_type_str = BCTypeToString(user->boundary_faces[current_face].mathematical_type);
2149 ierr = PetscPrintf(PETSC_COMM_SELF, " Face %-*s : %s\n",
2150 face_name_width, face_str, bc_type_str); CHKERRQ(ierr);
2151 }
2152 ierr = PetscPrintf(PETSC_COMM_SELF, "-------------------------------------------------------------\n"); CHKERRQ(ierr);
2153 ierr = PetscPrintf(PETSC_COMM_SELF, " Start Time : %.4f\n", (double)StartTime); CHKERRQ(ierr);
2154 ierr = PetscPrintf(PETSC_COMM_SELF, " Timestep Size : %.4f\n", (double)simCtx->dt); CHKERRQ(ierr);
2155 ierr = PetscPrintf(PETSC_COMM_SELF, " Starting Step : %d\n", StartStep); CHKERRQ(ierr);
2156 ierr = PetscPrintf(PETSC_COMM_SELF, " Total Steps to Run : %d\n", StepsToRun); CHKERRQ(ierr);
2157 ierr = PetscPrintf(PETSC_COMM_SELF, " Number of MPI Processes : %d\n", num_mpi_procs); CHKERRQ(ierr);
2158 ierr = PetscPrintf(PETSC_COMM_SELF, " Number of Particles : %d\n", total_num_particles); CHKERRQ(ierr);
2159 ierr = PetscPrintf(PETSC_COMM_SELF, " Reynolds Number : %le\n", simCtx->ren); CHKERRQ(ierr);
2160 ierr = PetscPrintf(PETSC_COMM_SELF, " Stanton Number : %le\n", simCtx->st); CHKERRQ(ierr);
2161 ierr = PetscPrintf(PETSC_COMM_SELF, " CFL Number : %le\n", simCtx->cfl); CHKERRQ(ierr);
2162 ierr = PetscPrintf(PETSC_COMM_SELF, " Von-Neumann Number : %le\n", simCtx->vnn); CHKERRQ(ierr);
2163 ierr = PetscPrintf(PETSC_COMM_SELF, " Particle Initialization Mode: %s\n", particle_init_str); CHKERRQ(ierr);
2164 ierr = PetscPrintf(PETSC_COMM_SELF, " Large Eddy Simulation Model : %s\n", LESFlagToString(simCtx->les)); CHKERRQ(ierr);
2165 if (simCtx->ParticleInitialization == 0 || simCtx->ParticleInitialization == 3) {
2166 if (user->inletFaceDefined) {
2167 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : %s (Enum Val: %d)\n", BCFaceToString(user->identifiedInletBCFace), user->identifiedInletBCFace); CHKERRQ(ierr);
2168 } else {
2169 ierr = PetscPrintf(PETSC_COMM_SELF, " Particles Initialized At : --- (No INLET face identified)\n"); CHKERRQ(ierr);
2170 }
2171 }
2172
2173 ierr = PetscPrintf(PETSC_COMM_SELF, " Field Initialization Mode : %s\n", field_init_str); CHKERRQ(ierr);
2174 if (simCtx->FieldInitialization == 1) {
2175 ierr = PetscPrintf(PETSC_COMM_SELF, " Constant Velocity : x - %.4f, y - %.4f, z - %.4f \n", (double)simCtx->InitialConstantContra.x,(double)simCtx->InitialConstantContra.y,(double)simCtx->InitialConstantContra.z ); CHKERRQ(ierr);
2176 }
2177
2178 ierr = PetscPrintf(PETSC_COMM_SELF, "=============================================================\n"); CHKERRQ(ierr);
2179 ierr = PetscPrintf(PETSC_COMM_SELF, "\n"); CHKERRQ(ierr);
2180 }
2181 PetscFunctionReturn(0);
2182}
2183
2184#undef __FUNCT__
2185#define __FUNCT__ "ParsePostProcessingSettings"
2186/**
2187 * @brief Initializes post-processing settings from a config file and command-line overrides.
2188 *
2189 * This function establishes the configuration for a post-processing run by:
2190 * 1. Setting hardcoded default values in the PostProcessParams struct.
2191 * 2. Reading a configuration file to override the defaults.
2192 * 3. Parsing command-line options (-startTime, -endTime, etc.) which can override
2193 * both the defaults and the file settings.
2194 *
2195 * @param configFile The path to the configuration file to parse.
2196 * @param pps Pointer to the PostProcessParams struct to be populated.
2197 * @return PetscErrorCode
2198 */
2199PetscErrorCode ParsePostProcessingSettings(SimCtx *simCtx)
2200{
2201 FILE *file;
2202 char line[1024];
2203 PetscBool startTimeSet, endTimeSet, timeStepSet;
2204
2205 PetscFunctionBeginUser;
2207
2208 if (!simCtx || !simCtx->pps) {
2209 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_NULL, "SimCtx or its pps member is NULL in ParsePostProcessingSettings.");
2210 }
2211
2212 char *configFile = simCtx->PostprocessingControlFile;
2213 PostProcessParams *pps = simCtx->pps;
2214
2215
2216 // --- 1. Set Sane Defaults First ---
2217 pps->startTime = 0;
2218 pps->endTime = 0;
2219 pps->timeStep = 1;
2220 pps->outputParticles = PETSC_FALSE;
2221 pps->particle_output_freq = simCtx->LoggingFrequency; // Default to logging frequency;
2222 strcpy(pps->process_pipeline, "");
2223 strcpy(pps->output_fields_instantaneous, "Ucat,P");
2224 strcpy(pps->output_fields_averaged, "");
2225 strcpy(pps->output_prefix, "Field");
2226 strcpy(pps->particle_output_prefix,"Particle");
2227 strcpy(pps->particle_fields,"velocity,CellID,weight,pid");
2228 strcpy(pps->particle_pipeline,"");
2229 strcpy(pps->particleExt,"dat"); // The input file format for particles.
2230 strcpy(pps->eulerianExt,"dat"); // The input file format for Eulerian fields.
2231 pps->reference[0] = pps->reference[1] = pps->reference[2] = 1;
2232 strncpy(pps->source_dir, simCtx->output_dir, sizeof(pps->source_dir) - 1);
2233 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0'; // Ensure null-termination
2234
2235 // --- 2. Parse the Configuration File (overrides defaults) ---
2236 file = fopen(configFile, "r");
2237 if (file) {
2238 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing post-processing config file: %s\n", configFile);
2239 while (fgets(line, sizeof(line), file)) {
2240 char *key, *value, *comment;
2241 comment = strchr(line, '#'); if (comment) *comment = '\0';
2242 TrimWhitespace(line); if (strlen(line) == 0) continue;
2243 key = strtok(line, "="); value = strtok(NULL, "=");
2244 if (key && value) {
2245 TrimWhitespace(key); TrimWhitespace(value);
2246 if (strcmp(key, "startTime") == 0) pps->startTime = atoi(value);
2247 else if (strcmp(key, "endTime") == 0) pps->endTime = atoi(value);
2248 else if (strcmp(key, "timeStep") == 0) pps->timeStep = atoi(value);
2249 else if (strcmp(key, "output_particles") == 0) {
2250 if (strcasecmp(value, "true") == 0) pps->outputParticles = PETSC_TRUE;
2251 }
2252 else if (strcasecmp(key, "process_pipeline") == 0) {
2253 strncpy(pps->process_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2254 pps->process_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0'; // Ensure null-termination
2255 } else if (strcasecmp(key, "output_fields_instantaneous") == 0) {
2256 strncpy(pps->output_fields_instantaneous, value, MAX_FIELD_LIST_LENGTH - 1);
2257 pps->output_fields_instantaneous[MAX_FIELD_LIST_LENGTH - 1] = '\0'; // Ensure null-termination
2258 } else if (strcasecmp(key, "output_prefix") == 0) {
2259 strncpy(pps->output_prefix, value, MAX_FILENAME_LENGTH - 1);
2260 pps->output_prefix[MAX_FILENAME_LENGTH - 1] = '\0';
2261 } else if (strcasecmp(key, "particle_output_prefix") == 0) {
2262 strncpy(pps->particle_output_prefix, value, MAX_FILENAME_LENGTH - 1);
2263 pps->particle_output_prefix[MAX_FILENAME_LENGTH - 1] = '\0'; // Ensure null-termination
2264 } else if (strcasecmp(key, "particle_fields_instantaneous") == 0) {
2265 strncpy(pps->particle_fields, value, MAX_FIELD_LIST_LENGTH - 1);
2266 pps->particle_fields[MAX_FIELD_LIST_LENGTH - 1] = '\0';
2267 } else if (strcasecmp(key, "particle_pipeline") == 0) {
2268 strncpy(pps->particle_pipeline, value, MAX_PIPELINE_LENGTH - 1);
2269 pps->particle_pipeline[MAX_PIPELINE_LENGTH - 1] = '\0';
2270 } else if (strcasecmp(key, "particle_output_freq") == 0) {
2271 pps->particle_output_freq = atoi(value);
2272 } else if (strcmp(key, "reference_ip") == 0) {pps->reference[0] = atoi(value);
2273 } else if (strcmp(key, "reference_jp") == 0) {pps->reference[1] = atoi(value);
2274 } else if (strcmp(key, "reference_kp") == 0) {pps->reference[2] = atoi(value);
2275 } else if (strcasecmp(key, "source_directory") == 0) {
2276 strncpy(pps->source_dir, value, sizeof(pps->source_dir) - 1);
2277 pps->source_dir[sizeof(pps->source_dir) - 1] = '\0';
2278 } else {
2279 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown key '%s' in post-processing config file. Ignoring.\n", key);
2280 }
2281 // Add parsing for pipeline, fields, etc. in later phases
2282 }
2283 }
2284 fclose(file);
2285 } else {
2286 LOG_ALLOW(GLOBAL, LOG_WARNING, "Could not open post-processing config file '%s'. Using defaults and command-line overrides.\n", configFile);
2287 }
2288
2289 // --- 3. Parse Command-Line Options (overrides file settings and defaults) ---
2290 PetscOptionsGetInt(NULL, NULL, "-startTime", &pps->startTime, &startTimeSet);
2291 PetscOptionsGetInt(NULL, NULL, "-endTime", &pps->endTime, &endTimeSet);
2292 PetscOptionsGetInt(NULL, NULL, "-timeStep", &pps->timeStep, &timeStepSet);
2293 PetscOptionsGetBool(NULL, NULL, "-output_particles", &pps->outputParticles, NULL);
2294
2295 if(pps->endTime==-1){
2296 pps->endTime = simCtx->StartStep + simCtx->StepsToRun; // Total steps if endTime is set to -1.
2297 }
2298
2299 // If only startTime is given on command line, run for a single step
2300 if (startTimeSet && !endTimeSet) {
2301 pps->endTime = pps->startTime;
2302 }
2303
2304 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-processing configured to run from t=%d to t=%d with step %d. Particle output: %s.\n",
2305 pps->startTime, pps->endTime, pps->timeStep, pps->outputParticles ? "TRUE" : "FALSE");
2306
2307 LOG_ALLOW(GLOBAL, LOG_INFO, "Process Pipeline: %s\n", pps->process_pipeline);
2308 LOG_ALLOW(GLOBAL, LOG_INFO, "Instantaneous Output Fields: %s\n", pps->output_fields_instantaneous);
2309 LOG_ALLOW(GLOBAL, LOG_INFO, "Output Prefix: %s\n", pps->output_prefix);
2310 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Prefix: %s\n", pps->particle_output_prefix);
2311 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Fields: %s\n", pps->particle_fields);
2312 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Pipeline: %s\n", pps->particle_pipeline);
2313 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Output Frequency: %d\n", pps->particle_output_freq);
2314
2316 PetscFunctionReturn(0);
2317}
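/* [Editor's note] Editorial example of a control file accepted by the parser
 * above; the keys mirror the branches in the while-loop, the values are
 * illustrative:
 *
 *   # post-processing control
 *   startTime = 0
 *   endTime = 1000
 *   timeStep = 50
 *   output_particles = true
 *   output_fields_instantaneous = Ucat,P
 *   output_prefix = Field
 *   particle_output_prefix = Particle
 *   particle_fields_instantaneous = velocity,CellID,weight,pid
 *   particle_output_freq = 10
 *   reference_ip = 1
 *   source_directory = results
 */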
2318
2319
2320#undef __FUNCT__
2321#define __FUNCT__ "ParseScalingInformation"
2322/**
2323 * @brief Parses physical scaling parameters from command-line options.
2324 *
2325 * This function reads the reference length, velocity, and density from the
2326 * PETSc options database (provided via -scaling_L_ref, etc.). It populates
2327 * the simCtx->scaling struct and calculates the derived reference pressure.
2328 * It sets default values of 1.0 for a fully non-dimensional case if the
2329 * options are not provided.
2330 *
2331 * @param[in,out] simCtx The simulation context whose 'scaling' member will be populated.
2332 * @return PetscErrorCode
2333 */
2334PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
2335{
2336 PetscErrorCode ierr;
2337 PetscBool flg;
2338
2339 PetscFunctionBeginUser;
2341
2342 if (!simCtx) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "SimCtx is NULL in ParseScalingInformation");
2343
2344 // --- 1. Set default values to 1.0 ---
2345 // This represents a purely non-dimensional run if no scaling is provided.
2346 simCtx->scaling.L_ref = 1.0;
2347 simCtx->scaling.U_ref = 1.0;
2348 simCtx->scaling.rho_ref = 1.0;
2349
2350 // --- 2. Read overrides from the command line / control file ---
2351 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_L_ref", &simCtx->scaling.L_ref, &flg); CHKERRQ(ierr);
2352 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_U_ref", &simCtx->scaling.U_ref, &flg); CHKERRQ(ierr);
2353 ierr = PetscOptionsGetReal(NULL, NULL, "-scaling_rho_ref", &simCtx->scaling.rho_ref, &flg); CHKERRQ(ierr);
2354
2355 // --- 3. Calculate derived scaling factors ---
2356 // Check for division by zero to be safe, though U_ref should be positive.
2357 if (simCtx->scaling.U_ref <= 0.0) {
2358 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Reference velocity U_ref must be positive. Got %g", (double)simCtx->scaling.U_ref);
2359 }
2360 simCtx->scaling.P_ref = simCtx->scaling.rho_ref * simCtx->scaling.U_ref * simCtx->scaling.U_ref;
2361
2362 // --- 4. Log the final, effective scales for verification ---
2363 LOG(GLOBAL, LOG_INFO, "---------------- Physical Scales Initialized -----------------\n");
2364 LOG(GLOBAL, LOG_INFO, " L_ref: %.4f, U_ref: %.4f, rho_ref: %.4f, P_ref: %.4f\n",
2365 simCtx->scaling.L_ref, simCtx->scaling.U_ref, simCtx->scaling.rho_ref, simCtx->scaling.P_ref);
2366 LOG(GLOBAL, LOG_INFO, "--------------------------------------------------------------\n");
2367
2369 PetscFunctionReturn(0);
2370}
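/* [Editor's note] Editorial example (executable name and values are
 * illustrative): supplying dimensional scales on the command line,
 *
 *   ./picsolver -scaling_L_ref 0.05 -scaling_U_ref 2.0 -scaling_rho_ref 998.0
 *
 * gives the derived reference pressure
 *
 *   P_ref = rho_ref * U_ref^2 = 998.0 * 2.0^2 = 3992.0,
 *
 * while omitting the options leaves the fully non-dimensional defaults
 * L_ref = U_ref = rho_ref = P_ref = 1.0. */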
2371
2372
2373// ---------------------------------------------------------------------
2374// UTILITY FUNCTIONS NOT USED IN THE MAIN CODE (NOT SUPPORTED ANYMORE, INCLUDED FOR BACKWARD COMPATIBILITY)
2375// ---------------------------------------------------------------------
2376
2377//=====================================================================
2378// 1) ReadDataFileToArray
2379
2380// See the function-level comments in io.h for a summary.
2381// This reads one value per line from an ASCII file. Rank 0 does I/O,
2382// broadcasts the data to all ranks.
2383//=====================================================================
2384PetscInt ReadDataFileToArray(const char *filename,
2385 double **data_out,
2386 PetscInt *Nout,
2387 MPI_Comm comm)
2388{
2389 /* STEP 0: Prepare local variables & log function entry */
2390 PetscMPIInt rank, size;
2391 PetscErrorCode ierr;
2392 FILE *fp = NULL;
2393 PetscInt N = 0; /* number of lines/values read on rank 0 */
2394 double *array = NULL; /* pointer to local array on each rank */
2395 PetscInt fileExistsFlag = 0; /* 0 = doesn't exist, 1 = does exist */
2396
2397 LOG_ALLOW(GLOBAL,LOG_DEBUG,
2398 "Start reading from file: %s\n",
2399 filename);
2400
2401 /* Basic error checking: data_out, Nout must be non-null. */
2402 if (!filename || !data_out || !Nout) {
2403 LOG_ALLOW(GLOBAL,LOG_WARNING,
2404 "Null pointer argument provided.\n");
2405 return 1;
2406 }
2407
2408 /* Determine rank/size for coordinating I/O. */
2409 MPI_Comm_rank(comm, &rank);
2410 MPI_Comm_size(comm, &size);
2411
2412 /* STEP 1: On rank 0, check if file can be opened. */
2413 if (!rank) {
2414 fp = fopen(filename, "r");
2415 if (fp) {
2416 fileExistsFlag = 1;
2417 fclose(fp);
2418 }
2419 }
2420
2421 /* STEP 2: Broadcast file existence to all ranks. */
2422
2423 ierr = MPI_Bcast(&fileExistsFlag, 1, MPIU_INT, 0, comm); CHKERRQ(ierr);
2424
2425 if (!fileExistsFlag) {
2426 /* If file does not exist, log & return. */
2427 if (!rank) {
2428 LOG_ALLOW(GLOBAL,LOG_WARNING,
2429 "File '%s' not found.\n",
2430 filename);
2431 }
2432 return 2;
2433 }
2434
2435 /* STEP 3: Rank 0 re-opens and reads the file, counting lines, etc. */
2436 if (!rank) {
2437 fp = fopen(filename, "r");
2438 if (!fp) {
2439 LOG_ALLOW(GLOBAL,LOG_WARNING,
2440 "File '%s' could not be opened for reading.\n",
2441 filename);
2442 return 3;
2443 }
2444
2445 /* (3a) Count lines first. */
2446 {
2447 char line[256];
2448 while (fgets(line, sizeof(line), fp)) {
2449 N++;
2450 }
2451 }
2452
2453 LOG_ALLOW(GLOBAL,LOG_DEBUG,
2454 "File '%s' has %d lines.\n",
2455 filename, N);
2456
2457 /* (3b) Allocate array on rank 0. */
2458 array = (double*)malloc(N * sizeof(double));
2459 if (!array) {
2460 fclose(fp);
2461 LOG_ALLOW(GLOBAL,LOG_WARNING,
2462 "malloc failed for array.\n");
2463 return 4;
2464 }
2465
2466 /* (3c) Rewind & read values into array. */
2467 rewind(fp);
2468 {
2469 PetscInt i = 0;
2470 char line[256];
2471 while (fgets(line, sizeof(line), fp)) {
2472 double val;
2473 if (sscanf(line, "%lf", &val) == 1) {
2474 array[i++] = val;
2475 }
2476 }
2477 }
2478 fclose(fp);
2479
2481 "Successfully read %d values from '%s'.\n",
2482 N, filename);
2483 }
2484
2485 /* STEP 4: Broadcast the integer N to all ranks. */
2486 ierr = MPI_Bcast(&N, 1, MPIU_INT, 0, comm); CHKERRQ(ierr);
2487
2488 /* STEP 5: Each rank allocates an array to receive the broadcast if rank>0. */
2489 if (rank) {
2490 array = (double*)malloc(N * sizeof(double));
2491 if (!array) {
2492 LOG_ALLOW(GLOBAL,LOG_WARNING,
2493 "malloc failed on rank %d.\n",
2494 rank);
2495 return 5;
2496 }
2497 }
2498
2499 /* STEP 6: Broadcast the actual data from rank 0 to all. */
2500 ierr = MPI_Bcast(array, N, MPI_DOUBLE, 0, comm); CHKERRQ(ierr);
2501
2502 /* STEP 7: Assign outputs on all ranks. */
2503 *data_out = array;
2504 *Nout = N;
2505
2506 LOG_ALLOW(GLOBAL,LOG_DEBUG,
2507 "Done. Provided array of length=%d to all ranks.\n",
2508 N);
2509 return 0; /* success */
2510}
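/* [Editor's note] Usage sketch (editorial; the file name is illustrative): note
 * the buffer comes from malloc(), so it is released with free(), not
 * PetscFree(). */
static PetscErrorCode ExampleReadProfile(MPI_Comm comm)
{
    double  *vals = NULL;
    PetscInt n    = 0;

    PetscFunctionBeginUser;
    /* Nonzero return codes flag missing files or allocation failures. */
    if (ReadDataFileToArray("inflow_profile.dat", &vals, &n, comm) == 0) {
        /* every rank now holds a private copy of the n values */
        free(vals);
    }
    PetscFunctionReturn(0);
}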
2511
2512/**
2513 * @brief Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
2514 *
2515 * This function uses \c ReadFieldData to fill a PETSc Vec with coordinate data,
2516 * then leverages \c VecToArrayOnRank0 to gather that data into a contiguous array
2517 * (valid on rank 0 only).
2518 *
2519 * @param[in] timeIndex The time index used to construct file names.
2520 * @param[in] user Pointer to the user context.
2521 * @param[out] coordsArray On rank 0, will point to a newly allocated array holding the coordinates.
2522 * @param[out] Ncoords On rank 0, the length of \p coordsArray. On other ranks, 0.
2523 *
2524 * @return PetscErrorCode Returns 0 on success, or non-zero on failures.
2525 */
2526PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex,
2527 UserCtx *user,
2528 double **coordsArray,
2529 PetscInt *Ncoords)
2530{
2531 PetscFunctionBeginUser;
2532
2533 PetscErrorCode ierr;
2534 Vec coordsVec;
2535
2536 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating coords Vec.\n");
2537 ierr = VecCreate(PETSC_COMM_WORLD, &coordsVec);CHKERRQ(ierr);
2538 ierr = VecSetFromOptions(coordsVec);CHKERRQ(ierr);
2539
2540 // For example: "position" is the name of the coordinate data
2541 ierr = ReadFieldData(user, "position", coordsVec, timeIndex, "dat");
2542 if (ierr) {
2543 LOG_ALLOW(GLOBAL,LOG_WARNING,
2544 "Error reading position data (ti=%d).\n",
2545 timeIndex);
2546 PetscFunctionReturn(ierr);
2547 }
2548
2549 LOG_ALLOW(GLOBAL, LOG_DEBUG, "ReadPositions - Gathering coords Vec to rank 0.\n");
2550 ierr = VecToArrayOnRank0(coordsVec, Ncoords, coordsArray);CHKERRQ(ierr);
2551
2552 ierr = VecDestroy(&coordsVec);CHKERRQ(ierr);
2553
2554 LOG_ALLOW(GLOBAL,LOG_DEBUG,
2555 "Successfully gathered coordinates. Ncoords=%d.\n", *Ncoords);
2556 PetscFunctionReturn(0);
2557}
2558
2559
2560/**
2561 * @brief Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
2562 *
2563 * This function wraps \c ReadFieldData and \c VecToArrayOnRank0 into a single step.
2564 * The gathered data is stored in \p scalarArray on rank 0, with its length in \p Nscalars.
2565 *
2566 * @param[in] timeIndex The time index used to construct file names.
2567 * @param[in] fieldName Name of the field to be read (e.g., "velocity").
2568 * @param[in] user Pointer to the user context.
2569 * @param[out] scalarArray On rank 0, a newly allocated array holding the field data.
2570 * @param[out] Nscalars On rank 0, length of \p scalarArray. On other ranks, 0.
2571 *
2572 * @return PetscErrorCode Returns 0 on success, or non-zero on failures.
2573 */
2574PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex,
2575 const char *fieldName,
2576 UserCtx *user,
2577 double **scalarArray,
2578 PetscInt *Nscalars)
2579{
2580 PetscFunctionBeginUser;
2581
2582 PetscErrorCode ierr;
2583 Vec fieldVec;
2584
2585 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating field Vec.\n");
2586 ierr = VecCreate(PETSC_COMM_WORLD, &fieldVec);CHKERRQ(ierr);
2587 ierr = VecSetFromOptions(fieldVec);CHKERRQ(ierr);
2588
2589 ierr = ReadFieldData(user, fieldName, fieldVec, timeIndex, "dat");
2590 if (ierr) {
2591 LOG_ALLOW(GLOBAL,LOG_WARNING,
2592 "Error reading field '%s' (ti=%d).\n",
2593 fieldName, timeIndex);
2594 PetscFunctionReturn(ierr);
2595 }
2596
2597 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Gathering field Vec to rank 0.\n");
2598 ierr = VecToArrayOnRank0(fieldVec, Nscalars, scalarArray);CHKERRQ(ierr);
2599
2600 ierr = VecDestroy(&fieldVec);CHKERRQ(ierr);
2601
2602 LOG_ALLOW(GLOBAL,LOG_DEBUG,
2603 "Successfully gathered field '%s'. Nscalars=%d.\n",
2604 fieldName, *Nscalars);
2605 PetscFunctionReturn(0);
2606}
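/* [Editor's note] Usage sketch (editorial, illustrative helper name): reading a
 * field and post-processing it serially on rank 0. The buffer is allocated with
 * PetscMalloc1 inside VecToArrayOnRank0, hence PetscFree(). */
static PetscErrorCode ExampleLoadPressureOnRank0(UserCtx *user, PetscInt ti)
{
    PetscErrorCode ierr;
    PetscMPIInt    rank;
    double        *p = NULL;
    PetscInt       n = 0;

    PetscFunctionBeginUser;
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
    ierr = ReadFieldDataToRank0(ti, "pfield", user, &p, &n); CHKERRQ(ierr);
    if (rank == 0) {
        /* ... analyze the n gathered values ... */
        ierr = PetscFree(p); CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
}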
2607
2608
PetscErrorCode ReadStatisticalFields(UserCtx *user, PetscInt ti)
Reads statistical fields for averaging purposes.
Definition io.c:1104
PetscErrorCode ParsePostProcessingSettings(SimCtx *simCtx)
Initializes post-processing settings from a config file and command-line overrides.
Definition io.c:2199
PetscErrorCode WriteAllSwarmFields(UserCtx *user)
Writes a predefined set of PETSc Swarm fields to files.
Definition io.c:1836
PetscErrorCode SwarmFieldToArrayOnRank0(DM swarm, const char *field_name, PetscInt *n_total_particles, PetscInt *n_components, void **gathered_array)
Gathers any DMSwarm field from all ranks to a single, contiguous array on rank 0.
Definition io.c:1989
PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
Parses physical scaling parameters from command-line options.
Definition io.c:2334
PetscInt ReadDataFileToArray(const char *filename, double **data_out, PetscInt *Nout, MPI_Comm comm)
Definition io.c:2384
PetscErrorCode ReadGridFile(UserCtx *user)
Sets grid dimensions from a file for a SINGLE block using a one-time read cache.
Definition io.c:177
PetscErrorCode StringToBCHandlerType(const char *str, BCHandlerType *handler_out)
Converts a string representation of a handler to a BCHandlerType enum.
Definition io.c:314
PetscErrorCode ReadRANSFields(UserCtx *user, PetscInt ti)
Reads RANS-related fields.
Definition io.c:1162
PetscErrorCode GetBCParamReal(BC_Param *params, const char *key, PetscReal *value_out, PetscBool *found)
Searches a BC_Param linked list for a key and returns its value as a double.
Definition io.c:356
PetscErrorCode ReadSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Reads integer swarm data by using ReadFieldData and casting the result.
Definition io.c:1249
PetscErrorCode WriteSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Writes data from a specific field in a PETSc Swarm to a file.
Definition io.c:1695
static PetscInt * g_IMs_from_file
Caches the IM dimensions for all blocks read from the grid file.
Definition io.c:19
PetscErrorCode WriteStatisticalFields(UserCtx *user)
Writes statistical fields for averaging purposes.
Definition io.c:1599
PetscErrorCode ReadSimulationFields(UserCtx *user, PetscInt ti)
Reads simulation fields from files into their respective PETSc vectors.
Definition io.c:1024
static PetscInt g_nblk_from_file
Stores the number of blocks read from the grid file.
Definition io.c:17
PetscErrorCode WriteFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Writes data from a specific PETSc vector to a single, sequential file.
Definition io.c:1429
PetscErrorCode ReadLESFields(UserCtx *user, PetscInt ti)
Reads LES-related fields.
Definition io.c:1131
PetscErrorCode ReadFieldData(UserCtx *user, const char *field_name, Vec field_vec, PetscInt ti, const char *ext)
Reads data for a specific field from a file into the provided vector.
Definition io.c:823
PetscErrorCode ParseAllBoundaryConditions(UserCtx *user, const char *bcs_input_filename)
Parses the boundary conditions file to configure the type, handler, and any associated parameters for...
Definition io.c:399
PetscErrorCode ValidateBCHandlerForBCType(BCType type, BCHandlerType handler)
Validates that a specific handler is compatible with a general BC type.
Definition io.c:331
void TrimWhitespace(char *str)
Trims leading and trailing whitespace from a string in-place.
Definition io.c:36
PetscErrorCode WriteSimulationFields(UserCtx *user)
Writes simulation fields to files.
Definition io.c:1536
PetscErrorCode ReadAllSwarmFields(UserCtx *user, PetscInt ti)
Reads multiple fields into a DMSwarm, gracefully handling missing optional files.
Definition io.c:1327
static PetscBool g_file_has_been_read
A flag to ensure the grid file is read only once.
Definition io.c:25
PetscErrorCode ReadGridGenerationInputs(UserCtx *user)
Parses command-line options for a programmatically generated grid for a SINGLE block.
Definition io.c:77
PetscErrorCode ReadSwarmField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Reads data from a file into a specified field of a PETSc DMSwarm.
Definition io.c:1204
PetscErrorCode WriteRANSFields(UserCtx *user)
Writes RANS-related fields.
Definition io.c:1661
PetscErrorCode ReadFieldDataToRank0(PetscInt timeIndex, const char *fieldName, UserCtx *user, double **scalarArray, PetscInt *Nscalars)
Reads a named field from file into a PETSc Vec, then gathers it to rank 0.
Definition io.c:2574
PetscErrorCode VecToArrayOnRank0(Vec inVec, PetscInt *N, double **arrayOut)
Gather a (possibly distributed) PETSc Vec onto rank 0 as a contiguous C array.
Definition io.c:1894
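A minimal sketch for serial post-processing of a distributed Vec; freeing the gathered buffer on rank 0 is assumed to be the caller's responsibility:
PetscErrorCode ierr;
PetscInt N;
double *array = NULL;
PetscMPIInt rank;
ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
ierr = VecToArrayOnRank0(user->Ucat, &N, &array); CHKERRQ(ierr);
if (rank == 0) {
    /* array[0..N-1] holds the gathered global entries. */
    ierr = PetscFree(array); CHKERRQ(ierr);
}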
PetscErrorCode WriteSwarmIntField(UserCtx *user, const char *field_name, PetscInt ti, const char *ext)
Writes integer swarm data by casting it to a temporary Vec and using WriteFieldData.
Definition io.c:1764
PetscErrorCode WriteLESFields(UserCtx *user)
Writes LES-related fields.
Definition io.c:1627
static PetscInt * g_KMs_from_file
Caches the KM dimensions for all blocks read from the grid file.
Definition io.c:23
static PetscInt * g_JMs_from_file
Caches the JM dimensions for all blocks read from the grid file.
Definition io.c:21
PetscErrorCode StringToBCFace(const char *str, BCFace *face_out)
Converts a string representation of a face to a BCFace enum.
Definition io.c:280
PetscErrorCode ReadPositionsFromFile(PetscInt timeIndex, UserCtx *user, double **coordsArray, PetscInt *Ncoords)
Reads coordinate data (for particles) from file into a PETSc Vec, then gathers it to rank 0.
Definition io.c:2526
static PetscErrorCode ReadOptionalField(UserCtx *user, const char *field_name, const char *field_label, Vec field_vec, PetscInt ti, const char *ext)
Checks for and reads an optional Eulerian field from a file into a Vec.
Definition io.c:703
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
A parallel-safe helper to verify the existence of a generic file or directory path.
Definition io.c:590
void FreeBC_ParamList(BC_Param *head)
Frees the memory allocated for a linked list of BC_Param structs.
Definition io.c:263
PetscErrorCode StringToBCType(const char *str, BCType *type_out)
Converts a string representation of a BC type to a BCType enum.
Definition io.c:297
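These three converters plus the validator are meant to be chained while parsing a bcs.dat entry; a sketch in which the literal strings are illustrative and may not match the accepted spellings:
PetscErrorCode ierr;
BCFace        face;
BCType        type;
BCHandlerType handler;
ierr = StringToBCFace("-Xi", &face);                         CHKERRQ(ierr);
ierr = StringToBCType("INLET", &type);                       CHKERRQ(ierr);
ierr = StringToBCHandlerType("constant_velocity", &handler); CHKERRQ(ierr);
ierr = ValidateBCHandlerForBCType(type, handler);            CHKERRQ(ierr);  /* errors on a mismatched pair */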
PetscErrorCode DisplayBanner(SimCtx *simCtx)
Displays a structured banner summarizing the simulation configuration.
Definition io.c:2082
static PetscErrorCode CheckDataFile(UserCtx *user, PetscInt ti, const char *fieldName, const char *ext, PetscBool *fileExists)
Checks for a data file's existence in a parallel-safe manner.
Definition io.c:645
static PetscErrorCode ReadOptionalSwarmField(UserCtx *user, const char *field_name, const char *field_label, PetscInt ti, const char *ext)
Checks for and reads an optional DMSwarm field from a file.
Definition io.c:753
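Both optional readers follow the same parallel-safe check-then-read pattern built on CheckDataFile; conceptually, a simplified sketch of the control flow rather than the exact implementation:
PetscBool fileExists = PETSC_FALSE;
ierr = CheckDataFile(user, ti, field_name, ext, &fileExists); CHKERRQ(ierr);
if (fileExists) {
    ierr = ReadSwarmField(user, field_name, ti, ext); CHKERRQ(ierr);
} else {
    LOG_ALLOW(GLOBAL, LOG_INFO, "Optional field '%s' absent at step %d; skipping.\n",
              field_label, (int)ti);
}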
Public interface for data input/output routines.
const char * BCHandlerTypeToString(BCHandlerType handler_type)
Converts a BCHandlerType enum to its string representation.
Definition logging.c:732
const char * FieldInitializationToString(PetscInt FieldInitialization)
Helper function to convert FieldInitialization to a string representation.
Definition logging.c:660
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Synchronized logging macro; the collective, multi-rank counterpart of LOG_ALLOW.
Definition logging.h:268
#define LOCAL
Scope for local logging on the current process.
Definition logging.h:46
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:47
const char * BCFaceToString(BCFace face)
Helper function to convert BCFace enum to a string representation.
Definition logging.c:643
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:201
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:740
const char * ParticleInitializationToString(PetscInt ParticleInitialization)
Helper function to convert ParticleInitialization to a string representation.
Definition logging.c:675
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:85
const char * BCTypeToString(BCType type)
Helper function to convert BCType enum to a string representation.
Definition logging.c:705
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:29
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:34
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:32
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:30
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:33
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:731
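A representative way these logging and profiling macros combine inside an I/O routine; MyIORoutine is hypothetical:
PetscErrorCode MyIORoutine(UserCtx *user)
{
    PetscFunctionBeginUser;
    PROFILE_FUNCTION_BEGIN;   /* start the profiling timer for this function */
    LOG_ALLOW(LOCAL, LOG_DEBUG, "Entering MyIORoutine on block %d.\n", (int)user->_this);
    /* ... I/O work ... */
    LOG_ALLOW(GLOBAL, LOG_INFO, "I/O complete.\n");
    PROFILE_FUNCTION_END;     /* stop the timer */
    PetscFunctionReturn(0);
}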
const char * LESFlagToString(PetscInt LESFlag)
Helper function to convert LES Flag to a string representation.
Definition logging.c:690
BCType
Defines the general mathematical/physical category of a boundary.
Definition variables.h:207
@ INLET
Definition variables.h:214
@ NOGRAD
Definition variables.h:224
@ SYMMETRY
Definition variables.h:212
@ OUTLET
Definition variables.h:213
@ WALL
Definition variables.h:210
UserCtx * user
Definition variables.h:427
PetscBool inletFaceDefined
Definition variables.h:680
char particle_output_prefix[256]
Definition variables.h:469
PetscMPIInt rank
Definition variables.h:541
PetscInt cgrid
Definition variables.h:676
BoundaryFaceConfig boundary_faces[6]
Definition variables.h:679
PetscInt block_number
Definition variables.h:593
BCFace identifiedInletBCFace
Definition variables.h:681
char euler_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:560
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:664
PetscInt rans
Definition variables.h:609
PetscReal StartTime
Definition variables.h:551
Vec K_Omega_o
Definition variables.h:712
char output_prefix[256]
Definition variables.h:466
struct BC_Param_s * next
Definition variables.h:248
PetscReal Min_X
Definition variables.h:671
char * key
Definition variables.h:246
PetscInt KM
Definition variables.h:670
PetscReal st
Definition variables.h:581
UserMG usermg
Definition variables.h:631
#define MAX_FIELD_LIST_LENGTH
Definition variables.h:443
PetscReal L_ref
Definition variables.h:520
Vec K_Omega
Definition variables.h:712
BCHandlerType
Defines the specific computational "strategy" for a boundary handler.
Definition variables.h:228
@ BC_HANDLER_INLET_PARABOLIC
Definition variables.h:231
@ BC_HANDLER_INLET_CONSTANT_VELOCITY
Definition variables.h:232
@ BC_HANDLER_WALL_MOVING
Definition variables.h:230
@ BC_HANDLER_NOGRAD_COPY_GHOST
Definition variables.h:229
@ BC_HANDLER_WALL_NOSLIP
Definition variables.h:230
@ BC_HANDLER_OUTLET_CONSERVATION
Definition variables.h:234
char output_fields_averaged[1024]
Definition variables.h:465
PetscInt FieldInitialization
Definition variables.h:582
#define MAX_PIPELINE_LENGTH
Definition variables.h:442
PetscReal ren
Definition variables.h:581
BCHandlerType handler_type
Definition variables.h:274
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
PetscInt _this
Definition variables.h:674
PetscInt reference[3]
Definition variables.h:477
char output_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:559
PetscReal dt
Definition variables.h:552
PetscReal ry
Definition variables.h:675
PetscInt StepsToRun
Definition variables.h:549
PetscInt timeStep
Definition variables.h:459
PetscInt np
Definition variables.h:616
PetscReal Max_Y
Definition variables.h:671
PetscBool averaging
Definition variables.h:613
char * value
Definition variables.h:247
Vec lCs
Definition variables.h:712
#define MAX_FILENAME_LENGTH
Definition variables.h:444
Vec Ucont
Definition variables.h:688
PetscInt StartStep
Definition variables.h:548
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
PetscScalar x
Definition variables.h:101
Vec Ucat_square_sum
Definition variables.h:715
char * current_io_directory
Definition variables.h:564
Vec P_sum
Definition variables.h:715
char grid_file[PETSC_MAX_PATH_LEN]
Definition variables.h:598
char output_fields_instantaneous[1024]
Definition variables.h:464
char eulerianExt[8]
Definition variables.h:473
char particle_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:561
BoundingBox * bboxlist
Definition variables.h:619
Vec lNu_t
Definition variables.h:712
Vec Nu_t
Definition variables.h:712
PetscReal rz
Definition variables.h:675
char source_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:454
char particle_pipeline[1024]
Definition variables.h:467
BC_Param * params
Definition variables.h:275
PetscScalar z
Definition variables.h:101
Vec lK_Omega_o
Definition variables.h:712
Vec Ucat
Definition variables.h:688
Vec ParticleCount
Definition variables.h:729
ScalingCtx scaling
Definition variables.h:590
Vec lK_Omega
Definition variables.h:712
PetscInt JM
Definition variables.h:670
PetscInt mglevels
Definition variables.h:434
PetscReal Min_Z
Definition variables.h:671
PetscReal cfl
Definition variables.h:581
Vec Ucat_sum
Definition variables.h:715
char process_pipeline[1024]
Definition variables.h:463
PetscInt particle_output_freq
Definition variables.h:470
char particle_fields[1024]
Definition variables.h:468
PetscBool outputParticles
Definition variables.h:460
PetscReal Max_X
Definition variables.h:671
Cmpnts InitialConstantContra
Definition variables.h:583
PetscReal Min_Y
Definition variables.h:671
PetscInt step
Definition variables.h:546
PostProcessParams * pps
Definition variables.h:648
PetscScalar y
Definition variables.h:101
PetscMPIInt size
Definition variables.h:542
@ EXEC_MODE_SOLVER
Definition variables.h:511
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:512
PetscInt IM
Definition variables.h:670
char _io_context_buffer[PETSC_MAX_PATH_LEN]
Definition variables.h:563
char particleExt[8]
Definition variables.h:474
Vec Ucat_cross_sum
Definition variables.h:715
PetscInt les
Definition variables.h:609
Vec Nvert
Definition variables.h:688
MGCtx * mgctx
Definition variables.h:437
BCType mathematical_type
Definition variables.h:273
PetscReal P_ref
Definition variables.h:523
PetscReal rx
Definition variables.h:675
ExecutionMode exec_mode
Definition variables.h:556
PetscInt ParticleInitialization
Definition variables.h:620
PetscInt startTime
Definition variables.h:457
PetscReal rho_ref
Definition variables.h:522
PetscReal vnn
Definition variables.h:581
PetscReal Max_Z
Definition variables.h:671
PetscReal U_ref
Definition variables.h:521
char PostprocessingControlFile[PETSC_MAX_PATH_LEN]
Definition variables.h:647
char restart_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:558
PetscInt LoggingFrequency
Definition variables.h:636
Vec Psi
Definition variables.h:730
BCFace
Identifies the six logical faces of a structured computational block.
Definition variables.h:200
@ BC_FACE_NEG_X
Definition variables.h:201
@ BC_FACE_POS_Z
Definition variables.h:203
@ BC_FACE_POS_Y
Definition variables.h:202
@ BC_FACE_NEG_Z
Definition variables.h:203
@ BC_FACE_POS_X
Definition variables.h:201
@ BC_FACE_NEG_Y
Definition variables.h:202
BoundaryCondition * handler
Definition variables.h:276
A node in a linked list for storing key-value parameters from the bcs.dat file.
Definition variables.h:245
Holds the complete configuration for one of the six boundary faces.
Definition variables.h:271
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
A 3D point or vector with PetscScalar components.
Definition variables.h:100
Holds all configuration parameters for a post-processing run.
Definition variables.h:452
The master context for the entire simulation.
Definition variables.h:538
User-defined context containing data specific to a single computational grid level.
Definition variables.h:661