PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
setup.c File Reference

// Setup code for running any simulation More...

#include "setup.h"
Include dependency graph for setup.c:

Go to the source code of this file.

Macros

#define __FUNCT__   "CreateSimulationContext"
 
#define __FUNCT__   "PetscMkdirRecursive"
 
#define __FUNCT__   "SetupSimulationEnvironment"
 
#define __FUNCT__   "AllocateContextHierarchy"
 
#define __FUNCT__   "SetupSolverParameters"
 
#define __FUNCT__   "SetupGridAndSolvers"
 
#define __FUNCT__   "CreateAndInitializeAllVectors"
 
#define __FUNCT__   "UpdateLocalGhosts"
 
#define __FUNCT__   "SetupBoundaryConditions"
 
#define __FUNCT__   "GetOwnedCellRange"
 
#define __FUNCT__   "ComputeAndStoreNeighborRanks"
 
#define __FUNCT__   "SetDMDAProcLayout"
 
#define __FUNCT__   "SetupDomainRankInfo"
 
#define __FUNCT__   "Contra2Cart"
 
#define __FUNCT__   "SetupDomainCellDecompositionMap"
 
#define __FUNCT__   "BinarySearchInt64"
 
#define __FUNCT__   "ComputeDivergence"
 
#define __FUNCT__   "InitializeRandomGenerators"
 
#define __FUNCT__   "InitializeLogicalSpaceRNGs"
 
#define __FUNCT__   "InitializeBrownianRNG"
 
#define __FUNCT__   "TransformScalarDerivativesToPhysical"
 
#define __FUNCT__   "TransformDerivativesToPhysical"
 
#define __FUNCT__   "ComputeScalarFieldDerivatives"
 
#define __FUNCT__   "ComputeVectorFieldDerivatives"
 
#define __FUNCT__   "DestroyUserVectors"
 
#define __FUNCT__   "DestroyUserContext"
 
#define __FUNCT__   "FinalizeSimulation"
 

Functions

PetscErrorCode CreateSimulationContext (int argc, char **argv, SimCtx **p_simCtx)
 Allocates and populates the master SimulationContext object.
 
static PetscErrorCode PetscMkdirRecursive (const char *path)
 Creates a directory path recursively, similar to mkdir -p.
 
PetscErrorCode SetupSimulationEnvironment (SimCtx *simCtx)
 Verifies and prepares the complete I/O environment for a simulation run.
 
static PetscErrorCode AllocateContextHierarchy (SimCtx *simCtx)
 Allocates the memory for the UserMG and UserCtx hierarchy.
 
static PetscErrorCode SetupSolverParameters (SimCtx *simCtx)
 
PetscErrorCode SetupGridAndSolvers (SimCtx *simCtx)
 The main orchestrator for setting up all grid-related components.
 
PetscErrorCode CreateAndInitializeAllVectors (SimCtx *simCtx)
 Creates and initializes all PETSc Vec objects for all fields.
 
PetscErrorCode UpdateLocalGhosts (UserCtx *user, const char *fieldName)
 Updates the local vector (including ghost points) from its corresponding global vector.
 
PetscErrorCode SetupBoundaryConditions (SimCtx *simCtx)
 (Orchestrator) Sets up all boundary conditions for the simulation.
 
PetscErrorCode Allocate3DArrayScalar (PetscReal ****array, PetscInt nz, PetscInt ny, PetscInt nx)
 Allocates a 3D array of PetscReal values using PetscCalloc.
 
PetscErrorCode Deallocate3DArrayScalar (PetscReal ***array, PetscInt nz, PetscInt ny)
 Deallocates a 3D array of PetscReal values allocated by Allocate3DArrayScalar.
 
PetscErrorCode Allocate3DArrayVector (Cmpnts ****array, PetscInt nz, PetscInt ny, PetscInt nx)
 Allocates a 3D array of Cmpnts structures using PetscCalloc.
 
PetscErrorCode Deallocate3DArrayVector (Cmpnts ***array, PetscInt nz, PetscInt ny)
 Deallocates a 3D array of Cmpnts structures allocated by Allocate3DArrayVector.
 
PetscErrorCode GetOwnedCellRange (const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global_out, PetscInt *xm_cell_local_out)
 Gets the global starting index of cells owned by this rank and the number of such cells.
 
PetscErrorCode ComputeAndStoreNeighborRanks (UserCtx *user)
 Computes and stores the Cartesian neighbor ranks for the DMDA decomposition.
 
PetscErrorCode SetDMDAProcLayout (DM dm, UserCtx *user)
 Sets the processor layout for a given DMDA based on PETSc options.
 
PetscErrorCode SetupDomainRankInfo (SimCtx *simCtx)
 Sets up the full rank communication infrastructure for all blocks.
 
PetscErrorCode Contra2Cart (UserCtx *user)
 Reconstructs Cartesian velocity (Ucat) at cell centers from contravariant velocity (Ucont) defined on cell faces.
 
PetscErrorCode SetupDomainCellDecompositionMap (UserCtx *user)
 Creates and distributes a map of the domain's cell decomposition to all ranks.
 
PetscErrorCode BinarySearchInt64 (PetscInt n, const PetscInt64 arr[], PetscInt64 key, PetscBool *found)
 Performs a binary search for a key in a sorted array of PetscInt64.
 
static PetscInt Gidx (PetscInt i, PetscInt j, PetscInt k, UserCtx *user)
 
PetscErrorCode ComputeDivergence (UserCtx *user)
 
PetscErrorCode InitializeRandomGenerators (UserCtx *user, PetscRandom *randx, PetscRandom *randy, PetscRandom *randz)
 Initializes random number generators for assigning particle properties.
 
PetscErrorCode InitializeLogicalSpaceRNGs (PetscRandom *rand_logic_i, PetscRandom *rand_logic_j, PetscRandom *rand_logic_k)
 Initializes random number generators for logical space operations [0.0, 1.0).
 
PetscErrorCode InitializeBrownianRNG (SimCtx *simCtx)
 Initializes a single master RNG for time-stepping physics (Brownian motion).
 
void TransformScalarDerivativesToPhysical (PetscReal jacobian, Cmpnts csi_metrics, Cmpnts eta_metrics, Cmpnts zet_metrics, PetscReal dPhi_dcsi, PetscReal dPhi_deta, PetscReal dPhi_dzet, Cmpnts *gradPhi)
 Transforms scalar derivatives from computational space to physical space using the chain rule.
 
static void TransformDerivativesToPhysical (PetscReal jacobian, Cmpnts csi_metrics, Cmpnts eta_metrics, Cmpnts zet_metrics, Cmpnts deriv_csi, Cmpnts deriv_eta, Cmpnts deriv_zet, Cmpnts *dudx, Cmpnts *dvdx, Cmpnts *dwdx)
 Transforms derivatives from computational space to physical space using the chain rule.
 
PetscErrorCode ComputeScalarFieldDerivatives (UserCtx *user, PetscInt i, PetscInt j, PetscInt k, PetscReal ***field_data, Cmpnts *grad)
 Computes the gradient of a cell-centered SCALAR field at a specific grid point.
 
PetscErrorCode ComputeVectorFieldDerivatives (UserCtx *user, PetscInt i, PetscInt j, PetscInt k, Cmpnts ***field_data, Cmpnts *dudx, Cmpnts *dvdx, Cmpnts *dwdx)
 Computes the derivatives of a cell-centered vector field at a specific grid point.
 
PetscErrorCode DestroyUserVectors (UserCtx *user)
 Destroys all Vec objects in a UserCtx structure.
 
PetscErrorCode DestroyUserContext (UserCtx *user)
 Destroys all resources allocated within a single UserCtx structure.
 
PetscErrorCode FinalizeSimulation (SimCtx *simCtx)
 Main cleanup function for the entire simulation context.
 

Detailed Description

// Setup code for running any simulation

Test program for DMSwarm interpolation using the fdf-curvIB method. Provides the setup to start any simulation with DMSwarm and DMDAs.

Definition in file setup.c.

Macro Definition Documentation

◆ __FUNCT__ [1/27]

#define __FUNCT__   "CreateSimulationContext"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [2/27]

#define __FUNCT__   "PetscMkdirRecursive"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [3/27]

#define __FUNCT__   "SetupSimulationEnvironment"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [4/27]

#define __FUNCT__   "AllocateContextHierarchy"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [5/27]

#define __FUNCT__   "SetupSolverParameters"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [6/27]

#define __FUNCT__   "SetupGridAndSolvers"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [7/27]

#define __FUNCT__   "CreateAndInitializeAllVectors"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [8/27]

#define __FUNCT__   "UpdateLocalGhosts"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [9/27]

#define __FUNCT__   "SetupBoundaryConditions"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [10/27]

#define __FUNCT__   "GetOwnedCellRange"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [11/27]

#define __FUNCT__   "ComputeAndStoreNeighborRanks"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [12/27]

#define __FUNCT__   "SetDMDAProcLayout"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [13/27]

#define __FUNCT__   "SetupDomainRankInfo"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [14/27]

#define __FUNCT__   "Contra2Cart"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [15/27]

#define __FUNCT__   "SetupDomainCellDecompositionMap"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [16/27]

#define __FUNCT__   "BinarySearchInt64"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [17/27]

#define __FUNCT__   "ComputeDivergence"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [18/27]

#define __FUNCT__   "InitializeRandomGenerators"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [19/27]

#define __FUNCT__   "InitializeLogicalSpaceRNGs"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [20/27]

#define __FUNCT__   "InitializeBrownianRNG"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [21/27]

#define __FUNCT__   "TransformScalarDerivativesToPhysical"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [22/27]

#define __FUNCT__   "TransformDerivativesToPhysical"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [23/27]

#define __FUNCT__   "ComputeScalarFieldDerivatives"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [24/27]

#define __FUNCT__   "ComputeVectorFieldDerivatives"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [25/27]

#define __FUNCT__   "DestroyUserVectors"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [26/27]

#define __FUNCT__   "DestroyUserContext"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [27/27]

#define __FUNCT__   "FinalizeSimulation"

Definition at line 10 of file setup.c.

Function Documentation

◆ CreateSimulationContext()

PetscErrorCode CreateSimulationContext ( int  argc,
char **  argv,
SimCtx **  p_simCtx 
)

Allocates and populates the master SimulationContext object.

This function serves as the single, authoritative entry point for all simulation configuration. It merges the setup logic from both the legacy FSI/IBM solver and the modern particle solver into a unified, robust process.

The function follows a strict sequence:

  1. Allocate Context & Set Defaults: It first allocates the SimulationContext and populates every field with a sane, hardcoded default value. This ensures the simulation starts from a known, predictable state.
  2. Configure Logging System: It configures the custom logging framework. It parses the -func_config_file option to load a list of function names allowed to produce log output. This configuration (the file path and the list of function names) is stored within the SimulationContext for later reference and cleanup.
  3. Parse All Options: It performs a comprehensive pass of PetscOptionsGet... calls for every possible command-line flag, overriding the default values set in step 1.
  4. Log Summary: After all options are parsed, it uses the now-active logging system to print a summary of the key simulation parameters.
Parameters
[in]argcArgument count passed from main.
[in]argvArgument vector passed from main.
[out]p_simCtxOn success, this will point to the newly created and fully configured SimulationContext pointer. The caller is responsible for eventually destroying this object by calling FinalizeSimulation().
Returns
PetscErrorCode Returns 0 on success, or a non-zero PETSc error code on failure.

Definition at line 44 of file setup.c.

45{
46 PetscErrorCode ierr;
47 SimCtx *simCtx;
48 char control_filename[PETSC_MAX_PATH_LEN] = ""; // Temporary placeholder for control file name.
49 PetscBool control_flg; // Temporary placeholder for control file tag existence check flag.
50
51 PetscFunctionBeginUser;
52
54
55 // === 1. Allocate the Context Struct and Set ALL Defaults ==================
56 ierr = PetscNew(p_simCtx); CHKERRQ(ierr);
57 simCtx = *p_simCtx;
58
59 // --- Group 1: Parallelism & MPI ---
60 simCtx->rank = 0; simCtx->size = 1;
61
62 // --- Group 2: Simulation Control, Time, and I/O ---
63 simCtx->step = 0; simCtx->ti = 0.0; simCtx->StartStep = 0; simCtx->StepsToRun = 10;
64 simCtx->tiout = 10; simCtx->StartTime = 0.0; simCtx->dt = 0.001;
65 simCtx->OnlySetup = PETSC_FALSE;
66 simCtx->logviewer = NULL; simCtx->OutputFreq = simCtx->tiout;
67 strcpy(simCtx->eulerianSource,"solve");
68 strcpy(simCtx->restart_dir,"results");
69 strcpy(simCtx->output_dir,"results");
70 strcpy(simCtx->log_dir,"logs");
71 strcpy(simCtx->euler_subdir,"eulerian");
72 strcpy(simCtx->particle_subdir,"particles");
73 simCtx->_io_context_buffer[0] = '\0';
74 simCtx->current_io_directory = NULL;
75
76 // --- Group 3: High-Level Physics & Model Selection Flags ---
77 simCtx->immersed = 0; simCtx->movefsi = 0; simCtx->rotatefsi = 0;
78 simCtx->sediment = 0; simCtx->rheology = 0; simCtx->invicid = 0;
79 simCtx->TwoD = 0; simCtx->thin = 0; simCtx->moveframe = 0;
80 simCtx->rotateframe = 0; simCtx->blank = 0;
81 simCtx->dgf_x = 0; simCtx->dgf_y = 1; simCtx->dgf_z = 0;
82 simCtx->dgf_ax = 1; simCtx->dgf_ay = 0; simCtx->dgf_az = 0;
83 strcpy(simCtx->AnalyticalSolutionType,"TGV3D");
84
85 // --- Group 4: Specific Simulation Case Flags --- (DEPRECATED)
86 simCtx->cop=0; simCtx->fish=0; simCtx->fish_c=0; simCtx->fishcyl=0;
87 simCtx->eel=0; simCtx->pizza=0; simCtx->turbine=0; simCtx->Pipe=0;
88 simCtx->wing=0; simCtx->hydro=0; simCtx->MHV=0; simCtx->LV=0;
89 simCtx->channelz = 0;
90
91 // --- Group 5: Solver & Numerics Parameters ---
93 simCtx->mom_dt_rk4_residual_norm_noise_allowance_factor = 1.05; // New addition for divergence detection
94 simCtx->mom_atol = 1e-7; simCtx->mom_rtol = 1e-4; simCtx->imp_stol = 1.e-8;
95 simCtx->mglevels = 3; simCtx->mg_MAX_IT = 30; simCtx->mg_idx = 1;
96 simCtx->mg_preItr = 1; simCtx->mg_poItr = 1;
97 simCtx->poisson = 0; simCtx->poisson_tol = 5.e-9;
98 simCtx->STRONG_COUPLING = 0;simCtx->central=0;
99 simCtx->ren = 100.0; simCtx->pseudo_cfl = 0.1;
100 simCtx->max_pseudo_cfl = 1.0; simCtx->min_pseudo_cfl = 0.001;
101 simCtx->pseudo_cfl_reduction_factor = 1.0;
102 simCtx->pseudo_cfl_growth_factor = 1.0; //simCtx->vnn = 0.1;
103 simCtx->cdisx = 0.0; simCtx->cdisy = 0.0; simCtx->cdisz = 0.0;
104 simCtx->FieldInitialization = 0;
105 simCtx->InitialConstantContra.x = 0.0;
106 simCtx->InitialConstantContra.y = 0.0;
107 simCtx->InitialConstantContra.z = 0.0;
108
109 // --- Group 6: Physical & Geometric Parameters ---
110 simCtx->NumberOfBodies = 1; simCtx->Flux_in = 1.0; simCtx->angle = 0.0;
111 simCtx->max_angle = -54. * 3.1415926 / 180.;
112 simCtx->CMx_c=0.0; simCtx->CMy_c=0.0; simCtx->CMz_c=0.0;
113 simCtx->wall_roughness_height = 1e-16;
114 simCtx->schmidt_number = 1.0; simCtx->Turbulent_schmidt_number = 0.7;
115
116 // --- Group 7: Grid, Domain, and Boundary Condition Settings ---
117 simCtx->block_number = 1; simCtx->inletprofile = 1;
118 simCtx->grid1d = 0; simCtx->Ogrid = 0;
119 simCtx->i_periodic = 0; simCtx->j_periodic = 0; simCtx->k_periodic = 0;
120 simCtx->blkpbc = 10; simCtx->pseudo_periodic = 0;
121 strcpy(simCtx->grid_file, "config/grid.run");
122 simCtx->generate_grid = PETSC_FALSE;
123 simCtx->da_procs_x = PETSC_DECIDE;
124 simCtx->da_procs_y = PETSC_DECIDE;
125 simCtx->da_procs_z = PETSC_DECIDE;
126 simCtx->grid_rotation_angle = 0.0;
127 simCtx->Croty = 0.0; simCtx->Crotz = 0.0;
128 simCtx->num_bcs_files = 1;
129 ierr = PetscMalloc1(1, &simCtx->bcs_files); CHKERRQ(ierr);
130 ierr = PetscStrallocpy("config/bcs.run", &simCtx->bcs_files[0]); CHKERRQ(ierr);
131 simCtx->FluxInSum = 0.0; simCtx->FluxOutSum = 0.0; simCtx->Fluxsum = 0.0;
132 simCtx->drivingForceMagnitude = 0.0, simCtx->forceScalingFactor = 1.8;
133 simCtx->targetVolumetricFlux = 0.0;
134 simCtx->AreaInSum = 0.0; simCtx->AreaOutSum = 0.0;
135 simCtx->U_bc = 0.0; simCtx->ccc = 0;
136 simCtx->ratio = 0.0;
137
138
139 // --- Group 8: Turbulence Modeling (LES/RANS) ---
140 simCtx->averaging = PETSC_FALSE; simCtx->les = NO_LES_MODEL; simCtx->rans = 0;
141 simCtx->wallfunction = 0; simCtx->mixed = 0; simCtx->clark = 0;
142 simCtx->dynamic_freq = 1; simCtx->max_cs = 0.5;
143 simCtx->Const_CS = 0.03;
144 simCtx->testfilter_ik = 0; simCtx->testfilter_1d = 0;
145 simCtx->i_homo_filter = 0; simCtx->j_homo_filter = 0; simCtx->k_homo_filter = 0;
146
147 // --- Group 9: Particle / DMSwarm Data & Settings ---
148 simCtx->np = 0; simCtx->readFields = PETSC_FALSE;
149 simCtx->dm_swarm = NULL; simCtx->bboxlist = NULL;
151 strcpy(simCtx->particleRestartMode,"load");
152 simCtx->particlesLostLastStep = 0;
153 simCtx->particlesMigratedLastStep = 0;
154 simCtx->occupiedCellCount = 0;
155 simCtx->particleLoadImbalance = 0.0;
156 simCtx->migrationPassesLastStep = 0;
157 simCtx->BrownianMotionRNG = NULL;
158 simCtx->C_IEM = 2.0;
159
160 // --- Group 10: Immersed Boundary & FSI Data Object Pointers ---
161 simCtx->ibm = NULL; simCtx->ibmv = NULL; simCtx->fsi = NULL;
162 simCtx->rstart_fsi = PETSC_FALSE; simCtx->duplicate = 0;
163
164 // --- Group 11: Logging and Custom Configuration ---
165 strcpy(simCtx->allowedFile, "config/whitelist.run");
166 simCtx->useCfg = PETSC_FALSE;
167 simCtx->allowedFuncs = NULL;
168 simCtx->nAllowed = 0;
169 simCtx->LoggingFrequency = 10;
170 simCtx->summationRHS = 0.0;
171 simCtx->MaxDiv = 0.0;
172 simCtx->MaxDivFlatArg = 0; simCtx->MaxDivx = 0; simCtx->MaxDivy = 0; simCtx->MaxDivz = 0;
173 strcpy(simCtx->criticalFuncsFile, "config/profile.run");
174 simCtx->useCriticalFuncsCfg = PETSC_FALSE;
175 simCtx->criticalFuncs = NULL;
176 simCtx->nCriticalFuncs = 0;
177 // --- Group 12: Post-Processing Information ---
178 strcpy(simCtx->PostprocessingControlFile, "config/post.run");
179 ierr = PetscNew(&simCtx->pps); CHKERRQ(ierr);
180
181 // === 2. Get MPI Info and Handle Config File =============================
182 // -- Group 1: Parallelism & MPI Information
183 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &simCtx->rank); CHKERRQ(ierr);
184 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &simCtx->size); CHKERRQ(ierr);
185
186 // First, check if the -control_file argument was provided by the user/script.
187 ierr = PetscOptionsGetString(NULL, NULL, "-control_file", control_filename, sizeof(control_filename), &control_flg); CHKERRQ(ierr);
188
189 // If the flag is NOT present or the filename is empty, abort with a helpful error.
190 if (!control_flg || strlen(control_filename) == 0) {
191 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG,
192 "\n\n*** MANDATORY ARGUMENT MISSING ***\n"
193 "The -control_file argument was not provided.\n"
194 "This program must be launched with a configuration file.\n"
195 "Example: mpiexec -n 4 ./picsolver -control_file /path/to/your/config.control\n"
196 "This is typically handled automatically by the 'pic-flow' script.\n");
197 }
198
199 // At this point, we have a valid filename. Attempt to load it.
200 LOG(GLOBAL, LOG_INFO, "Loading mandatory configuration from: %s\n", control_filename);
201 ierr = PetscOptionsInsertFile(PETSC_COMM_WORLD, NULL, control_filename, PETSC_FALSE);
202 if (ierr == PETSC_ERR_FILE_OPEN) {
203 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_FILE_OPEN, "The specified control file was not found or could not be opened: %s", control_filename);
204 }
205 CHKERRQ(ierr);
206
207 // === 3. A Configure Logging System ========================================
208 // This logic determines the logging configuration and STORES it in simCtx for
209 // later reference and cleanup.
210 ierr = PetscOptionsGetString(NULL, NULL, "-whitelist_config_file", simCtx->allowedFile, PETSC_MAX_PATH_LEN, &simCtx->useCfg); CHKERRQ(ierr);
211
212 if (simCtx->useCfg) {
213 ierr = LoadAllowedFunctionsFromFile(simCtx->allowedFile, &simCtx->allowedFuncs, &simCtx->nAllowed);
214 if (ierr) {
215 // Use direct PetscPrintf as logging system isn't fully active yet.
216 PetscPrintf(PETSC_COMM_SELF, "[%s] WARNING: Failed to load allowed functions from '%s'. Falling back to default list.\n", __func__, simCtx->allowedFile);
217 simCtx->useCfg = PETSC_FALSE; // Mark as failed.
218 ierr = 0; // Clear the error to allow fallback.
219 }
220 }
221 if (!simCtx->useCfg) {
222 // Fallback to default logging functions if no file was used or if loading failed.
223 simCtx->nAllowed = 2;
224 ierr = PetscMalloc1(simCtx->nAllowed, &simCtx->allowedFuncs); CHKERRQ(ierr);
225 ierr = PetscStrallocpy("main", &simCtx->allowedFuncs[0]); CHKERRQ(ierr);
226 ierr = PetscStrallocpy("CreateSimulationContext", &simCtx->allowedFuncs[1]); CHKERRQ(ierr);
227 }
228
229 // Activate the configuration by passing it to the logging module's setup function.
230 set_allowed_functions((const char**)simCtx->allowedFuncs, (size_t)simCtx->nAllowed);
231
232 // Now that the logger is configured, we can use it.
233 LOG_ALLOW_SYNC(LOCAL, LOG_INFO, "Context created. Initializing on rank %d of %d.\n", simCtx->rank, simCtx->size);
234 print_log_level(); // This will now correctly reflect the LOG_LEVEL environment variable.
235
236 // === 3.B Configure Profiling System ========================================
237 ierr = PetscOptionsGetString(NULL, NULL, "-profile_config_file", simCtx->criticalFuncsFile, PETSC_MAX_PATH_LEN, &simCtx->useCriticalFuncsCfg); CHKERRQ(ierr);
238 if (simCtx->useCriticalFuncsCfg) {
240 if (ierr) {
241 PetscPrintf(PETSC_COMM_SELF, "[%s] WARNING: Failed to load critical profiling functions from '%s'. Falling back to default list.\n", __func__, simCtx->criticalFuncsFile);
242 simCtx->useCriticalFuncsCfg = PETSC_FALSE;
243 ierr = 0;
244 }
245 }
246 if (!simCtx->useCriticalFuncsCfg) {
247 // Fallback to a hardcoded default list if no file or loading failed
248 simCtx->nCriticalFuncs = 4;
249 ierr = PetscMalloc1(simCtx->nCriticalFuncs, &simCtx->criticalFuncs); CHKERRQ(ierr);
250 ierr = PetscStrallocpy("FlowSolver", &simCtx->criticalFuncs[0]); CHKERRQ(ierr);
251 ierr = PetscStrallocpy("AdvanceSimulation", &simCtx->criticalFuncs[1]); CHKERRQ(ierr);
252 ierr = PetscStrallocpy("LocateAllParticlesInGrid", &simCtx->criticalFuncs[2]); CHKERRQ(ierr);
253 ierr = PetscStrallocpy("InterpolateAllFieldsToSwarm", &simCtx->criticalFuncs[3]); CHKERRQ(ierr);
254 }
255
256 // Initialize the profiling system with the current updated simulation context.
257 ierr = ProfilingInitialize(simCtx); CHKERRQ(ierr);
258
259 // === 4. Parse All Command Line Options ==================================
260 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing command-line options...\n");
261
262 // --- Group 2
263 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 2: Simulation Control,Time and I/O.\n");
264 // Read the physical time to start from.
265 // The default is already 0.0, so this will only be non-zero if the user provides it.
266 ierr = PetscOptionsGetInt(NULL, NULL, "-start_step", &simCtx->StartStep, NULL); CHKERRQ(ierr);
267 ierr = PetscOptionsGetInt(NULL,NULL, "-totalsteps", &simCtx->StepsToRun, NULL); CHKERRQ(ierr);
268 ierr = PetscOptionsGetBool(NULL, NULL, "-only_setup", &simCtx->OnlySetup, NULL); CHKERRQ(ierr);
269 ierr = PetscOptionsGetReal(NULL, NULL, "-dt", &simCtx->dt, NULL); CHKERRQ(ierr);
270 ierr = PetscOptionsGetInt(NULL, NULL, "-tio", &simCtx->tiout, NULL); CHKERRQ(ierr);
271 ierr = PetscOptionsGetString(NULL,NULL,"-euler_field_source",simCtx->eulerianSource,sizeof(simCtx->eulerianSource),NULL);CHKERRQ(ierr);
272 ierr = PetscOptionsGetString(NULL,NULL,"-output_dir",&simCtx->output_dir,sizeof(simCtx->output_dir),NULL);CHKERRQ(ierr);
273 ierr = PetscOptionsGetString(NULL,NULL,"-restart_dir",&simCtx->restart_dir,sizeof(simCtx->restart_dir),NULL);CHKERRQ(ierr);
274 ierr = PetscOptionsGetString(NULL,NULL,"-log_dir",&simCtx->log_dir,sizeof(simCtx->log_dir),NULL);CHKERRQ(ierr);
275 ierr = PetscOptionsGetString(NULL,NULL,"-euler_subdir",&simCtx->euler_subdir,sizeof(simCtx->euler_subdir),NULL);CHKERRQ(ierr);
276 ierr = PetscOptionsGetString(NULL,NULL,"-particle_subdir",&simCtx->particle_subdir,sizeof(simCtx->particle_subdir),NULL);CHKERRQ(ierr);
277
278 simCtx->OutputFreq = simCtx->tiout; // backward compatibility related redundancy.
279 if(strcmp(simCtx->eulerianSource,"solve")!= 0 && strcmp(simCtx->eulerianSource,"load") != 0 && strcmp(simCtx->eulerianSource,"analytical")!=0){
280 SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Invalid value for -euler_field_source. Must be 'load','analytical' or 'solve'. You provided '%s'.",simCtx->eulerianSource);
281 }
282
283 // --- Group 3
284 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 3: High-Level Physics & Model Selection Flags\n");
285 ierr = PetscOptionsGetInt(NULL, NULL, "-imm", &simCtx->immersed, NULL); CHKERRQ(ierr);
286 ierr = PetscOptionsGetInt(NULL, NULL, "-fsi", &simCtx->movefsi, NULL); CHKERRQ(ierr);
287 ierr = PetscOptionsGetInt(NULL, NULL, "-rfsi", &simCtx->rotatefsi, NULL); CHKERRQ(ierr);
288 ierr = PetscOptionsGetInt(NULL, NULL, "-sediment", &simCtx->sediment, NULL); CHKERRQ(ierr);
289 ierr = PetscOptionsGetInt(NULL, NULL, "-rheology", &simCtx->rheology, NULL); CHKERRQ(ierr);
290 ierr = PetscOptionsGetInt(NULL, NULL, "-inv", &simCtx->invicid, NULL); CHKERRQ(ierr);
291 ierr = PetscOptionsGetInt(NULL, NULL, "-TwoD", &simCtx->TwoD, NULL); CHKERRQ(ierr);
292 ierr = PetscOptionsGetInt(NULL, NULL, "-thin", &simCtx->thin, NULL); CHKERRQ(ierr);
293 ierr = PetscOptionsGetInt(NULL, NULL, "-mframe", &simCtx->moveframe, NULL); CHKERRQ(ierr);
294 ierr = PetscOptionsGetInt(NULL, NULL, "-rframe", &simCtx->rotateframe, NULL); CHKERRQ(ierr);
295 ierr = PetscOptionsGetInt(NULL, NULL, "-blk", &simCtx->blank, NULL); CHKERRQ(ierr);
296 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_z", &simCtx->dgf_z, NULL); CHKERRQ(ierr);
297 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_y", &simCtx->dgf_y, NULL); CHKERRQ(ierr);
298 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_x", &simCtx->dgf_x, NULL); CHKERRQ(ierr);
299 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_az", &simCtx->dgf_az, NULL); CHKERRQ(ierr);
300 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_ay", &simCtx->dgf_ay, NULL); CHKERRQ(ierr);
301 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_ax", &simCtx->dgf_ax, NULL); CHKERRQ(ierr);
302 ierr = PetscOptionsGetString(NULL,NULL,"-analytical_type",simCtx->AnalyticalSolutionType,sizeof(simCtx->AnalyticalSolutionType),NULL);CHKERRQ(ierr);
303
304 // --- Group 4
305 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 4: Specific Simulation Case Flags \n");
306 ierr = PetscOptionsGetInt(NULL, NULL, "-cop", &simCtx->cop, NULL); CHKERRQ(ierr);
307 ierr = PetscOptionsGetInt(NULL, NULL, "-fish", &simCtx->fish, NULL); CHKERRQ(ierr);
308 ierr = PetscOptionsGetInt(NULL, NULL, "-pizza", &simCtx->pizza, NULL); CHKERRQ(ierr);
309 ierr = PetscOptionsGetInt(NULL, NULL, "-turbine", &simCtx->turbine, NULL); CHKERRQ(ierr);
310 ierr = PetscOptionsGetInt(NULL, NULL, "-fishcyl", &simCtx->fishcyl, NULL); CHKERRQ(ierr);
311 ierr = PetscOptionsGetInt(NULL, NULL, "-eel", &simCtx->eel, NULL); CHKERRQ(ierr);
312 ierr = PetscOptionsGetInt(NULL, NULL, "-cstart", &simCtx->fish_c, NULL); CHKERRQ(ierr);
313 ierr = PetscOptionsGetInt(NULL, NULL, "-wing", &simCtx->wing, NULL); CHKERRQ(ierr);
314 ierr = PetscOptionsGetInt(NULL, NULL, "-mhv", &simCtx->MHV, NULL); CHKERRQ(ierr);
315 ierr = PetscOptionsGetInt(NULL, NULL, "-hydro", &simCtx->hydro, NULL); CHKERRQ(ierr);
316 ierr = PetscOptionsGetInt(NULL, NULL, "-lv", &simCtx->LV, NULL); CHKERRQ(ierr);
317 ierr = PetscOptionsGetInt(NULL, NULL, "-Pipe", &simCtx->Pipe, NULL); CHKERRQ(ierr);
318 ierr = PetscOptionsGetInt(NULL, NULL, "-Turbulent_Channel_z", &simCtx->channelz, NULL); CHKERRQ(ierr);
319 ierr = PetscOptionsGetReal(NULL,NULL,"-Turbulent_Channel_z_Driving_Force",&simCtx->drivingForceMagnitude,NULL);CHKERRQ(ierr);
320 ierr = PetscOptionsGetReal(NULL,NULL,"-Turbulent_Channel_z_Scaling_Factor",&simCtx->forceScalingFactor,NULL);CHKERRQ(ierr);
321 // --- Group 5
322 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 5: Solver & Numerics Parameters \n");
323 char mom_solver_type_char[PETSC_MAX_PATH_LEN];
324 ierr = PetscOptionsGetString(NULL, NULL, "-mom_solver_type", &mom_solver_type_char,sizeof(mom_solver_type_char),NULL); CHKERRQ(ierr);
325 ierr = PetscOptionsGetInt(NULL, NULL, "-mom_max_pseudo_steps", &simCtx->mom_max_pseudo_steps, NULL); CHKERRQ(ierr);
326 ierr = PetscOptionsGetReal(NULL, NULL, "-mom_atol", &simCtx->mom_atol, NULL); CHKERRQ(ierr);
327 ierr = PetscOptionsGetReal(NULL, NULL, "-mom_rtol", &simCtx->mom_rtol, NULL); CHKERRQ(ierr);
328 ierr = PetscOptionsGetReal(NULL, NULL, "-imp_stol", &simCtx->imp_stol, NULL); CHKERRQ(ierr);
329 ierr = PetscOptionsGetInt(NULL, NULL, "-central", &simCtx->central, NULL); CHKERRQ(ierr);
330
331 // Map the string input to the enum type.
332 if(strcmp(mom_solver_type_char, "DUALTIME_PICARD_RK4") == 0) {
334 } else if (strcmp(mom_solver_type_char, "DUALTIME_NK_ARNOLDI") == 0) {
336 } else if (strcmp(mom_solver_type_char, "DUALTIME_NK_ANALYTICAL_JACOBIAN") == 0) {
338 } else if (strcmp(mom_solver_type_char, "EXPLICIT_RK") == 0) {
340 } else {
341 LOG(GLOBAL, LOG_ERROR, "Invalid value for -mom_solver_type: '%s'. Valid options are: 'DUALTIME_PICARD_RK4', 'DUALTIME_NK_ARNOLDI', 'DUALTIME_NK_ANALYTICAL_JACOBIAN', 'EXPLICIT_RK'.\n", mom_solver_type_char);
342 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Invalid value for -mom_solver_type: '%s'.", mom_solver_type_char);
343 }
344
345 // --- Multigrid Options ---
346 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_level", &simCtx->mglevels, NULL); CHKERRQ(ierr);
347 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_max_it", &simCtx->mg_MAX_IT, NULL); CHKERRQ(ierr);
348 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_idx", &simCtx->mg_idx, NULL); CHKERRQ(ierr);
349 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_pre_it", &simCtx->mg_preItr, NULL); CHKERRQ(ierr);
350 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_post_it", &simCtx->mg_poItr, NULL); CHKERRQ(ierr);
351
352 // --- Other Solver Options ---
353 ierr = PetscOptionsGetInt(NULL, NULL, "-poisson", &simCtx->poisson, NULL); CHKERRQ(ierr);
354 ierr = PetscOptionsGetReal(NULL, NULL, "-poisson_tol", &simCtx->poisson_tol, NULL); CHKERRQ(ierr);
355 ierr = PetscOptionsGetInt(NULL, NULL, "-str", &simCtx->STRONG_COUPLING, NULL); CHKERRQ(ierr);
356 ierr = PetscOptionsGetReal(NULL, NULL, "-ren", &simCtx->ren, NULL); CHKERRQ(ierr);
357 ierr = PetscOptionsGetReal(NULL, NULL, "-pseudo_cfl", &simCtx->pseudo_cfl, NULL); CHKERRQ(ierr);
358 ierr = PetscOptionsGetReal(NULL, NULL, "-max_pseudo_cfl", &simCtx->max_pseudo_cfl, NULL); CHKERRQ(ierr);
359 ierr = PetscOptionsGetReal(NULL, NULL, "-min_pseudo_cfl", &simCtx->min_pseudo_cfl, NULL); CHKERRQ(ierr);
360 ierr = PetscOptionsGetReal(NULL, NULL, "-pseudo_cfl_reduction_factor", &simCtx->pseudo_cfl_reduction_factor, NULL); CHKERRQ(ierr);
361 ierr = PetscOptionsGetReal(NULL, NULL, "-pseudo_cfl_growth_factor", &simCtx->pseudo_cfl_growth_factor, NULL); CHKERRQ(ierr);
362 ierr = PetscOptionsGetReal(NULL,NULL, "-mom_dt_rk4_residual_norm_noise_allowance_factor",&simCtx->mom_dt_rk4_residual_norm_noise_allowance_factor,NULL);CHKERRQ(ierr);
363 ierr = PetscOptionsGetInt(NULL, NULL, "-finit", &simCtx->FieldInitialization, NULL); CHKERRQ(ierr);
364 ierr = PetscOptionsGetReal(NULL, NULL, "-ucont_x", &simCtx->InitialConstantContra.x, NULL); CHKERRQ(ierr);
365 ierr = PetscOptionsGetReal(NULL, NULL, "-ucont_y", &simCtx->InitialConstantContra.y, NULL); CHKERRQ(ierr);
366 ierr = PetscOptionsGetReal(NULL, NULL, "-ucont_z", &simCtx->InitialConstantContra.z, NULL); CHKERRQ(ierr);
367 // NOTE: cdisx,cdisy,cdisz haven't been parsed, add if necessary.
368
369 // --- Group 6
370 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 6: Physical & Geometric Parameters \n");
371 ierr = PetscOptionsGetReal(NULL,NULL,"-schmidt_number",&simCtx->schmidt_number,NULL);CHKERRQ(ierr);
372 ierr = PetscOptionsGetReal(NULL,NULL,"-turb_schmidt_number",&simCtx->Turbulent_schmidt_number,NULL);CHKERRQ(ierr);
373 ierr = PetscOptionsGetInt(NULL, NULL, "-no_of_bodies", &simCtx->NumberOfBodies, NULL); CHKERRQ(ierr);
374 ierr = PetscOptionsGetReal(NULL,NULL,"-wall_roughness",&simCtx->wall_roughness_height,NULL);CHKERRQ(ierr);
375 // NOTE: angle is not parsed in the original code; it is set programmatically. We will follow that.
376 // NOTE: max_angle is calculated based on other flags (like MHV) in the legacy code.
377 // We will defer that logic to a later setup stage and not parse them directly.
378 // The Scaling Information is calculated here
379 ierr = ParseScalingInformation(simCtx); CHKERRQ(ierr);
380
381 // --- Group 7
382 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 7: Grid, Domain, and Boundary Condition Settings \n");
383 ierr = PetscOptionsGetInt(NULL, NULL, "-nblk", &simCtx->block_number, NULL); CHKERRQ(ierr); // This is also a modern option
384 ierr = PetscOptionsGetInt(NULL, NULL, "-inlet", &simCtx->inletprofile, NULL); CHKERRQ(ierr);
385 ierr = PetscOptionsGetInt(NULL, NULL, "-Ogrid", &simCtx->Ogrid, NULL); CHKERRQ(ierr);
386 // NOTE: channelz was not parsed, likely set programmatically. We will omit its parsing call.
387 ierr = PetscOptionsGetInt(NULL, NULL, "-grid1d", &simCtx->grid1d, NULL); CHKERRQ(ierr);
388 ierr = PetscOptionsGetBool(NULL, NULL, "-grid", &simCtx->generate_grid, NULL); CHKERRQ(ierr);
389 ierr = PetscOptionsGetString(NULL, NULL, "-grid_file", simCtx->grid_file, PETSC_MAX_PATH_LEN, NULL); CHKERRQ(ierr);
390 ierr = PetscOptionsGetInt(NULL, NULL, "-da_processors_x", &simCtx->da_procs_x, NULL); CHKERRQ(ierr);
391 ierr = PetscOptionsGetInt(NULL, NULL, "-da_processors_y", &simCtx->da_procs_y, NULL); CHKERRQ(ierr);
392 ierr = PetscOptionsGetInt(NULL, NULL, "-da_processors_z", &simCtx->da_procs_z, NULL); CHKERRQ(ierr);
393 ierr = PetscOptionsGetInt(NULL, NULL, "-i_periodic", &simCtx->i_periodic, NULL); CHKERRQ(ierr);
394 ierr = PetscOptionsGetInt(NULL, NULL, "-j_periodic", &simCtx->j_periodic, NULL); CHKERRQ(ierr);
395 ierr = PetscOptionsGetInt(NULL, NULL, "-k_periodic", &simCtx->k_periodic, NULL); CHKERRQ(ierr);
396 ierr = PetscOptionsGetInt(NULL, NULL, "-pbc_domain", &simCtx->blkpbc, NULL); CHKERRQ(ierr);
397 // NOTE: pseudo_periodic was not parsed. We will omit its parsing call.
398 ierr = PetscOptionsGetReal(NULL, NULL, "-grid_rotation_angle", &simCtx->grid_rotation_angle, NULL); CHKERRQ(ierr);
399 ierr = PetscOptionsGetReal(NULL, NULL, "-Croty", &simCtx->Croty, NULL); CHKERRQ(ierr);
400 ierr = PetscOptionsGetReal(NULL, NULL, "-Crotz", &simCtx->Crotz, NULL); CHKERRQ(ierr);
401 PetscBool bcs_flg;
402 char file_list_str[PETSC_MAX_PATH_LEN * 10]; // Buffer for comma-separated list
403
404 ierr = PetscOptionsGetString(NULL, NULL, "-bcs_files", file_list_str, sizeof(file_list_str), &bcs_flg); CHKERRQ(ierr);
405 ierr = PetscOptionsGetReal(NULL, NULL, "-U_bc", &simCtx->U_bc, NULL); CHKERRQ(ierr);
406
407 if (bcs_flg) {
408 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found -bcs_files option, overriding default.\n");
409
410 // A. Clean up the default memory we allocated in Phase 1.
411 ierr = PetscFree(simCtx->bcs_files[0]); CHKERRQ(ierr);
412 ierr = PetscFree(simCtx->bcs_files); CHKERRQ(ierr);
413 simCtx->num_bcs_files = 0;
414 simCtx->bcs_files = NULL;
415
416 // B. Parse the user-provided comma-separated list.
417 char *token;
418 char *str_copy;
419 ierr = PetscStrallocpy(file_list_str, &str_copy); CHKERRQ(ierr);
420
421 // First pass: count the number of files.
422 token = strtok(str_copy, ",");
423 while (token) {
424 simCtx->num_bcs_files++;
425 token = strtok(NULL, ",");
426 }
427 ierr = PetscFree(str_copy); CHKERRQ(ierr);
428
429 // Second pass: allocate memory and store the filenames.
430 ierr = PetscMalloc1(simCtx->num_bcs_files, &simCtx->bcs_files); CHKERRQ(ierr);
431 ierr = PetscStrallocpy(file_list_str, &str_copy); CHKERRQ(ierr);
432 token = strtok(str_copy, ",");
433 for (PetscInt i = 0; i < simCtx->num_bcs_files; i++) {
434 ierr = PetscStrallocpy(token, &simCtx->bcs_files[i]); CHKERRQ(ierr);
435 token = strtok(NULL, ",");
436 }
437 ierr = PetscFree(str_copy); CHKERRQ(ierr);
438 }
439
440
441 // --- Group 8
442 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 8: Turbulence Modeling (LES/RANS) \n");
443 PetscInt temp_les_model;
444 ierr = PetscOptionsGetInt(NULL, NULL, "-les", &temp_les_model, NULL); CHKERRQ(ierr);
445 simCtx->les = (LESModelType)temp_les_model;
446 ierr = PetscOptionsGetInt(NULL, NULL, "-rans", &simCtx->rans, NULL); CHKERRQ(ierr);
447 ierr = PetscOptionsGetInt(NULL, NULL, "-wallfunction", &simCtx->wallfunction, NULL); CHKERRQ(ierr);
448 ierr = PetscOptionsGetInt(NULL, NULL, "-mixed", &simCtx->mixed, NULL); CHKERRQ(ierr);
449 ierr = PetscOptionsGetInt(NULL, NULL, "-clark", &simCtx->clark, NULL); CHKERRQ(ierr);
450 ierr = PetscOptionsGetInt(NULL, NULL, "-dynamic_freq", &simCtx->dynamic_freq, NULL); CHKERRQ(ierr);
451 ierr = PetscOptionsGetReal(NULL, NULL, "-max_cs", &simCtx->max_cs, NULL); CHKERRQ(ierr);
452 ierr = PetscOptionsGetReal(NULL, NULL, "-const_cs", &simCtx->Const_CS, NULL); CHKERRQ(ierr);
453 ierr = PetscOptionsGetInt(NULL, NULL, "-testfilter_ik", &simCtx->testfilter_ik, NULL); CHKERRQ(ierr);
454 ierr = PetscOptionsGetInt(NULL, NULL, "-testfilter_1d", &simCtx->testfilter_1d, NULL); CHKERRQ(ierr);
455 ierr = PetscOptionsGetInt(NULL, NULL, "-i_homo_filter", &simCtx->i_homo_filter, NULL); CHKERRQ(ierr);
456 ierr = PetscOptionsGetInt(NULL, NULL, "-j_homo_filter", &simCtx->j_homo_filter, NULL); CHKERRQ(ierr);
457 ierr = PetscOptionsGetInt(NULL, NULL, "-k_homo_filter", &simCtx->k_homo_filter, NULL); CHKERRQ(ierr);
458 ierr = PetscOptionsGetBool(NULL, NULL, "-averaging", &simCtx->averaging, NULL); CHKERRQ(ierr);
459
460 // --- Group 9
461 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 9: Particle / DMSwarm Data & Settings \n");
462 ierr = PetscOptionsGetInt(NULL, NULL, "-numParticles", &simCtx->np, NULL); CHKERRQ(ierr);
463 ierr = PetscOptionsGetBool(NULL, NULL, "-read_fields", &simCtx->readFields, NULL); CHKERRQ(ierr);
464 PetscInt temp_pinit = (PetscInt)PARTICLE_INIT_SURFACE_RANDOM;
465 ierr = PetscOptionsGetInt(NULL, NULL, "-pinit", &temp_pinit, NULL); CHKERRQ(ierr);
467 ierr = PetscOptionsGetReal(NULL, NULL, "-psrc_x", &simCtx->psrc_x, NULL); CHKERRQ(ierr);
468 ierr = PetscOptionsGetReal(NULL, NULL, "-psrc_y", &simCtx->psrc_y, NULL); CHKERRQ(ierr);
469 ierr = PetscOptionsGetReal(NULL, NULL, "-psrc_z", &simCtx->psrc_z, NULL); CHKERRQ(ierr);
470 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Particle initialization mode: %s. Point source: (%.6f, %.6f, %.6f)\n",
472 simCtx->psrc_x, simCtx->psrc_y, simCtx->psrc_z);
473 ierr = PetscOptionsGetString(NULL,NULL,"-particle_restart_mode",simCtx->particleRestartMode,sizeof(simCtx->particleRestartMode),NULL); CHKERRQ(ierr);
474 // Validation for Particle Restart Mode
475 if (strcmp(simCtx->particleRestartMode, "load") != 0 && strcmp(simCtx->particleRestartMode, "init") != 0) {
476 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Invalid value for -particle_restart_mode. Must be 'load' or 'init'. You provided '%s'.", simCtx->particleRestartMode);
477 }
478 ierr = InitializeBrownianRNG(simCtx); CHKERRQ(ierr);
479 // --- Group 10
480 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 10: Immersed Boundary & FSI Data Object Pointers \n");
481 ierr = PetscOptionsGetBool(NULL, NULL, "-rs_fsi", &simCtx->rstart_fsi, NULL); CHKERRQ(ierr);
482 ierr = PetscOptionsGetInt(NULL, NULL, "-duplicate", &simCtx->duplicate, NULL); CHKERRQ(ierr);
483
484 // --- Group 11
485 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 11: Top-Level Managers & Custom Configuration \n");
486 ierr = PetscOptionsGetInt(NULL, NULL, "-logfreq", &simCtx->LoggingFrequency, NULL); CHKERRQ(ierr);
487
488 if (simCtx->num_bcs_files != simCtx->block_number) {
489 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "Number of BC files (%d) does not match number of blocks (%d). Use -bcs_files \"file1.dat,file2.dat,...\".", simCtx->num_bcs_files, simCtx->block_number);
490 }
491
492 // --- Group 12
493 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 12: Post-Processing Information.\n");
494 // This logic determines the Post Processing configuration and STORES it in simCtx for later reference and cleanup.
495 ierr = PetscOptionsGetString(NULL,NULL,"-postprocessing_config_file",simCtx->PostprocessingControlFile,PETSC_MAX_PATH_LEN,NULL); CHKERRQ(ierr);
496 ierr = PetscNew(&simCtx->pps); CHKERRQ(ierr);
497 ierr = ParsePostProcessingSettings(simCtx);
498
499 // === 5. Dependent Parameter Calculations ================================
500 // Some parameters depend on others, so we calculate them here.
501 simCtx->StartTime = (PetscReal)simCtx->StartStep*simCtx->dt;
502 simCtx->ti = simCtx->StartTime;
503 simCtx->step = simCtx->StartStep;
504
505 // === 5. Log Summary and Finalize Setup ==================================
506 LOG_ALLOW(GLOBAL, LOG_DEBUG, "-- Console Output Functions [Total : %d] : --\n", simCtx->nAllowed);
507 for (PetscInt i = 0; i < simCtx->nAllowed; ++i) {
508 LOG_ALLOW(GLOBAL, LOG_DEBUG, " [%2d] «%s»\n", i, simCtx->allowedFuncs[i]);
509 }
510
511 LOG_ALLOW(GLOBAL, LOG_INFO, "Configuration complete. Key parameters:\n");
512 LOG_ALLOW(GLOBAL, LOG_INFO, " - Run mode: %s\n", simCtx->OnlySetup ? "SETUP ONLY" : "Full Simulation");
513 LOG_ALLOW(GLOBAL, LOG_INFO, " - Time steps: %d (from %d to %d)\n", simCtx->StepsToRun, simCtx->StartStep, simCtx->StartStep + simCtx->StepsToRun);
514 LOG_ALLOW(GLOBAL, LOG_INFO, " - Time step size (dt): %g\n", simCtx->dt);
515 LOG_ALLOW(GLOBAL, LOG_INFO, " - Immersed Boundary: %s\n", simCtx->immersed ? "ENABLED" : "DISABLED");
516 LOG_ALLOW(GLOBAL, LOG_INFO, " - Particles: %d\n", simCtx->np);
517 if (simCtx->StartStep > 0 && simCtx->np > 0) {
518 LOG_ALLOW(GLOBAL, LOG_INFO, " - Particle Restart Mode: %s\n", simCtx->particleRestartMode);
519 }
520
521 // --- Initialize PETSc's internal performance logging stage ---
522 ierr = PetscLogDefaultBegin(); CHKERRQ(ierr); // REDUNDANT but safe.
523
524 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Finished CreateSimulationContext successfully on rank %d.\n", simCtx->rank);
525
527 PetscFunctionReturn(0);
528}
PetscErrorCode ParsePostProcessingSettings(SimCtx *simCtx)
Initializes post-processing settings from a config file and command-line overrides.
Definition io.c:2458
PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
Parses physical scaling parameters from command-line options.
Definition io.c:2593
void set_allowed_functions(const char **functionList, int count)
Sets the global list of function names that are allowed to log.
Definition logging.c:122
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Synchronized (rank-collective) variant of the LOG_ALLOW logging macro.
Definition logging.h:267
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:45
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:46
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:200
PetscErrorCode print_log_level(void)
Prints the current logging level to the console.
Definition logging.c:82
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:746
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:84
PetscErrorCode LoadAllowedFunctionsFromFile(const char filename[], char ***funcsOut, PetscInt *nOut)
Load function names from a text file.
Definition logging.c:566
PetscErrorCode ProfilingInitialize(SimCtx *simCtx)
Initializes the custom profiling system using configuration from SimCtx.
Definition logging.c:1015
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:28
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:31
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:32
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:737
const char * ParticleInitializationToString(ParticleInitializationType ParticleInitialization)
Helper function to convert ParticleInitialization to a string representation.
Definition logging.c:675
PetscErrorCode InitializeBrownianRNG(SimCtx *simCtx)
Initializes a single master RNG for time-stepping physics (Brownian motion).
Definition setup.c:2740
LESModelType
Enumerates the available LES (Large-Eddy Simulation) turbulence model selections.
Definition variables.h:447
@ NO_LES_MODEL
Definition variables.h:448
PetscInt MHV
Definition variables.h:620
PetscInt turbine
Definition variables.h:620
PetscInt fishcyl
Definition variables.h:620
PetscInt clark
Definition variables.h:670
PetscInt movefsi
Definition variables.h:614
PetscInt moveframe
Definition variables.h:615
PetscInt TwoD
Definition variables.h:615
PetscInt pseudo_periodic
Definition variables.h:650
PetscInt fish_c
Definition variables.h:620
PetscInt dgf_z
Definition variables.h:616
PetscReal poisson_tol
Definition variables.h:629
PetscReal schmidt_number
Definition variables.h:646
PetscMPIInt rank
Definition variables.h:588
PetscInt fish
Definition variables.h:620
PetscInt LV
Definition variables.h:620
PetscReal angle
Definition variables.h:641
PetscReal Turbulent_schmidt_number
Definition variables.h:646
PetscInt thin
Definition variables.h:615
PetscInt grid1d
Definition variables.h:649
PetscInt block_number
Definition variables.h:649
PetscReal mom_rtol
Definition variables.h:626
PetscInt da_procs_z
Definition variables.h:655
PetscInt blkpbc
Definition variables.h:650
PetscInt sediment
Definition variables.h:614
PetscReal targetVolumetricFlux
Definition variables.h:661
PetscInt channelz
Definition variables.h:621
char euler_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:607
PetscReal forceScalingFactor
Definition variables.h:660
PetscReal pseudo_cfl_reduction_factor
Definition variables.h:633
PetscInt rans
Definition variables.h:669
ParticleInitializationType
Enumerator to identify the particle initialization strategy.
Definition variables.h:465
@ PARTICLE_INIT_SURFACE_RANDOM
Random placement on the inlet face.
Definition variables.h:466
PetscReal StartTime
Definition variables.h:598
PetscInt dgf_az
Definition variables.h:616
PetscReal FluxOutSum
Definition variables.h:658
PetscReal CMy_c
Definition variables.h:642
char ** bcs_files
Definition variables.h:657
PetscReal max_angle
Definition variables.h:641
PetscReal min_pseudo_cfl
Definition variables.h:634
PetscInt particlesLostLastStep
Definition variables.h:682
PetscInt duplicate
Definition variables.h:695
PetscInt tiout
Definition variables.h:597
char allowedFile[PETSC_MAX_PATH_LEN]
Definition variables.h:699
PetscInt da_procs_y
Definition variables.h:655
PetscInt testfilter_1d
Definition variables.h:672
PetscReal psrc_x
Definition variables.h:643
PetscInt FieldInitialization
Definition variables.h:636
PetscReal ren
Definition variables.h:632
PetscReal Crotz
Definition variables.h:653
PetscInt mixed
Definition variables.h:670
IBMVNodes * ibmv
Definition variables.h:692
char criticalFuncsFile[PETSC_MAX_PATH_LEN]
Definition variables.h:708
char output_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:606
PetscReal dt
Definition variables.h:599
PetscInt occupiedCellCount
Definition variables.h:685
PetscInt StepsToRun
Definition variables.h:596
PetscInt k_periodic
Definition variables.h:650
PetscInt inletprofile
Definition variables.h:649
PetscReal cdisy
Definition variables.h:632
PetscReal mom_atol
Definition variables.h:626
char ** criticalFuncs
Definition variables.h:710
PetscBool rstart_fsi
Definition variables.h:694
PetscInt np
Definition variables.h:676
PetscBool averaging
Definition variables.h:673
PetscReal C_IEM
Definition variables.h:688
PetscInt ccc
Definition variables.h:665
PetscReal ratio
Definition variables.h:666
PetscInt mg_idx
Definition variables.h:627
PetscInt StartStep
Definition variables.h:595
PetscInt mg_MAX_IT
Definition variables.h:627
PetscBool OnlySetup
Definition variables.h:600
PetscInt rotatefsi
Definition variables.h:614
@ MOMENTUM_SOLVER_DUALTIME_NK_ARNOLDI
Definition variables.h:460
@ MOMENTUM_SOLVER_EXPLICIT_RK
Definition variables.h:458
@ MOMENTUM_SOLVER_DUALTIME_PICARD_RK4
Definition variables.h:459
@ MOMENTUM_SOLVER_DUALTIME_NK_ANALYTIC_JACOBIAN
Definition variables.h:461
PetscReal cdisz
Definition variables.h:632
PetscScalar x
Definition variables.h:101
PetscInt dgf_x
Definition variables.h:616
char * current_io_directory
Definition variables.h:611
PetscInt pizza
Definition variables.h:620
PetscReal MaxDiv
Definition variables.h:705
char grid_file[PETSC_MAX_PATH_LEN]
Definition variables.h:654
PetscReal max_cs
Definition variables.h:671
PetscInt invicid
Definition variables.h:615
char ** allowedFuncs
Definition variables.h:701
PetscReal psrc_z
Point source location for PARTICLE_INIT_POINT_SOURCE.
Definition variables.h:643
PetscInt mg_poItr
Definition variables.h:627
PetscInt STRONG_COUPLING
Definition variables.h:630
PetscReal max_pseudo_cfl
Definition variables.h:634
PetscInt MaxDivx
Definition variables.h:706
PetscInt poisson
Definition variables.h:628
PetscInt k_homo_filter
Definition variables.h:672
PetscInt MaxDivy
Definition variables.h:706
PetscInt NumberOfBodies
Definition variables.h:640
char particleRestartMode[16]
Definition variables.h:681
PetscInt Ogrid
Definition variables.h:649
char particle_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:608
PetscInt MaxDivz
Definition variables.h:706
BoundingBox * bboxlist
Definition variables.h:679
PetscInt j_homo_filter
Definition variables.h:672
PetscInt eel
Definition variables.h:620
char log_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:609
PetscInt MaxDivFlatArg
Definition variables.h:706
PetscReal FluxInSum
Definition variables.h:658
PetscReal CMz_c
Definition variables.h:642
PetscBool generate_grid
Definition variables.h:651
char eulerianSource[PETSC_MAX_PATH_LEN]
Definition variables.h:604
PetscReal imp_stol
Definition variables.h:626
PetscInt nAllowed
Definition variables.h:702
PetscReal wall_roughness_height
Definition variables.h:645
ParticleInitializationType ParticleInitialization
Definition variables.h:680
PetscScalar z
Definition variables.h:101
PetscInt OutputFreq
Definition variables.h:602
PetscReal Const_CS
Definition variables.h:671
PetscInt i_homo_filter
Definition variables.h:672
PetscInt wallfunction
Definition variables.h:670
PetscInt rheology
Definition variables.h:614
PetscReal Flux_in
Definition variables.h:641
PetscReal cdisx
Definition variables.h:632
PetscInt dgf_ax
Definition variables.h:616
PetscInt mglevels
Definition variables.h:627
PetscInt num_bcs_files
Definition variables.h:656
DM dm_swarm
Definition variables.h:678
PetscBool useCfg
Definition variables.h:700
PetscReal psrc_y
Definition variables.h:643
PetscBool readFields
Definition variables.h:677
PetscInt central
Definition variables.h:630
PetscBool useCriticalFuncsCfg
Definition variables.h:709
PetscReal Fluxsum
Definition variables.h:658
PetscReal pseudo_cfl_growth_factor
Definition variables.h:633
PetscReal Croty
Definition variables.h:653
PetscReal mom_dt_rk4_residual_norm_noise_allowance_factor
Definition variables.h:635
PetscInt particlesMigratedLastStep
Definition variables.h:684
PetscReal grid_rotation_angle
Definition variables.h:652
PetscInt dynamic_freq
Definition variables.h:670
char AnalyticalSolutionType[PETSC_MAX_PATH_LEN]
Definition variables.h:617
PetscInt da_procs_x
Definition variables.h:655
PetscReal U_bc
Definition variables.h:664
Cmpnts InitialConstantContra
Definition variables.h:637
PetscInt i_periodic
Definition variables.h:650
PetscInt step
Definition variables.h:593
PetscReal AreaOutSum
Definition variables.h:663
PetscInt dgf_ay
Definition variables.h:616
PetscInt mom_max_pseudo_steps
Definition variables.h:625
PetscRandom BrownianMotionRNG
Definition variables.h:687
PetscInt testfilter_ik
Definition variables.h:672
PetscInt hydro
Definition variables.h:620
PostProcessParams * pps
Definition variables.h:715
PetscInt migrationPassesLastStep
Definition variables.h:683
PetscScalar y
Definition variables.h:101
PetscMPIInt size
Definition variables.h:589
char _io_context_buffer[PETSC_MAX_PATH_LEN]
Definition variables.h:610
PetscInt les
Definition variables.h:669
PetscInt mg_preItr
Definition variables.h:627
PetscViewer logviewer
Definition variables.h:601
PetscInt cop
Definition variables.h:620
PetscReal ti
Definition variables.h:594
PetscInt Pipe
Definition variables.h:620
PetscInt rotateframe
Definition variables.h:615
IBMNodes * ibm
Definition variables.h:691
PetscReal AreaInSum
Definition variables.h:663
MomentumSolverType mom_solver_type
Definition variables.h:624
PetscInt nCriticalFuncs
Definition variables.h:711
PetscReal summationRHS
Definition variables.h:704
PetscInt immersed
Definition variables.h:614
char PostprocessingControlFile[PETSC_MAX_PATH_LEN]
Definition variables.h:714
char restart_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:605
PetscInt blank
Definition variables.h:615
PetscInt dgf_y
Definition variables.h:616
PetscReal pseudo_cfl
Definition variables.h:632
PetscInt LoggingFrequency
Definition variables.h:703
PetscReal CMx_c
Definition variables.h:642
PetscReal drivingForceMagnitude
Definition variables.h:660
PetscReal particleLoadImbalance
Definition variables.h:686
PetscInt j_periodic
Definition variables.h:650
PetscInt wing
Definition variables.h:620
FSInfo * fsi
Definition variables.h:693
The master context for the entire simulation.
Definition variables.h:585
Here is the call graph for this function:
Here is the caller graph for this function:

◆ PetscMkdirRecursive()

static PetscErrorCode PetscMkdirRecursive ( const char *  path)
static

Creates a directory path recursively, similar to mkdir -p.

This function is designed to be called by Rank 0 only. It takes a full path, parses it, and creates each directory component in sequence.

Parameters
[in]pathThe full directory path to create.
Returns
PetscErrorCode

Definition at line 542 of file setup.c.

543{
544 PetscErrorCode ierr;
545 char tmp_path[PETSC_MAX_PATH_LEN];
546 char *p = NULL;
547 size_t len;
548 PetscBool exists;
549
550 PetscFunctionBeginUser;
551
552 // Create a mutable copy of the path
553 len = strlen(path);
554 if (len >= sizeof(tmp_path)) {
555 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Path is too long to process: %s", path);
556 }
557 strcpy(tmp_path, path);
558
559 // If the path ends with a separator, remove it
560 if (tmp_path[len - 1] == '/') {
561 tmp_path[len - 1] = 0;
562 }
563
564 // Iterate through the path, creating each directory level
565 for (p = tmp_path + 1; *p; p++) {
566 if (*p == '/') {
567 *p = 0; // Temporarily terminate the string
568
569 // Check if this directory level exists
570 ierr = PetscTestDirectory(tmp_path, 'r', &exists); CHKERRQ(ierr);
571 if (!exists) {
572 ierr = PetscMkdir(tmp_path); CHKERRQ(ierr);
573 }
574
575 *p = '/'; // Restore the separator
576 }
577 }
578
579 // Create the final, full directory path
580 ierr = PetscTestDirectory(tmp_path, 'r', &exists); CHKERRQ(ierr);
581 if (!exists) {
582 ierr = PetscMkdir(tmp_path); CHKERRQ(ierr);
583 }
584
585 PetscFunctionReturn(0);
586}
Here is the caller graph for this function:

◆ SetupSimulationEnvironment()

PetscErrorCode SetupSimulationEnvironment ( SimCtx simCtx)

Verifies and prepares the complete I/O environment for a simulation run.

This function performs a comprehensive series of checks and setup actions to ensure a valid and clean environment. It is parallel-safe; all filesystem operations and checks are performed by Rank 0, with collective error handling.

The function's responsibilities include:

  1. Checking Mandatory Inputs: Verifies existence of grid and BCs files.
  2. Checking Optional Inputs: Warns if optional config files (whitelist, profile) are missing.
  3. Validating Run Mode Paths: Ensures restart_dir or post-processing source directories exist when needed.
  4. Preparing Log Directory: Creates the log directory and cleans it of previous logs.
  5. Preparing Output Directories: Creates the main output directory and its required subdirectories.
Parameters
[in]simCtxThe fully configured SimulationContext object.
Returns
PetscErrorCode Returns 0 on success, or a non-zero error code if a mandatory file/directory is missing or a critical operation fails.

Definition at line 609 of file setup.c.

610{
611 PetscErrorCode ierr;
612 PetscMPIInt rank;
613 PetscBool exists;
614
615 PetscFunctionBeginUser;
616 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
617
618 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Setting up simulation environment ---\n");
619
620 /* =====================================================================
621 * Phase 1: Check for all required and optional INPUT files.
622 * ===================================================================== */
623 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Phase 1: Verifying input files...\n");
624
625 // --- Mandatory Inputs ---
626 if (!simCtx->generate_grid) {
627 ierr = VerifyPathExistence(simCtx->grid_file, PETSC_FALSE, PETSC_FALSE, "Grid file", &exists); CHKERRQ(ierr);
628 }
629 for (PetscInt i = 0; i < simCtx->num_bcs_files; i++) {
630 char desc[128];
631 ierr = PetscSNPrintf(desc, sizeof(desc), "BCS file #%d", i + 1); CHKERRQ(ierr);
632 ierr = VerifyPathExistence(simCtx->bcs_files[i], PETSC_FALSE, PETSC_FALSE, desc, &exists); CHKERRQ(ierr);
633 }
634
635 // --- Optional Inputs (these produce warnings if missing) ---
636 if (simCtx->useCfg) {
637 ierr = VerifyPathExistence(simCtx->allowedFile, PETSC_FALSE, PETSC_TRUE, "Whitelist config file", &exists); CHKERRQ(ierr);
638 }
639 if (simCtx->useCriticalFuncsCfg) {
640 ierr = VerifyPathExistence(simCtx->criticalFuncsFile, PETSC_FALSE, PETSC_TRUE, "Profiling config file", &exists); CHKERRQ(ierr);
641 }
642 if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
643 ierr = VerifyPathExistence(simCtx->PostprocessingControlFile, PETSC_FALSE, PETSC_TRUE, "Post-processing control file", &exists); CHKERRQ(ierr);
644 }
645
646
647 /* =====================================================================
648 * Phase 2: Validate directories specific to the execution mode.
649 * ===================================================================== */
650 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Phase 2: Verifying execution mode directories...\n");
651 // The data source directory must exist if we intend to load any data from it.
652 // This is true if:
653 // 1. We are restarting from a previous time step (StartStep > 0), which implies
654 // loading Eulerian fields and/or particle fields.
655 // 2. We are starting from t=0 but are explicitly told to load the initial
656 // Eulerian fields from a file (eulerianSource == "load").
657 if (simCtx->StartStep > 0 || strcmp(simCtx->eulerianSource,"load")== 0){ // If this is a restart run
658 ierr = VerifyPathExistence(simCtx->restart_dir, PETSC_TRUE, PETSC_FALSE, "Restart source directory", &exists); CHKERRQ(ierr);
659 }
660 if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
661 ierr = VerifyPathExistence(simCtx->pps->source_dir, PETSC_TRUE, PETSC_FALSE, "Post-processing source directory", &exists); CHKERRQ(ierr);
662 }
663
664 /* =====================================================================
665 * Phase 3: Create and prepare all OUTPUT directories.
666 * ===================================================================== */
667 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Phase 3: Preparing output directories...\n");
668
669 if (rank == 0){
670 if(simCtx->exec_mode == EXEC_MODE_SOLVER){
671 // --- Prepare Log Directory ---
672 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating/cleaning log directory: %s\n", simCtx->log_dir);
673 ierr = PetscRMTree(simCtx->log_dir); // Wipes the directory and its contents
674 if (ierr) { /* Ignore file-not-found error, but fail on others */
675 PetscError(PETSC_COMM_SELF, __LINE__, __FUNCT__, __FILE__, ierr, PETSC_ERROR_INITIAL, "Could not remove existing log directory '%s'. Check permissions.", simCtx->log_dir);
676 }
677 ierr = PetscMkdir(simCtx->log_dir); CHKERRQ(ierr);
678
679 // --- Prepare Output Directories ---
680 char path_buffer[PETSC_MAX_PATH_LEN];
681
682 // 1. Check/Create the main output directory
683 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Verifying main output directory: %s\n", simCtx->output_dir);
684 ierr = PetscTestDirectory(simCtx->output_dir, 'r', &exists); CHKERRQ(ierr);
685 if (!exists) {
686 LOG_ALLOW(GLOBAL, LOG_INFO, "Output directory not found. Creating: %s\n", simCtx->output_dir);
687 ierr = PetscMkdir(simCtx->output_dir); CHKERRQ(ierr);
688 }
689
690 // 2. Check/Create the Eulerian subdirectory
691 ierr = PetscSNPrintf(path_buffer, sizeof(path_buffer), "%s/%s", simCtx->output_dir, simCtx->euler_subdir); CHKERRQ(ierr);
692 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Verifying Eulerian subdirectory: %s\n", path_buffer);
693 ierr = PetscTestDirectory(path_buffer, 'r', &exists); CHKERRQ(ierr);
694 if (!exists) {
695 LOG_ALLOW(GLOBAL, LOG_INFO, "Eulerian subdirectory not found. Creating: %s\n", path_buffer);
696 ierr = PetscMkdir(path_buffer); CHKERRQ(ierr);
697 }
698
699 // 3. Check/Create the Particle subdirectory if needed
700 if (simCtx->np > 0) {
701 ierr = PetscSNPrintf(path_buffer, sizeof(path_buffer), "%s/%s", simCtx->output_dir, simCtx->particle_subdir); CHKERRQ(ierr);
702 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Verifying Particle subdirectory: %s\n", path_buffer);
703 ierr = PetscTestDirectory(path_buffer, 'r', &exists); CHKERRQ(ierr);
704 if (!exists) {
705 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle subdirectory not found. Creating: %s\n", path_buffer);
706 ierr = PetscMkdir(path_buffer); CHKERRQ(ierr);
707 }
708 }
709 } else if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
710 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Preparing post-processing output directories ...\n");
711
712 PostProcessParams *pps = simCtx->pps;
713 char path_buffer[PETSC_MAX_PATH_LEN];
714
715 const char *last_slash_euler = strrchr(pps->output_prefix, '/');
716 if(last_slash_euler){
717 size_t dir_len = last_slash_euler - pps->output_prefix;
718 if(dir_len > 0){
719 if(dir_len >= sizeof(path_buffer)) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Post-processing output prefix path is too long.");
720 strncpy(path_buffer, pps->output_prefix, dir_len);
721 path_buffer[dir_len] = '\0';
722
723 ierr = PetscTestDirectory(path_buffer, 'r', &exists); CHKERRQ(ierr);
724 if (!exists){
725 LOG_ALLOW(GLOBAL, LOG_INFO, "Creating post-processing Eulerian output directory: %s\n", path_buffer);
726 ierr = PetscMkdirRecursive(path_buffer); CHKERRQ(ierr);
727 }
728 }
729 }
730
731 // Particle output directory
732 if(pps->outputParticles){
733 const char *last_slash_particle = strrchr(pps->particle_output_prefix, '/');
734 if(last_slash_particle){
735 size_t dir_len = last_slash_particle - pps->particle_output_prefix;
736 if(dir_len > 0){
737 if(dir_len > sizeof(path_buffer)) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Post-processing particle output prefix path is too long.");
738 strncpy(path_buffer, pps->particle_output_prefix, dir_len);
739 path_buffer[dir_len] = '\0';
740
741 ierr = PetscTestDirectory(path_buffer, 'r', &exists); CHKERRQ(ierr);
742
743 if (!exists){
744 LOG_ALLOW(GLOBAL, LOG_INFO, "Creating post-processing Particle output directory: %s\n", path_buffer);
745 ierr = PetscMkdirRecursive(path_buffer); CHKERRQ(ierr);
746 }
747 }
748 }
749 }
750 }
751 }
752
753 // Synchronize all processes before proceeding
754 ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRMPI(ierr);
755
756 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Environment setup complete ---\n");
757
758 PetscFunctionReturn(0);
759}
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
A parallel-safe helper to verify the existence of a generic file or directory path.
Definition io.c:762
static PetscErrorCode PetscMkdirRecursive(const char *path)
Creates a directory path recursively, similar to mkdir -p.
Definition setup.c:542
#define __FUNCT__
Definition setup.c:10
char particle_output_prefix[256]
Definition variables.h:516
char output_prefix[256]
Definition variables.h:513
char source_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:501
PetscBool outputParticles
Definition variables.h:507
@ EXEC_MODE_SOLVER
Definition variables.h:558
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:559
ExecutionMode exec_mode
Definition variables.h:603
Holds all configuration parameters for a post-processing run.
Definition variables.h:499
Here is the call graph for this function:
Here is the caller graph for this function:

◆ AllocateContextHierarchy()

static PetscErrorCode AllocateContextHierarchy ( SimCtx simCtx)
static

Allocates the memory for the UserMG and UserCtx hierarchy.

This function performs the foundational memory allocation for the solver's data structures. It reads the number of multigrid levels and grid blocks from the master SimulationContext, then allocates the arrays for MGCtx and UserCtx.

It performs three critical tasks:

  1. Allocates the MGCtx array within the UserMG struct.
  2. For each level, allocates the UserCtx array for all blocks.
  3. Sets the simCtx back-pointer in every single UserCtx, linking it to the master configuration.
Parameters
simCtx — The master SimulationContext, which contains the configuration and will store the resulting allocated hierarchy in its usermg member.
Returns
PetscErrorCode

Definition at line 781 of file setup.c.

782{
783 PetscErrorCode ierr;
784 UserMG *usermg = &simCtx->usermg;
785 MGCtx *mgctx;
786 PetscInt nblk = simCtx->block_number;
787 PetscBool found;
788 PetscFunctionBeginUser;
790
791 LOG_ALLOW(GLOBAL, LOG_INFO, "Allocating context hierarchy for %d levels and %d blocks...\n", simCtx->mglevels, nblk);
792
793 // Store the number of levels in the UserMG struct itself
794 usermg->mglevels = simCtx->mglevels;
795
796 // --- 1. Allocate the array of MGCtx structs ---
797 ierr = PetscMalloc(usermg->mglevels * sizeof(MGCtx), &usermg->mgctx); CHKERRQ(ierr);
798 // Zero-initialize to ensure all pointers (especially packer) are NULL
799 ierr = PetscMemzero(usermg->mgctx, usermg->mglevels * sizeof(MGCtx)); CHKERRQ(ierr);
800 mgctx = usermg->mgctx;
801 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Allocated MGCtx array of size %d.\n", simCtx->rank, usermg->mglevels);
802
803 // --- 2. Parse semi-coarsening options (logic from MG_Initial) ---
804 // These flags determine if a dimension is coarsened in the multigrid hierarchy.
805 PetscInt *isc, *jsc, *ksc;
806 ierr = PetscMalloc3(nblk, &isc, nblk, &jsc, nblk, &ksc); CHKERRQ(ierr);
807 // Set defaults to FALSE (full coarsening)
808 for (PetscInt i = 0; i < nblk; ++i) {
809 isc[i] = 0; jsc[i] = 0; ksc[i] = 0;
810 }
811
812// Use a temporary variable for the 'count' argument to the parsing function.
813 // This protects the original 'nblk' which is needed for the loop bounds.
814 PetscInt n_opts_found = nblk;
815 ierr = PetscOptionsGetIntArray(NULL, NULL, "-mg_i_semi", isc, &n_opts_found, &found); CHKERRQ(ierr);
816
817 n_opts_found = nblk; // Reset the temp variable before the next call
818 ierr = PetscOptionsGetIntArray(NULL, NULL, "-mg_j_semi", jsc, &n_opts_found, &found); CHKERRQ(ierr);
819
820 n_opts_found = nblk; // Reset the temp variable before the next call
821 ierr = PetscOptionsGetIntArray(NULL, NULL, "-mg_k_semi", ksc, &n_opts_found, &found); CHKERRQ(ierr);
822
823 // --- 3. Loop over levels and blocks to allocate UserCtx arrays ---
824 for (PetscInt level = 0; level < simCtx->mglevels; level++) {
825
826 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Setting up MG Level %d...\n", simCtx->rank, level);
827 // Allocate the array of UserCtx structs for this level
828 ierr = PetscMalloc(nblk * sizeof(UserCtx), &mgctx[level].user); CHKERRQ(ierr);
829 // It's good practice to zero out the memory to avoid uninitialized values
830 ierr = PetscMemzero(mgctx[level].user, nblk * sizeof(UserCtx)); CHKERRQ(ierr);
831 mgctx[level].thislevel = level;
832
833 for (PetscInt bi = 0; bi < nblk; bi++) {
834 UserCtx *currentUser = &mgctx[level].user[bi];
835 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Initializing UserCtx for Level %d, Block %d.\n", simCtx->rank, level, bi);
836
837 // --- CRITICAL STEP: Set the back-pointer to the master context ---
838 currentUser->simCtx = simCtx;
839
840 // Initialize other per-context values
841 currentUser->thislevel = level;
842 currentUser->_this = bi; //
843 currentUser->mglevels = usermg->mglevels;
844
845 // Assign semi-coarsening flags
846 currentUser->isc = isc[bi];
847 currentUser->jsc = jsc[bi];
848 currentUser->ksc = ksc[bi];
849
850 // Link to finer/coarser contexts for multigrid operations
851 if (level > 0) {
852 currentUser->user_c = &mgctx[level-1].user[bi];
853 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d: -> Linked to coarser context (user_c).\n", simCtx->rank);
854 }
855 if (level < usermg->mglevels - 1) {
856 currentUser->user_f = &mgctx[level+1].user[bi];
857 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d: -> Linked to finer context (user_f).\n", simCtx->rank);
858 }
859 }
860 }
861
862 // Log a summary of the parsed flags on each rank.
863 if (get_log_level() >= LOG_DEBUG && nblk > 0) {
864 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Final semi-coarsening configuration view:\n", simCtx->rank);
865 for (PetscInt bi = 0; bi < nblk; ++bi) {
866 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d: i-semi=%d, j-semi=%d, k-semi=%d\n", simCtx->rank, bi, isc[bi], jsc[bi], ksc[bi]);
867 }
868 }
869
870 // Clean up temporary arrays
871 ierr = PetscFree3(isc, jsc, ksc); CHKERRQ(ierr);
872
873 LOG_ALLOW(GLOBAL, LOG_INFO, "Context hierarchy allocation complete.\n");
875 PetscFunctionReturn(0);
876}
LogLevel get_log_level()
Retrieves the current logging level from the environment variable LOG_LEVEL.
Definition logging.c:39
PetscInt isc
Definition variables.h:741
UserCtx * user
Definition variables.h:474
PetscInt mglevels
Definition variables.h:792
UserCtx * user_f
Definition variables.h:793
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:731
PetscInt ksc
Definition variables.h:741
UserMG usermg
Definition variables.h:698
PetscInt _this
Definition variables.h:741
PetscInt jsc
Definition variables.h:741
PetscInt thislevel
Definition variables.h:475
UserCtx * user_c
Definition variables.h:793
PetscInt thislevel
Definition variables.h:792
PetscInt mglevels
Definition variables.h:481
MGCtx * mgctx
Definition variables.h:484
Context for Multigrid operations.
Definition variables.h:473
User-defined context containing data specific to a single computational grid level.
Definition variables.h:728
User-level context for managing the entire multigrid hierarchy.
Definition variables.h:480
Here is the call graph for this function:
Here is the caller graph for this function:

◆ SetupSolverParameters()

static PetscErrorCode SetupSolverParameters ( SimCtx simCtx)
static

Definition at line 880 of file setup.c.

880 {
881
882 PetscFunctionBeginUser;
884
885 LOG_ALLOW(GLOBAL,LOG_INFO, " -- Setting up solver parameters -- .\n");
886
887 UserMG *usermg = &simCtx->usermg;
888 MGCtx *mgctx = usermg->mgctx;
889 PetscInt nblk = simCtx->block_number;
890
891 for (PetscInt level = usermg->mglevels-1; level >=0; level--) {
892 for (PetscInt bi = 0; bi < nblk; bi++) {
893 UserCtx *user = &mgctx[level].user[bi];
894 LOG_ALLOW_SYNC(LOCAL, LOG_DEBUG, "Rank %d: Setting up parameters for level %d, block %d\n", simCtx->rank, level, bi);
895
896 user->assignedA = PETSC_FALSE;
897 user->multinullspace = PETSC_FALSE;
898 }
899 }
901 PetscFunctionReturn(0);
902}
PetscBool assignedA
Definition variables.h:772
PetscBool multinullspace
Definition variables.h:769
Here is the caller graph for this function:

◆ SetupGridAndSolvers()

PetscErrorCode SetupGridAndSolvers ( SimCtx simCtx)

The main orchestrator for setting up all grid-related components.

(Implementation of the orchestrator function itself)

Definition at line 910 of file setup.c.

911{
912 PetscErrorCode ierr;
913 PetscFunctionBeginUser;
914
916
917 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Starting Grid and Solvers Setup ---\n");
918
919 // Phase 1: Allocate the UserMG and UserCtx hierarchy
920 ierr = AllocateContextHierarchy(simCtx); CHKERRQ(ierr);
921
922 ierr = DefineAllGridDimensions(simCtx); CHKERRQ(ierr);
923 ierr = InitializeAllGridDMs(simCtx); CHKERRQ(ierr);
924 ierr = AssignAllGridCoordinates(simCtx);
925 ierr = CreateAndInitializeAllVectors(simCtx); CHKERRQ(ierr);
926 ierr = SetupSolverParameters(simCtx); CHKERRQ(ierr);
927
928 // NOTE: CalculateAllGridMetrics is now called inside SetupBoundaryConditions (not here) to ensure:
929 // 1. Boundary condition configuration data (boundary_faces) is available for periodic BC corrections
930 // 2. Computed metrics are available for inlet/outlet area calculations
931 // This resolves the circular dependency between BC setup and metric calculations.
932
933 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Grid and Solvers Setup Complete ---\n");
934
936 PetscFunctionReturn(0);
937}
PetscErrorCode DefineAllGridDimensions(SimCtx *simCtx)
Orchestrates the parsing and setting of grid dimensions for all blocks.
Definition grid.c:68
PetscErrorCode InitializeAllGridDMs(SimCtx *simCtx)
Orchestrates the creation of DMDA objects for every block and multigrid level.
Definition grid.c:260
PetscErrorCode AssignAllGridCoordinates(SimCtx *simCtx)
Orchestrates the assignment of physical coordinates to all DMDA objects.
Definition grid.c:356
static PetscErrorCode SetupSolverParameters(SimCtx *simCtx)
Definition setup.c:880
PetscErrorCode CreateAndInitializeAllVectors(SimCtx *simCtx)
Creates and initializes all PETSc Vec objects for all fields.
Definition setup.c:953
static PetscErrorCode AllocateContextHierarchy(SimCtx *simCtx)
Allocates the memory for the UserMG and UserCtx hierarchy.
Definition setup.c:781
Here is the call graph for this function:
Here is the caller graph for this function:

◆ CreateAndInitializeAllVectors()

PetscErrorCode CreateAndInitializeAllVectors ( SimCtx simCtx)

Creates and initializes all PETSc Vec objects for all fields.

This function iterates through every UserCtx in the multigrid and multi-block hierarchy. For each context, it creates the comprehensive set of global and local PETSc Vecs required by the flow solver (e.g., Ucont, P, Nvert, metrics, turbulence fields, etc.). Each vector is initialized to zero.

Parameters
simCtx — The master SimCtx, containing the configured UserCtx hierarchy.
Returns
PetscErrorCode

Definition at line 953 of file setup.c.

954{
955 PetscErrorCode ierr;
956 UserMG *usermg = &simCtx->usermg;
957 MGCtx *mgctx = usermg->mgctx;
958 PetscInt nblk = simCtx->block_number;
959
960 PetscFunctionBeginUser;
961
963
964 LOG_ALLOW(GLOBAL, LOG_INFO, "Creating and initializing all simulation vectors...\n");
965
966 for (PetscInt level = usermg->mglevels-1; level >=0; level--) {
967 for (PetscInt bi = 0; bi < nblk; bi++) {
968 UserCtx *user = &mgctx[level].user[bi];
969
970 if(!user->da || !user->fda) {
971 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "DMs not properly initialized in UserCtx before vector creation.");
972 }
973
974 LOG_ALLOW_SYNC(LOCAL, LOG_DEBUG, "Rank %d: Creating vectors for level %d, block %d\n", simCtx->rank, level, bi);
975
976 // --- Group A: Primary Flow Fields (Global and Local) ---
977 // These are the core solution variables.
978 ierr = DMCreateGlobalVector(user->fda, &user->Ucont); CHKERRQ(ierr); ierr = VecSet(user->Ucont, 0.0); CHKERRQ(ierr);
979 ierr = DMCreateGlobalVector(user->fda, &user->Ucat); CHKERRQ(ierr); ierr = VecSet(user->Ucat, 0.0); CHKERRQ(ierr);
980 ierr = DMCreateGlobalVector(user->da, &user->P); CHKERRQ(ierr); ierr = VecSet(user->P, 0.0); CHKERRQ(ierr);
981 ierr = DMCreateGlobalVector(user->da, &user->Nvert); CHKERRQ(ierr); ierr = VecSet(user->Nvert, 0.0); CHKERRQ(ierr);
982
983 ierr = DMCreateLocalVector(user->fda, &user->lUcont); CHKERRQ(ierr); ierr = VecSet(user->lUcont, 0.0); CHKERRQ(ierr);
984 ierr = DMCreateLocalVector(user->fda, &user->lUcat); CHKERRQ(ierr); ierr = VecSet(user->lUcat, 0.0); CHKERRQ(ierr);
985 ierr = DMCreateLocalVector(user->da, &user->lP); CHKERRQ(ierr); ierr = VecSet(user->lP, 0.0); CHKERRQ(ierr);
986 ierr = DMCreateLocalVector(user->da, &user->lNvert); CHKERRQ(ierr); ierr = VecSet(user->lNvert, 0.0); CHKERRQ(ierr);
987
988 // -- Group A2: Derived Flow Fields (Global and Local) ---
989 ierr = VecDuplicate(user->P,&user->Diffusivity); CHKERRQ(ierr); ierr = VecSet(user->Diffusivity, 0.0); CHKERRQ(ierr);
990 ierr = VecDuplicate(user->lP,&user->lDiffusivity); CHKERRQ(ierr); ierr = VecSet(user->lDiffusivity, 0.0); CHKERRQ(ierr);
991 ierr = VecDuplicate(user->Ucat,&user->DiffusivityGradient); CHKERRQ(ierr); ierr = VecSet(user->DiffusivityGradient, 0.0); CHKERRQ(ierr);
992 ierr = VecDuplicate(user->lUcat,&user->lDiffusivityGradient); CHKERRQ(ierr); ierr = VecSet(user->lDiffusivityGradient, 0.0); CHKERRQ(ierr);
993
994 // -- Group B: Solver Work Vectors (Global and Local) ---
995 ierr = VecDuplicate(user->P, &user->Phi); CHKERRQ(ierr); ierr = VecSet(user->Phi, 0.0); CHKERRQ(ierr);
996 ierr = VecDuplicate(user->lP, &user->lPhi); CHKERRQ(ierr); ierr = VecSet(user->lPhi, 0.0); CHKERRQ(ierr);
997
998 // --- Group C: Time-Stepping & Workspace Fields (Finest Level Only) ---
999 if (level == usermg->mglevels - 1) {
1000 ierr = VecDuplicate(user->Ucont, &user->Ucont_o); CHKERRQ(ierr); ierr = VecSet(user->Ucont_o, 0.0); CHKERRQ(ierr);
1001 ierr = VecDuplicate(user->Ucont, &user->Ucont_rm1); CHKERRQ(ierr); ierr = VecSet(user->Ucont_rm1, 0.0); CHKERRQ(ierr);
1002 ierr = VecDuplicate(user->Ucat, &user->Ucat_o); CHKERRQ(ierr); ierr = VecSet(user->Ucat_o, 0.0); CHKERRQ(ierr);
1003 ierr = VecDuplicate(user->P, &user->P_o); CHKERRQ(ierr); ierr = VecSet(user->P_o, 0.0); CHKERRQ(ierr);
1004 ierr = VecDuplicate(user->lUcont, &user->lUcont_o); CHKERRQ(ierr); ierr = VecSet(user->lUcont_o, 0.0); CHKERRQ(ierr);
1005 ierr = VecDuplicate(user->lUcont, &user->lUcont_rm1); CHKERRQ(ierr); ierr = VecSet(user->lUcont_rm1, 0.0); CHKERRQ(ierr);
1006 ierr = DMCreateLocalVector(user->da, &user->lNvert_o); CHKERRQ(ierr); ierr = VecSet(user->lNvert_o, 0.0); CHKERRQ(ierr);
1007 ierr = VecDuplicate(user->Nvert, &user->Nvert_o); CHKERRQ(ierr); ierr = VecSet(user->Nvert_o, 0.0); CHKERRQ(ierr);
1008 }
1009
1010 // --- Group D: Grid Metrics (Face-Centered) ---
1011 ierr = DMCreateGlobalVector(user->fda, &user->Csi); CHKERRQ(ierr); ierr = VecSet(user->Csi, 0.0); CHKERRQ(ierr);
1012 ierr = VecDuplicate(user->Csi, &user->Eta); CHKERRQ(ierr); ierr = VecSet(user->Eta, 0.0); CHKERRQ(ierr);
1013 ierr = VecDuplicate(user->Csi, &user->Zet); CHKERRQ(ierr); ierr = VecSet(user->Zet, 0.0); CHKERRQ(ierr);
1014 ierr = DMCreateGlobalVector(user->da, &user->Aj); CHKERRQ(ierr); ierr = VecSet(user->Aj, 0.0); CHKERRQ(ierr);
1015
1016 ierr = DMCreateLocalVector(user->fda, &user->lCsi); CHKERRQ(ierr); ierr = VecSet(user->lCsi, 0.0); CHKERRQ(ierr);
1017 ierr = VecDuplicate(user->lCsi, &user->lEta); CHKERRQ(ierr); ierr = VecSet(user->lEta, 0.0); CHKERRQ(ierr);
1018 ierr = VecDuplicate(user->lCsi, &user->lZet); CHKERRQ(ierr); ierr = VecSet(user->lZet, 0.0); CHKERRQ(ierr);
1019 ierr = DMCreateLocalVector(user->da, &user->lAj); CHKERRQ(ierr); ierr = VecSet(user->lAj, 0.0); CHKERRQ(ierr);
1020
1021
1022 // --- Group E: Grid Metrics (Face-Centered) ---
1023 // Vector metrics are duplicated from Csi (DOF=3, fda-based)
1024 ierr = VecDuplicate(user->Csi, &user->ICsi); CHKERRQ(ierr); ierr = VecSet(user->ICsi, 0.0); CHKERRQ(ierr);
1025 ierr = VecDuplicate(user->Csi, &user->IEta); CHKERRQ(ierr); ierr = VecSet(user->IEta, 0.0); CHKERRQ(ierr);
1026 ierr = VecDuplicate(user->Csi, &user->IZet); CHKERRQ(ierr); ierr = VecSet(user->IZet, 0.0); CHKERRQ(ierr);
1027 ierr = VecDuplicate(user->Csi, &user->JCsi); CHKERRQ(ierr); ierr = VecSet(user->JCsi, 0.0); CHKERRQ(ierr);
1028 ierr = VecDuplicate(user->Csi, &user->JEta); CHKERRQ(ierr); ierr = VecSet(user->JEta, 0.0); CHKERRQ(ierr);
1029 ierr = VecDuplicate(user->Csi, &user->JZet); CHKERRQ(ierr); ierr = VecSet(user->JZet, 0.0); CHKERRQ(ierr);
1030 ierr = VecDuplicate(user->Csi, &user->KCsi); CHKERRQ(ierr); ierr = VecSet(user->KCsi, 0.0); CHKERRQ(ierr);
1031 ierr = VecDuplicate(user->Csi, &user->KEta); CHKERRQ(ierr); ierr = VecSet(user->KEta, 0.0); CHKERRQ(ierr);
1032 ierr = VecDuplicate(user->Csi, &user->KZet); CHKERRQ(ierr); ierr = VecSet(user->KZet, 0.0); CHKERRQ(ierr);
1033 // Scalar metrics are duplicated from Aj (DOF=1, da-based)
1034 ierr = VecDuplicate(user->Aj, &user->IAj); CHKERRQ(ierr); ierr = VecSet(user->IAj, 0.0); CHKERRQ(ierr);
1035 ierr = VecDuplicate(user->Aj, &user->JAj); CHKERRQ(ierr); ierr = VecSet(user->JAj, 0.0); CHKERRQ(ierr);
1036 ierr = VecDuplicate(user->Aj, &user->KAj); CHKERRQ(ierr); ierr = VecSet(user->KAj, 0.0); CHKERRQ(ierr);
1037
1038 ierr = VecDuplicate(user->lCsi, &user->lICsi); CHKERRQ(ierr); ierr = VecSet(user->lICsi, 0.0); CHKERRQ(ierr);
1039 ierr = VecDuplicate(user->lCsi, &user->lIEta); CHKERRQ(ierr); ierr = VecSet(user->lIEta, 0.0); CHKERRQ(ierr);
1040 ierr = VecDuplicate(user->lCsi, &user->lIZet); CHKERRQ(ierr); ierr = VecSet(user->lIZet, 0.0); CHKERRQ(ierr);
1041 ierr = VecDuplicate(user->lCsi, &user->lJCsi); CHKERRQ(ierr); ierr = VecSet(user->lJCsi, 0.0); CHKERRQ(ierr);
1042 ierr = VecDuplicate(user->lCsi, &user->lJEta); CHKERRQ(ierr); ierr = VecSet(user->lJEta, 0.0); CHKERRQ(ierr);
1043 ierr = VecDuplicate(user->lCsi, &user->lJZet); CHKERRQ(ierr); ierr = VecSet(user->lJZet, 0.0); CHKERRQ(ierr);
1044 ierr = VecDuplicate(user->lCsi, &user->lKCsi); CHKERRQ(ierr); ierr = VecSet(user->lKCsi, 0.0); CHKERRQ(ierr);
1045 ierr = VecDuplicate(user->lCsi, &user->lKEta); CHKERRQ(ierr); ierr = VecSet(user->lKEta, 0.0); CHKERRQ(ierr);
1046 ierr = VecDuplicate(user->lCsi, &user->lKZet); CHKERRQ(ierr); ierr = VecSet(user->lKZet, 0.0); CHKERRQ(ierr);
1047
1048 ierr = VecDuplicate(user->lAj, &user->lIAj); CHKERRQ(ierr); ierr = VecSet(user->lIAj, 0.0); CHKERRQ(ierr);
1049 ierr = VecDuplicate(user->lAj, &user->lJAj); CHKERRQ(ierr); ierr = VecSet(user->lJAj, 0.0); CHKERRQ(ierr);
1050 ierr = VecDuplicate(user->lAj, &user->lKAj); CHKERRQ(ierr); ierr = VecSet(user->lKAj, 0.0); CHKERRQ(ierr);
1051
1052 // --- Group F: Cell/Face Center Coordinates and Grid Spacing ---
1053 ierr = DMCreateGlobalVector(user->fda, &user->Cent); CHKERRQ(ierr); ierr = VecSet(user->Cent, 0.0); CHKERRQ(ierr);
1054 ierr = DMCreateLocalVector(user->fda, &user->lCent); CHKERRQ(ierr); ierr = VecSet(user->lCent, 0.0); CHKERRQ(ierr);
1055
1056 ierr = VecDuplicate(user->Cent, &user->GridSpace); CHKERRQ(ierr); ierr = VecSet(user->GridSpace, 0.0); CHKERRQ(ierr);
1057 ierr = VecDuplicate(user->lCent, &user->lGridSpace); CHKERRQ(ierr); ierr = VecSet(user->lGridSpace, 0.0); CHKERRQ(ierr);
1058
1059 // Face-center coordinate vectors are LOCAL to hold calculated values before scattering
1060 ierr = VecDuplicate(user->lCent, &user->Centx); CHKERRQ(ierr); ierr = VecSet(user->Centx, 0.0); CHKERRQ(ierr);
1061 ierr = VecDuplicate(user->lCent, &user->Centy); CHKERRQ(ierr); ierr = VecSet(user->Centy, 0.0); CHKERRQ(ierr);
1062 ierr = VecDuplicate(user->lCent, &user->Centz); CHKERRQ(ierr); ierr = VecSet(user->Centz, 0.0); CHKERRQ(ierr);
1063
1064 if(level == usermg->mglevels -1){
1065 // --- Group G: Turbulence Models (Finest Level Only) ---
1066 if (simCtx->les || simCtx->rans) {
1067 ierr = DMCreateGlobalVector(user->da, &user->Nu_t); CHKERRQ(ierr); ierr = VecSet(user->Nu_t, 0.0); CHKERRQ(ierr);
1068 ierr = DMCreateLocalVector(user->da, &user->lNu_t); CHKERRQ(ierr); ierr = VecSet(user->lNu_t, 0.0); CHKERRQ(ierr);
1069 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Turbulence viscosity (Nu_t) vectors created for LES/RANS model.\n");
1070 if(simCtx->les){
1071 ierr = DMCreateGlobalVector(user->da,&user->CS); CHKERRQ(ierr); ierr = VecSet(user->CS,0.0); CHKERRQ(ierr);
1072 ierr = DMCreateLocalVector(user->da,&user->lCs); CHKERRQ(ierr); ierr = VecSet(user->lCs,0.0); CHKERRQ(ierr);
1073 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Smagorinsky constant (CS) vectors created for LES model.\n");
1074 }
1075
1076 if(simCtx->wallfunction){
1077 ierr = DMCreateLocalVector(user->fda,&user->lFriction_Velocity); CHKERRQ(ierr); ierr = VecSet(user->lFriction_Velocity,0.0);
1078 }
1079 // Add K_Omega etc. here as needed
1080
1081 // Note: Add any other vectors from the legacy MG_Initial here as needed.
1082 // For example: Rhs, Forcing, turbulence Vecs (K_Omega, Nu_t)...
1083
1084 }
1085 // --- Group H: Particle Methods
1086 if(simCtx->np>0){
1087 ierr = DMCreateGlobalVector(user->da,&user->ParticleCount); CHKERRQ(ierr); ierr = VecSet(user->ParticleCount,0.0); CHKERRQ(ierr);
1088 ierr = DMCreateLocalVector(user->da,&user->lParticleCount); CHKERRQ(ierr); ierr = VecSet(user->lParticleCount,0.0); CHKERRQ(ierr);
1089 // Scalar field to hold particle scalar property (e.g., temperature, concentration)
1090 ierr = DMCreateGlobalVector(user->da,&user->Psi); CHKERRQ(ierr); ierr = VecSet(user->Psi,0.0); CHKERRQ(ierr);
1091 ierr = DMCreateLocalVector(user->da,&user->lPsi); CHKERRQ(ierr); ierr = VecSet(user->lPsi,0.0); CHKERRQ(ierr);
1092 LOG_ALLOW(GLOBAL,LOG_DEBUG,"ParticleCount & Scalar(Psi) created for %d particles.\n",simCtx->np);
1093 }
1094 }
1095 // --- Group I: Boundary Condition vectors ---
1096 ierr = DMCreateGlobalVector(user->fda, &user->Bcs.Ubcs); CHKERRQ(ierr);
1097 ierr = VecSet(user->Bcs.Ubcs, 0.0); CHKERRQ(ierr);
1098 ierr = DMCreateGlobalVector(user->fda, &user->Bcs.Uch); CHKERRQ(ierr);
1099 ierr = VecSet(user->Bcs.Uch, 0.0); CHKERRQ(ierr);
1100
1101 if(level == usermg->mglevels - 1){
1102 if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
1103 LOG_ALLOW(LOCAL, LOG_DEBUG, "Post-processor mode detected. Allocating derived field vectors.\n");
1104
1105 ierr = VecDuplicate(user->P, &user->P_nodal); CHKERRQ(ierr);
1106 ierr = VecSet(user->P_nodal, 0.0); CHKERRQ(ierr);
1107
1108 ierr = VecDuplicate(user->Ucat, &user->Ucat_nodal); CHKERRQ(ierr);
1109 ierr = VecSet(user->Ucat_nodal, 0.0); CHKERRQ(ierr);
1110
1111 ierr = VecDuplicate(user->P, &user->Qcrit); CHKERRQ(ierr);
1112 ierr = VecSet(user->Qcrit, 0.0); CHKERRQ(ierr);
1113
1114 LOG_ALLOW(LOCAL, LOG_DEBUG, "Derived field vectors P_nodal, Ucat_nodal, and Qcrit created.\n");
1115
1116 if(simCtx->np>0){
1117 ierr = VecDuplicate(user->Psi, &user->Psi_nodal); CHKERRQ(ierr);
1118 ierr = VecSet(user->Psi_nodal, 0.0); CHKERRQ(ierr);
1119
1120 LOG_ALLOW(LOCAL, LOG_DEBUG, "Derived field vector Psi_nodal created for particle scalar property.\n");
1121
1122 }
1123 }else{
1124 user->P_nodal = NULL;
1125 user->Ucat_nodal = NULL;
1126 user->Qcrit = NULL;
1127 user->Psi_nodal = NULL;
1128 }
1129 }
1130
1131 }
1132}
1133
1134 LOG_ALLOW(GLOBAL, LOG_INFO, "All simulation vectors created and initialized.\n");
1135
1137 PetscFunctionReturn(0);
1138}
Vec lFriction_Velocity
Definition variables.h:750
Vec lDiffusivityGradient
Definition variables.h:759
Vec lCent
Definition variables.h:776
Vec GridSpace
Definition variables.h:776
Vec P_nodal
Definition variables.h:805
Vec JCsi
Definition variables.h:779
Vec KAj
Definition variables.h:780
Vec JEta
Definition variables.h:779
Vec Zet
Definition variables.h:776
Vec lIEta
Definition variables.h:778
Vec lIZet
Definition variables.h:778
Vec lNvert
Definition variables.h:755
Vec Phi
Definition variables.h:755
Vec IZet
Definition variables.h:778
Vec Centz
Definition variables.h:777
Vec IEta
Definition variables.h:778
Vec lZet
Definition variables.h:776
Vec Csi
Definition variables.h:776
Vec lUcont_rm1
Definition variables.h:763
Vec lIAj
Definition variables.h:778
Vec lKEta
Definition variables.h:780
Vec Ucat_nodal
Definition variables.h:806
Vec lPsi
Definition variables.h:801
Vec DiffusivityGradient
Definition variables.h:759
Vec lJCsi
Definition variables.h:779
Vec lCs
Definition variables.h:783
Vec Ucont
Definition variables.h:755
Vec Ubcs
Boundary condition velocity values. (Comment: "An ugly hack, waste of memory")
Definition variables.h:121
Vec Qcrit
Definition variables.h:807
Vec JZet
Definition variables.h:779
Vec Centx
Definition variables.h:777
BCS Bcs
Definition variables.h:749
Vec lPhi
Definition variables.h:755
Vec lParticleCount
Definition variables.h:800
Vec lUcont_o
Definition variables.h:762
Vec Ucat_o
Definition variables.h:762
Vec lKZet
Definition variables.h:780
Vec Eta
Definition variables.h:776
Vec lNu_t
Definition variables.h:783
Vec Nu_t
Definition variables.h:783
Vec lJEta
Definition variables.h:779
Vec lCsi
Definition variables.h:776
Vec lGridSpace
Definition variables.h:776
Vec ICsi
Definition variables.h:778
Vec lKCsi
Definition variables.h:780
Vec Ucat
Definition variables.h:755
Vec ParticleCount
Definition variables.h:800
Vec Ucont_o
Definition variables.h:762
Vec lJZet
Definition variables.h:779
Vec Nvert_o
Definition variables.h:762
Vec IAj
Definition variables.h:778
Vec Psi_nodal
Definition variables.h:808
Vec JAj
Definition variables.h:779
Vec KEta
Definition variables.h:780
Vec Ucont_rm1
Definition variables.h:763
Vec lUcont
Definition variables.h:755
Vec Diffusivity
Definition variables.h:758
Vec lAj
Definition variables.h:776
Vec lICsi
Definition variables.h:778
Vec lUcat
Definition variables.h:755
Vec lEta
Definition variables.h:776
Vec KZet
Definition variables.h:780
Vec Cent
Definition variables.h:776
Vec Nvert
Definition variables.h:755
Vec KCsi
Definition variables.h:780
Vec lDiffusivity
Definition variables.h:758
Vec lNvert_o
Definition variables.h:762
Vec Centy
Definition variables.h:777
Vec lJAj
Definition variables.h:779
Vec lKAj
Definition variables.h:780
Vec Psi
Definition variables.h:801
Vec P_o
Definition variables.h:762
Vec Uch
Characteristic velocity for boundary conditions.
Definition variables.h:122
Here is the caller graph for this function:

◆ UpdateLocalGhosts()

PetscErrorCode UpdateLocalGhosts ( UserCtx user,
const char *  fieldName 
)

Updates the local vector (including ghost points) from its corresponding global vector.

This function identifies the correct global vector, local vector, and DM based on the provided fieldName and performs the standard PETSc DMGlobalToLocalBegin/End sequence. Includes optional debugging output (max norms before/after).

Parameters
user — The UserCtx structure containing the vectors and DMs.
fieldName — The name of the field to update ("Ucat", "Ucont", "P", "Nvert", etc.).
Returns
PetscErrorCode 0 on success, non-zero on failure.
Note
This function assumes the global vector associated with fieldName has already been populated with the desired data (including any boundary conditions).

Definition at line 1157 of file setup.c.

1158{
1159 PetscErrorCode ierr;
1160 PetscMPIInt rank;
1161 Vec globalVec = NULL;
1162 Vec localVec = NULL;
1163 DM dm = NULL; // The DM associated with this field pair
1164
1165 PetscFunctionBeginUser; // Use User version for application code
1167 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1168 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank %d: Starting ghost update for field '%s'.\n", rank, fieldName);
1169
1170 // --- 1. Identify the correct Vectors and DM ---
1171 if (strcmp(fieldName, "Ucat") == 0) {
1172 globalVec = user->Ucat;
1173 localVec = user->lUcat;
1174 dm = user->fda;
1175 } else if (strcmp(fieldName, "Ucont") == 0) {
1176 globalVec = user->Ucont;
1177 localVec = user->lUcont;
1178 dm = user->fda;
1179 } else if (strcmp(fieldName, "P") == 0) {
1180 globalVec = user->P;
1181 localVec = user->lP;
1182 dm = user->da;
1183 } else if (strcmp(fieldName, "Diffusivity") == 0) {
1184 globalVec = user->Diffusivity;
1185 localVec = user->lDiffusivity;
1186 dm = user->da;
1187 } else if (strcmp(fieldName, "DiffusivityGradient") == 0) {
1188 globalVec = user->DiffusivityGradient;
1189 localVec = user->lDiffusivityGradient;
1190 dm = user->fda;
1191 } else if (strcmp(fieldName, "Csi") == 0) {
1192 globalVec = user->Csi;
1193 localVec = user->lCsi;
1194 dm = user->fda;
1195 } else if (strcmp(fieldName, "Eta") == 0) {
1196 globalVec = user->Eta;
1197 localVec = user->lEta;
1198 dm = user->fda;
1199 } else if (strcmp(fieldName, "Zet") == 0) {
1200 globalVec = user->Zet;
1201 localVec = user->lZet;
1202 dm = user->fda;
1203 }else if (strcmp(fieldName, "Nvert") == 0) {
1204 globalVec = user->Nvert;
1205 localVec = user->lNvert;
1206 dm = user->da;
1207 // Add other fields as needed
1208 } else if (strcmp(fieldName, "Aj") == 0) {
1209 globalVec = user->Aj;
1210 localVec = user->lAj;
1211 dm = user->da;
1212 } else if (strcmp(fieldName, "Cent") == 0) {
1213 globalVec = user->Cent;
1214 localVec = user->lCent;
1215 dm = user->fda;
1216 }else if (strcmp(fieldName, "GridSpace") == 0) {
1217 globalVec = user->GridSpace;
1218 localVec = user->lGridSpace;
1219 dm = user->fda;
1220 }else if (strcmp(fieldName,"ICsi") == 0){
1221 globalVec = user->ICsi;
1222 localVec = user->lICsi;
1223 dm = user->fda;
1224 }else if (strcmp(fieldName,"IEta") == 0){
1225 globalVec = user->IEta;
1226 localVec = user->lIEta;
1227 dm = user->fda;
1228 }else if (strcmp(fieldName,"IZet") == 0){
1229 globalVec = user->IZet;
1230 localVec = user->lIZet;
1231 dm = user->fda;
1232 }else if (strcmp(fieldName,"JCsi") == 0){
1233 globalVec = user->JCsi;
1234 localVec = user->lJCsi;
1235 dm = user->fda;
1236 }else if (strcmp(fieldName,"JEta") == 0){
1237 globalVec = user->JEta;
1238 localVec = user->lJEta;
1239 dm = user->fda;
1240 }else if (strcmp(fieldName,"JZet") == 0){
1241 globalVec = user->JZet;
1242 localVec = user->lJZet;
1243 dm = user->fda;
1244 }else if (strcmp(fieldName,"KCsi") == 0){
1245 globalVec = user->KCsi;
1246 localVec = user->lKCsi;
1247 dm = user->fda;
1248 }else if (strcmp(fieldName,"KEta") == 0){
1249 globalVec = user->KEta;
1250 localVec = user->lKEta;
1251 dm = user->fda;
1252 }else if (strcmp(fieldName,"KZet") == 0){
1253 globalVec = user->KZet;
1254 localVec = user->lKZet;
1255 dm = user->fda;
1256 }else if (strcmp(fieldName,"IAj") == 0){
1257 globalVec = user->IAj;
1258 localVec = user->lIAj;
1259 dm = user->da;
1260 }else if (strcmp(fieldName,"JAj") == 0){
1261 globalVec = user->JAj;
1262 localVec = user->lJAj;
1263 dm = user->da;
1264 }else if (strcmp(fieldName,"KAj") == 0){
1265 globalVec = user->KAj;
1266 localVec = user->lKAj;
1267 dm = user->da;
1268 }else if (strcmp(fieldName,"Phi") == 0){ // Pressure correction term.
1269 globalVec = user->Phi;
1270 localVec = user->lPhi;
1271 dm = user->da;
1272 }else if (strcmp(fieldName,"Psi") == 0){ // Particle scalar property.
1273 globalVec = user->Psi;
1274 localVec = user->lPsi;
1275 dm = user->da;
1276 }else {
1277 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Field '%s' not recognized for ghost update.", fieldName);
1278 }
1279
1280 // --- 2. Check if components were found ---
1281 if (!globalVec) {
1282 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Global vector for field '%s' is NULL.", fieldName);
1283 }
1284 if (!localVec) {
1285 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Local vector for field '%s' is NULL.", fieldName);
1286 }
1287 if (!dm) {
1288 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "DM for field '%s' is NULL.", fieldName);
1289 }
1290
1291 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Identified components for '%s': DM=%p, GlobalVec=%p, LocalVec=%p.\n",
1292 rank, fieldName, (void*)dm, (void*)globalVec, (void*)localVec);
1293
1294 // --- 3. Optional Debugging: Norm Before Update ---
1295 // Use your logging convention check
1296 // if (get_log_level() >= LOG_LEVEL_DEBUG && is_function_allowed("UpdateLocalGhosts")) { // Example check
1297 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)){
1298 PetscReal norm_global_before;
1299 ierr = VecNorm(globalVec, NORM_INFINITY, &norm_global_before); CHKERRQ(ierr);
1300 LOG_ALLOW(GLOBAL, LOG_INFO,"Max norm '%s' (Global) BEFORE Ghost Update: %g\n", fieldName, norm_global_before);
1301 // Optional: Norm of local vector before update (might contain old ghost values)
1302 // PetscReal norm_local_before;
1303 // ierr = VecNorm(localVec, NORM_INFINITY, &norm_local_before); CHKERRQ(ierr);
1304 // LOG_ALLOW(GLOBAL, LOG_DEBUG,"Max norm '%s' (Local) BEFORE Ghost Update: %g\n", fieldName, norm_local_before);
1305 }
1306
1307 // --- 4. Perform the Global-to-Local Transfer (Ghost Update) ---
1308 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Calling DMGlobalToLocalBegin/End for '%s'.\n", rank, fieldName);
1309 ierr = DMGlobalToLocalBegin(dm, globalVec, INSERT_VALUES, localVec); CHKERRQ(ierr);
1310 ierr = DMGlobalToLocalEnd(dm, globalVec, INSERT_VALUES, localVec); CHKERRQ(ierr);
1311 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Completed DMGlobalToLocalBegin/End for '%s'.\n", rank, fieldName);
1312
1313 // --- 5. Optional Debugging: Norm After Update ---
1314 // Use your logging convention check
1315 // if (get_log_level() >= LOG_LEVEL_DEBUG && is_function_allowed("UpdateLocalGhosts")) { // Example check
1316 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)){ // Using your specific check
1317 PetscReal norm_local_after;
1318 ierr = VecNorm(localVec, NORM_INFINITY, &norm_local_after); CHKERRQ(ierr);
1319 LOG_ALLOW(GLOBAL, LOG_INFO,"Max norm '%s' (Local) AFTER Ghost Update: %g\n", fieldName, norm_local_after);
1320
1321 // --- 6. Optional Debugging: Specific Point Checks (Example for Ucat on Rank 0/1) ---
1322 // (Keep this conditional if it's only for specific debug scenarios)
1323 if (strcmp(fieldName, "Ucat") == 0) { // Only do detailed checks for Ucat for now
1324 PetscMPIInt rank_test;
1325 MPI_Comm_rank(PETSC_COMM_WORLD, &rank_test);
1326
1327 // Get Local Info needed for indexing checks
1328 DMDALocalInfo info_check;
1329 ierr = DMDAGetLocalInfo(dm, &info_check); CHKERRQ(ierr); // Use the correct dm
1330
1331 // Buffer for array pointer
1332 Cmpnts ***lUcat_arr_test = NULL;
1333 PetscErrorCode ierr_test = 0;
1334
1335 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Testing '%s' access immediately after ghost update...\n", rank_test, fieldName);
1336 ierr_test = DMDAVecGetArrayDOFRead(dm, localVec, &lUcat_arr_test); // Use correct dm and localVec
1337
1338 if (ierr_test) {
1339 LOG_ALLOW(LOCAL, LOG_ERROR, "Rank %d: ERROR %d getting '%s' array after ghost update!\n", rank_test, ierr_test, fieldName);
1340 } else if (!lUcat_arr_test) {
1341 LOG_ALLOW(LOCAL, LOG_ERROR, "Rank %d: ERROR NULL pointer getting '%s' array after ghost update!\n", rank_test, fieldName);
1342 }
1343 else {
1344 // Check owned interior point (e.g., first interior point)
1345 PetscInt k_int = info_check.zs + (info_check.zm > 1 ? 1 : 0); // Global k index (at least zs+1 if possible)
1346 PetscInt j_int = info_check.ys + (info_check.ym > 1 ? 1 : 0); // Global j index
1347 PetscInt i_int = info_check.xs + (info_check.xm > 1 ? 1 : 0); // Global i index
1348 // Ensure indices are within global bounds if domain is very small
1349 //if (k_int >= info_check.mz-1) k_int = info_check.mz-2; if (k_int < 1) k_int = 1;
1350 //if (j_int >= info_check.my-1) j_int = info_check.my-2; if (j_int < 1) j_int = 1;
1351 // if (i_int >= info_check.mx-1) i_int = info_check.mx-2; if (i_int < 1) i_int = 1;
1352 // clamp k_int to [1 .. mz-2]
1353 if (k_int >= info_check.mz - 1) {
1354 k_int = info_check.mz - 2;
1355 }
1356 if (k_int < 1) {
1357 k_int = 1;
1358 }
1359
1360 // clamp j_int to [1 .. my-2]
1361 if (j_int >= info_check.my - 1) {
1362 j_int = info_check.my - 2;
1363 }
1364 if (j_int < 1) {
1365 j_int = 1;
1366 }
1367
1368 // clamp i_int to [1 .. mx-2]
1369 if (i_int >= info_check.mx - 1) {
1370 i_int = info_check.mx - 2;
1371 }
1372 if (i_int < 1) {
1373 i_int = 1;
1374 }
1375
1376 // Only attempt read if indices are actually owned (relevant for multi-rank)
1377 if (k_int >= info_check.zs && k_int < info_check.zs + info_check.zm &&
1378 j_int >= info_check.ys && j_int < info_check.ys + info_check.ym &&
1379 i_int >= info_check.xs && i_int < info_check.xs + info_check.xm)
1380 {
1381 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Attempting test read OWNED INTERIOR [%d][%d][%d] (Global)\n", rank_test, k_int, j_int, i_int);
1382 Cmpnts test_val_owned_interior = lUcat_arr_test[k_int][j_int][i_int];
1383 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: SUCCESS reading owned interior: x=%g\n", rank_test, test_val_owned_interior.x);
1384 } else {
1385 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Skipping interior test read for non-owned index [%d][%d][%d].\n", rank_test, k_int, j_int, i_int);
1386 }
1387
1388
1389 // Check owned boundary point (e.g., first owned point)
1390 PetscInt k_bnd = info_check.zs; // Global k index
1391 PetscInt j_bnd = info_check.ys; // Global j index
1392 PetscInt i_bnd = info_check.xs; // Global i index
1393 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Attempting test read OWNED BOUNDARY [%d][%d][%d] (Global)\n", rank_test, k_bnd, j_bnd, i_bnd);
1394 Cmpnts test_val_owned_boundary = lUcat_arr_test[k_bnd][j_bnd][i_bnd];
1395 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: SUCCESS reading owned boundary: x=%g\n", rank_test, test_val_owned_boundary.x);
1396
1397
1398 // Check ghost point (e.g., one layer below in k, if applicable)
1399 if (info_check.zs > 0) { // Only if there's a rank below
1400 PetscInt k_ghost = info_check.zs - 1;
1401 PetscInt j_ghost = info_check.ys; // Use start of owned y, simple example
1402 PetscInt i_ghost = info_check.xs; // Use start of owned x, simple example
1403 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Attempting test read GHOST [%d][%d][%d] (Global)\n", rank_test, k_ghost, j_ghost, i_ghost);
1404 Cmpnts test_val_ghost = lUcat_arr_test[k_ghost][j_ghost][i_ghost];
1405 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: SUCCESS reading ghost: x=%g\n", rank_test, test_val_ghost.x);
1406 } else {
1407 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Skipping ghost test read (zs=0).\n", rank_test);
1408 }
1409
1410 // Restore the array
1411 ierr_test = DMDAVecRestoreArrayDOFRead(dm, localVec, &lUcat_arr_test);
1412 if(ierr_test){ LOG_ALLOW(LOCAL, LOG_ERROR, "Rank %d: ERROR %d restoring '%s' array after test read!\n", rank_test, ierr_test, fieldName); }
1413 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Finished testing '%s' access.\n", rank_test, fieldName);
1414 }
1415 } // end if Ucat
1416 } // end debug logging check
1417
1418 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank %d: Completed ghost update for field '%s'.\n", rank, fieldName);
1420 PetscFunctionReturn(0);
1421}
PetscBool is_function_allowed(const char *functionName)
Checks if a given function is in the allow-list.
Definition logging.c:157
A 3D point or vector with PetscScalar components.
Definition variables.h:100
Here is the call graph for this function:
Here is the caller graph for this function:

◆ SetupBoundaryConditions()

PetscErrorCode SetupBoundaryConditions ( SimCtx * simCtx)

(Orchestrator) Sets up all boundary conditions for the simulation.

Definition at line 1428 of file setup.c.

1429{
1430 PetscErrorCode ierr;
1431 PetscFunctionBeginUser;
1432
1434
1435 LOG_ALLOW(GLOBAL,LOG_INFO, "--- Setting up Boundary Conditions ---\n");
1436 // --- Phase 1: Parse and initialize BC configuration for all blocks ---
1437 LOG_ALLOW(GLOBAL,LOG_INFO,"Parsing BC configuration files and initializing boundary condition data structures.\n");
1438 UserCtx *user_finest = simCtx->usermg.mgctx[simCtx->usermg.mglevels-1].user;
1439 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
1440 LOG_ALLOW(GLOBAL,LOG_DEBUG, " -> Processing Block %d:\n", bi);
1441
1442 // --- Generate the filename for the current block ---
1443 const char *current_bc_filename = simCtx->bcs_files[bi];
1444 LOG_ALLOW(GLOBAL,LOG_DEBUG," -> Processing Block %d using config file '%s'\n", bi, current_bc_filename);
1445 // This will populate user_finest[bi].boundary_faces
1446
1447 //ierr = ParseAllBoundaryConditions(&user_finest[bi],current_bc_filename); CHKERRQ(ierr);
1448
1449 ierr = BoundarySystem_Initialize(&user_finest[bi], current_bc_filename); CHKERRQ(ierr);
1450 // Call the adapter to translate into the legacy format
1451 ierr = TranslateModernBCsToLegacy(&user_finest[bi]); CHKERRQ(ierr);
1452 }
1453
1454 // Propogate BC Configuration to coarser levels.
1455 ierr = PropagateBoundaryConfigToCoarserLevels(simCtx); CHKERRQ(ierr);
1456
1457 // --- Calculate Grid Metrics (requires BC configuration) ---
1458 // NOTE: This MUST be called here (after BC initialization but before inlet/outlet calculations) because:
1459 // 1. Periodic BC corrections in metric calculations need boundary_faces data to be populated
1460 // 2. Inlet/Outlet area calculations (below) require computed metrics (Csi, Eta, Zet) to be available
1461 // Previously this was in SetupGridAndSolvers, but that caused metrics to be computed without BC info.
1462 LOG_ALLOW(GLOBAL,LOG_INFO,"Computing grid metrics with boundary condition information.\n");
1463 ierr = CalculateAllGridMetrics(simCtx); CHKERRQ(ierr);
1464
1465 // --- Phase 2: Calculate inlet/outlet properties (requires computed metrics) ---
1466 LOG_ALLOW(GLOBAL,LOG_INFO,"Calculating inlet and outlet face properties.\n");
1467 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
1468 // Call the function to calculate the center of the inlet face & the inlet area, which may be used to calculate Boundary values.
1469 ierr = CalculateInletProperties(&user_finest[bi]); CHKERRQ(ierr);
1470
1471 // Call the function to calculate the center of the outlet face & the outlet area, which may be used to calculate Boundary values.
1472 ierr = CalculateOutletProperties(&user_finest[bi]); CHKERRQ(ierr);
1473 }
1474
1475 LOG_ALLOW(GLOBAL,LOG_INFO, "--- Boundary Conditions setup complete ---\n");
1476
1477
1479 PetscFunctionReturn(0);
1480}
PetscErrorCode BoundarySystem_Initialize(UserCtx *user, const char *bcs_filename)
Initializes the entire boundary system.
Definition Boundaries.c:987
PetscErrorCode PropagateBoundaryConfigToCoarserLevels(SimCtx *simCtx)
Propagates boundary condition configuration from finest to all coarser multigrid levels.
PetscErrorCode TranslateModernBCsToLegacy(UserCtx *user)
Definition Boundaries.c:656
PetscErrorCode CalculateAllGridMetrics(SimCtx *simCtx)
Orchestrates the calculation of all grid metrics.
Definition Metric.c:2388
PetscErrorCode CalculateOutletProperties(UserCtx *user)
Calculates the center and area of the primary OUTLET face.
Definition grid.c:1125
PetscErrorCode CalculateInletProperties(UserCtx *user)
Calculates the center and area of the primary INLET face.
Definition grid.c:1069
Here is the call graph for this function:
Here is the caller graph for this function:

◆ Allocate3DArrayScalar()

PetscErrorCode Allocate3DArrayScalar ( PetscReal ****  array,
PetscInt  nz,
PetscInt  ny,
PetscInt  nx 
)

Allocates a 3D array of PetscReal values using PetscCalloc.

This function dynamically allocates memory for a 3D array of PetscReal values with dimensions nz (layers) x ny (rows) x nx (columns). It uses PetscCalloc1 to ensure the memory is zero-initialized.

The allocation is done in three steps:

  1. Allocate an array of nz pointers (one for each layer).
  2. Allocate a contiguous block for nz*ny row pointers and assign each layer’s row pointers.
  3. Allocate a contiguous block for all nz*ny*nx PetscReal values.

This setup allows the array to be accessed as array[k][j][i], and the memory for the data is contiguous, which improves cache efficiency.

Parameters
[out]arrayPointer to the 3D array to be allocated.
[in]nzNumber of layers (z-direction).
[in]nyNumber of rows (y-direction).
[in]nxNumber of columns (x-direction).
Returns
PetscErrorCode 0 on success, nonzero on failure.

Definition at line 1504 of file setup.c.

1505{
1506 PetscErrorCode ierr;
1507 PetscReal ***data;
1508 PetscReal *dataContiguous;
1509 PetscInt k, j;
1510
1511 PetscFunctionBegin;
1512 /* Step 1: Allocate memory for an array of nz layer pointers (zero-initialized) */
1513 ierr = PetscCalloc1(nz, &data); CHKERRQ(ierr);
1514
1515 /* Step 2: Allocate memory for all row pointers (nz * ny pointers) */
1516 ierr = PetscCalloc1(nz * ny, &data[0]); CHKERRQ(ierr);
1517 for (k = 1; k < nz; k++) {
1518 data[k] = data[0] + k * ny;
1519 }
1520
1521 /* Step 3: Allocate one contiguous block for all data elements (nz*ny*nx) */
1522 ierr = PetscCalloc1(nz * ny * nx, &dataContiguous); CHKERRQ(ierr);
1523
1524 /* Build the 3D pointer structure: each row pointer gets the correct segment of data */
1525 for (k = 0; k < nz; k++) {
1526 for (j = 0; j < ny; j++) {
1527 data[k][j] = dataContiguous + (k * ny + j) * nx;
1528 /* Memory is already zeroed by PetscCalloc1, so no manual initialization is needed */
1529 }
1530 }
1531 *array = data;
1532 PetscFunctionReturn(0);
1533}

◆ Deallocate3DArrayScalar()

PetscErrorCode Deallocate3DArrayScalar ( PetscReal ***  array,
PetscInt  nz,
PetscInt  ny 
)

Deallocates a 3D array of PetscReal values allocated by Allocate3DArrayScalar.

This function frees the memory allocated for a 3D array of PetscReal values. It assumes the memory was allocated using Allocate3DArrayScalar, which allocated three separate memory blocks: one for the contiguous data, one for the row pointers, and one for the layer pointers.

Parameters
[in]arrayPointer to the 3D array to be deallocated.
[in]nzNumber of layers (z-direction).
[in]nyNumber of rows (y-direction).
Returns
PetscErrorCode 0 on success, nonzero on failure.

Definition at line 1549 of file setup.c.

1550{
1551 PetscErrorCode ierr;
1552
1553 PetscFunctionBegin;
1554 if (!array || !array[0] || !array[0][0] ) { // Added more robust check
1555 LOG_ALLOW(GLOBAL, LOG_WARNING, "Deallocate3DArrayScalar called with potentially unallocated or NULL array.\n");
1556 if (array) {
1557 if (array[0]) { // Check if row pointers might exist
1558 // Cannot safely access array[0][0] if array[0] might be invalid/freed
1559 // Standard deallocation below assumes valid pointers.
1560 ierr = PetscFree(array[0]); CHKERRQ(ierr); // Free row pointers if they exist
1561 }
1562 ierr = PetscFree(array); CHKERRQ(ierr); // Free layer pointers if they exist
1563 }
1564 PetscFunctionReturn(0);
1565 }
1566
1567 // --- Standard Deallocation (assuming valid allocation) ---
1568
1569 /* 1. Free the contiguous block of PetscReal values.
1570 The starting address was stored in array[0][0]. */
1571 ierr = PetscFree(array[0][0]); CHKERRQ(ierr); // Free the ACTUAL DATA
1572
1573 /* 2. Free the contiguous block of row pointers.
1574 The starting address was stored in array[0]. */
1575 ierr = PetscFree(array[0]); CHKERRQ(ierr); // Free the ROW POINTERS
1576
1577 /* 3. Free the layer pointer array.
1578 The starting address is 'array' itself. */
1579 ierr = PetscFree(array); CHKERRQ(ierr); // Free the LAYER POINTERS
1580
1581 PetscFunctionReturn(0);
1582}
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29

◆ Allocate3DArrayVector()

PetscErrorCode Allocate3DArrayVector ( Cmpnts ****  array,
PetscInt  nz,
PetscInt  ny,
PetscInt  nx 
)

Allocates a 3D array of Cmpnts structures using PetscCalloc.

The resulting array should later be released with Deallocate3DArrayVector.

This function dynamically allocates memory for a 3D array of Cmpnts (vector) structures with dimensions nz (layers) x ny (rows) x nx (columns). It uses PetscCalloc1 to ensure that all allocated memory is zero-initialized.

The allocation procedure is similar to Allocate3DArrayScalar:

  1. Allocate an array of nz pointers (one for each layer).
  2. Allocate a contiguous block for nz*ny row pointers.
  3. Allocate one contiguous block for nz*ny*nx Cmpnts structures.

After allocation, the array can be accessed as array[k][j][i] and each element (a Cmpnts structure) will have its x, y, and z fields initialized to 0.0.

Parameters
[out]arrayPointer to the 3D array to be allocated.
[in]nzNumber of layers in the z-direction.
[in]nyNumber of rows in the y-direction.
[in]nxNumber of columns in the x-direction.
Returns
PetscErrorCode 0 on success, nonzero on failure.

Definition at line 1606 of file setup.c.

1607{
1608 PetscErrorCode ierr;
1609 Cmpnts ***data;
1610 Cmpnts *dataContiguous;
1611 PetscInt k, j;
1612 PetscMPIInt rank;
1613
1614 PetscFunctionBegin;
1615
1616 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
1617
1618 /* Step 1: Allocate memory for nz layer pointers (zeroed) */
1619 ierr = PetscCalloc1(nz, &data); CHKERRQ(ierr);
1620
1621 LOG_ALLOW(LOCAL,LOG_DEBUG," [Rank %d] memory allocated for outermost layer (%d k-layer pointers).\n",rank,nz);
1622
1623 /* Step 2: Allocate memory for all row pointers (nz * ny pointers) */
1624 ierr = PetscCalloc1(nz * ny, &data[0]); CHKERRQ(ierr);
1625 for (k = 1; k < nz; k++) {
1626 data[k] = data[0] + k * ny;
1627 }
1628
1629 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] memory allocated for %dx%d row pointers.\n",rank,nz,ny);
1630
1631 /* Step 3: Allocate one contiguous block for nz*ny*nx Cmpnts structures (zeroed) */
1632 ierr = PetscCalloc1(nz * ny * nx, &dataContiguous); CHKERRQ(ierr);
1633
1634 LOG_ALLOW(GLOBAL,LOG_DEBUG,"[Rank %d] memory allocated for contigous block of %dx%dx%d Cmpnts structures).\n",rank,nz,ny,nx);
1635
1636 /* Build the 3D pointer structure for vector data */
1637 for (k = 0; k < nz; k++) {
1638 for (j = 0; j < ny; j++) {
1639 data[k][j] = dataContiguous + (k * ny + j) * nx;
1640 /* The PetscCalloc1 call has already initialized each Cmpnts to zero. */
1641 }
1642 }
1643
1644 LOG_ALLOW(GLOBAL,LOG_DEBUG,"[Rank %d] 3D pointer structure for vector data created. \n",rank);
1645
1646 *array = data;
1647 PetscFunctionReturn(0);
1648}

◆ Deallocate3DArrayVector()

PetscErrorCode Deallocate3DArrayVector ( Cmpnts ***  array,
PetscInt  nz,
PetscInt  ny 
)

Deallocates a 3D array of Cmpnts structures allocated by Allocate3DArrayVector.

This function frees the memory allocated for a 3D array of Cmpnts structures. It assumes the memory was allocated using Allocate3DArrayVector, which created three separate memory blocks: one for the contiguous vector data, one for the row pointers, and one for the layer pointers.

Parameters
[in]arrayPointer to the 3D array to be deallocated.
[in]nzNumber of layers in the z-direction.
[in]nyNumber of rows in the y-direction.
Returns
PetscErrorCode 0 on success, nonzero on failure.

Definition at line 1664 of file setup.c.

1665{
1666 PetscErrorCode ierr;
1667
1668 PetscFunctionBegin;
1669 // If array is NULL or hasn't been allocated properly, just return.
1670 if (!array || !array[0] || !array[0][0] ) {
1671 LOG_ALLOW(GLOBAL, LOG_WARNING, "Deallocate3DArrayVector called with potentially unallocated or NULL array.\n");
1672 // Attempt to free what might exist, but be cautious
1673 if (array) {
1674 if (array[0]) { // Check if row pointers were allocated
1675 // We don't have a direct pointer to the contiguous data block
1676 // saved separately in this allocation scheme. The allocation relies
1677 // on array[0][0] pointing to it. If array[0] was freed first,
1678 // accessing array[0][0] is unsafe.
1679 // The allocation scheme where the contiguous data block is not
1680 // stored separately makes safe deallocation tricky if freeing
1681 // happens out of order or if parts are NULL.
1682
1683 // A SAFER ALLOCATION/DEALLOCATION would store the data pointer separately.
1684 // Given the current allocation scheme, the order MUST be:
1685 // 1. Free the data block (pointed to by array[0][0])
1686 // 2. Free the row pointer block (pointed to by array[0])
1687 // 3. Free the layer pointer block (pointed to by array)
1688
1689 // Let's assume the allocation was successful and pointers are valid.
1690 // Get pointer to the contiguous data block *before* freeing row pointers
1691 Cmpnts *dataContiguous = array[0][0];
1692 ierr = PetscFree(dataContiguous); CHKERRQ(ierr); // Free data block
1693
1694 // Now free the row pointers block
1695 ierr = PetscFree(array[0]); CHKERRQ(ierr); // Free row pointers
1696
1697 }
1698 // Finally, free the array of layer pointers
1699 ierr = PetscFree(array); CHKERRQ(ierr);
1700 }
1701 PetscFunctionReturn(0); // Return gracefully if input was NULL initially
1702 }
1703
1704
1705 // --- Standard Deallocation (assuming valid allocation) ---
1706
1707 /* 1. Free the contiguous block of Cmpnts structures.
1708 The starting address was stored in array[0][0] by Allocate3DArrayVector. */
1709 ierr = PetscFree(array[0][0]); CHKERRQ(ierr); // Free the ACTUAL DATA
1710
1711 /* 2. Free the contiguous block of row pointers.
1712 The starting address was stored in array[0]. */
1713 ierr = PetscFree(array[0]); CHKERRQ(ierr); // Free the ROW POINTERS
1714
1715 /* 3. Free the layer pointer array.
1716 The starting address is 'array' itself. */
1717 ierr = PetscFree(array); CHKERRQ(ierr); // Free the LAYER POINTERS
1718
1719 PetscFunctionReturn(0);
1720}

◆ GetOwnedCellRange()

PetscErrorCode GetOwnedCellRange ( const DMDALocalInfo *  info_nodes,
PetscInt  dim,
PetscInt *  xs_cell_global_out,
PetscInt *  xm_cell_local_out 
)

Gets the global starting index of cells owned by this rank and the number of such cells.

Determines the global starting index and number of CELLS owned by the current processor in a specified dimension.

A cell's global index is considered the same as its origin node's global index. This function assumes a node-centered DMDA where info_nodes provides all necessary information:

  • info_nodes->xs, ys, zs: Global starting index of the first node owned by this rank (excluding ghosts).
  • info_nodes->xm, ym, zm: Number of nodes owned by this rank in each dimension (excluding ghosts).
  • info_nodes->mx, my, mz: Total number of global nodes in each dimension for the entire domain.

A cell C_k (0-indexed) is defined by its origin node N_k and extends to node N_{k+1}. Thus, the last node in the global domain cannot be an origin for a cell. The last possible cell origin node index is PhysicalGlobalNodesInDim - 2.

Warning
THIS FUNCTION MAKES A CRITICAL ASSUMPTION: It assumes the DMDA it receives information from was created with dimensions of (physical_nodes_x + 1), (physical_nodes_y + 1), etc., where the +1 node is a "user-defined ghost" not meant for physical calculations. It will internally subtract 1 from the global dimensions (mx, my, mz) before calculating the number of cells.
Parameters
[in]info_nodesPointer to the DMDALocalInfo struct for the current rank. This struct contains local ownership information (xs, xm, etc.) and global domain dimensions (mx, my, mz for nodes).
[in]dimThe dimension for which to get the cell range (0 for i/x, 1 for j/y, 2 for k/z).
[out]xs_cell_global_outPointer to store the global index of the first cell whose origin node is owned by this rank.
[out]xm_cell_local_outPointer to store the number of cells for which this rank owns the origin node.
Returns
PetscErrorCode 0 on success, or an error code on failure.

Definition at line 1756 of file setup.c.

1760{
1761 PetscErrorCode ierr = 0; // Standard PETSc error code, not explicitly set here but good practice.
1762 PetscInt xs_node_global_rank; // Global index of the first node owned by this rank in the specified dimension.
1763 PetscInt num_nodes_owned_rank; // Number of nodes owned by this rank in this dimension (local count, excluding ghosts).
1764 PetscInt GlobalNodesInDim_from_info; // Total number of DA points in this dimension, from DMDALocalInfo.
1765
1766 PetscFunctionBeginUser;
1767
1768 // --- 1. Input Validation ---
1769 if (!info_nodes || !xs_cell_global_out || !xm_cell_local_out) {
1770 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Null pointer passed to GetOwnedCellRange.");
1771 }
1772
1773 // --- 2. Extract Node Ownership and Global Dimension Information from DMDALocalInfo ---
1774 if (dim == 0) { // I-direction
1775 xs_node_global_rank = info_nodes->xs;
1776 num_nodes_owned_rank = info_nodes->xm;
1777 GlobalNodesInDim_from_info = info_nodes->mx;
1778 } else if (dim == 1) { // J-direction
1779 xs_node_global_rank = info_nodes->ys;
1780 num_nodes_owned_rank = info_nodes->ym;
1781 GlobalNodesInDim_from_info = info_nodes->my;
1782 } else if (dim == 2) { // K-direction
1783 xs_node_global_rank = info_nodes->zs;
1784 num_nodes_owned_rank = info_nodes->zm;
1785 GlobalNodesInDim_from_info = info_nodes->mz;
1786 } else {
1787 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid dimension %d in GetOwnedCellRange. Must be 0, 1, or 2.", dim);
1788 }
1789
1790 // --- 3. Correct for User-Defined Ghost Node ---
1791 // Per the function's contract (@warning), the DA size includes an extra, non-physical
1792 // node. We subtract 1 to get the true number of physical nodes for cell calculations.
1793 const PetscInt physical_nodes_in_dim = GlobalNodesInDim_from_info - 1;
1794
1795 // --- 4. Handle Edge Cases for Physical Domain Size ---
1796 // If the physical domain has 0 or 1 node, no cells can be formed.
1797 if (physical_nodes_in_dim <= 1) {
1798 *xs_cell_global_out = xs_node_global_rank; // Still report the rank's starting node
1799 *xm_cell_local_out = 0; // But 0 cells
1800 PetscFunctionReturn(0);
1801 }
1802
1803 // --- 5. Determine Cell Ownership Based on Corrected Node Ownership ---
1804 // The first cell this rank *could* define has its origin at the first node this rank owns.
1805 *xs_cell_global_out = xs_node_global_rank;
1806
1807 // If the rank owns no nodes in this dimension, it can't form any cell origins.
1808 if (num_nodes_owned_rank == 0) {
1809 *xm_cell_local_out = 0;
1810 } else {
1811 // --- BUG FIX APPLIED HERE ---
1812 // The previous logic incorrectly assumed a cell's end node (N_{k+1}) must be on the
1813 // same rank as its origin node (N_k). The correct logic is to find the intersection
1814 // between the nodes this rank owns and the nodes that are valid origins globally.
1815
1816 // The first node owned by the rank is its first potential origin.
1817 PetscInt first_owned_origin = xs_node_global_rank;
1818
1819 // The absolute last node owned by this rank. Any node up to and including this one
1820 // is a potential cell origin from this rank's perspective.
1821 PetscInt last_node_owned_by_rank = xs_node_global_rank + num_nodes_owned_rank - 1;
1822
1823 // The absolute last node in the entire PHYSICAL domain that can serve as a cell origin.
1824 // If there are `N` physical nodes (0 to N-1), this index is `N-2`.
1825 PetscInt last_possible_origin_global_idx = physical_nodes_in_dim - 2;
1826
1827 // The actual last origin this rank can provide is the *minimum* of what it owns
1828 // and what is globally possible. This correctly handles both ranks in the middle of
1829 // the domain and the very last rank.
1830 PetscInt actual_last_origin_this_rank_can_form = PetscMin(last_node_owned_by_rank, last_possible_origin_global_idx);
1831
1832 // If the first potential origin this rank owns is already beyond the actual last
1833 // origin it can form, then this rank forms no valid cell origins. This happens if
1834 // the rank only owns the very last physical node.
1835 if (first_owned_origin > actual_last_origin_this_rank_can_form) {
1836 *xm_cell_local_out = 0;
1837 } else {
1838 // The number of cells is the count of valid origins this rank owns.
1839 // (Count = Last Index - First Index + 1)
1840 *xm_cell_local_out = actual_last_origin_this_rank_can_form - first_owned_origin + 1;
1841 }
1842 }
1843
1844 PetscFunctionReturn(ierr);
1845}
Here is the caller graph for this function:

◆ ComputeAndStoreNeighborRanks()

PetscErrorCode ComputeAndStoreNeighborRanks ( UserCtx * user)

Computes and stores the Cartesian neighbor ranks for the DMDA decomposition.

This function retrieves the neighbor information from the primary DMDA (user->da) and stores the face neighbors (xm, xp, ym, yp, zm, zp) in the user->neighbors structure. It assumes a standard PETSc ordering for the neighbors array returned by DMDAGetNeighbors. If DMDAGetNeighbors returns a negative rank that is not MPI_PROC_NULL (which can happen in some PETSc/MPI configurations for non-periodic boundaries if not fully standard), this function will sanitize it to MPI_PROC_NULL to prevent issues.

Parameters
[in,out]userPointer to the UserCtx structure where neighbor info will be stored.
Returns
PetscErrorCode 0 on success, non-zero on failure.

Definition at line 1863 of file setup.c.

1864{
1865 PetscErrorCode ierr;
1866 PetscMPIInt rank;
1867 PetscMPIInt size; // MPI communicator size
1868 const PetscMPIInt *neighbor_ranks_ptr; // Pointer to raw neighbor data from PETSc
1869
1870 PetscFunctionBeginUser;
1872 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1873 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr); // Get MPI size for validation
1874
1875 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank %d: Computing DMDA neighbor ranks.\n", rank);
1876
1877 if (!user || !user->da) {
1878 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx or user->da is NULL in ComputeAndStoreNeighborRanks.");
1879 }
1880
1881 // Get the neighbor information from the DMDA
1882 // neighbor_ranks_ptr will point to an internal PETSc array of 27 ranks.
1883 ierr = DMDAGetNeighbors(user->da, &neighbor_ranks_ptr); CHKERRQ(ierr);
1884
1885 // Log the raw values from DMDAGetNeighbors for boundary-relevant directions for debugging
1886 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "[Rank %d]Raw DMDAGetNeighbors: xm_raw=%d, xp_raw=%d, ym_raw=%d, yp_raw=%d, zm_raw=%d, zp_raw=%d. MPI_PROC_NULL is %d.\n",
1887 rank,
1888 neighbor_ranks_ptr[12], neighbor_ranks_ptr[14],
1889 neighbor_ranks_ptr[10], neighbor_ranks_ptr[16],
1890 neighbor_ranks_ptr[4], neighbor_ranks_ptr[22],
1891 (int)MPI_PROC_NULL);
1892
1893 // PETSc standard indices for 3D face neighbors from the 27-point stencil:
1894 // Index = k_offset*9 + j_offset*3 + i_offset (where offsets -1,0,1 map to 0,1,2)
1895 // Center: (i_off=1, j_off=1, k_off=1) => 1*9 + 1*3 + 1 = 13
1896 // X-min: (i_off=0, j_off=1, k_off=1) => 1*9 + 1*3 + 0 = 12
1897 // X-plus: (i_off=2, j_off=1, k_off=1) => 1*9 + 1*3 + 2 = 14
1898 // Y-min: (i_off=1, j_off=0, k_off=1) => 1*9 + 0*3 + 1 = 10
1899 // Y-plus: (i_off=1, j_off=2, k_off=1) => 1*9 + 2*3 + 1 = 16
1900 // Z-min: (i_off=1, j_off=1, k_off=0) => 0*9 + 1*3 + 1 = 4
1901 // Z-plus: (i_off=1, j_off=1, k_off=2) => 2*9 + 1*3 + 1 = 22
1902
1903 if (neighbor_ranks_ptr[13] != rank) {
1904 LOG_ALLOW(GLOBAL, LOG_WARNING, "Rank %d: DMDAGetNeighbors center index (13) is %d, expected current rank %d. Neighbor indexing might be non-standard or DMDA small.\n",
1905 rank, neighbor_ranks_ptr[13], rank);
1906 // This warning is important. If the center isn't the current rank, the offsets are likely wrong.
1907 // However, PETSc should ensure this unless the DM is too small for a 3x3x3 stencil.
1908 }
1909
1910 // Assign and sanitize each neighbor rank
1911 PetscMPIInt temp_neighbor;
1912
1913 temp_neighbor = neighbor_ranks_ptr[12]; // xm
1914 if (temp_neighbor < 0 || temp_neighbor >= size) {
1915 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid xm neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1916 user->neighbors.rank_xm = MPI_PROC_NULL;
1917 } else {
1918 user->neighbors.rank_xm = temp_neighbor;
1919 }
1920
1921 temp_neighbor = neighbor_ranks_ptr[14]; // xp
1922 if (temp_neighbor < 0 || temp_neighbor >= size) {
1923 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid xp neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1924 user->neighbors.rank_xp = MPI_PROC_NULL;
1925 } else {
1926 user->neighbors.rank_xp = temp_neighbor;
1927 }
1928
1929 temp_neighbor = neighbor_ranks_ptr[10]; // ym
1930 if (temp_neighbor < 0 || temp_neighbor >= size) {
1931 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid ym neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1932 user->neighbors.rank_ym = MPI_PROC_NULL;
1933 } else {
1934 user->neighbors.rank_ym = temp_neighbor;
1935 }
1936
1937 temp_neighbor = neighbor_ranks_ptr[16]; // yp
1938 if (temp_neighbor < 0 || temp_neighbor >= size) {
1939 // The log for index 16 was "zm" in your output, should be yp
1940 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid yp neighbor (raw index 16) %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1941 user->neighbors.rank_yp = MPI_PROC_NULL;
1942 } else {
1943 user->neighbors.rank_yp = temp_neighbor;
1944 }
1945
1946 temp_neighbor = neighbor_ranks_ptr[4]; // zm
1947 if (temp_neighbor < 0 || temp_neighbor >= size) {
1948 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid zm neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1949 user->neighbors.rank_zm = MPI_PROC_NULL;
1950 } else {
1951 user->neighbors.rank_zm = temp_neighbor;
1952 }
1953
1954 temp_neighbor = neighbor_ranks_ptr[22]; // zp
1955 if (temp_neighbor < 0 || temp_neighbor >= size) {
1956 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid zp neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1957 user->neighbors.rank_zp = MPI_PROC_NULL;
1958 } else {
1959 user->neighbors.rank_zp = temp_neighbor;
1960 }
1961
1962 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "[Rank %d] Stored user->neighbors: xm=%d, xp=%d, ym=%d, yp=%d, zm=%d, zp=%d\n", rank,
1963 user->neighbors.rank_xm, user->neighbors.rank_xp,
1964 user->neighbors.rank_ym, user->neighbors.rank_yp,
1965 user->neighbors.rank_zm, user->neighbors.rank_zp);
1966 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT); // Ensure logs are flushed
1967
1968 // Note: neighbor_ranks_ptr memory is managed by PETSc, do not free it.
1970 PetscFunctionReturn(0);
1971}
PetscMPIInt rank_zm
Definition variables.h:182
PetscMPIInt rank_yp
Definition variables.h:181
PetscMPIInt rank_ym
Definition variables.h:181
PetscMPIInt rank_xp
Definition variables.h:180
RankNeighbors neighbors
Definition variables.h:740
PetscMPIInt rank_xm
Definition variables.h:180
PetscMPIInt rank_zp
Definition variables.h:182
Here is the caller graph for this function:

◆ SetDMDAProcLayout()

PetscErrorCode SetDMDAProcLayout ( DM  dm,
UserCtx *  user 
)

Sets the processor layout for a given DMDA based on PETSc options.

Reads the desired number of processors in x, y, and z directions using PETSc options (e.g., -dm_processors_x, -dm_processors_y, -dm_processors_z). If an option is not provided for a direction, PETSC_DECIDE is used for that direction. Applies the layout using DMDASetNumProcs.

Also stores the retrieved/decided values in user->procs_x/y/z if user context is provided.

Parameters
dmThe DMDA object to configure the layout for.
userPointer to the UserCtx structure (optional, used to store layout values).
Returns
PetscErrorCode 0 on success, non-zero on failure.

Definition at line 1990 of file setup.c.

1991{
1992 PetscErrorCode ierr;
1993 PetscMPIInt size, rank;
1994 PetscInt px = PETSC_DECIDE, py = PETSC_DECIDE, pz = PETSC_DECIDE;
1995 PetscBool px_set = PETSC_FALSE, py_set = PETSC_FALSE, pz_set = PETSC_FALSE;
1996 SimCtx *simCtx = user->simCtx;
1997
1998 // Set no.of processors in direction 1
1999 if(simCtx->da_procs_x) {
2000 px_set = PETSC_TRUE;
2001 px = simCtx->da_procs_x;
2002 }
2003 // Set no.of processors in direction 2
2004 if(simCtx->da_procs_y) {
2005 py_set = PETSC_TRUE;
2006 py = simCtx->da_procs_y;
2007 }
2008 // Set no.of processors in direction 1
2009 if(simCtx->da_procs_z) {
2010 pz_set = PETSC_TRUE;
2011 pz = simCtx->da_procs_z;
2012 }
2013
2014 PetscFunctionBeginUser;
2016 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size); CHKERRQ(ierr);
2017 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank); CHKERRQ(ierr);
2018 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank %d: Configuring DMDA processor layout for %d total processes.\n", rank, size);
2019
2020 // --- Validate User Input (Optional but Recommended) ---
2021 // Check if specified processor counts multiply to the total MPI size
2022 if (px_set && py_set && pz_set) {
2023 if (px * py * pz != size) {
2024 SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_INCOMP,
2025 "Specified processor layout %d x %d x %d = %d does not match MPI size %d",
2026 px, py, pz, px * py * pz, size);
2027 }
2028 LOG_ALLOW(GLOBAL, LOG_INFO, "Using specified processor layout: %d x %d x %d\n", px, py, pz);
2029 } else if (px_set || py_set || pz_set) {
2030 // If only some are set, PETSC_DECIDE will be used for others
2031 LOG_ALLOW(GLOBAL, LOG_INFO, "Using partially specified processor layout: %d x %d x %d (PETSC_DECIDE for unspecified)\n", px, py, pz);
2032 } else {
2033 LOG_ALLOW(GLOBAL, LOG_INFO, "Using fully automatic processor layout (PETSC_DECIDE x PETSC_DECIDE x PETSC_DECIDE)\n");
2034 }
2035 // Additional checks: Ensure px, py, pz are positive if set
2036 if ((px_set && px <= 0) || (py_set && py <= 0) || (pz_set && pz <= 0)) {
2037 SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Specified processor counts must be positive.");
2038 }
2039
2040
2041 // --- Apply the layout to the DMDA ---
2042 ierr = DMDASetNumProcs(dm, px, py, pz); CHKERRQ(ierr);
2043 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Rank %d: DMDASetNumProcs called with px=%d, py=%d, pz=%d.\n", rank, px, py, pz);
2044
2045 // --- Store the values in UserCtx (Optional) ---
2046 // Note: If PETSC_DECIDE was used, PETSc calculates the actual values during DMSetUp.
2047 // We store the *requested* values here. To get the *actual* values used,
2048 // you would need to call DMDAGetInfo after DMSetUp.
2049 /*
2050 if (user) {
2051 user->procs_x = px;
2052 user->procs_y = py;
2053 user->procs_z = pz;
2054 }
2055 */
2057 PetscFunctionReturn(0);
2058}

◆ SetupDomainRankInfo()

PetscErrorCode SetupDomainRankInfo ( SimCtx *  simCtx)

Sets up the full rank communication infrastructure for all blocks.

Sets up the full rank communication infrastructure, including neighbor ranks and bounding box exchange.

This function orchestrates the setup of the parallel communication map. It uses the existing helper functions to sequentially gather and broadcast the bounding box information for each computational block. All ranks participate in each step, assembling the final, unified multi-block list in parallel.

Parameters
[in,out]simCtxThe master simulation context. After this call, simCtx->bboxlist and the relevant user->neighbors fields will be populated on all ranks.

Definition at line 2074 of file setup.c.

2075{
2076 PetscErrorCode ierr;
2077 PetscInt nblk = simCtx->block_number;
2078 PetscInt size = simCtx->size;
2079 BoundingBox *final_bboxlist = NULL;
2080
2081 PetscFunctionBeginUser;
2083
2084 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting full rank communication setup for %d block(s).\n", nblk);
2085
2086 UserCtx *user_finest = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
2087
2088 // --- Step 1: Compute neighbor ranks (unchanged) ---
2089 for (int bi = 0; bi < nblk; bi++) {
2090 ierr = ComputeAndStoreNeighborRanks(&user_finest[bi]); CHKERRQ(ierr);
2091 }
2092 LOG_ALLOW(GLOBAL, LOG_INFO, "Neighbor ranks computed and stored for all blocks.\n");
2093
2094 // --- Step 2: Allocate the final, unified list on ALL ranks ---
2095 // Every rank will build this list in parallel.
2096 ierr = PetscMalloc1(size * nblk, &final_bboxlist); CHKERRQ(ierr);
2097
2098 // --- Step 3: Loop through each block, gather then broadcast its bbox list ---
2099 for (int bi = 0; bi < nblk; bi++) {
2100 // This is a temporary pointer for the current block's list.
2101 BoundingBox *block_bboxlist = NULL;
2102
2103 LOG_ALLOW(GLOBAL, LOG_INFO, "Processing bounding boxes for block %d...\n", bi);
2104
2105 // A) GATHER: On rank 0, block_bboxlist is allocated and filled. On others, it's NULL.
2106 ierr = GatherAllBoundingBoxes(&user_finest[bi], &block_bboxlist); CHKERRQ(ierr);
2107 LOG_ALLOW(GLOBAL, LOG_DEBUG, " -> Gather complete for block %d.\n", bi);
2108
2109 // B) BROADCAST: On non-root ranks, block_bboxlist is allocated. Then, the data
2110 // from rank 0 is broadcast to all ranks. After this call, ALL ranks have
2111 // an identical, complete copy of the bounding boxes for the current block.
2112 ierr = BroadcastAllBoundingBoxes(&user_finest[bi], &block_bboxlist); CHKERRQ(ierr);
2113 LOG_ALLOW(GLOBAL, LOG_DEBUG, " -> Broadcast complete for block %d.\n", bi);
2114
2115 // C) ASSEMBLE: Every rank now copies the data for this block into the
2116 // correct segment of its final, unified list.
2117 for (int r = 0; r < size; r++) {
2118 // The layout is [r0b0, r1b0, ..., r(size-1)b0, r0b1, r1b1, ...]
2119 final_bboxlist[bi * size + r] = block_bboxlist[r];
2120 }
2121 LOG_ALLOW(GLOBAL, LOG_DEBUG, " -> Assembly into final list complete for block %d.\n", bi);
2122
2123 // D) CLEANUP: Free the temporary list for this block on ALL ranks before the next iteration.
2124 // Your helper functions use malloc, so we must use free.
2125 free(block_bboxlist);
2126 }
2127
2128 // --- Step 4: Assign the final pointer and run the last setup step ---
2129 simCtx->bboxlist = final_bboxlist;
2130 LOG_ALLOW(GLOBAL, LOG_INFO, "Final unified bboxlist created on all ranks and stored in SimCtx.\n");
2131
2132 ierr = SetupDomainCellDecompositionMap(&user_finest[0]); CHKERRQ(ierr);
2133 LOG_ALLOW(GLOBAL, LOG_INFO, "Domain Cell Composition set and broadcasted.\n");
2134
2135 LOG_ALLOW(GLOBAL, LOG_INFO, "SetupDomainRankInfo: Completed successfully.\n");
2136
2138 PetscFunctionReturn(0);
2139}
PetscErrorCode BroadcastAllBoundingBoxes(UserCtx *user, BoundingBox **bboxlist)
Broadcasts the bounding box information collected on rank 0 to all other ranks.
Definition grid.c:1016
PetscErrorCode GatherAllBoundingBoxes(UserCtx *user, BoundingBox **allBBoxes)
Gathers local bounding boxes from all MPI processes to rank 0.
Definition grid.c:948
PetscErrorCode ComputeAndStoreNeighborRanks(UserCtx *user)
Computes and stores the Cartesian neighbor ranks for the DMDA decomposition.
Definition setup.c:1863
PetscErrorCode SetupDomainCellDecompositionMap(UserCtx *user)
Creates and distributes a map of the domain's cell decomposition to all ranks.
Definition setup.c:2335
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
Here is the call graph for this function:
Here is the caller graph for this function:

◆ Contra2Cart()

PetscErrorCode Contra2Cart ( UserCtx *  user)

Reconstructs Cartesian velocity (Ucat) at cell centers from contravariant velocity (Ucont) defined on cell faces.

This function performs the transformation from a contravariant velocity representation (which is natural on a curvilinear grid) to a Cartesian (x,y,z) representation. For each interior computational cell owned by the rank, it performs the following:

  1. It averages the contravariant velocity components (U¹, U², U³) from the surrounding faces to get an estimate of the contravariant velocity at the cell center.
  2. It averages the metric vectors (Csi, Eta, Zet) from the surrounding faces to get an estimate of the metric tensor at the cell center. This tensor forms the transformation matrix.
  3. It solves the linear system [MetricTensor] * [ucat] = [ucont] for the Cartesian velocity vector ucat = (u,v,w) using Cramer's rule.
  4. The computed Cartesian velocity is stored in the global user->Ucat vector.

The function operates on local, ghosted versions of the input vectors (user->lUcont, user->lCsi, etc.) to ensure stencils are valid across processor boundaries.

Parameters
[in,out]userPointer to the UserCtx structure. The function reads from user->lUcont, user->lCsi, user->lEta, user->lZet, user->lNvert and writes to the global user->Ucat vector.
Returns
PetscErrorCode 0 on success.
Note
  • This function should be called AFTER user->lUcont and all local metric vectors (user->lCsi, etc.) have been populated with up-to-date ghost values via UpdateLocalGhosts.
  • It only computes Ucat for interior cells (not on physical boundaries) and for cells not marked as solid/blanked by user->lNvert.
  • The caller is responsible for subsequently applying boundary conditions to user->Ucat and calling UpdateLocalGhosts(user, "Ucat") to populate user->lUcat.

Definition at line 2177 of file setup.c.

2178{
2179 PetscErrorCode ierr;
2180 DMDALocalInfo info;
2181 Cmpnts ***lcsi_arr, ***leta_arr, ***lzet_arr; // Local metric arrays
2182 Cmpnts ***lucont_arr; // Local contravariant velocity array
2183 Cmpnts ***gucat_arr; // Global Cartesian velocity array
2184 PetscReal ***lnvert_arr; // Local Nvert array
2185 PetscReal ***laj_arr; // Local Jacobian Determinant inverse array
 // NOTE(review): laj_arr is acquired and restored but never read in the loop
 // below (the aj_center scaling is commented out) — confirm before removing.
2186
2187 PetscFunctionBeginUser;
2189 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Starting Contravariant-to-Cartesian velocity transformation.\n");
2190
2191 // --- 1. Get DMDA Info and Check for Valid Inputs ---
2192 // All inputs (lUcont, lCsi, etc.) and outputs (Ucat) are on DMs from the UserCtx.
2193 // We get local info from fda, which governs the layout of most arrays here.
2194 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2195 if (!user->lUcont || !user->lCsi || !user->lEta || !user->lZet || !user->lNvert || !user->Ucat) {
2196 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Contra2Cart requires lUcont, lCsi/Eta/Zet, lNvert, and Ucat to be non-NULL.");
2197 }
2198
2199
2200 // --- 2. Get Read-Only Array Access to Local Input Vectors (with ghosts) ---
2201 ierr = DMDAVecGetArrayRead(user->fda, user->lUcont, &lucont_arr); CHKERRQ(ierr);
2202 ierr = DMDAVecGetArrayRead(user->fda, user->lCsi, &lcsi_arr); CHKERRQ(ierr);
2203 ierr = DMDAVecGetArrayRead(user->fda, user->lEta, &leta_arr); CHKERRQ(ierr);
2204 ierr = DMDAVecGetArrayRead(user->fda, user->lZet, &lzet_arr); CHKERRQ(ierr);
2205 ierr = DMDAVecGetArrayRead(user->da, user->lNvert, &lnvert_arr); CHKERRQ(ierr);
2206 ierr = DMDAVecGetArrayRead(user->da, user->lAj, &laj_arr); CHKERRQ(ierr);
2207
2208 // --- 3. Get Write-Only Array Access to the Global Output Vector ---
2209 // We compute for local owned cells and write into the global vector.
2210 // PETSc handles mapping the global indices to the correct local memory locations.
2211 ierr = DMDAVecGetArray(user->fda, user->Ucat, &gucat_arr); CHKERRQ(ierr);
2212
2213
2214 // --- 4. Define Loop Bounds for INTERIOR Cells ---
2215 // We use adjusted bounds to avoid calculating Ucat on the physical domain boundaries,
2216 // as these are typically set explicitly by boundary condition functions.
2217 // The stencils use indices like i-1, j-1, k-1, so we must start loops at least at index 1.
2218 PetscInt i_start = (info.xs == 0) ? info.xs + 1 : info.xs;
2219 PetscInt i_end = (info.xs + info.xm == info.mx) ? info.xs + info.xm - 1 : info.xs + info.xm;
2220
2221 PetscInt j_start = (info.ys == 0) ? info.ys + 1 : info.ys;
2222 PetscInt j_end = (info.ys + info.ym == info.my) ? info.ys + info.ym - 1 : info.ys + info.ym;
2223
2224 PetscInt k_start = (info.zs == 0) ? info.zs + 1 : info.zs;
2225 PetscInt k_end = (info.zs + info.zm == info.mz) ? info.zs + info.zm - 1 : info.zs + info.zm;
2226
2227 // --- 5. Main Computation Loop ---
2228 // Loops over the GLOBAL indices of interior cells owned by this rank.
2229 for (PetscInt k_cell = k_start; k_cell < k_end; ++k_cell) {
2230 for (PetscInt j_cell = j_start; j_cell < j_end; ++j_cell) {
2231 for (PetscInt i_cell = i_start; i_cell < i_end; ++i_cell) {
2232
2233 // Check if the cell is a fluid cell (not solid/blanked)
2234 // if (lnvert_arr[k_cell][j_cell][i_cell] > 0.1) continue; // Skip solid/blanked cells
 // NOTE(review): the Nvert fluid/solid skip above is currently disabled, so
 // Ucat is computed in solid/blanked cells too — confirm this is intended.
2235
2236 // Transformation matrix [mat] is the metric tensor at the cell center,
2237 // estimated by averaging metrics from adjacent faces.
2238 PetscReal mat[3][3];
2239
2240 // PetscReal aj_center = laj_arr[k_cell+1][j_cell+1][i_cell+1];
2241
2242 mat[0][0] = 0.5 * (lcsi_arr[k_cell][j_cell][i_cell-1].x + lcsi_arr[k_cell][j_cell][i_cell].x); //* aj_center;
2243 mat[0][1] = 0.5 * (lcsi_arr[k_cell][j_cell][i_cell-1].y + lcsi_arr[k_cell][j_cell][i_cell].y); //* aj_center;
2244 mat[0][2] = 0.5 * (lcsi_arr[k_cell][j_cell][i_cell-1].z + lcsi_arr[k_cell][j_cell][i_cell].z); //* aj_center;
2245
2246 mat[1][0] = 0.5 * (leta_arr[k_cell][j_cell-1][i_cell].x + leta_arr[k_cell][j_cell][i_cell].x); //* aj_center;
2247 mat[1][1] = 0.5 * (leta_arr[k_cell][j_cell-1][i_cell].y + leta_arr[k_cell][j_cell][i_cell].y); //* aj_center;
2248 mat[1][2] = 0.5 * (leta_arr[k_cell][j_cell-1][i_cell].z + leta_arr[k_cell][j_cell][i_cell].z); //* aj_center;
2249
2250 mat[2][0] = 0.5 * (lzet_arr[k_cell-1][j_cell][i_cell].x + lzet_arr[k_cell][j_cell][i_cell].x); //* aj_center;
2251 mat[2][1] = 0.5 * (lzet_arr[k_cell-1][j_cell][i_cell].y + lzet_arr[k_cell][j_cell][i_cell].y); //* aj_center;
2252 mat[2][2] = 0.5 * (lzet_arr[k_cell-1][j_cell][i_cell].z + lzet_arr[k_cell][j_cell][i_cell].z); //* aj_center;
2253
2254 // Contravariant velocity vector `q` at the cell center,
2255 // estimated by averaging face-based contravariant velocities.
2256 PetscReal q[3];
2257 q[0] = 0.5 * (lucont_arr[k_cell][j_cell][i_cell-1].x + lucont_arr[k_cell][j_cell][i_cell].x); // U¹ at cell center
2258 q[1] = 0.5 * (lucont_arr[k_cell][j_cell-1][i_cell].y + lucont_arr[k_cell][j_cell][i_cell].y); // U² at cell center
2259 q[2] = 0.5 * (lucont_arr[k_cell-1][j_cell][i_cell].z + lucont_arr[k_cell][j_cell][i_cell].z); // U³ at cell center
2260
2261 // Solve the 3x3 system `mat * ucat = q` using Cramer's rule.
2262 PetscReal det = mat[0][0] * (mat[1][1] * mat[2][2] - mat[1][2] * mat[2][1]) -
2263 mat[0][1] * (mat[1][0] * mat[2][2] - mat[1][2] * mat[2][0]) +
2264 mat[0][2] * (mat[1][0] * mat[2][1] - mat[1][1] * mat[2][0]);
2265
 // Guard against a singular/near-singular metric tensor (degenerate cell).
 // NOTE(review): PETSC_ERR_FLOP_COUNT is an unusual error code for this
 // condition; PETSC_ERR_FP or PETSC_ERR_ARG_OUTOFRANGE may be more apt — confirm.
2266 if (PetscAbsReal(det) < 1.0e-18) {
2267 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FLOP_COUNT, "Transformation matrix determinant is near zero at cell (%d,%d,%d) \n", i_cell, j_cell, k_cell);
2268 }
2269
2270 PetscReal det_inv = 1.0 / det;
2271
 // Cramer numerators: det0/det1/det2 replace columns 0/1/2 of mat with q.
2272 PetscReal det0 = q[0] * (mat[1][1] * mat[2][2] - mat[1][2] * mat[2][1]) -
2273 q[1] * (mat[0][1] * mat[2][2] - mat[0][2] * mat[2][1]) +
2274 q[2] * (mat[0][1] * mat[1][2] - mat[0][2] * mat[1][1]);
2275
2276 PetscReal det1 = -q[0] * (mat[1][0] * mat[2][2] - mat[1][2] * mat[2][0]) +
2277 q[1] * (mat[0][0] * mat[2][2] - mat[0][2] * mat[2][0]) -
2278 q[2] * (mat[0][0] * mat[1][2] - mat[0][2] * mat[1][0]);
2279
2280 PetscReal det2 = q[0] * (mat[1][0] * mat[2][1] - mat[1][1] * mat[2][0]) -
2281 q[1] * (mat[0][0] * mat[2][1] - mat[0][1] * mat[2][0]) +
2282 q[2] * (mat[0][0] * mat[1][1] - mat[0][1] * mat[1][0]);
2283
2284 // Store computed Cartesian velocity in the GLOBAL Ucat array at the
2285 // array index corresponding to the cell's origin node.
2286 gucat_arr[k_cell][j_cell][i_cell].x = det0 * det_inv;
2287 gucat_arr[k_cell][j_cell][i_cell].y = det1 * det_inv;
2288 gucat_arr[k_cell][j_cell][i_cell].z = det2 * det_inv;
2289 }
2290 }
2291 }
2292
2293 // --- 6. Restore Array Access ---
2294 ierr = DMDAVecRestoreArrayRead(user->fda, user->lUcont, &lucont_arr); CHKERRQ(ierr);
2295 ierr = DMDAVecRestoreArrayRead(user->fda, user->lCsi, &lcsi_arr); CHKERRQ(ierr);
2296 ierr = DMDAVecRestoreArrayRead(user->fda, user->lEta, &leta_arr); CHKERRQ(ierr);
2297 ierr = DMDAVecRestoreArrayRead(user->fda, user->lZet, &lzet_arr); CHKERRQ(ierr);
2298 ierr = DMDAVecRestoreArrayRead(user->da, user->lNvert, &lnvert_arr); CHKERRQ(ierr);
2299 ierr = DMDAVecRestoreArrayRead(user->da, user->lAj, &laj_arr); CHKERRQ(ierr);
2300 ierr = DMDAVecRestoreArray(user->fda, user->Ucat, &gucat_arr); CHKERRQ(ierr);
2301
2302 LOG_ALLOW(GLOBAL, LOG_INFO, "Completed Contravariant-to-Cartesian velocity transformation. \n");
2304 PetscFunctionReturn(0);
2305}
Here is the caller graph for this function:

◆ SetupDomainCellDecompositionMap()

PetscErrorCode SetupDomainCellDecompositionMap ( UserCtx *  user)

Creates and distributes a map of the domain's cell decomposition to all ranks.

This function is a critical part of the simulation setup. It determines the global cell ownership for each MPI rank and makes this information available to all other ranks. This "decomposition map" is essential for the robust "Walk and Handoff" particle migration strategy, allowing any rank to quickly identify the owner of a target cell.

The process involves:

  1. Each rank gets its own node ownership information from the DMDA.
  2. It converts this node information into cell ownership ranges using the GetOwnedCellRange helper function.
  3. It participates in an MPI_Allgather collective operation to build a complete array (user->RankCellInfoMap) containing the ownership information for every rank.

This function should be called once during initialization after the primary DMDA (user->da) has been set up.

Parameters
[in,out]userPointer to the UserCtx structure. The function will allocate and populate user->RankCellInfoMap and set user->num_ranks.
Returns
PetscErrorCode 0 on success, or a non-zero PETSc error code on failure. Errors can occur if input pointers are NULL or if MPI communication fails.

Definition at line 2335 of file setup.c.

2336{
2337 PetscErrorCode ierr;
2338 DMDALocalInfo local_node_info;
2339 RankCellInfo my_cell_info;
2340 PetscMPIInt rank, size;
2341
2342 PetscFunctionBeginUser;
2344
2345 // --- 1. Input Validation and MPI Info ---
2346 if (!user) {
2347 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL in SetupDomainCellDecompositionMap.");
2348 }
2349 if (!user->da) {
2350 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->da is not initialized in SetupDomainCellDecompositionMap.");
2351 }
2352
2353 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2354 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
2355
2356 LOG_ALLOW(GLOBAL, LOG_INFO, "Setting up domain cell decomposition map for %d ranks.\n", size);
2357
2358 // --- 2. Determine Local Cell Ownership ---
2359 // Get the local node ownership information from the primary DMDA.
2360 ierr = DMDAGetLocalInfo(user->da, &local_node_info); CHKERRQ(ierr);
2361
2362 // Use the robust helper function to convert node ownership to cell ownership.
2363 // A cell's index is defined by its origin node.
2364
2365 ierr = GetOwnedCellRange(&local_node_info, 0, &my_cell_info.xs_cell, &my_cell_info.xm_cell); CHKERRQ(ierr);
2366 ierr = GetOwnedCellRange(&local_node_info, 1, &my_cell_info.ys_cell, &my_cell_info.ym_cell); CHKERRQ(ierr);
2367 ierr = GetOwnedCellRange(&local_node_info, 2, &my_cell_info.zs_cell, &my_cell_info.zm_cell); CHKERRQ(ierr);
2368
2369 // Log the calculated local ownership for debugging purposes.
2370 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Owns cells: i[%d, %d), j[%d, %d), k[%d, %d)\n",
2371 rank, my_cell_info.xs_cell, my_cell_info.xs_cell + my_cell_info.xm_cell,
2372 my_cell_info.ys_cell, my_cell_info.ys_cell + my_cell_info.ym_cell,
2373 my_cell_info.zs_cell, my_cell_info.zs_cell + my_cell_info.zm_cell);
2374
2375 // --- 3. Allocate and Distribute the Global Map ---
2376 // Allocate memory for the global map that will hold information from all ranks.
2377 ierr = PetscMalloc1(size, &user->RankCellInfoMap); CHKERRQ(ierr);
2378
2379 // Perform the collective communication to gather the `RankCellInfo` struct from every rank.
2380 // Each rank sends its `my_cell_info` and receives the complete array in `user->RankCellInfoMap`.
2381 // We use MPI_BYTE to ensure portability across different systems and struct padding.
2382 ierr = MPI_Allgather(&my_cell_info, sizeof(RankCellInfo), MPI_BYTE,
2383 user->RankCellInfoMap, sizeof(RankCellInfo), MPI_BYTE,
2384 PETSC_COMM_WORLD); CHKERRQ(ierr);
2385
2386 LOG_ALLOW(GLOBAL, LOG_INFO, "Domain cell decomposition map created and distributed successfully.\n");
2387
2389 PetscFunctionReturn(0);
2390}
PetscErrorCode GetOwnedCellRange(const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global_out, PetscInt *xm_cell_local_out)
Gets the global starting index of cells owned by this rank and the number of such cells.
Definition setup.c:1756
PetscInt ys_cell
Definition variables.h:187
PetscInt xs_cell
Definition variables.h:187
PetscInt zm_cell
Definition variables.h:188
PetscInt zs_cell
Definition variables.h:187
PetscInt xm_cell
Definition variables.h:188
RankCellInfo * RankCellInfoMap
Definition variables.h:799
PetscInt ym_cell
Definition variables.h:188
A lean struct to hold the global cell ownership range for a single MPI rank.
Definition variables.h:186
Here is the call graph for this function:
Here is the caller graph for this function:

◆ BinarySearchInt64()

PetscErrorCode BinarySearchInt64 ( PetscInt  n,
const PetscInt64  arr[],
PetscInt64  key,
PetscBool *  found 
)

Performs a binary search for a key in a sorted array of PetscInt64.

This is a standard binary search algorithm implemented as a PETSc-style helper function. It efficiently determines if a given key exists within a sorted array.

Parameters
[in]nThe number of elements in the array.
[in]arrA pointer to the sorted array of PetscInt64 values to be searched.
[in]keyThe PetscInt64 value to search for.
[out]foundA pointer to a PetscBool that will be set to PETSC_TRUE if the key is found, and PETSC_FALSE otherwise.
Returns
PetscErrorCode 0 on success, or a non-zero PETSc error code on failure.
Note
The input array arr must be sorted in ascending order for the algorithm to work correctly.

Definition at line 2411 of file setup.c.

2412{
2413 PetscInt low = 0, high = n - 1;
2414
2415 PetscFunctionBeginUser;
2417
2418 // --- 1. Input Validation ---
2419 if (!found) {
2420 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output pointer 'found' is NULL in PetscBinarySearchInt64.");
2421 }
2422 if (n > 0 && !arr) {
2423 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Input array 'arr' is NULL for n > 0.");
2424 }
2425
2426 // Initialize output
2427 *found = PETSC_FALSE;
2428
2429 // --- 2. Binary Search Algorithm ---
2430 while (low <= high) {
2431 // Use this form to prevent potential integer overflow on very large arrays
2432 PetscInt mid = low + (high - low) / 2;
2433
2434 if (arr[mid] == key) {
2435 *found = PETSC_TRUE; // Key found!
2436 break; // Exit the loop
2437 }
2438
2439 if (arr[mid] < key) {
2440 low = mid + 1; // Search in the right half
2441 } else {
2442 high = mid - 1; // Search in the left half
2443 }
2444 }
2445
2447 PetscFunctionReturn(0);
2448}
Here is the caller graph for this function:

◆ Gidx()

static PetscInt Gidx ( PetscInt  i,
PetscInt  j,
PetscInt  k,
UserCtx *  user 
)
static

Definition at line 2451 of file setup.c.

// Maps a global (i,j,k) node index into the PETSc global ordering used by
// vectors on user->da, via the DMDA's Application Ordering (AO).
2452{
2453 PetscInt nidx;
2454 DMDALocalInfo info = user->info;
2455
2456 PetscInt mx = info.mx, my = info.my;
2457
2458 AO ao;
 // NOTE(review): the return codes of DMDAGetAO/AOApplicationToPetsc are
 // ignored here, and the AO is re-fetched on every call even though this
 // helper is invoked once per cell in ComputeDivergence — confirm whether
 // hoisting the AO lookup and adding error checks is desired.
2459 DMDAGetAO(user->da, &ao);
 // Natural (application) ordering: i fastest, then j, then k.
2460 nidx=i+j*mx+k*mx*my;
2461
 // Convert in-place from application ordering to PETSc ordering.
2462 AOApplicationToPetsc(ao,1,&nidx);
2463
2464 return (nidx);
2465}
DMDALocalInfo info
Definition variables.h:735
Here is the caller graph for this function:

◆ ComputeDivergence()

PetscErrorCode ComputeDivergence ( UserCtx *  user)

Definition at line 2470 of file setup.c.

2471{
2472 DM da = user->da, fda = user->fda;
2473 DMDALocalInfo info = user->info;
2474
2475 PetscInt ti = user->simCtx->step;
2476
2477 PetscInt xs = info.xs, xe = info.xs + info.xm;
2478 PetscInt ys = info.ys, ye = info.ys + info.ym;
2479 PetscInt zs = info.zs, ze = info.zs + info.zm;
2480 PetscInt mx = info.mx, my = info.my, mz = info.mz;
2481
2482 PetscInt lxs, lys, lzs, lxe, lye, lze;
2483 PetscInt i, j, k;
2484
2485 Vec Div;
2486 PetscReal ***div, ***aj, ***nvert,***p;
2487 Cmpnts ***ucont;
2488 PetscReal maxdiv;
2489
2490 lxs = xs; lxe = xe;
2491 lys = ys; lye = ye;
2492 lzs = zs; lze = ze;
2493
2494 if (xs==0) lxs = xs+1;
2495 if (ys==0) lys = ys+1;
2496 if (zs==0) lzs = zs+1;
2497
2498 if (xe==mx) lxe = xe-1;
2499 if (ye==my) lye = ye-1;
2500 if (ze==mz) lze = ze-1;
2501
2502 PetscFunctionBeginUser;
2504
2505 DMDAVecGetArray(fda,user->lUcont, &ucont);
2506 DMDAVecGetArray(da, user->lAj, &aj);
2507 VecDuplicate(user->P, &Div);
2508 DMDAVecGetArray(da, Div, &div);
2509 DMDAVecGetArray(da, user->lNvert, &nvert);
2510 DMDAVecGetArray(da, user->P, &p);
2511 for (k=lzs; k<lze; k++) {
2512 for (j=lys; j<lye; j++){
2513 for (i=lxs; i<lxe; i++) {
2514 if (k==10 && j==10 && i==1){
2515 LOG_ALLOW(LOCAL,LOG_INFO,"Pressure[10][10][1] = %f | Pressure[10][10][0] = %f \n ",p[k][j][i],p[k][j][i-1]);
2516 }
2517
2518 if (k==10 && j==10 && i==mx-3)
2519 LOG_ALLOW(LOCAL,LOG_INFO,"Pressure[10][10][%d] = %f | Pressure[10][10][%d] = %f \n ",mx-2,p[k][j][mx-2],mx-1,p[k][j][mx-1]);
2520 }
2521 }
2522 }
2523 DMDAVecRestoreArray(da, user->P, &p);
2524
2525
2526 for (k=lzs; k<lze; k++) {
2527 for (j=lys; j<lye; j++) {
2528 for (i=lxs; i<lxe; i++) {
2529 maxdiv = fabs((ucont[k][j][i].x - ucont[k][j][i-1].x +
2530 ucont[k][j][i].y - ucont[k][j-1][i].y +
2531 ucont[k][j][i].z - ucont[k-1][j][i].z)*aj[k][j][i]);
2532 if (nvert[k][j][i] + nvert[k+1][j][i] + nvert[k-1][j][i] +
2533 nvert[k][j+1][i] + nvert[k][j-1][i] +
2534 nvert[k][j][i+1] + nvert[k][j][i-1] > 0.1) maxdiv = 0.;
2535 div[k][j][i] = maxdiv;
2536
2537 }
2538 }
2539 }
2540
2541 if (zs==0) {
2542 k=0;
2543 for (j=ys; j<ye; j++) {
2544 for (i=xs; i<xe; i++) {
2545 div[k][j][i] = 0.;
2546 }
2547 }
2548 }
2549
2550 if (ze == mz) {
2551 k=mz-1;
2552 for (j=ys; j<ye; j++) {
2553 for (i=xs; i<xe; i++) {
2554 div[k][j][i] = 0.;
2555 }
2556 }
2557 }
2558
2559 if (xs==0) {
2560 i=0;
2561 for (k=zs; k<ze; k++) {
2562 for (j=ys; j<ye; j++) {
2563 div[k][j][i] = 0.;
2564 }
2565 }
2566 }
2567
2568 if (xe==mx) {
2569 i=mx-1;
2570 for (k=zs; k<ze; k++) {
2571 for (j=ys; j<ye; j++) {
2572 div[k][j][i] = 0;
2573 }
2574 }
2575 }
2576
2577 if (ys==0) {
2578 j=0;
2579 for (k=zs; k<ze; k++) {
2580 for (i=xs; i<xe; i++) {
2581 div[k][j][i] = 0.;
2582 }
2583 }
2584 }
2585
2586 if (ye==my) {
2587 j=my-1;
2588 for (k=zs; k<ze; k++) {
2589 for (i=xs; i<xe; i++) {
2590 div[k][j][i] = 0.;
2591 }
2592 }
2593 }
2594 DMDAVecRestoreArray(da, Div, &div);
2595 PetscInt MaxFlatIndex;
2596
2597 VecMax(Div, &MaxFlatIndex, &maxdiv);
2598
2599 LOG_ALLOW(GLOBAL,LOG_INFO,"[Step %d]] The Maximum Divergence is %e at flat index %d.\n",ti,maxdiv,MaxFlatIndex);
2600
2601 user->simCtx->MaxDivFlatArg = MaxFlatIndex;
2602 user->simCtx->MaxDiv = maxdiv;
2603
2604 for (k=zs; k<ze; k++) {
2605 for (j=ys; j<ye; j++) {
2606 for (i=xs; i<xe; i++) {
2607 if (Gidx(i,j,k,user) == MaxFlatIndex) {
2608 LOG_ALLOW(GLOBAL,LOG_INFO,"[Step %d] The Maximum Divergence(%e) is at location [%d][%d][%d]. \n", ti, maxdiv,k,j,i);
2609 user->simCtx->MaxDivz = k;
2610 user->simCtx->MaxDivy = j;
2611 user->simCtx->MaxDivx = i;
2612 }
2613 }
2614 }
2615 }
2616
2617
2618 DMDAVecRestoreArray(da, user->lNvert, &nvert);
2619 DMDAVecRestoreArray(fda, user->lUcont, &ucont);
2620 DMDAVecRestoreArray(da, user->lAj, &aj);
2621 VecDestroy(&Div);
2622
2624 PetscFunctionReturn(0);
2625}
static PetscInt Gidx(PetscInt i, PetscInt j, PetscInt k, UserCtx *user)
Definition setup.c:2451
Here is the call graph for this function:
Here is the caller graph for this function:

◆ InitializeRandomGenerators()

PetscErrorCode InitializeRandomGenerators ( UserCtx *  user,
PetscRandom *  randx,
PetscRandom *  randy,
PetscRandom *  randz 
)

Initializes random number generators for assigning particle properties.

This function creates and configures separate PETSc random number generators for the x, y, and z coordinates.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[out]randxPointer to store the RNG for the x-coordinate.
[out]randyPointer to store the RNG for the y-coordinate.
[out]randzPointer to store the RNG for the z-coordinate.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 2642 of file setup.c.

2642 {
2643 PetscErrorCode ierr; // Error code for PETSc functions
2644 PetscMPIInt rank;
2645 PetscFunctionBeginUser;
2647 MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
2648
2649 // Initialize RNG for x-coordinate
2650 ierr = PetscRandomCreate(PETSC_COMM_SELF, randx); CHKERRQ(ierr);
2651 ierr = PetscRandomSetType((*randx), PETSCRAND48); CHKERRQ(ierr);
2652 ierr = PetscRandomSetInterval(*randx, user->bbox.min_coords.x, user->bbox.max_coords.x); CHKERRQ(ierr);
2653 ierr = PetscRandomSetSeed(*randx, rank + 12345); CHKERRQ(ierr); // Unique seed per rank
2654 ierr = PetscRandomSeed(*randx); CHKERRQ(ierr);
2655 LOG_ALLOW_SYNC(LOCAL,LOG_VERBOSE, "[Rank %d]Initialized RNG for X-axis.\n",rank);
2656
2657 // Initialize RNG for y-coordinate
2658 ierr = PetscRandomCreate(PETSC_COMM_SELF, randy); CHKERRQ(ierr);
2659 ierr = PetscRandomSetType((*randy), PETSCRAND48); CHKERRQ(ierr);
2660 ierr = PetscRandomSetInterval(*randy, user->bbox.min_coords.y, user->bbox.max_coords.y); CHKERRQ(ierr);
2661 ierr = PetscRandomSetSeed(*randy, rank + 67890); CHKERRQ(ierr); // Unique seed per rank
2662 ierr = PetscRandomSeed(*randy); CHKERRQ(ierr);
2663 LOG_ALLOW_SYNC(LOCAL,LOG_VERBOSE, "[Rank %d]Initialized RNG for Y-axis.\n",rank);
2664
2665 // Initialize RNG for z-coordinate
2666 ierr = PetscRandomCreate(PETSC_COMM_SELF, randz); CHKERRQ(ierr);
2667 ierr = PetscRandomSetType((*randz), PETSCRAND48); CHKERRQ(ierr);
2668 ierr = PetscRandomSetInterval(*randz, user->bbox.min_coords.z, user->bbox.max_coords.z); CHKERRQ(ierr);
2669 ierr = PetscRandomSetSeed(*randz, rank + 54321); CHKERRQ(ierr); // Unique seed per rank
2670 ierr = PetscRandomSeed(*randz); CHKERRQ(ierr);
2671 LOG_ALLOW_SYNC(LOCAL,LOG_VERBOSE, "[Rank %d]Initialized RNG for Z-axis.\n",rank);
2672
2674 PetscFunctionReturn(0);
2675}
@ LOG_VERBOSE
Extremely detailed logs, typically for development use only.
Definition logging.h:34
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
BoundingBox bbox
Definition variables.h:739
Here is the caller graph for this function:

◆ InitializeLogicalSpaceRNGs()

PetscErrorCode InitializeLogicalSpaceRNGs ( PetscRandom *  rand_logic_i,
PetscRandom *  rand_logic_j,
PetscRandom *  rand_logic_k 
)

Initializes random number generators for logical-space operations, producing values in [0.0, 1.0).

This function creates and configures three separate PETSc random number generators, one for each logical dimension (i, j, k — equivalently xi, eta, zeta). Each RNG is configured to produce uniformly distributed real numbers in the interval [0.0, 1.0). These are typically used for selecting owned cells or generating intra-cell logical coordinates.

Parameters
[out]rand_logic_iPointer to store the RNG for the i-logical dimension.
[out]rand_logic_jPointer to store the RNG for the j-logical dimension.
[out]rand_logic_kPointer to store the RNG for the k-logical dimension.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 2693 of file setup.c.

2693 {
2694 PetscErrorCode ierr;
2695 PetscMPIInt rank;
2696 PetscFunctionBeginUser;
2697
2699
2700 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2701
2702 // --- RNG for i-logical dimension ---
2703 ierr = PetscRandomCreate(PETSC_COMM_SELF, rand_logic_i); CHKERRQ(ierr);
2704 ierr = PetscRandomSetType((*rand_logic_i), PETSCRAND48); CHKERRQ(ierr);
2705 ierr = PetscRandomSetInterval(*rand_logic_i, 0.0, 1.0); CHKERRQ(ierr); // Key change: [0,1)
2706 ierr = PetscRandomSetSeed(*rand_logic_i, rank + 202401); CHKERRQ(ierr); // Unique seed
2707 ierr = PetscRandomSeed(*rand_logic_i); CHKERRQ(ierr);
2708 LOG_ALLOW(LOCAL,LOG_VERBOSE, "[Rank %d] Initialized RNG for i-logical dimension [0,1).\n",rank);
2709
2710 // --- RNG for j-logical dimension ---
2711 ierr = PetscRandomCreate(PETSC_COMM_SELF, rand_logic_j); CHKERRQ(ierr);
2712 ierr = PetscRandomSetType((*rand_logic_j), PETSCRAND48); CHKERRQ(ierr);
2713 ierr = PetscRandomSetInterval(*rand_logic_j, 0.0, 1.0); CHKERRQ(ierr); // Key change: [0,1)
2714 ierr = PetscRandomSetSeed(*rand_logic_j, rank + 202402); CHKERRQ(ierr);
2715 ierr = PetscRandomSeed(*rand_logic_j); CHKERRQ(ierr);
2716 LOG_ALLOW(LOCAL,LOG_VERBOSE, "[Rank %d] Initialized RNG for j-logical dimension [0,1).\n",rank);
2717
2718 // --- RNG for k-logical dimension ---
2719 ierr = PetscRandomCreate(PETSC_COMM_SELF, rand_logic_k); CHKERRQ(ierr);
2720 ierr = PetscRandomSetType((*rand_logic_k), PETSCRAND48); CHKERRQ(ierr);
2721 ierr = PetscRandomSetInterval(*rand_logic_k, 0.0, 1.0); CHKERRQ(ierr); // Key change: [0,1)
2722 ierr = PetscRandomSetSeed(*rand_logic_k, rank + 202403); CHKERRQ(ierr);
2723 ierr = PetscRandomSeed(*rand_logic_k); CHKERRQ(ierr);
2724 LOG_ALLOW(LOCAL,LOG_VERBOSE, "[Rank %d] Initialized RNG for k-logical dimension [0,1).\n",rank);
2725
2726
2728 PetscFunctionReturn(0);
2729}
Here is the caller graph for this function:

◆ InitializeBrownianRNG()

PetscErrorCode InitializeBrownianRNG ( SimCtx simCtx)

Initializes a single master RNG for time-stepping physics (Brownian motion).

Configures it to produce a uniform distribution on [0, 1), which is required as input for the Box–Muller transformation.

Parameters
[in,out]simCtxPointer to the Simulation Context.
Returns
PetscErrorCode

Definition at line 2740 of file setup.c.

2740 {
2741 PetscErrorCode ierr;
2742 PetscMPIInt rank;
2743
2744 PetscFunctionBeginUser;
2746
2747 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2748
2749 // 1. Create the generator (stored in SimCtx, not UserCtx, as it is global physics)
2750 ierr = PetscRandomCreate(PETSC_COMM_WORLD, &simCtx->BrownianMotionRNG); CHKERRQ(ierr);
2751 ierr = PetscRandomSetType(simCtx->BrownianMotionRNG, PETSCRAND48); CHKERRQ(ierr);
2752
2753 // 2. CRITICAL: Set interval to [0, 1).
2754 // This is required for the Gaussian math to work.
2755 ierr = PetscRandomSetInterval(simCtx->BrownianMotionRNG, 0.0, 1.0); CHKERRQ(ierr);
2756
2757 // 3. Seed based on Rank to ensure spatial randomness
2758 // Multiplying by a large prime helps separate the streams significantly
2759 unsigned long seed = (unsigned long)rank * 987654321 + (unsigned long)time(NULL);
2760 ierr = PetscRandomSetSeed(simCtx->BrownianMotionRNG, seed); CHKERRQ(ierr);
2761 ierr = PetscRandomSeed(simCtx->BrownianMotionRNG); CHKERRQ(ierr);
2762
2763 LOG_ALLOW(LOCAL, LOG_VERBOSE, "[Rank %d] Initialized Brownian Physics RNG.\n", rank);
2764
2766 PetscFunctionReturn(0);
2767}
Here is the caller graph for this function:

◆ TransformScalarDerivativesToPhysical()

void TransformScalarDerivativesToPhysical ( PetscReal  jacobian,
Cmpnts  csi_metrics,
Cmpnts  eta_metrics,
Cmpnts  zet_metrics,
PetscReal  dPhi_dcsi,
PetscReal  dPhi_deta,
PetscReal  dPhi_dzet,
Cmpnts gradPhi 
)

Transforms scalar derivatives from computational space to physical space using the chain rule.

Formula: dPhi/dx = J * ( dPhi/dCsi * dCsi/dx + dPhi/dEta * dEta/dx + ... )

Definition at line 2779 of file setup.c.

2787{
2788 // Gradient X component
2789 gradPhi->x = jacobian * (dPhi_dcsi * csi_metrics.x + dPhi_deta * eta_metrics.x + dPhi_dzet * zet_metrics.x);
2790
2791 // Gradient Y component
2792 gradPhi->y = jacobian * (dPhi_dcsi * csi_metrics.y + dPhi_deta * eta_metrics.y + dPhi_dzet * zet_metrics.y);
2793
2794 // Gradient Z component
2795 gradPhi->z = jacobian * (dPhi_dcsi * csi_metrics.z + dPhi_deta * eta_metrics.z + dPhi_dzet * zet_metrics.z);
2796}
Here is the caller graph for this function:

◆ TransformDerivativesToPhysical()

static void TransformDerivativesToPhysical ( PetscReal  jacobian,
Cmpnts  csi_metrics,
Cmpnts  eta_metrics,
Cmpnts  zet_metrics,
Cmpnts  deriv_csi,
Cmpnts  deriv_eta,
Cmpnts  deriv_zet,
Cmpnts dudx,
Cmpnts dvdx,
Cmpnts dwdx 
)
static

Transforms derivatives from computational space to physical space using the chain rule.

Definition at line 2803 of file setup.c.

2806{
2807 // Derivatives of the first component (u)
2808 dudx->x = jacobian * (deriv_csi.x * csi_metrics.x + deriv_eta.x * eta_metrics.x + deriv_zet.x * zet_metrics.x);
2809 dudx->y = jacobian * (deriv_csi.x * csi_metrics.y + deriv_eta.x * eta_metrics.y + deriv_zet.x * zet_metrics.y);
2810 dudx->z = jacobian * (deriv_csi.x * csi_metrics.z + deriv_eta.x * eta_metrics.z + deriv_zet.x * zet_metrics.z);
2811 // Derivatives of the second component (v)
2812 dvdx->x = jacobian * (deriv_csi.y * csi_metrics.x + deriv_eta.y * eta_metrics.x + deriv_zet.y * zet_metrics.x);
2813 dvdx->y = jacobian * (deriv_csi.y * csi_metrics.y + deriv_eta.y * eta_metrics.y + deriv_zet.y * zet_metrics.y);
2814 dvdx->z = jacobian * (deriv_csi.y * csi_metrics.z + deriv_eta.y * eta_metrics.z + deriv_zet.y * zet_metrics.z);
2815 // Derivatives of the third component (w)
2816 dwdx->x = jacobian * (deriv_csi.z * csi_metrics.x + deriv_eta.z * eta_metrics.x + deriv_zet.z * zet_metrics.x);
2817 dwdx->y = jacobian * (deriv_csi.z * csi_metrics.y + deriv_eta.z * eta_metrics.y + deriv_zet.z * zet_metrics.y);
2818 dwdx->z = jacobian * (deriv_csi.z * csi_metrics.z + deriv_eta.z * eta_metrics.z + deriv_zet.z * zet_metrics.z);
2819}
Here is the caller graph for this function:

◆ ComputeScalarFieldDerivatives()

PetscErrorCode ComputeScalarFieldDerivatives ( UserCtx user,
PetscInt  i,
PetscInt  j,
PetscInt  k,
PetscReal ***  field_data,
Cmpnts grad 
)

Computes the gradient of a cell-centered SCALAR field at a specific grid point.

Parameters
userThe user context.
i,j,kThe grid indices.
field_data3D array pointer to the scalar field (PetscReal***).
gradOutput: A Cmpnts struct storing [dPhi/dx, dPhi/dy, dPhi/dz].
Returns
PetscErrorCode

Definition at line 2832 of file setup.c.

2834{
2835 PetscErrorCode ierr;
2836 Cmpnts ***csi, ***eta, ***zet;
2837 PetscReal ***jac;
2838 PetscReal d_csi, d_eta, d_zet;
2839
2840 PetscFunctionBeginUser;
2841
2842 // 1. Get read-only access to metrics
2843 ierr = DMDAVecGetArrayRead(user->fda, user->lCsi, &csi); CHKERRQ(ierr);
2844 ierr = DMDAVecGetArrayRead(user->fda, user->lEta, &eta); CHKERRQ(ierr);
2845 ierr = DMDAVecGetArrayRead(user->fda, user->lZet, &zet); CHKERRQ(ierr);
2846 ierr = DMDAVecGetArrayRead(user->da, user->lAj, &jac); CHKERRQ(ierr);
2847
2848 // 2. Compute derivatives in computational space (Central Difference)
2849 // Assumes ghosts are available at i+/-1
2850 d_csi = 0.5 * (field_data[k][j][i+1] - field_data[k][j][i-1]);
2851 d_eta = 0.5 * (field_data[k][j+1][i] - field_data[k][j-1][i]);
2852 d_zet = 0.5 * (field_data[k+1][j][i] - field_data[k-1][j][i]);
2853
2854 // 3. Transform to physical space
2856 csi[k][j][i], eta[k][j][i], zet[k][j][i],
2857 d_csi, d_eta, d_zet,
2858 grad);
2859
2860 // 4. Restore arrays
2861 ierr = DMDAVecRestoreArrayRead(user->fda, user->lCsi, &csi); CHKERRQ(ierr);
2862 ierr = DMDAVecRestoreArrayRead(user->fda, user->lEta, &eta); CHKERRQ(ierr);
2863 ierr = DMDAVecRestoreArrayRead(user->fda, user->lZet, &zet); CHKERRQ(ierr);
2864 ierr = DMDAVecRestoreArrayRead(user->da, user->lAj, &jac); CHKERRQ(ierr);
2865
2866 PetscFunctionReturn(0);
2867}
void TransformScalarDerivativesToPhysical(PetscReal jacobian, Cmpnts csi_metrics, Cmpnts eta_metrics, Cmpnts zet_metrics, PetscReal dPhi_dcsi, PetscReal dPhi_deta, PetscReal dPhi_dzet, Cmpnts *gradPhi)
Transforms scalar derivatives from computational space to physical space using the chain rule.
Definition setup.c:2779
Here is the call graph for this function:

◆ ComputeVectorFieldDerivatives()

PetscErrorCode ComputeVectorFieldDerivatives ( UserCtx user,
PetscInt  i,
PetscInt  j,
PetscInt  k,
Cmpnts ***  field_data,
Cmpnts dudx,
Cmpnts dvdx,
Cmpnts dwdx 
)

Computes the derivatives of a cell-centered vector field at a specific grid point.

This function orchestrates the calculation of spatial derivatives. It first computes the derivatives in computational space (d/dcsi, d/deta, d/dzet) using a central difference scheme and then transforms them into physical space (d/dx, d/dy, d/dz).

Parameters
userThe user context for the current computational block.
i,j,kThe grid indices of the cell center where derivatives are required.
field_dataA 3D array pointer to the raw local data of the vector field (e.g., from lUcat).
dudxOutput: A Cmpnts struct storing [du/dx, du/dy, du/dz].
dvdxOutput: A Cmpnts struct storing [dv/dx, dv/dy, dv/dz].
dwdxOutput: A Cmpnts struct storing [dw/dx, dw/dy, dw/dz].
Returns
PetscErrorCode 0 on success.

Definition at line 2886 of file setup.c.

2888{
2889 PetscErrorCode ierr;
2890 Cmpnts ***csi, ***eta, ***zet;
2891 PetscReal ***jac;
2892 PetscFunctionBeginUser;
2893
2894 // 1. Get read-only access to the necessary metric data arrays
2895 ierr = DMDAVecGetArrayRead(user->fda, user->lCsi, &csi); CHKERRQ(ierr);
2896 ierr = DMDAVecGetArrayRead(user->fda, user->lEta, &eta); CHKERRQ(ierr);
2897 ierr = DMDAVecGetArrayRead(user->fda, user->lZet, &zet); CHKERRQ(ierr);
2898 ierr = DMDAVecGetArrayRead(user->da, user->lAj, &jac); CHKERRQ(ierr);
2899
2900 // 2. Calculate derivatives in computational space using central differencing
2901 Cmpnts deriv_csi, deriv_eta, deriv_zet;
2902 deriv_csi.x = (field_data[k][j][i+1].x - field_data[k][j][i-1].x) * 0.5;
2903 deriv_csi.y = (field_data[k][j][i+1].y - field_data[k][j][i-1].y) * 0.5;
2904 deriv_csi.z = (field_data[k][j][i+1].z - field_data[k][j][i-1].z) * 0.5;
2905
2906 deriv_eta.x = (field_data[k][j+1][i].x - field_data[k][j-1][i].x) * 0.5;
2907 deriv_eta.y = (field_data[k][j+1][i].y - field_data[k][j-1][i].y) * 0.5;
2908 deriv_eta.z = (field_data[k][j+1][i].z - field_data[k][j-1][i].z) * 0.5;
2909
2910 deriv_zet.x = (field_data[k+1][j][i].x - field_data[k-1][j][i].x) * 0.5;
2911 deriv_zet.y = (field_data[k+1][j][i].y - field_data[k-1][j][i].y) * 0.5;
2912 deriv_zet.z = (field_data[k+1][j][i].z - field_data[k-1][j][i].z) * 0.5;
2913
2914 // 3. Transform derivatives to physical space
2915 TransformDerivativesToPhysical(jac[k][j][i], csi[k][j][i], eta[k][j][i], zet[k][j][i],
2916 deriv_csi, deriv_eta, deriv_zet,
2917 dudx, dvdx, dwdx);
2918
2919 // 4. Restore access to the PETSc data arrays
2920 ierr = DMDAVecRestoreArrayRead(user->fda, user->lCsi, &csi); CHKERRQ(ierr);
2921 ierr = DMDAVecRestoreArrayRead(user->fda, user->lEta, &eta); CHKERRQ(ierr);
2922 ierr = DMDAVecRestoreArrayRead(user->fda, user->lZet, &zet); CHKERRQ(ierr);
2923 ierr = DMDAVecRestoreArrayRead(user->da, user->lAj, &jac); CHKERRQ(ierr);
2924
2925 PetscFunctionReturn(0);
2926}
static void TransformDerivativesToPhysical(PetscReal jacobian, Cmpnts csi_metrics, Cmpnts eta_metrics, Cmpnts zet_metrics, Cmpnts deriv_csi, Cmpnts deriv_eta, Cmpnts deriv_zet, Cmpnts *dudx, Cmpnts *dvdx, Cmpnts *dwdx)
Transforms derivatives from computational space to physical space using the chain rule.
Definition setup.c:2803
Here is the call graph for this function:
Here is the caller graph for this function:

◆ DestroyUserVectors()

PetscErrorCode DestroyUserVectors ( UserCtx user)

Destroys all Vec objects in a UserCtx structure.

Destroys all PETSc Vec objects within a single UserCtx structure.

This function systematically destroys all PETSc Vec objects allocated in a UserCtx, with proper conditional checks for vectors that are only allocated under certain conditions (finest level, turbulence models, particles, postprocessor mode, etc.).

Parameters
userThe UserCtx structure whose vectors should be destroyed.
Returns
PetscErrorCode 0 on success.

Definition at line 2946 of file setup.c.

2947{
2948 PetscErrorCode ierr;
2949 PetscFunctionBeginUser;
2950
2951 // --- Group A: Primary Flow Fields (Always allocated at all levels) ---
2952 if (user->Ucont) { ierr = VecDestroy(&user->Ucont); CHKERRQ(ierr); }
2953 if (user->lUcont) { ierr = VecDestroy(&user->lUcont); CHKERRQ(ierr); }
2954 if (user->Ucat) { ierr = VecDestroy(&user->Ucat); CHKERRQ(ierr); }
2955 if (user->lUcat) { ierr = VecDestroy(&user->lUcat); CHKERRQ(ierr); }
2956 if (user->P) { ierr = VecDestroy(&user->P); CHKERRQ(ierr); }
2957 if (user->lP) { ierr = VecDestroy(&user->lP); CHKERRQ(ierr); }
2958 if (user->Nvert) { ierr = VecDestroy(&user->Nvert); CHKERRQ(ierr); }
2959 if (user->lNvert) { ierr = VecDestroy(&user->lNvert); CHKERRQ(ierr); }
2960
2961 // --- Group A2: Derived Flow Fields (Conditional) ---
2962 if(user->Diffusivity) {ierr = VecDestroy(&user->Diffusivity); CHKERRQ(ierr);}
2963 if(user->lDiffusivity){ierr = VecDestroy(&user->lDiffusivity); CHKERRQ(ierr);}
2964 if(user->DiffusivityGradient){ierr = VecDestroy(&user->DiffusivityGradient); CHKERRQ(ierr);}
2965 if(user->lDiffusivityGradient){ierr = VecDestroy(&user->lDiffusivityGradient); CHKERRQ(ierr);}
2966
2967 // --- Group B: Solver Work Vectors (All levels) ---
2968 if (user->Phi) { ierr = VecDestroy(&user->Phi); CHKERRQ(ierr); }
2969 if (user->lPhi) { ierr = VecDestroy(&user->lPhi); CHKERRQ(ierr); }
2970
2971 // --- Group C: Time-Stepping Vectors (Finest level only) ---
2972 if (user->Ucont_o) { ierr = VecDestroy(&user->Ucont_o); CHKERRQ(ierr); }
2973 if (user->Ucont_rm1) { ierr = VecDestroy(&user->Ucont_rm1); CHKERRQ(ierr); }
2974 if (user->Ucat_o) { ierr = VecDestroy(&user->Ucat_o); CHKERRQ(ierr); }
2975 if (user->P_o) { ierr = VecDestroy(&user->P_o); CHKERRQ(ierr); }
2976 if (user->Nvert_o) { ierr = VecDestroy(&user->Nvert_o); CHKERRQ(ierr); }
2977 if (user->lUcont_o) { ierr = VecDestroy(&user->lUcont_o); CHKERRQ(ierr); }
2978 if (user->lUcont_rm1) { ierr = VecDestroy(&user->lUcont_rm1); CHKERRQ(ierr); }
2979 if (user->lNvert_o) { ierr = VecDestroy(&user->lNvert_o); CHKERRQ(ierr); }
2980
2981 // --- Group D: Grid Metrics - Face Centered (All levels) ---
2982 if (user->Csi) { ierr = VecDestroy(&user->Csi); CHKERRQ(ierr); }
2983 if (user->Eta) { ierr = VecDestroy(&user->Eta); CHKERRQ(ierr); }
2984 if (user->Zet) { ierr = VecDestroy(&user->Zet); CHKERRQ(ierr); }
2985 if (user->Aj) { ierr = VecDestroy(&user->Aj); CHKERRQ(ierr); }
2986 if (user->lCsi) { ierr = VecDestroy(&user->lCsi); CHKERRQ(ierr); }
2987 if (user->lEta) { ierr = VecDestroy(&user->lEta); CHKERRQ(ierr); }
2988 if (user->lZet) { ierr = VecDestroy(&user->lZet); CHKERRQ(ierr); }
2989 if (user->lAj) { ierr = VecDestroy(&user->lAj); CHKERRQ(ierr); }
2990
2991 // --- Group E: Grid Metrics - Face Centered (All levels) ---
2992 if (user->ICsi) { ierr = VecDestroy(&user->ICsi); CHKERRQ(ierr); }
2993 if (user->IEta) { ierr = VecDestroy(&user->IEta); CHKERRQ(ierr); }
2994 if (user->IZet) { ierr = VecDestroy(&user->IZet); CHKERRQ(ierr); }
2995 if (user->JCsi) { ierr = VecDestroy(&user->JCsi); CHKERRQ(ierr); }
2996 if (user->JEta) { ierr = VecDestroy(&user->JEta); CHKERRQ(ierr); }
2997 if (user->JZet) { ierr = VecDestroy(&user->JZet); CHKERRQ(ierr); }
2998 if (user->KCsi) { ierr = VecDestroy(&user->KCsi); CHKERRQ(ierr); }
2999 if (user->KEta) { ierr = VecDestroy(&user->KEta); CHKERRQ(ierr); }
3000 if (user->KZet) { ierr = VecDestroy(&user->KZet); CHKERRQ(ierr); }
3001 if (user->IAj) { ierr = VecDestroy(&user->IAj); CHKERRQ(ierr); }
3002 if (user->JAj) { ierr = VecDestroy(&user->JAj); CHKERRQ(ierr); }
3003 if (user->KAj) { ierr = VecDestroy(&user->KAj); CHKERRQ(ierr); }
3004 if (user->lICsi) { ierr = VecDestroy(&user->lICsi); CHKERRQ(ierr); }
3005 if (user->lIEta) { ierr = VecDestroy(&user->lIEta); CHKERRQ(ierr); }
3006 if (user->lIZet) { ierr = VecDestroy(&user->lIZet); CHKERRQ(ierr); }
3007 if (user->lJCsi) { ierr = VecDestroy(&user->lJCsi); CHKERRQ(ierr); }
3008 if (user->lJEta) { ierr = VecDestroy(&user->lJEta); CHKERRQ(ierr); }
3009 if (user->lJZet) { ierr = VecDestroy(&user->lJZet); CHKERRQ(ierr); }
3010 if (user->lKCsi) { ierr = VecDestroy(&user->lKCsi); CHKERRQ(ierr); }
3011 if (user->lKEta) { ierr = VecDestroy(&user->lKEta); CHKERRQ(ierr); }
3012 if (user->lKZet) { ierr = VecDestroy(&user->lKZet); CHKERRQ(ierr); }
3013 if (user->lIAj) { ierr = VecDestroy(&user->lIAj); CHKERRQ(ierr); }
3014 if (user->lJAj) { ierr = VecDestroy(&user->lJAj); CHKERRQ(ierr); }
3015 if (user->lKAj) { ierr = VecDestroy(&user->lKAj); CHKERRQ(ierr); }
3016
3017 // --- Group F: Cell/Face Coordinates and Grid Spacing (All levels) ---
3018 if (user->Cent) { ierr = VecDestroy(&user->Cent); CHKERRQ(ierr); }
3019 if (user->lCent) { ierr = VecDestroy(&user->lCent); CHKERRQ(ierr); }
3020 if (user->GridSpace) { ierr = VecDestroy(&user->GridSpace); CHKERRQ(ierr); }
3021 if (user->lGridSpace) { ierr = VecDestroy(&user->lGridSpace); CHKERRQ(ierr); }
3022 if (user->Centx) { ierr = VecDestroy(&user->Centx); CHKERRQ(ierr); }
3023 if (user->Centy) { ierr = VecDestroy(&user->Centy); CHKERRQ(ierr); }
3024 if (user->Centz) { ierr = VecDestroy(&user->Centz); CHKERRQ(ierr); }
3025
3026 // --- Group G: Turbulence Model Vectors (Finest level, conditional on les/rans) ---
3027 if (user->Nu_t) { ierr = VecDestroy(&user->Nu_t); CHKERRQ(ierr); }
3028 if (user->lNu_t) { ierr = VecDestroy(&user->lNu_t); CHKERRQ(ierr); }
3029 if (user->CS) { ierr = VecDestroy(&user->CS); CHKERRQ(ierr); }
3030 if (user->lCs) { ierr = VecDestroy(&user->lCs); CHKERRQ(ierr); }
3031 if (user->lFriction_Velocity) { ierr = VecDestroy(&user->lFriction_Velocity); CHKERRQ(ierr); }
3032 if (user->K_Omega) { ierr = VecDestroy(&user->K_Omega); CHKERRQ(ierr); }
3033 if (user->lK_Omega) { ierr = VecDestroy(&user->lK_Omega); CHKERRQ(ierr); }
3034 if (user->K_Omega_o) { ierr = VecDestroy(&user->K_Omega_o); CHKERRQ(ierr); }
3035 if (user->lK_Omega_o) { ierr = VecDestroy(&user->lK_Omega_o); CHKERRQ(ierr); }
3036
3037 // --- Group H: Particle Vectors (Finest level, conditional on np > 0) ---
3038 if (user->ParticleCount) { ierr = VecDestroy(&user->ParticleCount); CHKERRQ(ierr); }
3039 if (user->lParticleCount) { ierr = VecDestroy(&user->lParticleCount); CHKERRQ(ierr); }
3040 if (user->Psi) { ierr = VecDestroy(&user->Psi); CHKERRQ(ierr); }
3041 if (user->lPsi) { ierr = VecDestroy(&user->lPsi); CHKERRQ(ierr); }
3042
3043 // --- Group I: Boundary Condition Vectors (All levels) ---
3044 if (user->Bcs.Ubcs) { ierr = VecDestroy(&user->Bcs.Ubcs); CHKERRQ(ierr); }
3045 if (user->Bcs.Uch) { ierr = VecDestroy(&user->Bcs.Uch); CHKERRQ(ierr); }
3046
3047 // --- Group J: Post-Processing Vectors (Finest level, postprocessor mode) ---
3048 if (user->P_nodal) { ierr = VecDestroy(&user->P_nodal); CHKERRQ(ierr); }
3049 if (user->Ucat_nodal) { ierr = VecDestroy(&user->Ucat_nodal); CHKERRQ(ierr); }
3050 if (user->Qcrit) { ierr = VecDestroy(&user->Qcrit); CHKERRQ(ierr); }
3051 if (user->Psi_nodal) { ierr = VecDestroy(&user->Psi_nodal); CHKERRQ(ierr); }
3052
3053 // --- Group K: Interpolation Vectors (Lazy allocation) ---
3054 if (user->CellFieldAtCorner) { ierr = VecDestroy(&user->CellFieldAtCorner); CHKERRQ(ierr); }
3055 if (user->lCellFieldAtCorner) { ierr = VecDestroy(&user->lCellFieldAtCorner); CHKERRQ(ierr); }
3056
3057 // --- Group L: Statistical Averaging Vectors (If allocated) ---
3058 if (user->Ucat_sum) { ierr = VecDestroy(&user->Ucat_sum); CHKERRQ(ierr); }
3059 if (user->Ucat_cross_sum) { ierr = VecDestroy(&user->Ucat_cross_sum); CHKERRQ(ierr); }
3060 if (user->Ucat_square_sum) { ierr = VecDestroy(&user->Ucat_square_sum); CHKERRQ(ierr); }
3061 if (user->P_sum) { ierr = VecDestroy(&user->P_sum); CHKERRQ(ierr); }
3062
3063 // --- Group M: Implicit Solver Temporary Vectors (Destroyed after use, but check anyway) ---
3064 if (user->Rhs) { ierr = VecDestroy(&user->Rhs); CHKERRQ(ierr); }
3065 if (user->dUcont) { ierr = VecDestroy(&user->dUcont); CHKERRQ(ierr); }
3066 if (user->pUcont) { ierr = VecDestroy(&user->pUcont); CHKERRQ(ierr); }
3067
3068 // --- Group N: Poisson Solver Vectors (Destroyed after solve, but check anyway) ---
3069 if (user->B) { ierr = VecDestroy(&user->B); CHKERRQ(ierr); }
3070 if (user->R) { ierr = VecDestroy(&user->R); CHKERRQ(ierr); }
3071
3072 LOG_ALLOW(LOCAL, LOG_DEBUG, "All vectors destroyed for UserCtx.\n");
3073 PetscFunctionReturn(0);
3074}
Vec Rhs
Definition variables.h:763
Vec K_Omega_o
Definition variables.h:783
Vec K_Omega
Definition variables.h:783
Vec lCellFieldAtCorner
Definition variables.h:764
Vec Ucat_square_sum
Definition variables.h:786
Vec P_sum
Definition variables.h:786
Vec pUcont
Definition variables.h:763
Vec lK_Omega_o
Definition variables.h:783
Vec lK_Omega
Definition variables.h:783
Vec CellFieldAtCorner
Definition variables.h:764
Vec Ucat_sum
Definition variables.h:786
Vec dUcont
Definition variables.h:763
Vec Ucat_cross_sum
Definition variables.h:786
Here is the caller graph for this function:

◆ DestroyUserContext()

PetscErrorCode DestroyUserContext ( UserCtx user)

Destroys all resources allocated within a single UserCtx structure.

This function cleans up all memory and PETSc objects associated with a single UserCtx (grid level). It calls the helper functions and destroys remaining objects in the proper dependency order:

  1. Boundary conditions (handlers and their data)
  2. All PETSc vectors (via DestroyUserVectors)
  3. Matrix and solver objects (A, C, MR, MP, ksp, nullsp)
  4. Application ordering (AO)
  5. Distributed mesh objects (DMs) - most derived first
  6. Raw PetscMalloc'd arrays (RankCellInfoMap, KSKE)

This function should be called for each UserCtx in the multigrid hierarchy.

Parameters
[in,out]userPointer to the UserCtx to be destroyed.
Returns
PetscErrorCode 0 on success.

Definition at line 3096 of file setup.c.

3097{
3098 PetscErrorCode ierr;
3099 PetscFunctionBeginUser;
3100
3101 if (!user) {
3102 LOG_ALLOW(LOCAL, LOG_WARNING, "DestroyUserContext called with NULL user pointer.\n");
3103 PetscFunctionReturn(0);
3104 }
3105
3106 LOG_ALLOW(LOCAL, LOG_INFO, "Destroying UserCtx at level %d...\n", user->thislevel);
3107
3108 // --- Step 1: Destroy Boundary Condition System ---
3109 // This handles all BC handlers and their private data.
3110 ierr = BoundarySystem_Destroy(user); CHKERRQ(ierr);
3111 LOG_ALLOW(LOCAL, LOG_DEBUG, " Boundary system destroyed.\n");
3112
3113 // --- Step 2: Destroy All Vectors ---
3114 // Handles ~74 Vec objects with proper NULL checking.
3115 ierr = DestroyUserVectors(user); CHKERRQ(ierr);
3116 LOG_ALLOW(LOCAL, LOG_DEBUG, " All vectors destroyed.\n");
3117
3118 // --- Step 3: Destroy Matrix and Solver Objects ---
3119 // Destroy pressure-Poisson matrices and solver.
3120 if (user->A) {
3121 ierr = MatDestroy(&user->A); CHKERRQ(ierr);
3122 LOG_ALLOW(LOCAL, LOG_DEBUG, " Matrix A destroyed.\n");
3123 }
3124 if (user->C) {
3125 ierr = MatDestroy(&user->C); CHKERRQ(ierr);
3126 LOG_ALLOW(LOCAL, LOG_DEBUG, " Matrix C destroyed.\n");
3127 }
3128 if (user->MR) {
3129 ierr = MatDestroy(&user->MR); CHKERRQ(ierr);
3130 LOG_ALLOW(LOCAL, LOG_DEBUG, " Matrix MR destroyed.\n");
3131 }
3132 if (user->MP) {
3133 ierr = MatDestroy(&user->MP); CHKERRQ(ierr);
3134 LOG_ALLOW(LOCAL, LOG_DEBUG, " Matrix MP destroyed.\n");
3135 }
3136 if (user->ksp) {
3137 ierr = KSPDestroy(&user->ksp); CHKERRQ(ierr);
3138 LOG_ALLOW(LOCAL, LOG_DEBUG, " KSP solver destroyed.\n");
3139 }
3140 if (user->nullsp) {
3141 ierr = MatNullSpaceDestroy(&user->nullsp); CHKERRQ(ierr);
3142 LOG_ALLOW(LOCAL, LOG_DEBUG, " MatNullSpace destroyed.\n");
3143 }
3144
3145 // --- Step 4: Destroy Application Ordering ---
3146 if (user->ao) {
3147 ierr = AODestroy(&user->ao); CHKERRQ(ierr);
3148 LOG_ALLOW(LOCAL, LOG_DEBUG, " AO destroyed.\n");
3149 }
3150
3151 // --- Step 5: Destroy DM Objects ---
3152 // Destroy in reverse order of dependency: post_swarm, swarm, fda2, fda, da
3153 if (user->post_swarm) {
3154 ierr = DMDestroy(&user->post_swarm); CHKERRQ(ierr);
3155 LOG_ALLOW(LOCAL, LOG_DEBUG, " post_swarm DM destroyed.\n");
3156 }
3157 if (user->swarm) {
3158 ierr = DMDestroy(&user->swarm); CHKERRQ(ierr);
3159 LOG_ALLOW(LOCAL, LOG_DEBUG, " swarm DM destroyed.\n");
3160 }
3161 if (user->fda2) {
3162 ierr = DMDestroy(&user->fda2); CHKERRQ(ierr);
3163 LOG_ALLOW(LOCAL, LOG_DEBUG, " fda2 DM destroyed.\n");
3164 }
3165 if (user->da) {
3166 ierr = DMDestroy(&user->da); CHKERRQ(ierr);
3167 LOG_ALLOW(LOCAL, LOG_DEBUG, " da DM destroyed.\n");
3168 }
3169
3170 // --- Step 6: Free PetscMalloc'd Arrays ---
3171 // Free arrays allocated with PetscMalloc1
3172 if (user->RankCellInfoMap) {
3173 ierr = PetscFree(user->RankCellInfoMap); CHKERRQ(ierr);
3174 user->RankCellInfoMap = NULL;
3175 LOG_ALLOW(LOCAL, LOG_DEBUG, " RankCellInfoMap freed.\n");
3176 }
3177 if (user->KSKE) {
3178 ierr = PetscFree(user->KSKE); CHKERRQ(ierr);
3179 user->KSKE = NULL;
3180 LOG_ALLOW(LOCAL, LOG_DEBUG, " KSKE array freed.\n");
3181 }
3182
3183 LOG_ALLOW(LOCAL, LOG_INFO, "UserCtx at level %d fully destroyed.\n", user->thislevel);
3184 PetscFunctionReturn(0);
3185}
PetscErrorCode BoundarySystem_Destroy(UserCtx *user)
Cleans up and destroys all boundary system resources.
PetscErrorCode DestroyUserVectors(UserCtx *user)
Destroys all Vec objects in a UserCtx structure.
Definition setup.c:2946
MatNullSpace nullsp
Definition variables.h:767
PetscInt * KSKE
Definition variables.h:768
DM post_swarm
Definition variables.h:804
KSP ksp
Definition variables.h:767
Here is the call graph for this function:
Here is the caller graph for this function:

◆ FinalizeSimulation()

PetscErrorCode FinalizeSimulation ( SimCtx simCtx)

Main cleanup function for the entire simulation context.

This function is responsible for destroying ALL memory and PETSc objects allocated during the simulation, including:

  • All UserCtx structures in the multigrid hierarchy (via DestroyUserContext)
  • The multigrid management structures (UserMG, MGCtx array)
  • All SimCtx-level objects (logviewer, dm_swarm, bboxlist, string arrays, etc.)

This function should be called ONCE at the end of the simulation, after all computation is complete, but BEFORE PetscFinalize().

Call order in main:

  1. [Simulation runs]
  2. ProfilingFinalize(simCtx);
  3. FinalizeSimulation(simCtx); <- This function
  4. PetscFinalize();
Parameters
[in,out]simCtxPointer to the master SimulationContext to be destroyed.
Returns
PetscErrorCode 0 on success.

Definition at line 3211 of file setup.c.

3212{
3213 PetscErrorCode ierr;
3214 PetscFunctionBeginUser;
3215
3216 if (!simCtx) {
3217 LOG_ALLOW(GLOBAL, LOG_WARNING, "FinalizeSimulation called with NULL SimCtx pointer.\n");
3218 PetscFunctionReturn(0);
3219 }
3220
3221 LOG_ALLOW(GLOBAL, LOG_INFO, "========================================\n");
3222 LOG_ALLOW(GLOBAL, LOG_INFO, "Beginning simulation memory cleanup...\n");
3223 LOG_ALLOW(GLOBAL, LOG_INFO, "========================================\n");
3224
3225 // ============================================================================
3226 // PHASE 1: DESTROY MULTIGRID HIERARCHY (All UserCtx structures)
3227 // ============================================================================
3228
3229 if (simCtx->usermg.mgctx) {
3230 LOG_ALLOW(GLOBAL, LOG_INFO, "Destroying multigrid hierarchy (%d levels)...\n",
3231 simCtx->usermg.mglevels);
3232
3233 // Destroy each UserCtx from finest to coarsest (reverse order is safer)
3234 for (PetscInt level = simCtx->usermg.mglevels - 1; level >= 0; level--) {
3235 UserCtx *user = simCtx->usermg.mgctx[level].user;
3236 if (user) {
3237 LOG_ALLOW(LOCAL, LOG_INFO, " Destroying level %d of %d...\n",
3238 level, simCtx->usermg.mglevels - 1);
3239 ierr = DestroyUserContext(user); CHKERRQ(ierr);
3240
3241 // Free the UserCtx structure itself
3242 ierr = PetscFree(user); CHKERRQ(ierr);
3243 simCtx->usermg.mgctx[level].user = NULL;
3244 }
3245
3246 // Destroy the MGCtx-level packer DM
3247 if (simCtx->usermg.mgctx[level].packer) {
3248 ierr = DMDestroy(&simCtx->usermg.mgctx[level].packer); CHKERRQ(ierr);
3249 LOG_ALLOW(LOCAL, LOG_DEBUG, " MGCtx[%d].packer destroyed.\n", level);
3250 }
3251 }
3252
3253 // Free the MGCtx array itself
3254 ierr = PetscFree(simCtx->usermg.mgctx); CHKERRQ(ierr);
3255 simCtx->usermg.mgctx = NULL;
3256 LOG_ALLOW(GLOBAL, LOG_INFO, "All multigrid levels destroyed.\n");
3257 }
3258
3259 // ============================================================================
3260 // PHASE 2: DESTROY USERMG-LEVEL OBJECTS
3261 // ============================================================================
3262
3263 if (simCtx->usermg.packer) {
3264 ierr = DMDestroy(&simCtx->usermg.packer); CHKERRQ(ierr);
3265 LOG_ALLOW(LOCAL, LOG_DEBUG, "UserMG.packer DM destroyed.\n");
3266 }
3267
3268 if (simCtx->usermg.snespacker) {
3269 ierr = SNESDestroy(&simCtx->usermg.snespacker); CHKERRQ(ierr);
3270 LOG_ALLOW(LOCAL, LOG_DEBUG, "UserMG.snespacker SNES destroyed.\n");
3271 }
3272
3273 // ============================================================================
3274 // PHASE 3: DESTROY SIMCTX-LEVEL OBJECTS
3275 // ============================================================================
3276
3277 LOG_ALLOW(GLOBAL, LOG_INFO, "Destroying SimCtx-level objects...\n");
3278
3279 // --- PetscViewer for logging ---
3280 if (simCtx->logviewer) {
3281 ierr = PetscViewerDestroy(&simCtx->logviewer); CHKERRQ(ierr);
3282 LOG_ALLOW(LOCAL, LOG_DEBUG, " logviewer destroyed.\n");
3283 }
3284
3285 // --- Particle System DM ---
3286 if (simCtx->dm_swarm) {
3287 ierr = DMDestroy(&simCtx->dm_swarm); CHKERRQ(ierr);
3288 LOG_ALLOW(LOCAL, LOG_DEBUG, " dm_swarm destroyed.\n");
3289 }
3290
3291 // --- BoundingBox List (Array of BoundingBox structs) ---
3292 if (simCtx->bboxlist) {
3293 ierr = PetscFree(simCtx->bboxlist); CHKERRQ(ierr);
3294 simCtx->bboxlist = NULL;
3295 LOG_ALLOW(LOCAL, LOG_DEBUG, " bboxlist freed.\n");
3296 }
3297
3298 // --- Boundary Condition Files (Array of strings) ---
3299 if (simCtx->bcs_files) {
3300 for (PetscInt i = 0; i < simCtx->num_bcs_files; i++) {
3301 if (simCtx->bcs_files[i]) {
3302 ierr = PetscFree(simCtx->bcs_files[i]); CHKERRQ(ierr);
3303 }
3304 }
3305 ierr = PetscFree(simCtx->bcs_files); CHKERRQ(ierr);
3306 simCtx->bcs_files = NULL;
3307 LOG_ALLOW(LOCAL, LOG_DEBUG, " bcs_files array freed (%d files).\n", simCtx->num_bcs_files);
3308 }
3309
3310 // --- Brownian Motion RNG ---
3311 if (simCtx->BrownianMotionRNG) {
3312 ierr = PetscRandomDestroy(&simCtx->BrownianMotionRNG); CHKERRQ(ierr);
3313 LOG_ALLOW(LOCAL, LOG_DEBUG, " BrownianMotionRNG destroyed.\n");
3314 }
3315 // --- Post-Processing Parameters ---
3316 // pps is allocated with PetscNew and contains only static char arrays and basic types.
3317 // No internal dynamic allocations need to be freed.
3318 if (simCtx->pps) {
3319 ierr = PetscFree(simCtx->pps); CHKERRQ(ierr);
3320 simCtx->pps = NULL;
3321 LOG_ALLOW(LOCAL, LOG_DEBUG, " PostProcessParams freed.\n");
3322 }
3323
3324 // --- IBM/FSI Objects ---
3325 // Note: These are initialized to NULL and currently have no dedicated destroy functions.
3326 // If these modules are extended with cleanup routines, call them here.
3327 if (simCtx->ibm != NULL) {
3328 LOG_ALLOW(GLOBAL, LOG_WARNING, " WARNING: simCtx->ibm is non-NULL but no destroy function exists. Potential memory leak.\n");
3329 }
3330 if (simCtx->ibmv != NULL) {
3331 LOG_ALLOW(GLOBAL, LOG_WARNING, " WARNING: simCtx->ibmv is non-NULL but no destroy function exists. Potential memory leak.\n");
3332 }
3333 if (simCtx->fsi != NULL) {
3334 LOG_ALLOW(GLOBAL, LOG_WARNING, " WARNING: simCtx->fsi is non-NULL but no destroy function exists. Potential memory leak.\n");
3335 }
3336
3337 // --- Logging Allowed Functions (Array of strings) ---
3338 // Note: The logging system maintains its own copy via set_allowed_functions(),
3339 // so freeing simCtx->allowedFuncs will NOT affect LOG_ALLOW functionality.
3340 if (simCtx->allowedFuncs) {
3341 for (PetscInt i = 0; i < simCtx->nAllowed; i++) {
3342 if (simCtx->allowedFuncs[i]) {
3343 ierr = PetscFree(simCtx->allowedFuncs[i]); CHKERRQ(ierr);
3344 }
3345 }
3346 ierr = PetscFree(simCtx->allowedFuncs); CHKERRQ(ierr);
3347 simCtx->allowedFuncs = NULL;
3348 LOG_ALLOW(LOCAL, LOG_DEBUG, " allowedFuncs array freed (%d functions).\n", simCtx->nAllowed);
3349 }
3350
3351 // --- Profiling Critical Functions (Array of strings) ---
3352 if (simCtx->criticalFuncs) {
3353 for (PetscInt i = 0; i < simCtx->nCriticalFuncs; i++) {
3354 if (simCtx->criticalFuncs[i]) {
3355 ierr = PetscFree(simCtx->criticalFuncs[i]); CHKERRQ(ierr);
3356 }
3357 }
3358 ierr = PetscFree(simCtx->criticalFuncs); CHKERRQ(ierr);
3359 simCtx->criticalFuncs = NULL;
3360 LOG_ALLOW(LOCAL, LOG_DEBUG, " criticalFuncs array freed (%d functions).\n", simCtx->nCriticalFuncs);
3361 }
3362
3363 // ============================================================================
3364 // PHASE 4: FINAL SUMMARY
3365 // ============================================================================
3366
3367 LOG_ALLOW(GLOBAL, LOG_INFO, "========================================\n");
3368 LOG_ALLOW(GLOBAL, LOG_INFO, "Simulation cleanup completed successfully.\n");
3369 LOG_ALLOW(GLOBAL, LOG_INFO, "All PETSc objects have been destroyed.\n");
3370 LOG_ALLOW(GLOBAL, LOG_INFO, "========================================\n");
3371
3372 PetscFunctionReturn(0);
3373}
PetscErrorCode DestroyUserContext(UserCtx *user)
Destroys all resources allocated within a single UserCtx structure.
Definition setup.c:3096
DM packer
Definition variables.h:485
SNES snespacker
Definition variables.h:486
DM packer
Definition variables.h:476
Here is the call graph for this function:
Here is the caller graph for this function: