PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
Macros | Functions
setup.c File Reference

// Setup code for running any simulation More...

#include "setup.h"
Include dependency graph for setup.c:

Go to the source code of this file.

Macros

#define __FUNCT__   "CreateSimulationContext"
 
#define __FUNCT__   "PetscMkdirRecursive"
 
#define __FUNCT__   "SetupSimulationEnvironment"
 
#define __FUNCT__   "AllocateContextHierarchy"
 
#define __FUNCT__   "SetupSolverParameters"
 
#define __FUNCT__   "SetupGridAndSolvers"
 
#define __FUNCT__   "CreateAndInitializeAllVectors"
 
#define __FUNCT__   "UpdateLocalGhosts"
 
#define __FUNCT__   "SetupBoundaryConditions"
 
#define __FUNCT__   "GetOwnedCellRange"
 
#define __FUNCT__   "ComputeAndStoreNeighborRanks"
 
#define __FUNCT__   "SetDMDAProcLayout"
 
#define __FUNCT__   "SetupDomainRankInfo"
 
#define __FUNCT__   "Contra2Cart"
 
#define __FUNCT__   "SetupDomainCellDecompositionMap"
 
#define __FUNCT__   "BinarySearchInt64"
 
#define __FUNCT__   "ComputeDivergence"
 
#define __FUNCT__   "InitializeRandomGenerators"
 
#define __FUNCT__   "InitializeLogicalSpaceRNGs"
 
#define __FUNCT__   "InitializeBrownianRNG"
 
#define __FUNCT__   "TransformScalarDerivativesToPhysical"
 
#define __FUNCT__   "TransformDerivativesToPhysical"
 
#define __FUNCT__   "ComputeScalarFieldDerivatives"
 
#define __FUNCT__   "ComputeVectorFieldDerivatives"
 
#define __FUNCT__   "DestroyUserVectors"
 
#define __FUNCT__   "DestroyUserContext"
 
#define __FUNCT__   "FinalizeSimulation"
 

Functions

PetscErrorCode CreateSimulationContext (int argc, char **argv, SimCtx **p_simCtx)
 Allocates and populates the master SimulationContext object.
 
static PetscErrorCode PetscMkdirRecursive (const char *path)
 Creates a directory path recursively, similar to mkdir -p.
 
PetscErrorCode SetupSimulationEnvironment (SimCtx *simCtx)
 Verifies and prepares the complete I/O environment for a simulation run.
 
static PetscErrorCode AllocateContextHierarchy (SimCtx *simCtx)
 Allocates the memory for the UserMG and UserCtx hierarchy.
 
static PetscErrorCode SetupSolverParameters (SimCtx *simCtx)
 
PetscErrorCode SetupGridAndSolvers (SimCtx *simCtx)
 The main orchestrator for setting up all grid-related components.
 
PetscErrorCode CreateAndInitializeAllVectors (SimCtx *simCtx)
 Creates and initializes all PETSc Vec objects for all fields.
 
PetscErrorCode UpdateLocalGhosts (UserCtx *user, const char *fieldName)
 Updates the local vector (including ghost points) from its corresponding global vector.
 
PetscErrorCode SetupBoundaryConditions (SimCtx *simCtx)
 (Orchestrator) Sets up all boundary conditions for the simulation.
 
PetscErrorCode Allocate3DArrayScalar (PetscReal ****array, PetscInt nz, PetscInt ny, PetscInt nx)
 Allocates a 3D array of PetscReal values using PetscCalloc.
 
PetscErrorCode Deallocate3DArrayScalar (PetscReal ***array, PetscInt nz, PetscInt ny)
 Deallocates a 3D array of PetscReal values allocated by Allocate3DArrayScalar.
 
PetscErrorCode Allocate3DArrayVector (Cmpnts ****array, PetscInt nz, PetscInt ny, PetscInt nx)
 Allocates a 3D array of Cmpnts structures using PetscCalloc.
 
PetscErrorCode Deallocate3DArrayVector (Cmpnts ***array, PetscInt nz, PetscInt ny)
 Deallocates a 3D array of Cmpnts structures allocated by Allocate3DArrayVector.
 
PetscErrorCode GetOwnedCellRange (const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global_out, PetscInt *xm_cell_local_out)
 Gets the global starting index of cells owned by this rank and the number of such cells.
 
PetscErrorCode ComputeAndStoreNeighborRanks (UserCtx *user)
 Computes and stores the Cartesian neighbor ranks for the DMDA decomposition.
 
PetscErrorCode SetDMDAProcLayout (DM dm, UserCtx *user)
 Sets the processor layout for a given DMDA based on PETSc options.
 
PetscErrorCode SetupDomainRankInfo (SimCtx *simCtx)
 Sets up the full rank communication infrastructure for all blocks.
 
PetscErrorCode Contra2Cart (UserCtx *user)
 Reconstructs Cartesian velocity (Ucat) at cell centers from contravariant velocity (Ucont) defined on cell faces.
 
PetscErrorCode SetupDomainCellDecompositionMap (UserCtx *user)
 Creates and distributes a map of the domain's cell decomposition to all ranks.
 
PetscErrorCode BinarySearchInt64 (PetscInt n, const PetscInt64 arr[], PetscInt64 key, PetscBool *found)
 Performs a binary search for a key in a sorted array of PetscInt64.
 
static PetscInt Gidx (PetscInt i, PetscInt j, PetscInt k, UserCtx *user)
 
PetscErrorCode ComputeDivergence (UserCtx *user)
 
PetscErrorCode InitializeRandomGenerators (UserCtx *user, PetscRandom *randx, PetscRandom *randy, PetscRandom *randz)
 Initializes random number generators for assigning particle properties.
 
PetscErrorCode InitializeLogicalSpaceRNGs (PetscRandom *rand_logic_i, PetscRandom *rand_logic_j, PetscRandom *rand_logic_k)
 Initializes random number generators for logical space operations [0.0, 1.0).
 
PetscErrorCode InitializeBrownianRNG (SimCtx *simCtx)
 Initializes a single master RNG for time-stepping physics (Brownian motion).
 
void TransformScalarDerivativesToPhysical (PetscReal jacobian, Cmpnts csi_metrics, Cmpnts eta_metrics, Cmpnts zet_metrics, PetscReal dPhi_dcsi, PetscReal dPhi_deta, PetscReal dPhi_dzet, Cmpnts *gradPhi)
 Transforms scalar derivatives from computational space to physical space using the chain rule.
 
static void TransformDerivativesToPhysical (PetscReal jacobian, Cmpnts csi_metrics, Cmpnts eta_metrics, Cmpnts zet_metrics, Cmpnts deriv_csi, Cmpnts deriv_eta, Cmpnts deriv_zet, Cmpnts *dudx, Cmpnts *dvdx, Cmpnts *dwdx)
 Transforms derivatives from computational space to physical space using the chain rule.
 
PetscErrorCode ComputeScalarFieldDerivatives (UserCtx *user, PetscInt i, PetscInt j, PetscInt k, PetscReal ***field_data, Cmpnts *grad)
 Computes the gradient of a cell-centered SCALAR field at a specific grid point.
 
PetscErrorCode ComputeVectorFieldDerivatives (UserCtx *user, PetscInt i, PetscInt j, PetscInt k, Cmpnts ***field_data, Cmpnts *dudx, Cmpnts *dvdx, Cmpnts *dwdx)
 Computes the derivatives of a cell-centered vector field at a specific grid point.
 
PetscErrorCode DestroyUserVectors (UserCtx *user)
 Destroys all Vec objects in a UserCtx structure.
 
PetscErrorCode DestroyUserContext (UserCtx *user)
 Destroys all resources allocated within a single UserCtx structure.
 
PetscErrorCode FinalizeSimulation (SimCtx *simCtx)
 Main cleanup function for the entire simulation context.
 

Detailed Description

// Setup code for running any simulation

Test program for DMSwarm interpolation using the fdf-curvIB method. Provides the setup to start any simulation with DMSwarm and DMDAs.

Definition in file setup.c.

Macro Definition Documentation

◆ __FUNCT__ [1/27]

#define __FUNCT__   "CreateSimulationContext"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [2/27]

#define __FUNCT__   "PetscMkdirRecursive"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [3/27]

#define __FUNCT__   "SetupSimulationEnvironment"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [4/27]

#define __FUNCT__   "AllocateContextHierarchy"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [5/27]

#define __FUNCT__   "SetupSolverParameters"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [6/27]

#define __FUNCT__   "SetupGridAndSolvers"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [7/27]

#define __FUNCT__   "CreateAndInitializeAllVectors"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [8/27]

#define __FUNCT__   "UpdateLocalGhosts"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [9/27]

#define __FUNCT__   "SetupBoundaryConditions"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [10/27]

#define __FUNCT__   "GetOwnedCellRange"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [11/27]

#define __FUNCT__   "ComputeAndStoreNeighborRanks"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [12/27]

#define __FUNCT__   "SetDMDAProcLayout"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [13/27]

#define __FUNCT__   "SetupDomainRankInfo"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [14/27]

#define __FUNCT__   "Contra2Cart"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [15/27]

#define __FUNCT__   "SetupDomainCellDecompositionMap"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [16/27]

#define __FUNCT__   "BinarySearchInt64"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [17/27]

#define __FUNCT__   "ComputeDivergence"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [18/27]

#define __FUNCT__   "InitializeRandomGenerators"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [19/27]

#define __FUNCT__   "InitializeLogicalSpaceRNGs"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [20/27]

#define __FUNCT__   "InitializeBrownianRNG"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [21/27]

#define __FUNCT__   "TransformScalarDerivativesToPhysical"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [22/27]

#define __FUNCT__   "TransformDerivativesToPhysical"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [23/27]

#define __FUNCT__   "ComputeScalarFieldDerivatives"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [24/27]

#define __FUNCT__   "ComputeVectorFieldDerivatives"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [25/27]

#define __FUNCT__   "DestroyUserVectors"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [26/27]

#define __FUNCT__   "DestroyUserContext"

Definition at line 10 of file setup.c.

◆ __FUNCT__ [27/27]

#define __FUNCT__   "FinalizeSimulation"

Definition at line 10 of file setup.c.

Function Documentation

◆ CreateSimulationContext()

PetscErrorCode CreateSimulationContext ( int  argc,
char **  argv,
SimCtx **  p_simCtx 
)

Allocates and populates the master SimulationContext object.

This function serves as the single, authoritative entry point for all simulation configuration. It merges the setup logic from both the legacy FSI/IBM solver and the modern particle solver into a unified, robust process.

The function follows a strict sequence:

  1. Allocate Context & Set Defaults: It first allocates the SimulationContext and populates every field with a sane, hardcoded default value. This ensures the simulation starts from a known, predictable state.
  2. Configure Logging System: It configures the custom logging framework. It parses the -whitelist_config_file option to load a list of function names allowed to produce log output. This configuration (the file path and the list of function names) is stored within the SimulationContext for later reference and cleanup.
  3. Parse All Options: It performs a comprehensive pass of PetscOptionsGet... calls for every possible command-line flag, overriding the default values set in step 1.
  4. Log Summary: After all options are parsed, it uses the now-active logging system to print a summary of the key simulation parameters.
Parameters
[in]argcArgument count passed from main.
[in]argvArgument vector passed from main.
[out]p_simCtxOn success, this will point to the newly created and fully configured SimulationContext pointer. The caller is responsible for eventually destroying this object by calling FinalizeSimulation().
Returns
PetscErrorCode Returns 0 on success, or a non-zero PETSc error code on failure.

Definition at line 44 of file setup.c.

45{
46 PetscErrorCode ierr;
47 SimCtx *simCtx;
48 char control_filename[PETSC_MAX_PATH_LEN] = ""; // Temporary placeholder for control file name.
49 PetscBool control_flg; // Temporary placeholder for control file tag existence check flag.
50
51 PetscFunctionBeginUser;
52
54
55 // === 1. Allocate the Context Struct and Set ALL Defaults ==================
56 ierr = PetscNew(p_simCtx); CHKERRQ(ierr);
57 simCtx = *p_simCtx;
58
59 // --- Group 1: Parallelism & MPI ---
60 simCtx->rank = 0; simCtx->size = 1;
61
62 // --- Group 2: Simulation Control, Time, and I/O ---
63 simCtx->step = 0; simCtx->ti = 0.0; simCtx->StartStep = 0; simCtx->StepsToRun = 10;
64 simCtx->tiout = 10; simCtx->StartTime = 0.0; simCtx->dt = 0.001;
65 simCtx->OnlySetup = PETSC_FALSE;
66 simCtx->logviewer = NULL; simCtx->OutputFreq = simCtx->tiout;
67 strcpy(simCtx->eulerianSource,"solve");
68 strcpy(simCtx->restart_dir,"results");
69 strcpy(simCtx->output_dir,"results");
70 strcpy(simCtx->log_dir,"logs");
71 strcpy(simCtx->euler_subdir,"eulerian");
72 strcpy(simCtx->particle_subdir,"particles");
73 simCtx->_io_context_buffer[0] = '\0';
74 simCtx->current_io_directory = NULL;
75
76 // --- Group 3: High-Level Physics & Model Selection Flags ---
77 simCtx->immersed = 0; simCtx->movefsi = 0; simCtx->rotatefsi = 0;
78 simCtx->sediment = 0; simCtx->rheology = 0; simCtx->invicid = 0;
79 simCtx->TwoD = 0; simCtx->thin = 0; simCtx->moveframe = 0;
80 simCtx->rotateframe = 0; simCtx->blank = 0;
81 simCtx->dgf_x = 0; simCtx->dgf_y = 1; simCtx->dgf_z = 0;
82 simCtx->dgf_ax = 1; simCtx->dgf_ay = 0; simCtx->dgf_az = 0;
83 strcpy(simCtx->AnalyticalSolutionType,"TGV3D");
84
85 // --- Group 4: Specific Simulation Case Flags --- (DEPRICATED)
86 simCtx->cop=0; simCtx->fish=0; simCtx->fish_c=0; simCtx->fishcyl=0;
87 simCtx->eel=0; simCtx->pizza=0; simCtx->turbine=0; simCtx->Pipe=0;
88 simCtx->wing=0; simCtx->hydro=0; simCtx->MHV=0; simCtx->LV=0;
89 simCtx->channelz = 0;
90
91 // --- Group 5: Solver & Numerics Parameters ---
93 simCtx->mom_dt_rk4_residual_norm_noise_allowance_factor = 1.05; // New addition for divergence detection
94 simCtx->mom_atol = 1e-7; simCtx->mom_rtol = 1e-4; simCtx->imp_stol = 1.e-8;
95 simCtx->mglevels = 3; simCtx->mg_MAX_IT = 30; simCtx->mg_idx = 1;
96 simCtx->mg_preItr = 1; simCtx->mg_poItr = 1;
97 simCtx->poisson = 0; simCtx->poisson_tol = 5.e-9;
98 simCtx->STRONG_COUPLING = 0;simCtx->central=0;
99 simCtx->ren = 100.0; simCtx->pseudo_cfl = 0.1;
100 simCtx->max_pseudo_cfl = 1.0; simCtx->min_pseudo_cfl = 0.001;
101 simCtx->pseudo_cfl_reduction_factor = 1.0;
102 simCtx->pseudo_cfl_growth_factor = 1.0; //simCtx->vnn = 0.1;
103 simCtx->ps_ksp_pic_monitor_true_residual = PETSC_FALSE;
104 simCtx->cdisx = 0.0; simCtx->cdisy = 0.0; simCtx->cdisz = 0.0;
105 simCtx->FieldInitialization = 0;
106 simCtx->InitialConstantContra.x = 0.0;
107 simCtx->InitialConstantContra.y = 0.0;
108 simCtx->InitialConstantContra.z = 0.0;
109
110 // --- Group 6: Physical & Geometric Parameters ---
111 simCtx->NumberOfBodies = 1; simCtx->Flux_in = 1.0; simCtx->angle = 0.0;
112 simCtx->max_angle = -54. * 3.1415926 / 180.;
113 simCtx->CMx_c=0.0; simCtx->CMy_c=0.0; simCtx->CMz_c=0.0;
114 simCtx->wall_roughness_height = 1e-16;
115 simCtx->schmidt_number = 1.0; simCtx->Turbulent_schmidt_number = 0.7;
116
117 // --- Group 7: Grid, Domain, and Boundary Condition Settings ---
118 simCtx->block_number = 1; simCtx->inletprofile = 1;
119 simCtx->grid1d = 0; simCtx->Ogrid = 0;
120 simCtx->i_periodic = 0; simCtx->j_periodic = 0; simCtx->k_periodic = 0;
121 simCtx->blkpbc = 10; simCtx->pseudo_periodic = 0;
122 strcpy(simCtx->grid_file, "config/grid.run");
123 simCtx->generate_grid = PETSC_FALSE;
124 simCtx->da_procs_x = PETSC_DECIDE;
125 simCtx->da_procs_y = PETSC_DECIDE;
126 simCtx->da_procs_z = PETSC_DECIDE;
127 simCtx->grid_rotation_angle = 0.0;
128 simCtx->Croty = 0.0; simCtx->Crotz = 0.0;
129 simCtx->num_bcs_files = 1;
130 ierr = PetscMalloc1(1, &simCtx->bcs_files); CHKERRQ(ierr);
131 ierr = PetscStrallocpy("config/bcs.run", &simCtx->bcs_files[0]); CHKERRQ(ierr);
132 simCtx->FluxInSum = 0.0; simCtx->FluxOutSum = 0.0; simCtx->Fluxsum = 0.0;
133 simCtx->drivingForceMagnitude = 0.0, simCtx->forceScalingFactor = 1.8;
134 simCtx->targetVolumetricFlux = 0.0;
135 simCtx->AreaInSum = 0.0; simCtx->AreaOutSum = 0.0;
136 simCtx->U_bc = 0.0; simCtx->ccc = 0;
137 simCtx->ratio = 0.0;
138
139
140 // --- Group 8: Turbulence Modeling (LES/RANS) ---
141 simCtx->averaging = PETSC_FALSE; simCtx->les = NO_LES_MODEL; simCtx->rans = 0;
142 simCtx->wallfunction = 0; simCtx->mixed = 0; simCtx->clark = 0;
143 simCtx->dynamic_freq = 1; simCtx->max_cs = 0.5;
144 simCtx->Const_CS = 0.03;
145 simCtx->testfilter_ik = 0; simCtx->testfilter_1d = 0;
146 simCtx->i_homo_filter = 0; simCtx->j_homo_filter = 0; simCtx->k_homo_filter = 0;
147
148 // --- Group 9: Particle / DMSwarm Data & Settings ---
149 simCtx->np = 0; simCtx->readFields = PETSC_FALSE;
150 simCtx->dm_swarm = NULL; simCtx->bboxlist = NULL;
152 strcpy(simCtx->particleRestartMode,"load");
153 simCtx->particlesLostLastStep = 0;
154 simCtx->particlesMigratedLastStep = 0;
155 simCtx->occupiedCellCount = 0;
156 simCtx->particleLoadImbalance = 0.0;
157 simCtx->migrationPassesLastStep = 0;
158 simCtx->BrownianMotionRNG = NULL;
159 simCtx->C_IEM = 2.0;
160
161 // --- Group 10: Immersed Boundary & FSI Data Object Pointers ---
162 simCtx->ibm = NULL; simCtx->ibmv = NULL; simCtx->fsi = NULL;
163 simCtx->rstart_fsi = PETSC_FALSE; simCtx->duplicate = 0;
164
165 // --- Group 11: Logging and Custom Configuration ---
166 strcpy(simCtx->allowedFile, "config/whitelist.run");
167 simCtx->useCfg = PETSC_FALSE;
168 simCtx->allowedFuncs = NULL;
169 simCtx->nAllowed = 0;
170 simCtx->LoggingFrequency = 10;
171 simCtx->summationRHS = 0.0;
172 simCtx->MaxDiv = 0.0;
173 simCtx->MaxDivFlatArg = 0; simCtx->MaxDivx = 0; simCtx->MaxDivy = 0; simCtx->MaxDivz = 0;
174 strcpy(simCtx->criticalFuncsFile, "config/profile.run");
175 simCtx->useCriticalFuncsCfg = PETSC_FALSE;
176 simCtx->criticalFuncs = NULL;
177 simCtx->nCriticalFuncs = 0;
178 // --- Group 11: Post-Processing Information ---
179 strcpy(simCtx->PostprocessingControlFile, "config/post.run");
180 ierr = PetscNew(&simCtx->pps); CHKERRQ(ierr);
181
182 // === 2. Get MPI Info and Handle Config File =============================
183 // -- Group 1: Parallelism & MPI Information
184 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &simCtx->rank); CHKERRQ(ierr);
185 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &simCtx->size); CHKERRQ(ierr);
186
187 // First, check if the -control_file argument was provided by the user/script.
188 ierr = PetscOptionsGetString(NULL, NULL, "-control_file", control_filename, sizeof(control_filename), &control_flg); CHKERRQ(ierr);
189
190 // If the flag is NOT present or the filename is empty, abort with a helpful error.
191 if (!control_flg || strlen(control_filename) == 0) {
192 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG,
193 "\n\n*** MANDATORY ARGUMENT MISSING ***\n"
194 "The -control_file argument was not provided.\n"
195 "This program must be launched with a configuration file.\n"
196 "Example: mpiexec -n 4 ./picsolver -control_file /path/to/your/config.control\n"
197 "This is typically handled automatically by the 'pic-flow' script.\n");
198 }
199
200 // At this point, we have a valid filename. Attempt to load it.
201 LOG(GLOBAL, LOG_INFO, "Loading mandatory configuration from: %s\n", control_filename);
202 ierr = PetscOptionsInsertFile(PETSC_COMM_WORLD, NULL, control_filename, PETSC_FALSE);
203 if (ierr == PETSC_ERR_FILE_OPEN) {
204 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_FILE_OPEN, "The specified control file was not found or could not be opened: %s", control_filename);
205 }
206 CHKERRQ(ierr);
207
208 // === 3. A Configure Logging System ========================================
209 // This logic determines the logging configuration and STORES it in simCtx for
210 // later reference and cleanup.
211 ierr = PetscOptionsGetString(NULL, NULL, "-whitelist_config_file", simCtx->allowedFile, PETSC_MAX_PATH_LEN, &simCtx->useCfg); CHKERRQ(ierr);
212
213 if (simCtx->useCfg) {
214 ierr = LoadAllowedFunctionsFromFile(simCtx->allowedFile, &simCtx->allowedFuncs, &simCtx->nAllowed);
215 if (ierr) {
216 // Use direct PetscPrintf as logging system isn't fully active yet.
217 PetscPrintf(PETSC_COMM_SELF, "[%s] WARNING: Failed to load allowed functions from '%s'. Falling back to default list.\n", __func__, simCtx->allowedFile);
218 simCtx->useCfg = PETSC_FALSE; // Mark as failed.
219 ierr = 0; // Clear the error to allow fallback.
220 }
221 }
222 if (!simCtx->useCfg) {
223 // Fallback to default logging functions if no file was used or if loading failed.
224 simCtx->nAllowed = 2;
225 ierr = PetscMalloc1(simCtx->nAllowed, &simCtx->allowedFuncs); CHKERRQ(ierr);
226 ierr = PetscStrallocpy("main", &simCtx->allowedFuncs[0]); CHKERRQ(ierr);
227 ierr = PetscStrallocpy("CreateSimulationContext", &simCtx->allowedFuncs[1]); CHKERRQ(ierr);
228 }
229
230 // Activate the configuration by passing it to the logging module's setup function.
231 set_allowed_functions((const char**)simCtx->allowedFuncs, (size_t)simCtx->nAllowed);
232
233 // Now that the logger is configured, we can use it.
234 LOG_ALLOW_SYNC(LOCAL, LOG_INFO, "Context created. Initializing on rank %d of %d.\n", simCtx->rank, simCtx->size);
235 print_log_level(); // This will now correctly reflect the LOG_LEVEL environment variable.
236
237 // === 3.B Configure Profiling System ========================================
238 ierr = PetscOptionsGetString(NULL, NULL, "-profile_config_file", simCtx->criticalFuncsFile, PETSC_MAX_PATH_LEN, &simCtx->useCriticalFuncsCfg); CHKERRQ(ierr);
239 if (simCtx->useCriticalFuncsCfg) {
241 if (ierr) {
242 PetscPrintf(PETSC_COMM_SELF, "[%s] WARNING: Failed to load critical profiling functions from '%s'. Falling back to default list.\n", __func__, simCtx->criticalFuncsFile);
243 simCtx->useCriticalFuncsCfg = PETSC_FALSE;
244 ierr = 0;
245 }
246 }
247 if (!simCtx->useCriticalFuncsCfg) {
248 // Fallback to a hardcoded default list if no file or loading failed
249 simCtx->nCriticalFuncs = 4;
250 ierr = PetscMalloc1(simCtx->nCriticalFuncs, &simCtx->criticalFuncs); CHKERRQ(ierr);
251 ierr = PetscStrallocpy("FlowSolver", &simCtx->criticalFuncs[0]); CHKERRQ(ierr);
252 ierr = PetscStrallocpy("AdvanceSimulation", &simCtx->criticalFuncs[1]); CHKERRQ(ierr);
253 ierr = PetscStrallocpy("LocateAllParticlesInGrid", &simCtx->criticalFuncs[2]); CHKERRQ(ierr);
254 ierr = PetscStrallocpy("InterpolateAllFieldsToSwarm", &simCtx->criticalFuncs[3]); CHKERRQ(ierr);
255 }
256
257 // Initialize the profiling system with the current updated simulation context.
258 ierr = ProfilingInitialize(simCtx); CHKERRQ(ierr);
259
260 // === 4. Parse All Command Line Options ==================================
261 LOG_ALLOW(GLOBAL, LOG_INFO, "Parsing command-line options...\n");
262
263 // --- Group 2
264 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 2: Simulation Control,Time and I/O.\n");
265 // Read the physical time to start from.
266 // The default is already 0.0, so this will only be non-zero if the user provides it.
267 ierr = PetscOptionsGetInt(NULL, NULL, "-start_step", &simCtx->StartStep, NULL); CHKERRQ(ierr);
268 ierr = PetscOptionsGetInt(NULL,NULL, "-totalsteps", &simCtx->StepsToRun, NULL); CHKERRQ(ierr);
269 ierr = PetscOptionsGetBool(NULL, NULL, "-only_setup", &simCtx->OnlySetup, NULL); CHKERRQ(ierr);
270 ierr = PetscOptionsGetReal(NULL, NULL, "-dt", &simCtx->dt, NULL); CHKERRQ(ierr);
271 ierr = PetscOptionsGetInt(NULL, NULL, "-tio", &simCtx->tiout, NULL); CHKERRQ(ierr);
272 ierr = PetscOptionsGetString(NULL,NULL,"-euler_field_source",simCtx->eulerianSource,sizeof(simCtx->eulerianSource),NULL);CHKERRQ(ierr);
273 ierr = PetscOptionsGetString(NULL,NULL,"-output_dir",&simCtx->output_dir,sizeof(simCtx->output_dir),NULL);CHKERRQ(ierr);
274 ierr = PetscOptionsGetString(NULL,NULL,"-restart_dir",&simCtx->restart_dir,sizeof(simCtx->restart_dir),NULL);CHKERRQ(ierr);
275 ierr = PetscOptionsGetString(NULL,NULL,"-log_dir",&simCtx->log_dir,sizeof(simCtx->log_dir),NULL);CHKERRQ(ierr);
276 ierr = PetscOptionsGetString(NULL,NULL,"-euler_subdir",&simCtx->euler_subdir,sizeof(simCtx->euler_subdir),NULL);CHKERRQ(ierr);
277 ierr = PetscOptionsGetString(NULL,NULL,"-particle_subdir",&simCtx->particle_subdir,sizeof(simCtx->particle_subdir),NULL);CHKERRQ(ierr);
278
279 simCtx->OutputFreq = simCtx->tiout; // backward compatibility related redundancy.
280 if(strcmp(simCtx->eulerianSource,"solve")!= 0 && strcmp(simCtx->eulerianSource,"load") != 0 && strcmp(simCtx->eulerianSource,"analytical")!=0){
281 SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Invalid value for -euler_field_source. Must be 'load','analytical' or 'solve'. You provided '%s'.",simCtx->eulerianSource);
282 }
283
284 // --- Group 3
285 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 3: High-Level Physics & Model Selection Flags\n");
286 ierr = PetscOptionsGetInt(NULL, NULL, "-imm", &simCtx->immersed, NULL); CHKERRQ(ierr);
287 ierr = PetscOptionsGetInt(NULL, NULL, "-fsi", &simCtx->movefsi, NULL); CHKERRQ(ierr);
288 ierr = PetscOptionsGetInt(NULL, NULL, "-rfsi", &simCtx->rotatefsi, NULL); CHKERRQ(ierr);
289 ierr = PetscOptionsGetInt(NULL, NULL, "-sediment", &simCtx->sediment, NULL); CHKERRQ(ierr);
290 ierr = PetscOptionsGetInt(NULL, NULL, "-rheology", &simCtx->rheology, NULL); CHKERRQ(ierr);
291 ierr = PetscOptionsGetInt(NULL, NULL, "-inv", &simCtx->invicid, NULL); CHKERRQ(ierr);
292 ierr = PetscOptionsGetInt(NULL, NULL, "-TwoD", &simCtx->TwoD, NULL); CHKERRQ(ierr);
293 ierr = PetscOptionsGetInt(NULL, NULL, "-thin", &simCtx->thin, NULL); CHKERRQ(ierr);
294 ierr = PetscOptionsGetInt(NULL, NULL, "-mframe", &simCtx->moveframe, NULL); CHKERRQ(ierr);
295 ierr = PetscOptionsGetInt(NULL, NULL, "-rframe", &simCtx->rotateframe, NULL); CHKERRQ(ierr);
296 ierr = PetscOptionsGetInt(NULL, NULL, "-blk", &simCtx->blank, NULL); CHKERRQ(ierr);
297 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_z", &simCtx->dgf_z, NULL); CHKERRQ(ierr);
298 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_y", &simCtx->dgf_y, NULL); CHKERRQ(ierr);
299 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_x", &simCtx->dgf_x, NULL); CHKERRQ(ierr);
300 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_az", &simCtx->dgf_az, NULL); CHKERRQ(ierr);
301 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_ay", &simCtx->dgf_ay, NULL); CHKERRQ(ierr);
302 ierr = PetscOptionsGetInt(NULL, NULL, "-dgf_ax", &simCtx->dgf_ax, NULL); CHKERRQ(ierr);
303 ierr = PetscOptionsGetString(NULL,NULL,"-analytical_type",simCtx->AnalyticalSolutionType,sizeof(simCtx->AnalyticalSolutionType),NULL);CHKERRQ(ierr);
304
305 // --- Group 4
306 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 4: Specific Simulation Case Flags \n");
307 ierr = PetscOptionsGetInt(NULL, NULL, "-cop", &simCtx->cop, NULL); CHKERRQ(ierr);
308 ierr = PetscOptionsGetInt(NULL, NULL, "-fish", &simCtx->fish, NULL); CHKERRQ(ierr);
309 ierr = PetscOptionsGetInt(NULL, NULL, "-pizza", &simCtx->pizza, NULL); CHKERRQ(ierr);
310 ierr = PetscOptionsGetInt(NULL, NULL, "-turbine", &simCtx->turbine, NULL); CHKERRQ(ierr);
311 ierr = PetscOptionsGetInt(NULL, NULL, "-fishcyl", &simCtx->fishcyl, NULL); CHKERRQ(ierr);
312 ierr = PetscOptionsGetInt(NULL, NULL, "-eel", &simCtx->eel, NULL); CHKERRQ(ierr);
313 ierr = PetscOptionsGetInt(NULL, NULL, "-cstart", &simCtx->fish_c, NULL); CHKERRQ(ierr);
314 ierr = PetscOptionsGetInt(NULL, NULL, "-wing", &simCtx->wing, NULL); CHKERRQ(ierr);
315 ierr = PetscOptionsGetInt(NULL, NULL, "-mhv", &simCtx->MHV, NULL); CHKERRQ(ierr);
316 ierr = PetscOptionsGetInt(NULL, NULL, "-hydro", &simCtx->hydro, NULL); CHKERRQ(ierr);
317 ierr = PetscOptionsGetInt(NULL, NULL, "-lv", &simCtx->LV, NULL); CHKERRQ(ierr);
318 ierr = PetscOptionsGetInt(NULL, NULL, "-Pipe", &simCtx->Pipe, NULL); CHKERRQ(ierr);
319 ierr = PetscOptionsGetInt(NULL, NULL, "-Turbulent_Channel_z", &simCtx->channelz, NULL); CHKERRQ(ierr);
320 ierr = PetscOptionsGetReal(NULL,NULL,"-Turbulent_Channel_z_Driving_Force",&simCtx->drivingForceMagnitude,NULL);CHKERRQ(ierr);
321 ierr = PetscOptionsGetReal(NULL,NULL,"-Turbulent_Channel_z_Scaling_Factor",&simCtx->forceScalingFactor,NULL);CHKERRQ(ierr);
322 // --- Group 5
323 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 5: Solver & Numerics Parameters \n");
324 char mom_solver_type_char[PETSC_MAX_PATH_LEN];
325 PetscBool mom_solver_type_flg = PETSC_FALSE;
326 ierr = PetscOptionsGetString(NULL, NULL, "-mom_solver_type", mom_solver_type_char, sizeof(mom_solver_type_char), &mom_solver_type_flg); CHKERRQ(ierr);
327 ierr = PetscOptionsGetInt(NULL, NULL, "-mom_max_pseudo_steps", &simCtx->mom_max_pseudo_steps, NULL); CHKERRQ(ierr);
328 ierr = PetscOptionsGetReal(NULL, NULL, "-mom_atol", &simCtx->mom_atol, NULL); CHKERRQ(ierr);
329 ierr = PetscOptionsGetReal(NULL, NULL, "-mom_rtol", &simCtx->mom_rtol, NULL); CHKERRQ(ierr);
330 ierr = PetscOptionsGetReal(NULL, NULL, "-imp_stol", &simCtx->imp_stol, NULL); CHKERRQ(ierr);
331 ierr = PetscOptionsGetInt(NULL, NULL, "-central", &simCtx->central, NULL); CHKERRQ(ierr);
332
333 // Map the string input to the enum type only when explicitly provided.
334 if (mom_solver_type_flg) {
335 if(strcmp(mom_solver_type_char, "DUALTIME_PICARD_RK4") == 0) {
337 } else if (strcmp(mom_solver_type_char, "DUALTIME_NK_ARNOLDI") == 0) {
339 } else if (strcmp(mom_solver_type_char, "DUALTIME_NK_ANALYTICAL_JACOBIAN") == 0) {
341 } else if (strcmp(mom_solver_type_char, "EXPLICIT_RK") == 0) {
343 } else {
344 LOG(GLOBAL, LOG_ERROR, "Invalid value for -mom_solver_type: '%s'. Valid options are: 'DUALTIME_PICARD_RK4', 'DUALTIME_NK_ARNOLDI', 'DUALTIME_NK_ANALYTICAL_JACOBIAN', 'EXPLICIT_RK'.\n", mom_solver_type_char);
345 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Invalid value for -mom_solver_type: '%s'.", mom_solver_type_char);
346 }
347 }
348
349 // --- Multigrid Options ---
350 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_level", &simCtx->mglevels, NULL); CHKERRQ(ierr);
351 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_max_it", &simCtx->mg_MAX_IT, NULL); CHKERRQ(ierr);
352 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_idx", &simCtx->mg_idx, NULL); CHKERRQ(ierr);
353 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_pre_it", &simCtx->mg_preItr, NULL); CHKERRQ(ierr);
354 ierr = PetscOptionsGetInt(NULL, NULL, "-mg_post_it", &simCtx->mg_poItr, NULL); CHKERRQ(ierr);
355
356 // --- Other Solver Options ---
357 ierr = PetscOptionsGetInt(NULL, NULL, "-poisson", &simCtx->poisson, NULL); CHKERRQ(ierr);
358 ierr = PetscOptionsGetReal(NULL, NULL, "-poisson_tol", &simCtx->poisson_tol, NULL); CHKERRQ(ierr);
359 ierr = PetscOptionsGetInt(NULL, NULL, "-str", &simCtx->STRONG_COUPLING, NULL); CHKERRQ(ierr);
360 ierr = PetscOptionsGetReal(NULL, NULL, "-ren", &simCtx->ren, NULL); CHKERRQ(ierr);
361 ierr = PetscOptionsGetReal(NULL, NULL, "-pseudo_cfl", &simCtx->pseudo_cfl, NULL); CHKERRQ(ierr);
362 ierr = PetscOptionsGetReal(NULL, NULL, "-max_pseudo_cfl", &simCtx->max_pseudo_cfl, NULL); CHKERRQ(ierr);
363 ierr = PetscOptionsGetReal(NULL, NULL, "-min_pseudo_cfl", &simCtx->min_pseudo_cfl, NULL); CHKERRQ(ierr);
364 ierr = PetscOptionsGetReal(NULL, NULL, "-pseudo_cfl_reduction_factor", &simCtx->pseudo_cfl_reduction_factor, NULL); CHKERRQ(ierr);
365 ierr = PetscOptionsGetReal(NULL, NULL, "-pseudo_cfl_growth_factor", &simCtx->pseudo_cfl_growth_factor, NULL); CHKERRQ(ierr);
366 ierr = PetscOptionsGetReal(NULL,NULL, "-mom_dt_rk4_residual_norm_noise_allowance_factor",&simCtx->mom_dt_rk4_residual_norm_noise_allowance_factor,NULL);CHKERRQ(ierr);
367 ierr = PetscOptionsHasName(NULL, NULL, "-ps_ksp_pic_monitor_true_residual", &simCtx->ps_ksp_pic_monitor_true_residual); CHKERRQ(ierr);
368 ierr = PetscOptionsGetInt(NULL, NULL, "-finit", &simCtx->FieldInitialization, NULL); CHKERRQ(ierr);
369 ierr = PetscOptionsGetReal(NULL, NULL, "-ucont_x", &simCtx->InitialConstantContra.x, NULL); CHKERRQ(ierr);
370 ierr = PetscOptionsGetReal(NULL, NULL, "-ucont_y", &simCtx->InitialConstantContra.y, NULL); CHKERRQ(ierr);
371 ierr = PetscOptionsGetReal(NULL, NULL, "-ucont_z", &simCtx->InitialConstantContra.z, NULL); CHKERRQ(ierr);
372 // NOTE: cdisx,cdisy,cdisz haven't been parsed, add if necessary.
373
374 // --- Group 6
375 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 6: Physical & Geometric Parameters \n");
376 ierr = PetscOptionsGetReal(NULL,NULL,"-schmidt_number",&simCtx->schmidt_number,NULL);CHKERRQ(ierr);
377 ierr = PetscOptionsGetReal(NULL,NULL,"-turb_schmidt_number",&simCtx->Turbulent_schmidt_number,NULL);CHKERRQ(ierr);
378 ierr = PetscOptionsGetInt(NULL, NULL, "-no_of_bodies", &simCtx->NumberOfBodies, NULL); CHKERRQ(ierr);
379 ierr = PetscOptionsGetReal(NULL,NULL,"-wall_roughness",&simCtx->wall_roughness_height,NULL);CHKERRQ(ierr);
380 // NOTE: angle is not parsed in the original code, it set programmatically. We will follow that.
381 // NOTE: max_angle is calculated based on other flags (like MHV) in the legacy code.
382 // We will defer that logic to a later setup stage and not parse them directly.
383 // The Scaling Information is calculated here
384 ierr = ParseScalingInformation(simCtx); CHKERRQ(ierr);
385
386 // --- Group 7
387 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 7: Grid, Domain, and Boundary Condition Settings \n");
388 ierr = PetscOptionsGetInt(NULL, NULL, "-nblk", &simCtx->block_number, NULL); CHKERRQ(ierr); // This is also a modern option
389 ierr = PetscOptionsGetInt(NULL, NULL, "-inlet", &simCtx->inletprofile, NULL); CHKERRQ(ierr);
390 ierr = PetscOptionsGetInt(NULL, NULL, "-Ogrid", &simCtx->Ogrid, NULL); CHKERRQ(ierr);
391 // NOTE: channelz was not parsed, likely set programmatically. We will omit its parsing call.
392 ierr = PetscOptionsGetInt(NULL, NULL, "-grid1d", &simCtx->grid1d, NULL); CHKERRQ(ierr);
393 ierr = PetscOptionsGetBool(NULL, NULL, "-grid", &simCtx->generate_grid, NULL); CHKERRQ(ierr);
394 ierr = PetscOptionsGetString(NULL, NULL, "-grid_file", simCtx->grid_file, PETSC_MAX_PATH_LEN, NULL); CHKERRQ(ierr);
395 ierr = PetscOptionsGetInt(NULL, NULL, "-da_processors_x", &simCtx->da_procs_x, NULL); CHKERRQ(ierr);
396 ierr = PetscOptionsGetInt(NULL, NULL, "-da_processors_y", &simCtx->da_procs_y, NULL); CHKERRQ(ierr);
397 ierr = PetscOptionsGetInt(NULL, NULL, "-da_processors_z", &simCtx->da_procs_z, NULL); CHKERRQ(ierr);
398 ierr = PetscOptionsGetInt(NULL, NULL, "-i_periodic", &simCtx->i_periodic, NULL); CHKERRQ(ierr);
399 ierr = PetscOptionsGetInt(NULL, NULL, "-j_periodic", &simCtx->j_periodic, NULL); CHKERRQ(ierr);
400 ierr = PetscOptionsGetInt(NULL, NULL, "-k_periodic", &simCtx->k_periodic, NULL); CHKERRQ(ierr);
401 ierr = PetscOptionsGetInt(NULL, NULL, "-pbc_domain", &simCtx->blkpbc, NULL); CHKERRQ(ierr);
402 // NOTE: pseudo_periodic was not parsed. We will omit its parsing call.
403 ierr = PetscOptionsGetReal(NULL, NULL, "-grid_rotation_angle", &simCtx->grid_rotation_angle, NULL); CHKERRQ(ierr);
404 ierr = PetscOptionsGetReal(NULL, NULL, "-Croty", &simCtx->Croty, NULL); CHKERRQ(ierr);
405 ierr = PetscOptionsGetReal(NULL, NULL, "-Crotz", &simCtx->Crotz, NULL); CHKERRQ(ierr);
406 PetscBool bcs_flg;
407 char file_list_str[PETSC_MAX_PATH_LEN * 10]; // Buffer for comma-separated list
408
409 ierr = PetscOptionsGetString(NULL, NULL, "-bcs_files", file_list_str, sizeof(file_list_str), &bcs_flg); CHKERRQ(ierr);
410 ierr = PetscOptionsGetReal(NULL, NULL, "-U_bc", &simCtx->U_bc, NULL); CHKERRQ(ierr);
411
412 if (bcs_flg) {
413 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Found -bcs_files option, overriding default.\n");
414
415 // A. Clean up the default memory we allocated in Phase 1.
416 ierr = PetscFree(simCtx->bcs_files[0]); CHKERRQ(ierr);
417 ierr = PetscFree(simCtx->bcs_files); CHKERRQ(ierr);
418 simCtx->num_bcs_files = 0;
419 simCtx->bcs_files = NULL;
420
421 // B. Parse the user-provided comma-separated list.
422 char *token;
423 char *str_copy;
424 ierr = PetscStrallocpy(file_list_str, &str_copy); CHKERRQ(ierr);
425
426 // First pass: count the number of files.
427 token = strtok(str_copy, ",");
428 while (token) {
429 simCtx->num_bcs_files++;
430 token = strtok(NULL, ",");
431 }
432 ierr = PetscFree(str_copy); CHKERRQ(ierr);
433
434 // Second pass: allocate memory and store the filenames.
435 ierr = PetscMalloc1(simCtx->num_bcs_files, &simCtx->bcs_files); CHKERRQ(ierr);
436 ierr = PetscStrallocpy(file_list_str, &str_copy); CHKERRQ(ierr);
437 token = strtok(str_copy, ",");
438 for (PetscInt i = 0; i < simCtx->num_bcs_files; i++) {
439 ierr = PetscStrallocpy(token, &simCtx->bcs_files[i]); CHKERRQ(ierr);
440 token = strtok(NULL, ",");
441 }
442 ierr = PetscFree(str_copy); CHKERRQ(ierr);
443 }
444
445
446 // --- Group 8
447 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 8: Turbulence Modeling (LES/RANS) \n");
448 PetscInt temp_les_model;
449 ierr = PetscOptionsGetInt(NULL, NULL, "-les", &temp_les_model, NULL); CHKERRQ(ierr);
450 simCtx->les = (LESModelType)temp_les_model;
451 ierr = PetscOptionsGetInt(NULL, NULL, "-rans", &simCtx->rans, NULL); CHKERRQ(ierr);
452 ierr = PetscOptionsGetInt(NULL, NULL, "-wallfunction", &simCtx->wallfunction, NULL); CHKERRQ(ierr);
453 ierr = PetscOptionsGetInt(NULL, NULL, "-mixed", &simCtx->mixed, NULL); CHKERRQ(ierr);
454 ierr = PetscOptionsGetInt(NULL, NULL, "-clark", &simCtx->clark, NULL); CHKERRQ(ierr);
455 ierr = PetscOptionsGetInt(NULL, NULL, "-dynamic_freq", &simCtx->dynamic_freq, NULL); CHKERRQ(ierr);
456 ierr = PetscOptionsGetReal(NULL, NULL, "-max_cs", &simCtx->max_cs, NULL); CHKERRQ(ierr);
457 ierr = PetscOptionsGetReal(NULL, NULL, "-const_cs", &simCtx->Const_CS, NULL); CHKERRQ(ierr);
458 ierr = PetscOptionsGetInt(NULL, NULL, "-testfilter_ik", &simCtx->testfilter_ik, NULL); CHKERRQ(ierr);
459 ierr = PetscOptionsGetInt(NULL, NULL, "-testfilter_1d", &simCtx->testfilter_1d, NULL); CHKERRQ(ierr);
460 ierr = PetscOptionsGetInt(NULL, NULL, "-i_homo_filter", &simCtx->i_homo_filter, NULL); CHKERRQ(ierr);
461 ierr = PetscOptionsGetInt(NULL, NULL, "-j_homo_filter", &simCtx->j_homo_filter, NULL); CHKERRQ(ierr);
462 ierr = PetscOptionsGetInt(NULL, NULL, "-k_homo_filter", &simCtx->k_homo_filter, NULL); CHKERRQ(ierr);
463 ierr = PetscOptionsGetBool(NULL, NULL, "-averaging", &simCtx->averaging, NULL); CHKERRQ(ierr);
464
465 // --- Group 9
466 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 9: Particle / DMSwarm Data & Settings \n");
467 ierr = PetscOptionsGetInt(NULL, NULL, "-numParticles", &simCtx->np, NULL); CHKERRQ(ierr);
468 ierr = PetscOptionsGetBool(NULL, NULL, "-read_fields", &simCtx->readFields, NULL); CHKERRQ(ierr);
469 PetscInt temp_pinit = (PetscInt)PARTICLE_INIT_SURFACE_RANDOM;
470 ierr = PetscOptionsGetInt(NULL, NULL, "-pinit", &temp_pinit, NULL); CHKERRQ(ierr);
472 ierr = PetscOptionsGetReal(NULL, NULL, "-psrc_x", &simCtx->psrc_x, NULL); CHKERRQ(ierr);
473 ierr = PetscOptionsGetReal(NULL, NULL, "-psrc_y", &simCtx->psrc_y, NULL); CHKERRQ(ierr);
474 ierr = PetscOptionsGetReal(NULL, NULL, "-psrc_z", &simCtx->psrc_z, NULL); CHKERRQ(ierr);
475 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Particle initialization mode: %s. Point source: (%.6f, %.6f, %.6f)\n",
477 simCtx->psrc_x, simCtx->psrc_y, simCtx->psrc_z);
478 ierr = PetscOptionsGetString(NULL,NULL,"-particle_restart_mode",simCtx->particleRestartMode,sizeof(simCtx->particleRestartMode),NULL); CHKERRQ(ierr);
479 // Validation for Particle Restart Mode
480 if (strcmp(simCtx->particleRestartMode, "load") != 0 && strcmp(simCtx->particleRestartMode, "init") != 0) {
481 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Invalid value for -particle_restart_mode. Must be 'load' or 'init'. You provided '%s'.", simCtx->particleRestartMode);
482 }
483 ierr = InitializeBrownianRNG(simCtx); CHKERRQ(ierr);
484 // --- Group 10
485 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 10: Immersed Boundary & FSI Data Object Pointers \n");
486 ierr = PetscOptionsGetBool(NULL, NULL, "-rs_fsi", &simCtx->rstart_fsi, NULL); CHKERRQ(ierr);
487 ierr = PetscOptionsGetInt(NULL, NULL, "-duplicate", &simCtx->duplicate, NULL); CHKERRQ(ierr);
488
489 // --- Group 11
490 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 11: Top-Level Managers & Custom Configuration \n");
491 ierr = PetscOptionsGetInt(NULL, NULL, "-logfreq", &simCtx->LoggingFrequency, NULL); CHKERRQ(ierr);
492
493 if (simCtx->num_bcs_files != simCtx->block_number) {
494 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "Number of BC files (%d) does not match number of blocks (%d). Use -bcs_files \"file1.dat,file2.dat,...\".", simCtx->num_bcs_files, simCtx->block_number);
495 }
496
497 // --- Group 12
498 LOG_ALLOW(GLOBAL,LOG_DEBUG, "Parsing Group 12: Post-Processing Information.\n");
499 // This logic determines the Post Processing configuration and STORES it in simCtx for later reference and cleanup.
500 ierr = PetscOptionsGetString(NULL,NULL,"-postprocessing_config_file",simCtx->PostprocessingControlFile,PETSC_MAX_PATH_LEN,NULL); CHKERRQ(ierr);
501 /* Parse post settings for both solver and post-processor binaries using the single pre-allocated pps object. */
502 ierr = ParsePostProcessingSettings(simCtx);
503
504 // === 5. Dependent Parameter Calculations ================================
505 // Some parameters depend on others, so we calculate them here.
506 simCtx->StartTime = (PetscReal)simCtx->StartStep*simCtx->dt;
507 simCtx->ti = simCtx->StartTime;
508 simCtx->step = simCtx->StartStep;
509
510 // === 5. Log Summary and Finalize Setup ==================================
511 LOG_ALLOW(GLOBAL, LOG_DEBUG, "-- Console Output Functions [Total : %d] : --\n", simCtx->nAllowed);
512 for (PetscInt i = 0; i < simCtx->nAllowed; ++i) {
513 LOG_ALLOW(GLOBAL, LOG_DEBUG, " [%2d] «%s»\n", i, simCtx->allowedFuncs[i]);
514 }
515
516 LOG_ALLOW(GLOBAL, LOG_INFO, "Configuration complete. Key parameters:\n");
517 LOG_ALLOW(GLOBAL, LOG_INFO, " - Run mode: %s\n", simCtx->OnlySetup ? "SETUP ONLY" : "Full Simulation");
518 LOG_ALLOW(GLOBAL, LOG_INFO, " - Time steps: %d (from %d to %d)\n", simCtx->StepsToRun, simCtx->StartStep, simCtx->StartStep + simCtx->StepsToRun);
519 LOG_ALLOW(GLOBAL, LOG_INFO, " - Time step size (dt): %g\n", simCtx->dt);
520 LOG_ALLOW(GLOBAL, LOG_INFO, " - Immersed Boundary: %s\n", simCtx->immersed ? "ENABLED" : "DISABLED");
521 LOG_ALLOW(GLOBAL, LOG_INFO, " - Particles: %d\n", simCtx->np);
522 if (simCtx->StartStep > 0 && simCtx->np > 0) {
523 LOG_ALLOW(GLOBAL, LOG_INFO, " - Particle Restart Mode: %s\n", simCtx->particleRestartMode);
524 }
525
526 // --- Initialize PETSc's internal performance logging stage ---
527 ierr = PetscLogDefaultBegin(); CHKERRQ(ierr); // REDUNDANT but safe.
528
529 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Finished CreateSimulationContext successfully on rank %d.\n", simCtx->rank);
530
532 PetscFunctionReturn(0);
533}
PetscErrorCode ParsePostProcessingSettings(SimCtx *simCtx)
Initializes post-processing settings from a config file and command-line overrides.
Definition io.c:2523
PetscErrorCode ParseScalingInformation(SimCtx *simCtx)
Parses physical scaling parameters from command-line options.
Definition io.c:2677
void set_allowed_functions(const char **functionList, int count)
Sets the global list of function names that are allowed to log.
Definition logging.c:122
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
----— DEBUG ---------------------------------------— #define LOG_ALLOW(scope, level,...
Definition logging.h:267
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:45
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:46
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:200
PetscErrorCode print_log_level(void)
Prints the current logging level to the console.
Definition logging.c:82
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:746
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:84
PetscErrorCode LoadAllowedFunctionsFromFile(const char filename[], char ***funcsOut, PetscInt *nOut)
Load function names from a text file.
Definition logging.c:566
PetscErrorCode ProfilingInitialize(SimCtx *simCtx)
Initializes the custom profiling system using configuration from SimCtx.
Definition logging.c:1015
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:28
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:31
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:32
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:737
const char * ParticleInitializationToString(ParticleInitializationType ParticleInitialization)
Helper function to convert ParticleInitialization to a string representation.
Definition logging.c:675
PetscErrorCode InitializeBrownianRNG(SimCtx *simCtx)
Initializes a single master RNG for time-stepping physics (Brownian motion).
Definition setup.c:2745
LESModelType
Enumerates the available LES turbulence model selections (parsed from the -les option; e.g. NO_LES_MODEL).
Definition variables.h:447
@ NO_LES_MODEL
Definition variables.h:448
PetscInt MHV
Definition variables.h:624
PetscInt turbine
Definition variables.h:624
PetscInt fishcyl
Definition variables.h:624
PetscInt clark
Definition variables.h:675
PetscInt movefsi
Definition variables.h:618
PetscInt moveframe
Definition variables.h:619
PetscInt TwoD
Definition variables.h:619
PetscInt pseudo_periodic
Definition variables.h:655
PetscInt fish_c
Definition variables.h:624
PetscInt dgf_z
Definition variables.h:620
PetscReal poisson_tol
Definition variables.h:633
PetscReal schmidt_number
Definition variables.h:651
PetscMPIInt rank
Definition variables.h:592
PetscInt fish
Definition variables.h:624
PetscInt LV
Definition variables.h:624
PetscReal angle
Definition variables.h:646
PetscReal Turbulent_schmidt_number
Definition variables.h:651
PetscInt thin
Definition variables.h:619
PetscInt grid1d
Definition variables.h:654
PetscInt block_number
Definition variables.h:654
PetscReal mom_rtol
Definition variables.h:630
PetscInt da_procs_z
Definition variables.h:660
PetscInt blkpbc
Definition variables.h:655
PetscInt sediment
Definition variables.h:618
PetscReal targetVolumetricFlux
Definition variables.h:666
PetscInt channelz
Definition variables.h:625
char euler_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:611
PetscReal forceScalingFactor
Definition variables.h:665
PetscReal pseudo_cfl_reduction_factor
Definition variables.h:637
PetscInt rans
Definition variables.h:674
ParticleInitializationType
Enumerator to identify the particle initialization strategy.
Definition variables.h:465
@ PARTICLE_INIT_SURFACE_RANDOM
Random placement on the inlet face.
Definition variables.h:466
PetscReal StartTime
Definition variables.h:602
PetscInt dgf_az
Definition variables.h:620
PetscReal FluxOutSum
Definition variables.h:663
PetscReal CMy_c
Definition variables.h:647
char ** bcs_files
Definition variables.h:662
PetscReal max_angle
Definition variables.h:646
PetscReal min_pseudo_cfl
Definition variables.h:638
PetscInt particlesLostLastStep
Definition variables.h:687
PetscInt duplicate
Definition variables.h:700
PetscInt tiout
Definition variables.h:601
char allowedFile[PETSC_MAX_PATH_LEN]
Definition variables.h:704
PetscInt da_procs_y
Definition variables.h:660
PetscInt testfilter_1d
Definition variables.h:677
PetscReal psrc_x
Definition variables.h:648
PetscInt FieldInitialization
Definition variables.h:641
PetscReal ren
Definition variables.h:636
PetscReal Crotz
Definition variables.h:658
PetscInt mixed
Definition variables.h:675
IBMVNodes * ibmv
Definition variables.h:697
char criticalFuncsFile[PETSC_MAX_PATH_LEN]
Definition variables.h:713
char output_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:610
PetscReal dt
Definition variables.h:603
PetscInt occupiedCellCount
Definition variables.h:690
PetscInt StepsToRun
Definition variables.h:600
PetscInt k_periodic
Definition variables.h:655
PetscInt inletprofile
Definition variables.h:654
PetscReal cdisy
Definition variables.h:636
PetscReal mom_atol
Definition variables.h:630
char ** criticalFuncs
Definition variables.h:715
PetscBool rstart_fsi
Definition variables.h:699
PetscInt np
Definition variables.h:681
PetscBool averaging
Definition variables.h:678
PetscReal C_IEM
Definition variables.h:693
PetscInt ccc
Definition variables.h:670
PetscReal ratio
Definition variables.h:671
PetscInt mg_idx
Definition variables.h:631
PetscInt StartStep
Definition variables.h:599
PetscInt mg_MAX_IT
Definition variables.h:631
PetscBool OnlySetup
Definition variables.h:604
PetscInt rotatefsi
Definition variables.h:618
@ MOMENTUM_SOLVER_DUALTIME_NK_ARNOLDI
Definition variables.h:460
@ MOMENTUM_SOLVER_EXPLICIT_RK
Definition variables.h:458
@ MOMENTUM_SOLVER_DUALTIME_PICARD_RK4
Definition variables.h:459
@ MOMENTUM_SOLVER_DUALTIME_NK_ANALYTIC_JACOBIAN
Definition variables.h:461
PetscReal cdisz
Definition variables.h:636
PetscScalar x
Definition variables.h:101
PetscInt dgf_x
Definition variables.h:620
char * current_io_directory
Definition variables.h:615
PetscInt pizza
Definition variables.h:624
PetscReal MaxDiv
Definition variables.h:710
char grid_file[PETSC_MAX_PATH_LEN]
Definition variables.h:659
PetscReal max_cs
Definition variables.h:676
PetscInt invicid
Definition variables.h:619
char ** allowedFuncs
Definition variables.h:706
PetscReal psrc_z
Point source location for PARTICLE_INIT_POINT_SOURCE.
Definition variables.h:648
PetscInt mg_poItr
Definition variables.h:631
PetscInt STRONG_COUPLING
Definition variables.h:634
PetscReal max_pseudo_cfl
Definition variables.h:638
PetscInt MaxDivx
Definition variables.h:711
PetscInt poisson
Definition variables.h:632
PetscInt k_homo_filter
Definition variables.h:677
PetscInt MaxDivy
Definition variables.h:711
PetscInt NumberOfBodies
Definition variables.h:645
char particleRestartMode[16]
Definition variables.h:686
PetscInt Ogrid
Definition variables.h:654
char particle_subdir[PETSC_MAX_PATH_LEN]
Definition variables.h:612
PetscInt MaxDivz
Definition variables.h:711
BoundingBox * bboxlist
Definition variables.h:684
PetscInt j_homo_filter
Definition variables.h:677
PetscInt eel
Definition variables.h:624
char log_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:613
PetscInt MaxDivFlatArg
Definition variables.h:711
PetscReal FluxInSum
Definition variables.h:663
PetscReal CMz_c
Definition variables.h:647
PetscBool generate_grid
Definition variables.h:656
char eulerianSource[PETSC_MAX_PATH_LEN]
Definition variables.h:608
PetscReal imp_stol
Definition variables.h:630
PetscInt nAllowed
Definition variables.h:707
PetscReal wall_roughness_height
Definition variables.h:650
ParticleInitializationType ParticleInitialization
Definition variables.h:685
PetscScalar z
Definition variables.h:101
PetscInt OutputFreq
Definition variables.h:606
PetscReal Const_CS
Definition variables.h:676
PetscInt i_homo_filter
Definition variables.h:677
PetscInt wallfunction
Definition variables.h:675
PetscInt rheology
Definition variables.h:618
PetscReal Flux_in
Definition variables.h:646
PetscReal cdisx
Definition variables.h:636
PetscInt dgf_ax
Definition variables.h:620
PetscInt mglevels
Definition variables.h:631
PetscInt num_bcs_files
Definition variables.h:661
DM dm_swarm
Definition variables.h:683
PetscBool useCfg
Definition variables.h:705
PetscReal psrc_y
Definition variables.h:648
PetscBool readFields
Definition variables.h:682
PetscInt central
Definition variables.h:634
PetscBool useCriticalFuncsCfg
Definition variables.h:714
PetscReal Fluxsum
Definition variables.h:663
PetscReal pseudo_cfl_growth_factor
Definition variables.h:637
PetscReal Croty
Definition variables.h:658
PetscReal mom_dt_rk4_residual_norm_noise_allowance_factor
Definition variables.h:639
PetscInt particlesMigratedLastStep
Definition variables.h:689
PetscReal grid_rotation_angle
Definition variables.h:657
PetscInt dynamic_freq
Definition variables.h:675
char AnalyticalSolutionType[PETSC_MAX_PATH_LEN]
Definition variables.h:621
PetscInt da_procs_x
Definition variables.h:660
PetscReal U_bc
Definition variables.h:669
Cmpnts InitialConstantContra
Definition variables.h:642
PetscInt i_periodic
Definition variables.h:655
PetscInt step
Definition variables.h:597
PetscReal AreaOutSum
Definition variables.h:668
PetscInt dgf_ay
Definition variables.h:620
PetscInt mom_max_pseudo_steps
Definition variables.h:629
PetscRandom BrownianMotionRNG
Definition variables.h:692
PetscInt testfilter_ik
Definition variables.h:677
PetscInt hydro
Definition variables.h:624
PostProcessParams * pps
Definition variables.h:720
PetscInt migrationPassesLastStep
Definition variables.h:688
PetscScalar y
Definition variables.h:101
PetscMPIInt size
Definition variables.h:593
char _io_context_buffer[PETSC_MAX_PATH_LEN]
Definition variables.h:614
PetscBool ps_ksp_pic_monitor_true_residual
Definition variables.h:640
PetscInt les
Definition variables.h:674
PetscInt mg_preItr
Definition variables.h:631
PetscViewer logviewer
Definition variables.h:605
PetscInt cop
Definition variables.h:624
PetscReal ti
Definition variables.h:598
PetscInt Pipe
Definition variables.h:624
PetscInt rotateframe
Definition variables.h:619
IBMNodes * ibm
Definition variables.h:696
PetscReal AreaInSum
Definition variables.h:668
MomentumSolverType mom_solver_type
Definition variables.h:628
PetscInt nCriticalFuncs
Definition variables.h:716
PetscReal summationRHS
Definition variables.h:709
PetscInt immersed
Definition variables.h:618
char PostprocessingControlFile[PETSC_MAX_PATH_LEN]
Definition variables.h:719
char restart_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:609
PetscInt blank
Definition variables.h:619
PetscInt dgf_y
Definition variables.h:620
PetscReal pseudo_cfl
Definition variables.h:636
PetscInt LoggingFrequency
Definition variables.h:708
PetscReal CMx_c
Definition variables.h:647
PetscReal drivingForceMagnitude
Definition variables.h:665
PetscReal particleLoadImbalance
Definition variables.h:691
PetscInt j_periodic
Definition variables.h:655
PetscInt wing
Definition variables.h:624
FSInfo * fsi
Definition variables.h:698
The master context for the entire simulation.
Definition variables.h:589
Here is the call graph for this function:
Here is the caller graph for this function:

◆ PetscMkdirRecursive()

static PetscErrorCode PetscMkdirRecursive ( const char *  path)
static

Creates a directory path recursively, similar to mkdir -p.

This function is designed to be called by Rank 0 only. It takes a full path, parses it, and creates each directory component in sequence.

Parameters
[in]pathThe full directory path to create.
Returns
PetscErrorCode

Definition at line 547 of file setup.c.

548{
549 PetscErrorCode ierr;
550 char tmp_path[PETSC_MAX_PATH_LEN];
551 char *p = NULL;
552 size_t len;
553 PetscBool exists;
554
555 PetscFunctionBeginUser;
556
557 // Create a mutable copy of the path
558 len = strlen(path);
559 if (len >= sizeof(tmp_path)) {
560 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Path is too long to process: %s", path);
561 }
562 strcpy(tmp_path, path);
563
564 // If the path ends with a separator, remove it
565 if (tmp_path[len - 1] == '/') {
566 tmp_path[len - 1] = 0;
567 }
568
569 // Iterate through the path, creating each directory level
570 for (p = tmp_path + 1; *p; p++) {
571 if (*p == '/') {
572 *p = 0; // Temporarily terminate the string
573
574 // Check if this directory level exists
575 ierr = PetscTestDirectory(tmp_path, 'r', &exists); CHKERRQ(ierr);
576 if (!exists) {
577 ierr = PetscMkdir(tmp_path); CHKERRQ(ierr);
578 }
579
580 *p = '/'; // Restore the separator
581 }
582 }
583
584 // Create the final, full directory path
585 ierr = PetscTestDirectory(tmp_path, 'r', &exists); CHKERRQ(ierr);
586 if (!exists) {
587 ierr = PetscMkdir(tmp_path); CHKERRQ(ierr);
588 }
589
590 PetscFunctionReturn(0);
591}
Here is the caller graph for this function:

◆ SetupSimulationEnvironment()

PetscErrorCode SetupSimulationEnvironment ( SimCtx simCtx)

Verifies and prepares the complete I/O environment for a simulation run.

This function performs a comprehensive series of checks and setup actions to ensure a valid and clean environment. It is parallel-safe; all filesystem operations and checks are performed by Rank 0, with collective error handling.

The function's responsibilities include:

  1. Checking Mandatory Inputs: Verifies existence of grid and BCs files.
  2. Checking Optional Inputs: Warns if optional config files (whitelist, profile) are missing.
  3. Validating Run Mode Paths: Ensures restart_dir or post-processing source directories exist when needed.
  4. Preparing Log Directory: Creates the log directory and cleans it of previous logs.
  5. Preparing Output Directories: Creates the main output directory and its required subdirectories.
Parameters
[in]simCtxThe fully configured SimulationContext object.
Returns
PetscErrorCode Returns 0 on success, or a non-zero error code if a mandatory file/directory is missing or a critical operation fails.

Definition at line 614 of file setup.c.

615{
616 PetscErrorCode ierr;
617 PetscMPIInt rank;
618 PetscBool exists;
619
620 PetscFunctionBeginUser;
621 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
622
623 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Setting up simulation environment ---\n");
624
625 /* =====================================================================
626 * Phase 1: Check for all required and optional INPUT files.
627 * ===================================================================== */
628 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Phase 1: Verifying input files...\n");
629
630 // --- Mandatory Inputs ---
631 if (!simCtx->generate_grid) {
632 ierr = VerifyPathExistence(simCtx->grid_file, PETSC_FALSE, PETSC_FALSE, "Grid file", &exists); CHKERRQ(ierr);
633 }
634 for (PetscInt i = 0; i < simCtx->num_bcs_files; i++) {
635 char desc[128];
636 ierr = PetscSNPrintf(desc, sizeof(desc), "BCS file #%d", i + 1); CHKERRQ(ierr);
637 ierr = VerifyPathExistence(simCtx->bcs_files[i], PETSC_FALSE, PETSC_FALSE, desc, &exists); CHKERRQ(ierr);
638 }
639
640 // --- Optional Inputs (these produce warnings if missing) ---
641 if (simCtx->useCfg) {
642 ierr = VerifyPathExistence(simCtx->allowedFile, PETSC_FALSE, PETSC_TRUE, "Whitelist config file", &exists); CHKERRQ(ierr);
643 }
644 if (simCtx->useCriticalFuncsCfg) {
645 ierr = VerifyPathExistence(simCtx->criticalFuncsFile, PETSC_FALSE, PETSC_TRUE, "Profiling config file", &exists); CHKERRQ(ierr);
646 }
647 if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
648 ierr = VerifyPathExistence(simCtx->PostprocessingControlFile, PETSC_FALSE, PETSC_TRUE, "Post-processing control file", &exists); CHKERRQ(ierr);
649 }
650
651
652 /* =====================================================================
653 * Phase 2: Validate directories specific to the execution mode.
654 * ===================================================================== */
655 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Phase 2: Verifying execution mode directories...\n");
656 // The data source directory must exist if we intend to load any data from it.
657 // This is true if:
658 // 1. We are restarting from a previous time step (StartStep > 0), which implies
659 // loading Eulerian fields and/or particle fields.
660 // 2. We are starting from t=0 but are explicitly told to load the initial
661 // Eulerian fields from a file (eulerianSource == "load").
662 if (simCtx->StartStep > 0 || strcmp(simCtx->eulerianSource,"load")== 0){ // If this is a restart run
663 ierr = VerifyPathExistence(simCtx->restart_dir, PETSC_TRUE, PETSC_FALSE, "Restart source directory", &exists); CHKERRQ(ierr);
664 }
665 if (simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) {
666 ierr = VerifyPathExistence(simCtx->pps->source_dir, PETSC_TRUE, PETSC_FALSE, "Post-processing source directory", &exists); CHKERRQ(ierr);
667 }
668
669 /* =====================================================================
670 * Phase 3: Create and prepare all OUTPUT directories.
671 * ===================================================================== */
672 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Phase 3: Preparing output directories...\n");
673
674 if (rank == 0){
675 if(simCtx->exec_mode == EXEC_MODE_SOLVER){
676 // --- Prepare Log Directory ---
677 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating/cleaning log directory: %s\n", simCtx->log_dir);
678 ierr = PetscRMTree(simCtx->log_dir); // Wipes the directory and its contents
679 if (ierr) { /* Ignore file-not-found error, but fail on others */
680 PetscError(PETSC_COMM_SELF, __LINE__, __FUNCT__, __FILE__, ierr, PETSC_ERROR_INITIAL, "Could not remove existing log directory '%s'. Check permissions.", simCtx->log_dir);
681 }
682 ierr = PetscMkdir(simCtx->log_dir); CHKERRQ(ierr);
683
684 // --- Prepare Output Directories ---
685 char path_buffer[PETSC_MAX_PATH_LEN];
686
687 // 1. Check/Create the main output directory
688 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Verifying main output directory: %s\n", simCtx->output_dir);
689 ierr = PetscTestDirectory(simCtx->output_dir, 'r', &exists); CHKERRQ(ierr);
690 if (!exists) {
691 LOG_ALLOW(GLOBAL, LOG_INFO, "Output directory not found. Creating: %s\n", simCtx->output_dir);
692 ierr = PetscMkdir(simCtx->output_dir); CHKERRQ(ierr);
693 }
694
695 // 2. Check/Create the Eulerian subdirectory
696 ierr = PetscSNPrintf(path_buffer, sizeof(path_buffer), "%s/%s", simCtx->output_dir, simCtx->euler_subdir); CHKERRQ(ierr);
697 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Verifying Eulerian subdirectory: %s\n", path_buffer);
698 ierr = PetscTestDirectory(path_buffer, 'r', &exists); CHKERRQ(ierr);
699 if (!exists) {
700 LOG_ALLOW(GLOBAL, LOG_INFO, "Eulerian subdirectory not found. Creating: %s\n", path_buffer);
701 ierr = PetscMkdir(path_buffer); CHKERRQ(ierr);
702 }
703
704 // 3. Check/Create the Particle subdirectory if needed
705 if (simCtx->np > 0) {
706 ierr = PetscSNPrintf(path_buffer, sizeof(path_buffer), "%s/%s", simCtx->output_dir, simCtx->particle_subdir); CHKERRQ(ierr);
707 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Verifying Particle subdirectory: %s\n", path_buffer);
708 ierr = PetscTestDirectory(path_buffer, 'r', &exists); CHKERRQ(ierr);
709 if (!exists) {
710 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle subdirectory not found. Creating: %s\n", path_buffer);
711 ierr = PetscMkdir(path_buffer); CHKERRQ(ierr);
712 }
713 }
714 } else if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
715 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Preparing post-processing output directories ...\n");
716
717 PostProcessParams *pps = simCtx->pps;
718 char path_buffer[PETSC_MAX_PATH_LEN];
719
720 const char *last_slash_euler = strrchr(pps->output_prefix, '/');
721 if(last_slash_euler){
722 size_t dir_len = last_slash_euler - pps->output_prefix;
723 if(dir_len > 0){
724 if(dir_len >= sizeof(path_buffer)) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Post-processing output prefix path is too long.");
725 strncpy(path_buffer, pps->output_prefix, dir_len);
726 path_buffer[dir_len] = '\0';
727
728 ierr = PetscTestDirectory(path_buffer, 'r', &exists); CHKERRQ(ierr);
729 if (!exists){
730 LOG_ALLOW(GLOBAL, LOG_INFO, "Creating post-processing Eulerian output directory: %s\n", path_buffer);
731 ierr = PetscMkdirRecursive(path_buffer); CHKERRQ(ierr);
732 }
733 }
734 }
735
736 // Particle output directory
737 if(pps->outputParticles){
738 const char *last_slash_particle = strrchr(pps->particle_output_prefix, '/');
739 if(last_slash_particle){
740 size_t dir_len = last_slash_particle - pps->particle_output_prefix;
741 if(dir_len > 0){
742 if(dir_len > sizeof(path_buffer)) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Post-processing particle output prefix path is too long.");
743 strncpy(path_buffer, pps->particle_output_prefix, dir_len);
744 path_buffer[dir_len] = '\0';
745
746 ierr = PetscTestDirectory(path_buffer, 'r', &exists); CHKERRQ(ierr);
747
748 if (!exists){
749 LOG_ALLOW(GLOBAL, LOG_INFO, "Creating post-processing Particle output directory: %s\n", path_buffer);
750 ierr = PetscMkdirRecursive(path_buffer); CHKERRQ(ierr);
751 }
752 }
753 }
754 }
755 }
756 }
757
758 // Synchronize all processes before proceeding
759 ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRMPI(ierr);
760
761 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Environment setup complete ---\n");
762
763 PetscFunctionReturn(0);
764}
PetscErrorCode VerifyPathExistence(const char *path, PetscBool is_dir, PetscBool is_optional, const char *description, PetscBool *exists)
A parallel-safe helper to verify the existence of a generic file or directory path.
Definition io.c:807
static PetscErrorCode PetscMkdirRecursive(const char *path)
Creates a directory path recursively, similar to mkdir -p.
Definition setup.c:547
#define __FUNCT__
Definition setup.c:10
char particle_output_prefix[256]
Definition variables.h:516
char output_prefix[256]
Definition variables.h:513
char source_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:501
PetscBool outputParticles
Definition variables.h:507
@ EXEC_MODE_SOLVER
Definition variables.h:562
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:563
ExecutionMode exec_mode
Definition variables.h:607
Holds all configuration parameters for a post-processing run.
Definition variables.h:499
Here is the call graph for this function:
Here is the caller graph for this function:

◆ AllocateContextHierarchy()

static PetscErrorCode AllocateContextHierarchy ( SimCtx simCtx)
static

Allocates the memory for the UserMG and UserCtx hierarchy.

This function performs the foundational memory allocation for the solver's data structures. It reads the number of multigrid levels and grid blocks from the master SimulationContext, then allocates the arrays for MGCtx and UserCtx.

It performs three critical tasks:

  1. Allocates the MGCtx array within the UserMG struct.
  2. For each level, allocates the UserCtx array for all blocks.
  3. Sets the simCtx back-pointer in every single UserCtx, linking it to the master configuration.
Parameters
simCtx — The master SimulationContext, which contains configuration and will store the resulting allocated hierarchy in its usermg member.
Returns
PetscErrorCode

Definition at line 786 of file setup.c.

787{
788 PetscErrorCode ierr;
789 UserMG *usermg = &simCtx->usermg;
790 MGCtx *mgctx;
791 PetscInt nblk = simCtx->block_number;
792 PetscBool found;
793 PetscFunctionBeginUser;
795
796 LOG_ALLOW(GLOBAL, LOG_INFO, "Allocating context hierarchy for %d levels and %d blocks...\n", simCtx->mglevels, nblk);
797
798 // Store the number of levels in the UserMG struct itself
799 usermg->mglevels = simCtx->mglevels;
800
801 // --- 1. Allocate the array of MGCtx structs ---
802 ierr = PetscMalloc(usermg->mglevels * sizeof(MGCtx), &usermg->mgctx); CHKERRQ(ierr);
803 // Zero-initialize to ensure all pointers (especially packer) are NULL
804 ierr = PetscMemzero(usermg->mgctx, usermg->mglevels * sizeof(MGCtx)); CHKERRQ(ierr);
805 mgctx = usermg->mgctx;
806 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Allocated MGCtx array of size %d.\n", simCtx->rank, usermg->mglevels);
807
808 // --- 2. Parse semi-coarsening options (logic from MG_Initial) ---
809 // These flags determine if a dimension is coarsened in the multigrid hierarchy.
810 PetscInt *isc, *jsc, *ksc;
811 ierr = PetscMalloc3(nblk, &isc, nblk, &jsc, nblk, &ksc); CHKERRQ(ierr);
812 // Set defaults to FALSE (full coarsening)
813 for (PetscInt i = 0; i < nblk; ++i) {
814 isc[i] = 0; jsc[i] = 0; ksc[i] = 0;
815 }
816
817// Use a temporary variable for the 'count' argument to the parsing function.
818 // This protects the original 'nblk' which is needed for the loop bounds.
819 PetscInt n_opts_found = nblk;
820 ierr = PetscOptionsGetIntArray(NULL, NULL, "-mg_i_semi", isc, &n_opts_found, &found); CHKERRQ(ierr);
821
822 n_opts_found = nblk; // Reset the temp variable before the next call
823 ierr = PetscOptionsGetIntArray(NULL, NULL, "-mg_j_semi", jsc, &n_opts_found, &found); CHKERRQ(ierr);
824
825 n_opts_found = nblk; // Reset the temp variable before the next call
826 ierr = PetscOptionsGetIntArray(NULL, NULL, "-mg_k_semi", ksc, &n_opts_found, &found); CHKERRQ(ierr);
827
828 // --- 3. Loop over levels and blocks to allocate UserCtx arrays ---
829 for (PetscInt level = 0; level < simCtx->mglevels; level++) {
830
831 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Setting up MG Level %d...\n", simCtx->rank, level);
832 // Allocate the array of UserCtx structs for this level
833 ierr = PetscMalloc(nblk * sizeof(UserCtx), &mgctx[level].user); CHKERRQ(ierr);
834 // It's good practice to zero out the memory to avoid uninitialized values
835 ierr = PetscMemzero(mgctx[level].user, nblk * sizeof(UserCtx)); CHKERRQ(ierr);
836 mgctx[level].thislevel = level;
837
838 for (PetscInt bi = 0; bi < nblk; bi++) {
839 UserCtx *currentUser = &mgctx[level].user[bi];
840 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Initializing UserCtx for Level %d, Block %d.\n", simCtx->rank, level, bi);
841
842 // --- CRITICAL STEP: Set the back-pointer to the master context ---
843 currentUser->simCtx = simCtx;
844
845 // Initialize other per-context values
846 currentUser->thislevel = level;
847 currentUser->_this = bi; //
848 currentUser->mglevels = usermg->mglevels;
849
850 // Assign semi-coarsening flags
851 currentUser->isc = isc[bi];
852 currentUser->jsc = jsc[bi];
853 currentUser->ksc = ksc[bi];
854
855 // Link to finer/coarser contexts for multigrid operations
856 if (level > 0) {
857 currentUser->user_c = &mgctx[level-1].user[bi];
858 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d: -> Linked to coarser context (user_c).\n", simCtx->rank);
859 }
860 if (level < usermg->mglevels - 1) {
861 currentUser->user_f = &mgctx[level+1].user[bi];
862 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "Rank %d: -> Linked to finer context (user_f).\n", simCtx->rank);
863 }
864 }
865 }
866
867 // Log a summary of the parsed flags on each rank.
868 if (get_log_level() >= LOG_DEBUG && nblk > 0) {
869 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Final semi-coarsening configuration view:\n", simCtx->rank);
870 for (PetscInt bi = 0; bi < nblk; ++bi) {
871 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Block %d: i-semi=%d, j-semi=%d, k-semi=%d\n", simCtx->rank, bi, isc[bi], jsc[bi], ksc[bi]);
872 }
873 }
874
875 // Clean up temporary arrays
876 ierr = PetscFree3(isc, jsc, ksc); CHKERRQ(ierr);
877
878 LOG_ALLOW(GLOBAL, LOG_INFO, "Context hierarchy allocation complete.\n");
880 PetscFunctionReturn(0);
881}
LogLevel get_log_level()
Retrieves the current logging level from the environment variable LOG_LEVEL.
Definition logging.c:39
PetscInt isc
Definition variables.h:746
UserCtx * user
Definition variables.h:474
PetscInt mglevels
Definition variables.h:797
UserCtx * user_f
Definition variables.h:798
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:736
PetscInt ksc
Definition variables.h:746
UserMG usermg
Definition variables.h:703
PetscInt _this
Definition variables.h:746
PetscInt jsc
Definition variables.h:746
PetscInt thislevel
Definition variables.h:475
UserCtx * user_c
Definition variables.h:798
PetscInt thislevel
Definition variables.h:797
PetscInt mglevels
Definition variables.h:481
MGCtx * mgctx
Definition variables.h:484
Context for Multigrid operations.
Definition variables.h:473
User-defined context containing data specific to a single computational grid level.
Definition variables.h:733
User-level context for managing the entire multigrid hierarchy.
Definition variables.h:480
Here is the call graph for this function:
Here is the caller graph for this function:

◆ SetupSolverParameters()

static PetscErrorCode SetupSolverParameters ( SimCtx simCtx)
static

Definition at line 885 of file setup.c.

885 {
886
887 PetscFunctionBeginUser;
889
890 LOG_ALLOW(GLOBAL,LOG_INFO, " -- Setting up solver parameters -- .\n");
891
892 UserMG *usermg = &simCtx->usermg;
893 MGCtx *mgctx = usermg->mgctx;
894 PetscInt nblk = simCtx->block_number;
895
896 for (PetscInt level = usermg->mglevels-1; level >=0; level--) {
897 for (PetscInt bi = 0; bi < nblk; bi++) {
898 UserCtx *user = &mgctx[level].user[bi];
899 LOG_ALLOW_SYNC(LOCAL, LOG_DEBUG, "Rank %d: Setting up parameters for level %d, block %d\n", simCtx->rank, level, bi);
900
901 user->assignedA = PETSC_FALSE;
902 user->multinullspace = PETSC_FALSE;
903 }
904 }
906 PetscFunctionReturn(0);
907}
PetscBool assignedA
Definition variables.h:777
PetscBool multinullspace
Definition variables.h:774
Here is the caller graph for this function:

◆ SetupGridAndSolvers()

PetscErrorCode SetupGridAndSolvers ( SimCtx simCtx)

The main orchestrator for setting up all grid-related components.

(Implementation of the orchestrator function itself)

Definition at line 915 of file setup.c.

916{
917 PetscErrorCode ierr;
918 PetscFunctionBeginUser;
919
921
922 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Starting Grid and Solvers Setup ---\n");
923
924 // Phase 1: Allocate the UserMG and UserCtx hierarchy
925 ierr = AllocateContextHierarchy(simCtx); CHKERRQ(ierr);
926
927 ierr = DefineAllGridDimensions(simCtx); CHKERRQ(ierr);
928 ierr = InitializeAllGridDMs(simCtx); CHKERRQ(ierr);
929 ierr = AssignAllGridCoordinates(simCtx);
930 ierr = CreateAndInitializeAllVectors(simCtx); CHKERRQ(ierr);
931 ierr = SetupSolverParameters(simCtx); CHKERRQ(ierr);
932
933 // NOTE: CalculateAllGridMetrics is now called inside SetupBoundaryConditions (not here) to ensure:
934 // 1. Boundary condition configuration data (boundary_faces) is available for periodic BC corrections
935 // 2. Computed metrics are available for inlet/outlet area calculations
936 // This resolves the circular dependency between BC setup and metric calculations.
937
938 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Grid and Solvers Setup Complete ---\n");
939
941 PetscFunctionReturn(0);
942}
PetscErrorCode DefineAllGridDimensions(SimCtx *simCtx)
Orchestrates the parsing and setting of grid dimensions for all blocks.
Definition grid.c:75
PetscErrorCode InitializeAllGridDMs(SimCtx *simCtx)
Orchestrates the creation of DMDA objects for every block and multigrid level.
Definition grid.c:274
PetscErrorCode AssignAllGridCoordinates(SimCtx *simCtx)
Orchestrates the assignment of physical coordinates to all DMDA objects.
Definition grid.c:370
static PetscErrorCode SetupSolverParameters(SimCtx *simCtx)
Definition setup.c:885
PetscErrorCode CreateAndInitializeAllVectors(SimCtx *simCtx)
Creates and initializes all PETSc Vec objects for all fields.
Definition setup.c:958
static PetscErrorCode AllocateContextHierarchy(SimCtx *simCtx)
Allocates the memory for the UserMG and UserCtx hierarchy.
Definition setup.c:786
Here is the call graph for this function:
Here is the caller graph for this function:

◆ CreateAndInitializeAllVectors()

PetscErrorCode CreateAndInitializeAllVectors ( SimCtx simCtx)

Creates and initializes all PETSc Vec objects for all fields.

This function iterates through every UserCtx in the multigrid and multi-block hierarchy. For each context, it creates the comprehensive set of global and local PETSc Vecs required by the flow solver (e.g., Ucont, P, Nvert, metrics, turbulence fields, etc.). Each vector is initialized to zero.

Parameters
simCtx — The master SimCtx, containing the configured UserCtx hierarchy.
Returns
PetscErrorCode

Definition at line 958 of file setup.c.

959{
960 PetscErrorCode ierr;
961 UserMG *usermg = &simCtx->usermg;
962 MGCtx *mgctx = usermg->mgctx;
963 PetscInt nblk = simCtx->block_number;
964
965 PetscFunctionBeginUser;
966
968
969 LOG_ALLOW(GLOBAL, LOG_INFO, "Creating and initializing all simulation vectors...\n");
970
971 for (PetscInt level = usermg->mglevels-1; level >=0; level--) {
972 for (PetscInt bi = 0; bi < nblk; bi++) {
973 UserCtx *user = &mgctx[level].user[bi];
974
975 if(!user->da || !user->fda) {
976 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "DMs not properly initialized in UserCtx before vector creation.");
977 }
978
979 LOG_ALLOW_SYNC(LOCAL, LOG_DEBUG, "Rank %d: Creating vectors for level %d, block %d\n", simCtx->rank, level, bi);
980
981 // --- Group A: Primary Flow Fields (Global and Local) ---
982 // These are the core solution variables.
983 ierr = DMCreateGlobalVector(user->fda, &user->Ucont); CHKERRQ(ierr); ierr = VecSet(user->Ucont, 0.0); CHKERRQ(ierr);
984 ierr = DMCreateGlobalVector(user->fda, &user->Ucat); CHKERRQ(ierr); ierr = VecSet(user->Ucat, 0.0); CHKERRQ(ierr);
985 ierr = DMCreateGlobalVector(user->da, &user->P); CHKERRQ(ierr); ierr = VecSet(user->P, 0.0); CHKERRQ(ierr);
986 ierr = DMCreateGlobalVector(user->da, &user->Nvert); CHKERRQ(ierr); ierr = VecSet(user->Nvert, 0.0); CHKERRQ(ierr);
987
988 ierr = DMCreateLocalVector(user->fda, &user->lUcont); CHKERRQ(ierr); ierr = VecSet(user->lUcont, 0.0); CHKERRQ(ierr);
989 ierr = DMCreateLocalVector(user->fda, &user->lUcat); CHKERRQ(ierr); ierr = VecSet(user->lUcat, 0.0); CHKERRQ(ierr);
990 ierr = DMCreateLocalVector(user->da, &user->lP); CHKERRQ(ierr); ierr = VecSet(user->lP, 0.0); CHKERRQ(ierr);
991 ierr = DMCreateLocalVector(user->da, &user->lNvert); CHKERRQ(ierr); ierr = VecSet(user->lNvert, 0.0); CHKERRQ(ierr);
992
993 // -- Group A2: Derived Flow Fields (Global and Local) ---
994 ierr = VecDuplicate(user->P,&user->Diffusivity); CHKERRQ(ierr); ierr = VecSet(user->Diffusivity, 0.0); CHKERRQ(ierr);
995 ierr = VecDuplicate(user->lP,&user->lDiffusivity); CHKERRQ(ierr); ierr = VecSet(user->lDiffusivity, 0.0); CHKERRQ(ierr);
996 ierr = VecDuplicate(user->Ucat,&user->DiffusivityGradient); CHKERRQ(ierr); ierr = VecSet(user->DiffusivityGradient, 0.0); CHKERRQ(ierr);
997 ierr = VecDuplicate(user->lUcat,&user->lDiffusivityGradient); CHKERRQ(ierr); ierr = VecSet(user->lDiffusivityGradient, 0.0); CHKERRQ(ierr);
998
999 // -- Group B: Solver Work Vectors (Global and Local) ---
1000 ierr = VecDuplicate(user->P, &user->Phi); CHKERRQ(ierr); ierr = VecSet(user->Phi, 0.0); CHKERRQ(ierr);
1001 ierr = VecDuplicate(user->lP, &user->lPhi); CHKERRQ(ierr); ierr = VecSet(user->lPhi, 0.0); CHKERRQ(ierr);
1002
1003 // --- Group C: Time-Stepping & Workspace Fields (Finest Level Only) ---
1004 if (level == usermg->mglevels - 1) {
1005 ierr = VecDuplicate(user->Ucont, &user->Ucont_o); CHKERRQ(ierr); ierr = VecSet(user->Ucont_o, 0.0); CHKERRQ(ierr);
1006 ierr = VecDuplicate(user->Ucont, &user->Ucont_rm1); CHKERRQ(ierr); ierr = VecSet(user->Ucont_rm1, 0.0); CHKERRQ(ierr);
1007 ierr = VecDuplicate(user->Ucat, &user->Ucat_o); CHKERRQ(ierr); ierr = VecSet(user->Ucat_o, 0.0); CHKERRQ(ierr);
1008 ierr = VecDuplicate(user->P, &user->P_o); CHKERRQ(ierr); ierr = VecSet(user->P_o, 0.0); CHKERRQ(ierr);
1009 ierr = VecDuplicate(user->lUcont, &user->lUcont_o); CHKERRQ(ierr); ierr = VecSet(user->lUcont_o, 0.0); CHKERRQ(ierr);
1010 ierr = VecDuplicate(user->lUcont, &user->lUcont_rm1); CHKERRQ(ierr); ierr = VecSet(user->lUcont_rm1, 0.0); CHKERRQ(ierr);
1011 ierr = DMCreateLocalVector(user->da, &user->lNvert_o); CHKERRQ(ierr); ierr = VecSet(user->lNvert_o, 0.0); CHKERRQ(ierr);
1012 ierr = VecDuplicate(user->Nvert, &user->Nvert_o); CHKERRQ(ierr); ierr = VecSet(user->Nvert_o, 0.0); CHKERRQ(ierr);
1013 }
1014
1015 // --- Group D: Grid Metrics (Face-Centered) ---
1016 ierr = DMCreateGlobalVector(user->fda, &user->Csi); CHKERRQ(ierr); ierr = VecSet(user->Csi, 0.0); CHKERRQ(ierr);
1017 ierr = VecDuplicate(user->Csi, &user->Eta); CHKERRQ(ierr); ierr = VecSet(user->Eta, 0.0); CHKERRQ(ierr);
1018 ierr = VecDuplicate(user->Csi, &user->Zet); CHKERRQ(ierr); ierr = VecSet(user->Zet, 0.0); CHKERRQ(ierr);
1019 ierr = DMCreateGlobalVector(user->da, &user->Aj); CHKERRQ(ierr); ierr = VecSet(user->Aj, 0.0); CHKERRQ(ierr);
1020
1021 ierr = DMCreateLocalVector(user->fda, &user->lCsi); CHKERRQ(ierr); ierr = VecSet(user->lCsi, 0.0); CHKERRQ(ierr);
1022 ierr = VecDuplicate(user->lCsi, &user->lEta); CHKERRQ(ierr); ierr = VecSet(user->lEta, 0.0); CHKERRQ(ierr);
1023 ierr = VecDuplicate(user->lCsi, &user->lZet); CHKERRQ(ierr); ierr = VecSet(user->lZet, 0.0); CHKERRQ(ierr);
1024 ierr = DMCreateLocalVector(user->da, &user->lAj); CHKERRQ(ierr); ierr = VecSet(user->lAj, 0.0); CHKERRQ(ierr);
1025
1026
1027 // --- Group E: Grid Metrics (Face-Centered) ---
1028 // Vector metrics are duplicated from Csi (DOF=3, fda-based)
1029 ierr = VecDuplicate(user->Csi, &user->ICsi); CHKERRQ(ierr); ierr = VecSet(user->ICsi, 0.0); CHKERRQ(ierr);
1030 ierr = VecDuplicate(user->Csi, &user->IEta); CHKERRQ(ierr); ierr = VecSet(user->IEta, 0.0); CHKERRQ(ierr);
1031 ierr = VecDuplicate(user->Csi, &user->IZet); CHKERRQ(ierr); ierr = VecSet(user->IZet, 0.0); CHKERRQ(ierr);
1032 ierr = VecDuplicate(user->Csi, &user->JCsi); CHKERRQ(ierr); ierr = VecSet(user->JCsi, 0.0); CHKERRQ(ierr);
1033 ierr = VecDuplicate(user->Csi, &user->JEta); CHKERRQ(ierr); ierr = VecSet(user->JEta, 0.0); CHKERRQ(ierr);
1034 ierr = VecDuplicate(user->Csi, &user->JZet); CHKERRQ(ierr); ierr = VecSet(user->JZet, 0.0); CHKERRQ(ierr);
1035 ierr = VecDuplicate(user->Csi, &user->KCsi); CHKERRQ(ierr); ierr = VecSet(user->KCsi, 0.0); CHKERRQ(ierr);
1036 ierr = VecDuplicate(user->Csi, &user->KEta); CHKERRQ(ierr); ierr = VecSet(user->KEta, 0.0); CHKERRQ(ierr);
1037 ierr = VecDuplicate(user->Csi, &user->KZet); CHKERRQ(ierr); ierr = VecSet(user->KZet, 0.0); CHKERRQ(ierr);
1038 // Scalar metrics are duplicated from Aj (DOF=1, da-based)
1039 ierr = VecDuplicate(user->Aj, &user->IAj); CHKERRQ(ierr); ierr = VecSet(user->IAj, 0.0); CHKERRQ(ierr);
1040 ierr = VecDuplicate(user->Aj, &user->JAj); CHKERRQ(ierr); ierr = VecSet(user->JAj, 0.0); CHKERRQ(ierr);
1041 ierr = VecDuplicate(user->Aj, &user->KAj); CHKERRQ(ierr); ierr = VecSet(user->KAj, 0.0); CHKERRQ(ierr);
1042
1043 ierr = VecDuplicate(user->lCsi, &user->lICsi); CHKERRQ(ierr); ierr = VecSet(user->lICsi, 0.0); CHKERRQ(ierr);
1044 ierr = VecDuplicate(user->lCsi, &user->lIEta); CHKERRQ(ierr); ierr = VecSet(user->lIEta, 0.0); CHKERRQ(ierr);
1045 ierr = VecDuplicate(user->lCsi, &user->lIZet); CHKERRQ(ierr); ierr = VecSet(user->lIZet, 0.0); CHKERRQ(ierr);
1046 ierr = VecDuplicate(user->lCsi, &user->lJCsi); CHKERRQ(ierr); ierr = VecSet(user->lJCsi, 0.0); CHKERRQ(ierr);
1047 ierr = VecDuplicate(user->lCsi, &user->lJEta); CHKERRQ(ierr); ierr = VecSet(user->lJEta, 0.0); CHKERRQ(ierr);
1048 ierr = VecDuplicate(user->lCsi, &user->lJZet); CHKERRQ(ierr); ierr = VecSet(user->lJZet, 0.0); CHKERRQ(ierr);
1049 ierr = VecDuplicate(user->lCsi, &user->lKCsi); CHKERRQ(ierr); ierr = VecSet(user->lKCsi, 0.0); CHKERRQ(ierr);
1050 ierr = VecDuplicate(user->lCsi, &user->lKEta); CHKERRQ(ierr); ierr = VecSet(user->lKEta, 0.0); CHKERRQ(ierr);
1051 ierr = VecDuplicate(user->lCsi, &user->lKZet); CHKERRQ(ierr); ierr = VecSet(user->lKZet, 0.0); CHKERRQ(ierr);
1052
1053 ierr = VecDuplicate(user->lAj, &user->lIAj); CHKERRQ(ierr); ierr = VecSet(user->lIAj, 0.0); CHKERRQ(ierr);
1054 ierr = VecDuplicate(user->lAj, &user->lJAj); CHKERRQ(ierr); ierr = VecSet(user->lJAj, 0.0); CHKERRQ(ierr);
1055 ierr = VecDuplicate(user->lAj, &user->lKAj); CHKERRQ(ierr); ierr = VecSet(user->lKAj, 0.0); CHKERRQ(ierr);
1056
1057 // --- Group F: Cell/Face Center Coordinates and Grid Spacing ---
1058 ierr = DMCreateGlobalVector(user->fda, &user->Cent); CHKERRQ(ierr); ierr = VecSet(user->Cent, 0.0); CHKERRQ(ierr);
1059 ierr = DMCreateLocalVector(user->fda, &user->lCent); CHKERRQ(ierr); ierr = VecSet(user->lCent, 0.0); CHKERRQ(ierr);
1060
1061 ierr = VecDuplicate(user->Cent, &user->GridSpace); CHKERRQ(ierr); ierr = VecSet(user->GridSpace, 0.0); CHKERRQ(ierr);
1062 ierr = VecDuplicate(user->lCent, &user->lGridSpace); CHKERRQ(ierr); ierr = VecSet(user->lGridSpace, 0.0); CHKERRQ(ierr);
1063
1064 // Face-center coordinate vectors are LOCAL to hold calculated values before scattering
1065 ierr = VecDuplicate(user->lCent, &user->Centx); CHKERRQ(ierr); ierr = VecSet(user->Centx, 0.0); CHKERRQ(ierr);
1066 ierr = VecDuplicate(user->lCent, &user->Centy); CHKERRQ(ierr); ierr = VecSet(user->Centy, 0.0); CHKERRQ(ierr);
1067 ierr = VecDuplicate(user->lCent, &user->Centz); CHKERRQ(ierr); ierr = VecSet(user->Centz, 0.0); CHKERRQ(ierr);
1068
1069 if(level == usermg->mglevels -1){
1070 // --- Group G: Turbulence Models (Finest Level Only) ---
1071 if (simCtx->les || simCtx->rans) {
1072 ierr = DMCreateGlobalVector(user->da, &user->Nu_t); CHKERRQ(ierr); ierr = VecSet(user->Nu_t, 0.0); CHKERRQ(ierr);
1073 ierr = DMCreateLocalVector(user->da, &user->lNu_t); CHKERRQ(ierr); ierr = VecSet(user->lNu_t, 0.0); CHKERRQ(ierr);
1074 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Turbulence viscosity (Nu_t) vectors created for LES/RANS model.\n");
1075 if(simCtx->les){
1076 ierr = DMCreateGlobalVector(user->da,&user->CS); CHKERRQ(ierr); ierr = VecSet(user->CS,0.0); CHKERRQ(ierr);
1077 ierr = DMCreateLocalVector(user->da,&user->lCs); CHKERRQ(ierr); ierr = VecSet(user->lCs,0.0); CHKERRQ(ierr);
1078 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Smagorinsky constant (CS) vectors created for LES model.\n");
1079 }
1080
1081 if(simCtx->wallfunction){
1082 ierr = DMCreateLocalVector(user->fda,&user->lFriction_Velocity); CHKERRQ(ierr); ierr = VecSet(user->lFriction_Velocity,0.0);
1083 }
1084 // Add K_Omega etc. here as needed
1085
1086 // Note: Add any other vectors from the legacy MG_Initial here as needed.
1087 // For example: Rhs, Forcing, turbulence Vecs (K_Omega, Nu_t)...
1088
1089 }
1090 // --- Group H: Particle Methods
1091 if(simCtx->np>0){
1092 ierr = DMCreateGlobalVector(user->da,&user->ParticleCount); CHKERRQ(ierr); ierr = VecSet(user->ParticleCount,0.0); CHKERRQ(ierr);
1093 ierr = DMCreateLocalVector(user->da,&user->lParticleCount); CHKERRQ(ierr); ierr = VecSet(user->lParticleCount,0.0); CHKERRQ(ierr);
1094 // Scalar field to hold particle scalar property (e.g., temperature, concentration)
1095 ierr = DMCreateGlobalVector(user->da,&user->Psi); CHKERRQ(ierr); ierr = VecSet(user->Psi,0.0); CHKERRQ(ierr);
1096 ierr = DMCreateLocalVector(user->da,&user->lPsi); CHKERRQ(ierr); ierr = VecSet(user->lPsi,0.0); CHKERRQ(ierr);
1097 LOG_ALLOW(GLOBAL,LOG_DEBUG,"ParticleCount & Scalar(Psi) created for %d particles.\n",simCtx->np);
1098 }
1099 }
1100 // --- Group I: Boundary Condition vectors ---
1101 ierr = DMCreateGlobalVector(user->fda, &user->Bcs.Ubcs); CHKERRQ(ierr);
1102 ierr = VecSet(user->Bcs.Ubcs, 0.0); CHKERRQ(ierr);
1103 ierr = DMCreateGlobalVector(user->fda, &user->Bcs.Uch); CHKERRQ(ierr);
1104 ierr = VecSet(user->Bcs.Uch, 0.0); CHKERRQ(ierr);
1105
1106 if(level == usermg->mglevels - 1){
1107 if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR){
1108 LOG_ALLOW(LOCAL, LOG_DEBUG, "Post-processor mode detected. Allocating derived field vectors.\n");
1109
1110 ierr = VecDuplicate(user->P, &user->P_nodal); CHKERRQ(ierr);
1111 ierr = VecSet(user->P_nodal, 0.0); CHKERRQ(ierr);
1112
1113 ierr = VecDuplicate(user->Ucat, &user->Ucat_nodal); CHKERRQ(ierr);
1114 ierr = VecSet(user->Ucat_nodal, 0.0); CHKERRQ(ierr);
1115
1116 ierr = VecDuplicate(user->P, &user->Qcrit); CHKERRQ(ierr);
1117 ierr = VecSet(user->Qcrit, 0.0); CHKERRQ(ierr);
1118
1119 LOG_ALLOW(LOCAL, LOG_DEBUG, "Derived field vectors P_nodal, Ucat_nodal, and Qcrit created.\n");
1120
1121 if(simCtx->np>0){
1122 ierr = VecDuplicate(user->Psi, &user->Psi_nodal); CHKERRQ(ierr);
1123 ierr = VecSet(user->Psi_nodal, 0.0); CHKERRQ(ierr);
1124
1125 LOG_ALLOW(LOCAL, LOG_DEBUG, "Derived field vector Psi_nodal created for particle scalar property.\n");
1126
1127 }
1128 }else{
1129 user->P_nodal = NULL;
1130 user->Ucat_nodal = NULL;
1131 user->Qcrit = NULL;
1132 user->Psi_nodal = NULL;
1133 }
1134 }
1135
1136 }
1137}
1138
1139 LOG_ALLOW(GLOBAL, LOG_INFO, "All simulation vectors created and initialized.\n");
1140
1142 PetscFunctionReturn(0);
1143}
Vec lFriction_Velocity
Definition variables.h:755
Vec lDiffusivityGradient
Definition variables.h:764
Vec lCent
Definition variables.h:781
Vec GridSpace
Definition variables.h:781
Vec P_nodal
Definition variables.h:810
Vec JCsi
Definition variables.h:784
Vec KAj
Definition variables.h:785
Vec JEta
Definition variables.h:784
Vec Zet
Definition variables.h:781
Vec lIEta
Definition variables.h:783
Vec lIZet
Definition variables.h:783
Vec lNvert
Definition variables.h:760
Vec Phi
Definition variables.h:760
Vec IZet
Definition variables.h:783
Vec Centz
Definition variables.h:782
Vec IEta
Definition variables.h:783
Vec lZet
Definition variables.h:781
Vec Csi
Definition variables.h:781
Vec lUcont_rm1
Definition variables.h:768
Vec lIAj
Definition variables.h:783
Vec lKEta
Definition variables.h:785
Vec Ucat_nodal
Definition variables.h:811
Vec lPsi
Definition variables.h:806
Vec DiffusivityGradient
Definition variables.h:764
Vec lJCsi
Definition variables.h:784
Vec lCs
Definition variables.h:788
Vec Ucont
Definition variables.h:760
Vec Ubcs
Boundary condition velocity values. (Comment: "An ugly hack, waste of memory")
Definition variables.h:121
Vec Qcrit
Definition variables.h:812
Vec JZet
Definition variables.h:784
Vec Centx
Definition variables.h:782
BCS Bcs
Definition variables.h:754
Vec lPhi
Definition variables.h:760
Vec lParticleCount
Definition variables.h:805
Vec lUcont_o
Definition variables.h:767
Vec Ucat_o
Definition variables.h:767
Vec lKZet
Definition variables.h:785
Vec Eta
Definition variables.h:781
Vec lNu_t
Definition variables.h:788
Vec Nu_t
Definition variables.h:788
Vec lJEta
Definition variables.h:784
Vec lCsi
Definition variables.h:781
Vec lGridSpace
Definition variables.h:781
Vec ICsi
Definition variables.h:783
Vec lKCsi
Definition variables.h:785
Vec Ucat
Definition variables.h:760
Vec ParticleCount
Definition variables.h:805
Vec Ucont_o
Definition variables.h:767
Vec lJZet
Definition variables.h:784
Vec Nvert_o
Definition variables.h:767
Vec IAj
Definition variables.h:783
Vec Psi_nodal
Definition variables.h:813
Vec JAj
Definition variables.h:784
Vec KEta
Definition variables.h:785
Vec Ucont_rm1
Definition variables.h:768
Vec lUcont
Definition variables.h:760
Vec Diffusivity
Definition variables.h:763
Vec lAj
Definition variables.h:781
Vec lICsi
Definition variables.h:783
Vec lUcat
Definition variables.h:760
Vec lEta
Definition variables.h:781
Vec KZet
Definition variables.h:785
Vec Cent
Definition variables.h:781
Vec Nvert
Definition variables.h:760
Vec KCsi
Definition variables.h:785
Vec lDiffusivity
Definition variables.h:763
Vec lNvert_o
Definition variables.h:767
Vec Centy
Definition variables.h:782
Vec lJAj
Definition variables.h:784
Vec lKAj
Definition variables.h:785
Vec Psi
Definition variables.h:806
Vec P_o
Definition variables.h:767
Vec Uch
Characteristic velocity for boundary conditions.
Definition variables.h:122
Here is the caller graph for this function:

◆ UpdateLocalGhosts()

PetscErrorCode UpdateLocalGhosts ( UserCtx user,
const char *  fieldName 
)

Updates the local vector (including ghost points) from its corresponding global vector.

This function identifies the correct global vector, local vector, and DM based on the provided fieldName and performs the standard PETSc DMGlobalToLocalBegin/End sequence. Includes optional debugging output (max norms before/after).

Parameters
user — The UserCtx structure containing the vectors and DMs.
fieldName — The name of the field to update ("Ucat", "Ucont", "P", "Nvert", etc.).
Returns
PetscErrorCode 0 on success, non-zero on failure.
Note
This function assumes the global vector associated with fieldName has already been populated with the desired data (including any boundary conditions).

Definition at line 1162 of file setup.c.

1163{
1164 PetscErrorCode ierr;
1165 PetscMPIInt rank;
1166 Vec globalVec = NULL;
1167 Vec localVec = NULL;
1168 DM dm = NULL; // The DM associated with this field pair
1169
1170 PetscFunctionBeginUser; // Use User version for application code
1172 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1173 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank %d: Starting ghost update for field '%s'.\n", rank, fieldName);
1174
1175 // --- 1. Identify the correct Vectors and DM ---
1176 if (strcmp(fieldName, "Ucat") == 0) {
1177 globalVec = user->Ucat;
1178 localVec = user->lUcat;
1179 dm = user->fda;
1180 } else if (strcmp(fieldName, "Ucont") == 0) {
1181 globalVec = user->Ucont;
1182 localVec = user->lUcont;
1183 dm = user->fda;
1184 } else if (strcmp(fieldName, "P") == 0) {
1185 globalVec = user->P;
1186 localVec = user->lP;
1187 dm = user->da;
1188 } else if (strcmp(fieldName, "Diffusivity") == 0) {
1189 globalVec = user->Diffusivity;
1190 localVec = user->lDiffusivity;
1191 dm = user->da;
1192 } else if (strcmp(fieldName, "DiffusivityGradient") == 0) {
1193 globalVec = user->DiffusivityGradient;
1194 localVec = user->lDiffusivityGradient;
1195 dm = user->fda;
1196 } else if (strcmp(fieldName, "Csi") == 0) {
1197 globalVec = user->Csi;
1198 localVec = user->lCsi;
1199 dm = user->fda;
1200 } else if (strcmp(fieldName, "Eta") == 0) {
1201 globalVec = user->Eta;
1202 localVec = user->lEta;
1203 dm = user->fda;
1204 } else if (strcmp(fieldName, "Zet") == 0) {
1205 globalVec = user->Zet;
1206 localVec = user->lZet;
1207 dm = user->fda;
1208 }else if (strcmp(fieldName, "Nvert") == 0) {
1209 globalVec = user->Nvert;
1210 localVec = user->lNvert;
1211 dm = user->da;
1212 // Add other fields as needed
1213 } else if (strcmp(fieldName, "Aj") == 0) {
1214 globalVec = user->Aj;
1215 localVec = user->lAj;
1216 dm = user->da;
1217 } else if (strcmp(fieldName, "Cent") == 0) {
1218 globalVec = user->Cent;
1219 localVec = user->lCent;
1220 dm = user->fda;
1221 }else if (strcmp(fieldName, "GridSpace") == 0) {
1222 globalVec = user->GridSpace;
1223 localVec = user->lGridSpace;
1224 dm = user->fda;
1225 }else if (strcmp(fieldName,"ICsi") == 0){
1226 globalVec = user->ICsi;
1227 localVec = user->lICsi;
1228 dm = user->fda;
1229 }else if (strcmp(fieldName,"IEta") == 0){
1230 globalVec = user->IEta;
1231 localVec = user->lIEta;
1232 dm = user->fda;
1233 }else if (strcmp(fieldName,"IZet") == 0){
1234 globalVec = user->IZet;
1235 localVec = user->lIZet;
1236 dm = user->fda;
1237 }else if (strcmp(fieldName,"JCsi") == 0){
1238 globalVec = user->JCsi;
1239 localVec = user->lJCsi;
1240 dm = user->fda;
1241 }else if (strcmp(fieldName,"JEta") == 0){
1242 globalVec = user->JEta;
1243 localVec = user->lJEta;
1244 dm = user->fda;
1245 }else if (strcmp(fieldName,"JZet") == 0){
1246 globalVec = user->JZet;
1247 localVec = user->lJZet;
1248 dm = user->fda;
1249 }else if (strcmp(fieldName,"KCsi") == 0){
1250 globalVec = user->KCsi;
1251 localVec = user->lKCsi;
1252 dm = user->fda;
1253 }else if (strcmp(fieldName,"KEta") == 0){
1254 globalVec = user->KEta;
1255 localVec = user->lKEta;
1256 dm = user->fda;
1257 }else if (strcmp(fieldName,"KZet") == 0){
1258 globalVec = user->KZet;
1259 localVec = user->lKZet;
1260 dm = user->fda;
1261 }else if (strcmp(fieldName,"IAj") == 0){
1262 globalVec = user->IAj;
1263 localVec = user->lIAj;
1264 dm = user->da;
1265 }else if (strcmp(fieldName,"JAj") == 0){
1266 globalVec = user->JAj;
1267 localVec = user->lJAj;
1268 dm = user->da;
1269 }else if (strcmp(fieldName,"KAj") == 0){
1270 globalVec = user->KAj;
1271 localVec = user->lKAj;
1272 dm = user->da;
1273 }else if (strcmp(fieldName,"Phi") == 0){ // Pressure correction term.
1274 globalVec = user->Phi;
1275 localVec = user->lPhi;
1276 dm = user->da;
1277 }else if (strcmp(fieldName,"Psi") == 0){ // Particle scalar property.
1278 globalVec = user->Psi;
1279 localVec = user->lPsi;
1280 dm = user->da;
1281 }else {
1282 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Field '%s' not recognized for ghost update.", fieldName);
1283 }
1284
1285 // --- 2. Check if components were found ---
1286 if (!globalVec) {
1287 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Global vector for field '%s' is NULL.", fieldName);
1288 }
1289 if (!localVec) {
1290 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Local vector for field '%s' is NULL.", fieldName);
1291 }
1292 if (!dm) {
1293 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "DM for field '%s' is NULL.", fieldName);
1294 }
1295
1296 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Identified components for '%s': DM=%p, GlobalVec=%p, LocalVec=%p.\n",
1297 rank, fieldName, (void*)dm, (void*)globalVec, (void*)localVec);
1298
1299 // --- 3. Optional Debugging: Norm Before Update ---
1300 // Use your logging convention check
1301 // if (get_log_level() >= LOG_LEVEL_DEBUG && is_function_allowed("UpdateLocalGhosts")) { // Example check
1302 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)){
1303 PetscReal norm_global_before;
1304 ierr = VecNorm(globalVec, NORM_INFINITY, &norm_global_before); CHKERRQ(ierr);
1305 LOG_ALLOW(GLOBAL, LOG_INFO,"Max norm '%s' (Global) BEFORE Ghost Update: %g\n", fieldName, norm_global_before);
1306 // Optional: Norm of local vector before update (might contain old ghost values)
1307 // PetscReal norm_local_before;
1308 // ierr = VecNorm(localVec, NORM_INFINITY, &norm_local_before); CHKERRQ(ierr);
1309 // LOG_ALLOW(GLOBAL, LOG_DEBUG,"Max norm '%s' (Local) BEFORE Ghost Update: %g\n", fieldName, norm_local_before);
1310 }
1311
1312 // --- 4. Perform the Global-to-Local Transfer (Ghost Update) ---
1313 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Calling DMGlobalToLocalBegin/End for '%s'.\n", rank, fieldName);
1314 ierr = DMGlobalToLocalBegin(dm, globalVec, INSERT_VALUES, localVec); CHKERRQ(ierr);
1315 ierr = DMGlobalToLocalEnd(dm, globalVec, INSERT_VALUES, localVec); CHKERRQ(ierr);
1316 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Completed DMGlobalToLocalBegin/End for '%s'.\n", rank, fieldName);
1317
1318 // --- 5. Optional Debugging: Norm After Update ---
1319 // Use your logging convention check
1320 // if (get_log_level() >= LOG_LEVEL_DEBUG && is_function_allowed("UpdateLocalGhosts")) { // Example check
1321 if(get_log_level() == LOG_DEBUG && is_function_allowed(__func__)){ // Using your specific check
1322 PetscReal norm_local_after;
1323 ierr = VecNorm(localVec, NORM_INFINITY, &norm_local_after); CHKERRQ(ierr);
1324 LOG_ALLOW(GLOBAL, LOG_INFO,"Max norm '%s' (Local) AFTER Ghost Update: %g\n", fieldName, norm_local_after);
1325
1326 // --- 6. Optional Debugging: Specific Point Checks (Example for Ucat on Rank 0/1) ---
1327 // (Keep this conditional if it's only for specific debug scenarios)
1328 if (strcmp(fieldName, "Ucat") == 0) { // Only do detailed checks for Ucat for now
1329 PetscMPIInt rank_test;
1330 MPI_Comm_rank(PETSC_COMM_WORLD, &rank_test);
1331
1332 // Get Local Info needed for indexing checks
1333 DMDALocalInfo info_check;
1334 ierr = DMDAGetLocalInfo(dm, &info_check); CHKERRQ(ierr); // Use the correct dm
1335
1336 // Buffer for array pointer
1337 Cmpnts ***lUcat_arr_test = NULL;
1338 PetscErrorCode ierr_test = 0;
1339
1340 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Testing '%s' access immediately after ghost update...\n", rank_test, fieldName);
1341 ierr_test = DMDAVecGetArrayDOFRead(dm, localVec, &lUcat_arr_test); // Use correct dm and localVec
1342
1343 if (ierr_test) {
1344 LOG_ALLOW(LOCAL, LOG_ERROR, "Rank %d: ERROR %d getting '%s' array after ghost update!\n", rank_test, ierr_test, fieldName);
1345 } else if (!lUcat_arr_test) {
1346 LOG_ALLOW(LOCAL, LOG_ERROR, "Rank %d: ERROR NULL pointer getting '%s' array after ghost update!\n", rank_test, fieldName);
1347 }
1348 else {
1349 // Check owned interior point (e.g., first interior point)
1350 PetscInt k_int = info_check.zs + (info_check.zm > 1 ? 1 : 0); // Global k index (at least zs+1 if possible)
1351 PetscInt j_int = info_check.ys + (info_check.ym > 1 ? 1 : 0); // Global j index
1352 PetscInt i_int = info_check.xs + (info_check.xm > 1 ? 1 : 0); // Global i index
1353 // Ensure indices are within global bounds if domain is very small
1354 //if (k_int >= info_check.mz-1) k_int = info_check.mz-2; if (k_int < 1) k_int = 1;
1355 //if (j_int >= info_check.my-1) j_int = info_check.my-2; if (j_int < 1) j_int = 1;
1356 // if (i_int >= info_check.mx-1) i_int = info_check.mx-2; if (i_int < 1) i_int = 1;
1357 // clamp k_int to [1 .. mz-2]
1358 if (k_int >= info_check.mz - 1) {
1359 k_int = info_check.mz - 2;
1360 }
1361 if (k_int < 1) {
1362 k_int = 1;
1363 }
1364
1365 // clamp j_int to [1 .. my-2]
1366 if (j_int >= info_check.my - 1) {
1367 j_int = info_check.my - 2;
1368 }
1369 if (j_int < 1) {
1370 j_int = 1;
1371 }
1372
1373 // clamp i_int to [1 .. mx-2]
1374 if (i_int >= info_check.mx - 1) {
1375 i_int = info_check.mx - 2;
1376 }
1377 if (i_int < 1) {
1378 i_int = 1;
1379 }
1380
1381 // Only attempt read if indices are actually owned (relevant for multi-rank)
1382 if (k_int >= info_check.zs && k_int < info_check.zs + info_check.zm &&
1383 j_int >= info_check.ys && j_int < info_check.ys + info_check.ym &&
1384 i_int >= info_check.xs && i_int < info_check.xs + info_check.xm)
1385 {
1386 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Attempting test read OWNED INTERIOR [%d][%d][%d] (Global)\n", rank_test, k_int, j_int, i_int);
1387 Cmpnts test_val_owned_interior = lUcat_arr_test[k_int][j_int][i_int];
1388 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: SUCCESS reading owned interior: x=%g\n", rank_test, test_val_owned_interior.x);
1389 } else {
1390 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Skipping interior test read for non-owned index [%d][%d][%d].\n", rank_test, k_int, j_int, i_int);
1391 }
1392
1393
1394 // Check owned boundary point (e.g., first owned point)
1395 PetscInt k_bnd = info_check.zs; // Global k index
1396 PetscInt j_bnd = info_check.ys; // Global j index
1397 PetscInt i_bnd = info_check.xs; // Global i index
1398 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Attempting test read OWNED BOUNDARY [%d][%d][%d] (Global)\n", rank_test, k_bnd, j_bnd, i_bnd);
1399 Cmpnts test_val_owned_boundary = lUcat_arr_test[k_bnd][j_bnd][i_bnd];
1400 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: SUCCESS reading owned boundary: x=%g\n", rank_test, test_val_owned_boundary.x);
1401
1402
1403 // Check ghost point (e.g., one layer below in k, if applicable)
1404 if (info_check.zs > 0) { // Only if there's a rank below
1405 PetscInt k_ghost = info_check.zs - 1;
1406 PetscInt j_ghost = info_check.ys; // Use start of owned y, simple example
1407 PetscInt i_ghost = info_check.xs; // Use start of owned x, simple example
1408 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Attempting test read GHOST [%d][%d][%d] (Global)\n", rank_test, k_ghost, j_ghost, i_ghost);
1409 Cmpnts test_val_ghost = lUcat_arr_test[k_ghost][j_ghost][i_ghost];
1410 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: SUCCESS reading ghost: x=%g\n", rank_test, test_val_ghost.x);
1411 } else {
1412 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Skipping ghost test read (zs=0).\n", rank_test);
1413 }
1414
1415 // Restore the array
1416 ierr_test = DMDAVecRestoreArrayDOFRead(dm, localVec, &lUcat_arr_test);
1417 if(ierr_test){ LOG_ALLOW(LOCAL, LOG_ERROR, "Rank %d: ERROR %d restoring '%s' array after test read!\n", rank_test, ierr_test, fieldName); }
1418 LOG_ALLOW(LOCAL, LOG_DEBUG, "Rank %d: Finished testing '%s' access.\n", rank_test, fieldName);
1419 }
1420 } // end if Ucat
1421 } // end debug logging check
1422
1423 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank %d: Completed ghost update for field '%s'.\n", rank, fieldName);
1425 PetscFunctionReturn(0);
1426}
PetscBool is_function_allowed(const char *functionName)
Checks if a given function is in the allow-list.
Definition logging.c:157
A 3D point or vector with PetscScalar components.
Definition variables.h:100
Here is the call graph for this function:
Here is the caller graph for this function:

◆ SetupBoundaryConditions()

PetscErrorCode SetupBoundaryConditions ( SimCtx *  simCtx)

(Orchestrator) Sets up all boundary conditions for the simulation.

Definition at line 1433 of file setup.c.

1434{
1435 PetscErrorCode ierr;
1436 PetscFunctionBeginUser;
1437
1439
1440 LOG_ALLOW(GLOBAL,LOG_INFO, "--- Setting up Boundary Conditions ---\n");
1441 // --- Phase 1: Parse and initialize BC configuration for all blocks ---
1442 LOG_ALLOW(GLOBAL,LOG_INFO,"Parsing BC configuration files and initializing boundary condition data structures.\n");
1443 UserCtx *user_finest = simCtx->usermg.mgctx[simCtx->usermg.mglevels-1].user;
1444 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
1445 LOG_ALLOW(GLOBAL,LOG_DEBUG, " -> Processing Block %d:\n", bi);
1446
1447 // --- Generate the filename for the current block ---
1448 const char *current_bc_filename = simCtx->bcs_files[bi];
1449 LOG_ALLOW(GLOBAL,LOG_DEBUG," -> Processing Block %d using config file '%s'\n", bi, current_bc_filename);
1450 // This will populate user_finest[bi].boundary_faces
1451
1452 //ierr = ParseAllBoundaryConditions(&user_finest[bi],current_bc_filename); CHKERRQ(ierr);
1453
1454 ierr = BoundarySystem_Initialize(&user_finest[bi], current_bc_filename); CHKERRQ(ierr);
1455 // Call the adapter to translate into the legacy format
1456 ierr = TranslateModernBCsToLegacy(&user_finest[bi]); CHKERRQ(ierr);
1457 }
1458
1459 // Propogate BC Configuration to coarser levels.
1460 ierr = PropagateBoundaryConfigToCoarserLevels(simCtx); CHKERRQ(ierr);
1461
1462 // --- Calculate Grid Metrics (requires BC configuration) ---
1463 // NOTE: This MUST be called here (after BC initialization but before inlet/outlet calculations) because:
1464 // 1. Periodic BC corrections in metric calculations need boundary_faces data to be populated
1465 // 2. Inlet/Outlet area calculations (below) require computed metrics (Csi, Eta, Zet) to be available
1466 // Previously this was in SetupGridAndSolvers, but that caused metrics to be computed without BC info.
1467 LOG_ALLOW(GLOBAL,LOG_INFO,"Computing grid metrics with boundary condition information.\n");
1468 ierr = CalculateAllGridMetrics(simCtx); CHKERRQ(ierr);
1469
1470 // --- Phase 2: Calculate inlet/outlet properties (requires computed metrics) ---
1471 LOG_ALLOW(GLOBAL,LOG_INFO,"Calculating inlet and outlet face properties.\n");
1472 for (PetscInt bi = 0; bi < simCtx->block_number; bi++) {
1473 // Call the function to calculate the center of the inlet face & the inlet area, which may be used to calculate Boundary values.
1474 ierr = CalculateInletProperties(&user_finest[bi]); CHKERRQ(ierr);
1475
1476 // Call the function to calculate the center of the outlet face & the outlet area, which may be used to calculate Boundary values.
1477 ierr = CalculateOutletProperties(&user_finest[bi]); CHKERRQ(ierr);
1478 }
1479
1480 LOG_ALLOW(GLOBAL,LOG_INFO, "--- Boundary Conditions setup complete ---\n");
1481
1482
1484 PetscFunctionReturn(0);
1485}
PetscErrorCode BoundarySystem_Initialize(UserCtx *user, const char *bcs_filename)
Initializes the entire boundary system.
Definition Boundaries.c:987
PetscErrorCode PropagateBoundaryConfigToCoarserLevels(SimCtx *simCtx)
Propagates boundary condition configuration from finest to all coarser multigrid levels.
PetscErrorCode TranslateModernBCsToLegacy(UserCtx *user)
Definition Boundaries.c:656
PetscErrorCode CalculateAllGridMetrics(SimCtx *simCtx)
Orchestrates the calculation of all grid metrics.
Definition Metric.c:2388
PetscErrorCode CalculateOutletProperties(UserCtx *user)
Calculates the center and area of the primary OUTLET face.
Definition grid.c:1139
PetscErrorCode CalculateInletProperties(UserCtx *user)
Calculates the center and area of the primary INLET face.
Definition grid.c:1083
Here is the call graph for this function:
Here is the caller graph for this function:

◆ Allocate3DArrayScalar()

PetscErrorCode Allocate3DArrayScalar ( PetscReal ****  array,
PetscInt  nz,
PetscInt  ny,
PetscInt  nx 
)

Allocates a 3D array of PetscReal values using PetscCalloc.

This function dynamically allocates memory for a 3D array of PetscReal values with dimensions nz (layers) x ny (rows) x nx (columns). It uses PetscCalloc1 to ensure the memory is zero-initialized.

The allocation is done in three steps:

  1. Allocate an array of nz pointers (one for each layer).
  2. Allocate a contiguous block for nz*ny row pointers and assign each layer’s row pointers.
  3. Allocate a contiguous block for all nz*ny*nx PetscReal values.

This setup allows the array to be accessed as array[k][j][i], and the memory for the data is contiguous, which improves cache efficiency.

Parameters
[out]arrayPointer to the 3D array to be allocated.
[in]nzNumber of layers (z-direction).
[in]nyNumber of rows (y-direction).
[in]nxNumber of columns (x-direction).
Returns
PetscErrorCode 0 on success, nonzero on failure.

Definition at line 1509 of file setup.c.

1510{
1511 PetscErrorCode ierr;
1512 PetscReal ***data;
1513 PetscReal *dataContiguous;
1514 PetscInt k, j;
1515
1516 PetscFunctionBegin;
1517 /* Step 1: Allocate memory for an array of nz layer pointers (zero-initialized) */
1518 ierr = PetscCalloc1(nz, &data); CHKERRQ(ierr);
1519
1520 /* Step 2: Allocate memory for all row pointers (nz * ny pointers) */
1521 ierr = PetscCalloc1(nz * ny, &data[0]); CHKERRQ(ierr);
1522 for (k = 1; k < nz; k++) {
1523 data[k] = data[0] + k * ny;
1524 }
1525
1526 /* Step 3: Allocate one contiguous block for all data elements (nz*ny*nx) */
1527 ierr = PetscCalloc1(nz * ny * nx, &dataContiguous); CHKERRQ(ierr);
1528
1529 /* Build the 3D pointer structure: each row pointer gets the correct segment of data */
1530 for (k = 0; k < nz; k++) {
1531 for (j = 0; j < ny; j++) {
1532 data[k][j] = dataContiguous + (k * ny + j) * nx;
1533 /* Memory is already zeroed by PetscCalloc1, so no manual initialization is needed */
1534 }
1535 }
1536 *array = data;
1537 PetscFunctionReturn(0);
1538}

◆ Deallocate3DArrayScalar()

PetscErrorCode Deallocate3DArrayScalar ( PetscReal ***  array,
PetscInt  nz,
PetscInt  ny 
)

Deallocates a 3D array of PetscReal values allocated by Allocate3DArrayScalar.

This function frees the memory allocated for a 3D array of PetscReal values. It assumes the memory was allocated using Allocate3DArrayScalar, which allocated three separate memory blocks: one for the contiguous data, one for the row pointers, and one for the layer pointers.

Parameters
[in]arrayPointer to the 3D array to be deallocated.
[in]nzNumber of layers (z-direction).
[in]nyNumber of rows (y-direction).
Returns
PetscErrorCode 0 on success, nonzero on failure.

Definition at line 1554 of file setup.c.

1555{
1556 PetscErrorCode ierr;
1557
1558 PetscFunctionBegin;
1559 if (!array || !array[0] || !array[0][0] ) { // Added more robust check
1560 LOG_ALLOW(GLOBAL, LOG_WARNING, "Deallocate3DArrayScalar called with potentially unallocated or NULL array.\n");
1561 if (array) {
1562 if (array[0]) { // Check if row pointers might exist
1563 // Cannot safely access array[0][0] if array[0] might be invalid/freed
1564 // Standard deallocation below assumes valid pointers.
1565 ierr = PetscFree(array[0]); CHKERRQ(ierr); // Free row pointers if they exist
1566 }
1567 ierr = PetscFree(array); CHKERRQ(ierr); // Free layer pointers if they exist
1568 }
1569 PetscFunctionReturn(0);
1570 }
1571
1572 // --- Standard Deallocation (assuming valid allocation) ---
1573
1574 /* 1. Free the contiguous block of PetscReal values.
1575 The starting address was stored in array[0][0]. */
1576 ierr = PetscFree(array[0][0]); CHKERRQ(ierr); // Free the ACTUAL DATA
1577
1578 /* 2. Free the contiguous block of row pointers.
1579 The starting address was stored in array[0]. */
1580 ierr = PetscFree(array[0]); CHKERRQ(ierr); // Free the ROW POINTERS
1581
1582 /* 3. Free the layer pointer array.
1583 The starting address is 'array' itself. */
1584 ierr = PetscFree(array); CHKERRQ(ierr); // Free the LAYER POINTERS
1585
1586 PetscFunctionReturn(0);
1587}
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29

◆ Allocate3DArrayVector()

PetscErrorCode Allocate3DArrayVector ( Cmpnts ****  array,
PetscInt  nz,
PetscInt  ny,
PetscInt  nx 
)

Allocates a 3D array of Cmpnts structures using PetscCalloc.

Deallocates a 3D array of Cmpnts structures allocated by Allocate3DArrayVector.

This function dynamically allocates memory for a 3D array of Cmpnts (vector) structures with dimensions nz (layers) x ny (rows) x nx (columns). It uses PetscCalloc1 to ensure that all allocated memory is zero-initialized.

The allocation procedure is similar to Allocate3DArrayScalar:

  1. Allocate an array of nz pointers (one for each layer).
  2. Allocate a contiguous block for nz*ny row pointers.
  3. Allocate one contiguous block for nz*ny*nx Cmpnts structures.

After allocation, the array can be accessed as array[k][j][i] and each element (a Cmpnts structure) will have its x, y, and z fields initialized to 0.0.

Parameters
[out]arrayPointer to the 3D array to be allocated.
[in]nzNumber of layers in the z-direction.
[in]nyNumber of rows in the y-direction.
[in]nxNumber of columns in the x-direction.
Returns
PetscErrorCode 0 on success, nonzero on failure.

Definition at line 1611 of file setup.c.

1612{
1613 PetscErrorCode ierr;
1614 Cmpnts ***data;
1615 Cmpnts *dataContiguous;
1616 PetscInt k, j;
1617 PetscMPIInt rank;
1618
1619 PetscFunctionBegin;
1620
1621 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
1622
1623 /* Step 1: Allocate memory for nz layer pointers (zeroed) */
1624 ierr = PetscCalloc1(nz, &data); CHKERRQ(ierr);
1625
1626 LOG_ALLOW(LOCAL,LOG_DEBUG," [Rank %d] memory allocated for outermost layer (%d k-layer pointers).\n",rank,nz);
1627
1628 /* Step 2: Allocate memory for all row pointers (nz * ny pointers) */
1629 ierr = PetscCalloc1(nz * ny, &data[0]); CHKERRQ(ierr);
1630 for (k = 1; k < nz; k++) {
1631 data[k] = data[0] + k * ny;
1632 }
1633
1634 LOG_ALLOW(LOCAL,LOG_DEBUG,"[Rank %d] memory allocated for %dx%d row pointers.\n",rank,nz,ny);
1635
1636 /* Step 3: Allocate one contiguous block for nz*ny*nx Cmpnts structures (zeroed) */
1637 ierr = PetscCalloc1(nz * ny * nx, &dataContiguous); CHKERRQ(ierr);
1638
1639 LOG_ALLOW(GLOBAL,LOG_DEBUG,"[Rank %d] memory allocated for contigous block of %dx%dx%d Cmpnts structures).\n",rank,nz,ny,nx);
1640
1641 /* Build the 3D pointer structure for vector data */
1642 for (k = 0; k < nz; k++) {
1643 for (j = 0; j < ny; j++) {
1644 data[k][j] = dataContiguous + (k * ny + j) * nx;
1645 /* The PetscCalloc1 call has already initialized each Cmpnts to zero. */
1646 }
1647 }
1648
1649 LOG_ALLOW(GLOBAL,LOG_DEBUG,"[Rank %d] 3D pointer structure for vector data created. \n",rank);
1650
1651 *array = data;
1652 PetscFunctionReturn(0);
1653}

◆ Deallocate3DArrayVector()

PetscErrorCode Deallocate3DArrayVector ( Cmpnts ***  array,
PetscInt  nz,
PetscInt  ny 
)

Deallocates a 3D array of Cmpnts structures allocated by Allocate3DArrayVector.

This function frees the memory allocated for a 3D array of Cmpnts structures. It assumes the memory was allocated using Allocate3DArrayVector, which created three separate memory blocks: one for the contiguous vector data, one for the row pointers, and one for the layer pointers.

Parameters
[in]arrayPointer to the 3D array to be deallocated.
[in]nzNumber of layers in the z-direction.
[in]nyNumber of rows in the y-direction.
Returns
PetscErrorCode 0 on success, nonzero on failure.

Definition at line 1669 of file setup.c.

1670{
1671 PetscErrorCode ierr;
1672
1673 PetscFunctionBegin;
1674 // If array is NULL or hasn't been allocated properly, just return.
1675 if (!array || !array[0] || !array[0][0] ) {
1676 LOG_ALLOW(GLOBAL, LOG_WARNING, "Deallocate3DArrayVector called with potentially unallocated or NULL array.\n");
1677 // Attempt to free what might exist, but be cautious
1678 if (array) {
1679 if (array[0]) { // Check if row pointers were allocated
1680 // We don't have a direct pointer to the contiguous data block
1681 // saved separately in this allocation scheme. The allocation relies
1682 // on array[0][0] pointing to it. If array[0] was freed first,
1683 // accessing array[0][0] is unsafe.
1684 // The allocation scheme where the contiguous data block is not
1685 // stored separately makes safe deallocation tricky if freeing
1686 // happens out of order or if parts are NULL.
1687
1688 // A SAFER ALLOCATION/DEALLOCATION would store the data pointer separately.
1689 // Given the current allocation scheme, the order MUST be:
1690 // 1. Free the data block (pointed to by array[0][0])
1691 // 2. Free the row pointer block (pointed to by array[0])
1692 // 3. Free the layer pointer block (pointed to by array)
1693
1694 // Let's assume the allocation was successful and pointers are valid.
1695 // Get pointer to the contiguous data block *before* freeing row pointers
1696 Cmpnts *dataContiguous = array[0][0];
1697 ierr = PetscFree(dataContiguous); CHKERRQ(ierr); // Free data block
1698
1699 // Now free the row pointers block
1700 ierr = PetscFree(array[0]); CHKERRQ(ierr); // Free row pointers
1701
1702 }
1703 // Finally, free the array of layer pointers
1704 ierr = PetscFree(array); CHKERRQ(ierr);
1705 }
1706 PetscFunctionReturn(0); // Return gracefully if input was NULL initially
1707 }
1708
1709
1710 // --- Standard Deallocation (assuming valid allocation) ---
1711
1712 /* 1. Free the contiguous block of Cmpnts structures.
1713 The starting address was stored in array[0][0] by Allocate3DArrayVector. */
1714 ierr = PetscFree(array[0][0]); CHKERRQ(ierr); // Free the ACTUAL DATA
1715
1716 /* 2. Free the contiguous block of row pointers.
1717 The starting address was stored in array[0]. */
1718 ierr = PetscFree(array[0]); CHKERRQ(ierr); // Free the ROW POINTERS
1719
1720 /* 3. Free the layer pointer array.
1721 The starting address is 'array' itself. */
1722 ierr = PetscFree(array); CHKERRQ(ierr); // Free the LAYER POINTERS
1723
1724 PetscFunctionReturn(0);
1725}

◆ GetOwnedCellRange()

PetscErrorCode GetOwnedCellRange ( const DMDALocalInfo *  info_nodes,
PetscInt  dim,
PetscInt *  xs_cell_global_out,
PetscInt *  xm_cell_local_out 
)

Gets the global starting index of cells owned by this rank and the number of such cells.

Determines the global starting index and number of CELLS owned by the current processor in a specified dimension.

A cell's global index is considered the same as its origin node's global index. This function assumes a node-centered DMDA where info_nodes provides all necessary information:

  • info_nodes->xs, ys, zs: Global starting index of the first node owned by this rank (excluding ghosts).
  • info_nodes->xm, ym, zm: Number of nodes owned by this rank in each dimension (excluding ghosts).
  • info_nodes->mx, my, mz: Total number of global nodes in each dimension for the entire domain.

A cell C_k (0-indexed) is defined by its origin node N_k and extends to node N_{k+1}. Thus, the last node in the global domain cannot be an origin for a cell. The last possible cell origin node index is PhysicalGlobalNodesInDim - 2.

Warning
THIS FUNCTION MAKES A CRITICAL ASSUMPTION: It assumes the DMDA it receives information from was created with dimensions of (physical_nodes_x + 1), (physical_nodes_y + 1), etc., where the +1 node is a "user-defined ghost" not meant for physical calculations. It will internally subtract 1 from the global dimensions (mx, my, mz) before calculating the number of cells.
Parameters
[in]info_nodesPointer to the DMDALocalInfo struct for the current rank. This struct contains local ownership information (xs, xm, etc.) and global domain dimensions (mx, my, mz for nodes).
[in]dimThe dimension for which to get the cell range (0 for i/x, 1 for j/y, 2 for k/z).
[out]xs_cell_global_outPointer to store the global index of the first cell whose origin node is owned by this rank.
[out]xm_cell_local_outPointer to store the number of cells for which this rank owns the origin node.
Returns
PetscErrorCode 0 on success, or an error code on failure.

Definition at line 1761 of file setup.c.

1765{
1766 PetscErrorCode ierr = 0; // Standard PETSc error code, not explicitly set here but good practice.
1767 PetscInt xs_node_global_rank; // Global index of the first node owned by this rank in the specified dimension.
1768 PetscInt num_nodes_owned_rank; // Number of nodes owned by this rank in this dimension (local count, excluding ghosts).
1769 PetscInt GlobalNodesInDim_from_info; // Total number of DA points in this dimension, from DMDALocalInfo.
1770
1771 PetscFunctionBeginUser;
1772
1773 // --- 1. Input Validation ---
1774 if (!info_nodes || !xs_cell_global_out || !xm_cell_local_out) {
1775 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Null pointer passed to GetOwnedCellRange.");
1776 }
1777
1778 // --- 2. Extract Node Ownership and Global Dimension Information from DMDALocalInfo ---
1779 if (dim == 0) { // I-direction
1780 xs_node_global_rank = info_nodes->xs;
1781 num_nodes_owned_rank = info_nodes->xm;
1782 GlobalNodesInDim_from_info = info_nodes->mx;
1783 } else if (dim == 1) { // J-direction
1784 xs_node_global_rank = info_nodes->ys;
1785 num_nodes_owned_rank = info_nodes->ym;
1786 GlobalNodesInDim_from_info = info_nodes->my;
1787 } else if (dim == 2) { // K-direction
1788 xs_node_global_rank = info_nodes->zs;
1789 num_nodes_owned_rank = info_nodes->zm;
1790 GlobalNodesInDim_from_info = info_nodes->mz;
1791 } else {
1792 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid dimension %d in GetOwnedCellRange. Must be 0, 1, or 2.", dim);
1793 }
1794
1795 // --- 3. Correct for User-Defined Ghost Node ---
1796 // Per the function's contract (@warning), the DA size includes an extra, non-physical
1797 // node. We subtract 1 to get the true number of physical nodes for cell calculations.
1798 const PetscInt physical_nodes_in_dim = GlobalNodesInDim_from_info - 1;
1799
1800 // --- 4. Handle Edge Cases for Physical Domain Size ---
1801 // If the physical domain has 0 or 1 node, no cells can be formed.
1802 if (physical_nodes_in_dim <= 1) {
1803 *xs_cell_global_out = xs_node_global_rank; // Still report the rank's starting node
1804 *xm_cell_local_out = 0; // But 0 cells
1805 PetscFunctionReturn(0);
1806 }
1807
1808 // --- 5. Determine Cell Ownership Based on Corrected Node Ownership ---
1809 // The first cell this rank *could* define has its origin at the first node this rank owns.
1810 *xs_cell_global_out = xs_node_global_rank;
1811
1812 // If the rank owns no nodes in this dimension, it can't form any cell origins.
1813 if (num_nodes_owned_rank == 0) {
1814 *xm_cell_local_out = 0;
1815 } else {
1816 // --- BUG FIX APPLIED HERE ---
1817 // The previous logic incorrectly assumed a cell's end node (N_{k+1}) must be on the
1818 // same rank as its origin node (N_k). The correct logic is to find the intersection
1819 // between the nodes this rank owns and the nodes that are valid origins globally.
1820
1821 // The first node owned by the rank is its first potential origin.
1822 PetscInt first_owned_origin = xs_node_global_rank;
1823
1824 // The absolute last node owned by this rank. Any node up to and including this one
1825 // is a potential cell origin from this rank's perspective.
1826 PetscInt last_node_owned_by_rank = xs_node_global_rank + num_nodes_owned_rank - 1;
1827
1828 // The absolute last node in the entire PHYSICAL domain that can serve as a cell origin.
1829 // If there are `N` physical nodes (0 to N-1), this index is `N-2`.
1830 PetscInt last_possible_origin_global_idx = physical_nodes_in_dim - 2;
1831
1832 // The actual last origin this rank can provide is the *minimum* of what it owns
1833 // and what is globally possible. This correctly handles both ranks in the middle of
1834 // the domain and the very last rank.
1835 PetscInt actual_last_origin_this_rank_can_form = PetscMin(last_node_owned_by_rank, last_possible_origin_global_idx);
1836
1837 // If the first potential origin this rank owns is already beyond the actual last
1838 // origin it can form, then this rank forms no valid cell origins. This happens if
1839 // the rank only owns the very last physical node.
1840 if (first_owned_origin > actual_last_origin_this_rank_can_form) {
1841 *xm_cell_local_out = 0;
1842 } else {
1843 // The number of cells is the count of valid origins this rank owns.
1844 // (Count = Last Index - First Index + 1)
1845 *xm_cell_local_out = actual_last_origin_this_rank_can_form - first_owned_origin + 1;
1846 }
1847 }
1848
1849 PetscFunctionReturn(ierr);
1850}
Here is the caller graph for this function:

◆ ComputeAndStoreNeighborRanks()

PetscErrorCode ComputeAndStoreNeighborRanks ( UserCtx *  user)

Computes and stores the Cartesian neighbor ranks for the DMDA decomposition.

This function retrieves the neighbor information from the primary DMDA (user->da) and stores the face neighbors (xm, xp, ym, yp, zm, zp) in the user->neighbors structure. It assumes a standard PETSc ordering for the neighbors array returned by DMDAGetNeighbors. If DMDAGetNeighbors returns a negative rank that is not MPI_PROC_NULL (which can happen in some PETSc/MPI configurations for non-periodic boundaries if not fully standard), this function will sanitize it to MPI_PROC_NULL to prevent issues.

Parameters
[in,out]userPointer to the UserCtx structure where neighbor info will be stored.
Returns
PetscErrorCode 0 on success, non-zero on failure.

Definition at line 1868 of file setup.c.

1869{
1870 PetscErrorCode ierr;
1871 PetscMPIInt rank;
1872 PetscMPIInt size; // MPI communicator size
1873 const PetscMPIInt *neighbor_ranks_ptr; // Pointer to raw neighbor data from PETSc
1874
1875 PetscFunctionBeginUser;
1877 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1878 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr); // Get MPI size for validation
1879
1880 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank %d: Computing DMDA neighbor ranks.\n", rank);
1881
1882 if (!user || !user->da) {
1883 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx or user->da is NULL in ComputeAndStoreNeighborRanks.");
1884 }
1885
1886 // Get the neighbor information from the DMDA
1887 // neighbor_ranks_ptr will point to an internal PETSc array of 27 ranks.
1888 ierr = DMDAGetNeighbors(user->da, &neighbor_ranks_ptr); CHKERRQ(ierr);
1889
1890 // Log the raw values from DMDAGetNeighbors for boundary-relevant directions for debugging
1891 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "[Rank %d]Raw DMDAGetNeighbors: xm_raw=%d, xp_raw=%d, ym_raw=%d, yp_raw=%d, zm_raw=%d, zp_raw=%d. MPI_PROC_NULL is %d.\n",
1892 rank,
1893 neighbor_ranks_ptr[12], neighbor_ranks_ptr[14],
1894 neighbor_ranks_ptr[10], neighbor_ranks_ptr[16],
1895 neighbor_ranks_ptr[4], neighbor_ranks_ptr[22],
1896 (int)MPI_PROC_NULL);
1897
1898 // PETSc standard indices for 3D face neighbors from the 27-point stencil:
1899 // Index = k_offset*9 + j_offset*3 + i_offset (where offsets -1,0,1 map to 0,1,2)
1900 // Center: (i_off=1, j_off=1, k_off=1) => 1*9 + 1*3 + 1 = 13
1901 // X-min: (i_off=0, j_off=1, k_off=1) => 1*9 + 1*3 + 0 = 12
1902 // X-plus: (i_off=2, j_off=1, k_off=1) => 1*9 + 1*3 + 2 = 14
1903 // Y-min: (i_off=1, j_off=0, k_off=1) => 1*9 + 0*3 + 1 = 10
1904 // Y-plus: (i_off=1, j_off=2, k_off=1) => 1*9 + 2*3 + 1 = 16
1905 // Z-min: (i_off=1, j_off=1, k_off=0) => 0*9 + 1*3 + 1 = 4
1906 // Z-plus: (i_off=1, j_off=1, k_off=2) => 2*9 + 1*3 + 1 = 22
1907
1908 if (neighbor_ranks_ptr[13] != rank) {
1909 LOG_ALLOW(GLOBAL, LOG_WARNING, "Rank %d: DMDAGetNeighbors center index (13) is %d, expected current rank %d. Neighbor indexing might be non-standard or DMDA small.\n",
1910 rank, neighbor_ranks_ptr[13], rank);
1911 // This warning is important. If the center isn't the current rank, the offsets are likely wrong.
1912 // However, PETSc should ensure this unless the DM is too small for a 3x3x3 stencil.
1913 }
1914
1915 // Assign and sanitize each neighbor rank
1916 PetscMPIInt temp_neighbor;
1917
1918 temp_neighbor = neighbor_ranks_ptr[12]; // xm
1919 if (temp_neighbor < 0 || temp_neighbor >= size) {
1920 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid xm neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1921 user->neighbors.rank_xm = MPI_PROC_NULL;
1922 } else {
1923 user->neighbors.rank_xm = temp_neighbor;
1924 }
1925
1926 temp_neighbor = neighbor_ranks_ptr[14]; // xp
1927 if (temp_neighbor < 0 || temp_neighbor >= size) {
1928 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid xp neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1929 user->neighbors.rank_xp = MPI_PROC_NULL;
1930 } else {
1931 user->neighbors.rank_xp = temp_neighbor;
1932 }
1933
1934 temp_neighbor = neighbor_ranks_ptr[10]; // ym
1935 if (temp_neighbor < 0 || temp_neighbor >= size) {
1936 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid ym neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1937 user->neighbors.rank_ym = MPI_PROC_NULL;
1938 } else {
1939 user->neighbors.rank_ym = temp_neighbor;
1940 }
1941
1942 temp_neighbor = neighbor_ranks_ptr[16]; // yp
1943 if (temp_neighbor < 0 || temp_neighbor >= size) {
1944 // The log for index 16 was "zm" in your output, should be yp
1945 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid yp neighbor (raw index 16) %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1946 user->neighbors.rank_yp = MPI_PROC_NULL;
1947 } else {
1948 user->neighbors.rank_yp = temp_neighbor;
1949 }
1950
1951 temp_neighbor = neighbor_ranks_ptr[4]; // zm
1952 if (temp_neighbor < 0 || temp_neighbor >= size) {
1953 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid zm neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1954 user->neighbors.rank_zm = MPI_PROC_NULL;
1955 } else {
1956 user->neighbors.rank_zm = temp_neighbor;
1957 }
1958
1959 temp_neighbor = neighbor_ranks_ptr[22]; // zp
1960 if (temp_neighbor < 0 || temp_neighbor >= size) {
1961 LOG_ALLOW(GLOBAL, LOG_WARNING, "[Rank %d] Correcting invalid zp neighbor %d to MPI_PROC_NULL (%d).\n", rank, temp_neighbor, (int)MPI_PROC_NULL);
1962 user->neighbors.rank_zp = MPI_PROC_NULL;
1963 } else {
1964 user->neighbors.rank_zp = temp_neighbor;
1965 }
1966
1967 LOG_ALLOW_SYNC(GLOBAL, LOG_DEBUG, "[Rank %d] Stored user->neighbors: xm=%d, xp=%d, ym=%d, yp=%d, zm=%d, zp=%d\n", rank,
1968 user->neighbors.rank_xm, user->neighbors.rank_xp,
1969 user->neighbors.rank_ym, user->neighbors.rank_yp,
1970 user->neighbors.rank_zm, user->neighbors.rank_zp);
1971 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT); // Ensure logs are flushed
1972
1973 // Note: neighbor_ranks_ptr memory is managed by PETSc, do not free it.
1975 PetscFunctionReturn(0);
1976}
PetscMPIInt rank_zm
Definition variables.h:182
PetscMPIInt rank_yp
Definition variables.h:181
PetscMPIInt rank_ym
Definition variables.h:181
PetscMPIInt rank_xp
Definition variables.h:180
RankNeighbors neighbors
Definition variables.h:745
PetscMPIInt rank_xm
Definition variables.h:180
PetscMPIInt rank_zp
Definition variables.h:182
Here is the caller graph for this function:

◆ SetDMDAProcLayout()

PetscErrorCode SetDMDAProcLayout ( DM  dm,
UserCtx user 
)

Sets the processor layout for a given DMDA based on PETSc options.

Reads the desired number of processors in the x, y, and z directions from the simulation context (simCtx->da_procs_x/y/z, typically populated earlier from command-line options). If a value is unset (zero) for a direction, PETSC_DECIDE is used for that direction. Applies the layout using DMDASetNumProcs.

Note: storing the retrieved/decided values in user->procs_x/y/z is currently disabled (commented out) in the implementation; to obtain the actual layout chosen by PETSc, call DMDAGetInfo after DMSetUp.

Parameters
dmThe DMDA object to configure the layout for.
userPointer to the UserCtx structure (optional, used to store layout values).
Returns
PetscErrorCode 0 on success, non-zero on failure.

Definition at line 1995 of file setup.c.

1996{
1997 PetscErrorCode ierr;
1998 PetscMPIInt size, rank;
1999 PetscInt px = PETSC_DECIDE, py = PETSC_DECIDE, pz = PETSC_DECIDE;
2000 PetscBool px_set = PETSC_FALSE, py_set = PETSC_FALSE, pz_set = PETSC_FALSE;
2001 SimCtx *simCtx = user->simCtx;
2002
2003 // Set no.of processors in direction 1
2004 if(simCtx->da_procs_x) {
2005 px_set = PETSC_TRUE;
2006 px = simCtx->da_procs_x;
2007 }
2008 // Set no.of processors in direction 2
2009 if(simCtx->da_procs_y) {
2010 py_set = PETSC_TRUE;
2011 py = simCtx->da_procs_y;
2012 }
2013 // Set no.of processors in direction 1
2014 if(simCtx->da_procs_z) {
2015 pz_set = PETSC_TRUE;
2016 pz = simCtx->da_procs_z;
2017 }
2018
2019 PetscFunctionBeginUser;
2021 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size); CHKERRQ(ierr);
2022 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank); CHKERRQ(ierr);
2023 LOG_ALLOW(GLOBAL, LOG_INFO, "Rank %d: Configuring DMDA processor layout for %d total processes.\n", rank, size);
2024
2025 // --- Validate User Input (Optional but Recommended) ---
2026 // Check if specified processor counts multiply to the total MPI size
2027 if (px_set && py_set && pz_set) {
2028 if (px * py * pz != size) {
2029 SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_INCOMP,
2030 "Specified processor layout %d x %d x %d = %d does not match MPI size %d",
2031 px, py, pz, px * py * pz, size);
2032 }
2033 LOG_ALLOW(GLOBAL, LOG_INFO, "Using specified processor layout: %d x %d x %d\n", px, py, pz);
2034 } else if (px_set || py_set || pz_set) {
2035 // If only some are set, PETSC_DECIDE will be used for others
2036 LOG_ALLOW(GLOBAL, LOG_INFO, "Using partially specified processor layout: %d x %d x %d (PETSC_DECIDE for unspecified)\n", px, py, pz);
2037 } else {
2038 LOG_ALLOW(GLOBAL, LOG_INFO, "Using fully automatic processor layout (PETSC_DECIDE x PETSC_DECIDE x PETSC_DECIDE)\n");
2039 }
2040 // Additional checks: Ensure px, py, pz are positive if set
2041 if ((px_set && px <= 0) || (py_set && py <= 0) || (pz_set && pz <= 0)) {
2042 SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Specified processor counts must be positive.");
2043 }
2044
2045
2046 // --- Apply the layout to the DMDA ---
2047 ierr = DMDASetNumProcs(dm, px, py, pz); CHKERRQ(ierr);
2048 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Rank %d: DMDASetNumProcs called with px=%d, py=%d, pz=%d.\n", rank, px, py, pz);
2049
2050 // --- Store the values in UserCtx (Optional) ---
2051 // Note: If PETSC_DECIDE was used, PETSc calculates the actual values during DMSetUp.
2052 // We store the *requested* values here. To get the *actual* values used,
2053 // you would need to call DMDAGetInfo after DMSetUp.
2054 /*
2055 if (user) {
2056 user->procs_x = px;
2057 user->procs_y = py;
2058 user->procs_z = pz;
2059 }
2060 */
2062 PetscFunctionReturn(0);
2063}

◆ SetupDomainRankInfo()

PetscErrorCode SetupDomainRankInfo ( SimCtx simCtx)

Sets up the full rank communication infrastructure for all blocks.

Sets up the full rank communication infrastructure, including neighbor ranks and bounding box exchange.

This function orchestrates the setup of the parallel communication map. It uses the existing helper functions to sequentially gather and broadcast the bounding box information for each computational block. All ranks participate in each step, assembling the final, unified multi-block list in parallel.

Parameters
[in,out]simCtxThe master simulation context. After this call, simCtx->bboxlist and the relevant user->neighbors fields will be populated on all ranks.

Definition at line 2079 of file setup.c.

2080{
2081 PetscErrorCode ierr;
2082 PetscInt nblk = simCtx->block_number;
2083 PetscInt size = simCtx->size;
2084 BoundingBox *final_bboxlist = NULL;
2085
2086 PetscFunctionBeginUser;
2088
2089 LOG_ALLOW(GLOBAL, LOG_INFO, "Starting full rank communication setup for %d block(s).\n", nblk);
2090
2091 UserCtx *user_finest = simCtx->usermg.mgctx[simCtx->usermg.mglevels - 1].user;
2092
2093 // --- Step 1: Compute neighbor ranks (unchanged) ---
2094 for (int bi = 0; bi < nblk; bi++) {
2095 ierr = ComputeAndStoreNeighborRanks(&user_finest[bi]); CHKERRQ(ierr);
2096 }
2097 LOG_ALLOW(GLOBAL, LOG_INFO, "Neighbor ranks computed and stored for all blocks.\n");
2098
2099 // --- Step 2: Allocate the final, unified list on ALL ranks ---
2100 // Every rank will build this list in parallel.
2101 ierr = PetscMalloc1(size * nblk, &final_bboxlist); CHKERRQ(ierr);
2102
2103 // --- Step 3: Loop through each block, gather then broadcast its bbox list ---
2104 for (int bi = 0; bi < nblk; bi++) {
2105 // This is a temporary pointer for the current block's list.
2106 BoundingBox *block_bboxlist = NULL;
2107
2108 LOG_ALLOW(GLOBAL, LOG_INFO, "Processing bounding boxes for block %d...\n", bi);
2109
2110 // A) GATHER: On rank 0, block_bboxlist is allocated and filled. On others, it's NULL.
2111 ierr = GatherAllBoundingBoxes(&user_finest[bi], &block_bboxlist); CHKERRQ(ierr);
2112 LOG_ALLOW(GLOBAL, LOG_DEBUG, " -> Gather complete for block %d.\n", bi);
2113
2114 // B) BROADCAST: On non-root ranks, block_bboxlist is allocated. Then, the data
2115 // from rank 0 is broadcast to all ranks. After this call, ALL ranks have
2116 // an identical, complete copy of the bounding boxes for the current block.
2117 ierr = BroadcastAllBoundingBoxes(&user_finest[bi], &block_bboxlist); CHKERRQ(ierr);
2118 LOG_ALLOW(GLOBAL, LOG_DEBUG, " -> Broadcast complete for block %d.\n", bi);
2119
2120 // C) ASSEMBLE: Every rank now copies the data for this block into the
2121 // correct segment of its final, unified list.
2122 for (int r = 0; r < size; r++) {
2123 // The layout is [r0b0, r1b0, ..., r(size-1)b0, r0b1, r1b1, ...]
2124 final_bboxlist[bi * size + r] = block_bboxlist[r];
2125 }
2126 LOG_ALLOW(GLOBAL, LOG_DEBUG, " -> Assembly into final list complete for block %d.\n", bi);
2127
2128 // D) CLEANUP: Free the temporary list for this block on ALL ranks before the next iteration.
2129 // Your helper functions use malloc, so we must use free.
2130 free(block_bboxlist);
2131 }
2132
2133 // --- Step 4: Assign the final pointer and run the last setup step ---
2134 simCtx->bboxlist = final_bboxlist;
2135 LOG_ALLOW(GLOBAL, LOG_INFO, "Final unified bboxlist created on all ranks and stored in SimCtx.\n");
2136
2137 ierr = SetupDomainCellDecompositionMap(&user_finest[0]); CHKERRQ(ierr);
2138 LOG_ALLOW(GLOBAL, LOG_INFO, "Domain Cell Composition set and broadcasted.\n");
2139
2140 LOG_ALLOW(GLOBAL, LOG_INFO, "SetupDomainRankInfo: Completed successfully.\n");
2141
2143 PetscFunctionReturn(0);
2144}
PetscErrorCode BroadcastAllBoundingBoxes(UserCtx *user, BoundingBox **bboxlist)
Broadcasts the bounding box information collected on rank 0 to all other ranks.
Definition grid.c:1030
PetscErrorCode GatherAllBoundingBoxes(UserCtx *user, BoundingBox **allBBoxes)
Gathers local bounding boxes from all MPI processes to rank 0.
Definition grid.c:962
PetscErrorCode ComputeAndStoreNeighborRanks(UserCtx *user)
Computes and stores the Cartesian neighbor ranks for the DMDA decomposition.
Definition setup.c:1868
PetscErrorCode SetupDomainCellDecompositionMap(UserCtx *user)
Creates and distributes a map of the domain's cell decomposition to all ranks.
Definition setup.c:2340
Defines a 3D axis-aligned bounding box.
Definition variables.h:154
Here is the call graph for this function:
Here is the caller graph for this function:

◆ Contra2Cart()

PetscErrorCode Contra2Cart ( UserCtx user)

Reconstructs Cartesian velocity (Ucat) at cell centers from contravariant velocity (Ucont) defined on cell faces.

This function performs the transformation from a contravariant velocity representation (which is natural on a curvilinear grid) to a Cartesian (x,y,z) representation. For each interior computational cell owned by the rank, it performs the following:

  1. It averages the contravariant velocity components (U¹, U², U³) from the surrounding faces to get an estimate of the contravariant velocity at the cell center.
  2. It averages the metric vectors (Csi, Eta, Zet) from the surrounding faces to get an estimate of the metric tensor at the cell center. This tensor forms the transformation matrix.
  3. It solves the linear system [MetricTensor] * [ucat] = [ucont] for the Cartesian velocity vector ucat = (u,v,w) using Cramer's rule.
  4. The computed Cartesian velocity is stored in the global user->Ucat vector.

The function operates on local, ghosted versions of the input vectors (user->lUcont, user->lCsi, etc.) to ensure stencils are valid across processor boundaries.

Parameters
[in,out]userPointer to the UserCtx structure. The function reads from user->lUcont, user->lCsi, user->lEta, user->lZet, user->lNvert and writes to the global user->Ucat vector.
Returns
PetscErrorCode 0 on success.
Note
  • This function should be called AFTER user->lUcont and all local metric vectors (user->lCsi, etc.) have been populated with up-to-date ghost values via UpdateLocalGhosts.
  • It only computes Ucat for interior cells (not on physical boundaries) and for cells not marked as solid/blanked by user->lNvert.
  • The caller is responsible for subsequently applying boundary conditions to user->Ucat and calling UpdateLocalGhosts(user, "Ucat") to populate user->lUcat.

Definition at line 2182 of file setup.c.

2183{
2184 PetscErrorCode ierr;
2185 DMDALocalInfo info;
2186 Cmpnts ***lcsi_arr, ***leta_arr, ***lzet_arr; // Local metric arrays
2187 Cmpnts ***lucont_arr; // Local contravariant velocity array
2188 Cmpnts ***gucat_arr; // Global Cartesian velocity array
2189 PetscReal ***lnvert_arr; // Local Nvert array
2190 PetscReal ***laj_arr; // Local Jacobian Determinant inverse array
2191
2192 PetscFunctionBeginUser;
2194 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Starting Contravariant-to-Cartesian velocity transformation.\n");
2195
2196 // --- 1. Get DMDA Info and Check for Valid Inputs ---
2197 // All inputs (lUcont, lCsi, etc.) and outputs (Ucat) are on DMs from the UserCtx.
2198 // We get local info from fda, which governs the layout of most arrays here.
2199 ierr = DMDAGetLocalInfo(user->fda, &info); CHKERRQ(ierr);
2200 if (!user->lUcont || !user->lCsi || !user->lEta || !user->lZet || !user->lNvert || !user->Ucat) {
2201 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Contra2Cart requires lUcont, lCsi/Eta/Zet, lNvert, and Ucat to be non-NULL.");
2202 }
2203
2204
2205 // --- 2. Get Read-Only Array Access to Local Input Vectors (with ghosts) ---
2206 ierr = DMDAVecGetArrayRead(user->fda, user->lUcont, &lucont_arr); CHKERRQ(ierr);
2207 ierr = DMDAVecGetArrayRead(user->fda, user->lCsi, &lcsi_arr); CHKERRQ(ierr);
2208 ierr = DMDAVecGetArrayRead(user->fda, user->lEta, &leta_arr); CHKERRQ(ierr);
2209 ierr = DMDAVecGetArrayRead(user->fda, user->lZet, &lzet_arr); CHKERRQ(ierr);
2210 ierr = DMDAVecGetArrayRead(user->da, user->lNvert, &lnvert_arr); CHKERRQ(ierr);
2211 ierr = DMDAVecGetArrayRead(user->da, user->lAj, &laj_arr); CHKERRQ(ierr);
2212
2213 // --- 3. Get Write-Only Array Access to the Global Output Vector ---
2214 // We compute for local owned cells and write into the global vector.
2215 // PETSc handles mapping the global indices to the correct local memory locations.
2216 ierr = DMDAVecGetArray(user->fda, user->Ucat, &gucat_arr); CHKERRQ(ierr);
2217
2218
2219 // --- 4. Define Loop Bounds for INTERIOR Cells ---
2220 // We use adjusted bounds to avoid calculating Ucat on the physical domain boundaries,
2221 // as these are typically set explicitly by boundary condition functions.
2222 // The stencils use indices like i-1, j-1, k-1, so we must start loops at least at index 1.
2223 PetscInt i_start = (info.xs == 0) ? info.xs + 1 : info.xs;
2224 PetscInt i_end = (info.xs + info.xm == info.mx) ? info.xs + info.xm - 1 : info.xs + info.xm;
2225
2226 PetscInt j_start = (info.ys == 0) ? info.ys + 1 : info.ys;
2227 PetscInt j_end = (info.ys + info.ym == info.my) ? info.ys + info.ym - 1 : info.ys + info.ym;
2228
2229 PetscInt k_start = (info.zs == 0) ? info.zs + 1 : info.zs;
2230 PetscInt k_end = (info.zs + info.zm == info.mz) ? info.zs + info.zm - 1 : info.zs + info.zm;
2231
2232 // --- 5. Main Computation Loop ---
2233 // Loops over the GLOBAL indices of interior cells owned by this rank.
2234 for (PetscInt k_cell = k_start; k_cell < k_end; ++k_cell) {
2235 for (PetscInt j_cell = j_start; j_cell < j_end; ++j_cell) {
2236 for (PetscInt i_cell = i_start; i_cell < i_end; ++i_cell) {
2237
2238 // Check if the cell is a fluid cell (not solid/blanked)
2239 // if (lnvert_arr[k_cell][j_cell][i_cell] > 0.1) continue; // Skip solid/blanked cells
2240
2241 // Transformation matrix [mat] is the metric tensor at the cell center,
2242 // estimated by averaging metrics from adjacent faces.
2243 PetscReal mat[3][3];
2244
2245 // PetscReal aj_center = laj_arr[k_cell+1][j_cell+1][i_cell+1];
2246
2247 mat[0][0] = 0.5 * (lcsi_arr[k_cell][j_cell][i_cell-1].x + lcsi_arr[k_cell][j_cell][i_cell].x); //* aj_center;
2248 mat[0][1] = 0.5 * (lcsi_arr[k_cell][j_cell][i_cell-1].y + lcsi_arr[k_cell][j_cell][i_cell].y); //* aj_center;
2249 mat[0][2] = 0.5 * (lcsi_arr[k_cell][j_cell][i_cell-1].z + lcsi_arr[k_cell][j_cell][i_cell].z); //* aj_center;
2250
2251 mat[1][0] = 0.5 * (leta_arr[k_cell][j_cell-1][i_cell].x + leta_arr[k_cell][j_cell][i_cell].x); //* aj_center;
2252 mat[1][1] = 0.5 * (leta_arr[k_cell][j_cell-1][i_cell].y + leta_arr[k_cell][j_cell][i_cell].y); //* aj_center;
2253 mat[1][2] = 0.5 * (leta_arr[k_cell][j_cell-1][i_cell].z + leta_arr[k_cell][j_cell][i_cell].z); //* aj_center;
2254
2255 mat[2][0] = 0.5 * (lzet_arr[k_cell-1][j_cell][i_cell].x + lzet_arr[k_cell][j_cell][i_cell].x); //* aj_center;
2256 mat[2][1] = 0.5 * (lzet_arr[k_cell-1][j_cell][i_cell].y + lzet_arr[k_cell][j_cell][i_cell].y); //* aj_center;
2257 mat[2][2] = 0.5 * (lzet_arr[k_cell-1][j_cell][i_cell].z + lzet_arr[k_cell][j_cell][i_cell].z); //* aj_center;
2258
2259 // Contravariant velocity vector `q` at the cell center,
2260 // estimated by averaging face-based contravariant velocities.
2261 PetscReal q[3];
2262 q[0] = 0.5 * (lucont_arr[k_cell][j_cell][i_cell-1].x + lucont_arr[k_cell][j_cell][i_cell].x); // U¹ at cell center
2263 q[1] = 0.5 * (lucont_arr[k_cell][j_cell-1][i_cell].y + lucont_arr[k_cell][j_cell][i_cell].y); // U² at cell center
2264 q[2] = 0.5 * (lucont_arr[k_cell-1][j_cell][i_cell].z + lucont_arr[k_cell][j_cell][i_cell].z); // U³ at cell center
2265
2266 // Solve the 3x3 system `mat * ucat = q` using Cramer's rule.
2267 PetscReal det = mat[0][0] * (mat[1][1] * mat[2][2] - mat[1][2] * mat[2][1]) -
2268 mat[0][1] * (mat[1][0] * mat[2][2] - mat[1][2] * mat[2][0]) +
2269 mat[0][2] * (mat[1][0] * mat[2][1] - mat[1][1] * mat[2][0]);
2270
2271 if (PetscAbsReal(det) < 1.0e-18) {
2272 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FLOP_COUNT, "Transformation matrix determinant is near zero at cell (%d,%d,%d) \n", i_cell, j_cell, k_cell);
2273 }
2274
2275 PetscReal det_inv = 1.0 / det;
2276
2277 PetscReal det0 = q[0] * (mat[1][1] * mat[2][2] - mat[1][2] * mat[2][1]) -
2278 q[1] * (mat[0][1] * mat[2][2] - mat[0][2] * mat[2][1]) +
2279 q[2] * (mat[0][1] * mat[1][2] - mat[0][2] * mat[1][1]);
2280
2281 PetscReal det1 = -q[0] * (mat[1][0] * mat[2][2] - mat[1][2] * mat[2][0]) +
2282 q[1] * (mat[0][0] * mat[2][2] - mat[0][2] * mat[2][0]) -
2283 q[2] * (mat[0][0] * mat[1][2] - mat[0][2] * mat[1][0]);
2284
2285 PetscReal det2 = q[0] * (mat[1][0] * mat[2][1] - mat[1][1] * mat[2][0]) -
2286 q[1] * (mat[0][0] * mat[2][1] - mat[0][1] * mat[2][0]) +
2287 q[2] * (mat[0][0] * mat[1][1] - mat[0][1] * mat[1][0]);
2288
2289 // Store computed Cartesian velocity in the GLOBAL Ucat array at the
2290 // array index corresponding to the cell's origin node.
2291 gucat_arr[k_cell][j_cell][i_cell].x = det0 * det_inv;
2292 gucat_arr[k_cell][j_cell][i_cell].y = det1 * det_inv;
2293 gucat_arr[k_cell][j_cell][i_cell].z = det2 * det_inv;
2294 }
2295 }
2296 }
2297
2298 // --- 6. Restore Array Access ---
2299 ierr = DMDAVecRestoreArrayRead(user->fda, user->lUcont, &lucont_arr); CHKERRQ(ierr);
2300 ierr = DMDAVecRestoreArrayRead(user->fda, user->lCsi, &lcsi_arr); CHKERRQ(ierr);
2301 ierr = DMDAVecRestoreArrayRead(user->fda, user->lEta, &leta_arr); CHKERRQ(ierr);
2302 ierr = DMDAVecRestoreArrayRead(user->fda, user->lZet, &lzet_arr); CHKERRQ(ierr);
2303 ierr = DMDAVecRestoreArrayRead(user->da, user->lNvert, &lnvert_arr); CHKERRQ(ierr);
2304 ierr = DMDAVecRestoreArrayRead(user->da, user->lAj, &laj_arr); CHKERRQ(ierr);
2305 ierr = DMDAVecRestoreArray(user->fda, user->Ucat, &gucat_arr); CHKERRQ(ierr);
2306
2307 LOG_ALLOW(GLOBAL, LOG_INFO, "Completed Contravariant-to-Cartesian velocity transformation. \n");
2309 PetscFunctionReturn(0);
2310}
Here is the caller graph for this function:

◆ SetupDomainCellDecompositionMap()

PetscErrorCode SetupDomainCellDecompositionMap ( UserCtx user)

Creates and distributes a map of the domain's cell decomposition to all ranks.

This function is a critical part of the simulation setup. It determines the global cell ownership for each MPI rank and makes this information available to all other ranks. This "decomposition map" is essential for the robust "Walk and Handoff" particle migration strategy, allowing any rank to quickly identify the owner of a target cell.

The process involves:

  1. Each rank gets its own node ownership information from the DMDA.
  2. It converts this node information into cell ownership ranges using the GetOwnedCellRange helper function.
  3. It participates in an MPI_Allgather collective operation to build a complete array (user->RankCellInfoMap) containing the ownership information for every rank.

This function should be called once during initialization after the primary DMDA (user->da) has been set up.

Parameters
[in,out]userPointer to the UserCtx structure. The function will allocate and populate user->RankCellInfoMap and set user->num_ranks.
Returns
PetscErrorCode 0 on success, or a non-zero PETSc error code on failure. Errors can occur if input pointers are NULL or if MPI communication fails.

Definition at line 2340 of file setup.c.

2341{
2342 PetscErrorCode ierr;
2343 DMDALocalInfo local_node_info;
2344 RankCellInfo my_cell_info;
2345 PetscMPIInt rank, size;
2346
2347 PetscFunctionBeginUser;
2349
2350 // --- 1. Input Validation and MPI Info ---
2351 if (!user) {
2352 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "UserCtx pointer is NULL in SetupDomainCellDecompositionMap.");
2353 }
2354 if (!user->da) {
2355 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "user->da is not initialized in SetupDomainCellDecompositionMap.");
2356 }
2357
2358 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2359 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
2360
2361 LOG_ALLOW(GLOBAL, LOG_INFO, "Setting up domain cell decomposition map for %d ranks.\n", size);
2362
2363 // --- 2. Determine Local Cell Ownership ---
2364 // Get the local node ownership information from the primary DMDA.
2365 ierr = DMDAGetLocalInfo(user->da, &local_node_info); CHKERRQ(ierr);
2366
2367 // Use the robust helper function to convert node ownership to cell ownership.
2368 // A cell's index is defined by its origin node.
2369
2370 ierr = GetOwnedCellRange(&local_node_info, 0, &my_cell_info.xs_cell, &my_cell_info.xm_cell); CHKERRQ(ierr);
2371 ierr = GetOwnedCellRange(&local_node_info, 1, &my_cell_info.ys_cell, &my_cell_info.ym_cell); CHKERRQ(ierr);
2372 ierr = GetOwnedCellRange(&local_node_info, 2, &my_cell_info.zs_cell, &my_cell_info.zm_cell); CHKERRQ(ierr);
2373
2374 // Log the calculated local ownership for debugging purposes.
2375 LOG_ALLOW(LOCAL, LOG_DEBUG, "[Rank %d] Owns cells: i[%d, %d), j[%d, %d), k[%d, %d)\n",
2376 rank, my_cell_info.xs_cell, my_cell_info.xs_cell + my_cell_info.xm_cell,
2377 my_cell_info.ys_cell, my_cell_info.ys_cell + my_cell_info.ym_cell,
2378 my_cell_info.zs_cell, my_cell_info.zs_cell + my_cell_info.zm_cell);
2379
2380 // --- 3. Allocate and Distribute the Global Map ---
2381 // Allocate memory for the global map that will hold information from all ranks.
2382 ierr = PetscMalloc1(size, &user->RankCellInfoMap); CHKERRQ(ierr);
2383
2384 // Perform the collective communication to gather the `RankCellInfo` struct from every rank.
2385 // Each rank sends its `my_cell_info` and receives the complete array in `user->RankCellInfoMap`.
2386 // We use MPI_BYTE to ensure portability across different systems and struct padding.
2387 ierr = MPI_Allgather(&my_cell_info, sizeof(RankCellInfo), MPI_BYTE,
2388 user->RankCellInfoMap, sizeof(RankCellInfo), MPI_BYTE,
2389 PETSC_COMM_WORLD); CHKERRQ(ierr);
2390
2391 LOG_ALLOW(GLOBAL, LOG_INFO, "Domain cell decomposition map created and distributed successfully.\n");
2392
2394 PetscFunctionReturn(0);
2395}
PetscErrorCode GetOwnedCellRange(const DMDALocalInfo *info_nodes, PetscInt dim, PetscInt *xs_cell_global_out, PetscInt *xm_cell_local_out)
Gets the global starting index of cells owned by this rank and the number of such cells.
Definition setup.c:1761
PetscInt ys_cell
Definition variables.h:187
PetscInt xs_cell
Definition variables.h:187
PetscInt zm_cell
Definition variables.h:188
PetscInt zs_cell
Definition variables.h:187
PetscInt xm_cell
Definition variables.h:188
RankCellInfo * RankCellInfoMap
Definition variables.h:804
PetscInt ym_cell
Definition variables.h:188
A lean struct to hold the global cell ownership range for a single MPI rank.
Definition variables.h:186
Here is the call graph for this function:
Here is the caller graph for this function:

◆ BinarySearchInt64()

PetscErrorCode BinarySearchInt64 ( PetscInt  n,
const PetscInt64  arr[],
PetscInt64  key,
PetscBool *  found 
)

Performs a binary search for a key in a sorted array of PetscInt64.

This is a standard binary search algorithm implemented as a PETSc-style helper function. It efficiently determines if a given key exists within a sorted array.

Parameters
[in]nThe number of elements in the array.
[in]arrA pointer to the sorted array of PetscInt64 values to be searched.
[in]keyThe PetscInt64 value to search for.
[out]foundA pointer to a PetscBool that will be set to PETSC_TRUE if the key is found, and PETSC_FALSE otherwise.
Returns
PetscErrorCode 0 on success, or a non-zero PETSc error code on failure.
Note
The input array arr must be sorted in ascending order for the algorithm to work correctly.

Definition at line 2416 of file setup.c.

2417{
2418 PetscInt low = 0, high = n - 1;
2419
2420 PetscFunctionBeginUser;
2422
2423 // --- 1. Input Validation ---
2424 if (!found) {
2425 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Output pointer 'found' is NULL in PetscBinarySearchInt64.");
2426 }
2427 if (n > 0 && !arr) {
2428 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Input array 'arr' is NULL for n > 0.");
2429 }
2430
2431 // Initialize output
2432 *found = PETSC_FALSE;
2433
2434 // --- 2. Binary Search Algorithm ---
2435 while (low <= high) {
2436 // Use this form to prevent potential integer overflow on very large arrays
2437 PetscInt mid = low + (high - low) / 2;
2438
2439 if (arr[mid] == key) {
2440 *found = PETSC_TRUE; // Key found!
2441 break; // Exit the loop
2442 }
2443
2444 if (arr[mid] < key) {
2445 low = mid + 1; // Search in the right half
2446 } else {
2447 high = mid - 1; // Search in the left half
2448 }
2449 }
2450
2452 PetscFunctionReturn(0);
2453}
Here is the caller graph for this function:

◆ Gidx()

static PetscInt Gidx ( PetscInt  i,
PetscInt  j,
PetscInt  k,
UserCtx user 
)
static

Definition at line 2456 of file setup.c.

2457{
2458 PetscInt nidx;
2459 DMDALocalInfo info = user->info;
2460
2461 PetscInt mx = info.mx, my = info.my;
2462
2463 AO ao;
2464 DMDAGetAO(user->da, &ao);
2465 nidx=i+j*mx+k*mx*my;
2466
2467 AOApplicationToPetsc(ao,1,&nidx);
2468
2469 return (nidx);
2470}
DMDALocalInfo info
Definition variables.h:740
Here is the caller graph for this function:

◆ ComputeDivergence()

PetscErrorCode ComputeDivergence ( UserCtx user)

Definition at line 2475 of file setup.c.

2476{
2477 DM da = user->da, fda = user->fda;
2478 DMDALocalInfo info = user->info;
2479
2480 PetscInt ti = user->simCtx->step;
2481
2482 PetscInt xs = info.xs, xe = info.xs + info.xm;
2483 PetscInt ys = info.ys, ye = info.ys + info.ym;
2484 PetscInt zs = info.zs, ze = info.zs + info.zm;
2485 PetscInt mx = info.mx, my = info.my, mz = info.mz;
2486
2487 PetscInt lxs, lys, lzs, lxe, lye, lze;
2488 PetscInt i, j, k;
2489
2490 Vec Div;
2491 PetscReal ***div, ***aj, ***nvert,***p;
2492 Cmpnts ***ucont;
2493 PetscReal maxdiv;
2494
2495 lxs = xs; lxe = xe;
2496 lys = ys; lye = ye;
2497 lzs = zs; lze = ze;
2498
2499 if (xs==0) lxs = xs+1;
2500 if (ys==0) lys = ys+1;
2501 if (zs==0) lzs = zs+1;
2502
2503 if (xe==mx) lxe = xe-1;
2504 if (ye==my) lye = ye-1;
2505 if (ze==mz) lze = ze-1;
2506
2507 PetscFunctionBeginUser;
2509
2510 DMDAVecGetArray(fda,user->lUcont, &ucont);
2511 DMDAVecGetArray(da, user->lAj, &aj);
2512 VecDuplicate(user->P, &Div);
2513 DMDAVecGetArray(da, Div, &div);
2514 DMDAVecGetArray(da, user->lNvert, &nvert);
2515 DMDAVecGetArray(da, user->P, &p);
2516 for (k=lzs; k<lze; k++) {
2517 for (j=lys; j<lye; j++){
2518 for (i=lxs; i<lxe; i++) {
2519 if (k==10 && j==10 && i==1){
2520 LOG_ALLOW(LOCAL,LOG_INFO,"Pressure[10][10][1] = %f | Pressure[10][10][0] = %f \n ",p[k][j][i],p[k][j][i-1]);
2521 }
2522
2523 if (k==10 && j==10 && i==mx-3)
2524 LOG_ALLOW(LOCAL,LOG_INFO,"Pressure[10][10][%d] = %f | Pressure[10][10][%d] = %f \n ",mx-2,p[k][j][mx-2],mx-1,p[k][j][mx-1]);
2525 }
2526 }
2527 }
2528 DMDAVecRestoreArray(da, user->P, &p);
2529
2530
2531 for (k=lzs; k<lze; k++) {
2532 for (j=lys; j<lye; j++) {
2533 for (i=lxs; i<lxe; i++) {
2534 maxdiv = fabs((ucont[k][j][i].x - ucont[k][j][i-1].x +
2535 ucont[k][j][i].y - ucont[k][j-1][i].y +
2536 ucont[k][j][i].z - ucont[k-1][j][i].z)*aj[k][j][i]);
2537 if (nvert[k][j][i] + nvert[k+1][j][i] + nvert[k-1][j][i] +
2538 nvert[k][j+1][i] + nvert[k][j-1][i] +
2539 nvert[k][j][i+1] + nvert[k][j][i-1] > 0.1) maxdiv = 0.;
2540 div[k][j][i] = maxdiv;
2541
2542 }
2543 }
2544 }
2545
2546 if (zs==0) {
2547 k=0;
2548 for (j=ys; j<ye; j++) {
2549 for (i=xs; i<xe; i++) {
2550 div[k][j][i] = 0.;
2551 }
2552 }
2553 }
2554
2555 if (ze == mz) {
2556 k=mz-1;
2557 for (j=ys; j<ye; j++) {
2558 for (i=xs; i<xe; i++) {
2559 div[k][j][i] = 0.;
2560 }
2561 }
2562 }
2563
2564 if (xs==0) {
2565 i=0;
2566 for (k=zs; k<ze; k++) {
2567 for (j=ys; j<ye; j++) {
2568 div[k][j][i] = 0.;
2569 }
2570 }
2571 }
2572
2573 if (xe==mx) {
2574 i=mx-1;
2575 for (k=zs; k<ze; k++) {
2576 for (j=ys; j<ye; j++) {
2577 div[k][j][i] = 0;
2578 }
2579 }
2580 }
2581
2582 if (ys==0) {
2583 j=0;
2584 for (k=zs; k<ze; k++) {
2585 for (i=xs; i<xe; i++) {
2586 div[k][j][i] = 0.;
2587 }
2588 }
2589 }
2590
2591 if (ye==my) {
2592 j=my-1;
2593 for (k=zs; k<ze; k++) {
2594 for (i=xs; i<xe; i++) {
2595 div[k][j][i] = 0.;
2596 }
2597 }
2598 }
2599 DMDAVecRestoreArray(da, Div, &div);
2600 PetscInt MaxFlatIndex;
2601
2602 VecMax(Div, &MaxFlatIndex, &maxdiv);
2603
2604 LOG_ALLOW(GLOBAL,LOG_INFO,"[Step %d]] The Maximum Divergence is %e at flat index %d.\n",ti,maxdiv,MaxFlatIndex);
2605
2606 user->simCtx->MaxDivFlatArg = MaxFlatIndex;
2607 user->simCtx->MaxDiv = maxdiv;
2608
2609 for (k=zs; k<ze; k++) {
2610 for (j=ys; j<ye; j++) {
2611 for (i=xs; i<xe; i++) {
2612 if (Gidx(i,j,k,user) == MaxFlatIndex) {
2613 LOG_ALLOW(GLOBAL,LOG_INFO,"[Step %d] The Maximum Divergence(%e) is at location [%d][%d][%d]. \n", ti, maxdiv,k,j,i);
2614 user->simCtx->MaxDivz = k;
2615 user->simCtx->MaxDivy = j;
2616 user->simCtx->MaxDivx = i;
2617 }
2618 }
2619 }
2620 }
2621
2622
2623 DMDAVecRestoreArray(da, user->lNvert, &nvert);
2624 DMDAVecRestoreArray(fda, user->lUcont, &ucont);
2625 DMDAVecRestoreArray(da, user->lAj, &aj);
2626 VecDestroy(&Div);
2627
2629 PetscFunctionReturn(0);
2630}
static PetscInt Gidx(PetscInt i, PetscInt j, PetscInt k, UserCtx *user)
Definition setup.c:2456
Here is the call graph for this function:
Here is the caller graph for this function:

◆ InitializeRandomGenerators()

PetscErrorCode InitializeRandomGenerators ( UserCtx user,
PetscRandom *  randx,
PetscRandom *  randy,
PetscRandom *  randz 
)

Initializes random number generators for assigning particle properties.

This function creates and configures separate PETSc random number generators for the x, y, and z coordinates.

Parameters
[in,out]userPointer to the UserCtx structure containing simulation context.
[out]randxPointer to store the RNG for the x-coordinate.
[out]randyPointer to store the RNG for the y-coordinate.
[out]randzPointer to store the RNG for the z-coordinate.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 2647 of file setup.c.

2647 {
2648 PetscErrorCode ierr; // Error code for PETSc functions
2649 PetscMPIInt rank;
2650 PetscFunctionBeginUser;
2652 MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
2653
2654 // Initialize RNG for x-coordinate
2655 ierr = PetscRandomCreate(PETSC_COMM_SELF, randx); CHKERRQ(ierr);
2656 ierr = PetscRandomSetType((*randx), PETSCRAND48); CHKERRQ(ierr);
2657 ierr = PetscRandomSetInterval(*randx, user->bbox.min_coords.x, user->bbox.max_coords.x); CHKERRQ(ierr);
2658 ierr = PetscRandomSetSeed(*randx, rank + 12345); CHKERRQ(ierr); // Unique seed per rank
2659 ierr = PetscRandomSeed(*randx); CHKERRQ(ierr);
2660 LOG_ALLOW_SYNC(LOCAL,LOG_VERBOSE, "[Rank %d]Initialized RNG for X-axis.\n",rank);
2661
2662 // Initialize RNG for y-coordinate
2663 ierr = PetscRandomCreate(PETSC_COMM_SELF, randy); CHKERRQ(ierr);
2664 ierr = PetscRandomSetType((*randy), PETSCRAND48); CHKERRQ(ierr);
2665 ierr = PetscRandomSetInterval(*randy, user->bbox.min_coords.y, user->bbox.max_coords.y); CHKERRQ(ierr);
2666 ierr = PetscRandomSetSeed(*randy, rank + 67890); CHKERRQ(ierr); // Unique seed per rank
2667 ierr = PetscRandomSeed(*randy); CHKERRQ(ierr);
2668 LOG_ALLOW_SYNC(LOCAL,LOG_VERBOSE, "[Rank %d]Initialized RNG for Y-axis.\n",rank);
2669
2670 // Initialize RNG for z-coordinate
2671 ierr = PetscRandomCreate(PETSC_COMM_SELF, randz); CHKERRQ(ierr);
2672 ierr = PetscRandomSetType((*randz), PETSCRAND48); CHKERRQ(ierr);
2673 ierr = PetscRandomSetInterval(*randz, user->bbox.min_coords.z, user->bbox.max_coords.z); CHKERRQ(ierr);
2674 ierr = PetscRandomSetSeed(*randz, rank + 54321); CHKERRQ(ierr); // Unique seed per rank
2675 ierr = PetscRandomSeed(*randz); CHKERRQ(ierr);
2676 LOG_ALLOW_SYNC(LOCAL,LOG_VERBOSE, "[Rank %d]Initialized RNG for Z-axis.\n",rank);
2677
2679 PetscFunctionReturn(0);
2680}
@ LOG_VERBOSE
Extremely detailed logs, typically for development use only.
Definition logging.h:34
Cmpnts max_coords
Maximum x, y, z coordinates of the bounding box.
Definition variables.h:156
Cmpnts min_coords
Minimum x, y, z coordinates of the bounding box.
Definition variables.h:155
BoundingBox bbox
Definition variables.h:744
Here is the caller graph for this function:

◆ InitializeLogicalSpaceRNGs()

PetscErrorCode InitializeLogicalSpaceRNGs ( PetscRandom *  rand_logic_i,
PetscRandom *  rand_logic_j,
PetscRandom *  rand_logic_k 
)

Initializes random number generators for logical space operations [0.0, 1.0).

This function creates and configures three separate PETSc random number generators, one for each logical dimension (i, j, k or xi, eta, zeta equivalent). Each RNG is configured to produce uniformly distributed real numbers in the interval [0.0, 1.0). These are typically used for selecting owned cells or generating intra-cell logical coordinates.

Parameters
[out] rand_logic_i  Pointer to store the RNG for the i-logical dimension.
[out] rand_logic_j  Pointer to store the RNG for the j-logical dimension.
[out] rand_logic_k  Pointer to store the RNG for the k-logical dimension.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 2698 of file setup.c.

2698 {
2699 PetscErrorCode ierr;
2700 PetscMPIInt rank;
2701 PetscFunctionBeginUser;
2702
2704
2705 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2706
2707 // --- RNG for i-logical dimension ---
2708 ierr = PetscRandomCreate(PETSC_COMM_SELF, rand_logic_i); CHKERRQ(ierr);
2709 ierr = PetscRandomSetType((*rand_logic_i), PETSCRAND48); CHKERRQ(ierr);
2710 ierr = PetscRandomSetInterval(*rand_logic_i, 0.0, 1.0); CHKERRQ(ierr); // Key change: [0,1)
2711 ierr = PetscRandomSetSeed(*rand_logic_i, rank + 202401); CHKERRQ(ierr); // Unique seed
2712 ierr = PetscRandomSeed(*rand_logic_i); CHKERRQ(ierr);
2713 LOG_ALLOW(LOCAL,LOG_VERBOSE, "[Rank %d] Initialized RNG for i-logical dimension [0,1).\n",rank);
2714
2715 // --- RNG for j-logical dimension ---
2716 ierr = PetscRandomCreate(PETSC_COMM_SELF, rand_logic_j); CHKERRQ(ierr);
2717 ierr = PetscRandomSetType((*rand_logic_j), PETSCRAND48); CHKERRQ(ierr);
2718 ierr = PetscRandomSetInterval(*rand_logic_j, 0.0, 1.0); CHKERRQ(ierr); // Key change: [0,1)
2719 ierr = PetscRandomSetSeed(*rand_logic_j, rank + 202402); CHKERRQ(ierr);
2720 ierr = PetscRandomSeed(*rand_logic_j); CHKERRQ(ierr);
2721 LOG_ALLOW(LOCAL,LOG_VERBOSE, "[Rank %d] Initialized RNG for j-logical dimension [0,1).\n",rank);
2722
2723 // --- RNG for k-logical dimension ---
2724 ierr = PetscRandomCreate(PETSC_COMM_SELF, rand_logic_k); CHKERRQ(ierr);
2725 ierr = PetscRandomSetType((*rand_logic_k), PETSCRAND48); CHKERRQ(ierr);
2726 ierr = PetscRandomSetInterval(*rand_logic_k, 0.0, 1.0); CHKERRQ(ierr); // Key change: [0,1)
2727 ierr = PetscRandomSetSeed(*rand_logic_k, rank + 202403); CHKERRQ(ierr);
2728 ierr = PetscRandomSeed(*rand_logic_k); CHKERRQ(ierr);
2729 LOG_ALLOW(LOCAL,LOG_VERBOSE, "[Rank %d] Initialized RNG for k-logical dimension [0,1).\n",rank);
2730
2731
2733 PetscFunctionReturn(0);
2734}
Here is the caller graph for this function:

◆ InitializeBrownianRNG()

PetscErrorCode InitializeBrownianRNG ( SimCtx simCtx)

Initializes a single master RNG for time-stepping physics (Brownian motion).

Configures it for Uniform [0, 1) which is required for Box-Muller transformation.

Parameters
[in,out] simCtx  Pointer to the Simulation Context.
Returns
PetscErrorCode

Definition at line 2745 of file setup.c.

2745 {
2746 PetscErrorCode ierr;
2747 PetscMPIInt rank;
2748
2749 PetscFunctionBeginUser;
2751
2752 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
2753
2754 // 1. Create the generator (stored in SimCtx, not UserCtx, as it is global physics)
2755 ierr = PetscRandomCreate(PETSC_COMM_WORLD, &simCtx->BrownianMotionRNG); CHKERRQ(ierr);
2756 ierr = PetscRandomSetType(simCtx->BrownianMotionRNG, PETSCRAND48); CHKERRQ(ierr);
2757
2758 // 2. CRITICAL: Set interval to [0, 1).
2759 // This is required for the Gaussian math to work.
2760 ierr = PetscRandomSetInterval(simCtx->BrownianMotionRNG, 0.0, 1.0); CHKERRQ(ierr);
2761
2762 // 3. Seed based on Rank to ensure spatial randomness
2763 // Multiplying by a large prime helps separate the streams significantly
2764 unsigned long seed = (unsigned long)rank * 987654321 + (unsigned long)time(NULL);
2765 ierr = PetscRandomSetSeed(simCtx->BrownianMotionRNG, seed); CHKERRQ(ierr);
2766 ierr = PetscRandomSeed(simCtx->BrownianMotionRNG); CHKERRQ(ierr);
2767
2768 LOG_ALLOW(LOCAL, LOG_VERBOSE, "[Rank %d] Initialized Brownian Physics RNG.\n", rank);
2769
2771 PetscFunctionReturn(0);
2772}
Here is the caller graph for this function:

◆ TransformScalarDerivativesToPhysical()

void TransformScalarDerivativesToPhysical ( PetscReal  jacobian,
Cmpnts  csi_metrics,
Cmpnts  eta_metrics,
Cmpnts  zet_metrics,
PetscReal  dPhi_dcsi,
PetscReal  dPhi_deta,
PetscReal  dPhi_dzet,
Cmpnts gradPhi 
)

Transforms scalar derivatives from computational space to physical space using the chain rule.

Formula: dPhi/dx = J * ( dPhi/dCsi * dCsi/dx + dPhi/dEta * dEta/dx + ... )

Definition at line 2784 of file setup.c.

2792{
2793 // Gradient X component
2794 gradPhi->x = jacobian * (dPhi_dcsi * csi_metrics.x + dPhi_deta * eta_metrics.x + dPhi_dzet * zet_metrics.x);
2795
2796 // Gradient Y component
2797 gradPhi->y = jacobian * (dPhi_dcsi * csi_metrics.y + dPhi_deta * eta_metrics.y + dPhi_dzet * zet_metrics.y);
2798
2799 // Gradient Z component
2800 gradPhi->z = jacobian * (dPhi_dcsi * csi_metrics.z + dPhi_deta * eta_metrics.z + dPhi_dzet * zet_metrics.z);
2801}
Here is the caller graph for this function:

◆ TransformDerivativesToPhysical()

static void TransformDerivativesToPhysical ( PetscReal  jacobian,
Cmpnts  csi_metrics,
Cmpnts  eta_metrics,
Cmpnts  zet_metrics,
Cmpnts  deriv_csi,
Cmpnts  deriv_eta,
Cmpnts  deriv_zet,
Cmpnts dudx,
Cmpnts dvdx,
Cmpnts dwdx 
)
static

Transforms derivatives from computational space to physical space using the chain rule.

Definition at line 2808 of file setup.c.

2811{
2812 // Derivatives of the first component (u)
2813 dudx->x = jacobian * (deriv_csi.x * csi_metrics.x + deriv_eta.x * eta_metrics.x + deriv_zet.x * zet_metrics.x);
2814 dudx->y = jacobian * (deriv_csi.x * csi_metrics.y + deriv_eta.x * eta_metrics.y + deriv_zet.x * zet_metrics.y);
2815 dudx->z = jacobian * (deriv_csi.x * csi_metrics.z + deriv_eta.x * eta_metrics.z + deriv_zet.x * zet_metrics.z);
2816 // Derivatives of the second component (v)
2817 dvdx->x = jacobian * (deriv_csi.y * csi_metrics.x + deriv_eta.y * eta_metrics.x + deriv_zet.y * zet_metrics.x);
2818 dvdx->y = jacobian * (deriv_csi.y * csi_metrics.y + deriv_eta.y * eta_metrics.y + deriv_zet.y * zet_metrics.y);
2819 dvdx->z = jacobian * (deriv_csi.y * csi_metrics.z + deriv_eta.y * eta_metrics.z + deriv_zet.y * zet_metrics.z);
2820 // Derivatives of the third component (w)
2821 dwdx->x = jacobian * (deriv_csi.z * csi_metrics.x + deriv_eta.z * eta_metrics.x + deriv_zet.z * zet_metrics.x);
2822 dwdx->y = jacobian * (deriv_csi.z * csi_metrics.y + deriv_eta.z * eta_metrics.y + deriv_zet.z * zet_metrics.y);
2823 dwdx->z = jacobian * (deriv_csi.z * csi_metrics.z + deriv_eta.z * eta_metrics.z + deriv_zet.z * zet_metrics.z);
2824}
Here is the caller graph for this function:

◆ ComputeScalarFieldDerivatives()

PetscErrorCode ComputeScalarFieldDerivatives ( UserCtx user,
PetscInt  i,
PetscInt  j,
PetscInt  k,
PetscReal ***  field_data,
Cmpnts grad 
)

Computes the gradient of a cell-centered SCALAR field at a specific grid point.

Parameters
user        The user context.
i,j,k       The grid indices.
field_data  3D array pointer to the scalar field (PetscReal***).
grad        Output: A Cmpnts struct storing [dPhi/dx, dPhi/dy, dPhi/dz].
Returns
PetscErrorCode

Definition at line 2837 of file setup.c.

2839{
2840 PetscErrorCode ierr;
2841 Cmpnts ***csi, ***eta, ***zet;
2842 PetscReal ***jac;
2843 PetscReal d_csi, d_eta, d_zet;
2844
2845 PetscFunctionBeginUser;
2846
2847 // 1. Get read-only access to metrics
2848 ierr = DMDAVecGetArrayRead(user->fda, user->lCsi, &csi); CHKERRQ(ierr);
2849 ierr = DMDAVecGetArrayRead(user->fda, user->lEta, &eta); CHKERRQ(ierr);
2850 ierr = DMDAVecGetArrayRead(user->fda, user->lZet, &zet); CHKERRQ(ierr);
2851 ierr = DMDAVecGetArrayRead(user->da, user->lAj, &jac); CHKERRQ(ierr);
2852
2853 // 2. Compute derivatives in computational space (Central Difference)
2854 // Assumes ghosts are available at i+/-1
2855 d_csi = 0.5 * (field_data[k][j][i+1] - field_data[k][j][i-1]);
2856 d_eta = 0.5 * (field_data[k][j+1][i] - field_data[k][j-1][i]);
2857 d_zet = 0.5 * (field_data[k+1][j][i] - field_data[k-1][j][i]);
2858
2859 // 3. Transform to physical space
2860 TransformScalarDerivativesToPhysical(jac[k][j][i],
2861 csi[k][j][i], eta[k][j][i], zet[k][j][i],
2862 d_csi, d_eta, d_zet,
2863 grad);
2864
2865 // 4. Restore arrays
2866 ierr = DMDAVecRestoreArrayRead(user->fda, user->lCsi, &csi); CHKERRQ(ierr);
2867 ierr = DMDAVecRestoreArrayRead(user->fda, user->lEta, &eta); CHKERRQ(ierr);
2868 ierr = DMDAVecRestoreArrayRead(user->fda, user->lZet, &zet); CHKERRQ(ierr);
2869 ierr = DMDAVecRestoreArrayRead(user->da, user->lAj, &jac); CHKERRQ(ierr);
2870
2871 PetscFunctionReturn(0);
2872}
void TransformScalarDerivativesToPhysical(PetscReal jacobian, Cmpnts csi_metrics, Cmpnts eta_metrics, Cmpnts zet_metrics, PetscReal dPhi_dcsi, PetscReal dPhi_deta, PetscReal dPhi_dzet, Cmpnts *gradPhi)
Transforms scalar derivatives from computational space to physical space using the chain rule.
Definition setup.c:2784
Here is the call graph for this function:

◆ ComputeVectorFieldDerivatives()

PetscErrorCode ComputeVectorFieldDerivatives ( UserCtx user,
PetscInt  i,
PetscInt  j,
PetscInt  k,
Cmpnts ***  field_data,
Cmpnts dudx,
Cmpnts dvdx,
Cmpnts dwdx 
)

Computes the derivatives of a cell-centered vector field at a specific grid point.

This function orchestrates the calculation of spatial derivatives. It first computes the derivatives in computational space (d/dcsi, d/deta, d/dzet) using a central difference scheme and then transforms them into physical space (d/dx, d/dy, d/dz).

Parameters
user        The user context for the current computational block.
i,j,k       The grid indices of the cell center where derivatives are required.
field_data  A 3D array pointer to the raw local data of the vector field (e.g., from lUcat).
dudx        Output: A Cmpnts struct storing [du/dx, du/dy, du/dz].
dvdx        Output: A Cmpnts struct storing [dv/dx, dv/dy, dv/dz].
dwdx        Output: A Cmpnts struct storing [dw/dx, dw/dy, dw/dz].
Returns
PetscErrorCode 0 on success.

Definition at line 2891 of file setup.c.

2893{
2894 PetscErrorCode ierr;
2895 Cmpnts ***csi, ***eta, ***zet;
2896 PetscReal ***jac;
2897 PetscFunctionBeginUser;
2898
2899 // 1. Get read-only access to the necessary metric data arrays
2900 ierr = DMDAVecGetArrayRead(user->fda, user->lCsi, &csi); CHKERRQ(ierr);
2901 ierr = DMDAVecGetArrayRead(user->fda, user->lEta, &eta); CHKERRQ(ierr);
2902 ierr = DMDAVecGetArrayRead(user->fda, user->lZet, &zet); CHKERRQ(ierr);
2903 ierr = DMDAVecGetArrayRead(user->da, user->lAj, &jac); CHKERRQ(ierr);
2904
2905 // 2. Calculate derivatives in computational space using central differencing
2906 Cmpnts deriv_csi, deriv_eta, deriv_zet;
2907 deriv_csi.x = (field_data[k][j][i+1].x - field_data[k][j][i-1].x) * 0.5;
2908 deriv_csi.y = (field_data[k][j][i+1].y - field_data[k][j][i-1].y) * 0.5;
2909 deriv_csi.z = (field_data[k][j][i+1].z - field_data[k][j][i-1].z) * 0.5;
2910
2911 deriv_eta.x = (field_data[k][j+1][i].x - field_data[k][j-1][i].x) * 0.5;
2912 deriv_eta.y = (field_data[k][j+1][i].y - field_data[k][j-1][i].y) * 0.5;
2913 deriv_eta.z = (field_data[k][j+1][i].z - field_data[k][j-1][i].z) * 0.5;
2914
2915 deriv_zet.x = (field_data[k+1][j][i].x - field_data[k-1][j][i].x) * 0.5;
2916 deriv_zet.y = (field_data[k+1][j][i].y - field_data[k-1][j][i].y) * 0.5;
2917 deriv_zet.z = (field_data[k+1][j][i].z - field_data[k-1][j][i].z) * 0.5;
2918
2919 // 3. Transform derivatives to physical space
2920 TransformDerivativesToPhysical(jac[k][j][i], csi[k][j][i], eta[k][j][i], zet[k][j][i],
2921 deriv_csi, deriv_eta, deriv_zet,
2922 dudx, dvdx, dwdx);
2923
2924 // 4. Restore access to the PETSc data arrays
2925 ierr = DMDAVecRestoreArrayRead(user->fda, user->lCsi, &csi); CHKERRQ(ierr);
2926 ierr = DMDAVecRestoreArrayRead(user->fda, user->lEta, &eta); CHKERRQ(ierr);
2927 ierr = DMDAVecRestoreArrayRead(user->fda, user->lZet, &zet); CHKERRQ(ierr);
2928 ierr = DMDAVecRestoreArrayRead(user->da, user->lAj, &jac); CHKERRQ(ierr);
2929
2930 PetscFunctionReturn(0);
2931}
static void TransformDerivativesToPhysical(PetscReal jacobian, Cmpnts csi_metrics, Cmpnts eta_metrics, Cmpnts zet_metrics, Cmpnts deriv_csi, Cmpnts deriv_eta, Cmpnts deriv_zet, Cmpnts *dudx, Cmpnts *dvdx, Cmpnts *dwdx)
Transforms derivatives from computational space to physical space using the chain rule.
Definition setup.c:2808
Here is the call graph for this function:
Here is the caller graph for this function:

◆ DestroyUserVectors()

PetscErrorCode DestroyUserVectors ( UserCtx user)

Destroys all Vec objects in a UserCtx structure.

Destroys all PETSc Vec objects within a single UserCtx structure.

This function systematically destroys all PETSc Vec objects allocated in a UserCtx, with proper conditional checks for vectors that are only allocated under certain conditions (finest level, turbulence models, particles, postprocessor mode, etc.).

Parameters
user  The UserCtx structure whose vectors should be destroyed.
Returns
PetscErrorCode 0 on success.

Definition at line 2951 of file setup.c.

2952{
2953 PetscErrorCode ierr;
2954 PetscFunctionBeginUser;
2955
2956 // --- Group A: Primary Flow Fields (Always allocated at all levels) ---
2957 if (user->Ucont) { ierr = VecDestroy(&user->Ucont); CHKERRQ(ierr); }
2958 if (user->lUcont) { ierr = VecDestroy(&user->lUcont); CHKERRQ(ierr); }
2959 if (user->Ucat) { ierr = VecDestroy(&user->Ucat); CHKERRQ(ierr); }
2960 if (user->lUcat) { ierr = VecDestroy(&user->lUcat); CHKERRQ(ierr); }
2961 if (user->P) { ierr = VecDestroy(&user->P); CHKERRQ(ierr); }
2962 if (user->lP) { ierr = VecDestroy(&user->lP); CHKERRQ(ierr); }
2963 if (user->Nvert) { ierr = VecDestroy(&user->Nvert); CHKERRQ(ierr); }
2964 if (user->lNvert) { ierr = VecDestroy(&user->lNvert); CHKERRQ(ierr); }
2965
2966 // --- Group A2: Derived Flow Fields (Conditional) ---
2967 if(user->Diffusivity) {ierr = VecDestroy(&user->Diffusivity); CHKERRQ(ierr);}
2968 if(user->lDiffusivity){ierr = VecDestroy(&user->lDiffusivity); CHKERRQ(ierr);}
2969 if(user->DiffusivityGradient){ierr = VecDestroy(&user->DiffusivityGradient); CHKERRQ(ierr);}
2970 if(user->lDiffusivityGradient){ierr = VecDestroy(&user->lDiffusivityGradient); CHKERRQ(ierr);}
2971
2972 // --- Group B: Solver Work Vectors (All levels) ---
2973 if (user->Phi) { ierr = VecDestroy(&user->Phi); CHKERRQ(ierr); }
2974 if (user->lPhi) { ierr = VecDestroy(&user->lPhi); CHKERRQ(ierr); }
2975
2976 // --- Group C: Time-Stepping Vectors (Finest level only) ---
2977 if (user->Ucont_o) { ierr = VecDestroy(&user->Ucont_o); CHKERRQ(ierr); }
2978 if (user->Ucont_rm1) { ierr = VecDestroy(&user->Ucont_rm1); CHKERRQ(ierr); }
2979 if (user->Ucat_o) { ierr = VecDestroy(&user->Ucat_o); CHKERRQ(ierr); }
2980 if (user->P_o) { ierr = VecDestroy(&user->P_o); CHKERRQ(ierr); }
2981 if (user->Nvert_o) { ierr = VecDestroy(&user->Nvert_o); CHKERRQ(ierr); }
2982 if (user->lUcont_o) { ierr = VecDestroy(&user->lUcont_o); CHKERRQ(ierr); }
2983 if (user->lUcont_rm1) { ierr = VecDestroy(&user->lUcont_rm1); CHKERRQ(ierr); }
2984 if (user->lNvert_o) { ierr = VecDestroy(&user->lNvert_o); CHKERRQ(ierr); }
2985
2986 // --- Group D: Grid Metrics - Face Centered (All levels) ---
2987 if (user->Csi) { ierr = VecDestroy(&user->Csi); CHKERRQ(ierr); }
2988 if (user->Eta) { ierr = VecDestroy(&user->Eta); CHKERRQ(ierr); }
2989 if (user->Zet) { ierr = VecDestroy(&user->Zet); CHKERRQ(ierr); }
2990 if (user->Aj) { ierr = VecDestroy(&user->Aj); CHKERRQ(ierr); }
2991 if (user->lCsi) { ierr = VecDestroy(&user->lCsi); CHKERRQ(ierr); }
2992 if (user->lEta) { ierr = VecDestroy(&user->lEta); CHKERRQ(ierr); }
2993 if (user->lZet) { ierr = VecDestroy(&user->lZet); CHKERRQ(ierr); }
2994 if (user->lAj) { ierr = VecDestroy(&user->lAj); CHKERRQ(ierr); }
2995
2996 // --- Group E: Grid Metrics - Face Centered (All levels) ---
2997 if (user->ICsi) { ierr = VecDestroy(&user->ICsi); CHKERRQ(ierr); }
2998 if (user->IEta) { ierr = VecDestroy(&user->IEta); CHKERRQ(ierr); }
2999 if (user->IZet) { ierr = VecDestroy(&user->IZet); CHKERRQ(ierr); }
3000 if (user->JCsi) { ierr = VecDestroy(&user->JCsi); CHKERRQ(ierr); }
3001 if (user->JEta) { ierr = VecDestroy(&user->JEta); CHKERRQ(ierr); }
3002 if (user->JZet) { ierr = VecDestroy(&user->JZet); CHKERRQ(ierr); }
3003 if (user->KCsi) { ierr = VecDestroy(&user->KCsi); CHKERRQ(ierr); }
3004 if (user->KEta) { ierr = VecDestroy(&user->KEta); CHKERRQ(ierr); }
3005 if (user->KZet) { ierr = VecDestroy(&user->KZet); CHKERRQ(ierr); }
3006 if (user->IAj) { ierr = VecDestroy(&user->IAj); CHKERRQ(ierr); }
3007 if (user->JAj) { ierr = VecDestroy(&user->JAj); CHKERRQ(ierr); }
3008 if (user->KAj) { ierr = VecDestroy(&user->KAj); CHKERRQ(ierr); }
3009 if (user->lICsi) { ierr = VecDestroy(&user->lICsi); CHKERRQ(ierr); }
3010 if (user->lIEta) { ierr = VecDestroy(&user->lIEta); CHKERRQ(ierr); }
3011 if (user->lIZet) { ierr = VecDestroy(&user->lIZet); CHKERRQ(ierr); }
3012 if (user->lJCsi) { ierr = VecDestroy(&user->lJCsi); CHKERRQ(ierr); }
3013 if (user->lJEta) { ierr = VecDestroy(&user->lJEta); CHKERRQ(ierr); }
3014 if (user->lJZet) { ierr = VecDestroy(&user->lJZet); CHKERRQ(ierr); }
3015 if (user->lKCsi) { ierr = VecDestroy(&user->lKCsi); CHKERRQ(ierr); }
3016 if (user->lKEta) { ierr = VecDestroy(&user->lKEta); CHKERRQ(ierr); }
3017 if (user->lKZet) { ierr = VecDestroy(&user->lKZet); CHKERRQ(ierr); }
3018 if (user->lIAj) { ierr = VecDestroy(&user->lIAj); CHKERRQ(ierr); }
3019 if (user->lJAj) { ierr = VecDestroy(&user->lJAj); CHKERRQ(ierr); }
3020 if (user->lKAj) { ierr = VecDestroy(&user->lKAj); CHKERRQ(ierr); }
3021
3022 // --- Group F: Cell/Face Coordinates and Grid Spacing (All levels) ---
3023 if (user->Cent) { ierr = VecDestroy(&user->Cent); CHKERRQ(ierr); }
3024 if (user->lCent) { ierr = VecDestroy(&user->lCent); CHKERRQ(ierr); }
3025 if (user->GridSpace) { ierr = VecDestroy(&user->GridSpace); CHKERRQ(ierr); }
3026 if (user->lGridSpace) { ierr = VecDestroy(&user->lGridSpace); CHKERRQ(ierr); }
3027 if (user->Centx) { ierr = VecDestroy(&user->Centx); CHKERRQ(ierr); }
3028 if (user->Centy) { ierr = VecDestroy(&user->Centy); CHKERRQ(ierr); }
3029 if (user->Centz) { ierr = VecDestroy(&user->Centz); CHKERRQ(ierr); }
3030
3031 // --- Group G: Turbulence Model Vectors (Finest level, conditional on les/rans) ---
3032 if (user->Nu_t) { ierr = VecDestroy(&user->Nu_t); CHKERRQ(ierr); }
3033 if (user->lNu_t) { ierr = VecDestroy(&user->lNu_t); CHKERRQ(ierr); }
3034 if (user->CS) { ierr = VecDestroy(&user->CS); CHKERRQ(ierr); }
3035 if (user->lCs) { ierr = VecDestroy(&user->lCs); CHKERRQ(ierr); }
3036 if (user->lFriction_Velocity) { ierr = VecDestroy(&user->lFriction_Velocity); CHKERRQ(ierr); }
3037 if (user->K_Omega) { ierr = VecDestroy(&user->K_Omega); CHKERRQ(ierr); }
3038 if (user->lK_Omega) { ierr = VecDestroy(&user->lK_Omega); CHKERRQ(ierr); }
3039 if (user->K_Omega_o) { ierr = VecDestroy(&user->K_Omega_o); CHKERRQ(ierr); }
3040 if (user->lK_Omega_o) { ierr = VecDestroy(&user->lK_Omega_o); CHKERRQ(ierr); }
3041
3042 // --- Group H: Particle Vectors (Finest level, conditional on np > 0) ---
3043 if (user->ParticleCount) { ierr = VecDestroy(&user->ParticleCount); CHKERRQ(ierr); }
3044 if (user->lParticleCount) { ierr = VecDestroy(&user->lParticleCount); CHKERRQ(ierr); }
3045 if (user->Psi) { ierr = VecDestroy(&user->Psi); CHKERRQ(ierr); }
3046 if (user->lPsi) { ierr = VecDestroy(&user->lPsi); CHKERRQ(ierr); }
3047
3048 // --- Group I: Boundary Condition Vectors (All levels) ---
3049 if (user->Bcs.Ubcs) { ierr = VecDestroy(&user->Bcs.Ubcs); CHKERRQ(ierr); }
3050 if (user->Bcs.Uch) { ierr = VecDestroy(&user->Bcs.Uch); CHKERRQ(ierr); }
3051
3052 // --- Group J: Post-Processing Vectors (Finest level, postprocessor mode) ---
3053 if (user->P_nodal) { ierr = VecDestroy(&user->P_nodal); CHKERRQ(ierr); }
3054 if (user->Ucat_nodal) { ierr = VecDestroy(&user->Ucat_nodal); CHKERRQ(ierr); }
3055 if (user->Qcrit) { ierr = VecDestroy(&user->Qcrit); CHKERRQ(ierr); }
3056 if (user->Psi_nodal) { ierr = VecDestroy(&user->Psi_nodal); CHKERRQ(ierr); }
3057
3058 // --- Group K: Interpolation Vectors (Lazy allocation) ---
3059 if (user->CellFieldAtCorner) { ierr = VecDestroy(&user->CellFieldAtCorner); CHKERRQ(ierr); }
3060 if (user->lCellFieldAtCorner) { ierr = VecDestroy(&user->lCellFieldAtCorner); CHKERRQ(ierr); }
3061
3062 // --- Group L: Statistical Averaging Vectors (If allocated) ---
3063 if (user->Ucat_sum) { ierr = VecDestroy(&user->Ucat_sum); CHKERRQ(ierr); }
3064 if (user->Ucat_cross_sum) { ierr = VecDestroy(&user->Ucat_cross_sum); CHKERRQ(ierr); }
3065 if (user->Ucat_square_sum) { ierr = VecDestroy(&user->Ucat_square_sum); CHKERRQ(ierr); }
3066 if (user->P_sum) { ierr = VecDestroy(&user->P_sum); CHKERRQ(ierr); }
3067
3068 // --- Group M: Implicit Solver Temporary Vectors (Destroyed after use, but check anyway) ---
3069 if (user->Rhs) { ierr = VecDestroy(&user->Rhs); CHKERRQ(ierr); }
3070 if (user->dUcont) { ierr = VecDestroy(&user->dUcont); CHKERRQ(ierr); }
3071 if (user->pUcont) { ierr = VecDestroy(&user->pUcont); CHKERRQ(ierr); }
3072
3073 // --- Group N: Poisson Solver Vectors (Destroyed after solve, but check anyway) ---
3074 if (user->B) { ierr = VecDestroy(&user->B); CHKERRQ(ierr); }
3075 if (user->R) { ierr = VecDestroy(&user->R); CHKERRQ(ierr); }
3076
3077 LOG_ALLOW(LOCAL, LOG_DEBUG, "All vectors destroyed for UserCtx.\n");
3078 PetscFunctionReturn(0);
3079}
Vec Rhs
Definition variables.h:768
Vec K_Omega_o
Definition variables.h:788
Vec K_Omega
Definition variables.h:788
Vec lCellFieldAtCorner
Definition variables.h:769
Vec Ucat_square_sum
Definition variables.h:791
Vec P_sum
Definition variables.h:791
Vec pUcont
Definition variables.h:768
Vec lK_Omega_o
Definition variables.h:788
Vec lK_Omega
Definition variables.h:788
Vec CellFieldAtCorner
Definition variables.h:769
Vec Ucat_sum
Definition variables.h:791
Vec dUcont
Definition variables.h:768
Vec Ucat_cross_sum
Definition variables.h:791
Here is the caller graph for this function:

◆ DestroyUserContext()

PetscErrorCode DestroyUserContext ( UserCtx user)

Destroys all resources allocated within a single UserCtx structure.

This function cleans up all memory and PETSc objects associated with a single UserCtx (grid level). It calls the helper functions and destroys remaining objects in the proper dependency order:

  1. Boundary conditions (handlers and their data)
  2. All PETSc vectors (via DestroyUserVectors)
  3. Matrix and solver objects (A, C, MR, MP, ksp, nullsp)
  4. Application ordering (AO)
  5. Distributed mesh objects (DMs) - most derived first
  6. Raw PetscMalloc'd arrays (RankCellInfoMap, KSKE)

This function should be called for each UserCtx in the multigrid hierarchy.

Parameters
[in,out] user  Pointer to the UserCtx to be destroyed.
Returns
PetscErrorCode 0 on success.

Definition at line 3101 of file setup.c.

3102{
3103 PetscErrorCode ierr;
3104 PetscFunctionBeginUser;
3105
3106 if (!user) {
3107 LOG_ALLOW(LOCAL, LOG_WARNING, "DestroyUserContext called with NULL user pointer.\n");
3108 PetscFunctionReturn(0);
3109 }
3110
3111 LOG_ALLOW(LOCAL, LOG_INFO, "Destroying UserCtx at level %d...\n", user->thislevel);
3112
3113 // --- Step 1: Destroy Boundary Condition System ---
3114 // This handles all BC handlers and their private data.
3115 ierr = BoundarySystem_Destroy(user); CHKERRQ(ierr);
3116 LOG_ALLOW(LOCAL, LOG_DEBUG, " Boundary system destroyed.\n");
3117
3118 // --- Step 2: Destroy All Vectors ---
3119 // Handles ~74 Vec objects with proper NULL checking.
3120 ierr = DestroyUserVectors(user); CHKERRQ(ierr);
3121 LOG_ALLOW(LOCAL, LOG_DEBUG, " All vectors destroyed.\n");
3122
3123 // --- Step 3: Destroy Matrix and Solver Objects ---
3124 // Destroy pressure-Poisson matrices and solver.
3125 if (user->A) {
3126 ierr = MatDestroy(&user->A); CHKERRQ(ierr);
3127 LOG_ALLOW(LOCAL, LOG_DEBUG, " Matrix A destroyed.\n");
3128 }
3129 if (user->C) {
3130 ierr = MatDestroy(&user->C); CHKERRQ(ierr);
3131 LOG_ALLOW(LOCAL, LOG_DEBUG, " Matrix C destroyed.\n");
3132 }
3133 if (user->MR) {
3134 ierr = MatDestroy(&user->MR); CHKERRQ(ierr);
3135 LOG_ALLOW(LOCAL, LOG_DEBUG, " Matrix MR destroyed.\n");
3136 }
3137 if (user->MP) {
3138 ierr = MatDestroy(&user->MP); CHKERRQ(ierr);
3139 LOG_ALLOW(LOCAL, LOG_DEBUG, " Matrix MP destroyed.\n");
3140 }
3141 if (user->ksp) {
3142 ierr = KSPDestroy(&user->ksp); CHKERRQ(ierr);
3143 LOG_ALLOW(LOCAL, LOG_DEBUG, " KSP solver destroyed.\n");
3144 }
3145 if (user->nullsp) {
3146 ierr = MatNullSpaceDestroy(&user->nullsp); CHKERRQ(ierr);
3147 LOG_ALLOW(LOCAL, LOG_DEBUG, " MatNullSpace destroyed.\n");
3148 }
3149
3150 // --- Step 4: Destroy Application Ordering ---
3151 if (user->ao) {
3152 ierr = AODestroy(&user->ao); CHKERRQ(ierr);
3153 LOG_ALLOW(LOCAL, LOG_DEBUG, " AO destroyed.\n");
3154 }
3155
3156 // --- Step 5: Destroy DM Objects ---
3157 // Destroy in reverse order of dependency: post_swarm, swarm, fda2, fda, da
3158 if (user->post_swarm) {
3159 ierr = DMDestroy(&user->post_swarm); CHKERRQ(ierr);
3160 LOG_ALLOW(LOCAL, LOG_DEBUG, " post_swarm DM destroyed.\n");
3161 }
3162 if (user->swarm) {
3163 ierr = DMDestroy(&user->swarm); CHKERRQ(ierr);
3164 LOG_ALLOW(LOCAL, LOG_DEBUG, " swarm DM destroyed.\n");
3165 }
3166 if (user->fda2) {
3167 ierr = DMDestroy(&user->fda2); CHKERRQ(ierr);
3168 LOG_ALLOW(LOCAL, LOG_DEBUG, " fda2 DM destroyed.\n");
3169 }
3170 if (user->da) {
3171 ierr = DMDestroy(&user->da); CHKERRQ(ierr);
3172 LOG_ALLOW(LOCAL, LOG_DEBUG, " da DM destroyed.\n");
3173 }
3174
3175 // --- Step 6: Free PetscMalloc'd Arrays ---
3176 // Free arrays allocated with PetscMalloc1
3177 if (user->RankCellInfoMap) {
3178 ierr = PetscFree(user->RankCellInfoMap); CHKERRQ(ierr);
3179 user->RankCellInfoMap = NULL;
3180 LOG_ALLOW(LOCAL, LOG_DEBUG, " RankCellInfoMap freed.\n");
3181 }
3182 if (user->KSKE) {
3183 ierr = PetscFree(user->KSKE); CHKERRQ(ierr);
3184 user->KSKE = NULL;
3185 LOG_ALLOW(LOCAL, LOG_DEBUG, " KSKE array freed.\n");
3186 }
3187
3188 LOG_ALLOW(LOCAL, LOG_INFO, "UserCtx at level %d fully destroyed.\n", user->thislevel);
3189 PetscFunctionReturn(0);
3190}
PetscErrorCode BoundarySystem_Destroy(UserCtx *user)
Cleans up and destroys all boundary system resources.
PetscErrorCode DestroyUserVectors(UserCtx *user)
Destroys all Vec objects in a UserCtx structure.
Definition setup.c:2951
MatNullSpace nullsp
Definition variables.h:772
PetscInt * KSKE
Definition variables.h:773
DM post_swarm
Definition variables.h:809
KSP ksp
Definition variables.h:772
Here is the call graph for this function:
Here is the caller graph for this function:

◆ FinalizeSimulation()

PetscErrorCode FinalizeSimulation ( SimCtx simCtx)

Main cleanup function for the entire simulation context.

This function is responsible for destroying ALL memory and PETSc objects allocated during the simulation, including:

  • All UserCtx structures in the multigrid hierarchy (via DestroyUserContext)
  • The multigrid management structures (UserMG, MGCtx array)
  • All SimCtx-level objects (logviewer, dm_swarm, bboxlist, string arrays, etc.)

This function should be called ONCE at the end of the simulation, after all computation is complete, but BEFORE PetscFinalize().

Call order in main:

  1. [Simulation runs]
  2. ProfilingFinalize(simCtx);
  3. FinalizeSimulation(simCtx); <- This function
  4. PetscFinalize();
Parameters
[in,out] simCtx  Pointer to the master SimulationContext to be destroyed.
Returns
PetscErrorCode 0 on success.

Definition at line 3216 of file setup.c.

3217{
3218 PetscErrorCode ierr;
3219 PetscFunctionBeginUser;
3220
3221 if (!simCtx) {
3222 LOG_ALLOW(GLOBAL, LOG_WARNING, "FinalizeSimulation called with NULL SimCtx pointer.\n");
3223 PetscFunctionReturn(0);
3224 }
3225
3226 LOG_ALLOW(GLOBAL, LOG_INFO, "========================================\n");
3227 LOG_ALLOW(GLOBAL, LOG_INFO, "Beginning simulation memory cleanup...\n");
3228 LOG_ALLOW(GLOBAL, LOG_INFO, "========================================\n");
3229
3230 // ============================================================================
3231 // PHASE 1: DESTROY MULTIGRID HIERARCHY (All UserCtx structures)
3232 // ============================================================================
3233
3234 if (simCtx->usermg.mgctx) {
3235 LOG_ALLOW(GLOBAL, LOG_INFO, "Destroying multigrid hierarchy (%d levels)...\n",
3236 simCtx->usermg.mglevels);
3237
3238 // Destroy each UserCtx from finest to coarsest (reverse order is safer)
3239 for (PetscInt level = simCtx->usermg.mglevels - 1; level >= 0; level--) {
3240 UserCtx *user = simCtx->usermg.mgctx[level].user;
3241 if (user) {
3242 LOG_ALLOW(LOCAL, LOG_INFO, " Destroying level %d of %d...\n",
3243 level, simCtx->usermg.mglevels - 1);
3244 ierr = DestroyUserContext(user); CHKERRQ(ierr);
3245
3246 // Free the UserCtx structure itself
3247 ierr = PetscFree(user); CHKERRQ(ierr);
3248 simCtx->usermg.mgctx[level].user = NULL;
3249 }
3250
3251 // Destroy the MGCtx-level packer DM
3252 if (simCtx->usermg.mgctx[level].packer) {
3253 ierr = DMDestroy(&simCtx->usermg.mgctx[level].packer); CHKERRQ(ierr);
3254 LOG_ALLOW(LOCAL, LOG_DEBUG, " MGCtx[%d].packer destroyed.\n", level);
3255 }
3256 }
3257
3258 // Free the MGCtx array itself
3259 ierr = PetscFree(simCtx->usermg.mgctx); CHKERRQ(ierr);
3260 simCtx->usermg.mgctx = NULL;
3261 LOG_ALLOW(GLOBAL, LOG_INFO, "All multigrid levels destroyed.\n");
3262 }
3263
3264 // ============================================================================
3265 // PHASE 2: DESTROY USERMG-LEVEL OBJECTS
3266 // ============================================================================
3267
3268 if (simCtx->usermg.packer) {
3269 ierr = DMDestroy(&simCtx->usermg.packer); CHKERRQ(ierr);
3270 LOG_ALLOW(LOCAL, LOG_DEBUG, "UserMG.packer DM destroyed.\n");
3271 }
3272
3273 if (simCtx->usermg.snespacker) {
3274 ierr = SNESDestroy(&simCtx->usermg.snespacker); CHKERRQ(ierr);
3275 LOG_ALLOW(LOCAL, LOG_DEBUG, "UserMG.snespacker SNES destroyed.\n");
3276 }
3277
3278 // ============================================================================
3279 // PHASE 3: DESTROY SIMCTX-LEVEL OBJECTS
3280 // ============================================================================
3281
3282 LOG_ALLOW(GLOBAL, LOG_INFO, "Destroying SimCtx-level objects...\n");
3283
3284 // --- PetscViewer for logging ---
3285 if (simCtx->logviewer) {
3286 ierr = PetscViewerDestroy(&simCtx->logviewer); CHKERRQ(ierr);
3287 LOG_ALLOW(LOCAL, LOG_DEBUG, " logviewer destroyed.\n");
3288 }
3289
3290 // --- Particle System DM ---
3291 if (simCtx->dm_swarm) {
3292 ierr = DMDestroy(&simCtx->dm_swarm); CHKERRQ(ierr);
3293 LOG_ALLOW(LOCAL, LOG_DEBUG, " dm_swarm destroyed.\n");
3294 }
3295
3296 // --- BoundingBox List (Array of BoundingBox structs) ---
3297 if (simCtx->bboxlist) {
3298 ierr = PetscFree(simCtx->bboxlist); CHKERRQ(ierr);
3299 simCtx->bboxlist = NULL;
3300 LOG_ALLOW(LOCAL, LOG_DEBUG, " bboxlist freed.\n");
3301 }
3302
3303 // --- Boundary Condition Files (Array of strings) ---
3304 if (simCtx->bcs_files) {
3305 for (PetscInt i = 0; i < simCtx->num_bcs_files; i++) {
3306 if (simCtx->bcs_files[i]) {
3307 ierr = PetscFree(simCtx->bcs_files[i]); CHKERRQ(ierr);
3308 }
3309 }
3310 ierr = PetscFree(simCtx->bcs_files); CHKERRQ(ierr);
3311 simCtx->bcs_files = NULL;
3312 LOG_ALLOW(LOCAL, LOG_DEBUG, " bcs_files array freed (%d files).\n", simCtx->num_bcs_files);
3313 }
3314
3315 // --- Brownian Motion RNG ---
3316 if (simCtx->BrownianMotionRNG) {
3317 ierr = PetscRandomDestroy(&simCtx->BrownianMotionRNG); CHKERRQ(ierr);
3318 LOG_ALLOW(LOCAL, LOG_DEBUG, " BrownianMotionRNG destroyed.\n");
3319 }
3320 // --- Post-Processing Parameters ---
3321 // pps is allocated with PetscNew and contains only static char arrays and basic types.
3322 // No internal dynamic allocations need to be freed.
3323 if (simCtx->pps) {
3324 ierr = PetscFree(simCtx->pps); CHKERRQ(ierr);
3325 simCtx->pps = NULL;
3326 LOG_ALLOW(LOCAL, LOG_DEBUG, " PostProcessParams freed.\n");
3327 }
3328
3329 // --- IBM/FSI Objects ---
3330 // Note: These are initialized to NULL and currently have no dedicated destroy functions.
3331 // If these modules are extended with cleanup routines, call them here.
3332 if (simCtx->ibm != NULL) {
3333 LOG_ALLOW(GLOBAL, LOG_WARNING, " WARNING: simCtx->ibm is non-NULL but no destroy function exists. Potential memory leak.\n");
3334 }
3335 if (simCtx->ibmv != NULL) {
3336 LOG_ALLOW(GLOBAL, LOG_WARNING, " WARNING: simCtx->ibmv is non-NULL but no destroy function exists. Potential memory leak.\n");
3337 }
3338 if (simCtx->fsi != NULL) {
3339 LOG_ALLOW(GLOBAL, LOG_WARNING, " WARNING: simCtx->fsi is non-NULL but no destroy function exists. Potential memory leak.\n");
3340 }
3341
3342 // --- Logging Allowed Functions (Array of strings) ---
3343 // Note: The logging system maintains its own copy via set_allowed_functions(),
3344 // so freeing simCtx->allowedFuncs will NOT affect LOG_ALLOW functionality.
3345 if (simCtx->allowedFuncs) {
3346 for (PetscInt i = 0; i < simCtx->nAllowed; i++) {
3347 if (simCtx->allowedFuncs[i]) {
3348 ierr = PetscFree(simCtx->allowedFuncs[i]); CHKERRQ(ierr);
3349 }
3350 }
3351 ierr = PetscFree(simCtx->allowedFuncs); CHKERRQ(ierr);
3352 simCtx->allowedFuncs = NULL;
3353 LOG_ALLOW(LOCAL, LOG_DEBUG, " allowedFuncs array freed (%d functions).\n", simCtx->nAllowed);
3354 }
3355
3356 // --- Profiling Critical Functions (Array of strings) ---
3357 if (simCtx->criticalFuncs) {
3358 for (PetscInt i = 0; i < simCtx->nCriticalFuncs; i++) {
3359 if (simCtx->criticalFuncs[i]) {
3360 ierr = PetscFree(simCtx->criticalFuncs[i]); CHKERRQ(ierr);
3361 }
3362 }
3363 ierr = PetscFree(simCtx->criticalFuncs); CHKERRQ(ierr);
3364 simCtx->criticalFuncs = NULL;
3365 LOG_ALLOW(LOCAL, LOG_DEBUG, " criticalFuncs array freed (%d functions).\n", simCtx->nCriticalFuncs);
3366 }
3367
3368 // ============================================================================
3369 // PHASE 4: FINAL SUMMARY
3370 // ============================================================================
3371
3372 LOG_ALLOW(GLOBAL, LOG_INFO, "========================================\n");
3373 LOG_ALLOW(GLOBAL, LOG_INFO, "Simulation cleanup completed successfully.\n");
3374 LOG_ALLOW(GLOBAL, LOG_INFO, "All PETSc objects have been destroyed.\n");
3375 LOG_ALLOW(GLOBAL, LOG_INFO, "========================================\n");
3376
3377 PetscFunctionReturn(0);
3378}
PetscErrorCode DestroyUserContext(UserCtx *user)
Destroys all resources allocated within a single UserCtx structure.
Definition setup.c:3101
DM packer
Definition variables.h:485
SNES snespacker
Definition variables.h:486
DM packer
Definition variables.h:476
Here is the call graph for this function:
Here is the caller graph for this function: