PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
logging.h File Reference

Logging utilities and macros for PETSc-based applications. More...

#include <petsc.h>
#include <stdlib.h>
#include <string.h>
#include <petscsys.h>
#include <ctype.h>
#include "variables.h"
#include "Boundaries.h"
Include dependency graph for logging.h:
This graph shows which files directly or indirectly include this file:

Go to the source code of this file.

Data Structures

struct  DualMonitorCtx
 Context for a dual-purpose KSP monitor. More...
 

Macros

#define LOCAL   0
 Logging scope definitions for controlling message output.
 
#define GLOBAL   1
 Scope for global logging across all processes.
 
#define LOG(scope, level, fmt, ...)
 Logging macro for PETSc-based applications with scope control.
 
#define LOG_DEFAULT(level, fmt, ...)
 Default logging macro for PETSc-based applications.
 
#define LOG_SYNC(scope, level, fmt, ...)
 Logging macro for PETSc-based applications with scope control, using synchronized output across processes.
 
#define LOG_SYNC_DEFAULT(level, fmt, ...)
 Default synchronized logging macro for PETSc-based applications.
 
#define LOG_ALLOW(scope, level, fmt, ...)
 Logging macro that checks both the log level and whether the calling function is in the allowed-function list before printing.
 
#define LOG_ALLOW_SYNC(scope, level, fmt, ...)
 Synchronized logging macro that checks both the log level and whether the calling function is in the allow-list before printing, using rank-ordered output across processes.
 
#define LOG_LOOP_ALLOW(scope, level, iterVar, interval, fmt, ...)
 Logs a message inside a loop, but only every interval iterations.
 
#define LOG_LOOP_ALLOW_EXACT(scope, level, var, val, fmt, ...)
 Logs a custom message if a variable equals a specific value.
 
#define LOG_ARRAY_ELEMENT_ALLOW(scope, level, arr, length, idx, fmt)
 Logs a single element of an array, given an index.
 
#define LOG_ARRAY_SUBRANGE_ALLOW(scope, level, arr, length, start, end, fmt)
 Logs a consecutive subrange of an array.
 
#define LOG_PROFILE_MSG(scope, fmt, ...)
 
#define PROFILE_FUNCTION_BEGIN    _ProfilingStart(__FUNCT__)
 Marks the beginning of a profiled code block (typically a function).
 
#define PROFILE_FUNCTION_END    _ProfilingEnd(__FUNCT__)
 Marks the end of a profiled code block.
 

Enumerations

enum  LogLevel {
  LOG_ERROR = 0 , LOG_WARNING , LOG_PROFILE , LOG_INFO ,
  LOG_DEBUG , LOG_TRACE , LOG_VERBOSE
}
 Enumeration of logging levels. More...
 

Functions

LogLevel get_log_level ()
 Retrieves the current logging level from the environment variable LOG_LEVEL.
 
PetscErrorCode print_log_level (void)
 Prints the current logging level to the console.
 
void set_allowed_functions (const char **functionList, int count)
 Sets the global list of function names that are allowed to log.
 
PetscBool is_function_allowed (const char *functionName)
 Checks if a given function is in the allow-list.
 
PetscErrorCode LOG_CELL_VERTICES (const Cell *cell, PetscMPIInt rank)
 Prints the coordinates of a cell's vertices.
 
PetscErrorCode LOG_FACE_DISTANCES (PetscReal *d)
 Prints the signed distances to each face of the cell.
 
PetscErrorCode LOG_PARTICLE_FIELDS (UserCtx *user, PetscInt printInterval)
 Prints particle fields in a table that automatically adjusts its column widths.
 
PetscErrorCode FreeAllowedFunctions (char **funcs, PetscInt n)
 Free an array previously returned by LoadAllowedFunctionsFromFile().
 
PetscErrorCode LoadAllowedFunctionsFromFile (const char filename[], char ***funcsOut, PetscInt *nOut)
 Load function names from a text file.
 
const char * BCFaceToString (BCFace face)
 Helper function to convert BCFace enum to a string representation.
 
const char * FieldInitializationToString (PetscInt FieldInitialization)
 Helper function to convert FieldInitialization to a string representation.
 
const char * ParticleInitializationToString (ParticleInitializationType ParticleInitialization)
 Helper function to convert ParticleInitialization to a string representation.
 
const char * LESModelToString (LESModelType LESFlag)
 Helper function to convert LES Flag to a string representation.
 
const char * MomentumSolverTypeToString (MomentumSolverType SolverFlag)
 Helper function to convert Momentum Solver flag to a string representation.
 
const char * BCTypeToString (BCType type)
 Helper function to convert BCType enum to a string representation.
 
const char * BCHandlerTypeToString (BCHandlerType handler_type)
 Converts a BCHandlerType enum to its string representation.
 
PetscErrorCode DualKSPMonitor (KSP ksp, PetscInt it, PetscReal rnorm, void *ctx)
 A custom KSP monitor that logs to a file and optionally to the console.
 
PetscErrorCode DualMonitorDestroy (void **ctx)
 Destroys the DualMonitorCtx.
 
PetscErrorCode LOG_CONTINUITY_METRICS (UserCtx *user)
 Logs continuity metrics for a single block to a file.
 
const char * ParticleLocationStatusToString (ParticleLocationStatus level)
 A function that outputs the name of the current level in the ParticleLocationStatus enum.
 
void PrintProgressBar (PetscInt step, PetscInt startStep, PetscInt totalSteps, PetscReal currentTime)
 Prints a progress bar to the console.
 
PetscErrorCode ProfilingInitialize (SimCtx *simCtx)
 Initializes the custom profiling system using configuration from SimCtx.
 
PetscErrorCode ProfilingResetTimestepCounters (void)
 
PetscErrorCode ProfilingLogTimestepSummary (PetscInt step)
 Logs the performance summary for the current timestep and resets timers.
 
PetscErrorCode ProfilingFinalize (SimCtx *simCtx)
 Finalizes the profiling exercise and builds a profiling summary, which is then printed to a log file.
 
void _ProfilingStart (const char *func_name)
 
void _ProfilingEnd (const char *func_name)
 
PetscErrorCode LOG_FIELD_MIN_MAX (UserCtx *user, const char *fieldName)
 Computes and logs the local and global min/max values of a 3-component vector field.
 
PetscErrorCode LOG_FIELD_ANATOMY (UserCtx *user, const char *field_name, const char *stage_name)
 Logs the anatomy of a specified field at key boundary locations, respecting the solver's specific grid and variable architecture.
 
PetscErrorCode LOG_INTERPOLATION_ERROR (UserCtx *user)
 Logs the interpolation error between the analytical and computed solutions.
 
PetscErrorCode CalculateAdvancedParticleMetrics (UserCtx *user)
 Computes advanced particle statistics and stores them in SimCtx.
 
PetscErrorCode LOG_PARTICLE_METRICS (UserCtx *user, const char *stageName)
 Logs particle swarm metrics, adapting its behavior based on a boolean flag in SimCtx.
 

Detailed Description

Logging utilities and macros for PETSc-based applications.

This header defines logging levels, scopes, and macros for consistent logging throughout the application. It provides functions to retrieve the current logging level and macros to simplify logging with scope control.

Definition in file logging.h.


Data Structure Documentation

◆ DualMonitorCtx

struct DualMonitorCtx

Context for a dual-purpose KSP monitor.

This struct holds a file viewer for unconditional logging and a boolean flag to enable/disable optional logging to the console.

Definition at line 56 of file logging.h.

Data Fields
FILE * file_handle
PetscBool log_to_console
PetscReal bnorm
PetscInt step
PetscInt block_id

Macro Definition Documentation

◆ LOCAL

#define LOCAL   0

Logging scope definitions for controlling message output.

  • LOCAL: Logs on the current process using MPI_COMM_SELF.
  • GLOBAL: Logs across all processes using MPI_COMM_WORLD. Scope for local logging on the current process.

Definition at line 45 of file logging.h.

◆ GLOBAL

#define GLOBAL   1

Scope for global logging across all processes.

Definition at line 46 of file logging.h.

◆ LOG

#define LOG (   scope,
  level,
  fmt,
  ... 
)
Value:
do { \
/* Determine the MPI communicator based on the scope */ \
MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
/* Check if the log level is within the allowed range */ \
if ((int)(level) <= (int)get_log_level()) { \
/* Print the message to the specified communicator */ \
PetscPrintf(comm, fmt, ##__VA_ARGS__); \
} \
} while (0)
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:45
LogLevel get_log_level()
Retrieves the current logging level from the environment variable LOG_LEVEL.
Definition logging.c:39

Logging macro for PETSc-based applications with scope control.

This macro provides a convenient way to log messages with different scopes (LOCAL or GLOBAL) and severity levels. It utilizes PETSc's PetscPrintf function for message output.

Parameters
scopeSpecifies the logging scope:
  • LOCAL: Logs on the current process using MPI_COMM_SELF.
  • GLOBAL: Logs on all processes using MPI_COMM_WORLD.
levelThe severity level of the message (e.g., LOG_INFO, LOG_ERROR).
fmtThe format string for the message (similar to printf).
...Additional arguments for the format string (optional).

Example usage: LOG(LOCAL, LOG_ERROR, "An error occurred at index %ld.\n", idx); LOG(GLOBAL, LOG_INFO, "Grid size: %ld x %ld x %ld.\n", nx, ny, nz);

Definition at line 84 of file logging.h.

85 { \
86 /* Determine the MPI communicator based on the scope */ \
87 MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
88 /* Check if the log level is within the allowed range */ \
89 if ((int)(level) <= (int)get_log_level()) { \
90 /* Print the message to the specified communicator */ \
91 PetscPrintf(comm, fmt, ##__VA_ARGS__); \
92 } \
93 } while (0)

◆ LOG_DEFAULT

#define LOG_DEFAULT (   level,
  fmt,
  ... 
)
Value:
do { \
/* Set the communicator to global (MPI_COMM_WORLD) by default */ \
MPI_Comm comm = MPI_COMM_WORLD; \
/* Check if the log level is within the allowed range */ \
if ((int)(level) <= (int)get_log_level()) { \
/* Print the message using PetscPrintf with the global communicator */ \
PetscPrintf(comm, fmt, ##__VA_ARGS__); \
} \
} while (0)

Default logging macro for PETSc-based applications.

This macro simplifies logging by defaulting the scope to GLOBAL (i.e., MPI_COMM_WORLD) and providing a convenient interface for common logging needs.

Parameters
levelThe severity level of the message (e.g., LOG_ERROR, LOG_INFO).
fmtThe format string for the log message (similar to printf).
...Additional arguments for the format string (optional).

Example usage: LOG_DEFAULT(LOG_ERROR, "Error occurred at index %ld.\n", idx); LOG_DEFAULT(LOG_INFO, "Grid size: %ld x %ld x %ld.\n", nx, ny, nz);

Note
  • By default, this macro logs across all MPI processes using MPI_COMM_WORLD.
  • If finer control (e.g., local logging) is required, use the more general LOG macro.
  • The log level is filtered based on the value returned by get_log_level().

Definition at line 115 of file logging.h.

116 { \
117 /* Set the communicator to global (MPI_COMM_WORLD) by default */ \
118 MPI_Comm comm = MPI_COMM_WORLD; \
119 /* Check if the log level is within the allowed range */ \
120 if ((int)(level) <= (int)get_log_level()) { \
121 /* Print the message using PetscPrintf with the global communicator */ \
122 PetscPrintf(comm, fmt, ##__VA_ARGS__); \
123 } \
124 } while (0)

◆ LOG_SYNC

#define LOG_SYNC (   scope,
  level,
  fmt,
  ... 
)
Value:
do { \
/* Determine the MPI communicator based on the scope */ \
MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
/* Check if the log level is within the allowed range */ \
if ((int)(level) <= (int)get_log_level()) { \
/* Synchronized print (collective) on the specified communicator */ \
PetscSynchronizedPrintf(comm, fmt, ##__VA_ARGS__); \
/* Ensure all ranks have finished printing before continuing */ \
PetscSynchronizedFlush(comm, PETSC_STDOUT); \
} \
} while (0)

Logging macro for PETSc-based applications with scope control, using synchronized output across processes.

This macro uses PetscSynchronizedPrintf and PetscSynchronizedFlush to ensure messages from different ranks are printed in a synchronized (rank-by-rank) manner, preventing interleaved outputs.

Parameters
scopeSpecifies the logging scope:
  • LOCAL: Logs on the current process using MPI_COMM_SELF.
  • GLOBAL: Logs on all processes using MPI_COMM_WORLD.
levelThe severity level of the message (e.g., LOG_INFO, LOG_ERROR).
fmtThe format string for the message (similar to printf).
...Additional arguments for the format string (optional).

Example usage: LOG_SYNC(LOCAL, LOG_ERROR, "An error occurred at index %ld.\n", idx); LOG_SYNC(GLOBAL, LOG_INFO, "Synchronized info: rank = %ld.\n", rank);

Definition at line 145 of file logging.h.

146 { \
147 /* Determine the MPI communicator based on the scope */ \
148 MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
149 /* Check if the log level is within the allowed range */ \
150 if ((int)(level) <= (int)get_log_level()) { \
151 /* Synchronized print (collective) on the specified communicator */ \
152 PetscSynchronizedPrintf(comm, fmt, ##__VA_ARGS__); \
153 /* Ensure all ranks have finished printing before continuing */ \
154 PetscSynchronizedFlush(comm, PETSC_STDOUT); \
155 } \
156 } while (0)

◆ LOG_SYNC_DEFAULT

#define LOG_SYNC_DEFAULT (   level,
  fmt,
  ... 
)
Value:
do { \
if ((int)(level) <= (int)get_log_level()) { \
PetscSynchronizedPrintf(MPI_COMM_WORLD, fmt, ##__VA_ARGS__); \
PetscSynchronizedFlush(MPI_COMM_WORLD, PETSC_STDOUT); \
} \
} while (0)

Default synchronized logging macro for PETSc-based applications.

This macro simplifies logging by defaulting the scope to GLOBAL (i.e., MPI_COMM_WORLD) and provides synchronized output across all processes.

Parameters
levelThe severity level of the message (e.g., LOG_ERROR, LOG_INFO).
fmtThe format string for the log message (similar to printf).
...Additional arguments for the format string (optional).

Example usage: LOG_SYNC_DEFAULT(LOG_ERROR, "Error at index %ld.\n", idx); LOG_SYNC_DEFAULT(LOG_INFO, "Process rank: %ld.\n", rank);

Note
  • By default, this macro logs across all MPI processes using MPI_COMM_WORLD.
  • If local (per-process) logging is required, use the more general LOG_SYNC macro.
  • The log level is filtered based on the value returned by get_log_level().

Definition at line 178 of file logging.h.

179 { \
180 if ((int)(level) <= (int)get_log_level()) { \
181 PetscSynchronizedPrintf(MPI_COMM_WORLD, fmt, ##__VA_ARGS__); \
182 PetscSynchronizedFlush(MPI_COMM_WORLD, PETSC_STDOUT); \
183 } \
184 } while (0)

◆ LOG_ALLOW

#define LOG_ALLOW (   scope,
  level,
  fmt,
  ... 
)
Value:
do { \
MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
if ((int)(level) <= (int)get_log_level() && is_function_allowed(__func__)) { \
PetscPrintf(comm, "[%s] " fmt, __func__, ##__VA_ARGS__); \
} \
} while (0)
PetscBool is_function_allowed(const char *functionName)
Checks if a given function is in the allow-list.
Definition logging.c:157

Logging macro that checks both the log level and whether the calling function is in the allowed-function list before printing.

Useful for selective, per-function logging.

Parameters
scopeSpecifies the logging scope (LOCAL or GLOBAL).
levelThe severity level of the message (e.g., LOG_INFO, LOG_ERROR).
fmtThe format string for the message (similar to printf).
...Additional arguments for the format string (optional).

Example usage: LOG_ALLOW(LOCAL, LOG_DEBUG, "Debugging info in function: %s\n", func);

Definition at line 200 of file logging.h.

201 { \
202 MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
203 if ((int)(level) <= (int)get_log_level() && is_function_allowed(__func__)) { \
204 PetscPrintf(comm, "[%s] " fmt, __func__, ##__VA_ARGS__); \
205 } \
206 } while (0)

◆ LOG_ALLOW_SYNC

#define LOG_ALLOW_SYNC (   scope,
  level,
  fmt,
  ... 
)
Value:
do { \
/* ------------------------------------------------------------------ */ \
/* Validate scope and pick communicator *before* any early exits. */ \
/* ------------------------------------------------------------------ */ \
MPI_Comm _comm; \
if ((scope) == LOCAL) _comm = MPI_COMM_SELF; \
else if ((scope) == GLOBAL) _comm = MPI_COMM_WORLD; \
else { \
fprintf(stderr, "LOG_ALLOW_SYNC ERROR: invalid scope (%d) at %s:%d\n", \
(scope), __FILE__, __LINE__); \
MPI_Abort(MPI_COMM_WORLD, 1); \
} \
\
/* ------------------------------------------------------------------ */ \
/* Decide whether *this* rank should actually print. */ \
/* ------------------------------------------------------------------ */ \
PetscBool _doPrint = \
is_function_allowed(__func__) && ((int)(level) <= (int)get_log_level()); \
\
if (_doPrint) { \
PetscSynchronizedPrintf(_comm, "[%s] " fmt, __func__, ##__VA_ARGS__); \
} \
\
/* ------------------------------------------------------------------ */ \
/* ALL ranks call the flush, even if they printed nothing. */ \
/* ------------------------------------------------------------------ */ \
PetscSynchronizedFlush(_comm, PETSC_STDOUT); \
} while (0)
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:46

Synchronized logging macro that checks both the log level and whether the calling function is in the allow-list before printing, using rank-ordered output across processes.

} while (0)

Synchronized logging macro that checks both the log level and whether the calling function is in the allow-list.

This macro uses PetscSynchronizedPrintf and PetscSynchronizedFlush to ensure messages from different ranks are printed in a rank-ordered fashion (i.e., to avoid interleaving). It also filters out messages if the current function is not in the allow-list (is_function_allowed(__func__)) or the requested log level is higher than get_log_level().

Parameters
scopeEither LOCAL (MPI_COMM_SELF) or GLOBAL (MPI_COMM_WORLD).
levelOne of LOG_ERROR, LOG_WARNING, LOG_INFO, LOG_DEBUG.
fmtA printf-style format string (e.g., "Message: %ld\n").
...Variadic arguments to fill in fmt.

Example usage:

LOG_ALLOW_SYNC(LOCAL, LOG_DEBUG, "Debug info: rank = %ld\n", rank);
LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "Synchronized info in %s\n", __func__);
#define LOG_ALLOW_SYNC(scope, level, fmt,...)
Synchronized logging macro that checks both the log level and whether the calling function is in the allow-list before printing.
Definition logging.h:267
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:31
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:32

Definition at line 267 of file logging.h.

268 { \
269 /* ------------------------------------------------------------------ */ \
270 /* Validate scope and pick communicator *before* any early exits. */ \
271 /* ------------------------------------------------------------------ */ \
272 MPI_Comm _comm; \
273 if ((scope) == LOCAL) _comm = MPI_COMM_SELF; \
274 else if ((scope) == GLOBAL) _comm = MPI_COMM_WORLD; \
275 else { \
276 fprintf(stderr, "LOG_ALLOW_SYNC ERROR: invalid scope (%d) at %s:%d\n", \
277 (scope), __FILE__, __LINE__); \
278 MPI_Abort(MPI_COMM_WORLD, 1); \
279 } \
280 \
281 /* ------------------------------------------------------------------ */ \
282 /* Decide whether *this* rank should actually print. */ \
283 /* ------------------------------------------------------------------ */ \
284 PetscBool _doPrint = \
285 is_function_allowed(__func__) && ((int)(level) <= (int)get_log_level()); \
286 \
287 if (_doPrint) { \
288 PetscSynchronizedPrintf(_comm, "[%s] " fmt, __func__, ##__VA_ARGS__); \
289 } \
290 \
291 /* ------------------------------------------------------------------ */ \
292 /* ALL ranks call the flush, even if they printed nothing. */ \
293 /* ------------------------------------------------------------------ */ \
294 PetscSynchronizedFlush(_comm, PETSC_STDOUT); \
295} while (0)

◆ LOG_LOOP_ALLOW

#define LOG_LOOP_ALLOW (   scope,
  level,
  iterVar,
  interval,
  fmt,
  ... 
)
Value:
do { \
if (is_function_allowed(__func__) && (int)(level) <= (int)get_log_level()) { \
if ((iterVar) % (interval) == 0) { \
MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
PetscPrintf(comm, "[%s] [%s=%d] " fmt, \
__func__, #iterVar, (iterVar), ##__VA_ARGS__); \
} \
} \
} while (0)

Logs a message inside a loop, but only every interval iterations.

Parameters
scopeLOCAL or GLOBAL.
levelLOG_* level.
iterVarThe loop variable (e.g., i).
intervalOnly log when (iterVar % interval == 0).
fmtprintf-style format string.
...Variadic arguments to include in the formatted message.

Example: for (int i = 0; i < 100; i++) { LOG_LOOP_ALLOW(LOCAL, LOG_DEBUG, i, 10, "Value of i=%d\n", i); }

Definition at line 312 of file logging.h.

313 { \
314 if (is_function_allowed(__func__) && (int)(level) <= (int)get_log_level()) { \
315 if ((iterVar) % (interval) == 0) { \
316 MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
317 PetscPrintf(comm, "[%s] [%s=%d] " fmt, \
318 __func__, #iterVar, (iterVar), ##__VA_ARGS__); \
319 } \
320 } \
321 } while (0)

◆ LOG_LOOP_ALLOW_EXACT

#define LOG_LOOP_ALLOW_EXACT (   scope,
  level,
  var,
  val,
  fmt,
  ... 
)
Value:
do { \
/* First, perform the cheap, standard gatekeeper checks. */ \
if (is_function_allowed(__func__) && (int)(level) <= (int)get_log_level()) { \
/* Only if those pass, check the user's specific condition. */ \
if ((var) == (val)) { \
MPI_Comm comm = ((scope) == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
/* Print the standard prefix, then the user's custom message. */ \
PetscPrintf(comm, "[%s] [%s=%d] " fmt, \
__func__, #var, (var), ##__VA_ARGS__); \
} \
} \
} while (0)

Logs a custom message if a variable equals a specific value.

This is a variadic macro for logging a single event when a condition is met. It is extremely useful for printing debug information at a specific iteration of a loop or when a state variable reaches a certain value.

Parameters
scopeEither LOCAL or GLOBAL.
levelThe logging level.
varThe variable to check (e.g., a loop counter 'k').
valThe value that triggers the log (e.g., 6). The log prints if var == val.
...A printf-style format string and its corresponding arguments.

Definition at line 348 of file logging.h.

349 { \
350 /* First, perform the cheap, standard gatekeeper checks. */ \
351 if (is_function_allowed(__func__) && (int)(level) <= (int)get_log_level()) { \
352 /* Only if those pass, check the user's specific condition. */ \
353 if ((var) == (val)) { \
354 MPI_Comm comm = ((scope) == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
355 /* Print the standard prefix, then the user's custom message. */ \
356 PetscPrintf(comm, "[%s] [%s=%d] " fmt, \
357 __func__, #var, (var), ##__VA_ARGS__); \
358 } \
359 } \
360 } while (0)

◆ LOG_ARRAY_ELEMENT_ALLOW

#define LOG_ARRAY_ELEMENT_ALLOW (   scope,
  level,
  arr,
  length,
  idx,
  fmt 
)
Value:
do { \
if (is_function_allowed(__func__) && (int)(level) <= (int)get_log_level()) { \
if ((idx) >= 0 && (idx) < (length)) { \
MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
PetscPrintf(comm, "[%s] arr[%d] = " fmt "\n", \
__func__, (idx), (arr)[idx]); \
} \
} \
} while (0)

Logs a single element of an array, given an index.

Parameters
scopeEither LOCAL or GLOBAL.
levelLOG_ERROR, LOG_WARNING, LOG_INFO, or LOG_DEBUG.
arrPointer to the array to log from.
lengthThe length of the array (to prevent out-of-bounds).
idxThe index of the element to print.
fmtThe printf-style format specifier (e.g. "%g", "%f", etc.).

This macro only logs if: 1) The current function is in the allow-list (is_function_allowed(__func__)). 2) The requested logging level <= the current global get_log_level(). 3) The index idx is valid (0 <= idx < length).

Definition at line 377 of file logging.h.

378 { \
379 if (is_function_allowed(__func__) && (int)(level) <= (int)get_log_level()) { \
380 if ((idx) >= 0 && (idx) < (length)) { \
381 MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
382 PetscPrintf(comm, "[%s] arr[%d] = " fmt "\n", \
383 __func__, (idx), (arr)[idx]); \
384 } \
385 } \
386 } while (0)

◆ LOG_ARRAY_SUBRANGE_ALLOW

#define LOG_ARRAY_SUBRANGE_ALLOW (   scope,
  level,
  arr,
  length,
  start,
  end,
  fmt 
)
Value:
do { \
if (is_function_allowed(__func__) && (int)(level) <= (int)get_log_level()) { \
MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
PetscInt _start = (start) < 0 ? 0 : (start); \
PetscInt _end = (end) >= (length) ? (length) - 1 : (end); \
for (PetscInt i = _start; i <= _end; i++) { \
PetscPrintf(comm, "[%s] arr[%d] = " fmt "\n", __func__, i, (arr)[i]); \
} \
} \
} while (0)

Logs a consecutive subrange of an array.

Parameters
scopeEither LOCAL or GLOBAL.
levelLOG_ERROR, LOG_WARNING, LOG_INFO, or LOG_DEBUG.
arrPointer to the array to log from.
lengthTotal length of the array.
startStarting index of the subrange.
endEnding index of the subrange (inclusive).
fmtThe printf-style format specifier (e.g., "%g", "%f").

This macro prints each element arr[i] for i in [start, end], bounded by [0, length-1].

Definition at line 401 of file logging.h.

402 { \
403 if (is_function_allowed(__func__) && (int)(level) <= (int)get_log_level()) { \
404 MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
405 PetscInt _start = (start) < 0 ? 0 : (start); \
406 PetscInt _end = (end) >= (length) ? (length) - 1 : (end); \
407 for (PetscInt i = _start; i <= _end; i++) { \
408 PetscPrintf(comm, "[%s] arr[%d] = " fmt "\n", __func__, i, (arr)[i]); \
409 } \
410 } \
411 } while (0)

◆ LOG_PROFILE_MSG

#define LOG_PROFILE_MSG (   scope,
  fmt,
  ... 
)
Value:
do { \
if ((int)(LOG_PROFILE) <= (int)get_log_level()) { \
MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
PetscPrintf(comm, "[PROFILE] " fmt, ##__VA_ARGS__); \
} \
} while (0)
@ LOG_PROFILE
Exclusive log level for performance timing and profiling.
Definition logging.h:30

Definition at line 413 of file logging.h.

414 { \
415 if ((int)(LOG_PROFILE) <= (int)get_log_level()) { \
416 MPI_Comm comm = (scope == LOCAL) ? MPI_COMM_SELF : MPI_COMM_WORLD; \
417 PetscPrintf(comm, "[PROFILE] " fmt, ##__VA_ARGS__); \
418 } \
419 } while (0)

◆ PROFILE_FUNCTION_BEGIN

#define PROFILE_FUNCTION_BEGIN    _ProfilingStart(__FUNCT__)

Marks the beginning of a profiled code block (typically a function).

Place this macro at the very beginning of a function you wish to profile. It automatically captures the function's name and starts a wall-clock timer.

Definition at line 737 of file logging.h.

◆ PROFILE_FUNCTION_END

#define PROFILE_FUNCTION_END    _ProfilingEnd(__FUNCT__)

Marks the end of a profiled code block.

Place this macro just before every return point in a function that starts with PROFILE_FUNCTION_BEGIN. It stops the timer and accumulates the results.

Definition at line 746 of file logging.h.

Enumeration Type Documentation

◆ LogLevel

enum LogLevel

Enumeration of logging levels.

Defines various severity levels for logging messages.

Enumerator
LOG_ERROR 

Critical errors that may halt the program.

LOG_WARNING 

Non-critical issues that warrant attention.

LOG_PROFILE 

Exclusive log level for performance timing and profiling.

LOG_INFO 

Informational messages about program execution.

LOG_DEBUG 

Detailed debugging information.

LOG_TRACE 

Very fine-grained tracing information for in-depth debugging.

LOG_VERBOSE 

Extremely detailed logs, typically for development use only.

Definition at line 27 of file logging.h.

27 {
28 LOG_ERROR = 0, /**< Critical errors that may halt the program */
29 LOG_WARNING, /**< Non-critical issues that warrant attention */
30 LOG_PROFILE, /**< Exclusive log level for performance timing and profiling */
31 LOG_INFO, /**< Informational messages about program execution */
32 LOG_DEBUG, /**< Detailed debugging information */
33 LOG_TRACE, /**< Very fine-grained tracing information for in-depth debugging */
34 LOG_VERBOSE /**< Extremely detailed logs, typically for development use only */
35} LogLevel;
LogLevel
Enumeration of logging levels.
Definition logging.h:27
@ LOG_ERROR
Critical errors that may halt the program.
Definition logging.h:28
@ LOG_TRACE
Very fine-grained tracing information for in-depth debugging.
Definition logging.h:33
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
@ LOG_VERBOSE
Extremely detailed logs, typically for development use only.
Definition logging.h:34

Function Documentation

◆ get_log_level()

LogLevel get_log_level ( )

Retrieves the current logging level from the environment variable LOG_LEVEL.

The function checks the LOG_LEVEL environment variable and sets the logging level accordingly. Supported values are "DEBUG", "INFO", "WARNING", "PROFILE", "VERBOSE", and "TRACE"; the level defaults to "ERROR" if the variable is not set or unrecognized.

Returns
LogLevel The current logging level.

The function checks the LOG_LEVEL environment variable and sets the logging level accordingly. Supported values are "DEBUG", "INFO", "WARNING", "PROFILE", "VERBOSE", and "TRACE"; the level defaults to "ERROR" if the variable is not set or unrecognized. The log level is cached after the first call to avoid repeated environment variable checks.

Returns
LogLevel The current logging level.

Definition at line 39 of file logging.c.

39 {
40 if (current_log_level == -1) { // Log level not set yet
41 const char *env = getenv("LOG_LEVEL");
42 if (!env) {
43 current_log_level = LOG_ERROR; // Default level
44 }
45 else if (strcmp(env, "DEBUG") == 0) {
47 }
48 else if (strcmp(env, "INFO") == 0) {
50 }
51 else if (strcmp(env, "WARNING") == 0) {
53 }
54 else if (strcmp(env, "PROFILE") == 0) { // <-- New profile level
56 }
57 else if (strcmp(env, "VERBOSE") == 0) {
59 }
60 else if (strcmp(env, "TRACE") == 0) {
62 }
63 else {
64 current_log_level = LOG_ERROR; // Default if unrecognized
65 }
66 }
67 return current_log_level;
68}
static LogLevel current_log_level
Static variable to cache the current logging level.
Definition logging.c:14
Here is the caller graph for this function:

◆ print_log_level()

PetscErrorCode print_log_level ( void  )

Prints the current logging level to the console.

This function retrieves the log level using get_log_level() and prints the corresponding log level name. It helps verify the logging configuration at runtime.

The log levels supported are:

  • LOG_PROFILE (0) : Logs performance profiling details.
  • LOG_ERROR (1) : Logs only critical errors.
  • LOG_WARNING (2) : Logs warnings and errors.
  • LOG_INFO (3) : Logs general information, warnings, and errors.
  • LOG_DEBUG (4) : Logs debugging information, info, warnings, and errors.
Note
The log level is determined from the LOG_LEVEL environment variable. If LOG_LEVEL is not set, it defaults to LOG_ERROR.
See also
get_log_level()

This function retrieves the log level using get_log_level() and prints the corresponding log level name. It helps verify the logging configuration at runtime.

Note
The log level is determined from the LOG_LEVEL environment variable. If LOG_LEVEL is not set, it defaults to LOG_ERROR.
See also
get_log_level()

Definition at line 82 of file logging.c.

83{
84 PetscMPIInt rank;
85 PetscErrorCode ierr;
86 int level;
87 const char *level_name;
88
89 PetscFunctionBeginUser;
90 /* get MPI rank */
91 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRMPI(ierr);
92
93 /* decide level name */
94 level = get_log_level();
95 level_name = (level == LOG_ERROR) ? "ERROR" :
96 (level == LOG_WARNING) ? "WARNING" :
97 (level == LOG_INFO) ? "INFO" :
98 (level == LOG_DEBUG) ? "DEBUG" :
99 (level == LOG_VERBOSE) ? "VERBOSE" :
100 (level == LOG_TRACE) ? "TRACE" :
101 (level == LOG_PROFILE) ? "PROFILE" : "UNKNOWN";
102
103 /* print it out */
104 ierr = PetscPrintf(PETSC_COMM_SELF,
105 "Current log level: %s (%d) | rank: %d\n",
106 level_name, level, (int)rank);
107 CHKERRMPI(ierr);
108
109 PetscFunctionReturn(PETSC_SUCCESS);
110}
LogLevel get_log_level()
Retrieves the current logging level from the environment variable LOG_LEVEL.
Definition logging.c:39
Here is the call graph for this function:
Here is the caller graph for this function:

◆ set_allowed_functions()

void set_allowed_functions ( const char **  functionList,
int  count 
)

Sets the global list of function names that are allowed to log.

You can replace the entire list of allowed function names at runtime.

Parameters
functionListAn array of function name strings (e.g., {"foo", "bar"}).
countThe number of entries in the array.

The existing allow-list is cleared and replaced by the new one. Note that if you pass an empty list (count = 0), the allow-list becomes empty, and is_function_allowed() then permits all functions by default (an empty list means "allow everything").

Definition at line 122 of file logging.c.

123{
124 // 1. Free any existing entries
125 if (gAllowedFunctions) {
126 for (int i = 0; i < gNumAllowed; ++i) {
127 free(gAllowedFunctions[i]); // each was strdup'ed
128 }
129 free(gAllowedFunctions);
130 gAllowedFunctions = NULL;
131 gNumAllowed = 0;
132 }
133
134 // 2. Allocate new array
135 if (count > 0) {
136 gAllowedFunctions = (char**)malloc(sizeof(char*) * count);
137 }
138
139 // 3. Copy the new entries
140 for (int i = 0; i < count; ++i) {
141 // strdup is a POSIX function. If not available, implement your own string copy.
142 gAllowedFunctions[i] = strdup(functionList[i]);
143 }
144 gNumAllowed = count;
145}
static char ** gAllowedFunctions
Global/static array of function names allowed to log.
Definition logging.c:21
static int gNumAllowed
Number of entries in the gAllowedFunctions array.
Definition logging.c:26
Here is the caller graph for this function:

◆ is_function_allowed()

PetscBool is_function_allowed ( const char *  functionName)

Checks if a given function is in the allow-list.

This helper is used internally by the LOG_ALLOW macro.

Checks if a given function is in the allow-list.

Parameters
functionNameThe name of the function to check.
Returns
PETSC_TRUE if functionName is allowed, otherwise PETSC_FALSE.

If no functions are in the list, everything is allowed by default (an empty allow-list disables filtering). You can reverse this logic if you prefer to allow nothing unless explicitly listed.

Definition at line 157 of file logging.c.

158{
159 /* no list ⇒ allow all */
160 if (gNumAllowed == 0) {
161 return PETSC_TRUE;
162 }
163
164 /* otherwise only the listed functions are allowed */
165 for (int i = 0; i < gNumAllowed; ++i) {
166 if (strcmp(gAllowedFunctions[i], functionName) == 0) {
167 return PETSC_TRUE;
168 }
169 }
170 return PETSC_FALSE;
171}
Here is the caller graph for this function:

◆ LOG_CELL_VERTICES()

PetscErrorCode LOG_CELL_VERTICES ( const Cell cell,
PetscMPIInt  rank 
)

Prints the coordinates of a cell's vertices.

This function iterates through the eight vertices of a given cell and prints their coordinates. It is primarily used for debugging purposes to verify the correctness of cell vertex assignments.

Parameters
[in]cellPointer to a Cell structure representing the cell, containing its vertices.
[in]rankMPI rank for identification (useful in parallel environments).
Returns
PetscErrorCode Returns 0 to indicate successful execution. Non-zero on failure.
Note
  • Ensure that the cell pointer is not NULL before calling this function.

Definition at line 187 of file logging.c.

188{
189
190 // Validate input pointers
191 if (cell == NULL) {
192 LOG_ALLOW(LOCAL,LOG_ERROR, "'cell' is NULL.\n");
193 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "LOG_CELL_VERTICES - Input parameter 'cell' is NULL.");
194 }
195
196 LOG_ALLOW(LOCAL,LOG_VERBOSE, "Rank %d, Cell Vertices:\n", rank);
197 for(int i = 0; i < 8; i++){
198 LOG_ALLOW(LOCAL,LOG_VERBOSE, " Vertex[%d]: (%.2f, %.2f, %.2f)\n",
199 i, cell->vertices[i].x, cell->vertices[i].y, cell->vertices[i].z);
200 }
201
202 return 0; // Indicate successful execution
203}
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:200
PetscScalar x
Definition variables.h:101
PetscScalar z
Definition variables.h:101
PetscScalar y
Definition variables.h:101
Cmpnts vertices[8]
Coordinates of the eight vertices of the cell.
Definition variables.h:161
Here is the caller graph for this function:

◆ LOG_FACE_DISTANCES()

PetscErrorCode LOG_FACE_DISTANCES ( PetscReal *  d)

Prints the signed distances to each face of the cell.

This function iterates through the six signed distances from a point to each face of a given cell and prints their values. It is primarily used for debugging purposes to verify the correctness of distance calculations.

Parameters
[in]dAn array of six PetscReal values representing the signed distances. The indices correspond to:
  • d[LEFT]: Left Face
  • d[RIGHT]: Right Face
  • d[BOTTOM]: Bottom Face
  • d[TOP]: Top Face
  • d[FRONT]: Front Face
  • d[BACK]: Back Face
Returns
PetscErrorCode Returns 0 to indicate successful execution. Non-zero on failure.
Note
  • Ensure that the d array is correctly populated with signed distances before calling this function.

Definition at line 227 of file logging.c.

228{
229
230 // Validate input array
231 if (d == NULL) {
232 LOG_ALLOW(LOCAL,LOG_ERROR, " 'd' is NULL.\n");
233 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, " Input array 'd' is NULL.");
234 }
235
236 PetscPrintf(PETSC_COMM_SELF, " Face Distances:\n");
237 PetscPrintf(PETSC_COMM_SELF, " LEFT(%d): %.15f\n", LEFT, d[LEFT]);
238 PetscPrintf(PETSC_COMM_SELF, " RIGHT(%d): %.15f\n", RIGHT, d[RIGHT]);
239 PetscPrintf(PETSC_COMM_SELF, " BOTTOM(%d): %.15f\n", BOTTOM, d[BOTTOM]);
240 PetscPrintf(PETSC_COMM_SELF, " TOP(%d): %.15f\n", TOP, d[TOP]);
241 PetscPrintf(PETSC_COMM_SELF, " FRONT(%d): %.15f\n", FRONT, d[FRONT]);
242 PetscPrintf(PETSC_COMM_SELF, " BACK(%d): %.15f\n", BACK, d[BACK]);
243
244 return 0; // Indicate successful execution
245}
@ TOP
Definition variables.h:145
@ FRONT
Definition variables.h:145
@ BOTTOM
Definition variables.h:145
@ BACK
Definition variables.h:145
@ LEFT
Definition variables.h:145
@ RIGHT
Definition variables.h:145
Here is the caller graph for this function:

◆ LOG_PARTICLE_FIELDS()

PetscErrorCode LOG_PARTICLE_FIELDS ( UserCtx user,
PetscInt  printInterval 
)

Prints particle fields in a table that automatically adjusts its column widths.

This function retrieves data from the particle swarm and prints a table where the width of each column is determined by the maximum width needed to display the data. Only every 'printInterval'-th particle is printed.

Parameters
[in]userPointer to the UserCtx structure.
[in]printIntervalOnly every printInterval‑th particle is printed.
Returns
PetscErrorCode Returns 0 on success.

Definition at line 400 of file logging.c.

401{
402 DM swarm = user->swarm;
403 PetscErrorCode ierr;
404 PetscInt localNumParticles;
405 PetscReal *positions = NULL;
406 PetscInt64 *particleIDs = NULL;
407 PetscMPIInt *particleRanks = NULL;
408 PetscInt *cellIDs = NULL;
409 PetscReal *weights = NULL;
410 PetscReal *velocities = NULL;
411 PetscMPIInt rank;
412
413 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
414 LOG_ALLOW(LOCAL,LOG_INFO, "Rank %d is retrieving particle data.\n", rank);
415
416 ierr = DMSwarmGetLocalSize(swarm, &localNumParticles); CHKERRQ(ierr);
417 LOG_ALLOW(LOCAL,LOG_DEBUG,"Rank %d has %d particles.\n", rank, localNumParticles);
418
419 ierr = DMSwarmGetField(swarm, "position", NULL, NULL, (void**)&positions); CHKERRQ(ierr);
420 ierr = DMSwarmGetField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&particleIDs); CHKERRQ(ierr);
421 ierr = DMSwarmGetField(swarm, "DMSwarm_rank", NULL, NULL, (void**)&particleRanks); CHKERRQ(ierr);
422 ierr = DMSwarmGetField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
423 ierr = DMSwarmGetField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
424 ierr = DMSwarmGetField(swarm, "velocity", NULL, NULL, (void**)&velocities); CHKERRQ(ierr);
425
426 /* Compute maximum column widths. */
427 int wRank, wPID, wCell, wPos, wVel, wWt;
428 wRank = wPID = wCell = wPos = wVel = wWt = 0;
429 ierr = ComputeMaxColumnWidths(localNumParticles, particleRanks, particleIDs, cellIDs,
430 positions, velocities, weights,
431 &wRank, &wPID, &wCell, &wPos, &wVel, &wWt); CHKERRQ(ierr);
432
433 /* Build a header string and a row format string. */
434 char headerFmt[256];
435 char rowFmt[256];
436 BuildHeaderString(headerFmt, sizeof(headerFmt), wRank, wPID, wCell, wPos, wVel, wWt);
437 BuildRowFormatString(wRank, wPID, wCell, wPos, wVel, wWt, rowFmt, sizeof(rowFmt));
438
439 /* Print header (using synchronized printing for parallel output). */
440 ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "--------------------------------------------------------------------------------------------------------------\n"); CHKERRQ(ierr);
441 ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s", headerFmt); CHKERRQ(ierr);
442 ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "--------------------------------------------------------------------------------------------------------------\n"); CHKERRQ(ierr);
443
444 /* Loop over particles and print every printInterval-th row. */
445 char rowStr[256];
446 for (PetscInt i = 0; i < localNumParticles; i++) {
447 if (i % printInterval == 0) {
448 // ------- DEBUG
449 //char cellStr[TMP_BUF_SIZE], posStr[TMP_BUF_SIZE], velStr[TMP_BUF_SIZE], wtStr[TMP_BUF_SIZE];
450 //CellToStr(&cellIDs[3*i], cellStr, TMP_BUF_SIZE);
451 //TripleRealToStr(&positions[3*i], posStr, TMP_BUF_SIZE);
452 //TripleRealToStr(&velocities[3*i], velStr, TMP_BUF_SIZE);
453 // TripleRealToStr(&weights[3*i], wtStr, TMP_BUF_SIZE);
454
455 // if (rank == 0) { // Or whatever rank is Rank 0
456 //PetscPrintf(PETSC_COMM_SELF, "[Rank 0 DEBUG LPF] Particle %lld: PID=%lld, Rank=%d\n", (long long)i, (long long)particleIDs[i], particleRanks[i]);
457 //PetscPrintf(PETSC_COMM_SELF, "[Rank 0 DEBUG LPF] Raw Pos: (%.10e, %.10e, %.10e)\n", positions[3*i+0], positions[3*i+1], positions[3*i+2]);
458 //PetscPrintf(PETSC_COMM_SELF, "[Rank 0 DEBUG LPF] Str Pos: %s\n", posStr);
459 //PetscPrintf(PETSC_COMM_SELF, "[Rank 0 DEBUG LPF] Raw Vel: (%.10e, %.10e, %.10e)\n", velocities[3*i+0], velocities[3*i+1], velocities[3*i+2]);
460 // PetscPrintf(PETSC_COMM_SELF, "[Rank 0 DEBUG LPF] Str Vel: %s\n", velStr);
461 // Add similar for cell, weights
462 // PetscPrintf(PETSC_COMM_SELF, "[Rank 0 DEBUG LPF] About to build rowStr for particle %lld\n", (long long)i);
463 // fflush(stdout);
464 // }
465
466 // snprintf(rowStr, sizeof(rowStr), rowFmt,
467 // particleRanks[i],
468 // particleIDs[i],
469 // cellStr,
470 // posStr,
471 // velStr,
472 // wtStr);
473
474
475 // ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s", rowStr); CHKERRQ(ierr);
476
477 // ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s", rowStr); CHKERRQ(ierr);
478
479 // -------- DEBUG
480 /* Format the row by converting each field to a string first.
481 * We use temporary buffers and then build the row string.
482 */
483
484 char cellStr[TMP_BUF_SIZE], posStr[TMP_BUF_SIZE], velStr[TMP_BUF_SIZE], wtStr[TMP_BUF_SIZE];
485 CellToStr(&cellIDs[3*i], cellStr, TMP_BUF_SIZE);
486 TripleRealToStr(&positions[3*i], posStr, TMP_BUF_SIZE);
487 TripleRealToStr(&velocities[3*i], velStr, TMP_BUF_SIZE);
488 TripleRealToStr(&weights[3*i], wtStr, TMP_BUF_SIZE);
489
490 /* Build the row string. Note that for the integer fields we can use the row format string. */
491 snprintf(rowStr, sizeof(rowStr), rowFmt,
492 particleRanks[i],
493 particleIDs[i],
494 cellStr,
495 posStr,
496 velStr,
497 wtStr);
498 ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s", rowStr); CHKERRQ(ierr);
499 }
500 }
501
502
503 ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "--------------------------------------------------------------------------------------------------------------\n"); CHKERRQ(ierr);
504 ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\n"); CHKERRQ(ierr);
505 ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT); CHKERRQ(ierr);
506
507 LOG_ALLOW_SYNC(GLOBAL,LOG_DEBUG,"Completed printing on Rank %d.\n", rank);
508
509 /* Restore fields */
510 ierr = DMSwarmRestoreField(swarm, "position", NULL, NULL, (void**)&positions); CHKERRQ(ierr);
511 ierr = DMSwarmRestoreField(swarm, "DMSwarm_pid", NULL, NULL, (void**)&particleIDs); CHKERRQ(ierr);
512 ierr = DMSwarmRestoreField(swarm, "DMSwarm_rank", NULL, NULL, (void**)&particleRanks); CHKERRQ(ierr);
513 ierr = DMSwarmRestoreField(swarm, "DMSwarm_CellID", NULL, NULL, (void**)&cellIDs); CHKERRQ(ierr);
514 ierr = DMSwarmRestoreField(swarm, "weight", NULL, NULL, (void**)&weights); CHKERRQ(ierr);
515 ierr = DMSwarmRestoreField(swarm, "velocity", NULL, NULL, (void**)&velocities); CHKERRQ(ierr);
516
517 LOG_ALLOW(LOCAL,LOG_DEBUG, "Restored all particle fields.\n");
518 return 0;
519}
#define TMP_BUF_SIZE
Definition logging.c:5
static void BuildRowFormatString(PetscMPIInt wRank, PetscInt wPID, PetscInt wCell, PetscInt wPos, PetscInt wVel, PetscInt wWt, char *fmtStr, size_t bufSize)
Definition logging.c:363
static void BuildHeaderString(char *headerStr, size_t bufSize, PetscMPIInt wRank, PetscInt wPID, PetscInt wCell, PetscInt wPos, PetscInt wVel, PetscInt wWt)
Definition logging.c:376
static void CellToStr(const PetscInt *cell, char *buf, size_t bufsize)
Definition logging.c:266
static void TripleRealToStr(const PetscReal *arr, char *buf, size_t bufsize)
Definition logging.c:274
static PetscErrorCode ComputeMaxColumnWidths(PetscInt nParticles, const PetscMPIInt *ranks, const PetscInt64 *pids, const PetscInt *cellIDs, const PetscReal *positions, const PetscReal *velocities, const PetscReal *weights, int *wRank, int *wPID, int *wCell, int *wPos, int *wVel, int *wWt)
Definition logging.c:299
Here is the call graph for this function:
Here is the caller graph for this function:

◆ FreeAllowedFunctions()

PetscErrorCode FreeAllowedFunctions ( char **  funcs,
PetscInt  n 
)

Free an array previously returned by LoadAllowedFunctionsFromFile().

Parameters
[in,out]funcsArray of strings to release (may be NULL).
[in]nNumber of entries in funcs. Ignored if funcs is NULL.
Returns
0 on success or a PETSc error code.

Definition at line 625 of file logging.c.

626{
627 PetscErrorCode ierr;
628 PetscFunctionBegin;
629 if (funcs) {
630 for (PetscInt i = 0; i < n; ++i) {
631 ierr = PetscFree(funcs[i]); CHKERRQ(ierr);
632 }
633 ierr = PetscFree(funcs); CHKERRQ(ierr);
634 }
635 PetscFunctionReturn(0);
636}

◆ LoadAllowedFunctionsFromFile()

PetscErrorCode LoadAllowedFunctionsFromFile ( const char  filename[],
char ***  funcsOut,
PetscInt *  nOut 
)

Load function names from a text file.

The file is expected to contain one identifier per line. Blank lines and lines whose first non‑blank character is a # are silently skipped so the file can include comments. Example:

# Allowed function list
InitializeSimulation
InterpolateAllFieldsToSwarm # inline comments are OK, too
(Cross-references: InterpolateAllFieldsToSwarm — PetscErrorCode InterpolateAllFieldsToSwarm(UserCtx *user), which interpolates all relevant fields from the DMDA to the DMSwarm; main — int main(int argc, char **argv))

The routine allocates memory as needed (growing an internal buffer with PetscRealloc()) and returns the resulting array and its length to the caller. Use FreeAllowedFunctions() to clean up when done.

Parameters
[in]filenamePath of the configuration file to read.
[out]funcsOutOn success, points to a freshly‑allocated array of char* (size nOut).
[out]nOutNumber of valid entries in funcsOut.
Returns
0 on success, or a PETSc error code on failure (e.g. I/O error, OOM).

Definition at line 566 of file logging.c.

569{
570 FILE *fp = NULL;
571 char **funcs = NULL;
572 size_t cap = 16; /* initial capacity */
573 size_t n = 0; /* number of names */
574 char line[PETSC_MAX_PATH_LEN];
575 PetscErrorCode ierr;
576
577 PetscFunctionBegin;
578
579 /* ---------------------------------------------------------------------- */
580 /* 1. Open file */
581 fp = fopen(filename, "r");
582 if (!fp) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN,
583 "Cannot open %s", filename);
584
585 /* 2. Allocate initial pointer array */
586 ierr = PetscMalloc1(cap, &funcs); CHKERRQ(ierr);
587
588 /* 3. Read file line by line */
589 while (fgets(line, sizeof line, fp)) {
590 /* Strip everything after a comment character '#'. */
591 char *hash = strchr(line, '#');
592 if (hash) *hash = '\0';
593
594 trim(line); /* remove leading/trailing blanks */
595 if (!*line) continue; /* skip if empty */
596
597 /* Grow the array if necessary */
598 if (n == cap) {
599 cap *= 2;
600 ierr = PetscRealloc(cap * sizeof(*funcs), (void **)&funcs); CHKERRQ(ierr);
601 }
602
603 /* Deep‑copy the cleaned identifier */
604 ierr = PetscStrallocpy(line, &funcs[n++]); CHKERRQ(ierr);
605 }
606 fclose(fp);
607
608 /* 4. Return results to caller */
609 *funcsOut = funcs;
610 *nOut = (PetscInt)n;
611
612 PetscFunctionReturn(0);
613}
static void trim(char *s)
Definition logging.c:522
Here is the call graph for this function:
Here is the caller graph for this function:

◆ BCFaceToString()

const char * BCFaceToString ( BCFace  face)

Helper function to convert BCFace enum to a string representation.

Parameters
[in]faceThe BCFace enum value.
Returns
Pointer to a constant string representing the face.

Definition at line 643 of file logging.c.

643 {
644 switch (face) {
645 case BC_FACE_NEG_X: return "-Xi (I-Min)";
646 case BC_FACE_POS_X: return "+Xi (I-Max)";
647 case BC_FACE_NEG_Y: return "-Eta (J-Min)";
648 case BC_FACE_POS_Y: return "+Eta (J-Max)";
649 case BC_FACE_NEG_Z: return "-Zeta (K-Min)";
650 case BC_FACE_POS_Z: return "+Zeta (K-Max)";
651 default: return "Unknown Face";
652 }
653}
@ BC_FACE_NEG_X
Definition variables.h:204
@ BC_FACE_POS_Z
Definition variables.h:206
@ BC_FACE_POS_Y
Definition variables.h:205
@ BC_FACE_NEG_Z
Definition variables.h:206
@ BC_FACE_POS_X
Definition variables.h:204
@ BC_FACE_NEG_Y
Definition variables.h:205
Here is the caller graph for this function:

◆ FieldInitializationToString()

const char * FieldInitializationToString ( PetscInt  FieldInitialization)

Helper function to convert FieldInitialization to a string representation.

Parameters
[in]FieldInitializationThe FieldInitialization value (PetscInt).
Returns
Pointer to a constant string representing the FieldInitialization.

Definition at line 660 of file logging.c.

661{
662 switch(FieldInitialization){
663 case 0: return "Zero";
664 case 1: return "Constant Normal velocity";
665 case 2: return "Poiseuille Normal velocity";
666 default: return "Unknown Field Initialization";
667 }
668}
Here is the caller graph for this function:

◆ ParticleInitializationToString()

const char * ParticleInitializationToString ( ParticleInitializationType  ParticleInitialization)

Helper function to convert ParticleInitialization to a string representation.

Parameters
[in]PetscIntThe ParticleInitialization value.
Returns
Pointer to a constant string representing the FieldInitialization.
Parameters
[in]ParticleInitializationTypeThe ParticleInitialization value.
Returns
Pointer to a constant string representing the ParticleInitialization.

Definition at line 675 of file logging.c.

676{
677 switch(ParticleInitialization){
678 case PARTICLE_INIT_SURFACE_RANDOM: return "Surface: Random";
679 case PARTICLE_INIT_VOLUME: return "Volume";
680 case PARTICLE_INIT_POINT_SOURCE: return "Point Source";
681 case PARTICLE_INIT_SURFACE_EDGES: return "Surface: At edges";
682 default: return "Unknown Particle Initialization";
683 }
684}
@ PARTICLE_INIT_SURFACE_RANDOM
Random placement on the inlet face.
Definition variables.h:466
@ PARTICLE_INIT_SURFACE_EDGES
Deterministic placement at inlet face edges.
Definition variables.h:469
@ PARTICLE_INIT_POINT_SOURCE
All particles at a fixed (psrc_x,psrc_y,psrc_z) — for validation.
Definition variables.h:468
@ PARTICLE_INIT_VOLUME
Random volumetric distribution across the domain.
Definition variables.h:467
Here is the caller graph for this function:

◆ LESModelToString()

const char * LESModelToString ( LESModelType  LESFlag)

Helper function to convert LES Flag to a string representation.

Parameters
[in]LESModelTypeThe LES flag value.
Returns
Pointer to a constant string representing the LES Flag.
Parameters
[in]LESModelTypeThe LES flag value.
Returns
Pointer to a constant string representing the LES Model Type.

Definition at line 691 of file logging.c.

692{
693 switch(LESFlag){
694 case NO_LES_MODEL: return "No LES";
695 case CONSTANT_SMAGORINSKY: return "Constant Smagorinsky";
696 case DYNAMIC_SMAGORINSKY: return "Dynamic Smagorinsky";
697 default: return "Unknown LES Flag";
698 }
699}
@ DYNAMIC_SMAGORINSKY
Definition variables.h:450
@ NO_LES_MODEL
Definition variables.h:448
@ CONSTANT_SMAGORINSKY
Definition variables.h:449
Here is the caller graph for this function:

◆ MomentumSolverTypeToString()

const char * MomentumSolverTypeToString ( MomentumSolverType  SolverFlag)

Helper function to convert Momentum Solver flag to a string representation.

Parameters
[in]MomentumSolverTypeThe Momentum Solver flag value.
Returns
Pointer to a constant string representing the MomentumSolverType.

Definition at line 706 of file logging.c.

707{
708 switch(SolverFlag){
709 case MOMENTUM_SOLVER_EXPLICIT_RK: return "Explicit 4 stage Runge-Kutta ";
710 case MOMENTUM_SOLVER_DUALTIME_PICARD_RK4: return "Dual Time Stepping with Picard Iterations and 4 stage Runge-Kutta Smoothing";
711 case MOMENTUM_SOLVER_DUALTIME_NK_ANALYTIC_JACOBIAN: return "Dual Time Stepping with Newton-Krylov Iterations and Analytic Jacobian";
712 case MOMENTUM_SOLVER_DUALTIME_NK_ARNOLDI: return "Dual Time Stepping with Newton-Krylov Arnoldi Iteration Method.";
713 default: return "Unknown Momentum Solver Type";
714 }
715}
@ MOMENTUM_SOLVER_DUALTIME_NK_ARNOLDI
Definition variables.h:460
@ MOMENTUM_SOLVER_EXPLICIT_RK
Definition variables.h:458
@ MOMENTUM_SOLVER_DUALTIME_PICARD_RK4
Definition variables.h:459
@ MOMENTUM_SOLVER_DUALTIME_NK_ANALYTIC_JACOBIAN
Definition variables.h:461
Here is the caller graph for this function:

◆ BCTypeToString()

const char * BCTypeToString ( BCType  type)

Helper function to convert BCType enum to a string representation.

Parameters
[in]typeThe BCType enum value.
Returns
Pointer to a constant string representing the BC type.

Definition at line 722 of file logging.c.

722 {
723 switch (type) {
724 // case DIRICHLET: return "DIRICHLET";
725 // case NEUMANN: return "NEUMANN";
726 case WALL: return "WALL";
727 case INLET: return "INLET";
728 case OUTLET: return "OUTLET";
729 case FARFIELD: return "FARFIELD";
730 case PERIODIC: return "PERIODIC";
731 case INTERFACE: return "INTERFACE";
732
733 // case CUSTOM: return "CUSTOM";
734 default: return "Unknown BC Type";
735 }
736}
@ INLET
Definition variables.h:217
@ INTERFACE
Definition variables.h:212
@ FARFIELD
Definition variables.h:218
@ OUTLET
Definition variables.h:216
@ PERIODIC
Definition variables.h:219
@ WALL
Definition variables.h:213
Here is the caller graph for this function:

◆ BCHandlerTypeToString()

const char * BCHandlerTypeToString ( BCHandlerType  handler_type)

Converts a BCHandlerType enum to its string representation.

Provides a descriptive string for a specific boundary condition implementation strategy. This is crucial for logging the exact behavior configured for a face.

Parameters
handler_typeThe BCHandlerType enum value (e.g., BC_HANDLER_WALL_NOSLIP).
Returns
A constant character string corresponding to the enum. Returns "UNKNOWN_HANDLER" if the enum value is not recognized.

Definition at line 748 of file logging.c.

748 {
749 switch (handler_type) {
750 // Wall & Symmetry Handlers
751 case BC_HANDLER_WALL_NOSLIP: return "noslip";
752 case BC_HANDLER_WALL_MOVING: return "moving";
753 case BC_HANDLER_SYMMETRY_PLANE: return "symmetry_plane";
754
755 // Inlet Handlers
756 case BC_HANDLER_INLET_CONSTANT_VELOCITY: return "constant_velocity";
757 case BC_HANDLER_INLET_PULSATILE_FLUX: return "pulsatile_flux";
758 case BC_HANDLER_INLET_PARABOLIC: return "parabolic";
759
760 // Outlet Handlers
761 case BC_HANDLER_OUTLET_CONSERVATION: return "conservation";
762 case BC_HANDLER_OUTLET_PRESSURE: return "pressure";
763
764 // Other Physical Handlers
765 case BC_HANDLER_FARFIELD_NONREFLECTING: return "nonreflecting";
766
767 // Multi-Block / Interface Handlers
768 case BC_HANDLER_PERIODIC_GEOMETRIC: return "geometric";
769 case BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX: return "constant flux";
770 case BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX: return "initial flux";
771 case BC_HANDLER_INTERFACE_OVERSET: return "overset";
772
773 // Default case
775 default: return "UNKNOWN_HANDLER";
776 }
777}
@ BC_HANDLER_INLET_PULSATILE_FLUX
Definition variables.h:239
@ BC_HANDLER_PERIODIC_GEOMETRIC
Definition variables.h:243
@ BC_HANDLER_INLET_PARABOLIC
Definition variables.h:236
@ BC_HANDLER_INLET_CONSTANT_VELOCITY
Definition variables.h:235
@ BC_HANDLER_PERIODIC_DRIVEN_INITIAL_FLUX
Definition variables.h:246
@ BC_HANDLER_INTERFACE_OVERSET
Definition variables.h:244
@ BC_HANDLER_PERIODIC_DRIVEN_CONSTANT_FLUX
Definition variables.h:245
@ BC_HANDLER_WALL_MOVING
Definition variables.h:233
@ BC_HANDLER_WALL_NOSLIP
Definition variables.h:232
@ BC_HANDLER_OUTLET_CONSERVATION
Definition variables.h:241
@ BC_HANDLER_FARFIELD_NONREFLECTING
Definition variables.h:240
@ BC_HANDLER_OUTLET_PRESSURE
Definition variables.h:242
@ BC_HANDLER_SYMMETRY_PLANE
Definition variables.h:234
@ BC_HANDLER_UNDEFINED
Definition variables.h:231
Here is the caller graph for this function:

◆ DualKSPMonitor()

PetscErrorCode DualKSPMonitor ( KSP  ksp,
PetscInt  it,
PetscReal  rnorm,
void *  ctx 
)

A custom KSP monitor that logs to a file and optionally to the console.

This function unconditionally calls the standard true residual monitor to log to a file viewer provided in the context. It also checks a flag in the context and, if true, calls the monitor again to log to standard output.

Parameters
kspThe Krylov subspace context.
itThe current iteration number.
rnormThe preconditioned residual norm.
ctxA pointer to the DualMonitorCtx structure.
Returns
PetscErrorCode 0 on success.

Definition at line 819 of file logging.c.

820{
821 DualMonitorCtx *monctx = (DualMonitorCtx*)ctx;
822 PetscErrorCode ierr;
823 PetscReal trnorm, relnorm;
824 Vec r;
825 char norm_buf[256];
826 PetscMPIInt rank;
827
828 PetscFunctionBeginUser;
829 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);
830
831 // 1. Calculate the true residual norm.
832 ierr = KSPBuildResidual(ksp, NULL, NULL, &r); CHKERRQ(ierr);
833 ierr = VecNorm(r, NORM_2, &trnorm); CHKERRQ(ierr);
834
835 // 2. On the first iteration, compute and store the norm of the RHS vector `b`.
836 if (it == 0) {
837 Vec b;
838 ierr = KSPGetRhs(ksp, &b); CHKERRQ(ierr);
839 ierr = VecNorm(b, NORM_2, &monctx->bnorm); CHKERRQ(ierr);
840 }
841
842 if(!rank){
843 // 3. Compute the relative norm and format the output string.
844 if (monctx->bnorm > 1.e-15) {
845 relnorm = trnorm / monctx->bnorm;
846 sprintf(norm_buf, "ts: %-5d | block: %-2d | iter: %-3d | Unprecond Norm: %12.5e | True Norm: %12.5e | Rel Norm: %12.5e",(int)monctx->step, (int)monctx->block_id, (int)it, (double)rnorm, (double)trnorm, (double)relnorm);
847 } else {
848 sprintf(norm_buf,"ts: %-5d | block: %-2d | iter: %-3d | Unprecond Norm: %12.5e | True Norm: %12.5e",(int)monctx->step, (int)monctx->block_id, (int)it, (double)rnorm, (double)trnorm);
849 }
850
851 // 4. Log to the file viewer (unconditionally).
852 if(monctx->file_handle){
853 ierr = PetscFPrintf(PETSC_COMM_SELF,monctx->file_handle,"%s\n", norm_buf); CHKERRQ(ierr);
854 }
855 // 5. Log to the console (conditionally).
856 if (monctx->log_to_console) {
857 PetscFPrintf(PETSC_COMM_SELF,stdout, "%s\n", norm_buf); CHKERRQ(ierr);
858 }
859
860 } //rank
861
862 PetscFunctionReturn(0);
863}
PetscBool log_to_console
Definition logging.h:58
PetscReal bnorm
Definition logging.h:59
PetscInt step
Definition logging.h:60
FILE * file_handle
Definition logging.h:57
PetscInt block_id
Definition logging.h:61
Context for a dual-purpose KSP monitor.
Definition logging.h:56
Here is the caller graph for this function:

◆ DualMonitorDestroy()

PetscErrorCode DualMonitorDestroy ( void **  ctx)

Destroys the DualMonitorCtx.

This function is passed to KSPMonitorSet to ensure the viewer is properly destroyed and the context memory is freed when the KSP is destroyed.

Parameters
ctxA pointer to the context pointer to be destroyed.
Returns
PetscErrorCode

Definition at line 787 of file logging.c.

788{
789 DualMonitorCtx *monctx = (DualMonitorCtx*)*ctx;
790 PetscErrorCode ierr;
791 PetscMPIInt rank;
792
793 PetscFunctionBeginUser;
794 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);
795 if(!rank && monctx->file_handle){
796 fclose(monctx->file_handle);
797 }
798
799 ierr = PetscFree(monctx); CHKERRQ(ierr);
800 *ctx = NULL;
801 PetscFunctionReturn(0);
802}
Here is the caller graph for this function:

◆ LOG_CONTINUITY_METRICS()

PetscErrorCode LOG_CONTINUITY_METRICS ( UserCtx user)

Logs continuity metrics for a single block to a file.

This function should be called for each block, once per timestep. It opens a central log file in append mode. To ensure the header is written only once, it checks if it is processing block 0 on the simulation's start step.

Parameters
userA pointer to the UserCtx for the specific block whose metrics are to be logged. The function accesses both global (SimCtx) and local (user->...) data.
Returns
PetscErrorCode 0 on success.

Definition at line 879 of file logging.c.

880{
881 PetscErrorCode ierr;
882 PetscMPIInt rank;
883 SimCtx *simCtx = user->simCtx; // Get the shared SimCtx
884 const PetscInt bi = user->_this; // Get this block's specific ID
885 const PetscInt ti = simCtx->step; // Get the current timestep
886
887 PetscFunctionBeginUser;
888 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
889
890 // Only rank 0 performs file I/O.
891 if (!rank) {
892 FILE *f;
893 char filen[128];
894 sprintf(filen, "%s/Continuity_Metrics.log",simCtx->log_dir);
895
896 // Open the log file in append mode.
897 f = fopen(filen, "a");
898 if (!f) {
899 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open log file: %s", filen);
900 }
901
902 // Write a header only for the very first block (bi=0) on the very
903 // first timestep (ti=StartStep + 1). This ensures it's written only once.
904 if (ti == simCtx->StartStep + 1 && bi == 0) {
905 PetscFPrintf(PETSC_COMM_SELF, f, "%-10s | %-6s | %-18s | %-30s | %-18s | %-18s | %-18s | %-18s\n",
906 "Timestep", "Block", "Max Divergence", "Max Divergence Location ([k][j][i]=idx)", "Sum(RHS)","Total Flux In", "Total Flux Out", "Net Flux");
907 PetscFPrintf(PETSC_COMM_SELF, f, "------------------------------------------------------------------------------------------------------------------------------------------\n");
908 }
909
910 // Prepare the data strings and values for the current block.
911 PetscReal net_flux = simCtx->FluxInSum - simCtx->FluxOutSum;
912 char location_str[64];
913 sprintf(location_str, "([%d][%d][%d] = %d)", (int)simCtx->MaxDivz, (int)simCtx->MaxDivy, (int)simCtx->MaxDivx, (int)simCtx->MaxDivFlatArg);
914
915 // Write the formatted line for the current block.
916 PetscFPrintf(PETSC_COMM_SELF, f, "%-10d | %-6d | %-18.10e | %-39s | %-18.10e | %-18.10e | %-18.10e | %-18.10e\n",
917 (int)ti,
918 (int)bi,
919 (double)simCtx->MaxDiv,
920 location_str,
921 (double)simCtx->summationRHS,
922 (double)simCtx->FluxInSum,
923 (double)simCtx->FluxOutSum,
924 (double)net_flux);
925
926 fclose(f);
927 }
928
929 PetscFunctionReturn(0);
930}
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:731
PetscReal FluxOutSum
Definition variables.h:658
PetscInt _this
Definition variables.h:741
PetscInt StartStep
Definition variables.h:595
PetscReal MaxDiv
Definition variables.h:705
PetscInt MaxDivx
Definition variables.h:706
PetscInt MaxDivy
Definition variables.h:706
PetscInt MaxDivz
Definition variables.h:706
char log_dir[PETSC_MAX_PATH_LEN]
Definition variables.h:609
PetscInt MaxDivFlatArg
Definition variables.h:706
PetscReal FluxInSum
Definition variables.h:658
PetscInt step
Definition variables.h:593
PetscReal summationRHS
Definition variables.h:704
The master context for the entire simulation.
Definition variables.h:585
Here is the caller graph for this function:

◆ ParticleLocationStatusToString()

const char * ParticleLocationStatusToString ( ParticleLocationStatus  level)

A function that outputs the name of the current level in the ParticleLocation enum.

Parameters
levelThe ParticleLocation enum value.
Returns
A constant character string corresponding to the enum. Returns "UNKNOWN_LEVEL" if the enum value is not recognized.

Definition at line 938 of file logging.c.

939{
940 switch (level) {
941 case NEEDS_LOCATION: return "NEEDS_LOCATION";
942 case ACTIVE_AND_LOCATED: return "ACTIVE_AND_LOCATED";
943 case MIGRATING_OUT: return "MIGRATING_OUT";
944 case LOST: return "LOST";
945 case UNINITIALIZED: return "UNINITIALIZED";
946 default: return "UNKNOWN_LEVEL";
947 }
948}
@ LOST
Definition variables.h:139
@ NEEDS_LOCATION
Definition variables.h:136
@ ACTIVE_AND_LOCATED
Definition variables.h:137
@ UNINITIALIZED
Definition variables.h:140
@ MIGRATING_OUT
Definition variables.h:138
Here is the caller graph for this function:

◆ PrintProgressBar()

void PrintProgressBar ( PetscInt  step,
PetscInt  startStep,
PetscInt  totalSteps,
PetscReal  currentTime 
)

Prints a progress bar to the console.

This function should only be called by the root process (rank 0). It uses a carriage return \r to overwrite the same line in the terminal, creating a dynamic progress bar.

Parameters
stepThe current step index from the loop (e.g., from 0 to N-1).
startStepThe global starting step number of the simulation.
totalStepsThe total number of steps to be run in this simulation instance.
currentTimeThe current simulation time to display.

Definition at line 1227 of file logging.c.

1228{
1229 if (totalSteps <= 0) return;
1230
1231 // --- Configuration ---
1232 const int barWidth = 50;
1233
1234 // --- Calculation ---
1235 // Calculate progress as a fraction from 0.0 to 1.0
1236 PetscReal progress = (PetscReal)(step - startStep + 1) / totalSteps;
1237 // Ensure progress doesn't exceed 1.0 due to floating point inaccuracies
1238 if (progress > 1.0) progress = 1.0;
1239
1240 int pos = (int)(barWidth * progress);
1241
1242 // --- Printing ---
1243 // Carriage return moves cursor to the beginning of the line
1244 PetscPrintf(PETSC_COMM_SELF, "\rProgress: [");
1245
1246 for (int i = 0; i < barWidth; ++i) {
1247 if (i < pos) {
1248 PetscPrintf(PETSC_COMM_SELF, "=");
1249 } else if (i == pos) {
1250 PetscPrintf(PETSC_COMM_SELF, ">");
1251 } else {
1252 PetscPrintf(PETSC_COMM_SELF, " ");
1253 }
1254 }
1255
1256 // Print percentage, step count, and current time
1257 PetscPrintf(PETSC_COMM_SELF, "] %3d%% (Step %ld/%ld, t=%.4f)",
1258 (int)(progress * 100.0),
1259 step + 1,
1260 startStep + totalSteps,
1261 currentTime);
1262
1263 // Flush the output buffer to ensure the bar is displayed immediately
1264 fflush(stdout);
1265}
Here is the caller graph for this function:

◆ ProfilingInitialize()

PetscErrorCode ProfilingInitialize ( SimCtx simCtx)

Initializes the custom profiling system using configuration from SimCtx.

This function sets up the internal data structures for tracking function performance. It reads the list of "critical functions" from the provided SimCtx and marks them for per-step logging at LOG_INFO level.

It should be called once at the beginning of the application, after CreateSimulationContext() but before the main time loop.

Parameters
simCtxThe master simulation context, which contains the list of critical function names to always log.
Returns
PetscErrorCode

Definition at line 1015 of file logging.c.

1016{
1017 PetscFunctionBeginUser;
1018 if (!simCtx) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "SimCtx cannot be null for ProfilingInitialize");
1019
1020 // Iterate through the list of critical functions provided in SimCtx
1021 for (PetscInt i = 0; i < simCtx->nCriticalFuncs; ++i) {
1022 PetscInt idx;
1023 const char *func_name = simCtx->criticalFuncs[i];
1024 PetscErrorCode ierr = _FindOrCreateEntry(func_name, &idx); CHKERRQ(ierr);
1025 g_profiler_registry[idx].always_log = PETSC_TRUE;
1026
1027 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Marked '%s' as a critical function for profiling.\n", func_name);
1028 }
1029 PetscFunctionReturn(0);
1030}
PetscBool always_log
Definition logging.c:960
static PetscErrorCode _FindOrCreateEntry(const char *func_name, PetscInt *idx)
Definition logging.c:969
static ProfiledFunction * g_profiler_registry
Definition logging.c:964
char ** criticalFuncs
Definition variables.h:710
PetscInt nCriticalFuncs
Definition variables.h:711
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ProfilingResetTimestepCounters()

PetscErrorCode ProfilingResetTimestepCounters ( void  )

Definition at line 1054 of file logging.c.

{
    PetscFunctionBeginUser;
    // Zero the per-step accumulators of every registered function so the next
    // timestep starts from a clean slate.
    // NOTE(review): the loop body (original source lines 1058-1059) was
    // dropped by the documentation extraction. Per the cross-references below
    // it presumably resets current_step_time and current_step_call_count —
    // confirm against logging.c.
    for (PetscInt i = 0; i < g_profiler_count; ++i) {
    }
    PetscFunctionReturn(0);
}
static PetscInt g_profiler_count
Definition logging.c:965
double current_step_time
Definition logging.c:956
long long current_step_call_count
Definition logging.c:958
Here is the caller graph for this function:

◆ ProfilingLogTimestepSummary()

PetscErrorCode ProfilingLogTimestepSummary ( PetscInt  step)

Logs the performance summary for the current timestep and resets timers.

Depending on the current log level, this function will print:

  • LOG_PROFILE: Timings for ALL functions called during the step.
  • LOG_INFO/LOG_DEBUG: Timings for only the "always log" functions.

It must be called once per timestep, typically at the end of the main loop. After logging, it resets the per-step counters and timers.

Parameters
stepThe current simulation step number, for logging context.
Returns
PetscErrorCode

Definition at line 1064 of file logging.c.

{
    // Per-timestep profiling report: prints timings gathered since the last
    // call, then resets the per-step counters.
    LogLevel log_level = get_log_level();
    PetscBool should_print = PETSC_FALSE;

    PetscFunctionBeginUser;

    // First pass: decide whether anything qualifies for printing, so the
    // summary banner is only emitted when at least one data line will follow.
    // NOTE(review): the outer gate requires log_level >= LOG_PROFILE, yet the
    // documented intent is that always_log ("critical") functions print at
    // LOG_INFO/LOG_DEBUG too — verify the LogLevel enum ordering makes that
    // reachable.
    if (log_level >= LOG_PROFILE) {
        for (PetscInt i = 0; i < g_profiler_count; ++i) {
            if (g_profiler_registry[i].current_step_call_count > 0) {
                if (log_level == LOG_PROFILE || g_profiler_registry[i].always_log) {
                    should_print = PETSC_TRUE;
                    break;
                }
            }
        }
    }

    if (should_print) {
        // NOTE(review): "%d" with a PetscInt 'step' is a format mismatch on
        // 64-bit-index PETSc builds; consider casting to int or using
        // PetscInt_FMT.
        PetscPrintf(PETSC_COMM_SELF, "[PROFILE] ----- Timestep %d Summary -----\n", step);
        for (PetscInt i = 0; i < g_profiler_count; ++i) {
            if (g_profiler_registry[i].current_step_call_count > 0) {
                // Same filter as the first pass: everything at LOG_PROFILE,
                // only the always_log subset otherwise.
                if (log_level == LOG_PROFILE || g_profiler_registry[i].always_log) {
                    PetscPrintf(PETSC_COMM_SELF, "[PROFILE] %-25s: %.6f s (%lld calls)\n",
                                g_profiler_registry[i].name,
                                g_profiler_registry[i].current_step_time,
                                g_profiler_registry[i].current_step_call_count);
                }
            }
        }
    }

    // Reset per-step counters for the next iteration
    // NOTE(review): the loop body (original source lines 1099-1100) was
    // dropped by the documentation extraction — confirm against logging.c.
    for (PetscInt i = 0; i < g_profiler_count; ++i) {
    }
    PetscFunctionReturn(0);
}
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ProfilingFinalize()

PetscErrorCode ProfilingFinalize ( SimCtx simCtx)

Finalizes the profiling exercise and builds a profiling summary, which is then printed to a log file.

Parameters
simCtxThe Simulation Context Structure that contains all the data regarding the simulation.
Returns
PetscErrorCode 0 on success.

Definition at line 1123 of file logging.c.

1124{
1125 PetscInt rank = simCtx->rank;
1126 PetscFunctionBeginUser;
1127 if (!rank) {
1128
1129 const char exec_mode_modifier[MAX_FILENAME_LENGTH];
1130 if(simCtx->exec_mode == EXEC_MODE_SOLVER) strcpy(exec_mode_modifier,"Solver");
1131 else if(simCtx->exec_mode == EXEC_MODE_POSTPROCESSOR) strcpy(exec_mode_modifier,"PostProcessor");
1132 //--- Step 0: Create a file viewer for log file
1133 FILE *f;
1134 char filen[128];
1135 sprintf(filen, "%s/ProfilingSummary_%s.log",simCtx->log_dir,exec_mode_modifier);
1136
1137 // Open the log file in write mode.
1138 f = fopen(filen,"w");
1139 if(!f){
1140 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Cannot Open log file: %s",filen);
1141 }
1142
1143 // --- Step 1: Sort the data for readability ---
1145
1146 // --- Step 2: Dynamically determine the width for the function name column ---
1147 PetscInt max_name_len = strlen("Function"); // Start with the header's length
1148 for (PetscInt i = 0; i < g_profiler_count; ++i) {
1149 if (g_profiler_registry[i].total_call_count > 0) {
1150 PetscInt len = strlen(g_profiler_registry[i].name);
1151 if (len > max_name_len) {
1152 max_name_len = len;
1153 }
1154 }
1155 }
1156 // Add a little padding
1157 max_name_len += 2;
1158
1159 // --- Step 3: Define fixed widths for numeric columns for consistent alignment ---
1160 const int time_width = 18;
1161 const int count_width = 15;
1162 const int avg_width = 22;
1163
1164 // --- Step 4: Print the formatted table ---
1165 PetscFPrintf(PETSC_COMM_SELF, f, "=================================================================================================================\n");
1166 PetscFPrintf(PETSC_COMM_SELF, f, " FINAL PROFILING SUMMARY (Sorted by Total Time)\n");
1167 PetscFPrintf(PETSC_COMM_SELF, f, "=================================================================================================================\n");
1168
1169 // Header Row
1170 PetscFPrintf(PETSC_COMM_SELF, f, "%-*s | %-*s | %-*s | %-*s\n",
1171 max_name_len, "Function",
1172 time_width, "Total Time (s)",
1173 count_width, "Call Count",
1174 avg_width, "Avg. Time/Call (ms)");
1175
1176 // Separator Line (dynamically sized)
1177 for (int i = 0; i < max_name_len; i++) PetscFPrintf(PETSC_COMM_SELF, f, "-");
1178 PetscFPrintf(PETSC_COMM_SELF, f, "-|-");
1179 for (int i = 0; i < time_width; i++) PetscFPrintf(PETSC_COMM_SELF, f, "-");
1180 PetscFPrintf(PETSC_COMM_SELF, f, "-|-");
1181 for (int i = 0; i < count_width; i++) PetscFPrintf(PETSC_COMM_SELF, f, "-");
1182 PetscFPrintf(PETSC_COMM_SELF, f, "-|-");
1183 for (int i = 0; i < avg_width; i++) PetscFPrintf(PETSC_COMM_SELF, f, "-");
1184 PetscFPrintf(PETSC_COMM_SELF, f, "\n");
1185
1186 // Data Rows
1187 for (PetscInt i = 0; i < g_profiler_count; ++i) {
1188 if (g_profiler_registry[i].total_call_count > 0) {
1189 double avg_time_ms = (g_profiler_registry[i].total_time / g_profiler_registry[i].total_call_count) * 1000.0;
1190 PetscFPrintf(PETSC_COMM_SELF, f, "%-*s | %*.*f | %*lld | %*.*f\n",
1191 max_name_len, g_profiler_registry[i].name,
1192 time_width, 6, g_profiler_registry[i].total_time,
1193 count_width, g_profiler_registry[i].total_call_count,
1194 avg_width, 6, avg_time_ms);
1195 PetscFPrintf(PETSC_COMM_SELF, f, "------------------------------------------------------------------------------------------------------------------\n");
1196 }
1197 }
1198 PetscFPrintf(PETSC_COMM_SELF, f, "==================================================================================================================\n");
1199
1200 fclose(f);
1201 }
1202
1203 // --- Final Cleanup ---
1204 PetscFree(g_profiler_registry);
1205 g_profiler_registry = NULL;
1206 g_profiler_count = 0;
1208 PetscFunctionReturn(0);
1209}
long long total_call_count
Definition logging.c:957
static PetscInt g_profiler_capacity
Definition logging.c:966
double total_time
Definition logging.c:955
static int _CompareProfiledFunctions(const void *a, const void *b)
Definition logging.c:1106
PetscMPIInt rank
Definition variables.h:588
#define MAX_FILENAME_LENGTH
Definition variables.h:491
@ EXEC_MODE_SOLVER
Definition variables.h:558
@ EXEC_MODE_POSTPROCESSOR
Definition variables.h:559
ExecutionMode exec_mode
Definition variables.h:603
Here is the call graph for this function:
Here is the caller graph for this function:

◆ _ProfilingStart()

void _ProfilingStart ( const char *  func_name)

Definition at line 1032 of file logging.c.

1033{
1034 PetscInt idx;
1035 if (_FindOrCreateEntry(func_name, &idx) != 0) return; // Fail silently
1036 PetscTime(&g_profiler_registry[idx].start_time);
1037}
Here is the call graph for this function:

◆ _ProfilingEnd()

void _ProfilingEnd ( const char *  func_name)

Definition at line 1039 of file logging.c.

{
    // Capture the stop time first so registry lookup cost is not billed to
    // the profiled function.
    double end_time;
    PetscTime(&end_time);

    PetscInt idx;
    if (_FindOrCreateEntry(func_name, &idx) != 0) return; // Fail silently

    // Accumulate the elapsed wall-clock time since the matching
    // _ProfilingStart() into both the lifetime total and the per-step total.
    double elapsed = end_time - g_profiler_registry[idx].start_time;
    g_profiler_registry[idx].total_time += elapsed;
    g_profiler_registry[idx].current_step_time += elapsed;
    // NOTE(review): original source lines 1050-1051 were dropped by the
    // documentation extraction; they presumably increment total_call_count
    // and current_step_call_count — confirm against logging.c.
}
double start_time
Definition logging.c:959
Here is the call graph for this function:

◆ LOG_FIELD_MIN_MAX()

PetscErrorCode LOG_FIELD_MIN_MAX ( UserCtx user,
const char *  fieldName 
)

Computes and logs the local and global min/max values of a 3-component vector field.

This utility function inspects a PETSc Vec associated with a DMDA and calculates the minimum and maximum values for each of its three components (e.g., x, y, z) both for the local data on the current MPI rank and for the entire global domain.

It uses the same "smart" logic as the flow solver, ignoring the padding nodes at the IM, JM, and KM boundaries of the grid. The results are printed to the standard output in a formatted, easy-to-read table.

Parameters
[in]userPointer to the user-defined context. Used for grid information (IM, JM, KM) and MPI rank.
[in]fieldNameA string descriptor for the field being analyzed (e.g., "Velocity", "Coordinates"). This is used for clear log output.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Computes and logs the local and global min/max values of a 3-component vector field.

This utility function inspects a PETSc Vec and calculates the minimum and maximum values for its components, both locally and globally.

It is "architecture-aware" and adjusts its iteration range to only include physically meaningful data points:

  • Cell-Centered Fields ("Ucat", "P"): It uses the "Shifted Index Architecture," iterating from index 1 to N-1 to exclude the ghost/tool values at indices 0 and N.
  • Node-Centered Fields ("Coordinates"): It iterates from index 0 to N-1, covering all physical nodes.
  • Face-Centered Fields ("Ucont"): It iterates from index 0 to N-1, covering all physical faces.

The results are printed to the standard output in a formatted, easy-to-read table.

Parameters
[in]userPointer to the user-defined context.
[in]fieldNameA string descriptor for the field being analyzed.
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1293 of file logging.c.

{
    // Computes and logs per-rank (local) and global min/max of a named field,
    // iterating only over physically meaningful indices for that field's
    // data layout (cell-, node-, or face-centered).
    PetscErrorCode ierr;
    PetscInt i, j, k;
    DMDALocalInfo info;

    Vec fieldVec = NULL;
    DM dm = NULL;
    PetscInt dof;            // number of components: 1 (scalar) or 3 (vector)
    char data_layout[20];    // human-readable layout tag, also drives loop bounds

    PetscFunctionBeginUser;

    // --- 1. Map string name to PETSc objects and determine data layout ---
    // NOTE: strcasecmp is POSIX (strings.h), not ISO C.
    if (strcasecmp(fieldName, "Ucat") == 0) {
        fieldVec = user->Ucat; dm = user->fda; dof = 3; strcpy(data_layout, "Cell-Centered");
    } else if (strcasecmp(fieldName, "P") == 0) {
        fieldVec = user->P; dm = user->da; dof = 1; strcpy(data_layout, "Cell-Centered");
    } else if (strcasecmp(fieldName, "Diffusivity") == 0) {
        fieldVec = user->Diffusivity; dm = user->da; dof = 1; strcpy(data_layout, "Cell-Centered");
    } else if (strcasecmp(fieldName, "DiffusivityGradient") == 0) {
        fieldVec = user->DiffusivityGradient; dm = user->fda; dof = 3; strcpy(data_layout, "Cell-Centered");
    } else if (strcasecmp(fieldName, "Ucont") == 0) {
        fieldVec = user->lUcont; dm = user->fda; dof = 3; strcpy(data_layout, "Face-Centered");
    } else if (strcasecmp(fieldName, "Coordinates") == 0) {
        // NOTE(review): the coordinate Vec is fetched from user->da but the
        // array is accessed through user->fda below — presumably fda is the
        // coordinate/vector DM associated with da; confirm, since a layout
        // mismatch here would corrupt the indexed access.
        ierr = DMGetCoordinates(user->da, &fieldVec); CHKERRQ(ierr);
        dm = user->fda; dof = 3; strcpy(data_layout, "Node-Centered");
    } else if (strcasecmp(fieldName,"Psi") == 0) {
        fieldVec = user->Psi; dm = user->da; dof = 1; strcpy(data_layout, "Cell-Centered"); // Assuming Psi is cell-centered
    } else {
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Field %s not recognized.", fieldName);
    }

    if (!fieldVec) {
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Vector for field '%s' is NULL.", fieldName);
    }
    if (!dm) {
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "DM for field '%s' is NULL.", fieldName);
    }

    ierr = DMDAGetLocalInfo(dm, &info); CHKERRQ(ierr);

    // --- 2. Define Architecture-Aware Loop Bounds ---
    PetscInt i_start, i_end, j_start, j_end, k_start, k_end;

    if (strcmp(data_layout, "Cell-Centered") == 0) {
        // For cell-centered data, the physical values are stored from index 1 to N-1.
        // We find the intersection of the rank's owned range [xs, xe) with the
        // physical data range [1, IM-1).
        i_start = PetscMax(info.xs, 1); i_end = PetscMin(info.xs + info.xm, user->IM);
        j_start = PetscMax(info.ys, 1); j_end = PetscMin(info.ys + info.ym, user->JM);
        k_start = PetscMax(info.zs, 1); k_end = PetscMin(info.zs + info.zm, user->KM);
    } else { // For Node- or Face-Centered data
        // The physical values are stored from index 0 to N-1.
        // We find the intersection of the rank's owned range [xs, xe) with the
        // physical data range [0, IM-1].
        i_start = PetscMax(info.xs, 0); i_end = PetscMin(info.xs + info.xm, user->IM);
        j_start = PetscMax(info.ys, 0); j_end = PetscMin(info.ys + info.ym, user->JM);
        k_start = PetscMax(info.zs, 0); k_end = PetscMin(info.zs + info.zm, user->KM);
    }

    // --- 3. Barrier for clean, grouped output ---
    ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
    if (user->simCtx->rank == 0) {
        PetscPrintf(PETSC_COMM_SELF, "\n--- Field Ranges: [%s] (Layout: %s) ---\n", fieldName, data_layout);
    }

    // --- 4. Branch on DoF and perform calculation with correct bounds ---
    if (dof == 1) {
        // Scalar field: track a single min/max pair per rank, then reduce.
        PetscReal localMin = PETSC_MAX_REAL, localMax = PETSC_MIN_REAL;
        PetscReal globalMin, globalMax;
        const PetscScalar ***array;

        ierr = DMDAVecGetArrayRead(dm, fieldVec, &array); CHKERRQ(ierr);
        for (k = k_start; k < k_end; k++) {
            for (j = j_start; j < j_end; j++) {
                for (i = i_start; i < i_end; i++) {
                    localMin = PetscMin(localMin, array[k][j][i]);
                    localMax = PetscMax(localMax, array[k][j][i]);
                }
            }
        }
        ierr = DMDAVecRestoreArrayRead(dm, fieldVec, &array); CHKERRQ(ierr);

        ierr = MPI_Allreduce(&localMin, &globalMin, 1, MPIU_REAL, MPI_MIN, PETSC_COMM_WORLD); CHKERRQ(ierr);
        ierr = MPI_Allreduce(&localMax, &globalMax, 1, MPIU_REAL, MPI_MAX, PETSC_COMM_WORLD); CHKERRQ(ierr);

        PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  [Rank %d] Local Range:  [ %11.4e , %11.4e ]\n", user->simCtx->rank, localMin, localMax);
        ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT); CHKERRQ(ierr);
        if (user->simCtx->rank == 0) {
            PetscPrintf(PETSC_COMM_SELF, "  Global Range: [ %11.4e , %11.4e ]\n", globalMin, globalMax);
        }

    } else if (dof == 3) {
        // Vector field: track component-wise min/max.
        Cmpnts localMin = {PETSC_MAX_REAL, PETSC_MAX_REAL, PETSC_MAX_REAL};
        Cmpnts localMax = {PETSC_MIN_REAL, PETSC_MIN_REAL, PETSC_MIN_REAL};
        Cmpnts globalMin, globalMax;
        const Cmpnts ***array;

        ierr = DMDAVecGetArrayRead(dm, fieldVec, &array); CHKERRQ(ierr);
        for (k = k_start; k < k_end; k++) {
            for (j = j_start; j < j_end; j++) {
                for (i = i_start; i < i_end; i++) {
                    localMin.x = PetscMin(localMin.x, array[k][j][i].x);
                    localMin.y = PetscMin(localMin.y, array[k][j][i].y);
                    localMin.z = PetscMin(localMin.z, array[k][j][i].z);
                    localMax.x = PetscMax(localMax.x, array[k][j][i].x);
                    localMax.y = PetscMax(localMax.y, array[k][j][i].y);
                    localMax.z = PetscMax(localMax.z, array[k][j][i].z);
                }
            }
        }
        ierr = DMDAVecRestoreArrayRead(dm, fieldVec, &array); CHKERRQ(ierr);

        // NOTE(review): reducing a Cmpnts with count=3 of MPIU_REAL assumes
        // Cmpnts is exactly three contiguous PetscReals with no padding —
        // consistent with its definition as a 3-component point, but worth
        // confirming if the struct ever changes.
        ierr = MPI_Allreduce(&localMin, &globalMin, 3, MPIU_REAL, MPI_MIN, PETSC_COMM_WORLD); CHKERRQ(ierr);
        ierr = MPI_Allreduce(&localMax, &globalMax, 3, MPIU_REAL, MPI_MAX, PETSC_COMM_WORLD); CHKERRQ(ierr);

        // NOTE(review): these three ierr assignments are never checked with
        // CHKERRQ (unlike the scalar branch's unchecked call) — consider
        // making the error handling consistent.
        ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  [Rank %d] Local X-Range:  [ %11.4e , %11.4e ]\n", user->simCtx->rank, localMin.x, localMax.x);
        ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  [Rank %d] Local Y-Range:  [ %11.4e , %11.4e ]\n", user->simCtx->rank, localMin.y, localMax.y);
        ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  [Rank %d] Local Z-Range:  [ %11.4e , %11.4e ]\n", user->simCtx->rank, localMin.z, localMax.z);
        ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT); CHKERRQ(ierr);

        if (user->simCtx->rank == 0) {
            PetscPrintf(PETSC_COMM_SELF, "  [Global] X-Range: [ %11.4e , %11.4e ]\n", globalMin.x, globalMax.x);
            PetscPrintf(PETSC_COMM_SELF, "  [Global] Y-Range: [ %11.4e , %11.4e ]\n", globalMin.y, globalMax.y);
            PetscPrintf(PETSC_COMM_SELF, "  [Global] Z-Range: [ %11.4e , %11.4e ]\n", globalMin.z, globalMax.z);
        }

    } else {
        // %D is PETSc's (legacy) format for PetscInt.
        SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "LogFieldStatistics only supports fields with 1 or 3 components, but field '%s' has %D.", fieldName, dof);
    }

    // --- 5. Final barrier for clean output ordering ---
    ierr = MPI_Barrier(PETSC_COMM_WORLD); CHKERRQ(ierr);
    if (user->simCtx->rank == 0) {
        PetscPrintf(PETSC_COMM_SELF, "--------------------------------------------\n\n");
    }

    PetscFunctionReturn(0);
}
PetscInt KM
Definition variables.h:737
Vec DiffusivityGradient
Definition variables.h:759
Vec Ucat
Definition variables.h:755
PetscInt JM
Definition variables.h:737
Vec lUcont
Definition variables.h:755
Vec Diffusivity
Definition variables.h:758
PetscInt IM
Definition variables.h:737
Vec Psi
Definition variables.h:801
A 3D point or vector with PetscScalar components.
Definition variables.h:100
Here is the caller graph for this function:

◆ LOG_FIELD_ANATOMY()

PetscErrorCode LOG_FIELD_ANATOMY ( UserCtx user,
const char *  field_name,
const char *  stage_name 
)

Logs the anatomy of a specified field at key boundary locations, respecting the solver's specific grid and variable architecture.

This intelligent diagnostic function inspects a PETSc Vec and prints its values at critical boundary locations (-Xi/+Xi, -Eta/+Eta, -Zeta/+Zeta). It is "architecture-aware":

  • Cell-Centered Fields ("Ucat", "P"): It correctly applies the "Shifted Index Architecture," where the value for geometric Cell i is stored at array index i+1. It labels the output to clearly distinguish between true physical values and ghost values.
  • Face-Centered Fields ("Ucont"): It uses a staggered mapping — node-like along each component's own direction (the value for the face at Node i is stored at index i) and cell-like in the transverse directions.
  • Node-Centered Fields ("Coordinates"): It uses a direct index mapping, where the value for Node i is stored at index i.

The output is synchronized across MPI ranks to ensure readability and focuses on a slice through the center of the domain to be concise.

Parameters
user — A pointer to the UserCtx structure containing the DMs and Vecs.
field_name — A string identifier for the field to log (e.g., "Ucat", "P", "Ucont", "Coordinates").
stage_name — A string identifier for the current simulation stage (e.g., "After Advection").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

This intelligent diagnostic function inspects a PETSc Vec and prints its values at critical boundary locations (-Xi/+Xi, -Eta/+Eta, -Zeta/+Zeta). It is "architecture-aware":

  • Cell-Centered Fields ("Ucat", "P"): Correctly applies the "Shifted Index Architecture," where the value for geometric Cell i is stored at array index i+1.
  • Face-Centered Fields ("Ucont", "Csi"): Recognizes the staggered nature. For example, an X-face-centered field is treated as node-centered in the I-direction but cell-centered in the J and K directions. For vector fields like "Ucont", each component is handled with its appropriate directional anatomy.
  • Node-Centered Fields ("Coordinates"): Uses a direct index mapping, where the value for Node i is stored at index i.

The output is synchronized across MPI ranks and focuses on a slice through the center of the domain to be concise.

Parameters
user — A pointer to the UserCtx structure containing the DMs and Vecs.
field_name — A string identifier for the field to log (e.g., "Ucat", "P", "Ucont", "Coordinates").
stage_name — A string identifier for the current simulation stage (e.g., "After Advection").
Returns
PetscErrorCode Returns 0 on success, non-zero on failure.

Definition at line 1460 of file logging.c.

/* Logs the values of a named field at the six domain boundaries, choosing the
   index convention (shifted cell-centered, staggered face-centered, or direct
   node-centered) from the field's name. Output is MPI-synchronized. */
1461{
1462 PetscErrorCode ierr;
1463 DMDALocalInfo info;
1464 PetscMPIInt rank;
1465
1466 Vec vec_local = NULL;
1467 DM dm = NULL;
1468 PetscInt dof = 0;
1469 char data_layout[20];
1470 char dominant_dir = '\0'; // 'x', 'y', 'z' for face-centered, 'm' for mixed (Ucont)
1471
1472 PetscFunctionBeginUser;
1473 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1474
1475 // --- 1. Map string name to PETSc objects and determine data layout ---
1476 if (strcasecmp(field_name, "Ucat") == 0) {
1477 vec_local = user->lUcat; dm = user->fda; dof = 3; strcpy(data_layout, "Cell-Centered");
1478 } else if (strcasecmp(field_name, "P") == 0) {
1479 vec_local = user->lP; dm = user->da; dof = 1; strcpy(data_layout, "Cell-Centered");
1480 } else if (strcasecmp(field_name, "Diffusivity") == 0) {
1481 vec_local = user->lDiffusivity; dm = user->da; dof = 1; strcpy(data_layout, "Cell-Centered");
1482 } else if (strcasecmp(field_name, "DiffusivityGradient") == 0) {
1483 vec_local = user->lDiffusivityGradient; dm = user->fda; dof = 3; strcpy(data_layout, "Cell-Centered");
1484 } else if (strcasecmp(field_name, "Psi") == 0) {
1485 vec_local = user->lPsi; dm = user->da; dof = 1; strcpy(data_layout, "Cell-Centered");
1486 } else if (strcasecmp(field_name, "Center-Coordinates") == 0) {
1487 vec_local = user->lCent; dm = user->fda; dof = 3; strcpy(data_layout, "Cell-Centered");
1488 } else if (strcasecmp(field_name, "Ucont") == 0) {
1489 vec_local = user->lUcont; dm = user->fda; dof = 3; strcpy(data_layout, "Face-Centered"); dominant_dir = 'm'; // Mixed
1490 } else if (strcasecmp(field_name, "Csi") == 0 || strcasecmp(field_name, "X-Face-Centers") == 0) {
1491 vec_local = (strcasecmp(field_name, "Csi") == 0) ? user->lCsi : user->Centx;
1492 dm = user->fda; dof = 3; strcpy(data_layout, "Face-Centered"); dominant_dir = 'x';
1493 } else if (strcasecmp(field_name, "Eta") == 0 || strcasecmp(field_name, "Y-Face-Centers") == 0) {
1494 vec_local = (strcasecmp(field_name, "Eta") == 0) ? user->lEta : user->Centy;
1495 dm = user->fda; dof = 3; strcpy(data_layout, "Face-Centered"); dominant_dir = 'y';
1496 } else if (strcasecmp(field_name, "Zet") == 0 || strcasecmp(field_name, "Z-Face-Centers") == 0) {
1497 vec_local = (strcasecmp(field_name, "Zet") == 0) ? user->lZet : user->Centz;
1498 dm = user->fda; dof = 3; strcpy(data_layout, "Face-Centered"); dominant_dir = 'z';
1499 } else if (strcasecmp(field_name, "Coordinates") == 0) {
1500 ierr = DMGetCoordinatesLocal(user->da, &vec_local); CHKERRQ(ierr);
1501 dm = user->fda; dof = 3; strcpy(data_layout, "Node-Centered");
1502 } else if (strcasecmp(field_name, "CornerField")== 0){
1503 vec_local = user->lCellFieldAtCorner; strcpy(data_layout, "Node-Centered");
1504 PetscInt bs = 1;
1505 ierr = VecGetBlockSize(user->CellFieldAtCorner, &bs); CHKERRQ(ierr);
1506 dof = bs;
1507 if(dof == 1) dm = user->da;
1508 else dm = user->fda;
1509 } else {
1510 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Unknown field name for LOG_FIELD_ANATOMY: %s", field_name);
1511 }
1512
1513 // --- 2. Get Grid Info and Array Pointers ---
1514 ierr = DMDAGetLocalInfo(dm, &info); CHKERRQ(ierr);
1515
/* NOTE(review): the PetscBarrier result below is assigned to ierr but not
   CHKERRQ-checked, and PetscPrintf's return is dropped entirely — confirm
   this is intentional for a diagnostics-only routine. */
1516 ierr = PetscBarrier(NULL);
1517 PetscPrintf(PETSC_COMM_WORLD, "\n--- Field Anatomy Log: [%s] | Stage: [%s] | Layout: [%s] ---\n", field_name, stage_name, data_layout);
1518
1519 // Global physical dimensions (number of cells)
1520 PetscInt im_phys = user->IM;
1521 PetscInt jm_phys = user->JM;
1522 PetscInt km_phys = user->KM;
1523
1524 // Slice through the center of the local domain
/* NOTE(review): for a minimal local extent (e.g. xs=0, xm=1) these midpoints
   evaluate to -1, an out-of-range index — presumably local subdomains are
   always larger than one cell per direction; verify against the domain
   decomposition settings. */
1525 PetscInt i_mid = (PetscInt)(info.xs + 0.5 * info.xm) - 1;
1526 PetscInt j_mid = (PetscInt)(info.ys + 0.5 * info.ym) - 1;
1527 PetscInt k_mid = (PetscInt)(info.zs + 0.5 * info.zm) - 1;
1528
1529 // --- 3. Print Boundary Information based on Data Layout ---
1530
1531 // ======================================================================
1532 // === CASE 1: Cell-Centered Fields (Ucat, P) - USES SHIFTED INDEX ===
1533 // ======================================================================
/* l_arr is fetched as void* and cast per access: PetscReal*** when dof==1,
   Cmpnts*** when dof==3. The shifted-index convention means array index 0 is
   a ghost and index 1 holds the first physical cell's value. */
1534 if (strcmp(data_layout, "Cell-Centered") == 0) {
1535 const void *l_arr;
1536 ierr = DMDAVecGetArrayRead(dm, vec_local, (void*)&l_arr); CHKERRQ(ierr);
1537
1538
1539 // --- I-Direction Boundaries ---
1540 if (info.xs == 0) { // Rank on -Xi boundary
1541 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Ghost for Cell[k][j][0]) = ", rank, 0);
1542 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[k_mid][j_mid][0]);
1543 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[k_mid][j_mid][0].x, ((const Cmpnts***)l_arr)[k_mid][j_mid][0].y, ((const Cmpnts***)l_arr)[k_mid][j_mid][0].z);
1544
1545 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Value for Cell[k][j][0]) = ", rank, 1);
1546 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[k_mid][j_mid][1]);
1547 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[k_mid][j_mid][1].x, ((const Cmpnts***)l_arr)[k_mid][j_mid][1].y, ((const Cmpnts***)l_arr)[k_mid][j_mid][1].z);
1548 }
1549 if (info.xs + info.xm == info.mx) { // Rank on +Xi boundary
1550 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Value for Cell[k][j][%d]) = ", rank, im_phys - 1, im_phys - 2);
1551 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[k_mid][j_mid][im_phys - 1]);
1552 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[k_mid][j_mid][im_phys - 1].x, ((const Cmpnts***)l_arr)[k_mid][j_mid][im_phys - 1].y, ((const Cmpnts***)l_arr)[k_mid][j_mid][im_phys - 1].z);
1553
1554 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Ghost for Cell[k][j][%d]) = ", rank, im_phys, im_phys - 2);
1555 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[k_mid][j_mid][im_phys]);
1556 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[k_mid][j_mid][im_phys].x, ((const Cmpnts***)l_arr)[k_mid][j_mid][im_phys].y, ((const Cmpnts***)l_arr)[k_mid][j_mid][im_phys].z);
1557 }
1558
1559 // --- J-Direction Boundaries ---
1560 if (info.ys == 0) { // Rank on -Eta boundary
1561 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Ghost for Cell[k][0][i]) = ", rank, 0);
1562 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[k_mid][0][i_mid]);
1563 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[k_mid][0][i_mid].x, ((const Cmpnts***)l_arr)[k_mid][0][i_mid].y, ((const Cmpnts***)l_arr)[k_mid][0][i_mid].z);
1564
1565 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Value for Cell[k][0][i]) = ", rank, 1);
1566 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[k_mid][1][i_mid]);
1567 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[k_mid][1][i_mid].x, ((const Cmpnts***)l_arr)[k_mid][1][i_mid].y, ((const Cmpnts***)l_arr)[k_mid][1][i_mid].z);
1568 }
1569
1570 if (info.ys + info.ym == info.my) { // Rank on +Eta boundary
1571 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Value for Cell[k][%d][i]) = ", rank, jm_phys - 1, jm_phys - 2);
1572 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[k_mid][jm_phys - 1][i_mid]);
1573 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[k_mid][jm_phys - 1][i_mid].x, ((const Cmpnts***)l_arr)[k_mid][jm_phys - 1][i_mid].y, ((const Cmpnts***)l_arr)[k_mid][jm_phys - 1][i_mid].z);
1574
1575 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Ghost for Cell[k][%d][i]) = ", rank, jm_phys, jm_phys - 2);
1576 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[k_mid][jm_phys][i_mid]);
1577 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[k_mid][jm_phys][i_mid].x, ((const Cmpnts***)l_arr)[k_mid][jm_phys][i_mid].y, ((const Cmpnts***)l_arr)[k_mid][jm_phys][i_mid].z);
1578 }
1579
1580 // --- K-Direction Boundaries ---
1581 if (info.zs == 0) { // Rank on -Zeta boundary
1582 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (Ghost for Cell[0][j][i]) = ", rank, 0);
1583 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[0][j_mid][i_mid]);
1584 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[0][j_mid][i_mid].x, ((const Cmpnts***)l_arr)[0][j_mid][i_mid].y, ((const Cmpnts***)l_arr)[0][j_mid][i_mid].z);
1585 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (Value for Cell[0][j][i]) = ", rank, 1);
1586 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[1][j_mid][i_mid]);
1587 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[1][j_mid][i_mid].x, ((const Cmpnts***)l_arr)[1][j_mid][i_mid].y, ((const Cmpnts***)l_arr)[1][j_mid][i_mid].z);
1588 }
1589 if (info.zs + info.zm == info.mz) { // Rank on +Zeta boundary
1590 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (Value for Cell[%d][j][i]) = ", rank, km_phys - 1, km_phys - 2);
1591 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[km_phys - 1][j_mid][i_mid]);
1592 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[km_phys - 1][j_mid][i_mid].x, ((const Cmpnts***)l_arr)[km_phys - 1][j_mid][i_mid].y, ((const Cmpnts***)l_arr)[km_phys - 1][j_mid][i_mid].z);
1593 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (Ghost for Cell[%d][j][i]) = ", rank, km_phys, km_phys - 2);
1594 if(dof==1) PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f)\n", ((const PetscReal***)l_arr)[km_phys][j_mid][i_mid]);
1595 else PetscSynchronizedPrintf(PETSC_COMM_WORLD, "(%.5f, %.5f, %.5f)\n", ((const Cmpnts***)l_arr)[km_phys][j_mid][i_mid].x, ((const Cmpnts***)l_arr)[km_phys][j_mid][i_mid].y, ((const Cmpnts***)l_arr)[km_phys][j_mid][i_mid].z);
1596 }
1597 ierr = DMDAVecRestoreArrayRead(dm, vec_local, (void*)&l_arr); CHKERRQ(ierr);
1598 }
1599 // ======================================================================
1600 // === CASE 2: Face-Centered Fields - NUANCED DIRECTIONAL LOGIC ===
1601 // ======================================================================
/* Face-centered fields are node-like along their dominant direction (face at
   Node i stored at index i) and cell-like (shifted) in the other two.
   dominant_dir 'm' means Ucont: only the component normal to each boundary
   is printed. */
1602 else if (strcmp(data_layout, "Face-Centered") == 0) {
1603 const Cmpnts ***l_arr;
1604 ierr = DMDAVecGetArrayRead(dm, vec_local, (void*)&l_arr); CHKERRQ(ierr);
1605
1606 // --- I-Direction Boundaries ---
1607 if (info.xs == 0) { // Rank on -Xi boundary
1608 if (dominant_dir == 'x') { // Node-like in I-dir
1609 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (First Phys. X-Face) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[k_mid][j_mid][0].x, l_arr[k_mid][j_mid][0].y, l_arr[k_mid][j_mid][0].z);
1610 } else if (dominant_dir == 'y' || dominant_dir == 'z') { // Cell-like in I-dir
1611 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Ghost for Cell[k][j][0]) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[k_mid][j_mid][0].x, l_arr[k_mid][j_mid][0].y, l_arr[k_mid][j_mid][0].z);
1612 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Value for Cell[k][j][0]) = (%.5f, %.5f, %.5f)\n", rank, 1, l_arr[k_mid][j_mid][1].x, l_arr[k_mid][j_mid][1].y, l_arr[k_mid][j_mid][1].z);
1613 } else if (dominant_dir == 'm') { // Ucont: Mixed
1614 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: u-comp @ Idx %2d (1st X-Face) = %.5f\n", rank, 0, l_arr[k_mid][j_mid][0].x);
1615 }
1616 }
1617 if (info.xs + info.xm == info.mx) { // Rank on +Xi boundary
1618 if (dominant_dir == 'x') { // Node-like in I-dir
1619 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Last Phys. X-Face) = (%.5f, %.5f, %.5f)\n", rank, im_phys - 1, l_arr[k_mid][j_mid][im_phys - 1].x, l_arr[k_mid][j_mid][im_phys-1].y, l_arr[k_mid][j_mid][im_phys - 1].z);
1620 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Ghost Location) = (%.5f, %.5f, %.5f)\n", rank, im_phys, l_arr[k_mid][j_mid][im_phys].x, l_arr[k_mid][j_mid][im_phys].y, l_arr[k_mid][j_mid][im_phys].z);
1621 } else if (dominant_dir == 'y' || dominant_dir == 'z') { // Cell-like in I-dir
1622 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Value for Cell[k][j][%d]) = (%.5f, %.5f, %.5f)\n", rank, im_phys - 1, im_phys - 2, l_arr[k_mid][j_mid][im_phys - 1].x, l_arr[k_mid][j_mid][im_phys - 1].y, l_arr[k_mid][j_mid][im_phys-1].z);
1623 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Ghost for Cell[k][j][%d]) = (%.5f, %.5f, %.5f)\n", rank, im_phys, im_phys - 2, l_arr[k_mid][j_mid][im_phys].x, l_arr[k_mid][j_mid][im_phys].y, l_arr[k_mid][j_mid][im_phys].z);
1624 } else if (dominant_dir == 'm') { // Ucont: Mixed
1625 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: u-comp @ Idx %2d (Last X-Face) = %.5f\n", rank, im_phys - 1, l_arr[k_mid][j_mid][im_phys - 1].x);
1626 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: u-comp @ Idx %2d (Ghost Location) = %.5f\n", rank, im_phys, l_arr[k_mid][j_mid][im_phys].x);
1627 }
1628 }
1629
1630 // --- J-Direction Boundaries ---
1631 if (info.ys == 0) { // Rank on -Eta boundary
1632 if (dominant_dir == 'y') { // Node-like in J-dir
1633 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (First Phys. Y-Face) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[k_mid][0][i_mid].x, l_arr[k_mid][0][i_mid].y, l_arr[k_mid][0][i_mid].z);
1634 } else if (dominant_dir == 'x' || dominant_dir == 'z') { // Cell-like in J-dir
1635 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Ghost for Cell[k][0][i]) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[k_mid][0][i_mid].x, l_arr[k_mid][0][i_mid].y, l_arr[k_mid][0][i_mid].z);
1636 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Value for Cell[k][0][i]) = (%.5f, %.5f, %.5f)\n", rank, 1, l_arr[k_mid][1][i_mid].x, l_arr[k_mid][1][i_mid].y, l_arr[k_mid][1][i_mid].z);
1637 } else if (dominant_dir == 'm') { // Ucont: Mixed
1638 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: v-comp @ Jdx %2d (1st Y-Face) = %.5f\n", rank, 0, l_arr[k_mid][0][i_mid].y);
1639 }
1640 }
1641 if (info.ys + info.ym == info.my) { // Rank on +Eta boundary
1642 if (dominant_dir == 'y') { // Node-like in J-dir
1643 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Last Phys. Y-Face) = (%.5f, %.5f, %.5f)\n", rank, jm_phys - 1, l_arr[k_mid][jm_phys - 1][i_mid].x, l_arr[k_mid][jm_phys - 1][i_mid].y, l_arr[k_mid][jm_phys - 1][i_mid].z);
1644 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Ghost Location) = (%.5f, %.5f, %.5f)\n", rank, jm_phys, l_arr[k_mid][jm_phys][i_mid].x, l_arr[k_mid][jm_phys][i_mid].y, l_arr[k_mid][jm_phys][i_mid].z);
1645 } else if (dominant_dir == 'x' || dominant_dir == 'z') { // Cell-like in J-dir
1646 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Value for Cell[k][%d][i]) = (%.5f, %.5f, %.5f)\n", rank, jm_phys-1, jm_phys-2, l_arr[k_mid][jm_phys - 1][i_mid].x, l_arr[k_mid][jm_phys - 1][i_mid].y, l_arr[k_mid][jm_phys - 1][i_mid].z);
1647 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Ghost for Cell[k][%d][i]) = (%.5f, %.5f, %.5f)\n", rank, jm_phys, jm_phys-2, l_arr[k_mid][jm_phys][i_mid].x, l_arr[k_mid][jm_phys][i_mid].y, l_arr[k_mid][jm_phys][i_mid].z);
1648 } else if (dominant_dir == 'm') { // Ucont: Mixed
1649 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: v-comp @ Jdx %2d (Last Y-Face) = %.5f\n", rank, jm_phys - 1, l_arr[k_mid][jm_phys - 1][i_mid].y);
1650 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: v-comp @ Jdx %2d (Ghost Location) = %.5f\n", rank, jm_phys, l_arr[k_mid][jm_phys][i_mid].y);
1651 }
1652 }
1653
1654 // --- K-Direction Boundaries ---
1655 if (info.zs == 0) { // Rank on -Zeta boundary
1656 if (dominant_dir == 'z') { // Node-like in K-dir
1657 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (First Phys. Z-Face) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[0][j_mid][i_mid].x, l_arr[0][j_mid][i_mid].y, l_arr[0][j_mid][i_mid].z);
1658 } else if (dominant_dir == 'x' || dominant_dir == 'y') { // Cell-like in K-dir
1659 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (Ghost for Cell[0][j][i]) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[0][j_mid][i_mid].x, l_arr[0][j_mid][i_mid].y, l_arr[0][j_mid][i_mid].z);
1660 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (Value for Cell[0][j][i]) = (%.5f, %.5f, %.5f)\n", rank, 1, l_arr[1][j_mid][i_mid].x, l_arr[1][j_mid][i_mid].y, l_arr[1][j_mid][i_mid].z);
1661 } else if (dominant_dir == 'm') { // Ucont: Mixed
1662 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: w-comp @ Idx %2d (1st Z-Face) = %.5f\n", rank, 0, l_arr[0][j_mid][i_mid].z);
1663 }
1664 }
1665 if (info.zs + info.zm == info.mz) { // Rank on +Zeta boundary
1666 if (dominant_dir == 'z') { // Node-like in K-dir
1667 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Idx %2d (Last Phys. Z-Face) = (%.5f, %.5f, %.5f)\n", rank, km_phys - 1, l_arr[km_phys - 1][j_mid][i_mid].x, l_arr[km_phys - 1][j_mid][i_mid].y, l_arr[km_phys - 1][j_mid][i_mid].z);
1668 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Idx %2d (Ghost Location) = (%.5f, %.5f, %.5f)\n", rank, km_phys, l_arr[km_phys][j_mid][i_mid].x, l_arr[km_phys][j_mid][i_mid].y, l_arr[km_phys][j_mid][i_mid].z);
1669 } else if (dominant_dir == 'x' || dominant_dir == 'y') { // Cell-like in K-dir
1670 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Idx %2d (Value for Cell[%d][j][i]) = (%.5f, %.5f, %.5f)\n", rank, km_phys-1, km_phys-2, l_arr[km_phys-1][j_mid][i_mid].x, l_arr[km_phys-1][j_mid][i_mid].y, l_arr[km_phys - 1][j_mid][i_mid].z);
1671 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Idx %2d (Ghost for Cell[%d][j][i]) = (%.5f, %.5f, %.5f)\n", rank, km_phys, km_phys-2, l_arr[km_phys][j_mid][i_mid].x, l_arr[km_phys][j_mid][i_mid].y, l_arr[km_phys][j_mid][i_mid].z);
1672 } else if (dominant_dir == 'm') { // Ucont: Mixed
1673 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: w-comp @ Idx %2d (Last Z-Face) = %.5f\n", rank, km_phys - 1, l_arr[km_phys - 1][j_mid][i_mid].z);
1674 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: w-comp @ Idx %2d (Ghost Loc.) = %.5f\n", rank, km_phys, l_arr[km_phys][j_mid][i_mid].z);
1675
1676 }
1677 }
1678 ierr = DMDAVecRestoreArrayRead(dm, vec_local, (void*)&l_arr); CHKERRQ(ierr);
1679 }
1680 // ======================================================================
1681 // === CASE 3: Node-Centered Fields - USES DIRECT INDEX ===
1682 // ======================================================================
/* NOTE(review): node-centered access always casts to Cmpnts***, yet the
   "CornerField" branch above can select dof==1 with the scalar DA — confirm
   that a scalar corner field is never routed through this case. */
1683 else if (strcmp(data_layout, "Node-Centered") == 0) {
1684 const Cmpnts ***l_arr;
1685 ierr = DMDAVecGetArrayRead(dm, vec_local, (void*)&l_arr); CHKERRQ(ierr);
1686
1687 // --- I-Direction Boundaries ---
1688 if (info.xs == 0) {
1689 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (First Phys. Node) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[k_mid][j_mid][0].x, l_arr[k_mid][j_mid][0].y, l_arr[k_mid][j_mid][0].z);
1690 }
1691 if (info.xs + info.xm == info.mx) {
1692 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Last Phys. Node) = (%.5f, %.5f, %.5f)\n", rank, im_phys - 1, l_arr[k_mid][j_mid][im_phys - 1].x, l_arr[k_mid][j_mid][im_phys - 1].y, l_arr[k_mid][j_mid][im_phys - 1].z);
1693 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, I-DIR]: Idx %2d (Unused/Ghost Loc) = (%.5f, %.5f, %.5f)\n", rank, im_phys, l_arr[k_mid][j_mid][im_phys].x, l_arr[k_mid][j_mid][im_phys].y, l_arr[k_mid][j_mid][im_phys].z);
1694 }
1695 // --- J-Direction Boundaries ---
1696 if (info.ys == 0) {
1697 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (First Phys. Node) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[k_mid][0][i_mid].x, l_arr[k_mid][0][i_mid].y, l_arr[k_mid][0][i_mid].z);
1698 }
1699 if (info.ys + info.ym == info.my) {
1700 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Last Phys. Node) = (%.5f, %.5f, %.5f)\n", rank, jm_phys - 1, l_arr[k_mid][jm_phys - 1][i_mid].x, l_arr[k_mid][jm_phys - 1][i_mid].y, l_arr[k_mid][jm_phys - 1][i_mid].z);
1701 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, J-DIR]: Jdx %2d (Unused/Ghost Loc) = (%.5f, %.5f, %.5f)\n", rank, jm_phys, l_arr[k_mid][jm_phys][i_mid].x, l_arr[k_mid][jm_phys][i_mid].y, l_arr[k_mid][jm_phys][i_mid].z);
1702 }
1703 // --- K-Direction Boundaries ---
1704 if (info.zs == 0) {
1705 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (First Phys. Node) = (%.5f, %.5f, %.5f)\n", rank, 0, l_arr[0][j_mid][i_mid].x, l_arr[0][j_mid][i_mid].y, l_arr[0][j_mid][i_mid].z);
1706 }
1707 if(info.zs + info.zm == info.mz) {
1708 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (Last Phys. Node) = (%.5f, %.5f, %.5f)\n", rank, km_phys - 1, l_arr[km_phys - 1][j_mid][i_mid].x, l_arr[km_phys - 1][j_mid][i_mid].y, l_arr[km_phys - 1][j_mid][i_mid].z);
1709 PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[Rank %d, K-DIR]: Kdx %2d (Unused/Ghost Loc) = (%.5f, %.5f, %.5f)\n", rank, km_phys, l_arr[km_phys][j_mid][i_mid].x, l_arr[km_phys][j_mid][i_mid].y, l_arr[km_phys][j_mid][i_mid].z);
1710 }
1711 ierr = DMDAVecRestoreArrayRead(dm, vec_local, (void*)&l_arr); CHKERRQ(ierr);
1712 }
1713 else {
1714 SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "LOG_FIELD_ANATOMY encountered an unknown data layout: %s", data_layout);
1715 }
1716
1717 ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT); CHKERRQ(ierr);
/* NOTE(review): final PetscBarrier result is not CHKERRQ-checked (same as above). */
1718 ierr = PetscBarrier(NULL);
1719 PetscFunctionReturn(0);
1720}
Vec lDiffusivityGradient
Definition variables.h:759
Vec lCent
Definition variables.h:776
Vec lZet
Definition variables.h:776
Vec lCellFieldAtCorner
Definition variables.h:764
Vec lPsi
Definition variables.h:801
Vec lCsi
Definition variables.h:776
Vec CellFieldAtCorner
Definition variables.h:764
Vec lUcat
Definition variables.h:755
Vec lEta
Definition variables.h:776
Vec lDiffusivity
Definition variables.h:758
Here is the caller graph for this function:

◆ LOG_INTERPOLATION_ERROR()

PetscErrorCode LOG_INTERPOLATION_ERROR ( UserCtx user)

Logs the interpolation error between the analytical and computed solutions.

Definition at line 1727 of file logging.c.

1728{
1729 SimCtx *simCtx = user->simCtx;
1730 PetscErrorCode ierr;
1731 DM swarm = user->swarm;
1732 Vec positionVec, analyticalvelocityVec, velocityVec, errorVec;
1733 PetscReal Interpolation_error = 0.0;
1734 PetscReal Maximum_Interpolation_error = 0.0;
1735 PetscReal AnalyticalSolution_magnitude = 0.0;
1736 PetscReal ErrorPercentage = 0.0;
1737
1738 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Creating global vectors.\n");
1739 ierr = DMSwarmCreateGlobalVectorFromField(swarm, "position", &positionVec); CHKERRQ(ierr);
1740 ierr = DMSwarmCreateGlobalVectorFromField(swarm, "velocity", &velocityVec); CHKERRQ(ierr);
1741
1742 ierr = VecDuplicate(positionVec, &analyticalvelocityVec); CHKERRQ(ierr);
1743 ierr = VecCopy(positionVec, analyticalvelocityVec); CHKERRQ(ierr);
1744
1745 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Computing analytical solution.\n");
1746 ierr = SetAnalyticalSolutionForParticles(analyticalvelocityVec, simCtx); CHKERRQ(ierr);
1747
1748 ierr = VecDuplicate(analyticalvelocityVec, &errorVec); CHKERRQ(ierr);
1749 ierr = VecCopy(analyticalvelocityVec, errorVec); CHKERRQ(ierr);
1750
1751 ierr = VecNorm(analyticalvelocityVec, NORM_2, &AnalyticalSolution_magnitude); CHKERRQ(ierr);
1752
1753 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Computing error.\n");
1754 ierr = VecAXPY(errorVec, -1.0, velocityVec); CHKERRQ(ierr);
1755 ierr = VecNorm(errorVec, NORM_2, &Interpolation_error); CHKERRQ(ierr);
1756 ierr = VecNorm(errorVec,NORM_INFINITY,&Maximum_Interpolation_error); CHKERRQ(ierr);
1757
1758 ErrorPercentage = (AnalyticalSolution_magnitude > 0) ?
1759 (Interpolation_error / AnalyticalSolution_magnitude * 100.0) : 0.0;
1760
1761 LOG_ALLOW(GLOBAL, LOG_INFO, "Interpolation error (%%): %g\n", ErrorPercentage);
1762 PetscPrintf(PETSC_COMM_WORLD, "Interpolation error (%%): %g\n", ErrorPercentage);
1763 LOG_ALLOW(GLOBAL, LOG_INFO, "Maximum Interpolation error: %g\n", Maximum_Interpolation_error);
1764 PetscPrintf(PETSC_COMM_WORLD, "Maximum Interpolation error: %g\n", Maximum_Interpolation_error);
1765
1766
1767 ierr = VecDestroy(&analyticalvelocityVec); CHKERRQ(ierr);
1768 ierr = VecDestroy(&errorVec); CHKERRQ(ierr);
1769 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, "position", &positionVec); CHKERRQ(ierr);
1770 ierr = DMSwarmDestroyGlobalVectorFromField(swarm, "velocity", &velocityVec); CHKERRQ(ierr);
1771
1772 return 0;
1773}
PetscErrorCode SetAnalyticalSolutionForParticles(Vec tempVec, SimCtx *simCtx)
Applies the analytical solution to particle velocity vector.
Here is the call graph for this function:
Here is the caller graph for this function:

◆ CalculateAdvancedParticleMetrics()

PetscErrorCode CalculateAdvancedParticleMetrics ( UserCtx user)

Computes advanced particle statistics and stores them in SimCtx.

This function calculates:

  • Particle load imbalance across MPI ranks.
  • The total number of grid cells occupied by at least one particle.

It requires that CalculateParticleCountPerCell() has been called prior to its execution. It uses collective MPI operations and must be called by all ranks.

Parameters
user — Pointer to the UserCtx.
Returns
PetscErrorCode 0 on success.

Definition at line 1790 of file logging.c.

1791{
1792 PetscErrorCode ierr;
1793 SimCtx *simCtx = user->simCtx;
1794 PetscMPIInt size, rank;
1795
1796 PetscFunctionBeginUser;
1797 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
1798 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1799
1800 // --- 1. Particle Load Imbalance ---
1801 PetscInt nLocal, nGlobal, nLocalMax;
1802 ierr = DMSwarmGetLocalSize(user->swarm, &nLocal); CHKERRQ(ierr);
1803 ierr = DMSwarmGetSize(user->swarm, &nGlobal); CHKERRQ(ierr);
1804 ierr = MPI_Allreduce(&nLocal, &nLocalMax, 1, MPIU_INT, MPI_MAX, PETSC_COMM_WORLD); CHKERRQ(ierr);
1805
1806 PetscReal avg_per_rank = (size > 0) ? ((PetscReal)nGlobal / size) : 0.0;
1807 // Handle division by zero if there are no particles
1808 simCtx->particleLoadImbalance = (avg_per_rank > 1e-9) ? (nLocalMax / avg_per_rank) : 1.0;
1809
1810
1811 // --- 2. Number of Occupied Cells ---
1812 // This part requires access to the user->ParticleCount vector.
1813 PetscInt local_occupied_cells = 0;
1814 PetscInt global_occupied_cells;
1815 const PetscScalar *count_array;
1816 PetscInt vec_local_size;
1817
1818 ierr = VecGetLocalSize(user->ParticleCount, &vec_local_size); CHKERRQ(ierr);
1819 ierr = VecGetArrayRead(user->ParticleCount, &count_array); CHKERRQ(ierr);
1820
1821 for (PetscInt i = 0; i < vec_local_size; ++i) {
1822 if (count_array[i] > 0.5) { // Use 0.5 to be safe with floating point
1823 local_occupied_cells++;
1824 }
1825 }
1826 ierr = VecRestoreArrayRead(user->ParticleCount, &count_array); CHKERRQ(ierr);
1827
1828 ierr = MPI_Allreduce(&local_occupied_cells, &global_occupied_cells, 1, MPIU_INT, MPI_SUM, PETSC_COMM_WORLD); CHKERRQ(ierr);
1829 simCtx->occupiedCellCount = global_occupied_cells;
1830
1831 LOG_ALLOW_SYNC(GLOBAL, LOG_INFO, "[Rank %d] Advanced Metrics: Imbalance=%.2f, OccupiedCells=%d\n", rank, simCtx->particleLoadImbalance, simCtx->occupiedCellCount);
1832
1833 PetscFunctionReturn(0);
1834}
PetscInt occupiedCellCount
Definition variables.h:685
Vec ParticleCount
Definition variables.h:800
PetscReal particleLoadImbalance
Definition variables.h:686
Here is the caller graph for this function:

◆ LOG_PARTICLE_METRICS()

PetscErrorCode LOG_PARTICLE_METRICS ( UserCtx user,
const char *  stageName 
)

Logs particle swarm metrics, adapting its behavior based on a boolean flag in SimCtx.

This function serves a dual purpose:

  1. If simCtx->isInitializationPhase is PETSC_TRUE, it logs settlement diagnostics to "Initialization_Metrics.log", using the provided stageName.
  2. If simCtx->isInitializationPhase is PETSC_FALSE, it logs regular timestep metrics to "Particle_Metrics.log".
Parameters
user — A pointer to the UserCtx.
stageName — A descriptive string for the initialization stage (ignored in timestep mode).
Returns
PetscErrorCode 0 on success.

Definition at line 1851 of file logging.c.

1852{
1853 PetscErrorCode ierr;
1854 PetscMPIInt rank, size;
1855 SimCtx *simCtx = user->simCtx;
1856
1857 PetscFunctionBeginUser;
1858 ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank); CHKERRQ(ierr);
1859 ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size); CHKERRQ(ierr);
1860
1861 PetscInt totalParticles;
1862 ierr = DMSwarmGetSize(user->swarm, &totalParticles); CHKERRQ(ierr);
1863
1864 if (!rank) {
1865 FILE *f;
1866 char filen[PETSC_MAX_PATH_LEN];
1867 sprintf(filen, "%s/Particle_Metrics.log", simCtx->log_dir);
1868 f = fopen(filen, "a");
1869 if (!f) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Cannot open particle log file: %s", filen);
1870
1871 if (simCtx->step == simCtx->StartStep + 1) {
1872 PetscFPrintf(PETSC_COMM_SELF, f, "%-10s | %-12s | %-10s | %-10s | %-15s | %-10s | %-10s\n",
1873 "Timestep", "Total Ptls", "Lost", "Migrated", "Occupied Cells", "Imbalance", "Mig Passes");
1874 PetscFPrintf(PETSC_COMM_SELF, f, "----------------------------------------------------------------------------------------------------------\n");
1875 }
1876
1877 PetscFPrintf(PETSC_COMM_SELF, f, "%-10d | %-12d | %-10d | %-10d | %-15d | %-10.2f | %-10d\n",
1878 (int)simCtx->step, (int)totalParticles, (int)simCtx->particlesLostLastStep,
1879 (int)simCtx->particlesMigratedLastStep, (int)simCtx->occupiedCellCount,
1880 (double)simCtx->particleLoadImbalance, (int)simCtx->migrationPassesLastStep);
1881 fclose(f);
1882 }
1883 PetscFunctionReturn(0);
1884}
PetscInt particlesLostLastStep
Definition variables.h:682
PetscInt particlesMigratedLastStep
Definition variables.h:684
PetscInt migrationPassesLastStep
Definition variables.h:683
Here is the caller graph for this function: