PICurv 0.1.0
A Parallel Particle-In-Cell Solver for Curvilinear LES
Loading...
Searching...
No Matches
Functions
postprocessor.h File Reference
#include "io.h"
#include "variables.h"
#include "logging.h"
#include "ParticleSwarm.h"
#include "interpolation.h"
#include "grid.h"
#include "setup.h"
#include "Metric.h"
#include "postprocessing_kernels.h"
#include "vtk_io.h"
#include "particle_statistics.h"
Include dependency graph for postprocessor.h:
This graph shows which files directly or indirectly include this file:

Go to the source code of this file.

Functions

PetscErrorCode SetupPostProcessSwarm (UserCtx *user, PostProcessParams *pps)
 Creates a new, dedicated DMSwarm for post-processing tasks.
 
PetscErrorCode WriteEulerianFile (UserCtx *user, PostProcessParams *pps, PetscInt ti)
 Orchestrates the writing of a combined, multi-field VTK file for a single time step.
 
PetscErrorCode EulerianDataProcessingPipeline (UserCtx *user, PostProcessParams *pps)
 Parses the processing pipeline string and executes the requested kernels.
 
PetscErrorCode ParticleDataProcessingPipeline (UserCtx *user, PostProcessParams *pps)
 Parses and executes the particle pipeline using a robust two-pass approach.
 
PetscErrorCode WriteParticleFile (UserCtx *user, PostProcessParams *pps, PetscInt ti)
 Writes particle data to a VTP file using the Prepare-Write-Cleanup pattern.
 
PetscErrorCode GlobalStatisticsPipeline (UserCtx *user, PostProcessParams *pps, PetscInt ti)
 Executes the global statistics pipeline, computing aggregate reductions over all particles.
 

Function Documentation

◆ SetupPostProcessSwarm()

PetscErrorCode SetupPostProcessSwarm ( UserCtx user,
PostProcessParams pps 
)

Creates a new, dedicated DMSwarm for post-processing tasks.

This function is called once at startup. It creates an empty DMSwarm, associates it with the same grid DM as the primary swarm, and registers all of the required fields.

Parameters
userThe UserCtx where user->post_swarm will be created.
ppsThe PostProcessParams containing the particle_pipeline string for field registration.
Returns
PetscErrorCode

Creates a new, dedicated DMSwarm for post-processing tasks.

Local to this translation unit.

Definition at line 18 of file postprocessor.c.

19{
20 PetscErrorCode ierr;
21 PetscFunctionBeginUser;
23 char *pipeline_copy, *step_token, *step_saveptr;
24 PetscBool finalize_needed = PETSC_FALSE;
25
26 ierr = DMCreate(PETSC_COMM_WORLD, &user->post_swarm); CHKERRQ(ierr);
27 ierr = DMSetType(user->post_swarm, DMSWARM); CHKERRQ(ierr);
28 ierr = DMSetDimension(user->post_swarm, 3); CHKERRQ(ierr);
29 ierr = DMSwarmSetType(user->post_swarm, DMSWARM_BASIC); CHKERRQ(ierr);
30 // Associate it with the same grid as the solver's swarm
31 if (user->da) {
32 ierr = DMSwarmSetCellDM(user->post_swarm, user->da); CHKERRQ(ierr);
33 LOG_ALLOW(LOCAL,LOG_INFO,"Associated DMSwarm with Cell DM (user->da).\n");
34 } else {
35 // If user->da is essential for your simulation logic with particles, this should be a fatal error.
36 LOG_ALLOW(GLOBAL, LOG_WARNING, "user->da (Cell DM for Swarm) is NULL. Cell-based swarm operations might fail.\n");
37 // SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONGSTATE, "CreateParticleSwarm - user->da (Cell DM) is NULL but required.");
38 }
39
40 LOG_ALLOW(GLOBAL, LOG_INFO, "Created dedicated DMSwarm for post-processing.\n");
41
42 LOG_ALLOW(GLOBAL, LOG_DEBUG, " --- Setting up Post-Processing Pipeline fields-- \n");
43
44 ierr = PetscStrallocpy(pps->particle_pipeline, &pipeline_copy); CHKERRQ(ierr);
45 step_token = strtok_r(pipeline_copy, ";", &step_saveptr);
46 while (step_token) {
47 TrimWhitespace(step_token);
48 if (strlen(step_token) == 0) { step_token = strtok_r(NULL, ";", &step_saveptr); continue; }
49
50 char *keyword = strtok(step_token, ":");
51 char *args_str = strtok(NULL, "");
52 TrimWhitespace(keyword);
53 PetscInt output_field_dimensions = 1; // Default to scalar output fields
54
55 if (strcasecmp(keyword, "ComputeSpecificKE") == 0) {
56 if (!args_str) SETERRQ(PETSC_COMM_SELF, 1, "Error (ComputeSpecificKE): Missing arguments.");
57 char *input_field = strtok(args_str, ">");
58 char *output_field = strtok(NULL, ">");
59 output_field_dimensions = 1; // SKE is scalar
60 if (!input_field) SETERRQ(PETSC_COMM_SELF, 1, "Error (ComputeSpecificKE): Missing input field in 'in>out' syntax.");
61 if (!output_field) SETERRQ(PETSC_COMM_SELF, 1, "Error (ComputeSpecificKE): Missing output field in 'in>out' syntax.");
62 TrimWhitespace(input_field);
63 TrimWhitespace(output_field);
64 if (strlen(input_field) == 0) SETERRQ(PETSC_COMM_SELF, 1, "Error (ComputeSpecificKE): Empty input field name.");
65 if (strlen(output_field) == 0) SETERRQ(PETSC_COMM_SELF, 1, "Error (ComputeSpecificKE): Empty output field name.");
66 // Register the output field
67 ierr = RegisterSwarmField(user->post_swarm, output_field, output_field_dimensions,PETSC_REAL); CHKERRQ(ierr);
68 LOG_ALLOW(GLOBAL, LOG_INFO, "Registered particle field '%s' (ComputeSpecificKE input='%s').\n", output_field, input_field);
69 finalize_needed = PETSC_TRUE;
70 } else {
71 LOG_ALLOW(GLOBAL, LOG_WARNING, "Warning: Unknown particle transformation keyword '%s'. Skipping.\n", keyword);
72 }
73
74 // Add other 'else if' blocks here for other kernels that create output fields
75
76 step_token = strtok_r(NULL, ";", &step_saveptr);
77 } // while step_token
78
79 ierr = PetscFree(pipeline_copy); CHKERRQ(ierr);
80
81 // --- FINALIZE STEP ---
82 if (finalize_needed) {
83 LOG_ALLOW(GLOBAL, LOG_INFO, "Finalizing registered particle fields for the post-processing swarm.\n");
84 } else {
85 LOG_ALLOW(GLOBAL, LOG_INFO, "No custom particle fields requested; finalizing an empty post-processing swarm for safe use.\n");
86 }
87 ierr = DMSwarmFinalizeFieldRegister(user->post_swarm); CHKERRQ(ierr);
88
89 LOG_ALLOW(GLOBAL, LOG_INFO, "Post-Processing DMSwarm setup complete.\n");
90
92 PetscFunctionReturn(0);
93}
PetscErrorCode RegisterSwarmField(DM swarm, const char *fieldName, PetscInt fieldDim, PetscDataType dtype)
Registers a swarm field without finalizing registration.
void TrimWhitespace(char *str)
Helper function to trim leading/trailing whitespace from a string.
Definition io.c:38
#define LOCAL
Logging scope definitions for controlling message output.
Definition logging.h:44
#define GLOBAL
Scope for global logging across all processes.
Definition logging.h:45
#define LOG_ALLOW(scope, level, fmt,...)
Logging macro that checks both the log level and whether the calling function is in the allowed-funct...
Definition logging.h:199
#define PROFILE_FUNCTION_END
Marks the end of a profiled code block.
Definition logging.h:779
@ LOG_INFO
Informational messages about program execution.
Definition logging.h:30
@ LOG_WARNING
Non-critical issues that warrant attention.
Definition logging.h:29
@ LOG_DEBUG
Detailed debugging information.
Definition logging.h:31
#define PROFILE_FUNCTION_BEGIN
Marks the beginning of a profiled code block (typically a function).
Definition logging.h:770
DM post_swarm
Definition variables.h:886
char particle_pipeline[1024]
Definition variables.h:568
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteEulerianFile()

PetscErrorCode WriteEulerianFile ( UserCtx user,
PostProcessParams pps,
PetscInt  ti 
)

Orchestrates the writing of a combined, multi-field VTK file for a single time step.

This function is the primary driver for generating output. It performs these steps:

  1. Prepares the subsampled coordinate array required for the legacy grid format.
  2. Parses the user-requested list of fields from the configuration.
  3. For each field, prepares a corresponding subsampled data array.
  4. Assembles all prepared arrays into a single VTKMetaData struct.
  5. Calls the low-level VTK writer to generate the final .vts file.
  6. Frees all temporary memory allocated during the preparation phase.
Parameters
userThe UserCtx for the finest grid level.
ppsThe post-processing configuration struct.
tiThe current time step index.
Returns
PetscErrorCode

Orchestrates the writing of a combined, multi-field VTK file for a single time step.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/postprocessor.h.

See also
WriteEulerianFile()

Definition at line 192 of file postprocessor.c.

193{
194 PetscErrorCode ierr;
195 VTKMetaData meta;
196 char filename[MAX_FILENAME_LENGTH];
197
198 PetscFunctionBeginUser;
200
201 if (pps->output_fields_instantaneous[0] == '\0') {
202 LOG_ALLOW(GLOBAL, LOG_DEBUG, "No instantaneous fields requested for output at ti=%" PetscInt_FMT ". Skipping.\n", ti);
203 PetscFunctionReturn(0);
204 }
205
206 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Starting VTK File Writing for ti = %" PetscInt_FMT " ---\n", ti);
207
208 /* 1) Metadata init */
209 ierr = PetscMemzero(&meta, sizeof(VTKMetaData)); CHKERRQ(ierr);
211
212 /* 2) Coordinates (subsampled interior) */
213 ierr = PrepareOutputCoordinates(user, &meta.coords, &meta.mx, &meta.my, &meta.mz, &meta.npoints); CHKERRQ(ierr);
214 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Using coords linearization order: fast=i mid=j slow=k (sizes: %" PetscInt_FMT " x %" PetscInt_FMT " x %" PetscInt_FMT ")\n",
215 meta.mx, meta.my, meta.mz);
216
217 /* 3) Fields (rank 0) */
218 if (user->simCtx->rank == 0) {
219 char *fields_copy, *field_name;
220 ierr = PetscStrallocpy(pps->output_fields_instantaneous, &fields_copy); CHKERRQ(ierr);
221
222 field_name = strtok(fields_copy, ",");
223 while (field_name) {
224 TrimWhitespace(field_name);
225 if (!*field_name) { field_name = strtok(NULL, ","); continue; }
226
227 LOG_ALLOW(LOCAL, LOG_DEBUG, "Preparing field '%s' for output.\n", field_name);
228
229 Vec field_vec = NULL;
230 PetscInt num_components = 0;
231
232 if (!strcasecmp(field_name, "P_nodal")) {
233 field_vec = user->P_nodal; num_components = 1;
234 } else if (!strcasecmp(field_name, "Ucat_nodal")) {
235 field_vec = user->Ucat_nodal; num_components = 3;
236 } else if (!strcasecmp(field_name, "Qcrit")) {
237 field_vec = user->Qcrit; num_components = 1;
238 } else if (!strcasecmp(field_name, "Psi_nodal")){
239 if(user->simCtx->np==0){
240 LOG_ALLOW(LOCAL, LOG_WARNING, "Field 'Psi_nodal' requested but no particles are present. Skipping.\n");
241 field_name = strtok(NULL, ",");
242 continue;
243 }
244 field_vec = user->Psi_nodal; num_components = 1;
245 } else {
246 LOG_ALLOW(LOCAL, LOG_WARNING, "Field '%s' not recognized. Skipping.\n", field_name);
247 field_name = strtok(NULL, ",");
248 continue;
249 }
250
252 LOG_ALLOW(LOCAL, LOG_WARNING, "MAX_POINT_DATA_FIELDS reached. Cannot add '%s'.\n", field_name);
253 field_name = strtok(NULL, ",");
254 continue;
255 }
256
257 // --- Add field to metadata ---
258 VTKFieldInfo* current_field = &meta.point_data_fields[meta.num_point_data_fields];
259 strncpy(current_field->name, field_name, MAX_VTK_FIELD_NAME_LENGTH-1);
260 current_field->name[MAX_VTK_FIELD_NAME_LENGTH-1] = '\0';
261 current_field->num_components = num_components;
262
263 /* Build interior AoS from NATURAL gathered Vec */
264 ierr = PrepareOutputEulerianFieldData(user, field_vec, num_components, &current_field->data); CHKERRQ(ierr);
265
266 /*
267 // *** DEBUG: Dump Ucat_nodal details and add scalar companions Ux, Uy, Uz for easier visualization ***
268
269 // If this is Ucat_nodal, dump a few tuples and add scalar companions
270 if (!strcasecmp(field_name, "Ucat_nodal")) {
271 const PetscInt npts = meta.npoints;
272 const PetscScalar *a = (const PetscScalar*)current_field->data;
273
274 LOG_ALLOW(GLOBAL, LOG_INFO, "DBG Ucat_nodal: ptr=%p npoints=%" PetscInt_FMT " num_components=%" PetscInt_FMT "\n",
275 (void*)a, npts, current_field->num_components);
276
277 if (a && current_field->num_components == 3 && npts > 0) {
278 const PetscInt nshow = (npts < 5) ? npts : 5;
279 LOG_ALLOW(GLOBAL, LOG_INFO, "DBG Ucat_nodal: showing first %d of %" PetscInt_FMT " tuples (AoS x,y,z):\n",
280 (int)nshow, npts);
281 for (PetscInt t = 0; t < nshow; ++t) {
282 LOG_ALLOW(GLOBAL, LOG_INFO, " Ucat_nodal[%3" PetscInt_FMT "] = (%g, %g, %g)\n",
283 t, (double)a[3*t+0], (double)a[3*t+1], (double)a[3*t+2]);
284 }
285 if (npts > 10) {
286 PetscInt mid = npts / 2;
287 LOG_ALLOW(GLOBAL, LOG_INFO, " Ucat_nodal[mid=%" PetscInt_FMT "] = (%g, %g, %g)\n",
288 mid, (double)a[3*mid+0], (double)a[3*mid+1], (double)a[3*mid+2]);
289 }
290
291 // Add scalar companions from the AoS we just created
292 PetscScalar *Ux=NULL,*Uy=NULL,*Uz=NULL;
293 ierr = PetscMalloc1(meta.npoints, &Ux); CHKERRQ(ierr);
294 ierr = PetscMalloc1(meta.npoints, &Uy); CHKERRQ(ierr);
295 ierr = PetscMalloc1(meta.npoints, &Uz); CHKERRQ(ierr);
296 for (PetscInt i = 0; i < meta.npoints; ++i) {
297 Ux[i] = a[3*i+0]; Uy[i] = a[3*i+1]; Uz[i] = a[3*i+2];
298 }
299
300 if (meta.num_point_data_fields + 3 <= MAX_POINT_DATA_FIELDS) {
301 VTKFieldInfo *fx = &meta.point_data_fields[++meta.num_point_data_fields];
302 strncpy(fx->name, "Ux_debug", MAX_VTK_FIELD_NAME_LENGTH-1);
303 fx->name[MAX_VTK_FIELD_NAME_LENGTH-1] = '\0';
304 fx->num_components = 1; fx->data = Ux;
305
306 VTKFieldInfo *fy = &meta.point_data_fields[++meta.num_point_data_fields];
307 strncpy(fy->name, "Uy_debug", MAX_VTK_FIELD_NAME_LENGTH-1);
308 fy->name[MAX_VTK_FIELD_NAME_LENGTH-1] = '\0';
309 fy->num_components = 1; fy->data = Uy;
310
311 VTKFieldInfo *fz = &meta.point_data_fields[++meta.num_point_data_fields];
312 strncpy(fz->name, "Uz_debug", MAX_VTK_FIELD_NAME_LENGTH-1);
313 fz->name[MAX_VTK_FIELD_NAME_LENGTH-1] = '\0';
314 fz->num_components = 1; fz->data = Uz;
315
316 LOG_ALLOW(GLOBAL, LOG_INFO, "DBG: Added scalar companions Ux_debug, Uy_debug, Uz_debug.\n");
317 } else {
318 LOG_ALLOW(GLOBAL, LOG_WARNING, "DBG: Not enough slots to add Ux/Uy/Uz debug fields.\n");
319 PetscFree(Ux); PetscFree(Uy); PetscFree(Uz);
320 }
321
322 // Mid-plane CSV + AoS vs NATURAL compare (component X)
323
324 // Gather NATURAL again (small cost, but isolated and clear)
325 PetscInt Ng = 0;
326 double *nat_d = NULL;
327 DM dmU = NULL;
328 DMDALocalInfo infU;
329 ierr = VecGetDM(field_vec, &dmU); CHKERRQ(ierr);
330 ierr = DMDAGetLocalInfo(dmU, &infU); CHKERRQ(ierr);
331
332 const PetscInt M=infU.mx, N=infU.my, P=infU.mz;
333 const PetscInt mx = meta.mx, my = meta.my, mz = meta.mz;
334 const PetscInt iInnerMid = mx/2; // interior index [0..mx-1]
335 const PetscInt iGlob = iInnerMid;
336
337 ierr = VecToArrayOnRank0(field_vec, &Ng, &nat_d); CHKERRQ(ierr);
338
339 if (nat_d) {
340 const PetscScalar *nar = (const PetscScalar*)nat_d;
341 const char *base = pps->output_prefix;
342 char fn[512], fnc[512];
343 snprintf(fn, sizeof(fn), "%s_%05" PetscInt_FMT "_iMid.csv", base, ti);
344 snprintf(fnc, sizeof(fnc), "%s_%05" PetscInt_FMT "_iMid_compare.csv", base, ti);
345
346 FILE *fp = fopen(fn, "w");
347 FILE *fpc = fopen(fnc, "w");
348 if (fp) fprintf(fp, "jInner,kInner,Ux,Uy,Uz\n");
349 if (fpc) fprintf(fpc, "jInner,kInner,Ux_AoS,Ux_NAT,abs_diff\n");
350
351 double maxAbsDiff = 0.0, sumAbs = 0.0;
352 PetscInt count = 0;
353
354 for (PetscInt kInner = 0; kInner < mz; ++kInner) {
355 const PetscInt k = kInner;
356 for (PetscInt jInner = 0; jInner < my; ++jInner) {
357 const PetscInt j = jInner;
358
359 // AoS tuple index
360 const PetscInt t = iInnerMid + mx * (jInner + my * kInner);
361 const PetscScalar ux = a[3*t+0], uy = a[3*t+1], uz = a[3*t+2];
362
363 if (fp) fprintf(fp, "%d,%d,%.15e,%.15e,%.15e\n",
364 (int)jInner,(int)kInner,(double)ux,(double)uy,(double)uz);
365
366 // NATURAL base for (iGlob,j,k)
367 const PetscInt baseNat = 3 * (((k)*N + j)*M + iGlob);
368 const PetscScalar uxN = nar[baseNat + 0];
369
370 const double diff = fabs((double)ux - (double)uxN);
371 if (diff > maxAbsDiff) maxAbsDiff = diff;
372 sumAbs += diff; ++count;
373
374 if (fpc) fprintf(fpc, "%d,%d,%.15e,%.15e,%.15e\n",
375 (int)jInner,(int)kInner,(double)ux,(double)uxN,diff);
376 } // for jInner
377 } // for kInner
378 if (fp) fclose(fp);
379 if (fpc) fclose(fpc);
380
381 if (count > 0) {
382 const double meanAbs = sumAbs / (double)count;
383 LOG_ALLOW(GLOBAL, LOG_INFO,
384 "PETSc-Vec vs AoS (i-mid, Ux): max|Δ|=%.6e, mean|Δ|=%.6e -> CSV: %s\n",
385 maxAbsDiff, meanAbs, fnc);
386 LOG_ALLOW(GLOBAL, LOG_INFO, "Wrote i-mid plane CSV: %s\n", fn);
387 } // if count>0
388
389 ierr = PetscFree(nat_d); CHKERRQ(ierr);
390
391
392 } // if nat_d
393
394 } // if a && num_components==3 && npts>0
395 } // if Ucat_nodal
396 // --- END DEBUG BLOCK (Ucat_nodal) ---
397 */
398
399 meta.num_point_data_fields++; /* count the main field we just added */
400 field_name = strtok(NULL, ",");
401 }
402
403 ierr = PetscFree(fields_copy); CHKERRQ(ierr);
404
405 // --- DEBUG: Add sanity fields i_idx, j_idx, k_idx and x_pos, y_pos, z_pos ---
406 // These are the logical indices and physical coordinates of each point in the subsampled grid.
407 // They can be used to verify the grid structure and orientation in visualization tools.
408 // They are added as scalar fields with names "i_idx", "j_idx", "k_idx" and "x_pos", "y_pos", "z_pos".
409 // Note: these are only added if there is room in the MAX_POINT_DATA_FIELDS limit.
410 // They are allocated and owned here, and will be freed below.
411 // They are in the same linearization order as meta.coords (AoS x,y,z by point).
412 /*
413 // Append sanity fields i/j/k indices and coordinates
414
415 // Build i/j/k and x/y/z (length = npoints); these match the same linearization as coords
416 const PetscInt n = meta.npoints;
417 PetscScalar *i_idx=NULL,*j_idx=NULL,*k_idx=NULL,*x_pos=NULL,*y_pos=NULL,*z_pos=NULL;
418
419 ierr = PetscMalloc1(n, &i_idx); CHKERRQ(ierr);
420 ierr = PetscMalloc1(n, &j_idx); CHKERRQ(ierr);
421 ierr = PetscMalloc1(n, &k_idx); CHKERRQ(ierr);
422 ierr = PetscMalloc1(n, &x_pos); CHKERRQ(ierr);
423 ierr = PetscMalloc1(n, &y_pos); CHKERRQ(ierr);
424 ierr = PetscMalloc1(n, &z_pos); CHKERRQ(ierr);
425
426 // coords is length 3*n, AoS: (x,y,z) by point
427 const PetscScalar *c = (const PetscScalar*)meta.coords;
428 for (PetscInt k = 0; k < meta.mz; ++k) {
429 for (PetscInt j = 0; j < meta.my; ++j) {
430 for (PetscInt i = 0; i < meta.mx; ++i) {
431 const PetscInt t = i + meta.mx * (j + meta.my * k);
432 i_idx[t] = (PetscScalar)i;
433 j_idx[t] = (PetscScalar)j;
434 k_idx[t] = (PetscScalar)k;
435 x_pos[t] = c[3*t+0];
436 y_pos[t] = c[3*t+1];
437 z_pos[t] = c[3*t+2];
438 }
439 }
440 }
441
442 const char *nf[6] = {"i_idx","j_idx","k_idx","x_pos","y_pos","z_pos"};
443 PetscScalar *arrs[6] = {i_idx,j_idx,k_idx,x_pos,y_pos,z_pos};
444 for (int s=0; s<6; ++s) {
445 if (meta.num_point_data_fields < MAX_POINT_DATA_FIELDS) {
446 VTKFieldInfo *f = &meta.point_data_fields[meta.num_point_data_fields++];
447 strncpy(f->name, nf[s], MAX_VTK_FIELD_NAME_LENGTH-1);
448 f->name[MAX_VTK_FIELD_NAME_LENGTH-1] = '\0';
449 f->num_components = 1;
450 f->data = arrs[s];
451 } else {
452 LOG_ALLOW(GLOBAL, LOG_WARNING, "Sanity field '%s' dropped: MAX_POINT_DATA_FIELDS reached.\n", nf[s]);
453 PetscFree(arrs[s]);
454 }
455 }
456 LOG_ALLOW(GLOBAL, LOG_INFO, "DBG: Added sanity fields i_idx/j_idx/k_idx and x_pos/y_pos/z_pos.\n");
457 */
458 // --- END DEBUG BLOCK (sanity fields) ---
459
460 /* Field summary */
461 LOG_ALLOW(GLOBAL, LOG_INFO, "PointData fields to write: %d\n", (int)meta.num_point_data_fields);
462 for (PetscInt ii=0; ii<meta.num_point_data_fields; ++ii) {
463 LOG_ALLOW(GLOBAL, LOG_INFO, " # %2" PetscInt_FMT " Field Name = %s Components = %d\n",
464 ii, meta.point_data_fields[ii].name, (int)meta.point_data_fields[ii].num_components);
465 }
466 } // if rank 0
467
468 /* 4) Write the VTS */
469 ierr = PetscSNPrintf(filename, sizeof(filename), "%s_%05" PetscInt_FMT ".vts", pps->output_prefix, ti); CHKERRQ(ierr);
470 ierr = CreateVTKFileFromMetadata(filename, &meta, PETSC_COMM_WORLD); CHKERRQ(ierr);
471
472 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Eulerian File Writing for ti = %" PetscInt_FMT " Complete ---\n", ti);
474 PetscFunctionReturn(0);
475}
PetscInt CreateVTKFileFromMetadata(const char *filename, const VTKMetaData *meta, MPI_Comm comm)
Creates a VTK file from prepared metadata and field payloads.
Definition vtk_io.c:153
Vec P_nodal
Definition variables.h:887
#define MAX_POINT_DATA_FIELDS
Defines the maximum number of data fields for VTK point data.
Definition variables.h:547
PetscInt npoints
Definition variables.h:604
PetscInt num_components
Definition variables.h:591
PetscMPIInt rank
Definition variables.h:646
PetscInt num_point_data_fields
Definition variables.h:607
SimCtx * simCtx
Back-pointer to the master simulation context.
Definition variables.h:814
char output_prefix[256]
Definition variables.h:567
Vec Ucat_nodal
Definition variables.h:888
PetscInt np
Definition variables.h:739
#define MAX_FILENAME_LENGTH
Definition variables.h:545
Vec Qcrit
Definition variables.h:889
VTKFileType fileType
Definition variables.h:602
char output_fields_instantaneous[1024]
Definition variables.h:565
PetscScalar * data
Definition variables.h:592
char name[64]
Definition variables.h:590
PetscScalar * coords
Definition variables.h:605
VTKFieldInfo point_data_fields[20]
Definition variables.h:606
Vec Psi_nodal
Definition variables.h:890
PetscInt mz
Definition variables.h:603
PetscInt my
Definition variables.h:603
PetscInt mx
Definition variables.h:603
#define MAX_VTK_FIELD_NAME_LENGTH
Maximum length for VTK field names.
Definition variables.h:548
@ VTK_STRUCTURED
Definition variables.h:597
Stores all necessary information for a single data array in a VTK file.
Definition variables.h:589
PetscErrorCode PrepareOutputEulerianFieldData(UserCtx *user, Vec field_vec, PetscInt num_components, PetscScalar **out_data)
Creates a C array of field data corresponding to a subsampled (legacy-style) grid.
Definition vtk_io.c:313
PetscErrorCode PrepareOutputCoordinates(UserCtx *user, PetscScalar **out_coords, PetscInt *out_nx, PetscInt *out_ny, PetscInt *out_nz, PetscInt *out_npoints)
Creates a C array of coordinates corresponding to a subsampled (legacy-style) grid.
Definition vtk_io.c:245
Here is the call graph for this function:
Here is the caller graph for this function:

◆ EulerianDataProcessingPipeline()

PetscErrorCode EulerianDataProcessingPipeline ( UserCtx user,
PostProcessParams pps 
)

Parses the processing pipeline string and executes the requested kernels.

Parameters
userThe UserCtx containing the data to be transformed.
ppsThe PostProcessParams containing the pipeline string.
Returns
PetscErrorCode

Parses the processing pipeline string and executes the requested kernels.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/postprocessor.h.

See also
EulerianDataProcessingPipeline()

Definition at line 104 of file postprocessor.c.

105{
106 PetscErrorCode ierr;
107 char *pipeline_copy, *step_token, *step_saveptr;
108
109 PetscFunctionBeginUser;
111 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Starting Data Transformation Pipeline ---\n");
112
113 // Do nothing if the pipeline string is empty
114 if (pps->process_pipeline[0] == '\0') {
115 LOG_ALLOW(GLOBAL, LOG_INFO, "Processing pipeline is empty. No transformations will be run.\n");
116 PetscFunctionReturn(0);
117 }
118
119 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Pipeline string: [%s]\n", pps->process_pipeline);
120
121 // Make a writable copy for strtok_r, as it modifies the string
122 ierr = PetscStrallocpy(pps->process_pipeline, &pipeline_copy); CHKERRQ(ierr);
123
124 // --- Outer Loop: Tokenize by Semicolon (;) to get each processing step ---
125 step_token = strtok_r(pipeline_copy, ";", &step_saveptr);
126 while (step_token) {
127 TrimWhitespace(step_token);
128 if (strlen(step_token) == 0) {
129 step_token = strtok_r(NULL, ";", &step_saveptr);
130 continue;
131 }
132
133 char *keyword = strtok(step_token, ":");
134 char *args_str = strtok(NULL, ""); // Get the rest of the string as arguments
135
136 if (!keyword) { // Should not happen with TrimWhitespace, but is a safe check
137 step_token = strtok_r(NULL, ";", &step_saveptr);
138 continue;
139 }
140
141 TrimWhitespace(keyword);
142 if (args_str) TrimWhitespace(args_str);
143
144 LOG_ALLOW(GLOBAL, LOG_INFO, "Executing Transformation: '%s' on args: '%s'\n", keyword, args_str ? args_str : "None");
145
146 // --- DISPATCHER: Route to the correct kernel based on the keyword ---
147 if (strcasecmp(keyword, "CellToNodeAverage") == 0) {
148 if (!args_str) SETERRQ(PETSC_COMM_SELF, 1, "CellToNodeAverage requires arguments in 'in_field>out_field' format.");
149 char *in_field = strtok(args_str, ">");
150 char *out_field = strtok(NULL, ">");
151 if (!in_field || !out_field) SETERRQ(PETSC_COMM_SELF, 1, "CellToNodeAverage requires 'in>out' syntax (e.g., P>P_nodal).");
152 if(strcmp(in_field,out_field)==0) SETERRQ(PETSC_COMM_SELF, 1, "CellToNodeAverage input and output fields must be different.");
153 if(user->simCtx->np == 0 && (strcmp(out_field,"Psi_nodal")==0 || strcmp(in_field,"Psi_nodal")==0)){
154 LOG(GLOBAL,LOG_WARNING,"CellToNodeAverage cannot process 'Psi_nodal' when no particles are present in the simulation.\n");
155 step_token = strtok_r(NULL, ";", &step_saveptr);
156 continue;
157 }
158 TrimWhitespace(in_field); TrimWhitespace(out_field);
159 ierr = ComputeNodalAverage(user, in_field, out_field); CHKERRQ(ierr);
160 }
161 else if (strcasecmp(keyword, "ComputeQCriterion") == 0) {
162 ierr = ComputeQCriterion(user); CHKERRQ(ierr);
163 }
164 else if (strcasecmp(keyword, "NormalizeRelativeField") == 0) {
165 if (!args_str) SETERRQ(PETSC_COMM_SELF, 1, "NormalizePressure requires the pressure field name (e.g., 'P') as an argument.");
166 ierr = NormalizeRelativeField(user, args_str); CHKERRQ(ierr);
167 }
168 // *** Add new kernels here in the future using 'else if' ***
169 // else if (strcasecmp(keyword, "ComputeVorticity") == 0) { ... }
170 else {
171 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown transformation keyword '%s'. Skipping.\n", keyword);
172 }
173
174 step_token = strtok_r(NULL, ";", &step_saveptr);
175 }
176
177 ierr = PetscFree(pipeline_copy); CHKERRQ(ierr);
178 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Data Transformation Pipeline Complete ---\n");
180 PetscFunctionReturn(0);
181}
#define LOG(scope, level, fmt,...)
Logging macro for PETSc-based applications with scope control.
Definition logging.h:83
PetscErrorCode ComputeQCriterion(UserCtx *user)
Computes the Q-criterion diagnostic from the local velocity-gradient tensor.
PetscErrorCode NormalizeRelativeField(UserCtx *user, const char *relative_field_name)
Normalizes a relative scalar field using the configured reference pressure scale.
PetscErrorCode ComputeNodalAverage(UserCtx *user, const char *in_field_name, const char *out_field_name)
Interpolates a cell-centered field to nodal locations using local stencil averaging.
char process_pipeline[1024]
Definition variables.h:564
Here is the call graph for this function:
Here is the caller graph for this function:

◆ ParticleDataProcessingPipeline()

PetscErrorCode ParticleDataProcessingPipeline ( UserCtx user,
PostProcessParams pps 
)

Parses and executes the particle pipeline using a robust two-pass approach.

This function ensures correctness and efficiency by separating field registration from kernel execution.

PASS 1 (Registration): The pipeline string is parsed to identify all new fields that will be created. These fields are registered with the DMSwarm.

Finalize: After Pass 1, DMSwarmFinalizeFieldRegister is called exactly once if any new fields were added, preparing the swarm's memory layout.

PASS 2 (Execution): The pipeline string is parsed again, and this time the actual compute kernels are executed, filling the now-valid fields.

Parameters
userThe UserCtx containing the DMSwarm.
ppsThe PostProcessParams struct containing the particle_pipeline string.
Returns
PetscErrorCode

Parses and executes the particle pipeline using a robust two-pass approach.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/postprocessor.h.

See also
ParticleDataProcessingPipeline()

Definition at line 486 of file postprocessor.c.

487{
488 PetscErrorCode ierr;
489 char *pipeline_copy, *step_token, *step_saveptr;
490
491 PetscFunctionBeginUser;
492
494
495 if (pps->particle_pipeline[0] == '\0') {
496 PetscFunctionReturn(0);
497 }
498
499 // --- Timestep Setup: Synchronize post_swarm size ---
500 PetscInt n_global_source;
501 ierr = DMSwarmGetSize(user->swarm, &n_global_source); CHKERRQ(ierr);
502
503 // Resize post_swarm to match source swarm
504 ierr = ResizeSwarmGlobally(user->post_swarm, n_global_source); CHKERRQ(ierr);
505
506 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Starting Particle Data Transformation Pipeline ---\n");
507 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Particle Pipeline string: [%s]\n", pps->particle_pipeline);
508
509 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Executing compute kernels...\n");
510 ierr = PetscStrallocpy(pps->particle_pipeline, &pipeline_copy); CHKERRQ(ierr);
511 step_token = strtok_r(pipeline_copy, ";", &step_saveptr);
512 while (step_token) {
513 TrimWhitespace(step_token);
514 if (strlen(step_token) == 0) { step_token = strtok_r(NULL, ";", &step_saveptr); continue; }
515
516 char *keyword = strtok(step_token, ":");
517 char *args_str = strtok(NULL, "");
518 TrimWhitespace(keyword);
519 if (args_str) TrimWhitespace(args_str);
520
521 LOG_ALLOW(GLOBAL, LOG_INFO, "Executing Particle Transformation: '%s' on args: '%s'\n", keyword, args_str ? args_str : "None");
522
523 if (strcasecmp(keyword, "ComputeSpecificKE") == 0) {
524 if (!args_str) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "ComputeSpecificKE requires 'input_field>output_field' arguments.");
525 char *velocity_field = strtok(args_str, ">");
526 char *ske_field = strtok(NULL, ">");
527 if (!velocity_field || !ske_field) {
528 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "ComputeSpecificKE requires 'input_field>output_field' arguments.");
529 }
530 TrimWhitespace(velocity_field); TrimWhitespace(ske_field);
531 if (strlen(velocity_field) == 0 || strlen(ske_field) == 0) {
532 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "ComputeSpecificKE does not allow empty input/output field names.");
533 }
534
535 ierr = ComputeSpecificKE(user, velocity_field, ske_field); CHKERRQ(ierr);
536 }
537 else {
538 LOG_ALLOW(GLOBAL, LOG_WARNING, "Unknown particle transformation keyword '%s'. Skipping.\n", keyword);
539 }
540
541 step_token = strtok_r(NULL, ";", &step_saveptr);
542 }
543 ierr = PetscFree(pipeline_copy); CHKERRQ(ierr);
544
545 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Particle Data Transformation Pipeline Complete ---\n");
546
548 PetscFunctionReturn(0);
549}
PetscErrorCode ResizeSwarmGlobally(DM swarm, PetscInt N_target)
Resizes a swarm collectively to a target global particle count.
PetscErrorCode ComputeSpecificKE(UserCtx *user, const char *velocity_field, const char *ske_field)
Computes the specific kinetic energy (KE per unit mass) for each particle.
Here is the call graph for this function:
Here is the caller graph for this function:

◆ WriteParticleFile()

PetscErrorCode WriteParticleFile ( UserCtx user,
PostProcessParams pps,
PetscInt  ti 
)

Writes particle data to a VTP file using the Prepare-Write-Cleanup pattern.

Parameters
userPrimary UserCtx input for the operation.
ppsPost-processing configuration for the operation.
tiThe current time step index.
Returns
PetscErrorCode 0 on success.

Writes particle data to a VTP file using the Prepare-Write-Cleanup pattern.

Full API contract (arguments, ownership, side effects) is documented with the header declaration in include/postprocessor.h.

See also
WriteParticleFile()

Definition at line 606 of file postprocessor.c.

607{
608 PetscErrorCode ierr;
609 VTKMetaData part_meta;
610 char filename[MAX_FILENAME_LENGTH];
611 PetscInt n_total_particles_before_subsample;
612
613 PetscFunctionBeginUser;
615
616 // These checks can be done on all ranks
617 if (!pps->outputParticles || pps->particle_fields[0] == '\0') {
618 PetscFunctionReturn(0);
619 }
620 PetscInt n_global;
621 ierr = DMSwarmGetSize(user->swarm, &n_global); CHKERRQ(ierr);
622 if (n_global == 0) {
623 LOG_ALLOW(GLOBAL, LOG_DEBUG, "Swarm is empty for ti=%" PetscInt_FMT ". Skipping particle file write.\n", ti);
624 PetscFunctionReturn(0);
625 }
626
627 ierr = PetscMemzero(&part_meta, sizeof(VTKMetaData)); CHKERRQ(ierr);
628
629 // --- 1. PREPARE (Collective Call) ---
630 ierr = PrepareOutputParticleData(user, pps, &part_meta, &n_total_particles_before_subsample); CHKERRQ(ierr);
631
632 // --- 2. WRITE and CLEANUP (Rank 0 only) ---
633 if (user->simCtx->rank == 0) {
634 if (part_meta.npoints > 0) {
635 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Starting VTP Particle File Writing for ti = %" PetscInt_FMT " (writing %" PetscInt_FMT " of %" PetscInt_FMT " particles) ---\n",
636 ti, part_meta.npoints, n_total_particles_before_subsample);
637
638 /* Field summary */
639 LOG_ALLOW(GLOBAL, LOG_INFO, "Particle Data fields to write: %d\n", (int)part_meta.num_point_data_fields);
640 for (PetscInt ii=0; ii<part_meta.num_point_data_fields; ++ii) {
641 LOG_ALLOW(GLOBAL, LOG_INFO, " # %2" PetscInt_FMT " Field Name = %s Components = %d\n",
642 ii, part_meta.point_data_fields[ii].name, (int)part_meta.point_data_fields[ii].num_components);
643 }
644
645 ierr = PetscSNPrintf(filename, sizeof(filename), "%s_%05" PetscInt_FMT ".vtp", pps->particle_output_prefix, ti); CHKERRQ(ierr);
646 ierr = CreateVTKFileFromMetadata(filename, &part_meta, PETSC_COMM_WORLD); CHKERRQ(ierr);
647
648 } else {
649 LOG_ALLOW(GLOBAL, LOG_DEBUG, "No particles to write at ti=%" PetscInt_FMT " after subsampling. Skipping.\n", ti);
650 }
651
652 }
653
654 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Particle File Writing for ti = %" PetscInt_FMT " Complete ---\n", ti);
656 PetscFunctionReturn(0);
657}
char particle_output_prefix[256]
Definition variables.h:570
char particle_fields[1024]
Definition variables.h:569
PetscBool outputParticles
Definition variables.h:561
PetscErrorCode PrepareOutputParticleData(UserCtx *user, PostProcessParams *pps, VTKMetaData *meta, PetscInt *p_n_total)
Gathers, subsamples, and prepares all particle data for VTK output.
Definition vtk_io.c:422
Here is the call graph for this function:
Here is the caller graph for this function:

◆ GlobalStatisticsPipeline()

PetscErrorCode GlobalStatisticsPipeline ( UserCtx user,
PostProcessParams pps,
PetscInt  ti 
)

Executes the global statistics pipeline, computing aggregate reductions over all particles.

Parses the semicolon-delimited pps->statistics_pipeline string and dispatches to the appropriate kernel (e.g. ComputeParticleMSD). Each kernel appends one row to its own CSV file and logs a summary via LOG_INFO. All MPI reductions happen inside each kernel. This pipeline is independent of the per-particle VTK pipeline; it produces no .vtp output.

Parameters
userThe UserCtx containing the primary DMSwarm (user->swarm).
ppsThe PostProcessParams containing statistics_pipeline and statistics_output_prefix.
tiCurrent time-step index (passed through to kernels for time computation).
Returns
PetscErrorCode

Executes the global statistics pipeline, computing aggregate reductions over all particles.

Local to this translation unit.

Definition at line 557 of file postprocessor.c.

558{
559 PetscErrorCode ierr;
560 char *pipeline_copy, *step_token, *step_saveptr;
561
562 PetscFunctionBeginUser;
563
564 if (pps->statistics_pipeline[0] == '\0') PetscFunctionReturn(0);
565
566 PetscInt n_global;
567 ierr = DMSwarmGetSize(user->swarm, &n_global); CHKERRQ(ierr);
568 if (n_global == 0) PetscFunctionReturn(0);
569
570 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Starting Global Statistics Pipeline ---\n");
571
572 ierr = PetscStrallocpy(pps->statistics_pipeline, &pipeline_copy); CHKERRQ(ierr);
573 step_token = strtok_r(pipeline_copy, ";", &step_saveptr);
574 while (step_token) {
575 TrimWhitespace(step_token);
576 if (strlen(step_token) == 0) {
577 step_token = strtok_r(NULL, ";", &step_saveptr); continue;
578 }
579 char *keyword = strtok(step_token, ":");
580 TrimWhitespace(keyword);
581
582 if (strcasecmp(keyword, "ComputeMSD") == 0) {
583 ierr = ComputeParticleMSD(user, pps->statistics_output_prefix, ti); CHKERRQ(ierr);
584 } else {
586 "Unknown statistics keyword '%s'. Skipping.\n", keyword);
587 }
588 /* Additional kernels should add else-if branches here when implemented. */
589
590 step_token = strtok_r(NULL, ";", &step_saveptr);
591 }
592 ierr = PetscFree(pipeline_copy); CHKERRQ(ierr);
593
594 LOG_ALLOW(GLOBAL, LOG_INFO, "--- Global Statistics Pipeline Complete ---\n");
595 PetscFunctionReturn(0);
596}
PetscErrorCode ComputeParticleMSD(UserCtx *user, const char *stats_prefix, PetscInt ti)
Computes the mean-squared displacement (MSD) of a particle cloud.
char statistics_output_prefix[256]
basename for CSV output, e.g.
Definition variables.h:575
char statistics_pipeline[1024]
e.g.
Definition variables.h:574
Here is the call graph for this function:
Here is the caller graph for this function: