Actual source code: ex103.c

static char help[] = "Test CGNS parallel load-save-reload cycle, including data and DMLabels\n\n";
// This is a modification of src/dm/impls/plex/tutorials/ex15.c, with additional tests that do not make sense for a tutorial problem (such as verifying the "Face Sets" label)

#include <petscdmlabel.h>
#include <petscdmplex.h>
#include <petscsf.h>
#include <petscviewerhdf5.h>
#define EX "ex103.c"

typedef struct {
  char      infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char      outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscBool heterogeneous;               /* Test save on N / load on M */
  PetscInt  ntimes;                      /* Number of times to run the save-load cycle */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool infile_flg, outfile_flg;

  PetscFunctionBeginUser;
  options->infile[0]     = '\0';
  options->outfile[0]    = '\0';
  options->heterogeneous = PETSC_FALSE;
  options->ntimes        = 2;
  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsString("-infile", "The input CGNS file", EX, options->infile, options->infile, sizeof(options->infile), &infile_flg));
  PetscCall(PetscOptionsString("-outfile", "The output CGNS file", EX, options->outfile, options->outfile, sizeof(options->outfile), &outfile_flg));
  PetscCall(PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL));
  PetscCall(PetscOptionsInt("-ntimes", "Number of times to run the save-load cycle", EX, options->ntimes, &options->ntimes, NULL));
  PetscOptionsEnd();
  // Use one flag per option: with a single shared flag, a missing -infile went undetected whenever -outfile was given
  PetscCheck(infile_flg, comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
  PetscCheck(outfile_flg, comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
  PetscFunctionReturn(PETSC_SUCCESS);
}
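
/* Example invocation (a sketch; the mesh path is a placeholder -- the tests
   below use meshes from ${DATAFILESPATH}):

     mpiexec -n 4 ./ex103 -infile mesh.cgns -outfile mesh_out.cgns \
       -dm_plex_cgns_parallel -ntimes 3 -heterogeneous

   With -heterogeneous, one rank is dropped from the active communicator each
   cycle, so a file saved on N ranks is reloaded on N - 1 ranks. */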

// @brief Create DM from CGNS file and set up PetscFE to VecLoad the solution from that file
PetscErrorCode ReadCGNSDM(MPI_Comm comm, const char filename[], DM *dm)
{
  PetscInt degree;

  PetscFunctionBeginUser;
  PetscCall(DMPlexCreateFromFile(comm, filename, "ex15_plex", PETSC_TRUE, dm));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

  /* Redistribute */
  PetscCall(DMSetOptionsPrefix(*dm, "redistributed_"));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

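  /* The DM loaded from CGNS already carries a PetscFE (field 0); query its
     dual-space degree, then clear the fields so an equivalent 5-component FE
     can be attached below for VecLoad. */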
  { // Get degree of the natural section
    PetscFE        fe_natural;
    PetscDualSpace dual_space_natural;

    PetscCall(DMGetField(*dm, 0, NULL, (PetscObject *)&fe_natural));
    PetscCall(PetscFEGetDualSpace(fe_natural, &dual_space_natural));
    PetscCall(PetscDualSpaceGetOrder(dual_space_natural, &degree));
    PetscCall(DMClearFields(*dm));
    PetscCall(DMSetLocalSection(*dm, NULL));
  }

  { // Set up the FE used to load the initial condition data
    PetscFE  fe;
    PetscInt dim;

    PetscCall(DMGetDimension(*dm, &dim));
    PetscCall(PetscFECreateLagrange(PETSC_COMM_SELF, dim, 5, PETSC_FALSE, degree, PETSC_DETERMINE, &fe));
    PetscCall(PetscObjectSetName((PetscObject)fe, "FE for VecLoad"));
    PetscCall(DMAddField(*dm, NULL, (PetscObject)fe));
    PetscCall(DMCreateDS(*dm));
    PetscCall(PetscFEDestroy(&fe));
  }

  // Set section component names, used when writing out CGNS files
  PetscSection section;
  PetscCall(DMGetLocalSection(*dm, &section));
  PetscCall(PetscSectionSetFieldName(section, 0, ""));
  PetscCall(PetscSectionSetComponentName(section, 0, 0, "Pressure"));
  PetscCall(PetscSectionSetComponentName(section, 0, 1, "VelocityX"));
  PetscCall(PetscSectionSetComponentName(section, 0, 2, "VelocityY"));
  PetscCall(PetscSectionSetComponentName(section, 0, 3, "VelocityZ"));
  PetscCall(PetscSectionSetComponentName(section, 0, 4, "Temperature"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Verify that V_load is equivalent to V_serial, even if V_load is distributed
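// Approach: match loaded nodes to serial nodes by their coordinates with
// PetscSFSetGraphFromCoordinates, broadcast the loaded values into the serial
// layout on rank 0, then compare entrywise against V_serial.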
PetscErrorCode VerifyLoadedSolution(DM dm_serial, Vec V_serial, DM dm_load, Vec V_load, PetscReal tol)
{
  MPI_Comm     comm = PetscObjectComm((PetscObject)dm_load);
  PetscSF      load_to_serial_sf_;
  PetscScalar *array_load_bcast = NULL;
  PetscInt     num_comps        = 5;

  PetscFunctionBeginUser;
  { // Create SF to broadcast loaded vec nodes to serial vec nodes
    PetscInt           dim, num_local_serial = 0, num_local_load;
    Vec                coord_Vec_serial, coord_Vec_load;
    const PetscScalar *coord_serial = NULL, *coord_load;

    PetscCall(DMGetCoordinateDim(dm_load, &dim));
    PetscCall(DMGetCoordinates(dm_load, &coord_Vec_load));
    PetscCall(VecGetLocalSize(coord_Vec_load, &num_local_load));
    num_local_load /= dim;

    PetscCall(VecGetArrayRead(coord_Vec_load, &coord_load));

    if (dm_serial) {
      PetscCall(DMGetCoordinates(dm_serial, &coord_Vec_serial));
      PetscCall(VecGetLocalSize(coord_Vec_serial, &num_local_serial));
      num_local_serial /= dim;
      PetscCall(VecGetArrayRead(coord_Vec_serial, &coord_serial));
    }

    PetscCall(PetscSFCreate(comm, &load_to_serial_sf_));
    PetscCall(PetscSFSetGraphFromCoordinates(load_to_serial_sf_, num_local_load, num_local_serial, dim, 100 * PETSC_MACHINE_EPSILON, coord_load, coord_serial));
    PetscCall(PetscSFViewFromOptions(load_to_serial_sf_, NULL, "-verify_sf_view"));

    PetscCall(VecRestoreArrayRead(coord_Vec_load, &coord_load));
    if (dm_serial) PetscCall(VecRestoreArrayRead(coord_Vec_serial, &coord_serial));
  }

  { // Broadcast the loaded vector data into a serial-sized array
    PetscInt           size_local_serial = 0;
    const PetscScalar *array_load;
    MPI_Datatype       unit;

    PetscCall(VecGetArrayRead(V_load, &array_load));
    if (V_serial) {
      PetscCall(VecGetLocalSize(V_serial, &size_local_serial));
      PetscCall(PetscMalloc1(size_local_serial, &array_load_bcast));
    }

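    // Each node carries num_comps interleaved components, so move them as one
    // contiguous MPI datatype per node.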
    PetscCallMPI(MPI_Type_contiguous(num_comps, MPIU_REAL, &unit));
    PetscCallMPI(MPI_Type_commit(&unit));
    PetscCall(PetscSFBcastBegin(load_to_serial_sf_, unit, array_load, array_load_bcast, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(load_to_serial_sf_, unit, array_load, array_load_bcast, MPI_REPLACE));
    PetscCallMPI(MPI_Type_free(&unit));
    PetscCall(VecRestoreArrayRead(V_load, &array_load));
  }

  if (V_serial) {
    const PetscScalar *array_serial;
    PetscInt           size_local_serial;

    PetscCall(VecGetArrayRead(V_serial, &array_serial));
    PetscCall(VecGetLocalSize(V_serial, &size_local_serial));
    for (PetscInt i = 0; i < size_local_serial; i++) {
      if (PetscAbs(array_serial[i] - array_load_bcast[i]) > tol) PetscCall(PetscPrintf(comm, "DoF %" PetscInt_FMT " is inconsistent. Found %g, expected %g\n", i, (double)array_load_bcast[i], (double)array_serial[i]));
    }
    PetscCall(VecRestoreArrayRead(V_serial, &array_serial));
  }

  PetscCall(PetscFree(array_load_bcast));
  PetscCall(PetscSFDestroy(&load_to_serial_sf_));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Get centroids associated with every Plex point
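// The "centroid" of a point is here the average of the depth-0 (vertex)
// coordinates in its closure; it serves as a coordinate fingerprint for
// matching points between differently distributed DMs.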
PetscErrorCode DMPlexGetPointsCentroids(DM dm, PetscReal *points_centroids[])
{
  PetscInt     coords_dim, pStart, pEnd, depth = 0;
  PetscSection coord_section;
  Vec          coords_vec;
  PetscReal   *points_centroids_;

  PetscFunctionBeginUser;
  PetscCall(DMGetCoordinateDim(dm, &coords_dim));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetCoordinateSection(dm, &coord_section));
  PetscCall(DMGetCoordinatesLocal(dm, &coords_vec));

  PetscCall(PetscCalloc1((pEnd - pStart) * coords_dim, &points_centroids_));
  for (PetscInt p = pStart; p < pEnd; p++) {
    PetscScalar *coords = NULL;
    PetscInt     coords_size, num_coords;

    PetscCall(DMPlexVecGetClosureAtDepth(dm, coord_section, coords_vec, p, depth, &coords_size, &coords));
    num_coords = coords_size / coords_dim;
    // Index relative to pStart, since the array is sized for pEnd - pStart points
    for (PetscInt c = 0; c < num_coords; c++) {
      for (PetscInt d = 0; d < coords_dim; d++) points_centroids_[(p - pStart) * coords_dim + d] += PetscRealPart(coords[c * coords_dim + d]);
    }
    for (PetscInt d = 0; d < coords_dim; d++) points_centroids_[(p - pStart) * coords_dim + d] /= num_coords;
    PetscCall(DMPlexVecRestoreClosure(dm, coord_section, coords_vec, p, &coords_size, &coords));
  }
  *points_centroids = points_centroids_;
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Verify that the DMLabel in dm_serial is identical to that in dm_load
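// Approach: (1) broadcast the serial label's value set and compare it with the
// global value set of the loaded label; (2) build a point SF from centroid
// coordinates mapping serial points to loaded points; (3) for each value,
// gather per-point stratum membership from the loaded DM and check it against
// the serial stratum on rank 0.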
PetscErrorCode VerifyDMLabels(DM dm_serial, DM dm_load, const char label_name[], PetscSF *serial2loadPointSF)
{
  PetscMPIInt rank;
  MPI_Comm    comm              = PetscObjectComm((PetscObject)dm_load);
  PetscInt    num_values_serial = 0, dim;
  PetscInt   *values_serial     = NULL;
  DMLabel     label_serial      = NULL, label_load;

  PetscFunctionBeginUser;
  PetscCall(DMGetCoordinateDim(dm_load, &dim));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (dm_serial) { // Communicate valid label values to all ranks
    IS              serialValuesIS;
    const PetscInt *values_serial_is;

    PetscCheck(rank == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Rank with serial DM not rank 0");
    PetscCall(DMGetLabel(dm_serial, label_name, &label_serial));
    PetscCall(DMLabelGetNonEmptyStratumValuesIS(label_serial, &serialValuesIS));
    PetscCall(ISGetLocalSize(serialValuesIS, &num_values_serial));

    PetscCall(PetscMalloc1(num_values_serial, &values_serial));
    PetscCall(ISGetIndices(serialValuesIS, &values_serial_is));
    PetscCall(PetscArraycpy(values_serial, values_serial_is, num_values_serial));
    PetscCall(PetscSortInt(num_values_serial, values_serial));
    PetscCall(ISRestoreIndices(serialValuesIS, &values_serial_is));
    PetscCall(ISDestroy(&serialValuesIS));
  }
  PetscCallMPI(MPI_Bcast(&num_values_serial, 1, MPIU_INT, 0, comm));
  if (values_serial == NULL) PetscCall(PetscMalloc1(num_values_serial, &values_serial));
  PetscCallMPI(MPI_Bcast(values_serial, num_values_serial, MPIU_INT, 0, comm));

  IS              loadValuesIS;
  PetscInt        num_values_global;
  const PetscInt *values_global;
  PetscBool       are_values_same = PETSC_TRUE;

  PetscCall(DMGetLabel(dm_load, label_name, &label_load));
  PetscCall(DMLabelGetValueISGlobal(comm, label_load, PETSC_TRUE, &loadValuesIS));
  PetscCall(ISSort(loadValuesIS));
  PetscCall(ISGetLocalSize(loadValuesIS, &num_values_global));
  PetscCall(ISGetIndices(loadValuesIS, &values_global));
  if (num_values_serial != num_values_global) {
    PetscCall(PetscPrintf(comm, "DMLabel '%s': Number of nonempty values in serial DM (%" PetscInt_FMT ") not equal to number of values in global DM (%" PetscInt_FMT ")\n", label_name, num_values_serial, num_values_global));
    are_values_same = PETSC_FALSE;
  }
  PetscCall(PetscPrintf(comm, "DMLabel '%s': serial values:\n", label_name));
  PetscCall(PetscIntView(num_values_serial, values_serial, PETSC_VIEWER_STDOUT_(comm)));
  PetscCall(PetscPrintf(comm, "DMLabel '%s': global values:\n", label_name));
  PetscCall(PetscIntView(num_values_global, values_global, PETSC_VIEWER_STDOUT_(comm)));
  for (PetscInt i = 0; i < num_values_serial; i++) {
    PetscInt loc;
    PetscCall(PetscFindInt(values_serial[i], num_values_global, values_global, &loc));
    if (loc < 0) {
      PetscCall(PetscPrintf(comm, "DMLabel '%s': Label value %" PetscInt_FMT " in serial DM not found in global DM\n", label_name, values_serial[i]));
      are_values_same = PETSC_FALSE;
    }
  }
  PetscCheck(are_values_same, comm, PETSC_ERR_PLIB, "The values in DMLabel are not the same");
  PetscCall(PetscFree(values_serial));

  PetscSF  serial2loadPointSF_;
  PetscInt pStart, pEnd, pStartSerial = -1, pEndSerial = -2;
  PetscInt num_points_load, num_points_serial = 0;
  { // Create SF mapping serialDM points to loadDM points
    PetscReal *points_centroid_load, *points_centroid_serial = NULL;

    if (rank == 0) {
      PetscCall(DMPlexGetChart(dm_serial, &pStartSerial, &pEndSerial));
      num_points_serial = pEndSerial - pStartSerial;
      PetscCall(DMPlexGetPointsCentroids(dm_serial, &points_centroid_serial));
    }
    PetscCall(DMPlexGetChart(dm_load, &pStart, &pEnd));
    num_points_load = pEnd - pStart;
    PetscCall(DMPlexGetPointsCentroids(dm_load, &points_centroid_load));

    PetscCall(PetscSFCreate(comm, &serial2loadPointSF_));
    PetscCall(PetscSFSetGraphFromCoordinates(serial2loadPointSF_, num_points_serial, num_points_load, dim, 100 * PETSC_MACHINE_EPSILON, points_centroid_serial, points_centroid_load));
    PetscCall(PetscObjectSetName((PetscObject)serial2loadPointSF_, "Serial To Loaded DM Points SF"));
    PetscCall(PetscSFViewFromOptions(serial2loadPointSF_, NULL, "-verify_points_sf_view"));
    PetscCall(PetscFree(points_centroid_load));
    PetscCall(PetscFree(points_centroid_serial));
  }

  PetscSection pointSerialSection = NULL;
  PetscInt     npointMaskSerial   = 0;
  PetscBool   *pointMask, *pointMaskSerial = NULL;

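  /* Size a section by the root degree of each serial point: a point that is
     owned by several ranks of the loaded DM has several leaf copies, and the
     gathers below deliver one mask entry per copy. */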
  if (rank == 0) {
    const PetscInt *root_degree;
    PetscCall(PetscSectionCreate(PETSC_COMM_SELF, &pointSerialSection));
    PetscCall(PetscSectionSetChart(pointSerialSection, pStartSerial, pEndSerial));
    PetscCall(PetscSFComputeDegreeBegin(serial2loadPointSF_, &root_degree));
    PetscCall(PetscSFComputeDegreeEnd(serial2loadPointSF_, &root_degree));
    for (PetscInt p = 0; p < num_points_serial; p++) PetscCall(PetscSectionSetDof(pointSerialSection, p, root_degree[p]));
    PetscCall(PetscSectionSetUp(pointSerialSection));
    PetscCall(PetscSectionGetStorageSize(pointSerialSection, &npointMaskSerial));

    PetscCall(PetscMalloc1(npointMaskSerial, &pointMaskSerial));
  }
  PetscCall(PetscMalloc1(num_points_load, &pointMask));

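  /* For each label value: mark the loaded points in that stratum, gather the
     marks onto the serial points, and check them against the serial stratum. */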
  for (PetscInt v = 0; v < num_values_global; v++) {
    PetscInt value     = values_global[v];
    IS       stratumIS = NULL;

    if (pointMaskSerial) PetscCall(PetscArrayzero(pointMaskSerial, npointMaskSerial));
    PetscCall(PetscArrayzero(pointMask, num_points_load));
    PetscCall(DMLabelGetStratumIS(label_load, value, &stratumIS));
    if (stratumIS) {
      PetscInt        num_points;
      const PetscInt *points;

      PetscCall(ISGetLocalSize(stratumIS, &num_points));
      PetscCall(ISGetIndices(stratumIS, &points));
      for (PetscInt p = 0; p < num_points; p++) pointMask[points[p]] = PETSC_TRUE;
      PetscCall(ISRestoreIndices(stratumIS, &points));
      PetscCall(ISDestroy(&stratumIS));
    }
    // Use MPIU_BOOL, the MPI datatype matching PetscBool (MPI_C_BOOL describes C _Bool, which has a different size)
    PetscCall(PetscSFGatherBegin(serial2loadPointSF_, MPIU_BOOL, pointMask, pointMaskSerial));
    PetscCall(PetscSFGatherEnd(serial2loadPointSF_, MPIU_BOOL, pointMask, pointMaskSerial));

    if (rank == 0) {
      IS stratumIS = NULL;

      PetscCall(DMLabelGetStratumIS(label_serial, value, &stratumIS));
      if (stratumIS) {
        PetscInt        num_points;
        const PetscInt *points;

        PetscCall(ISSort(stratumIS));
        PetscCall(ISGetLocalSize(stratumIS, &num_points));
        PetscCall(ISGetIndices(stratumIS, &points));
        for (PetscInt p = 0; p < num_points_serial; p++) {
          PetscInt ndof, offset, loc;

          PetscCall(PetscSectionGetDof(pointSerialSection, p, &ndof));
          PetscCall(PetscSectionGetOffset(pointSerialSection, p, &offset));
          PetscCall(PetscFindInt(p, num_points, points, &loc));
          PetscBool serial_has_point = (PetscBool)(loc >= 0);

          for (PetscInt d = 0; d < ndof; d++) {
            if (serial_has_point != pointMaskSerial[offset + d]) PetscCall(PetscPrintf(comm, "DMLabel '%s': Serial and global DM disagree on point %" PetscInt_FMT " valid for label value %" PetscInt_FMT "\n", label_name, p, value));
          }
        }
        PetscCall(ISRestoreIndices(stratumIS, &points));
        PetscCall(ISDestroy(&stratumIS));
      }
    }
  }
  if (serial2loadPointSF && !*serial2loadPointSF) *serial2loadPointSF = serial2loadPointSF_;
  else PetscCall(PetscSFDestroy(&serial2loadPointSF_));

  PetscCall(ISRestoreIndices(loadValuesIS, &values_global));
  PetscCall(ISDestroy(&loadValuesIS));
  PetscCall(PetscSectionDestroy(&pointSerialSection));
  PetscCall(PetscFree(pointMaskSerial));
  PetscCall(PetscFree(pointMask));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  AppCtx      user;
  MPI_Comm    comm;
  PetscMPIInt gsize, grank, mycolor;
  PetscBool   flg;
  const char *infilename;
  DM          dm_serial = NULL;
  Vec         V_serial  = NULL;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));

  { // Read infile in serial
    PetscViewer viewer;
    PetscMPIInt gsize_serial;

    mycolor = grank == 0 ? 0 : 1;
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (grank == 0) {
      PetscCallMPI(MPI_Comm_size(comm, &gsize_serial));

      PetscCall(ReadCGNSDM(comm, user.infile, &dm_serial));
      PetscCall(DMSetOptionsPrefix(dm_serial, "serial_"));
      PetscCall(DMSetFromOptions(dm_serial));

      /* We just test/demonstrate whether the DM is distributed; this is not needed in application code */
      PetscCall(DMPlexIsDistributed(dm_serial, &flg));
      PetscCall(PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]));

      PetscCall(DMViewFromOptions(dm_serial, NULL, "-dm_view"));
      PetscCall(PetscViewerCGNSOpen(comm, user.infile, FILE_MODE_READ, &viewer));
      PetscCall(DMGetGlobalVector(dm_serial, &V_serial));
      PetscCall(VecLoad(V_serial, viewer));
      PetscCall(PetscViewerDestroy(&viewer));
    }
    // Every rank gets a communicator back from MPI_Comm_split, so free it on every rank, not just rank 0
    PetscCallMPI(MPI_Comm_free(&comm));
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  for (PetscInt i = 0; i < user.ntimes; i++) {
    if (i == 0) {
      /* Use infile for the initial load */
      infilename = user.infile;
    } else {
      /* Use outfile for all I/O except the very initial load */
      infilename = user.outfile;
    }

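    /* In heterogeneous mode, drop one rank from the active communicator each
       cycle, so a file saved on N ranks is reloaded on fewer ranks */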
    if (user.heterogeneous) {
      mycolor = (PetscMPIInt)(grank < (gsize - i) ? 0 : 1);
    } else {
      mycolor = (PetscMPIInt)0;
    }
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM          dm;
      Vec         V;
      PetscViewer viewer;
      PetscMPIInt comm_size;
      const char *name;
      PetscReal   time;
      PetscBool   set;

      PetscCallMPI(MPI_Comm_size(comm, &comm_size));
      PetscCall(PetscPrintf(comm, "Begin cycle %" PetscInt_FMT ", comm size %d\n", i, comm_size));

      // Load DM from CGNS file
      PetscCall(ReadCGNSDM(comm, infilename, &dm));
      PetscCall(DMSetOptionsPrefix(dm, "loaded_"));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      // Load solution from CGNS file
      PetscCall(PetscViewerCGNSOpen(comm, infilename, FILE_MODE_READ, &viewer));
      PetscCall(DMGetGlobalVector(dm, &V));
      PetscCall(PetscViewerCGNSSetSolutionIndex(viewer, 1));
      { // Test GetSolutionIndex, not needed in application code
        PetscInt solution_index;
        PetscCall(PetscViewerCGNSGetSolutionIndex(viewer, &solution_index));
        PetscCheck(solution_index == 1, comm, PETSC_ERR_ARG_INCOMP, "Returned solution index wrong.");
      }
      PetscCall(PetscViewerCGNSGetSolutionName(viewer, &name));
      PetscCall(PetscViewerCGNSGetSolutionTime(viewer, &time, &set));
      PetscCheck(set, comm, PETSC_ERR_RETURN, "Time data wasn't set!");
      PetscCall(PetscPrintf(comm, "Solution Name: %s, and time %g\n", name, (double)time));
      PetscCall(VecLoad(V, viewer));
      PetscCall(PetscViewerDestroy(&viewer));

      // Verify loaded solution against serial solution
      PetscCall(VerifyLoadedSolution(dm_serial, V_serial, dm, V, 100 * PETSC_MACHINE_EPSILON));

      // Verify DMLabel values against the serial DM
      PetscCall(VerifyDMLabels(dm_serial, dm, "Face Sets", NULL));

      { // Complete the label so that the writer must sort through non-face points
        DMLabel label;
        PetscCall(DMGetLabel(dm, "Face Sets", &label));
        PetscCall(DMPlexLabelComplete(dm, label));
      }

      // Write loaded solution to CGNS file
      PetscCall(PetscViewerCGNSOpen(comm, user.outfile, FILE_MODE_WRITE, &viewer));
      PetscCall(VecView(V, viewer));
      PetscCall(PetscViewerDestroy(&viewer));

      PetscCall(DMRestoreGlobalVector(dm, &V));
      PetscCall(DMDestroy(&dm));
      PetscCall(PetscPrintf(comm, "End   cycle %" PetscInt_FMT "\n--------\n", i));
    }
    PetscCallMPI(MPI_Comm_free(&comm));
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  if (V_serial && dm_serial) PetscCall(DMRestoreGlobalVector(dm_serial, &V_serial));
  PetscCall(DMDestroy(&dm_serial));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
  build:
    requires: cgns

  testset:
    suffix: cgns_3x3
    requires: !complex
    nsize: 4
    args: -infile ${DATAFILESPATH}/meshes/3x3_Q1.cgns -outfile 3x3_Q1_output.cgns -dm_plex_cgns_parallel -ntimes 3 -heterogeneous -serial_dm_view -loaded_dm_view -redistributed_dm_distribute
    test:
      # this partitioner should not shuffle anything, it should yield the same partitioning as the XDMF reader - added just for testing
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

  testset:
    suffix: cgns_2x2x2
    requires: !complex
    nsize: 4
    args: -infile ${DATAFILESPATH}/meshes/2x2x2_Q3_wave.cgns -outfile 2x2x2_Q3_wave_output.cgns -dm_plex_cgns_parallel -ntimes 3 -heterogeneous -serial_dm_view -loaded_dm_view -redistributed_dm_distribute
    test:
      # this partitioner should not shuffle anything, it should yield the same partitioning as the XDMF reader - added just for testing
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

  test:
    # This file is meant to explicitly have a special case where a partition completely surrounds a boundary element, but does not own it
    requires: !complex
    suffix: cgns_3x3_2
    args: -infile ${DATAFILESPATH}/meshes/3x3_Q1.cgns -outfile 3x3_Q1_output.cgns -dm_plex_cgns_parallel -serial_dm_view -loaded_dm_view -redistributed_dm_distribute -petscpartitioner_type simple
TEST*/