Actual source code: grvtk.c
#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

/* Helper function which determines if any DMDA fields are named. This is used
   as a proxy for the user's intention to use DMDA fields as distinct
   scalar-valued fields as opposed to a single vector-valued field */
static PetscErrorCode DMDAGetFieldsNamed(DM da, PetscBool *fieldsnamed)
{
  PetscInt f, bs;

  *fieldsnamed = PETSC_FALSE;
  DMDAGetDof(da, &bs);
  for (f = 0; f < bs; ++f) {
    const char *fieldname;
    DMDAGetFieldName(da, f, &fieldname);
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  return 0;
}
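
/* Both writers below follow the same pattern: every rank contributes its local extent
   (xs, xm, ys, ym, zs, zm) to rank 0 through an MPI_Gather; rank 0 writes the XML header
   with one <Piece> per rank and precomputed appended-data offsets, and then receives each
   rank's coordinates and queued field vectors in turn, streaming them into the raw
   appended-data block at the end of the file. */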
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm comm;
  Vec Coords;
  PetscViewer_VTK *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE *fp;
  PetscMPIInt rank, size, tag;
  DMDALocalInfo info;
  PetscInt dim, mx, my, mz, cdim, bs, boffset, maxnnodes, maxbs, i, j, k, r;
  PetscInt rloc[6], (*grloc)[6] = NULL;
  PetscScalar *array, *array2;

  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);
  DMDAGetLocalInfo(da, &info);
  DMGetCoordinates(da, &Coords);
  if (Coords) {
    PetscInt csize;
    VecGetSize(Coords, &csize);
    cdim = csize / (mx * my * mz);
  } else {
    cdim = dim;
  }

  PetscFOpen(comm, vtk->filename, "wb", &fp);
  PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n");
  PetscFPrintf(comm, fp, "<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n", byte_order);
  PetscFPrintf(comm, fp, "  <StructuredGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1);

  if (rank == 0) PetscMalloc1(size * 6, &grloc);
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc, 6, MPIU_INT, &grloc[0][0], 6, MPIU_INT, 0, comm);

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs = 0;     /* Used for the temporary array size on rank 0 */
  boffset = 0;   /* Offset into binary file */
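  /* The offsets are only meaningful on rank 0, the only rank whose nnodes is nonzero and the
     only rank that prints the header. Each appended DataArray is stored as a byte-count header
     of size sizeof(int) followed by the raw scalars, which is why every increment below adds
     sizeof(int) on top of the payload size. */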
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1);
    PetscFPrintf(comm, fp, "      <Points>\n");
    PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, boffset);
    boffset += 3 * nnodes * sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm, fp, "      </Points>\n");

    PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n");
    for (link = vtk->link; link; link = link->next) {
      Vec X = (Vec)link->vec;
      PetscInt bs, f;
      DM daCurr;
      PetscBool fieldsnamed;
      const char *vecname = "Unnamed";

      VecGetDM(X, &daCurr);
      DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);
      maxbs = PetscMax(maxbs, bs);

      if (((PetscObject)X)->name || link != vtk->link) PetscObjectGetName((PetscObject)X, &vecname);

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      DMDAGetFieldsNamed(daCurr, &fieldsnamed);
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char buf[256];
          const char *fieldname;
          DMDAGetFieldName(daCurr, f, &fieldname);
          if (!fieldname) {
            PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f);
            fieldname = buf;
          }
          PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, fieldname, boffset);
          boffset += nnodes * sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, bs, boffset);
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm, fp, "      </PointData>\n");
    PetscFPrintf(comm, fp, "    </Piece>\n");
  }
  PetscFPrintf(comm, fp, "  </StructuredGrid>\n");
  PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm, fp, "_");

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscMalloc2(maxnnodes * PetscMax(3, maxbs), &array, maxnnodes * PetscMax(3, maxbs), &array2);
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      VecGetArrayRead(Coords, &coords);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array, nnodes * cdim, MPIU_SCALAR, r, tag, comm, &status);
          MPI_Get_count(&status, MPIU_SCALAR, &nn);
          PetscCheck(nn == nnodes * cdim, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
        } else PetscArraycpy(array, coords, nnodes * cdim);
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k = 0; k < zm; k++) {
          for (j = 0; j < ym; j++) {
            for (i = 0; i < xm; i++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array2[Iloc * 3 + 0] = array[Iloc * cdim + 0];
              array2[Iloc * 3 + 1] = cdim > 1 ? array[Iloc * cdim + 1] : 0.0;
              array2[Iloc * 3 + 2] = cdim > 2 ? array[Iloc * cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        MPI_Send((void *)coords, nnodes * cdim, MPIU_SCALAR, 0, tag, comm);
      }
      VecRestoreArrayRead(Coords, &coords);
    } else { /* Fabricate some coordinates using grid index */
      for (k = 0; k < zm; k++) {
        for (j = 0; j < ym; j++) {
          for (i = 0; i < xm; i++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[Iloc * 3 + 0] = xs + i;
            array2[Iloc * 3 + 1] = ys + j;
            array2[Iloc * 3 + 2] = zs + k;
          }
        }
      }
    }
    PetscViewerVTKFWrite(viewer, fp, array2, nnodes * 3, MPIU_SCALAR);

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt bs, f;
      DM daCurr;
      PetscBool fieldsnamed;
      VecGetDM(X, &daCurr);
      DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);
      VecGetArrayRead(X, &x);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status);
          MPI_Get_count(&status, MPIU_SCALAR, &nn);
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
        } else PetscArraycpy(array, x, nnodes * bs);

        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        DMDAGetFieldsNamed(daCurr, &fieldsnamed);
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc] = array[Iloc * bs + f];
                }
              }
            }
            PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR);
          }
        } else PetscViewerVTKFWrite(viewer, fp, array, bs * nnodes, MPIU_SCALAR);
      } else if (r == rank) MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm);
      VecRestoreArrayRead(X, &x);
    }
  }
  PetscFree2(array, array2);
  PetscFree(grloc);

  PetscFPrintf(comm, fp, "\n  </AppendedData>\n");
  PetscFPrintf(comm, fp, "</VTKFile>\n");
  PetscFClose(comm, fp);
  return 0;
}
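
/* The .vtr (rectilinear grid) writer below follows the same structure as the .vts writer
   above, but instead of a 3-component Position array per node it stores one 1-D coordinate
   array per axis (Xcoord, Ycoord, Zcoord), extracted from the DMDA coordinate vector along
   the i (j = k = 0), j (i = k = 0), and k (i = j = 0) lines of each rank's patch. */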
static PetscErrorCode DMDAVTKWriteAll_VTR(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm comm;
  PetscViewer_VTK *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE *fp;
  PetscMPIInt rank, size, tag;
  DMDALocalInfo info;
  PetscInt dim, mx, my, mz, boffset, maxnnodes, maxbs, i, j, k, r;
  PetscInt rloc[6], (*grloc)[6] = NULL;
  PetscScalar *array, *array2;

  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
  DMDAGetLocalInfo(da, &info);
  PetscFOpen(comm, vtk->filename, "wb", &fp);
  PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n");
  PetscFPrintf(comm, fp, "<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n", byte_order);
  PetscFPrintf(comm, fp, "  <RectilinearGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1);

  if (rank == 0) PetscMalloc1(size * 6, &grloc);
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc, 6, MPIU_INT, &grloc[0][0], 6, MPIU_INT, 0, comm);

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs = 0;     /* Used for the temporary array size on rank 0 */
  boffset = 0;   /* Offset into binary file */
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1);
    PetscFPrintf(comm, fp, "      <Coordinates>\n");
    PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Xcoord\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, boffset);
    boffset += xm * sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Ycoord\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, boffset);
    boffset += ym * sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Zcoord\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, boffset);
    boffset += zm * sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm, fp, "      </Coordinates>\n");
    PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n");
    for (link = vtk->link; link; link = link->next) {
      Vec X = (Vec)link->vec;
      PetscInt bs, f;
      DM daCurr;
      PetscBool fieldsnamed;
      const char *vecname = "Unnamed";

      VecGetDM(X, &daCurr);
      DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);
      maxbs = PetscMax(maxbs, bs);
      if (((PetscObject)X)->name || link != vtk->link) PetscObjectGetName((PetscObject)X, &vecname);

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      DMDAGetFieldsNamed(daCurr, &fieldsnamed);
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char buf[256];
          const char *fieldname;
          DMDAGetFieldName(daCurr, f, &fieldname);
          if (!fieldname) {
            PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f);
            fieldname = buf;
          }
          PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, fieldname, boffset);
          boffset += nnodes * sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, bs, boffset);
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm, fp, "      </PointData>\n");
    PetscFPrintf(comm, fp, "    </Piece>\n");
  }
  PetscFPrintf(comm, fp, "  </RectilinearGrid>\n");
  PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm, fp, "_");

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscMalloc2(PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array, PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array2);
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }
    { /* Write the coordinates */
      Vec Coords;

      DMGetCoordinates(da, &Coords);
      if (Coords) {
        const PetscScalar *coords;
        VecGetArrayRead(Coords, &coords);
        if (rank == 0) {
          if (r) {
            PetscMPIInt nn;
            MPI_Recv(array, xm + ym + zm, MPIU_SCALAR, r, tag, comm, &status);
            MPI_Get_count(&status, MPIU_SCALAR, &nn);
            PetscCheck(nn == xm + ym + zm, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
          } else {
            /* x coordinates */
            for (j = 0, k = 0, i = 0; i < xm; i++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[i] = coords[Iloc * dim + 0];
            }
            /* y coordinates */
            for (i = 0, k = 0, j = 0; j < ym; j++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
            }
            /* z coordinates */
            for (i = 0, j = 0, k = 0; k < zm; k++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j = 0, k = 0, i = 0; i < xm; i++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[i] = coords[Iloc * dim + 0];
          }
          for (i = 0, k = 0, j = 0; j < ym; j++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
          }
          for (i = 0, j = 0, k = 0; k < zm; k++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
          }
          MPI_Send((void *)array2, xm + ym + zm, MPIU_SCALAR, 0, tag, comm);
        }
        VecRestoreArrayRead(Coords, &coords);
      } else { /* Fabricate some coordinates using grid index */
        for (i = 0; i < xm; i++) array[i] = xs + i;
        for (j = 0; j < ym; j++) array[j + xm] = ys + j;
        for (k = 0; k < zm; k++) array[k + xm + ym] = zs + k;
      }
      if (rank == 0) {
        PetscViewerVTKFWrite(viewer, fp, &array[0], xm, MPIU_SCALAR);
        PetscViewerVTKFWrite(viewer, fp, &array[xm], ym, MPIU_SCALAR);
        PetscViewerVTKFWrite(viewer, fp, &array[xm + ym], zm, MPIU_SCALAR);
      }
    }

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt bs, f;
      DM daCurr;
      PetscBool fieldsnamed;
      VecGetDM(X, &daCurr);
      DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);

      VecGetArrayRead(X, &x);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status);
          MPI_Get_count(&status, MPIU_SCALAR, &nn);
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
        } else PetscArraycpy(array, x, nnodes * bs);
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        DMDAGetFieldsNamed(daCurr, &fieldsnamed);
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc] = array[Iloc * bs + f];
                }
              }
            }
            PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR);
          }
        } else PetscViewerVTKFWrite(viewer, fp, array, nnodes * bs, MPIU_SCALAR);
      } else if (r == rank) {
        MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm);
      }
      VecRestoreArrayRead(X, &x);
    }
  }
  PetscFree2(array, array2);
  PetscFree(grloc);

  PetscFPrintf(comm, fp, "\n  </AppendedData>\n");
  PetscFPrintf(comm, fp, "</VTKFile>\n");
  PetscFClose(comm, fp);
  return 0;
}

/*@C
  DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

  Collective

  Input Parameters:
+ odm    - `DMDA` specifying the grid layout, passed as a `PetscObject`
- viewer - viewer of type `PETSCVIEWERVTK`

  Level: developer

  Notes:
  This function is a callback used by the VTK viewer to actually write the file.
  The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
  Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

  If any fields have been named (see e.g. `DMDASetFieldName()`), then individual scalar
  fields are written. Otherwise, a single multi-dof (vector) field is written.

.seealso: `DMDA`, `DM`, `PETSCVIEWERVTK`
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm, PetscViewer viewer)
{
  DM dm = (DM)odm;
  PetscBool isvtk;

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk);
  PetscCheck(isvtk, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_INCOMP, "Cannot use viewer type %s", ((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    DMDAVTKWriteAll_VTS(dm, viewer);
    break;
  case PETSC_VIEWER_VTK_VTR:
    DMDAVTKWriteAll_VTR(dm, viewer);
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No support for format '%s'", PetscViewerFormats[viewer->format]);
  }
  return 0;
}
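
/*
   Typical usage sketch (the DMDA `da`, the global vector `X`, and the field/file names are
   illustrative, not part of this file). The VTK viewer queues each viewed vector and invokes
   DMDAVTKWriteAll() when it is flushed or destroyed:

     PetscViewer viewer;
     PetscCall(DMDASetFieldName(da, 0, "pressure"));   // named fields are written as separate scalar arrays
     PetscCall(PetscViewerVTKOpen(PETSC_COMM_WORLD, "solution.vts", FILE_MODE_WRITE, &viewer));
     PetscCall(VecView(X, viewer));                    // queues X on the viewer
     PetscCall(PetscViewerDestroy(&viewer));           // flushes the viewer and writes solution.vts
*/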