Actual source code: vecio.c
#define PETSCVEC_DLL
/*
   This file contains simple binary input routines for vectors.  The
   analogous output routines are within each vector implementation's
   VecView (with viewer types PETSC_VIEWER_BINARY)
 */
#include "petsc.h"
#include "petscsys.h"
#include "petscvec.h"
#include "private/vecimpl.h"
#if defined(PETSC_HAVE_PNETCDF)
#include "pnetcdf.h"
#endif
EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
EXTERN PetscErrorCode VecLoad_Netcdf(PetscViewer, Vec*);
EXTERN PetscErrorCode VecLoadIntoVector_Binary(PetscViewer, Vec);
EXTERN PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer, Vec);
/*@C
  VecLoad - Loads a vector that has been stored in binary format
  with VecView().

  Collective on PetscViewer

  Input Parameters:
+ viewer  - binary file viewer, obtained from PetscViewerBinaryOpen(), or
            NetCDF file viewer, obtained from PetscViewerNetcdfOpen()
- outtype - the type of vector, VECSEQ or VECMPI, or PETSC_NULL (which indicates
            using VECSEQ if the communicator in the viewer has size 1, and
            VECMPI otherwise)

  Output Parameter:
. newvec - the newly loaded vector

  Level: intermediate

  Notes:
  The input file must contain the full global vector, as
  written by the routine VecView().
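
  A minimal usage sketch (the file name "vec.dat" is assumed for
  illustration; it must contain a vector previously written with VecView()):
.vb
  PetscViewer viewer;
  Vec         x;
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);
  VecLoad(viewer,PETSC_NULL,&x);
  PetscViewerDestroy(viewer);
.ve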

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since VecLoad() and VecView() completely hide these details.
  But for anyone who's interested, the standard binary vector storage
  format is
.vb
  int         VEC_FILE_COOKIE
  int         number of rows
  PetscScalar *values of all entries
.ve

  Note for Cray users: the ints stored in the binary file are 32-bit
  integers, not 64-bit as they are represented in memory, so if you
  write your own routines to read/write these binary files from the Cray
  you need to adjust the integer sizes that you read in; see
  PetscBinaryRead() and PetscBinaryWrite() to see how this may be
  done.

  In addition, PETSc automatically does the byte swapping for
  machines that store the bytes reversed, e.g. DEC alpha, FreeBSD,
  Linux, Windows and the Paragon; thus if you write your own binary
  read/write routines you have to swap the bytes; see PetscBinaryRead()
  and PetscBinaryWrite() to see how this may be done.
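
  For example, a sketch of reading just the header fields on a single
  process (with the hypothetical file name "vec.dat") could look like:
.vb
  int      fd;
  PetscInt cookie,rows;
  PetscBinaryOpen("vec.dat",FILE_MODE_READ,&fd);
  PetscBinaryRead(fd,&cookie,1,PETSC_INT);  /* should equal VEC_FILE_COOKIE */
  PetscBinaryRead(fd,&rows,1,PETSC_INT);    /* global vector length */
  PetscBinaryClose(fd);
.ve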

  Concepts: vector^loading from file

.seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecLoad(PetscViewer viewer,VecType outtype,Vec *newvec)
{
  PetscTruth isbinary,flg;
  char       vtype[256];
  const char *prefix;
#if defined(PETSC_HAVE_PNETCDF)
  PetscTruth isnetcdf;
#endif

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
#if defined(PETSC_HAVE_PNETCDF)
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
  if ((!isbinary) && (!isnetcdf)) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");
#else
  if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
#endif

#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  VecInitializePackage(PETSC_NULL);
#endif

#if defined(PETSC_HAVE_PNETCDF)
  if (isnetcdf) {
    VecLoad_Netcdf(viewer,newvec);
  } else
#endif
  {
    Vec            factory;
    MPI_Comm       comm;
    PetscErrorCode (*r)(PetscViewer, VecType,Vec*);
    PetscMPIInt    size;

    PetscObjectGetOptionsPrefix((PetscObject)viewer,(const char**)&prefix);
    PetscOptionsGetString(prefix,"-vec_type",vtype,256,&flg);
    if (flg) {
      outtype = vtype;
    }
    PetscOptionsGetString(prefix,"-vecload_type",vtype,256,&flg);
    if (flg) {
      outtype = vtype;
    }
    PetscObjectGetComm((PetscObject)viewer,&comm);
    if (!outtype) {
      MPI_Comm_size(comm,&size);
      outtype = (size > 1) ? VECMPI : VECSEQ;
    }
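
    /* Create a throwaway vector of the requested type solely to look up its
       type-specific load routine; the vector returned to the caller is
       created by that routine. */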
    VecCreate(comm,&factory);
    VecSetSizes(factory,1,PETSC_DETERMINE);
    VecSetType(factory,outtype);
    r = factory->ops->load;
    VecDestroy(factory);
    if (!r) SETERRQ1(PETSC_ERR_SUP,"VecLoad is not supported for type: %s",outtype);
    (*r)(viewer,outtype,newvec);
  }
  return(0);
}

#if defined(PETSC_HAVE_PNETCDF)
PetscErrorCode VecLoad_Netcdf(PetscViewer viewer,Vec *newvec)
{
  PetscMPIInt rank;
  PetscInt    N,n,bs;
  PetscInt    ncid,start;
  Vec         vec;
  PetscScalar *avec;
  MPI_Comm    comm;
  PetscTruth  flag;
  char        name[NC_MAX_NAME];

  PetscLogEventBegin(VEC_Load,viewer,0,0,0);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  PetscViewerNetcdfGetID(viewer,&ncid);
  ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
  VecCreate(comm,&vec);
  VecSetSizes(vec,PETSC_DECIDE,N);
  if (!rank) {
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
  }
  VecSetFromOptions(vec);
  VecGetLocalSize(vec,&n);
  VecGetOwnershipRange(vec,&start,PETSC_NULL);
  VecGetArray(vec,&avec);
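  /* Collective parallel read: each process fills only its own slice
     [start, start+n) of variable 0 in the NetCDF file. */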
  ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double*)avec);
  VecRestoreArray(vec,&avec);
  *newvec = vec;
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,0,0,0);
  return(0);
}
#endif

PetscErrorCode VecLoad_Binary(PetscViewer viewer,VecType itype,Vec *newvec)
{
  PetscMPIInt    size,rank,tag;
  int            fd;
  PetscInt       i,rows,type,n,*range,bs;
  PetscErrorCode ierr,nierr;
  Vec            vec;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscTruth     flag;

  PetscLogEventBegin(VEC_Load,viewer,0,0,0);
  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);
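
  /* Process 0 reads the header and its own chunk directly from the file,
     then reads each remaining chunk and ships it to the owning process;
     every other process just receives its chunk. */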
  if (!rank) {
    /* Read vector header. */
    ierr = PetscBinaryRead(fd,&type,1,PETSC_INT);if (ierr) goto handleerror;
    if (type != VEC_FILE_COOKIE) {ierr = PETSC_ERR_ARG_WRONG; goto handleerror;}
    ierr = PetscBinaryRead(fd,&rows,1,PETSC_INT);if (ierr) goto handleerror;
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    VecCreate(comm,&vec);
    VecSetSizes(vec,PETSC_DECIDE,rows);
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    VecGetArray(vec,&avec);
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
    VecRestoreArray(vec,&avec);

    if (size > 1) {
      /* read in the other chunks and send them to the other processes */
      /* determine the maximum chunk owned by any other process */
      range = vec->map.range;
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avec);
      PetscObjectGetNewTag((PetscObject)viewer,&tag);
      for (i=1; i<size; i++) {
        n = range[i+1] - range[i];
        PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
        MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avec);
    }
  } else {
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    /* rows == -1 is a marker sent to indicate that the file does not have a vector at this location */
    if (rows == -1) {
      nierr = PetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(nierr);
      SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Error loading vector");
    }
    VecCreate(comm,&vec);
    VecSetSizes(vec,PETSC_DECIDE,rows);
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    PetscObjectGetNewTag((PetscObject)viewer,&tag);
    VecGetArray(vec,&avec);
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
    VecRestoreArray(vec,&avec);
  }
  *newvec = vec;
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,0,0,0);
  return(0);

  /* tell the other processes we've had an error; only used on process 0 */
  handleerror:
  if (PetscExceptionValue(ierr)) {
    nierr = PetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(nierr);
    rows = -1; nierr = MPI_Bcast(&rows,1,MPIU_INT,0,comm);CHKERRQ(nierr);
  }
  return(ierr);
}

PetscErrorCode VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
{
  PetscTruth isbinary;
#if defined(PETSC_HAVE_PNETCDF)
  PetscTruth isnetcdf;
#endif

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
#if defined(PETSC_HAVE_PNETCDF)
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
  if ((!isbinary) && (!isnetcdf)) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");
#else
  if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
#endif

#if defined(PETSC_HAVE_PNETCDF)
  if (isnetcdf) {
    VecLoadIntoVector_Netcdf(viewer,vec);
  } else
#endif
  {
    VecLoadIntoVector_Binary(viewer,vec);
  }
  return(0);
}

#if defined(PETSC_HAVE_PNETCDF)
PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer viewer,Vec vec)
{
  PetscMPIInt rank;
  PetscInt    N,rows,n,bs;
  PetscInt    ncid,start;
  PetscScalar *avec;
  MPI_Comm    comm;
  PetscTruth  flag;
  char        name[NC_MAX_NAME];

  PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  PetscViewerNetcdfGetID(viewer,&ncid);
  ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
  if (!rank) {
    VecGetSize(vec,&rows);
    if (N != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file is a different length than the input vector");
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
  }
  VecSetFromOptions(vec);
  VecGetLocalSize(vec,&n);
  VecGetOwnershipRange(vec,&start,PETSC_NULL);
  VecGetArray(vec,&avec);
  ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double*)avec);
  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
  return(0);
}
#endif

PetscErrorCode VecLoadIntoVector_Binary(PetscViewer viewer,Vec vec)
{
  PetscMPIInt size,rank,tag;
  PetscInt    i,rows,type,n,*range;
  int         fd;
  PetscScalar *avec;
  MPI_Comm    comm;
  MPI_Request request;
  MPI_Status  status;

  PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  if (!rank) {
    /* Read vector header. */
    PetscBinaryRead(fd,&type,1,PETSC_INT);
    if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
    PetscBinaryRead(fd,&rows,1,PETSC_INT);
    VecGetSize(vec,&n);
    if (n != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file is a different length than the input vector");
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    VecGetArray(vec,&avec);
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
    VecRestoreArray(vec,&avec);

    if (size > 1) {
      /* read in the other chunks and send them to the other processes */
      /* determine the maximum chunk owned by any other process */
      range = vec->map.range;
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avec);
      PetscObjectGetNewTag((PetscObject)viewer,&tag);
      for (i=1; i<size; i++) {
        n = range[i+1] - range[i];
        PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
        MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avec);
    }
  } else {
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    PetscObjectGetNewTag((PetscObject)viewer,&tag);
    VecGetArray(vec,&avec);
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
    VecRestoreArray(vec,&avec);
  }
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
  return(0);
}