Actual source code: pdvec.c
1: #define PETSCVEC_DLL
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include "src/vec/vec/impls/mpi/pvecimpl.h"
6: #if defined(PETSC_HAVE_PNETCDF)
8: #include "pnetcdf.h"
10: #endif
14: PetscErrorCode VecDestroy_MPI(Vec v)
15: {
16: Vec_MPI *x = (Vec_MPI*)v->data;
20: #if defined(PETSC_USE_LOG)
21: PetscLogObjectState((PetscObject)v,"Length=%D",v->map.N);
22: #endif
23: PetscFree(x->array_allocated);
25: /* Destroy local representation of vector if it exists */
26: if (x->localrep) {
27: VecDestroy(x->localrep);
28: if (x->localupdate) {VecScatterDestroy(x->localupdate);}
29: }
30: /* Destroy the stashes: note the order - so that the tags are freed properly */
31: VecStashDestroy_Private(&v->bstash);
32: VecStashDestroy_Private(&v->stash);
33: PetscFree(x);
34: return(0);
35: }
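
/*
   Illustrative sketch, not part of pdvec.c: the create/destroy cycle that ends up
   in VecDestroy_MPI() above.  The global length 100 and the function name are
   invented for the example, and the 2.3-era calling sequence (VecDestroy taking
   the Vec itself, as used on x->localrep above) is assumed.
*/
static PetscErrorCode ExampleCreateAndDestroyMPIVec(MPI_Comm comm)
{
  PetscErrorCode ierr;
  Vec            v;

  ierr = VecCreateMPI(comm,PETSC_DECIDE,100,&v);CHKERRQ(ierr); /* PETSc picks the local length */
  ierr = VecDestroy(v);CHKERRQ(ierr);                          /* frees the array, local rep, and both stashes */
  return(0);
}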
39: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
40: {
41: PetscErrorCode ierr;
42: PetscInt i,work = xin->map.n,cnt,len;
43: PetscMPIInt j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
44: MPI_Status status;
45: PetscScalar *values,*xarray;
46: const char *name;
47: PetscViewerFormat format;
50: VecGetArray(xin,&xarray);
51: /* determine maximum message to arrive */
52: MPI_Comm_rank(xin->comm,&rank);
53: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
54: MPI_Comm_size(xin->comm,&size);
56: if (!rank) {
57: PetscMalloc((len+1)*sizeof(PetscScalar),&values);
58: PetscViewerGetFormat(viewer,&format);
59: /*
60: Matlab format and ASCII format are very similar except
61: Matlab uses %18.16e format while ASCII uses %g
62: */
63: if (format == PETSC_VIEWER_ASCII_MATLAB) {
64: PetscObjectGetName((PetscObject)xin,&name);
65: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
66: for (i=0; i<xin->map.n; i++) {
67: #if defined(PETSC_USE_COMPLEX)
68: if (PetscImaginaryPart(xarray[i]) > 0.0) {
69: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
70: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
71: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
72: } else {
73: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
74: }
75: #else
76: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
77: #endif
78: }
79: /* receive and print messages */
80: for (j=1; j<size; j++) {
81: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
82: MPI_Get_count(&status,MPIU_SCALAR,&n);
83: for (i=0; i<n; i++) {
84: #if defined(PETSC_USE_COMPLEX)
85: if (PetscImaginaryPart(values[i]) > 0.0) {
86: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
87: } else if (PetscImaginaryPart(values[i]) < 0.0) {
88: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
89: } else {
90: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
91: }
92: #else
93: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
94: #endif
95: }
96: }
97: PetscViewerASCIIPrintf(viewer,"];\n");
99: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
100: for (i=0; i<xin->map.n; i++) {
101: #if defined(PETSC_USE_COMPLEX)
102: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
103: #else
104: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
105: #endif
106: }
107: /* receive and print messages */
108: for (j=1; j<size; j++) {
109: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
110: MPI_Get_count(&status,MPIU_SCALAR,&n);
111: for (i=0; i<n; i++) {
112: #if defined(PETSC_USE_COMPLEX)
113: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
114: #else
115: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
116: #endif
117: }
118: }
119: } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
120: PetscInt bs, b;
122: VecGetLocalSize(xin, &n);
123: VecGetBlockSize(xin, &bs);
124: if ((bs < 1) || (bs > 3)) {
125: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
126: }
127: if (format == PETSC_VIEWER_ASCII_VTK) {
128: PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map.N);
129: } else {
130: PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map.N);
131: }
132: PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
133: PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
134: for (i=0; i<n/bs; i++) {
135: for (b=0; b<bs; b++) {
136: if (b > 0) {
137: PetscViewerASCIIPrintf(viewer," ");
138: }
139: #if !defined(PETSC_USE_COMPLEX)
140: PetscViewerASCIIPrintf(viewer,"%G",xarray[i*bs+b]);
141: #endif
142: }
143: PetscViewerASCIIPrintf(viewer,"\n");
144: }
145: for (j=1; j<size; j++) {
146: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
147: MPI_Get_count(&status,MPIU_SCALAR,&n);
148: for (i=0; i<n/bs; i++) {
149: for (b=0; b<bs; b++) {
150: if (b > 0) {
151: PetscViewerASCIIPrintf(viewer," ");
152: }
153: #if !defined(PETSC_USE_COMPLEX)
154: PetscViewerASCIIPrintf(viewer,"%G",values[i*bs+b]);
155: #endif
156: }
157: PetscViewerASCIIPrintf(viewer,"\n");
158: }
159: }
160: } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
161: PetscInt bs, b;
163: VecGetLocalSize(xin, &n);
164: VecGetBlockSize(xin, &bs);
165: if ((bs < 1) || (bs > 3)) {
166: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
167: }
168: for (i=0; i<n/bs; i++) {
169: for (b=0; b<bs; b++) {
170: if (b > 0) {
171: PetscViewerASCIIPrintf(viewer," ");
172: }
173: #if !defined(PETSC_USE_COMPLEX)
174: PetscViewerASCIIPrintf(viewer,"%G",xarray[i*bs+b]);
175: #endif
176: }
177: for (b=bs; b<3; b++) {
178: PetscViewerASCIIPrintf(viewer," 0.0");
179: }
180: PetscViewerASCIIPrintf(viewer,"\n");
181: }
182: for (j=1; j<size; j++) {
183: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
184: MPI_Get_count(&status,MPIU_SCALAR,&n);
185: for (i=0; i<n/bs; i++) {
186: for (b=0; b<bs; b++) {
187: if (b > 0) {
188: PetscViewerASCIIPrintf(viewer," ");
189: }
190: #if !defined(PETSC_USE_COMPLEX)
191: PetscViewerASCIIPrintf(viewer,"%G",values[i*bs+b]);
192: #endif
193: }
194: for (b=bs; b<3; b++) {
195: PetscViewerASCIIPrintf(viewer," 0.0");
196: }
197: PetscViewerASCIIPrintf(viewer,"\n");
198: }
199: }
200: } else if (format == PETSC_VIEWER_ASCII_PCICE) {
201: PetscInt bs, b, vertexCount = 1;
203: VecGetLocalSize(xin, &n);
204: VecGetBlockSize(xin, &bs);
205: if ((bs < 1) || (bs > 3)) {
206: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
207: }
208: PetscViewerASCIIPrintf(viewer,"%D\n", xin->map.N/bs);
209: for (i=0; i<n/bs; i++) {
210: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
211: for (b=0; b<bs; b++) {
212: if (b > 0) {
213: PetscViewerASCIIPrintf(viewer," ");
214: }
215: #if !defined(PETSC_USE_COMPLEX)
216: PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
217: #endif
218: }
219: PetscViewerASCIIPrintf(viewer,"\n");
220: }
221: for (j=1; j<size; j++) {
222: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
223: MPI_Get_count(&status,MPIU_SCALAR,&n);
224: for (i=0; i<n/bs; i++) {
225: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
226: for (b=0; b<bs; b++) {
227: if (b > 0) {
228: PetscViewerASCIIPrintf(viewer," ");
229: }
230: #if !defined(PETSC_USE_COMPLEX)
231: PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
232: #endif
233: }
234: PetscViewerASCIIPrintf(viewer,"\n");
235: }
236: }
237: } else if (format == PETSC_VIEWER_ASCII_PYLITH) {
238: PetscInt bs, b, vertexCount = 1;
240: VecGetLocalSize(xin, &n);
241: VecGetBlockSize(xin, &bs);
242: if (bs != 3) {
243: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PyLith can only handle 3D objects, but vector dimension is %d", bs);
244: }
245: PetscViewerASCIIPrintf(viewer,"#\n");
246: PetscViewerASCIIPrintf(viewer,"# Node X-coord Y-coord Z-coord\n");
247: PetscViewerASCIIPrintf(viewer,"#\n");
248: for (i=0; i<n/bs; i++) {
249: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
250: for (b=0; b<bs; b++) {
251: if (b > 0) {
252: PetscViewerASCIIPrintf(viewer," ");
253: }
254: #if !defined(PETSC_USE_COMPLEX)
255: PetscViewerASCIIPrintf(viewer,"% 16.8E",xarray[i*bs+b]);
256: #endif
257: }
258: PetscViewerASCIIPrintf(viewer,"\n");
259: }
260: for (j=1; j<size; j++) {
261: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
262: MPI_Get_count(&status,MPIU_SCALAR,&n);
263: for (i=0; i<n/bs; i++) {
264: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
265: for (b=0; b<bs; b++) {
266: if (b > 0) {
267: PetscViewerASCIIPrintf(viewer," ");
268: }
269: #if !defined(PETSC_USE_COMPLEX)
270: PetscViewerASCIIPrintf(viewer,"% 16.8E",values[i*bs+b]);
271: #endif
272: }
273: PetscViewerASCIIPrintf(viewer,"\n");
274: }
275: }
276: } else {
277: if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
278: cnt = 0;
279: for (i=0; i<xin->map.n; i++) {
280: if (format == PETSC_VIEWER_ASCII_INDEX) {
281: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
282: }
283: #if defined(PETSC_USE_COMPLEX)
284: if (PetscImaginaryPart(xarray[i]) > 0.0) {
285: PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
286: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
287: PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
288: } else {
289: PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(xarray[i]));
290: }
291: #else
292: PetscViewerASCIIPrintf(viewer,"%G\n",xarray[i]);
293: #endif
294: }
295: /* receive and print messages */
296: for (j=1; j<size; j++) {
297: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
298: MPI_Get_count(&status,MPIU_SCALAR,&n);
299: if (format != PETSC_VIEWER_ASCII_COMMON) {
300: PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
301: }
302: for (i=0; i<n; i++) {
303: if (format == PETSC_VIEWER_ASCII_INDEX) {
304: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
305: }
306: #if defined(PETSC_USE_COMPLEX)
307: if (PetscImaginaryPart(values[i]) > 0.0) {
308: PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
309: } else if (PetscImaginaryPart(values[i]) < 0.0) {
310: PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
311: } else {
312: PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(values[i]));
313: }
314: #else
315: PetscViewerASCIIPrintf(viewer,"%G\n",values[i]);
316: #endif
317: }
318: }
319: }
320: PetscFree(values);
321: } else {
322: /* send values */
323: MPI_Send(xarray,xin->map.n,MPIU_SCALAR,0,tag,xin->comm);
324: }
325: PetscViewerFlush(viewer);
326: VecRestoreArray(xin,&xarray);
327: return(0);
328: }
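
/*
   Illustrative sketch, not part of the original file: one way a caller could reach
   the PETSC_VIEWER_ASCII_MATLAB branch of VecView_MPI_ASCII above.  The file name
   "x.m", the object name "x", and the function name are invented, and the 2.3-era
   calling sequences (PetscViewerSetFormat, PetscViewerDestroy taking the viewer
   itself) are assumed.
*/
static PetscErrorCode ExampleViewMPIVecAsMatlab(Vec x,MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  ierr = PetscViewerASCIIOpen(comm,"x.m",&viewer);CHKERRQ(ierr);
  ierr = PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)x,"x");CHKERRQ(ierr);  /* printed as "x = [" */
  ierr = VecView(x,viewer);CHKERRQ(ierr);                       /* rank 0 receives and prints every value */
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  return(0);
}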
332: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
333: {
335: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag,n;
336: PetscInt len,work = xin->map.n,j;
337: int fdes;
338: MPI_Status status;
339: PetscScalar *values,*xarray;
340: FILE *file;
343: VecGetArray(xin,&xarray);
344: PetscViewerBinaryGetDescriptor(viewer,&fdes);
346: /* determine maximum message to arrive */
347: MPI_Comm_rank(xin->comm,&rank);
348: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
349: MPI_Comm_size(xin->comm,&size);
351: if (!rank) {
352: PetscInt cookie = VEC_FILE_COOKIE;
353: PetscBinaryWrite(fdes,&cookie,1,PETSC_INT,PETSC_FALSE);
354: PetscBinaryWrite(fdes,&xin->map.N,1,PETSC_INT,PETSC_FALSE);
355: PetscBinaryWrite(fdes,xarray,xin->map.n,PETSC_SCALAR,PETSC_FALSE);
357: PetscMalloc((len+1)*sizeof(PetscScalar),&values);
358: /* receive and print messages */
359: for (j=1; j<size; j++) {
360: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
361: MPI_Get_count(&status,MPIU_SCALAR,&n);
362: PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_FALSE);
363: }
364: PetscFree(values);
365: PetscViewerBinaryGetInfoPointer(viewer,&file);
366: if (file && xin->map.bs > 1) {
367: if (xin->prefix) {
368: PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",xin->prefix,xin->map.bs);
369: } else {
370: PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map.bs);
371: }
372: }
373: } else {
374: /* send values */
375: MPI_Send(xarray,xin->map.n,MPIU_SCALAR,0,tag,xin->comm);
376: }
377: VecRestoreArray(xin,&xarray);
378: return(0);
379: }
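
/*
   Illustrative sketch, not part of the original file: dumping a parallel vector
   with the binary viewer, which drives VecView_MPI_Binary above.  The file name
   "vec.dat" and the function name are invented; FILE_MODE_WRITE and the 2.3-era
   PetscViewerDestroy calling sequence are assumed.
*/
static PetscErrorCode ExampleDumpMPIVecBinary(Vec x,MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  ierr = PetscViewerBinaryOpen(comm,"vec.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);  /* rank 0 writes the cookie, the global size, then all values */
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  return(0);
}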
383: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
384: {
385: PetscDraw draw;
386: PetscTruth isnull;
389: #if defined(PETSC_USE_64BIT_INDICES)
391: PetscViewerDrawGetDraw(viewer,0,&draw);
392: PetscDrawIsNull(draw,&isnull);
393: if (isnull) return(0);
394: SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
395: #else
396: PetscMPIInt size,rank;
397: int i,N = xin->map.N,*lens;
398: PetscReal *xx,*yy;
399: PetscDrawLG lg;
400: PetscScalar *xarray;
403: PetscViewerDrawGetDraw(viewer,0,&draw);
404: PetscDrawIsNull(draw,&isnull);
405: if (isnull) return(0);
407: VecGetArray(xin,&xarray);
408: PetscViewerDrawGetDrawLG(viewer,0,&lg);
409: PetscDrawCheckResizedWindow(draw);
410: MPI_Comm_rank(xin->comm,&rank);
411: MPI_Comm_size(xin->comm,&size);
412: if (!rank) {
413: PetscDrawLGReset(lg);
414: PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
415: for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
416: yy = xx + N;
417: PetscMalloc(size*sizeof(PetscInt),&lens);
418: for (i=0; i<size; i++) {
419: lens[i] = xin->map.range[i+1] - xin->map.range[i];
420: }
421: #if !defined(PETSC_USE_COMPLEX)
422: MPI_Gatherv(xarray,xin->map.n,MPIU_REAL,yy,lens,xin->map.range,MPIU_REAL,0,xin->comm);
423: #else
424: {
425: PetscReal *xr;
426: PetscMalloc((xin->map.n+1)*sizeof(PetscReal),&xr);
427: for (i=0; i<xin->map.n; i++) {
428: xr[i] = PetscRealPart(xarray[i]);
429: }
430: MPI_Gatherv(xr,xin->map.n,MPIU_REAL,yy,lens,xin->map.range,MPIU_REAL,0,xin->comm);
431: PetscFree(xr);
432: }
433: #endif
434: PetscFree(lens);
435: PetscDrawLGAddPoints(lg,N,&xx,&yy);
436: PetscFree(xx);
437: } else {
438: #if !defined(PETSC_USE_COMPLEX)
439: MPI_Gatherv(xarray,xin->map.n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
440: #else
441: {
442: PetscReal *xr;
443: PetscMalloc((xin->map.n+1)*sizeof(PetscReal),&xr);
444: for (i=0; i<xin->map.n; i++) {
445: xr[i] = PetscRealPart(xarray[i]);
446: }
447: MPI_Gatherv(xr,xin->map.n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
448: PetscFree(xr);
449: }
450: #endif
451: }
452: PetscDrawLGDraw(lg);
453: PetscDrawSynchronizedFlush(draw);
454: VecRestoreArray(xin,&xarray);
455: #endif
456: return(0);
457: }
460: /* I am assuming this is extern "C" because it is dynamically loaded. If not, we can remove the DLLEXPORT tag */
463: PetscErrorCode PETSCVEC_DLLEXPORT VecView_MPI_Draw(Vec xin,PetscViewer viewer)
464: {
466: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
467: PetscInt i,start,end;
468: MPI_Status status;
469: PetscReal coors[4],ymin,ymax,xmin,xmax,tmp;
470: PetscDraw draw;
471: PetscTruth isnull;
472: PetscDrawAxis axis;
473: PetscScalar *xarray;
476: PetscViewerDrawGetDraw(viewer,0,&draw);
477: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
479: VecGetArray(xin,&xarray);
480: PetscDrawCheckResizedWindow(draw);
481: xmin = 1.e20; xmax = -1.e20;
482: for (i=0; i<xin->map.n; i++) {
483: #if defined(PETSC_USE_COMPLEX)
484: if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
485: if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
486: #else
487: if (xarray[i] < xmin) xmin = xarray[i];
488: if (xarray[i] > xmax) xmax = xarray[i];
489: #endif
490: }
491: if (xmin + 1.e-10 > xmax) {
492: xmin -= 1.e-5;
493: xmax += 1.e-5;
494: }
495: MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,xin->comm);
496: MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,xin->comm);
497: MPI_Comm_size(xin->comm,&size);
498: MPI_Comm_rank(xin->comm,&rank);
499: PetscDrawAxisCreate(draw,&axis);
500: PetscLogObjectParent(draw,axis);
501: if (!rank) {
502: PetscDrawClear(draw);
503: PetscDrawFlush(draw);
504: PetscDrawAxisSetLimits(axis,0.0,(double)xin->map.N,ymin,ymax);
505: PetscDrawAxisDraw(axis);
506: PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
507: }
508: PetscDrawAxisDestroy(axis);
509: MPI_Bcast(coors,4,MPIU_REAL,0,xin->comm);
510: if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
511: /* draw local part of vector */
512: VecGetOwnershipRange(xin,&start,&end);
513: if (rank < size-1) { /*send value to right */
514: MPI_Send(&xarray[xin->map.n-1],1,MPIU_REAL,rank+1,tag,xin->comm);
515: }
516: for (i=1; i<xin->map.n; i++) {
517: #if !defined(PETSC_USE_COMPLEX)
518: PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
519: xarray[i],PETSC_DRAW_RED);
520: #else
521: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
522: PetscRealPart(xarray[i]),PETSC_DRAW_RED);
523: #endif
524: }
525: if (rank) { /* receive value from left */
526: MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,xin->comm,&status);
527: #if !defined(PETSC_USE_COMPLEX)
528: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
529: #else
530: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
531: #endif
532: }
533: PetscDrawSynchronizedFlush(draw);
534: PetscDrawPause(draw);
535: VecRestoreArray(xin,&xarray);
536: return(0);
537: }
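
/*
   Illustrative sketch, not part of the original file: selecting between the
   line-graph path (VecView_MPI_Draw_LG) and the direct line-drawing path
   (VecView_MPI_Draw) above.  The window title, size, and function name are
   invented; the 2.3-era PetscViewerDrawOpen/PetscViewerSetFormat calling
   sequences are assumed.
*/
static PetscErrorCode ExampleDrawMPIVec(Vec x,MPI_Comm comm,PetscTruth as_line_graph)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  ierr = PetscViewerDrawOpen(comm,0,"Vec",PETSC_DECIDE,PETSC_DECIDE,300,300,&viewer);CHKERRQ(ierr);
  if (as_line_graph) {
    ierr = PetscViewerSetFormat(viewer,PETSC_VIEWER_DRAW_LG);CHKERRQ(ierr); /* routes to VecView_MPI_Draw_LG */
  }
  ierr = VecView(x,viewer);CHKERRQ(ierr);                                   /* default routes to VecView_MPI_Draw */
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  return(0);
}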
540: #if defined(PETSC_USE_SOCKET_VIEWER)
543: PetscErrorCode VecView_MPI_Socket(Vec xin,PetscViewer viewer)
544: {
545: #if defined(PETSC_USE_64BIT_INDICES)
547: SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
548: #else
550: PetscMPIInt rank,size;
551: int i,N = xin->map.N,*lens;
552: PetscScalar *xx,*xarray;
555: VecGetArray(xin,&xarray);
556: MPI_Comm_rank(xin->comm,&rank);
557: MPI_Comm_size(xin->comm,&size);
558: if (!rank) {
559: PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
560: PetscMalloc(size*sizeof(PetscInt),&lens);
561: for (i=0; i<size; i++) {
562: lens[i] = xin->map.range[i+1] - xin->map.range[i];
563: }
564: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,xx,lens,xin->map.range,MPIU_SCALAR,0,xin->comm);
565: PetscFree(lens);
566: PetscViewerSocketPutScalar(viewer,N,1,xx);
567: PetscFree(xx);
568: } else {
569: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
570: }
571: VecRestoreArray(xin,&xarray);
572: #endif
573: return(0);
574: }
575: #endif
577: #if defined(PETSC_HAVE_MATLAB)
580: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
581: {
583: PetscMPIInt rank,size,*lens;
584: PetscInt i,N = xin->map.N;
585: PetscScalar *xx,*xarray;
588: VecGetArray(xin,&xarray);
589: MPI_Comm_rank(xin->comm,&rank);
590: MPI_Comm_size(xin->comm,&size);
591: if (!rank) {
592: PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
593: PetscMalloc(size*sizeof(PetscMPIInt),&lens);
594: for (i=0; i<size; i++) {
595: lens[i] = xin->map.range[i+1] - xin->map.range[i];
596: }
597: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,xx,lens,xin->map.range,MPIU_SCALAR,0,xin->comm);
598: PetscFree(lens);
600: PetscObjectName((PetscObject)xin);
601: PetscViewerMatlabPutArray(viewer,N,1,xx,xin->name);
603: PetscFree(xx);
604: } else {
605: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
606: }
607: VecRestoreArray(xin,&xarray);
608: return(0);
609: }
610: #endif
612: #if defined(PETSC_HAVE_PNETCDF)
615: PetscErrorCode VecView_MPI_Netcdf(Vec xin,PetscViewer v)
616: {
618: int n = xin->map.n,ncid,xdim,xdim_num=1,xin_id,xstart;
619: PetscScalar *xarray;
622: VecGetArray(xin,&xarray);
623: PetscViewerNetcdfGetID(v,&ncid);
624: if (ncid < 0) SETERRQ(PETSC_ERR_ORDER,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
625: /* define dimensions */
626: ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->map.N,&xdim);
627: /* define variables */
628: ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
629: /* leave define mode */
630: ncmpi_enddef(ncid);
631: /* store the vector */
632: VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
633: ncmpi_put_vara_double_all(ncid,xin_id,(const MPI_Offset*)&xstart,(const MPI_Offset*)&n,xarray);
634: VecRestoreArray(xin,&xarray);
635: return(0);
636: }
637: #endif
639: #if defined(PETSC_HAVE_HDF4)
642: PetscErrorCode VecView_MPI_HDF4_Ex(Vec X, PetscViewer viewer, int d, int *dims)
643: {
645: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
646: int len, i, j, k, cur, bs, n, N;
647: MPI_Status status;
648: PetscScalar *x;
649: float *xlf, *xf;
653: bs = X->map.bs > 0 ? X->map.bs : 1;
654: N = X->map.N / bs;
655: n = X->map.n / bs;
657: /* For now, always convert to float */
658: PetscMalloc(N * sizeof(float), &xf);
659: PetscMalloc(n * sizeof(float), &xlf);
661: MPI_Comm_rank(X->comm, &rank);
662: MPI_Comm_size(X->comm, &size);
664: VecGetArray(X, &x);
666: for (k = 0; k < bs; k++) {
667: for (i = 0; i < n; i++) {
668: xlf[i] = (float) x[i*bs + k];
669: }
670: if (!rank) {
671: cur = 0;
672: PetscMemcpy(xf + cur, xlf, n * sizeof(float));
673: cur += n;
674: for (j = 1; j < size; j++) {
675: MPI_Recv(xf + cur, N - cur, MPI_FLOAT, j, tag, X->comm,&status);
676: MPI_Get_count(&status, MPI_FLOAT, &len);cur += len;
677: }
678: if (cur != N) {
679: SETERRQ2(PETSC_ERR_PLIB, "? %D %D", cur, N);
680: }
681: PetscViewerHDF4WriteSDS(viewer, xf, 2, dims, bs);
682: } else {
683: MPI_Send(xlf, n, MPI_FLOAT, 0, tag, X->comm);
684: }
685: }
686: VecRestoreArray(X, &x);
687: PetscFree(xlf);
688: PetscFree(xf);
689: return(0);
690: }
691: #endif
693: #if defined(PETSC_HAVE_HDF4)
696: PetscErrorCode VecView_MPI_HDF4(Vec xin,PetscViewer viewer)
697: {
699: int bs, dims[1];
701: bs = xin->map.bs > 0 ? xin->map.bs : 1;
702: dims[0] = xin->map.N / bs;
703: VecView_MPI_HDF4_Ex(xin, viewer, 1, dims);
704: return(0);
705: }
706: #endif
710: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
711: {
713: PetscTruth iascii,issocket,isbinary,isdraw;
714: #if defined(PETSC_HAVE_MATHEMATICA)
715: PetscTruth ismathematica;
716: #endif
717: #if defined(PETSC_HAVE_PNETCDF)
718: PetscTruth isnetcdf;
719: #endif
720: #if defined(PETSC_HAVE_HDF4)
721: PetscTruth ishdf4;
722: #endif
723: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
724: PetscTruth ismatlab;
725: #endif
728: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
729: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
730: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
731: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
732: #if defined(PETSC_HAVE_MATHEMATICA)
733: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
734: #endif
735: #if defined(PETSC_HAVE_PNETCDF)
736: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
737: #endif
738: #if defined(PETSC_HAVE_HDF4)
739: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF4,&ishdf4);
740: #endif
741: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
742: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
743: #endif
744: if (iascii){
745: VecView_MPI_ASCII(xin,viewer);
746: #if defined(PETSC_USE_SOCKET_VIEWER)
747: } else if (issocket) {
748: VecView_MPI_Socket(xin,viewer);
749: #endif
750: } else if (isbinary) {
751: VecView_MPI_Binary(xin,viewer);
752: } else if (isdraw) {
753: PetscViewerFormat format;
755: PetscViewerGetFormat(viewer,&format);
756: if (format == PETSC_VIEWER_DRAW_LG) {
757: VecView_MPI_Draw_LG(xin,viewer);
758: } else {
759: VecView_MPI_Draw(xin,viewer);
760: }
761: #if defined(PETSC_HAVE_MATHEMATICA)
762: } else if (ismathematica) {
763: PetscViewerMathematicaPutVector(viewer,xin);
764: #endif
765: #if defined(PETSC_HAVE_PNETCDF)
766: } else if (isnetcdf) {
767: VecView_MPI_Netcdf(xin,viewer);
768: #endif
769: #if defined(PETSC_HAVE_HDF4)
770: } else if (ishdf4) {
771: VecView_MPI_HDF4(xin,viewer);
772: #endif
773: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
774: } else if (ismatlab) {
775: VecView_MPI_Matlab(xin,viewer);
776: #endif
777: } else {
778: SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
779: }
780: return(0);
781: }
785: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
786: {
788: *N = xin->map.N;
789: return(0);
790: }
794: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
795: {
796: Vec_MPI *x = (Vec_MPI *)xin->data;
797: PetscScalar *xx = x->array;
798: PetscInt i,tmp,start = xin->map.range[xin->stash.rank];
801: for (i=0; i<ni; i++) {
802: tmp = ix[i] - start;
803: #if defined(PETSC_USE_DEBUG)
804: if (tmp < 0 || tmp >= xin->map.n) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
805: #endif
806: y[i] = xx[tmp];
807: }
808: return(0);
809: }
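
/*
   Illustrative sketch, not part of the original file: VecGetValues_MPI above only
   serves indices this process owns, so a caller first asks for its ownership
   range.  The particular indices and the function name are invented.
*/
static PetscErrorCode ExampleGetLocalEntries(Vec x)
{
  PetscErrorCode ierr;
  PetscInt       start,end,ix[2];
  PetscScalar    y[2];

  ierr = VecGetOwnershipRange(x,&start,&end);CHKERRQ(ierr);
  if (end > start) {                             /* this process owns at least one entry */
    ix[0] = start; ix[1] = end - 1;              /* first and last locally owned global index */
    ierr  = VecGetValues(x,2,ix,y);CHKERRQ(ierr);/* indices outside [start,end) would error in debug mode */
  }
  return(0);
}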
813: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
814: {
816: PetscMPIInt rank = xin->stash.rank;
817: PetscInt *owners = xin->map.range,start = owners[rank];
818: PetscInt end = owners[rank+1],i,row;
819: PetscScalar *xx;
822: #if defined(PETSC_USE_DEBUG)
823: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
824: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
825: } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
826: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
827: }
828: #endif
829: VecGetArray(xin,&xx);
830: xin->stash.insertmode = addv;
832: if (addv == INSERT_VALUES) {
833: for (i=0; i<ni; i++) {
834: if ((row = ix[i]) >= start && row < end) {
835: xx[row-start] = y[i];
836: } else if (!xin->stash.donotstash) {
837: if (ix[i] < 0) continue;
838: #if defined(PETSC_USE_DEBUG)
839: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map.N);
840: #endif
841: VecStashValue_Private(&xin->stash,row,y[i]);
842: }
843: }
844: } else {
845: for (i=0; i<ni; i++) {
846: if ((row = ix[i]) >= start && row < end) {
847: xx[row-start] += y[i];
848: } else if (!xin->stash.donotstash) {
849: if (ix[i] < 0) continue;
850: #if defined(PETSC_USE_DEBUG)
851: if (ix[i] > xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map.N);
852: #endif
853: VecStashValue_Private(&xin->stash,row,y[i]);
854: }
855: }
856: }
857: VecRestoreArray(xin,&xx);
858: return(0);
859: }
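
/*
   Illustrative sketch, not part of the original file: an off-process contribution
   that VecSetValues_MPI above routes through the stash until assembly flushes it.
   The target index, the value, and the function name are invented; every rank
   must take part since the assembly calls are collective.
*/
static PetscErrorCode ExampleAddRemoteEntry(Vec x)
{
  PetscErrorCode ierr;
  PetscInt       ix    = 0;        /* owned by rank 0; a stash entry on every other rank */
  PetscScalar    value = 1.0;

  ierr = VecSetValues(x,1,&ix,&value,ADD_VALUES);CHKERRQ(ierr);
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);  /* scatters stashed entries to their owners */
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);    /* applies them with ADD_VALUES semantics */
  return(0);
}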
863: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
864: {
865: PetscMPIInt rank = xin->stash.rank;
866: PetscInt *owners = xin->map.range,start = owners[rank];
868: PetscInt end = owners[rank+1],i,row,bs = xin->map.bs,j;
869: PetscScalar *xx,*y = (PetscScalar*)yin;
872: VecGetArray(xin,&xx);
873: #if defined(PETSC_USE_DEBUG)
874: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
875: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
876: }
877: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
878: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
879: }
880: #endif
881: xin->stash.insertmode = addv;
883: if (addv == INSERT_VALUES) {
884: for (i=0; i<ni; i++) {
885: if ((row = bs*ix[i]) >= start && row < end) {
886: for (j=0; j<bs; j++) {
887: xx[row-start+j] = y[j];
888: }
889: } else if (!xin->stash.donotstash) {
890: if (ix[i] < 0) continue;
891: #if defined(PETSC_USE_DEBUG)
892: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map.N);
893: #endif
894: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
895: }
896: y += bs;
897: }
898: } else {
899: for (i=0; i<ni; i++) {
900: if ((row = bs*ix[i]) >= start && row < end) {
901: for (j=0; j<bs; j++) {
902: xx[row-start+j] += y[j];
903: }
904: } else if (!xin->stash.donotstash) {
905: if (ix[i] < 0) continue;
906: #if defined(PETSC_USE_DEBUG)
907: if (ix[i] > xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map.N);
908: #endif
909: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
910: }
911: y += bs;
912: }
913: }
914: VecRestoreArray(xin,&xx);
915: return(0);
916: }
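
/*
   Illustrative sketch, not part of the original file: the blocked interface used
   by VecSetValuesBlocked_MPI above takes block indices, so one call writes bs
   consecutive entries.  The block size of 3, the values, and the function name
   are invented; the vector is assumed to have been created with a matching
   block size.
*/
static PetscErrorCode ExampleInsertBlock(Vec x)
{
  PetscErrorCode ierr;
  PetscInt       block   = 0;                  /* block index, i.e. entries 0,1,2 for bs = 3 */
  PetscScalar    vals[3] = {1.0,2.0,3.0};

  ierr = VecSetValuesBlocked(x,1,&block,vals,INSERT_VALUES);CHKERRQ(ierr);
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);    /* flushes the block stash (bstash) as well */
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  return(0);
}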
918: /*
919: Since nsends or nreceives may be zero we add 1 in certain mallocs
920: to make sure we never malloc an empty one.
921: */
924: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
925: {
927: PetscInt *owners = xin->map.range,*bowners,i,bs,nstash,reallocs;
928: PetscMPIInt size;
929: InsertMode addv;
930: MPI_Comm comm = xin->comm;
933: if (xin->stash.donotstash) {
934: return(0);
935: }
937: MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
938: if (addv == (ADD_VALUES|INSERT_VALUES)) {
939: SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
940: }
941: xin->stash.insertmode = addv; /* in case this processor had no cache */
942:
943: bs = xin->map.bs;
944: MPI_Comm_size(xin->comm,&size);
945: if (!xin->bstash.bowners && xin->map.bs != -1) {
946: PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
947: for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
948: xin->bstash.bowners = bowners;
949: } else {
950: bowners = xin->bstash.bowners;
951: }
952: VecStashScatterBegin_Private(&xin->stash,owners);
953: VecStashScatterBegin_Private(&xin->bstash,bowners);
954: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
955: PetscInfo2(0,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
956: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
957: PetscInfo2(0,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
959: return(0);
960: }
964: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
965: {
967: PetscInt base,i,j,*row,flg,bs;
968: PetscMPIInt n;
969: PetscScalar *val,*vv,*array,*xarray;
972: if (!vec->stash.donotstash) {
973: VecGetArray(vec,&xarray);
974: base = vec->map.range[vec->stash.rank];
975: bs = vec->map.bs;
977: /* Process the stash */
978: while (1) {
979: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
980: if (!flg) break;
981: if (vec->stash.insertmode == ADD_VALUES) {
982: for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
983: } else if (vec->stash.insertmode == INSERT_VALUES) {
984: for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
985: } else {
986: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
987: }
988: }
989: VecStashScatterEnd_Private(&vec->stash);
991: /* now process the block-stash */
992: while (1) {
993: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
994: if (!flg) break;
995: for (i=0; i<n; i++) {
996: array = xarray+row[i]*bs-base;
997: vv = val+i*bs;
998: if (vec->stash.insertmode == ADD_VALUES) {
999: for (j=0; j<bs; j++) { array[j] += vv[j];}
1000: } else if (vec->stash.insertmode == INSERT_VALUES) {
1001: for (j=0; j<bs; j++) { array[j] = vv[j]; }
1002: } else {
1003: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1004: }
1005: }
1006: }
1007: VecStashScatterEnd_Private(&vec->bstash);
1008: VecRestoreArray(vec,&xarray);
1009: }
1010: vec->stash.insertmode = NOT_SET_VALUES;
1011: return(0);
1012: }