Actual source code: pbvec.c

#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/vec/impls/mpi/pvecimpl.h"

/*
   Note this code is very similar to VecPublish_Seq()
*/
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  return(0);
}

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  VecDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  VecTDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
  *z = sum;
  return(0);
}
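
/*
   Illustrative sketch (not part of the original file): both kernels above use
   the same pattern -- the sequential kernel computes the local contribution,
   then MPI_Allreduce() with PetscSum_Op combines it across the communicator.
   A caller sees only the collective public interface; the helper name below
   is hypothetical.
*/
static PetscErrorCode ExampleGlobalDot(Vec x,Vec y)
{
  PetscErrorCode ierr;
  PetscScalar    dot;

  ierr = VecDot(x,y,&dot);CHKERRQ(ierr);  /* collective; every process receives the full sum */
  ierr = PetscPrintf(x->comm,"dot = %g\n",(double)PetscRealPart(dot));CHKERRQ(ierr);
  return(0);
}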

PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
{
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_TRUE;
  } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_FALSE;
  }
  return(0);
}
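
/*
   Usage sketch (assumption: the public VecSetOption() entry point dispatches
   to VecSetOption_MPI() above). VEC_IGNORE_OFF_PROC_ENTRIES promises that no
   process sets entries owned by another process, so the stash and its
   communication can be skipped during assembly. The helper is hypothetical.
*/
static PetscErrorCode ExampleSkipStash(Vec v)
{
  PetscErrorCode ierr;

  ierr = VecSetOption(v,VEC_IGNORE_OFF_PROC_ENTRIES);CHKERRQ(ierr);
  /* ... VecSetValues() with purely local indices ... */
  ierr = VecAssemblyBegin(v);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
  return(0);
}
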
EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    VecPlaceArray(v->localrep,a);
  }
  return(0);
}
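
/*
   Usage sketch (hypothetical caller): VecPlaceArray() swaps in user storage
   and saves the old array in unplacedarray, as above, so VecResetArray() can
   restore it. The user array must hold at least the local length of the vector.
*/
static PetscErrorCode ExamplePlaceArray(Vec v,PetscScalar *userarray)
{
  PetscErrorCode ierr;

  ierr = VecPlaceArray(v,userarray);CHKERRQ(ierr);  /* v now operates on userarray */
  /* ... compute with v ... */
  ierr = VecResetArray(v);CHKERRQ(ierr);            /* bring back the saved array */
  return(0);
}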

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static struct _VecOps DvOps = { VecDuplicate_MPI,
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq,
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI,
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;

  v->bops->publish   = VecPublish_MPI;
  PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->map.n+nghost+1)*sizeof(PetscScalar));
  PetscNew(Vec_MPI,&s);
  PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map.bs == -1) v->map.bs = 1;
  PetscMapInitialize(v->comm,&v->map);
  if (array) {
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->map.n+nghost;
    PetscMalloc(n*sizeof(PetscScalar),&s->array);
    s->array_allocated = s->array;
    PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* Create the stashes. The block size for bstash is set later when
     VecSetValuesBlocked() is called. */
  VecStashCreate_Private(v->comm,1,&v->stash);
  VecStashCreate_Private(v->comm,v->map.bs,&v->bstash);

#if defined(PETSC_HAVE_MATLAB)
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
#endif
  PetscObjectChangeTypeName((PetscObject)v,VECMPI);
  PetscPublishAll(v);
  return(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  VecCreate_MPI_Private(vv,0,0);
  return(0);
}
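
/*
   Usage sketch: a VECMPI vector is normally created through the generic
   interface rather than by calling VecCreate_MPI() directly. A minimal
   sequence (the helper name is illustrative):
*/
static PetscErrorCode ExampleCreateVECMPI(MPI_Comm comm,PetscInt nlocal,Vec *v)
{
  PetscErrorCode ierr;

  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,nlocal,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECMPI);CHKERRQ(ierr);  /* or VecSetFromOptions(*v) with -vec_type mpi */
  return(0);
}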

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  PetscSplitOwnership(comm,&n,&N);
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,0,array);
  return(0);
}
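
/*
   Usage sketch (hypothetical): the caller owns the storage, so it must outlive
   the vector and is never freed by VecDestroy().
*/
static PetscErrorCode ExampleVecWithArray(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscScalar    storage[5];
  Vec            v;

  ierr = VecCreateMPIWithArray(comm,5,PETSC_DECIDE,storage,&v);CHKERRQ(ierr);
  /* ... use v; the entries live directly in storage ... */
  ierr = VecDestroy(v);CHKERRQ(ierr);  /* storage itself is untouched */
  return(0);
}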

/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray(), or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscTruth     isseq,ismpi;

  PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
  PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
  }
  PetscObjectReference((PetscObject)*l);
  return(0);
}

/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not update the ghost values; it simply releases the
    reference to the local form obtained with VecGhostGetLocalForm().

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscObjectDereference((PetscObject)*l);
  return(0);
}
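
/*
   Usage sketch: get the local form, inspect the owned plus ghost entries
   through one contiguous array, then release the reference. The helper name
   is illustrative.
*/
static PetscErrorCode ExampleInspectGhosts(Vec g)
{
  PetscErrorCode ierr;
  Vec            l;
  PetscScalar    *a;
  PetscInt       nloc;

  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);
  ierr = VecGetLocalSize(l,&nloc);CHKERRQ(ierr);   /* owned entries followed by ghosts */
  ierr = VecGetArray(l,&a);CHKERRQ(ierr);
  /* a[0 .. nloc-1] aliases the same storage as g, plus the ghost slots */
  ierr = VecRestoreArray(l,&a);CHKERRQ(ierr);
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  return(0);
}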

/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);
  } else {
    VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);
  }
  return(0);
}

/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);
  } else {
    VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);
  }
  return(0);
}
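
/*
   Usage sketch: the begin/end split lets the communication overlap with local
   work that does not touch the ghost entries. The helper name is illustrative.
*/
static PetscErrorCode ExampleRefreshGhosts(Vec g)
{
  PetscErrorCode ierr;

  ierr = VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* ... independent local computation may run here ... */
  ierr = VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  return(0);
}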

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost,array);
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
     Create scatter context for scattering (updating) ghost values
  */
  ISCreateGeneral(comm,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  return(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
  return(0);
}
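
/*
   Usage sketch (hypothetical layout): each process owns n entries and ghosts
   the first entry of the next process, wrapping around at the end.
*/
static PetscErrorCode ExampleCreateGhosted(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n = 4,ghosts[1];
  Vec            v;

  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ghosts[0] = ((rank+1)%size)*n;                 /* global index of the ghosted entry */
  ierr = VecCreateGhost(comm,n,PETSC_DECIDE,1,ghosts,&v);CHKERRQ(ierr);
  /* ... fill v, then VecGhostUpdateBegin/End() to populate the ghost slot ... */
  ierr = VecDestroy(v);CHKERRQ(ierr);
  return(0);
}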

PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  VecCreate(win->comm,v);
  VecSetSizes(*v,win->map.n,win->map.N);
  VecCreate_MPI_Private(*v,w->nghost,0);
  vw = (Vec_MPI *)(*v)->data;
  PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    VecGetArray(*v,&array);
    VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);
    PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
    VecRestoreArray(*v,&array);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscObjectReference((PetscObject)vw->localupdate);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  PetscOListDuplicate(win->olist,&(*v)->olist);
  PetscFListDuplicate(win->qlist,&(*v)->qlist);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    PetscObjectReference((PetscObject)win->mapping);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    PetscObjectReference((PetscObject)win->bmapping);
  }
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  return(0);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost*bs,array);
  VecSetBlockSize(*vv,bs);
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
  VecSetBlockSize(w->localrep,bs);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
     Create scatter context for scattering (updating) ghost values
  */
  ISCreateBlock(comm,bs,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  return(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
  return(0);
}
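
/*
   Usage sketch (hypothetical layout): n counts scalars while nghost and the
   ghost indices count blocks, so this vector stores bs extra scalars per
   ghost block.
*/
static PetscErrorCode ExampleCreateGhostedBlock(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       bs = 2,nb = 3,ghostblock[1];
  Vec            v;

  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ghostblock[0] = ((rank+1)%size)*nb;            /* block index, not scalar index */
  ierr = VecCreateGhostBlock(comm,bs,bs*nb,PETSC_DECIDE,1,ghostblock,&v);CHKERRQ(ierr);
  ierr = VecDestroy(v);CHKERRQ(ierr);
  return(0);
}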

/*
    These introduce a ghosted vector where the ghosting is determined by the call to
    VecSetLocalToGlobalMapping()
*/

PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  v->nghost = map->n - vv->map.n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  PetscFree(v->array_allocated);
  PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
  v->array = v->array_allocated;

  /* Create local representation */
  VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
  PetscLogObjectParent(vv,v->localrep);
  return(0);
}


PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  VecSetValues(v->localrep,n,ix,values,mode);
  return(0);
}

PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
{
  VecSetType(vv,VECMPI);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  return(0);
}