Actual source code: partition.c

#define PETSCMAT_DLL

#include "private/matimpl.h"

/* Logging support */
PetscCookie  MAT_PARTITIONING_COOKIE;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt       m;
  PetscMPIInt    rank,size;

  MPI_Comm_size(((PetscObject)part)->comm,&size);
  if (part->n != size) {
    SETERRQ(PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -mat_partitioning_type parmetis or chaco or scotch for more than one subdomain per processor");
  }
  MPI_Comm_rank(((PetscObject)part)->comm,&rank);

  MatGetLocalSize(part->adj,&m,PETSC_NULL);
  ISCreateStride(((PetscObject)part)->comm,m,rank,0,partitioning);
  return(0);
}

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt       cell,n,N,p,rstart,rend,*color;
  PetscMPIInt    size;

  MPI_Comm_size(((PetscObject)part)->comm,&size);
  if (part->n != size) {
    SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
  }
  p = (PetscInt)sqrt((double)part->n);
  if (p*p != part->n) {
    SETERRQ(PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");
  }
  MatGetSize(part->adj,&N,PETSC_NULL);
  n = (PetscInt)sqrt((double)N);
  if (n*n != N) {  /* this condition is NECESSARY, but NOT SUFFICIENT, for the domain to be square */
    SETERRQ(PETSC_ERR_SUP,"Square partitioning requires square domain");
  }
  if (n%p != 0) {
    SETERRQ(PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  }
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc((rend-rstart)*sizeof(PetscInt),&color);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  for (cell=rstart; cell<rend; cell++) {
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(((PetscObject)part)->comm,rend-rstart,color,partitioning);
  PetscFree(color);

  return(0);
}
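
/*
   Illustration of the coloring formula above (a sketch assuming a 4x4 grid,
   i.e. N = 16, n = 4, split into p*p = 4 square subdomains, so n/p = 2).
   With cells numbered row by row, cell%n is the column and cell/n is the row,
   so color = (column/2) + 2*(row/2), which gives

        columns:  0  1  2  3
      row 0:      0  0  1  1
      row 1:      0  0  1  1
      row 2:      2  2  3  3
      row 3:      2  2  3  3

   i.e. each 2x2 block of cells is assigned to one of the 4 domains.
*/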

PetscErrorCode  MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PetscErrorCode  MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

/* ===========================================================================================*/

#include "petscsys.h"

PetscFList MatPartitioningList = 0;
PetscTruth MatPartitioningRegisterAllCalled = PETSC_FALSE;


PetscErrorCode  MatPartitioningRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(MatPartitioning))
{
  char fullname[PETSC_MAX_PATH_LEN];

  PetscFListConcat(path,name,fullname);
  PetscFListAdd(&MatPartitioningList,sname,fullname,(void (*)(void))function);
  return(0);
}
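
/*
   Sketch of registering a user-defined partitioner with the routine above
   (the names "mypartitioner" and MyPartitioningCreate are illustrative, not
   part of PETSc; normally one goes through the MatPartitioningRegisterDynamic()
   macro instead of calling MatPartitioningRegister() directly):

      extern PetscErrorCode MyPartitioningCreate(MatPartitioning);

      MatPartitioningRegister("mypartitioner",0,"MyPartitioningCreate",MyPartitioningCreate);
      ...
      MatPartitioningSetType(part,"mypartitioner");
*/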

/*@C
   MatPartitioningRegisterDestroy - Frees the list of partitioning routines.

   Not Collective

   Level: developer

.keywords: matrix, register, destroy

.seealso: MatPartitioningRegisterDynamic(), MatPartitioningRegisterAll()
@*/
PetscErrorCode  MatPartitioningRegisterDestroy(void)
{

  MatPartitioningRegisterAllCalled = PETSC_FALSE;
  PetscFListDestroy(&MatPartitioningList);
  return(0);
}

/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.keywords: Partitioning, get, method, name, type
@*/
PetscErrorCode  MatPartitioningGetType(MatPartitioning partitioning,const MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return(0);
}

/*@C
   MatPartitioningSetNParts - Sets how many partitions are to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.keywords: Partitioning, set

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode  MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  part->n = n;
  return(0);
}

/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node this tells the processor
                  number to which that node is assigned.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegisterDynamic().

.keywords: matrix, get, partitioning

.seealso:  MatPartitioningRegisterDynamic(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscTruth flag;

  if (!matp->adj->assembled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  PetscOptionsHasName(PETSC_NULL,"-mat_partitioning_view",&flag);
  if (flag) {
    PetscViewer viewer;
    PetscViewerASCIIGetStdout(((PetscObject)matp)->comm,&viewer);
    MatPartitioningView(matp,viewer);
    ISView(*partitioning,viewer);
  }
  return(0);
}
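
/*
   Typical calling sequence for the interface defined in this file (a sketch;
   it assumes A is an existing parallel adjacency matrix, for example of type
   MATMPIADJ, and omits error checking):

      MatPartitioning part;
      IS              is;

      MatPartitioningCreate(PETSC_COMM_WORLD,&part);
      MatPartitioningSetAdjacency(part,A);
      MatPartitioningSetFromOptions(part);       type chosen at run time, e.g. -mat_partitioning_type parmetis
      MatPartitioningApply(part,&is);
      ...  use is, e.g. with ISPartitioningToNumbering() or ISPartitioningCount()
      ISDestroy(is);
      MatPartitioningDestroy(part);
*/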
/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
      partitioned.

   Collective on MatPartitioning and Mat

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.keywords: Partitioning, adjacency

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  part->adj = adj;
  return(0);
}

/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on Partitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningDestroy(MatPartitioning part)
{

  if (--((PetscObject)part)->refct > 0) return(0);

  if (part->ops->destroy) {
    (*part->ops->destroy)(part);
  }
  PetscFree(part->vertex_weights);
  PetscFree(part->part_weights);
  PetscHeaderDestroy(part);
  return(0);
}

/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{

  PetscFree(part->vertex_weights);
  part->vertex_weights = (PetscInt*)weights;
  return(0);
}
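
/*
   Sketch of how the vertex weights are supplied (nlocal here stands for the
   number of local rows of the adjacency matrix; the array is allocated with
   PetscMalloc() and ownership passes to the MatPartitioning object, so the
   caller must not free it):

      PetscInt *wts;

      PetscMalloc(nlocal*sizeof(PetscInt),&wts);
      ...  fill wts[0] .. wts[nlocal-1]
      MatPartitioningSetVertexWeights(part,wts);
*/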

/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{

  PetscFree(part->part_weights);
  part->part_weights = (PetscReal*)weights;
  return(0);
}

/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode  MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscErrorCode  ierr;
  PetscMPIInt     size;

  *newp = 0;

#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  MatInitializePackage(PETSC_NULL);
#endif
  PetscHeaderCreate(part,_p_MatPartitioning,struct _MatPartitioningOps,MAT_PARTITIONING_COOKIE,-1,"MatPartitioning",comm,MatPartitioningDestroy,
                    MatPartitioningView);
  part->vertex_weights = PETSC_NULL;
  part->part_weights   = PETSC_NULL;
  MPI_Comm_size(comm,&size);
  part->n = (PetscInt)size;

  *newp = part;
  return(0);
}

/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Partitioning, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode  MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscErrorCode            ierr;
  PetscTruth                iascii;
  const MatPartitioningType name;

  if (!viewer) {
    PetscViewerASCIIGetStdout(((PetscObject)part)->comm,&viewer);
  }

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    MatPartitioningGetType(part,&name);
    PetscViewerASCIIPrintf(viewer,"MatPartitioning Object: %s\n",name);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this MatPartitioning",((PetscObject)viewer)->type_name);
  }

  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }

  return(0);
}

/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: intermediate

.keywords: partitioning, set, method, type

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode  MatPartitioningSetType(MatPartitioning part,const MatPartitioningType type)
{
  PetscErrorCode ierr,(*r)(MatPartitioning);
  PetscTruth match;

  PetscTypeCompare((PetscObject)part,type,&match);
  if (match) return(0);

  if (part->setupcalled) {
    (*part->ops->destroy)(part);
    part->data        = 0;
    part->setupcalled = 0;
  }

  PetscFListFind(MatPartitioningList,((PetscObject)part)->comm,type,(void (**)(void)) &r);

  if (!r) {SETERRQ1(PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);}

  part->ops->destroy      = (PetscErrorCode (*)(MatPartitioning)) 0;
  part->ops->view         = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;
  (*r)(part);

  PetscStrfree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  return(0);
}
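
/*
   Example (sketch): selecting a partitioner programmatically with the routine
   above; a -mat_partitioning_type option passed to MatPartitioningSetFromOptions()
   afterwards still takes precedence, otherwise the programmatic choice is kept
   as the default:

      MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);
*/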

/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: beginner

.keywords: partitioning, set, method, type
@*/
PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscTruth flag;
  char       type[256];
  const char *def;

  PetscOptionsBegin(((PetscObject)part)->comm,((PetscObject)part)->prefix,"Partitioning options","MatOrderings");
    if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
      def = MAT_PARTITIONING_PARMETIS;
#else
      def = MAT_PARTITIONING_CURRENT;
#endif
    } else {
      def = ((PetscObject)part)->type_name;
    }
    PetscOptionsList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
    if (flag) {
      MatPartitioningSetType(part,type);
    }
    /*
      Set the type if it was never set.
    */
    if (!((PetscObject)part)->type_name) {
      MatPartitioningSetType(part,def);
    }

    if (part->ops->setfromoptions) {
      (*part->ops->setfromoptions)(part);
    }
  PetscOptionsEnd();
  return(0);
}