Actual source code: chaco.c
#define PETSCMAT_DLL

#include "../src/mat/impls/adj/mpi/mpiadj.h"

#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif

#include "petscfix.h"

/* Chaco does not have an include file, so declare its entry point here;
   the signature matches the call below and the argument list documented after it */
extern int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                     float *ewgts, float *x, float *y, float *z, char *outassignname,
                     char *outfilename, short *assignment, int architecture, int ndims_tot,
                     int mesh_dims[3], double *goal, int global_method, int local_method,
                     int rqi_flag, int vmax, int ndims, double eigtol, long seed);
extern int FREE_GRAPH;
/*
   int nvtxs;            number of vertices in full graph
   int *start;           start of edge list for each vertex
   int *adjacency;       edge list data
   int *vwgts;           weights for all vertices
   float *ewgts;         weights for all edges
   float *x, *y, *z;     coordinates for inertial method
   char *outassignname;  name of assignment output file
   char *outfilename;    output file name
   short *assignment;    set number of each vtx (length n)
   int architecture;     0 => hypercube, d => d-dimensional mesh
   int ndims_tot;        total number of cube dimensions to divide
   int mesh_dims[3];     dimensions of mesh of processors
   double *goal;         desired set sizes for each set
   int global_method;    global partitioning algorithm
   int local_method;     local partitioning algorithm
   int rqi_flag;         should I use RQI/Symmlq eigensolver?
   int vmax;             how many vertices to coarsen down to?
   int ndims;            number of eigenvectors (2^d sets)
   double eigtol;        tolerance on eigenvectors
   long seed;            for random graph mutations
*/
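
/*
   For illustration only (not part of the original source): a 3-vertex path
   graph 0-1-2 would be passed to Chaco as the CSR arrays sketched below.
   MatPartitioningApply_Chaco() shifts the adjacency entries to 1-based
   numbering before the call, since Chaco uses Fortran-style indices.

       nvtxs       = 3;
       start[]     = {0, 1, 3, 4};   edge list of vertex v is
                                     adjacency[start[v] .. start[v+1]-1]
       adjacency[] = {2, 1, 3, 2};   1-based neighbors: 0->{1}, 1->{0,2}, 2->{1}
*/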
typedef struct {
  int    architecture;
  int    ndims_tot;
  int    mesh_dims[3];
  int    rqi_flag;
  int    numbereigen;
  double eigtol;
  int    global_method; /* global method */
  int    local_method;  /* local method */
  int    nbvtxcoarsed;  /* number of vertices for the coarse graph */
  char   *mesg_log;
} MatPartitioning_Chaco;

#define SIZE_LOG 10000 /* size of buffer for mesg_log */
static PetscErrorCode MatPartitioningApply_Chaco(MatPartitioning part, IS *partitioning)
{
  PetscErrorCode        ierr;
  int                   *parttab, *locals, i, size, rank;
  Mat                   mat = part->adj, matMPI, matSeq;
  int                   nb_locals;
  Mat_MPIAdj            *adj;
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;
  PetscTruth            flg;
#ifdef PETSC_HAVE_UNISTD_H
  int                   fd_stdout, fd_pipe[2], count, err;
#endif

  FREE_GRAPH = 0; /* otherwise Chaco will attempt to free memory for the adjacency graph */

  MPI_Comm_size(((PetscObject)mat)->comm, &size);
  PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);

  /* check if the matrix is sequential; gather it with MatGetSubMatrices if necessary */
  if (size > 1) {
    int M, N;
    IS  isrow, iscol;
    Mat *A;

    if (flg) {
      SETERRQ(0, "Distributed matrix format MPIAdj is not supported for sequential partitioners");
    }
    PetscPrintf(((PetscObject)part)->comm, "Converting distributed matrix to sequential: this could be a performance loss\n");
    MatGetSize(mat, &M, &N);
    ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
    ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
    MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
    ISDestroy(isrow);
    ISDestroy(iscol);
    matSeq = *A;
    PetscFree(A);
  } else {
    matSeq = mat;
  }

  /* the partitioner reads the graph through the MPIAdj data structure,
     so convert to that format if the input is not already MPIAdj */
  if (!flg) {
    MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matMPI);
  } else {
    matMPI = matSeq;
  }
  adj = (Mat_MPIAdj *) matMPI->data; /* finally adj contains the adjacency graph */
  {
    /* arguments for the Chaco library */
    int    nvtxs = mat->rmap->N;                 /* number of vertices in full graph */
    int    *start = adj->i;                      /* start of edge list for each vertex */
    int    *adjacency;                           /* = adj->j; edge list data */
    int    *vwgts = NULL;                        /* weights for all vertices */
    float  *ewgts = NULL;                        /* weights for all edges */
    float  *x = NULL, *y = NULL, *z = NULL;      /* coordinates for inertial method */
    char   *outassignname = NULL;                /* name of assignment output file */
    char   *outfilename = NULL;                  /* output file name */
    short  *assignment;                          /* set number of each vtx (length n) */
    int    architecture = chaco->architecture;   /* 0 => hypercube, d => d-dimensional mesh */
    int    ndims_tot = chaco->ndims_tot;         /* total number of cube dimensions to divide */
    int    *mesh_dims = chaco->mesh_dims;        /* dimensions of mesh of processors */
    double *goal = NULL;                         /* desired set sizes for each set */
    int    global_method = chaco->global_method; /* global partitioning algorithm */
    int    local_method = chaco->local_method;   /* local partitioning algorithm */
    int    rqi_flag = chaco->rqi_flag;           /* should I use RQI/Symmlq eigensolver? */
    int    vmax = chaco->nbvtxcoarsed;           /* how many vertices to coarsen down to? */
    int    ndims = chaco->numbereigen;           /* number of eigenvectors (2^d sets) */
    double eigtol = chaco->eigtol;               /* tolerance on eigenvectors */
    long   seed = 123636512;                     /* for random graph mutations */

    /* allocate the assignment array that Chaco fills with its result */
    PetscMalloc((mat->rmap->N) * sizeof(short), &assignment);

    /* Chaco has a Fortran-style implementation, so shift the edge list to 1-based indices */
    PetscMalloc(sizeof(int) * start[nvtxs], &adjacency);
    for (i = 0; i < start[nvtxs]; i++) adjacency[i] = (adj->j)[i] + 1;
    /* redirect output to a buffer: chaco -> mesg_log */
#ifdef PETSC_HAVE_UNISTD_H
    fd_stdout = dup(1);
    pipe(fd_pipe);
    close(1);
    dup2(fd_pipe[1], 1);
    PetscMalloc(SIZE_LOG * sizeof(char), &(chaco->mesg_log));
#endif

    /* library call */
    ierr = interface(nvtxs, start, adjacency, vwgts, ewgts, x, y, z,
                     outassignname, outfilename, assignment, architecture, ndims_tot,
                     mesh_dims, goal, global_method, local_method, rqi_flag, vmax, ndims,
                     eigtol, seed);

#ifdef PETSC_HAVE_UNISTD_H
    err = fflush(stdout);
    if (err) SETERRQ(PETSC_ERR_SYS,"fflush() failed on stdout");
    count = read(fd_pipe[0], chaco->mesg_log, (SIZE_LOG - 1) * sizeof(char));
    if (count < 0) count = 0;
    chaco->mesg_log[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
#endif
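
    /* stdout is now restored; everything Chaco printed during the call sits in
       chaco->mesg_log, which MatPartitioningView_Chaco() displays and which
       doubles as the error message if the library call failed */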
    if (ierr) { SETERRQ(PETSC_ERR_LIB, chaco->mesg_log); }

    PetscFree(adjacency);

    /* copy the short assignment into an int array for ISCreateGeneral() */
    PetscMalloc((mat->rmap->N) * sizeof(int), &parttab);
    for (i = 0; i < nvtxs; i++) {
      parttab[i] = assignment[i];
    }
    PetscFree(assignment);
  }
  /* creation of the index set: each process takes a contiguous slice of parttab */
  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  MPI_Comm_size(((PetscObject)part)->comm, &size);
  nb_locals = mat->rmap->N / size;
  locals = parttab + rank * nb_locals;
  if (rank < mat->rmap->N % size) {
    nb_locals++;
    locals += rank;
  } else {
    locals += mat->rmap->N % size;
  }
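
  /* worked example (illustration only): with N = 10 rows and size = 3 processes,
     nb_locals starts at 10/3 = 3 and the remainder 10%3 = 1 goes to rank 0, so
     the ranks own parttab[0..3], parttab[4..6], and parttab[7..9] respectively */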
  ISCreateGeneral(((PetscObject)part)->comm, nb_locals, locals, partitioning);

  /* destroy temporary objects */
  PetscFree(parttab);
  if (matSeq != mat) {
    MatDestroy(matSeq);
  }
  if (matMPI != mat) {
    MatDestroy(matMPI);
  }
  return(0);
}
PetscErrorCode MatPartitioningView_Chaco(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;
  PetscErrorCode        ierr;
  PetscMPIInt           rank;
  PetscTruth            iascii;

  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &iascii);
  if (iascii) {
    if (!rank && chaco->mesg_log) {
      PetscViewerASCIIPrintf(viewer, "%s\n", chaco->mesg_log);
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this Chaco partitioner",((PetscObject) viewer)->type_name);
  }
  return(0);
}
/*@
    MatPartitioningChacoSetGlobal - Set the method for global partitioning.

  Input Parameters:
+  part - the partitioning context
-  method - MP_CHACO_MULTILEVEL_KL, MP_CHACO_SPECTRAL, MP_CHACO_LINEAR,
            MP_CHACO_RANDOM or MP_CHACO_SCATTERED

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning part, MPChacoGlobalType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  switch (method) {
  case MP_CHACO_MULTILEVEL_KL:
    chaco->global_method = 1;
    break;
  case MP_CHACO_SPECTRAL:
    chaco->global_method = 2;
    break;
  case MP_CHACO_LINEAR:
    chaco->global_method = 4;
    break;
  case MP_CHACO_RANDOM:
    chaco->global_method = 5;
    break;
  case MP_CHACO_SCATTERED:
    chaco->global_method = 6;
    break;
  default:
    SETERRQ(PETSC_ERR_SUP, "Chaco: Unknown or unsupported option");
  }
  return(0);
}
/*@
    MatPartitioningChacoSetLocal - Set the method for local partitioning.

  Input Parameters:
+  part - the partitioning context
-  method - MP_CHACO_KERNIGHAN_LIN or MP_CHACO_NONE

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning part, MPChacoLocalType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  switch (method) {
  case MP_CHACO_KERNIGHAN_LIN:
    chaco->local_method = 1;
    break;
  case MP_CHACO_NONE:
    chaco->local_method = 2;
    break;
  default:
    SETERRQ(PETSC_ERR_ARG_CORRUPT, "Chaco: Unknown or unsupported option");
  }

  return(0);
}
/*@
    MatPartitioningChacoSetCoarseLevel - Set the coarse level.

  Input Parameters:
+  part - the partitioning context
-  level - the coarse level in range [0.0,1.0]; the graph is coarsened down to
           about level * N vertices (but never fewer than 20)

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning part, PetscReal level)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  if (level < 0.0 || level > 1.0) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "Chaco: level of coarsening out of range [0.0,1.0]");
  } else {
    chaco->nbvtxcoarsed = (int)(part->adj->cmap->N * level);
  }
  if (chaco->nbvtxcoarsed < 20) chaco->nbvtxcoarsed = 20;

  return(0);
}
/*@
    MatPartitioningChacoSetEigenSolver - Set the eigensolver method.

  Input Parameters:
+  part - the partitioning context
-  method - MP_CHACO_LANCZOS or MP_CHACO_RQI_SYMMLQ

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetEigenSolver(MatPartitioning part, MPChacoEigenType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  switch (method) {
  case MP_CHACO_LANCZOS:
    chaco->rqi_flag = 0;
    break;
  case MP_CHACO_RQI_SYMMLQ:
    chaco->rqi_flag = 1;
    break;
  default:
    SETERRQ(PETSC_ERR_ARG_CORRUPT, "Chaco: Unknown or unsupported option");
  }

  return(0);
}
/*@
    MatPartitioningChacoSetEigenTol - Set the tolerance for the eigensolver.

  Input Parameters:
+  part - the partitioning context
-  tol - the requested tolerance

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning part, PetscReal tol)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  if (tol <= 0.0) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "Chaco: Eigensolver tolerance out of range");
  } else {
    chaco->eigtol = tol;
  }

  return(0);
}
/*@
    MatPartitioningChacoSetEigenNumber - Set the number of eigenvectors to use for partitioning.

  Input Parameters:
+  part - the partitioning context
-  num - 1, 2 or 3, indicating partitioning by bisection, quadrisection, or octosection

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning part, int num)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  if (num > 3 || num < 1) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "Chaco: number of eigenvectors out of range");
  } else {
    chaco->numbereigen = num;
  }

  return(0);
}
PetscErrorCode MatPartitioningSetFromOptions_Chaco(MatPartitioning part)
{
  int        i;
  PetscReal  r;
  PetscTruth flag;
  const char *global[] = { "multilevel-kl", "spectral", "linear", "random", "scattered" };
  const char *local[]  = { "kernighan-lin", "none" };
  const char *eigen[]  = { "lanczos", "rqi_symmlq" };

  PetscOptionsHead("Set Chaco partitioning options");

  PetscOptionsEList("-mat_partitioning_chaco_global",
      "Global method to use", "MatPartitioningChacoSetGlobal", global, 5,
      global[0], &i, &flag);
  if (flag) MatPartitioningChacoSetGlobal(part, (MPChacoGlobalType)i);

  PetscOptionsEList("-mat_partitioning_chaco_local",
      "Local method to use", "MatPartitioningChacoSetLocal", local, 2,
      local[0], &i, &flag);
  if (flag) MatPartitioningChacoSetLocal(part, (MPChacoLocalType)i);

  PetscOptionsReal("-mat_partitioning_chaco_coarse_level",
      "Coarse level", "MatPartitioningChacoSetCoarseLevel", 0, &r, &flag);
  if (flag) MatPartitioningChacoSetCoarseLevel(part, r);

  PetscOptionsEList("-mat_partitioning_chaco_eigen_solver",
      "Eigensolver to use in spectral method", "MatPartitioningChacoSetEigenSolver",
      eigen, 2, eigen[0], &i, &flag);
  if (flag) MatPartitioningChacoSetEigenSolver(part, (MPChacoEigenType)i);

  PetscOptionsReal("-mat_partitioning_chaco_eigen_tol",
      "Tolerance for eigensolver", "MatPartitioningChacoSetEigenTol", 0.001,
      &r, &flag);
  if (flag) MatPartitioningChacoSetEigenTol(part, r);

  PetscOptionsInt("-mat_partitioning_chaco_eigen_number",
      "Number of eigenvectors: 1, 2, or 3 (bi-, quadri-, or octosection)",
      "MatPartitioningChacoSetEigenNumber", 1, &i, &flag);
  if (flag) MatPartitioningChacoSetEigenNumber(part, i);

  PetscOptionsTail();
  return(0);
}
PetscErrorCode MatPartitioningDestroy_Chaco(MatPartitioning part)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;
  PetscErrorCode        ierr;

  PetscFree(chaco->mesg_log);
  PetscFree(chaco);
  return(0);
}
/*MC
   MAT_PARTITIONING_CHACO - Creates a partitioning context via the external package Chaco.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_chaco_global <multilevel-kl> (one of) multilevel-kl spectral linear random scattered
.  -mat_partitioning_chaco_local <kernighan-lin> (one of) kernighan-lin none
.  -mat_partitioning_chaco_coarse_level <0>: Coarse level (MatPartitioningChacoSetCoarseLevel)
.  -mat_partitioning_chaco_eigen_solver <lanczos> (one of) lanczos rqi_symmlq
.  -mat_partitioning_chaco_eigen_tol <0.001>: Tolerance for eigensolver (MatPartitioningChacoSetEigenTol)
-  -mat_partitioning_chaco_eigen_number <1>: Number of eigenvectors: 1, 2, or 3 (bi-, quadri-, or octosection) (MatPartitioningChacoSetEigenNumber)

   Level: beginner

   Notes: See http://www.cs.sandia.gov/CRF/chac.html

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/
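
/*
   A minimal usage sketch (illustration only; error checking omitted, and `A` is
   assumed to be a previously assembled matrix):

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD, &part);
     MatPartitioningSetAdjacency(part, A);
     MatPartitioningSetType(part, MAT_PARTITIONING_CHACO);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part, &is);
     ISView(is, PETSC_VIEWER_STDOUT_WORLD);
     ISDestroy(is);
     MatPartitioningDestroy(part);
*/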
PetscErrorCode MatPartitioningCreate_Chaco(MatPartitioning part)
{
  MatPartitioning_Chaco *chaco;

  PetscNewLog(part, MatPartitioning_Chaco, &chaco);
  part->data = (void*) chaco;

  /* defaults: 1-dimensional mesh architecture, multilevel-KL global method,
     Kernighan-Lin local method, Lanczos eigensolver */
  chaco->architecture  = 1;
  chaco->ndims_tot     = 0;
  chaco->mesh_dims[0]  = part->n;
  chaco->global_method = 1;
  chaco->local_method  = 1;
  chaco->rqi_flag      = 0;
  chaco->nbvtxcoarsed  = 200;
  chaco->numbereigen   = 1;
  chaco->eigtol        = 0.001;
  chaco->mesg_log      = NULL;

  part->ops->apply          = MatPartitioningApply_Chaco;
  part->ops->view           = MatPartitioningView_Chaco;
  part->ops->destroy        = MatPartitioningDestroy_Chaco;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Chaco;

  return(0);
}