Actual source code: scotch.c
#define PETSCMAT_DLL

#include "../src/mat/impls/adj/mpi/mpiadj.h"

#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif

#include "petscfix.h"

/*
   Currently using Scotch-3.4
*/
#include "scotch.h"
/*************************************************
 * Note:                                         *
 * To make Scotch compile, all old mat->m/M      *
 * references were changed to mat->rmap->n/N.    *
 * Hopefully this was correct.                   *
 *************************************************/
typedef struct {
  char arch[PETSC_MAX_PATH_LEN];
  int  multilevel;
  char strategy[30];
  int  global_method;      /* global method */
  int  local_method;       /* local method */
  int  nbvtxcoarsed;       /* number of vertices for the coarse graph */
  int  map;                /* whether to map onto archptr or just partition the graph */
  char *mesg_log;
  char host_list[PETSC_MAX_PATH_LEN];
} MatPartitioning_Scotch;

#define SIZE_LOG 10000     /* size of buffer for mesg_log */
static PetscErrorCode MatPartitioningApply_Scotch(MatPartitioning part, IS *partitioning)
{
  PetscErrorCode ierr = 0;   /* tested after the library calls below */
  int *parttab, *locals = PETSC_NULL, rank, i, size;
  size_t j;
  Mat mat = part->adj, matMPI, matSeq;
  int nb_locals = mat->rmap->n;
  Mat_MPIAdj *adj = (Mat_MPIAdj *) mat->data;
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;
  PetscTruth flg;
#ifdef PETSC_HAVE_UNISTD_H
  int fd_stdout, fd_pipe[2], count, err;
#endif
  /* check whether the matrix is sequential; use MatGetSubMatrices if necessary */
  MPI_Comm_size(((PetscObject)mat)->comm, &size);
  PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);

  if (size > 1) {
    int M, N;
    IS isrow, iscol;
    Mat *A;

    if (flg) {
      SETERRQ(0, "Distributed matrix format MPIAdj is not supported for sequential partitioners");
    }
    PetscPrintf(((PetscObject)part)->comm, "Converting distributed matrix to sequential: this could be a performance loss\n");
    MatGetSize(mat, &M, &N);
    ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
    ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
    MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
    matSeq = *A;
    PetscFree(A);
    ISDestroy(isrow);
    ISDestroy(iscol);
  } else {
    matSeq = mat;
  }
  /* convert the matrix to MPIADJ type if necessary */
  if (!flg) {
    MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matMPI);
  } else {
    matMPI = matSeq;
  }

  adj = (Mat_MPIAdj *) matMPI->data;  /* finally adj contains the adjacency graph */

  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  {
    /* definition of the Scotch library arguments */
    SCOTCH_Strat stratptr;       /* scotch strategy */
    SCOTCH_Graph grafptr;        /* scotch graph */
    int vertnbr = mat->rmap->N;  /* number of vertices in the full graph */
    int *verttab = adj->i;       /* start of the edge list for each vertex */
    int *edgetab = adj->j;       /* edge list data */
    int edgenbr = adj->nz;       /* number of edges */
    int *velotab = NULL;         /* not used by the PETSc interface */
    int *vlbltab = NULL;
    int *edlotab = NULL;
    int baseval = 0;             /* 0 for C array indexing */
    int flagval = 3;             /* cf. the Scotch doc: no edge or vertex weights */
    char strategy[256];
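    /* Illustrative aside: adj->i/adj->j hold the graph in CSR form. For the
       3-vertex path graph 0-1-2 (with baseval 0) one would have
         verttab = {0, 1, 3, 4}   (where each vertex's neighbour list starts)
         edgetab = {1, 0, 2, 1}   (vertex v's neighbours are
                                    edgetab[verttab[v]] .. edgetab[verttab[v+1]-1]) */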
    PetscMalloc((mat->rmap->N) * sizeof(int), &parttab);

    /* redirect Scotch's stdout output into the mesg_log buffer */
#ifdef PETSC_HAVE_UNISTD_H
    fd_stdout = dup(1);
    pipe(fd_pipe);
    close(1);
    dup2(fd_pipe[1], 1);
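    /* from here on, anything the library prints to stdout (fd 1) lands in the
       pipe; the original stdout was saved in fd_stdout and is restored after
       the Scotch calls */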
    PetscMalloc(SIZE_LOG * sizeof(char), &(scotch->mesg_log));
#endif

    /* library call */

    /* construction of the Scotch graph object */
    SCOTCH_graphInit(&grafptr);
    SCOTCH_graphBuild((SCOTCH_Graph *) &grafptr,
                      (SCOTCH_Num)         vertnbr,
                      (const SCOTCH_Num *) verttab,
                      (const SCOTCH_Num *) velotab,
                      (const SCOTCH_Num *) vlbltab,
                      (SCOTCH_Num)         edgenbr,
                      (const SCOTCH_Num *) edgetab,
                      (const SCOTCH_Num *) edlotab,
                      (SCOTCH_Num)         baseval,
                      (SCOTCH_Num)         flagval);
    SCOTCH_graphCheck(&grafptr);
    /* construction of the strategy string */
    if (scotch->strategy[0] != 0) {
      PetscStrcpy(strategy, scotch->strategy);
    } else {
      PetscStrcpy(strategy, "b{strat=");

      if (scotch->multilevel) {
        /* PetscStrcat(strategy,"m{vert=");
           sprintf(strategy+strlen(strategy),"%d",scotch->nbvtxcoarsed);
           PetscStrcat(strategy,",asc="); */
        sprintf(strategy, "b{strat=m{vert=%d,asc=", scotch->nbvtxcoarsed);
      } else {
        PetscStrcpy(strategy, "b{strat=");
      }

      switch (scotch->global_method) {
      case MP_SCOTCH_GREEDY:
        PetscStrcat(strategy, "h");
        break;
      case MP_SCOTCH_GPS:
        PetscStrcat(strategy, "g");
        break;
      case MP_SCOTCH_GR_GPS:
        PetscStrcat(strategy, "g|h");
      }

      switch (scotch->local_method) {
      case MP_SCOTCH_KERNIGHAN_LIN:
        if (scotch->multilevel)
          PetscStrcat(strategy, ",low=f}");
        else
          PetscStrcat(strategy, " f");
        break;
      case MP_SCOTCH_NONE:
        if (scotch->multilevel)
          PetscStrcat(strategy, ",asc=x}");
      default:
        break;
      }

      PetscStrcat(strategy, " x}");
    }
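    /* For reference, two strings the code above can produce: with the default
       settings (no multilevel, global g|h, local Kernighan-Lin) it builds
       "b{strat=g|h f x}"; with multilevel and nbvtxcoarsed = 200 it builds
       "b{strat=m{vert=200,asc=g|h,low=f} x}". */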
    PetscPrintf(((PetscObject)part)->comm, "strategy=[%s]\n", strategy);

    SCOTCH_stratInit(&stratptr);
    /*
       TODO: Correct this part

       Commented out because this no longer exists:

       SCOTCH_stratMap(&stratptr, strategy);
    */
    /* check for the mapping option */
    if (!scotch->map) {
      /**********************************************
       * TODO: Correct this part                    *
       *                                            *
       * This won't work with this tmp              *
       * SCOTCH_Strat...                            *
       *                                            *
       * It was only modified so that Scotch        *
       * compiles and PaStiX can be used...         *
       **********************************************/
      SCOTCH_Strat tmp;
      SCOTCH_graphPart((const SCOTCH_Graph *) &grafptr,
                       (const SCOTCH_Num)     &stratptr,
                       (const SCOTCH_Strat *) &tmp,   /* this argument changed after Scotch 3.04; it used to be part->n */
                       (SCOTCH_Num *)         parttab);
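      /* A sketch of the corresponding Scotch 4.x call, assuming part->n holds
         the requested number of parts (left as a comment since this file
         targets Scotch 3.4):
           SCOTCH_graphPart(&grafptr, (SCOTCH_Num) part->n, &stratptr, (SCOTCH_Num *) parttab); */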
      PetscPrintf(PETSC_COMM_SELF, "Simple partitioning without mapping\n");
    } else {
      SCOTCH_Graph grafarch;
      SCOTCH_Num *listtab;
      SCOTCH_Num listnbr = 0;
      SCOTCH_Arch archptr;       /* file in Scotch architecture format */
      SCOTCH_Strat archstrat;
      int arch_total_size, *parttab_tmp, err;
      int cpt;
      char buf[256];
      FILE *file1, *file2;
      char host_buf[256];

      /* generate the graph that represents the architecture */
      file1 = fopen(scotch->arch, "r");
      if (!file1) SETERRQ1(PETSC_ERR_FILE_OPEN, "Scotch: unable to open architecture file %s", scotch->arch);

      SCOTCH_graphInit(&grafarch);
      SCOTCH_graphLoad(&grafarch, file1, baseval, 3);

      SCOTCH_graphCheck(&grafarch);
      SCOTCH_graphSize(&grafarch, &arch_total_size, &cpt);

      err = fclose(file1);
      if (err) SETERRQ(PETSC_ERR_SYS, "fclose() failed on file");

      printf("total size = %d\n", arch_total_size);

      /* generate the list of nodes currently working */
      PetscGetHostName(host_buf, 256);
      PetscStrlen(host_buf, &j);

      file2 = fopen(scotch->host_list, "r");
      if (!file2) SETERRQ1(PETSC_ERR_FILE_OPEN, "Scotch: unable to open host list file %s", scotch->host_list);

      i = -1;
      flg = PETSC_FALSE;
      while (!feof(file2) && !flg) {
        i++;
        fgets(buf, 256, file2);
        PetscStrncmp(buf, host_buf, j, &flg);
      }
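      /* on exit from the loop, i is this host's zero-based line index in the
         host list file, i.e. its vertex index in the architecture graph */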
      err = fclose(file2);
      if (err) SETERRQ(PETSC_ERR_SYS, "fclose() failed on file");
      if (!flg) SETERRQ1(PETSC_ERR_LIB, "Scotch: unable to find '%s' in host list file", host_buf);

      listnbr = size;
      PetscMalloc(sizeof(SCOTCH_Num) * listnbr, &listtab);

      MPI_Allgather(&i, 1, MPI_INT, listtab, 1, MPI_INT, ((PetscObject)part)->comm);

      printf("listnbr = %d, listtab = ", listnbr);
      for (i = 0; i < listnbr; i++)
        printf("%d ", listtab[i]);
      printf("\n");
      err = fflush(stdout);
      if (err) SETERRQ(PETSC_ERR_SYS, "fflush() failed on stdout");

      SCOTCH_stratInit(&archstrat);
      /*
         TODO: Correct this part

         Commented out because this no longer exists:

         SCOTCH_stratBipart(&archstrat, "fx");
      */
      SCOTCH_archInit(&archptr);
      SCOTCH_archBuild(&archptr, &grafarch, listnbr, listtab, &archstrat);
      PetscMalloc((mat->rmap->N) * sizeof(int), &parttab_tmp);
      /*
         TODO: Correct this part

         Commented out because these no longer exist:

         SCOTCH_mapInit(&mappptr, &grafptr, &archptr, parttab_tmp);
         SCOTCH_mapCompute(&mappptr, &stratptr);
         SCOTCH_mapView(&mappptr, stdout);
      */
      /* Now copy the result into the real parttab at the right places,
         because the rank order differs from the position in the arch graph. */
      for (i = 0; i < mat->rmap->N; i++) {
        parttab[i] = parttab_tmp[i];
      }

      PetscFree(parttab_tmp);
      PetscFree(listtab);
      SCOTCH_archExit(&archptr);
      /*
         TODO: Correct this part

         Commented out because this no longer exists:

         SCOTCH_mapExit(&mappptr);
      */
      SCOTCH_stratExit(&archstrat);
    }
    /* dump the redirected output into mesg_log and restore stdout */
#ifdef PETSC_HAVE_UNISTD_H
    err = fflush(stdout);
    if (err) SETERRQ(PETSC_ERR_SYS, "fflush() failed on stdout");
    count = read(fd_pipe[0], scotch->mesg_log, (SIZE_LOG - 1) * sizeof(char));
    if (count < 0) count = 0;
    scotch->mesg_log[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
#endif

    SCOTCH_graphExit(&grafptr);
    SCOTCH_stratExit(&stratptr);
  }

  if (ierr) SETERRQ(PETSC_ERR_LIB, scotch->mesg_log);
  /* creation of the index set */
  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  MPI_Comm_size(((PetscObject)part)->comm, &size);
  nb_locals = mat->rmap->N / size;
  locals = parttab + rank * nb_locals;
  if (rank < mat->rmap->N % size) {
    nb_locals++;
    locals += rank;
  } else {
    locals += mat->rmap->N % size;
  }
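  /* Worked example: with N = 10 rows and size = 4 ranks, ranks 0..3 own
     3, 3, 2, 2 entries of parttab, starting at offsets 0, 3, 6, 8. */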
  ISCreateGeneral(((PetscObject)part)->comm, nb_locals, locals, partitioning);

  /* destroy the old objects */
  PetscFree(parttab);
  if (matSeq != mat) {
    MatDestroy(matSeq);
  }
  if (matMPI != mat) {
    MatDestroy(matMPI);
  }

  return(0);
}
PetscErrorCode MatPartitioningView_Scotch(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;
  PetscErrorCode ierr;
  PetscMPIInt rank;
  PetscTruth iascii;

  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &iascii);
  if (iascii) {
    if (!rank && scotch->mesg_log) {
      PetscViewerASCIIPrintf(viewer, "%s\n", scotch->mesg_log);
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP, "Viewer type %s not supported for this Scotch partitioner", ((PetscObject)viewer)->type_name);
  }
  return(0);
}
/*@
    MatPartitioningScotchSetGlobal - Sets the method used for the global partitioning.

    Input Parameters:
.   part - the partitioning context
.   method - MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, or MP_SCOTCH_GR_GPS (the combination of the two)

    Level: advanced

@*/
PetscErrorCode MatPartitioningScotchSetGlobal(MatPartitioning part, MPScotchGlobalType global)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  switch (global) {
  case MP_SCOTCH_GREEDY:
  case MP_SCOTCH_GPS:
  case MP_SCOTCH_GR_GPS:
    scotch->global_method = global;
    break;
  default:
    SETERRQ(PETSC_ERR_SUP, "Scotch: Unknown or unsupported option");
  }

  return(0);
}
/*@
    MatPartitioningScotchSetCoarseLevel - Sets the coarse level.

    Input Parameters:
.   part - the partitioning context
.   level - the coarse level, in the range [0.0,1.0]

    Level: advanced

@*/
PetscErrorCode MatPartitioningScotchSetCoarseLevel(MatPartitioning part, PetscReal level)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  if (level < 0 || level > 1.0) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "Scotch: level of coarsening out of range [0.0,1.0]");
  } else {
    /**********************************************
     * TODO: Correct this part                    *
     *                                            *
     * This won't work with this nbvtxcoarsed     *
     *                                            *
     * It was only modified so that Scotch        *
     * compiles and PaStiX can be used...         *
     **********************************************/
    scotch->nbvtxcoarsed = 0;
    /* with Scotch 3.0.4 it was: scotch->nbvtxcoarsed = (int)(part->adj->N * level); */
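    /* e.g. with that 3.0.4 formula, level = 0.1 on a 1000-vertex graph would
       request a 100-vertex coarse graph */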
  }
  if (scotch->nbvtxcoarsed < 20) scotch->nbvtxcoarsed = 20;

  return(0);
}
/*@C
    MatPartitioningScotchSetStrategy - Sets the strategy to be used by Scotch.
    This is an alternative way of specifying the global method, the local
    method, the coarse level and the multilevel option.

    Input Parameters:
.   part - the partitioning context
.   strat - the strategy in Scotch format; check the Scotch documentation

    Level: advanced

.seealso: MatPartitioningScotchSetGlobal(), MatPartitioningScotchSetLocal(), MatPartitioningScotchSetCoarseLevel(), MatPartitioningScotchSetMultilevel()
@*/
PetscErrorCode MatPartitioningScotchSetStrategy(MatPartitioning part, char *strat)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  PetscStrncpy(scotch->strategy, strat, 30);  /* scotch->strategy holds at most 30 characters */
  return(0);
}
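/* Example usage (this strategy string mirrors one that
   MatPartitioningApply_Scotch builds internally):

     MatPartitioningScotchSetStrategy(part, "b{strat=g|h f x}");
*/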
/*@
    MatPartitioningScotchSetLocal - Sets the method used for the local partitioning.

    Input Parameters:
.   part - the partitioning context
.   method - MP_SCOTCH_KERNIGHAN_LIN or MP_SCOTCH_NONE

    Level: advanced

@*/
PetscErrorCode MatPartitioningScotchSetLocal(MatPartitioning part, MPScotchLocalType local)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  switch (local) {
  case MP_SCOTCH_KERNIGHAN_LIN:
  case MP_SCOTCH_NONE:
    scotch->local_method = local;
    break;
  default:
    SETERRQ(PETSC_ERR_ARG_CORRUPT, "Scotch: Unknown or unsupported option");
  }

  return(0);
}
/*@C
    MatPartitioningScotchSetArch - Specifies the file that describes the
    architecture used for mapping. The format of this file is documented in
    the Scotch manual.

    Input Parameters:
.   part - the partitioning context
.   filename - the name of the file

    Level: advanced

    Note:
    If the name is not set, then the default "archgraph.src" is used.

.seealso: MatPartitioningScotchSetHostList(), MatPartitioningScotchSetMapping()
@*/
PetscErrorCode MatPartitioningScotchSetArch(MatPartitioning part, const char *filename)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  PetscStrcpy(scotch->arch, filename);

  return(0);
}
/*@C
    MatPartitioningScotchSetHostList - Specifies the host list file used for mapping.

    Input Parameters:
.   part - the partitioning context
.   filename - the name of the file

    Level: advanced

    Notes:
    The file must consist of a list of hostnames (one per line). These hosts
    are the ones referred to in the architecture file (see
    MatPartitioningScotchSetArch()): the first host corresponds to index 0,
    the second one to index 1, and so on.
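    For example, a two-host file might read (hostnames are illustrative):
       node0.cluster.org
       node1.cluster.org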
    If the name is not set, then the default "host_list" is used.

.seealso: MatPartitioningScotchSetArch(), MatPartitioningScotchSetMapping()
@*/
PetscErrorCode MatPartitioningScotchSetHostList(MatPartitioning part, const char *filename)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  PetscStrcpy(scotch->host_list, filename);

  return(0);
}
/*@
    MatPartitioningScotchSetMultilevel - Activates multilevel partitioning.

    Input Parameter:
.   part - the partitioning context

    Level: advanced

@*/
PetscErrorCode MatPartitioningScotchSetMultilevel(MatPartitioning part)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  scotch->multilevel = 1;

  return(0);
}
/*@
    MatPartitioningScotchSetMapping - Activates architecture mapping for the
    partitioning algorithm. Architecture mapping tries to enhance the quality
    of the partitioning by using network topology information.

    Input Parameter:
.   part - the partitioning context

    Level: advanced

.seealso: MatPartitioningScotchSetArch(), MatPartitioningScotchSetHostList()
@*/
PetscErrorCode MatPartitioningScotchSetMapping(MatPartitioning part)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  scotch->map = 1;

  return(0);
}
PetscErrorCode MatPartitioningSetFromOptions_Scotch(MatPartitioning part)
{
  PetscTruth flag;
  char name[PETSC_MAX_PATH_LEN];
  int i;
  PetscReal r;

  const char *global[] = { "greedy", "gps", "gr_gps" };
  const char *local[]  = { "kernighan-lin", "none" };

  PetscOptionsHead("Set Scotch partitioning options");

  PetscOptionsEList("-mat_partitioning_scotch_global",
      "Global method to use", "MatPartitioningScotchSetGlobal", global, 3,
      global[0], &i, &flag);
  if (flag) MatPartitioningScotchSetGlobal(part, (MPScotchGlobalType)i);

  PetscOptionsEList("-mat_partitioning_scotch_local",
      "Local method to use", "MatPartitioningScotchSetLocal", local, 2,
      local[0], &i, &flag);
  if (flag) MatPartitioningScotchSetLocal(part, (MPScotchLocalType)i);

  PetscOptionsName("-mat_partitioning_scotch_mapping", "Use mapping",
      "MatPartitioningScotchSetMapping", &flag);
  if (flag) MatPartitioningScotchSetMapping(part);

  PetscOptionsString("-mat_partitioning_scotch_arch",
      "architecture file in Scotch format", "MatPartitioningScotchSetArch",
      "archgraph.src", name, PETSC_MAX_PATH_LEN, &flag);
  if (flag) MatPartitioningScotchSetArch(part, name);

  PetscOptionsString("-mat_partitioning_scotch_hosts",
      "host list filename", "MatPartitioningScotchSetHostList",
      "host_list", name, PETSC_MAX_PATH_LEN, &flag);
  if (flag) MatPartitioningScotchSetHostList(part, name);

  PetscOptionsReal("-mat_partitioning_scotch_coarse_level",
      "coarse level", "MatPartitioningScotchSetCoarseLevel", 0, &r, &flag);
  if (flag) MatPartitioningScotchSetCoarseLevel(part, r);

  PetscOptionsName("-mat_partitioning_scotch_mul", "Use multilevel partitioning",
      "MatPartitioningScotchSetMultilevel", &flag);
  if (flag) MatPartitioningScotchSetMultilevel(part);

  PetscOptionsString("-mat_partitioning_scotch_strategy",
      "Scotch strategy string", "MatPartitioningScotchSetStrategy", "",
      name, PETSC_MAX_PATH_LEN, &flag);
  if (flag) MatPartitioningScotchSetStrategy(part, name);

  PetscOptionsTail();
  return(0);
}
PetscErrorCode MatPartitioningDestroy_Scotch(MatPartitioning part)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;
  PetscErrorCode ierr;

  PetscFree(scotch->mesg_log);
  PetscFree(scotch);
  return(0);
}
/*MC
   MAT_PARTITIONING_SCOTCH - Creates a partitioning context via the external package SCOTCH.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_scotch_global <greedy> (one of) greedy gps gr_gps
.  -mat_partitioning_scotch_local <kernighan-lin> (one of) kernighan-lin none
.  -mat_partitioning_scotch_mapping: Use mapping (MatPartitioningScotchSetMapping)
.  -mat_partitioning_scotch_arch <archgraph.src>: architecture file in Scotch format (MatPartitioningScotchSetArch)
.  -mat_partitioning_scotch_hosts <host_list>: host list filename (MatPartitioningScotchSetHostList)
.  -mat_partitioning_scotch_coarse_level <0>: coarse level (MatPartitioningScotchSetCoarseLevel)
.  -mat_partitioning_scotch_mul: Use multilevel partitioning (MatPartitioningScotchSetMultilevel)
-  -mat_partitioning_scotch_strategy <>: Scotch strategy string (MatPartitioningScotchSetStrategy)

   Level: beginner

   Notes: See http://www.labri.fr/Perso/~pelegrin/scotch/

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType
M*/
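/* A minimal usage sketch (error checking omitted; 'adj' is assumed to be an
   adjacency matrix of type MATMPIADJ, or one convertible to it):

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD, &part);
     MatPartitioningSetAdjacency(part, adj);
     MatPartitioningSetType(part, MAT_PARTITIONING_SCOTCH);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part, &is);
     ISView(is, PETSC_VIEWER_STDOUT_WORLD);
     ISDestroy(is);
     MatPartitioningDestroy(part);
*/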
PetscErrorCode MatPartitioningCreate_Scotch(MatPartitioning part)
{
  MatPartitioning_Scotch *scotch;

  PetscNewLog(part, MatPartitioning_Scotch, &scotch);
  part->data = (void *) scotch;

  scotch->map           = 0;
  scotch->global_method = MP_SCOTCH_GR_GPS;
  scotch->local_method  = MP_SCOTCH_KERNIGHAN_LIN;
  PetscStrcpy(scotch->arch, "archgraph.src");
  scotch->nbvtxcoarsed  = 200;
  PetscStrcpy(scotch->strategy, "");
  scotch->multilevel    = 0;
  scotch->mesg_log      = NULL;
  PetscStrcpy(scotch->host_list, "host_list");

  part->ops->apply          = MatPartitioningApply_Scotch;
  part->ops->view           = MatPartitioningView_Scotch;
  part->ops->destroy        = MatPartitioningDestroy_Scotch;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Scotch;

  return(0);
}