/* pastix.c: interface to the PaStiX sparse direct solver */
1: #define PETSCMAT_DLL
3: /*
4: Provides an interface to the PaStiX sparse solver
5: */
#include "../src/mat/impls/aij/seq/aij.h"
#include "../src/mat/impls/aij/mpi/mpiaij.h"
#include "../src/mat/impls/sbaij/seq/sbaij.h"
#include "../src/mat/impls/sbaij/mpi/mpisbaij.h"

#include "mpi.h"
#include "pastix.h"
16: typedef struct Mat_Pastix_ {
17: pastix_data_t *pastix_data; /* Pastix data storage structure */
18: MatStructure matstruc;
19: PetscInt n; /* Number of columns in the matrix */
20: PetscInt *colptr; /* Index of first element of each column in row and val */
21: PetscInt *row; /* Row of each element of the matrix */
22: PetscScalar *val; /* Value of each element of the matrix */
23: PetscInt *perm; /* Permutation tabular */
24: PetscInt *invp; /* Reverse permutation tabular */
25: PetscScalar *rhs; /* Rhight-hand-side member */
26: PetscInt rhsnbr; /* Rhight-hand-side number (must be 1) */
27: PetscInt iparm[64]; /* Integer parameters */
28: double dparm[64]; /* Floating point parameters */
29: MPI_Comm pastix_comm; /* PaStiX MPI communicator */
30: PetscMPIInt commRank; /* MPI rank */
31: PetscMPIInt commSize; /* MPI communicator size */
32: PetscTruth CleanUpPastix; /* Boolean indicating if we call PaStiX clean step */
33: VecScatter scat_rhs;
34: VecScatter scat_sol;
35: Vec b_seq,x_seq;
36: PetscTruth isAIJ;
37: PetscInt nSolve; /* Number of consecutive solve */
38: PetscErrorCode (*MatDestroy)(Mat);
39: } Mat_Pastix;
41: EXTERN PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);
43: /*
44: convert Petsc seqaij matrix to CSC: colptr[n], row[nz], val[nz]
46: input:
47: A - matrix in seqaij or mpisbaij (bs=1) format
48: valOnly - FALSE: spaces are allocated and values are set for the CSC
49: TRUE: Only fill values
50: output:
51: n - Size of the matrix
52: colptr - Index of first element of each column in row and val
53: row - Row of each element of the matrix
54: values - Value of each element of the matrix
55: */
56: PetscErrorCode MatConvertToCSC(Mat A,PetscTruth valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
57: {
58: Mat_SeqAIJ *aa = (Mat_SeqAIJ*)A->data;
59: PetscInt *rowptr = aa->i;
60: PetscInt *col = aa->j;
61: PetscScalar *rvalues = aa->a;
62: PetscInt m = A->rmap->N;
63: PetscInt nnz;
64: PetscInt i,j, k;
65: PetscInt base = 1;
66: PetscInt idx;
67: PetscErrorCode ierr;
68: PetscInt colidx;
69: PetscInt *colcount;
70: PetscTruth isSBAIJ;
71: PetscTruth isSeqSBAIJ;
72: PetscTruth isMpiSBAIJ;
73: PetscTruth isSym;
74:
77: /* Allocate the CSC */
79: MatIsSymmetric(A,0.0,&isSym);
80: PetscTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
81: PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
82: PetscTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);
84: *n = A->cmap->N;
86: /* PaStiX only needs triangular matrix if matrix is symmetric
87: */
88: if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) {
89: nnz = (aa->nz - *n)/2 + *n;
90: }
91: else {
92: nnz = aa->nz;
93: }
95: if (!valOnly){
96: PetscMalloc(((*n)+1) *sizeof(PetscInt) ,colptr );
97: PetscMalloc( nnz *sizeof(PetscInt) ,row);
98: PetscMalloc( nnz *sizeof(PetscScalar),values);
100: if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
101: fprintf(stdout, "prout\n");
102: PetscMemcpy (*colptr, rowptr, ((*n)+1)*sizeof(PetscInt));
103: for (i = 0; i < *n+1; i++)
104: (*colptr)[i] += base;
105: PetscMemcpy (*row, col, (nnz)*sizeof(PetscInt));
106: for (i = 0; i < nnz; i++)
107: (*row)[i] += base;
108: PetscMemcpy (*values, rvalues, (nnz)*sizeof(PetscScalar));
109: } else {
110: PetscMalloc((*n)*sizeof(PetscInt) ,&colcount);
112: for (i = 0; i < m; i++)
113: colcount[i] = 0;
114: /* Fill-in colptr */
115: for (i = 0; i < m; i++)
116: for (j = rowptr[i]; j < rowptr[i+1]; j++)
117: if (!isSym || col[j] <= i)
118: colcount[col[j]]++;
119:
120: (*colptr)[0] = base;
121: for (j = 0; j < *n; j++) {
122: (*colptr)[j+1] = (*colptr)[j] + colcount[j];
123: /* in next loop we fill starting from (*colptr)[colidx] - base */
124: colcount[j] = -base;
125: }
126:
127: /* Fill-in rows and values */
128: for (i = 0; i < m; i++) {
129: for (j = rowptr[i]; j < rowptr[i+1]; j++) {
130: if (!isSym || col[j] <= i) {
131: colidx = col[j];
132: idx = (*colptr)[colidx] + colcount[colidx];
133: (*row)[idx] = i + base;
134: (*values)[idx] = rvalues[j];
135: colcount[colidx]++;
136: }
137: }
138: }
139: PetscFree(colcount);
140: }
141: } else {
142: /* Fill-in only values */
143: for (i = 0; i < m; i++) {
144: for (j = rowptr[i]; j < rowptr[i+1]; j++) {
145: colidx = col[j];
146: if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) ||!isSym || col[j] <= i)
147: {
148: /* look for the value to fill */
149: for (k = (*colptr)[colidx] - base;
150: k < (*colptr)[colidx + 1] - base;
151: k++) {
152: if ((*row)[k] == i) {
153: (*values)[k] = rvalues[j];
154: break;
155: }
156: }
157: /* shouldn't happen, overflow */
158: if (k == (*colptr)[colidx + 1] - base)
159: PetscFunctionReturn(1);
160: }
161: }
162: }
163: }
164: return(0);
165: }
171: /*
172: Call clean step of PaStiX if lu->CleanUpPastix == true.
173: Free the CSC matrix.
174: */
175: PetscErrorCode MatDestroy_Pastix(Mat A)
176: {
177: Mat_Pastix *lu=(Mat_Pastix*)A->spptr;
178: PetscErrorCode ierr;
179: PetscMPIInt size=lu->commSize;
182: if (lu->CleanUpPastix) {
183: /* Terminate instance, deallocate memories */
184: if (size > 1){
185: VecScatterDestroy(lu->scat_rhs);
186: VecDestroy(lu->b_seq);
187: if (lu->nSolve && lu->scat_sol){VecScatterDestroy(lu->scat_sol);}
188: if (lu->nSolve && lu->x_seq){VecDestroy(lu->x_seq);}
189: }
190:
191: lu->iparm[IPARM_START_TASK]=API_TASK_CLEAN;
192: lu->iparm[IPARM_END_TASK]=API_TASK_CLEAN;
194: pastix((pastix_data_t **)&(lu->pastix_data),
195: lu->pastix_comm,
196: (pastix_int_t) lu->n,
197: (pastix_int_t*) lu->colptr,
198: (pastix_int_t*) lu->row,
199: (pastix_float_t*) lu->val,
200: (pastix_int_t*) lu->perm,
201: (pastix_int_t*) lu->invp,
202: (pastix_float_t*) lu->rhs,
203: (pastix_int_t) lu->rhsnbr,
204: (pastix_int_t*) lu->iparm,
205: lu->dparm);
207: PetscFree(lu->colptr);
208: PetscFree(lu->row);
209: PetscFree(lu->val);
210: PetscFree(lu->perm);
211: PetscFree(lu->invp);
212: /* PetscFree(lu->rhs); */
213: MPI_Comm_free(&(lu->pastix_comm));
214: }
215: (lu->MatDestroy)(A);
216: return(0);
217: }
221: /*
222: Gather right-hand-side.
223: Call for Solve step.
224: Scatter solution.
225: */
226: PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
227: {
228: Mat_Pastix *lu=(Mat_Pastix*)A->spptr;
229: PetscScalar *array;
230: Vec x_seq;
231: PetscErrorCode ierr;
234: lu->rhsnbr = 1;
235: x_seq = lu->b_seq;
236: if (lu->commSize > 1){
237: /* PaStiX only supports centralized rhs. Scatter b into a seqential rhs vector */
238: VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
239: VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
240: VecGetArray(x_seq,&array);
241: } else { /* size == 1 */
242: VecCopy(b,x);
243: VecGetArray(x,&array);
244: }
245: lu->rhs = array;
246: if (lu->commSize == 1){
247: VecRestoreArray(x,&array);
248: } else {
249: VecRestoreArray(x_seq,&array);
250: }
252: /* solve phase */
253: /*-------------*/
254: lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
255: lu->iparm[IPARM_END_TASK] = API_TASK_REFINE;
256: lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;
257:
258: pastix((pastix_data_t **)&(lu->pastix_data),
259: (MPI_Comm) lu->pastix_comm,
260: (pastix_int_t) lu->n,
261: (pastix_int_t*) lu->colptr,
262: (pastix_int_t*) lu->row,
263: (pastix_float_t*) lu->val,
264: (pastix_int_t*) lu->perm,
265: (pastix_int_t*) lu->invp,
266: (pastix_float_t*) lu->rhs,
267: (pastix_int_t) lu->rhsnbr,
268: (pastix_int_t*) lu->iparm,
269: (double*) lu->dparm);
270:
271: if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
272: SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER] );
273: }
275: if (lu->commSize == 1){
276: VecRestoreArray(x,&(lu->rhs));
277: } else {
278: VecRestoreArray(x_seq,&(lu->rhs));
279: }
281: if (lu->commSize > 1) { /* convert PaStiX centralized solution to petsc mpi x */
282: VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
283: VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
284: }
285: lu->nSolve++;
286: return(0);
287: }
289: #if !defined(PETSC_USE_COMPLEX)
290: /*
291: TODO: Fill this function
292: I didn't fill this function
because I did not understand its goal.
294: */
296: /*
297: input:
298: F: numeric factor
299: output:
300: nneg: total number of pivots
301: nzero: 0
302: npos: (global dimension of F) - nneg
303: */
307: PetscErrorCode MatGetInertia_SBAIJPASTIX(Mat F,int *nneg,int *nzero,int *npos)
308: {
310: /* MPI_Comm_size(((PetscObject)F)->comm,&size); */
311: /* /\* PASTIX 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK *\/ */
312: /* if (size > 1 && lu->id.ICNTL(13) != 1){ */
313: /* SETERRQ1(PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_pastix_icntl_13 must be set as 1 for correct global matrix inertia\n",lu->id.INFOG(13)); */
314: /* } */
315: /* if (nneg){ */
316: /* if (!lu->commSize){ */
317: /* *nneg = lu->id.INFOG(12); */
318: /* } */
319: /* MPI_Bcast(nneg,1,MPI_INT,0,lu->comm_pastix); */
320: /* } */
321: /* if (nzero) *nzero = lu->iparm[IPARM_NNZEROS]; */
322: /* if (npos) *npos = F->rmap->N - (*nneg); */
323: return(0);
324: }
325: #endif /* !defined(PETSC_USE_COMPLEX) */
327: /*
328: Numeric factorisation using PaStiX solver.
330: */
333: PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
334: {
335: Mat_Pastix *lu =(Mat_Pastix*)(F)->spptr;
336: Mat *tseq;
337: PetscErrorCode 0;
338: PetscInt icntl;
339: PetscInt M=A->rmap->N;
340: PetscTruth valOnly,flg, isSym;
341: Mat F_diag;
342: IS is_iden;
343: Vec b;
344: IS isrow;
345: PetscTruth isSeqAIJ,isSeqSBAIJ;
348:
349: PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
350: PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
351: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
352: (F)->ops->solve = MatSolve_PaStiX;
354: /* Initialize a PASTIX instance */
355: MPI_Comm_dup(((PetscObject)A)->comm,&(lu->pastix_comm));
356: MPI_Comm_rank(lu->pastix_comm, &lu->commRank);
357: MPI_Comm_size(lu->pastix_comm, &lu->commSize);
359: /* Set pastix options */
360: lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
361: lu->iparm[IPARM_START_TASK] = API_TASK_INIT;
362: lu->iparm[IPARM_END_TASK] = API_TASK_INIT;
363: lu->rhsnbr = 1;
365: /* Call to set default pastix options */
366: pastix((pastix_data_t **)&(lu->pastix_data),
367: (MPI_Comm) lu->pastix_comm,
368: (pastix_int_t) lu->n,
369: (pastix_int_t*) lu->colptr,
370: (pastix_int_t*) lu->row,
371: (pastix_float_t*) lu->val,
372: (pastix_int_t*) lu->perm,
373: (pastix_int_t*) lu->invp,
374: (pastix_float_t*) lu->rhs,
375: (pastix_int_t) lu->rhsnbr,
376: (pastix_int_t*) lu->iparm,
377: (double*) lu->dparm);
379: PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"PaStiX Options","Mat");
381: icntl=-1;
382: lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
383: PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
384: if ((flg && icntl > 0) || PetscLogPrintInfo) {
385: lu->iparm[IPARM_VERBOSE] = icntl;
386: }
387: icntl=-1;
388: PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : Number of thread by MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,PETSC_NULL);
389: if ((flg && icntl > 0)) {
390: lu->iparm[IPARM_THREAD_NBR] = icntl;
391: }
392: PetscOptionsEnd();
393: valOnly = PETSC_FALSE;
394: } else {
395: valOnly = PETSC_TRUE;
396: }
398: lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;
400: /* convert mpi A to seq mat A */
401: ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
402: MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
403: ISDestroy(isrow);
405: MatConvertToCSC(*tseq,valOnly, &lu->n, &lu->colptr, &lu->row, &lu->val);
406: MatIsSymmetric(*tseq,0.0,&isSym);
407: MatDestroyMatrices(1,&tseq);
409: PetscMalloc((lu->n)*sizeof(PetscInt) ,&(lu->perm));
410: PetscMalloc((lu->n)*sizeof(PetscInt) ,&(lu->invp));
413: if (isSym) {
414: /* On symmetric matrix, LLT */
415: lu->iparm[IPARM_SYM] = API_SYM_YES;
416: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
417: }
418: else {
419: /* On unsymmetric matrix, LU */
420: lu->iparm[IPARM_SYM] = API_SYM_NO;
421: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
422: }
423:
424: /*----------------*/
425: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
426: if (!(isSeqAIJ || isSeqSBAIJ)) {
427: /* PaStiX only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
428: VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
429: ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
430: VecCreate(((PetscObject)A)->comm,&b);
431: VecSetSizes(b,A->rmap->n,PETSC_DECIDE);
432: VecSetFromOptions(b);
433:
434: VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
435: VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
436: ISDestroy(is_iden);
437: VecDestroy(b);
438: }
439: lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
440: lu->iparm[IPARM_END_TASK] = API_TASK_NUMFACT;
442: pastix((pastix_data_t **)&(lu->pastix_data),
443: (MPI_Comm) lu->pastix_comm,
444: (pastix_int_t) lu->n,
445: (pastix_int_t*) lu->colptr,
446: (pastix_int_t*) lu->row,
447: (pastix_float_t*) lu->val,
448: (pastix_int_t*) lu->perm,
449: (pastix_int_t*) lu->invp,
450: (pastix_float_t*) lu->rhs,
451: (pastix_int_t) lu->rhsnbr,
452: (pastix_int_t*) lu->iparm,
453: (double*) lu->dparm);
454: if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
455: SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
456: }
457: } else {
458: lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
459: lu->iparm[IPARM_END_TASK] = API_TASK_NUMFACT;
460: pastix((pastix_data_t **)&(lu->pastix_data),
461: (MPI_Comm) lu->pastix_comm,
462: (pastix_int_t) lu->n,
463: (pastix_int_t*) lu->colptr,
464: (pastix_int_t*) lu->row,
465: (pastix_float_t*) lu->val,
466: (pastix_int_t*) lu->perm,
467: (pastix_int_t*) lu->invp,
468: (pastix_float_t*) lu->rhs,
469: (pastix_int_t) lu->rhsnbr,
470: (pastix_int_t*) lu->iparm,
471: (double*) lu->dparm);
473: if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
474: SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
475: }
476: }
478: if (lu->commSize > 1){
479: if ((F)->factor == MAT_FACTOR_LU){
480: F_diag = ((Mat_MPIAIJ *)(F)->data)->A;
481: } else {
482: F_diag = ((Mat_MPISBAIJ *)(F)->data)->A;
483: }
484: F_diag->assembled = PETSC_TRUE;
485: if (lu->nSolve){
486: VecScatterDestroy(lu->scat_sol);
487: VecDestroy(lu->x_seq);
488: }
489: }
490: (F)->assembled = PETSC_TRUE;
491: lu->matstruc = SAME_NONZERO_PATTERN;
492: lu->CleanUpPastix = PETSC_TRUE;
493: lu->nSolve = 0;
494: return(0);
495: }
498: /* Note the Petsc r and c permutations are ignored */
501: PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
502: {
503: Mat_Pastix *lu = (Mat_Pastix*)F->spptr;
506: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
507: lu->iparm[IPARM_SYM] = API_SYM_YES;
508: lu->matstruc = DIFFERENT_NONZERO_PATTERN;
509: F->ops->lufactornumeric = MatFactorNumeric_PaStiX;
510: return(0);
511: }
514: /* Note the Petsc r permutation is ignored */
517: PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
518: {
519: Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;
522: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LLT;
523: lu->iparm[IPARM_SYM] = API_SYM_NO;
524: lu->matstruc = DIFFERENT_NONZERO_PATTERN;
525: (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
526: #if !defined(PETSC_USE_COMPLEX)
527: (F)->ops->getinertia = MatGetInertia_SBAIJPASTIX;
528: #endif
529: return(0);
530: }
534: PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
535: {
536: PetscErrorCode ierr;
537: PetscTruth iascii;
538: PetscViewerFormat format;
541: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
542: if (iascii) {
543: PetscViewerGetFormat(viewer,&format);
544: if (format == PETSC_VIEWER_ASCII_INFO){
545: Mat_Pastix *lu=(Mat_Pastix*)A->spptr;
547: PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
548: PetscViewerASCIIPrintf(viewer," Matrix type : %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES)?"Symmetric":"Unsymmetric"));
549: PetscViewerASCIIPrintf(viewer," Level of printing (0,1,2): %d \n",lu->iparm[IPARM_VERBOSE]);
550: PetscViewerASCIIPrintf(viewer," Number of refinements iterations : %d \n",lu->iparm[IPARM_NBITER]);
551: PetscPrintf(PETSC_COMM_SELF," Error : %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
552: }
553: }
554: return(0);
555: }
558: /*MC
  MAT_SOLVER_PASTIX - A solver package providing direct solvers (LU and Cholesky) for distributed
  and sequential matrices via the external package PaStiX.
562: Use config/configure.py --download-pastix to have PETSc installed with PaStiX
564: Options Database Keys:
565: + -mat_pastix_verbose <0,1,2> - print level
566: - -mat_pastix_threadnbr <integer> - Set the thread number by MPI task.
568: Level: beginner
570: .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
572: M*/
577: PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
578: {
579: Mat_Pastix *lu =(Mat_Pastix*)A->spptr;
582: info->block_size = 1.0;
583: info->nz_allocated = lu->iparm[IPARM_NNZEROS];
584: info->nz_used = lu->iparm[IPARM_NNZEROS];
585: info->nz_unneeded = 0.0;
586: info->assemblies = 0.0;
587: info->mallocs = 0.0;
588: info->memory = 0.0;
589: info->fill_ratio_given = 0;
590: info->fill_ratio_needed = 0;
591: info->factor_mallocs = 0;
592: return(0);
593: }
598: PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
599: {
601: *type = MAT_SOLVER_PASTIX;
602: return(0);
603: }
607: /*
608: The seq and mpi versions of this function are the same
609: */
612: PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
613: {
614: Mat B;
616: Mat_Pastix *pastix;
619: if (ftype != MAT_FACTOR_LU) {
620: SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
621: }
622: /* Create the factorization matrix */
623: MatCreate(((PetscObject)A)->comm,&B);
624: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
625: MatSetType(B,((PetscObject)A)->type_name);
626: MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
628: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
629: B->ops->view = MatView_PaStiX;
630: B->ops->getinfo = MatGetInfo_PaStiX;
631: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C",
632: "MatFactorGetSolverPackage_pastix",
633: MatFactorGetSolverPackage_pastix);
634: B->factor = MAT_FACTOR_LU;
636: PetscNewLog(B,Mat_Pastix,&pastix);
637: pastix->CleanUpPastix = PETSC_FALSE;
638: pastix->isAIJ = PETSC_TRUE;
639: pastix->scat_rhs = PETSC_NULL;
640: pastix->scat_sol = PETSC_NULL;
641: pastix->nSolve = 0;
642: pastix->MatDestroy = B->ops->destroy;
643: B->ops->destroy = MatDestroy_Pastix;
644: B->spptr = (void*)pastix;
646: *F = B;
647: return(0);
648: }
655: PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
656: {
657: Mat B;
659: Mat_Pastix *pastix;
662: if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
663: /* Create the factorization matrix */
664: MatCreate(((PetscObject)A)->comm,&B);
665: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
666: MatSetType(B,((PetscObject)A)->type_name);
667: MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
668: MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);
670: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
671: B->ops->view = MatView_PaStiX;
672: PetscObjectComposeFunctionDynamic((PetscObject)B,
673: "MatFactorGetSolverPackage_C",
674: "MatFactorGetSolverPackage_pastix",
675: MatFactorGetSolverPackage_pastix);
676: B->factor = MAT_FACTOR_LU;
678: PetscNewLog(B,Mat_Pastix,&pastix);
679: pastix->CleanUpPastix = PETSC_FALSE;
680: pastix->isAIJ = PETSC_TRUE;
681: pastix->scat_rhs = PETSC_NULL;
682: pastix->scat_sol = PETSC_NULL;
683: pastix->nSolve = 0;
684: pastix->MatDestroy = B->ops->destroy;
685: B->ops->destroy = MatDestroy_Pastix;
686: B->spptr = (void*)pastix;
688: *F = B;
689: return(0);
690: }
696: PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
697: {
698: Mat B;
700: Mat_Pastix *pastix;
703: if (ftype != MAT_FACTOR_CHOLESKY) {
704: SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
705: }
706: /* Create the factorization matrix */
707: MatCreate(((PetscObject)A)->comm,&B);
708: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
709: MatSetType(B,((PetscObject)A)->type_name);
710: MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
711: MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);
713: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
714: B->ops->view = MatView_PaStiX;
715: PetscObjectComposeFunctionDynamic((PetscObject)B,
716: "MatFactorGetSolverPackage_C",
717: "MatFactorGetSolverPackage_pastix",
718: MatFactorGetSolverPackage_pastix);
720: B->factor = MAT_FACTOR_CHOLESKY;
722: PetscNewLog(B,Mat_Pastix,&pastix);
723: pastix->CleanUpPastix = PETSC_FALSE;
724: pastix->isAIJ = PETSC_TRUE;
725: pastix->scat_rhs = PETSC_NULL;
726: pastix->scat_sol = PETSC_NULL;
727: pastix->nSolve = 0;
728: pastix->MatDestroy = B->ops->destroy;
729: B->ops->destroy = MatDestroy_Pastix;
730: B->spptr = (void*)pastix;
732: *F = B;
733: return(0);
734: }
740: PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
741: {
742: Mat B;
744: Mat_Pastix *pastix;
745:
747: if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
749: /* Create the factorization matrix */
750: MatCreate(((PetscObject)A)->comm,&B);
751: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
752: MatSetType(B,((PetscObject)A)->type_name);
753: MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
754: MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);
756: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
757: B->ops->view = MatView_PaStiX;
758: PetscObjectComposeFunctionDynamic((PetscObject)B,
759: "MatFactorGetSolverPackage_C",
760: "MatFactorGetSolverPackage_pastix",
761: MatFactorGetSolverPackage_pastix);
762: B->factor = MAT_FACTOR_CHOLESKY;
764: PetscNewLog(B,Mat_Pastix,&pastix);
765: pastix->CleanUpPastix = PETSC_FALSE;
766: pastix->isAIJ = PETSC_TRUE;
767: pastix->scat_rhs = PETSC_NULL;
768: pastix->scat_sol = PETSC_NULL;
769: pastix->nSolve = 0;
770: pastix->MatDestroy = B->ops->destroy;
771: B->ops->destroy = MatDestroy_Pastix;
772: B->spptr = (void*)pastix;
774: *F = B;
775: return(0);
776: }