Actual source code: xyt.c
1: #define PETSCKSP_DLL
3: /*************************************xyt.c************************************
4: Module Name: xyt
5: Module Info:
7: author: Henry M. Tufo III
8: e-mail: hmt@asci.uchicago.edu
9: contact:
10: +--------------------------------+--------------------------------+
11: |MCS Division - Building 221 |Department of Computer Science |
12: |Argonne National Laboratory |Ryerson 152 |
13: |9700 S. Cass Avenue |The University of Chicago |
14: |Argonne, IL 60439 |Chicago, IL 60637 |
15: |(630) 252-5354/5986 ph/fx |(773) 702-6019/8487 ph/fx |
16: +--------------------------------+--------------------------------+
18: Last Modification: 3.20.01
19: **************************************xyt.c***********************************/
20: #include "../src/ksp/pc/impls/tfs/tfs.h"
22: #define LEFT -1
23: #define RIGHT 1
24: #define BOTH 0
26: typedef struct xyt_solver_info {
27: PetscInt n, m, n_global, m_global;
28: PetscInt nnz, max_nnz, msg_buf_sz;
29: PetscInt *nsep, *lnsep, *fo, nfo, *stages;
30: PetscInt *xcol_sz, *xcol_indices;
31: PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
32: PetscInt *ycol_sz, *ycol_indices;
33: PetscScalar **ycol_vals, *y;
34: PetscInt nsolves;
35: PetscScalar tot_solve_time;
36: } xyt_info;
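/* Editorial note (added; not part of the original source): x and y hold the packed
   nonzero windows of the columns of X and Y built in xyt_generate() below;
   xcol_indices/ycol_indices store one (offset,length) pair per column, terminated
   by -1, and xcol_vals[i]/ycol_vals[i] point into the packed x/y arrays. */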
38:
39: typedef struct matvec_info {
40: PetscInt n, m, n_global, m_global;
41: PetscInt *local2global;
42: gs_ADT gs_handle;
43: PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
44: void *grid_data;
45: } mv_info;
47: struct xyt_CDT{
48: PetscInt id;
49: PetscInt ns;
50: PetscInt level;
51: xyt_info *info;
52: mv_info *mvi;
53: };
55: static PetscInt n_xyt=0;
56: static PetscInt n_xyt_handles=0;
58: /* prototypes */
59: static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
60: static PetscErrorCode check_handle(xyt_ADT xyt_handle);
61: static PetscErrorCode det_separators(xyt_ADT xyt_handle);
62: static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
63: static PetscInt xyt_generate(xyt_ADT xyt_handle);
64: static PetscInt do_xyt_factor(xyt_ADT xyt_handle);
65: static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);
67: /**************************************xyt.c***********************************/
68: xyt_ADT XYT_new(void)
69: {
70: xyt_ADT xyt_handle;
72: /* rolling count on n_xyt ... potential problem here */
73: n_xyt_handles++;
74: xyt_handle = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
75: xyt_handle->id = ++n_xyt;
76: xyt_handle->info = NULL;
77: xyt_handle->mvi = NULL;
79: return(xyt_handle);
80: }
82: /**************************************xyt.c***********************************/
83: PetscInt XYT_factor(xyt_ADT xyt_handle, /* prev. allocated xyt handle */
84: PetscInt *local2global, /* global column mapping */
85: PetscInt n, /* local num rows */
86: PetscInt m, /* local num cols */
87: void *matvec, /* b_loc=A_local.x_loc */
88: void *grid_data /* grid data for matvec */
89: )
90: {
92: comm_init();
93: check_handle(xyt_handle);
95: /* only 2^k for now and all nodes participating */
96: if ((1<<(xyt_handle->level=i_log2_num_nodes))!=num_nodes)
97: {SETERRQ2(PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<i_log2_num_nodes,num_nodes);}
99: /* space for X info */
100: xyt_handle->info = (xyt_info*)malloc(sizeof(xyt_info));
102: /* set up matvec handles */
103: xyt_handle->mvi = set_mvi(local2global, n, m, matvec, grid_data);
105: /* matrix is assumed to be of full rank */
106: /* LATER we can reset to indicate rank deficiency */
107: xyt_handle->ns=0;
109: /* determine separators and generate firing order - NB xyt info set here */
110: det_separators(xyt_handle);
112: return(do_xyt_factor(xyt_handle));
113: }
115: /**************************************xyt.c***********************************/
116: PetscInt XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
117: {
118: comm_init();
119: check_handle(xyt_handle);
121: /* need to copy b into x? */
122: if (b)
123: {rvec_copy(x,b,xyt_handle->mvi->n);}
124: do_xyt_solve(xyt_handle,x);
126: return(0);
127: }
129: /**************************************xyt.c***********************************/
130: PetscInt XYT_free(xyt_ADT xyt_handle)
131: {
132: comm_init();
133: check_handle(xyt_handle);
134: n_xyt_handles--;
136: free(xyt_handle->info->nsep);
137: free(xyt_handle->info->lnsep);
138: free(xyt_handle->info->fo);
139: free(xyt_handle->info->stages);
140: free(xyt_handle->info->solve_uu);
141: free(xyt_handle->info->solve_w);
142: free(xyt_handle->info->x);
143: free(xyt_handle->info->xcol_vals);
144: free(xyt_handle->info->xcol_sz);
145: free(xyt_handle->info->xcol_indices);
146: free(xyt_handle->info->y);
147: free(xyt_handle->info->ycol_vals);
148: free(xyt_handle->info->ycol_sz);
149: free(xyt_handle->info->ycol_indices);
150: free(xyt_handle->info);
151: free(xyt_handle->mvi->local2global);
152: gs_free(xyt_handle->mvi->gs_handle);
153: free(xyt_handle->mvi);
154: free(xyt_handle);
156:
157: /* if the check fails we nuke */
158: /* if NULL pointer passed to free we nuke */
159: /* if the calls to free fail that's not my problem */
160: return(0);
161: }
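/* Editorial sketch (added; not part of the original source): a typical lifecycle
   of the routines above, assuming a user-supplied local matvec `my_matvec`,
   column mapping `l2g`, and matvec context `my_grid` (hypothetical names).
   XYT_solve() copies b into x when b is non-NULL and then applies the factored
   inverse in place:

       xyt_ADT handle = XYT_new();
       XYT_factor(handle, l2g, n, m, (void*)my_matvec, my_grid);
       XYT_solve(handle, x, b);
       XYT_stats(handle);
       XYT_free(handle);
*/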
163: /**************************************xyt.c***********************************/
164: PetscInt XYT_stats(xyt_ADT xyt_handle)
165: {
166: PetscInt op[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
167: PetscInt fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
168: PetscInt vals[9], work[9];
169: PetscScalar fvals[3], fwork[3];
172: comm_init();
173: check_handle(xyt_handle);
175: /* if factorization not done there are no stats */
176: if (!xyt_handle->info||!xyt_handle->mvi)
177: {
178: if (!my_id)
179: {PetscPrintf(PETSC_COMM_WORLD,"XYT_stats() :: no stats available!\n");}
180: return 1;
181: }
183: vals[0]=vals[1]=vals[2]=xyt_handle->info->nnz;
184: vals[3]=vals[4]=vals[5]=xyt_handle->mvi->n;
185: vals[6]=vals[7]=vals[8]=xyt_handle->info->msg_buf_sz;
186: giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
188: fvals[0]=fvals[1]=fvals[2]
189: =xyt_handle->info->tot_solve_time/xyt_handle->info->nsolves++;
190: grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);
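/* Editorial note (added): the op/fop arrays request, entry by entry, min, max,
   and sum reductions, so vals[0..2] become the min/max/total nnz, vals[3..5]
   the min/max/total local size, vals[6..8] the message-buffer stats, and
   fvals[0..2] the min/max/sum of the per-process average solve time printed
   below. */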
192: if (!my_id)
193: {
194: PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_nnz=%D\n",my_id,vals[0]);
195: PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_nnz=%D\n",my_id,vals[1]);
196: PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_nnz=%g\n",my_id,1.0*vals[2]/num_nodes);
197: PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xyt_nnz=%D\n",my_id,vals[2]);
198: PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt C(2d) =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.5)));
199: PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt C(3d) =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.6667)));
200: PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_n =%D\n",my_id,vals[3]);
201: PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_n =%D\n",my_id,vals[4]);
202: PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_n =%g\n",my_id,1.0*vals[5]/num_nodes);
203: PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xyt_n =%D\n",my_id,vals[5]);
204: PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_buf=%D\n",my_id,vals[6]);
205: PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_buf=%D\n",my_id,vals[7]);
206: PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_buf=%g\n",my_id,1.0*vals[8]/num_nodes);
207: PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_slv=%g\n",my_id,fvals[0]);
208: PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_slv=%g\n",my_id,fvals[1]);
209: PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_slv=%g\n",my_id,fvals[2]/num_nodes);
210: }
212: return(0);
213: }
216: /*************************************xyt.c************************************
218: Description: get A_local, the local portion of the global coarse matrix, which
219: is a row-distributed nxm matrix with n<m.
220: o my_ml holds address of ML struct associated w/A_local and coarse grid
221: o local2global holds global number of column i (i=0,...,m-1)
222: o local2global holds global number of row i (i=0,...,n-1)
223: o mylocmatvec performs A_local . vec_local (note that gs is performed using
224: gs_init/gop).
226: mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
227: mylocmatvec (void :: void *data, double *in, double *out)
228: **************************************xyt.c***********************************/
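/* Editorial sketch (added; not part of the original source): do_matvec() below
   invokes the user callback as A->matvec((mv_info*)A->grid_data, v, u), so the
   routine handed to XYT_factor() is expected to look roughly like

       PetscErrorCode my_matvec(void *grid_data, PetscScalar *in, PetscScalar *out);

   where `my_matvec` is a hypothetical name and out = A_local . in on the local
   rows. */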
229: static PetscInt do_xyt_factor(xyt_ADT xyt_handle)
230: {
231: return xyt_generate(xyt_handle);
232: }
234: /**************************************xyt.c***********************************/
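/* Editorial note (added; a hedged summary of the loop below): for each fired
   separator column, the code builds v_l = e_col - X.(Y^T.(A e_col)), sets
   u_l = A v_l, and scales both by 1/sqrt(u_l^T u_l).  In exact arithmetic the
   stored columns then satisfy Y = A X with Y^T Y = I, so X.Y^T acts as the
   inverse of A on the space spanned by the fired columns; do_xyt_solve()
   applies exactly this as x = X.(Y^T.b). */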
235: static PetscInt xyt_generate(xyt_ADT xyt_handle)
236: {
237: PetscInt i,j,k,idx;
238: PetscInt dim, col;
239: PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
240: PetscInt *segs;
241: PetscInt op[] = {GL_ADD,0};
242: PetscInt off, len;
243: PetscScalar *x_ptr, *y_ptr;
244: PetscInt *iptr, flag;
245: PetscInt start=0, end, work;
246: PetscInt op2[] = {GL_MIN,0};
247: gs_ADT gs_handle;
248: PetscInt *nsep, *lnsep, *fo;
249: PetscInt a_n=xyt_handle->mvi->n;
250: PetscInt a_m=xyt_handle->mvi->m;
251: PetscInt *a_local2global=xyt_handle->mvi->local2global;
252: PetscInt level;
253: PetscInt n, m;
254: PetscInt *xcol_sz, *xcol_indices, *stages;
255: PetscScalar **xcol_vals, *x;
256: PetscInt *ycol_sz, *ycol_indices;
257: PetscScalar **ycol_vals, *y;
258: PetscInt n_global;
259: PetscInt xt_nnz=0, xt_max_nnz=0;
260: PetscInt yt_nnz=0, yt_max_nnz=0;
261: PetscInt xt_zero_nnz =0;
262: PetscInt xt_zero_nnz_0=0;
263: PetscInt yt_zero_nnz =0;
264: PetscInt yt_zero_nnz_0=0;
265: PetscBLASInt i1 = 1,dlen;
266: PetscScalar dm1 = -1.0;
269: n=xyt_handle->mvi->n;
270: nsep=xyt_handle->info->nsep;
271: lnsep=xyt_handle->info->lnsep;
272: fo=xyt_handle->info->fo;
273: end=lnsep[0];
274: level=xyt_handle->level;
275: gs_handle=xyt_handle->mvi->gs_handle;
277: /* is there a null space? */
278: /* LATER add in ability to detect null space by checking alpha */
279: for (i=0, j=0; i<=level; i++)
280: {j+=nsep[i];}
282: m = j-xyt_handle->ns;
283: if (m!=j)
284: {PetscPrintf(PETSC_COMM_WORLD,"xyt_generate() :: null space exists %D %D %D\n",m,j,xyt_handle->ns);}
286: PetscInfo2(0,"xyt_generate() :: X(%D,%D)\n",n,m);
288: /* get and initialize storage for x local */
289: /* note that x local is nxm and stored by columns */
290: xcol_sz = (PetscInt*) malloc(m*sizeof(PetscInt));
291: xcol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
292: xcol_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
293: for (i=j=0; i<m; i++, j+=2)
294: {
295: xcol_indices[j]=xcol_indices[j+1]=xcol_sz[i]=-1;
296: xcol_vals[i] = NULL;
297: }
298: xcol_indices[j]=-1;
300: /* get and initialize storage for y local */
301: /* note that y local is nxm and stored by columns */
302: ycol_sz = (PetscInt*) malloc(m*sizeof(PetscInt));
303: ycol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
304: ycol_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
305: for (i=j=0; i<m; i++, j+=2)
306: {
307: ycol_indices[j]=ycol_indices[j+1]=ycol_sz[i]=-1;
308: ycol_vals[i] = NULL;
309: }
310: ycol_indices[j]=-1;
312: /* size of separators for each sub-hc working from bottom of tree to top */
313: /* this looks like nsep[]=segments */
314: stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
315: segs = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
316: ivec_zero(stages,level+1);
317: ivec_copy(segs,nsep,level+1);
318: for (i=0; i<level; i++)
319: {segs[i+1] += segs[i];}
320: stages[0] = segs[0];
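/* Editorial note (added): segs[] now holds cumulative separator counts, so
   segs[dim] is the first column index belonging to the next hypercube level;
   stages[] records the column index at which each level closes and is passed
   to ssgl_radd() both here and in do_xyt_solve(). */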
322: /* temporary vectors */
323: u = (PetscScalar *) malloc(n*sizeof(PetscScalar));
324: z = (PetscScalar *) malloc(n*sizeof(PetscScalar));
325: v = (PetscScalar *) malloc(a_m*sizeof(PetscScalar));
326: uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
327: w = (PetscScalar *) malloc(m*sizeof(PetscScalar));
329: /* extra nnz due to replication of vertices across separators */
330: for (i=1, j=0; i<=level; i++)
331: {j+=nsep[i];}
333: /* storage for sparse x values */
334: n_global = xyt_handle->info->n_global;
335: xt_max_nnz = yt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/num_nodes;
336: x = (PetscScalar *) malloc(xt_max_nnz*sizeof(PetscScalar));
337: y = (PetscScalar *) malloc(yt_max_nnz*sizeof(PetscScalar));
339: /* LATER - can embed next sep to fire in gs */
340: /* time to make the donuts - generate X factor */
341: for (dim=i=j=0;i<m;i++)
342: {
343: /* time to move to the next level? */
344: while (i==segs[dim])
345: {
346: if (dim==level)
347: {SETERRQ(PETSC_ERR_PLIB,"dim about to exceed level\n"); break;}
349: stages[dim++]=i;
350: end+=lnsep[dim];
351: }
352: stages[dim]=i;
354: /* which column are we firing? */
355: /* i.e. set v_l */
356: /* use new seps and do global min across hc to determine which one to fire */
357: (start<end) ? (col=fo[start]) : (col=INT_MAX);
358: giop_hc(&col,&work,1,op2,dim);
360: /* shouldn't need this */
361: if (col==INT_MAX)
362: {
363: PetscInfo(0,"hey ... col==INT_MAX??\n");
364: continue;
365: }
367: /* do I own it? I should */
368: rvec_zero(v ,a_m);
369: if (col==fo[start])
370: {
371: start++;
372: idx=ivec_linear_search(col, a_local2global, a_n);
373: if (idx!=-1)
374: {v[idx] = 1.0; j++;}
375: else
376: {SETERRQ(PETSC_ERR_PLIB,"NOT FOUND!\n");}
377: }
378: else
379: {
380: idx=ivec_linear_search(col, a_local2global, a_m);
381: if (idx!=-1)
382: {v[idx] = 1.0;}
383: }
385: /* perform u = A.v_l */
386: rvec_zero(u,n);
387: do_matvec(xyt_handle->mvi,v,u);
389: /* uu = Y^T.u_l (local portion) */
390: /* technically only need to zero out first i entries */
391: /* later turn this into an XYT_solve call ? */
392: rvec_zero(uu,m);
393: y_ptr=y;
394: iptr = ycol_indices;
395: for (k=0; k<i; k++)
396: {
397: off = *iptr++;
398: len = *iptr++;
399: dlen = PetscBLASIntCast(len);
400: uu[k] = BLASdot_(&dlen,u+off,&i1,y_ptr,&i1);
401: y_ptr+=len;
402: }
404: /* uu = Y^T.u_l (comm portion) */
405: ssgl_radd (uu, w, dim, stages);
407: /* z = X.uu */
408: rvec_zero(z,n);
409: x_ptr=x;
410: iptr = xcol_indices;
411: for (k=0; k<i; k++)
412: {
413: off = *iptr++;
414: len = *iptr++;
415: dlen = PetscBLASIntCast(len);
416: BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
417: x_ptr+=len;
418: }
420: /* compute v_l = v_l - z */
421: rvec_zero(v+a_n,a_m-a_n);
422: dlen = PetscBLASIntCast(n);
423: BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);
425: /* compute u_l = A.v_l */
426: if (a_n!=a_m)
427: {gs_gop_hc(gs_handle,v,"+\0",dim);}
428: rvec_zero(u,n);
429: do_matvec(xyt_handle->mvi,v,u);
431: /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
432: dlen = PetscBLASIntCast(n);
433: alpha = BLASdot_(&dlen,u,&i1,u,&i1);
434: /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
435: grop_hc(&alpha, &alpha_w, 1, op, dim);
437: alpha = (PetscScalar) sqrt((double)alpha);
439: /* check for small alpha */
440: /* LATER use this to detect and determine null space */
441: if (fabs(alpha)<1.0e-14)
442: {SETERRQ1(PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);}
444: /* compute v_l = v_l/sqrt(alpha) */
445: rvec_scale(v,1.0/alpha,n);
446: rvec_scale(u,1.0/alpha,n);
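/* Editorial note (added): alpha = u_l^T.u_l, so after this scaling u_l has unit
   length while the relation u_l = A.v_l is preserved; this is what makes the
   columns of Y orthonormal and Y = A.X hold column by column. */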
448: /* add newly generated column, v_l, to X */
449: flag = 1;
450: off=len=0;
451: for (k=0; k<n; k++)
452: {
453: if (v[k]!=0.0)
454: {
455: len=k;
456: if (flag)
457: {off=k; flag=0;}
458: }
459: }
461: len -= (off-1);
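/* Editorial note (added): off/len now bracket the first and last nonzero entries
   of v_l, so only that contiguous window is stored as column i of X; interior
   zeros inside the window are kept (and counted as xt_zero_nnz below). */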
463: if (len>0)
464: {
465: if ((xt_nnz+len)>xt_max_nnz)
466: {
467: PetscInfo(0,"increasing space for X by 2x!\n");
468: xt_max_nnz *= 2;
469: x_ptr = (PetscScalar *) malloc(xt_max_nnz*sizeof(PetscScalar));
470: rvec_copy(x_ptr,x,xt_nnz);
471: free(x);
472: x = x_ptr;
473: x_ptr+=xt_nnz;
474: }
475: xt_nnz += len;
476: rvec_copy(x_ptr,v+off,len);
478: /* keep track of number of zeros */
479: if (dim)
480: {
481: for (k=0; k<len; k++)
482: {
483: if (x_ptr[k]==0.0)
484: {xt_zero_nnz++;}
485: }
486: }
487: else
488: {
489: for (k=0; k<len; k++)
490: {
491: if (x_ptr[k]==0.0)
492: {xt_zero_nnz_0++;}
493: }
494: }
495: xcol_indices[2*i] = off;
496: xcol_sz[i] = xcol_indices[2*i+1] = len;
497: xcol_vals[i] = x_ptr;
498: }
499: else
500: {
501: xcol_indices[2*i] = 0;
502: xcol_sz[i] = xcol_indices[2*i+1] = 0;
503: xcol_vals[i] = x_ptr;
504: }
507: /* add newly generated column, u_l, to Y */
508: flag = 1;
509: off=len=0;
510: for (k=0; k<n; k++)
511: {
512: if (u[k]!=0.0)
513: {
514: len=k;
515: if (flag)
516: {off=k; flag=0;}
517: }
518: }
520: len -= (off-1);
522: if (len>0)
523: {
524: if ((yt_nnz+len)>yt_max_nnz)
525: {
526: PetscInfo(0,"increasing space for Y by 2x!\n");
527: yt_max_nnz *= 2;
528: y_ptr = (PetscScalar *) malloc(yt_max_nnz*sizeof(PetscScalar));
529: rvec_copy(y_ptr,y,yt_nnz);
530: free(y);
531: y = y_ptr;
532: y_ptr+=yt_nnz;
533: }
534: yt_nnz += len;
535: rvec_copy(y_ptr,u+off,len);
537: /* keep track of number of zeros */
538: if (dim)
539: {
540: for (k=0; k<len; k++)
541: {
542: if (y_ptr[k]==0.0)
543: {yt_zero_nnz++;}
544: }
545: }
546: else
547: {
548: for (k=0; k<len; k++)
549: {
550: if (y_ptr[k]==0.0)
551: {yt_zero_nnz_0++;}
552: }
553: }
554: ycol_indices[2*i] = off;
555: ycol_sz[i] = ycol_indices[2*i+1] = len;
556: ycol_vals[i] = y_ptr;
557: }
558: else
559: {
560: ycol_indices[2*i] = 0;
561: ycol_sz[i] = ycol_indices[2*i+1] = 0;
562: ycol_vals[i] = y_ptr;
563: }
564: }
566: /* close off stages for execution phase */
567: while (dim!=level)
568: {
569: stages[dim++]=i;
570: PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);
571: }
572: stages[dim]=i;
574: xyt_handle->info->n=xyt_handle->mvi->n;
575: xyt_handle->info->m=m;
576: xyt_handle->info->nnz=xt_nnz + yt_nnz;
577: xyt_handle->info->max_nnz=xt_max_nnz + yt_max_nnz;
578: xyt_handle->info->msg_buf_sz=stages[level]-stages[0];
579: xyt_handle->info->solve_uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
580: xyt_handle->info->solve_w = (PetscScalar *) malloc(m*sizeof(PetscScalar));
581: xyt_handle->info->x=x;
582: xyt_handle->info->xcol_vals=xcol_vals;
583: xyt_handle->info->xcol_sz=xcol_sz;
584: xyt_handle->info->xcol_indices=xcol_indices;
585: xyt_handle->info->stages=stages;
586: xyt_handle->info->y=y;
587: xyt_handle->info->ycol_vals=ycol_vals;
588: xyt_handle->info->ycol_sz=ycol_sz;
589: xyt_handle->info->ycol_indices=ycol_indices;
591: free(segs);
592: free(u);
593: free(v);
594: free(uu);
595: free(z);
596: free(w);
598: return(0);
599: }
601: /**************************************xyt.c***********************************/
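/* Editorial note (added; a hedged summary): do_xyt_solve() applies the factored
   inverse as uc = X.(Y^T.uc) in three steps: local dot products against the
   stored Y columns, a staged sum across the hypercube levels (ssgl_radd), and
   an axpy accumulation of the X columns back into uc. */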
602: static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc)
603: {
604: PetscInt off, len, *iptr;
605: PetscInt level =xyt_handle->level;
606: PetscInt n =xyt_handle->info->n;
607: PetscInt m =xyt_handle->info->m;
608: PetscInt *stages =xyt_handle->info->stages;
609: PetscInt *xcol_indices=xyt_handle->info->xcol_indices;
610: PetscInt *ycol_indices=xyt_handle->info->ycol_indices;
611: PetscScalar *x_ptr, *y_ptr, *uu_ptr;
612: PetscScalar *solve_uu=xyt_handle->info->solve_uu;
613: PetscScalar *solve_w =xyt_handle->info->solve_w;
614: PetscScalar *x =xyt_handle->info->x;
615: PetscScalar *y =xyt_handle->info->y;
616: PetscBLASInt i1 = 1,dlen;
619: uu_ptr=solve_uu;
620: rvec_zero(uu_ptr,m);
622: /* x = X.Y^T.b */
623: /* uu = Y^T.b */
624: for (y_ptr=y,iptr=ycol_indices; *iptr!=-1; y_ptr+=len)
625: {
626: off=*iptr++; len=*iptr++;
627: dlen = PetscBLASIntCast(len);
628: *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,y_ptr,&i1);
629: }
631: /* communication of beta */
632: uu_ptr=solve_uu;
633: if (level) {ssgl_radd(uu_ptr, solve_w, level, stages);}
635: rvec_zero(uc,n);
637: /* x = X.uu */
638: for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len)
639: {
640: off=*iptr++; len=*iptr++;
641: dlen = PetscBLASIntCast(len);
642: BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
643: }
644: return(0);
645: }
647: /**************************************xyt.c***********************************/
648: static PetscErrorCode check_handle(xyt_ADT xyt_handle)
649: {
650: PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};
653: if (xyt_handle==NULL)
654: {SETERRQ1(PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xyt_handle);}
656: vals[0]=vals[1]=xyt_handle->id;
657: giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
658: if ((vals[0]!=vals[1])||(xyt_handle->id<=0))
659: {SETERRQ3(PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n", vals[0],vals[1], xyt_handle->id);}
660: return(0);
661: }
663: /**************************************xyt.c***********************************/
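/* Editorial note (added; a hedged summary): det_separators() works top down over
   the log2(P) hypercube levels.  At each level it fires indicator vectors on the
   two halves (gs_gop_hc), marks the owned dofs that receive signal as separator
   dofs, records them in the firing order fo[] (filled from the end), and stores
   per-level local counts in lnsep[], global counts in nsep[], and the side taken
   in dir[].  The dofs left over are marked at level 0. */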
664: static PetscErrorCode det_separators(xyt_ADT xyt_handle)
665: {
666: PetscInt i, ct, id;
667: PetscInt mask, edge, *iptr;
668: PetscInt *dir, *used;
669: PetscInt sum[4], w[4];
670: PetscScalar rsum[4], rw[4];
671: PetscInt op[] = {GL_ADD,0};
672: PetscScalar *lhs, *rhs;
673: PetscInt *nsep, *lnsep, *fo, nfo=0;
674: gs_ADT gs_handle=xyt_handle->mvi->gs_handle;
675: PetscInt *local2global=xyt_handle->mvi->local2global;
676: PetscInt n=xyt_handle->mvi->n;
677: PetscInt m=xyt_handle->mvi->m;
678: PetscInt level=xyt_handle->level;
679: PetscInt shared=FALSE;
683: dir = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
684: nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
685: lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
686: fo = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
687: used = (PetscInt*)malloc(sizeof(PetscInt)*n);
689: ivec_zero(dir ,level+1);
690: ivec_zero(nsep ,level+1);
691: ivec_zero(lnsep,level+1);
692: ivec_set (fo ,-1,n+1);
693: ivec_zero(used,n);
695: lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
696: rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
698: /* determine the # of unique dof */
699: rvec_zero(lhs,m);
700: rvec_set(lhs,1.0,n);
701: gs_gop_hc(gs_handle,lhs,"+\0",level);
702: PetscInfo(0,"done first gs_gop_hc\n");
703: rvec_zero(rsum,2);
704: for (ct=i=0;i<n;i++)
705: {
706: if (lhs[i]!=0.0)
707: {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}
709: if (lhs[i]!=1.0)
710: {
711: shared=TRUE;
712: }
713: }
715: grop_hc(rsum,rw,2,op,level);
716: rsum[0]+=0.1;
717: rsum[1]+=0.1;
719: xyt_handle->info->n_global=xyt_handle->info->m_global=(PetscInt) rsum[0];
720: xyt_handle->mvi->n_global =xyt_handle->mvi->m_global =(PetscInt) rsum[0];
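/* Editorial note (added): lhs was set to 1.0 on owned dofs and summed with
   gs_gop_hc, so lhs[i] is the multiplicity of dof i; summing 1/lhs[i] and
   reducing therefore counts each shared dof exactly once, giving the global
   problem size (the +0.1 guards the integer cast against rounding). */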
722: /* determine separator sets top down */
723: if (shared)
724: {
725: /* solution is to do as in the symmetric shared case but then */
726: /* pick the sub-hc with the most free dofs and do a mat-vec */
727: /* and pick up the responses on the other sub-hc from the */
728: /* initial separator set obtained from the symm. shared case */
729: SETERRQ(PETSC_ERR_PLIB,"shared dof separator determination not ready ... see hmt!!!\n");
730: for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
731: {
732: /* set rsh of hc, fire, and collect lhs responses */
733: (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
734: gs_gop_hc(gs_handle,lhs,"+\0",edge);
735:
736: /* set lsh of hc, fire, and collect rhs responses */
737: (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
738: gs_gop_hc(gs_handle,rhs,"+\0",edge);
739:
740: for (i=0;i<n;i++)
741: {
742: if (id< mask)
743: {
744: if (lhs[i]!=0.0)
745: {lhs[i]=1.0;}
746: }
747: if (id>=mask)
748: {
749: if (rhs[i]!=0.0)
750: {rhs[i]=1.0;}
751: }
752: }
754: if (id< mask)
755: {gs_gop_hc(gs_handle,lhs,"+\0",edge-1);}
756: else
757: {gs_gop_hc(gs_handle,rhs,"+\0",edge-1);}
759: /* count number of dofs I own that have signal and not in sep set */
760: rvec_zero(rsum,4);
761: for (ivec_zero(sum,4),ct=i=0;i<n;i++)
762: {
763: if (!used[i])
764: {
765: /* number of unmarked dofs on node */
766: ct++;
767: /* number of dofs to be marked on lhs hc */
768: if (id< mask)
769: {
770: if (lhs[i]!=0.0)
771: {sum[0]++; rsum[0]+=1.0/lhs[i];}
772: }
773: /* number of dofs to be marked on rhs hc */
774: if (id>=mask)
775: {
776: if (rhs[i]!=0.0)
777: {sum[1]++; rsum[1]+=1.0/rhs[i];}
778: }
779: }
780: }
782: /* go for load balance - choose half with most unmarked dofs, bias LHS */
783: (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
784: (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
785: giop_hc(sum,w,4,op,edge);
786: grop_hc(rsum,rw,4,op,edge);
787: rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;
789: if (id<mask)
790: {
791: /* mark dofs I own that have signal and not in sep set */
792: for (ct=i=0;i<n;i++)
793: {
794: if ((!used[i])&&(lhs[i]!=0.0))
795: {
796: ct++; nfo++;
798: if (nfo>n)
799: {SETERRQ(PETSC_ERR_PLIB,"nfo about to exceed n\n");}
801: *--iptr = local2global[i];
802: used[i]=edge;
803: }
804: }
805: if (ct>1) {ivec_sort(iptr,ct);}
807: lnsep[edge]=ct;
808: nsep[edge]=(PetscInt) rsum[0];
809: dir [edge]=LEFT;
810: }
812: if (id>=mask)
813: {
814: /* mark dofs I own that have signal and not in sep set */
815: for (ct=i=0;i<n;i++)
816: {
817: if ((!used[i])&&(rhs[i]!=0.0))
818: {
819: ct++; nfo++;
821: if (nfo>n)
822: {SETERRQ(PETSC_ERR_PLIB,"nfo about to exceed n\n");}
824: *--iptr = local2global[i];
825: used[i]=edge;
826: }
827: }
828: if (ct>1) {ivec_sort(iptr,ct);}
830: lnsep[edge]=ct;
831: nsep[edge]= (PetscInt) rsum[1];
832: dir [edge]=RIGHT;
833: }
835: /* LATER or we can recur on these to order seps at this level */
836: /* do we need full set of separators for this? */
838: /* fold rhs hc into lower */
839: if (id>=mask)
840: {id-=mask;}
841: }
842: }
843: else
844: {
845: for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
846: {
847: /* set rsh of hc, fire, and collect lhs responses */
848: (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
849: gs_gop_hc(gs_handle,lhs,"+\0",edge);
851: /* set lsh of hc, fire, and collect rhs responses */
852: (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
853: gs_gop_hc(gs_handle,rhs,"+\0",edge);
855: /* count number of dofs I own that have signal and not in sep set */
856: for (ivec_zero(sum,4),ct=i=0;i<n;i++)
857: {
858: if (!used[i])
859: {
860: /* number of unmarked dofs on node */
861: ct++;
862: /* number of dofs to be marked on lhs hc */
863: if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
864: /* number of dofs to be marked on rhs hc */
865: if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
866: }
867: }
869: /* for the non-symmetric case we need separators of width 2 */
870: /* so take both sides */
871: (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
872: giop_hc(sum,w,4,op,edge);
874: ct=0;
875: if (id<mask)
876: {
877: /* mark dofs I own that have signal and not in sep set */
878: for (i=0;i<n;i++)
879: {
880: if ((!used[i])&&(lhs[i]!=0.0))
881: {
882: ct++; nfo++;
883: *--iptr = local2global[i];
884: used[i]=edge;
885: }
886: }
887: /* LSH hc summation of ct should be sum[0] */
888: }
889: else
890: {
891: /* mark dofs I own that have signal and not in sep set */
892: for (i=0;i<n;i++)
893: {
894: if ((!used[i])&&(rhs[i]!=0.0))
895: {
896: ct++; nfo++;
897: *--iptr = local2global[i];
898: used[i]=edge;
899: }
900: }
901: /* RSH hc summation of ct should be sum[1] */
902: }
904: if (ct>1) {ivec_sort(iptr,ct);}
905: lnsep[edge]=ct;
906: nsep[edge]=sum[0]+sum[1];
907: dir [edge]=BOTH;
909: /* LATER or we can recur on these to order seps at this level */
910: /* do we need full set of separators for this? */
912: /* fold rhs hc into lower */
913: if (id>=mask)
914: {id-=mask;}
915: }
916: }
919: /* level 0 is the on-processor case - so mark the remainder */
919: for (ct=i=0;i<n;i++)
920: {
921: if (!used[i])
922: {
923: ct++; nfo++;
924: *--iptr = local2global[i];
925: used[i]=edge;
926: }
927: }
928: if (ct>1) {ivec_sort(iptr,ct);}
929: lnsep[edge]=ct;
930: nsep [edge]=ct;
931: dir [edge]=BOTH;
933: xyt_handle->info->nsep=nsep;
934: xyt_handle->info->lnsep=lnsep;
935: xyt_handle->info->fo=fo;
936: xyt_handle->info->nfo=nfo;
938: free(dir);
939: free(lhs);
940: free(rhs);
941: free(used);
942: return(0);
943: }
945: /**************************************xyt.c***********************************/
946: static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
947: {
948: mv_info *mvi;
951: mvi = (mv_info*)malloc(sizeof(mv_info));
952: mvi->n=n;
953: mvi->m=m;
954: mvi->n_global=-1;
955: mvi->m_global=-1;
956: mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
957: ivec_copy(mvi->local2global,local2global,m);
958: mvi->local2global[m] = INT_MAX;
959: mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
960: mvi->grid_data=grid_data;
962: /* set xyt communication handle to perform restricted matvec */
963: mvi->gs_handle = gs_init(local2global, m, num_nodes);
965: return(mvi);
966: }
968: /**************************************xyt.c***********************************/
969: static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
970: {
972: A->matvec((mv_info*)A->grid_data,v,u);
973: return(0);
974: }