Actual source code: dacorn.c
#define PETSCDM_DLL

/*
   Code for manipulating distributed regular arrays in parallel.
*/

#include "../src/dm/da/daimpl.h"
/*@
   DASetCoordinates - Sets into the DA a vector that indicates the
   coordinates of the local nodes (NOT including ghost nodes).

   Not Collective

   Input Parameters:
+  da - the distributed array
-  c - coordinate vector

   Notes:
   The coordinates should NOT include those for ghost points.

   Does NOT increase the reference count of this vector, so the caller
   should NOT destroy the vector.

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DAGetGhostCorners(), DAGetCoordinates(), DASetUniformCoordinates(), DAGetGhostedCoordinates(), DAGetCoordinateDA()
@*/
PetscErrorCode DASetCoordinates(DA da,Vec c)
{
  if (da->coordinates) {
    VecDestroy(da->coordinates);
  }
  da->coordinates = c;
  VecSetBlockSize(c,da->dim);
  return(0);
}
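/*
   Usage sketch (illustrative only, not part of dacorn.c): create a vector
   with the layout of the coordinate DA, fill it, and hand it to the DA.
   The DA takes ownership, so the caller does NOT destroy the vector; da is
   assumed to be an already-created DA.  Most codes can instead simply call
   DASetUniformCoordinates().

      DA  da,cda;
      Vec c;

      DAGetCoordinateDA(da,&cda);
      DACreateGlobalVector(cda,&c);
      ...fill c with the physical coordinates of the local nodes...
      DASetCoordinates(da,c);
      DADestroy(cda);
*/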
/*@
   DAGetCoordinates - Gets the node coordinates associated with a DA.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  c - coordinate vector

   Notes:
   Each process has only the coordinates for its local nodes (it does NOT
   have the coordinates for the ghost nodes).

   For two and three dimensions the coordinates are interlaced (x_0,y_0,x_1,y_1,...)
   and (x_0,y_0,z_0,x_1,y_1,z_1,...).

   The user is responsible for destroying this vector.

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DAGetGhostCorners(), DASetCoordinates(), DASetUniformCoordinates(), DAGetGhostedCoordinates(), DAGetCoordinateDA()
@*/
PetscErrorCode DAGetCoordinates(DA da,Vec *c)
{
  if (da->coordinates) {PetscObjectReference((PetscObject) da->coordinates);}
  *c = da->coordinates;
  return(0);
}
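/*
   Usage sketch (illustrative only): read the interlaced coordinates of the
   local nodes of a 2d DA through the DACoor2d structure.  Because
   DAGetCoordinates() references the vector, the caller destroys it when done;
   da is assumed to be an already-created 2d DA with coordinates set.

      DA       da,cda;
      Vec      c;
      DACoor2d **coors;
      PetscInt i,j,xs,ys,xm,ym;

      DAGetCoordinateDA(da,&cda);
      DAGetCoordinates(da,&c);
      DAVecGetArray(cda,c,&coors);
      DAGetCorners(cda,&xs,&ys,0,&xm,&ym,0);
      for (j=ys; j<ys+ym; j++) {
        for (i=xs; i<xs+xm; i++) {
          ...use coors[j][i].x and coors[j][i].y...
        }
      }
      DAVecRestoreArray(cda,c,&coors);
      VecDestroy(c);
      DADestroy(cda);
*/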
/*@
   DAGetCoordinateDA - Gets the DA that scatters between the global and local DA coordinates.

   Collective on DA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  cda - coordinate DA

   Note: The user is responsible for destroying this DA when finished with it.

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DAGetGhostCorners(), DASetCoordinates(), DASetUniformCoordinates(), DAGetCoordinates(), DAGetGhostedCoordinates()
@*/
PetscErrorCode DAGetCoordinateDA(DA da,DA *cda)
{
  PetscMPIInt size;

  if (!da->da_coordinates) {
    MPI_Comm_size(((PetscObject)da)->comm,&size);
    if (da->dim == 1) {
      PetscInt       s,m,*lc,l;
      DAPeriodicType pt;
      DAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&pt,0);
      DAGetCorners(da,0,0,0,&l,0,0);
      PetscMalloc(size*sizeof(PetscInt),&lc);
      MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
      DACreate1d(((PetscObject)da)->comm,pt,m,1,s,lc,&da->da_coordinates);
      PetscFree(lc);
    } else if (da->dim == 2) {
      PetscInt       i,s,m,*lc,*ld,l,k,n,M,N;
      DAPeriodicType pt;
      DAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&pt,0);
      DAGetCorners(da,0,0,0,&l,&k,0);
      PetscMalloc(size*sizeof(PetscInt),&lc);
      PetscMalloc(size*sizeof(PetscInt),&ld);
      /* only the first M values in lc matter */
      MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
      /* every Mth value in ld matters */
      MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);
      for (i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      DACreate2d(((PetscObject)da)->comm,pt,DA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&da->da_coordinates);
      PetscFree(lc);
      PetscFree(ld);
    } else if (da->dim == 3) {
      PetscInt       i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
      DAPeriodicType pt;
      DAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&pt,0);
      DAGetCorners(da,0,0,0,&l,&k,&q);
      PetscMalloc(size*sizeof(PetscInt),&lc);
      PetscMalloc(size*sizeof(PetscInt),&ld);
      PetscMalloc(size*sizeof(PetscInt),&le);
      /* only the first M values in lc matter */
      MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
      /* every Mth value in ld matters */
      MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);
      for (i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      /* every M*Nth value in le matters */
      MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);
      for (i=0; i<P; i++) {
        le[i] = le[M*N*i];
      }
      DACreate3d(((PetscObject)da)->comm,pt,DA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&da->da_coordinates);
      PetscFree(lc);
      PetscFree(ld);
      PetscFree(le);
    }
  }
  PetscObjectReference((PetscObject) da->da_coordinates);
  *cda = da->da_coordinates;
  return(0);
}
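/*
   Usage sketch (illustrative only): the coordinate DA has the same parallel
   layout as da but dim degrees of freedom per node, so it can scatter the
   coordinate vector to ghosted (local) form; this mirrors what
   DAGetGhostedCoordinates() below does internally.

      DA  da,cda;
      Vec gc,lc;

      DAGetCoordinateDA(da,&cda);
      DAGetCoordinates(da,&gc);
      DACreateLocalVector(cda,&lc);
      DAGlobalToLocalBegin(cda,gc,INSERT_VALUES,lc);
      DAGlobalToLocalEnd(cda,gc,INSERT_VALUES,lc);
      ...
      VecDestroy(lc);
      VecDestroy(gc);
      DADestroy(cda);
*/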
/*@
   DAGetGhostedCoordinates - Gets the ghosted node coordinates associated with a DA.

   Collective on DA the first time it is called

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  c - coordinate vector

   Notes:
   Each process has the coordinates for its local AND ghost nodes.

   For two and three dimensions the coordinates are interlaced (x_0,y_0,x_1,y_1,...)
   and (x_0,y_0,z_0,x_1,y_1,z_1,...).

   The user is responsible for destroying this vector.

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DAGetGhostCorners(), DASetCoordinates(), DASetUniformCoordinates(), DAGetCoordinates(), DAGetCoordinateDA()
@*/
PetscErrorCode DAGetGhostedCoordinates(DA da,Vec *c)
{
  if (!da->coordinates) SETERRQ(PETSC_ERR_ORDER,"You must call DASetCoordinates() before this call");
  if (!da->ghosted_coordinates) {
    DA dac;
    DAGetCoordinateDA(da,&dac);
    DACreateLocalVector(dac,&da->ghosted_coordinates);
    if (dac == da) {PetscObjectDereference((PetscObject)dac);}
    DAGlobalToLocalBegin(dac,da->coordinates,INSERT_VALUES,da->ghosted_coordinates);
    DAGlobalToLocalEnd(dac,da->coordinates,INSERT_VALUES,da->ghosted_coordinates);
    DADestroy(dac);
  }
  PetscObjectReference((PetscObject) da->ghosted_coordinates);
  *c = da->ghosted_coordinates;
  return(0);
}
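/*
   Usage sketch (illustrative only): access coordinates including ghost nodes,
   e.g. for stencil computations near the edge of the local patch; da is
   assumed to be a 2d DA with coordinates already set.

      DA       da,cda;
      Vec      gc;
      DACoor2d **coors;

      DAGetCoordinateDA(da,&cda);
      DAGetGhostedCoordinates(da,&gc);
      DAVecGetArray(cda,gc,&coors);
      ...index coors[j][i] over the range given by DAGetGhostCorners()...
      DAVecRestoreArray(cda,gc,&coors);
      VecDestroy(gc);
      DADestroy(cda);
*/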
/*@C
   DASetFieldName - Sets the name of an individual field component in a multicomponent
   vector associated with a DA.

   Not Collective

   Input Parameters:
+  da - the distributed array
.  nf - field number for the DA (0, 1, ... dof-1), where dof indicates the
        number of degrees of freedom per node within the DA
-  name - the name of the field (component)

   Level: intermediate

.keywords: distributed array, get, component name

.seealso: DAGetFieldName()
@*/
PetscErrorCode DASetFieldName(DA da,PetscInt nf,const char name[])
{
  if (nf < 0 || nf >= da->w) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  if (da->fieldname[nf]) {PetscFree(da->fieldname[nf]);}
  PetscStrallocpy(name,&da->fieldname[nf]);
  return(0);
}
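/*
   Usage sketch (illustrative only): name the components of a DA created with
   dof = 2 so that viewers label the fields; the field names "pressure" and
   "temperature" are made up for the example.

      DA da;    assumed created with 2 degrees of freedom per node

      DASetFieldName(da,0,"pressure");
      DASetFieldName(da,1,"temperature");
*/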
/*@C
   DAGetFieldName - Gets the name of an individual field component in a multicomponent
   vector associated with a DA.

   Not Collective

   Input Parameters:
+  da - the distributed array
-  nf - field number for the DA (0, 1, ... dof-1), where dof indicates the
        number of degrees of freedom per node within the DA

   Output Parameter:
.  name - the name of the field (component)

   Level: intermediate

.keywords: distributed array, get, component name

.seealso: DASetFieldName()
@*/
PetscErrorCode DAGetFieldName(DA da,PetscInt nf,char **name)
{
  if (nf < 0 || nf >= da->w) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  *name = da->fieldname[nf];
  return(0);
}
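/*
   Usage sketch (illustrative only): retrieve a field name and print it.  The
   returned string is owned by the DA, so the caller must not free it.

      char *name;

      DAGetFieldName(da,0,&name);
      PetscPrintf(PETSC_COMM_WORLD,"field 0: %s\n",name);
*/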
/*@
   DAGetCorners - Returns the global (x,y,z) indices of the lower left
   corner of the local region and the widths of the local region,
   excluding ghost points.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  x,y,z - the corner indices (where y and z are optional; these are used
           for 2D and 3D problems)
-  m,n,p - widths in the corresponding directions (where n and p are optional;
           these are used for 2D and 3D problems)

   Note:
   The corner information is independent of the number of degrees of
   freedom per node set with the DACreateXX() routine. Thus the x, y, z, and
   m, n, p can be thought of as coordinates on a logical grid, where each
   grid point has (potentially) several degrees of freedom.
   Any of y, z, n, and p can be passed in as PETSC_NULL if not needed.

   Level: beginner

.keywords: distributed array, get, corners, nodes, local indices

.seealso: DAGetGhostCorners(), DAGetOwnershipRanges()
@*/
PetscErrorCode DAGetCorners(DA da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p)
{
  PetscInt w;

  /* since xs, xe, ... have all been multiplied by the number of degrees
     of freedom per node, w = da->w, we divide that out before returning */
  w = da->w;
  if (x) *x = da->xs/w; if (m) *m = (da->xe - da->xs)/w;
  /* the y and z have NOT been multiplied by w */
  if (y) *y = da->ys; if (n) *n = (da->ye - da->ys);
  if (z) *z = da->zs; if (p) *p = (da->ze - da->zs);
  return(0);
}
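/*
   Usage sketch (illustrative only): loop over exactly the nodes owned by
   this process on a 2d DA; da is assumed to be an already-created 2d DA.

      PetscInt i,j,xs,ys,xm,ym;

      DAGetCorners(da,&xs,&ys,PETSC_NULL,&xm,&ym,PETSC_NULL);
      for (j=ys; j<ys+ym; j++) {
        for (i=xs; i<xs+xm; i++) {
          ...work on node (i,j)...
        }
      }
*/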