Actual source code: slepcutil.c

/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-2011, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.

   SLEPc is free software: you can redistribute it and/or modify it under the
   terms of version 3 of the GNU Lesser General Public License as published by
   the Free Software Foundation.

   SLEPc is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
   FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
   more details.

   You should have received a copy of the GNU Lesser General Public License
   along with SLEPc. If not, see <http://www.gnu.org/licenses/>.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

#include <private/slepcimpl.h>            /*I "slepcsys.h" I*/

/*@C
   SlepcMatConvertSeqDense - Converts a parallel matrix to a sequential
   dense matrix, replicating the values on every process.

   Collective on Mat

   Input parameter:
.  mat - the source matrix

   Output parameter:
.  newmat - the target matrix

   Level: developer
@*/
PetscErrorCode SlepcMatConvertSeqDense(Mat mat,Mat *newmat)
{
  PetscInt       m,n;
  PetscMPIInt    size;
  MPI_Comm       comm;
  Mat            *M;
  IS             isrow,iscol;
  PetscBool      flg;

  PetscObjectGetComm((PetscObject)mat,&comm);
  MPI_Comm_size(comm,&size);

  if (size > 1) {
    /* assemble full matrix on every processor */
    MatGetSize(mat,&m,&n);
    ISCreateStride(PETSC_COMM_SELF,m,0,1,&isrow);
    ISCreateStride(PETSC_COMM_SELF,n,0,1,&iscol);
    MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&M);
    ISDestroy(&isrow);
    ISDestroy(&iscol);

    /* Fake support for "inplace" convert */
    if (*newmat == mat) {
      MatDestroy(&mat);
    }
    *newmat = *M;
    PetscFree(M);

    /* convert matrix to MatSeqDense */
    PetscTypeCompare((PetscObject)*newmat,MATSEQDENSE,&flg);
    if (!flg) {
      MatConvert(*newmat,MATSEQDENSE,MAT_INITIAL_MATRIX,newmat);
    }
  } else {
    /* convert matrix to MatSeqDense */
    MatConvert(mat,MATSEQDENSE,MAT_INITIAL_MATRIX,newmat);
  }
  return(0);
}
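
/*
   Usage sketch (hypothetical caller, not part of this file): replicate a
   distributed matrix A on every process, e.g. before operating on it
   redundantly in sequential dense format.

      Mat Ad = PETSC_NULL;              (request a new matrix, not in-place)
      SlepcMatConvertSeqDense(A,&Ad);   (Ad is now MATSEQDENSE on every process)
      ...
      MatDestroy(&Ad);                  (the caller owns the replicated copy)
*/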

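/*
   SlepcMatTile_SeqAIJ - sequential kernel of SlepcMatTile. It proceeds in two
   passes: a first sweep over the rows of A, B, C, D counts the nonzeros of
   each row of G for MatSeqAIJSetPreallocation, and a second sweep inserts the
   (scaled) values with MatSetValues. Blocks whose scaling factor is zero are
   skipped in both passes.
*/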
static PetscErrorCode SlepcMatTile_SeqAIJ(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat G)
{
  PetscErrorCode    ierr;
  PetscInt          i,j,M1,M2,N1,N2,*nnz,ncols,*scols;
  PetscScalar       *svals,*buf;
  const PetscInt    *cols;
  const PetscScalar *vals;

  MatGetSize(A,&M1,&N1);
  MatGetSize(D,&M2,&N2);

  PetscMalloc((M1+M2)*sizeof(PetscInt),&nnz);
  PetscMemzero(nnz,(M1+M2)*sizeof(PetscInt));
  /* Preallocate for A */
  if (a!=0.0) {
    for (i=0;i<M1;i++) {
      MatGetRow(A,i,&ncols,PETSC_NULL,PETSC_NULL);
      nnz[i] += ncols;
      MatRestoreRow(A,i,&ncols,PETSC_NULL,PETSC_NULL);
    }
  }
  /* Preallocate for B */
  if (b!=0.0) {
    for (i=0;i<M1;i++) {
      MatGetRow(B,i,&ncols,PETSC_NULL,PETSC_NULL);
      nnz[i] += ncols;
      MatRestoreRow(B,i,&ncols,PETSC_NULL,PETSC_NULL);
    }
  }
  /* Preallocate for C */
  if (c!=0.0) {
    for (i=0;i<M2;i++) {
      MatGetRow(C,i,&ncols,PETSC_NULL,PETSC_NULL);
      nnz[i+M1] += ncols;
      MatRestoreRow(C,i,&ncols,PETSC_NULL,PETSC_NULL);
    }
  }
  /* Preallocate for D */
  if (d!=0.0) {
    for (i=0;i<M2;i++) {
      MatGetRow(D,i,&ncols,PETSC_NULL,PETSC_NULL);
      nnz[i+M1] += ncols;
      MatRestoreRow(D,i,&ncols,PETSC_NULL,PETSC_NULL);
    }
  }
  MatSeqAIJSetPreallocation(G,0,nnz);
  PetscFree(nnz);

  PetscMalloc(sizeof(PetscScalar)*PetscMax(N1,N2),&buf);
  PetscMalloc(sizeof(PetscInt)*PetscMax(N1,N2),&scols);
  /* Transfer A */
  if (a!=0.0) {
    for (i=0;i<M1;i++) {
      MatGetRow(A,i,&ncols,&cols,&vals);
      if (a!=1.0) { svals=buf; for (j=0;j<ncols;j++) svals[j] = vals[j]*a; }
      else svals=(PetscScalar*)vals;
      MatSetValues(G,1,&i,ncols,cols,svals,INSERT_VALUES);
      MatRestoreRow(A,i,&ncols,&cols,&vals);
    }
  }
  /* Transfer B */
  if (b!=0.0) {
    for (i=0;i<M1;i++) {
      MatGetRow(B,i,&ncols,&cols,&vals);
      if (b!=1.0) { svals=buf; for (j=0;j<ncols;j++) svals[j] = vals[j]*b; }
      else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = cols[j]+N1;
      MatSetValues(G,1,&i,ncols,scols,svals,INSERT_VALUES);
      MatRestoreRow(B,i,&ncols,&cols,&vals);
    }
  }
  /* Transfer C */
  if (c!=0.0) {
    for (i=0;i<M2;i++) {
      MatGetRow(C,i,&ncols,&cols,&vals);
      if (c!=1.0) { svals=buf; for (j=0;j<ncols;j++) svals[j] = vals[j]*c; }
      else svals=(PetscScalar*)vals;
      j = i+M1;
      MatSetValues(G,1,&j,ncols,cols,svals,INSERT_VALUES);
      MatRestoreRow(C,i,&ncols,&cols,&vals);
    }
  }
  /* Transfer D */
  if (d!=0.0) {
    for (i=0;i<M2;i++) {
      MatGetRow(D,i,&ncols,&cols,&vals);
      if (d!=1.0) { svals=buf; for (j=0;j<ncols;j++) svals[j] = vals[j]*d; }
      else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = cols[j]+N1;
      j = i+M1;
      MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES);
      MatRestoreRow(D,i,&ncols,&cols,&vals);
    }
  }
  PetscFree(buf);
  PetscFree(scols);
  return(0);
}

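/*
   SlepcMatTile_MPIAIJ - parallel kernel of SlepcMatTile. It uses the same
   two-pass scheme (preallocate, then insert) plus the column maps map1 and
   map2 that implement the perfect shuffle: the columns of A,C and of B,D are
   interleaved so that each process keeps the local columns of all four blocks
   inside its own local part of G.
*/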
static PetscErrorCode SlepcMatTile_MPIAIJ(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat G)
{
  PetscMPIInt       np;
  PetscInt          p,i,j,N1,N2,m1,m2,n1,n2,*map1,*map2;
  PetscInt          *dnz,*onz,ncols,*scols,start,gstart;
  PetscScalar       *svals,*buf;
  const PetscInt    *cols,*mapptr1,*mapptr2;
  const PetscScalar *vals;

  MatGetSize(A,PETSC_NULL,&N1);
  MatGetLocalSize(A,&m1,&n1);
  MatGetSize(D,PETSC_NULL,&N2);
  MatGetLocalSize(D,&m2,&n2);

  /* Create mappings */
  MPI_Comm_size(((PetscObject)G)->comm,&np);
  MatGetOwnershipRangesColumn(A,&mapptr1);
  MatGetOwnershipRangesColumn(B,&mapptr2);
  PetscMalloc(sizeof(PetscInt)*N1,&map1);
  for (p=0;p<np;p++) {
    for (i=mapptr1[p];i<mapptr1[p+1];i++) map1[i] = i+mapptr2[p];
  }
  PetscMalloc(sizeof(PetscInt)*N2,&map2);
  for (p=0;p<np;p++) {
    for (i=mapptr2[p];i<mapptr2[p+1];i++) map2[i] = i+mapptr1[p+1];
  }
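  /*
     Worked example of the shuffle (an illustration, not from the source):
     with 2 processes, N1=4 split 2+2 (mapptr1={0,2,4}) and N2=2 split 1+1
     (mapptr2={0,1,2}), we get map1={0,1,3,4} and map2={2,5}. Process 0 then
     owns columns {0,1,2} of G (its local columns of A and B) and process 1
     owns {3,4,5}, so local data stays local.
  */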

  PetscMalloc(sizeof(PetscScalar)*PetscMax(N1,N2),&buf);
  PetscMalloc(sizeof(PetscInt)*PetscMax(N1,N2),&scols);

  MatPreallocateInitialize(((PetscObject)G)->comm,m1+m2,n1+n2,dnz,onz);
  MatGetOwnershipRange(G,&gstart,PETSC_NULL);
  /* Preallocate for A */
  if (a!=0.0) {
    MatGetOwnershipRange(A,&start,PETSC_NULL);
    for (i=0;i<m1;i++) {
      MatGetRow(A,i+start,&ncols,&cols,PETSC_NULL);
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      MatPreallocateSet(gstart+i,ncols,scols,dnz,onz);
      MatRestoreRow(A,i+start,&ncols,&cols,PETSC_NULL);
    }
  }
  /* Preallocate for B */
  if (b!=0.0) {
    MatGetOwnershipRange(B,&start,PETSC_NULL);
    for (i=0;i<m1;i++) {
      MatGetRow(B,i+start,&ncols,&cols,PETSC_NULL);
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      MatPreallocateSet(gstart+i,ncols,scols,dnz,onz);
      MatRestoreRow(B,i+start,&ncols,&cols,PETSC_NULL);
    }
  }
  /* Preallocate for C */
  if (c!=0.0) {
    MatGetOwnershipRange(C,&start,PETSC_NULL);
    for (i=0;i<m2;i++) {
      MatGetRow(C,i+start,&ncols,&cols,PETSC_NULL);
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      MatPreallocateSet(gstart+m1+i,ncols,scols,dnz,onz);
      MatRestoreRow(C,i+start,&ncols,&cols,PETSC_NULL);
    }
  }
  /* Preallocate for D */
  if (d!=0.0) {
    MatGetOwnershipRange(D,&start,PETSC_NULL);
    for (i=0;i<m2;i++) {
      MatGetRow(D,i+start,&ncols,&cols,PETSC_NULL);
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      MatPreallocateSet(gstart+m1+i,ncols,scols,dnz,onz);
      MatRestoreRow(D,i+start,&ncols,&cols,PETSC_NULL);
    }
  }
  MatMPIAIJSetPreallocation(G,0,dnz,0,onz);
  MatPreallocateFinalize(dnz,onz);

  /* Transfer A */
  if (a!=0.0) {
    MatGetOwnershipRange(A,&start,PETSC_NULL);
    for (i=0;i<m1;i++) {
      MatGetRow(A,i+start,&ncols,&cols,&vals);
      if (a!=1.0) { svals=buf; for (j=0;j<ncols;j++) svals[j] = vals[j]*a; }
      else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      j = gstart+i;
      MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES);
      MatRestoreRow(A,i+start,&ncols,&cols,&vals);
    }
  }
  /* Transfer B */
  if (b!=0.0) {
    MatGetOwnershipRange(B,&start,PETSC_NULL);
    for (i=0;i<m1;i++) {
      MatGetRow(B,i+start,&ncols,&cols,&vals);
      if (b!=1.0) { svals=buf; for (j=0;j<ncols;j++) svals[j] = vals[j]*b; }
      else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      j = gstart+i;
      MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES);
      MatRestoreRow(B,i+start,&ncols,&cols,&vals);
    }
  }
  /* Transfer C */
  if (c!=0.0) {
    MatGetOwnershipRange(C,&start,PETSC_NULL);
    for (i=0;i<m2;i++) {
      MatGetRow(C,i+start,&ncols,&cols,&vals);
      if (c!=1.0) { svals=buf; for (j=0;j<ncols;j++) svals[j] = vals[j]*c; }
      else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      j = gstart+m1+i;
      MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES);
      MatRestoreRow(C,i+start,&ncols,&cols,&vals);
    }
  }
  /* Transfer D */
  if (d!=0.0) {
    MatGetOwnershipRange(D,&start,PETSC_NULL);
    for (i=0;i<m2;i++) {
      MatGetRow(D,i+start,&ncols,&cols,&vals);
      if (d!=1.0) { svals=buf; for (j=0;j<ncols;j++) svals[j] = vals[j]*d; }
      else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      j = gstart+m1+i;
      MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES);
      MatRestoreRow(D,i+start,&ncols,&cols,&vals);
    }
  }
  PetscFree(buf);
  PetscFree(scols);
  PetscFree(map1);
  PetscFree(map2);
  return(0);
}

/*@
   SlepcMatTile - Explicitly builds a matrix from four blocks, G = [ a*A b*B; c*C d*D ].

   Collective on Mat

   Input parameters:
+  a - scaling factor for block A
.  A - matrix for top-left block
.  b - scaling factor for block B
.  B - matrix for top-right block
.  c - scaling factor for block C
.  C - matrix for bottom-left block
.  d - scaling factor for block D
-  D - matrix for bottom-right block

   Output parameter:
.  G - the resulting matrix

   Notes:
   In the case of a parallel matrix, a permuted version of G is returned. The permutation
   is a perfect shuffle such that the local parts of A, B, C, D remain in the local part of
   G for the same process.

   Matrix G must be destroyed by the user.

   Level: developer
@*/
PetscErrorCode SlepcMatTile(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat *G)
{
  PetscInt       M1,M2,N1,N2,M,N,m1,m2,n1,n2,m,n;
  PetscBool      flg1,flg2;

  /* check row 1 */
  MatGetSize(A,&M1,PETSC_NULL);
  MatGetLocalSize(A,&m1,PETSC_NULL);
  MatGetSize(B,&M,PETSC_NULL);
  MatGetLocalSize(B,&m,PETSC_NULL);
  if (M!=M1 || m!=m1) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check row 2 */
  MatGetSize(C,&M2,PETSC_NULL);
  MatGetLocalSize(C,&m2,PETSC_NULL);
  MatGetSize(D,&M,PETSC_NULL);
  MatGetLocalSize(D,&m,PETSC_NULL);
  if (M!=M2 || m!=m2) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check column 1 */
  MatGetSize(A,PETSC_NULL,&N1);
  MatGetLocalSize(A,PETSC_NULL,&n1);
  MatGetSize(C,PETSC_NULL,&N);
  MatGetLocalSize(C,PETSC_NULL,&n);
  if (N!=N1 || n!=n1) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check column 2 */
  MatGetSize(B,PETSC_NULL,&N2);
  MatGetLocalSize(B,PETSC_NULL,&n2);
  MatGetSize(D,PETSC_NULL,&N);
  MatGetLocalSize(D,PETSC_NULL,&n);
  if (N!=N2 || n!=n2) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");

  MatCreate(((PetscObject)A)->comm,G);
  MatSetSizes(*G,m1+m2,n1+n2,M1+M2,N1+N2);
  MatSetFromOptions(*G);

  PetscTypeCompare((PetscObject)*G,MATMPIAIJ,&flg1);
  PetscTypeCompare((PetscObject)A,MATMPIAIJ,&flg2);
  if (flg1 && flg2) {
    SlepcMatTile_MPIAIJ(a,A,b,B,c,C,d,D,*G);
  } else {
    PetscTypeCompare((PetscObject)*G,MATSEQAIJ,&flg1);
    PetscTypeCompare((PetscObject)A,MATSEQAIJ,&flg2);
    if (flg1 && flg2) {
      SlepcMatTile_SeqAIJ(a,A,b,B,c,C,d,D,*G);
    } else SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Not implemented for this matrix type");
  }
  MatAssemblyBegin(*G,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*G,MAT_FINAL_ASSEMBLY);
  return(0);
}
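
/*
   Usage sketch (hypothetical caller, not part of this file): build the
   linearization L = [ 0 I; -K -C ] of a quadratic eigenproblem from existing
   n x n matrices K, C and an identity matrix I with the same layout. Note
   that the zero block still needs a matrix argument (here K) to supply its
   dimensions.

      Mat L;
      SlepcMatTile(0.0,K,1.0,I,-1.0,K,-1.0,C,&L);
      ...
      MatDestroy(&L);
*/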

/*@
   SlepcCheckOrthogonality - Checks (or prints) the level of orthogonality
   of a set of vectors.

   Collective on Vec

   Input parameters:
+  V  - a set of vectors
.  nv - number of V vectors
.  W  - an alternative set of vectors (optional)
.  nw - number of W vectors
-  B  - matrix defining the inner product (optional)

   Output parameter:
.  lev - level of orthogonality (optional)

   Notes:
   This function computes W'*V. It is intended to check the level of
   bi-orthogonality of the vectors in the two sets. If W is equal to
   PETSC_NULL then V is used, thus checking the orthogonality of the V vectors.

   If matrix B is provided then the check uses the B-inner product, W'*B*V.

   If lev is not PETSC_NULL, it will contain the maximum entry of matrix
   W'*V - I (in absolute value). Otherwise, the matrix W'*V is printed.

   Level: developer
@*/
PetscErrorCode SlepcCheckOrthogonality(Vec *V,PetscInt nv,Vec *W,PetscInt nw,Mat B,PetscReal *lev)
{
  PetscInt       i,j;
  PetscScalar    *vals;
  Vec            w;
  MPI_Comm       comm;

  if (nv<=0 || nw<=0) return(0);
  PetscObjectGetComm((PetscObject)V[0],&comm);
  PetscMalloc(nv*sizeof(PetscScalar),&vals);
  if (B) { VecDuplicate(V[0],&w); }
  if (lev) *lev = 0.0;
  for (i=0;i<nw;i++) {
    if (B) {
      if (W) { MatMultTranspose(B,W[i],w); }
      else { MatMultTranspose(B,V[i],w); }
    } else {
      if (W) w = W[i];
      else w = V[i];
    }
    VecMDot(w,nv,V,vals);
    for (j=0;j<nv;j++) {
      if (lev) *lev = PetscMax(*lev,PetscAbsScalar((j==i)?(vals[j]-1.0):vals[j]));
      else {
#if !defined(PETSC_USE_COMPLEX)
        PetscPrintf(comm," %12G  ",vals[j]);
#else
        PetscPrintf(comm," %12G%+12Gi ",PetscRealPart(vals[j]),PetscImaginaryPart(vals[j]));
#endif
      }
    }
    if (!lev) { PetscPrintf(comm,"\n"); }
  }
  PetscFree(vals);
  if (B) { VecDestroy(&w); }
  return(0);
}
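
/*
   Usage sketch (hypothetical caller, not part of this file): measure how far a
   computed basis V[0..n-1] is from orthonormality.

      PetscReal lev;
      SlepcCheckOrthogonality(V,n,PETSC_NULL,n,PETSC_NULL,&lev);
      PetscPrintf(PETSC_COMM_WORLD,"Level of orthogonality: %G\n",lev);

   Note that nw must still be positive when W is PETSC_NULL, since it sets the
   number of rows of W'*V that are checked.
*/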

/*
  Clean up context used in monitors of type XXXMonitorConverged.
  This function is shared by EPS, SVD, QEP
*/
PetscErrorCode SlepcConvMonitorDestroy(SlepcConvMonitor *ctx)
{
  if (!*ctx) return(0);
  PetscViewerDestroy(&(*ctx)->viewer);
  PetscFree(*ctx);
  return(0);
}
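
/*
   Usage sketch (hypothetical, not from this file): this routine is meant to be
   passed as the destroy callback when installing a converged-values monitor,
   along the lines of

      EPSMonitorSet(eps,EPSMonitorConverged,ctx,
                    (PetscErrorCode (*)(void**))SlepcConvMonitorDestroy);
*/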