Actual source code: dsghep.c

slepc-3.17.1 2022-04-11
/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

#include <slepc/private/dsimpl.h>
#include <slepcblaslapack.h>

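/* Allocate the two problem matrices A and B, the matrix Q that will hold the
   eigenvectors, and the permutation array used when sorting eigenvalues */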
PetscErrorCode DSAllocate_GHEP(DS ds,PetscInt ld)
{
  PetscFunctionBegin;
  PetscCall(DSAllocateMat_Private(ds,DS_MAT_A));
  PetscCall(DSAllocateMat_Private(ds,DS_MAT_B));
  PetscCall(DSAllocateMat_Private(ds,DS_MAT_Q));
  PetscCall(PetscFree(ds->perm));
  PetscCall(PetscMalloc1(ld,&ds->perm));
  PetscCall(PetscLogObjectMemory((PetscObject)ds,ld*sizeof(PetscInt)));
  PetscFunctionReturn(0);
}

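/* View the problem matrices; Q is shown only once the problem has been
   solved, and X only if eigenvectors have been computed */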
PetscErrorCode DSView_GHEP(DS ds,PetscViewer viewer)
{
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(viewer,&format));
  if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(0);
  PetscCall(DSViewMat(ds,viewer,DS_MAT_A));
  PetscCall(DSViewMat(ds,viewer,DS_MAT_B));
  if (ds->state>DS_STATE_INTERMEDIATE) PetscCall(DSViewMat(ds,viewer,DS_MAT_Q));
  if (ds->mat[DS_MAT_X]) PetscCall(DSViewMat(ds,viewer,DS_MAT_X));
  PetscFunctionReturn(0);
}

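/* Compute eigenvectors: both X and Y are taken from Q (the problem is
   Hermitian, so left and right eigenvectors coincide); before the problem
   has been solved, columns of the identity are returned instead */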
PetscErrorCode DSVectors_GHEP(DS ds,DSMatType mat,PetscInt *j,PetscReal *rnorm)
{
  PetscScalar    *Q = ds->mat[DS_MAT_Q];
  PetscInt       ld = ds->ld;

  PetscFunctionBegin;
  switch (mat) {
    case DS_MAT_X:
    case DS_MAT_Y:
      if (j) {
        if (ds->state>=DS_STATE_CONDENSED) PetscCall(PetscArraycpy(ds->mat[mat]+(*j)*ld,Q+(*j)*ld,ld));
        else {
          PetscCall(PetscArrayzero(ds->mat[mat]+(*j)*ld,ld));
          *(ds->mat[mat]+(*j)+(*j)*ld) = 1.0;
        }
      } else {
        if (ds->state>=DS_STATE_CONDENSED) PetscCall(PetscArraycpy(ds->mat[mat],Q,ld*ld));
        else PetscCall(DSSetIdentity(ds,mat));
      }
      break;
    case DS_MAT_U:
    case DS_MAT_V:
      SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_SUP,"Not implemented yet");
    default:
      SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Invalid mat parameter");
  }
  PetscFunctionReturn(0);
}

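/* Sort the eigenvalues (held on the diagonal of A) according to the given
   sorting criterion, permuting the columns of Q in the same way */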
PetscErrorCode DSSort_GHEP(DS ds,PetscScalar *wr,PetscScalar *wi,PetscScalar *rr,PetscScalar *ri,PetscInt *k)
{
  PetscInt       n,l,i,*perm,ld=ds->ld;
  PetscScalar    *A;

  PetscFunctionBegin;
  if (!ds->sc) PetscFunctionReturn(0);
  n = ds->n;
  l = ds->l;
  A = ds->mat[DS_MAT_A];
  perm = ds->perm;
  for (i=l;i<n;i++) wr[i] = A[i+i*ld];
  if (rr) PetscCall(DSSortEigenvalues_Private(ds,rr,ri,perm,PETSC_FALSE));
  else PetscCall(DSSortEigenvalues_Private(ds,wr,NULL,perm,PETSC_FALSE));
  for (i=l;i<n;i++) A[i+i*ld] = wr[perm[i]];
  for (i=l;i<n;i++) wr[i] = A[i+i*ld];
  PetscCall(DSPermuteColumns_Private(ds,l,n,n,DS_MAT_Q,perm));
  PetscFunctionReturn(0);
}

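/* Solve the problem with LAPACK xsygvd (xhegvd in complex arithmetic), a
   divide-and-conquer method; LAPACK overwrites A with the eigenvectors,
   which are copied to Q, after which A is set to Lambda and B to the
   identity, as stated in the DSGHEP manual page below */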
PetscErrorCode DSSolve_GHEP(DS ds,PetscScalar *wr,PetscScalar *wi)
{
  PetscScalar    *work,*A,*B,*Q;
  PetscBLASInt   itype = 1,*iwork,info,n1,liwork,ld,lrwork=0,lwork;
  PetscInt       off,i;
#if defined(PETSC_USE_COMPLEX)
  PetscReal      *rwork,*rr;
#endif

  PetscFunctionBegin;
  PetscCall(PetscBLASIntCast(ds->n-ds->l,&n1));
  PetscCall(PetscBLASIntCast(ds->ld,&ld));
  PetscCall(PetscBLASIntCast(5*ds->n+3,&liwork));
#if defined(PETSC_USE_COMPLEX)
  PetscCall(PetscBLASIntCast(ds->n*ds->n+2*ds->n,&lwork));
  PetscCall(PetscBLASIntCast(2*ds->n*ds->n+5*ds->n+1+n1,&lrwork));
#else
  PetscCall(PetscBLASIntCast(2*ds->n*ds->n+6*ds->n+1,&lwork));
#endif
  PetscCall(DSAllocateWork_Private(ds,lwork,lrwork,liwork));
  work = ds->work;
  iwork = ds->iwork;
  off = ds->l+ds->l*ld;
  A = ds->mat[DS_MAT_A];
  B = ds->mat[DS_MAT_B];
  Q = ds->mat[DS_MAT_Q];
#if defined(PETSC_USE_COMPLEX)
  rr = ds->rwork;
  rwork = ds->rwork + n1;
  lrwork = ds->lrwork - n1;
  PetscStackCallBLAS("LAPACKsygvd",LAPACKsygvd_(&itype,"V","U",&n1,A+off,&ld,B+off,&ld,rr,work,&lwork,rwork,&lrwork,iwork,&liwork,&info));
  for (i=0;i<n1;i++) wr[ds->l+i] = rr[i];
#else
  PetscStackCallBLAS("LAPACKsygvd",LAPACKsygvd_(&itype,"V","U",&n1,A+off,&ld,B+off,&ld,wr+ds->l,work,&lwork,iwork,&liwork,&info));
#endif
  SlepcCheckLapackInfo("sygvd",info);
  PetscCall(PetscArrayzero(Q+ds->l*ld,n1*ld));
  for (i=ds->l;i<ds->n;i++) PetscCall(PetscArraycpy(Q+ds->l+i*ld,A+ds->l+i*ld,n1));
  PetscCall(PetscArrayzero(B+ds->l*ld,n1*ld));
  PetscCall(PetscArrayzero(A+ds->l*ld,n1*ld));
  for (i=ds->l;i<ds->n;i++) {
    if (wi) wi[i] = 0.0;
    B[i+i*ld] = 1.0;
    A[i+i*ld] = wr[i];
  }
  PetscFunctionReturn(0);
}

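/* Replicate the computed data on all MPI processes: the first rank packs
   A, B, optionally Q, and the eigenvalues into a work buffer, which is then
   broadcast and unpacked by the remaining ranks (eigi is not communicated
   because the eigenvalues of a Hermitian problem are real) */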
PetscErrorCode DSSynchronize_GHEP(DS ds,PetscScalar eigr[],PetscScalar eigi[])
{
  PetscInt       ld=ds->ld,l=ds->l,k;
  PetscMPIInt    n,rank,off=0,size,ldn;

  PetscFunctionBegin;
  k = 2*(ds->n-l)*ld;
  if (ds->state>DS_STATE_RAW) k += (ds->n-l)*ld;
  if (eigr) k += (ds->n-l);
  PetscCall(DSAllocateWork_Private(ds,k,0,0));
  PetscCall(PetscMPIIntCast(k*sizeof(PetscScalar),&size));
  PetscCall(PetscMPIIntCast(ds->n-l,&n));
  PetscCall(PetscMPIIntCast(ld*(ds->n-l),&ldn));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ds),&rank));
  if (!rank) {
    PetscCallMPI(MPI_Pack(ds->mat[DS_MAT_A]+l*ld,ldn,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds)));
    PetscCallMPI(MPI_Pack(ds->mat[DS_MAT_B]+l*ld,ldn,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds)));
    if (ds->state>DS_STATE_RAW) PetscCallMPI(MPI_Pack(ds->mat[DS_MAT_Q]+l*ld,ldn,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds)));
    if (eigr) PetscCallMPI(MPI_Pack(eigr+l,n,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds)));
  }
  PetscCallMPI(MPI_Bcast(ds->work,size,MPI_BYTE,0,PetscObjectComm((PetscObject)ds)));
  if (rank) {
    PetscCallMPI(MPI_Unpack(ds->work,size,&off,ds->mat[DS_MAT_A]+l*ld,ldn,MPIU_SCALAR,PetscObjectComm((PetscObject)ds)));
    PetscCallMPI(MPI_Unpack(ds->work,size,&off,ds->mat[DS_MAT_B]+l*ld,ldn,MPIU_SCALAR,PetscObjectComm((PetscObject)ds)));
    if (ds->state>DS_STATE_RAW) PetscCallMPI(MPI_Unpack(ds->work,size,&off,ds->mat[DS_MAT_Q]+l*ld,ldn,MPIU_SCALAR,PetscObjectComm((PetscObject)ds)));
    if (eigr) PetscCallMPI(MPI_Unpack(ds->work,size,&off,eigr+l,n,MPIU_SCALAR,PetscObjectComm((PetscObject)ds)));
  }
  PetscFunctionReturn(0);
}

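/* Report which of the DS matrices are Hermitian, in this case A and B */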
PetscErrorCode DSHermitian_GHEP(DS ds,DSMatType m,PetscBool *flg)
{
  PetscFunctionBegin;
  if (m==DS_MAT_A || m==DS_MAT_B) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*MC
   DSGHEP - Dense Generalized Hermitian Eigenvalue Problem.

   Level: beginner

   Notes:
   The problem is expressed as A*X = B*X*Lambda, where both A and B are
   real symmetric (or complex Hermitian) and B is positive-definite. Lambda
   is a diagonal matrix whose diagonal elements are the eigenvalues, which
   are returned in the argument wr of DSSolve(). After solve, A is
   overwritten with Lambda, and B is overwritten with the identity matrix.

   No intermediate state is implemented, nor compact storage.

   Used DS matrices:
+  DS_MAT_A - first problem matrix
.  DS_MAT_B - second problem matrix
-  DS_MAT_Q - matrix of B-orthogonal eigenvectors, which is equal to X

   Implemented methods:
.  0 - Divide and Conquer (_sygvd)

.seealso: DSCreate(), DSSetType(), DSType
M*/
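/* Populate the function table with the GHEP-specific implementations */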
SLEPC_EXTERN PetscErrorCode DSCreate_GHEP(DS ds)
{
  PetscFunctionBegin;
  ds->ops->allocate      = DSAllocate_GHEP;
  ds->ops->view          = DSView_GHEP;
  ds->ops->vectors       = DSVectors_GHEP;
  ds->ops->solve[0]      = DSSolve_GHEP;
  ds->ops->sort          = DSSort_GHEP;
  ds->ops->synchronize   = DSSynchronize_GHEP;
  ds->ops->hermitian     = DSHermitian_GHEP;
  PetscFunctionReturn(0);
}
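
As a complement to the listing, the following is a minimal usage sketch (not part of dsghep.c) showing how this DS type can be driven through the public interface. The 3x3 diagonal test matrices are arbitrary assumptions, chosen so that the eigenvalues are simply 1, 2, 3.

#include <slepcds.h>

int main(int argc,char **argv)
{
  DS          ds;
  PetscScalar *A,*B,wr[3],wi[3];
  PetscInt    i,n=3,ld=3;

  PetscCall(SlepcInitialize(&argc,&argv,NULL,NULL));
  PetscCall(DSCreate(PETSC_COMM_WORLD,&ds));
  PetscCall(DSSetType(ds,DSGHEP));
  PetscCall(DSAllocate(ds,ld));
  PetscCall(DSSetDimensions(ds,n,0,0));
  /* fill A (Hermitian) and B (Hermitian positive-definite); here both are
     diagonal, so the eigenvalues are just the ratios A[i][i]/B[i][i] */
  PetscCall(DSGetArray(ds,DS_MAT_A,&A));
  PetscCall(DSGetArray(ds,DS_MAT_B,&B));
  for (i=0;i<n;i++) { A[i+i*ld] = (PetscScalar)(i+1); B[i+i*ld] = 1.0; }
  PetscCall(DSRestoreArray(ds,DS_MAT_A,&A));
  PetscCall(DSRestoreArray(ds,DS_MAT_B,&B));
  PetscCall(DSSetState(ds,DS_STATE_RAW));
  PetscCall(DSSolve(ds,wr,wi));      /* eigenvalues are returned in wr */
  PetscCall(DSView(ds,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(DSDestroy(&ds));
  PetscCall(SlepcFinalize());
  return 0;
}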