/* Actual source code: test43.c */
1: /*
2: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
3: SLEPc - Scalable Library for Eigenvalue Problem Computations
4: Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain
6: This file is part of SLEPc.
7: SLEPc is distributed under a 2-clause BSD license (see LICENSE).
8: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
9: */
/* Text printed by the PETSc/SLEPc -help option; documents this example's intent. */
11: static char help[] = "Solves a linear system using PCHPDDM.\n\n"
12: "Modification of ${PETSC_DIR}/src/ksp/ksp/tutorials/ex76.c where concurrent EPS are instantiated explicitly by the user.\n\n";
14: #include <slepceps.h>
/*
   Solves A x = b with KSP + PCHPDDM on exactly 4 MPI processes. Unlike ex76,
   the per-subdomain generalized eigenproblems that build the coarse (deflation)
   space are solved explicitly here with a sequential EPS on each rank, and the
   resulting basis is handed to the preconditioner via PCHPDDMSetDeflationMat()
   instead of being computed inside PCSetUp_HPDDM().
*/
16: int main(int argc,char **args)
17: {
18: Mat A,aux,a,P,B,X;
19: Vec b;
20: KSP ksp;
21: PC pc;
22: EPS eps;
23: ST st;
24: IS is,sizes;
25: const PetscInt *idx;
26: PetscInt m,rstart,rend,location,nev,nconv;
27: PetscMPIInt rank,size;
28: PetscLayout map;
29: PetscViewer viewer;
30: char dir[PETSC_MAX_PATH_LEN],name[PETSC_MAX_PATH_LEN];
31: PetscBool flg;
33: PetscFunctionBeginUser;
34: PetscCall(SlepcInitialize(&argc,&args,NULL,help));
/* the binary data files (sizes_4.dat, is_4.dat, Neumann_4.dat) are partitioned for exactly 4 ranks */
35: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
36: PetscCheck(size == 4,PETSC_COMM_WORLD,PETSC_ERR_USER,"This example requires 4 processes");
37: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
38: PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
39: PetscCall(PetscStrcpy(dir,"."));
40: PetscCall(PetscOptionsGetString(NULL,NULL,"-load_dir",dir,sizeof(dir),NULL));
41: /* loading matrices */
/* sizes_<size>.dat holds per-rank layout data: idx[0..3] are the local/global
   row/column sizes of A, idx[4] is this rank's overlapping-subdomain size,
   used below as the local layout of the temporary Mat X */
42: PetscCall(PetscSNPrintf(name,sizeof(name),"%s/sizes_%d.dat",dir,size));
43: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,name,FILE_MODE_READ,&viewer));
44: PetscCall(ISCreate(PETSC_COMM_WORLD,&sizes));
45: PetscCall(ISLoad(sizes,viewer));
46: PetscCall(ISGetIndices(sizes,&idx));
47: PetscCall(MatSetSizes(A,idx[0],idx[1],idx[2],idx[3]));
48: PetscCall(MatCreate(PETSC_COMM_WORLD,&X));
49: PetscCall(MatSetSizes(X,idx[4],idx[4],PETSC_DETERMINE,PETSC_DETERMINE));
50: PetscCall(MatSetUp(X));
51: PetscCall(ISRestoreIndices(sizes,&idx));
52: PetscCall(ISDestroy(&sizes));
53: PetscCall(PetscViewerDestroy(&viewer));
/* global operator A */
54: PetscCall(PetscSNPrintf(name,sizeof(name),"%s/A.dat",dir));
55: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,name,FILE_MODE_READ,&viewer));
56: PetscCall(MatLoad(A,viewer));
57: PetscCall(PetscViewerDestroy(&viewer));
/* is_<size>.dat: global indices of each rank's overlapping subdomain, loaded
   with X's layout so every rank gets its own slice, then copied into a
   sequential IS "is" for the local work below */
58: PetscCall(PetscSNPrintf(name,sizeof(name),"%s/is_%d.dat",dir,size));
59: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,name,FILE_MODE_READ,&viewer));
60: PetscCall(ISCreate(PETSC_COMM_WORLD,&sizes));
61: PetscCall(MatGetLayouts(X,&map,NULL));
62: PetscCall(ISSetLayout(sizes,map));
63: PetscCall(ISLoad(sizes,viewer));
64: PetscCall(ISGetLocalSize(sizes,&m));
65: PetscCall(ISGetIndices(sizes,&idx));
66: PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_COPY_VALUES,&is));
67: PetscCall(ISRestoreIndices(sizes,&idx));
68: PetscCall(ISDestroy(&sizes));
/* propagate A's block size to the subdomain IS (vector-valued problem, cf. -matload_block_size) */
69: PetscCall(MatGetBlockSize(A,&m));
70: PetscCall(ISSetBlockSize(is,m));
71: PetscCall(PetscViewerDestroy(&viewer));
/* Neumann_<size>.dat: subdomain Neumann matrices stored as one parallel Mat;
   each rank's sequential auxiliary matrix "aux" is its local diagonal block */
72: PetscCall(PetscSNPrintf(name,sizeof(name),"%s/Neumann_%d.dat",dir,size));
73: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,name,FILE_MODE_READ,&viewer));
74: PetscCall(MatLoad(X,viewer));
75: PetscCall(PetscViewerDestroy(&viewer));
76: PetscCall(MatGetDiagonalBlock(X,&B));
77: PetscCall(MatDuplicate(B,MAT_COPY_VALUES,&aux));
78: PetscCall(MatDestroy(&X));
79: PetscCall(MatSetBlockSizesFromMats(aux,A,A));
80: PetscCall(MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE));
81: PetscCall(MatSetOption(aux,MAT_SYMMETRIC,PETSC_TRUE));
82: /* ready for testing */
83: PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
84: PetscCall(KSPSetOperators(ksp,A,A));
85: PetscCall(KSPGetPC(ksp,&pc));
86: PetscCall(PCSetType(pc,PCHPDDM));
87: PetscCall(MatDuplicate(aux,MAT_DO_NOT_COPY_VALUES,&B)); /* duplicate so that MatStructure is SAME_NONZERO_PATTERN */
88: PetscCall(MatGetDiagonalBlock(A,&a));
89: PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
90: PetscCall(ISGetLocalSize(is,&m));
/* P is a boolean (one 1 per row) restriction mapping each locally owned row of A
   to its position inside the overlapping-subdomain IS */
91: PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,rend-rstart,m,1,NULL,&P));
92: for (m = rstart; m < rend; ++m) {
93: PetscCall(ISLocate(is,m,&location));
94: PetscCheck(location >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"IS of the auxiliary Mat does not include all local rows of A");
95: PetscCall(MatSetValue(P,m-rstart,location,1.0,INSERT_VALUES));
96: }
97: PetscCall(MatAssemblyBegin(P,MAT_FINAL_ASSEMBLY));
98: PetscCall(MatAssemblyEnd(P,MAT_FINAL_ASSEMBLY));
/* B = P^T a P: the local diagonal block of A expressed in subdomain numbering,
   kept on aux's (superset) nonzero pattern since B started as a zero duplicate of aux */
99: PetscCall(MatPtAP(a,P,MAT_INITIAL_MATRIX,1.0,&X));
100: PetscCall(MatDestroy(&P));
101: PetscCall(MatAXPY(B,1.0,X,SUBSET_NONZERO_PATTERN));
102: PetscCall(MatDestroy(&X));
103: PetscCall(MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE));
/* per-rank sequential GHEP aux v = lambda B v, targeting the eigenvalues
   closest to zero via shift-and-invert (GenEO-style coarse space) */
104: PetscCall(EPSCreate(PETSC_COMM_SELF,&eps));
105: PetscCall(EPSSetOperators(eps,aux,B));
106: PetscCall(EPSSetProblemType(eps,EPS_GHEP));
107: PetscCall(EPSSetTarget(eps,0.0));
108: PetscCall(EPSSetWhichEigenpairs(eps,EPS_TARGET_MAGNITUDE));
109: PetscCall(EPSGetST(eps,&st));
110: PetscCall(STSetType(st,STSINVERT));
111: PetscCall(EPSSetFromOptions(eps));
112: PetscCall(EPSGetDimensions(eps,&nev,NULL,NULL));
113: PetscCall(EPSSolve(eps));
114: PetscCall(EPSGetConverged(eps,&nconv));
/* keep only the eigenpairs that actually converged */
115: nev = PetscMin(nev,nconv);
/* copy the eigenvectors into the columns of a dense Mat: the deflation basis */
116: PetscCall(ISGetLocalSize(is,&m));
117: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF,m,nev,NULL,&P));
118: for (m = 0; m < nev; ++m) {
119: PetscCall(MatDenseGetColumnVecWrite(P,m,&b));
120: PetscCall(EPSGetEigenvector(eps,m,b,NULL));
121: PetscCall(MatDenseRestoreColumnVecWrite(P,m,&b));
122: }
123: PetscCall(EPSDestroy(&eps));
124: PetscCall(MatDestroy(&B));
/* hand the user-built coarse basis to PCHPDDM; "is" gives the subdomain-to-global numbering */
125: PetscCall(PCHPDDMSetDeflationMat(pc,is,P));
126: PetscCall(MatDestroy(&P));
127: PetscCall(MatDestroy(&aux));
128: PetscCall(KSPSetFromOptions(ksp));
129: PetscCall(ISDestroy(&is));
/* solve with b = 1 as right-hand side, overwriting b with the solution (in-place solve) */
130: PetscCall(MatCreateVecs(A,NULL,&b));
131: PetscCall(VecSet(b,1.0));
132: PetscCall(KSPSolve(ksp,b,b));
133: PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCHPDDM,&flg));
134: if (flg) {
135: PetscCall(PCHPDDMGetSTShareSubKSP(pc,&flg));
136: /* since EPSSolve() is called outside PCSetUp_HPDDM() and there is no mechanism (yet) to pass the underlying ST, */
137: /* PCHPDDMGetSTShareSubKSP() should not return PETSC_TRUE when using PCHPDDMSetDeflationMat() */
138: PetscCheck(!flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCHPDDMGetSTShareSubKSP() should not return PETSC_TRUE");
139: }
140: PetscCall(VecDestroy(&b));
141: PetscCall(KSPDestroy(&ksp));
142: PetscCall(MatDestroy(&A));
143: PetscCall(SlepcFinalize());
144: return 0;
145: }
147: /*TEST
149: build:
150: requires: hpddm
152: test:
153: requires: double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES)
154: nsize: 4
155: filter: grep -v " total: nonzeros=" | grep -v " rows=" | grep -v " factor fill ratio given " | grep -v " using I-node" | sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 5/Linear solve converged due to CONVERGED_RTOL iterations 4/g" -e "s/amount of overlap = 1/user-defined overlap/g"
156: # similar output as ex76 with -pc_hpddm_levels_eps_nev val instead of just -eps_nev val
157: args: -ksp_rtol 1e-3 -ksp_max_it 10 -ksp_error_if_not_converged -ksp_converged_reason -ksp_view -ksp_type hpddm -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_define_subdomains {{false true}shared output} -pc_hpddm_levels_1_pc_type asm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_st_share_sub_ksp -eps_nev 10 -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -matload_block_size 2
159: TEST*/