Actual source code: ex12.c
static char help[] = "Solves a linear system in parallel with KSP,\n\
demonstrating how to register a new preconditioner (PC) type.\n\
Input parameters include:\n\
  -m <mesh_x> : number of mesh points in x-direction\n\
  -n <mesh_y> : number of mesh points in y-direction\n\n";
/*
   Demonstrates registering a new preconditioner (PC) type.

   To register a PC type, whether its code is linked into the executable
   or resides in a dynamic library, use PCRegister().

   Also provide the prototype for your PCCreate_XXX() function. In
   this example we use the PETSc implementation of the Jacobi method,
   PCCreate_Jacobi(), just as an example.

   See the file src/ksp/pc/impls/jacobi/jacobi.c for details on how to
   write a new PC component.

   See the manual page for PCRegister() for details on how to register a method.
*/
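/*
   A minimal sketch of the shape a user-defined creation routine takes.
   The names PCCreate_MyPC and PCApply_MyPC are illustrative, not part of
   this example; a real implementation also includes the private header
   <petsc/private/pcimpl.h>, as jacobi.c does:

     static PetscErrorCode PCApply_MyPC(PC pc,Vec x,Vec y)
     {
       VecCopy(x,y);  // identity "preconditioner": y <- x
       return 0;
     }

     PETSC_EXTERN PetscErrorCode PCCreate_MyPC(PC pc)
     {
       pc->ops->apply = PCApply_MyPC;  // hook the apply operation into the PC
       return 0;
     }

   PCRegister("mypc",PCCreate_MyPC) would then make -pc_type mypc available.
*/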
/*
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>
PETSC_EXTERN PetscErrorCode PCCreate_Jacobi(PC);

int main(int argc,char **args)
{
  Vec         x,b,u;   /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
  PetscScalar v,one = 1.0;
  PC          pc;      /* preconditioner context */

  PetscInitialize(&argc,&args,(char*)0,help);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix can be
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
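  /*
     For example, with MatSetFromOptions() in place the storage format can
     be chosen at run time with -mat_type aij (the default) or another
     registered matrix type, without recompiling.
  */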
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors. Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
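  /*
     A sketch of the default layout (not something this code relies on):
     with PETSC_DECIDE the rows are split nearly evenly across processes,
     so for the default m*n = 56 rows on 2 processes, process 0 owns
     rows [0,28) and process 1 owns rows [28,56).
  */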
  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
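  /*
     Worked example of the indexing above: global row Ii corresponds to
     grid point (i,j) via Ii = i*n + j. With the default n = 7, row
     Ii = 9 is the point (i,j) = (1,2); it couples to rows 2 and 16
     (the i-1 and i+1 neighbors) and rows 8 and 10 (the j-1 and j+1
     neighbors), each with value -1.0, plus 4.0 on the diagonal.
  */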
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly. PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);
  /*
     Set the exact solution, then compute the right-hand-side vector.
     We use an exact solution vector with all elements equal to 1.0.
  */
  VecSet(u,one);
  MatMult(A,u,b);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);
  /*
     First register a new PC type with the command PCRegister()
  */
  PCRegister("ourjacobi",PCCreate_Jacobi);

  /*
     Set the PC type to be the new method
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,"ourjacobi");
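  /*
     Because "ourjacobi" is now registered, it can also be selected at
     run time just like any built-in preconditioner, e.g. with
     -pc_type ourjacobi on the command line.
  */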
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
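  /*
     A hypothetical invocation combining such options (the binary name
     ./ex12 is assumed from this example's file name):
       mpiexec -n 4 ./ex12 -m 16 -n 16 -ksp_type gmres -ksp_rtol 1.e-8 -ksp_monitor
  */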
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error: compute x <- x - u (so x now holds the error
     vector) and take its 2-norm.
  */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  /*
     Print convergence information. PetscPrintf() produces a single
     print statement from all processes that share a communicator.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/*TEST

   test:
      args: -ksp_gmres_cgs_refinement_type refine_always

TEST*/