Actual source code: ex26.c
petsc-3.5.4 2015-05-23
static char help[] = "Solves the Laplacian with multigrid, bad way.\n\
  -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  -my <yg>, where <yg> = number of grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";
/*
   Modified from ~src/ksp/examples/tests/ex19.c. Used for testing the ML 6.2 interface.

   This problem is modeled by the partial differential equation

           -Laplacian u = g,  0 < x,y < 1,

   with boundary conditions

           u = 0  for  x = 0, x = 1, y = 0, y = 1.

   A finite difference approximation with the usual 5-point stencil
   is used to discretize the boundary value problem to obtain a linear
   system of equations.

   Usage: ./ex26 -ksp_monitor_short -pc_type ml
          -mg_coarse_ksp_max_it 10
          -mg_levels_1_ksp_max_it 10 -mg_levels_2_ksp_max_it 10
          -mg_fine_ksp_max_it 10
*/
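
/*
   With hx = 1/(mx-1) and hy = 1/(my-1), the matrix row assembled in
   FormJacobian_Grid() below for an interior node (i,j) is the usual 5-point
   Laplacian scaled by hx*hy:

       2*(hy/hx + hx/hy)  on the diagonal,
           -(hy/hx)       for the west/east neighbors  (i-1,j), (i+1,j),
           -(hx/hy)       for the south/north neighbors (i,j-1), (i,j+1).
*/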

#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application context */
typedef struct {
  PetscInt mx,my;         /* number of grid points in x and y directions */
  Vec      localX,localF; /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;         /* global vectors */
  Mat      J;             /* Jacobian on grid */
  Mat      A,P,R;
  KSP      ksp;
} GridCtx;
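
/* Only mx, my, da, x, b, localX, and localF are referenced in this example;
   the remaining members (r, J, A, P, R, ksp) are carried over from ex19.c
   and are unused here. */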

extern int FormJacobian_Grid(GridCtx*,Mat*);

int main(int argc,char **argv)
{
  PetscInt    its,n,Nx = PETSC_DECIDE,Ny = PETSC_DECIDE,nlocal;
  PetscMPIInt size;
  PetscScalar one = 1.0;
  PetscInt    mx,my;
  Mat         A;
  GridCtx     fine_ctx;
  KSP         ksp;
  PetscBool   flg;

  PetscInitialize(&argc,&argv,(char*)0,help);

  /* set up discretization matrix for fine grid */
  fine_ctx.mx = 9; fine_ctx.my = 9;
  PetscOptionsGetInt(NULL,"-mx",&mx,&flg);
  if (flg) fine_ctx.mx = mx;
  PetscOptionsGetInt(NULL,"-my",&my,&flg);
  if (flg) fine_ctx.my = my;
  PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",fine_ctx.mx,fine_ctx.my);
  n = fine_ctx.mx*fine_ctx.my;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,"-Nx",&Nx,NULL);
  PetscOptionsGetInt(NULL,"-Ny",&Ny,NULL);

  /* Set up distributed array for fine grid */
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
               fine_ctx.mx,fine_ctx.my,Nx,Ny,1,1,NULL,NULL,&fine_ctx.da);
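  /* the DMDA created above manages a 2d structured grid with one unknown per
     node and stencil width 1 (star/5-point pattern), distributed over an Nx by
     Ny process grid; PETSC_DECIDE lets PETSc pick the decomposition */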
  DMCreateGlobalVector(fine_ctx.da,&fine_ctx.x);
  VecDuplicate(fine_ctx.x,&fine_ctx.b);
  VecGetLocalSize(fine_ctx.x,&nlocal);
  DMCreateLocalVector(fine_ctx.da,&fine_ctx.localX);
  VecDuplicate(fine_ctx.localX,&fine_ctx.localF);
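
  /* preallocate 5 nonzeros per row in the diagonal block and 3 per row in the
     off-process block; the 5-point stencil produces at most 5 entries per row */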
  MatCreateAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,5,NULL,3,NULL,&A);
  FormJacobian_Grid(&fine_ctx,&A);

  /* create linear solver */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /* set values for rhs vector */
  VecSet(fine_ctx.b,one);
  {
    PetscRandom rdm;
    PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
    PetscRandomSetFromOptions(rdm);
    VecSetRandom(fine_ctx.b,rdm);
    PetscRandomDestroy(&rdm);
  }
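  /* note: VecSetRandom() above replaces the constant 1.0 placed in the
     right-hand side by VecSet(), so the solve uses a random rhs */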

  /* set options, then solve system */
  KSPSetFromOptions(ksp); /* calls PCSetFromOptions_ML if 'pc_type=ml' */
  KSPSetOperators(ksp,A,A);
  KSPSolve(ksp,fine_ctx.b,fine_ctx.x);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %D\n",its);

  /* free data structures */
  VecDestroy(&fine_ctx.x);
  VecDestroy(&fine_ctx.b);
  DMDestroy(&fine_ctx.da);
  VecDestroy(&fine_ctx.localX);
  VecDestroy(&fine_ctx.localF);
  MatDestroy(&A);
  KSPDestroy(&ksp);

  PetscFinalize();
  return 0;
}

int FormJacobian_Grid(GridCtx *grid,Mat *J)
{
  Mat                    jac = *J;
  PetscInt               i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
  PetscInt               grow;
  const PetscInt         *ltog;
  PetscScalar            two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
  ISLocalToGlobalMapping ltogm;

  mx    = grid->mx; my = grid->my;
  hx    = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
  hxdhy = hx/hy; hydhx = hy/hx;
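  /* hydhx = hy/hx and hxdhy = hx/hy are the stencil weights obtained by
     scaling the standard 5-point finite difference Laplacian by hx*hy */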

  /* Get ghost points */
  DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);
  DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);
  DMGetLocalToGlobalMapping(grid->da,&ltogm);
  ISLocalToGlobalMappingGetIndices(ltogm,&ltog);

  /* Evaluate Jacobian of function */
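  /* row is the index of node (i,j) in the ghosted local ordering, which has Xm
     columns and starts at (Xs,Ys); ltog[] maps it to the global ordering.
     Interior nodes get the full 5-point stencil; boundary nodes, where u = 0,
     get only a scaled diagonal entry (factor .5 on edges, .25 at corners). */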
  for (j=ys; j<ys+ym; j++) {
    row = (j - Ys)*Xm + xs - Xs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      grow = ltog[row];
      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
        v[0] = -hxdhy; col[0] = ltog[row - Xm];
        v[1] = -hydhx; col[1] = ltog[row - 1];
        v[2] = two*(hydhx + hxdhy); col[2] = grow;
        v[3] = -hydhx; col[3] = ltog[row + 1];
        v[4] = -hxdhy; col[4] = ltog[row + Xm];
        MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
        value = .5*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      } else {
        value = .25*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      }
    }
  }
  ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);

  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  return 0;
}