Actual source code: ex8f.F

petsc-3.5.4 2015-05-23
!
!   Tests PCMGSetResidual
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>
#include <finclude/petscksp.h>
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!

      Mat              A
      Vec              x,b,u
      PC               pc
      PetscInt         n,dim,istart,iend
      PetscInt         i,j,jj,ii,one,zero
      PetscErrorCode   ierr
      PetscScalar      v
      external         MyResidual
      PetscScalar      pfive
      KSP              ksp

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      pfive  = .5d0
      n      = 6
      dim    = n*n
      one    = 1
      zero   = 0

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
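!  For example, because MatSetFromOptions() is called below, the matrix
!  format can be selected at run time with the -mat_type option
!  (e.g. -mat_type aij).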

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
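!   - The loop below assembles the standard 5-point Laplacian stencil
!     on an n x n grid: 4.0 on the diagonal of row II and -1.0 for each
!     of its (up to) four grid neighbors.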

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.n-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,pfive,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context and configure a one-level multigrid
!  (PCMG) preconditioner.

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      call KSPGetPC(ksp,pc,ierr)
      call PCSetType(pc,PCMG,ierr)
      call PCMGSetLevels(pc,one,PETSC_NULL_OBJECT,ierr)

!  Register MyResidual as the routine used to compute the residual
!  r = b - A*x on level 0 (the only level here); A is passed through
!  as the matrix argument of the callback.

      call PCMGSetResidual(pc,zero,MyResidual,A,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)


!  Free work space.  This example only exercises the PCMGSetResidual()
!  Fortran interface, so no solve is performed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

      call PetscFinalize(ierr)
      end

!  Dummy residual routine registered with PCMGSetResidual(); it is
!  called as MyResidual(A,b,x,r,ierr).  This test only checks that the
!  interface accepts it, so it does nothing.
      subroutine MyResidual(A,b,x,r,ierr)
      Mat A
      Vec b,x,r
      PetscErrorCode ierr
      return
      end
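
!  A minimal sketch (not part of the original test) of what a working
!  residual routine could look like.  The subroutine name MyResidual2
!  is hypothetical; the computation uses only MatMult() and VecAYPX()
!  to form r = b - A*x.
      subroutine MyResidual2(A,b,x,r,ierr)
      Mat A
      Vec b,x,r
      PetscErrorCode ierr
      PetscScalar mone
      mone = -1.0
!     r = A*x
      call MatMult(A,x,r,ierr)
!     r = -r + b = b - A*x   (VecAYPX computes y = alpha*y + x)
      call VecAYPX(r,mone,b,ierr)
      return
      end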