/*
   Actual source code: ex4.c  (petsc-3.5.4, 2015-05-23)
   Report typos and errors to the PETSc developers.
*/
static char help[] = "Chemo-taxis Problems from Mathematical Biology.\n";

/*
     Page 18, Chemo-taxis Problems from Mathematical Biology

        rho_t =
        c_t   =

     Further discussion on Page 134 and in 2d on Page 409
*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS solvers.  Note that this
   file automatically includes:
     petscsys.h       - base PETSc routines   petscvec.h - vectors
     petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
     petscksp.h   - linear solvers
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

 28: typedef struct {
 29:   PetscScalar rho,c;
 30: } Field;

 32: typedef struct {
 33:   PetscScalar epsilon,delta,alpha,beta,gamma,kappa,lambda,mu,cstar;
 34:   PetscBool   upwind;
 35: } AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode IFunction(TS,PetscReal,Vec,Vec,Vec,void*),InitialConditions(DM,Vec);

 44: int main(int argc,char **argv)
 45: {
 46:   TS             ts;                  /* nonlinear solver */
 47:   Vec            U;                   /* solution, residual vectors */
 49:   DM             da;
 50:   AppCtx         appctx;

 52:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 53:      Initialize program
 54:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 55:   PetscInitialize(&argc,&argv,(char*)0,help);

 57:   appctx.epsilon = 1.0e-3;
 58:   appctx.delta   = 1.0;
 59:   appctx.alpha   = 10.0;
 60:   appctx.beta    = 4.0;
 61:   appctx.gamma   = 1.0;
 62:   appctx.kappa   = .75;
 63:   appctx.lambda  = 1.0;
 64:   appctx.mu      = 100.;
 65:   appctx.cstar   = .2;
 66:   appctx.upwind  = PETSC_TRUE;

 68:   PetscOptionsGetScalar(NULL,"-delta",&appctx.delta,NULL);
 69:   PetscOptionsGetBool(NULL,"-upwind",&appctx.upwind,NULL);

 71:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 72:      Create distributed array (DMDA) to manage parallel grid and vectors
 73:   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 74:   DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE,-8,2,1,NULL,&da);
 75:   DMDASetFieldName(da,0,"rho");
 76:   DMDASetFieldName(da,1,"c");

 78:   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 79:      Extract global vectors from DMDA; then duplicate for remaining
 80:      vectors that are the same types
 81:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 82:   DMCreateGlobalVector(da,&U);

 84:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 85:      Create timestepping solver context
 86:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 87:   TSCreate(PETSC_COMM_WORLD,&ts);
 88:   TSSetType(ts,TSROSW);
 89:   TSSetDM(ts,da);
 90:   TSSetProblemType(ts,TS_NONLINEAR);
 91:   TSSetIFunction(ts,NULL,IFunction,&appctx);


 94:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 95:      Set initial conditions
 96:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 97:   InitialConditions(da,U);
 98:   TSSetSolution(ts,U);

100:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
101:      Set solver options
102:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
103:   TSSetInitialTimeStep(ts,0.0,.0001);
104:   TSSetDuration(ts,PETSC_DEFAULT,1.0);
105:   TSSetFromOptions(ts);

107:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
108:      Solve nonlinear system
109:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
110:   TSSolve(ts,U);

112:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
113:      Free work space.  All PETSc objects should be destroyed when they
114:      are no longer needed.
115:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
116:   VecDestroy(&U);
117:   TSDestroy(&ts);
118:   DMDestroy(&da);

120:   PetscFinalize();
121:   return(0);
122: }
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit function F(t,U,Udot).

   Input Parameters:
.  ts - the TS context
.  ftime - current time
.  U - input vector (current solution)
.  Udot - time derivative of the current solution
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
 */
137: PetscErrorCode IFunction(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
138: {
139:   AppCtx         *appctx = (AppCtx*)ptr;
140:   DM             da;
142:   PetscInt       i,Mx,xs,xm;
143:   PetscReal      hx,sx;
144:   PetscScalar    rho,c,rhoxx,cxx,cx,rhox,kcxrhox;
145:   Field          *u,*f,*udot;
146:   Vec            localU;

149:   TSGetDM(ts,&da);
150:   DMGetLocalVector(da,&localU);
151:   DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

153:   hx = 1.0/(PetscReal)(Mx-1); sx = 1.0/(hx*hx);

155:   /*
156:      Scatter ghost points to local vector,using the 2-step process
157:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
158:      By placing code between these two statements, computations can be
159:      done while messages are in transition.
160:   */
161:   DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
162:   DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

164:   /*
165:      Get pointers to vector data
166:   */
167:   DMDAVecGetArray(da,localU,&u);
168:   DMDAVecGetArray(da,Udot,&udot);
169:   DMDAVecGetArray(da,F,&f);

171:   /*
172:      Get local grid boundaries
173:   */
174:   DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);

176:   if (!xs) {
177:     f[0].rho = udot[0].rho; /* u[0].rho - 0.0; */
178:     f[0].c   = udot[0].c; /* u[0].c   - 1.0; */
179:     xs++;
180:     xm--;
181:   }
182:   if (xs+xm == Mx) {
183:     f[Mx-1].rho = udot[Mx-1].rho; /* u[Mx-1].rho - 1.0; */
184:     f[Mx-1].c   = udot[Mx-1].c;  /* u[Mx-1].c   - 0.0;  */
185:     xm--;
186:   }

188:   /*
189:      Compute function over the locally owned part of the grid
190:   */
191:   for (i=xs; i<xs+xm; i++) {
192:     rho   = u[i].rho;
193:     rhoxx = (-2.0*rho + u[i-1].rho + u[i+1].rho)*sx;
194:     c     = u[i].c;
195:     cxx   = (-2.0*c + u[i-1].c + u[i+1].c)*sx;

197:     if (!appctx->upwind) {
198:       rhox    = .5*(u[i+1].rho - u[i-1].rho)/hx;
199:       cx      = .5*(u[i+1].c - u[i-1].c)/hx;
200:       kcxrhox = appctx->kappa*(cxx*rho + cx*rhox);
201:     } else {
202:       kcxrhox = appctx->kappa*((u[i+1].c - u[i].c)*u[i+1].rho - (u[i].c - u[i-1].c)*u[i].rho)*sx;
203:     }

205:     f[i].rho = udot[i].rho - appctx->epsilon*rhoxx + kcxrhox  - appctx->mu*PetscAbsScalar(rho)*(1.0 - rho)*PetscMax(0,PetscRealPart(c - appctx->cstar)) + appctx->beta*rho;
206:     f[i].c   = udot[i].c - appctx->delta*cxx + appctx->lambda*c + appctx->alpha*rho*c/(appctx->gamma + c);
207:   }

209:   /*
210:      Restore vectors
211:   */
212:   DMDAVecRestoreArray(da,localU,&u);
213:   DMDAVecRestoreArray(da,Udot,&udot);
214:   DMDAVecRestoreArray(da,F,&f);
215:   DMRestoreLocalVector(da,&localU);
216:   return(0);
217: }

/* ------------------------------------------------------------------- */
222: PetscErrorCode InitialConditions(DM da,Vec U)
223: {
225:   PetscInt       i,xs,xm,Mx;
226:   Field          *u;
227:   PetscReal      hx,x;

230:   DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

232:   hx = 1.0/(PetscReal)(Mx-1);

234:   /*
235:      Get pointers to vector data
236:   */
237:   DMDAVecGetArray(da,U,&u);

239:   /*
240:      Get local grid boundaries
241:   */
242:   DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);

244:   /*
245:      Compute function over the locally owned part of the grid
246:   */
247:   for (i=xs; i<xs+xm; i++) {
248:     x = i*hx;
249:     if (x < 1.0) u[i].rho = 0.0;
250:     else         u[i].rho = 1.0;
251:     u[i].c = PetscCosReal(.5*PETSC_PI*x);
252:   }

254:   /*
255:      Restore vectors
256:   */
257:   DMDAVecRestoreArray(da,U,&u);
258:   return(0);
259: }