Actual source code: ex12.c


static char help[] = "Tests PetscObjectSetOptions() for TS object\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.
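    (Check: u_t = 1 + x*x and u_xx = 2*(1 + t), so u * u_xx / (2*(t+1)^2)
     = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2) = 1 + x*x = u_t.)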

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator */
  DM        da;             /* distributed array data structure */
  Vec       localwork;      /* local ghosted work vector */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global exact solution vector */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscOptions   options,optionscopy;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);

  PetscOptionsCreate(&options);
  PetscOptionsSetValue(options,"-ts_monitor","ascii");
  PetscOptionsSetValue(options,"-snes_monitor","ascii");
  PetscOptionsSetValue(options,"-ksp_monitor","ascii");
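
  /*
     The same database could also be populated from an options string, e.g.
        PetscOptionsInsertString(options,"-ts_view");
     (not done in this test).
  */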

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscOptionsGetInt(options,NULL,"-M",&appctx.m,NULL);
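
  /*
     Note that -M is looked up in the local database created above, not in
     the global command-line database, so it takes effect only if it has been
     inserted into 'options', e.g. via PetscOptionsSetValue(options,"-M","100").
  */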

  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
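     Each grid point stores one unknown (dof = 1), and the stencil width of 1
     provides the single ghost value needed by the three-point stencil.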
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
  PetscObjectSetOptions((PetscObject)appctx.da,options);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  PetscObjectSetOptions((PetscObject)ts,options);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  PetscObjectSetOptions((PetscObject)A,options);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  TSSetTimeStep(ts,dt);
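  /* with the default m = 60 this gives h = 1/59 ~ 0.0169 and dt ~ 0.0085 */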

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
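  /* TS_EXACTFINALTIME_STEPOVER lets the final step run past the requested
     end time rather than shrinking the last step to land on it exactly */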
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);
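
  /*
     One could optionally report how far the integration ran; a minimal
     sketch using TSGetTime() and TSGetStepNumber() (kept as a comment so
     the test output is unchanged):

       PetscReal tfinal;
       PetscInt  steps;
       TSGetTime(ts,&tfinal);
       TSGetStepNumber(ts,&steps);
       PetscPrintf(PETSC_COMM_WORLD,"%" PetscInt_FMT " steps to t = %g\n",steps,(double)tfinal);
  */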

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscObjectGetOptions((PetscObject)ts,&optionscopy);
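
  /*
     Since the TS was given 'options' above, PetscObjectGetOptions() should
     return that same database, so optionscopy can be compared against
     options to check PetscObjectSetOptions().
  */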

  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);
  PetscOptionsDestroy(&options);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
      Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in,&localptr);

  /*
      Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (rank == 0)      copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
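  /*
     Each interior entry applies the centered difference
        u_xx ~ (u[i+1] - 2*u[i] + u[i-1]) / h^2,
     so copyptr[i] = u[i] * u_xx / (2*(1+t)^2), matching the PDE; recall
     sc = 1/(h^2 * 2*(1+t)^2).
  */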
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and column indices of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;      /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(BB,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
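  /*
     For F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i), row i of the Jacobian
     has the three nonzeros
        dF_i/du_{i-1} = sc*u_i,
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i),
        dF_i/du_{i+1} = sc*u_i,
     which are the values v[0], v[1], v[2] set below.
  */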
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES);
  }

  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
  if (BB != AA) {
    MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY);
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}

/*TEST

    test:
      requires: !single

TEST*/
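
/*
   For example, after building one might run this test as
       ./ex12
   or in parallel as
       mpiexec -n 2 ./ex12
*/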