Actual source code: ex21.c
2: static char help[] ="Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
3: timestepping. Runtime options include:\n\
4: -M <xg>, where <xg> = number of grid points\n\
5: -debug : Activate debugging printouts\n\
6: -nox : Deactivate x-window graphics\n\
7: -ul : lower bound\n\
8: -uh : upper bound\n\n";
10: /* ------------------------------------------------------------------------
12: This is a variation of ex2.c to solve the PDE
14:             u * u_xx
15:       u_t = ---------
16:             2*(t+1)^2
18: with box constraints on the interior grid points
19: ul <= u(t,x) <= uh with x != 0,1
20: on the domain 0 <= x <= 1, with boundary conditions
21: u(t,0) = t + 1, u(t,1) = 2*t + 2,
22: and initial condition
23: u(0,x) = 1 + x*x.
25: The exact solution is:
26: u(t,x) = (1 + x*x) * (1 + t)
28: By default we use the backward Euler method.
30: ------------------------------------------------------------------------- */
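/*
   Spatial discretization (as implemented in RHSFunction() and RHSJacobian()
   below): second-order centered differences on a uniform grid with spacing
   h = 1/(m-1), giving for each interior point i the ODE

      du_i/dt = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2)
*/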
32: /*
33: Include "petscts.h" to use the PETSc timestepping routines. Note that
34: this file automatically includes "petscsys.h" and other lower-level
35: PETSc include files.
37: Include the "petscdmda.h" to allow us to use the distributed array data
38: structures to manage the parallel grid.
39: */
40: #include <petscts.h>
41: #include <petscdm.h>
42: #include <petscdmda.h>
43: #include <petscdraw.h>
45: /*
46: User-defined application context - contains data needed by the
47: application-provided callback routines.
48: */
49: typedef struct {
50: MPI_Comm comm; /* communicator */
51: DM da; /* distributed array data structure */
52: Vec localwork; /* local ghosted work vector */
53: Vec u_local; /* local ghosted approximate solution vector */
54: Vec solution; /* global exact solution vector */
55: PetscInt m; /* total number of grid points */
56: PetscReal h; /* mesh width: h = 1/(m-1) */
57: PetscBool debug; /* flag (1 indicates activation of debugging printouts) */
58: } AppCtx;
60: /*
61: User-defined routines, provided below.
62: */
63: extern PetscErrorCode InitialConditions(Vec,AppCtx*);
64: extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
65: extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
66: extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
67: extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
68: extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);
70: int main(int argc,char **argv)
71: {
72: AppCtx appctx; /* user-defined application context */
73: TS ts; /* timestepping context */
74: Mat A; /* Jacobian matrix data structure */
75: Vec u; /* approximate solution vector */
76: Vec r; /* residual vector */
77: PetscInt time_steps_max = 1000; /* default max timesteps */
78: PetscReal dt;
79: PetscReal time_total_max = 100.0; /* default max total time */
80: Vec xl,xu; /* Lower and upper bounds on variables */
81: PetscScalar ul=0.0,uh = 3.0;
82: PetscBool mymonitor;
83: PetscReal bounds[] = {1.0, 3.3};
85: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
86: Initialize program and set problem parameters
87: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
89: PetscInitialize(&argc,&argv,(char*)0,help);
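/*
   Give the default draw viewer a fixed plotting range (one {lower,upper}
   pair supplied in bounds[]) so the solution plots produced in Monitor()
   keep a consistent vertical scale.
*/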
90: PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);
92: appctx.comm = PETSC_COMM_WORLD;
93: appctx.m = 60;
94: PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL);
95: PetscOptionsGetScalar(NULL,NULL,"-ul",&ul,NULL);
96: PetscOptionsGetScalar(NULL,NULL,"-uh",&uh,NULL);
97: PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
98: PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor);
99: appctx.h = 1.0/(appctx.m-1.0);
101: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
102: Create vector data structures
103: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
105: /*
106: Create distributed array (DMDA) to manage parallel grid and vectors
107: and to set up the ghost point communication pattern. There are M
108: total grid values spread equally among all the processors.
109: */
110: DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
111: DMSetFromOptions(appctx.da);
112: DMSetUp(appctx.da);
114: /*
115: Extract global and local vectors from DMDA; we use these to store the
116: approximate solution. Then duplicate these for remaining vectors that
117: have the same types.
118: */
119: DMCreateGlobalVector(appctx.da,&u);
120: DMCreateLocalVector(appctx.da,&appctx.u_local);
122: /*
123: Create local work vector for use in evaluating right-hand-side function;
124: create global work vector for storing exact solution.
125: */
126: VecDuplicate(appctx.u_local,&appctx.localwork);
127: VecDuplicate(u,&appctx.solution);
129: /* Create residual vector */
130: VecDuplicate(u,&r);
131: /* Create lower and upper bound vectors */
132: VecDuplicate(u,&xl);
133: VecDuplicate(u,&xu);
134: SetBounds(xl,xu,ul,uh,&appctx);
136: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
137: Create timestepping solver context; set callback routine for
138: right-hand-side function evaluation.
139: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
141: TSCreate(PETSC_COMM_WORLD,&ts);
142: TSSetProblemType(ts,TS_NONLINEAR);
143: TSSetRHSFunction(ts,r,RHSFunction,&appctx);
145: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146: Set optional user-defined monitoring routine
147: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
149: if (mymonitor) {
150: TSMonitorSet(ts,Monitor,&appctx,NULL);
151: }
153: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
154: For nonlinear problems, the user can provide a Jacobian evaluation
155: routine (or use a finite differencing approximation).
157: Create matrix data structure; set Jacobian evaluation routine.
158: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
160: MatCreate(PETSC_COMM_WORLD,&A);
161: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
162: MatSetFromOptions(A);
163: MatSetUp(A);
164: TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);
166: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
167: Set solution vector and initial timestep
168: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
170: dt = appctx.h/2.0;
171: TSSetTimeStep(ts,dt);
173: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
174: Customize timestepping solver:
175: - Set the solution method to be the Backward Euler method.
176: - Set timestepping duration info
177: Then set runtime options, which can override these defaults.
178: For example,
179: -ts_max_steps <maxsteps> -ts_max_time <maxtime>
180: to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
181: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
183: TSSetType(ts,TSBEULER);
184: TSSetMaxSteps(ts,time_steps_max);
185: TSSetMaxTime(ts,time_total_max);
186: TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
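/*
   TS_EXACTFINALTIME_STEPOVER allows the last step to run past the final
   time rather than shrinking the step to land on it exactly.
*/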
187: /* Set lower and upper bound on the solution vector for each time step */
188: TSVISetVariableBounds(ts,xl,xu);
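/*
   With bounds attached, the nonlinear system at each implicit step is
   solved as a variational inequality, so a VI-capable SNES (e.g.
   -snes_type vinewtonrsls, as in the test at the bottom of this file)
   is used to enforce xl <= u <= xu.
*/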
189: TSSetFromOptions(ts);
191: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
192: Solve the problem
193: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
195: /*
196: Evaluate initial conditions
197: */
198: InitialConditions(u,&appctx);
200: /*
201: Run the timestepping solver
202: */
203: TSSolve(ts,u);
205: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
206: Free work space. All PETSc objects should be destroyed when they
207: are no longer needed.
208: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
210: VecDestroy(&r);
211: VecDestroy(&xl);
212: VecDestroy(&xu);
213: TSDestroy(&ts);
214: VecDestroy(&u);
215: MatDestroy(&A);
216: DMDestroy(&appctx.da);
217: VecDestroy(&appctx.localwork);
218: VecDestroy(&appctx.solution);
219: VecDestroy(&appctx.u_local);
221: /*
222: Always call PetscFinalize() before exiting a program. This routine
223: - finalizes the PETSc libraries as well as MPI
224: - provides summary and diagnostic information if certain runtime
225: options are chosen (e.g., -log_view).
226: */
227: PetscFinalize();
228: return 0;
229: }
230: /* --------------------------------------------------------------------- */
231: /*
232: InitialConditions - Computes the solution at the initial time.
234: Input Parameters:
235: u - uninitialized solution vector (global)
236: appctx - user-defined application context
238: Output Parameter:
239: u - vector with solution at initial time (global)
240: */
241: PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
242: {
243: PetscScalar *u_localptr,h = appctx->h,x;
244: PetscInt i,mybase,myend;
246: /*
247: Determine starting point of each processor's range of
248: grid values.
249: */
250: VecGetOwnershipRange(u,&mybase,&myend);
252: /*
253: Get a pointer to vector data.
254: - For default PETSc vectors, VecGetArray() returns a pointer to
255: the data array. Otherwise, the routine is implementation dependent.
256: - You MUST call VecRestoreArray() when you no longer need access to
257: the array.
258: - Note that the Fortran interface to VecGetArray() differs from the
259: C version. See the users manual for details.
260: */
261: VecGetArray(u,&u_localptr);
263: /*
264: We initialize the solution array by simply writing the solution
265: directly into the array locations. Alternatively, we could use
266: VecSetValues() or VecSetValuesLocal().
267: */
268: for (i=mybase; i<myend; i++) {
269: x = h*(PetscReal)i; /* current location in global grid */
270: u_localptr[i-mybase] = 1.0 + x*x;
271: }
273: /*
274: Restore vector
275: */
276: VecRestoreArray(u,&u_localptr);
278: /*
279: Print debugging information if desired
280: */
281: if (appctx->debug) {
282: PetscPrintf(appctx->comm,"initial guess vector\n");
283: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
284: }
286: return 0;
287: }
289: /* --------------------------------------------------------------------- */
290: /*
291: SetBounds - Sets the lower and upper bounds on the interior points
293: Input parameters:
294: xl - vector of lower bounds
295: xu - vector of upper bounds
296: ul - constant lower bound for all variables
297: uh - constant upper bound for all variables
298: appctx - Application context
299: */
300: PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh,AppCtx *appctx)
301: {
302: PetscScalar *l,*u;
303: PetscMPIInt rank,size;
304: PetscInt localsize;
307: VecSet(xl,ul);
308: VecSet(xu,uh);
309: VecGetLocalSize(xl,&localsize);
310: VecGetArray(xl,&l);
311: VecGetArray(xu,&u);
313: MPI_Comm_rank(appctx->comm,&rank);
314: MPI_Comm_size(appctx->comm,&size);
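  /*
     The first and last grid points carry the Dirichlet boundary data, so they
     are left unconstrained by setting their bounds to -/+ infinity; only the
     interior points keep the box constraints ul <= u <= uh.
  */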
315: if (rank == 0) {
316: l[0] = -PETSC_INFINITY;
317: u[0] = PETSC_INFINITY;
318: }
319: if (rank == size-1) {
320: l[localsize-1] = -PETSC_INFINITY;
321: u[localsize-1] = PETSC_INFINITY;
322: }
323: VecRestoreArray(xl,&l);
324: VecRestoreArray(xu,&u);
325: return 0;
326: }
328: /* --------------------------------------------------------------------- */
329: /*
330: ExactSolution - Computes the exact solution at a given time.
332: Input Parameters:
333: t - current time
334: solution - vector in which exact solution will be computed
335: appctx - user-defined application context
337: Output Parameter:
338: solution - vector with the newly computed exact solution
339: */
340: PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
341: {
342: PetscScalar *s_localptr,h = appctx->h,x;
343: PetscInt i,mybase,myend;
345: /*
346: Determine starting and ending points of each processor's
347: range of grid values
348: */
349: VecGetOwnershipRange(solution,&mybase,&myend);
351: /*
352: Get a pointer to vector data.
353: */
354: VecGetArray(solution,&s_localptr);
356: /*
357: Simply write the solution directly into the array locations.
358: Alternatively, we could use VecSetValues() or VecSetValuesLocal().
359: */
360: for (i=mybase; i<myend; i++) {
361: x = h*(PetscReal)i;
362: s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
363: }
365: /*
366: Restore vector
367: */
368: VecRestoreArray(solution,&s_localptr);
369: return 0;
370: }
371: /* --------------------------------------------------------------------- */
372: /*
373: Monitor - User-provided routine to monitor the solution computed at
374: each timestep. This example plots the solution and computes the
375: error in two different norms.
377: Input Parameters:
378: ts - the timestep context
379: step - the count of the current step (with 0 meaning the
380: initial condition)
381: time - the current time
382: u - the solution at this timestep
383: ctx - the user-provided context for this monitoring routine.
384: In this case we use the application context which contains
385: information about the problem size, workspace and the exact
386: solution.
387: */
388: PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
389: {
390: AppCtx *appctx = (AppCtx*) ctx; /* user-defined application context */
391: PetscReal en2,en2s,enmax;
392: PetscDraw draw;
394: /*
395: We use the default X windows viewer
396: PETSC_VIEWER_DRAW_(appctx->comm)
397: that is associated with the current communicator. This saves
398: the effort of calling PetscViewerDrawOpen() to create the window.
399: Note that if we wished to plot several items in separate windows we
400: would create each viewer with PetscViewerDrawOpen() and store them in
401: the application context, appctx.
403: Double buffering makes graphics look better.
404: */
405: PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
406: PetscDrawSetDoubleBuffer(draw);
407: VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));
409: /*
410: Compute the exact solution at this timestep
411: */
412: ExactSolution(time,appctx->solution,appctx);
414: /*
415: Print debugging information if desired
416: */
417: if (appctx->debug) {
418: PetscPrintf(appctx->comm,"Computed solution vector\n");
419: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
420: PetscPrintf(appctx->comm,"Exact solution vector\n");
421: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
422: }
424: /*
425: Compute the 2-norm and max-norm of the error
426: */
427: VecAXPY(appctx->solution,-1.0,u);
428: VecNorm(appctx->solution,NORM_2,&en2);
429: en2s = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by the grid spacing */
430: VecNorm(appctx->solution,NORM_MAX,&enmax);
432: /*
433: PetscPrintf() causes only the first processor in this
434: communicator to print the timestep information.
435: */
436: PetscPrintf(appctx->comm,"Timestep %D: time = %g, 2-norm error = %g, max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax);
438: /*
439: Print debugging information if desired
440: */
441: /* if (appctx->debug) {
442: PetscPrintf(appctx->comm,"Error vector\n");
443: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
444: } */
445: return 0;
446: }
447: /* --------------------------------------------------------------------- */
448: /*
449: RHSFunction - User-provided routine that evaluates the right-hand-side
450: function of the ODE. This routine is set in the main program by
451: calling TSSetRHSFunction(). We compute:
452: global_out = F(global_in)
454: Input Parameters:
455: ts - timestepping context
456: t - current time
457: global_in - vector containing the current iterate
458: ctx - (optional) user-provided context for function evaluation.
459: In this case we use the appctx defined above.
461: Output Parameter:
462: global_out - vector containing the newly evaluated function
463: */
464: PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
465: {
466: AppCtx *appctx = (AppCtx*) ctx; /* user-defined application context */
467: DM da = appctx->da; /* distributed array */
468: Vec local_in = appctx->u_local; /* local ghosted input vector */
469: Vec localwork = appctx->localwork; /* local ghosted work vector */
470: PetscInt i,localsize;
471: PetscMPIInt rank,size;
472: PetscScalar *copyptr,sc;
473: const PetscScalar *localptr;
475: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
476: Get ready for local function computations
477: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
478: /*
479: Scatter ghost points to local vector, using the 2-step process
480: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
481: By placing code between these two statements, computations can be
482: done while messages are in transition.
483: */
484: DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
485: DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);
487: /*
488: Access directly the values in our local INPUT work array
489: */
490: VecGetArrayRead(local_in,&localptr);
492: /*
493: Access directly the values in our local OUTPUT work array
494: */
495: VecGetArray(localwork,&copyptr);
497: sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
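  /*
     sc collects the constant factors: the 1/(2*(1+t)^2) coefficient of the
     PDE and the 1/h^2 of the centered second difference applied below.
  */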
499: /*
500: Evaluate our function on the nodes owned by this processor
501: */
502: VecGetLocalSize(local_in,&localsize);
504: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
505: Compute entries for the locally owned part
506: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
508: /*
509: Handle boundary conditions: This is done by using the boundary condition
510: u(t,boundary) = g(t,boundary)
511: for some function g. Now take the derivative with respect to t to obtain
512: u_{t}(t,boundary) = g_{t}(t,boundary)
514: In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
515: and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
516: */
517: MPI_Comm_rank(appctx->comm,&rank);
518: MPI_Comm_size(appctx->comm,&size);
519: if (rank == 0) copyptr[0] = 1.0;
520: if (rank == size-1) copyptr[localsize-1] = (t < .5) ? 2.0 : 0.0;
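  /*
     Note that the forcing at the right boundary switches from 2 to 0 at
     t = 0.5, so the computed solution departs from the exact solution
     u(t,1) = 2*(t+1) from then on; presumably this is what pushes the
     solution against the bounds imposed through TSVISetVariableBounds().
  */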
522: /*
523: Handle the interior nodes where the PDE is replaced by finite
524: difference operators.
525: */
526: for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
528: /*
529: Restore vectors
530: */
531: VecRestoreArrayRead(local_in,&localptr);
532: VecRestoreArray(localwork,&copyptr);
534: /*
535: Insert values from the local OUTPUT vector into the global
536: output vector
537: */
538: DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
539: DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);
541: /* Print debugging information if desired */
542: /* if (appctx->debug) {
543: PetscPrintf(appctx->comm,"RHS function vector\n");
544: VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
545: } */
547: return 0;
548: }
549: /* --------------------------------------------------------------------- */
550: /*
551: RHSJacobian - User-provided routine to compute the Jacobian of
552: the nonlinear right-hand-side function of the ODE.
554: Input Parameters:
555: ts - the TS context
556: t - current time
557: global_in - global input vector
558: ctx - optional user-defined context, as set by TSSetRHSJacobian()
560: Output Parameters:
561: AA - Jacobian matrix
562: B - optionally different matrix from which the preconditioner is constructed
565: Notes:
566: RHSJacobian computes entries for the locally owned part of the Jacobian.
567: - Currently, all PETSc parallel matrix formats are partitioned by
568: contiguous chunks of rows across the processors.
569: - Each processor needs to insert only elements that it owns
570: locally (but any non-local elements will be sent to the
571: appropriate processor during matrix assembly).
572: - Always specify global row and columns of matrix entries when
573: using MatSetValues().
574: - Here, we set all entries for a particular row at once.
575: - Note that MatSetValues() uses 0-based row and column numbers
576: in Fortran as well as in C.
577: */
578: PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat B,void *ctx)
579: {
580: AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */
581: Vec local_in = appctx->u_local; /* local ghosted input vector */
582: DM da = appctx->da; /* distributed array */
583: PetscScalar v[3],sc;
584: const PetscScalar *localptr;
585: PetscInt i,mstart,mend,mstarts,mends,idx[3],is;
587: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
588: Get ready for local Jacobian computations
589: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
590: /*
591: Scatter ghost points to local vector, using the 2-step process
592: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
593: By placing code between these two statements, computations can be
594: done while messages are in transition.
595: */
596: DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
597: DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);
599: /*
600: Get pointer to vector data
601: */
602: VecGetArrayRead(local_in,&localptr);
604: /*
605: Get starting and ending locally owned rows of the matrix
606: */
607: MatGetOwnershipRange(B,&mstarts,&mends);
608: mstart = mstarts; mend = mends;
610: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
611: Compute entries for the locally owned part of the Jacobian.
612: - Currently, all PETSc parallel matrix formats are partitioned by
613: contiguous chunks of rows across the processors.
614: - Each processor needs to insert only elements that it owns
615: locally (but any non-local elements will be sent to the
616: appropriate processor during matrix assembly).
617: - Here, we set all entries for a particular row at once.
618: - We can set matrix entries using either
619: MatSetValuesLocal() or MatSetValues().
620: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
622: /*
623: Set matrix rows corresponding to boundary data
624: */
625: if (mstart == 0) {
626: v[0] = 0.0;
627: MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES);
628: mstart++;
629: }
630: if (mend == appctx->m) {
631: mend--;
632: v[0] = 0.0;
633: MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES);
634: }
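  /*
     The boundary rows just set are zero because the boundary equations
     du/dt = g_t(t,boundary) do not depend on u; inserting an explicit 0.0
     keeps a diagonal entry in the assembled matrix.
  */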
636: /*
637: Set matrix rows corresponding to interior data. We construct the
638: matrix one row at a time.
639: */
640: sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
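  /*
     For an interior row i, F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i), hence
        dF_i/du_{i-1} = sc*u_i
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc*u_i
     which are the three values inserted in each row below.
  */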
641: for (i=mstart; i<mend; i++) {
642: idx[0] = i-1; idx[1] = i; idx[2] = i+1;
643: is = i - mstart + 1;
644: v[0] = sc*localptr[is];
645: v[1] = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
646: v[2] = sc*localptr[is];
647: MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES);
648: }
650: /*
651: Restore vector
652: */
653: VecRestoreArrayRead(local_in,&localptr);
655: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
656: Complete the matrix assembly process and set some options
657: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
658: /*
659: Assemble matrix, using the 2-step process:
660: MatAssemblyBegin(), MatAssemblyEnd()
661: Computations can be done while messages are in transition
662: by placing code between these two statements.
663: */
664: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
665: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
667: /*
668: Set an option to indicate that we will never add a new nonzero location
669: to the matrix. If we do, it will generate an error.
670: */
671: MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
673: return 0;
674: }
676: /*TEST
678: test:
679: args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
680: requires: !single
682: TEST*/