Actual source code: ex7.c
petsc-3.7.4 2016-10-02
static char help[] = "Nonlinear reaction-diffusion system u_t = u_xx + R(u) with N coupled void-size fields.\n";

/*
   Solves

       u_t = u_xx + R(u)

   where u(t,x,i), i = 0, ..., N-1, and i+1 represents the void size.

   ex9.c is the 2d version of this code.
*/
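/*
   Example usage (illustrative options; any DMDA/TS option applies):

       mpiexec -n 2 ./ex7 -N 3 -da_grid_x 40 -ts_monitor -ts_max_steps 500

   -N sets the number of void-size fields, -da_grid_x the number of grid
   points (see the DMDACreate1d() call below), and the remaining options
   are standard TS options picked up by TSSetFromOptions().
*/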
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

/*
   User-defined data structures and routines
*/

/* AppCtx */
typedef struct {
  PetscInt N;                  /* number of dofs */
} AppCtx;

extern PetscErrorCode IFunction(TS,PetscReal,Vec,Vec,Vec,void*);
extern PetscErrorCode InitialConditions(DM,Vec);
extern PetscErrorCode IJacobian(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*);
int main(int argc,char **argv)
{
  TS             ts;                   /* timestepping solver */
  Vec            U;                    /* solution vector */
  Mat            J;                    /* Jacobian matrix */
  PetscInt       maxsteps = 1000;
  DM             da;
  AppCtx         user;
  PetscInt       i;
  char           Name[16];

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscInitialize(&argc,&argv,(char*)0,help);
  user.N = 1;
  PetscOptionsGetInt(NULL,NULL,"-N",&user.N,NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_MIRROR,-8,user.N,1,NULL,&da);
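  /*
     The negative global size (-8) means the default of 8 grid points can
     be overridden at runtime with -da_grid_x; each grid point carries
     user.N degrees of freedom and the stencil width is 1.
  */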
  for (i=0; i<user.N; i++) {
    PetscSNPrintf(Name,16,"Void size %d",(int)(i+1));
    DMDASetFieldName(da,i,Name);
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract the global vector from the DMDA and create the Jacobian matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&U);
  DMSetMatType(da,MATAIJ);
  DMCreateMatrix(da,&J);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetType(ts,TSARKIMEX);
  TSSetDM(ts,da);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetIFunction(ts,NULL,IFunction,&user);
  TSSetIJacobian(ts,J,J,IJacobian,&user);
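
  /*
     Note: since only an IFunction/IJacobian is supplied (no RHSFunction),
     TSARKIMEX treats the whole system implicitly; an explicit splitting
     could be introduced with TSSetRHSFunction() if desired.
  */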
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  InitialConditions(da,U);
  TSSetSolution(ts,U);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSetInitialTimeStep(ts,0.0,.001);
  TSSetDuration(ts,maxsteps,1.0);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);
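
  /*
     Because TSSetFromOptions() is called last, the defaults above can be
     changed at runtime, e.g. with -ts_dt, -ts_max_steps, -ts_final_time,
     or -ts_type.
  */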
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Integrate the system in time
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSolve(ts,U);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecDestroy(&U);
  MatDestroy(&J);
  TSDestroy(&ts);
  DMDestroy(&da);

  PetscFinalize();
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit (residual) function F(t,U,Udot).

   Input Parameters:
.  ts    - the TS context
.  ftime - current time
.  U     - input vector
.  Udot  - time derivative of the input vector
.  ptr   - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
*/
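/*
   For reference, the diffusion part of the residual assembled below is

       F_{i,c} = udot_{i,c} - (u_{i-1,c} - 2 u_{i,c} + u_{i+1,c})/hx^2

   with the reaction and forcing contributions added on top, i.e. the
   implicit form F(t,U,Udot) = 0 of u_t = u_xx + R(u).
*/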
PetscErrorCode IFunction(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  DM             da;
  PetscInt       i,c,Mx,xs,xm,N;
  PetscReal      hx,sx,x;
  PetscScalar    uxx;
  PetscScalar    **u,**f,**udot;
  Vec            localU;

  TSGetDM(ts,&da);
  DMGetLocalVector(da,&localU);
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,&N,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 1.0/(PetscReal)(Mx-1); sx = 1.0/(hx*hx);
  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
  DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArrayDOF(da,localU,&u);
  DMDAVecGetArrayDOF(da,Udot,&udot);
  DMDAVecGetArrayDOF(da,F,&f);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);
  /*
     Compute function over the locally owned part of the grid
  */
  for (i=xs; i<xs+xm; i++) {
    x = i*hx;

    /* diffusion term: centered second difference in x for every field c */
    for (c=0; c<N; c++) {
      uxx     = (-2.0*u[i][c] + u[i-1][c] + u[i+1][c])*sx;
      f[i][c] = udot[i][c] - uxx;
    }

    /* reaction terms */
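    /*
       One way to read these terms (an interpretation, not stated in the
       source): with rate constant 500, field c reacts with itself to
       produce field c+1 (the u[i][c]*u[i][c] terms), and fields c and c+1
       react to produce field c+2 (the u[i][c]*u[i][c+1] terms).  Since
       F = Udot - RHS, a positive contribution to f consumes a field and a
       negative one produces it.
    */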
    for (c=0; c<N/3; c++) {
      f[i][c]   +=  500*u[i][c]*u[i][c] + 500*u[i][c]*u[i][c+1];
      f[i][c+1] += -500*u[i][c]*u[i][c] + 500*u[i][c]*u[i][c+1];
      f[i][c+2] -=  500*u[i][c]*u[i][c+1];
    }

    /* forcing term, applied to the first field only */
    f[i][0] -= 5*PetscExpScalar((1.0 - x)*(1.0 - x));
  }
  /*
     Restore vectors
  */
  DMDAVecRestoreArrayDOF(da,localU,&u);
  DMDAVecRestoreArrayDOF(da,Udot,&udot);
  DMDAVecRestoreArrayDOF(da,F,&f);
  DMRestoreLocalVector(da,&localU);
  return 0;
}
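
/* ------------------------------------------------------------------- */
/*
   IJacobian - Computes the Jacobian J = dF/dU + a*dF/dUdot of the implicit
   function defined in IFunction(), where a is the shift supplied by the TS
   integrator.  The diffusion rows get a + 2/hx^2 on the diagonal and
   -1/hx^2 on the two neighbors; the reaction rows hold the derivatives of
   the quadratic reaction terms.
*/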
PetscErrorCode IJacobian(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat J,Mat Jpre,void *ctx)
{
  PetscInt       i,c,Mx,xs,xm,nc;
  DM             da;
  MatStencil     col[3],row;
  PetscScalar    vals[3],hx,sx;
  AppCtx         *user = (AppCtx*)ctx;
  PetscInt       N = user->N;
  PetscScalar    **u;

  TSGetDM(ts,&da);
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
  DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);

  hx = 1.0/(PetscReal)(Mx-1); sx = 1.0/(hx*hx);

  DMDAVecGetArrayDOF(da,U,&u);

  MatZeroEntries(Jpre);
  for (i=xs; i<xs+xm; i++) {
    /* diffusion rows: shift a on the diagonal plus the three-point Laplacian */
    for (c=0; c<N; c++) {
      nc        = 0;
      row.c     = c;   row.i = i;
      col[nc].c = c;   col[nc].i = i-1; vals[nc++] = -sx;
      col[nc].c = c;   col[nc].i = i;   vals[nc++] = 2.0*sx + a;
      col[nc].c = c;   col[nc].i = i+1; vals[nc++] = -sx;
      MatSetValuesStencil(Jpre,1,&row,nc,col,vals,ADD_VALUES);
    }
    /* reaction rows: derivatives of the quadratic reaction terms in IFunction() */
    for (c=0; c<N/3; c++) {
      nc        = 0;
      row.c     = c;   row.i = i;
      col[nc].c = c;   col[nc].i = i; vals[nc++] = 1000*u[i][c] + 500*u[i][c+1];
      col[nc].c = c+1; col[nc].i = i; vals[nc++] = 500*u[i][c];
      MatSetValuesStencil(Jpre,1,&row,nc,col,vals,ADD_VALUES);

      nc        = 0;
      row.c     = c+1; row.i = i;
      col[nc].c = c;   col[nc].i = i; vals[nc++] = -1000*u[i][c] + 500*u[i][c+1];
      col[nc].c = c+1; col[nc].i = i; vals[nc++] = 500*u[i][c];
      MatSetValuesStencil(Jpre,1,&row,nc,col,vals,ADD_VALUES);

      nc        = 0;
      row.c     = c+2; row.i = i;
      col[nc].c = c;   col[nc].i = i; vals[nc++] = -500*u[i][c+1];
      col[nc].c = c+1; col[nc].i = i; vals[nc++] = -500*u[i][c];
      MatSetValuesStencil(Jpre,1,&row,nc,col,vals,ADD_VALUES);
    }
  }
  MatAssemblyBegin(Jpre,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Jpre,MAT_FINAL_ASSEMBLY);
  if (J != Jpre) {
    MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);
  }
  DMDAVecRestoreArrayDOF(da,U,&u);
  return 0;
}

/* ------------------------------------------------------------------- */
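/*
   InitialConditions - Sets the initial state; every field is set to zero
   here (a cosine profile is left commented out in the loop below).
*/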
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt       i,c,xs,xm,Mx,N;
  PetscScalar    **u;
  PetscReal      hx,x;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,&N,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 1.0/(PetscReal)(Mx-1);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArrayDOF(da,U,&u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);

  /*
     Compute function over the locally owned part of the grid
  */
  for (i=xs; i<xs+xm; i++) {
    x = i*hx;
    for (c=0; c<N; c++) u[i][c] = 0.0; /* PetscCosScalar(PETSC_PI*x); */
  }

  /*
     Restore vectors
  */
  DMDAVecRestoreArrayDOF(da,U,&u);
  return 0;
}