Actual source code: ex19.c
static char help[] = "Tests reusing MPI parallel matrices and MatGetValues().\n\
To test the parallel matrix assembly, this example intentionally lays out\n\
the matrix across processors differently from the way it is assembled.\n\
This example uses bilinear elements on the unit square. Input arguments are:\n\
  -m <size>    : problem size\n\
  -get_values  : test MatGetValues() by extracting a submatrix\n\n";
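
/* Example run (launcher and binary name are illustrative):
     mpiexec -n 4 ./ex19 -m 5 -get_values */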

#include <petscmat.h>
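
/* Fills the 4x4 element matrix Ke (stored row-major) for one bilinear element;
   H is passed as the element area h*h below.  Each row of Ke sums to zero
   (H/6 - H/8 + H/12 - H/8 = 0), so the assembled matrix annihilates the
   constant vector; main() checks exactly that with MatMult() */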
int FormElementStiffness(PetscReal H,PetscScalar *Ke)
{
  Ke[0]  = H/6.0;   Ke[1]  = -.125*H; Ke[2]  = H/12.0;  Ke[3]  = -.125*H;
  Ke[4]  = -.125*H; Ke[5]  = H/6.0;   Ke[6]  = -.125*H; Ke[7]  = H/12.0;
  Ke[8]  = H/12.0;  Ke[9]  = -.125*H; Ke[10] = H/6.0;   Ke[11] = -.125*H;
  Ke[12] = -.125*H; Ke[13] = H/12.0;  Ke[14] = -.125*H; Ke[15] = H/6.0;
  return 0;
}

int main(int argc,char **args)
{
  Mat         C;
  Vec         u,b;
  PetscMPIInt size,rank;
  PetscInt    i,m = 5,N,start,end,M,idx[4];
  PetscInt    j,nrsub,ncsub,*rsub,*csub,mystart,myend;
  PetscBool   flg;
  PetscScalar one = 1.0,Ke[16],*vals;
  PetscReal   h,norm;

  PetscInitialize(&argc,&args,(char*)0,help);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);

  N = (m+1)*(m+1); /* dimension of matrix */
  M = m*m;         /* number of elements */
  h = 1.0/m;       /* mesh width */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);

  /* Create stiffness matrix */
  MatCreate(PETSC_COMM_WORLD,&C);
  MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,N,N);
  MatSetFromOptions(C);
  MatSetUp(C);
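
  /* Partition the M elements among the ranks in contiguous blocks: every rank
     gets M/size elements and the first M%size ranks get one extra */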
  start = rank*(M/size) + ((M%size) < rank ? (M%size) : rank);
  end   = start + M/size + ((M%size) > rank);

  /* Form the element stiffness for the Laplacian */
  FormElementStiffness(h*h,Ke);
  for (i=start; i<end; i++) {
    /* Global node numbers of the four corners of element i, counterclockwise
       from the lower-left corner; nodes live on an (m+1) x (m+1) grid */
    idx[0] = (m+1)*(i/m) + (i % m);
    idx[1] = idx[0]+1; idx[2] = idx[1] + m + 1; idx[3] = idx[2] - 1;
    MatSetValues(C,4,idx,4,idx,Ke,ADD_VALUES);
  }
  MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

  /* Assemble the matrix again */
  MatZeroEntries(C);
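
  /* MatZeroEntries() zeros the values but keeps the previously allocated
     nonzero structure, so the second assembly below reuses the storage set up
     by the first one; that reuse is what this example tests */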

  for (i=start; i<end; i++) {
    /* Same element-to-node connectivity as in the first assembly */
    idx[0] = (m+1)*(i/m) + (i % m);
    idx[1] = idx[0]+1; idx[2] = idx[1] + m + 1; idx[3] = idx[2] - 1;
    MatSetValues(C,4,idx,4,idx,Ke,ADD_VALUES);
  }
  MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

  /* Create test vectors */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,N);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecSet(u,one);

  /* Check the result: the rows of Ke sum to zero, so C applied to the
     constant vector of ones should be (numerically) zero */
  MatMult(C,u,b);
  VecNorm(b,NORM_2,&norm);
  if (norm > PETSC_SQRT_MACHINE_EPSILON) {
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error b %g should be near 0\n",(double)norm);
  }

  /* Now test MatGetValues() */
  PetscOptionsHasName(NULL,NULL,"-get_values",&flg);
  if (flg) {
    MatGetOwnershipRange(C,&mystart,&myend);
    nrsub = myend - mystart; ncsub = 4;
    PetscMalloc1(nrsub*ncsub,&vals);
    PetscMalloc1(nrsub,&rsub);
    PetscMalloc1(ncsub,&csub);
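    /* Select the locally owned rows in reverse order, and four columns at
       decreasing even offsets (8, 6, 4, 2) above the first locally owned row */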
    for (i=myend-1; i>=mystart; i--) rsub[myend-i-1] = i;
    for (i=0; i<ncsub; i++) csub[i] = 2*(ncsub-i) + mystart;
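    /* MatGetValues() can only retrieve entries from rows owned by the calling
       process, so rsub is restricted to [mystart,myend) */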
    MatGetValues(C,nrsub,rsub,ncsub,csub,vals);
    MatView(C,PETSC_VIEWER_STDOUT_WORLD);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"processor number %d: start=%" PetscInt_FMT ", end=%" PetscInt_FMT ", mystart=%" PetscInt_FMT ", myend=%" PetscInt_FMT "\n",rank,start,end,mystart,myend);
    for (i=0; i<nrsub; i++) {
      for (j=0; j<ncsub; j++) {
        if (PetscImaginaryPart(vals[i*ncsub+j]) != 0.0) {
          PetscSynchronizedPrintf(PETSC_COMM_WORLD," C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g + %g i\n",rsub[i],csub[j],(double)PetscRealPart(vals[i*ncsub+j]),(double)PetscImaginaryPart(vals[i*ncsub+j]));
        } else {
          PetscSynchronizedPrintf(PETSC_COMM_WORLD," C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g\n",rsub[i],csub[j],(double)PetscRealPart(vals[i*ncsub+j]));
        }
      }
    }
    PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
    PetscFree(rsub);
    PetscFree(csub);
    PetscFree(vals);
  }

  /* Free data structures */
  VecDestroy(&u);
  VecDestroy(&b);
  MatDestroy(&C);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 4

TEST*/