/* Actual source code: ex12.c */
static char help[] = "Tests the use of MatZeroRows() for parallel matrices.\n\
This example also tests the use of MatDuplicate() for both MPIAIJ and MPIBAIJ matrices";

#include <petscmat.h>

extern PetscErrorCode TestMatZeroRows_Basic(Mat,IS,PetscScalar);
extern PetscErrorCode TestMatZeroRows_with_no_allocation(Mat,IS,PetscScalar);
10: int main(int argc,char **args)
11: {
12: Mat A;
13: PetscInt i,j,m = 3,n,Ii,J,Imax;
14: PetscMPIInt rank,size;
15: PetscScalar v,diag=-4.0;
16: IS is;
18: PetscInitialize(&argc,&args,(char*)0,help);
19: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
20: MPI_Comm_size(PETSC_COMM_WORLD,&size);
21: n = 2*size;
23: /* create A Square matrix for the five point stencil,YET AGAIN*/
24: MatCreate(PETSC_COMM_WORLD,&A);
25: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
26: MatSetFromOptions(A);
27: MatSetUp(A);
28: for (i=0; i<m; i++) {
29: for (j=2*rank; j<2*rank+2; j++) {
30: v = -1.0; Ii = j + n*i;
31: if (i>0) {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
32: if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
33: if (j>0) {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
34: if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
35: v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
36: }
37: }
38: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
39: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
41: /* Create AN IS required by MatZeroRows() */
42: Imax = n*rank; if (Imax>= n*m -m - 1) Imax = m*n - m - 1;
43: ISCreateStride(PETSC_COMM_SELF,m,Imax,1,&is);
45: TestMatZeroRows_Basic(A,is,0.0);
46: TestMatZeroRows_Basic(A,is,diag);
48: TestMatZeroRows_with_no_allocation(A,is,0.0);
49: TestMatZeroRows_with_no_allocation(A,is,diag);
51: MatDestroy(&A);
53: /* Now Create a rectangular matrix with five point stencil (app)
54: n+size is used so that this dimension is always divisible by size.
55: This way, we can always use bs = size for any number of procs */
56: MatCreate(PETSC_COMM_WORLD,&A);
57: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*(n+size));
58: MatSetFromOptions(A);
59: MatSetUp(A);
60: for (i=0; i<m; i++) {
61: for (j=2*rank; j<2*rank+2; j++) {
62: v = -1.0; Ii = j + n*i;
63: if (i>0) {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
64: if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
65: if (j>0) {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
66: if (j<n+size-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
67: v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
68: }
69: }
70: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
71: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
73: TestMatZeroRows_Basic(A,is,0.0);
74: TestMatZeroRows_Basic(A,is,diag);
76: MatDestroy(&A);
77: ISDestroy(&is);
78: PetscFinalize();
79: return 0;
80: }
82: PetscErrorCode TestMatZeroRows_Basic(Mat A,IS is,PetscScalar diag)
83: {
84: Mat B;
85: PetscBool keepnonzeropattern;
87: /* Now copy A into B, and test it with MatZeroRows() */
88: MatDuplicate(A,MAT_COPY_VALUES,&B);
90: PetscOptionsHasName(NULL,NULL,"-keep_nonzero_pattern",&keepnonzeropattern);
91: if (keepnonzeropattern) {
92: MatSetOption(B,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE);
93: }
95: MatZeroRowsIS(B,is,diag,0,0);
96: MatView(B,PETSC_VIEWER_STDOUT_WORLD);
97: MatDestroy(&B);
98: return 0;
99: }
101: PetscErrorCode TestMatZeroRows_with_no_allocation(Mat A,IS is,PetscScalar diag)
102: {
103: Mat B;
105: /* Now copy A into B, and test it with MatZeroRows() */
106: MatDuplicate(A,MAT_COPY_VALUES,&B);
107: /* Set this flag after assembly. This way, it affects only MatZeroRows() */
108: MatSetOption(B,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);
110: MatZeroRowsIS(B,is,diag,0,0);
111: MatView(B,PETSC_VIEWER_STDOUT_WORLD);
112: MatDestroy(&B);
113: return 0;
114: }
/*TEST

   test:
      nsize: 2
      filter: grep -v "MPI processes"

   test:
      suffix: 2
      nsize: 3
      args: -mat_type mpibaij -mat_block_size 3
      filter: grep -v "MPI processes"

   test:
      suffix: 3
      nsize: 3
      args: -mat_type mpiaij -keep_nonzero_pattern
      filter: grep -v "MPI processes"

   test:
      suffix: 4
      nsize: 3
      args: -keep_nonzero_pattern -mat_type mpibaij -mat_block_size 3
      filter: grep -v "MPI processes"

TEST*/