#include "src/mat/impls/bdiag/seq/bdiag.h"

/*
   Mat_MPIBDiag - Parallel, block-diagonal format, where each diagonal
   element consists of a square block of size bs x bs.  Dense storage
   within each block is in column-major order.

   For now, the parallel part is just a copy of the Mat_MPIAIJ
   parallel data structure.
*/
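
/*
   Illustration only (not part of the PETSc API): with the column-major
   storage described above, entry (i,j) of a single dense bs x bs
   diagonal block lives at offset j*bs + i, so the entries of each
   column are contiguous in memory.  For example, with bs = 3:

       PetscScalar block[9];
       PetscInt    bs = 3,i = 1,j = 2;
       PetscScalar a_ij = block[j*bs + i];
*/
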
typedef struct {
  PetscInt      brstart,brend;       /* block starting and ending local rows */
  Mat           A;                   /* local matrix */
  PetscInt      gnd;                 /* number of global diagonals */
  PetscInt      *gdiag;              /* global matrix diagonal numbers */
  PetscMPIInt   size;                /* size of communicator */
  PetscMPIInt   rank;                /* rank of proc in communicator */

  /* The following variables are used for matrix assembly */
  PetscTruth    donotstash;          /* PETSC_TRUE if off-processor entries are dropped */
  MPI_Request   *send_waits;         /* array of send requests */
  MPI_Request   *recv_waits;         /* array of receive requests */
  PetscInt      nsends,nrecvs;       /* numbers of sends and receives */
  PetscScalar   *svalues,*rvalues;   /* sending and receiving data */
  PetscInt      rmax;                /* maximum message length */
  PetscInt      *garray;             /* work array */
  PetscTruth    roworiented;         /* if true, MatSetValues() input is row oriented (default PETSC_TRUE) */
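
  /*
     Illustration only: when roworiented is PETSC_TRUE (the default),
     the values passed to MatSetValues() are interpreted row by row,
     e.g. for

         PetscInt    rows[2],cols[3];
         PetscScalar v[6];
         MatSetValues(mat,2,rows,3,cols,v,INSERT_VALUES);

     v[i*3 + j] is taken as the entry for (rows[i],cols[j]).
  */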

  /* The following variables are used for matrix-vector products;
     see the usage sketch following this struct */

  Vec           lvec;                /* local vector */
  VecScatter    Mvctx;               /* scatter context for vector */
} Mat_MPIBDiag;
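
/*
   Usage sketch (an assumption, modeled on the analogous Mat_MPIAIJ code
   path, not a quote of the actual MatMult_MPIBDiag routine): in a
   parallel matrix-vector product the scatter context gathers the needed
   entries of the global input vector into lvec before the purely local
   matrix A is applied.  With the PETSc 2.x VecScatter calling sequence
   this looks roughly like

       ierr = VecScatterBegin(x,mbd->lvec,INSERT_VALUES,SCATTER_FORWARD,mbd->Mvctx);CHKERRQ(ierr);
       ierr = VecScatterEnd(x,mbd->lvec,INSERT_VALUES,SCATTER_FORWARD,mbd->Mvctx);CHKERRQ(ierr);
       ierr = MatMult(mbd->A,mbd->lvec,y);CHKERRQ(ierr);

   where mbd is the Mat_MPIBDiag context of the parallel matrix.
*/
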
EXTERN PetscErrorCode MatLoad_MPIBDiag(PetscViewer,MatType,Mat*);
EXTERN PetscErrorCode MatSetUpMultiply_MPIBDiag(Mat);