Reference documentation for deal.II version GIT b8135fa6eb 2023-03-25 15:55:02+00:00
petsc_parallel_block_sparse_matrix.cc
// ---------------------------------------------------------------------
//
// Copyright (C) 2004 - 2022 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE.md at
// the top level directory of deal.II.
//
// ---------------------------------------------------------------------

#include <deal.II/lac/petsc_block_sparse_matrix.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/lac/block_sparsity_pattern.h>

DEAL_II_NAMESPACE_OPEN

namespace PETScWrappers
{
  namespace MPI
  {
    BlockSparseMatrix &
    BlockSparseMatrix::operator=(const BlockSparseMatrix &m)
    {
      BaseClass::operator=(m);

      return *this;
    }



    BlockSparseMatrix::~BlockSparseMatrix()
    {
      PetscErrorCode ierr = destroy_matrix(petsc_nest_matrix);
      (void)ierr;
      AssertNothrow(ierr == 0, ExcPETScError(ierr));
    }



#  ifndef DOXYGEN
    void
    BlockSparseMatrix::reinit(const size_type n_block_rows,
                              const size_type n_block_columns)
    {
      // first delete previous content of the subobjects array
      clear();

      // then resize. set sizes of blocks to zero. user will later have
      // to call collect_sizes for this
      this->sub_objects.reinit(n_block_rows, n_block_columns);
      this->row_block_indices.reinit(n_block_rows, 0);
      this->column_block_indices.reinit(n_block_columns, 0);

      // and reinitialize the blocks
      for (size_type r = 0; r < this->n_block_rows(); ++r)
        for (size_type c = 0; c < this->n_block_cols(); ++c)
          {
            BlockType *p            = new BlockType();
            this->sub_objects[r][c] = p;
          }
    }
#  endif
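
    // A minimal usage sketch for the reinit() above (an editorial
    // illustration, not part of the library source; `owned_rows`,
    // `owned_cols`, and `dsp` are hypothetical objects built by the
    // caller): the blocks start out empty and must be sized one by
    // one, followed by a call to collect_sizes().
    //
    // @code
    //   PETScWrappers::MPI::BlockSparseMatrix B;
    //   B.reinit(2, 2); // 2x2 blocks, each of size zero
    //   for (unsigned int r = 0; r < 2; ++r)
    //     for (unsigned int c = 0; c < 2; ++c)
    //       B.block(r, c).reinit(owned_rows[r],
    //                            owned_cols[c],
    //                            dsp.block(r, c),
    //                            MPI_COMM_WORLD);
    //   B.collect_sizes();
    // @endcode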


    void
    BlockSparseMatrix::reinit(const std::vector<IndexSet> &      rows,
                              const std::vector<IndexSet> &      cols,
                              const BlockDynamicSparsityPattern &bdsp,
                              const MPI_Comm &                   com)
    {
      Assert(rows.size() == bdsp.n_block_rows(), ExcMessage("invalid size"));
      Assert(cols.size() == bdsp.n_block_cols(), ExcMessage("invalid size"));

      clear();
      this->sub_objects.reinit(bdsp.n_block_rows(), bdsp.n_block_cols());

      std::vector<types::global_dof_index> row_sizes;
      for (unsigned int r = 0; r < bdsp.n_block_rows(); ++r)
        row_sizes.push_back(bdsp.block(r, 0).n_rows());
      this->row_block_indices.reinit(row_sizes);

      std::vector<types::global_dof_index> col_sizes;
      for (unsigned int c = 0; c < bdsp.n_block_cols(); ++c)
        col_sizes.push_back(bdsp.block(0, c).n_cols());
      this->column_block_indices.reinit(col_sizes);

      for (unsigned int r = 0; r < this->n_block_rows(); ++r)
        for (unsigned int c = 0; c < this->n_block_cols(); ++c)
          {
            Assert(rows[r].size() == bdsp.block(r, c).n_rows(),
                   ExcMessage("invalid size"));
            Assert(cols[c].size() == bdsp.block(r, c).n_cols(),
                   ExcMessage("invalid size"));

            BlockType *p = new BlockType();
            p->reinit(rows[r], cols[c], bdsp.block(r, c), com);
            this->sub_objects[r][c] = p;
          }

      collect_sizes();
    }
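
    // Usage sketch for the IndexSet-based reinit() (an editorial
    // illustration, not part of the library source; a one-process
    // partition of two 100-row blocks is assumed):
    //
    // @code
    //   IndexSet owned(100);
    //   owned.add_range(0, 100);
    //   std::vector<IndexSet> owned_blocks(2, owned);
    //
    //   BlockDynamicSparsityPattern bdsp(owned_blocks);
    //   bdsp.block(0, 0).add(0, 0); // add couplings as needed
    //   bdsp.collect_sizes();
    //
    //   PETScWrappers::MPI::BlockSparseMatrix B;
    //   B.reinit(owned_blocks, owned_blocks, bdsp, MPI_COMM_WORLD);
    // @endcode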

    void
    BlockSparseMatrix::reinit(const std::vector<IndexSet> &      sizes,
                              const BlockDynamicSparsityPattern &bdsp,
                              const MPI_Comm &                   com)
    {
      reinit(sizes, sizes, bdsp, com);
    }



    void
    BlockSparseMatrix::collect_sizes()
    {
      BaseClass::collect_sizes();

      auto m = this->n_block_rows();
      auto n = this->n_block_cols();

      PetscErrorCode ierr = destroy_matrix(petsc_nest_matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      std::vector<Mat> psub_objects(m * n);
      for (unsigned int r = 0; r < m; r++)
        for (unsigned int c = 0; c < n; c++)
          psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
      ierr = MatCreateNest(get_mpi_communicator(),
                           m,
                           nullptr,
                           n,
                           nullptr,
                           psub_objects.data(),
                           &petsc_nest_matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }
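
    // The matrix created above is a PETSc MATNEST whose blocks alias
    // the sub-matrices. Sketch of inspecting it through the raw PETSc
    // API (an editorial illustration; `B` is a matrix set up as
    // above, converted via the conversion operator defined further
    // down in this file):
    //
    // @code
    //   const Mat &nest = B;
    //   PetscInt   nr, nc;
    //   MatNestGetSize(nest, &nr, &nc); // == n_block_rows(), n_block_cols()
    // @endcode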



    std::vector<IndexSet>
    BlockSparseMatrix::locally_owned_domain_indices() const
    {
      std::vector<IndexSet> index_sets;

      for (unsigned int i = 0; i < this->n_block_cols(); ++i)
        index_sets.push_back(this->block(0, i).locally_owned_domain_indices());

      return index_sets;
    }



    std::vector<IndexSet>
    BlockSparseMatrix::locally_owned_range_indices() const
    {
      std::vector<IndexSet> index_sets;

      for (unsigned int i = 0; i < this->n_block_rows(); ++i)
        index_sets.push_back(this->block(i, 0).locally_owned_range_indices());

      return index_sets;
    }
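
    // These two functions return exactly the partitioning needed to
    // build vectors the matrix can act on (an editorial illustration;
    // `B` is a matrix set up as above):
    //
    // @code
    //   PETScWrappers::MPI::BlockVector x, y;
    //   x.reinit(B.locally_owned_domain_indices(), MPI_COMM_WORLD);
    //   y.reinit(B.locally_owned_range_indices(), MPI_COMM_WORLD);
    //   B.vmult(y, x); // sizes match by construction
    // @endcode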



    std::uint64_t
    BlockSparseMatrix::n_nonzero_elements() const
    {
      std::uint64_t n_nonzero = 0;
      for (size_type rows = 0; rows < this->n_block_rows(); ++rows)
        for (size_type cols = 0; cols < this->n_block_cols(); ++cols)
          n_nonzero += this->block(rows, cols).n_nonzero_elements();

      return n_nonzero;
    }



    const MPI_Comm &
    BlockSparseMatrix::get_mpi_communicator() const
    {
      static MPI_Comm comm = PETSC_COMM_SELF;
      MPI_Comm pcomm =
        PetscObjectComm(reinterpret_cast<PetscObject>(petsc_nest_matrix));
      if (pcomm != MPI_COMM_NULL)
        comm = pcomm;
      return comm;
    }

    BlockSparseMatrix::operator const Mat &() const
    {
      return petsc_nest_matrix;
    }



    Mat &
    BlockSparseMatrix::petsc_matrix()
    {
      return petsc_nest_matrix;
    }
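
    // Both accessors expose the underlying MATNEST object, so the
    // matrix can be handed directly to PETSc routines (an editorial
    // illustration; `B` is a matrix set up as above):
    //
    // @code
    //   PetscErrorCode ierr =
    //     MatView(B.petsc_matrix(), PETSC_VIEWER_STDOUT_WORLD);
    // @endcode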

    void
    BlockSparseMatrix::reinit(Mat A)
    {
      clear();

      PetscBool isnest;
      PetscInt  nr = 1, nc = 1;

      PetscErrorCode ierr =
        PetscObjectTypeCompare(reinterpret_cast<PetscObject>(A),
                               MATNEST,
                               &isnest);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      std::vector<Mat> mats;
      if (isnest)
        {
          ierr = MatNestGetSize(A, &nr, &nc);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
          for (PetscInt i = 0; i < nr; ++i)
            {
              for (PetscInt j = 0; j < nc; ++j)
                {
                  Mat sA;
                  ierr = MatNestGetSubMat(A, i, j, &sA);
                  AssertThrow(ierr == 0, ExcPETScError(ierr));
                  mats.push_back(sA);
                }
            }
        }
      else
        {
          mats.push_back(A);
        }

      std::vector<size_type> r_block_sizes(nr, 0);
      std::vector<size_type> c_block_sizes(nc, 0);
      this->row_block_indices.reinit(r_block_sizes);
      this->column_block_indices.reinit(c_block_sizes);
      this->sub_objects.reinit(nr, nc);
      for (PetscInt i = 0; i < nr; ++i)
        {
          for (PetscInt j = 0; j < nc; ++j)
            {
              // TODO: MATNEST supports NULL blocks
              this->sub_objects[i][j] = new BlockType(mats[i * nc + j]);
            }
        }

      collect_sizes();
    }
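
    // Sketch of wrapping an existing PETSc matrix (an editorial
    // illustration; `A` is created elsewhere with the PETSc API).
    // A MATNEST matrix is split into its blocks; any other matrix
    // type becomes a single 1x1 block:
    //
    // @code
    //   extern Mat A; // e.g. assembled by user code
    //   PETScWrappers::MPI::BlockSparseMatrix B;
    //   B.reinit(A);  // B now wraps A block by block
    // @endcode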

  } // namespace MPI
} // namespace PETScWrappers



DEAL_II_NAMESPACE_CLOSE

#endif