chunk_sparsity_pattern.h
// ---------------------------------------------------------------------
//
// Copyright (C) 2008 - 2020 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE.md at
// the top level directory of deal.II.
//
// ---------------------------------------------------------------------

#ifndef dealii_chunk_sparsity_pattern_h
#define dealii_chunk_sparsity_pattern_h


#include <deal.II/base/config.h>

#include <deal.II/base/exceptions.h>
#include <deal.II/base/subscriptor.h>

#include <deal.II/lac/sparsity_pattern.h>

#include <iostream>
#include <vector>

DEAL_II_NAMESPACE_OPEN


// Forward declaration
#ifndef DOXYGEN
template <typename>
class ChunkSparseMatrix;
#endif


/**
 * Iterators on sparsity patterns of type ChunkSparsityPattern.
 */
namespace ChunkSparsityPatternIterators
{
  // forward declaration
  class Iterator;

  /**
   * Accessor class for iterators into chunk sparsity patterns.
   */
  class Accessor
  {
  public:
    // Declare the type for container size.
    using size_type = types::global_dof_index;

    // Constructor.
    Accessor(const ChunkSparsityPattern *matrix, const size_type row);

    // Constructor; creates the end accessor for the given sparsity pattern.
    Accessor(const ChunkSparsityPattern *matrix);

    // Row number of the element represented by this object.
    size_type
    row() const;

    // Index of this element within the reduced (chunk) sparsity pattern.
    std::size_t
    reduced_index() const;

    // Column number of the element represented by this object.
    size_type
    column() const;

    // Whether the entry pointed to is a valid (non-end) entry.
    bool
    is_valid_entry() const;


    // Comparison: true if both accessors point to the same entry.
    bool
    operator==(const Accessor &) const;


    // Ordering: by row first, then by index within the row.
    bool
    operator<(const Accessor &) const;

  protected:
    // The chunk sparsity pattern we operate on.
    const ChunkSparsityPattern *sparsity_pattern;

    // Accessor into the underlying (reduced) sparsity pattern.
    SparsityPatternIterators::Accessor reduced_accessor;

    // Current row within the chunk.
    size_type chunk_row;

    // Current column within the chunk.
    size_type chunk_col;

    // Move the accessor to the next nonzero entry of the matrix.
    void
    advance();

    // Grant access to iterator class.
    friend class Iterator;
  };



  /**
   * Iterator that walks over the elements of a ChunkSparsityPattern.
   */
  class Iterator
  {
  public:
    // Declare the type for container size.
    using size_type = types::global_dof_index;

    // Constructor. Create an iterator into the sparsity pattern @p sp for
    // the given row.
    Iterator(const ChunkSparsityPattern *sp, const size_type row);

    // Prefix increment.
    Iterator &
    operator++();

    // Postfix increment.
    Iterator
    operator++(int);

    // Dereferencing operator.
    const Accessor &
    operator*() const;

    // Dereferencing operator.
    const Accessor *
    operator->() const;

    // Comparison: true if both iterators point to the same entry.
    bool
    operator==(const Iterator &) const;

    // Inverse of operator==().
    bool
    operator!=(const Iterator &) const;

    // Ordering: by row first, then by index within the row.
    bool
    operator<(const Iterator &) const;

  private:
    // The accessor representing the element the iterator points to.
    Accessor accessor;
  };
} // namespace ChunkSparsityPatternIterators
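
// Note on the index mapping used by the accessor classes above (derived from
// the inline definitions further down in this file): an entry of the chunk
// pattern is addressed by an entry of the reduced pattern (one per
// chunk_size x chunk_size block) plus an offset (chunk_row, chunk_col)
// inside that block, so that
//
//   global_row = chunk_size * reduced_row + chunk_row
//   global_col = chunk_size * reduced_col + chunk_col
//
// For example, with chunk_size == 2, the entry in reduced row 3 with
// chunk_row 1 corresponds to global row 2 * 3 + 1 = 7.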


/**
 * Structure representing the sparsity pattern of a chunk sparse matrix.
 * It works like SparsityPattern, but allocates entries in chunks (blocks)
 * of size chunk_size x chunk_size.
 */
class ChunkSparsityPattern : public Subscriptor
{
public:
  // Declare the type for container size.
  using size_type = types::global_dof_index;

  // Iterator type for walking over all nonzero elements of the pattern.
  // Since iterators do not allow modification of the pattern, the const and
  // non-const iterator types are the same.
  using const_iterator = ChunkSparsityPatternIterators::Iterator;
  using iterator       = ChunkSparsityPatternIterators::Iterator;

  // Value used to indicate that an entry in the column number array is
  // unused, i.e. does not represent a certain column number index.
  static const size_type invalid_entry = SparsityPattern::invalid_entry;

  // Initialize the pattern empty, i.e. with no memory allocated.
  ChunkSparsityPattern();

  // Copy constructor. Only allowed to be called if the pattern to be copied
  // is empty.
  ChunkSparsityPattern(const ChunkSparsityPattern &);

  // Initialize a rectangular pattern of size m x n.
  ChunkSparsityPattern(const size_type m,
                       const size_type n,
                       const size_type max_chunks_per_row,
                       const size_type chunk_size);

  // Initialize a rectangular pattern with a per-row number of entries.
  ChunkSparsityPattern(const size_type               m,
                       const size_type               n,
                       const std::vector<size_type> &row_lengths,
                       const size_type               chunk_size);

  // Initialize a quadratic pattern of dimension n.
  ChunkSparsityPattern(const size_type n,
                       const size_type max_per_row,
                       const size_type chunk_size);

  // Initialize a quadratic pattern with a per-row number of entries.
  ChunkSparsityPattern(const size_type               m,
                       const std::vector<size_type> &row_lengths,
                       const size_type               chunk_size);

  // Destructor.
  ~ChunkSparsityPattern() override = default;

  // Copy operator. Like the copy constructor, only allowed for empty objects.
  ChunkSparsityPattern &
  operator=(const ChunkSparsityPattern &);

  // Reallocate memory and set up data structures for a new pattern with m
  // rows and n columns, with at most max_per_row nonzero entries per row.
  void
  reinit(const size_type m,
         const size_type n,
         const size_type max_per_row,
         const size_type chunk_size);

  // Same as above, but with the number of entries given per row.
  void
  reinit(const size_type               m,
         const size_type               n,
         const std::vector<size_type> &row_lengths,
         const size_type               chunk_size);

  // Same as above, but taking an ArrayView of row lengths.
  void
  reinit(const size_type                   m,
         const size_type                   n,
         const ArrayView<const size_type> &row_lengths,
         const size_type                   chunk_size);

  // Compress the sparsity structure. Afterwards no new entries can be added.
  void
  compress();

  // Build the pattern from an iterator range describing the entries of each
  // row; a replacement for reinit(), repeated calls to add() and compress().
  template <typename ForwardIterator>
  void
  copy_from(const size_type       n_rows,
            const size_type       n_cols,
            const ForwardIterator begin,
            const ForwardIterator end,
            const size_type       chunk_size);

  // Copy data from another sparsity pattern object (e.g. a
  // DynamicSparsityPattern).
  template <typename SparsityPatternType>
  void
  copy_from(const SparsityPatternType &dsp, const size_type chunk_size);

  // Take a full matrix and use its nonzero entries to generate the pattern.
  template <typename number>
  void
  copy_from(const FullMatrix<number> &matrix, const size_type chunk_size);

  // Set this pattern from a sparsity pattern that describes which chunks
  // (blocks of size chunk_size x chunk_size) are nonzero.
  template <typename Sparsity>
  void
  create_from(const size_type m,
              const size_type n,
              const Sparsity &sparsity_pattern_for_chunks,
              const size_type chunk_size,
              const bool      optimize_diagonal = true);

  // Return whether the object is empty.
  bool
  empty() const;

  // Return the chunk size given as argument when constructing this object.
  size_type
  get_chunk_size() const;

  // Return the maximum number of entries per row.
  size_type
  max_entries_per_row() const;

  // Add a nonzero entry at position (i, j).
  void
  add(const size_type i, const size_type j);

  // Make the sparsity pattern symmetric by adding the transposed pattern.
  void
  symmetrize();

  // Return the number of rows.
  inline size_type
  n_rows() const;

  // Return the number of columns.
  inline size_type
  n_cols() const;

  // Check whether the entry at position (i, j) may be nonzero.
  bool
  exists(const size_type i, const size_type j) const;

  // Number of entries in a specific row.
  size_type
  row_length(const size_type row) const;

  // Compute the bandwidth of the matrix represented by this structure.
  size_type
  bandwidth() const;

  // Return the number of nonzero elements of this pattern.
  size_type
  n_nonzero_elements() const;

  // Return whether the structure is compressed or not.
  bool
  is_compressed() const;

  // Return whether this object stores only explicitly added entries.
  bool
  stores_only_added_elements() const;

  // Iterator starting at the first entry of the pattern.
  iterator
  begin() const;

  // Final iterator.
  iterator
  end() const;

  // Iterator starting at the first entry of row r.
  iterator
  begin(const size_type r) const;

  // Final iterator of row r.
  iterator
  end(const size_type r) const;

  // Write the data of this object en bloc to a stream.
  void
  block_write(std::ostream &out) const;

  // Read data that has previously been written by block_write().
  void
  block_read(std::istream &in);

  // Print the sparsity pattern.
  void
  print(std::ostream &out) const;

  // Print the sparsity pattern in a format that gnuplot understands.
  void
  print_gnuplot(std::ostream &out) const;

  // Determine an estimate for the memory consumption (in bytes).
  std::size_t
  memory_consumption() const;

  // Exceptions.
  DeclException1(ExcInvalidNumber,
                 size_type,
                 << "The provided number is invalid here: " << arg1);
  DeclException2(ExcInvalidIndex,
                 size_type,
                 size_type,
                 << "The given index " << arg1 << " should be less than "
                 << arg2 << ".");
  DeclException2(ExcNotEnoughSpace,
                 size_type,
                 size_type,
                 << "Upon entering a new entry to row " << arg1
                 << ": there was no free entry any more. " << std::endl
                 << "(Maximum number of entries for this row: " << arg2
                 << "; maybe the matrix is already compressed?)");
  DeclExceptionMsg(
    ExcNotCompressed,
    "The operation you attempted is only allowed after the SparsityPattern "
    "has been set up and compress() was called.");
  DeclExceptionMsg(
    ExcMatrixIsCompressed,
    "The operation you attempted changes the structure of the SparsityPattern "
    "and is not possible after compress() has been called.");
  DeclException2(ExcIteratorRange,
                 size_type,
                 size_type,
                 << "The iterators denote a range of " << arg1
                 << " elements, but the given number of rows was " << arg2);
  DeclException1(ExcInvalidNumberOfPartitions,
                 size_type,
                 << "The number of partitions you gave is " << arg1
                 << ", but must be greater than zero.");
  DeclException2(ExcInvalidArraySize,
                 size_type,
                 size_type,
                 << "The array has size " << arg1 << " but should have size "
                 << arg2);

private:
  // Number of rows that this sparsity pattern represents.
  size_type rows;

  // Number of columns that this sparsity pattern represents.
  size_type cols;

  // The size of chunks.
  size_type chunk_size;

  // The reduced sparsity pattern: stores which chunk_size x chunk_size
  // blocks of the matrix contain nonzero entries.
  SparsityPattern sparsity_pattern;

  // Make all the chunk sparse matrix kinds friends.
  template <typename>
  friend class ChunkSparseMatrix;

  // Make the accessor class a friend.
  friend class ChunkSparsityPatternIterators::Accessor;
};
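
// A minimal usage sketch (illustrative only, not part of the library
// interface):
//
//   ChunkSparsityPattern pattern;
//   pattern.reinit(/*m=*/8, /*n=*/8, /*max_per_row=*/3, /*chunk_size=*/2);
//   pattern.add(0, 0);
//   pattern.add(0, 1);
//   pattern.add(4, 7);
//   pattern.compress();
//
//   // after compress(), the pattern can be queried and iterated over; note
//   // that iteration visits all entries of the allocated chunks, which may
//   // be more than the ones explicitly added (compare
//   // stores_only_added_elements()):
//   for (ChunkSparsityPattern::iterator it = pattern.begin();
//        it != pattern.end();
//        ++it)
//     std::cout << it->row() << ' ' << it->column() << '\n';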


/*---------------------- Inline functions -----------------------------------*/

#ifndef DOXYGEN

namespace ChunkSparsityPatternIterators
{
  inline Accessor::Accessor(const ChunkSparsityPattern *sparsity_pattern,
                            const size_type             row)
    : sparsity_pattern(sparsity_pattern)
    , reduced_accessor(row == sparsity_pattern->n_rows() ?
                         *sparsity_pattern->sparsity_pattern.end() :
                         *sparsity_pattern->sparsity_pattern.begin(
                           row / sparsity_pattern->get_chunk_size()))
    , chunk_row(row == sparsity_pattern->n_rows() ?
                  0 :
                  row % sparsity_pattern->get_chunk_size())
    , chunk_col(0)
  {}


  inline Accessor::Accessor(const ChunkSparsityPattern *sparsity_pattern)
    : sparsity_pattern(sparsity_pattern)
    , reduced_accessor(*sparsity_pattern->sparsity_pattern.end())
    , chunk_row(0)
    , chunk_col(0)
  {}



  inline bool
  Accessor::is_valid_entry() const
  {
    return reduced_accessor.is_valid_entry() &&
           sparsity_pattern->get_chunk_size() * reduced_accessor.row() +
                 chunk_row <
               sparsity_pattern->n_rows() &&
           sparsity_pattern->get_chunk_size() * reduced_accessor.column() +
                 chunk_col <
               sparsity_pattern->n_cols();
  }


  inline Accessor::size_type
  Accessor::row() const
  {
    Assert(is_valid_entry() == true, ExcInvalidIterator());

    return sparsity_pattern->get_chunk_size() * reduced_accessor.row() +
           chunk_row;
  }



  inline Accessor::size_type
  Accessor::column() const
  {
    Assert(is_valid_entry() == true, ExcInvalidIterator());

    return sparsity_pattern->get_chunk_size() * reduced_accessor.column() +
           chunk_col;
  }



  inline std::size_t
  Accessor::reduced_index() const
  {
    Assert(is_valid_entry() == true, ExcInvalidIterator());

    return reduced_accessor.linear_index;
  }



  inline bool
  Accessor::operator==(const Accessor &other) const
  {
    // no need to check for equality of sparsity patterns as this is done in
    // the reduced case already and every ChunkSparsityPattern has its own
    // reduced sparsity pattern
    return (reduced_accessor == other.reduced_accessor &&
            chunk_row == other.chunk_row && chunk_col == other.chunk_col);
  }



  inline bool
  Accessor::operator<(const Accessor &other) const
  {
    Assert(sparsity_pattern == other.sparsity_pattern, ExcInternalError());

    if (chunk_row != other.chunk_row)
      {
        if (reduced_accessor.linear_index ==
            reduced_accessor.container->n_nonzero_elements())
          return false;
        if (other.reduced_accessor.linear_index ==
            reduced_accessor.container->n_nonzero_elements())
          return true;

        const auto global_row = sparsity_pattern->get_chunk_size() *
                                  reduced_accessor.row() +
                                chunk_row,
                   other_global_row = sparsity_pattern->get_chunk_size() *
                                        other.reduced_accessor.row() +
                                      other.chunk_row;
        if (global_row < other_global_row)
          return true;
        else if (global_row > other_global_row)
          return false;
      }

    return (
      reduced_accessor.linear_index < other.reduced_accessor.linear_index ||
      (reduced_accessor.linear_index == other.reduced_accessor.linear_index &&
       chunk_col < other.chunk_col));
  }


  inline void
  Accessor::advance()
  {
    const auto chunk_size = sparsity_pattern->get_chunk_size();
    Assert(chunk_row < chunk_size && chunk_col < chunk_size,
           ExcIteratorPastEnd());
    Assert(reduced_accessor.row() * chunk_size + chunk_row <
               sparsity_pattern->n_rows() &&
             reduced_accessor.column() * chunk_size + chunk_col <
               sparsity_pattern->n_cols(),
           ExcIteratorPastEnd());
    if (chunk_size == 1)
      {
        reduced_accessor.advance();
        return;
      }

    ++chunk_col;

    // end of chunk
    if (chunk_col == chunk_size ||
        reduced_accessor.column() * chunk_size + chunk_col ==
          sparsity_pattern->n_cols())
      {
        const auto reduced_row = reduced_accessor.row();
        // end of row
        if (reduced_accessor.linear_index + 1 ==
            reduced_accessor.container->rowstart[reduced_row + 1])
          {
            ++chunk_row;

            chunk_col = 0;

            // end of chunk rows or end of matrix
            if (chunk_row == chunk_size ||
                (reduced_row * chunk_size + chunk_row ==
                 sparsity_pattern->n_rows()))
              {
                chunk_row = 0;
                reduced_accessor.advance();
              }
            // go back to the beginning of the same reduced row but with
            // chunk_row increased by one
            else
              reduced_accessor.linear_index =
                reduced_accessor.container->rowstart[reduced_row];
          }
        // advance within chunk
        else
          {
            reduced_accessor.advance();
            chunk_col = 0;
          }
      }
  }
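
  // Traversal order implied by advance() above (explanatory note): within one
  // reduced (chunk) row, the iterator first walks across chunk-local row 0 of
  // every allocated chunk from left to right, then returns to the first chunk
  // of the same reduced row with chunk_row = 1, and so on until all chunk_size
  // local rows have been visited; only then does it move on to the next
  // reduced row. Entries that would fall outside the matrix bounds are
  // skipped. For example, with chunk_size == 2 and two chunks A and B in a
  // reduced row, the local (row, col) offsets are visited in the order
  // A(0,0), A(0,1), B(0,0), B(0,1), A(1,0), A(1,1), B(1,0), B(1,1).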


  inline Iterator::Iterator(const ChunkSparsityPattern *sparsity_pattern,
                            const size_type             row)
    : accessor(sparsity_pattern, row)
  {}



  inline Iterator &
  Iterator::operator++()
  {
    accessor.advance();
    return *this;
  }



  inline Iterator
  Iterator::operator++(int)
  {
    const Iterator iter = *this;
    accessor.advance();
    return iter;
  }



  inline const Accessor &
  Iterator::operator*() const
  {
    return accessor;
  }



  inline const Accessor *
  Iterator::operator->() const
  {
    return &accessor;
  }


  inline bool
  Iterator::operator==(const Iterator &other) const
  {
    return (accessor == other.accessor);
  }



  inline bool
  Iterator::operator!=(const Iterator &other) const
  {
    return !(accessor == other.accessor);
  }


  inline bool
  Iterator::operator<(const Iterator &other) const
  {
    return accessor < other.accessor;
  }

} // namespace ChunkSparsityPatternIterators



inline ChunkSparsityPattern::iterator
ChunkSparsityPattern::begin() const
{
  return {this, 0};
}


inline ChunkSparsityPattern::iterator
ChunkSparsityPattern::end() const
{
  return {this, n_rows()};
}



inline ChunkSparsityPattern::iterator
ChunkSparsityPattern::begin(const size_type r) const
{
  AssertIndexRange(r, n_rows());
  return {this, r};
}



inline ChunkSparsityPattern::iterator
ChunkSparsityPattern::end(const size_type r) const
{
  AssertIndexRange(r, n_rows());
  return {this, r + 1};
}



inline ChunkSparsityPattern::size_type
ChunkSparsityPattern::n_rows() const
{
  return rows;
}


inline ChunkSparsityPattern::size_type
ChunkSparsityPattern::n_cols() const
{
  return cols;
}



inline ChunkSparsityPattern::size_type
ChunkSparsityPattern::get_chunk_size() const
{
  return chunk_size;
}



inline bool
ChunkSparsityPattern::is_compressed() const
{
  return sparsity_pattern.is_compressed();
}


template <typename ForwardIterator>
void
ChunkSparsityPattern::copy_from(const size_type       n_rows,
                                const size_type       n_cols,
                                const ForwardIterator begin,
                                const ForwardIterator end,
                                const size_type       chunk_size)
{
  Assert(static_cast<size_type>(std::distance(begin, end)) == n_rows,
         ExcIteratorRange(std::distance(begin, end), n_rows));

  // first determine row lengths for each row. if the matrix is quadratic,
  // then we might have to add an additional entry for the diagonal, if that
  // is not yet present. as we have to call compress anyway later on, don't
  // bother to check whether that diagonal entry is in a certain row or not
  const bool             is_square = (n_rows == n_cols);
  std::vector<size_type> row_lengths;
  row_lengths.reserve(n_rows);
  for (ForwardIterator i = begin; i != end; ++i)
    row_lengths.push_back(std::distance(i->begin(), i->end()) +
                          (is_square ? 1 : 0));
  reinit(n_rows, n_cols, row_lengths, chunk_size);

  // now enter all the elements into the matrix
  size_type row = 0;
  using inner_iterator =
    typename std::iterator_traits<ForwardIterator>::value_type::const_iterator;
  for (ForwardIterator i = begin; i != end; ++i, ++row)
    {
      const inner_iterator end_of_row = i->end();
      for (inner_iterator j = i->begin(); j != end_of_row; ++j)
        {
          const size_type col =
            internals::SparsityPatternTools::get_column_index_from_iterator(
              *j);
          Assert(col < n_cols, ExcInvalidIndex(col, n_cols));

          add(row, col);
        }
    }

  // finally compress everything. this also sorts the entries within each row
  compress();
}
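
// A minimal sketch of how the template copy_from() above can be used,
// assuming each row is described by a container of column indices
// (illustrative only, not part of this header):
//
//   std::vector<std::vector<ChunkSparsityPattern::size_type>> rows = {
//     {0, 1}, {1, 2}, {0, 2}};
//
//   ChunkSparsityPattern pattern;
//   pattern.copy_from(/*n_rows=*/3,
//                     /*n_cols=*/3,
//                     rows.begin(),
//                     rows.end(),
//                     /*chunk_size=*/2);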


#endif // DOXYGEN

DEAL_II_NAMESPACE_CLOSE

#endif