Reference documentation for deal.II version 9.5.0
petsc_parallel_block_vector.cc
// ---------------------------------------------------------------------
//
// Copyright (C) 2004 - 2023 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE.md at
// the top level directory of deal.II.
//
// ---------------------------------------------------------------------

#include <deal.II/lac/petsc_block_vector.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/lac/petsc_compatibility.h>

DEAL_II_NAMESPACE_OPEN

namespace PETScWrappers
{
  namespace MPI
  {
    using size_type = types::global_dof_index;

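    // Destructor: release the PETSc VECNEST representation of this block
    // vector. VecDestroy() is a no-op on a null handle, so this is safe
    // even if the nest vector was never set up.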
    BlockVector::~BlockVector()
    {
      PetscErrorCode ierr = VecDestroy(&petsc_nest_vector);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }

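    // Reinitialize to num_blocks empty blocks, each a sequential vector
    // on MPI_COMM_SELF; callers are expected to reinit() the individual
    // blocks to their actual sizes afterwards.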
    void
    BlockVector::reinit(const unsigned int num_blocks)
    {
      std::vector<size_type> block_sizes(num_blocks, 0);
      this->block_indices.reinit(block_sizes);
      if (this->components.size() != this->n_blocks())
        this->components.resize(this->n_blocks());

      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        components[i].reinit(MPI_COMM_SELF, 0, 0);

      collect_sizes();
    }

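    // Reinitialize from an existing PETSc vector. If v is of type VECNEST,
    // its sub-vectors become the blocks and v itself is kept (with its
    // reference count increased) as the nest vector; otherwise v becomes
    // the single block and a fresh one-block nest vector is built around it.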
    void
    BlockVector::reinit(Vec v)
    {
      PetscBool isnest;

      PetscErrorCode ierr =
        PetscObjectTypeCompare(reinterpret_cast<PetscObject>(v),
                               VECNEST,
                               &isnest);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      std::vector<Vec> sv;
      if (isnest)
        {
          PetscInt nb;
          ierr = VecNestGetSize(v, &nb);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
          for (PetscInt i = 0; i < nb; ++i)
            {
              Vec vv;
              ierr = VecNestGetSubVec(v, i, &vv);
              sv.push_back(vv);
            }
        }
      else
        {
          sv.push_back(v);
        }

      auto nb = sv.size();

      std::vector<size_type> block_sizes(nb, 0);
      this->block_indices.reinit(block_sizes);

      this->components.resize(nb);
      for (unsigned int i = 0; i < nb; ++i)
        {
          this->components[i].reinit(sv[i]);
        }

      this->collect_sizes();

      if (!isnest)
        setup_nest_vec();
      else
        {
          ierr = PetscObjectReference(reinterpret_cast<PetscObject>(v));
          AssertThrow(ierr == 0, ExcPETScError(ierr));
          PetscErrorCode ierr = VecDestroy(&petsc_nest_vector);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
          petsc_nest_vector = v;
        }
    }

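    // Read-write and read-only access to the underlying VECNEST object.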
    Vec &
    BlockVector::petsc_vector()
    {
      return petsc_nest_vector;
    }

    BlockVector::operator const Vec &() const
    {
      return petsc_nest_vector;
    }

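    // Recompute the cached block start indices, then rebuild the nest
    // vector so the PETSc view stays consistent with the blocks.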
    void
    BlockVector::collect_sizes()
    {
      BlockVectorBase::collect_sizes();
      setup_nest_vec();
    }

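    // Finish pending insert/add operations on all blocks, then bump the
    // PETSc state counter of the nest vector so that PETSc notices the
    // modification.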
    void
    BlockVector::compress(VectorOperation::values operation)
    {
      BlockVectorBase::compress(operation);
      petsc_increment_state_counter(petsc_nest_vector);
    }

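    // (Re)build the VECNEST object from the current blocks. VecCreateNest()
    // references the sub-vectors rather than copying them, so the nest
    // vector acts as a view of the blocks.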
    void
    BlockVector::setup_nest_vec()
    {
      PetscErrorCode ierr = VecDestroy(&petsc_nest_vector);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      auto n = this->n_blocks();

      std::vector<Vec> pcomponents(n);
      for (unsigned int i = 0; i < n; i++)
        pcomponents[i] = this->components[i].petsc_vector();

      MPI_Comm comm =
        pcomponents.size() > 0 ?
          PetscObjectComm(reinterpret_cast<PetscObject>(pcomponents[0])) :
          PETSC_COMM_SELF;

      ierr =
        VecCreateNest(comm, n, nullptr, pcomponents.data(), &petsc_nest_vector);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }
  } // namespace MPI

} // namespace PETScWrappers

DEAL_II_NAMESPACE_CLOSE

#endif // DEAL_II_WITH_PETSC
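
A minimal usage sketch (not part of the file above; it assumes a deal.II build configured with PETSc, a single MPI rank, and sizes invented for illustration):

#include <deal.II/lac/petsc_block_vector.h>

using namespace dealii;

void example()
{
  // A two-block vector; both blocks start out empty.
  PETScWrappers::MPI::BlockVector bv;
  bv.reinit(2);

  // Give each block a layout: communicator, global size, locally owned
  // size (equal here because we assume a single rank).
  bv.block(0).reinit(MPI_COMM_WORLD, 100, 100);
  bv.block(1).reinit(MPI_COMM_WORLD, 50, 50);
  bv.collect_sizes(); // refresh block indices and the VECNEST view

  // Write one entry, then flush PETSc's insertion buffers.
  bv(0) = 1.0;
  bv.compress(VectorOperation::insert);

  // Raw PETSc handle, e.g. for handing the vector to a PETSc solver.
  Vec nest = bv.petsc_vector();
  (void)nest;
}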