AMDiS 2.10
The Adaptive Multi-Dimensional Simulation Toolbox
MatrixNnzStructure.inc.hpp
#pragma once

#include <algorithm>
#include <map>
#include <set>
#include <vector>

#include <dune/common/timer.hh>
#include <dune/grid/common/partitionset.hh>

#include <amdis/AdaptiveGrid.hpp>
#include <amdis/Output.hpp>
#if HAVE_MPI
#include <amdis/common/parallel/Communicator.hpp>
#include <amdis/common/parallel/RequestOperations.hpp>
#endif
#include <amdis/linearalgebra/AttributeSet.hpp>

namespace AMDiS {

// Create local pattern from bases
template <class RowBasis, class ColBasis, class LI>
void MatrixNnzStructure::init(
    RowBasis const& rowBasis, PetscSequentialIndexDistribution<LI> const& rowDofMap,
    ColBasis const& colBasis, PetscSequentialIndexDistribution<LI> const& colDofMap)
{
  Dune::Timer t;

  std::size_t localRows = rowDofMap.localSize();
  test_exit(localRows > 0, "Local domain has no owned DOFs!");

  std::vector<std::set<PetscInt>> d_nz(localRows), o_nz(localRows);

  // build up local nz structure
  auto rowLocalView = rowBasis.localView();
  auto colLocalView = colBasis.localView();
  for (const auto& element : entitySet(rowBasis)) {
    rowLocalView.bind(element);
    colLocalView.bind(element);

    std::size_t rows = rowLocalView.size();
    std::size_t cols = colLocalView.size();

    for (std::size_t i = 0; i < rows; ++i) {
      auto row = rowLocalView.index(i);
      auto global_row = rowDofMap.global(row);
      auto local_row = rowDofMap.globalToLocal(global_row);
      assert(local_row < localRows);
      for (std::size_t j = 0; j < cols; ++j) {
        auto col = colLocalView.index(j);
        d_nz[local_row].insert(colDofMap.global(col));
      }
    }

    colLocalView.unbind();
    rowLocalView.unbind();
  }

  // insert size of sets into diagonal block nnz and off-diagonal block nnz
  dnnz_.resize(localRows, 0);
  onnz_.resize(localRows, 0);

  std::transform(d_nz.begin(), d_nz.end(), dnnz_.begin(), [](auto const& s) { return s.size(); });
  std::transform(o_nz.begin(), o_nz.end(), onnz_.begin(), [](auto const& s) { return s.size(); });
}


// Create distributed pattern from bases
template <class RowBasis, class ColBasis, class GID, class LI>
void MatrixNnzStructure::init(
    RowBasis const& rowBasis, PetscParallelIndexDistribution<GID,LI> const& rowDofMap,
    ColBasis const& colBasis, PetscParallelIndexDistribution<GID,LI> const& colDofMap)
{
  Dune::Timer t;

  std::size_t localRows = rowDofMap.localSize();
  test_exit(localRows > 0, "Local domain has no owned DOFs!");

  std::vector<std::set<PetscInt>> d_nz(localRows), o_nz(localRows);

  // column indices of row DOFs owned by a remote rank
  // global-row -> {global-col}
  std::map<PetscInt, std::set<PetscInt>> remote_nz;

  // build up local nz structure
  auto rowLocalView = rowBasis.localView();
  auto colLocalView = colBasis.localView();
  for (const auto& element : entitySet(rowBasis)) {
    rowLocalView.bind(element);
    colLocalView.bind(element);

    std::size_t rows = rowLocalView.size();
    std::size_t cols = colLocalView.size();

    for (std::size_t i = 0; i < rows; ++i) {
      auto row = rowLocalView.index(i);
      auto global_row = rowDofMap.global(row);
      if (rowDofMap.owner(row)) {
        auto local_row = rowDofMap.globalToLocal(global_row);
        assert(local_row < localRows);
        for (std::size_t j = 0; j < cols; ++j) {
          auto col = colLocalView.index(j);
          if (colDofMap.owner(col))
            d_nz[local_row].insert(colDofMap.global(col));
          else
            o_nz[local_row].insert(colDofMap.global(col));
        }
      } else {
        auto& remote_row = remote_nz[global_row];
        for (std::size_t j = 0; j < cols; ++j) {
          auto col = colLocalView.index(j);
          remote_row.insert(colDofMap.global(col));
        }
      }
    }

    colLocalView.unbind();
    rowLocalView.unbind();
  }

  // insert size of sets into diagonal block nnz and off-diagonal block nnz
  dnnz_.resize(localRows, 0);
  onnz_.resize(localRows, 0);

  std::transform(d_nz.begin(), d_nz.end(), dnnz_.begin(), [](auto const& s) { return s.size(); });
  std::transform(o_nz.begin(), o_nz.end(), onnz_.begin(), [](auto const& s) { return s.size(); });

  auto& world = rowDofMap.world_;

  // exchange remote rows
  using Attribute = DefaultAttributeSet::Type;
  using RemoteNz = std::vector<PetscInt>;
  std::vector<RemoteNz> sendList(world.size());
  std::vector<std::size_t> receiveList(world.size(), 0);

  for (auto const& rim : rowDofMap.remoteIndices()) {
    int p = rim.first;

    auto* sourceRemoteIndexList = rim.second.first;
    auto* targetRemoteIndexList = rim.second.second;

    // receive from overlap
    for (auto const& ri : *sourceRemoteIndexList) {
      auto const& lip = ri.localIndexPair();
      Attribute remoteAttr = ri.attribute();
      Attribute myAttr = lip.local().attribute();
      if (myAttr == Attribute::owner && remoteAttr == Attribute::overlap) {
        assert(rowDofMap.owner(lip.local().local()));
        receiveList[p]++;
      }
    }

    // send to owner
    for (auto const& ri : *targetRemoteIndexList) {
      auto const& lip = ri.localIndexPair();
      Attribute remoteAttr = ri.attribute();
      Attribute myAttr = lip.local().attribute();
      if (myAttr == Attribute::overlap && remoteAttr == Attribute::owner) {
        assert(!rowDofMap.owner(lip.local().local()));

        PetscInt global_row = rowDofMap.global(lip.local().local());
        assert(rowDofMap.globalOwner(p, global_row));

        PetscInt remoteDnnz = 0;
        PetscInt remoteOnnz = 0;
        for (PetscInt global_col : remote_nz[global_row]) {
          if (colDofMap.globalOwner(p, global_col))
            remoteDnnz++;
          else
            remoteOnnz++;
        }

        auto& data = sendList[p];
        data.push_back(global_row);
        data.push_back(remoteDnnz);
        data.push_back(remoteOnnz);
      }
    }
  }

  // 1. send remote nz structure to owner
  std::vector<Mpi::Request> sendRequests;
  for (int p = 0; p < world.size(); ++p) {
    if (!sendList[p].empty()) {
      sendRequests.emplace_back( world.isend(sendList[p], p, tag_) );
    }
  }

  // 2. receive nz structure belonging to own DOFs from remote node
  std::vector<Mpi::Request> recvRequests;
  std::vector<RemoteNz> recvData(world.size());
  for (int p = 0; p < world.size(); ++p) {
    if (receiveList[p] > 0)
      recvRequests.emplace_back( world.irecv(recvData[p], p, tag_) );
  }

  Mpi::wait_all(recvRequests.begin(), recvRequests.end());

  // 3. insert all remote nz data into my local nnz sets
  for (int p = 0; p < world.size(); ++p) {
    auto const& data = recvData[p];
    assert(data.size() == 3*receiveList[p]);
    for (std::size_t i = 0; i < receiveList[p]; ++i) {
      PetscInt global_row = data[3*i];
      assert(rowDofMap.globalOwner(global_row));

      PetscInt row_dnnz = data[3*i+1];
      assert(row_dnnz < PetscInt(localRows));

      PetscInt row_onnz = data[3*i+2];
      assert(row_onnz < PetscInt(rowDofMap.globalSize()));

      auto local_row = rowDofMap.globalToLocal(global_row);
      assert(local_row < localRows);

      dnnz_[local_row] += row_dnnz;
      onnz_[local_row] += row_onnz;
    }
  }

  // 4. make sure that the sum of diagonal and off-diagonal entries does not exceed the matrix size
  //    (the estimate can exceed it for very small matrices)
  for (std::size_t i = 0; i < localRows; ++i) {
    dnnz_[i] = std::min(dnnz_[i], PetscInt(localRows));
    if (onnz_[i] + dnnz_[i] > PetscInt(rowDofMap.globalSize())) {
      PetscInt diff = onnz_[i] + dnnz_[i] - rowDofMap.globalSize();
      onnz_[i] -= diff;
    }
  }

  Mpi::wait_all(sendRequests.begin(), sendRequests.end());

  info(2, "update MatrixNnzStructure needed {} sec", t.elapsed());
}

} // end namespace AMDiS
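
Usage note: the vectors dnnz_ and onnz_ filled above hold, for every locally owned row, the number of nonzeros in the diagonal block (columns owned by the same rank) and in the off-diagonal block (columns owned by other ranks). These are exactly the per-row counts expected by PETSc's matrix preallocation routines. The following minimal sketch shows how such counts would typically be passed on; the accessors dnnz() and onnz() are assumed here for illustration and are not part of the file above.

// Illustrative sketch only (not part of MatrixNnzStructure.inc.hpp):
// preallocating a PETSc AIJ matrix from the per-row counts computed above.
// The accessors 'pattern.dnnz()' / 'pattern.onnz()' are assumed to expose the
// std::vector<PetscInt> members dnnz_ / onnz_; error checking is omitted.
#include <petscmat.h>

inline void preallocate(Mat mat, MatrixNnzStructure const& pattern)
{
  // Sequential AIJ: only the per-row counts are needed. The scalar argument
  // (here 0) is ignored by PETSc whenever an nnz array is supplied.
  MatSeqAIJSetPreallocation(mat, 0, pattern.dnnz().data());

  // Parallel MPIAIJ: PETSc expects, for each locally owned row, the number of
  // nonzeros in the diagonal block and in the off-diagonal block separately.
  // PETSc ignores the preallocation call that does not match the actual
  // matrix type, so both calls may be issued unconditionally.
  MatMPIAIJSetPreallocation(mat, 0, pattern.dnnz().data(),
                                 0, pattern.onnz().data());
}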