Xpetra_MatrixUtils.hpp
// @HEADER
//
// ***********************************************************************
//
//             Xpetra: A linear algebra interface package
//                  Copyright 2012 Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact
//                    Jonathan Hu       (jhu@sandia.gov)
//                    Andrey Prokopenko (aprokop@sandia.gov)
//                    Ray Tuminaro      (rstumin@sandia.gov)
//                    Tobias Wiesner    (tawiesn@sandia.gov)
//
// ***********************************************************************
//
// @HEADER
#ifndef PACKAGES_XPETRA_SUP_MATRIX_UTILS_HPP_
#define PACKAGES_XPETRA_SUP_MATRIX_UTILS_HPP_

#include "Xpetra_ConfigDefs.hpp"

#include "Xpetra_Map.hpp"
#include "Xpetra_MapUtils.hpp"
#include "Xpetra_StridedMap.hpp"
#include "Xpetra_MapFactory.hpp"
#include "Xpetra_MapExtractor.hpp"
#include "Xpetra_MapExtractorFactory.hpp"
#include "Xpetra_Matrix.hpp"
#include "Xpetra_MatrixFactory.hpp"
#include "Xpetra_MatrixMatrix.hpp"
#include "Xpetra_CrsMatrixWrap.hpp"
#include "Xpetra_BlockedCrsMatrix.hpp"

#include "Xpetra_IO.hpp"

#ifdef HAVE_XPETRA_TPETRA
#include "Xpetra_TpetraMultiVector.hpp"
#include <Tpetra_RowMatrixTransposer.hpp>
#include <Tpetra_Details_extractBlockDiagonal.hpp>
#include <Tpetra_Details_scaleBlockDiagonal.hpp>
#endif

namespace Xpetra {

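/*!
  @class MatrixUtils
  @brief Xpetra utility class for common matrix-related routines.

  Collects static helpers to split a monolithic matrix into a blocked operator,
  check and repair small or missing diagonal entries, boost diagonal entries
  relative to the per-equation maximum, extract and inverse-scale block
  diagonals, and attach strided map views to a matrix.

  Illustrative usage sketch (assumes typedefs SC, LO, GO, NO for the template
  parameters and an already assembled matrix Ac plus an output stream fos; this
  is not a verbatim excerpt from any test):
  @code
  // Repair zero/small diagonal entries of Ac in place, reporting to fos.
  Xpetra::MatrixUtils<SC,LO,GO,NO>::CheckRepairMainDiagonal(Ac, true, fos);
  @endcode
*/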
template <class Scalar,
          class LocalOrdinal,
          class GlobalOrdinal,
          class Node>
class MatrixUtils {
#undef XPETRA_MATRIXUTILS_SHORT
#include "Xpetra_UseShortNames.hpp"

public:

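  /*! @brief Copy an Xpetra-numbered MultiVector onto a map with shrunken (Thyra-style, zero-based consecutive) GIDs.

      The data is copied one-to-one per local row; only the underlying map changes.
  */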
  static Teuchos::RCP<Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node> > xpetraGidNumbering2ThyraGidNumbering(
      const Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>& input) {
    RCP<const Map> map = MapUtils::shrinkMapGIDs(*(input.getMap()),*(input.getMap()));
    RCP<MultiVector> ret = MultiVectorFactory::Build(map, input.getNumVectors(), true);
    for (size_t c = 0; c < input.getNumVectors(); c++) {
      Teuchos::ArrayRCP< const Scalar > data = input.getData(c);
      for (size_t r = 0; r < input.getLocalLength(); r++) {
        ret->replaceLocalValue(Teuchos::as<LocalOrdinal>(r), c, data[r]);
      }
    }

    return ret;
  }

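  /*! @brief Build an overlapping sub-map of the column map of \c input containing all column GIDs
      that belong to the given (non-overlapping) \c domainMap on any process.
  */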
  static Teuchos::RCP<Xpetra::Map<LocalOrdinal, GlobalOrdinal, Node> > findColumnSubMap(
      const Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>& input,
      const Xpetra::Map<LocalOrdinal, GlobalOrdinal, Node>& domainMap) {
    RCP< const Teuchos::Comm<int> > comm = input.getRowMap()->getComm();

    // build an overlapping version of mySpecialMap
    Teuchos::Array<GlobalOrdinal> ovlUnknownStatusGids;
    Teuchos::Array<GlobalOrdinal> ovlFoundStatusGids;

    // loop over the global column map of A and find all GIDs for which it is not clear whether they are special or not
    for(size_t i = 0; i<input.getColMap()->getLocalNumElements(); i++) {
      GlobalOrdinal gcid = input.getColMap()->getGlobalElement(i);
      if(domainMap.isNodeGlobalElement(gcid) == false) {
        ovlUnknownStatusGids.push_back(gcid);
      }
    }

    // We need a locally replicated list of all DOF GIDs with unknown status.
    // Communicate the number of such DOFs on each processor.
    std::vector<int> myUnknownDofGIDs(comm->getSize(),0);
    std::vector<int> numUnknownDofGIDs(comm->getSize(),0);
    myUnknownDofGIDs[comm->getRank()] = ovlUnknownStatusGids.size();
    Teuchos::reduceAll(*comm,Teuchos::REDUCE_MAX,comm->getSize(),&myUnknownDofGIDs[0],&numUnknownDofGIDs[0]);

    // create array containing all DOF GIDs
    size_t cntUnknownDofGIDs = 0;
    for(int p = 0; p < comm->getSize(); p++) cntUnknownDofGIDs += numUnknownDofGIDs[p];
    std::vector<GlobalOrdinal> lUnknownDofGIDs(cntUnknownDofGIDs,-1); // local version to be filled
    std::vector<GlobalOrdinal> gUnknownDofGIDs(cntUnknownDofGIDs,-1); // global version after communication
    // calculate the offset and fill chunk of memory with local data on each processor
    size_t cntUnknownOffset = 0;
    for(int p = 0; p < comm->getRank(); p++) cntUnknownOffset += numUnknownDofGIDs[p];
    for(size_t k=0; k < Teuchos::as<size_t>(ovlUnknownStatusGids.size()); k++) {
      lUnknownDofGIDs[k+cntUnknownOffset] = ovlUnknownStatusGids[k];
    }
    if(cntUnknownDofGIDs > 0)
      Teuchos::reduceAll(*comm,Teuchos::REDUCE_MAX,Teuchos::as<int>(cntUnknownDofGIDs),&lUnknownDofGIDs[0],&gUnknownDofGIDs[0]);
    std::sort(gUnknownDofGIDs.begin(), gUnknownDofGIDs.end());
    gUnknownDofGIDs.erase(std::unique(gUnknownDofGIDs.begin(), gUnknownDofGIDs.end()), gUnknownDofGIDs.end());

    // loop through all GIDs with unknown status
    for(size_t k=0; k < gUnknownDofGIDs.size(); k++) {
      GlobalOrdinal curgid = gUnknownDofGIDs[k];
      if(domainMap.isNodeGlobalElement(curgid)) {
        ovlFoundStatusGids.push_back(curgid); // curgid is in special map (on this processor)
      }
    }

    std::vector<int> myFoundDofGIDs(comm->getSize(),0);
    std::vector<int> numFoundDofGIDs(comm->getSize(),0);
    myFoundDofGIDs[comm->getRank()] = ovlFoundStatusGids.size();
    Teuchos::reduceAll(*comm,Teuchos::REDUCE_MAX,comm->getSize(),&myFoundDofGIDs[0],&numFoundDofGIDs[0]);

    // create array containing all DOF GIDs
    size_t cntFoundDofGIDs = 0;
    for(int p = 0; p < comm->getSize(); p++) cntFoundDofGIDs += numFoundDofGIDs[p];
    std::vector<GlobalOrdinal> lFoundDofGIDs(cntFoundDofGIDs,-1); // local version to be filled
    std::vector<GlobalOrdinal> gFoundDofGIDs(cntFoundDofGIDs,-1); // global version after communication
    // calculate the offset and fill chunk of memory with local data on each processor
    size_t cntFoundOffset = 0;
    for(int p = 0; p < comm->getRank(); p++) cntFoundOffset += numFoundDofGIDs[p];
    for(size_t k=0; k < Teuchos::as<size_t>(ovlFoundStatusGids.size()); k++) {
      lFoundDofGIDs[k+cntFoundOffset] = ovlFoundStatusGids[k];
    }
    if(cntFoundDofGIDs > 0)
      Teuchos::reduceAll(*comm,Teuchos::REDUCE_MAX,Teuchos::as<int>(cntFoundDofGIDs),&lFoundDofGIDs[0],&gFoundDofGIDs[0]);

    Teuchos::Array<GlobalOrdinal> ovlDomainMapArray;
    for(size_t i = 0; i<input.getColMap()->getLocalNumElements(); i++) {
      GlobalOrdinal gcid = input.getColMap()->getGlobalElement(i);
      if(domainMap.isNodeGlobalElement(gcid) == true ||
         std::find(gFoundDofGIDs.begin(), gFoundDofGIDs.end(), gcid) != gFoundDofGIDs.end()) {
        ovlDomainMapArray.push_back(gcid);
      }
    }
    RCP<Map> ovlDomainMap = MapFactory::Build(domainMap.lib(),Teuchos::OrdinalTraits<GlobalOrdinal>::invalid(),ovlDomainMapArray(),0,comm);
    return ovlDomainMap;
  }

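  /*! @brief Split a monolithic Xpetra::Matrix into an Xpetra::BlockedCrsMatrix according to the given
      range and domain map extractors.

      Both map extractors must use Xpetra-style (global) GID numbering. A column map extractor is
      generated automatically if none is provided. If \c bThyraMode is true, the subblocks of the
      result use Thyra-style (block-local, zero-based) GIDs.
  */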
  static Teuchos::RCP<Xpetra::BlockedCrsMatrix<Scalar, LocalOrdinal, GlobalOrdinal, Node> > SplitMatrix(
      const Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>& input,
      Teuchos::RCP<const Xpetra::MapExtractor<Scalar, LocalOrdinal, GlobalOrdinal, Node> > rangeMapExtractor,
      Teuchos::RCP<const Xpetra::MapExtractor<Scalar, LocalOrdinal, GlobalOrdinal, Node> > domainMapExtractor,
      Teuchos::RCP<const Xpetra::MapExtractor<Scalar, LocalOrdinal, GlobalOrdinal, Node> > columnMapExtractor = Teuchos::null,
      bool bThyraMode = false) {

    size_t numRows = rangeMapExtractor->NumMaps();
    size_t numCols = domainMapExtractor->NumMaps();

    TEUCHOS_TEST_FOR_EXCEPTION(rangeMapExtractor->getThyraMode() == true, Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor must not use Thyra style numbering of GIDs. The MapExtractor must contain all GIDs of the full range map in order to define a proper splitting.")
    TEUCHOS_TEST_FOR_EXCEPTION(domainMapExtractor->getThyraMode() == true, Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor must not use Thyra style numbering of GIDs. The MapExtractor must contain all GIDs of the full domain map in order to define a proper splitting.")

    RCP<const Map> fullRangeMap  = rangeMapExtractor->getFullMap();
    RCP<const Map> fullDomainMap = domainMapExtractor->getFullMap();

    TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getMaxAllGlobalIndex() != input.getRowMap()->getMaxAllGlobalIndex(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to row map of input matrix.")
    TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getGlobalNumElements() != input.getRowMap()->getGlobalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to row map of input matrix.")
    TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getLocalNumElements()  != input.getRowMap()->getLocalNumElements(),  Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to row map of input matrix.")
    TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getMaxAllGlobalIndex() != input.getRangeMap()->getMaxAllGlobalIndex(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to range map of input matrix.")
    TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getGlobalNumElements() != input.getRangeMap()->getGlobalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to range map of input matrix.")
    TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getLocalNumElements()  != input.getRangeMap()->getLocalNumElements(),  Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to range map of input matrix.")

    TEUCHOS_TEST_FOR_EXCEPTION(fullDomainMap->getMaxAllGlobalIndex() != input.getColMap()->getMaxAllGlobalIndex(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor incompatible to column map of input matrix. fullDomainMap->getMaxAllGlobalIndex() = " << fullDomainMap->getMaxAllGlobalIndex() << " vs. input.getColMap()->getMaxAllGlobalIndex() = " << input.getColMap()->getMaxAllGlobalIndex())
    TEUCHOS_TEST_FOR_EXCEPTION(fullDomainMap->getMaxAllGlobalIndex() != input.getDomainMap()->getMaxAllGlobalIndex(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor incompatible to domain map of input matrix.")
    TEUCHOS_TEST_FOR_EXCEPTION(fullDomainMap->getGlobalNumElements() != input.getDomainMap()->getGlobalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor incompatible to domain map of input matrix.")
    TEUCHOS_TEST_FOR_EXCEPTION(fullDomainMap->getLocalNumElements()  != input.getDomainMap()->getLocalNumElements(),  Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor incompatible to domain map of input matrix.")

    // check column map extractor
    Teuchos::RCP<const MapExtractor> myColumnMapExtractor = Teuchos::null;
    if(columnMapExtractor == Teuchos::null) {
      TEUCHOS_TEST_FOR_EXCEPTION(domainMapExtractor->getThyraMode() == true, Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: Auto generation of column map extractor not supported for Thyra style numbering.");
      // This code is always executed, since we always provide map extractors in Xpetra numbering!
      std::vector<Teuchos::RCP<const Map> > ovlxmaps(numCols, Teuchos::null);
      for(size_t c = 0; c < numCols; c++) {
        // TODO: is this routine working correctly?
        Teuchos::RCP<const Map> colMap = MatrixUtils::findColumnSubMap(input, *(domainMapExtractor->getMap(c)));
        ovlxmaps[c] = colMap;
      }
      RCP<const Map> fullColMap = MapUtils::concatenateMaps(ovlxmaps);
      // This MapExtractor is always in Xpetra mode!
      myColumnMapExtractor = MapExtractorFactory::Build(fullColMap,ovlxmaps);
    } else
      myColumnMapExtractor = columnMapExtractor; // use the user-provided column map extractor

    // All map extractors above are always in Xpetra mode.
    // Build separate ones containing Thyra mode GIDs (if necessary).
    Teuchos::RCP<const MapExtractor> thyRangeMapExtractor  = Teuchos::null;
    Teuchos::RCP<const MapExtractor> thyDomainMapExtractor = Teuchos::null;
    Teuchos::RCP<const MapExtractor> thyColMapExtractor    = Teuchos::null;
    if(bThyraMode == true) {
      // build Thyra-style map extractors
      std::vector<Teuchos::RCP<const Map> > thyRgMapExtractorMaps(numRows, Teuchos::null);
      for (size_t r = 0; r < numRows; r++) {
        RCP<const Map> rMap = rangeMapExtractor->getMap(r);
        RCP<const Map> shrinkedMap = MapUtils::shrinkMapGIDs(*rMap,*rMap);
        RCP<const StridedMap> strRangeMap = Teuchos::rcp_dynamic_cast<const StridedMap>(rMap);
        if(strRangeMap != Teuchos::null) {
          std::vector<size_t> strInfo = strRangeMap->getStridingData();
          GlobalOrdinal offset = strRangeMap->getOffset();
          LocalOrdinal stridedBlockId = strRangeMap->getStridedBlockId();
          RCP<const StridedMap> strShrinkedMap = Teuchos::rcp(new StridedMap(shrinkedMap, strInfo, shrinkedMap->getIndexBase(), stridedBlockId, offset));
          thyRgMapExtractorMaps[r] = strShrinkedMap;
        } else {
          thyRgMapExtractorMaps[r] = shrinkedMap;
        }
        TEUCHOS_TEST_FOR_EXCEPTION(thyRgMapExtractorMaps[r]->getLocalNumElements() != rMap->getLocalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: Thyra-style range map extractor contains faulty data.")
      }
      RCP<const Map> fullThyRangeMap = MapUtils::concatenateMaps(thyRgMapExtractorMaps);
      thyRangeMapExtractor = MapExtractorFactory::Build(fullThyRangeMap,thyRgMapExtractorMaps,true);
      std::vector<Teuchos::RCP<const Map> > thyDoMapExtractorMaps (numCols, Teuchos::null);
      std::vector<Teuchos::RCP<const Map> > thyColMapExtractorMaps(numCols, Teuchos::null);
      for (size_t c = 0; c < numCols; c++) {
        RCP<const Map> cMap = domainMapExtractor->getMap(c);

        RCP<const Map> shrinkedDomainMap = MapUtils::shrinkMapGIDs(*cMap,*cMap);
        RCP<const StridedMap> strDomainMap = Teuchos::rcp_dynamic_cast<const StridedMap>(cMap);
        if(strDomainMap != Teuchos::null) {
          std::vector<size_t> strInfo = strDomainMap->getStridingData();
          GlobalOrdinal offset = strDomainMap->getOffset();
          LocalOrdinal stridedBlockId = strDomainMap->getStridedBlockId();
          RCP<const StridedMap> strShrinkedDomainMap = Teuchos::rcp(new StridedMap(shrinkedDomainMap, strInfo, shrinkedDomainMap->getIndexBase(), stridedBlockId, offset));
          thyDoMapExtractorMaps[c] = strShrinkedDomainMap;
        } else {
          thyDoMapExtractorMaps[c] = shrinkedDomainMap;
        }
        RCP<const Map> colMap = myColumnMapExtractor->getMap(c);
        RCP<const Map> shrinkedColMap = MapUtils::shrinkMapGIDs(*colMap,*cMap);
        RCP<const StridedMap> strColMap = Teuchos::rcp_dynamic_cast<const StridedMap>(colMap);
        if(strColMap != Teuchos::null) {
          std::vector<size_t> strInfo = strColMap->getStridingData();
          GlobalOrdinal offset = strColMap->getOffset();
          LocalOrdinal stridedBlockId = strColMap->getStridedBlockId();
          RCP<const StridedMap> strShrinkedColMap = Teuchos::rcp(new StridedMap(shrinkedColMap, strInfo, shrinkedColMap->getIndexBase(), stridedBlockId, offset));
          thyColMapExtractorMaps[c] = strShrinkedColMap;
        } else {
          thyColMapExtractorMaps[c] = shrinkedColMap;
        }

        TEUCHOS_TEST_FOR_EXCEPTION(thyColMapExtractorMaps[c]->getLocalNumElements() != colMap->getLocalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: Thyra-style column map extractor contains faulty data.")
        TEUCHOS_TEST_FOR_EXCEPTION(thyDoMapExtractorMaps[c]->getLocalNumElements()  != cMap->getLocalNumElements(),   Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: Thyra-style domain map extractor contains faulty data.")
      }
      RCP<const Map> fullThyDomainMap = MapUtils::concatenateMaps(thyDoMapExtractorMaps);
      RCP<const Map> fullThyColumnMap = MapUtils::concatenateMaps(thyColMapExtractorMaps);
      thyDomainMapExtractor = MapExtractorFactory::Build(fullThyDomainMap,thyDoMapExtractorMaps,true);
      thyColMapExtractor    = MapExtractorFactory::Build(fullThyColumnMap,thyColMapExtractorMaps,true);
    }
    // create submatrices
    std::vector<Teuchos::RCP<Matrix> > subMatrices(numRows*numCols, Teuchos::null);
    for (size_t r = 0; r < numRows; r++) {
      for (size_t c = 0; c < numCols; c++) {
        // Create empty CrsMatrix objects.
        // Make sure that the submatrices are defined using the right row maps (either Thyra or Xpetra style).
        // Note: we reserve a little bit too much memory for the submatrices, but it should still be reasonable.
        if(bThyraMode == true)
          subMatrices[r*numCols+c] = MatrixFactory::Build(thyRangeMapExtractor->getMap(r,true),input.getLocalMaxNumRowEntries());
        else
          subMatrices[r*numCols+c] = MatrixFactory::Build(rangeMapExtractor->getMap(r),        input.getLocalMaxNumRowEntries());
      }
    }

    // We need a vector which lives on the column map of input and stores the block id that the column belongs to:
    // create a vector on the domain map, loop over it and fill in the corresponding block id numbers,
    // then create a vector on the column map and import the data.
    // Importer: source map is non-overlapping, target map is overlapping.
    // Call colMap.Import(domMap,Importer,Insert);
    // do the same with "Add" to make sure only one processor is responsible for the different GIDs!
#if 0 // TAW: needs to be fixed (does not compile for Scalar=complex)

    RCP<Vector> doCheck = VectorFactory::Build(input.getDomainMap(), true);
    doCheck->putScalar(1.0);
    RCP<Vector> coCheck = VectorFactory::Build(input.getColMap(), true);
    RCP<Import> imp = ImportFactory::Build(input.getDomainMap(), input.getColMap());
    coCheck->doImport(*doCheck, *imp, Xpetra::ADD);
    TEUCHOS_TEST_FOR_EXCEPTION(coCheck->normInf() != Teuchos::ScalarTraits< Scalar >::magnitude(1.0), Xpetra::Exceptions::RuntimeError, "Xpetra::MatrixUtils::SplitMatrix: error when distributing data.");

    doCheck->putScalar(-1.0);
    coCheck->putScalar(-1.0);

    Teuchos::ArrayRCP< Scalar > doCheckData = doCheck->getDataNonConst(0);
    for (size_t rrr = 0; rrr < input.getDomainMap()->getLocalNumElements(); rrr++) {
      // global row id to extract data from global monolithic matrix
      GlobalOrdinal id = input.getDomainMap()->getGlobalElement(rrr); // LID -> GID (column)

      // find the block id in the domain map extractor that belongs to the same global id
      size_t BlockId = domainMapExtractor->getMapIndexForGID(id);

      doCheckData[rrr] = Teuchos::as<Scalar>(BlockId);
    }

    coCheck->doImport(*doCheck, *imp, Xpetra::INSERT);

    Teuchos::ArrayRCP< Scalar > coCheckData = coCheck->getDataNonConst(0);
#endif
    // loop over all rows of the input matrix
    for (size_t rr = 0; rr < input.getRowMap()->getLocalNumElements(); rr++) {

      // global row id to extract data from the global monolithic matrix
      GlobalOrdinal growid = input.getRowMap()->getGlobalElement(rr); // LID -> GID (row)

      // Find the block id in the range map extractor that belongs to the same global row id.
      // We assume the global ids to be unique in the map extractor.
      // The MapExtractor objects may be constructed using Thyra mode. However, we use
      // global GIDs here (as we have a single input matrix). The only problematic thing could
      // be that the GIDs of the input matrix are inconsistent with the GIDs in the map extractor.
      // Note that for Thyra mode the GIDs in the map extractor are generated using the ordering
      // of the blocks.
      size_t rowBlockId = rangeMapExtractor->getMapIndexForGID(growid);

      // global row id used for subblocks to insert information
      GlobalOrdinal subblock_growid = growid; // for Xpetra-style numbering the global row ids do not change
      if(bThyraMode == true) {
        // find the local row id associated with growid in the corresponding subblock
        LocalOrdinal lrowid = rangeMapExtractor->getMap(rowBlockId)->getLocalElement(growid);
        // translate the local row id back to the global row id of the subblock
        subblock_growid = thyRangeMapExtractor->getMap(rowBlockId,true)->getGlobalElement(lrowid);
      }

      // Extract matrix entries from the input matrix.
      // We use global ids since we have to determine the corresponding
      // block column ids using the global ids anyway.
      Teuchos::ArrayView<const LocalOrdinal> indices;
      Teuchos::ArrayView<const Scalar>       vals;
      input.getLocalRowView(rr, indices, vals);

      std::vector<Teuchos::Array<GlobalOrdinal> > blockColIdx (numCols, Teuchos::Array<GlobalOrdinal>());
      std::vector<Teuchos::Array<Scalar> >        blockColVals(numCols, Teuchos::Array<Scalar>());

      for(size_t i=0; i<(size_t)indices.size(); i++) {
        // global column id to extract data from the full monolithic matrix
        GlobalOrdinal gcolid = input.getColMap()->getGlobalElement(indices[i]);

        size_t colBlockId = myColumnMapExtractor->getMapIndexForGID(gcolid); // old buggy thing
        //size_t colBlockId = Teuchos::as<size_t>(coCheckData[indices[i]]);

        // global column id used for subblocks to insert information
        GlobalOrdinal subblock_gcolid = gcolid; // for Xpetra-style numbering the global col ids do not change
        if(bThyraMode == true) {
          // find the local col id associated with gcolid in the corresponding subblock
          LocalOrdinal lcolid = myColumnMapExtractor->getMap(colBlockId)->getLocalElement(gcolid);
          // translate the local col id back to the global col id of the subblock
          subblock_gcolid = thyColMapExtractor->getMap(colBlockId,true)->getGlobalElement(lcolid);
        }
        blockColIdx [colBlockId].push_back(subblock_gcolid);
        blockColVals[colBlockId].push_back(vals[i]);
      }

      for (size_t c = 0; c < numCols; c++) {
        subMatrices[rowBlockId*numCols+c]->insertGlobalValues(subblock_growid,blockColIdx[c].view(0,blockColIdx[c].size()),blockColVals[c].view(0,blockColVals[c].size()));
      }

    }

    // call fillComplete on the subblocks and create the BlockedCrsMatrix
    RCP<BlockedCrsMatrix> bA = Teuchos::null;
    if(bThyraMode == true) {
      for (size_t r = 0; r < numRows; r++) {
        for (size_t c = 0; c < numCols; c++) {
          subMatrices[r*numCols+c]->fillComplete(thyDomainMapExtractor->getMap(c,true), thyRangeMapExtractor->getMap(r,true));
        }
      }
      bA = Teuchos::rcp(new BlockedCrsMatrix(thyRangeMapExtractor, thyDomainMapExtractor, 10 /*input.getRowMap()->getLocalMaxNumRowEntries()*/));
    } else {
      for (size_t r = 0; r < numRows; r++) {
        for (size_t c = 0; c < numCols; c++) {
          subMatrices[r*numCols+c]->fillComplete(domainMapExtractor->getMap(c), rangeMapExtractor->getMap(r));
        }
      }
      bA = Teuchos::rcp(new BlockedCrsMatrix(rangeMapExtractor, domainMapExtractor, 10 /*input.getRowMap()->getLocalMaxNumRowEntries()*/));
    }

    for (size_t r = 0; r < numRows; r++) {
      for (size_t c = 0; c < numCols; c++) {
        bA->setMatrix(r,c,subMatrices[r*numCols+c]);
      }
    }
    return bA;
  } // SplitMatrix

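  /*! @brief Check the main diagonal of \c Ac for entries whose magnitude is at or below \c threshold and,
      if \c repairZeroDiagonals is true, replace them (adding missing diagonal entries if necessary) by
      \c replacementValue. A summary is printed to \c fos.
  */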
  static void CheckRepairMainDiagonal(RCP<Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node> >& Ac,
                                      bool const &repairZeroDiagonals, Teuchos::FancyOStream &fos,
                                      const typename Teuchos::ScalarTraits<Scalar>::magnitudeType threshold = Teuchos::ScalarTraits<typename Teuchos::ScalarTraits<Scalar>::magnitudeType>::zero(),
                                      const Scalar replacementValue = Teuchos::ScalarTraits<Scalar>::one())
  {
    using TST = typename Teuchos::ScalarTraits<Scalar>;
    using Teuchos::rcp_dynamic_cast;

    GlobalOrdinal gZeroDiags;
    bool usedEfficientPath = false;

#ifdef HAVE_MUELU_TPETRA
    RCP<CrsMatrixWrap> crsWrapAc = rcp_dynamic_cast<CrsMatrixWrap>(Ac);
    RCP<TpetraCrsMatrix> tpCrsAc;
    if (!crsWrapAc.is_null())
      tpCrsAc = rcp_dynamic_cast<TpetraCrsMatrix>(crsWrapAc->getCrsMatrix());

    if (!tpCrsAc.is_null()) {
      auto tpCrsGraph = tpCrsAc->getTpetra_CrsMatrix()->getCrsGraph();
      size_t numRows = Ac->getRowMap()->getLocalNumElements();
      typedef typename Tpetra::CrsGraph<LocalOrdinal,GlobalOrdinal,Node>::offset_device_view_type offset_type;
      using range_type = Kokkos::RangePolicy<LocalOrdinal, typename Node::execution_space>;
      auto offsets = offset_type(Kokkos::ViewAllocateWithoutInitializing("offsets"), numRows);
      tpCrsGraph->getLocalDiagOffsets(offsets);

      const size_t STINV =
        Tpetra::Details::OrdinalTraits<typename offset_type::value_type>::invalid();

      if (repairZeroDiagonals) {
        // Make sure that the matrix has all its diagonal entries, so
        // we can fix them in-place.

        LO numMissingDiagonalEntries = 0;

        Kokkos::parallel_reduce("countMissingDiagonalEntries",
                                range_type(0, numRows),
                                KOKKOS_LAMBDA (const LO i, LO& missing) {
                                  missing += (offsets(i) == STINV);
                                }, numMissingDiagonalEntries);

        GlobalOrdinal gNumMissingDiagonalEntries;
        Teuchos::reduceAll(*(Ac->getRowMap()->getComm()), Teuchos::REDUCE_SUM, Teuchos::as<GlobalOrdinal>(numMissingDiagonalEntries),
                           Teuchos::outArg(gNumMissingDiagonalEntries));

        if (gNumMissingDiagonalEntries == 0) {
          // Matrix has all diagonal entries, now we fix them

          auto lclA = tpCrsAc->getTpetra_CrsMatrix()->getLocalMatrixDevice();

          using ATS      = Kokkos::ArithTraits<Scalar>;
          using impl_ATS = Kokkos::ArithTraits<typename ATS::val_type>;

          LO lZeroDiags = 0;
          typename ATS::val_type impl_replacementValue = replacementValue;

          Kokkos::parallel_reduce("fixSmallDiagonalEntries",
                                  range_type(0, numRows),
                                  KOKKOS_LAMBDA (const LO i, LO& fixed) {
                                    const auto offset = offsets(i);
                                    auto curRow = lclA.row(i);
                                    if (impl_ATS::magnitude(curRow.value(offset)) <= threshold) {
                                      curRow.value(offset) = impl_replacementValue;
                                      fixed += 1;
                                    }
                                  }, lZeroDiags);

          Teuchos::reduceAll(*(Ac->getRowMap()->getComm()), Teuchos::REDUCE_SUM, Teuchos::as<GlobalOrdinal>(lZeroDiags),
                             Teuchos::outArg(gZeroDiags));

          usedEfficientPath = true;
        }
      } else {
        // We only want to count up small diagonal entries, but not
        // fix them. So missing diagonal entries are not an issue.

        auto lclA = tpCrsAc->getTpetra_CrsMatrix()->getLocalMatrixDevice();

        using ATS      = Kokkos::ArithTraits<Scalar>;
        using impl_ATS = Kokkos::ArithTraits<typename ATS::val_type>;

        LO lZeroDiags = 0;

        Kokkos::parallel_reduce("detectSmallDiagonalEntries",
                                range_type(0, numRows),
                                KOKKOS_LAMBDA (const LO i, LO& small) {
                                  const auto offset = offsets(i);
                                  if (offset == STINV)
                                    small += 1;
                                  else {
                                    auto curRow = lclA.row(i);
                                    if (impl_ATS::magnitude(curRow.value(offset)) <= threshold) {
                                      small += 1;
                                    }
                                  }
                                }, lZeroDiags);

        Teuchos::reduceAll(*(Ac->getRowMap()->getComm()), Teuchos::REDUCE_SUM, Teuchos::as<GlobalOrdinal>(lZeroDiags),
                           Teuchos::outArg(gZeroDiags));

        usedEfficientPath = true;

      }
    }
#endif

    if (!usedEfficientPath) {
      RCP<const Map> rowMap = Ac->getRowMap();
      RCP<Vector> diagVec = VectorFactory::Build(rowMap);
      Ac->getLocalDiagCopy(*diagVec);

      LocalOrdinal lZeroDiags = 0;
      Teuchos::ArrayRCP< const Scalar > diagVal = diagVec->getData(0);

      for (size_t i = 0; i < rowMap->getLocalNumElements(); i++) {
        if (TST::magnitude(diagVal[i]) <= threshold) {
          lZeroDiags++;
        }
      }

      Teuchos::reduceAll(*(rowMap->getComm()), Teuchos::REDUCE_SUM, Teuchos::as<GlobalOrdinal>(lZeroDiags),
                         Teuchos::outArg(gZeroDiags));

      if (repairZeroDiagonals && gZeroDiags > 0) {
        /*
          TAW: If Ac has empty rows, put a 1 on the diagonal of Ac. Be aware that Ac might have empty rows AND
          columns. The columns might not exist in the column map at all. It would be nice to add the entries
          to the original matrix Ac. But then we would have to use insertLocalValues. However, we cannot add
          new entries for local column indices that do not exist in the column map of Ac (at least Epetra is
          not able to do this). With Tpetra it is also not possible to add new entries after the fillComplete
          call with a static map, since the column map already exists and the diagonal entries are completely
          missing. Here we build a diagonal matrix with zeros on the diagonal and ones on the diagonal for
          the rows where Ac has empty rows. We have to build a new matrix to be able to use insertGlobalValues.
          Then we add the original matrix Ac to our new block diagonal matrix and use the result as the new
          (non-singular) matrix Ac. This is very inefficient.

          If you know something better, please let me know.
        */
        RCP<Matrix> fixDiagMatrix = MatrixFactory::Build(rowMap, 1);
        Teuchos::Array<GlobalOrdinal> indout(1);
        Teuchos::Array<Scalar> valout(1);
        for (size_t r = 0; r < rowMap->getLocalNumElements(); r++) {
          if (TST::magnitude(diagVal[r]) <= threshold) {
            GlobalOrdinal grid = rowMap->getGlobalElement(r);
            indout[0] = grid;
            valout[0] = replacementValue;
            fixDiagMatrix->insertGlobalValues(grid, indout(), valout());
          }
        }
        {
          Teuchos::TimeMonitor m1(*Teuchos::TimeMonitor::getNewTimer("CheckRepairMainDiagonal: fillComplete1"));
          fixDiagMatrix->fillComplete(Ac->getDomainMap(),Ac->getRangeMap());
        }

        RCP<Matrix> newAc;
        Xpetra::MatrixMatrix<Scalar,LocalOrdinal,GlobalOrdinal,Node>::TwoMatrixAdd(*Ac, false, 1.0, *fixDiagMatrix, false, 1.0, newAc, fos);
        if (Ac->IsView("stridedMaps"))
          newAc->CreateView("stridedMaps", Ac);

        Ac = Teuchos::null;     // free singular matrix
        fixDiagMatrix = Teuchos::null;
        Ac = newAc;             // set fixed non-singular matrix

        // Call fillComplete with the optimized storage option set to true.
        // This is necessary for the new, faster Epetra MM kernels.
        Teuchos::RCP<Teuchos::ParameterList> p = Teuchos::rcp(new Teuchos::ParameterList());
        p->set("DoOptimizeStorage", true);
        {
          Teuchos::TimeMonitor m1(*Teuchos::TimeMonitor::getNewTimer("CheckRepairMainDiagonal: fillComplete2"));
          Ac->fillComplete(p);
        }
      } // end repair
    }

    // print some output
    fos << "CheckRepairMainDiagonal: " << (repairZeroDiagonals ? "repaired " : "found ")
        << gZeroDiags << " too small entries (threshold = " << threshold << ") on main diagonal of Ac." << std::endl;

#ifdef HAVE_XPETRA_DEBUG // only for debugging
    {
      // check whether Ac has been repaired
      RCP<const Map> rowMap = Ac->getRowMap();
      RCP<Vector> diagVec = VectorFactory::Build(rowMap);
      Teuchos::ArrayRCP< const Scalar > diagVal;
      Ac->getLocalDiagCopy(*diagVec);
      diagVal = diagVec->getData(0);
      for (size_t r = 0; r < Ac->getRowMap()->getLocalNumElements(); r++) {
        if (TST::magnitude(diagVal[r]) <= threshold) {
          fos << "Error: there are too small entries left on diagonal after repair..." << std::endl;
          break;
        }
      }
    }
#endif
  } // CheckRepairMainDiagonal


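  /*! @brief Boost small diagonal entries of \c A: for each PDE equation, diagonal entries whose magnitude
      falls below \c relativeThreshold times the largest diagonal magnitude of that equation are raised to
      that level via a matrix-matrix add.
  */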
  static void RelativeDiagonalBoost(RCP<Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node> >& A,
                                    const Teuchos::ArrayView<const double> & relativeThreshold, Teuchos::FancyOStream &fos)
  {
    Teuchos::TimeMonitor m1(*Teuchos::TimeMonitor::getNewTimer("RelativeDiagonalBoost"));

    TEUCHOS_TEST_FOR_EXCEPTION(A->GetFixedBlockSize() != relativeThreshold.size() && relativeThreshold.size() != 1, Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::RelativeDiagonalBoost: Either A->GetFixedBlockSize() != relativeThreshold.size() OR relativeThreshold.size() == 1");

    LocalOrdinal numPDEs = A->GetFixedBlockSize();
    typedef typename Teuchos::ScalarTraits<Scalar> TST;
    typedef typename TST::magnitudeType MT;
    Scalar zero = TST::zero();
    Scalar one  = TST::one();

    // Get the diagonal
    RCP<Vector> diag = VectorFactory::Build(A->getRowMap());
    A->getLocalDiagCopy(*diag);
    Teuchos::ArrayRCP< const Scalar > dataVal = diag->getData(0);
    size_t N = A->getRowMap()->getLocalNumElements();

    // Compute the diagonal maxes for each PDE
    std::vector<MT> l_diagMax(numPDEs), g_diagMax(numPDEs);
    for(size_t i=0; i<N; i++) {
      int pde = (int) (i % numPDEs);
      if((int)i < numPDEs)
        l_diagMax[pde] = TST::magnitude(dataVal[i]);
      else
        l_diagMax[pde] = std::max(l_diagMax[pde],TST::magnitude(dataVal[i]));
    }
    Teuchos::reduceAll(*A->getRowMap()->getComm(), Teuchos::REDUCE_MAX, numPDEs, l_diagMax.data(), g_diagMax.data());

    // Apply the diagonal maxes via matrix-matrix addition
    RCP<Matrix> boostMatrix = MatrixFactory::Build(A->getRowMap(),1);
    Teuchos::Array<GlobalOrdinal> index(1);
    Teuchos::Array<Scalar> value(1);
    for (size_t i = 0; i < N; i++) {
      GlobalOrdinal GRID = A->getRowMap()->getGlobalElement(i);
      int pde = (int) (i % numPDEs);
      index[0] = GRID;
      if (TST::magnitude(dataVal[i]) < relativeThreshold[pde] * g_diagMax[pde])
        value[0] = relativeThreshold[pde] * g_diagMax[pde] - TST::magnitude(dataVal[i]);
      else
        value[0] = zero;
      boostMatrix->insertGlobalValues(GRID,index(),value());
    }
    boostMatrix->fillComplete(A->getDomainMap(),A->getRangeMap());

    // FIXME: We really need an add that lets you "add into"
    RCP<Matrix> newA;
    Xpetra::MatrixMatrix<Scalar,LocalOrdinal,GlobalOrdinal,Node>::TwoMatrixAdd(*A,false,one, *boostMatrix,false,one,newA,fos);
    if (A->IsView("stridedMaps"))
      newA->CreateView("stridedMaps", A);
    A = newA;
    A->fillComplete();
  }


  // Extracting the block diagonal of a matrix
  static void extractBlockDiagonal(const Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>& A,
                                   Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>& diagonal) {
    const UnderlyingLib lib = A.getRowMap()->lib();

    if(lib == Xpetra::UseEpetra) {
#if defined(HAVE_XPETRA_EPETRA)
      throw(Xpetra::Exceptions::RuntimeError("Xpetra::MatrixUtils::extractBlockDiagonal not available for Epetra."));
#endif // HAVE_XPETRA_EPETRA
    } else if(lib == Xpetra::UseTpetra) {
#ifdef HAVE_XPETRA_TPETRA
      const Tpetra::CrsMatrix<SC,LO,GO,NO> & At = Xpetra::Helpers<SC,LO,GO,NO>::Op2TpetraCrs(A);
      Tpetra::MultiVector<SC,LO,GO,NO>     & Dt = Xpetra::toTpetra(diagonal);
      Tpetra::Details::extractBlockDiagonal(At,Dt);
#endif // HAVE_XPETRA_TPETRA
    }
  }

  // Inverse scaling by a block-diagonal matrix
  static void inverseScaleBlockDiagonal(Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>& blockDiagonal,
                                        bool doTranspose,
                                        Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>& toBeScaled) {

    const UnderlyingLib lib = blockDiagonal.getMap()->lib();

    if(lib == Xpetra::UseEpetra) {
#if defined(HAVE_XPETRA_EPETRA)
      throw(Xpetra::Exceptions::RuntimeError("Xpetra::MatrixUtils::inverseScaleBlockDiagonal not available for Epetra."));
#endif // HAVE_XPETRA_EPETRA
    } else if(lib == Xpetra::UseTpetra) {
#ifdef HAVE_XPETRA_TPETRA
      Tpetra::MultiVector<SC,LO,GO,NO> & Dt = Xpetra::toTpetra(blockDiagonal);
      Tpetra::MultiVector<SC,LO,GO,NO> & St = Xpetra::toTpetra(toBeScaled);
      Tpetra::Details::inverseScaleBlockDiagonal(Dt,doTranspose,St);
#endif // HAVE_XPETRA_TPETRA
    }
  }

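  /*! @brief Verify that the local part of the column map of \c A matches the local part of its row map
      (i.e. the column map is locally fitted to the row map); throws Exceptions::RuntimeError otherwise.
  */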
  static void checkLocalRowMapMatchesColMap(const Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>& A) {
    RCP<const Map> rowMap = A.getRowMap();
    RCP<const Map> colMap = A.getColMap();
    bool fail = false;
    if (rowMap->lib() == Xpetra::UseTpetra) {
      auto tpRowMap = Teuchos::rcp_dynamic_cast<const TpetraMap>(rowMap, true)->getTpetra_Map();
      auto tpColMap = Teuchos::rcp_dynamic_cast<const TpetraMap>(colMap, true)->getTpetra_Map();
      fail = ! tpColMap->isLocallyFitted(*tpRowMap);
    } else {
      RCP<const Teuchos::Comm<int> > comm = rowMap->getComm();
      LO numRows = Teuchos::as<LocalOrdinal>(rowMap->getLocalNumElements());

      for (LO rowLID = 0; rowLID < numRows; rowLID++) {
        GO rowGID = rowMap->getGlobalElement(rowLID);
        GO colGID = colMap->getGlobalElement(rowLID);
        if (rowGID != colGID) {
          fail = true;
          std::cerr << "On rank " << comm->getRank() << ", LID " << rowLID << " is GID " << rowGID << " in the row map, but GID " << colGID << " in the column map.\n";
        }
      }
    }
    TEUCHOS_TEST_FOR_EXCEPTION(fail, Xpetra::Exceptions::RuntimeError,
                               "Local parts of row and column map do not match!");
  }

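  /*! @brief Attach a "stridedMaps" view to \c matrix, built from its row and column maps and the given
      range/domain striding information.
  */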
  static void convertMatrixToStridedMaps(Teuchos::RCP<Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node> > matrix,
                                         std::vector<size_t>& rangeStridingInfo, std::vector<size_t>& domainStridingInfo)
  {
    RCP<const StridedMap> stridedRowMap = StridedMapFactory::Build(matrix->getRowMap(), rangeStridingInfo,  -1, 0);
    RCP<const StridedMap> stridedColMap = StridedMapFactory::Build(matrix->getColMap(), domainStridingInfo, -1, 0);

    if (matrix->IsView("stridedMaps") == true) matrix->RemoveView("stridedMaps");
    matrix->CreateView("stridedMaps", stridedRowMap, stridedColMap);
  }

};

} // end namespace Xpetra

#define XPETRA_MATRIXUTILS_SHORT

#endif // PACKAGES_XPETRA_SUP_MATRIX_UTILS_HPP_