Domi
Multi-dimensional, distributed data structures
Domi_MDVector.hpp
1 // @HEADER
2 // ***********************************************************************
3 //
4 // Domi: Multi-dimensional Distributed Linear Algebra Services
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under the terms of Contract DE-AC04-94AL85000 with Sandia
8 // Corporation, the U.S. Government retains certain rights in this
9 // software.
10 //
11 // Redistribution and use in source and binary forms, with or without
12 // modification, are permitted provided that the following conditions are
13 // met:
14 //
15 // 1. Redistributions of source code must retain the above copyright
16 // notice, this list of conditions and the following disclaimer.
17 //
18 // 2. Redistributions in binary form must reproduce the above copyright
19 // notice, this list of conditions and the following disclaimer in the
20 // documentation and/or other materials provided with the distribution.
21 //
22 // 3. Neither the name of the Corporation nor the names of the
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
25 //
26 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
27 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
28 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
29 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
30 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
31 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
32 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
33 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
34 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
35 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
36 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37 //
38 // Questions? Contact William F. Spotz (wfspotz@sandia.gov)
39 //
40 // ***********************************************************************
41 // @HEADER
42 
43 #ifndef DOMI_MDVECTOR_HPP
44 #define DOMI_MDVECTOR_HPP
45 
46 // #define DOMI_MDVECTOR_VERBOSE
47 
48 // Standard includes
49 #include <ctime>
50 
51 // Domi includes
52 #include "Domi_ConfigDefs.hpp"
53 #include "Domi_DefaultNode.hpp"
54 #include "Domi_MDMap.hpp"
55 #include "Domi_MDArrayRCP.hpp"
56 
57 // Teuchos includes
58 #include "Teuchos_DataAccess.hpp"
59 #include "Teuchos_Describable.hpp"
60 #include "Teuchos_ScalarTraitsDecl.hpp"
61 #include "Teuchos_Comm.hpp"
62 #include "Teuchos_CommHelpers.hpp"
63 
64 #ifdef HAVE_EPETRA
65 #include "Epetra_IntVector.h"
66 #include "Epetra_Vector.h"
67 #endif
68 
69 #ifdef HAVE_TPETRA
70 #include "Tpetra_Vector.hpp"
71 #endif
72 
73 
74 #ifdef OPEN_MPI
75 // We provide dummy definitions for MPI structs so that
76 // typeid(MPI_Datatype) and typeid(MPI_Request) will compile.
77 // Defining empty structs like this should be fine since only the guts
78 // of OpenMPI will ever see the real definitions. This is a silly game
79 // we are playing with the C++ type system here but it should work
80 // just fine.
81 struct ompi_datatype_t {};
82 //struct ompi_request_t {};
83 #endif
84 
85 using std::cout;
86 using std::endl;
87 
88 namespace Domi
89 {
90 
173 template< class Scalar,
174  class Node = DefaultNode::DefaultNodeType >
175 class MDVector : public Teuchos::Describable
176 {
177 public:
178 
181 
189  MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
190  bool zeroOut = true);
191 
213  MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
214  const dim_type leadingDim,
215  const dim_type trailingDim = 0,
216  bool zeroOut = true);
217 
225  MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
226  const MDArrayView< Scalar > & source);
227 
235  MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
236  const MDArrayRCP< Scalar > & source);
237 
250  MDVector(const MDVector< Scalar, Node > & source,
251  Teuchos::DataAccess access = Teuchos::View);
252 
266  MDVector(const Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
267  Teuchos::ParameterList & plist);
268 
281  MDVector(const Teuchos::RCP< const MDComm > mdComm,
282  Teuchos::ParameterList & plist);
283 
293  MDVector(const MDVector< Scalar, Node > & parent,
294  int axis,
295  dim_type index);
296 
312  MDVector(const MDVector< Scalar, Node > & parent,
313  int axis,
314  const Slice & slice,
315  int bndryPad = 0);
316 
330  MDVector(const MDVector< Scalar, Node > & parent,
331  const Teuchos::ArrayView< Slice > & slices,
332  const Teuchos::ArrayView< int > & bndryPad =
333  Teuchos::ArrayView< int >());
334 
340  MDVector< Scalar, Node > & operator=(const MDVector< Scalar, Node > & source);
341 
344  virtual ~MDVector();
345 
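  // Editorial usage sketch (not part of the original header): these
  // constructors are normally driven by an existing MDMap.  Assuming
  // "mdMap" is a valid Teuchos::RCP< const MDMap<> > built elsewhere
  // (hypothetical), and using the default Node type:
  //
  //   Domi::MDVector< double > u(mdMap);             // zero-filled
  //   Domi::MDVector< double > uv(mdMap, 3);         // adds a leading dimension of 3
  //   Domi::MDVector< double > w(u, Teuchos::Copy);  // deep copy of u
  //
  // Passing Teuchos::View (the default) to the copy constructor shares
  // the underlying data buffer instead of copying it.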
347 
350 
353  const Teuchos::RCP< const MDMap< Node > >
354  getMDMap() const;
355 
362  bool onSubcommunicator() const;
363 
370  Teuchos::RCP< const Teuchos::Comm< int > > getTeuchosComm() const;
371 
378  int numDims() const;
379 
389  int getCommDim(int axis) const;
390 
400  bool isPeriodic(int axis) const;
401 
411  int getCommIndex(int axis) const;
412 
429  int getLowerNeighbor(int axis) const;
430 
448  int getUpperNeighbor(int axis) const;
449 
458  dim_type getGlobalDim(int axis, bool withBndryPad=false) const;
459 
468  Slice getGlobalBounds(int axis, bool withBndryPadding=false) const;
469 
484  Slice getGlobalRankBounds(int axis, bool withBndryPad=false) const;
485 
494  dim_type getLocalDim(int axis, bool withCommPad=false) const;
495 
503  Teuchos::ArrayView< const Slice > getLocalBounds() const;
504 
518  Slice getLocalBounds(int axis, bool withPad=false) const;
519 
537  Slice getLocalInteriorBounds(int axis) const;
538 
549  bool hasPadding() const;
550 
559  int getLowerPadSize(int axis) const;
560 
569  int getUpperPadSize(int axis) const;
570 
580  int getCommPadSize(int axis) const;
581 
588  int getLowerBndryPad(int axis) const;
589 
596  int getUpperBndryPad(int axis) const;
597 
607  int getBndryPadSize(int axis) const;
608 
616  void setLowerPad(int axis, const Scalar value);
617 
625  void setUpperPad(int axis, const Scalar value);
626 
629  bool isReplicatedBoundary(int axis) const;
630 
633  Layout getLayout() const;
634 
644  bool isContiguous() const;
645 
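  // Editorial usage sketch (not part of the original header): the query
  // methods above distinguish the locally owned data from communication
  // and boundary padding.  A minimal illustration, assuming "v" is an
  // already constructed MDVector< double > (hypothetical):
  //
  //   for (int axis = 0; axis < v.numDims(); ++axis)
  //   {
  //     Domi::Slice owned  = v.getLocalBounds(axis);        // excludes padding
  //     Domi::Slice padded = v.getLocalBounds(axis, true);  // includes padding
  //     int lowerPad       = v.getLowerPadSize(axis);
  //   }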
647 
650 
651 #ifdef HAVE_EPETRA
652 
668  Teuchos::RCP< Epetra_IntVector > getEpetraIntVectorView() const;
669 
685  Teuchos::RCP< Epetra_Vector > getEpetraVectorView() const;
686 
707  Teuchos::RCP< Epetra_MultiVector > getEpetraMultiVectorView() const;
708 
718  Teuchos::RCP< Epetra_IntVector > getEpetraIntVectorCopy() const;
719 
729  Teuchos::RCP< Epetra_Vector > getEpetraVectorCopy() const;
730 
747  Teuchos::RCP< Epetra_MultiVector > getEpetraMultiVectorCopy() const;
748 
749 #endif
750 
751 #ifdef HAVE_TPETRA
752 
763  template< class LocalOrdinal,
764  class GlobalOrdinal = LocalOrdinal,
765  class Node2 = Node>
766  Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal, Node2 > >
767  getTpetraVectorView() const;
768 
786  template < class LocalOrdinal,
787  class GlobalOrdinal = LocalOrdinal,
788  class Node2 = Node >
789  Teuchos::RCP< Tpetra::MultiVector< Scalar,
790  LocalOrdinal,
791  GlobalOrdinal,
792  Node2 > >
793  getTpetraMultiVectorView() const;
794 
800  template< class LocalOrdinal,
801  class GlobalOrdinal = LocalOrdinal,
802  class Node2 = Node >
803  Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal, Node2 > >
804  getTpetraVectorCopy() const;
805 
818  template < class LocalOrdinal,
819  class GlobalOrdinal = LocalOrdinal,
820  class Node2 = Node >
821  Teuchos::RCP< Tpetra::MultiVector< Scalar,
822  LocalOrdinal,
823  GlobalOrdinal,
824  Node2 > >
825  getTpetraMultiVectorCopy() const;
826 
827 #endif
828 
830 
833 
839  MDArrayView< Scalar > getDataNonConst(bool includePadding = true);
840 
846  MDArrayView< const Scalar > getData(bool includePadding = true) const;
847 
854  MDArrayView< Scalar > getLowerPadDataNonConst(int axis);
861  MDArrayView< const Scalar > getLowerPadData(int axis) const;
868  MDArrayView< Scalar > getUpperPadDataNonConst(int axis);
875  MDArrayView< const Scalar > getUpperPadData(int axis) const;
877 
880 
885  Scalar
886  dot(const MDVector< Scalar, Node > & a) const;
887 
890  typename Teuchos::ScalarTraits< Scalar >::magnitudeType norm1() const;
891 
894  typename Teuchos::ScalarTraits< Scalar >::magnitudeType norm2() const;
895 
898  typename Teuchos::ScalarTraits< Scalar >::magnitudeType normInf() const;
899 
904  typename Teuchos::ScalarTraits< Scalar >::magnitudeType
905  normWeighted(const MDVector< Scalar, Node > & weights) const;
906 
909  Scalar meanValue() const;
910 
912 
915 
918  virtual std::string description() const;
919 
927  virtual void
928  describe(Teuchos::FancyOStream &out,
929  const Teuchos::EVerbosityLevel verbLevel =
930  Teuchos::Describable::verbLevel_default) const;
931 
933 
936 
944  void putScalar(const Scalar & value,
945  bool includePadding = true);
946 
949  void randomize();
950 
952 
955 
956  // /** \brief Sum values of a locally replicated multivector across all
957  // * processes.
958  // */
959  // void reduce();
960 
965  void updateCommPad();
966 
967  /** \brief Update the data in the communication padding along the
968  * specified axis
969  *
970  * \param axis [in] the axis along which communication will be
971  * performed
972  */
973  void updateCommPad(int axis);
974 
983  void startUpdateCommPad(int axis);
984 
993  void endUpdateCommPad(int axis);
994 
996 
999 
1011  MDVector< Scalar, Node > operator[](dim_type index) const;
1012 
1030  MDVector< Scalar, Node > operator[](Slice slice) const;
1031 
1033 
1036 
1044  void writeBinary(const std::string & filename,
1045  bool includeBndryPad = false) const;
1046 
1054  void readBinary(const std::string & filename,
1055  bool includeBndryPad = false);
1056 
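  // Editorial usage sketch (not part of the original header): writeBinary()
  // and readBinary() perform binary I/O of the distributed data.  A minimal
  // round trip, assuming "u" and "v" were built on MDMaps with the same
  // global shape (an assumption, not stated by the declarations alone):
  //
  //   u.writeBinary("field.bin");          // interior data only
  //   u.writeBinary("field_bp.bin", true); // also write the boundary padding
  //   v.readBinary("field.bin");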
1058 
1059 private:
1060 
1061  // The Teuchos communicator. Note that this is always a reference
1062  // to the communicator of the _mdMap, and is stored only for
1063  // convenience
1064  Teuchos::RCP< const Teuchos::Comm< int > > _teuchosComm;
1065 
1066  // The MDMap that describes the domain decomposition of this
1067  // MDVector
1068  Teuchos::RCP< const MDMap< Node > > _mdMap;
1069 
1070  // The MDArrayRCP that stores the data of this MDVector
1071  MDArrayRCP< Scalar > _mdArrayRcp;
1072 
1073  // The MDArrayView of the (possibly whole) sub-view into this
1074  // MDVector's MDArrayRCP
1075  MDArrayView< Scalar > _mdArrayView;
1076 
1077  // The operator[](int) and operator[](Slice) methods are applied to
1078  // a specific axis, namely this internally stored and updated next
1079  // axis
1080  int _nextAxis;
1081 
1083  // *** Communication Support *** //
1085 
1086 #ifdef HAVE_MPI
1087  // An array of MPI_Request objects for supporting non-blocking sends
1088  // and receives
1089  Teuchos::Array< MPI_Request > _requests;
1090 #endif
1091 
1092  // Define a struct for storing all the information needed for a
1093  // single message: a pointer to the buffer, a reference to the
1094  // message's MPI data type, the rank of the communication partner,
1095  // and the axis along which we are communicating
1096  struct MessageInfo
1097  {
1098  // Pointer to first element of data buffer
1099  void *buffer;
1100 #ifdef HAVE_MPI
1101  // MPI data type (strided vector)
1102  Teuchos::RCP< MPI_Datatype > datatype;
1103 #else
1104  // Teuchos ArrayView for periodic domains
1105  MDArrayView< Scalar > dataview;
1106 #endif
1107  // Processor rank for communication partner
1108  int proc;
1109  // Communication is along this axis
1110  int axis;
1111  };
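  // Editorial note (not part of the original header): a "strided
  // vector" MPI type is the natural way to describe one face of
  // communication padding without copying it into a contiguous buffer.
  // A hedged, illustrative sketch (not the actual initializeMessages()
  // logic); n0 x n1 is a C-ordered, padded local block and p the pad
  // width along axis 1:
  //
  //   MPI_Datatype face;
  //   MPI_Type_vector(n0,          // count: one block per row
  //                   p,           // blocklength: pad width
  //                   n1,          // stride: full (padded) row length
  //                   MPI_DOUBLE,  // assumes Scalar = double
  //                   &face);
  //   MPI_Type_commit(&face);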
1112 
1113  // An array of MessageInfo objects representing all active send
1114  // buffers. The outer array represents the axis and the inner
1115  // 2-Tuple represents the lower and upper boundaries.
1116  Teuchos::Array< Teuchos::Tuple< MessageInfo, 2 > > _sendMessages;
1117 
1118  // An array of MessageInfo objects representing all active receive
1119  // buffers. The outer array represents the axis and the inner
1120  // 2-Tuple represents the lower and upper boundaries.
1121  Teuchos::Array< Teuchos::Tuple< MessageInfo, 2 > > _recvMessages;
1122 
1123  // A private method to initialize the _sendMessages and
1124  // _recvMessages arrays.
1125  void initializeMessages();
1126 
1128  // *** Input/Output Support *** //
1130 
1131  // Define a struct for storing all of the information needed to
1132  // write or read the MDVector to a file: arrays that store the file
1133  // shape, the local buffer shape, the local data shape, the file
1134  // starting coordinates, and the data starting coordinates.
1135  struct FileInfo
1136  {
1137  Teuchos::Array< dim_type > fileShape;
1138  Teuchos::Array< dim_type > bufferShape;
1139  Teuchos::Array< dim_type > dataShape;
1140  Teuchos::Array< dim_type > fileStart;
1141  Teuchos::Array< dim_type > dataStart;
1142 #ifdef HAVE_MPI
1143  Teuchos::RCP< MPI_Datatype > filetype;
1144  Teuchos::RCP< MPI_Datatype > datatype;
1145 #endif
1146  };
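  // Editorial note (not part of the original header): the filetype and
  // datatype members above are naturally built with MPI subarray types.
  // A hedged sketch of the idea (not the actual computeFileInfo() code),
  // for a 2-D decomposition with Scalar = double:
  //
  //   int sizes[2]    = {64, 64};  // global file shape
  //   int subsizes[2] = {32, 64};  // this rank's block
  //   int starts[2]   = {32,  0};  // block origin within the file
  //   MPI_Datatype filetype;
  //   MPI_Type_create_subarray(2, sizes, subsizes, starts,
  //                            MPI_ORDER_C, MPI_DOUBLE, &filetype);
  //   MPI_Type_commit(&filetype);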
1147 
1148  // FileInfo struct for an input or output file that does not store
1149  // boundary padding. This is mutable because it does not get set
1150  // until the first time the MDVector is read or written to a file,
1151  // and the writeBinary() method should logically be const.
1152  mutable Teuchos::RCP< FileInfo > _fileInfo;
1153 
1154  // FileInfo struct for an input or output file that does store
1155  // boundary padding. This is mutable because it does not get set
1156  // until the first time the MDVector is read or written to a file,
1157  // and the writeBinary() method should logically be const.
1158  mutable Teuchos::RCP< FileInfo > _fileInfoWithBndry;
1159 
1160  // Compute either the _fileInfo or _fileInfoWithBndry data members.
1161  // This private method gets called by the writeBinary() and
1162  // readBinary() methods, and sets the requested fileInfo object,
1163  // unless it has already been set. This is const so that it can be
1164  // called by writeBinary(), but its whole purpose is to change
1165  // mutable data members.
1166  Teuchos::RCP< FileInfo > & computeFileInfo(bool includeBndryPad) const;
1167 
1168 };
1169 
1171 // *** Implementations *** //
1173 
1175 
1176 template< class Scalar,
1177  class Node >
1179 MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
1180  bool zeroOut) :
1181  _teuchosComm(mdMap->getTeuchosComm()),
1182  _mdMap(mdMap),
1183  _mdArrayRcp(),
1184  _mdArrayView(),
1185  _nextAxis(0),
1186 #ifdef HAVE_MPI
1187  _requests(),
1188 #endif
1189  _sendMessages(),
1190  _recvMessages()
1191 {
1192  setObjectLabel("Domi::MDVector");
1193 
1194  // Obtain the array of dimensions
1195  int numDims = _mdMap->numDims();
1196  Teuchos::Array< dim_type > dims(numDims);
1197  for (int axis = 0; axis < numDims; ++axis)
1198  dims[axis] = _mdMap->getLocalDim(axis,true);
1199 
1200  // Resize the MDArrayRCP and set the MDArrayView
1201  _mdArrayRcp.resize(dims);
1202  _mdArrayView = _mdArrayRcp();
1203 }
1204 
1206 
1207 template< class Scalar,
1208  class Node >
1210 MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
1211  const dim_type leadingDim,
1212  const dim_type trailingDim,
1213  bool zeroOut) :
1214  _teuchosComm(mdMap->getTeuchosComm()),
1215  _mdMap(mdMap->getAugmentedMDMap(leadingDim, trailingDim)),
1216  _mdArrayRcp(),
1217  _mdArrayView(),
1218  _nextAxis(0),
1219 #ifdef HAVE_MPI
1220  _requests(),
1221 #endif
1222  _sendMessages(),
1223  _recvMessages()
1224 {
1225  setObjectLabel("Domi::MDVector");
1226 
1227  // Obtain the array of dimensions
1228  int numDims = _mdMap->numDims();
1229  Teuchos::Array< dim_type > dims(numDims);
1230  for (int axis = 0; axis < numDims; ++axis)
1231  dims[axis] = _mdMap->getLocalDim(axis,true);
1232 
1233  // Resize the MDArrayRCP and set the MDArrayView
1234  _mdArrayRcp.resize(dims);
1235  _mdArrayView = _mdArrayRcp();
1236 }
1237 
1239 
1240 template< class Scalar,
1241  class Node >
1243 MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
1244  const MDArrayView< Scalar > & source) :
1245  _mdMap(mdMap),
1246  _mdArrayRcp(source),
1247  _mdArrayView(_mdArrayRcp()),
1248  _nextAxis(0),
1249 #ifdef HAVE_MPI
1250  _requests(),
1251 #endif
1252  _sendMessages(),
1253  _recvMessages()
1254 {
1255  setObjectLabel("Domi::MDVector");
1256  int numDims = _mdMap->numDims();
1257  TEUCHOS_TEST_FOR_EXCEPTION(
1258  numDims != _mdArrayRcp.numDims(),
1260  "MDMap and source array do not have the same number of dimensions");
1261 
1262  for (int axis = 0; axis < numDims; ++axis)
1263  {
1264  TEUCHOS_TEST_FOR_EXCEPTION(
1265  _mdMap->getLocalDim(axis) != _mdArrayRcp.dimension(axis),
1267  "Axis " << axis << ": MDMap dimension = " << _mdMap->getLocalDim(axis)
1268  << ", MDArray dimension = " << _mdArrayRcp.dimension(axis));
1269  }
1270 }
1271 
1273 
1274 template< class Scalar,
1275  class Node >
1277 MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
1278  const MDArrayRCP< Scalar > & mdArrayRcp) :
1279  _mdMap(mdMap),
1280  _mdArrayRcp(mdArrayRcp),
1281  _mdArrayView(_mdArrayRcp()),
1282  _nextAxis(0),
1283 #ifdef HAVE_MPI
1284  _requests(),
1285 #endif
1286  _sendMessages(),
1287  _recvMessages()
1288 {
1289 #ifdef DOMI_MDVECTOR_VERBOSE
1290  std::cout << "_mdArrayRcp = " << _mdArrayRcp << std::endl;
1291  std::cout << "_mdArrayView.getRawPtr() = " << _mdArrayView.getRawPtr()
1292  << " (constructor)" << std::endl;
1293  std::cout << "_mdArrayView = " << _mdArrayView << std::endl;
1294 #endif
1295  setObjectLabel("Domi::MDVector");
1296  int numDims = _mdMap->numDims();
1297  TEUCHOS_TEST_FOR_EXCEPTION(
1298  numDims != _mdArrayRcp.numDims(),
1300  "MDMap and source array do not have the same number of dimensions");
1301 
1302  for (int axis = 0; axis < numDims; ++axis)
1303  {
1304  TEUCHOS_TEST_FOR_EXCEPTION(
1305  _mdMap->getLocalDim(axis) != _mdArrayRcp.dimension(axis),
1307  "Axis " << axis << ": MDMap dimension = " << _mdMap->getLocalDim(axis)
1308  << ", MDArray dimension = " << _mdArrayRcp.dimension(axis));
1309  }
1310 }
1311 
1313 
1314 template< class Scalar,
1315  class Node >
1318  Teuchos::DataAccess access) :
1319  _teuchosComm(source.getMDMap()->getTeuchosComm()),
1320  _mdMap(source.getMDMap()),
1321  _mdArrayRcp(source._mdArrayRcp),
1322  _mdArrayView(source._mdArrayView),
1323  _nextAxis(0),
1324 #ifdef HAVE_MPI
1325  _requests(),
1326 #endif
1327  _sendMessages(),
1328  _recvMessages()
1329 {
1330  setObjectLabel("Domi::MDVector");
1331 
1332  if (access == Teuchos::Copy)
1333  {
1334 #ifdef DOMI_MDVECTOR_VERBOSE
1335  std::cout << "Inside MDVector copy constructor with copy access" << std::endl;
1336 #endif
1337  // Obtain the array of dimensions
1338  int numDims = _mdMap->numDims();
1339  Teuchos::Array< dim_type > dims(numDims);
1340  for (int axis = 0; axis < numDims; ++axis)
1341  dims[axis] = _mdMap->getLocalDim(axis,true);
1342 
1343  // Reset the MDArrayRCP and set the MDArrayView
1344  _mdArrayRcp = MDArrayRCP< Scalar >(dims, 0, source.getLayout());
1345  _mdArrayView = _mdArrayRcp();
1346 
1347  // Copy the source data to the new MDVector
1348  typedef typename MDArrayView< Scalar >::iterator iterator;
1349  typedef typename MDArrayView< Scalar >::const_iterator const_iterator;
1350  const_iterator src = source.getData().begin();
1351  for (iterator trg = _mdArrayView.begin(); trg != _mdArrayView.end(); ++trg)
1352  {
1353  *trg = *src;
1354  ++src;
1355  }
1356  }
1357 #ifdef DOMI_MDVECTOR_VERBOSE
1358  else
1359  {
1360  std::cout << "Inside MDVector copy constructor with view access"
1361  << std::endl;
1362  }
1363 #endif
1364 }
1365 
1367 
1368 template< class Scalar,
1369  class Node >
1371 MDVector(const Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
1372  Teuchos::ParameterList & plist) :
1373  _teuchosComm(teuchosComm),
1374  _mdMap(),
1375  _mdArrayRcp(),
1376  _mdArrayView(),
1377  _nextAxis(0),
1378 #ifdef HAVE_MPI
1379  _requests(),
1380 #endif
1381  _sendMessages(),
1382  _recvMessages()
1383 {
1384  setObjectLabel("Domi::MDVector");
1385 
1386  // Compute the MDComm and MDMap
1387  MDMap< Node > * myMdMap = new MDMap< Node >(teuchosComm, plist);
1388  dim_type leadingDim = plist.get("leading dimension" , 0);
1389  dim_type trailingDim = plist.get("trailing dimension", 0);
1390  if (leadingDim + trailingDim > 0)
1391  {
1392  _mdMap = myMdMap->getAugmentedMDMap(leadingDim, trailingDim);
1393  delete myMdMap;
1394  }
1395  else
1396  _mdMap = Teuchos::rcp(myMdMap);
1397 
1398  // Obtain the array of dimensions
1399  int numDims = _mdMap->numDims();
1400  Teuchos::Array< dim_type > dims(numDims);
1401  for (int axis = 0; axis < numDims; ++axis)
1402  dims[axis] = _mdMap->getLocalDim(axis,true);
1403 
1404  // Resize the MDArrayRCP and set the MDArrayView
1405  _mdArrayRcp.resize(dims);
1406  _mdArrayView = _mdArrayRcp();
1407 }
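// Editorial usage sketch (not part of the original source): this
// constructor pulls "leading dimension" and "trailing dimension" out of
// the ParameterList itself and forwards the remaining entries to the
// MDMap constructor.  A hedged illustration; the "dimensions" and
// "comm dimensions" parameter names are assumptions about the MDMap
// parameters, not documented in this file:
//
//   Teuchos::ParameterList plist;
//   plist.set("dimensions", Teuchos::Array< Domi::dim_type >(2, 100));
//   plist.set("comm dimensions", Teuchos::Array< int >(2, -1));
//   plist.set("leading dimension", 3);
//   Domi::MDVector< double > v(teuchosComm, plist);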
1408 
1410 
1411 template< class Scalar,
1412  class Node >
1414 MDVector(const Teuchos::RCP< const MDComm > mdComm,
1415  Teuchos::ParameterList & plist) :
1416  _teuchosComm(mdComm->getTeuchosComm()),
1417  _mdMap(),
1418  _mdArrayRcp(),
1419  _mdArrayView(),
1420  _nextAxis(0),
1421 #ifdef HAVE_MPI
1422  _requests(),
1423 #endif
1424  _sendMessages(),
1425  _recvMessages()
1426 {
1427  setObjectLabel("Domi::MDVector");
1428 
1429  // Compute the MDMap
1430  MDMap< Node > * myMdMap = new MDMap< Node >(mdComm, plist);
1431  dim_type leadingDim = plist.get("leading dimension" , 0);
1432  dim_type trailingDim = plist.get("trailing dimension", 0);
1433  if (leadingDim + trailingDim > 0)
1434  {
1435  _mdMap = myMdMap->getAugmentedMDMap(leadingDim, trailingDim);
1436  delete myMdMap;
1437  }
1438  else
1439  _mdMap = Teuchos::rcp(myMdMap);
1440 
1441  // Obtain the array of dimensions
1442  int numDims = _mdMap->numDims();
1443  Teuchos::Array< dim_type > dims(numDims);
1444  for (int axis = 0; axis < numDims; ++axis)
1445  dims[axis] = _mdMap->getLocalDim(axis,true);
1446 
1447  // Resize the MDArrayRCP and set the MDArrayView
1448  _mdArrayRcp.resize(dims);
1449  _mdArrayView = _mdArrayRcp();
1450 }
1451 
1453 
1454 template< class Scalar,
1455  class Node >
1458  int axis,
1459  dim_type globalIndex) :
1460  _teuchosComm(parent._teuchosComm),
1461  _mdMap(),
1462  _mdArrayRcp(parent._mdArrayRcp),
1463  _mdArrayView(parent._mdArrayView),
1464  _nextAxis(0),
1465 #ifdef HAVE_MPI
1466  _requests(),
1467 #endif
1468  _sendMessages(),
1469  _recvMessages()
1470 {
1471  setObjectLabel("Domi::MDVector");
1472 
1473  // Obtain the parent MDMap
1474  Teuchos::RCP< const MDMap< Node > > parentMdMap = parent.getMDMap();
1475 
1476  // Obtain the new, sliced MDMap
1477  _mdMap = Teuchos::rcp(new MDMap< Node >(*parentMdMap,
1478  axis,
1479  globalIndex));
1480 
1481  // Check that we are on the new sub-communicator
1482  if (_mdMap->onSubcommunicator())
1483  {
1484  // Convert the index from global to local. We start by
1485  // determining the starting global index on this processor along
1486  // the given axis, ignoring the boundary padding. We then
1487  // subtract the lower padding, whether it is communication padding
1488  // or boundary padding.
1489  dim_type origin = parentMdMap->getGlobalRankBounds(axis,false).start() -
1490  parentMdMap->getLowerPadSize(axis);
1491 
1492  // The local index along the given axis is the global axis minus
1493  // the starting index. Since we are on the subcommunicator, this
1494  // should be valid.
1495  dim_type localIndex = globalIndex - origin;
1496 
1497  // Obtain the new MDArrayView using the local index
1498  MDArrayView< Scalar > newView(_mdArrayView, axis, localIndex);
1499  _mdArrayView = newView;
1500  }
1501  else
1502  {
1503  // We are not on the sub-communicator, so clear out the data
1504  // buffer and view
1505  _mdArrayRcp.clear();
1506  _mdArrayView = MDArrayView< Scalar >();
1507  }
1508 }
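// Editorial worked example (not from the original source): consider a
// 2-D parent whose axis 0 has global dimension 12, split across two
// processes with 6 points each and no padding.  Slicing at axis 0,
// globalIndex 8 keeps only the processes whose rank bounds contain
// index 8; there, origin = 6, so the view above is taken at local
// index 8 - 6 = 2, while the other processes fall off the
// sub-communicator and clear their data views.  In user code:
//
//   Domi::MDVector< double > line(parent, /*axis=*/0, /*globalIndex=*/8);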
1509 
1511 
1512 template< class Scalar,
1513  class Node >
1516  int axis,
1517  const Slice & slice,
1518  int bndryPad) :
1519  _teuchosComm(),
1520  _mdMap(),
1521  _mdArrayRcp(parent._mdArrayRcp),
1522  _mdArrayView(parent._mdArrayView),
1523  _nextAxis(0),
1524 #ifdef HAVE_MPI
1525  _requests(),
1526 #endif
1527  _sendMessages(),
1528  _recvMessages()
1529 {
1530 #ifdef DOMI_MDVECTOR_VERBOSE
1531  std::cout << "slice axis " << axis << std::endl;
1532  std::cout << " _mdArrayRcp @ " << _mdArrayRcp.getRawPtr() << std::endl;
1533  std::cout << " _mdArrayView @ " << _mdArrayView.getRawPtr() << std::endl;
1534 #endif
1535 
1536  setObjectLabel("Domi::MDVector");
1537 
1538  // Obtain the parent MDMap
1539  Teuchos::RCP< const MDMap< Node > > parentMdMap = parent.getMDMap();
1540 
1541  // Obtain the new, sliced MDMap
1542  _mdMap = Teuchos::rcp(new MDMap< Node >(*parentMdMap,
1543  axis,
1544  slice,
1545  bndryPad));
1546  _teuchosComm = _mdMap->getTeuchosComm();
1547 
1548  // Check that we are on the new sub-communicator
1549  if (_mdMap->onSubcommunicator())
1550  {
1551  // Get the concrete bounds
1552  Slice bounds = slice.bounds(parentMdMap->getGlobalDim(axis,true));
1553 
1554  // Convert the given Slice start index from global to local. We
1555  // start by determining the starting global index on this
1556  // processor along the given axis, ignoring the boundary padding.
1557  // We then subtract the lower padding, whether it is communication
1558  // padding or boundary padding.
1559  dim_type origin = parentMdMap->getGlobalRankBounds(axis,false).start() -
1560  parentMdMap->getLowerPadSize(axis);
1561 
1562  // Determine the starting index of our local slice. This will be
1563  // the start of the given slice minus the starting global index on
1564  // this processor minus the given boundary pad. If this is less
1565  // than zero, then the start is on a lower processor, so set the
1566  // local start to zero.
1567  dim_type start = std::max(0, bounds.start() - origin - bndryPad);
1568 
1569  // Now get the stop index of the local slice. This will be the
1570  // stop of the given slice minus the starting global index on this
1571  // processor plus the given boundary pad. If this is larger than
1572  // the local dimension, then set the local stop to the local
1573  // dimension.
1574  dim_type stop = std::min(bounds.stop() - origin + bndryPad,
1575  parentMdMap->getLocalDim(axis,true));
1576 
1577  // Obtain the new MDArrayView using the local slice
1578  MDArrayView< Scalar > newView(_mdArrayView, axis, Slice(start,stop));
1579  _mdArrayView = newView;
1580  }
1581  else
1582  {
1583  // We are not on the sub-communicator, so clear out the data
1584  // buffer and view
1585  _mdArrayRcp.clear();
1586  _mdArrayView = MDArrayView< Scalar >();
1587  }
1588 #ifdef DOMI_MDVECTOR_VERBOSE
1589  std::cout << " _mdArrayView @ " << _mdArrayView.getRawPtr() << std::endl;
1590  std::cout << " offset = " << int(_mdArrayView.getRawPtr() -
1591  _mdArrayRcp.getRawPtr()) << std::endl;
1592 #endif
1593 }
1594 
1596 
1597 template< class Scalar,
1598  class Node >
1601  const Teuchos::ArrayView< Slice > & slices,
1602  const Teuchos::ArrayView< int > & bndryPad)
1603 {
1604  setObjectLabel("Domi::MDVector");
1605 
1606  // Temporarily store the number of dimensions
1607  int numDims = parent.numDims();
1608 
1609  // Sanity check on dimensions
1610  TEUCHOS_TEST_FOR_EXCEPTION(
1611  (slices.size() != numDims),
1613  "number of slices = " << slices.size() << " != parent MDVector number of "
1614  "dimensions = " << numDims);
1615 
1616  // Apply the single-Slice constructor to each axis in succession
1617  MDVector< Scalar, Node > tempMDVector1(parent);
1618  for (int axis = 0; axis < numDims; ++axis)
1619  {
1620  int bndryPadding = (axis < bndryPad.size()) ? bndryPad[axis] : 0;
1621  MDVector< Scalar, Node > tempMDVector2(tempMDVector1,
1622  axis,
1623  slices[axis],
1624  bndryPadding);
1625  tempMDVector1 = tempMDVector2;
1626  }
1627  *this = tempMDVector1;
1628 }
1629 
1631 
1632 template< class Scalar,
1633  class Node >
1637 {
1638  _teuchosComm = source._teuchosComm;
1639  _mdMap = source._mdMap;
1640  _mdArrayRcp = source._mdArrayRcp;
1641  _mdArrayView = source._mdArrayView;
1642  _nextAxis = source._nextAxis;
1643 #ifdef HAVE_MPI
1644  _requests = source._requests;
1645 #endif
1646  _sendMessages = source._sendMessages;
1647  _recvMessages = source._recvMessages;
1648  return *this;
1649 }
1650 
1652 
1653 template< class Scalar,
1654  class Node >
1656 {
1657 }
1658 
1660 
1661 template< class Scalar,
1662  class Node >
1663 const Teuchos::RCP< const MDMap< Node > >
1665 getMDMap() const
1666 {
1667  return _mdMap;
1668 }
1669 
1671 
1672 template< class Scalar,
1673  class Node >
1674 bool
1677 {
1678  return _mdMap->onSubcommunicator();
1679 }
1680 
1682 
1683 template< class Scalar,
1684  class Node >
1685 Teuchos::RCP< const Teuchos::Comm< int > >
1688 {
1689  return _mdMap->getTeuchosComm();
1690 }
1691 
1693 
1694 template< class Scalar,
1695  class Node >
1696 int
1698 numDims() const
1699 {
1700  return _mdMap->numDims();
1701 }
1702 
1704 
1705 template< class Scalar,
1706  class Node >
1707 int
1709 getCommDim(int axis) const
1710 {
1711  return _mdMap->getCommDim(axis);
1712 }
1713 
1715 
1716 template< class Scalar,
1717  class Node >
1718 bool
1720 isPeriodic(int axis) const
1721 {
1722  return _mdMap->isPeriodic(axis);
1723 }
1724 
1726 
1727 template< class Scalar,
1728  class Node >
1729 int
1731 getCommIndex(int axis) const
1732 {
1733  return _mdMap->getCommIndex(axis);
1734 }
1735 
1737 
1738 template< class Scalar,
1739  class Node >
1740 int
1742 getLowerNeighbor(int axis) const
1743 {
1744  return _mdMap->getLowerNeighbor(axis);
1745 }
1746 
1748 
1749 template< class Scalar,
1750  class Node >
1751 int
1753 getUpperNeighbor(int axis) const
1754 {
1755  return _mdMap->getUpperNeighbor(axis);
1756 }
1757 
1759 
1760 template< class Scalar,
1761  class Node >
1762 dim_type
1764 getGlobalDim(int axis, bool withBndryPad) const
1765 {
1766  return _mdMap->getGlobalDim(axis, withBndryPad);
1767 }
1768 
1770 
1771 template< class Scalar,
1772  class Node >
1773 Slice
1775 getGlobalBounds(int axis, bool withBndryPad) const
1776 {
1777  return _mdMap->getGlobalBounds(axis, withBndryPad);
1778 }
1779 
1781 
1782 template< class Scalar,
1783  class Node >
1784 Slice
1786 getGlobalRankBounds(int axis, bool withBndryPad) const
1787 {
1788  return _mdMap->getGlobalRankBounds(axis, withBndryPad);
1789 }
1790 
1792 
1793 template< class Scalar,
1794  class Node >
1795 dim_type
1797 getLocalDim(int axis, bool withCommPad) const
1798 {
1799  return _mdMap->getLocalDim(axis, withCommPad);
1800 }
1801 
1803 
1804 template< class Scalar,
1805  class Node >
1806 Teuchos::ArrayView< const Slice >
1809 {
1810  return _mdMap->getLocalBounds();
1811 }
1812 
1814 
1815 template< class Scalar,
1816  class Node >
1817 Slice
1819 getLocalBounds(int axis, bool withCommPad) const
1820 {
1821  return _mdMap->getLocalBounds(axis, withCommPad);
1822 }
1823 
1825 
1826 template< class Scalar,
1827  class Node >
1828 Slice
1830 getLocalInteriorBounds(int axis) const
1831 {
1832  return _mdMap->getLocalInteriorBounds(axis);
1833 }
1834 
1836 
1837 template< class Scalar,
1838  class Node >
1839 bool
1841 hasPadding() const
1842 {
1843  return _mdMap->hasPadding();
1844 }
1845 
1847 
1848 template< class Scalar,
1849  class Node >
1850 int
1852 getLowerPadSize(int axis) const
1853 {
1854  return _mdMap->getLowerPadSize(axis);
1855 }
1856 
1858 
1859 template< class Scalar,
1860  class Node >
1861 int
1863 getUpperPadSize(int axis) const
1864 {
1865  return _mdMap->getUpperPadSize(axis);
1866 }
1867 
1869 
1870 template< class Scalar,
1871  class Node >
1872 int
1874 getCommPadSize(int axis) const
1875 {
1876  return _mdMap->getCommPadSize(axis);
1877 }
1878 
1880 
1881 template< class Scalar,
1882  class Node >
1883 int
1885 getLowerBndryPad(int axis) const
1886 {
1887  return _mdMap->getLowerBndryPad(axis);
1888 }
1889 
1891 
1892 template< class Scalar,
1893  class Node >
1894 int
1896 getUpperBndryPad(int axis) const
1897 {
1898  return _mdMap->getUpperBndryPad(axis);
1899 }
1900 
1902 
1903 template< class Scalar,
1904  class Node >
1905 int
1907 getBndryPadSize(int axis) const
1908 {
1909  return _mdMap->getBndryPadSize(axis);
1910 }
1911 
1913 
1914 template< class Scalar,
1915  class Node >
1916 void
1918 setLowerPad(int axis,
1919  const Scalar value)
1920 {
1921  MDArrayView< Scalar > lowerPad = getLowerPadDataNonConst(axis);
1922  lowerPad.assign(value);
1923 }
1924 
1926 
1927 template< class Scalar,
1928  class Node >
1929 void
1931 setUpperPad(int axis,
1932  const Scalar value)
1933 {
1934  MDArrayView< Scalar > upperPad = getUpperPadDataNonConst(axis);
1935  upperPad.assign(value);
1936 }
1937 
1939 
1940 template< class Scalar,
1941  class Node >
1942 bool
1944 isReplicatedBoundary(int axis) const
1945 {
1946  return _mdMap->isReplicatedBoundary(axis);
1947 }
1948 
1950 
1951 template< class Scalar,
1952  class Node >
1953 Layout
1955 getLayout() const
1956 {
1957  return _mdMap->getLayout();
1958 }
1959 
1961 
1962 template< class Scalar,
1963  class Node >
1964 bool
1967 {
1968  return _mdMap->isContiguous();
1969 }
1970 
1972 
1973 #ifdef HAVE_EPETRA
1974 
1975 // The getEpetraIntVectorView() method only makes sense for Scalar =
1976 // int, because Epetra_IntVectors store data buffers of type int only.
1977 // There is no convenient way to specialize just one (or a small
1978 // handful of) methods; instead you have to specialize the whole
1979 // class. So we allow getEpetraIntVectorView() to compile for any
1980 // Scalar, but we will throw an exception if Scalar is not int.
1981 
1982 template< class Scalar,
1983  class Node >
1984 Teuchos::RCP< Epetra_IntVector >
1986 getEpetraIntVectorView() const
1987 {
1988  // Throw an exception if Scalar is not int
1989  TEUCHOS_TEST_FOR_EXCEPTION(
1990  typeid(Scalar) != typeid(int),
1991  TypeError,
1992  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
1993  "Epetra_IntVector requires scalar type 'int'");
1994 
1995  // Throw an exception if this MDVector's MDMap is not contiguous
1996  TEUCHOS_TEST_FOR_EXCEPTION(
1997  !isContiguous(),
1999  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2000  "a slice of a parent MDVector.");
2001 
2002  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2003  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2004 
2005  // Return the result. We are changing a Scalar* to an int*, which
2006  // looks sketchy, but we have thrown an exception if Scalar is not
2007  // int, so everything is kosher.
2008  void * buffer = (void*) _mdArrayView.getRawPtr();
2009  return Teuchos::rcp(new Epetra_IntVector(View,
2010  *epetraMap,
2011  (int*) buffer));
2012 }
2013 
2015 
2016 // The getEpetraVectorView() method only makes sense for Scalar =
2017 // double, because Epetra_Vectors store data buffers of type double
2018 // only. There is no convenient way to specialize just one (or a
2019 // small handful of) methods; instead you have to specialize the
2020 // whole class. So we allow getEpetraVectorView() to compile for any
2021 // Scalar, but we will throw an exception if Scalar is not double.
2022 
2023 template< class Scalar,
2024  class Node >
2025 Teuchos::RCP< Epetra_Vector >
2026 MDVector< Scalar, Node >::
2027 getEpetraVectorView() const
2028 {
2029  // Throw an exception if Scalar is not double
2030  TEUCHOS_TEST_FOR_EXCEPTION(
2031  typeid(Scalar) != typeid(double),
2032  TypeError,
2033  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
2034  "Epetra_Vector requires scalar type 'double'");
2035 
2036  // Throw an exception if this MDVector's MDMap is not contiguous
2037  TEUCHOS_TEST_FOR_EXCEPTION(
2038  !isContiguous(),
2039  MDMapNoncontiguousError,
2040  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2041  "a slice of a parent MDVector.");
2042 
2043  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2044  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2045 
2046  // Return the result. We are changing a Scalar* to a double*, which
2047  // looks sketchy, but we have thrown an exception if Scalar is not
2048  // double, so everything is kosher.
2049  void * buffer = (void*) _mdArrayView.getRawPtr();
2050  return Teuchos::rcp(new Epetra_Vector(View,
2051  *epetraMap,
2052  (double*) buffer));
2053 }
2054 
2056 
2057 // The getEpetraMultiVectorView() method only makes sense for Scalar =
2058 // double, because Epetra_MultiVectors store data buffers of type
2059 // double only. There is no convenient way to specialize just one (or
2060 // a small handful of) methods; instead you have to specialize the
2061 // whole class. So we allow getEpetraMultiVectorView() to compile for any
2062 // Scalar, but we will throw an exception if Scalar is not double.
2063 
2064 template< class Scalar,
2065  class Node >
2066 Teuchos::RCP< Epetra_MultiVector >
2067 MDVector< Scalar, Node >::
2068 getEpetraMultiVectorView() const
2069 {
2070  // Throw an exception if Scalar is not double
2071  TEUCHOS_TEST_FOR_EXCEPTION(
2072  typeid(Scalar) != typeid(double),
2073  TypeError,
2074  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
2075  "Epetra_Vector requires scalar type 'double'");
2076 
2077  // Determine the vector axis and related info
2078  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2079  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2080  int commDim = getCommDim(vectorAxis);
2081  int numVectors = getGlobalDim(vectorAxis);
2082 
2083  // Obtain the appropriate MDMap and check that it is contiguous
2084  Teuchos::RCP< const MDMap< Node > > newMdMap;
2085  if (padding == 0 && commDim == 1)
2086  newMdMap = Teuchos::rcp(new MDMap< Node >(*_mdMap, vectorAxis, 0));
2087  else
2088  {
2089  newMdMap = _mdMap;
2090  numVectors = 1;
2091  }
2092  TEUCHOS_TEST_FOR_EXCEPTION(
2093  ! newMdMap->isContiguous(),
2094  MDMapNoncontiguousError,
2095  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2096  "a slice of a parent MDVector.");
2097 
2098  // Get the stride between vectors. The MDMap strides are private,
2099  // but we know the new MDMap is contiguous, so we can calculate it
2100  // as the product of the new MDMap dimensions (including padding)
2101  size_type stride = 1;
2102  for (int axis = 0; axis < newMdMap->numDims(); ++axis)
2103  stride *= newMdMap->getLocalDim(axis,true);
2104  TEUCHOS_TEST_FOR_EXCEPTION(
2105  stride*numVectors > Teuchos::OrdinalTraits<int>::max(),
2106  MapOrdinalError,
2107  "Buffer size " << stride*numVectors << " is too large for Epetra int "
2108  "ordinals");
2109  int lda = (int)stride;
2110 
2111  // Get the Epetra_Map that is equivalent to newMdMap
2112  Teuchos::RCP< const Epetra_Map > epetraMap = newMdMap->getEpetraMap(true);
2113 
2114  // Return the result. We are changing a Scalar* to a double*, which
2115  // looks sketchy, but we have thrown an exception if Scalar is not
2116  // double, so everything is kosher.
2117  void * buffer = (void*) _mdArrayView.getRawPtr();
2118  return Teuchos::rcp(new Epetra_MultiVector(View,
2119  *epetraMap,
2120  (double*) buffer,
2121  lda,
2122  numVectors));
2123 }
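// Editorial usage sketch (not part of the original source): the Epetra
// view methods above alias the MDVector's buffer, so they require the
// matching scalar type (double here, int for the IntVector case) and a
// contiguous, unsliced MDMap; the *Copy() methods below carry neither
// restriction.  Assuming "v" is an MDVector< double > (hypothetical):
//
//   Teuchos::RCP< Epetra_Vector >      ev  = v.getEpetraVectorView();
//   Teuchos::RCP< Epetra_MultiVector > emv = v.getEpetraMultiVectorView();
//   ev->PutScalar(1.0);   // visible through v as well, since it is a view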
2124 
2126 
2127 template< class Scalar,
2128  class Node >
2129 Teuchos::RCP< Epetra_IntVector >
2130 MDVector< Scalar, Node >::
2131 getEpetraIntVectorCopy() const
2132 {
2133  typedef typename MDArrayView< const Scalar >::iterator iterator;
2134 
2135  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2136  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2137 
2138  // Construct the result
2139  Teuchos::RCP< Epetra_IntVector > result =
2140  Teuchos::rcp(new Epetra_IntVector(*epetraMap));
2141 
2142  // Copy the MDVector data buffer to the Epetra_IntVector, even if the
2143  // MDVector is non-contiguous
2144  int ii = 0;
2145  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2146  result->operator[](ii++) = (int) *it;
2147 
2148  // Return the result
2149  return result;
2150 }
2151 
2153 
2154 template< class Scalar,
2155  class Node >
2156 Teuchos::RCP< Epetra_Vector >
2157 MDVector< Scalar, Node >::
2158 getEpetraVectorCopy() const
2159 {
2160  typedef typename MDArrayView< const Scalar >::iterator iterator;
2161 
2162  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2163  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2164 
2165  // Construct the result
2166  Teuchos::RCP< Epetra_Vector > result =
2167  Teuchos::rcp(new Epetra_Vector(*epetraMap));
2168 
2169  // Copy the MDVector data buffer to the Epetra_Vector, even if the
2170  // MDVector is non-contiguous
2171  int ii = 0;
2172  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2173  result->operator[](ii++) = (double) *it;
2174 
2175  // Return the result
2176  return result;
2177 }
2178 
2180 
2181 template< class Scalar,
2182  class Node >
2183 Teuchos::RCP< Epetra_MultiVector >
2184 MDVector< Scalar, Node >::
2185 getEpetraMultiVectorCopy() const
2186 {
2187  typedef typename MDArrayView< Scalar >::iterator iterator;
2188  typedef typename MDArrayView< const Scalar >::iterator citerator;
2189 
2190  // Determine the vector axis and related info
2191  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2192  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2193  int commDim = getCommDim(vectorAxis);
2194  int numVectors = getGlobalDim(vectorAxis);
2195 
2196  // Obtain the appropriate MDMap
2197  Teuchos::RCP< const MDMap< Node > > newMdMap;
2198  if (padding == 0 && commDim == 1)
2199  newMdMap = Teuchos::rcp(new MDMap< Node >(*_mdMap, vectorAxis, 0));
2200  else
2201  {
2202  newMdMap = _mdMap;
2203  numVectors = 1;
2204  }
2205 
2206  // Get the Epetra_Map that is equivalent to newMdMap
2207  Teuchos::RCP< const Epetra_Map > epetraMap = newMdMap->getEpetraMap(true);
2208 
2209  // Construct the result
2210  Teuchos::RCP< Epetra_MultiVector > result =
2211  Teuchos::rcp(new Epetra_MultiVector(*epetraMap, numVectors));
2212 
2213  // Copy the MDVector data to the Epetra_MultiVector, even if the
2214  // MDVector is non-contiguous
2215  int ii = 0;
2216  if (numVectors == 1)
2217  {
2218  for (citerator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2219  result->operator[](0)[ii++] = (double) *it;
2220  }
2221  else
2222  {
2223  for (int iv = 0; iv < numVectors; ++iv)
2224  {
2225  ii = 0;
2226  MDArrayView< Scalar > subArray(_mdArrayView, vectorAxis, iv);
2227  for (iterator it = subArray.begin(); it != subArray.end(); ++it)
2228  result->operator[](iv)[ii++] = (double) *it;
2229  }
2230  }
2231 
2232  // Return the result
2233  return result;
2234 }
2235 
2236 #endif
2237 
2239 
2240 #ifdef HAVE_TPETRA
2241 
2242 template< class Scalar, class Node >
2243 template< class LocalOrdinal,
2244  class GlobalOrdinal,
2245  class Node2 >
2246 Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal, Node2 > >
2247 MDVector< Scalar, Node >::
2248 getTpetraVectorView() const
2249 {
2250  // Throw an exception if this MDVector's MDMap is not contiguous
2251  TEUCHOS_TEST_FOR_EXCEPTION(
2252  !isContiguous(),
2253  MDMapNoncontiguousError,
2254  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2255  "a slice of a parent MDVector.");
2256 
2257  // Get the Tpetra::Map that is equivalent to this MDVector's MDMap
2258  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2259  GlobalOrdinal,
2260  Node2 > > tpetraMap =
2261  _mdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal, Node2 >(true);
2262 
2263  // Return the result
2264  return Teuchos::rcp(new Tpetra::Vector< Scalar,
2265  LocalOrdinal,
2266  GlobalOrdinal,
2267  Node2 >(tpetraMap,
2268  _mdArrayView.arrayView()));
2269 }
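// Editorial usage sketch (not part of the original source): the Tpetra
// accessors are member templates, so the ordinal types are chosen at
// the call site.  Assuming "v" is an MDVector< double > and int /
// long long ordinals (assumptions):
//
//   typedef Tpetra::Vector< double, int, long long > TV;
//   Teuchos::RCP< TV > tv = v.getTpetraVectorView< int, long long >();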
2270 
2272 
2273 template< class Scalar, class Node >
2274 template< class LocalOrdinal,
2275  class GlobalOrdinal,
2276  class Node2 >
2277 Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal, Node2 > >
2278 MDVector< Scalar, Node >::
2279 getTpetraVectorCopy() const
2280 {
2281  typedef typename MDArrayView< const Scalar >::iterator iterator;
2282 
2283  // Get the Tpetra::Map that is equivalent to this MDVector's MDMap
2284  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2285  GlobalOrdinal,
2286  Node2 > > tpetraMap =
2287  _mdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal, Node2 >(true);
2288 
2289  // Construct the result
2290  Teuchos::RCP< Tpetra::Vector< Scalar,
2291  LocalOrdinal,
2292  GlobalOrdinal,
2293  Node2 > > result =
2294  Teuchos::rcp(new Tpetra::Vector< Scalar,
2295  LocalOrdinal,
2296  GlobalOrdinal,
2297  Node2 >(tpetraMap) );
2298 
2299  // Copy the MDVector data to the Tpetra::Vector, even if the
2300  // MDVector is non-contiguous
2301  Teuchos::ArrayRCP< Scalar > tpetraVectorArray = result->getDataNonConst();
2302  int ii = 0;
2303  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2304  tpetraVectorArray[ii++] = *it;
2305 
2306  // Return the result
2307  return result;
2308 }
2309 
2311 
2312 template< class Scalar, class Node >
2313 template < class LocalOrdinal,
2314  class GlobalOrdinal,
2315  class Node2 >
2316 Teuchos::RCP< Tpetra::MultiVector< Scalar,
2317  LocalOrdinal,
2318  GlobalOrdinal,
2319  Node2 > >
2320 MDVector< Scalar, Node >::
2321 getTpetraMultiVectorView() const
2322 {
2323  // Determine the vector axis and related info
2324  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2325  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2326  int commDim = getCommDim(vectorAxis);
2327  size_t numVectors = getGlobalDim(vectorAxis);
2328 
2329  // Obtain the appropriate MDMap and check that it is contiguous
2330  Teuchos::RCP< const MDMap< Node > > newMdMap;
2331  if (padding == 0 && commDim == 1)
2332  newMdMap = Teuchos::rcp(new MDMap< Node >(*_mdMap, vectorAxis, 0));
2333  else
2334  {
2335  newMdMap = _mdMap;
2336  numVectors = 1;
2337  }
2338  TEUCHOS_TEST_FOR_EXCEPTION(
2339  ! newMdMap->isContiguous(),
2340  MDMapNoncontiguousError,
2341  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2342  "a slice of a parent MDVector.");
2343 
2344  // Get the stride between vectors. The MDMap strides are private,
2345  // but we know the new MDMap is contiguous, so we can calculate it
2346  // as the product of the new MDMap dimensions (including padding)
2347  size_type stride = 1;
2348  for (int axis = 0; axis < newMdMap->numDims(); ++axis)
2349  stride *= newMdMap->getLocalDim(axis,true);
2350  TEUCHOS_TEST_FOR_EXCEPTION(
2351  stride*numVectors > Teuchos::OrdinalTraits<GlobalOrdinal>::max(),
2352  MapOrdinalError,
2353  "Buffer size " << stride*numVectors << " is too large for Tpetra "
2354  "GlobalOrdinal = " << typeid(GlobalOrdinal).name() );
2355  size_t lda = (size_t)stride;
2356 
2357  // Get the Tpetra::Map that is equivalent to newMdMap
2358  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2359  GlobalOrdinal,
2360  Node2> > tpetraMap =
2361  newMdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal, Node2 >(true);
2362 
2363  // Return the result
2364  return Teuchos::rcp(new Tpetra::MultiVector< Scalar,
2365  LocalOrdinal,
2366  GlobalOrdinal,
2367  Node2 >(tpetraMap,
2368  _mdArrayView.arrayView(),
2369  lda,
2370  numVectors));
2371 }
2372 
2374 
2375 template< class Scalar, class Node >
2376 template < class LocalOrdinal,
2377  class GlobalOrdinal,
2378  class Node2 >
2379 Teuchos::RCP< Tpetra::MultiVector< Scalar,
2380  LocalOrdinal,
2381  GlobalOrdinal,
2382  Node2 > >
2383 MDVector< Scalar, Node >::
2384 getTpetraMultiVectorCopy() const
2385 {
2386  typedef typename MDArrayView< Scalar >::iterator iterator;
2387  typedef typename MDArrayView< const Scalar >::iterator citerator;
2388 
2389  // Determine the vector axis and related info
2390  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2391  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2392  int commDim = getCommDim(vectorAxis);
2393  int numVectors = getGlobalDim(vectorAxis);
2394 
2395  // Obtain the appropriate MDMap
2396  Teuchos::RCP< const MDMap< Node > > newMdMap;
2397  if (padding == 0 && commDim == 1)
2398  newMdMap = Teuchos::rcp(new MDMap< Node >(*_mdMap, vectorAxis, 0));
2399  else
2400  {
2401  newMdMap = _mdMap;
2402  numVectors = 1;
2403  }
2404 
2405  // Get the Tpetra::Map that is equivalent to newMdMap
2406  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2407  GlobalOrdinal,
2408  Node2 > > tpetraMap =
2409  newMdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal, Node2 >(true);
2410 
2411  // Construct the result
2412  Teuchos::RCP< Tpetra::MultiVector< Scalar,
2413  LocalOrdinal,
2414  GlobalOrdinal,
2415  Node2 > > result =
2416  Teuchos::rcp(new Tpetra::MultiVector< Scalar,
2417  LocalOrdinal,
2418  GlobalOrdinal,
2419  Node2 >(tpetraMap, numVectors));
2420 
2421  // Copy the MDVector to the Tpetra::MultiVector, even if the
2422  // MDVector is non-contiguous
2423  int ii = 0;
2424  if (numVectors == 1)
2425  {
2426  Teuchos::ArrayRCP< Scalar > tpetraVectorArray = result->getDataNonConst(0);
2427  for (citerator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2428  tpetraVectorArray[ii++] = *it;
2429  }
2430  else
2431  {
2432  for (int iv = 0; iv < numVectors; ++iv)
2433  {
2434  ii = 0;
2435  Teuchos::ArrayRCP< Scalar > tpetraVectorArray =
2436  result->getDataNonConst(iv);
2437  MDArrayView< Scalar > subArray(_mdArrayView, vectorAxis, iv);
2438  for (iterator it = subArray.begin(); it != subArray.end(); ++it)
2439  tpetraVectorArray[ii++] = *it;
2440  }
2441  }
2442 
2443  // Return the result
2444  return result;
2445 }
2446 
2447 #endif
2448 
2450 
2451 template< class Scalar,
2452  class Node >
2455 getDataNonConst(bool includePadding)
2456 {
2457 #ifdef DOMI_MDVECTOR_VERBOSE
2458  std::cout << "_mdArrayView.getRawPtr() = " << _mdArrayView.getRawPtr()
2459  << std::endl;
2460  std::cout << "_mdArrayView = " << _mdArrayView << std::endl;
2461 #endif
2462  if (includePadding)
2463  return _mdArrayView;
2464  MDArrayView< Scalar > newArray(_mdArrayView);
2465  for (int axis = 0; axis < numDims(); ++axis)
2466  {
2467  int lo = getLowerPadSize(axis);
2468  int hi = getLocalDim(axis,true) - getUpperPadSize(axis);
2469  newArray = MDArrayView< Scalar >(newArray, axis, Slice(lo,hi));
2470  }
2471  return newArray;
2472 }
2473 
2475 
2476 template< class Scalar,
2477  class Node >
2480 getData(bool includePadding) const
2481 {
2482  if (includePadding)
2483  return _mdArrayView.getConst();
2484  MDArrayView< Scalar > newArray(_mdArrayView);
2485  for (int axis = 0; axis < numDims(); ++axis)
2486  {
2487  int lo = getLowerPadSize(axis);
2488  int hi = getLocalDim(axis,true) - getUpperPadSize(axis);
2489  newArray = MDArrayView< Scalar >(newArray, axis, Slice(lo,hi));
2490  }
2491  return newArray.getConst();
2492 }
2493 
2495 
2496 template< class Scalar,
2497  class Node >
2501 {
2502  MDArrayView< Scalar > newArrayView(_mdArrayView,
2503  axis,
2504  Slice(getLowerPadSize(axis)));
2505  return newArrayView;
2506 }
2507 
2509 
2510 template< class Scalar,
2511  class Node >
2514 getLowerPadData(int axis) const
2515 {
2516  MDArrayView< const Scalar > newArrayView(_mdArrayView.getConst(),
2517  axis,
2518  Slice(getLowerPadSize(axis)));
2519  return newArrayView;
2520 }
2521 
2523 
2524 template< class Scalar,
2525  class Node >
2529 {
2530  dim_type n = getLocalDim(axis,true);
2531  int pad = getUpperPadSize(axis);
2532  Slice slice;
2533  if (pad) slice = Slice(n-pad,n);
2534  else slice = Slice(n-1,n-1);
2535  MDArrayView< Scalar > newArrayView(_mdArrayView,
2536  axis,
2537  slice);
2538  return newArrayView;
2539 }
2540 
2542 
2543 template< class Scalar,
2544  class Node >
2547 getUpperPadData(int axis) const
2548 {
2549  dim_type n = getLocalDim(axis,true);
2550  int pad = getUpperPadSize(axis);
2551  Slice slice;
2552  if (pad) slice = Slice(n-pad,n);
2553  else slice = Slice(n-1,n-1);
2554  MDArrayView< const Scalar > newArrayView(_mdArrayView.getConst(),
2555  axis,
2556  slice);
2557  return newArrayView;
2558 }
2559 
2561 
2562 template< class Scalar,
2563  class Node >
2564 Scalar
2567 {
2568  typedef typename MDArrayView< const Scalar >::iterator iterator;
2569 
2570  TEUCHOS_TEST_FOR_EXCEPTION(
2571  ! _mdMap->isCompatible(*(a._mdMap)),
2572  MDMapError,
2573  "MDMap of calling MDVector and argument 'a' are incompatible");
2574 
2575  MDArrayView< const Scalar > aView = a.getData();
2576  Scalar local_dot = 0;
2577  iterator a_it = aView.begin();
2578  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end();
2579  ++it, ++a_it)
2580  local_dot += *it * *a_it;
2581  Scalar global_dot = 0;
2582  Teuchos::reduceAll(*_teuchosComm,
2583  Teuchos::REDUCE_SUM,
2584  1,
2585  &local_dot,
2586  &global_dot);
2587  return global_dot;
2588 }
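// Editorial usage sketch (not part of the original source): dot() and
// the norms below are collective operations; every process on the
// MDVector's communicator must call them.  For example:
//
//   double d  = x.dot(y);    // requires compatible MDMaps
//   double n2 = x.norm2();   // equivalent to sqrt(x.dot(x))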
2589 
2591 
2592 template< class Scalar,
2593  class Node >
2594 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2596 norm1() const
2597 {
2598  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2599  typedef typename MDArrayView< const Scalar >::iterator iterator;
2600 
2601  mag local_norm1 = 0;
2602  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2603  local_norm1 += std::abs(*it);
2604  mag global_norm1 = 0;
2605  Teuchos::reduceAll(*_teuchosComm,
2606  Teuchos::REDUCE_SUM,
2607  1,
2608  &local_norm1,
2609  &global_norm1);
2610  return global_norm1;
2611 }
2612 
2614 
2615 template< class Scalar,
2616  class Node >
2617 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2619 norm2() const
2620 {
2621  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2622  mag norm2 = dot(*this);
2623  return Teuchos::ScalarTraits<mag>::squareroot(norm2);
2624 }
2625 
2627 
2628 template< class Scalar,
2629  class Node >
2630 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2632 normInf() const
2633 {
2634  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2635  typedef typename MDArrayView< const Scalar >::iterator iterator;
2636 
2637  mag local_normInf = 0;
2638  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2639  local_normInf = std::max(local_normInf, std::abs(*it));
2640  mag global_normInf = 0;
2641  Teuchos::reduceAll(*_teuchosComm,
2642  Teuchos::REDUCE_MAX,
2643  1,
2644  &local_normInf,
2645  &global_normInf);
2646  return global_normInf;
2647 }
2648 
2650 
2651 template< class Scalar,
2652  class Node >
2653 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2656 {
2657  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2658  typedef typename MDArrayView< const Scalar >::iterator iterator;
2659 
2660  TEUCHOS_TEST_FOR_EXCEPTION(
2661  ! _mdMap->isCompatible(*(weights._mdMap)),
2662  MDMapError,
2663  "MDMap of calling MDVector and argument 'weights' are incompatible");
2664 
2665  MDArrayView< const Scalar > wView = weights.getData();
2666  mag local_wNorm = 0;
2667  iterator w_it = wView.begin();
2668  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end();
2669  ++it, ++w_it)
2670  local_wNorm += *it * *it * *w_it;
2671  mag global_wNorm = 0;
2672  Teuchos::reduceAll(*_teuchosComm,
2673  Teuchos::REDUCE_SUM,
2674  1,
2675  &local_wNorm,
2676  &global_wNorm);
2677  Teuchos::Array< dim_type > dimensions(numDims());
2678  for (int i = 0; i < numDims(); ++i)
2679  dimensions[i] = _mdMap->getGlobalDim(i);
2680  size_type n = computeSize(dimensions);
2681  if (n == 0) return 0;
2682  return Teuchos::ScalarTraits<mag>::squareroot(global_wNorm / n);
2683 }
2684 
2686 
2687 template< class Scalar,
2688  class Node >
2689 Scalar
2691 meanValue() const
2692 {
2693  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2694  typedef typename MDArrayView< const Scalar >::iterator iterator;
2695 
2696  mag local_sum = 0;
2697  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2698  local_sum += *it;
2699  mag global_sum = 0;
2700  Teuchos::reduceAll(*_teuchosComm,
2701  Teuchos::REDUCE_SUM,
2702  1,
2703  &local_sum,
2704  &global_sum);
2705  Teuchos::Array< dim_type > dimensions(numDims());
2706  for (int i = 0; i < numDims(); ++i)
2707  dimensions[i] = _mdMap->getGlobalDim(i);
2708  size_type n = computeSize(dimensions);
2709  if (n == 0) return 0;
2710  return global_sum / n;
2711 }
2712 
2714 
2715 template< class Scalar,
2716  class Node >
2717 std::string
2720 {
2721  using Teuchos::TypeNameTraits;
2722 
2723  Teuchos::Array< dim_type > dims(numDims());
2724  for (int axis = 0; axis < numDims(); ++axis)
2725  dims[axis] = getGlobalDim(axis, true);
2726 
2727  std::ostringstream oss;
2728  oss << "\"Domi::MDVector\": {"
2729  << "Template parameters: {"
2730  << "Scalar: " << TypeNameTraits<Scalar>::name()
2731  << ", Node: " << TypeNameTraits< Node >::name()
2732  << "}";
2733  if (this->getObjectLabel() != "")
2734  oss << ", Label: \"" << this->getObjectLabel () << "\", ";
2735  oss << "Global dimensions: " << dims << " }";
2736  return oss.str();
2737 }
2738 
2740 
2741 template< class Scalar,
2742  class Node >
2743 void
2744 MDVector< Scalar, Node >::
2745 describe(Teuchos::FancyOStream &out,
2746  const Teuchos::EVerbosityLevel verbLevel) const
2747 {
2748  using std::endl;
2749  using std::setw;
2750  using Teuchos::Comm;
2751  using Teuchos::RCP;
2752  using Teuchos::TypeNameTraits;
2753  using Teuchos::VERB_DEFAULT;
2754  using Teuchos::VERB_NONE;
2755  using Teuchos::VERB_LOW;
2756  using Teuchos::VERB_MEDIUM;
2757  using Teuchos::VERB_HIGH;
2758  using Teuchos::VERB_EXTREME;
2759 
2760  const Teuchos::EVerbosityLevel vl =
2761  (verbLevel == VERB_DEFAULT) ? VERB_LOW : verbLevel;
2762 
2763  const MDMap< Node > & mdMap = *(getMDMap());
2764  Teuchos::RCP< const Teuchos::Comm< int > > comm = mdMap.getTeuchosComm();
2765  const int myImageID = comm->getRank();
2766  const int numImages = comm->getSize();
2767  Teuchos::OSTab tab0(out);
2768 
2769  if (vl != VERB_NONE)
2770  {
2771  if (myImageID == 0)
2772  {
2773  out << "\"Domi::MDVector\":" << endl;
2774  }
2775  Teuchos::OSTab tab1(out);// applies to all processes
2776  if (myImageID == 0)
2777  {
2778  out << "Template parameters:";
2779  {
2780  Teuchos::OSTab tab2(out);
2781  out << "Scalar: " << TypeNameTraits<Scalar>::name() << endl
2782  << "Node: " << TypeNameTraits< Node >::name() << endl;
2783  }
2784  out << endl;
2785  if (this->getObjectLabel() != "")
2786  {
2787  out << "Label: \"" << getObjectLabel() << "\"" << endl;
2788  }
2789  Teuchos::Array< dim_type > globalDims(numDims());
2790  for (int axis = 0; axis < numDims(); ++axis)
2791  globalDims[axis] = getGlobalDim(axis, true);
2792  out << "Global dimensions: " << globalDims << endl;
2793  }
2794  for (int imageCtr = 0; imageCtr < numImages; ++imageCtr)
2795  {
2796  if (myImageID == imageCtr)
2797  {
2798  if (vl != VERB_LOW)
2799  {
2800  // VERB_MEDIUM and higher prints the local dimensions
2801  out << "Process: " << myImageID << endl;
2802  Teuchos::OSTab tab2(out);
2803  Teuchos::Array< dim_type > localDims(numDims());
2804  for (int axis = 0; axis < numDims(); ++axis)
2805  localDims[axis] = getLocalDim(axis, true);
2806  out << "Local dimensions: " << localDims << endl;
2807  }
2808  std::flush(out); // give output time to complete
2809  }
2810  comm->barrier(); // give output time to complete
2811  comm->barrier();
2812  comm->barrier();
2813  }
2814  }
2815 }
2816 
2818 
2819 template< class Scalar,
2820  class Node >
2821 void
2822 MDVector< Scalar, Node >::
2823 putScalar(const Scalar & value,
2824  bool includePadding)
2825 {
2826  typedef typename MDArrayView< Scalar >::iterator iterator;
2827 
2828  MDArrayView< Scalar > newArray = getDataNonConst(includePadding);
2829  for (iterator it = newArray.begin(); it != newArray.end(); ++it)
2830  *it = value;
2831 }
2832 
2834 
2835 template< class Scalar,
2836  class Node >
2837 void
2838 MDVector< Scalar, Node >::
2839 randomize()
2840 {
2841  typedef typename MDArrayView< Scalar >::iterator iterator;
2842 
2843  Teuchos::ScalarTraits< Scalar >::seedrandom(time(NULL));
2844  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2845  *it = Teuchos::ScalarTraits< Scalar >::random();
2846 }
2847 
2849 
2850 template< class Scalar,
2851  class Node >
2852 void
2853 MDVector< Scalar, Node >::
2854 updateCommPad()
2855 {
2856  for (int axis = 0; axis < numDims(); ++axis)
2857  {
2858  updateCommPad(axis);
2859  }
2860 }
2861 
2863 
2864 template< class Scalar,
2865  class Node >
2866 void
2867 MDVector< Scalar, Node >::
2868 updateCommPad(int axis)
2869 {
2870  startUpdateCommPad(axis);
2871  endUpdateCommPad(axis);
2872 }
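// Example (illustrative sketch): a typical halo-exchange pattern, assuming
// 'mdMap' is a valid Teuchos::RCP< const MDMap< Node > > constructed with
// communication padding along each axis:
//
//   MDVector< double > u(mdMap);
//   u.putScalar(1.0);
//   u.updateCommPad();               // blocking update of every axis
//
//   // Or overlap communication with computation, one axis at a time:
//   for (int axis = 0; axis < u.numDims(); ++axis)
//   {
//     u.startUpdateCommPad(axis);    // post the non-blocking messages
//     // ... work that does not touch the pad regions of this axis ...
//     u.endUpdateCommPad(axis);      // wait for the messages to complete
//   }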
2873 
2875 
2876 template< class Scalar,
2877  class Node >
2878 void
2879 MDVector< Scalar, Node >::
2880 startUpdateCommPad(int axis)
2881 {
2882  // #define DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2883 
2884  // Initialize the _sendMessages and _recvMessages members on the
2885  // first call to startUpdateCommPad(int).
2886  if (_sendMessages.empty()) initializeMessages();
2887 
2888 #ifdef HAVE_MPI
2889  int rank = _teuchosComm->getRank();
2890  int numProc = _teuchosComm->getSize();
2891  int tag;
2892  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
2893  // const Teuchos::MpiComm< int >. We downcast, extract and
2894  // dereference so that we can get access to the MPI_Comm used to
2895  // construct it.
2896  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
2897  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
2898  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
2899  *(mpiComm->getRawMpiComm());
2900 
2901  // Post the non-blocking sends
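  // Message tags encode the sending rank, the receiving rank and the
  // boundary (lower or upper), so that a send posted on one side of an
  // axis matches the receive posted by the neighboring process on the
  // opposite side.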
2902  MPI_Request request;
2903  for (int boundary = 0; boundary < 2; ++boundary)
2904  {
2905  MessageInfo message = _sendMessages[axis][boundary];
2906  if (message.proc >= 0)
2907  {
2908  tag = 2 * (rank * numProc + message.proc) + boundary;
2909 
2910 #ifdef DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2911  cout << rank << ": post send for axis " << axis << ", boundary "
2912  << boundary << ", buffer = " << message.buffer << ", proc = "
2913  << message.proc << ", tag = " << tag << endl;
2914 #endif
2915 
2916  if (MPI_Isend(message.buffer,
2917  1,
2918  *(message.datatype),
2919  message.proc,
2920  tag,
2921  communicator(),
2922  &request))
2923  throw std::runtime_error("Domi::MDVector: Error in MPI_Isend");
2924  _requests.push_back(request);
2925  }
2926  }
2927 
2928  // Post the non-blocking receives
2929  for (int boundary = 0; boundary < 2; ++boundary)
2930  {
2931  MessageInfo message = _recvMessages[axis][boundary];
2932  if (message.proc >= 0)
2933  {
2934  tag = 2 * (message.proc * numProc + rank) + (1-boundary);
2935 
2936 #ifdef DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2937  cout << rank << ": post recv for axis " << axis << ", boundary "
2938  << boundary << ", buffer = " << message.buffer << ", proc = "
2939  << message.proc << ", tag = " << tag << endl;
2940 #endif
2941 
2942  if (MPI_Irecv(message.buffer,
2943  1,
2944  *(message.datatype),
2945  message.proc,
2946  tag,
2947  communicator(),
2948  &request))
2949  throw std::runtime_error("Domi::MDVector: Error in MPI_Irecv");
2950  _requests.push_back(request);
2951  }
2952  }
2953 #else
2954  // HAVE_MPI is not defined, so we are on a single processor.
2955  // However, if the axis is periodic, we need to copy the appropriate
2956  // data to the communication padding.
2957  if (isPeriodic(axis))
2958  {
2959  for (int sendBndry = 0; sendBndry < 2; ++sendBndry)
2960  {
2961  int recvBndry = 1 - sendBndry;
2962  // Get the receive and send data views
2963  MDArrayView< Scalar > recvView = _recvMessages[axis][recvBndry].dataview;
2964  MDArrayView< Scalar > sendView = _sendMessages[axis][sendBndry].dataview;
2965 
2966  // Initialize the receive and send data view iterators
2967  typename MDArrayView< Scalar >::iterator it_recv = recvView.begin();
2968  typename MDArrayView< Scalar >::iterator it_send = sendView.begin();
2969 
2970  // Copy the send buffer to the receive buffer
2971  for ( ; it_recv != recvView.end(); ++it_recv, ++it_send)
2972  *it_recv = *it_send;
2973  }
2974  }
2975 #endif
2976 }
2977 
2979 
2980 template< class Scalar,
2981  class Node >
2982 void
2983 MDVector< Scalar, Node >::
2984 endUpdateCommPad(int axis)
2985 {
2986 #ifdef HAVE_MPI
2987  if (_requests.size() > 0)
2988  {
2989  Teuchos::Array< MPI_Status > status(_requests.size());
2990  if (MPI_Waitall(_requests.size(),
2991  &(_requests[0]),
2992  &(status[0]) ) )
2993  throw std::runtime_error("Domi::MDVector: Error in MPI_Waitall");
2994  _requests.clear();
2995  }
2996 #endif
2997 }
2998 
3000 
3001 template< class Scalar,
3002  class Node >
3003 MDVector< Scalar, Node >
3004 MDVector< Scalar, Node >::
3005 operator[](dim_type index) const
3006 {
3007  MDVector< Scalar, Node > result(*this,
3008  _nextAxis,
3009  index);
3010  int newAxis = _nextAxis + 1;
3011  if (newAxis >= result.numDims()) newAxis = 0;
3012  result._nextAxis = newAxis;
3013  return result;
3014 }
3015 
3017 
3018 template< class Scalar,
3019  class Node >
3020 MDVector< Scalar, Node >
3021 MDVector< Scalar, Node >::
3022 operator[](Slice slice) const
3023 {
3024  MDVector< Scalar, Node > result(*this,
3025  _nextAxis,
3026  slice );
3027  int newAxis = _nextAxis + 1;
3028  if (newAxis >= result.numDims()) newAxis = 0;
3029  result._nextAxis = newAxis;
3030  return result;
3031 }
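// Both operator[] overloads above restrict the vector along the axis stored
// in _nextAxis and then advance that counter (wrapping back to axis 0), so a
// chained expression such as v[i][j] applies successive indices or slices to
// successive axes.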
3032 
3034 
3035 template< class Scalar,
3036  class Node >
3037 void
3038 MDVector< Scalar, Node >::
3039 initializeMessages()
3040 {
3041  // #define DOMI_MDVECTOR_MESSAGE_INITIALIZE
3042 
3043  int ndims = numDims();
3044  Teuchos::Array<int> sizes(ndims);
3045  Teuchos::Array<int> subsizes(ndims);
3046  Teuchos::Array<int> starts(ndims);
3047  MessageInfo messageInfo;
3048 
3049  _sendMessages.resize(ndims);
3050  _recvMessages.resize(ndims);
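  // For each axis, _sendMessages[axis] and _recvMessages[axis] hold two
  // entries: index 0 for the lower boundary and index 1 for the upper
  // boundary.  Each message selects a pad region of the local (padded)
  // buffer, described either by an MPI subarray datatype built from sizes,
  // subsizes and starts (when HAVE_MPI is defined) or by an MDArrayView
  // slice of _mdArrayView (in the serial case).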
3051 
3052 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3053  std::stringstream msg;
3054  int rank = _teuchosComm->getRank();
3055 #endif
3056 
3057 #ifdef HAVE_MPI
3058  int order = mpiOrder(getLayout());
3059  MPI_Datatype datatype = mpiType< Scalar >();
3060 #endif
3061 
3062  // Loop over the axes we are going to send messages along
3063  for (int msgAxis = 0; msgAxis < ndims; ++msgAxis)
3064  {
3065  // Set the initial values for sizes, subsizes and starts
3066  for (int axis = 0; axis < ndims; ++axis)
3067  {
3068  sizes[axis] = _mdArrayRcp.dimension(axis);
3069  subsizes[axis] = _mdArrayView.dimension(axis);
3070  starts[axis] = 0;
3071  }
3072 
3074  // Set the lower receive and send messages
3076 
3077  int proc = getLowerNeighbor(msgAxis);
3078 
3079 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3080  msg << endl << "P" << rank << ": axis " << msgAxis << ", lower neighbor = "
3081  << proc << endl;
3082 #endif
3083 
3084  // Fix the subsize along this message axis
3085  subsizes[msgAxis] = getLowerPadSize(msgAxis);
3086  // Fix the proc if the subsize is zero
3087  if (subsizes[msgAxis] == 0) proc = -1;
3088  // Assign the non-MPI members of messageInfo
3089  messageInfo.buffer = (void*) getData().getRawPtr();
3090  messageInfo.proc = proc;
3091  messageInfo.axis = msgAxis;
3092 
3093  if (proc >= 0)
3094  {
3096  // Lower receive message
3098 
3099 #ifdef HAVE_MPI
3100 
3101 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3102  msg << endl << "P" << rank << ": axis " << msgAxis
3103  << ", Lower receive message:" << endl << " ndims = " << ndims
3104  << endl << " sizes = " << sizes << endl << " subsizes = "
3105  << subsizes << endl << " starts = " << starts << endl
3106  << " order = " << order << endl;
3107 #endif
3108  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3109  MPI_Type_create_subarray(ndims,
3110  &sizes[0],
3111  &subsizes[0],
3112  &starts[0],
3113  order,
3114  datatype,
3115  commPad.get());
3116  MPI_Type_commit(commPad.get());
3117  messageInfo.datatype = commPad;
3118 #else
3119  messageInfo.dataview = _mdArrayView;
3120  for (int axis = 0; axis < numDims(); ++axis)
3121  {
3122  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3123  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3124  axis,
3125  slice);
3126  }
3127 #endif
3128 
3129  }
3130  _recvMessages[msgAxis][0] = messageInfo;
3131 
3133  // Lower send message
3135 
3136  starts[msgAxis] = getLowerPadSize(msgAxis);
3137  if (isReplicatedBoundary(msgAxis) && getCommIndex(msgAxis) == 0)
3138  starts[msgAxis] += 1;
3139  if (proc >= 0)
3140  {
3141 
3142 #ifdef HAVE_MPI
3143 
3144 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3145  msg << endl << "P" << rank << ": axis " << msgAxis
3146  << ", Lower send message:" << endl << " ndims = " << ndims
3147  << endl << " sizes = " << sizes << endl << " subsizes = "
3148  << subsizes << endl << " starts = " << starts << endl
3149  << " order = " << order << endl;
3150 #endif
3151  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3152  MPI_Type_create_subarray(ndims,
3153  &sizes[0],
3154  &subsizes[0],
3155  &starts[0],
3156  order,
3157  datatype,
3158  commPad.get());
3159  MPI_Type_commit(commPad.get());
3160  messageInfo.datatype = commPad;
3161 #else
3162  messageInfo.dataview = _mdArrayView;
3163  for (int axis = 0; axis < numDims(); ++axis)
3164  {
3165  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3166  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3167  axis,
3168  slice);
3169  }
3170 #endif
3171 
3172  }
3173  _sendMessages[msgAxis][0] = messageInfo;
3174 
3176  // Set the upper receive and send messages
3178 
3179  proc = getUpperNeighbor(msgAxis);
3180 
3181 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3182  msg << endl << "P" << rank << ": axis " << msgAxis << ", upper neighbor = "
3183  << proc << endl;
3184 #endif
3185 
3186  subsizes[msgAxis] = getUpperPadSize(msgAxis);
3187  starts[msgAxis] = _mdArrayView.dimension(msgAxis) -
3188  getUpperPadSize(msgAxis);
3189  if (subsizes[msgAxis] == 0) proc = -1;
3190  messageInfo.proc = proc;
3191  if (proc >= 0)
3192  {
3194  // Upper receive message
3196 
3197 #ifdef HAVE_MPI
3198 
3199 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3200  msg << endl << "P" << rank << ": axis " << msgAxis
3201  << ", Upper receive message:" << endl << " ndims = " << ndims
3202  << endl << " sizes = " << sizes << endl << " subsizes = "
3203  << subsizes << endl << " starts = " << starts << endl
3204  << " order = " << order << endl;
3205 #endif
3206  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3207  MPI_Type_create_subarray(ndims,
3208  &sizes[0],
3209  &subsizes[0],
3210  &starts[0],
3211  order,
3212  datatype,
3213  commPad.get());
3214  MPI_Type_commit(commPad.get());
3215  messageInfo.datatype = commPad;
3216 #else
3217  messageInfo.dataview = _mdArrayView;
3218  for (int axis = 0; axis < numDims(); ++axis)
3219  {
3220  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3221  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3222  axis,
3223  slice);
3224  }
3225 #endif
3226  }
3227  _recvMessages[msgAxis][1] = messageInfo;
3228 
3230  // Upper send message
3232 
3233  starts[msgAxis] -= getUpperPadSize(msgAxis);
3234  if (isReplicatedBoundary(msgAxis) &&
3235  getCommIndex(msgAxis) == getCommDim(msgAxis)-1)
3236  starts[msgAxis] -= 1;
3237  if (proc >= 0)
3238  {
3239 
3240 #ifdef HAVE_MPI
3241 
3242 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3243  msg << endl << "P" << rank << ": axis " << msgAxis
3244  << ", Upper send message:" << endl << " ndims = " << ndims
3245  << endl << " sizes = " << sizes << endl << " subsizes = "
3246  << subsizes << endl << " starts = " << starts << endl
3247  << " order = " << order << endl;
3248 #endif
3249  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3250  MPI_Type_create_subarray(ndims,
3251  &sizes[0],
3252  &subsizes[0],
3253  &starts[0],
3254  order,
3255  datatype,
3256  commPad.get());
3257  MPI_Type_commit(commPad.get());
3258  messageInfo.datatype = commPad;
3259 #else
3260  messageInfo.dataview = _mdArrayView;
3261  for (int axis = 0; axis < numDims(); ++axis)
3262  {
3263  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3264  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3265  axis,
3266  slice);
3267  }
3268 #endif
3269 
3270  }
3271  _sendMessages[msgAxis][1] = messageInfo;
3272  }
3273 
3274 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3275  for (int proc = 0; proc < _teuchosComm->getSize(); ++proc)
3276  {
3277  if (proc == rank)
3278  {
3279  cout << msg.str();
3280  msg.flush();
3281  }
3282  _teuchosComm->barrier();
3283  }
3284 #endif
3285 
3286 }
3287 
3289 
3290 template< class Scalar,
3291  class Node >
3292 void
3293 MDVector< Scalar, Node >::
3294 writeBinary(const std::string & filename,
3295  bool includeBndryPad) const
3296 {
3297  FILE * datafile;
3298  // If we are using MPI and overwriting an existing file, and the new
3299  // file is shorter than the old file, it appears that the new file
3300  // will retain the old file's length and trailing data. To prevent
3301  // this behavior, we open and close the file to give it zero length.
3302  int pid = _teuchosComm->getRank();
3303  if (pid == 0)
3304  {
3305  datafile = fopen(filename.c_str(), "w");
3306  fclose(datafile);
3307  }
3308  _teuchosComm->barrier();
3309 
3310  // Get the pointer to this MDVector's MDArray, including all padding
3311  const Scalar * buffer = getData(true).getRawPtr();
3312 
3313  // Compute either _fileInfo or _fileInfoWithBndry, whichever is
3314  // appropriate, and return a reference to that fileInfo object
3315  Teuchos::RCP< FileInfo > & fileInfo = computeFileInfo(includeBndryPad);
3316 
3317  // Parallel output
3318 #ifdef HAVE_MPI
3319 
3320  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
3321  // const Teuchos::MpiComm< int >. We downcast, extract and
3322  // dereference so that we can get access to the MPI_Comm used to
3323  // construct it.
3324  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
3325  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
3326  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
3327  *(mpiComm->getRawMpiComm());
3328 
3329  // Compute the access mode
3330  int access = MPI_MODE_WRONLY | MPI_MODE_CREATE;
3331 
3332  // I copy the filename C string, because the c_str() method returns
3333  // a const char*, and the MPI_File_open() function requires
3334  // (incorrectly) a non-const char*.
3335  char * cstr = new char[filename.size()+1];
3336  std::strcpy(cstr, filename.c_str());
3337 
3338  // Use MPI I/O to write the binary file
3339  MPI_File mpiFile;
3340  MPI_Status status;
3341  char datarep[7] = "native";
3342  MPI_File_open(communicator(), cstr, access, MPI_INFO_NULL, &mpiFile);
3343  MPI_File_set_view(mpiFile, 0, mpiType< Scalar >(),
3344  *(fileInfo->filetype), datarep, MPI_INFO_NULL);
3345  MPI_File_write_all(mpiFile, (void*)buffer, 1, *(fileInfo->datatype),
3346  &status);
3347  MPI_File_close(&mpiFile);
3348 
3349  // Delete the C string
3350  delete [] cstr;
3351 
3352  // Serial output
3353 #else
3354 
3355  // Get the number of dimensions
3356  // int ndims = _mdMap->numDims();
3357 
3358  // Initialize the data file
3359  datafile = fopen(filename.c_str(), "w");
3360 
3361  // Obtain the data to write, including the boundary padding if requested
3362  MDArrayView< const Scalar > mdArrayView = getData(includeBndryPad);
3363 
3364  // Iterate over the data and write it to the data file
3365  typedef typename MDArrayView< Scalar >::const_iterator const_iterator;
3366  for (const_iterator it = mdArrayView.begin(); it != mdArrayView.end(); ++it)
3367  {
3368  fwrite((const void *) &(*it), sizeof(Scalar), 1, datafile);
3369  }
3370 
3371  // Close the data file
3372  fclose(datafile);
3373 
3374 #endif
3375 
3376 }
3377 
3379 
3380 template< class Scalar,
3381  class Node >
3382 void
3383 MDVector< Scalar, Node >::
3384 readBinary(const std::string & filename,
3385  bool includeBndryPad)
3386 {
3387  // Get the pointer to this MDVector's MDArray, including all padding
3388  const Scalar * buffer = getDataNonConst(true).getRawPtr();
3389 
3390  // Compute either _fileInfo or _fileInfoWithBndry, whichever is
3391  // appropriate, and return a reference to that fileInfo object
3392  Teuchos::RCP< FileInfo > & fileInfo = computeFileInfo(includeBndryPad);
3393 
3394  // Parallel input
3395 #ifdef HAVE_MPI
3396 
3397  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
3398  // const Teuchos::MpiComm< int >. We downcast, extract and
3399  // dereference so that we can get access to the MPI_Comm used to
3400  // construct it.
3401  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
3402  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
3403  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
3404  *(mpiComm->getRawMpiComm());
3405 
3406  // Compute the access mode
3407  int access = MPI_MODE_RDONLY;
3408 
3409  // I copy the filename C string, because the c_str() method returns
3410  // a const char*, and the MPI_File_open() function requires
3411  // (incorrectly) a non-const char*.
3412  char * cstr = new char[filename.size()+1];
3413  std::strcpy(cstr, filename.c_str());
3414 
3415  // Use MPI I/O to read the binary file
3416  MPI_File mpiFile;
3417  MPI_Status status;
3418  char datarep[7] = "native";
3419  MPI_File_open(communicator(), cstr, access, MPI_INFO_NULL, &mpiFile);
3420  MPI_File_set_view(mpiFile, 0, mpiType< Scalar >(),
3421  *(fileInfo->filetype), datarep, MPI_INFO_NULL);
3422  MPI_File_read_all(mpiFile, (void*)buffer, 1, *(fileInfo->datatype),
3423  &status);
3424  MPI_File_close(&mpiFile);
3425 
3426  // Delete the C string
3427  delete [] cstr;
3428 
3429  // Serial input
3430 #else
3431 
3432  // Get the number of dimensions
3433  int ndims = _mdMap->numDims();
3434 
3435  // Initialize the data file
3436  FILE * datafile;
3437  datafile = fopen(filename.c_str(), "r");
3438 
3439  // Obtain the MDArrayView to read into, including the boundary
3440  // padding if requested
3441  MDArrayView< Scalar > mdArrayView = getDataNonConst(includeBndryPad);
3442 
3443  // Initialize the set of indexes
3444  Teuchos::Array< Ordinal > index(ndims);
3445  for (int axis = 0; axis < ndims; ++axis)
3446  index[axis] = fileInfo->dataStart[axis];
3447 
3448  // Iterate over the data and read it from the data file
3449  typedef typename MDArrayView< Scalar >::iterator iterator;
3450  for (iterator it = mdArrayView.begin(); it != mdArrayView.end(); ++it)
3451  {
3452  fread(&(*it), sizeof(Scalar), 1, datafile);
3453  }
3454 
3455  // Close the data file
3456  fclose(datafile);
3457 
3458 #endif
3459 
3460 }
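// Example (illustrative sketch): round-tripping an MDVector through a binary
// file, assuming 'v' is an MDVector< double > and "v.bin" names a path
// accessible to every process:
//
//   v.writeBinary("v.bin");   // each process writes its block of the file
//   v.readBinary("v.bin");    // and reads it back into the same layout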
3461 
3463 
3464 template< class Scalar,
3465  class Node >
3466 Teuchos::RCP< typename MDVector< Scalar, Node >::FileInfo > &
3467 MDVector< Scalar, Node >::
3468 computeFileInfo(bool includeBndryPad) const
3469 {
3470  // Work with the appropriate FileInfo object. By using a reference
3471  // here, we are working directly with the member data.
3472  Teuchos::RCP< MDVector< Scalar, Node >::FileInfo > & fileInfo =
3473  includeBndryPad ? _fileInfoWithBndry : _fileInfo;
3474 
3475  // If the fileInfo object already has been set, our work is done
3476  if (!fileInfo.is_null()) return fileInfo;
3477 
3478  // Initialize the new FileInfo object
3479  int ndims = _mdMap->numDims();
3480  fileInfo.reset(new MDVector< Scalar, Node >::FileInfo);
3481  fileInfo->fileShape.resize(ndims);
3482  fileInfo->bufferShape.resize(ndims);
3483  fileInfo->dataShape.resize(ndims);
3484  fileInfo->fileStart.resize(ndims);
3485  fileInfo->dataStart.resize(ndims);
3486 
3487  // Initialize the shapes and starts.
3488  for (int axis = 0; axis < ndims; ++axis)
3489  {
3490  // Initialize the FileInfo arrays using includeBndryPad where
3491  // appropriate
3492  fileInfo->fileShape[axis] = getGlobalDim(axis,includeBndryPad);
3493  fileInfo->bufferShape[axis] = getLocalDim(axis,true );
3494  fileInfo->dataShape[axis] = getLocalDim(axis,false);
3495  fileInfo->fileStart[axis] = getGlobalRankBounds(axis,includeBndryPad).start();
3496  fileInfo->dataStart[axis] = getLocalBounds(axis).start();
3497  // Modify dataShape and dataStart if boundary padding is included
3498  if (includeBndryPad)
3499  {
3500  int commIndex = _mdMap->getCommIndex(axis);
3501  if (commIndex == 0)
3502  {
3503  int pad = getLowerBndryPad(axis);
3504  fileInfo->dataShape[axis] += pad;
3505  fileInfo->dataStart[axis] -= pad;
3506  }
3507  if (commIndex == _mdMap->getCommDim(axis)-1)
3508  {
3509  fileInfo->dataShape[axis] += getUpperBndryPad(axis);
3510  }
3511  }
3512  }
3513 
3514 #ifdef DOMI_MDVECTOR_DEBUG_IO
3515  cout << pid << ": fileShape = " << fileInfo->fileShape() << endl;
3516  cout << pid << ": bufferShape = " << fileInfo->bufferShape() << endl;
3517  cout << pid << ": dataShape = " << fileInfo->dataShape() << endl;
3518  cout << pid << ": fileStart = " << fileInfo->fileStart() << endl;
3519  cout << pid << ": dataStart = " << fileInfo->dataStart() << endl;
3520 #endif
3521 
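  // Two MPI subarray datatypes are built below: 'filetype' locates this
  // process's data within the global file layout (used with
  // MPI_File_set_view), while 'datatype' locates the same data within the
  // local padded buffer (used with MPI_File_read_all / MPI_File_write_all).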
3522 #ifdef HAVE_MPI
3523  int mpi_order = getLayout() == C_ORDER ? MPI_ORDER_C : MPI_ORDER_FORTRAN;
3524  // Build the MPI_Datatype for the file
3525  fileInfo->filetype = Teuchos::rcp(new MPI_Datatype);
3526  MPI_Type_create_subarray(ndims,
3527  fileInfo->fileShape.getRawPtr(),
3528  fileInfo->dataShape.getRawPtr(),
3529  fileInfo->fileStart.getRawPtr(),
3530  mpi_order,
3531  mpiType< Scalar >(),
3532  fileInfo->filetype.get());
3533  MPI_Type_commit(fileInfo->filetype.get());
3534 
3535  // Build the MPI_Datatype for the data
3536  fileInfo->datatype = Teuchos::rcp(new MPI_Datatype);
3537  MPI_Type_create_subarray(ndims,
3538  fileInfo->bufferShape.getRawPtr(),
3539  fileInfo->dataShape.getRawPtr(),
3540  fileInfo->dataStart.getRawPtr(),
3541  mpi_order,
3542  mpiType< Scalar >(),
3543  fileInfo->datatype.get());
3544  MPI_Type_commit(fileInfo->datatype.get());
3545 #endif // HAVE_MPI
3546 
3547  return fileInfo;
3548 }
3549 
3551 
3552 } // Namespace Domi
3553 
3554 #endif