Intrepid2_CellGeometryDef.hpp
1 // @HEADER
2 // ************************************************************************
3 //
4 // Intrepid2 Package
5 // Copyright (2007) Sandia Corporation
6 //
7 // Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
8 // license for use of this work by or on behalf of the U.S. Government.
9 //
10 // Redistribution and use in source and binary forms, with or without
11 // modification, are permitted provided that the following conditions are
12 // met:
13 //
14 // 1. Redistributions of source code must retain the above copyright
15 // notice, this list of conditions and the following disclaimer.
16 //
17 // 2. Redistributions in binary form must reproduce the above copyright
18 // notice, this list of conditions and the following disclaimer in the
19 // documentation and/or other materials provided with the distribution.
20 //
21 // 3. Neither the name of the Corporation nor the names of the
22 // contributors may be used to endorse or promote products derived from
23 // this software without specific prior written permission.
24 //
25 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
26 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
27 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
28 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
29 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
30 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
31 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
32 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
33 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
34 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
35 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 //
37 // Questions? Contact Kyungjoo Kim (kyukim@sandia.gov),
38 // Mauro Perego (mperego@sandia.gov), or
39 // Nate Roberts (nvrober@sandia.gov),
40 //
41 // ************************************************************************
42 // @HEADER
43 
50 #ifndef Intrepid2_CellGeometryDef_h
51 #define Intrepid2_CellGeometryDef_h
52 
53 namespace Intrepid2
54 {
55 
56  namespace Impl
57  {
60  template<class PointScalar, int spaceDim, typename DeviceType>
61  class CellGeometryHostMembers
62  {
63  using BasisPtr = Teuchos::RCP<Intrepid2::Basis<DeviceType,PointScalar,PointScalar> >;
64  using CellGeometryType = CellGeometry<PointScalar,spaceDim,DeviceType>;
65  public:
66  // conceptually, these should be private members, but their out-of-class definitions below require them to be externally accessible.
67  static std::map<const CellGeometryType *, shards::CellTopology> cellTopology_;
68  static std::map<const CellGeometryType *, BasisPtr> basisForNodes_;
69 
70  public:
71  static void constructorCalled(const CellGeometryType *cellGeometry, const shards::CellTopology &cellTopo, BasisPtr basisForNodes)
72  {
73  cellTopology_[cellGeometry] = cellTopo;
74  basisForNodes_[cellGeometry] = basisForNodes;
75  }
76 
77  static void destructorCalled(const CellGeometryType *cellGeometry)
78  {
79  cellTopology_.erase(cellGeometry);
80  basisForNodes_.erase(cellGeometry);
81  }
82 
83  static BasisPtr getBasis(const CellGeometryType *cellGeometry)
84  {
85  return basisForNodes_[cellGeometry];
86  }
87 
88  static const shards::CellTopology & getCellTopology(const CellGeometryType *cellGeometry)
89  {
90  return cellTopology_[cellGeometry];
91  }
92  };
93 
94  // member lookup map definitions for CellGeometryHostMembers:
95  template< class PointScalar, int spaceDim, typename DeviceType > typename std::map<const CellGeometry<PointScalar,spaceDim,DeviceType> *, shards::CellTopology> CellGeometryHostMembers< PointScalar,spaceDim,DeviceType>::cellTopology_;
96 
97  template< class PointScalar, int spaceDim, typename DeviceType > typename std::map<const CellGeometry<PointScalar,spaceDim,DeviceType> *, Teuchos::RCP<Intrepid2::Basis<DeviceType,PointScalar,PointScalar> >> CellGeometryHostMembers< PointScalar,spaceDim,DeviceType>::basisForNodes_;
98 
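 // The lookup class above lets KOKKOS_INLINE_FUNCTION constructors/destructors keep shards::CellTopology and
 // Teuchos::RCP members (host-only types) out of the device-resident object. A minimal sketch of the pattern
 // (illustrative; the calls below are the static methods defined above):
 //
 //   // on the host, inside a CellGeometry constructor:
 //   using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
 //   HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);  // register host-only members
 //   // later, from a host-only accessor:
 //   auto basis = HostMemberLookup::getBasis(this);                       // retrieve by object address
 //
 // The destructor calls destructorCalled(this) so that entries do not outlive the CellGeometry object.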
101  template<class PointScalar, int spaceDim, typename DeviceType>
102  class CellMeasureFunctor
103  {
104  Kokkos::View<PointScalar**, DeviceType> cellMeasures_; // (C,P)
105  Kokkos::View<PointScalar**, DeviceType> detData_; // (C,P)
106  TensorData<PointScalar,DeviceType> cubatureWeights_; // (P)
107  public:
108  CellMeasureFunctor(Kokkos::View<PointScalar**, DeviceType> cellMeasures,
109  Kokkos::View<PointScalar**, DeviceType> detData, TensorData<PointScalar,DeviceType> cubatureWeights)
110  :
111  cellMeasures_(cellMeasures),
112  detData_(detData),
113  cubatureWeights_(cubatureWeights)
114  {}
115 
116  KOKKOS_INLINE_FUNCTION void
117  operator () (const ordinal_type cellOrdinal, const ordinal_type pointOrdinal) const
118  {
119  cellMeasures_(cellOrdinal,pointOrdinal) = detData_(cellOrdinal,pointOrdinal) * cubatureWeights_(pointOrdinal);
120  }
121  };
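 // CellMeasureFunctor is dispatched over the (C,P) cell-measure container as an explicit functor rather than a
 // lambda (see the CUDA-related note in computeCellMeasure()). A minimal usage sketch, mirroring that call site,
 // assuming cellMeasures and detData are (C,P) views and cubatureWeights is a rank-1 TensorData:
 //
 //   using ExecutionSpace = typename DeviceType::execution_space;
 //   Impl::CellMeasureFunctor<PointScalar,spaceDim,DeviceType> functor(cellMeasures, detData, cubatureWeights);
 //   Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<2>> policy({0,0},{numCells,numPoints});
 //   Kokkos::parallel_for(policy, functor);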
122  }
123 
124  template<class PointScalar, int spaceDim, typename DeviceType>
125  KOKKOS_INLINE_FUNCTION
126  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(const CellGeometry &cellGeometry)
127 :
128  nodeOrdering_(cellGeometry.nodeOrdering_),
129  cellGeometryType_(cellGeometry.cellGeometryType_),
130  subdivisionStrategy_(cellGeometry.subdivisionStrategy_),
131  affine_(cellGeometry.affine_),
132  orientations_(cellGeometry.orientations_),
133  origin_(cellGeometry.origin_),
134  domainExtents_(cellGeometry.domainExtents_),
135  gridCellCounts_(cellGeometry.gridCellCounts_),
136  tensorVertices_(cellGeometry.tensorVertices_),
137  cellToNodes_(cellGeometry.cellToNodes_),
138  nodes_(cellGeometry.nodes_),
139  numCells_(cellGeometry.numCells_),
140  numNodesPerCell_(cellGeometry.numNodesPerCell_)
141  {
142  // host-only registration with HostMemberLookup:
143 #ifndef __CUDA_ARCH__
144  shards::CellTopology cellTopo = cellGeometry.cellTopology();
145  BasisPtr basisForNodes = cellGeometry.basisForNodes();
146  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
147  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
148 #endif
149  }
150 
151  template<class PointScalar, int spaceDim, typename DeviceType>
152  KOKKOS_INLINE_FUNCTION
153  CellGeometry<PointScalar,spaceDim,DeviceType>::~CellGeometry()
154  {
155  // host-only deregistration with HostMemberLookup:
156 #ifndef __CUDA_ARCH__
157  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
158  HostMemberLookup::destructorCalled(this);
159 #endif
160  }
161 
162  template<class PointScalar, int spaceDim, typename DeviceType>
163  KOKKOS_INLINE_FUNCTION
164  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCellsPerGridCell(SubdivisionStrategy subdivisionStrategy) const
165  {
166  switch (subdivisionStrategy) {
167  case NO_SUBDIVISION:
168  return 1;
169  case TWO_TRIANGLES_LEFT:
170  case TWO_TRIANGLES_RIGHT:
171  return 2;
172  case FOUR_TRIANGLES:
173  return 4;
174  case FIVE_TETRAHEDRA:
175  return 5;
176  case SIX_TETRAHEDRA:
177  return 6;
178  }
179  return -1;
180  }
181 
182  template<class PointScalar, int spaceDim, typename DeviceType>
183  Data<PointScalar,DeviceType>
184  CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianDataPrivate(const TensorPoints<PointScalar,DeviceType> &points, const int &pointsPerCell, const int startCell, const int endCell) const
185  {
186  ScalarView<PointScalar,DeviceType> data;
187  const int rank = 4; // C,P,D,D
188  const int CELL_DIM = 0;
189  const int POINT_DIM = 1;
190  const int D1_DIM = 2;
191  const int D2_DIM = 3;
192 
193  const int numCellsWorkset = (endCell == -1) ? (numCells_ - startCell) : (endCell - startCell);
194 
195  Kokkos::Array<int,7> extents { numCellsWorkset, pointsPerCell, spaceDim, spaceDim, 1, 1, 1 };
196  Kokkos::Array<DataVariationType,7> variationType { CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT };
197 
198  int blockPlusDiagonalLastNonDiagonal = -1;
199 
200  if (cellGeometryType_ == UNIFORM_GRID)
201  {
202  if (uniformJacobianModulus() != 1)
203  {
204  variationType[CELL_DIM] = MODULAR;
205  variationType[POINT_DIM] = CONSTANT;
206  variationType[D1_DIM] = GENERAL;
207  variationType[D2_DIM] = GENERAL;
208 
209  int cellTypeModulus = uniformJacobianModulus();
210 
211  data = getMatchingViewWithLabel(points.getTensorComponent(0), "CellGeometryProvider: Jacobian data", cellTypeModulus, spaceDim, spaceDim);
212  }
213  else
214  {
215  // diagonal Jacobian
216  variationType[D1_DIM] = BLOCK_PLUS_DIAGONAL;
217  variationType[D2_DIM] = BLOCK_PLUS_DIAGONAL;
218  blockPlusDiagonalLastNonDiagonal = -1;
219 
220  data = getMatchingViewWithLabel(points.getTensorComponent(0), "CellGeometryProvider: Jacobian data", spaceDim);
221  }
222  }
223  else if (cellGeometryType_ == TENSOR_GRID)
224  {
225  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "tensor grid support not yet implemented");
226  }
227  else if (cellGeometryType_ == FIRST_ORDER)
228  {
229  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
230  if (simplex)
231  {
232  variationType[CELL_DIM] = GENERAL;
233  variationType[POINT_DIM] = CONSTANT; // affine: no point variation
234  variationType[D1_DIM] = GENERAL;
235  variationType[D2_DIM] = GENERAL;
236 
237  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCells_, spaceDim, spaceDim);
238  }
239  else
240  {
241  variationType[CELL_DIM] = GENERAL;
242  variationType[D1_DIM] = GENERAL;
243  variationType[D2_DIM] = GENERAL;
244  if (affine_)
245  {
246  // no point variation
247  variationType[POINT_DIM] = CONSTANT;
248  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, spaceDim, spaceDim);
249  }
250  else
251  {
252  variationType[POINT_DIM] = GENERAL;
253  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, pointsPerCell, spaceDim, spaceDim);
254  }
255  }
256  }
257  else if (cellGeometryType_ == HIGHER_ORDER)
258  {
259  // most general case: varies in all 4 dimensions
260  variationType[CELL_DIM] = GENERAL;
261  variationType[POINT_DIM] = GENERAL;
262  variationType[D1_DIM] = GENERAL;
263  variationType[D2_DIM] = GENERAL;
264  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, pointsPerCell, spaceDim, spaceDim);
265  }
266  else
267  {
268  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
269  }
270 
271  Data<PointScalar,DeviceType> jacobianData(data,rank,extents,variationType,blockPlusDiagonalLastNonDiagonal);
272  return jacobianData;
273  }
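 // The returned Data container always has logical shape (C,P,D,D); the underlying allocation can be much smaller,
 // as encoded by variationType. For example (illustrative, for a UNIFORM_GRID geometry with NO_SUBDIVISION), the
 // Jacobian is diagonal and both cell- and point-invariant, so only spaceDim values are stored:
 //
 //   auto jacobian = geometry.allocateJacobianData(numPoints);   // logically (C,P,D,D)
 //   // jacobian.getUnderlyingView1() has extent spaceDim: entry d holds the constant dx_d/dref_d.
 //
 // Here `geometry` and `numPoints` are placeholder names for an existing affine CellGeometry and a point count.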
274 
275  template<class PointScalar, int spaceDim, typename DeviceType>
276  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobianDataPrivate(Data<PointScalar,DeviceType> &jacobianData, const TensorPoints<PointScalar,DeviceType> &points,
277  const int &pointsPerCell, const Data<PointScalar,DeviceType> &refData,
278  const int startCell, const int endCell) const
279  {
280  const int numCellsWorkset = (endCell == -1) ? (numCells_ - startCell) : (endCell - startCell);
281 
282  if (cellGeometryType_ == UNIFORM_GRID)
283  {
284  if (uniformJacobianModulus() != 1)
285  {
286  int cellTypeModulus = uniformJacobianModulus();
287 
288  auto dataView3 = jacobianData.getUnderlyingView3(); // (cellTypeModulus, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
289  auto dataHost = Kokkos::create_mirror_view(dataView3);
290 
291  const int startCellType = startCell % cellTypeModulus;
292  const int endCellType = (numCellsWorkset >= cellTypeModulus) ? startCellType + cellTypeModulus : startCellType + numCellsWorkset;
293  const int gridCellOrdinal = 0; // sample cell
294  for (int cellType=startCellType; cellType<endCellType; cellType++)
295  {
296  const int subdivisionOrdinal = cellType % cellTypeModulus;
297  const int nodeZero = 0;
298  // simplex Jacobian formula is J_00 = x1 - x0, J_01 = x2 - x0, etc.
299  for (int i=0; i<spaceDim; i++)
300  {
301  for (int j=0; j<spaceDim; j++)
302  {
303  const int node = j+1; // this is the only node other than the 0 node that has non-zero derivative in the j direction -- and this has unit derivative
304  // nodeZero has derivative -1 in every dimension.
305  const auto J_ij = subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, node, i) - subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, nodeZero, i);
306  dataHost(cellType,i,j) = J_ij;
307  }
308  }
309  }
310 
311  Kokkos::deep_copy(dataView3,dataHost);
312  }
313  else
314  {
315  // diagonal Jacobian
316  auto dataView1 = jacobianData.getUnderlyingView1(); // (spaceDim) allocated in allocateJacobianDataPrivate()
317  const auto domainExtents = domainExtents_;
318  const auto gridCellCounts = gridCellCounts_;
319 
320  using ExecutionSpace = typename DeviceType::execution_space;
321  auto policy = Kokkos::RangePolicy<>(ExecutionSpace(),0,spaceDim);
322  Kokkos::parallel_for("fill jacobian", policy, KOKKOS_LAMBDA(const int d1)
323  {
324  // diagonal jacobian
325  const double REF_SPACE_EXTENT = 2.0;
326  dataView1(d1) = (domainExtents[d1] / REF_SPACE_EXTENT) / gridCellCounts[d1];
327  });
328  ExecutionSpace().fence();
329  }
330  }
331  else if (cellGeometryType_ == TENSOR_GRID)
332  {
333  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "tensor grid support not yet implemented");
334  }
335  else if ((cellGeometryType_ == FIRST_ORDER) || (cellGeometryType_ == HIGHER_ORDER))
336  {
337  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
338  if (simplex)
339  {
340  auto dataView3 = jacobianData.getUnderlyingView3(); // (numCells_, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
341 
342  // get local (shallow) copies to avoid implicit references to this
343  auto cellToNodes = cellToNodes_;
344  auto nodes = nodes_;
345 
346  using ExecutionSpace = typename DeviceType::execution_space;
347  auto policy = Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<3>>({startCell,0,0},{numCellsWorkset,spaceDim,spaceDim});
348 
349  Kokkos::parallel_for("compute first-order simplex Jacobians", policy,
350  KOKKOS_LAMBDA (const int &cellOrdinal, const int &d1, const int &d2) {
351  const int nodeZero = 0; // nodeZero has derivative -1 in every dimension.
352  const int node = d2+1; // this is the only node other than the 0 node that has non-zero derivative in the d2 direction -- and this has unit derivative (except in 1D, where each derivative is ±0.5)
353  const auto & nodeCoord = nodes(cellToNodes(cellOrdinal,node), d1);
354  const auto & nodeZeroCoord = nodes(cellToNodes(cellOrdinal,nodeZero), d1);
355  const PointScalar J_ij = nodeCoord - nodeZeroCoord;
356  dataView3(cellOrdinal,d1,d2) = (spaceDim != 1) ? J_ij : J_ij * 0.5;
357  });
358  }
359  else
360  {
361  using CellTools = Intrepid2::CellTools<DeviceType>;
362  auto basisForNodes = this->basisForNodes();
363 
364  if (affine_)
365  {
366  // no point variation
367  auto dataView3 = jacobianData.getUnderlyingView3(); // (numCellsWorkset, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
368 
369  // TODO: find an allocation-free way to do this… (consider modifying CellTools::setJacobian() to support affine case.)
370  const int onePoint = 1;
371  auto testPointView = getMatchingViewWithLabel(dataView3, "CellGeometryProvider: test point", onePoint, spaceDim);
372  auto tempData = getMatchingViewWithLabel(dataView3, "CellGeometryProvider: temporary Jacobian data", numCellsWorkset, onePoint, spaceDim, spaceDim);
373 
374  Kokkos::deep_copy(testPointView, 0.0);
375 
376  CellTools::setJacobian(tempData, testPointView, *this, basisForNodes, startCell, endCell);
377 
378  auto tempDataSubview = Kokkos::subview(tempData, Kokkos::ALL(), 0, Kokkos::ALL(), Kokkos::ALL());
379  Kokkos::deep_copy(dataView3, tempDataSubview);
380  }
381  else
382  {
383  auto dataView = jacobianData.getUnderlyingView(); // (numCellsWorkset, pointsPerCell, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
384  TEUCHOS_TEST_FOR_EXCEPTION(basisForNodes == Teuchos::null, std::invalid_argument, "basisForNodes must not be null");
385  TEUCHOS_TEST_FOR_EXCEPTION(dataView.size() == 0, std::invalid_argument, "underlying view is not valid");
386 
387  // refData should contain the basis gradients; shape is (F,P,D)
388  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!refData.isValid(), std::invalid_argument, "refData should be a valid container for cases with non-affine geometry");
389  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.rank() != 3, std::invalid_argument, "refData should have shape (F,P,D)");
390  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(0) != basisForNodes->getCardinality(), std::invalid_argument, "refData should have shape (F,P,D)");
391  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(1) != points.extent_int(0), std::invalid_argument, "refData should have shape (F,P,D)");
392  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(2) != spaceDim, std::invalid_argument, "refData should have shape (F,P,D)");
393 
394  CellTools::setJacobian(dataView, *this, refData, startCell, endCell);
395  }
396  }
397  }
398  else
399  {
400  // TODO: handle the other cases
401  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
402  }
403  }
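 // Typical call sequence for the public interface that funnels into the method above (a sketch; `geometry` is an
 // existing CellGeometry and `points` a TensorPoints container of reference-space quadrature points):
 //
 //   auto jacobian = geometry.allocateJacobianData(points);   // container sized according to the geometry type
 //   auto refData  = geometry.getJacobianRefData(points);     // (F,P,D) basis gradients, or empty if not needed
 //   geometry.setJacobian(jacobian, points, refData);         // fills the container on device
 //
 // For affine geometries refData may be an empty Data container, since the Jacobian does not vary with the point.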
404 
405  // Uniform grid constructor, with optional subdivision into simplices
406  template<class PointScalar, int spaceDim, typename DeviceType>
407  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(const Kokkos::Array<PointScalar,spaceDim> &origin,
408  const Kokkos::Array<PointScalar,spaceDim> &domainExtents,
409  const Kokkos::Array<int,spaceDim> &gridCellCounts,
410  SubdivisionStrategy subdivisionStrategy,
411  HypercubeNodeOrdering nodeOrdering)
412  :
413  nodeOrdering_(nodeOrdering),
414  cellGeometryType_(UNIFORM_GRID),
415  subdivisionStrategy_(subdivisionStrategy),
416  affine_(true),
417  origin_(origin),
418  domainExtents_(domainExtents),
419  gridCellCounts_(gridCellCounts)
420  {
421  numCells_ = 1;
422  for (int d=0; d<spaceDim; d++)
423  {
424  numCells_ *= gridCellCounts_[d];
425  }
426  numCells_ *= numCellsPerGridCell(subdivisionStrategy_);
427 
428  shards::CellTopology cellTopo; // will register with HostMemberLookup below
429  if (subdivisionStrategy_ == NO_SUBDIVISION)
430  {
431  // hypercube
432  numNodesPerCell_ = 1 << spaceDim; // 2^D vertices in a D-dimensional hypercube
433 
434  if (spaceDim == 1)
435  {
436  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Line<> >());
437  }
438  else if (spaceDim == 2)
439  {
440  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Quadrilateral<> >());
441  }
442  else if (spaceDim == 3)
443  {
444  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Hexahedron<> >());
445  }
446  else
447  {
448  // TODO: Once shards supports higher-dimensional hypercubes, initialize cellTopo accordingly
449  }
450  }
451  else
452  {
453  // simplex
454  numNodesPerCell_ = spaceDim + 1; // D+1 vertices in a D-dimensional simplex
455  if (spaceDim == 2)
456  {
457  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Triangle<> >());
458  }
459  else if (spaceDim == 3)
460  {
461  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Tetrahedron<> >());
462  }
463  else
464  {
465  // TODO: Once shards supports higher-dimensional simplices, initialize cellTopo_ accordingly
466  }
467  }
468 
469  using BasisFamily = DerivedNodalBasisFamily<DeviceType,PointScalar,PointScalar>;
470  const int linearPolyOrder = 1;
471  BasisPtr basisForNodes = getBasis<BasisFamily>(cellTopo, FUNCTION_SPACE_HGRAD, linearPolyOrder);
472 
473  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
474  {
475  // override basisForNodes for quad, hexahedron. Apparently the lowest-order bases below are *not* in the same order as their
476  // arbitrary-polynomial-order counterparts; the latter do not match the order of the shards::CellTopology nodes.
477  if (cellTopo.getKey() == shards::Quadrilateral<>::key)
478  {
479  basisForNodes = Teuchos::rcp( new Basis_HGRAD_QUAD_C1_FEM<DeviceType,PointScalar,PointScalar>() );
480  }
481  else if (cellTopo.getKey() == shards::Hexahedron<>::key)
482  {
483  basisForNodes = Teuchos::rcp( new Basis_HGRAD_HEX_C1_FEM<DeviceType,PointScalar,PointScalar>() );
484  }
485  }
486 
487  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
488  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
489  }
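 // Example usage (a sketch): a 10 x 10 uniform grid covering [0,1] x [0,1], with each grid cell split into two
 // right triangles. DeviceType and the extents/counts chosen here are illustrative, and the subdivision enum is
 // assumed to be accessed through the class scope:
 //
 //   using Geometry = CellGeometry<double,2,DeviceType>;
 //   Kokkos::Array<double,2> origin        {0.0, 0.0};
 //   Kokkos::Array<double,2> domainExtents {1.0, 1.0};
 //   Kokkos::Array<int,2>    gridCellCounts{10, 10};
 //   Geometry geometry(origin, domainExtents, gridCellCounts, Geometry::TWO_TRIANGLES_RIGHT);
 //   // geometry.numCells() == 200, and geometry.cellTopology() is the shards Triangle topology.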
490 
491  // Node-based constructor for straight-edged cell geometry.
492  // If claimAffine is true, we assume (without checking) that the mapping from reference space is affine.
493  // (If claimAffine is false, we check whether the topology is simplicial; if so, we conclude that the mapping must be affine.)
494  template<class PointScalar, int spaceDim, typename DeviceType>
495  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(const shards::CellTopology &cellTopo,
496  ScalarView<int,DeviceType> cellToNodes,
497  ScalarView<PointScalar,DeviceType> nodes,
498  const bool claimAffine,
499  const HypercubeNodeOrdering nodeOrdering)
500  :
501  nodeOrdering_(nodeOrdering),
502  cellGeometryType_(FIRST_ORDER),
503  cellToNodes_(cellToNodes),
504  nodes_(nodes)
505  {
506  numCells_ = cellToNodes.extent_int(0);
507  numNodesPerCell_ = cellToNodes.extent_int(1);
508  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(numNodesPerCell_ != cellTopo.getNodeCount(), std::invalid_argument, "cellToNodes.extent(1) does not match the cell topology node count");
509 
510  if (!claimAffine)
511  {
512  // if cellTopo is simplicial, then since the geometry is straight-edged, it is also affine
513  const bool simplicialTopo = (cellTopo.getNodeCount() == cellTopo.getDimension() + 1);
514  affine_ = simplicialTopo;
515  }
516  else
517  {
518  affine_ = true;
519  }
520 
521  using BasisFamily = DerivedNodalBasisFamily<DeviceType,PointScalar,PointScalar>;
522  const int linearPolyOrder = 1;
523  BasisPtr basisForNodes = getBasis<BasisFamily>(cellTopo, FUNCTION_SPACE_HGRAD, linearPolyOrder);
524 
525  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
526  {
527  // override basisForNodes for quad, hexahedron. Apparently the lowest-order bases below are *not* in the same order as their
528  // arbitrary-polynomial-order counterparts; the latter do not match the order of the shards::CellTopology nodes.
529  if (cellTopo.getKey() == shards::Quadrilateral<>::key)
530  {
531  basisForNodes = Teuchos::rcp( new Basis_HGRAD_QUAD_C1_FEM<DeviceType,PointScalar,PointScalar>() );
532  }
533  else if (cellTopo.getKey() == shards::Hexahedron<>::key)
534  {
535  basisForNodes = Teuchos::rcp( new Basis_HGRAD_HEX_C1_FEM<DeviceType,PointScalar,PointScalar>() );
536  }
537  }
538 
539  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
540  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
541  }
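 // Example usage (a sketch): wrapping an existing straight-edged quadrilateral mesh, given a (C,N) cell-to-node
 // connectivity view and a (#nodes,D) coordinate view. The view names and sizes are hypothetical, and the call
 // relies on the declaration's defaults for claimAffine and nodeOrdering:
 //
 //   shards::CellTopology quadTopo(shards::getCellTopologyData<shards::Quadrilateral<> >());
 //   ScalarView<int,DeviceType>    cellToNodes("cell to nodes", numMeshCells, 4); // filled from the mesh
 //   ScalarView<double,DeviceType> nodes      ("node coords",   numMeshNodes, 2); // filled from the mesh
 //   CellGeometry<double,2,DeviceType> geometry(quadTopo, cellToNodes, nodes);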
542 
543  // Constructor for higher-order geometry
544  template<class PointScalar, int spaceDim, typename DeviceType>
545  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(BasisPtr basisForNodes,
546  ScalarView<PointScalar,DeviceType> cellNodes)
547  :
548  nodeOrdering_(HYPERCUBE_NODE_ORDER_TENSOR),
549  cellGeometryType_(HIGHER_ORDER),
550  nodes_(cellNodes)
551  {
552  numCells_ = cellNodes.extent_int(0);
553  numNodesPerCell_ = cellNodes.extent_int(1);
554 
555  // if basis degree is 1, mark as first-order geometry
556  const bool firstOrderGeometry = (basisForNodes->getDegree() == 1);
557  cellGeometryType_ = firstOrderGeometry ? FIRST_ORDER : HIGHER_ORDER;
558 
559  shards::CellTopology cellTopo = basisForNodes->getBaseCellTopology();
560 
561  if (firstOrderGeometry && (cellTopo.getNodeCount() == spaceDim + 1)) // lowest-order and simplicial
562  {
563  affine_ = true;
564  }
565  else
566  {
567  affine_ = false;
568  }
569  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
570  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
571  }
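 // Example usage (a sketch): higher-order (curvilinear) geometry defined by a degree-2 H(grad) basis and a
 // (C,F,D) view of geometry nodes. The basis source and view name are illustrative; any BasisPtr whose
 // cardinality matches the node view's second extent will do:
 //
 //   using Geometry = CellGeometry<double,3,DeviceType>;
 //   Geometry::BasisPtr quadraticBasis = /* a degree-2 H(grad) basis for the cell topology in use */;
 //   ScalarView<double,DeviceType> cellNodes("cell nodes", numMeshCells, quadraticBasis->getCardinality(), 3);
 //   Geometry geometry(quadraticBasis, cellNodes);
 //   // geometry.affine() == false for genuinely higher-order geometry.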
572 
573  template<class PointScalar, int spaceDim, typename DeviceType>
574  KOKKOS_INLINE_FUNCTION
575  bool CellGeometry<PointScalar,spaceDim,DeviceType>::affine() const
576  {
577  return affine_;
578  }
579 
580  template<class PointScalar, int spaceDim, typename DeviceType>
581  TensorData<PointScalar,DeviceType>
582  CellGeometry<PointScalar,spaceDim,DeviceType>::allocateCellMeasure( const Data<PointScalar,DeviceType> & jacobianDet,
583  const TensorData<PointScalar,DeviceType> & cubatureWeights ) const
584  {
585  // Output possibilities for a cubatureWeights with N components:
586  // 1. For AFFINE elements (jacobianDet cell-wise constant), returns a container with N+1 tensorial components; the first component corresponds to cells
587  // 2. Otherwise, returns a container with 1 tensorial component
588 
589  INTREPID2_TEST_FOR_EXCEPTION(cubatureWeights.rank() != 1, std::invalid_argument, "cubatureWeights container must have shape (P)");
590 
591  const int numTensorComponents = affine_ ? cubatureWeights.numTensorComponents() + 1 : 1;
592  std::vector< Data<PointScalar,DeviceType> > tensorComponents(numTensorComponents);
593 
594  if (affine_)
595  {
596  const int cellExtent = jacobianDet.extent_int(0);
597  Kokkos::Array<DataVariationType,7> cellVariationTypes {jacobianDet.getVariationTypes()[0], CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
598  const int cellDataDim = jacobianDet.getDataExtent(0);
599  Kokkos::Array<int,7> cellExtents{cellExtent,1,1,1,1,1,1};
600 
601  ScalarView<PointScalar,DeviceType> detDataView ("cell relative volumes", cellDataDim);
602  tensorComponents[0] = Data<PointScalar,DeviceType>(detDataView,1,cellExtents,cellVariationTypes);
603 
604  for (int cubTensorComponent=0; cubTensorComponent<numTensorComponents-1; cubTensorComponent++)
605  {
606  auto cubatureComponent = cubatureWeights.getTensorComponent(cubTensorComponent);
607  const auto cubatureExtents = cubatureComponent.getExtents();
608  const auto cubatureVariationTypes = cubatureComponent.getVariationTypes();
609  const int numPoints = cubatureComponent.getDataExtent(0);
610  ScalarView<PointScalar,DeviceType> cubatureWeightView ("cubature component weights", numPoints);
611  const int pointComponentRank = 1;
612  tensorComponents[cubTensorComponent+1] = Data<PointScalar,DeviceType>(cubatureWeightView,pointComponentRank,cubatureExtents,cubatureVariationTypes);
613  }
614  }
615  else
616  {
617  const int cellExtent = jacobianDet.extent_int(0);
618  Kokkos::Array<DataVariationType,7> variationTypes {jacobianDet.getVariationTypes()[0], GENERAL, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
619  const int cellDataDim = jacobianDet.getDataExtent(0);
620 
621  const int numPoints = cubatureWeights.extent_int(0);
622  Kokkos::Array<int,7> extents{cellExtent,numPoints,1,1,1,1,1};
623 
624  ScalarView<PointScalar,DeviceType> cubatureWeightView;
625  if (variationTypes[0] != CONSTANT)
626  {
627  cubatureWeightView = ScalarView<PointScalar,DeviceType>("cell measure", cellDataDim, numPoints);
628  }
629  else
630  {
631  cubatureWeightView = ScalarView<PointScalar,DeviceType>("cell measure", numPoints);
632  }
633  const int cellMeasureRank = 2;
634  tensorComponents[0] = Data<PointScalar,DeviceType>(cubatureWeightView,cellMeasureRank,extents,variationTypes);
635  }
636  const bool separateFirstComponent = (numTensorComponents > 1);
637  return TensorData<PointScalar,DeviceType>(tensorComponents, separateFirstComponent);
638  }
639 
640  template<class PointScalar, int spaceDim, typename DeviceType>
641  void CellGeometry<PointScalar,spaceDim,DeviceType>::computeCellMeasure( TensorData<PointScalar,DeviceType> &cellMeasure,
642  const Data<PointScalar,DeviceType> & jacobianDet,
643  const TensorData<PointScalar,DeviceType> & cubatureWeights ) const
644  {
645  // Output possibilities for a cubatureWeights with N components:
646  // 1. For AFFINE elements (jacobianDet constant on each cell), returns a container with N+1 tensorial components; the first component corresponds to cells
647  // 2. Otherwise, returns a container with 1 tensorial component
648 
649  INTREPID2_TEST_FOR_EXCEPTION((cellMeasure.numTensorComponents() != cubatureWeights.numTensorComponents() + 1) && (cellMeasure.numTensorComponents() != 1), std::invalid_argument,
650  "cellMeasure must either have a tensor component count of 1 or a tensor component count that is one higher than that of cubatureWeights");
651 
652  INTREPID2_TEST_FOR_EXCEPTION(cubatureWeights.rank() != 1, std::invalid_argument, "cubatureWeights container must have shape (P)");
653 
654  if (cellMeasure.numTensorComponents() == cubatureWeights.numTensorComponents() + 1)
655  {
656  // affine case; the first component should contain the cell volume divided by ref cell volume; this should be stored in jacobianDet
657  Kokkos::deep_copy(cellMeasure.getTensorComponent(0).getUnderlyingView1(), jacobianDet.getUnderlyingView1()); // copy point-invariant data from jacobianDet to the first tensor component of cell measure container
658  const int numTensorDimensions = cubatureWeights.numTensorComponents();
659  for (int i=1; i<numTensorDimensions+1; i++)
660  {
661  Kokkos::deep_copy(cellMeasure.getTensorComponent(i).getUnderlyingView1(), cubatureWeights.getTensorComponent(i-1).getUnderlyingView1());
662  }
663  }
664  else
665  {
666  auto detVaries = jacobianDet.getVariationTypes();
667 
668  const bool detCellVaries = detVaries[0] != CONSTANT;
669  const bool detPointVaries = detVaries[1] != CONSTANT;
670 
671  if (detCellVaries && detPointVaries)
672  {
673  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView2();
674  auto detData = jacobianDet.getUnderlyingView2();
675  const int numCells = detData.extent_int(0);
676  const int numPoints = detData.extent_int(1);
677  INTREPID2_TEST_FOR_EXCEPTION(numCells != cellMeasureData.extent_int(0), std::invalid_argument, "cellMeasureData doesn't match jacobianDet in cell dimension");
678  INTREPID2_TEST_FOR_EXCEPTION(numPoints != cellMeasureData.extent_int(1), std::invalid_argument, "cellMeasureData doesn't match jacobianDet in point dimension");
679 
680  // We implement this case as a functor (rather than a lambda) to work around an apparent CUDA 10.1.243 compiler bug
681  Impl::CellMeasureFunctor<PointScalar,spaceDim,DeviceType> cellMeasureFunctor(cellMeasureData, detData, cubatureWeights);
682 
683  using ExecutionSpace = typename DeviceType::execution_space;
684  Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<2>> rangePolicy({0,0},{numCells,numPoints});
685  Kokkos::parallel_for(rangePolicy, cellMeasureFunctor);
686  }
687  else if (detCellVaries && !detPointVaries)
688  {
689  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView2();
690  auto detData = jacobianDet.getUnderlyingView1();
691  using ExecutionSpace = typename DeviceType::execution_space;
692  Kokkos::parallel_for(
693  Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<2>>({0,0},{detData.extent_int(0),cubatureWeights.extent_int(0)}),
694  KOKKOS_LAMBDA (int cellOrdinal, int pointOrdinal) {
695  cellMeasureData(cellOrdinal,pointOrdinal) = detData(cellOrdinal) * cubatureWeights(pointOrdinal);
696  });
697  }
698  else
699  {
700  // constant jacobian det case
701  // cell measure data has shape (P)
702  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView1();
703  auto detData = jacobianDet.getUnderlyingView1();
704  using ExecutionSpace = typename DeviceType::execution_space;
705  Kokkos::parallel_for(Kokkos::RangePolicy<ExecutionSpace>(0,cellMeasureData.extent_int(0)),
706  KOKKOS_LAMBDA (const int &pointOrdinal) {
707  cellMeasureData(pointOrdinal) = detData(0) * cubatureWeights(pointOrdinal);
708  });
709  }
710  }
711  }
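 // Typical call sequence for computing cell measures (a sketch): `geometry` is an existing CellGeometry,
 // `jacobianDet` a (C,P) Jacobian-determinant container, and `cubatureWeights` the rank-1 TensorData of
 // quadrature weights (e.g., obtained from a cubature's getCubature() call):
 //
 //   auto cellMeasures = geometry.allocateCellMeasure(jacobianDet, cubatureWeights);
 //   geometry.computeCellMeasure(cellMeasures, jacobianDet, cubatureWeights);
 //
 // For affine geometry, cellMeasures keeps the cell-volume factor and the cubature-weight factors as separate
 // tensorial components; otherwise it holds a single (C,P) component.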
712 
713  template<class PointScalar, int spaceDim, typename DeviceType>
714  typename CellGeometry<PointScalar,spaceDim,DeviceType>::BasisPtr
715  CellGeometry<PointScalar,spaceDim,DeviceType>::basisForNodes() const
716  {
717  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
718  return HostMemberLookup::getBasis(this);
719  }
720 
721  template<class PointScalar, int spaceDim, typename DeviceType>
722  const shards::CellTopology & CellGeometry<PointScalar,spaceDim,DeviceType>::cellTopology() const
723  {
724  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
725  return HostMemberLookup::getCellTopology(this);
726  }
727 
728  template<class PointScalar, int spaceDim, typename DeviceType>
729  KOKKOS_INLINE_FUNCTION
730  DataVariationType CellGeometry<PointScalar,spaceDim,DeviceType>::cellVariationType() const
731  {
732  if (cellGeometryType_ == UNIFORM_GRID)
733  {
734  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_);
735  if (numSubdivisions == 1)
736  {
737  return CONSTANT;
738  }
739  else
740  {
741  return MODULAR;
742  }
743  }
744  else return GENERAL;
745  }
746 
747  template<class PointScalar, int spaceDim, typename DeviceType>
748  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::getJacobianRefData(const TensorPoints<PointScalar,DeviceType> &points) const
749  {
750  Data<PointScalar,DeviceType> emptyRefData;
751  if (cellGeometryType_ == UNIFORM_GRID)
752  {
753  // no need for basis computations
754  return emptyRefData;
755  }
756  else if (cellGeometryType_ == TENSOR_GRID)
757  {
758  // no need for basis values
759  return emptyRefData;
760  }
761  else if ((cellGeometryType_ == FIRST_ORDER) || (cellGeometryType_ == HIGHER_ORDER))
762  {
763  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
764  if (simplex)
765  {
766  // no need for precomputed basis values
767  return emptyRefData;
768  }
769  else
770  {
771  auto basisForNodes = this->basisForNodes();
772 
773  if (affine_)
774  {
775  // no need for precomputed basis values
776  return emptyRefData;
777  }
778  else
779  {
780  auto basisGradients = basisForNodes->allocateBasisValues(points, OPERATOR_GRAD);
781  basisForNodes->getValues(basisGradients, points, OPERATOR_GRAD);
782 
783  int numPoints = points.extent_int(0);
784  int numFields = basisForNodes->getCardinality();
785 
786  // At some (likely relatively small) memory cost, we copy the BasisGradients into an explicit (F,P,D) container.
787  // Given that we expect to reuse this for a non-trivial number of cells in the common use case, the extra memory
788  // cost is likely worth the flops saved by avoiding repeated tensor-container accesses, etc. (One might want to revisit this in cases of high spaceDim
789  // and/or very high polynomial order.)
790 
791  auto firstPointComponentView = points.getTensorComponent(0); // (P,D0)
792  auto basisGradientsView = getMatchingViewWithLabel(firstPointComponentView, "CellGeometryProvider: temporary basisGradients", numFields, numPoints, spaceDim);
793 
794  using ExecutionSpace = typename DeviceType::execution_space;
795  auto policy = Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<3>>({0,0,0},{numFields,numPoints,spaceDim});
796 
797  Kokkos::parallel_for("copy basis gradients", policy,
798  KOKKOS_LAMBDA (const int &fieldOrdinal, const int &pointOrdinal, const int &d) {
799  basisGradientsView(fieldOrdinal,pointOrdinal,d) = basisGradients(fieldOrdinal,pointOrdinal,d);
800  });
801  ExecutionSpace().fence();
802 
803  Data<PointScalar,DeviceType> basisRefData(basisGradientsView);
804  return basisRefData;
805  }
806  }
807  }
808  else
809  {
810  // TODO: handle the other cases
811  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
812  }
813  return emptyRefData;
814  }
815 
816  template<class PointScalar, int spaceDim, typename DeviceType>
817  KOKKOS_INLINE_FUNCTION
818  int CellGeometry<PointScalar,spaceDim,DeviceType>::hypercubeComponentNodeNumber(int hypercubeNodeNumber, int d) const
819  {
820  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
821  {
822  // note that Shards numbers nodes for quad counter-clockwise
823  // cube is tensor-product of the (x,y) quad with a z line segment
824  if (d==0)
825  {
826  if ((hypercubeNodeNumber % 4 == 1) || (hypercubeNodeNumber % 4 == 2))
827  return 1;
828  else
829  return 0;
830  }
831  else if (d==1)
832  {
833  if ((hypercubeNodeNumber % 4 == 2) || (hypercubeNodeNumber % 4 == 3))
834  return 1;
835  else
836  return 0;
837  }
838  }
839  // tensor formula coincides with shards formula for d ≥ 2
840  const int nodesForPriorDimensions = 1 << d;
841  if ((hypercubeNodeNumber / nodesForPriorDimensions) % 2 == 1)
842  return 1;
843  else
844  return 0;
845  }
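 // Worked example: under HYPERCUBE_NODE_ORDER_TENSOR, component d of node n is just bit d of n, so node 6
 // (binary 110) has components (0,1,1). Under HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS the hexahedron's quad layers
 // are numbered counter-clockwise, and node 6 has components (1,1,1). Concretely:
 //
 //   geometry.hypercubeComponentNodeNumber(6, 0); // 0 for tensor ordering, 1 for classic shards
 //   geometry.hypercubeComponentNodeNumber(6, 1); // 1 under either ordering
 //   geometry.hypercubeComponentNodeNumber(6, 2); // 1 under either ordering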
846 
847  template<class PointScalar, int spaceDim, typename DeviceType>
848  void CellGeometry<PointScalar,spaceDim,DeviceType>::initializeOrientations()
849  {
850  using HostExecSpace = typename Kokkos::Impl::is_space<DeviceType>::host_mirror_space::execution_space ;
851 
852  const bool isGridType = (cellGeometryType_ == TENSOR_GRID) || (cellGeometryType_ == UNIFORM_GRID);
853  const int numOrientations = isGridType ? numCellsPerGridCell(subdivisionStrategy_) : numCells();
854 
855  const int nodesPerCell = numNodesPerCell();
856 
857  ScalarView<Orientation, DeviceType> orientationsView("orientations", numOrientations);
858  auto orientationsHost = Kokkos::create_mirror_view(typename HostExecSpace::memory_space(), orientationsView);
859 
860  ScalarView<PointScalar, HostExecSpace> cellNodesHost("cellNodesHost",numOrientations,nodesPerCell); // (C,N) -- where C = numOrientations
861 
862  DataVariationType cellVariationType;
863 
864  if (isGridType)
865  {
866  // then there are as many distinct orientations possible as there are cells per grid cell
867  // fill cellNodesHost with sample nodes from grid cell 0
868 
869  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_); // can be up to 6
870  const int gridCellOrdinal = 0;
871  auto hostPolicy = Kokkos::MDRangePolicy<HostExecSpace,Kokkos::Rank<2>>({0,0},{numSubdivisions,nodesPerCell});
872  Kokkos::parallel_for("fill cellNodesHost", hostPolicy,
873  KOKKOS_LAMBDA (const int &subdivisionOrdinal, const int &nodeInCell) {
874  auto node = gridCellNodeForSubdivisionNode(gridCellOrdinal, subdivisionOrdinal, nodeInCell);
875  cellNodesHost(subdivisionOrdinal,nodeInCell) = node;
876  });
877 
878  cellVariationType = (numSubdivisions == 1) ? CONSTANT : MODULAR;
879  }
880  else
881  {
882  cellVariationType = GENERAL;
883  auto cellToNodesHost = Kokkos::create_mirror_view_and_copy(typename HostExecSpace::memory_space(), cellToNodes_);
884  }
885 
886  OrientationTools<HostExecSpace>::getOrientation(orientationsHost,cellNodesHost,this->cellTopology());
887  Kokkos::deep_copy(orientationsView,orientationsHost);
888 
889  const int orientationsRank = 1; // shape (C)
890  const Kokkos::Array<int,7> orientationExtents {static_cast<int>(numCells_),1,1,1,1,1,1};
891  const Kokkos::Array<DataVariationType,7> orientationVariationTypes { cellVariationType, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
892  orientations_ = Data<Orientation,DeviceType>(orientationsView, orientationsRank, orientationExtents, orientationVariationTypes);
893  }
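 // Usage sketch: orientations are computed lazily, so callers typically go through getOrientations() rather than
 // calling initializeOrientations() directly:
 //
 //   auto orientations = geometry.getOrientations();        // (C) container; initializes on first call
 //   // for a single cell, after initialization:
 //   // Orientation cellOrt = geometry.getOrientation(cellOrdinal);
 //
 // For uniform and tensor grids, only one orientation per subdivision type is stored, with MODULAR cell variation.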
894 
895  template<class PointScalar, int spaceDim, typename DeviceType>
896  KOKKOS_INLINE_FUNCTION
897  size_t CellGeometry<PointScalar,spaceDim,DeviceType>::extent(const int &r) const {
898  if (r == 0)
899  {
900  return numCells_;
901  }
902  else if (r == 1)
903  {
904  return numNodesPerCell_;
905  }
906  else if (r == 2)
907  {
908  return spaceDim;
909  }
910  else
911  {
912  return 1;
913  }
914  }
915 
916  template<class PointScalar, int spaceDim, typename DeviceType>
917  template <typename iType>
918  KOKKOS_INLINE_FUNCTION
919  typename std::enable_if<std::is_integral<iType>::value, int>::type
920  CellGeometry<PointScalar,spaceDim,DeviceType>::extent_int(const iType& r) const
921  {
922  return static_cast<int>(extent(r));
923  }
924 
925  template<class PointScalar, int spaceDim, typename DeviceType>
926  KOKKOS_INLINE_FUNCTION
927  typename CellGeometry<PointScalar,spaceDim,DeviceType>::HypercubeNodeOrdering
928  CellGeometry<PointScalar,spaceDim,DeviceType>::nodeOrderingForHypercubes() const
929  {
930  return nodeOrdering_;
931  }
932 
933  template<class PointScalar, int spaceDim, typename DeviceType>
934  KOKKOS_INLINE_FUNCTION
935  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCells() const
936  {
937  return numCells_;
938  }
939 
940  template<class PointScalar, int spaceDim, typename DeviceType>
941  KOKKOS_INLINE_FUNCTION
942  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCellsInDimension(const int &dim) const
943  {
944  if (cellGeometryType_ == UNIFORM_GRID)
945  {
946  return gridCellCounts_[dim];
947  }
948  else if (cellGeometryType_ == TENSOR_GRID)
949  {
950  return tensorVertices_.extent_int(dim);
951  }
952  else
953  {
954  return -1; // not valid for this cell geometry type
955  }
956  }
957 
958  template<class PointScalar, int spaceDim, typename DeviceType>
959  KOKKOS_INLINE_FUNCTION
960  int CellGeometry<PointScalar,spaceDim,DeviceType>::numNodesPerCell() const
961  {
962  return numNodesPerCell_;
963  }
964 
965  template<class PointScalar, int spaceDim, typename DeviceType>
966  KOKKOS_INLINE_FUNCTION
967  Orientation CellGeometry<PointScalar,spaceDim,DeviceType>::getOrientation(int &cellNumber) const
968  {
969  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!orientations_.isValid(), std::invalid_argument, "orientations_ not initialized; call initializeOrientations() first");
970  return orientations_(cellNumber);
971  }
972 
973  template<class PointScalar, int spaceDim, typename DeviceType>
974  Data<Orientation,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::getOrientations()
975  {
976  if (!orientations_.isValid())
977  {
978  initializeOrientations();
979  }
980  return orientations_;
981  }
982 
983  template<class PointScalar, int spaceDim, typename DeviceType>
984  KOKKOS_INLINE_FUNCTION
985  PointScalar CellGeometry<PointScalar,spaceDim,DeviceType>::gridCellCoordinate(const int &gridCellOrdinal, const int &localNodeNumber, const int &dim) const
986  {
987  const int componentNode = hypercubeComponentNodeNumber(localNodeNumber, dim);
988  int cellCountForPriorDimensions = 1;
989  for (int d=0; d<dim; d++)
990  {
991  cellCountForPriorDimensions *= numCellsInDimension(d);
992  }
993  const int componentGridCellOrdinal = (gridCellOrdinal / cellCountForPriorDimensions) % numCellsInDimension(dim);
994  const int vertexOrdinal = componentGridCellOrdinal + componentNode;
995  if (cellGeometryType_ == UNIFORM_GRID)
996  {
997  return origin_[dim] + (vertexOrdinal * domainExtents_[dim]) / gridCellCounts_[dim];
998  }
999  else if (cellGeometryType_ == TENSOR_GRID)
1000  {
1001  Kokkos::Array<int,spaceDim> pointOrdinalComponents;
1002  for (int d=0; d<spaceDim; d++)
1003  {
1004  pointOrdinalComponents[d] = 0;
1005  }
1006  pointOrdinalComponents[dim] = vertexOrdinal;
1007  return tensorVertices_(pointOrdinalComponents,dim);
1008  }
1009  else
1010  {
1011  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported geometry type");
1012  return 0; // unreachable; here to avoid compiler warnings
1013  }
1014  }
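 // Worked example (uniform grid): with origin 0.0, domain extent 1.0, and 10 cells in dimension dim, the grid
 // cell whose component ordinal is 3 has its "low" vertex (componentNode == 0) at 0.0 + (3 * 1.0) / 10 = 0.3
 // and its "high" vertex (componentNode == 1) at 0.0 + (4 * 1.0) / 10 = 0.4, per the formula above.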
1015 
1016  template<class PointScalar, int spaceDim, typename DeviceType>
1017  KOKKOS_INLINE_FUNCTION
1018  unsigned CellGeometry<PointScalar,spaceDim,DeviceType>::rank() const
1019  {
1020  return 3; // (C,N,D)
1021  }
1022 
1023  template<class PointScalar, int spaceDim, typename DeviceType>
1024  KOKKOS_INLINE_FUNCTION
1025  int CellGeometry<PointScalar,spaceDim,DeviceType>::gridCellNodeForSubdivisionNode(const int &gridCellOrdinal, const int &subdivisionOrdinal,
1026  const int &subdivisionNodeNumber) const
1027  {
1028  // TODO: do something to reuse the nodeLookup containers
1029  switch (subdivisionStrategy_)
1030  {
1031  case NO_SUBDIVISION:
1032  return subdivisionNodeNumber;
1033  case TWO_TRIANGLES_RIGHT:
1034  case TWO_TRIANGLES_LEFT:
1035  case FOUR_TRIANGLES:
1036  {
1037  Kokkos::Array<int,3> nodeLookup;
1038  if (subdivisionStrategy_ == TWO_TRIANGLES_RIGHT)
1039  {
1040  if (subdivisionOrdinal == 0)
1041  {
1042  // bottom-right cell: node numbers coincide with quad node numbers
1043  nodeLookup = {0,1,2};
1044  }
1045  else if (subdivisionOrdinal == 1)
1046  {
1047  // node 0 --> node 2
1048  // node 1 --> node 3
1049  // node 2 --> node 0
1050  nodeLookup = {2,3,0};
1051  }
1052  else
1053  {
1054  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported subdivision ordinal");
1055  }
1056  }
1057  else if (subdivisionStrategy_ == TWO_TRIANGLES_LEFT)
1058  {
1059  if (subdivisionOrdinal == 0)
1060  {
1061  // bottom-left cell:
1062  // node 0 --> node 3
1063  // node 1 --> node 0
1064  // node 2 --> node 1
1065  nodeLookup = {3,0,1};
1066  }
1067  else if (subdivisionOrdinal == 1)
1068  {
1069  // node 0 --> node 2
1070  // node 1 --> node 3
1071  // node 2 --> node 0
1072  nodeLookup = {2,3,0};
1073  }
1074  else
1075  {
1076  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported subdivision ordinal");
1077  }
1078  }
1079  else // FOUR_TRIANGLES
1080  {
1081  // counter-clockwise, bottom triangle first
1082  // bottom triangle goes:
1083  // 0 --> 1
1084  // 1 --> center
1085  // 2 --> 0
1086  // and similarly for the other triangles, proceeding counter-clockwise
1087  // node 1 always being the center, we special-case that
1088  if (subdivisionNodeNumber == 1)
1089  {
1090  // center coordinate: we call this node 4 in the quadrilateral
1091  return 4;
1092  }
1093  else
1094  {
1095  nodeLookup = {(subdivisionOrdinal + 1) % 4, -1, subdivisionOrdinal};
1096  }
1097  }
1098  const int gridCellNodeNumber = nodeLookup[subdivisionNodeNumber];
1099  return gridCellNodeNumber;
1100  }
1101  case FIVE_TETRAHEDRA:
1102  case SIX_TETRAHEDRA:
1103  {
1104  Kokkos::Array<int,4> nodeLookup;
1105  if (subdivisionStrategy_ == FIVE_TETRAHEDRA)
1106  {
1107  /*
1108  // to discretize a unit cube into 5 tetrahedra, we can take the four vertices
1109  // (1,1,1)
1110  // (0,0,1)
1111  // (0,1,0)
1112  // (1,0,0)
1113  // as an interior tetrahedron. Call this cell 0. The remaining 4 cells can be determined
1114  // by selecting three of the above points (there are exactly 4 such combinations) and then selecting
1115  // from the remaining four vertices of the cube the one nearest the plane defined by those three points.
1116  // The remaining four vertices are:
1117  // (0,0,0)
1118  // (1,1,0)
1119  // (1,0,1)
1120  // (0,1,1)
1121  // For each of these four, we pick one, and then take the three nearest vertices from cell 0 to form a new tetrahedron.
1122  // We enumerate as follows:
1123  // cell 0: (1,1,1), (0,0,1), (0,1,0), (1,0,0)
1124  // cell 1: (0,0,0), (1,0,0), (0,1,0), (0,0,1)
1125  // cell 2: (1,1,0), (1,1,1), (0,1,0), (1,0,0)
1126  // cell 3: (1,0,1), (1,1,1), (0,0,1), (1,0,0)
1127  // cell 4: (0,1,1), (1,1,1), (0,0,1), (0,1,0)
1128  */
1129  // tetrahedra are as follows:
1130  // 0: {1,3,4,6}
1131  // 1: {0,1,3,4}
1132  // 2: {1,2,3,6}
1133  // 3: {1,4,5,6}
1134  // 4: {3,4,6,7}
1135  switch (subdivisionOrdinal) {
1136  case 0:
1137  nodeLookup = {1,3,4,6};
1138  break;
1139  case 1:
1140  nodeLookup = {0,1,3,4};
1141  break;
1142  case 2:
1143  nodeLookup = {1,2,3,6};
1144  break;
1145  case 3:
1146  nodeLookup = {1,4,5,6};
1147  break;
1148  case 4:
1149  nodeLookup = {3,4,6,7};
1150  break;
1151  default:
1152  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "invalid subdivisionOrdinal");
1153  break;
1154  }
1155  }
1156  else if (subdivisionStrategy_ == SIX_TETRAHEDRA)
1157  {
1158  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for SIX_TETRAHEDRA not yet implemented");
1159  }
1160  const int gridCellNodeNumber = nodeLookup[subdivisionNodeNumber];
1161  return gridCellNodeNumber;
1162  }
1163  default:
1164  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Subdivision strategy not yet implemented!");
1165  // some compilers complain about missing return
1166  return 0; // statement should be unreachable...
1167  }
1168  }
1169 
1170  template<class PointScalar, int spaceDim, typename DeviceType>
1171  KOKKOS_INLINE_FUNCTION
1172  PointScalar CellGeometry<PointScalar,spaceDim,DeviceType>::subdivisionCoordinate(const int &gridCellOrdinal, const int &subdivisionOrdinal,
1173  const int &subdivisionNodeNumber, const int &d) const
1174  {
1175  int gridCellNode = gridCellNodeForSubdivisionNode(gridCellOrdinal, subdivisionOrdinal, subdivisionNodeNumber);
1176 
1177  if (subdivisionStrategy_ == FOUR_TRIANGLES)
1178  {
1179  // this is the one case in which the gridCellNode may not actually be a node in the grid cell
1180  if (gridCellNode == 4) // center vertex
1181  {
1182  // d == 0 means quad vertices 0 and 1 suffice;
1183  // d == 1 means quad vertices 0 and 3 suffice
1184  const int gridVertex0 = 0;
1185  const int gridVertex1 = (d == 0) ? 1 : 3;
1186  return 0.5 * (gridCellCoordinate(gridCellOrdinal, gridVertex0, d) + gridCellCoordinate(gridCellOrdinal, gridVertex1, d));
1187  }
1188  }
1189  return gridCellCoordinate(gridCellOrdinal, gridCellNode, d);
1190  }
1191 
1192  template<class PointScalar, int spaceDim, typename DeviceType>
1193  KOKKOS_INLINE_FUNCTION
1194  PointScalar
1195  CellGeometry<PointScalar,spaceDim,DeviceType>::operator()(const int& cell, const int& node, const int& dim) const {
1196  if ((cellGeometryType_ == UNIFORM_GRID) || (cellGeometryType_ == TENSOR_GRID))
1197  {
1198  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_);
1199  if (numSubdivisions == 1)
1200  {
1201  // hypercube
1202  return gridCellCoordinate(cell, node, dim);
1203  }
1204  else
1205  {
1206  const int subdivisionOrdinal = cell % numSubdivisions;
1207  const int gridCellOrdinal = cell / numSubdivisions;
1208  return subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, node, dim);
1209  }
1210  }
1211  else
1212  {
1213 #ifdef HAVE_INTREPID2_DEBUG
1214  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((cell < 0), std::invalid_argument, "cell out of bounds");
1215  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(static_cast<unsigned>(cell) >= numCells_, std::invalid_argument, "cell out of bounds");
1216  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((node < 0), std::invalid_argument, "node out of bounds");
1217  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(static_cast<unsigned>(node) >= numNodesPerCell_, std::invalid_argument, "node out of bounds");
1218  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((dim < 0), std::invalid_argument, "dim out of bounds" );
1219  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(dim >= spaceDim, std::invalid_argument, "dim out of bounds" );
1220 #endif
1221  if (cellToNodes_.is_allocated())
1222  {
1223  const int nodeNumber = cellToNodes_(cell,node);
1224  return nodes_(nodeNumber,dim);
1225  }
1226  else
1227  {
1228  return nodes_(cell,node,dim);
1229  }
1230  }
1231  }
1232 
1233  template<class PointScalar, int spaceDim, typename DeviceType>
1234  KOKKOS_INLINE_FUNCTION
1235  int CellGeometry<PointScalar,spaceDim,DeviceType>::uniformJacobianModulus() const
1236  {
1237  if (cellGeometryType_ == UNIFORM_GRID)
1238  {
1239  return numCellsPerGridCell(subdivisionStrategy_);
1240  }
1241  else
1242  {
1243  return numCells_;
1244  }
1245  }
1246 
1247  template<class PointScalar, int spaceDim, typename DeviceType>
1248  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const TensorPoints<PointScalar,DeviceType> &points, const int startCell, const int endCell) const
1249  {
1250  const int pointsPerCell = points.extent_int(0);
1251  return allocateJacobianDataPrivate(points,pointsPerCell,startCell,endCell);
1252  }
1253 
1254  template<class PointScalar, int spaceDim, typename DeviceType>
1255  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const ScalarView<PointScalar,DeviceType> &points, const int startCell, const int endCell) const
1256  {
1257  // if points is rank 3, it has shape (C,P,D). If it's rank 2, (P,D).
1258  const int pointDimension = (points.rank() == 3) ? 1 : 0;
1259  const int pointsPerCell = points.extent_int(pointDimension);
1260  TensorPoints<PointScalar,DeviceType> tensorPoints(points);
1261  return allocateJacobianDataPrivate(tensorPoints,pointsPerCell,startCell,endCell);
1262  }
1263 
1264  template<class PointScalar, int spaceDim, typename DeviceType>
1265  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const int &numPoints, const int startCell, const int endCell) const
1266  {
1267  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!affine_, std::invalid_argument, "this version of allocateJacobianData() is only supported for affine CellGeometry");
1268 
1269  TensorPoints<PointScalar,DeviceType> emptyPoints;
1270  return allocateJacobianDataPrivate(emptyPoints,numPoints,startCell,endCell);
1271  }
1272 
1273  template<class PointScalar, int spaceDim, typename DeviceType>
1274  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const TensorPoints<PointScalar,DeviceType> &points,
1275  const Data<PointScalar,DeviceType> &refData, const int startCell, const int endCell) const
1276  {
1277  const int pointsPerCell = points.extent_int(0);
1278  setJacobianDataPrivate(jacobianData,points,pointsPerCell,refData,startCell,endCell);
1279  }
1280 
1281  template<class PointScalar, int spaceDim, typename DeviceType>
1282  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const ScalarView<PointScalar,DeviceType> &points,
1283  const Data<PointScalar,DeviceType> &refData, const int startCell, const int endCell) const
1284  {
1285  // if points is rank 3, it has shape (C,P,D). If it's rank 2, (P,D).
1286  const int pointDimension = (points.rank() == 3) ? 1 : 0;
1287  const int pointsPerCell = points.extent_int(pointDimension);
1288  TensorPoints<PointScalar,DeviceType> tensorPoints(points);
1289  setJacobianDataPrivate(jacobianData,tensorPoints,pointsPerCell,refData,startCell,endCell);
1290  }
1291 
1292  template<class PointScalar, int spaceDim, typename DeviceType>
1293  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const int &numPoints, const int startCell, const int endCell) const
1294  {
1295  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!affine_, std::invalid_argument, "this version of setJacobian() is only supported for affine CellGeometry");
1296 
1297  TensorPoints<PointScalar,DeviceType> emptyPoints;
1298  Data<PointScalar,DeviceType> emptyRefData;
1299  setJacobianDataPrivate(jacobianData,emptyPoints,numPoints,emptyRefData,startCell,endCell);
1300  }
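 // Usage sketch for the point-count-only overloads above (affine geometry only); `geometry` is an affine
 // CellGeometry (e.g., a uniform grid) and `numPoints` the quadrature point count:
 //
 //   auto jacobian = geometry.allocateJacobianData(numPoints);   // logically (C,P,D,D), stored compactly
 //   geometry.setJacobian(jacobian, numPoints);                  // no reference points or refData required
 //
 // Determinants and inverses can then be computed from `jacobian` (e.g., via CellTools) and fed to the
 // allocateCellMeasure()/computeCellMeasure() methods above.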
1301 } // namespace Intrepid2
1302 
1303 #endif /* Intrepid2_CellGeometryDef_h */