dune-fem 2.8.0
petscoperator.hh
#ifndef DUNE_FEM_PETSCLINEAROPERATOR_HH
#define DUNE_FEM_PETSCLINEAROPERATOR_HH

#include <cassert>
#include <cstddef>

#include <iostream>
#include <memory>
#include <string>
#include <type_traits>
#include <vector>

#include <dune/common/dynmatrix.hh>
// (further dune-fem includes, rendered as hyperlinks in the original listing, are elided here)
#if HAVE_PETSC

#include "petscmat.h"

namespace Dune
{
  namespace Fem
  {

    struct PetscSolverParameter : public LocalParameter< SolverParameter, PetscSolverParameter >
    {
      typedef LocalParameter< SolverParameter, PetscSolverParameter > BaseType;

    public:
      using BaseType :: parameter ;
      using BaseType :: keyPrefix ;

      PetscSolverParameter( const ParameterReader &parameter = Parameter::container() )
        : BaseType( parameter )
      {}

      PetscSolverParameter( const SolverParameter& sp )
        : PetscSolverParameter( sp.keyPrefix(), sp.parameter() )
      {}

      PetscSolverParameter( const std::string &keyPrefix, const ParameterReader &parameter = Parameter::container() )
        : BaseType( keyPrefix, parameter )
      {}

      bool isPetscSolverParameter() const { return true; }

      static const int boomeramg = 0;
      static const int parasails = 1;
      static const int pilut     = 2;

      int hypreMethod() const
      {
        const std::string hyprePCNames[] = { "boomer-amg", "parasails", "pilu-t" };
        int hypreType = 0;
        // check for the same deprecated key that is read below
        if (parameter().exists("petsc.preconditioning.hypre.method"))
        {
          hypreType = parameter().getEnum( "petsc.preconditioning.hypre.method", hyprePCNames, 0 );
          std::cout << "WARNING: using deprecated parameter 'petsc.preconditioning.hypre.method', use "
                    << keyPrefix() << "hypre.method instead\n";
        }
        else
          hypreType = parameter().getEnum( keyPrefix()+"hypre.method", hyprePCNames, 0 );
        return hypreType;
      }

      int superluMethod() const
      {
        const std::string factorizationNames[] = { "petsc", "superlu", "mumps" };
        int factorType = 0;
        if (parameter().exists("petsc.preconditioning.lu.method"))
        {
          factorType = parameter().getEnum( "petsc.preconditioning.lu.method", factorizationNames, 0 );
          std::cout << "WARNING: using deprecated parameter 'petsc.preconditioning.lu.method', use "
                    << keyPrefix() << "preconditioning.lu.method instead\n";
        }
        else
          factorType = parameter().getEnum( keyPrefix()+"preconditioning.lu.method", factorizationNames, 0 );
        return factorType;
      }

      bool viennaCL () const {
        return parameter().getValue< bool > ( keyPrefix() + "petsc.viennacl", false );
      }
      bool blockedMode () const {
        return parameter().getValue< bool > ( keyPrefix() + "petsc.blockedmode", true );
      }
    };
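
    // Illustrative parameter settings read by PetscSolverParameter above (a
    // sketch only; the actual key prefix depends on how the surrounding solver
    // sets keyPrefix(), "fem.solver." is merely an assumed example):
    //
    //   fem.solver.hypre.method: boomer-amg            # or: parasails, pilu-t
    //   fem.solver.preconditioning.lu.method: mumps    # or: petsc, superlu
    //   fem.solver.petsc.viennacl: false
    //   fem.solver.petsc.blockedmode: true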


    /* ========================================
     * class PetscLinearOperator
     */
    template< typename DomainFunction, typename RangeFunction >
    class PetscLinearOperator
      : public Fem::AssembledOperator< DomainFunction, RangeFunction >
    {
      typedef PetscLinearOperator< DomainFunction, RangeFunction > ThisType;
    public:
      typedef Mat MatrixType;
      typedef DomainFunction DomainFunctionType;
      typedef RangeFunction RangeFunctionType;
      typedef typename DomainFunctionType::RangeFieldType DomainFieldType;
      typedef typename RangeFunctionType::RangeFieldType RangeFieldType;
      typedef typename DomainFunctionType::DiscreteFunctionSpaceType DomainSpaceType;
      typedef typename RangeFunctionType::DiscreteFunctionSpaceType RangeSpaceType;

      typedef PetscDiscreteFunction< DomainSpaceType > PetscDomainFunctionType;
      typedef PetscDiscreteFunction< RangeSpaceType > PetscRangeFunctionType;

      typedef typename DomainSpaceType::GridPartType::template Codim< 0 >::EntityType DomainEntityType;
      typedef typename RangeSpaceType::GridPartType::template Codim< 0 >::EntityType RangeEntityType;

      static const unsigned int domainLocalBlockSize = DomainSpaceType::localBlockSize;
      static const unsigned int rangeLocalBlockSize  = RangeSpaceType::localBlockSize;

      static constexpr bool squareBlocked = domainLocalBlockSize == rangeLocalBlockSize ;
      static constexpr bool blockedMatrix = domainLocalBlockSize > 1 && squareBlocked;

      // Note: the template arguments should arguably be rangeBS x domainBS:
      // the system is A d = r, so the domain space determines the columns and
      // the range space determines the rows. For the square-blocked case used
      // here both orders coincide.
      typedef FlatFieldMatrix< RangeFieldType, domainLocalBlockSize, rangeLocalBlockSize > MatrixBlockType;
      typedef MatrixBlockType block_type;

    private:
      enum Status { statAssembled = 0, statAdd = 1, statInsert = 2, statGet = 3, statNothing = 4 };

      typedef PetscMappers< DomainSpaceType > DomainMappersType;
      typedef PetscMappers< RangeSpaceType > RangeMappersType;

      typedef AuxiliaryDofs< typename DomainSpaceType::GridPartType, typename DomainMappersType::GhostMapperType > DomainAuxiliaryDofsType;
      typedef AuxiliaryDofs< typename RangeSpaceType::GridPartType, typename RangeMappersType::GhostMapperType > RangeAuxiliaryDofsType;

    public:
      // the local matrix
      class LocalMatrix;

      struct LocalMatrixTraits
      {
        typedef typename DomainFunctionType::DiscreteFunctionSpaceType DomainSpaceType;
        typedef typename RangeFunctionType::DiscreteFunctionSpaceType RangeSpaceType;
        typedef LocalMatrix LocalMatrixType;
        typedef typename RangeSpaceType::RangeFieldType RangeFieldType;

        // copied this typedef from spmatrix.hh
        typedef RangeFieldType LittleBlockType;
      };

      typedef LocalMatrix ObjectType;
      typedef ThisType LocalMatrixFactoryType;
      typedef ObjectStack< LocalMatrixFactoryType > LocalMatrixStackType;

      typedef LocalMatrixWrapper< LocalMatrixStackType > LocalMatrixType;
      typedef ColumnObject< ThisType > LocalColumnObjectType;

      PetscLinearOperator ( const DomainSpaceType &domainSpace, const RangeSpaceType &rangeSpace,
                            const PetscSolverParameter& param = PetscSolverParameter() )
        : domainMappers_( domainSpace ),
          rangeMappers_( rangeSpace ),
          localMatrixStack_( *this ),
          status_( statNothing ),
          viennaCL_( param.viennaCL() ),
          blockedMode_( blockedMatrix && (!viennaCL_) && param.blockedMode() )
      {}

      PetscLinearOperator ( const std::string &, const DomainSpaceType &domainSpace, const RangeSpaceType &rangeSpace,
                            const PetscSolverParameter& param = PetscSolverParameter() )
        : PetscLinearOperator( domainSpace, rangeSpace, param )
      {}

      ~PetscLinearOperator ()
      {
        destroy();
      }

      void flushAssembly()
      {
        ::Dune::Petsc::MatAssemblyBegin( petscMatrix_, MAT_FLUSH_ASSEMBLY );
        ::Dune::Petsc::MatAssemblyEnd  ( petscMatrix_, MAT_FLUSH_ASSEMBLY );
        // set variable directly since setStatus might be disabled
        status_ = statAssembled;
      }

      void finalize ()
      {
        if( ! finalizedAlready() )
        {
          ::Dune::Petsc::MatAssemblyBegin( petscMatrix_, MAT_FINAL_ASSEMBLY );
          ::Dune::Petsc::MatAssemblyEnd  ( petscMatrix_, MAT_FINAL_ASSEMBLY );

          if( ! unitRows_.empty() )
          {
            ::Dune::Petsc::MatZeroRows( petscMatrix_, unitRows_.size(), unitRows_.data(), 1. );
            // remove all cached unit rows
            unitRows_.clear();
          }
        }
      }
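
      // Note on the assembly life cycle: reserve() sets up the non-zero
      // pattern, entries are then written with ADD_VALUES/INSERT_VALUES, and
      // finalize() performs PETSc's MAT_FINAL_ASSEMBLY exactly once, applying
      // any cached unit rows. apply() and exportMatrix() call
      // finalizeAssembly() themselves, so an explicit finalize() is optional.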

    protected:
      bool finalizedAlready() const
      {
        PetscBool assembled = PETSC_FALSE ;
        ::Dune::Petsc::MatAssembled( petscMatrix_, &assembled );
        return assembled == PETSC_TRUE;
      }

      void finalizeAssembly () const
      {
        const_cast< ThisType& > (*this).finalize();
      }

    public:
      const DomainSpaceType &domainSpace () const { return domainMappers_.space(); }
      const RangeSpaceType &rangeSpace () const { return rangeMappers_.space(); }

      template <class DF, class RF>
      void apply ( const DF &arg, RF &dest ) const
      {
        if( ! petscArg_ )
          petscArg_.reset( new PetscDomainFunctionType( "PetscOp-arg", domainSpace() ) );
        if( ! petscDest_ )
          petscDest_.reset( new PetscRangeFunctionType( "PetscOp-dest", rangeSpace() ) );

        petscArg_->assign( arg );
        apply( *petscArg_, *petscDest_ );
        dest.assign( *petscDest_ );
      }

      void apply ( const PetscDomainFunctionType &arg, PetscRangeFunctionType &dest ) const
      {
        // make sure matrix is in correct state
        finalizeAssembly();
        ::Dune::Petsc::MatMult( petscMatrix_, *arg.petscVec(), *dest.petscVec() );
      }

      void operator() ( const DomainFunctionType &arg, RangeFunctionType &dest ) const
      {
        apply( arg, dest );
      }
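
      // Usage sketch (type and variable names are illustrative): after
      // assembly the operator acts as the matrix-vector product w = A u.
      //
      //   PetscLinearOperator< DiscreteFunctionType, DiscreteFunctionType > A( space, space );
      //   // ... reserve() and assemble entries ...
      //   A( u, w );  // same as A.apply( u, w ); finalizes assembly if needed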

      void reserve ()
      {
        reserve( SimpleStencil<DomainSpaceType,RangeSpaceType>(0), true );
      }

      template <class Set>
      void reserve (const std::vector< Set >& sparsityPattern )
      {
        reserve( StencilWrapper< DomainSpaceType,RangeSpaceType, Set >( sparsityPattern ), true );
      }

      template <class StencilType>
      void reserve (const StencilType &stencil, const bool isSimpleStencil = false )
      {
        domainMappers_.update();
        rangeMappers_.update();

        if(sequence_ != domainSpace().sequence())
        {
          // clear PETSc Mat
          destroy();

          // reset temporary PETSc discrete functions
          petscArg_.reset();
          petscDest_.reset();

          unitRows_.clear();

          // create matrix
          ::Dune::Petsc::MatCreate( domainSpace().gridPart().comm(), &petscMatrix_ );

          // set sizes of the matrix (columns == domain and rows == range)
          const PetscInt localCols = domainMappers_.ghostMapper().interiorSize() * domainLocalBlockSize;
          const PetscInt localRows = rangeMappers_.ghostMapper().interiorSize() * rangeLocalBlockSize;

          const PetscInt globalCols = domainMappers_.parallelMapper().size() * domainLocalBlockSize;
          const PetscInt globalRows = rangeMappers_.parallelMapper().size() * rangeLocalBlockSize;

          ::Dune::Petsc::MatSetSizes( petscMatrix_, localRows, localCols, globalRows, globalCols );

          PetscInt bs = 1;
          if( viennaCL_ )
          {
            ::Dune::Petsc::MatSetType( petscMatrix_, MATAIJVIENNACL );
          }
          else if( blockedMode_ )
          {
            bs = domainLocalBlockSize ;
            ::Dune::Petsc::MatSetType( petscMatrix_, MATBAIJ );
            // set block size
            ::Dune::Petsc::MatSetBlockSize( petscMatrix_, bs );
          }
          else
          {
            ::Dune::Petsc::MatSetType( petscMatrix_, MATAIJ );
          }
          /*
          std::cout << "Matrix dimension with bs=" << bs << " "
                    << localRows << "x" << localCols << " "
                    << globalRows << "x" << globalCols << " "
                    << rangeLocalBlockSize/bs << "x" << domainLocalBlockSize/bs << " "
                    << std::endl;
          */

          // There is an issue with the stencil and the non-zero pattern in the
          // case of domainSpace != rangeSpace. In this case additional mallocs
          // are required during matrix assembly, which heavily impacts the
          // performance. As a quick fix we use a global value for the number
          // of non-zeros per row. That leads to a significant increase in
          // memory usage, but not too much computational overhead in the
          // assembly. The issue with the stencil is a bug and needs fixing.
          if( isSimpleStencil || std::is_same< StencilType,SimpleStencil<DomainSpaceType,RangeSpaceType> >::value )
          {
            ::Dune::Petsc::MatSetUp( petscMatrix_, bs, domainLocalBlockSize * stencil.maxNonZerosEstimate() );
          }
          else
          {
            DomainAuxiliaryDofsType domainAuxiliaryDofs( domainMappers_.ghostMapper() );
            RangeAuxiliaryDofsType rangeAuxiliaryDofs( rangeMappers_.ghostMapper() );

            std::vector< PetscInt > d_nnz( localRows / bs, 0 );
            std::vector< PetscInt > o_nnz( localRows / bs, 0 );
            for( const auto& entry : stencil.globalStencil() )
            {
              const int petscIndex = rangeMappers_.ghostIndex( entry.first );
              if( rangeAuxiliaryDofs.contains( petscIndex ) )
                continue;

              for (unsigned int rb = 0; rb<rangeLocalBlockSize/bs; ++rb)
              {
                const size_t row = petscIndex*rangeLocalBlockSize/bs + rb;
                // Remark: for DG, ghost entities should not be inserted into
                // the stencil to get optimal results, but they are needed for ISTL.
                assert( row < size_t(localRows/bs) );
                d_nnz[ row ] = o_nnz[ row ] = 0;
                for( const auto local : entry.second )
                {
                  if( !domainAuxiliaryDofs.contains( domainMappers_.ghostIndex( local ) ) )
                    d_nnz[ row ] += domainLocalBlockSize/bs;
                  else
                    o_nnz[ row ] += domainLocalBlockSize/bs;
                }
              }
            }
            ::Dune::Petsc::MatSetUp( petscMatrix_, bs, &d_nnz[0], &o_nnz[0] );
          }
          sequence_ = domainSpace().sequence();
        }

        flushAssembly();
        status_ = statAssembled ;
      }
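
      // Reservation sketch: a caller typically builds a stencil over the grid
      // and hands it to reserve() before assembling (DiagonalAndNeighborStencil
      // from dune/fem/operator/common/stencil.hh is a common choice; shown
      // here as an assumption about the calling code):
      //
      //   Dune::Fem::DiagonalAndNeighborStencil< DomainSpaceType, RangeSpaceType >
      //     stencil( domainSpace, rangeSpace );
      //   A.reserve( stencil );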

      void clear ()
      {
        flushAssembly();
        ::Dune::Petsc::MatZeroEntries( petscMatrix_ );
        flushAssembly();
      }

      template <class Vector>
      void setUnitRows( const Vector &rows )
      {
        std::vector< PetscInt > r( rows.size() );
        for( std::size_t i = 0; i < rows.size(); ++i )
        {
          const PetscInt block = rangeMappers_.parallelIndex( rows[ i ] / rangeLocalBlockSize );
          r[ i ] = block * rangeLocalBlockSize + (rows[ i ] % rangeLocalBlockSize);
        }

        if( finalizedAlready() )
        {
          ::Dune::Petsc::MatZeroRows( petscMatrix_, r.size(), r.data(), 1. );
        }
        else
        {
          unitRows_.reserve( unitRows_.size() + r.size() );
          for( const auto& row : r )
            unitRows_.push_back( row );
        }
      }
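
      // Sketch: enforcing Dirichlet constraints by turning the corresponding
      // rows into unit rows (the global dof indices below are made up for
      // illustration):
      //
      //   std::vector< PetscInt > dirichletRows = { 0, 5, 42 };
      //   A.setUnitRows( dirichletRows );  // each row becomes a unit diagonal row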
411
413 ObjectType* newObject() const
414 {
415 return new ObjectType( *this, domainSpace(), rangeSpace() );
416 }
417
422 [[deprecated("Use TemporaryLocalMatrix,LocalContribution and {get,add,set}LocalMatrix")]]
423 LocalMatrixType localMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity ) const
424 {
425 return LocalMatrixType(localMatrixStack_, domainEntity, rangeEntity);
426 }
427
428 LocalColumnObjectType localColumn( const DomainEntityType &colEntity ) const
429 {
430 return LocalColumnObjectType ( *this, colEntity );
431 }

    public:
      void unitRow( const PetscInt localRow, const PetscScalar diag = 1.0 )
      {
        std::array< PetscInt, domainLocalBlockSize > rows;
        const PetscInt row = rangeMappers_.parallelIndex( localRow );
        for( unsigned int i=0, r = row * domainLocalBlockSize; i<domainLocalBlockSize; ++i, ++r )
        {
          rows[ i ] = r;
        }

        if( finalizedAlready() )
        {
          // set given row to a zero row with diagonal entry equal to diag
          ::Dune::Petsc::MatZeroRows( petscMatrix_, domainLocalBlockSize, rows.data(), diag );
        }
        else
        {
          // this only works for diag = 1
          assert( std::abs( diag - 1. ) < 1e-12 );
          unitRows_.reserve( unitRows_.size() + domainLocalBlockSize );
          for( const auto& r : rows )
          {
            unitRows_.push_back( r );
          }
        }
      }

      bool blockedMode() const { return blockedMode_; }

    protected:
      template< class PetscOp >
      void applyToBlock ( const PetscInt localRow, const PetscInt localCol, const MatrixBlockType& block, PetscOp op )
      {
#ifndef NDEBUG
        const PetscInt localCols = domainMappers_.ghostMapper().interiorSize() * domainLocalBlockSize;
        const PetscInt localRows = rangeMappers_.ghostMapper().interiorSize() * rangeLocalBlockSize;
        assert( localRow < localRows );
        assert( localCol < localCols );
#endif

        if( blockedMode_ )
        {
          // convert process local indices to global indices
          // (rows are indexed by the range space, columns by the domain space)
          const PetscInt row = rangeMappers_.parallelIndex( localRow );
          const PetscInt col = domainMappers_.parallelIndex( localCol );
          ::Dune::Petsc::MatSetValuesBlocked( petscMatrix_, 1, &row, 1, &col, block.data(), op );
        }
        else
        {
          // convert process local indices to global indices
          const PetscInt row = rangeMappers_.parallelIndex( localRow );
          const PetscInt col = domainMappers_.parallelIndex( localCol );
          std::array< PetscInt, domainLocalBlockSize > rows;
          std::array< PetscInt, domainLocalBlockSize > cols;
          for( unsigned int i=0, r = row * domainLocalBlockSize, c = col * domainLocalBlockSize; i<domainLocalBlockSize; ++i, ++r, ++c )
          {
            rows[ i ] = r;
            cols[ i ] = c;
          }

          // insert/add the full block at the computed positions
          ::Dune::Petsc::MatSetValues( petscMatrix_, domainLocalBlockSize, rows.data(), domainLocalBlockSize, cols.data(), block.data(), op );
        }
        setStatus( statAssembled );
      }

      template< class LocalBlock, class PetscOp >
      void applyToBlock ( const size_t row, const size_t col, const LocalBlock& block, PetscOp op )
      {
        assert( block.rows() == rangeLocalBlockSize );
        assert( block.cols() == domainLocalBlockSize );

        // copy to MatrixBlockType data structure suited to be inserted into Mat
        MatrixBlockType matBlock( block );
        applyToBlock( row, col, matBlock, op );
      }

      template< class LocalBlock >
      void setBlock ( const size_t row, const size_t col, const LocalBlock& block )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statInsert );
#endif
        assert( row < std::numeric_limits< int >::max() );
        assert( col < std::numeric_limits< int >::max() );

        setStatus( statInsert );
        applyToBlock( static_cast< PetscInt >( row ), static_cast< PetscInt >( col ), block, INSERT_VALUES );
      }

      template< class LocalBlock >
      void addBlock ( const size_t row, const size_t col, const LocalBlock& block )
      {
#ifndef USE_SMP_PARALLEL
        // addBlock accumulates, so statAdd is the compatible state here
        assert( status_==statAssembled || status_==statAdd );
#endif
        assert( row < std::numeric_limits< int >::max() );
        assert( col < std::numeric_limits< int >::max() );

        setStatus( statAdd );
        applyToBlock( static_cast< PetscInt >( row ), static_cast< PetscInt >( col ), block, ADD_VALUES );
      }

    protected:
      typedef TemporaryLocalMatrix< DomainSpaceType, RangeSpaceType > TemporaryLocalMatrixType;

      // specialization for temporary local matrix, where a copy of the values is not needed
      template <class Operation>
      const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
                                    const TemporaryLocalMatrixType &localMat, const Operation&,
                                    const std::integral_constant< bool, false > nonscaled )
      {
        return localMat.data();
      }

      // specialization for the local matrix getter, where a copy of the values is not needed
      template <class LM, class Operation>
      const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
                                    const Assembly::Impl::LocalMatrixGetter< LM >& localMat, const Operation&,
                                    const std::integral_constant< bool, false > nonscaled )
      {
        return localMat.localMatrix().data();
      }

      // retrieve values for an arbitrary local matrix, applying the given operation entry-wise
      template <class LocalMatrix, class Operation, bool T>
      const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
                                    const LocalMatrix &localMat, const Operation& operation,
                                    const std::integral_constant< bool, T > scaled )
      {
        std::vector< PetscScalar >& v = *(v_);
        v.resize( rSize * cSize );
        for( unsigned int i = 0, ic = 0; i < rSize; ++i )
        {
          for( unsigned int j = 0; j < cSize; ++j, ++ic )
          {
            v[ ic ] = operation( localMat.get( i, j ) );
          }
        }
        return v.data();
      }

      template< class LocalMatrix, class Operation, class PetscOp, bool T >
      void applyLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity,
                              const LocalMatrix &localMat, const Operation& operation,
                              PetscOp petscOp,
                              const std::integral_constant< bool, T > scaled )
      {
        auto& rcTemp = *(rcTemp_);
        std::vector< PetscInt >& r = rcTemp.first;
        std::vector< PetscInt >& c = rcTemp.second;

        if( blockedMode_ )
        {
          setupIndicesBlocked( rangeMappers_, rangeEntity, r );
          setupIndicesBlocked( domainMappers_, domainEntity, c );

          // blocked mode implies domainLocalBlockSize == rangeLocalBlockSize
          const unsigned int rSize = r.size() * domainLocalBlockSize ;
          const unsigned int cSize = c.size() * domainLocalBlockSize ;

          const PetscScalar* values = getValues( rSize, cSize, localMat, operation, scaled );
          ::Dune::Petsc::MatSetValuesBlocked( petscMatrix_, r.size(), r.data(), c.size(), c.data(), values, petscOp );
        }
        else
        {
          setupIndices( rangeMappers_, rangeEntity, r );
          setupIndices( domainMappers_, domainEntity, c );

          const unsigned int rSize = r.size();
          const unsigned int cSize = c.size();

          const PetscScalar* values = getValues( rSize, cSize, localMat, operation, scaled );
          ::Dune::Petsc::MatSetValues( petscMatrix_, rSize, r.data(), cSize, c.data(), values, petscOp );
        }
        setStatus( statAssembled );
      }

    public:
      template< class LocalMatrix >
      void addLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statAdd );
#endif
        setStatus( statAdd );

        auto operation = [] ( const PetscScalar& value ) -> PetscScalar { return value; };

        applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, ADD_VALUES, std::integral_constant< bool, false >() );
      }

      template< class LocalMatrix, class Scalar >
      void addScaledLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat, const Scalar &s )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statAdd );
#endif
        setStatus( statAdd );

        auto operation = [ &s ] ( const PetscScalar& value ) -> PetscScalar { return s * value; };

        applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, ADD_VALUES, std::integral_constant< bool, true >() );
      }

      template< class LocalMatrix >
      void setLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statInsert );
#endif
        setStatus( statInsert );

        auto operation = [] ( const PetscScalar& value ) -> PetscScalar { return value; };

        applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, INSERT_VALUES, std::integral_constant< bool, false >() );
      }

      template< class LocalMatrix >
      void getLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, LocalMatrix &localMat ) const
      {
        // make sure matrix is in correct state before using
        finalizeAssembly();

#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statGet );
#endif
        setStatus( statGet );

        auto& rcTemp = *(rcTemp_);
        std::vector< PetscInt >& r = rcTemp.first;
        std::vector< PetscInt >& c = rcTemp.second;
        std::vector< PetscScalar >& v = *(v_);

        setupIndices( rangeMappers_, rangeEntity, r );
        setupIndices( domainMappers_, domainEntity, c );

        v.resize( r.size() * c.size() );
        ::Dune::Petsc::MatGetValues( petscMatrix_, r.size(), r.data(), c.size(), c.data(), v.data() );

        for( std::size_t i = 0; i < r.size(); ++i )
          for( std::size_t j = 0; j < c.size(); ++j )
            localMat.set( i, j, v[ i * c.size() + j ] );

        setStatus( statAssembled );
      }
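
      // Assembly sketch using TemporaryLocalMatrix, as the deprecation notes
      // above recommend (the element loop and the local fill are illustrative;
      // dune-fem spaces are iterable over their grid entities):
      //
      //   Dune::Fem::TemporaryLocalMatrix< DomainSpaceType, RangeSpaceType >
      //     localMat( A.domainSpace(), A.rangeSpace() );
      //   for( const auto &entity : A.domainSpace() )
      //   {
      //     localMat.init( entity, entity );
      //     localMat.clear();
      //     // fill localMat via localMat.add( i, j, value ) from element integrals
      //     A.addLocalMatrix( entity, entity, localMat );
      //   }
      //   A.finalize();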

      void scaleLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const RangeFieldType &s )
      {
        // make sure matrix is in correct state before using
        finalizeAssembly();

#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statGet );
#endif
        setStatus( statGet );

        auto& rcTemp = *(rcTemp_);
        std::vector< PetscInt >& r = rcTemp.first;
        std::vector< PetscInt >& c = rcTemp.second;
        std::vector< PetscScalar >& v = *(v_);

        setupIndices( rangeMappers_, rangeEntity, r );
        setupIndices( domainMappers_, domainEntity, c );

        const unsigned int rSize = r.size();
        const unsigned int cSize = c.size();
        const std::size_t size = rSize * cSize;

        v.resize( size );
        // get values
        ::Dune::Petsc::MatGetValues( petscMatrix_, r.size(), r.data(), c.size(), c.data(), v.data() );

        // scale values
        for( std::size_t i = 0; i < size; ++i )
          v[ i ] *= s;

        // set values again
        ::Dune::Petsc::MatSetValues( petscMatrix_, rSize, r.data(), cSize, c.data(), v.data(), INSERT_VALUES );
        setStatus( statAssembled );
      }

      // print matrix to standard out (for debugging only)
      void view () const
      {
        ::Dune::Petsc::MatView( petscMatrix_, PETSC_VIEWER_STDOUT_WORLD );
      }

      // print matrix (for debugging only); MatView only writes to PETSc's own
      // stdout viewer, so only std::cout and std::cerr are supported here
      void print( std::ostream& s ) const
      {
        if( &s == &std::cout || &s == &std::cerr )
        {
          view();
        }
      }

      // return reference to PETSc matrix object
      Mat& exportMatrix () const
      {
        // make sure matrix is in correct state
        finalizeAssembly();
        return petscMatrix_;
      }
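
      // Sketch: the exported Mat can be handed directly to a plain PETSc KSP
      // solver (standard PETSc calls; error-code checking omitted for brevity):
      //
      //   KSP ksp;
      //   KSPCreate( PETSC_COMM_WORLD, &ksp );
      //   KSPSetOperators( ksp, A.exportMatrix(), A.exportMatrix() );
      //   KSPSolve( ksp, *rhs.petscVec(), *solution.petscVec() );
      //   KSPDestroy( &ksp );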

    private:
      PetscLinearOperator ();

      void destroy ()
      {
        if( status_ != statNothing )
        {
          ::Dune::Petsc::MatDestroy( &petscMatrix_ );
          status_ = statNothing ;
        }
        sequence_ = -1;
      }

      void setStatus (const Status &newstatus) const
      {
        // in case OpenMP is used simply avoid status check
#ifndef USE_SMP_PARALLEL
        status_ = newstatus;
#endif
      }

      template< class DFS, class Entity >
      static void setupIndices ( const PetscMappers< DFS > &mappers, const Entity &entity, std::vector< PetscInt > &indices )
      {
        NonBlockMapper< const typename PetscMappers< DFS >::ParallelMapperType, DFS::localBlockSize > nonBlockMapper( mappers.parallelMapper() );
        nonBlockMapper.map( entity, indices );
      }

      template< class DFS, class Entity >
      static void setupIndicesBlocked ( const PetscMappers< DFS > &mappers, const Entity &entity, std::vector< PetscInt > &indices )
      {
        mappers.parallelMapper().map( entity, indices );
      }

      /*
       * data fields
       */
      DomainMappersType domainMappers_;
      RangeMappersType rangeMappers_;

      int sequence_ = -1;
      mutable Mat petscMatrix_;

      mutable LocalMatrixStackType localMatrixStack_;
      mutable Status status_;

      const bool viennaCL_;
      const bool blockedMode_;

      mutable std::unique_ptr< PetscDomainFunctionType > petscArg_;
      mutable std::unique_ptr< PetscRangeFunctionType > petscDest_;

      mutable ThreadSafeValue< std::vector< PetscScalar > > v_;
      mutable ThreadSafeValue< std::pair< std::vector< PetscInt >, std::vector< PetscInt > > > rcTemp_;

      mutable std::vector< PetscInt > unitRows_;
    };



    /* ========================================
     * class PetscLinearOperator::LocalMatrix
     */
    template< typename DomainFunction, typename RangeFunction >
    class PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrix
      : public LocalMatrixDefault< typename PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrixTraits >
    {
      typedef LocalMatrix ThisType;
      typedef LocalMatrixDefault< typename PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrixTraits > BaseType;

      typedef PetscLinearOperator< DomainFunction, RangeFunction > PetscLinearOperatorType;

    public:
      typedef PetscInt DofIndexType;
      typedef std::vector< DofIndexType > IndexVectorType;
      typedef typename DomainFunction::DiscreteFunctionSpaceType DomainSpaceType;
      typedef typename RangeFunction::DiscreteFunctionSpaceType RangeSpaceType;
      typedef typename DomainSpaceType::BasisFunctionSetType DomainBasisFunctionSetType;
      typedef typename RangeSpaceType::BasisFunctionSetType RangeBasisFunctionSetType;

      enum { rangeBlockSize = RangeSpaceType::localBlockSize };
      enum { domainBlockSize = DomainSpaceType::localBlockSize };

    private:

      // needed for the call to mapEach below
      template< typename PetscMapping >
      struct PetscAssignFunctor
      {
        explicit PetscAssignFunctor ( const PetscMapping &petscMapping, IndexVectorType &indices )
          : petscMapping_( petscMapping ),
            indices_( indices )
        {}

        template< typename T >
        void operator() ( const std::size_t localIndex, T globalIndex ) { indices_[ localIndex ] = petscMapping_.globalMapping( globalIndex ); }

      private:
        const PetscMapping &petscMapping_;
        IndexVectorType &indices_;
      };

    public:
      [[deprecated("Use TemporaryLocalMatrix and {add,set,get}LocalMatrix")]]
      LocalMatrix ( const PetscLinearOperatorType &petscLinOp,
                    const DomainSpaceType &domainSpace,
                    const RangeSpaceType &rangeSpace )
        : BaseType( domainSpace, rangeSpace ),
          petscLinearOperator_( petscLinOp )
      {}

      void finalize()
      {}

      void init ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity )
      {
        // call initialize on base class
        BaseType :: init( domainEntity, rangeEntity );

        //*************************************************
        // The rows belong to the range space, so the row
        // indices are determined by the range space;
        // the columns belong to the domain space, so the
        // column indices are determined by the domain space.
        //*************************************************
        setupIndices( petscLinearOperator_.rangeMappers_, rangeEntity, rowIndices_ );
        setupIndices( petscLinearOperator_.domainMappers_, domainEntity, colIndices_ );

        status_ = statAssembled;
        petscLinearOperator_.setStatus(status_);
      }

      inline void add ( const int localRow, const int localCol, const RangeFieldType &value )
      {
        assert( status_==statAssembled || status_==statAdd );
        status_ = statAdd;
        petscLinearOperator_.setStatus(status_);
        ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIndex( localRow ), globalColIndex( localCol ), value, ADD_VALUES );
      }

      inline void set ( const int localRow, const int localCol, const RangeFieldType &value )
      {
        assert( status_==statAssembled || status_==statInsert );
        status_ = statInsert;
        petscLinearOperator_.setStatus(status_);
        ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIndex( localRow ), globalColIndex( localCol ), value, INSERT_VALUES );
      }

      // set all entries of the given local row to zero
      void clearRow ( const int localRow )
      {
        assert( status_==statAssembled || status_==statInsert );
        status_ = statInsert;
        petscLinearOperator_.setStatus(status_);
        const int cols = this->columns();
        const int globalRowIdx = globalRowIndex( localRow );
        for(int localCol = 0; localCol < cols; ++localCol)
        {
          ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIdx, globalColIndex( localCol ), 0.0, INSERT_VALUES );
        }
      }

      inline const RangeFieldType get ( const int localRow, const int localCol ) const
      {
        assert( status_==statAssembled || status_==statGet );
        status_ = statGet;
        petscLinearOperator_.setStatus(status_);
        RangeFieldType v[1];
        const int r[] = { globalRowIndex( localRow ) };
        const int c[] = { globalColIndex( localCol ) };
        ::Dune::Petsc::MatGetValues( petscMatrix(), 1, r, 1, c, v );
        return v[0];
      }

      inline void scale ( const RangeFieldType &factor )
      {
        const_cast< PetscLinearOperatorType& > (petscLinearOperator_).scaleLocalMatrix( this->domainEntity(), this->rangeEntity(), factor );
      }

    private:
      LocalMatrix ();

      Mat& petscMatrix () { return petscLinearOperator_.petscMatrix_; }
      const Mat& petscMatrix () const { return petscLinearOperator_.petscMatrix_; }

    public:
      int rows() const { return rowIndices_.size(); }
      int columns() const { return colIndices_.size(); }

    private:
      DofIndexType globalRowIndex( const int localRow ) const
      {
        assert( localRow < static_cast< int >( rowIndices_.size() ) );
        return rowIndices_[ localRow ];
      }

      DofIndexType globalColIndex( const int localCol ) const
      {
        assert( localCol < static_cast< int >( colIndices_.size() ) );
        return colIndices_[ localCol ];
      }

      /*
       * data fields
       */
      const PetscLinearOperatorType &petscLinearOperator_;
      IndexVectorType rowIndices_;
      IndexVectorType colIndices_;
      mutable Status status_;
    };

  } // namespace Fem

} // namespace Dune

#endif // #if HAVE_PETSC

#endif // #ifndef DUNE_FEM_PETSCLINEAROPERATOR_HH