petscoperator.hh
1#ifndef DUNE_FEM_PETSCLINEAROPERATOR_HH
2#define DUNE_FEM_PETSCLINEAROPERATOR_HH
3
4#include <cassert>
5#include <cstddef>
6
7#include <iostream>
8#include <memory>
9#include <string>
10#include <type_traits>
11#include <vector>
12
13#include <dune/common/dynmatrix.hh>
14
33#if HAVE_PETSC
34
35#include "petscmat.h"
36
37
38namespace Dune
39{
40 namespace Fem
41 {
42
43 struct PetscSolverParameter : public LocalParameter< SolverParameter, PetscSolverParameter >
44 {
45 typedef LocalParameter< SolverParameter, PetscSolverParameter > BaseType;
46
47 public:
48 using BaseType :: parameter ;
49 using BaseType :: keyPrefix ;
50
51 PetscSolverParameter( const ParameterReader &parameter = Parameter::container() )
52 : BaseType( parameter )
53 {}
54
55 PetscSolverParameter( const SolverParameter& sp )
56 : PetscSolverParameter( sp.keyPrefix(), sp.parameter() )
57 {}
58
59 PetscSolverParameter( const std::string &keyPrefix, const ParameterReader &parameter = Parameter::container() )
60 : BaseType( keyPrefix, parameter )
61 {}
62
63 bool isPetscSolverParameter() const { return true; }
64
65 static const int boomeramg = 0;
66 static const int parasails = 1;
67 static const int pilut = 2;
68
69 int hypreMethod() const
70 {
71 const std::string hyprePCNames[] = { "boomer-amg", "parasails", "pilu-t" };
72 int hypreType = 0;
73 if (parameter().exists("petsc.preconditioning.method"))
74 {
75 hypreType = parameter().getEnum( "petsc.preconditioning.hypre.method", hyprePCNames, 0 );
76 std::cout << "WARNING: using deprecated parameter 'petsc.preconditioning.hypre.method' use "
77 << keyPrefix() << "preconditioning.hypre.method instead\n";
78 }
79 else
80 hypreType = parameter().getEnum( keyPrefix()+"hypre.method", hyprePCNames, 0 );
81 return hypreType;
82 }
83
84 int superluMethod() const
85 {
86 const std::string factorizationNames[] = { "petsc", "superlu", "mumps" };
87 int factorType = 0;
88 if (parameter().exists("petsc.preconditioning.lu.method"))
89 {
90 factorType = parameter().getEnum( "petsc.preconditioning.lu.method", factorizationNames, 0 );
91 std::cout << "WARNING: using deprecated parameter 'petsc.preconditioning.lu.method' use "
92 << keyPrefix() << "preconditioning.lu.method instead\n";
93 }
94 else
95 factorType = parameter().getEnum( keyPrefix()+"preconditioning.lu.method", factorizationNames, 0 );
96 return factorType;
97 }
98
99 bool viennaCL () const {
100 return parameter().getValue< bool > ( keyPrefix() + "petsc.viennacl", false );
101 }
102 bool blockedMode () const {
103 return parameter().getValue< bool > ( keyPrefix() + "petsc.blockedmode", true );
104 }
105 };
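    /* Usage sketch (editorial addition, not part of the original header):
     * construct the parameter object from the global parameter container and
     * query the PETSc-specific switches defined above. The keys actually read
     * are keyPrefix() + "petsc.viennacl", keyPrefix() + "petsc.blockedmode",
     * keyPrefix() + "hypre.method" and keyPrefix() + "preconditioning.lu.method".
     *
     *   Dune::Fem::PetscSolverParameter param;          // default key prefix
     *   const bool useViennaCL = param.viennaCL();      // defaults to false
     *   const bool blocked     = param.blockedMode();   // defaults to true
     *   const int  hyprePC     = param.hypreMethod();   // 0 == boomer-amg
     */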
106
107
108
109 /* ========================================
110 * class PetscLinearOperator
111 */
112 template< typename DomainFunction, typename RangeFunction >
113 class PetscLinearOperator
114 : public Fem::AssembledOperator< DomainFunction, RangeFunction >
115 {
116 typedef PetscLinearOperator< DomainFunction, RangeFunction > ThisType;
117 public:
118 typedef Mat MatrixType;
119 typedef DomainFunction DomainFunctionType;
120 typedef RangeFunction RangeFunctionType;
121 typedef typename DomainFunctionType::RangeFieldType DomainFieldType;
122 typedef typename RangeFunctionType::RangeFieldType RangeFieldType;
123 typedef typename DomainFunctionType::DiscreteFunctionSpaceType DomainSpaceType;
124 typedef typename RangeFunctionType::DiscreteFunctionSpaceType RangeSpaceType;
125
126 typedef PetscDiscreteFunction< DomainSpaceType > PetscDomainFunctionType;
127 typedef PetscDiscreteFunction< RangeSpaceType > PetscRangeFunctionType;
128
129 typedef typename DomainSpaceType::GridPartType::template Codim< 0 >::EntityType DomainEntityType;
130 typedef typename RangeSpaceType::GridPartType::template Codim< 0 >::EntityType RangeEntityType;
131
132 static const unsigned int domainLocalBlockSize = DomainSpaceType::localBlockSize;
133 static const unsigned int rangeLocalBlockSize = RangeSpaceType::localBlockSize;
134
135 static constexpr bool squareBlocked = domainLocalBlockSize == rangeLocalBlockSize ;
136 static constexpr bool blockedMatrix = domainLocalBlockSize > 1 && squareBlocked;
137
 138 // note: should this be rangeBS x domainBS? The system is A d = r,
 139 // so the domain determines the columns and the range determines the rows.
140 typedef FlatFieldMatrix< RangeFieldType, domainLocalBlockSize, rangeLocalBlockSize > MatrixBlockType;
141 typedef MatrixBlockType block_type;
142
143 private:
144 enum Status {statAssembled=0,statAdd=1,statInsert=2,statGet=3,statNothing=4};
145
146 typedef PetscMappers< DomainSpaceType > DomainMappersType;
147 typedef PetscMappers< RangeSpaceType > RangeMappersType;
148
149 typedef AuxiliaryDofs< typename DomainSpaceType::GridPartType, typename DomainMappersType::GhostMapperType > DomainAuxiliaryDofsType;
150 typedef AuxiliaryDofs< typename RangeSpaceType::GridPartType, typename RangeMappersType::GhostMapperType > RangeAuxiliaryDofsType;
151
152 public:
153 // the local matrix
154 class LocalMatrix;
155
156 struct LocalMatrixTraits
157 {
158 typedef typename DomainFunctionType::DiscreteFunctionSpaceType DomainSpaceType;
159 typedef typename RangeFunctionType::DiscreteFunctionSpaceType RangeSpaceType;
160 typedef LocalMatrix LocalMatrixType;
161 typedef typename RangeSpaceType::RangeFieldType RangeFieldType;
162
163 // copied this typedef from spmatrix.hh
164 typedef RangeFieldType LittleBlockType;
165 };
166
168 typedef LocalMatrix ObjectType;
169 typedef ThisType LocalMatrixFactoryType;
170 typedef ObjectStack< LocalMatrixFactoryType > LocalMatrixStackType;
171
173 typedef LocalMatrixWrapper< LocalMatrixStackType > LocalMatrixType;
174 typedef ColumnObject< ThisType > LocalColumnObjectType;
175
176 PetscLinearOperator ( const DomainSpaceType &domainSpace, const RangeSpaceType &rangeSpace,
177 const PetscSolverParameter& param = PetscSolverParameter() )
178 : domainMappers_( domainSpace ),
179 rangeMappers_( rangeSpace ),
180 localMatrixStack_( *this ),
181 status_(statNothing),
182 viennaCL_( param.viennaCL() ),
183 blockedMode_( blockedMatrix && (!viennaCL_) && param.blockedMode() )
184 {}
185
186 PetscLinearOperator ( const std::string &, const DomainSpaceType &domainSpace, const RangeSpaceType &rangeSpace,
187 const PetscSolverParameter& param = PetscSolverParameter() )
188 : PetscLinearOperator( domainSpace, rangeSpace, param )
189 {}
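      /* Construction sketch (editorial addition, names are illustrative):
       * given a discrete function type DFType over some discrete function
       * space `space`, the operator is typically created as
       *
       *   typedef Dune::Fem::PetscLinearOperator< DFType, DFType > LinearOperatorType;
       *   LinearOperatorType linOp( "op", space, space );
       *   // reserve the sparsity pattern before assembling, see reserve() below
       */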
190
192 ~PetscLinearOperator ()
193 {
194 destroy();
195 }
196
197 void flushAssembly()
198 {
199 ::Dune::Petsc::MatAssemblyBegin( petscMatrix_, MAT_FLUSH_ASSEMBLY );
200 ::Dune::Petsc::MatAssemblyEnd ( petscMatrix_, MAT_FLUSH_ASSEMBLY );
201 setStatus( statAssembled );
202 }
203
204 void finalize ()
205 {
206 if( ! finalizedAlready() )
207 {
208 ::Dune::Petsc::MatAssemblyBegin( petscMatrix_, MAT_FINAL_ASSEMBLY );
209 ::Dune::Petsc::MatAssemblyEnd ( petscMatrix_, MAT_FINAL_ASSEMBLY );
210
211 if( ! unitRows_.empty() )
212 {
213 ::Dune::Petsc::MatZeroRows( petscMatrix_, unitRows_.size(), unitRows_.data(), 1. );
214 // remove all cached unit rows
215 unitRows_.clear();
216 }
217 }
218 }
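      /* Assembly-state sketch (editorial addition): PETSc distinguishes flush
       * and final assembly. A typical cycle with this class is
       *
       *   linOp.reserve( stencil );   // allocate the non-zero pattern
       *   // ... add/set local matrices on all elements ...
       *   linOp.finalize();           // MAT_FINAL_ASSEMBLY + apply cached unit rows
       */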
219
220 protected:
221 bool finalizedAlready() const
222 {
223 PetscBool assembled = PETSC_FALSE ;
224 ::Dune::Petsc::MatAssembled( petscMatrix_, &assembled );
225 return assembled == PETSC_TRUE;
226 }
227
228 void finalizeAssembly () const
229 {
230 const_cast< ThisType& > (*this).finalize();
231 }
232
233 public:
234 const DomainSpaceType &domainSpace () const { return domainMappers_.space(); }
235 const RangeSpaceType &rangeSpace () const { return rangeMappers_.space(); }
236
240 template <class DF, class RF>
241 void apply ( const DF &arg, RF &dest ) const
242 {
243 if( ! petscArg_ )
244 petscArg_.reset( new PetscDomainFunctionType( "PetscOp-arg", domainSpace() ) );
245 if( ! petscDest_ )
246 petscDest_.reset( new PetscRangeFunctionType( "PetscOp-arg", rangeSpace() ) );
247
248 petscArg_->assign( arg );
249 apply( *petscArg_, *petscDest_ );
250 dest.assign( *petscDest_ );
251 }
252
254 void apply ( const PetscDomainFunctionType &arg, PetscRangeFunctionType &dest ) const
255 {
256 // make sure matrix is in correct state
257 finalizeAssembly();
258 ::Dune::Petsc::MatMult( petscMatrix_, *arg.petscVec() , *dest.petscVec() );
259 }
260
261 void operator() ( const DomainFunctionType &arg, RangeFunctionType &dest ) const
262 {
263 apply( arg, dest );
264 }
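      /* Application sketch (editorial addition): once assembled, the operator
       * acts like any assembled Fem operator,
       *
       *   linOp( u, Au );   // Au = A * u; finalizes the assembly if necessary
       *
       * For non-PETSc discrete functions the data is first copied into internal
       * PetscDiscreteFunction buffers (see the templated apply() above).
       */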
265
266 void reserve ()
267 {
268 reserve( SimpleStencil<DomainSpaceType,RangeSpaceType>(0), true );
269 }
270
271 template <class Set>
272 void reserve (const std::vector< Set >& sparsityPattern )
273 {
274 reserve( StencilWrapper< DomainSpaceType,RangeSpaceType, Set >( sparsityPattern ), true );
275 }
276
278 template <class StencilType>
279 void reserve (const StencilType &stencil, const bool isSimpleStencil = false )
280 {
281 domainMappers_.update();
282 rangeMappers_.update();
283
284 if(sequence_ != domainSpace().sequence())
285 {
286 // clear Petsc Mat
287 destroy();
288
289 // reset temporary Petsc discrete functions
290 petscArg_.reset();
291 petscDest_.reset();
292
293 unitRows_.clear();
294
295 // create matrix
296 ::Dune::Petsc::MatCreate( domainSpace().gridPart().comm(), &petscMatrix_ );
297
298 // set sizes of the matrix (columns == domain and rows == range)
299 const PetscInt localCols = domainMappers_.ghostMapper().interiorSize() * domainLocalBlockSize;
300 const PetscInt localRows = rangeMappers_.ghostMapper().interiorSize() * rangeLocalBlockSize;
301
302 const PetscInt globalCols = domainMappers_.parallelMapper().size() * domainLocalBlockSize;
303 const PetscInt globalRows = rangeMappers_.parallelMapper().size() * rangeLocalBlockSize;
304
305 ::Dune::Petsc::MatSetSizes( petscMatrix_, localRows, localCols, globalRows, globalCols );
306
307 PetscInt bs = 1;
308 if( viennaCL_ )
309 {
310 ::Dune::Petsc::MatSetType( petscMatrix_, MATAIJVIENNACL );
311 }
312 else if( blockedMode_ )
313 {
314 bs = domainLocalBlockSize ;
315 ::Dune::Petsc::MatSetType( petscMatrix_, MATBAIJ );
316 // set block size
317 ::Dune::Petsc::MatSetBlockSize( petscMatrix_, bs );
318 }
319 else
320 {
321 ::Dune::Petsc::MatSetType( petscMatrix_, MATAIJ );
322 }
323 /*
324 std::cout << "Matrix dimension with bs=" << bs << " "
325 << localRows << "x" << localCols << " "
326 << globalRows << "x" << globalCols << " "
327 << rangeLocalBlockSize/bs << "x" << domainLocalBlockSize/bs << " "
328 << std::endl;
329 */
330
 331 // There is an issue with the stencil and the non-zero pattern in the
 332 // case of domainSpace != rangeSpace. In this case additional
 333 // mallocs are required during matrix assembly, which heavily
 334 // impacts the performance. As a quick fix we use a global value
 335 // for the number of non-zeros per row. That leads to a
 336 // significant increase in memory usage, but not too much
 337 // computational overhead in assembly. The issue with the stencil
 338 // is a bug and needs fixing.
339 if( isSimpleStencil || std::is_same< StencilType,SimpleStencil<DomainSpaceType,RangeSpaceType> >::value )
340 {
341 ::Dune::Petsc::MatSetUp( petscMatrix_, bs, domainLocalBlockSize * stencil.maxNonZerosEstimate() );
342 }
343 else
344 {
345 DomainAuxiliaryDofsType domainAuxiliaryDofs( domainMappers_.ghostMapper() );
346 RangeAuxiliaryDofsType rangeAuxiliaryDofs( rangeMappers_.ghostMapper() );
347
348 std::vector< PetscInt > d_nnz( localRows / bs, 0 );
349 std::vector< PetscInt > o_nnz( localRows / bs, 0 );
350 for( const auto entry : stencil.globalStencil() )
351 {
352 const int petscIndex = rangeMappers_.ghostIndex( entry.first );
353 if( rangeAuxiliaryDofs.contains( petscIndex ) )
354 continue;
355
356 for (unsigned int rb = 0; rb<rangeLocalBlockSize/bs; ++rb)
357 {
358 const size_t row = petscIndex*rangeLocalBlockSize/bs + rb;
 359 // Remark: for optimal results with DG, ghost entities should not be
 360 // inserted into the stencil, but they are needed for ISTL.
361 assert( row < size_t(localRows/bs) );
362 d_nnz[ row ] = o_nnz[ row ] = 0;
363 for( const auto local : entry.second )
364 {
365 if( !domainAuxiliaryDofs.contains( domainMappers_.ghostIndex( local ) ) )
366 d_nnz[ row ] += domainLocalBlockSize/bs;
367 else
368 o_nnz[ row ] += domainLocalBlockSize/bs;
369 }
370 }
371 }
372 ::Dune::Petsc::MatSetUp( petscMatrix_, bs, &d_nnz[0], &o_nnz[0] );
373 }
374 sequence_ = domainSpace().sequence();
375 }
376
377 flushAssembly();
378 status_ = statAssembled ;
379 }
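      /* Reservation sketch (editorial addition): the non-zero pattern is usually
       * described by a stencil from dune/fem/operator/common/stencil.hh, e.g.
       * (assuming DiagonalAndNeighborStencil suits the bilinear form at hand)
       *
       *   Dune::Fem::DiagonalAndNeighborStencil< DomainSpaceType, RangeSpaceType >
       *     stencil( domainSpace, rangeSpace );
       *   linOp.reserve( stencil );
       */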
380
381 void clear ()
382 {
383 ::Dune::Petsc::MatZeroEntries( petscMatrix_ );
384 flushAssembly();
385 }
386
387 template <class Vector>
388 void setUnitRows( const Vector &rows )
389 {
390 std::vector< PetscInt > r( rows.size() );
391 for( std::size_t i =0 ; i< rows.size(); ++i )
392 {
393 const PetscInt block = rangeMappers_.parallelIndex( rows[ i ] / rangeLocalBlockSize );
394 r[ i ] = block * rangeLocalBlockSize + (rows[ i ] % rangeLocalBlockSize);
395 }
396
397 if( finalizedAlready() )
398 {
399 ::Dune::Petsc::MatZeroRows( petscMatrix_, r.size(), r.data(), 1. );
400 }
401 else
402 {
403 unitRows_.reserve( unitRows_.size() + r.size() );
404 for( const auto& row : r )
405 unitRows_.push_back( row );
406 }
407 }
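      /* Sketch (editorial addition): setUnitRows is typically used to turn
       * Dirichlet-constrained degrees of freedom into unit rows,
       *
       *   std::vector< int > dirichletRows;   // process-local, unblocked row indices
       *   // ... collect constrained dofs ...
       *   linOp.setUnitRows( dirichletRows ); // rows become (0,...,0,1,0,...,0)
       *
       * If the matrix is not yet finalized, the rows are cached and zeroed in
       * finalize().
       */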
408
410 ObjectType* newObject() const
411 {
412 return new ObjectType( *this, domainSpace(), rangeSpace() );
413 }
414
419 [[deprecated("Use TemporaryLocalMatrix,LocalContribution and {get,add,set}LocalMatrix")]]
420 LocalMatrixType localMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity ) const
421 {
422 return LocalMatrixType(localMatrixStack_, domainEntity, rangeEntity);
423 }
424
425 LocalColumnObjectType localColumn( const DomainEntityType &colEntity ) const
426 {
427 return LocalColumnObjectType ( *this, colEntity );
428 }
429
430 public:
431 void unitRow( const PetscInt localRow, const PetscScalar diag = 1.0 )
432 {
433 std::array< PetscInt, domainLocalBlockSize > rows;
434 const PetscInt row = rangeMappers_.parallelIndex( localRow );
435 for( unsigned int i=0, r = row * domainLocalBlockSize; i<domainLocalBlockSize; ++i, ++r )
436 {
437 rows[ i ] = r;
438 }
439
440 if( finalizedAlready() )
441 {
442 // set given row to a zero row with diagonal entry equal to diag
443 ::Dune::Petsc::MatZeroRows( petscMatrix_, domainLocalBlockSize, rows.data(), diag );
444 }
445 else
446 {
447 // this only works for diag = 1
448 assert( std::abs( diag - 1. ) < 1e-12 );
449 unitRows_.reserve( unitRows_.size() + domainLocalBlockSize );
450 for( const auto& r : rows )
451 {
452 unitRows_.push_back( r );
453 }
454 }
455 }
456
457 bool blockedMode() const { return blockedMode_; }
458
459 protected:
460 template< class PetscOp >
461 void applyToBlock ( const PetscInt localRow, const PetscInt localCol, const MatrixBlockType& block, PetscOp op )
462 {
463#ifndef NDEBUG
464 const PetscInt localCols = domainMappers_.ghostMapper().interiorSize() * domainLocalBlockSize;
465 const PetscInt localRows = rangeMappers_.ghostMapper().interiorSize() * rangeLocalBlockSize;
466 assert( localRow < localRows );
467 assert( localCol < localCols );
468#endif
469
470 if( blockedMode_ )
471 {
472 // convert process local indices to global indices
473 const PetscInt row = rangeMappers_.parallelIndex( localRow );
474 const PetscInt col = rangeMappers_.parallelIndex( localCol );
475 ::Dune::Petsc::MatSetValuesBlocked( petscMatrix_, 1, &row, 1, &col, block.data(), op );
476 }
477 else
478 {
479 // convert process local indices to global indices
480 const PetscInt row = rangeMappers_.parallelIndex( localRow );
481 const PetscInt col = rangeMappers_.parallelIndex( localCol );
482 std::array< PetscInt, domainLocalBlockSize > rows;
483 std::array< PetscInt, domainLocalBlockSize > cols;
484 for( unsigned int i=0, r = row * domainLocalBlockSize, c = col * domainLocalBlockSize; i<domainLocalBlockSize; ++i, ++r, ++c )
485 {
486 rows[ i ] = r;
487 cols[ i ] = c;
488 }
489
 490 // insert/add the block values at the computed unblocked global indices
491 ::Dune::Petsc::MatSetValues( petscMatrix_, domainLocalBlockSize, rows.data(), domainLocalBlockSize, cols.data(), block.data(), op );
492 }
493 setStatus( statAssembled );
494 }
495
496 template< class LocalBlock, class PetscOp >
497 void applyToBlock ( const size_t row, const size_t col, const LocalBlock& block, PetscOp op )
498 {
499 assert( block.rows() == rangeLocalBlockSize );
500 assert( block.cols() == domainLocalBlockSize );
501
502 // copy to MatrixBlockType data structure suited to be inserted into Mat
503 MatrixBlockType matBlock( block );
504 applyToBlock( row, col, matBlock, op );
505 }
506
507 template< class LocalBlock >
508 void setBlock ( const size_t row, const size_t col, const LocalBlock& block )
509 {
510#ifndef _OPENMP
511 assert( status_==statAssembled || status_==statInsert );
512#endif
513 assert( row < std::numeric_limits< int > :: max() );
514 assert( col < std::numeric_limits< int > :: max() );
515
516 setStatus( statInsert );
517 applyToBlock( static_cast< PetscInt > (row), static_cast< PetscInt > (col), block, INSERT_VALUES );
518 }
519
520 template< class LocalBlock >
521 void addBlock ( const size_t row, const size_t col, const LocalBlock& block )
522 {
523#ifndef _OPENMP
524 assert( status_==statAssembled || status_==statInsert );
525#endif
526 assert( row < std::numeric_limits< int > :: max() );
527 assert( col < std::numeric_limits< int > :: max() );
528
529 setStatus( statAdd );
530 applyToBlock( static_cast< PetscInt > (row), static_cast< PetscInt > (col), block, ADD_VALUES );
531 }
532
533 protected:
534 typedef TemporaryLocalMatrix< DomainSpaceType, RangeSpaceType > TemporaryLocalMatrixType;
535
 536 // specialization for temporary local matrix: no copy of the values is needed
537 template <class Operation>
538 const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
539 const TemporaryLocalMatrixType &localMat, const Operation&,
540 const std::integral_constant< bool, false> nonscaled )
541 {
542 return localMat.data();
543 }
544
 545 // specialization for the local matrix getter: no copy of the values is needed
546 template <class LM, class Operation>
547 const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
548 const Assembly::Impl::LocalMatrixGetter< LM >& localMat, const Operation&,
549 const std::integral_constant< bool, false> nonscaled )
550 {
551 return localMat.localMatrix().data();
552 }
553
554 // retrieve values for arbitrary local matrix
555 template <class LocalMatrix, class Operation, bool T>
556 const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
557 const LocalMatrix &localMat, const Operation& operation,
558 const std::integral_constant< bool, T> scaled )
559 {
560 std::vector< PetscScalar >& v = v_;
561 v.resize( rSize * cSize );
562 for( unsigned int i = 0, ic = 0 ; i< rSize; ++i )
563 {
564 for( unsigned int j =0; j< cSize; ++j, ++ic )
565 {
566 v[ ic ] = operation( localMat.get( i, j ) );
567 }
568 }
569 return v.data();
570 }
571
572 template< class LocalMatrix, class Operation, class PetscOp, bool T >
573 void applyLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity,
574 const LocalMatrix &localMat, const Operation& operation,
575 PetscOp petscOp,
576 const std::integral_constant<bool, T> scaled )
577 {
578 std::vector< PetscInt >& r = r_;
579 std::vector< PetscInt >& c = c_;
580
581 if( blockedMode_ )
582 {
583 setupIndicesBlocked( rangeMappers_, rangeEntity, r );
584 setupIndicesBlocked( domainMappers_, domainEntity, c );
585
586 // domainLocalBlockSize == rangeLocalBlockSize
587 const unsigned int rSize = r.size() * domainLocalBlockSize ;
588 const unsigned int cSize = c.size() * domainLocalBlockSize ;
589
590 const PetscScalar* values = getValues( rSize, cSize, localMat, operation, scaled );
591 ::Dune::Petsc::MatSetValuesBlocked( petscMatrix_, r.size(), r.data(), c.size(), c.data(), values, petscOp );
592 }
593 else
594 {
595 setupIndices( rangeMappers_, rangeEntity, r );
596 setupIndices( domainMappers_, domainEntity, c );
597
598 const unsigned int rSize = r.size();
599 const unsigned int cSize = c.size();
600
601 const PetscScalar* values = getValues( rSize, cSize, localMat, operation, scaled );
602 ::Dune::Petsc::MatSetValues( petscMatrix_, rSize, r.data(), cSize, c.data(), values, petscOp );
603 }
604 setStatus( statAssembled );
605 }
606
607 public:
608 template< class LocalMatrix >
609 void addLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat )
610 {
611 assert( status_==statAssembled || status_==statAdd );
612 setStatus( statAdd );
613
614 auto operation = [] ( const PetscScalar& value ) -> PetscScalar { return value; };
615
616 applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, ADD_VALUES, std::integral_constant< bool, false>() );
617 }
618
619 template< class LocalMatrix, class Scalar >
620 void addScaledLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat, const Scalar &s )
621 {
622 assert( status_==statAssembled || status_==statAdd );
623 setStatus( statAdd );
624
625 auto operation = [ &s ] ( const PetscScalar& value ) -> PetscScalar { return s * value; };
626
627 applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, ADD_VALUES, std::integral_constant< bool, true>() );
628 }
629
630 template< class LocalMatrix >
631 void setLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat )
632 {
633 assert( status_==statAssembled || status_==statInsert );
634 setStatus( statInsert );
635
636 auto operation = [] ( const PetscScalar& value ) -> PetscScalar { return value; };
637
638 applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, INSERT_VALUES, std::integral_constant< bool, false>() );
639 }
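      /* Assembly sketch (editorial addition): element contributions are usually
       * collected in a TemporaryLocalMatrix and added in one call, e.g. inside
       * the grid loop of an assembler (names are illustrative):
       *
       *   Dune::Fem::TemporaryLocalMatrix< DomainSpaceType, RangeSpaceType >
       *     localMat( domainSpace, rangeSpace );
       *   localMat.init( entity, entity );
       *   localMat.clear();
       *   // ... localMat.add( i, j, value ) for the element integrals ...
       *   linOp.addLocalMatrix( entity, entity, localMat );
       */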
640
641 template< class LocalMatrix >
642 void getLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, LocalMatrix &localMat ) const
643 {
644 // make sure matrix is in correct state before using
645 finalizeAssembly();
646
647 assert( status_==statAssembled || status_==statGet );
648 setStatus( statGet );
649
650 std::vector< PetscInt >& r = r_;
651 std::vector< PetscInt >& c = c_;
652 std::vector< PetscScalar >& v = v_;
653
654 setupIndices( rangeMappers_, rangeEntity, r );
655 setupIndices( domainMappers_, domainEntity, c );
656
657 v.resize( r.size() * c.size() );
658 ::Dune::Petsc::MatGetValues( petscMatrix_, r.size(), r.data(), c.size(), c.data(), v.data() );
659
660 for( std::size_t i =0 ; i< r.size(); ++i )
661 for( std::size_t j =0; j< c.size(); ++j )
662 localMat.set( i, j, v[ i * c.size() +j ] );
663
664 setStatus( statAssembled );
665 }
666
667 void scaleLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const RangeFieldType &s )
668 {
669 // make sure matrix is in correct state before using
670 finalizeAssembly();
671
672 assert( status_==statAssembled || status_==statGet );
673 setStatus( statGet );
674
675 std::vector< PetscInt >& r = r_;
676 std::vector< PetscInt >& c = c_;
677 std::vector< PetscScalar >& v = v_;
678
679 setupIndices( rangeMappers_, rangeEntity, r );
680 setupIndices( domainMappers_, domainEntity, c );
681
682 const unsigned int rSize = r.size();
683 const unsigned int cSize = c.size();
684 const std::size_t size = rSize * cSize;
685
686 v.resize( size );
687 // get values
688 ::Dune::Petsc::MatGetValues( petscMatrix_, r.size(), r.data(), c.size(), c.data(), v.data() );
689
690 // scale values
691 for( std::size_t i=0; i<size; ++i )
692 v[ i ] *= s;
693
694 // set values again
695 ::Dune::Petsc::MatSetValues( petscMatrix_, rSize, r.data(), cSize, c.data(), v.data(), INSERT_VALUES );
696 setStatus( statAssembled );
697 }
698
699 // just here for debugging
700 void view () const
701 {
702 ::Dune::Petsc::MatView( petscMatrix_, PETSC_VIEWER_STDOUT_WORLD );
703 }
704
 705 // print matrix, just here for debugging
706 void print( std::ostream& s ) const
707 {
708 if( &s == &std::cout || &s == &std::cerr )
709 {
710 view();
711 }
712 }
713
714 // return reference to PETSc matrix object
715 Mat& exportMatrix () const
716 {
717 // make sure matrix is in correct state
718 finalizeAssembly();
719 return petscMatrix_;
720 }
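      /* Sketch (editorial addition): the exported Mat can be passed to native
       * PETSc calls, e.g. to attach it to a KSP solver (error-code checks omitted):
       *
       *   KSP ksp;
       *   KSPCreate( PETSC_COMM_WORLD, &ksp );
       *   KSPSetOperators( ksp, linOp.exportMatrix(), linOp.exportMatrix() );
       */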
721
722 private:
723 PetscLinearOperator ();
724
726 void destroy ()
727 {
728 if( status_ != statNothing )
729 {
730 ::Dune::Petsc::MatDestroy( &petscMatrix_ );
731 status_ = statNothing ;
732 }
733 sequence_ = -1;
734 }
735
736 void setStatus (const Status &newstatus) const
737 {
 738 // in case OpenMP is used, simply skip the status update (and hence the status checks)
739#ifndef _OPENMP
740 status_ = newstatus;
741#endif
742 }
743
744 template< class DFS, class Entity >
745 static void setupIndices ( const PetscMappers< DFS > &mappers, const Entity &entity, std::vector< PetscInt > &indices )
746 {
747 NonBlockMapper< const typename PetscMappers< DFS >::ParallelMapperType, DFS::localBlockSize > nonBlockMapper( mappers.parallelMapper() );
748 nonBlockMapper.map( entity, indices );
749 }
750
751 template< class DFS, class Entity >
752 static void setupIndicesBlocked ( const PetscMappers< DFS > &mappers, const Entity &entity, std::vector< PetscInt > &indices )
753 {
754 mappers.parallelMapper().map( entity, indices );
755 }
756
757 /*
758 * data fields
759 */
760 DomainMappersType domainMappers_;
761 RangeMappersType rangeMappers_;
762
763 int sequence_ = -1;
764 mutable Mat petscMatrix_;
765
766 mutable LocalMatrixStackType localMatrixStack_;
767 mutable Status status_;
768
769 const bool viennaCL_;
770 const bool blockedMode_;
771
772 mutable std::unique_ptr< PetscDomainFunctionType > petscArg_;
773 mutable std::unique_ptr< PetscRangeFunctionType > petscDest_;
774
775 mutable std::vector< PetscScalar > v_;
776 mutable std::vector< PetscInt > r_;
777 mutable std::vector< PetscInt > c_;
778
779 mutable std::vector< PetscInt > unitRows_;
780 };
781
782
783
784 /* ========================================
785 * class PetscLinearOperator::LocalMatrix
786 */
787 template< typename DomainFunction, typename RangeFunction >
788 class PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrix
789 : public LocalMatrixDefault< typename PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrixTraits >
790 {
791 typedef LocalMatrix ThisType;
792 typedef LocalMatrixDefault< typename PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrixTraits > BaseType;
793
794 typedef PetscLinearOperator< DomainFunction, RangeFunction > PetscLinearOperatorType;
795
796
797 public:
798 typedef PetscInt DofIndexType;
799 typedef std::vector< DofIndexType > IndexVectorType;
800 typedef typename DomainFunction::DiscreteFunctionSpaceType DomainSpaceType;
801 typedef typename RangeFunction::DiscreteFunctionSpaceType RangeSpaceType;
802 typedef typename DomainSpaceType::BasisFunctionSetType DomainBasisFunctionSetType;
803 typedef typename RangeSpaceType::BasisFunctionSetType RangeBasisFunctionSetType;
804
805 enum { rangeBlockSize = RangeSpaceType::localBlockSize };
806 enum { domainBlockSize = DomainSpaceType ::localBlockSize };
807
808 private:
809
810 // needed for .mapEach below
811 template< typename PetscMapping >
812 struct PetscAssignFunctor
813 {
814 explicit PetscAssignFunctor ( const PetscMapping &petscMapping, IndexVectorType &indices )
815 : petscMapping_( petscMapping ),
816 indices_( indices )
817 {}
818
819 template< typename T >
820 void operator() ( const std::size_t localIndex, T globalIndex ) { indices_[ localIndex ] = petscMapping_.globalMapping( globalIndex ); }
821
822 private:
823 const PetscMapping &petscMapping_;
824 IndexVectorType &indices_;
825 };
826
827 public:
828 LocalMatrix ( const PetscLinearOperatorType &petscLinOp,
829 const DomainSpaceType &domainSpace,
830 const RangeSpaceType &rangeSpace )
831 : BaseType( domainSpace, rangeSpace ),
832 petscLinearOperator_( petscLinOp )
833 {}
834
835 void finalize()
836 {
837 }
838
839 void init ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity )
840 {
841 // call initialize on base class
842 BaseType :: init( domainEntity, rangeEntity );
843
 844 //*************************************************
 845 // The row indices are determined by the range
 846 // space (via the range entity).
 847 //
 848 // The column indices are determined by the domain
 849 // space (via the domain entity).
 850 //*************************************************
851
852 setupIndices( petscLinearOperator_.rangeMappers_, rangeEntity, rowIndices_ );
853 setupIndices( petscLinearOperator_.domainMappers_, domainEntity, colIndices_ );
854
855 status_ = statAssembled;
856 petscLinearOperator_.setStatus(status_);
857 }
858
859 inline void add ( const int localRow, const int localCol, const RangeFieldType &value )
860 {
861 assert( status_==statAssembled || status_==statAdd );
862 status_ = statAdd;
863 petscLinearOperator_.setStatus(status_);
864 ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIndex( localRow ), globalColIndex( localCol ) , value, ADD_VALUES );
865 }
866
867 inline void set(const int localRow, const int localCol, const RangeFieldType &value )
868 {
869 assert( status_==statAssembled || status_==statInsert );
870 status_ = statInsert;
871 petscLinearOperator_.setStatus(status_);
872 ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIndex( localRow ), globalColIndex( localCol ) , value, INSERT_VALUES );
873 }
874
876 void clearRow ( const int localRow )
877 {
878 assert( status_==statAssembled || status_==statInsert );
879 status_ = statInsert;
880 petscLinearOperator_.setStatus(status_);
881 const int col = this->columns();
882 const int globalRowIdx = globalRowIndex( localRow );
883 for(int localCol=0; localCol<col; ++localCol)
884 {
885 ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIdx, globalColIndex( localCol ), 0.0, INSERT_VALUES );
886 }
887 }
888
889 inline const RangeFieldType get ( const int localRow, const int localCol ) const
890 {
891 assert( status_==statAssembled || status_==statGet );
892 status_ = statGet;
893 petscLinearOperator_.setStatus(status_);
894 RangeFieldType v[1];
895 const int r[] = {globalRowIndex( localRow )};
896 const int c[] = {globalColIndex( localCol )};
897 ::Dune::Petsc::MatGetValues( petscMatrix(), 1, r, 1, c, v );
898 return v[0];
899 }
900
901 inline void scale ( const RangeFieldType &factor )
902 {
903 const_cast< PetscLinearOperatorType& > (petscLinearOperator_).scaleLocalMatrix( this->domainEntity(), this->rangeEntity(), factor );
904 }
905
906 private:
907 LocalMatrix ();
908
909 Mat& petscMatrix () { return petscLinearOperator_.petscMatrix_; }
910 const Mat& petscMatrix () const { return petscLinearOperator_.petscMatrix_; }
911
912 public:
913 int rows() const { return rowIndices_.size(); }
914 int columns() const { return colIndices_.size(); }
915
916 private:
917 DofIndexType globalRowIndex( const int localRow ) const
918 {
919 assert( localRow < static_cast< int >( rowIndices_.size() ) );
920 return rowIndices_[ localRow ];
921 }
922
923 DofIndexType globalColIndex( const int localCol ) const
924 {
925 assert( localCol < static_cast< int >( colIndices_.size() ) );
926 return colIndices_[ localCol ];
927 }
928
929 /*
930 * data fields
931 */
932 const PetscLinearOperatorType &petscLinearOperator_;
933 IndexVectorType rowIndices_;
934 IndexVectorType colIndices_;
935 mutable Status status_;
936 };
937
938
939 } // namespace Fem
940
941} // namespace Dune
942
943#endif // #if HAVE_PETSC
944
945#endif // #ifndef DUNE_FEM_PETSCLINEAROPERATOR_HH