From 3034b595857fc9846e8a201b38642e5a6d7dbde5 Mon Sep 17 00:00:00 2001
From: Stefano Zampini
Date: Wed, 27 Aug 2025 14:44:53 +0300
Subject: [PATCH 1/5] PetscErrorCode can be an enum

---
 cpp/dolfinx/la/petsc.cpp | 4 ++--
 cpp/dolfinx/la/petsc.h   | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/cpp/dolfinx/la/petsc.cpp b/cpp/dolfinx/la/petsc.cpp
index 4a7d3b34a42..df2d4f7bdc6 100644
--- a/cpp/dolfinx/la/petsc.cpp
+++ b/cpp/dolfinx/la/petsc.cpp
@@ -32,7 +32,7 @@ using namespace dolfinx::la;
   } while (0)

 //-----------------------------------------------------------------------------
-void la::petsc::error(int error_code, const std::string& filename,
+void la::petsc::error(PetscErrorCode error_code, const std::string& filename,
                       const std::string& petsc_function)
 {
   // Fetch PETSc error description
@@ -42,7 +42,7 @@ void la::petsc::error(int error_code, const std::string& filename,
   // Log detailed error info
   spdlog::info("PETSc error in '{}', '{}'", filename.c_str(),
                petsc_function.c_str());
-  spdlog::info("PETSc error code '{}' '{}'", error_code, desc);
+  spdlog::info("PETSc error code '{}' '{}'", (int)error_code, desc);
   throw std::runtime_error("Failed to successfully call PETSc function '"
                            + petsc_function + "'. PETSc error code is: "
                            + std ::to_string(error_code) + ", "
diff --git a/cpp/dolfinx/la/petsc.h b/cpp/dolfinx/la/petsc.h
index 9b1443b8e17..50f7031f7d1 100644
--- a/cpp/dolfinx/la/petsc.h
+++ b/cpp/dolfinx/la/petsc.h
@@ -35,7 +35,7 @@ class SparsityPattern;
 namespace petsc
 {
 /// Print error message for PETSc calls that return an error
-void error(int error_code, const std::string& filename,
+void error(PetscErrorCode error_code, const std::string& filename,
            const std::string& petsc_function);

 /// Create PETsc vectors from the local data. The data is copied into
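For downstream users the visible behaviour is unchanged: `la::petsc::error` still throws, and the thrown message embeds the numeric value of the (now possibly enum-typed) error code. A minimal sketch of the message format only, not of any DOLFINx API; the error code and description below are hypothetical values:

```python
# Sketch of the message produced by la::petsc::error (format only);
# error code 63 / "Argument out of range" are hypothetical values.
petsc_function = "MatSetValuesLocal"
error_code, desc = 63, "Argument out of range"
msg = (
    f"Failed to successfully call PETSc function '{petsc_function}'. "
    f"PETSc error code is: {error_code}, {desc}"
)
print(msg)
```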
From c2d11dd4583098b1bdc1a804f069e70f18da9641 Mon Sep 17 00:00:00 2001
From: Stefano Zampini
Date: Wed, 27 Aug 2025 14:46:20 +0300
Subject: [PATCH 2/5] Support MATIS

---
 cpp/dolfinx/fem/assembler.h       | 50 +++++++++++++++--
 cpp/dolfinx/fem/petsc.h           | 28 ++++------
 cpp/dolfinx/la/petsc.cpp          | 90 ++++++++++++++++++-------------
 cpp/dolfinx/la/petsc.h            |  4 +-
 python/dolfinx/fem/petsc.py       | 24 ++++++---
 python/dolfinx/wrappers/petsc.cpp |  5 +-
 6 files changed, 132 insertions(+), 69 deletions(-)

diff --git a/cpp/dolfinx/fem/assembler.h b/cpp/dolfinx/fem/assembler.h
index 898e14db845..e8e5d096406 100644
--- a/cpp/dolfinx/fem/assembler.h
+++ b/cpp/dolfinx/fem/assembler.h
@@ -575,6 +575,29 @@ void set_diagonal(auto set_fn, std::span<const std::int32_t> rows,
   }
 }

+/// @brief Sets values to the diagonal of a matrix for specified rows.
+///
+/// This function is typically called after assembly. The assembly
+/// function zeroes Dirichlet rows and columns. For block matrices, this
+/// function should normally be called only on the diagonal blocks, i.e.
+/// blocks for which the test and trial spaces are the same.
+///
+/// @param[in] set_fn The function for setting values to a matrix.
+/// @param[in] rows Row blocks, in local indices, for which to add a
+/// value to the diagonal.
+/// @param[in] diagonal Values to add to the diagonal for the specified
+/// rows.
+template <dolfinx::scalar T>
+void set_diagonal(auto set_fn, std::span<const std::int32_t> rows,
+                  std::span<const T> diagonal)
+{
+  assert(diagonal.size() == rows.size());
+  for (std::size_t i = 0; i < rows.size(); ++i)
+  {
+    set_fn(rows.subspan(i, 1), rows.subspan(i, 1), diagonal.subspan(i, 1));
+  }
+}
+
 /// @brief Sets a value to the diagonal of the matrix for rows with a
 /// Dirichlet boundary conditions applied.
 ///
@@ -587,22 +610,43 @@ void set_diagonal(auto set_fn, std::span<const std::int32_t> rows,
 /// @param[in] set_fn The function for setting values to a matrix.
 /// @param[in] V The function space for the rows and columns of the
 /// matrix. It is used to extract only the Dirichlet boundary conditions
-/// that are define on V or subspaces of V.
+/// that are defined on V or subspaces of V.
 /// @param[in] bcs The Dirichlet boundary conditions.
 /// @param[in] diagonal Value to add to the diagonal for rows with a
 /// boundary condition applied.
+/// @param[in] unassembled Whether the matrix is in unassembled format.
 template <dolfinx::scalar T, std::floating_point U>
 void set_diagonal(
     auto set_fn, const FunctionSpace<U>& V,
     const std::vector<std::reference_wrapper<const DirichletBC<T, U>>>& bcs,
-    T diagonal = 1.0)
+    T diagonal = 1.0, bool unassembled = false)
 {
   for (auto& bc : bcs)
   {
     if (V.contains(*bc.get().function_space()))
     {
       const auto [dofs, range] = bc.get().dof_indices();
-      set_diagonal(set_fn, dofs.first(range), diagonal);
+      if (unassembled)
+      {
+        // We need to insert on ghost indices too; since MATIS is not
+        // designed to support the INSERT_VALUES stage, we scale ALL
+        // the diagonal values we insert into the matrix.
+        // TODO: move scaling factor computation to DirichletBC or IndexMap?
+        auto adjlist
+            = bc.get().function_space()->dofmap()->index_map->index_to_dest_ranks();
+        const auto off = adjlist.offsets();
+        std::vector<int> number_of_sharing_ranks(off.size());
+        for (size_t i = 0; i < off.size() - 1; ++i)
+          number_of_sharing_ranks[i] = off[i + 1] - off[i] + 1;
+        std::vector<T> data(dofs.size());
+        for (size_t i = 0; i < dofs.size(); ++i)
+          data[i] = diagonal / T(number_of_sharing_ranks[dofs[i]]);
+        std::span<const T> data_span = std::span(data);
+        set_diagonal(set_fn, dofs, data_span);
+      }
+      else
+      {
+        set_diagonal(set_fn, dofs.first(range), diagonal);
+      }
     }
   }
 }
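A quick sanity check on the scaling in the `unassembled` branch: with MATIS, every process that owns or ghosts a constrained dof inserts a diagonal entry, and PETSc sums the per-process contributions on assembly. A dof shared by `n` ranks (the owner plus `off[i+1] - off[i]` destination ranks, hence the `+ 1`) receives `n` insertions of `diagonal / n`, which sum back to exactly `diagonal`:

```python
# Sketch: MATIS sums contributions, so n insertions of diagonal / n
# reproduce the intended diagonal value exactly.
diagonal = 1.0
for n_sharing_ranks in (1, 2, 4, 8):
    contributions = [diagonal / n_sharing_ranks] * n_sharing_ranks
    assert abs(sum(contributions) - diagonal) < 1e-12
```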
diff --git a/cpp/dolfinx/fem/petsc.h b/cpp/dolfinx/fem/petsc.h
index 1934df08758..d12ce95a3bb 100644
--- a/cpp/dolfinx/fem/petsc.h
+++ b/cpp/dolfinx/fem/petsc.h
@@ -122,12 +122,6 @@ Mat create_matrix_block(
   la::SparsityPattern pattern(mesh->comm(), p, maps, bs_dofs);
   pattern.finalize();

-  // FIXME: Add option to pass customised local-to-global map to PETSc
-  // Mat constructor
-
-  // Initialise matrix
-  Mat A = la::petsc::create_matrix(mesh->comm(), pattern, type);
-
   // Create row and column local-to-global maps (field0, field1, field2,
   // etc), i.e. ghosts of field0 appear before owned indices of field1
   std::array<std::vector<PetscInt>, 2> _maps;
@@ -161,29 +155,27 @@ Mat create_matrix_block(
   }

   // Create PETSc local-to-global map/index sets and attach to matrix
-  ISLocalToGlobalMapping petsc_local_to_global0;
-  ISLocalToGlobalMappingCreate(MPI_COMM_SELF, 1, _maps[0].size(),
+  ISLocalToGlobalMapping petsc_local_to_global0, petsc_local_to_global1;
+  ISLocalToGlobalMappingCreate(mesh->comm(), 1, _maps[0].size(),
                                _maps[0].data(), PETSC_COPY_VALUES,
                                &petsc_local_to_global0);
   if (V[0] == V[1])
   {
-    MatSetLocalToGlobalMapping(A, petsc_local_to_global0,
-                               petsc_local_to_global0);
-    ISLocalToGlobalMappingDestroy(&petsc_local_to_global0);
+    PetscObjectReference((PetscObject)petsc_local_to_global0);
+    petsc_local_to_global1 = petsc_local_to_global0;
   }
   else
   {
-
-    ISLocalToGlobalMapping petsc_local_to_global1;
-    ISLocalToGlobalMappingCreate(MPI_COMM_SELF, 1, _maps[1].size(),
+    ISLocalToGlobalMappingCreate(mesh->comm(), 1, _maps[1].size(),
                                  _maps[1].data(), PETSC_COPY_VALUES,
                                  &petsc_local_to_global1);
-    MatSetLocalToGlobalMapping(A, petsc_local_to_global0,
-                               petsc_local_to_global1);
-    ISLocalToGlobalMappingDestroy(&petsc_local_to_global0);
-    ISLocalToGlobalMappingDestroy(&petsc_local_to_global1);
   }

+  // Initialise matrix
+  Mat A = la::petsc::create_matrix(mesh->comm(), pattern, type,
+                                   petsc_local_to_global0,
+                                   petsc_local_to_global1);
+  ISLocalToGlobalMappingDestroy(&petsc_local_to_global0);
+  ISLocalToGlobalMappingDestroy(&petsc_local_to_global1);
+
   return A;
 }
diff --git a/cpp/dolfinx/la/petsc.cpp b/cpp/dolfinx/la/petsc.cpp
index df2d4f7bdc6..e01d4afa6f9 100644
--- a/cpp/dolfinx/la/petsc.cpp
+++ b/cpp/dolfinx/la/petsc.cpp
@@ -232,7 +232,9 @@ void la::petsc::scatter_local_vectors(
 }
 //-----------------------------------------------------------------------------
 Mat la::petsc::create_matrix(MPI_Comm comm, const SparsityPattern& sp,
-                             std::optional<std::string> type)
+                             std::optional<std::string> type,
+                             std::optional<ISLocalToGlobalMapping> rlgmap,
+                             std::optional<ISLocalToGlobalMapping> clgmap)
 {
   PetscErrorCode ierr;
   Mat A;
@@ -289,30 +291,28 @@ Mat la::petsc::create_matrix(MPI_Comm comm, const SparsityPattern& sp,
     _nnz_offdiag[i] = bs[1] * sp.nnz_off_diag(i / bs[0]);
   }

-  // Allocate space for matrix
-  ierr = MatXAIJSetPreallocation(A, _bs, _nnz_diag.data(), _nnz_offdiag.data(),
-                                 nullptr, nullptr);
-  if (ierr != 0)
-    petsc::error(ierr, __FILE__, "MatXIJSetPreallocation");
-
-  // Set block sizes
-  ierr = MatSetBlockSizes(A, bs[0], bs[1]);
-  if (ierr != 0)
-    petsc::error(ierr, __FILE__, "MatSetBlockSizes");
-
   // Create PETSc local-to-global map/index sets
-  ISLocalToGlobalMapping local_to_global0;
-  const std::vector<std::int64_t> map0 = maps[0]->global_indices();
-  const std::vector<PetscInt> _map0(map0.begin(), map0.end());
-  ierr = ISLocalToGlobalMappingCreate(MPI_COMM_SELF, bs[0], _map0.size(),
-                                      _map0.data(), PETSC_COPY_VALUES,
-                                      &local_to_global0);
-
-  if (ierr != 0)
-    petsc::error(ierr, __FILE__, "ISLocalToGlobalMappingCreate");
+  ISLocalToGlobalMapping local_to_global0, local_to_global1 = NULL;
+  if (rlgmap)
+  {
+    ierr = PetscObjectReference((PetscObject)rlgmap.value());
+    if (ierr != 0)
+      petsc::error(ierr, __FILE__, "PetscObjectReference");
+    local_to_global0 = rlgmap.value();
+  }
+  else
+  {
+    const std::vector<std::int64_t> map0 = maps[0]->global_indices();
+    const std::vector<PetscInt> _map0(map0.begin(), map0.end());
+    ierr = ISLocalToGlobalMappingCreate(comm, bs[0], _map0.size(),
+                                        _map0.data(), PETSC_COPY_VALUES,
+                                        &local_to_global0);
+    if (ierr != 0)
+      petsc::error(ierr, __FILE__, "ISLocalToGlobalMappingCreate");
+  }

   // Check for common index maps
-  if (maps[0] == maps[1] and bs[0] == bs[1])
+  if (maps[0] == maps[1] and bs[0] == bs[1] and !clgmap)
   {
     ierr = MatSetLocalToGlobalMapping(A, local_to_global0, local_to_global0);
     if (ierr != 0)
@@ -320,32 +320,46 @@ Mat la::petsc::create_matrix(MPI_Comm comm, const SparsityPattern& sp,
   }
   else
   {
-    ISLocalToGlobalMapping local_to_global1;
-    const std::vector<std::int64_t> map1 = maps[1]->global_indices();
-    const std::vector<PetscInt> _map1(map1.begin(), map1.end());
-    ierr = ISLocalToGlobalMappingCreate(MPI_COMM_SELF, bs[1], _map1.size(),
-                                        _map1.data(), PETSC_COPY_VALUES,
-                                        &local_to_global1);
-    if (ierr != 0)
-      petsc::error(ierr, __FILE__, "ISLocalToGlobalMappingCreate");
+    if (clgmap)
+    {
+      ierr = PetscObjectReference((PetscObject)clgmap.value());
+      if (ierr != 0)
+        petsc::error(ierr, __FILE__, "PetscObjectReference");
+      local_to_global1 = clgmap.value();
+    }
+    else
+    {
+      const std::vector<std::int64_t> map1 = maps[1]->global_indices();
+      const std::vector<PetscInt> _map1(map1.begin(), map1.end());
+      ierr = ISLocalToGlobalMappingCreate(comm, bs[1], _map1.size(),
+                                          _map1.data(), PETSC_COPY_VALUES,
+                                          &local_to_global1);
+      if (ierr != 0)
+        petsc::error(ierr, __FILE__, "ISLocalToGlobalMappingCreate");
+    }
     ierr = MatSetLocalToGlobalMapping(A, local_to_global0, local_to_global1);
     if (ierr != 0)
       petsc::error(ierr, __FILE__, "MatSetLocalToGlobalMapping");
-    ierr = ISLocalToGlobalMappingDestroy(&local_to_global1);
-    if (ierr != 0)
-      petsc::error(ierr, __FILE__, "ISLocalToGlobalMappingDestroy");
   }

   // Clean up local-to-global 0
   ierr = ISLocalToGlobalMappingDestroy(&local_to_global0);
+  if (ierr != 0)
+    petsc::error(ierr, __FILE__, "ISLocalToGlobalMappingDestroy");
+  ierr = ISLocalToGlobalMappingDestroy(&local_to_global1);
   if (ierr != 0)
     petsc::error(ierr, __FILE__, "ISLocalToGlobalMappingDestroy");

-  // Note: This should be called after having set the local-to-global
-  // map for MATIS (this is a dummy call if A is not of type MATIS)
-  // ierr = MatISSetPreallocation(A, 0, _nnz_diag.data(), 0,
-  //                              _nnz_offdiag.data());
-  // if (ierr != 0)
-  //   error(ierr, __FILE__, "MatISSetPreallocation");
+  // Allocate space for matrix
+  ierr = MatXAIJSetPreallocation(A, _bs, _nnz_diag.data(), _nnz_offdiag.data(),
+                                 nullptr, nullptr);
+  if (ierr != 0)
+    petsc::error(ierr, __FILE__, "MatXAIJSetPreallocation");
+
+  // Set block sizes
+  ierr = MatSetBlockSizes(A, bs[0], bs[1]);
+  if (ierr != 0)
+    petsc::error(ierr, __FILE__, "MatSetBlockSizes");

   // Set some options on Mat object
   ierr = MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
diff --git a/cpp/dolfinx/la/petsc.h b/cpp/dolfinx/la/petsc.h
index 50f7031f7d1..4fc217dae83 100644
--- a/cpp/dolfinx/la/petsc.h
+++ b/cpp/dolfinx/la/petsc.h
@@ -120,7 +120,9 @@ void scatter_local_vectors(
 /// Create a PETSc Mat. Caller is responsible for destroying the
 /// returned object.
 Mat create_matrix(MPI_Comm comm, const SparsityPattern& sp,
-                  std::optional<std::string> type = std::nullopt);
+                  std::optional<std::string> type = std::nullopt,
+                  std::optional<ISLocalToGlobalMapping> rlgmap = std::nullopt,
+                  std::optional<ISLocalToGlobalMapping> clgmap = std::nullopt);

 /// Create PETSc MatNullSpace. Caller is responsible for destruction
 /// returned object.
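From the Python layer, the net effect of the optional local-to-global arguments is that an unassembled operator can be requested directly. A minimal sketch, assuming `a` is a compiled bilinear form on a mesh distributed without overlap, and assuming the `kind` argument of `dolfinx.fem.petsc.create_matrix` accepts the PETSc matrix type string `"is"` as this series intends:

```python
# Sketch: request an unassembled (MATIS) operator from the Python API.
from petsc4py import PETSc
import dolfinx.fem.petsc

A = dolfinx.fem.petsc.create_matrix(a, kind="is")
dolfinx.fem.petsc.assemble_matrix(A, a)
A.assemble()
assert A.getType() == PETSc.Mat.Type.IS
```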
diff --git a/python/dolfinx/fem/petsc.py b/python/dolfinx/fem/petsc.py
index acfec2f25c6..2fe0d99b12f 100644
--- a/python/dolfinx/fem/petsc.py
+++ b/python/dolfinx/fem/petsc.py
@@ -513,13 +513,16 @@ def _(
         A.assemble(PETSc.Mat.AssemblyType.FLUSH)  # type: ignore[attr-defined]

         # Set diagonal
-        for i, a_row in enumerate(a):
-            for j, a_sub in enumerate(a_row):
-                if a_sub is not None:
-                    Asub = A.getLocalSubMatrix(is0[i], is1[j])
-                    if a_sub.function_spaces[0] is a_sub.function_spaces[1]:
-                        _cpp.fem.petsc.insert_diagonal(Asub, a_sub.function_spaces[0], _bcs, diag)
-                    A.restoreLocalSubMatrix(is0[i], is1[j], Asub)
+        if len(_bcs):
+            for i, a_row in enumerate(a):
+                for j, a_sub in enumerate(a_row):
+                    if a_sub is not None:
+                        if a_sub.function_spaces[0] is a_sub.function_spaces[1]:
+                            Asub = A.getLocalSubMatrix(is0[i], is1[j])
+                            _cpp.fem.petsc.insert_diagonal(
+                                Asub, a_sub.function_spaces[0], _bcs, diag
+                            )
+                            A.restoreLocalSubMatrix(is0[i], is1[j], Asub)
     else:  # Non-blocked
         constants = pack_constants(a) if constants is None else constants  # type: ignore[assignment]
         coeffs = pack_coefficients(a) if coeffs is None else coeffs  # type: ignore[assignment]
@@ -528,7 +531,8 @@ def _(
         if a.function_spaces[0] is a.function_spaces[1]:
             A.assemblyBegin(PETSc.Mat.AssemblyType.FLUSH)  # type: ignore[attr-defined]
             A.assemblyEnd(PETSc.Mat.AssemblyType.FLUSH)  # type: ignore[attr-defined]
-            _cpp.fem.petsc.insert_diagonal(A, a.function_spaces[0], _bcs, diag)
+            if len(_bcs):
+                _cpp.fem.petsc.insert_diagonal(A, a.function_spaces[0], _bcs, diag)

     return A
@@ -849,6 +853,8 @@ def __init__(
         # For nest matrices kind can be a nested list.
         kind = "nest" if self.A.getType() == PETSc.Mat.Type.NEST else kind  # type: ignore[attr-defined]
+        if kind == "is":
+            kind = "mpi"
         assert kind is None or isinstance(kind, str)
         self._b = create_vector(self.L, kind=kind)
         self._x = create_vector(self.L, kind=kind)
@@ -1326,6 +1332,8 @@ def __init__(
         # Determine the vector kind based on the matrix type
         kind = "nest" if self._A.getType() == PETSc.Mat.Type.NEST else kind  # type: ignore[attr-defined]
+        if kind == "is":
+            kind = "mpi"
         assert kind is None or isinstance(kind, str)
         self._b = create_vector(self.F, kind=kind)
         self._x = create_vector(self.F, kind=kind)
diff --git a/python/dolfinx/wrappers/petsc.cpp b/python/dolfinx/wrappers/petsc.cpp
index 163c74dcaa1..db7f92389d3 100644
--- a/python/dolfinx/wrappers/petsc.cpp
+++ b/python/dolfinx/wrappers/petsc.cpp
@@ -393,9 +393,12 @@ void petsc_fem_module(nb::module_& m)
           _bcs.push_back(*bc);
         }

+        PetscBool flg;
+        PetscObjectTypeCompare((PetscObject)A, MATIS, &flg);
+
         dolfinx::fem::set_diagonal(
             dolfinx::la::petsc::Matrix::set_fn(A, INSERT_VALUES), V, _bcs,
-            diagonal);
+            diagonal, flg ? true : false);
       },
       nb::arg("A"), nb::arg("V"), nb::arg("bcs"), nb::arg("diagonal"));
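The `kind == "is"` fallback in the two `__init__` methods reflects a PETSc detail: a MATIS matrix is stored unassembled, but it acts on ordinary globally assembled vectors, so the right-hand-side and solution vectors must be plain distributed vectors. A sketch of the intent, assuming `L` is a compiled linear form:

```python
# Sketch: work vectors for an unassembled operator are standard
# distributed PETSc vectors, hence the fallback from "is" to "mpi".
import dolfinx.fem.petsc

kind = "is"
if kind == "is":
    kind = "mpi"
b = dolfinx.fem.petsc.create_vector(L, kind=kind)
print(b.getType())  # a plain assembled vector type, not "is"
```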
From a4e7d2eceb3c019b08f7f34a9650eb9326d83a13 Mon Sep 17 00:00:00 2001
From: Stefano Zampini
Date: Sat, 30 Aug 2025 14:29:59 +0300
Subject: [PATCH 3/5] la::petsc::create_matrix: the Python interface can pass
 "mpi"

---
 cpp/dolfinx/la/petsc.cpp | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/cpp/dolfinx/la/petsc.cpp b/cpp/dolfinx/la/petsc.cpp
index e01d4afa6f9..f971e222769 100644
--- a/cpp/dolfinx/la/petsc.cpp
+++ b/cpp/dolfinx/la/petsc.cpp
@@ -247,7 +247,20 @@ Mat la::petsc::create_matrix(MPI_Comm comm, const SparsityPattern& sp,
   const std::array<int, 2> bs = {sp.block_size(0), sp.block_size(1)};

   if (type)
-    MatSetType(A, type->c_str());
+  {
+    if (type == std::string("mpi"))
+    {
+      ierr = MatSetType(A, MATAIJ);
+      if (ierr != 0)
+        petsc::error(ierr, __FILE__, "MatSetType");
+    }
+    else
+    {
+      ierr = MatSetType(A, type->c_str());
+      if (ierr != 0)
+        petsc::error(ierr, __FILE__, "MatSetType");
+    }
+  }

   // Get global and local dimensions
   const std::int64_t M = bs[0] * maps[0]->size_global();
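This makes the generic `"mpi"` kind, which the Python layer already uses for vectors, legal for matrices too, resolving to `MATAIJ` (concretely `"seqaij"` on one rank, `"mpiaij"` on several). A sketch against the low-level binding, assuming a finalized `SparsityPattern` `sp` on `mesh`:

```python
# Sketch: "mpi" now resolves to MATAIJ instead of being rejected.
import dolfinx.cpp as _cpp

A = _cpp.la.petsc.create_matrix(mesh.comm, sp, "mpi")
assert A.getType().endswith("aij")
```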
From 4d4591ed2682b7e96c365c2910e80627fd54dcb4 Mon Sep 17 00:00:00 2001
From: Stefano Zampini
Date: Thu, 28 Aug 2025 17:45:54 +0300
Subject: [PATCH 4/5] PETSc FEM python wrapper: attach DM shell basic
 customization

field information for fields
how to clone a matrix
---
 cpp/dolfinx/la/petsc.cpp          | 46 ++++++++++++++++++++++
 cpp/dolfinx/la/petsc.h            | 16 +++++++-
 python/dolfinx/fem/petsc.py       | 64 +++++++++++++++++++++++++++++++
 python/dolfinx/wrappers/petsc.cpp | 25 ++++++++++++
 4 files changed, 150 insertions(+), 1 deletion(-)

diff --git a/cpp/dolfinx/la/petsc.cpp b/cpp/dolfinx/la/petsc.cpp
index f971e222769..1e06dccfe9d 100644
--- a/cpp/dolfinx/la/petsc.cpp
+++ b/cpp/dolfinx/la/petsc.cpp
@@ -147,6 +147,52 @@ std::vector<IS> la::petsc::create_index_sets(
   return is;
 }
 //-----------------------------------------------------------------------------
+std::vector<IS> la::petsc::create_global_index_sets(
+    const std::vector<
+        std::pair<std::reference_wrapper<const common::IndexMap>, int>>& maps)
+{
+  std::vector<IS> is;
+
+  std::int64_t offset = 0;
+  std::int64_t merged_local_size = 0;
+  MPI_Comm comm = MPI_COMM_NULL;
+
+  for (auto& map : maps)
+  {
+    if (comm == MPI_COMM_NULL)
+    {
+      comm = map.first.get().comm();
+    }
+    int result;
+    MPI_Comm_compare(comm, map.first.get().comm(), &result);
+    if (result != MPI_IDENT && result != MPI_CONGRUENT)
+    {
+      throw std::runtime_error("Index maps must share the same communicator.");
+    }
+    int bs = map.second;
+    std::int32_t size = map.first.get().size_local();
+    merged_local_size += size * bs;
+  }
+  if (comm == MPI_COMM_NULL)
+    return is;
+
+  int ierr = MPI_Exscan(&merged_local_size, &offset, 1, MPI_INT64_T, MPI_SUM,
+                        comm);
+  dolfinx::MPI::check_error(comm, ierr);
+
+  for (auto& map : maps)
+  {
+    int bs = map.second;
+    std::int32_t size = map.first.get().size_local();
+    IS _is;
+    ISCreateStride(map.first.get().comm(), bs * size, offset, 1, &_is);
+    is.push_back(_is);
+    offset += bs * size;
+  }
+
+  return is;
+}
+//-----------------------------------------------------------------------------
 std::vector<std::vector<PetscScalar>> la::petsc::get_local_vectors(
     const Vec x,
     const std::vector<
diff --git a/cpp/dolfinx/la/petsc.h b/cpp/dolfinx/la/petsc.h
index 4fc217dae83..69f33daf5b1 100644
--- a/cpp/dolfinx/la/petsc.h
+++ b/cpp/dolfinx/la/petsc.h
@@ -100,11 +100,25 @@ Vec create_vector_wrap(const la::Vector<PetscScalar>& x)
 /// @note The caller is responsible for destruction of each IS.
 ///
 /// @param[in] maps Vector of IndexMaps and corresponding block sizes
-/// @return Vector of PETSc Index Sets, created on` PETSC_COMM_SELF`
+/// @return Vector of PETSc Index Sets, created on `PETSC_COMM_SELF`
 std::vector<IS> create_index_sets(
     const std::vector<
         std::pair<std::reference_wrapper<const common::IndexMap>, int>>& maps);

+/// @brief Compute PETSc IndexSets (IS) for a stack of index maps.
+///
+/// This function stacks the owned part of the maps and returns
+/// indices in the global space. The maps must have the same
+/// communicator.
+///
+/// @note Collective
+/// @note The caller is responsible for destruction of each IS.
+///
+/// @param[in] maps Vector of IndexMaps and corresponding block sizes
+/// @return Vector of PETSc Index Sets, created on the index map
+/// communicator
+std::vector<IS> create_global_index_sets(
+    const std::vector<
+        std::pair<std::reference_wrapper<const common::IndexMap>, int>>& maps);
+
 /// Copy blocks from Vec into local arrays
 std::vector<std::vector<PetscScalar>> get_local_vectors(
     const Vec x,
diff --git a/python/dolfinx/fem/petsc.py b/python/dolfinx/fem/petsc.py
index 2fe0d99b12f..c8ccabc6044 100644
--- a/python/dolfinx/fem/petsc.py
+++ b/python/dolfinx/fem/petsc.py
@@ -875,6 +875,12 @@ def __init__(
         self._solver = PETSc.KSP().create(self.A.comm)  # type: ignore[attr-defined]
         self.solver.setOperators(self.A, self.P_mat)

+        # Attach problem information
+        dm = self.solver.getDM()
+        dm.setCreateMatrix(partial(_dm_create_matrix, self.A))
+        dm.setCreateFieldDecomposition(partial(_dm_create_field_decomposition, u, self.L))
+        self.solver.getPC().setDM(dm)
+
         if petsc_options_prefix == "":
             raise ValueError("PETSc options prefix cannot be empty.")
@@ -1346,6 +1352,12 @@ def __init__(
         )
         self.solver.setFunction(partial(assemble_residual, u, self.F, self.J, bcs), self.b)

+        # Attach problem information
+        dm = self.solver.getDM()
+        dm.setCreateMatrix(partial(_dm_create_matrix, self.A))
+        dm.setCreateFieldDecomposition(partial(_dm_create_field_decomposition, u, self.F))
+        self.solver.getKSP().setDM(dm)
+
         if petsc_options_prefix == "":
             raise ValueError("PETSc options prefix cannot be empty.")
@@ -1710,3 +1722,55 @@ def _(x: PETSc.Vec, u: typing.Union[_Function, Sequence[_Function]]):  # type: ignore
             dolfinx.la.petsc.assign(x, data0 + data1)
     else:
         dolfinx.la.petsc.assign(x, u.x.array)
+
+
+# -- DMShell (default) helper functions --
+
+
+def _dm_create_field_decomposition(
+    u: typing.Union[Sequence[_Function], _Function],
+    form: typing.Union[Form, Sequence[Form]],
+    _dm: PETSc.DM,  # type: ignore[name-defined]
+):
+    """Return index sets of the fields and their associated names.
+
+    Args:
+        u: Function tied to the solution vector.
+        form: Form of the residual or of the right-hand side.
+            It can be a sequence of forms.
+        _dm: The DM instance.
+
+    Returns:
+        names: Field names.
+        ises: List of index sets in global numbering.
+        dms: List of sub-DMs. This function returns `None`.
+    """
+    if not isinstance(form, Sequence):
+        form = [form]
+    spaces = _extract_function_spaces(form)
+    ises = _cpp.la.petsc.create_global_index_sets(
+        [(V.dofmaps(0).index_map, V.dofmaps(0).index_map_bs) for V in spaces]  # type: ignore[union-attr]
+    )
+    if isinstance(u, Sequence):
+        names = [f"{v.name + '_' if v.name != 'f' else ''}{i}" for i, v in enumerate(u)]
+    else:
+        names = [f"dolfinx_field_{i}" for i in range(len(form))]
+    return names, ises, None
+
+
+def _dm_create_matrix(
+    J: PETSc.Mat,  # type: ignore[name-defined]
+    _dm: PETSc.DM,  # type: ignore[name-defined]
+):
+    """Return a clone of the matrix.
+
+    Args:
+        J: Matrix to assemble the Jacobian into.
+        _dm: The DM instance.
+
+    Returns:
+        A PETSc matrix.
+    """
+    return J.duplicate()
diff --git a/python/dolfinx/wrappers/petsc.cpp b/python/dolfinx/wrappers/petsc.cpp
index db7f92389d3..9338a44ad66 100644
--- a/python/dolfinx/wrappers/petsc.cpp
+++ b/python/dolfinx/wrappers/petsc.cpp
@@ -210,6 +210,31 @@ void petsc_la_module(nb::module_& m)
       },
       nb::arg("maps"));

+  m.def(
+      "create_global_index_sets",
+      [](const std::vector<
+          std::pair<std::shared_ptr<const dolfinx::common::IndexMap>, int>>&
+             maps)
+      {
+        using X = std::vector<
+            std::pair<std::reference_wrapper<const dolfinx::common::IndexMap>,
+                      int>>;
+        X _maps;
+        std::ranges::transform(maps, std::back_inserter(_maps),
+                               [](auto m) -> typename X::value_type
+                               { return {*m.first, m.second}; });
+        std::vector<IS> index_sets
+            = dolfinx::la::petsc::create_global_index_sets(_maps);
+
+        std::vector<nb::object> py_index_sets;
+        for (auto is : index_sets)
+        {
+          PyObject* obj = PyPetscIS_New(is);
+          PetscObjectDereference((PetscObject)is);
+          py_index_sets.push_back(nb::steal(obj));
+        }
+        return py_index_sets;
+      },
+      nb::arg("maps"));
+
   m.def(
       "scatter_local_vectors",
       [](Vec x,
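These helpers give PETSc enough structure to act on a monolithic operator field by field: `PCFieldSplit` (or `PCBDDC`) can ask the attached DM for the per-field index sets. A sketch of what `create_global_index_sets` produces, assuming two function spaces `V0` and `V1` on the same communicator: the owned dofs of each field are stacked into one merged global numbering, field 0 first, then field 1, per process.

```python
# Sketch: per-field index sets in a merged global numbering.
import dolfinx.cpp as _cpp

maps = [
    (V0.dofmap.index_map, V0.dofmap.index_map_bs),
    (V1.dofmap.index_map, V1.dofmap.index_map_bs),
]
is0, is1 = _cpp.la.petsc.create_global_index_sets(maps)
# Each IS covers exactly the owned dofs of its field.
assert is0.getLocalSize() == V0.dofmap.index_map.size_local * V0.dofmap.index_map_bs
```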
+ """ + + return J.duplicate() diff --git a/python/dolfinx/wrappers/petsc.cpp b/python/dolfinx/wrappers/petsc.cpp index db7f92389d3..9338a44ad66 100644 --- a/python/dolfinx/wrappers/petsc.cpp +++ b/python/dolfinx/wrappers/petsc.cpp @@ -210,6 +210,31 @@ void petsc_la_module(nb::module_& m) }, nb::arg("maps")); + m.def( + "create_global_index_sets", + [](const std::vector>& + maps) + { + using X = std::vector, int>>; + X _maps; + std::ranges::transform(maps, std::back_inserter(_maps), + [](auto m) -> typename X::value_type + { return {*m.first, m.second}; }); + std::vector index_sets + = dolfinx::la::petsc::create_global_index_sets(_maps); + + std::vector py_index_sets; + for (auto is : index_sets) + { + PyObject* obj = PyPetscIS_New(is); + PetscObjectDereference((PetscObject)is); + py_index_sets.push_back(nb::steal(obj)); + } + return py_index_sets; + }, + nb::arg("maps")); + m.def( "scatter_local_vectors", [](Vec x, From cc73f78dbff05dd9cd9463bdcb2ca95671bb4933 Mon Sep 17 00:00:00 2001 From: Stefano Zampini Date: Wed, 27 Aug 2025 21:19:28 +0300 Subject: [PATCH 5/5] demo BDDC mixed poisson --- python/demo/demo_mixed-poisson.py | 280 +++++++++++++++++++----------- 1 file changed, 177 insertions(+), 103 deletions(-) diff --git a/python/demo/demo_mixed-poisson.py b/python/demo/demo_mixed-poisson.py index e24db40cfd7..fbb9a070e36 100644 --- a/python/demo/demo_mixed-poisson.py +++ b/python/demo/demo_mixed-poisson.py @@ -22,6 +22,9 @@ # PETSc/petsc4y. # * Construct a Hypre Auxiliary Maxwell Space (AMS) preconditioner for # $H(\mathrm{div})$ problems in two-dimensions. +# * Construct a monolithic Balancing Domain Decomposition by +# Constraints (BDDC) preconditioner as described in the +# [paper](https://doi.org/10.1137/16M1080653) # # ```{admonition} Download sources # :class: download @@ -103,7 +106,7 @@ from basix.ufl import element from dolfinx import fem, mesh from dolfinx.fem.petsc import discrete_gradient, interpolation_matrix -from dolfinx.mesh import CellType, create_unit_square +from dolfinx.mesh import CellType, GhostMode, create_unit_square # Solution scalar (e.g., float32, complex128) and geometry (float32/64) # types @@ -111,13 +114,19 @@ xdtype = dolfinx.default_real_type # - -# Create a two-dimensional mesh. The iterative solver constructed -# later requires special construction that is specific to two +# Create a two-dimensional mesh. The AMS based iterative solvers +# constructed later require special construction that is specific to two # dimensions. Application in three-dimensions would require a number of # changes to the linear solver. +# BDDC methods need the linear operator to be stored in unassembled format, +# i.e. assembled on each MPI process associated with the subdomain part of +# the mesh, but not across processes. +# To this end, we require the mesh to not be distributed with overlap. # + -msh = create_unit_square(MPI.COMM_WORLD, 96, 96, CellType.triangle, dtype=xdtype) +msh = create_unit_square( + MPI.COMM_WORLD, 96, 96, CellType.triangle, ghost_mode=GhostMode.none, dtype=xdtype +) # - # # Here we construct compatible function spaces for the mixed Poisson @@ -226,41 +235,14 @@ # - # We now create a linear problem solver for the mixed problem. +# We use two separate functions to setup block preconditioning with AMS or +# monolithic BDDC methods, as the matrix specification differs. +# +# The next function sets up the problem with block preconditioning. 
 # As we will use different preconditioners for the individual blocks of
 # the saddle point problem, we specify the matrix kind to be "nest",
 # so that we can use
 # [`fieldsplit`](https://petsc.org/release/manual/ksp/#sec-block-matrices)
 # (block) type and set the 'splits' between the $\sigma$ and $u$ fields.
-
-
-# +
-
-problem = fem.petsc.LinearProblem(
-    a,
-    L,
-    u=[sigma, u],
-    P=a_p,
-    kind="nest",
-    bcs=bcs,
-    petsc_options_prefix="demo_mixed_poisson_",
-    petsc_options={
-        "ksp_type": "gmres",
-        "pc_type": "fieldsplit",
-        "pc_fieldsplit_type": "additive",
-        "ksp_rtol": 1e-8,
-        "ksp_gmres_restart": 100,
-        "ksp_view": "",
-    },
-)
-# -
-
-
-# +
-ksp = problem.solver
-ksp.setMonitor(
-    lambda _, its, rnorm: PETSc.Sys.Print(f"Iteration: {its:>4d}, residual: {rnorm:.3e}")
-)
-# -
-
 # For the $P_{11}$ block, which is the discontinuous Lagrange mass
 # matrix, we let the preconditioner be the default, which is incomplete
 # LU factorisation and which can solve the block exactly in one
 # application. The preconditioner for the $P_{00}$ block is more
 # involved. We use Hypre's Auxiliary space Maxwell Solver (AMS). The
 # $H({\rm div})$ and $H({\rm curl})$ spaces are effectively the same in
 # two-dimensions, just rotated by $\pi/2$.

 # +
+def block_solver():
+    problem = fem.petsc.LinearProblem(
+        a,
+        L,
+        u=[sigma, u],
+        P=a_p,
+        kind="nest",
+        bcs=bcs,
+        petsc_options_prefix="demo_mixed_poisson_",
+        petsc_options={
+            "ksp_type": "gmres",
+            "pc_type": "fieldsplit",
+            "pc_fieldsplit_type": "additive",
+            "ksp_rtol": 1e-8,
+            "ksp_gmres_restart": 100,
+            "ksp_view": "",
+        },
+    )
+
+    ksp = problem.solver
+    ksp_sigma, ksp_u = ksp.getPC().getFieldSplitSubKSP()
+    pc_sigma = ksp_sigma.getPC()
+    if PETSc.Sys().hasExternalPackage("hypre") and not np.issubdtype(dtype, np.complexfloating):
+        pc_sigma.setType("hypre")
+        pc_sigma.setHYPREType("ams")
+
+        opts = PETSc.Options()
+        opts[f"{ksp_sigma.prefix}pc_hypre_ams_cycle_type"] = 7
+        opts[f"{ksp_sigma.prefix}pc_hypre_ams_relax_times"] = 2
+
+        # Construct and set the 'discrete gradient' operator, which maps
+        # grad H1 -> H(curl), i.e. the gradient of a scalar Lagrange space
+        # to a H(curl) space
+        V_H1 = fem.functionspace(msh, element("Lagrange", msh.basix_cell(), k, dtype=xdtype))
+        V_curl = fem.functionspace(msh, element("N1curl", msh.basix_cell(), k, dtype=xdtype))
+        G = discrete_gradient(V_H1, V_curl)
+        G.assemble()
+        pc_sigma.setHYPREDiscreteGradient(G)
+
+        assert k > 0, "Element degree must be at least 1."
+        if k == 1:
+            # For the lowest order base (k=1), we can supply interpolation
+            # of the '1' vectors in the space V. Hypre can then construct
+            # the required operators from G and the '1' vectors.
+            cvec0, cvec1 = fem.Function(V), fem.Function(V)
+            cvec0.interpolate(lambda x: np.vstack((np.ones_like(x[0]), np.zeros_like(x[1]))))
+            cvec1.interpolate(lambda x: np.vstack((np.zeros_like(x[0]), np.ones_like(x[1]))))
+            pc_sigma.setHYPRESetEdgeConstantVectors(cvec0.x.petsc_vec, cvec1.x.petsc_vec, None)
-else:
-    # For high-order spaces, we must provide the (H1)^d -> H(div)
-    # interpolation operator/matrix
-    V_H1d = fem.functionspace(msh, ("Lagrange", k, (msh.geometry.dim,)))
-    Pi = interpolation_matrix(V_H1d, V)  # (H1)^d -> H(div)
-    Pi.assemble()
-    pc_sigma.setHYPRESetInterpolations(msh.geometry.dim, None, None, Pi, None)
-
-    # High-order elements generally converge less well than the
-    # lowest-order case with algebraic multigrid, so we perform
-    # extra work at the multigrid stage
-    opts[f"{ksp_sigma.prefix}pc_hypre_ams_tol"] = 1e-12
-    opts[f"{ksp_sigma.prefix}pc_hypre_ams_max_iter"] = 3
-
-    ksp_sigma.setFromOptions()
-else:
-    # If Hypre is not available, use LU factorisation on the $P_{00}$
-    # block
-    pc_sigma.setType("lu")
-    use_superlu = PETSc.IntType == np.int64
-    if PETSc.Sys().hasExternalPackage("mumps") and not use_superlu:
-        pc_sigma.setFactorSolverType("mumps")
-    elif PETSc.Sys().hasExternalPackage("superlu_dist"):
-        pc_sigma.setFactorSolverType("superlu_dist")
+        else:
+            # For high-order spaces, we must provide the (H1)^d -> H(div)
+            # interpolation operator/matrix
+            V_H1d = fem.functionspace(msh, ("Lagrange", k, (msh.geometry.dim,)))
+            Pi = interpolation_matrix(V_H1d, V)  # (H1)^d -> H(div)
+            Pi.assemble()
+            pc_sigma.setHYPRESetInterpolations(msh.geometry.dim, None, None, Pi, None)
+
+            # High-order elements generally converge less well than the
+            # lowest-order case with algebraic multigrid, so we perform
+            # extra work at the multigrid stage
+            opts[f"{ksp_sigma.prefix}pc_hypre_ams_tol"] = 1e-12
+            opts[f"{ksp_sigma.prefix}pc_hypre_ams_max_iter"] = 3
+
+        ksp_sigma.setFromOptions()
+    else:
+        # If Hypre is not available, use LU factorisation on the $P_{00}$
+        # block
+        pc_sigma.setType("lu")
+        use_superlu = PETSc.IntType == np.int64
+        if PETSc.Sys().hasExternalPackage("mumps") and not use_superlu:
+            pc_sigma.setFactorSolverType("mumps")
+        elif PETSc.Sys().hasExternalPackage("superlu_dist"):
+            pc_sigma.setFactorSolverType("superlu_dist")
+
+    return problem
+
+
 # -

+# In the next function we create a monolithic linear problem solver.
+# As we will use BDDC, we specify the matrix kind to be "is".
+# We also show a minimal customization for the method to be
+# applied monolithically on the symmetric indefinite linear
+# system arising from the mixed problem.
+# In particular, we need solvers for the subproblems that can
+# support the saddle point formulation.

 # +
+def monolithic_solver():
+    # Some default solver strategies based on what PETSc has
+    # been configured with
+    local_solver = "svd"
+    local_solver_type = "dummy"
+    coarse_solver = "svd"
+    coarse_solver_type = "dummy"
+    if PETSc.Sys().hasExternalPackage("mumps"):
+        local_solver = "cholesky"
+        local_solver_type = "mumps"
+        coarse_solver = "cholesky"
+        coarse_solver_type = "mumps"
+    elif PETSc.Sys().hasExternalPackage("superlu"):
+        local_solver = "lu"
+        local_solver_type = "superlu"
+    elif PETSc.Sys().hasExternalPackage("umfpack"):
+        local_solver = "lu"
+        local_solver_type = "umfpack"
+    if coarse_solver == "svd" and PETSc.Sys().hasExternalPackage("superlu_dist"):
+        coarse_solver = "lu"
+        coarse_solver_type = "superlu_dist"
+
+    return fem.petsc.LinearProblem(
+        a,
+        L,
+        u=[sigma, u],
+        kind="is",
+        bcs=bcs,
+        petsc_options_prefix="demo_mixed_poisson_",
+        petsc_options={
+            "ksp_rtol": 1e-8,
+            "ksp_view": None,
+            "pc_type": "bddc",
+            "pc_bddc_use_local_mat_graph": False,
+            "pc_bddc_benign_trick": None,
+            "pc_bddc_nonetflux": None,
+            "pc_bddc_detect_disconnected": None,
+            "pc_bddc_dirichlet_pc_type": local_solver,
+            "pc_bddc_dirichlet_pc_factor_mat_solver_type": local_solver_type,
+            "pc_bddc_neumann_pc_type": local_solver,
+            "pc_bddc_neumann_pc_factor_mat_solver_type": local_solver_type,
+            "pc_bddc_coarse_pc_type": coarse_solver,
+            "pc_bddc_coarse_pc_factor_mat_solver_type": coarse_solver_type,
+        },
+    )
+
+
 # -

-# Once we have set the preconditioners for the two blocks, we can
-# solve the linear system. The `LinearProblem` class will
-# automatically assemble the linear system, apply the boundary
-# conditions, call the Krylov solver and update the solution
-# vectors `u` and `sigma`.
+# Once we have created the problem solvers, we can
+# solve the linear systems. The `LinearProblem` class will
+# automatically assemble the linear system, apply the boundary
+# conditions, call the Krylov solver and update the solution
+# vectors `u` and `sigma`. For each solve, we save the solution
+# `u` in VTX format:

 # +
-problem.solve()
-converged_reason = problem.solver.getConvergedReason()
-assert converged_reason > 0, f"Krylov solver has not converged, reason: {converged_reason}."
-# -
-
-# We save the solution `u` in VTX format:
-
-# +
-if dolfinx.has_adios2:
-    from dolfinx.io import VTXWriter
-
-    u.name = "u"
-    with VTXWriter(msh.comm, "output_mixed_poisson.bp", u, "bp4") as f:
-        f.write(0.0)
-else:
-    print("ADIOS2 required for VTX output.")
+if not dolfinx.has_adios2:
+    PETSc.Sys.Print("ADIOS2 required for VTX output.")
+
+
+for strategy_name, strategy in ("block", block_solver), ("monolithic", monolithic_solver):
+    problem = strategy()
+    ksp = problem.solver
+    ksp.setMonitor(
+        lambda _, its, rnorm: PETSc.Sys.Print(f"  iteration: {its:>4d}, residual: {rnorm:.3e}")
+    )
+    PETSc.Sys.Print(f"\n\nSolving mixed Poisson problem with {strategy_name} solver.")
+    problem.solve()
+    converged_reason = problem.solver.getConvergedReason()
+    assert converged_reason > 0, (
+        f"Krylov solver for {strategy_name} has not converged, reason: {converged_reason}."
+    )
+
+    if dolfinx.has_adios2:
+        from dolfinx.io import VTXWriter
+
+        u.name = "u"
+        with VTXWriter(msh.comm, f"output_mixed_poisson_{strategy_name}.bp", u, "bp4") as f:
+            f.write(0.0)
 # -
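Since BDDC is a domain decomposition method, the demo is most interesting in parallel, with one subdomain per MPI rank, e.g. `mpirun -n 4 python demo_mixed-poisson.py`. A final sketch of a property worth checking after the loop, under the assumption that `kind="is"` propagated to the operator: the monolithic strategy solved with an unassembled matrix, in contrast to the "nest" matrix of the block strategy.

```python
# Sketch: `problem` holds the monolithic solver after the loop; its
# operator is unassembled (MATIS), unlike the "nest" matrix used by
# the block strategy.
from petsc4py import PETSc

assert problem.A.getType() == PETSc.Mat.Type.IS
```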