Hello,
I am trying to run a parallel computation in dolfinx (docker container dolfinx/dolfinx, version 0.6.0-r1) with the TAO solver and MPI. I configure the TAO solver as follows:
a_problem = TAOProblem(functional_a, dfunctional_a, ddfunctional_a, alpha, bca)
dofs_domain_a, dofs_borders_a = V_a.dofmap.index_map, V_a.dofmap.index_map_bs
b_a = dolfinx.la.create_petsc_vector(dofs_domain_a, dofs_borders_a)
J_a = dolfinx.fem.petsc.create_matrix(a_problem.a)
# Create PETSc TAO
solver_a_tao = petsc4py.PETSc.TAO().create(comm=comm)
solver_a_tao.setType("tron")
solver_a_tao.setObjective(a_problem.f)
solver_a_tao.setGradient(a_problem.F, b_a)
solver_a_tao.setHessian(a_problem.J, J_a)
solver_a_tao.setTolerances(grtol=1e-6, gttol=1e-6)
solver_a_tao.getKSP().setType("preonly")
solver_a_tao.getKSP().setTolerances(rtol=1.0e-6)
solver_a_tao.getKSP().getPC().setType("lu")
# Set the variable bounds (note: they are passed by reference, not by value)
solver_a_tao.setVariableBounds(alow.vector, aupp.vector)
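For completeness, this is roughly how I then invoke the solver (a sketch; the exact call in my script may differ slightly, and alpha is the dolfinx.fem.Function passed to TAOProblem above):

# Sketch: solve for the Function alpha defined above
solver_a_tao.solve(alpha.vector)
# Update ghost values of the solution after the solve
alpha.x.scatter_forward()
print(f"TAO converged reason: {solver_a_tao.getConvergedReason()}")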
The TAOProblem class is:
from typing import List
from petsc4py import PETSc
import dolfinx
import ufl
class TAOProblem:
    """Nonlinear problem class compatible with the PETSc TAO solver."""

    def __init__(self, f: ufl.form.Form, F: ufl.form.Form, J: ufl.form.Form,
                 u: dolfinx.fem.Function, bcs: List[dolfinx.fem.dirichletbc]):
        self.obj = dolfinx.fem.form(f)
        self.L = dolfinx.fem.form(F)
        self.a = dolfinx.fem.form(J)
        self.bcs = bcs
        self.u = u
        # Create matrix and vector to be used for assembly of the nonlinear problem
        self.A = dolfinx.fem.petsc.create_matrix(self.a)
        self.b = dolfinx.fem.petsc.create_vector(self.L)

    def f(self, tao, x: PETSc.Vec):
        # Update the ghost values of x across ranks (forward scatter)
        x.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
        # Copy the vector x into the vector u
        x.copy(self.u.vector)
        # Update the ghost values of u across ranks (forward scatter)
        self.u.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
        return dolfinx.fem.assemble_scalar(self.obj)

    def F(self, tao: PETSc.TAO, x: PETSc.Vec, F):
        # Update the ghost values of x across ranks (forward scatter)
        x.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
        # Copy the vector x into the vector u
        x.copy(self.u.vector)
        # Update the ghost values of u across ranks (forward scatter)
        self.u.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
        with F.localForm() as f_local:
            f_local.set(0.0)
        dolfinx.fem.petsc.assemble_vector(F, self.L)
        # Apply the boundary conditions using the lifting technique
        dolfinx.fem.petsc.apply_lifting(F, [self.a], bcs=[self.bcs], x0=[x], scale=-1.0)
        # Accumulate ghost contributions of F across ranks (reverse scatter)
        F.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.REVERSE)
        # Set the boundary condition values in F
        dolfinx.fem.petsc.set_bc(F, self.bcs, x, -1.0)

    def J(self, tao: PETSc.TAO, x: PETSc.Vec, A: PETSc.Mat, P: PETSc.Mat):
        A.zeroEntries()
        dolfinx.fem.petsc.assemble_matrix(A, self.a, self.bcs)
        A.assemble()
The code works perfectly in serial, but not in parallel. It doesn't report any specific error; it simply never finishes the computation. I would like to know what my mistake is, because I use the same approach with the SNES solver and there is no problem.
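For reference, this is roughly how I launch the run in parallel inside the container (the script name my_script.py is just a placeholder):

mpirun -n 4 python3 my_script.py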
Thank you in advance