Hello, I was surprised when I ran my code on a new machine and my unit tests failed.
The code solves a traction test case with pure Neumann (traction) boundary conditions, which makes the problem singular. To handle the singularity, I enforce that the solution is orthogonal to the operator's nullspace through the Krylov solver, by attaching the nullspace to the PETSc matrix. As usual, I solve the problem with algebraic multigrid (GAMG) and a Jacobi coarse solver, so that the coarse solves do not pollute the nullspace handling.
Here is an MWE reproducing the error:
import dolfinx.fem as fem
import dolfinx.fem.petsc  # needed for fem.petsc.LinearProblem
import dolfinx.geometry
import dolfinx.la  # needed for create_petsc_vector / orthonormalize
import dolfinx.mesh
import ufl
from mpi4py import MPI
import numpy as np
from petsc4py import PETSc
from contextlib import ExitStack
from numpy.testing import assert_almost_equal

def nullspace_elasticity_2D(V: fem.FunctionSpace):
    """Build nullspace for 2D linear elasticity."""
    # Create list of vectors for null space
    index_map = V.dofmap.index_map
    nullspace_basis = [
        dolfinx.la.create_petsc_vector(index_map, V.dofmap.index_map_bs)
        for i in range(3)
    ]
    with ExitStack() as stack:
        vec_local = [stack.enter_context(x.localForm()) for x in nullspace_basis]
        basis = [np.asarray(x) for x in vec_local]
        # Dof indices for each subspace (x and y dofs)
        dofs = [V.sub(i).dofmap.list.array for i in range(2)]
        # Build translational null space basis
        for i in range(2):
            basis[i][dofs[i]] = 1.0
        # Build rotational null space basis
        x = V.tabulate_dof_coordinates()
        dofs_block = V.dofmap.list.array
        x0, x1 = x[dofs_block, 0], x[dofs_block, 1]
        basis[2][dofs[0]] = -x1
        basis[2][dofs[1]] = x0
    # Create vector space basis and orthonormalize
    dolfinx.la.orthonormalize(nullspace_basis)
    nsp = PETSc.NullSpace().create(vectors=nullspace_basis)
    return nsp

if __name__ == "__main__":
    mesh = dolfinx.mesh.create_unit_square(MPI.COMM_SELF, 5, 5)
    V = fem.VectorFunctionSpace(mesh, ("CG", 1))
    u = ufl.TrialFunction(V)
    v = ufl.TestFunction(V)

    # Plane-stress elasticity written in Voigt notation
    E = 10
    nu = 0.3
    C = ufl.inv(1 / E * ufl.as_matrix([
        [1, -nu, 0],
        [-nu, 1, 0],
        [0, 0, 2 * (1 + nu)],
    ]))
    strain2voigt = lambda eps: ufl.as_vector([eps[0, 0], eps[1, 1], 2 * eps[0, 1]])
    # Map the Voigt stress vector back to a 2x2 stress tensor
    voigt2stress = lambda sigma: ufl.as_matrix([[sigma[0], sigma[2]], [sigma[2], sigma[1]]])
    strain = lambda u: ufl.sym(ufl.grad(u))
    stress = lambda u, C: voigt2stress(ufl.dot(C, strain2voigt(strain(u))))

    a = ufl.inner(stress(u, C), strain(v)) * ufl.dx
    n = ufl.FacetNormal(mesh)
    sigma0 = ufl.as_matrix([[1.0, 0.0], [0.0, 0.0]])
    L = ufl.dot(ufl.dot(sigma0, n), v) * ufl.ds

    gamg_options = {
        "ksp_type": "cgs",
        "ksp_rtol": 1.0e-12,
        "pc_type": "gamg",
        "mg_levels_ksp_type": "chebyshev",
        "mg_levels_pc_type": "jacobi",
        "mg_levels_esteig_ksp_type": "cgs",
        "mg_levels_ksp_chebyshev_esteig_steps": 20,
        "mg_coarse_pc_type": "jacobi",  # Jacobi coarse solver for the singular problem
    }

    nullspace = nullspace_elasticity_2D(V)
    problem = fem.petsc.LinearProblem(a, L, bcs=[], petsc_options=gamg_options)
    problem.A.setNullSpace(nullspace)
    problem.A.setNearNullSpace(nullspace)
    u_sol = problem.solve()

    # Check that the solution is orthogonal to the rigid-body modes
    u0 = fem.Function(V)  # [1, 0]
    u1 = fem.Function(V)  # [0, 1]
    u2 = fem.Function(V)  # [-x[1], x[0]]
    u0.interpolate(lambda x: np.stack([np.ones_like(x[0]), np.zeros_like(x[0])]))
    u1.interpolate(lambda x: np.stack([np.zeros_like(x[0]), np.ones_like(x[0])]))
    u2.interpolate(lambda x: np.stack([-x[1], x[0]]))
    res0 = fem.assemble_scalar(fem.form(ufl.dot(u_sol, u0) * ufl.dx))
    res1 = fem.assemble_scalar(fem.form(ufl.dot(u_sol, u1) * ufl.dx))
    res2 = fem.assemble_scalar(fem.form(ufl.dot(u_sol, u2) * ufl.dx))
    assert_almost_equal(res0, 0)
    assert_almost_equal(res1, 0)
    assert_almost_equal(res2, 0)
At the end of the code I check that the solution is in fact orthogonal to the nullspace.
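As a side note, an explicit way to enforce this orthogonality, independently of what the Krylov solver does internally, would be to project the rigid-body component out of the solution vector with PETSc's NullSpace.remove. A minimal sketch, reusing nullspace and u_sol from the code above (and assuming the basis has been orthonormalized, as in the MWE):

# Remove any remaining rigid-body component from the solution vector (in place)
nullspace.remove(u_sol.vector)
# Propagate the update to ghost entries in parallel runs (no-op on COMM_SELF)
u_sol.x.scatter_forward()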
Here is my problem: on the latest Docker image, the assertions pass and the solution is correct.
However, when I run the exact same code on a fresh Ubuntu 22.04 install with DOLFINx from the PPA, the assertions fail and the solution looks completely off.
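To help narrow this down, here is the kind of diagnostic sketch I would add right after problem.solve() in the MWE above (it assumes the problem and nullspace objects defined there): it checks whether the nullspace vectors really lie in the kernel of the assembled matrix and how the Krylov solver terminated.

# Diagnostics to run right after problem.solve() (sketch, reusing objects from the MWE)
print("nullspace lies in ker(A):", nullspace.test(problem.A))
ksp = problem.solver
print("KSP converged reason:", ksp.getConvergedReason())
print("KSP iterations:", ksp.getIterationNumber())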
One thing I have noticed is that the Ubuntu version of DOLFINx uses PETSc 3.15, whereas the Docker version uses PETSc 3.17.
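For completeness, this is the quick snippet I would run in each environment to confirm the versions it actually reports:

import dolfinx
from petsc4py import PETSc

print("DOLFINx version:", dolfinx.__version__)
print("PETSc version reported by petsc4py:", PETSc.Sys.getVersion())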
Is there something I am doing wrong here? I have been using these GAMG options for quite a while now and have never run into such issues.