Dolfinx mpc contact inelastic condition with MPI

Hi, I have noticed a strange behaviour with MPI when using the inelastic condition. As in the post "Dolfinx mpc overlapping constraints", I am defining a non-conformal mesh, and I would like to use the inelastic condition to enforce continuity of the potential. Without MPI everything seems fine, but with MPI I get unexpected results. This is the mesh creation with gmsh:

import gmsh

gmsh.initialize()
gmsh.option.setNumber("General.Terminal", 1)
gmsh.model.add("2squares")

# Geometry: a unit square on the left containing a small disk, and a second
# unit square to the right, shifted up by 0.15 so the shared edge is partial.
r = 0.1
cx, cy = 0.5, 0.6

rect_left = gmsh.model.occ.addRectangle(0, 0, 0, 1, 1)
disk = gmsh.model.occ.addDisk(cx, cy, 0.0, r, r)
rect_right = gmsh.model.occ.addRectangle(1, 0.15, 0, 1, 1)

gmsh.model.occ.synchronize()

# Fragment the left rectangle with the disk only, so the disk becomes a
# conformal subdomain of the left square (the right square stays separate).
outDimTags, outMap = gmsh.model.occ.fragment([(2, rect_left)], [(2, disk)])
gmsh.model.occ.synchronize()

# Give every surface and curve its own physical group so that cell and
# facet markers survive the round-trip through the .msh file.
for surf_idx, (_dim, surf_tag) in enumerate(gmsh.model.getEntities(2), start=1):
    group = gmsh.model.addPhysicalGroup(2, [surf_tag], tag=surf_idx)
    gmsh.model.setPhysicalName(2, group, f"surf_{surf_tag}")

for curve_idx, (_dim, curve_tag) in enumerate(gmsh.model.getEntities(1), start=1):
    group = gmsh.model.addPhysicalGroup(1, [curve_tag], tag=100 + curve_idx)
    gmsh.model.setPhysicalName(1, group, f"line_{curve_tag}")
     
# Mesh and write
# Keep left-square sizes unchanged; refine only in the right square.
def right_square_refine(dim, tag, x, y, z, lc):
    """Mesh-size callback: cap the element size at 0.02 inside the right
    square (x > 1, 0 <= y <= 1); elsewhere return the incoming size lc
    unchanged."""
    inside_right_square = x > 1.0 and 0.0 <= y <= 1.0
    return min(lc, 0.02) if inside_right_square else lc

# Make the right square's mesh finer (constant background field) so the
# interface between the two squares is visibly non-conformal.
# NOTE(review): right_square_refine above is never registered as a size
# callback (no setSizeCallback call) — the constant field alone sets sizes.
size_field = gmsh.model.mesh.field.add("Constant")
gmsh.model.mesh.field.setNumber(size_field, "VIn", 0.19)  # size inside selected entities
gmsh.model.mesh.field.setNumbers(size_field, "SurfacesList", [rect_right])
gmsh.model.mesh.field.setAsBackgroundMesh(size_field)

gmsh.model.mesh.generate(2)
gmsh.model.mesh.refine()
gmsh.write("2squares.msh")
gmsh.finalize()

The mesh is non-conformal where the two squares are in contact. In this notebook cell I load and plot the mesh:

#%%px

import dolfinx
import dolfinx_mpc
from dolfinx.io import gmsh
import numpy as np
import pyvista
import ufl
from mpi4py import MPI
from dolfinx import plot

#cell tags are -> 1: disk, 2 right square, 3 left square
comm = MPI.COMM_WORLD
# Use shared-facet ghosting when running on more than one rank.
if comm.Get_size() > 1:
    partitioner = dolfinx.mesh.create_cell_partitioner(
        dolfinx.mesh.GhostMode.shared_facet,
    )
else:
    partitioner = None

mesh, ct, ft, *_ = gmsh.read_from_msh("2squares.msh", comm, partitioner=partitioner)

# Each rank builds its own VTK representation (owned + ghost cells),
# which rank 0 gathers for plotting.
cell_map = mesh.topology.index_map(mesh.topology.dim)
owned = cell_map.size_local
ghosts = cell_map.num_ghosts
topology, cell_types, geometry = plot.vtk_mesh(
    mesh, mesh.topology.dim, np.arange(owned + ghosts)
)
all_data = comm.gather((topology, cell_types, geometry), root=0)
all_owned = comm.gather(owned, root=0)


if comm.rank == 0:
    # One subplot per rank: owned cells in the default colour, ghosts in red.
    plotter = pyvista.Plotter(shape=(1, comm.size))
    # enumerate replaces the manual counter (and its noqa: SIM113).
    for subplot_idx, ((topology, cell_types, geometry), num_owned) in enumerate(
        zip(all_data, all_owned, strict=True)
    ):
        grid = pyvista.UnstructuredGrid(topology, cell_types, geometry)
        owned_cells = grid.extract_cells(np.arange(num_owned))
        ghost_cells = grid.extract_cells(np.arange(num_owned, grid.n_cells))
        plotter.subplot(0, subplot_idx)
        plotter.add_mesh(owned_cells, show_edges=True)
        # Idiomatic truthiness test instead of "is False".
        if not ghost_cells.is_empty:
            plotter.add_mesh(ghost_cells, show_edges=True, color="red")
        plotter.view_xy()

    plotter.show()

The code that I solve is the following:

#%%px
dx = ufl.Measure("dx", domain=mesh, subdomain_data=ct)
# Cell tags from the gmsh physical groups.
disk = 1
right_square = 2
left_square = 3
# Facet tags of the two (non-matching) sides of the contact interface.
left_square_contact = 104
right_square_contact = 107
tdim = mesh.topology.dim
tags_to_shift = right_square  # NOTE(review): unused in this cell
mesh.topology.create_connectivity(tdim, 0)

sp = dolfinx.fem.functionspace(mesh, ("CG", 2))
# 1/mu0 with mu0 = 4*pi*1e-7; use np.pi instead of the 3.14 approximation.
nu0 = 1 / (4 * np.pi * 1e-7)

# Homogeneous Dirichlet BC on the right edge (x = 2) of the combined domain.
bc = dolfinx.fem.dirichletbc(
    0.0, dolfinx.fem.locate_dofs_geometrical(sp, lambda x: np.isclose(x[0], 2.0)), sp
)
# Tie the non-conformal interface together with an inelastic contact condition.
mpc = dolfinx_mpc.MultiPointConstraint(sp)
mpc.create_contact_inelastic_condition(
    ft, left_square_contact, right_square_contact, eps2=1e-11, allow_missing_masters=True
)
mpc.finalize()

# Bilinear form: Laplacian scaled by nu0; source only inside the disk.
a = nu0 * ufl.inner(ufl.grad(ufl.TrialFunction(sp)), ufl.grad(ufl.TestFunction(sp))) * ufl.dx
L = 10000.0 * ufl.TestFunction(sp) * dx(disk)

# Solve the MPC-constrained linear problem.
problem = dolfinx_mpc.LinearProblem(a, L, mpc=mpc, bcs=[bc])
sol_lin = problem.solve()

Without MPI, I get the solution I expect:

#%%px
# Gather each rank's grid and solution values on rank 0 for plotting.
owned = mesh.topology.index_map(mesh.topology.dim).size_local  # cells owned by this rank
ghosts = mesh.topology.index_map(mesh.topology.dim).num_ghosts  # ghost cells on this rank
topology, cell_types, geometry = plot.vtk_mesh(mpc.function_space, np.arange(owned + ghosts))
all_data = comm.gather((topology, cell_types, geometry), root=0)
all_owned = comm.gather(owned, root=0)
all_sol = comm.gather(sol_lin.x.array[:], root=0)
if comm.rank == 0:
    plotter = pyvista.Plotter(shape=(1, comm.size))
    # enumerate replaces the manual counter increment.
    for subplot_idx, ((topology, cell_types, geometry), num_owned, sol) in enumerate(
        zip(all_data, all_owned, all_sol, strict=True)
    ):
        grid = pyvista.UnstructuredGrid(topology, cell_types, geometry)
        grid.point_data["u"] = sol
        grid.set_active_scalars("u")
        # Isolines of the solution, drawn in white on top of the mesh.
        levels = np.linspace(0, 6e-4, 25)
        iso_lin = grid.contour(isosurfaces=levels, scalars="u")
        owned_cells = grid.extract_cells(np.arange(num_owned))
        ghost_cells = grid.extract_cells(np.arange(num_owned, grid.n_cells))
        plotter.subplot(0, subplot_idx)
        plotter.add_mesh(owned_cells, show_edges=True)
        plotter.add_mesh(iso_lin, color="white", line_width=2)
        # Idiomatic truthiness test instead of "is False".
        if not ghost_cells.is_empty:
            plotter.add_mesh(ghost_cells, show_edges=True, color="red")

        plotter.view_xy()

    plotter.show()

If I use MPI, I get the following partition of the mesh (in red cells with ghost nodes):

and mpc.create_contact_inelastic_condition causes an MPI_ABORT with a CG2 function space, while with CG1 it gives different results:

Am I missing something?

Mattia

There seems to be a bug here, as:

import dolfinx
import dolfinx_mpc
from dolfinx.io import gmsh
import numpy as np
import ufl
from mpi4py import MPI
from dolfinx import plot

#cell tags are -> 1: disk, 2 right square, 3 left square
comm = MPI.COMM_WORLD
# Minimal reproducer: no ghost cells (GhostMode.none) in parallel.
partitioner = (
    dolfinx.mesh.create_cell_partitioner(
        dolfinx.mesh.GhostMode.none,
    )
    if comm.Get_size() > 1
    else None
)

mesh, ct, ft, *_ = gmsh.read_from_msh("2squares.msh", comm, partitioner=partitioner)


#%%px
dx = ufl.Measure("dx", domain=mesh, subdomain_data=ct)
# Cell tags: 1 = disk, 2 = right square, 3 = left square.
disk = 1
right_square = 2
left_square = 3
# Facet tags of the contact interfaces.
left_square_contact = 104
right_square_contact = 107
tdim = mesh.topology.dim
tags_to_shift = right_square  # NOTE(review): unused here
mesh.topology.create_connectivity(tdim, 0)

sp = dolfinx.fem.functionspace(mesh, ("CG", 2))
nu0 = 1 / (4 * np.pi * 1e-7)  # 1/mu0; np.pi instead of the 3.14 approximation

# Homogeneous Dirichlet BC on the right edge (x = 2) of the combined domain.
bc = dolfinx.fem.dirichletbc(
    0.0, dolfinx.fem.locate_dofs_geometrical(sp, lambda x: np.isclose(x[0], 2.0)), sp
)
mpc = dolfinx_mpc.MultiPointConstraint(sp)
mpc.create_contact_inelastic_condition(
    ft, left_square_contact, right_square_contact, eps2=1e-11, allow_missing_masters=True
)
mpc.finalize()
# Sanity check: the slave count should be comparable to the number of
# interface facets, not vastly larger.
print(len(mpc.slaves), len(ft.find(left_square_contact)), len(ft.find(right_square_contact)))

gives way too many slaves on 3 processes and segfaults on 2.
I’ll have a look.

I’ve located the issue and made a fix (see the linked pull request):

With this your example gives consistent results on various sets of processes.
When I implemented the inelastic contact constraint I didn’t think of it for cases where the block size of the space is not equal to the dimension of the space (scalar or manifold). This is fixed in the PR.

EDIT
I’ve made a 0.10.5 release that fixes this.

It should be on conda within a few days.