Using Mpirun to read.h5 file appears in PETSC ERROR

Hello, my dear! This is a program that uses FENICSX 0.9.0 to read the grid information stored in the “.h5” file. It will run without mpirun, but if you switch to mpirun, the following error will occur. Can you help me to solve it? Thank you very much!

import h5py
import numpy as np
from basix.ufl import element, mixed_element
import dolfinx
import ufl
from mpi4py import MPI
from dolfinx import mesh, io
from dolfinx.mesh import meshtags, meshtags_from_entities  #
from dolfinx.io import distribute_entity_data
# import meshio  #

#
try:
    from dolfinx.cpp.graph import adjacencylist
except ImportError:
    #
    from dolfinx import cpp
    adjacencylist = cpp.graph.AdjacencyList_int32

filename = "colin27_coarse_boundaries.h5"
file = h5py.File(filename, 'r')

coordinates = np.array(file["mesh/coordinates"])
#
# coordinates = np.array([
#     [0.0, 0.0, 0.0],  #
#     [1.0, 0.0, 0.0],  #
#     [0.0, 1.0, 0.0],  #
#     [0.0, 0.0, 1.0],  #
#     [1.0, 1.0, 1.0],  #
#     # ...
# ], dtype=np.float64)
topology = np.array(file["mesh/topology"]) #
print(f"Mesh_shape: {topology.shape}")
#
# topology = np.array([
#     [0, 1, 2, 3],  #
#     [1, 2, 3, 4],  #
#     [2, 3, 4, 5],  #
#     # ...
# ], dtype=np.int32)
boundary_coordinates = np.array(file["boundaries/coordinates"])
boundary_facets = np.array(file["boundaries/topology"], dtype=np.int32) #
tags = np.array(file["boundaries/values"], dtype=np.int32) #

print(f"boundary_facets = {boundary_facets}") #
gdim = 3
shape = "tetrahedron"
degree = 1

#
cell = ufl.Cell(shape)
ultele = element("Lagrange", cell.cellname(), degree, shape=(gdim,))
ufl_tetra = ufl.Mesh(ultele)

mesh = dolfinx.mesh.create_mesh(MPI.COMM_WORLD, topology, coordinates, ufl_tetra)

tdim = mesh.topology.dim
fdim = tdim - 1

marked_facets = boundary_facets.reshape(-1, 3).astype(np.int64)
print(f"boundary_facets = {marked_facets}")
facet_values = tags.reshape(-1).astype(np.int32)
print(f"facet_values = {facet_values}")

# 2.
sorted_facets = np.sort(marked_facets, axis=1)
print(f"boundary_facets = {sorted_facets.shape}")
unique_facets, unique_indices = np.unique(sorted_facets, axis=0, return_index=True)

#
marked_facets_unique = marked_facets[unique_indices]
facet_values_unique = facet_values[unique_indices]

print(f"only fact number: {len(marked_facets_unique)}")

# 3.
local_entities, local_values = distribute_entity_data(
    mesh, fdim, marked_facets_unique, facet_values_unique
)
print(f"local_entities: {local_entities.shape}; local_values: {local_values.shape}")

# 4.
mesh.topology.create_connectivity(fdim, tdim)  #
mesh.topology.create_connectivity(tdim, fdim)

# 5.
adj = adjacencylist(local_entities)
facet_tag = meshtags_from_entities(mesh, fdim, adj, local_values)
facet_tag.name = "Facet tags"

with io.XDMFFile(MPI.COMM_WORLD, "brain.xdmf", "w") as xdmf:
    xdmf.write_mesh(mesh)
    xdmf.write_meshtags(facet_tag, mesh.geometry)


Error:

[0]PETSC ERROR: ------------------------------------------------------------------------

[0]PETSC ERROR: Caught signal number 11 SEGV: Segmentation Violation, probably memory access out of range[0]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger

[0]PETSC ERROR: or see  and [0]PETSC ERROR: configure using --with-debugging=yes, recompile, link, and run

[0]PETSC ERROR: to get more information on the crash.

[0]PETSC ERROR: Run with -malloc_debug to check if memory corruption is causing the crash.[1]PETSC ERROR: ------------------------------------------------------------------------

[1]PETSC ERROR: Caught signal number 11 SEGV: Segmentation Violation, probably memory access out of range

[1]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger[1]PETSC ERROR: or see  and [1]PETSC ERROR: configure using --with-debugging=yes, recompile, link, and run

[1]PETSC ERROR: to get more information on the crash.[1]PETSC ERROR: Run with -malloc_debug to check if memory corruption is causing the crash.

I found the problem with this paragraph, and I’m trying to figure out how to avoid it.

mesh = dolfinx.mesh.create_mesh(MPI.COMM_WORLD, topology, coordinates, ufl_tetra)

tdim = mesh.topology.dim
fdim = tdim - 1

You are reading in the information on every process. This is not how one should read in plain data arrays in parallel in DOLFINx.

This is covered in: Mesh creation in serial and parallel — FEniCSx Documentation

Thank you for your kind reply, I will learn it.