Problems with my codes that do not work any more

Hi!
I am facing issues with my codes that have stopped working, even though they worked a week ago. I guess an update may have caused issues.

First, a more seemingly simple example

import dolfinx
from mpi4py import MPI
import numpy as np
import ufl
from dolfinx import fem
from petsc4py.PETSc import ScalarType
import dolfinx.fem.petsc

Nx = 5
Ny = 6
mesh = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, Nx, Ny)

V = dolfinx.fem.VectorFunctionSpace(mesh, ("Lagrange", 1))
mesh.topology.create_connectivity(mesh.topology.dim-1, mesh.topology.dim)
boundary_facets = dolfinx.mesh.exterior_facet_indices(mesh.topology)
boundary_dofs = dolfinx.fem.locate_dofs_topological(V, mesh.topology.dim-1, boundary_facets)

u_bc = dolfinx.fem.Function(V)
mesh.topology.create_connectivity(mesh.topology.dim-1, mesh.topology.dim)
f_to_c = mesh.topology.connectivity(mesh.topology.dim-1, mesh.topology.dim)
mesh.topology.create_connectivity(mesh.topology.dim, mesh.topology.dim-1)
c_to_f = mesh.topology.connectivity(mesh.topology.dim, mesh.topology.dim-1)


values = [1,2]

dof_layout = V.dofmap.dof_layout
coords = V.tabulate_dof_coordinates()
num_dofs = 0
bc_dofs = []
for facet in boundary_facets:
    # Find local index for each facet
    cells = f_to_c.links(facet)
    assert len(cells) == 1
    facets = c_to_f.links(cells[0])
    local_index = np.flatnonzero(facets == facet)
    # Find local index for dofs in cell
    closure_dofs = dof_layout.entity_closure_dofs(
        mesh.topology.dim-1,  local_index)
    cell_dofs = V.dofmap.cell_dofs(cells[0])
    for dof in closure_dofs:
        local_dof = cell_dofs[dof]
        dof_coordinate = coords[local_dof]
        for b in range(V.dofmap.bs):
            num_dofs += 1
            u_bc.x.array[local_dof * V.dofmap.bs + b] = values[b]
     

bc = dolfinx.fem.dirichletbc(u_bc, boundary_dofs)
u = ufl.TrialFunction(V)
v = ufl.TestFunction(V)
f = fem.Constant(mesh, ScalarType((-1,-2)))
a = ufl.inner(ufl.grad(u), ufl.grad(v)) * ufl.dx
L = ufl.inner(f, v) * ufl.dx
problem = fem.petsc.LinearProblem(a, L, bcs=[bc], petsc_options={"ksp_type": "preonly", "pc_type": "lu"})
uh_x = problem.solve()

mesh.topology.create_connectivity(mesh.topology.dim-1, mesh.topology.dim)
boundary_facets = dolfinx.mesh.exterior_facet_indices(mesh.topology)
boundary_dofs = dolfinx.fem.locate_dofs_topological(V, mesh.topology.dim-1, boundary_facets)
dof_coordinates = V.tabulate_dof_coordinates()[boundary_dofs]
points = mesh.geometry.x

u_values = []
from dolfinx import geometry
bb_tree = geometry.BoundingBoxTree(mesh, mesh.topology.dim)
cells = []
points_on_proc = []
# Find cells whose bounding-box collide with the the points
cell_candidates = geometry.compute_collisions(bb_tree, points)
# Choose one of the cells that contains the point
colliding_cells = geometry.compute_colliding_cells(mesh, cell_candidates, points)
for i, point in enumerate(points):
    if len(colliding_cells.links(i))>0:
        points_on_proc.append(point)
        cells.append(colliding_cells.links(i)[0])       
points_on_proc = np.array(points_on_proc, dtype=np.float64)
# 3d array x_points has mesh points (including the trivial z axis) and u_values=(u_x,u_y) the relevant 2d solution of the Poisson equation in each point
x_points=points_on_proc
u_values = uh_x.eval(points_on_proc, cells)

The errors are 2. Seemingly the way we use function spaces have changed and same thing with BoundingBoxTree? Complete error is as follows

WARNING:test.py:13: DeprecationWarning: This method is deprecated. Use FunctionSpace with an element shape argument instead
  V = dolfinx.fem.VectorFunctionSpace(mesh, ("Lagrange", 1))

Traceback (most recent call last):
  File "test.py", line 66, in <module>
    bb_tree = geometry.BoundingBoxTree(mesh, mesh.topology.dim)
TypeError: BoundingBoxTree.__init__() takes 2 positional arguments but 3 were given

However, there is also another more severe issue that I don’t understand with missmatch of h5py. Here, I want to create a 3d object, say a cone, and create tetraedra inside to do a FEM calculation

from mpi4py import MPI
from petsc4py.PETSc import ScalarType
import pygmsh
import gmsh
import os
import dolfinx.fem.petsc

#PARAMETRS
mesh_size_1 = 0.026

dir_path = os.path.dirname(os.path.abspath(__file__))
geom = pygmsh.occ.Geometry()
model3D = geom.__enter__()
cone0 =  model3D.add_cone([0,0,0],[0,0,1],0,0.25,mesh_size=mesh_size_1)
model3D.synchronize()
geom.generate_mesh(dim=3)
gmsh.write(os.path.join(dir_path, "cone.msh"))
model3D.__exit__()
# Function used to import the geometry mesh
# More info from one of the FEniQSx developer here https://jsdokken.com/src/pygmsh_tutorial.html
import meshio
mesh_from_file = meshio.read(os.path.join(dir_path, "cone.msh"))
def create_mesh(mesh, cell_type, prune_z=False):
    cells = mesh.get_cells_type(cell_type)
    points = mesh.points[:,:2] if prune_z else mesh.points
    out_mesh = meshio.Mesh(points=points, cells={cell_type: cells})
    return out_mesh

tetra_mesh = create_mesh(mesh_from_file, "tetra", prune_z=False)
meshio.write(os.path.join(dir_path, "cone.xdmf"), tetra_mesh)

with dolfinx.io.XDMFFile(MPI.COMM_WORLD, os.path.join(dir_path, "cone.xdmf"), "r") as xdmf:
       mesh = xdmf.read_mesh(name="Grid")
domain1=mesh       

I will post the error in the end of the post since it is large
Any help is greatly appreaciated!!

WARNING:py.warnings UserWarning: h5py is running against HDF5 1.10.7 when it was built against 1.14.2, this may cause problems
  _warn(("h5py is running against HDF5 {0} when it was built against {1}, "

Warning! ***HDF5 library version mismatched error***
The HDF5 header files used to compile this application do not match
the version used by the HDF5 library to which this application is linked.
Data corruption or segmentation faults may occur if the application continues.
This can happen when an application was compiled by one version of HDF5 but
linked with a different version of static or shared HDF5 library.
You should recompile the application or check your shared library related
settings such as 'LD_LIBRARY_PATH'.
You can, at your own risk, disable this warning by setting the environment
variable 'HDF5_DISABLE_VERSION_CHECK' to a value of '1'.
Setting it to 2 or higher will suppress the warning messages totally.
Headers are 1.14.2, library is 1.10.7
	    SUMMARY OF THE HDF5 CONFIGURATION
	    =================================

General Information:
-------------------
                   HDF5 Version: 1.10.7
                  Configured on: Wed, 08 Dec 2021 23:33:27 +0000
                  Configured by: Debian
                    Host system: x86_64-pc-linux-gnu
              Uname information: Debian
                       Byte sex: little-endian
             Installation point: /usr
		    Flavor name: openmpi

Compiling Options:
------------------
                     Build Mode: production
              Debugging Symbols: no
                        Asserts: no
                      Profiling: no
             Optimization Level: high

Linking Options:
----------------
                      Libraries: static, shared
  Statically Linked Executables: 
                        LDFLAGS: -Wl,-Bsymbolic-functions -flto=auto -Wl,-z,relro
                     H5_LDFLAGS: -Wl,--version-script,$(top_srcdir)/debian/map_mpi.ver
                     AM_LDFLAGS: 
                Extra libraries: -lcrypto -lcurl -lsz -lz -ldl -lm 
                       Archiver: ar
                       AR_FLAGS: cr
                         Ranlib: x86_64-linux-gnu-ranlib

Languages:
----------
                              C: yes
                     C Compiler: /usr/bin/mpicc.openmpi ( Configured with: ../src/configure -v --with-pkgversion='Ubuntu 11.2.0-12ubuntu1' --with-bugurl=file:///usr/share/doc/gcc-11/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++,m2 --prefix=/usr --with-gcc-major-version-only --program-suffix=-11 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --enable-bootstrap --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-plugin --enable-default-pie --with-system-zlib --enable-libphobos-checking=release --with-target-system-zlib=auto --enable-objc-gc=auto --enable-multiarch --disable-werror --enable-cet --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none=/build/gcc-11-VB3iS9/gcc-11-11.2.0/debian/tmp-nvptx/usr,amdgcn-amdhsa=/build/gcc-11-VB3iS9/gcc-11-11.2.0/debian/tmp-gcn/usr --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu --with-build-config=bootstrap-lto-lean --enable-link-serialization=2 built with gcc version 11.2.0 (Ubuntu 11.2.0-12ubuntu1))
                       CPPFLAGS: -Wdate-time -D_FORTIFY_SOURCE=2
                    H5_CPPFLAGS: -D_GNU_SOURCE -D_POSIX_C_SOURCE=200809L   -DNDEBUG -UH5_DEBUG_API
                    AM_CPPFLAGS: 
                        C Flags: -g -O2 -ffile-prefix-map=$(top_srcdir)=. -flto=auto -ffat-lto-objects -fstack-protector-strong -Wformat -Werror=format-security
                     H5 C Flags:  -std=c99  -Wall -Wcast-qual -Wconversion -Wextra -Wfloat-equal -Wformat=2 -Winit-self -Winvalid-pch -Wmissing-include-dirs -Wno-c++-compat -Wno-format-nonliteral -Wshadow -Wundef -Wwrite-strings -pedantic -Wlarger-than=2560 -Wlogical-op -Wframe-larger-than=16384 -Wpacked-bitfield-compat -Wsync-nand -Wstrict-overflow=5 -Wno-unsuffixed-float-constants -Wdouble-promotion -Wtrampolines -Wstack-usage=8192 -Wmaybe-uninitialized -Wdate-time -Warray-bounds=2 -Wc99-c11-compat -Wduplicated-cond -Whsa -Wnormalized -Wnull-dereference -Wunused-const-variable -Walloca -Walloc-zero -Wduplicated-branches -Wformat-overflow=2 -Wformat-truncation=1 -Wimplicit-fallthrough=5 -Wrestrict -Wattribute-alias -Wcast-align=strict -Wshift-overflow=2 -Wattribute-alias=2 -Wmissing-profile -fstdarg-opt -s -Wno-aggregate-return -Wno-inline -Wno-missing-format-attribute -Wno-missing-noreturn -Wno-overlength-strings -Wno-jump-misses-init -Wno-suggest-attribute=const -Wno-suggest-attribute=noreturn -Wno-suggest-attribute=pure -Wno-suggest-attribute=format -Wno-suggest-attribute=cold -Wno-suggest-attribute=malloc -O3  -Werror=bad-function-cast -Werror=declaration-after-statement -Werror=implicit-function-declaration -Werror=missing-declarations -Werror=missing-prototypes -Werror=nested-externs -Werror=old-style-definition -Werror=packed -Werror=pointer-sign -Werror=pointer-to-int-cast -Werror=redundant-decls -Werror=strict-prototypes -Werror=switch -Wunused-function -Wunused-variable -Wunused-parameter -Wcast-align -Wunused-but-set-variable -Wformat -Werror=incompatible-pointer-types -Werror=shadow -Wcast-function-type -Wmaybe-uninitialized
                     AM C Flags: 
               Shared C Library: yes
               Static C Library: yes


                        Fortran: yes
               Fortran Compiler: /usr/bin/mpif90.openmpi ( Configured with: ../src/configure -v --with-pkgversion='Ubuntu 11.2.0-12ubuntu1' --with-bugurl=file:///usr/share/doc/gcc-11/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++,m2 --prefix=/usr --with-gcc-major-version-only --program-suffix=-11 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --enable-bootstrap --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-plugin --enable-default-pie --with-system-zlib --enable-libphobos-checking=release --with-target-system-zlib=auto --enable-objc-gc=auto --enable-multiarch --disable-werror --enable-cet --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none=/build/gcc-11-VB3iS9/gcc-11-11.2.0/debian/tmp-nvptx/usr,amdgcn-amdhsa=/build/gcc-11-VB3iS9/gcc-11-11.2.0/debian/tmp-gcn/usr --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu --with-build-config=bootstrap-lto-lean --enable-link-serialization=2 built with gcc version 11.2.0 (Ubuntu 11.2.0-12ubuntu1))
                  Fortran Flags: -g -O2 -ffile-prefix-map=$(top_srcdir)=. -flto=auto -ffat-lto-objects -fstack-protector-strong
               H5 Fortran Flags:  -std=f2008  -Waliasing -Wall -Wcharacter-truncation -Wextra -Wimplicit-interface -Wsurprising -Wunderflow -pedantic -Warray-temporaries -Wintrinsics-std -Wimplicit-procedure -Wreal-q-constant -Wfunction-elimination -Wrealloc-lhs -Wrealloc-lhs-all -Wno-c-binding-type -Wuse-without-only -Winteger-division -Wfrontend-loop-interchange  -s -O3
               AM Fortran Flags: 
         Shared Fortran Library: yes
         Static Fortran Library: yes

                            C++: yes
                   C++ Compiler: /usr/bin/mpicxx.openmpi ( Configured with: ../src/configure -v --with-pkgversion='Ubuntu 11.2.0-12ubuntu1' --with-bugurl=file:///usr/share/doc/gcc-11/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++,m2 --prefix=/usr --with-gcc-major-version-only --program-suffix=-11 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --enable-bootstrap --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-plugin --enable-default-pie --with-system-zlib --enable-libphobos-checking=release --with-target-system-zlib=auto --enable-objc-gc=auto --enable-multiarch --disable-werror --enable-cet --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none=/build/gcc-11-VB3iS9/gcc-11-11.2.0/debian/tmp-nvptx/usr,amdgcn-amdhsa=/build/gcc-11-VB3iS9/gcc-11-11.2.0/debian/tmp-gcn/usr --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu --with-build-config=bootstrap-lto-lean --enable-link-serialization=2 built with gcc version 11.2.0 (Ubuntu 11.2.0-12ubuntu1))
                      C++ Flags: -g -O2 -ffile-prefix-map=$(top_srcdir)=. -flto=auto -ffat-lto-objects -fstack-protector-strong -Wformat -Werror=format-security
                   H5 C++ Flags:   -Wall -Wcast-qual -Wconversion -Wctor-dtor-privacy -Weffc++ -Wextra -Wfloat-equal -Wformat=2 -Winit-self -Winvalid-pch -Wmissing-include-dirs -Wno-format-nonliteral -Wnon-virtual-dtor -Wold-style-cast -Woverloaded-virtual -Wreorder -Wshadow -Wsign-promo -Wundef -Wwrite-strings -pedantic -Wlarger-than=2560 -Wlogical-op -Wframe-larger-than=16384 -Wpacked-bitfield-compat -Wsync-nand -Wstrict-overflow=5 -Wno-unsuffixed-float-constants -Wdouble-promotion -Wtrampolines -Wstack-usage=8192 -Wmaybe-uninitialized -Wdate-time -Wopenmp-simd -Warray-bounds=2 -Wduplicated-cond -Whsa -Wnormalized -Wnull-dereference -Wunused-const-variable -Walloca -Walloc-zero -Wduplicated-branches -Wformat-overflow=2 -Wformat-truncation=1 -Wimplicit-fallthrough=5 -Wrestrict -Wattribute-alias -Wcast-align=strict -Wshift-overflow=2 -Wattribute-alias=2 -Wmissing-profile -fstdarg-opt -s -O3  -Wcast-align -Wmissing-declarations -Werror=packed -Werror=redundant-decls -Werror=switch -Wunused-but-set-variable -Werror=unused-function -Werror=unused-variable -Wunused-parameter -Werror=shadow
                   AM C++ Flags: 
             Shared C++ Library: yes
             Static C++ Library: yes

                           Java: no


Features:
---------
                   Parallel HDF5: yes
Parallel Filtered Dataset Writes: yes
              Large Parallel I/O: yes
              High-level library: yes
                Build HDF5 Tests: yes
                Build HDF5 Tools: yes
                    Threadsafety: no
             Default API mapping: v18
  With deprecated public symbols: yes
          I/O filters (external): deflate(zlib),szip(encoder)
                             MPE: 
                      Direct VFD: no
                      Mirror VFD: no
              (Read-Only) S3 VFD: yes
            (Read-Only) HDFS VFD: no
                         dmalloc: no
  Packages w/ extra debug output: none
                     API tracing: no
            Using memory checker: no
 Memory allocation sanity checks: no
          Function stack tracing: no
                Use file locking: best-effort
       Strict file format checks: no
    Optimization instrumentation: no
Bye...
[harma-ThinkStation-P620:2172546] *** Process received signal ***
[harma-ThinkStation-P620:2172546] Signal: Aborted (6)
[harma-ThinkStation-P620:2172546] Signal code:  (-6)
[harma-ThinkStation-P620:2172546] [ 0] /lib/x86_64-linux-gnu/libc.so.6(+0x42520)[0x7f6737c42520]
[harma-ThinkStation-P620:2172546] [ 1] /lib/x86_64-linux-gnu/libc.so.6(pthread_kill+0x12c)[0x7f6737c969fc]
[harma-ThinkStation-P620:2172546] [ 2] /lib/x86_64-linux-gnu/libc.so.6(raise+0x16)[0x7f6737c42476]
[harma-ThinkStation-P620:2172546] [ 3] /lib/x86_64-linux-gnu/libc.so.6(abort+0xd3)[0x7f6737c287f3]
[harma-ThinkStation-P620:2172546] [ 4] /lib/x86_64-linux-gnu/libhdf5_openmpi.so.103(+0x3f69d)[0x7f670c43f69d]
[harma-ThinkStation-P620:2172546] [ 5] /lib/x86_64-linux-gnu/libhdf5_openmpi.so.103(H5check_version+0x21)[0x7f670c43f6d1]
[harma-ThinkStation-P620:2172546] [ 6] /home/harma/.local/lib/python3.10/site-packages/h5py/h5f.cpython-310-x86_64-linux-gnu.so(+0x77d0)[0x7f65fb0da7d0]
[harma-ThinkStation-P620:2172546] [ 7] python3(PyModule_ExecDef+0x73)[0x5606f58252d3]
[harma-ThinkStation-P620:2172546] [ 8] python3(+0x23bda0)[0x5606f5825da0]
[harma-ThinkStation-P620:2172546] [ 9] python3(+0x15f854)[0x5606f5749854]
[harma-ThinkStation-P620:2172546] [10] python3(_PyEval_EvalFrameDefault+0x2b71)[0x5606f57352c1]
[harma-ThinkStation-P620:2172546] [11] python3(_PyFunction_Vectorcall+0x7c)[0x5606f574a70c]
[harma-ThinkStation-P620:2172546] [12] python3(_PyEval_EvalFrameDefault+0x6152)[0x5606f57388a2]
[harma-ThinkStation-P620:2172546] [13] python3(_PyFunction_Vectorcall+0x7c)[0x5606f574a70c]
[harma-ThinkStation-P620:2172546] [14] python3(_PyEval_EvalFrameDefault+0x802)[0x5606f5732f52]
[harma-ThinkStation-P620:2172546] [15] python3(_PyFunction_Vectorcall+0x7c)[0x5606f574a70c]
[harma-ThinkStation-P620:2172546] [16] python3(_PyEval_EvalFrameDefault+0x6bd)[0x5606f5732e0d]
[harma-ThinkStation-P620:2172546] [17] python3(_PyFunction_Vectorcall+0x7c)[0x5606f574a70c]
[harma-ThinkStation-P620:2172546] [18] python3(_PyEval_EvalFrameDefault+0x6bd)[0x5606f5732e0d]
[harma-ThinkStation-P620:2172546] [19] python3(_PyFunction_Vectorcall+0x7c)[0x5606f574a70c]
[harma-ThinkStation-P620:2172546] [20] python3(+0x15fb24)[0x5606f5749b24]
[harma-ThinkStation-P620:2172546] [21] python3(_PyObject_CallMethodIdObjArgs+0xff)[0x5606f58294af]
[harma-ThinkStation-P620:2172546] [22] python3(PyImport_ImportModuleLevelObject+0x25a)[0x5606f575e0ca]
[harma-ThinkStation-P620:2172546] [23] python3(+0x184458)[0x5606f576e458]
[harma-ThinkStation-P620:2172546] [24] python3(+0x15fe0e)[0x5606f5749e0e]
[harma-ThinkStation-P620:2172546] [25] python3(PyObject_Call+0xbb)[0x5606f575912b]
[harma-ThinkStation-P620:2172546] [26] python3(_PyEval_EvalFrameDefault+0x2b71)[0x5606f57352c1]
[harma-ThinkStation-P620:2172546] [27] python3(_PyFunction_Vectorcall+0x7c)[0x5606f574a70c]
[harma-ThinkStation-P620:2172546] [28] python3(_PyEval_EvalFrameDefault+0x6bd)[0x5606f5732e0d]
[harma-ThinkStation-P620:2172546] [29] python3(_PyFunction_Vectorcall+0x7c)[0x5606f574a70c]
[harma-ThinkStation-P620:2172546] *** End of error message ***
Aborted (core dumped)

On

TypeError: BoundingBoxTree.__init__() takes 2 positional arguments but 3 were given

see

also discussed in

just a few days ago.

On

[harma-ThinkStation-P620:2172546] [ 5] /lib/x86_64-linux-gnu/libhdf5_openmpi.so.103(H5check_version+0x21)[0x7f670c43f6d1]
[harma-ThinkStation-P620:2172546] [ 6] /home/harma/.local/lib/python3.10/site-packages/h5py/h5f.cpython-310-x86_64-linux-gnu.so(+0x77d0)[0x7f65fb0da7d0]

it seems to me that you are using libhdf5_openmpi.so.103 from debian/ubuntu. You are better off using h5py from debian/ubuntu too, rather than pip installing a (possibly incompatible version) in $HOME/.local.

Thanks for your help!!!

About h5py there was indeed a conflict. The issue was solved with

pip uninstall h5py
sudo apt-get update
sudo apt-get install python3-h5py

Now for the bouncing box error the fix were the changes

# bb_tree = geometry.BoundingBoxTree(mesh, mesh.topology.dim) # Older
bb_tree = geometry.bb_tree(mesh, mesh.topology.dim)

and

# cell_candidates = geometry.compute_collisions(bb_tree, points) # Older
cell_candidates = geometry.compute_collisions_points(bb_tree, points)

were enough.
I still not get what is the new way to create a 2d or 3d vector function space. From what I see in the forums seems that

V = dolfinx.fem.VectorFunctionSpace(mesh, ("Lagrange", 1))

should change to

element = ufl.VectorElement("Lagrange", mesh.ufl_cell(), 1)
V = dolfinx.fem.functionspace(mesh, element)

It seems to do the trick but I am not 100% sure.

Anyway, after the corrections there seems to be another issue, hopefully minor. I have a function that calculates the derivatives of each component of a 3d vector field

def derivatives(length,domain,points,uh_x,uh_y,uh_z):
 grad_Bx = ufl.grad(uh_x)
 grad_By = ufl.grad(uh_y)
 grad_Bz = ufl.grad(uh_z)
 gdim = domain.geometry.dim
 tdim = domain.topology.dim
 from dolfinx import geometry
 bb = geometry.bb_tree(domain, tdim)
 # Find colliding cells on proc
 closest_cell = []
 local_map = []
 cells = geometry.compute_collisions_points(bb, points)
 actual_cells = geometry.compute_colliding_cells(domain, cells, points)
 for i in range(actual_cells.num_nodes):
      if len(actual_cells.links(i)) > 0:
           local_map.append(i)
           closest_cell.append(actual_cells.links(i)[0])
 num_dofs_x = domain.geometry.dofmap.links(0).size  # NOTE: Assumes same cell geometry in whole mesh
 t_imap = domain.topology.index_map(tdim)
 num_cells = t_imap.size_local + t_imap.num_ghosts
 x = domain.geometry.x
 x_dofs = domain.geometry.dofmap.array.reshape(num_cells, num_dofs_x)
 cell_geometry = np.zeros((num_dofs_x, gdim), dtype=np.float64)
 points_ref = np.zeros((len(local_map), tdim))
# Map cells on process back to reference element
 for i, cell in enumerate(closest_cell):
      cell_geometry[:] = x[x_dofs[cell], :gdim]
      point_ref = mesh.geometry.cmap.pull_back(points[local_map[i]][:gdim].reshape(1, -1), cell_geometry)
      points_ref[i] = point_ref
 # Eval using Expression
 expr_x = dolfinx.fem.Expression(grad_Bx, points_ref)
 expr_y = dolfinx.fem.Expression(grad_By, points_ref)
 expr_z = dolfinx.fem.Expression(grad_Bz, points_ref)
 dBx1 = expr_x.eval(closest_cell).reshape(len(closest_cell), points_ref.shape[0], gdim)
 dBy1 = expr_y.eval(closest_cell).reshape(len(closest_cell), points_ref.shape[0], gdim)
 dBz1 = expr_z.eval(closest_cell).reshape(len(closest_cell), points_ref.shape[0], gdim)
 dBx=[]; dBxdx1 = []; dBxdy1 = []; dBxdz1 = []; dBy=[]; dBydx1 = []; dBydy1 = []; dBydz1 = []; dBz=[]; dBzdx1 = []; dBzdy1 = []; dBzdz1 = []
 for i, cell in enumerate(closest_cell):
      point = points[local_map[i]]
      dBx.append(dBx1[i, i])
      dBy.append(dBy1[i, i])
      dBz.append(dBz1[i, i])  
 for ii in range(length):
  dBxdx1.append(dBx[ii][0])
  dBxdy1.append(dBx[ii][1])
  dBxdz1.append(dBx[ii][2])
  dBydx1.append(dBy[ii][0])
  dBydy1.append(dBy[ii][1])
  dBydz1.append(dBy[ii][2]) 
  dBzdx1.append(dBz[ii][0])
  dBzdy1.append(dBz[ii][1])
  dBzdz1.append(dBz[ii][2]) 
 dBxdx=np.array(dBxdx1)
 dBxdy=np.array(dBxdy1)
 dBxdz=np.array(dBxdz1)
 dBydx=np.array(dBydx1)
 dBydy=np.array(dBydy1)
 dBydz=np.array(dBydz1)
 dBzdx=np.array(dBzdx1)
 dBzdy=np.array(dBzdy1)
 dBzdz=np.array(dBzdz1)
 divergence_B=np.zeros(length)
 divergence_B=dBxdx+dBydy+dBzdz
 measure_B=np.zeros(length)
 mean_B_derivatives=np.zeros(length)
 div_to_B=np.zeros(length)
 measure_B = [math.sqrt(Bx_exp[i]**2.0+By_exp[i]**2.0+Bz_exp[i]**2.0) for i in range(length)]
 mean_B_derivatives=(dBxdx+dBxdy+dBxdz+dBydx+dBydy+dBydz+dBzdx+dBzdy+dBzdz)/9
 div_to_B=[abs(divergence_B[i]/measure_B[i]) for i in range(length)]
 return(dBxdx, dBxdy, dBxdz, dBydx, dBydy, dBydz, dBzdx, dBzdy, dBzdz, divergence_B, measure_B, mean_B_derivatives, div_to_B)

After the aforementioned changes to bouncing box there seems that an error has arisen

    num_dofs_x = domain.geometry.dofmap.links(0).size  # NOTE: Assumes same cell geometry in whole mesh
AttributeError: 'numpy.ndarray' object has no attribute 'links'

I would really appreciate if someone can help a bit more or even suggest a more elegant way to calculate the data from all the derivatives.

domain.geometry.dofmap is now a numpy array, what used to be domain.geometry.dofmap.links(0).size should be now domain.geometry.dofmap.shape[1], with the same assumption in your NOTE:.

Thanks!

Hopefully, a final point. I changed

# domain.geometry.dofmap.links(0).size to
num_dofs_x = domain.geometry.dofmap.shape[1]

and

# x_dofs = domain.geometry.dofmap.array.reshape(num_cells, num_dofs_x) to
x_dofs = domain.geometry.dofmap.reshape(num_cells, num_dofs_x)

However, I am not sure how to change

point_ref = mesh.geometry.cmap.pull_back(points[local_map[i]][:gdim].reshape(1, -1), cell_geometry)

cmap seems to have changed to cmaps and the object is now a list?

Use the first entry of that list. For now it contains only one map. However, as there is a plan to support mixed celltype meshes there might be more at a future point

Thanks for everything @francesco-ballarin and @dokken . All my compatibility issues seem to have been resolved!