Explicitly assembling matrices with dolfinx_mpc

Hi all,
When setting up an eigenvalue problem with periodic boundary conditions, I am having trouble assembling the matrices of the eigenvalue problem using MPC. The failure occurs at matrix assembly. Here is my code:

import sys
import numpy as np
from mpi4py import MPI as nMPI
from petsc4py import PETSc
from slepc4py import SLEPc
import meshio
import ufl
from dolfinx.mesh import locate_entities_boundary, meshtags
from dolfinx.io import gmshio
from dolfinx import fem
from dolfinx import io
import dolfinx_mpc
import gmsh

l = 6.0 # Cell length
r = 6.0 # Cell radius
rh = 2.0 # Helix radius
rw = 0.2 # wire radius
npts = 100 # number of points of spline curve
eps = 1.0e-5

Gamma = 0 + 6.283j

BCweight = 100.0

comm = nMPI.COMM_WORLD
mpiRank = comm.rank
modelRank = 0 # process where Gmsh runs

if mpiRank == modelRank:

    gmsh.initialize()
    gmsh.option.setNumber('General.Terminal', 1)
    gmsh.model.add("Helix Cell")

    c = gmsh.model.occ.addCylinder(0, 0, 0, 0, 0, l, r, 1, 2*np.pi)

    dt = 2 * np.pi / npts
    p = []
    for n in range(npts+3):
        theta = 2 * np.pi * (n-1) / npts
        kk = gmsh.model.occ.addCircle(rh * np.cos(theta), rh * np.sin(theta), (n -1) * l / npts, rw)
        p.append(gmsh.model.occ.addCurveLoop([kk], tag=-1))
    print(p)
    s = gmsh.model.occ.addThruSections(p, tag=-1)
    print(s)
    v = gmsh.model.occ.cut([(3,c)], s, tag=100, removeObject=True, removeTool=True)
    print("v={}".format(v))
    for n in range(npts+3):
        gmsh.model.occ.remove([(1,p[n])], recursive=True)

    gmsh.model.occ.synchronize()

    bb = gmsh.model.getEntities(dim=3)
    pt = gmsh.model.getBoundary(bb, combined=True, oriented=False, recursive=False)
    print(pt)
# Boundary conditions
    Bot = []
    Top = []
    PEC = []
    for bb in pt:
        CoM = gmsh.model.occ.getCenterOfMass(bb[0], bb[1])
        print(CoM)
        if(np.abs(CoM[2]) < eps):
            Bot.append(bb[1])
        elif(np.abs(CoM[2]-l) < eps):
            Top.append(bb[1])
        else:
            PEC.append(bb[1])

    print(Bot, Top, PEC)
    gmsh.model.mesh.setPeriodic(2, Top, Bot, [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, l, 0, 0, 0, 1])

    gmsh.model.addPhysicalGroup(3, [100], 1) # Helix volume
    gmsh.model.setPhysicalName(3, 1, "HelixVolume")
    gmsh.model.addPhysicalGroup(2, Top, 1) # Top BC (slave periodic BC)
    gmsh.model.addPhysicalGroup(2, Bot, 2) # Bottom BC (master periodic BC)
    gmsh.model.addPhysicalGroup(2, PEC, 3) # PEC boundary
    gmsh.model.setPhysicalName(2, 1, "TopPBC")
    gmsh.model.setPhysicalName(2, 2, "BotPBC")
    gmsh.model.setPhysicalName(2, 3, "PEC")

    gmsh.model.mesh.setSize([(0,108),(0,109)], 0.05)
    gmsh.option.setNumber('Mesh.MeshSizeMin', 0.001)
    gmsh.option.setNumber('Mesh.MeshSizeMax', 1.2)
    gmsh.option.setNumber('Mesh.Algorithm', 6) #1=Mesh Adapt, 2=Auto, 3=Initial mesh only, 5=Delaunay, 6=Frontal-Delaunay
    gmsh.option.setNumber('Mesh.MshFileVersion', 2.2)
    gmsh.option.setNumber('Mesh.Format', 1)
    gmsh.option.setNumber('Mesh.MinimumCirclePoints', 50)
    gmsh.option.setNumber('Mesh.CharacteristicLengthFromCurvature', 1)
    gmsh.model.mesh.generate(3)
    #gmsh.fltk.run()

# mesh = 3d topology
# ct = volume tags
# fm = surface tags
mesh, ct, fm = gmshio.model_to_mesh(gmsh.model, comm, modelRank, gdim=3)
if modelRank == mpiRank:
    gmsh.finalize()

mesh.topology.create_connectivity(mesh.topology.dim-1, mesh.topology.dim)
V = fem.VectorFunctionSpace(mesh, ("CG", 2))
u = ufl.TrialFunction(V)
v = ufl.TestFunction(V)

# Print out BC tags
with io.XDMFFile(mesh.comm, "BCs.xdmf", "w") as xx:
    xx.write_mesh(mesh)
    xx.write_meshtags(fm)

n = ufl.FacetNormal(mesh)
ds = ufl.Measure("ds", domain=mesh, subdomain_data=fm)

# Periodic BC mapping
def PeriodicMapping(x):
    out = np.zeros(x.shape)
    out[0] = x[0]
    out[1] = x[1]
    out[2] = x[2] + l
    return out

# Dirichlet BCs using weak imposition (penalty term)
Lb = BCweight * ufl.inner(ufl.cross(n, u), ufl.cross(n, v)) * ds(3)
bc=[]
# Create multi-point constraint.  Master boundary has tag 2,
# slave boundary has tag 1 - set on periodic constraint.
# (see Gmsh physical group definitions above)
mpc = dolfinx_mpc.MultiPointConstraint(V)
mpc.create_periodic_constraint_topological(V, fm, 1, PeriodicMapping, bc, 1.0)
mpc.finalize()

# Set up forms for eigenvalue problem Ax -k0^2*Bx = 0
az = ufl.as_vector((0, 0, 1)) # Z-directed unit vector
a = fem.form(ufl.inner(Gamma * ufl.cross(az, u) + ufl.curl(u), Gamma * ufl.cross(az, v) + ufl.curl(v)) * ufl.dx + Lb)
b = fem.form(ufl.inner(u, v) * ufl.dx)

num_dofs_local = V.dofmap.index_map.size_local * V.dofmap.index_map_bs
num_dofs_global = V.dofmap.index_map.size_global * V.dofmap.index_map_bs
print(f"Number of dofs (owned) by rank {mpiRank}: {num_dofs_local}")
if mpiRank == modelRank:
    print(f"Number of dofs global: {num_dofs_global}")
A = dolfinx_mpc.assemble_matrix(a, mpc, bcs=bc, diagval=1.0)
B = dolfinx_mpc.assemble_matrix(b, mpc, bcs=bc, diagval=1.0)

eigs = SLEPc.EPS().create(mesh.comm)
eigs.setOperators(A, B)
eigs.setProblemType(SLEPc.EPS.ProblemType.GNHEP) # General non-Hermitian
eigs.setTolerances(1.0e-8)
eigs.setType(SLEPc.EPS.Type.KRYLOVSCHUR)
st = eigs.getST() # Invoke spectral transformation
st.setType(SLEPc.ST.Type.SINVERT)
eigs.setWhichEigenpairs(SLEPc.EPS.Which.TARGET_REAL) # target real part of eigenvalue
eigs.setTarget(-1.0)
eigs.setDimensions(nev=5) # Number of eigenvalues
eigs.solve()
eigs.view()
eigs.errorView()

sys.exit(0)

I get the following error:

  File "/home/bill/Cluster/Fenics2020/Helix/HelicalSection3.py", line 143, in <module>
    A = dolfinx_mpc.assemble_matrix(a, mpc, bcs=bc, diagval=1.0)
  File "/usr/lib/python3/dist-packages/dolfinx_mpc/assemble_matrix.py", line 51, in assemble_matrix
    A = cpp.mpc.create_matrix(form, constraint[0]._cpp_object,
TypeError: create_matrix(): incompatible function arguments. The following argument types are supported:
    1. (arg0: dolfinx.cpp.fem.Form_float64, arg1: dolfinx_mpc.cpp.mpc.MultiPointConstraint) -> mat
    2. (arg0: dolfinx.cpp.fem.Form_float64, arg1: dolfinx_mpc.cpp.mpc.MultiPointConstraint, arg2: dolfinx_mpc.cpp.mpc.MultiPointConstraint) -> mat

It seems I am doing something wrong in invoking the MPC. What am I missing?

I cannot generate your mesh

Info    : Done optimizing mesh (Wall 0.181664s, CPU 0.177616s)
Info    : Reconstructing periodicity for curve connection 107 - 109
Info    : Reconstructing periodicity for curve connection 110 - 111
Error   : Cannot find periodic counterpart of node 2732 of curve 110 on curve 111
Traceback (most recent call last):
  File "/root/shared/mwe123.py", line 91, in <module>
    gmsh.model.mesh.generate(3)
  File "/usr/local/lib/gmsh.py", line 1973, in generate
    raise Exception(logger.getLastError())
Exception: Cannot find periodic counterpart of node 2732 of curve 110 on curve 111

when using

docker run -ti --network=host -e DISPLAY=$DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix -v $(pwd):/root/shared -w /root/shared --rm --shm-size=512m  ghcr.io/jorgensd/dolfinx_mpc:v0.6.1.post1

Strange - it works for me.

Edit: It is probably not the tag numbers, but here is a version of the code with cleaner handling of the mesh density definition, which removes the dependence on hard-coded tag numbers.

import sys
import numpy as np
from mpi4py import MPI as nMPI
from petsc4py import PETSc
from slepc4py import SLEPc
import meshio
import ufl
from dolfinx.mesh import locate_entities_boundary, meshtags
from dolfinx.io import gmshio
from dolfinx import fem
from dolfinx import io
import dolfinx_mpc
import gmsh

l = 6.0 # Cell length
r = 6.0 # Cell radius
rh = 2.0 # Helix radius
rw = 0.2 # wire radius
npts = 100 # number of points of spline curve
eps = 1.0e-5

Gamma = 0 + 6.283j

BCweight = 100.0

comm = nMPI.COMM_WORLD
mpiRank = comm.rank
modelRank = 0 # process where Gmsh runs

if mpiRank == modelRank:

    gmsh.initialize()
    gmsh.option.setNumber('General.Terminal', 1)
    gmsh.model.add("Helix Cell")

    c = gmsh.model.occ.addCylinder(0, 0, 0, 0, 0, l, r, 1, 2*np.pi)

    dt = 2 * np.pi / npts
    p = []
    for n in range(npts+3):
        theta = 2 * np.pi * (n-1) / npts
        kk = gmsh.model.occ.addCircle(rh * np.cos(theta), rh * np.sin(theta), (n -1) * l / npts, rw)
        p.append(gmsh.model.occ.addCurveLoop([kk], tag=-1))
    print(p)
    s = gmsh.model.occ.addThruSections(p, tag=-1)
    print(s)
    v = gmsh.model.occ.cut([(3,c)], s, tag=100, removeObject=True, removeTool=True)
    print("v={}".format(v))
    for n in range(npts+3):
        gmsh.model.occ.remove([(1,p[n])], recursive=True)

    gmsh.model.occ.synchronize()

    bb = gmsh.model.getEntities(dim=3)
    pt = gmsh.model.getBoundary(bb, combined=True, oriented=False, recursive=False)
    print(pt)
# Boundary conditions
    Bot = []
    Top = []
    PEC = []
    for bb in pt:
        CoM = gmsh.model.occ.getCenterOfMass(bb[0], bb[1])
        print(CoM)
        if(np.abs(CoM[2]) < eps):
            Bot.append(bb[1])
        elif(np.abs(CoM[2]-l) < eps):
            Top.append(bb[1])
        else:
            PEC.append(bb[1])

    print(Bot, Top, PEC)
    gmsh.model.mesh.setPeriodic(2, Top, Bot, [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, l, 0, 0, 0, 1])

    gmsh.model.addPhysicalGroup(3, [100], 1) # Helix volume
    gmsh.model.setPhysicalName(3, 1, "HelixVolume")
    gmsh.model.addPhysicalGroup(2, Top, 1) # Top BC (slave periodic BC)
    gmsh.model.addPhysicalGroup(2, Bot, 2) # Bottom BC (master periodic BC)
    gmsh.model.addPhysicalGroup(2, PEC, 3) # PEC boundary
    gmsh.model.setPhysicalName(2, 1, "TopPBC")
    gmsh.model.setPhysicalName(2, 2, "BotPBC")
    gmsh.model.setPhysicalName(2, 3, "PEC")
    pt = gmsh.model.getEntitiesInBoundingBox(-eps-rh-rw, -rh-rw-eps, -eps, rh+rw+eps, rh+rw+eps, eps)
    gmsh.model.mesh.setSize(pt, 0.05)
    print(pt)
    pt = gmsh.model.getEntitiesInBoundingBox(-eps-rh-rw, -rh-rw-eps, l-eps, rh+rw+eps, rh+rw+eps, l+eps)
    gmsh.model.mesh.setSize(pt, 0.05)
    print(pt)

    #gmsh.model.mesh.setSize([(0,108),(0,109)], 0.05)
    gmsh.option.setNumber('Mesh.MeshSizeMin', 0.001)
    gmsh.option.setNumber('Mesh.MeshSizeMax', 1.2)
    gmsh.option.setNumber('Mesh.Algorithm', 6) #1=Mesh Adapt, 2=Auto, 3=Initial mesh only, 5=Delaunay, 6=Frontal-Delaunay
    gmsh.option.setNumber('Mesh.MshFileVersion', 2.2)
    gmsh.option.setNumber('Mesh.Format', 1)
    gmsh.option.setNumber('Mesh.MinimumCirclePoints', 50)
    gmsh.option.setNumber('Mesh.CharacteristicLengthFromCurvature', 1)
    gmsh.model.mesh.generate(3)
    gmsh.fltk.run()

# mesh = 3d topology
# ct = volume tags
# fm = surface tags
mesh, ct, fm = gmshio.model_to_mesh(gmsh.model, comm, modelRank, gdim=3)
if modelRank == mpiRank:
    gmsh.finalize()

mesh.topology.create_connectivity(mesh.topology.dim-1, mesh.topology.dim)
V = fem.VectorFunctionSpace(mesh, ("CG", 2))
u = ufl.TrialFunction(V)
v = ufl.TestFunction(V)

# Print out BC tags
with io.XDMFFile(mesh.comm, "BCs.xdmf", "w") as xx:
    xx.write_mesh(mesh)
    xx.write_meshtags(fm)

n = ufl.FacetNormal(mesh)
ds = ufl.Measure("ds", domain=mesh, subdomain_data=fm)

# Periodic BC mapping
def PeriodicMapping(x):
    out = np.zeros(x.shape)
    out[0] = x[0]
    out[1] = x[1]
    out[2] = x[2] + l
    return out

# Dirichlet BCs using weak imposition (penalty term)
Lb = BCweight * ufl.inner(ufl.cross(n, u), ufl.cross(n, v)) * ds(3)
bc=[]
# Create multi-point constraint.  Master boundary has tag 2,
# slave boundary has tag 1 - set on periodic constraint.
# (see Gmsh physical group definitions above)
mpc = dolfinx_mpc.MultiPointConstraint(V)
mpc.create_periodic_constraint_topological(V, fm, 1, PeriodicMapping, bc, 1.0)
mpc.finalize()

# Set up forms for eigenvalue problem Ax -k0^2*Bx = 0
az = ufl.as_vector((0, 0, 1)) # Z-directed unit vector
a = fem.form(ufl.inner(Gamma * ufl.cross(az, u) + ufl.curl(u), Gamma * ufl.cross(az, v) + ufl.curl(v)) * ufl.dx + Lb)
b = fem.form(ufl.inner(u, v) * ufl.dx)

num_dofs_local = V.dofmap.index_map.size_local * V.dofmap.index_map_bs
num_dofs_global = V.dofmap.index_map.size_global * V.dofmap.index_map_bs
print(f"Number of dofs (owned) by rank {mpiRank}: {num_dofs_local}")
if mpiRank == modelRank:
    print(f"Number of dofs global: {num_dofs_global}")
A = dolfinx_mpc.assemble_matrix(a, mpc, bcs=bc, diagval=1.0)
B = dolfinx_mpc.assemble_matrix(b, mpc, bcs=bc, diagval=1.0)

eigs = SLEPc.EPS().create(mesh.comm)
eigs.setOperators(A, B)
eigs.setProblemType(SLEPc.EPS.ProblemType.GNHEP) # General non-Hermitian
eigs.setTolerances(1.0e-8)
eigs.setType(SLEPc.EPS.Type.KRYLOVSCHUR)
st = eigs.getST() # Invoke spectral transformation
st.setType(SLEPc.ST.Type.SINVERT)
eigs.setWhichEigenpairs(SLEPc.EPS.Which.TARGET_REAL) # target real part of eigenvalue
eigs.setTarget(-1.0)
eigs.setDimensions(nev=5) # Number of eigenvalues
eigs.solve()
eigs.view()
eigs.errorView()

sys.exit(0)

There could also be a problem caused by differences in mesh size. You can try
replacing
gmsh.option.setNumber('Mesh.MeshSizeMax', 1.2)
with
gmsh.option.setNumber('Mesh.MeshSizeMax', 0.8)

Are you running DOLFINx/DOLFINx_mpc in complex mode (i.e. with complex PETSc enabled)?
What is the output of

python3 -c "from petsc4py import PETSc; print(PETSc.ScalarType)"

As Gamma is a complex coefficient, you need PETSc built with complex support.
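
If PETSc is real, the assembly will fail as soon as the complex coefficient is used. A minimal sketch of a runtime guard you could put at the top of the script (it only assumes numpy and petsc4py, nothing specific to dolfinx_mpc):

import sys
import numpy as np
from petsc4py import PETSc

# Abort early if PETSc was built without complex-number support,
# since the form coefficient Gamma is complex.
if not np.issubdtype(PETSc.ScalarType, np.complexfloating):
    sys.exit("PETSc scalar type is real; a complex build of PETSc/DOLFINx/dolfinx_mpc is required.")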

For your current code I get to the LU factorization, where I run out of memory on my laptop:

import sys
import numpy as np
from mpi4py import MPI as nMPI
from petsc4py import PETSc
from slepc4py import SLEPc
import ufl
from dolfinx.io import gmshio
from dolfinx import fem
from dolfinx import io
import dolfinx_mpc
import gmsh

l = 6.0 # Cell length
r = 6.0 # Cell radius
rh = 2.0 # Helix radius
rw = 0.2 # wire radius
npts = 100 # number of points of spline curve
eps = 1.0e-5

Gamma = 0 + 6.283j

BCweight = 100.0

comm = nMPI.COMM_WORLD
mpiRank = comm.rank
modelRank = 0 # process where Gmsh runs

if mpiRank == modelRank:

    gmsh.initialize()
    gmsh.option.setNumber('General.Terminal', 1)
    gmsh.model.add("Helix Cell")

    c = gmsh.model.occ.addCylinder(0, 0, 0, 0, 0, l, r, 1, 2*np.pi)

    dt = 2 * np.pi / npts
    p = []
    for n in range(npts+3):
        theta = 2 * np.pi * (n-1) / npts
        kk = gmsh.model.occ.addCircle(rh * np.cos(theta), rh * np.sin(theta), (n -1) * l / npts, rw)
        p.append(gmsh.model.occ.addCurveLoop([kk], tag=-1))
    print(p)
    s = gmsh.model.occ.addThruSections(p, tag=-1)
    print(s)
    v = gmsh.model.occ.cut([(3,c)], s, tag=100, removeObject=True, removeTool=True)
    print("v={}".format(v))
    for n in range(npts+3):
        gmsh.model.occ.remove([(1,p[n])], recursive=True)

    gmsh.model.occ.synchronize()

    bb = gmsh.model.getEntities(dim=3)
    pt = gmsh.model.getBoundary(bb, combined=True, oriented=False, recursive=False)
    print(pt)
# Boundary conditions
    Bot = []
    Top = []
    PEC = []
    for bb in pt:
        CoM = gmsh.model.occ.getCenterOfMass(bb[0], bb[1])
        print(CoM)
        if(np.abs(CoM[2]) < eps):
            Bot.append(bb[1])
        elif(np.abs(CoM[2]-l) < eps):
            Top.append(bb[1])
        else:
            PEC.append(bb[1])

    print(Bot, Top, PEC)
    gmsh.model.mesh.setPeriodic(2, Top, Bot, [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, l, 0, 0, 0, 1])

    gmsh.model.addPhysicalGroup(3, [100], 1) # Helix volume
    gmsh.model.setPhysicalName(3, 1, "HelixVolume")
    gmsh.model.addPhysicalGroup(2, Top, 1) # Top BC (slave periodic BC)
    gmsh.model.addPhysicalGroup(2, Bot, 2) # Bottom BC (master periodic BC)
    gmsh.model.addPhysicalGroup(2, PEC, 3) # PEC boundary
    gmsh.model.setPhysicalName(2, 1, "TopPBC")
    gmsh.model.setPhysicalName(2, 2, "BotPBC")
    gmsh.model.setPhysicalName(2, 3, "PEC")
    pt = gmsh.model.getEntitiesInBoundingBox(-eps-rh-rw, -rh-rw-eps, -eps, rh+rw+eps, rh+rw+eps, eps)
    gmsh.model.mesh.setSize(pt, 0.05)
    print(pt)
    pt = gmsh.model.getEntitiesInBoundingBox(-eps-rh-rw, -rh-rw-eps, l-eps, rh+rw+eps, rh+rw+eps, l+eps)
    gmsh.model.mesh.setSize(pt, 0.05)
    print(pt)

    #gmsh.model.mesh.setSize([(0,108),(0,109)], 0.05)
    gmsh.option.setNumber('Mesh.MeshSizeMin', 0.001)
    gmsh.option.setNumber('Mesh.MeshSizeMax', 1.2)
    gmsh.option.setNumber('Mesh.Algorithm', 6) #1=Mesh Adapt, 2=Auto, 3=Initial mesh only, 5=Delaunay, 6=Frontal-Delaunay
    gmsh.option.setNumber('Mesh.MshFileVersion', 2.2)
    gmsh.option.setNumber('Mesh.Format', 1)
    gmsh.option.setNumber('Mesh.MinimumCirclePoints', 50)
    gmsh.option.setNumber('Mesh.CharacteristicLengthFromCurvature', 1)
    gmsh.model.mesh.generate(3)
    #gmsh.fltk.run()

# mesh = 3d topology
# ct = volume tags
# fm = surface tags
mesh, ct, fm = gmshio.model_to_mesh(gmsh.model, comm, modelRank, gdim=3)
if modelRank == mpiRank:
    gmsh.finalize()

mesh.topology.create_connectivity(mesh.topology.dim-1, mesh.topology.dim)
V = fem.VectorFunctionSpace(mesh, ("CG", 2))
u = ufl.TrialFunction(V)
v = ufl.TestFunction(V)

# Print out BC tags
with io.XDMFFile(mesh.comm, "BCs.xdmf", "w") as xx:
    xx.write_mesh(mesh)
    xx.write_meshtags(fm)

n = ufl.FacetNormal(mesh)
ds = ufl.Measure("ds", domain=mesh, subdomain_data=fm)

# Periodic BC mapping
def PeriodicMapping(x):
    out = np.zeros(x.shape)
    out[0] = x[0]
    out[1] = x[1]
    out[2] = x[2] + l
    return out

# Dirichlet BCs using weak imposition (penalty term)
Lb = BCweight * ufl.inner(ufl.cross(n, u), ufl.cross(n, v)) * ds(3)
bc=[]
# Create multi-point constraint.  Master boundary has tag 2,
# slave boundary has tag 1 - set on periodic constraint.
# (see Gmsh physical group definitions above)
mpc = dolfinx_mpc.MultiPointConstraint(V)
mpc.create_periodic_constraint_topological(V, fm, 1, PeriodicMapping, bc, 1.0)
mpc.finalize()

# Set up forms for eigenvalue problem Ax -k0^2*Bx = 0
az = ufl.as_vector((0, 0, 1)) # Z-directed unit vector
a = fem.form(ufl.inner(Gamma * ufl.cross(az, u) + ufl.curl(u), Gamma * ufl.cross(az, v) + ufl.curl(v)) * ufl.dx + Lb)
b = fem.form(ufl.inner(u, v) * ufl.dx)

num_dofs_local = V.dofmap.index_map.size_local * V.dofmap.index_map_bs
num_dofs_global = V.dofmap.index_map.size_global * V.dofmap.index_map_bs
print(f"Number of dofs (owned) by rank {mpiRank}: {num_dofs_local}")
if mpiRank == modelRank:
    print(f"Number of dofs global: {num_dofs_global}")
A = dolfinx_mpc.assemble_matrix(a, mpc, bcs=bc, diagval=1.0)
B = dolfinx_mpc.assemble_matrix(b, mpc, bcs=bc, diagval=1.0)

eigs = SLEPc.EPS().create(mesh.comm)
eigs.setOperators(A, B)
eigs.setProblemType(SLEPc.EPS.ProblemType.GNHEP) # General non-Hermitian
eigs.setTolerances(1.0e-8)
eigs.setType(SLEPc.EPS.Type.KRYLOVSCHUR)
st = eigs.getST() # Invoke spectral transformation
st.setType(SLEPc.ST.Type.SINVERT)
eigs.setWhichEigenpairs(SLEPc.EPS.Which.TARGET_REAL) # target real part of eigenvalue
eigs.setTarget(-1.0)
eigs.setDimensions(nev=5) # Number of eigenvalues
eigs.solve()
eigs.view()
eigs.errorView()

sys.exit(0)

Yes, I use complex mode. The output of the petsc4py query is <class 'numpy.complex128'>.
The code dies at matrix assembly on my desktop cluster running on 8 processes. I just tried it in single-process mode and it dies in the same way, at matrix assembly.

How did you install DOLFINx/DOLFINx_mpc?

As I said, I got to the LU factorization (i.e. past assembly) in the code above using the stated Docker image, and then sourcing dolfinx-complex-mode inside the container.

The error message you get indicates that DOLFINx_mpc has been built against the real (float64) version of PETSc, and consequently of DOLFINx. You can see this directly in the TypeError: the only accepted overloads of create_matrix take a dolfinx.cpp.fem.Form_float64, i.e. the real scalar type.

Could be!

I installed using the Ubuntu apt command

sudo apt install python3-dolfinx-mpc

Perhaps it installed the real version. How do I get it to install the complex version?

As far as I can tell, the binaries on Debian depend on the real PETSc (Debian -- Error).
@dparsons can probably confirm this.

Exactly so. The Debian package for dolfinx_mpc is experimental (new), so it's currently only provided for the real build, not the complex number build. Your complex Gamma parameter of course will need the complex number build.

You can confirm package dependencies with

apt-cache show python3-dolfinx-mpc libdolfinx-mpc*

python3-dolfinx-mpc currently depends on the real packages. In the future it will be replaced with python3-dolfinx-mpc-real and python3-dolfinx-mpc-complex enabling either version of the build.

Thanks for that.
Question: Should I revert to using legacy DOLFIN (2019) for solving this problem, or is it likely that the complex version of MPC will exist in the near future?

Legacy DOLFIN does not support complex numbers at all, so there's no gain in reverting to it. A complex MPC build should be ready mid-summer, if not sooner.


The complex build is available via conda and as a Docker image (the ghcr.io/jorgensd/dolfinx_mpc images mentioned earlier in this thread).


I tried to use Conda, but it wants to do a complete install of everything (breaking my already installed Dolfin, meshio, gmsh, etc.). Having never used Conda or Docker, what is the best way to just add the required package (dolfinx-mpc-complex) to my existing installation using one of these methods?

Conda creates a virtual environment where it installs everything, so it shouldn't break anything you have already installed.

If you have installed from apt, you can't, as dolfinx-mpc-complex relies on dolfinx-complex, and as Drew already mentioned, this is not available through apt.

If you install Ubuntu – Error and then build dolfinx_mpc from source (Release v0.4.0 · jorgensd/dolfinx_mpc · GitHub), it will work.

Docker creates a virtual system that doesn't use any of the packages on your system.
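
For example, the complex-mode workflow with the image used earlier in this thread would be to start the container with the same docker run command shown above (the ghcr.io/jorgensd/dolfinx_mpc:v0.6.1.post1 image) and then run source dolfinx-complex-mode inside the container before executing the script.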

I thought I had complex DOLFINx installed (because I can solve complex problems directly by setting the PETSc path environment variable appropriately).

Some functionality will work, as DOLFINx has tried to avoid direct dependencies on PETSc as much as possible.

As I've implemented dolfinx_mpc more or less by myself, I've not spent much time decoupling my C++ interface from PETSc, relying on the PETSc scalar type for setting up matrices.

If you set PETSC_DIR and PETSC_ARCH correctly, you should be able to build dolfinx_mpc on top of the apt package.
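
For instance, before configuring the build you can check which PETSc the environment points at (a minimal sketch; it only assumes petsc4py is importable in that environment):

import os
from petsc4py import PETSc

# PETSC_DIR/PETSC_ARCH are the environment variables referred to above.
print("PETSC_DIR  =", os.environ.get("PETSC_DIR"))
print("PETSC_ARCH =", os.environ.get("PETSC_ARCH"))
print("Scalar type:", PETSc.ScalarType)  # numpy.complex128 for a complex build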

Hi @Dokken

If you set PETSC_DIR and PETSC_ARCH correctly, you should be able to build dolfinx_mpc on top of the apt package.

I downloaded Release 0.4.0, ran "cmake" as described in the README, and got the following error.

-- Asking Python module FFCX for location of UFC...
CMake Error at CMakeLists.txt:93 (find_package):
  Could not find a configuration file for package "DOLFINX" that is
  compatible with requested version "0.5.1".

  The following configuration files were considered but not accepted:

    /usr/lib/x86_64-linux-gnu/cmake/dolfinx/DOLFINXConfig.cmake, version: 0.5.2
    /lib/x86_64-linux-gnu/cmake/dolfinx/DOLFINXConfig.cmake, version: 0.5.2



-- Configuring incomplete, errors occurred!

I then found version 0.5.0 on GitHub, tried that, and got

-- Asking Python module FFCX for location of UFC...
CMake Error at CMakeLists.txt:93 (find_package):
  Could not find a configuration file for package "DOLFINX" that is
  compatible with requested version "0.5.0".

  The following configuration files were considered but not accepted:

    /usr/lib/x86_64-linux-gnu/cmake/dolfinx/DOLFINXConfig.cmake, version: 0.5.2
    /lib/x86_64-linux-gnu/cmake/dolfinx/DOLFINXConfig.cmake, version: 0.5.2



-- Configuring incomplete, errors occurred!

It seems my DOLFINx installation is version 0.5.2 and CMake chokes on this. Will this version of dolfinx_mpc work with DOLFINx 0.5.2 if I change something in a CMake config file (e.g. make DOLFINx 0.5.2 acceptable for MPC version 0.5.0)?

You should try with: Release v0.5.0.post0 · jorgensd/dolfinx_mpc · GitHub

I get the same type of error: it requests compatibility with version 0.5.1 and does not accept the 0.5.2 configuration files.

-- The C compiler identification is GNU 11.3.0
-- The CXX compiler identification is GNU 11.3.0
-- Detecting C compiler ABI info
-- Detecting C compiler ABI info - done
-- Check for working C compiler: /usr/bin/cc - skipped
-- Detecting C compile features
-- Detecting C compile features - done
-- Detecting CXX compiler ABI info
-- Detecting CXX compiler ABI info - done
-- Check for working CXX compiler: /usr/bin/c++ - skipped
-- Detecting CXX compile features
-- Detecting CXX compile features - done
-- Asking Python module FFCX for location of UFC...
-- Found PythonInterp: /usr/bin/python3 (found suitable version "3.10.6", minimum required is "3") 
-- Found UFC: /usr/lib/python3/dist-packages/ffcx/codegeneration  
CMake Error at CMakeLists.txt:93 (find_package):
  Could not find a configuration file for package "DOLFINX" that is
  compatible with requested version "0.5.1".

  The following configuration files were considered but not accepted:

    /usr/lib/x86_64-linux-gnu/cmake/dolfinx/DOLFINXConfig.cmake, version: 0.5.2
    /lib/x86_64-linux-gnu/cmake/dolfinx/DOLFINXConfig.cmake, version: 0.5.2

I see, I never made a release compatible with 0.5.2 as it didn't change anything in the API: Release v0.5.2 · FEniCS/dolfinx · GitHub
I'll see if I can patch up a release for this tonight.