CompileError when defining problem in mixed space

Hello,

I am receiving the error:

VerificationError: CompileError: command '/usr/bin/x86_64-linux-gnu-gcc' failed with exit code 1

when I try to define my problem. I am modeling a reaction-diffusion equation and have done it successfully with one variable, but it fails when I go to a mixed space. All of my code runs fine until I define “problem = …”. Here’s how I set it up:

from mpi4py import MPI
from dolfinx import fem, io, nls, log, mesh, plot
import numpy as np
import ufl
from petsc4py.PETSc import ScalarType
import matplotlib.pyplot as plt

L = 5e-3
D = 6.591637557240176e-06
k = 0.12
C_0 = 0.00133
t_final = 60*60*24
t_step = 1
N_0 = 20

domain = mesh.create_interval(MPI.COMM_WORLD, 100, [0, L])
P1 = ufl.FiniteElement('CG', domain.ufl_cell(), 1)
element = ufl.MixedElement([P1, P1])
V = fem.FunctionSpace(domain, element)

# test functions
v1, v2 = ufl.TestFunctions(V)
# fields to solve for
u = fem.Function(V)
u1, u2 = u.split()
# concentrations from previous timestep
u_n = fem.Function(V)
u_n1, u_n2 = u_n.split()

# wrap the timestep in a Constant defined on the mesh
dt = fem.Constant(domain, ScalarType(t_step))

# turn floats to Constant objects
D_ = fem.Constant(domain, ScalarType(D))
k_ = fem.Constant(domain, ScalarType(k))

# set initial conditions
u1.vector.array = C_0 * np.ones(u1.vector.array.shape)
u2.vector.array = N_0 * np.ones(u2.vector.array.shape)

# function determining if a node is on the tray top
def on_top_boundary(x):
    return np.isclose(x[0], L)

# fetch subspace for just u1
V0, submap = V.sub(0).collapse()
# determine boundary DOFs
boundary_dofs = fem.locate_dofs_geometrical((V.sub(0), V0), on_top_boundary)
# apply dirichlet BC to boundary DOFs
bc = fem.dirichletbc(ScalarType(C_0), boundary_dofs[0], V.sub(0))

F = (1/dt)*(u1 - u_n1)*v1*ufl.dx + \
    (1/dt)*(u2 - u_n2)*v2*ufl.dx + \
    D_*ufl.inner(ufl.grad(u1), ufl.grad(v1))*ufl.dx + \
    k_*u1*u2*v1*ufl.dx + \
    k_*u1*u2*v2*ufl.dx

problem = fem.petsc.NonlinearProblem(F, u, bcs=[bc])
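
For reference, the system this F is meant to encode is (backward Euler in time, with zero-flux conditions wherever no Dirichlet BC is applied):

$\partial u_1/\partial t = D\,\nabla^2 u_1 - k\,u_1 u_2$
$\partial u_2/\partial t = -k\,u_1 u_2$

with $u_1 = C_0$ on the top boundary $x = L$.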

The full error it spits out is:

libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c: In function ‘functionspace_form_libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539_0’:
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1076:59: error: expected ‘=’, ‘,’, ‘;’, ‘asm’ or ‘__attribute__’ before ‘-’ token
 1076 | static ufcx_function_space functionspace_f_140433230527424-1 =
      |                                                           ^
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1077:1: error: expected expression before ‘{’ token
 1077 | {
      | ^
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1085:59: error: expected ‘=’, ‘,’, ‘;’, ‘asm’ or ‘__attribute__’ before ‘-’ token
 1085 | static ufcx_function_space functionspace_f_140433230524672-1 =
      |                                                           ^
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1086:1: error: expected expression before ‘{’ token
 1086 | {
      | ^
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1094:59: error: expected ‘=’, ‘,’, ‘;’, ‘asm’ or ‘__attribute__’ before ‘-’ token
 1094 | static ufcx_function_space functionspace_f_140433230524672-0 =
      |                                                           ^
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1095:1: error: expected expression before ‘{’ token
 1095 | {
      | ^
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1103:59: error: expected ‘=’, ‘,’, ‘;’, ‘asm’ or ‘__attribute__’ before ‘-’ token
 1103 | static ufcx_function_space functionspace_f_140433230527424-0 =
      |                                                           ^
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1104:1: error: expected expression before ‘{’ token
 1104 | {
      | ^
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1115:11: error: ‘functionspace_f_140433230527424’ undeclared (first use in this function)
 1115 |   return &functionspace_f_140433230527424-1;
      |           ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1115:11: note: each undeclared identifier is reported only once for each function it appears in
libffcx_forms_54c4dce6faad875e8265b909b02c0945426b6539.c:1117:11: error: ‘functionspace_f_140433230524672’ undeclared (first use in this function)
 1117 |   return &functionspace_f_140433230524672-1;
      |           ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

---------------------------------------------------------------------------
DistutilsExecError                        Traceback (most recent call last)
File /usr/lib/python3.9/distutils/unixccompiler.py:117, in UnixCCompiler._compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts)
    116 try:
--> 117     self.spawn(compiler_so + cc_args + [src, '-o', obj] +
    118                extra_postargs)
    119 except DistutilsExecError as msg:

File /usr/lib/python3.9/distutils/ccompiler.py:910, in CCompiler.spawn(self, cmd)
    909 def spawn(self, cmd):
--> 910     spawn(cmd, dry_run=self.dry_run)

File /usr/lib/python3.9/distutils/spawn.py:91, in spawn(cmd, search_path, verbose, dry_run)
     90     cmd = cmd[0]
---> 91 raise DistutilsExecError(
     92       "command %r failed with exit code %s" % (cmd, exitcode))

DistutilsExecError: command '/usr/bin/x86_64-linux-gnu-gcc' failed with exit code 1

During handling of the above exception, another exception occurred:

CompileError                              Traceback (most recent call last)
File /usr/local/lib/python3.9/dist-packages/cffi/ffiplatform.py:51, in _build(tmpdir, ext, compiler_verbose, debug)
     50 distutils.log.set_verbosity(compiler_verbose)
---> 51 dist.run_command('build_ext')
     52 cmd_obj = dist.get_command_obj('build_ext')

File /usr/lib/python3.9/distutils/dist.py:985, in Distribution.run_command(self, command)
    984 cmd_obj.ensure_finalized()
--> 985 cmd_obj.run()
    986 self.have_run[command] = 1

File /usr/lib/python3.9/distutils/command/build_ext.py:340, in build_ext.run(self)
    339 # Now actually compile and link everything.
--> 340 self.build_extensions()

File /usr/lib/python3.9/distutils/command/build_ext.py:449, in build_ext.build_extensions(self)
    448 else:
--> 449     self._build_extensions_serial()

File /usr/lib/python3.9/distutils/command/build_ext.py:474, in build_ext._build_extensions_serial(self)
    473 with self._filter_build_errors(ext):
--> 474     self.build_extension(ext)

File /usr/lib/python3.9/distutils/command/build_ext.py:529, in build_ext.build_extension(self, ext)
    527     macros.append((undef,))
--> 529 objects = self.compiler.compile(sources,
    530                                  output_dir=self.build_temp,
    531                                  macros=macros,
    532                                  include_dirs=ext.include_dirs,
    533                                  debug=self.debug,
    534                                  extra_postargs=extra_args,
    535                                  depends=ext.depends)
    537 # XXX outdated variable, kept here in case third-part code
    538 # needs it.

File /usr/lib/python3.9/distutils/ccompiler.py:574, in CCompiler.compile(self, sources, output_dir, macros, include_dirs, debug, extra_preargs, extra_postargs, depends)
    573         continue
--> 574     self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
    576 # Return *all* object filenames, not just the ones we just built.

File /usr/lib/python3.9/distutils/unixccompiler.py:120, in UnixCCompiler._compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts)
    119 except DistutilsExecError as msg:
--> 120     raise CompileError(msg)

CompileError: command '/usr/bin/x86_64-linux-gnu-gcc' failed with exit code 1

During handling of the above exception, another exception occurred:

VerificationError                         Traceback (most recent call last)
Input In [151], in <cell line: 1>()
----> 1 problem = fem.petsc.NonlinearProblem(F, u, bcs=[bc])

File /usr/local/dolfinx-real/lib/python3.8/dist-packages/dolfinx/fem/petsc.py:616, in NonlinearProblem.__init__(self, F, u, bcs, J, form_compiler_params, jit_params)
    594 def __init__(self, F: ufl.form.Form, u: _Function, bcs: typing.List[DirichletBCMetaClass] = [],
    595              J: ufl.form.Form = None, form_compiler_params={}, jit_params={}):
    596     """Initialize solver for solving a non-linear problem using Newton's method, :math:`dF/du(u) du = -F(u)`.
    597 
    598     Args:
   (...)
    614 
    615     """
--> 616     self._L = _create_form(F, form_compiler_params=form_compiler_params,
    617                            jit_params=jit_params)
    619     # Create the Jacobian matrix, dF/du
    620     if J is None:

File /usr/local/dolfinx-real/lib/python3.8/dist-packages/dolfinx/fem/forms.py:139, in form(form, dtype, form_compiler_params, jit_params)
    136         return list(map(lambda sub_form: _create_form(sub_form), form))
    137     return form
--> 139 return _create_form(form)

File /usr/local/dolfinx-real/lib/python3.8/dist-packages/dolfinx/fem/forms.py:134, in form.<locals>._create_form(form)
    131 """Recursively convert ufl.Forms to dolfinx.fem.Form, otherwise
    132 return form argument"""
    133 if isinstance(form, ufl.Form):
--> 134     return _form(form)
    135 elif isinstance(form, collections.abc.Iterable):
    136     return list(map(lambda sub_form: _create_form(sub_form), form))

File /usr/local/dolfinx-real/lib/python3.8/dist-packages/dolfinx/fem/forms.py:108, in form.<locals>._form(form)
    105 if mesh is None:
    106     raise RuntimeError("Expecting to find a Mesh in the form.")
--> 108 ufcx_form, module, code = jit.ffcx_jit(mesh.comm, form,
    109                                        form_compiler_params=form_compiler_params,
    110                                        jit_params=jit_params)
    112 # For each argument in form extract its function space
    113 V = [arg.ufl_function_space()._cpp_object for arg in form.arguments()]

File /usr/local/dolfinx-real/lib/python3.8/dist-packages/dolfinx/jit.py:56, in mpi_jit_decorator.<locals>.mpi_jit(comm, *args, **kwargs)
     51 @functools.wraps(local_jit)
     52 def mpi_jit(comm, *args, **kwargs):
     53 
     54     # Just call JIT compiler when running in serial
     55     if comm.size == 1:
---> 56         return local_jit(*args, **kwargs)
     58     # Default status (0 == ok, 1 == fail)
     59     status = 0

File /usr/local/dolfinx-real/lib/python3.8/dist-packages/dolfinx/jit.py:204, in ffcx_jit(ufl_object, form_compiler_params, jit_params)
    202 # Switch on type and compile, returning cffi object
    203 if isinstance(ufl_object, ufl.Form):
--> 204     r = ffcx.codegeneration.jit.compile_forms([ufl_object], parameters=p_ffcx, **p_jit)
    205 elif isinstance(ufl_object, ufl.FiniteElementBase):
    206     r = ffcx.codegeneration.jit.compile_elements([ufl_object], parameters=p_ffcx, **p_jit)

File /usr/local/lib/python3.9/dist-packages/ffcx/codegeneration/jit.py:168, in compile_forms(forms, parameters, cache_dir, timeout, cffi_extra_compile_args, cffi_verbose, cffi_debug, cffi_libraries)
    165     for name in form_names:
    166         decl += form_template.format(name=name)
--> 168     impl = _compile_objects(decl, forms, form_names, module_name, p, cache_dir,
    169                             cffi_extra_compile_args, cffi_verbose, cffi_debug, cffi_libraries)
    170 except Exception:
    171     # remove c file so that it will not timeout next time
    172     c_filename = cache_dir.joinpath(module_name + ".c")

File /usr/local/lib/python3.9/dist-packages/ffcx/codegeneration/jit.py:252, in _compile_objects(decl, ufl_objects, object_names, module_name, parameters, cache_dir, cffi_extra_compile_args, cffi_verbose, cffi_debug, cffi_libraries)
    250 f = io.StringIO()
    251 with redirect_stdout(f):
--> 252     ffibuilder.compile(tmpdir=cache_dir, verbose=True, debug=cffi_debug)
    253 s = f.getvalue()
    254 if (cffi_verbose):

File /usr/local/lib/python3.9/dist-packages/cffi/api.py:725, in FFI.compile(self, tmpdir, verbose, target, debug)
    723     raise ValueError("set_source() must be called before compile()")
    724 module_name, source, source_extension, kwds = self._assigned_source
--> 725 return recompile(self, module_name, source, tmpdir=tmpdir,
    726                  target=target, source_extension=source_extension,
    727                  compiler_verbose=verbose, debug=debug, **kwds)

File /usr/local/lib/python3.9/dist-packages/cffi/recompiler.py:1564, in recompile(ffi, module_name, preamble, tmpdir, call_c_compiler, c_file, source_extension, extradir, compiler_verbose, target, debug, **kwds)
   1562         print('%s %r' % (msg, os.path.abspath(tmpdir)))
   1563     os.chdir(tmpdir)
-> 1564     outputfilename = ffiplatform.compile('.', ext,
   1565                                          compiler_verbose, debug)
   1566 finally:
   1567     os.chdir(cwd)

File /usr/local/lib/python3.9/dist-packages/cffi/ffiplatform.py:22, in compile(tmpdir, ext, compiler_verbose, debug)
     20 saved_environ = os.environ.copy()
     21 try:
---> 22     outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
     23     outputfilename = os.path.abspath(outputfilename)
     24 finally:
     25     # workaround for a distutils bugs where some env vars can
     26     # become longer and longer every time it is used

File /usr/local/lib/python3.9/dist-packages/cffi/ffiplatform.py:58, in _build(tmpdir, ext, compiler_verbose, debug)
     55         distutils.log.set_threshold(old_level)
     56 except (distutils.errors.CompileError,
     57         distutils.errors.LinkError) as e:
---> 58     raise VerificationError('%s: %s' % (e.__class__.__name__, e))
     59 #
     60 return soname

VerificationError: CompileError: command '/usr/bin/x86_64-linux-gnu-gcc' failed with exit code 1

There is a bug in DOLFINx here: the auto-generated names of split sub-functions (e.g. f_140433230527424-1 in the output above) contain a "-", which ends up inside a C identifier in the generated code and makes it invalid. I've proposed a fix in: Fix sub component name by jorgensd · Pull Request #2068 · FEniCS/dolfinx · GitHub
A temporary workaround for you is to manually rename the split variables:

# fields to solve for
u = fem.Function(V)
u1, u2 = u.split()
u1.name = "u1"
u2.name = "u2"
# concentrations from previous timestep
u_n = fem.Function(V)
u_n1, u_n2 = u_n.split()
u_n1.name = "u_n1"
u_n2.name = "u_n2"

@dokken
This fix got me past the problem definition, but when I run the solver I still get convergence errors that do not occur for the one-variable problem. Perhaps this is worth moving to a separate thread, but here is my working example for one variable:

from mpi4py import MPI
from dolfinx import fem, io, nls, log, mesh, plot
import numpy as np
import ufl
from petsc4py.PETSc import ScalarType
import matplotlib.pyplot as plt

L = 5e-3
D = 6.591637557240176e-06
k = 0.12
C_0 = 0.00133
t_final = 1
t_step = 0.01

domain = mesh.create_interval(MPI.COMM_WORLD, 100, [0, L])
V = fem.FunctionSpace(domain, ('CG', 1))

# test function
v = ufl.TestFunction(V)
# concentration CO2
u = fem.Function(V)
# concentration from previous timestep
u_n = fem.Function(V)
# wrap the timestep in a Constant defined on the mesh
dt = fem.Constant(domain, ScalarType(t_step))

# turn floats to Constant objects
D_ = fem.Constant(domain, ScalarType(D))
k_ = fem.Constant(domain, ScalarType(k))
# set initial condition
u.vector.array = C_0 * np.ones(u.vector.array.shape)

# function determining if a node is on the tray top
def on_top_boundary(x):
    return np.isclose(x[0], L)

# determine boundary DOFs
boundary_dofs = fem.locate_dofs_geometrical(V, on_top_boundary)
# apply dirichlet BC to boundary DOFs
bc = fem.dirichletbc(ScalarType(C_0), boundary_dofs, V)

F = (1/dt)*(u - u_n)*v*ufl.dx + D_*ufl.inner(ufl.grad(u), ufl.grad(v))*ufl.dx + k_*u*v*ufl.dx
problem = fem.petsc.NonlinearProblem(F, u, bcs=[bc])
solver = nls.petsc.NewtonSolver(MPI.COMM_WORLD, problem)

t = 0
for n in range(int(t_final/t_step)):
    # update current time
    t += t_step
    # Solve the variational problem for timestep
    solver.solve(u)
    # update previous solution
    u_n.vector.array = u.vector.array

When I move to two variables, the solver.solve step fails with "RuntimeError: Newton solver did not converge because maximum number of iterations reached." I have tried adjusting max_it, rtol, mesh spacing, etc., but it has had no effect (a sketch for watching the Newton iterations follows the script below). I had the same issue on a previous problem, but there it was caused by ill-defined non-zero Neumann BCs, which I fixed with a Lagrange transform. In this case I have no non-zero Neumann BCs; all I have done is move to a mixed space. Here is the full script:

from mpi4py import MPI
from dolfinx import fem, nls, mesh
import numpy as np
import ufl
from petsc4py.PETSc import ScalarType

L = 5e-3
D = 6.591637557240176e-06
k = 0.12
C_0 = 0.00133
N_0 = 20

t_final = 1
t_step = 0.01

nodes = 1000
relative_tolerance = 1.
maximum_iterations = 100

domain = mesh.create_interval(MPI.COMM_WORLD, nodes, [0, L])
P1 = ufl.FiniteElement('P', domain.ufl_cell(), 2)
element = ufl.MixedElement([P1, P1])
V = fem.FunctionSpace(domain, element)

# test functions
v1, v2 = ufl.TestFunctions(V)

# fields to solve for
u = fem.Function(V)
u1, u2 = u.split()
u1.name = 'u1'
u2.name = 'u2'

# concentrations from previous timestep
u_n = fem.Function(V)
u_n1, u_n2 = u_n.split()
u_n1.name = 'u_n1'
u_n2.name = 'u_n2'

# wrap the timestep in a Constant defined on the mesh
dt = fem.Constant(domain, ScalarType(t_step))

# turn floats to Constant objects
D_ = fem.Constant(domain, ScalarType(D))
k_ = fem.Constant(domain, ScalarType(k))

# set initial conditions
u1.vector.array = C_0 * np.ones(u1.vector.array.shape)
u2.vector.array = N_0 * np.ones(u2.vector.array.shape)

# function determining if a node is on the tray top
def on_top_boundary(x):
    return np.isclose(x[0], L)

# fetch subspace for just u1
V0, submap = V.sub(0).collapse()
# determine boundary DOFs
boundary_dofs = fem.locate_dofs_geometrical((V.sub(0), V0), on_top_boundary)
# apply dirichlet BC to boundary DOFs
bc = fem.dirichletbc(ScalarType(C_0), boundary_dofs[0], V.sub(0))

# residual form of the variational problem (F(u; v) = 0)
F = (1/dt)*(u1 - u_n1)*v1*ufl.dx + \
    (1/dt)*(u2 - u_n2)*v2*ufl.dx + \
    D_*ufl.inner(ufl.grad(u1), ufl.grad(v1))*ufl.dx + \
    k_*u1*u2*v1*ufl.dx + \
    k_*u1*u2*v2*ufl.dx

# construct problem
problem = fem.petsc.NonlinearProblem(F, u, bcs=[bc, bc1])

# use Newton's method to solve the nonlinear problem at each timestep
solver = nls.petsc.NewtonSolver(MPI.COMM_WORLD, problem)
# set the Newton solver's convergence options
solver.convergence_criterion = 'incremental'
solver.rtol = relative_tolerance
solver.max_it = maximum_iterations

t = 0
for n in range(int(t_final/t_step)):
    # update current time
    t += t_step
    # Solve the variational problem for timestep
    solver.solve(u)
    # update previous solution
    u_n.vector.array = u.vector.array
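
To see what each Newton iteration is doing, here is the diagnostic sketch mentioned above. It just follows the verbose-logging pattern from the DOLFINx demos (log.set_log_level plus the (iterations, converged) pair returned by solver.solve); it is for inspection only, not a fix:

# print the residual of every Newton iteration via the DOLFINx logger
from dolfinx import log
log.set_log_level(log.LogLevel.INFO)

t = 0
for n in range(int(t_final/t_step)):
    t += t_step
    # solve() raises RuntimeError on non-convergence, but the residual
    # history is still printed by the logger before the exception
    num_its, converged = solver.solve(u)
    print(f't = {t:.3g}: {num_its} Newton iterations, converged = {converged}')
    u_n.vector.array = u.vector.array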

Thank you for your responses!

This is probably outdated, but did you define bc1? You need boundary conditions for the new space too; otherwise your problem is mathematically ill-posed…
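
If a Dirichlet condition on u2 at the same boundary is what you intend, a minimal sketch mirroring your existing bc for V.sub(0) would be (N_0 is just a placeholder value; use whatever is physically appropriate for u2):

# sketch: Dirichlet BC for the second component, mirroring the one on V.sub(0)
V1, submap1 = V.sub(1).collapse()
boundary_dofs_u2 = fem.locate_dofs_geometrical((V.sub(1), V1), on_top_boundary)
bc1 = fem.dirichletbc(ScalarType(N_0), boundary_dofs_u2[0], V.sub(1))

problem = fem.petsc.NonlinearProblem(F, u, bcs=[bc, bc1])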