@BouteillerP, the following MWE gives different results for parallel and serial computation:
#!/usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
from mpi4py import MPI
from petsc4py import PETSc
from dolfinx import fem, default_scalar_type, mesh
from dolfinx.io import XDMFFile
import ufl
import dolfinx.fem.petsc
import time
start_time = time.time()  # wall-clock reference for the final runtime report
#============= Create mesh =============
L = 1.0 # Length of the rod
nx, ny, nz = 10, 10, 100 # Number of elements in x, y, and z directions
# Hexahedral box [0, 0.1] x [0, 0.1] x [0, L], partitioned across MPI ranks.
domain = mesh.create_box(MPI.COMM_WORLD, [np.array([0, 0, 0]), np.array(
[0.1, 0.1, L])], [nx, ny, nz], cell_type=mesh.CellType.hexahedron)
#============= Create function spaces =============
# Vector-valued continuous Lagrange (P1) space for the displacement field.
# NOTE(review): fem.VectorFunctionSpace is deprecated in recent dolfinx
# releases in favour of fem.functionspace(domain, ("CG", 1, (3,))) — verify
# against the installed version.
V = fem.VectorFunctionSpace(domain, ("CG", 1))
fdim = domain.topology.dim - 1  # facet dimension (not used below)
#============= Physical constants =============
# Steel-like material: Young's modulus [Pa], Poisson ratio, Lame parameters,
# density [kg/m^3].
E = 200e9
nu = 0.27
mu = E / (2.0 * (1 + nu))
lmbda = E * nu / ((1.0 + nu) * (1.0 - 2.0 * nu))
rho = 7990
Amp=1e-8 # Amplitude of excitation
#============= Time integration parameters =============
T = 5.0e-4       # total simulated time [s]; also the sine excitation period
NTsteps = 2000   # number of explicit time steps
save_every = 20  # write output every `save_every` steps
dt = fem.Constant(domain, T / NTsteps)
#============= Time dep. boundary conditions =============
def point1(x):
    """Boolean mask for points at (0.0, 0.05, 0.0) — the midpoint of one
    x-edge of the z=0 face. `x` is a (3, N) array of coordinates."""
    on_x0 = np.isclose(x[0], 0.0)
    on_ymid = np.isclose(x[1], 0.05)
    on_z0 = np.isclose(x[2], 0.0)
    return on_x0 & on_ymid & on_z0
def point2(x):
    """Boolean mask for points at (0.1, 0.05, 0.0) — the midpoint of the
    opposite x-edge of the z=0 face. `x` is a (3, N) array of coordinates."""
    on_x1 = np.isclose(x[0], 0.1)
    on_ymid = np.isclose(x[1], 0.05)
    on_z0 = np.isclose(x[2], 0.0)
    return on_x1 & on_ymid & on_z0
def pert(t):
    """Sinusoidal perturbation: one full period over [0, T], amplitude Amp."""
    return Amp*np.sin(2*np.pi*t/T)
def input1(t):
    """Prescribed displacement at point1: +pert(t) in the y direction."""
    return np.array([0.0,pert(t),0.0], dtype=default_scalar_type)
def input2(t):
    """Prescribed displacement at point2: -pert(t), antisymmetric to input1."""
    return np.array([0.0,-pert(t),0.0], dtype=default_scalar_type)
# Constants holding the current BC values; overwritten in-place every step
# of the time loop, so the dirichletbc objects see the new values.
bc_fun1=fem.Constant(domain, PETSc.ScalarType((0,0,0)))
bc_fun2=fem.Constant(domain, PETSc.ScalarType((0,0,0)))
# NOTE(review): locate_dofs_geometrical only finds a point's dofs on the
# rank(s) whose partition contains it — worth checking that both points are
# actually found in parallel runs, given the reported serial/parallel mismatch.
point1_dofs = fem.locate_dofs_geometrical(V, point1)
point2_dofs = fem.locate_dofs_geometrical(V, point2)
bc1 = fem.dirichletbc(bc_fun1, point1_dofs, V)
bc2 = fem.dirichletbc(bc_fun2, point2_dofs, V)
allbcs=[bc1,bc2]
#============= Functions and fields =============
u_ = ufl.TestFunction(V)
u = fem.Function(V, name='Displacement')  # displacement at the current step
v = fem.Function(V)   # velocity
a = fem.Function(V)   # acceleration
a_ = ufl.TrialFunction(V)
#============= Measures =============
dx = ufl.Measure("dx", domain=domain)
#============= Stress and bilinear forms =============
def sigma(r):
    """Linear isotropic elastic stress tensor for a displacement field r."""
    dim = len(r)
    volumetric = lmbda*ufl.nabla_div(r)*ufl.Identity(dim)
    shear = 2*mu*ufl.sym(ufl.grad(r))
    return volumetric + shear
def m(a, u_):
    """Mass bilinear form: rho * inner(a, u_) * dx."""
    return rho*ufl.inner(a, u_)*dx
def k(u, u_):
    """Stiffness form: inner(sigma(u), grad(u_)) * dx."""
    return ufl.inner(sigma(u), ufl.grad(u_))*dx
#============= Explicit solver functions setup =============
def petsc_div(numerateur, denominateur, output):
    """
    Entry-wise division of two PETSc vectors: output[i] = numerateur[i] / denominateur[i].

    numerateur : PETSc Vec, the numerator
    denominateur : PETSc Vec, the denominator (entries must be non-zero)
    output : PETSc Vec, overwritten with the result
    """
    output.pointwiseDivide(numerateur, denominateur)
#============= Residual to solve =============
residual = k(u, u_)  # internal-force residual (no external volume forces)
m_form = m(a_,u_)
#============= Create file =============
xdmf_file=XDMFFile(domain.comm, "output_xdmfs/testnocores.xdmf", "w")
xdmf_file.write_mesh(domain)
xdmf_file.write_function(u, 0)
#============= Time loop =============
tt=fem.Constant(domain, 0.)  # current simulation time
# Lumped mass: diag(M) = M @ 1, i.e. row-sum lumping, obtained by assembling
# the action of the mass form on a vector of ones.
u1 = fem.Function(V)
u1.vector.array[:] = 1.
diag_M = fem.petsc.assemble_vector(fem.form(ufl.action(m_form, u1)))
# Accumulate off-process (ghost) contributions into the owning ranks, then
# push the finalized owned values back out to the ghost entries.
diag_M.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.REVERSE)
diag_M.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
# Compiled once here; intended to be reused every step of the time loop.
local_res = fem.form(-residual)
for i in range(NTsteps):
    tt.value += dt.value
    # Refresh the time-dependent Dirichlet values (antisymmetric pair).
    bc_fun1.value = input1(tt.value)
    bc_fun2.value = input2(tt.value)
    #------------ Explicit Time stepping --------------
    # u_{n+1} = u_n + dt * v_n, then re-impose the point BCs on u and
    # scatter the owned values out to the ghosts.
    u.x.array[:] = u.x.array + float(dt)*v.x.array
    fem.set_bc(u.vector, allbcs)
    u.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
    # FIX: reuse `local_res`, compiled once before the loop. The original
    # called fem.form(-residual) here on every iteration, rebuilding the
    # form object NTsteps times for no benefit — the compiled form reads
    # the current values of u automatically.
    res = fem.petsc.assemble_vector(local_res)
    res.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.REVERSE)
    res.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
    # a = diag(M)^{-1} * r : lumped-mass explicit acceleration update.
    petsc_div(res, diag_M, a.vector)
    a.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
    v.x.array[:] = v.x.array + float(dt)*a.x.array
    v.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)
    res.destroy()
    # NOTE(review): the BCs are re-imposed on u only; v and a at the
    # constrained dofs are never reset, so they keep integrating the
    # residual at those dofs. A candidate source of the reported
    # serial/parallel discrepancy — confirm whether v and a should also
    # be zeroed at the BC dofs each step.
    if MPI.COMM_WORLD.rank == 0:
        print(f"Time: {tt.value:.5e}")
    #------------- Write variables to XDMF file -------------
    if (i+1) % save_every == 0:
        xdmf_file.write_function(u, tt.value)
xdmf_file.close()
if MPI.COMM_WORLD.rank == 0:
    print("--- %s seconds runtime ---" % (time.time() - start_time))