3D Navier-Stokes problem (flow past a sphere)

OK, thanks! I am trying it now.

import dolfinx
import numpy as np
from mpi4py import MPI
from dolfinx.cpp.mesh import CellType
from dolfinx.io import (XDMFFile, extract_gmsh_geometry,
                        extract_gmsh_topology_and_markers, ufl_mesh_from_gmsh)
from dolfinx.fem import (Constant, DirichletBC, Function, FunctionSpace, apply_lifting, assemble_matrix, 
                         assemble_scalar, assemble_vector, create_vector, locate_dofs_topological, set_bc)
from petsc4py import PETSc
from ufl import (FacetNormal, FiniteElement, Identity, Measure, TestFunction, TrialFunction, VectorElement,
                 as_vector, div, dot, ds, dx, inner, lhs, grad, nabla_grad, rhs, sym)

print(f"DOLFINx version: {dolfinx.__version__} is installed")

filename="small-mesh.xdmf"
with dolfinx.io.XDMFFile(MPI.COMM_WORLD, "Small-Mesh-With-Naming.xdmf", "r") as xdmf:
       mesh = xdmf.read_mesh(name="Grid")


#### local mesh ####
## mesh = UnitCubeMesh(MPI.COMM_WORLD, 10, 10, 10)
###################

t = 0.000000
T = 1 #8                    # Final time
dt = 1/1600                 # Time step size
num_steps = int(T/dt)
k = Constant(mesh, dt)        
mu = Constant(mesh, 0.001)  # Dynamic viscosity
rho = Constant(mesh, 1)     # Density

import dolfinx.fem as fem
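# P2/P1 (Taylor-Hood) pair: quadratic velocity, linear pressure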
V = fem.VectorFunctionSpace(mesh, ("CG", 2))
Q = fem.FunctionSpace(mesh, ("CG", 1))

eps = 1e-14  # geometric tolerance for the boundary markers below

# Boundary markers: inflow plane x = 0, outflow plane x = 1.5, the remaining
# channel sides as walls, and a small box enclosing the obstacle around
# (0.5, 0.5, 0.5)

def Inflow(x):
    return np.abs(x[0] - 0.) < eps

def Outflow(x):
    return np.abs(x[0] - 1.5) < eps

def Walls(x):
    result = np.logical_or(np.abs(x[1] - 0.) < eps, np.abs(x[1] - 1.) < eps)
    result2 = np.logical_or(np.abs(x[2] - 0.) < eps, np.abs(x[2] - 1.) < eps)
    return np.logical_or(result, result2)

def Sphere(x):
    result = np.logical_and(x[0] > .45 - eps, x[0] < .55 + eps)
    result2 = np.logical_and(x[1] > .45 - eps, x[1] < .55 + eps)
    result3 = np.logical_and(x[2] > .45 - eps, x[2] < .55 + eps)
    return np.logical_and(np.logical_and(result, result2), result3)

def inflow_profile(x):
    # Uniform unit velocity in the x-direction on the inflow plane
    values = np.zeros(x.shape)
    values[0, :] = 1
    return values

u_in = fem.Function(V)
u_in.interpolate(inflow_profile)

u_zero = fem.Function(V)
u_zero.x.array[:] = 0
p_zero = fem.Function(Q)
p_zero.x.array[:] = 0

inflow_boundary_dofs = fem.locate_dofs_geometrical(V, Inflow)
outflow_boundary_dofs = fem.locate_dofs_geometrical(Q, Outflow)
walls_boundary_dofs = fem.locate_dofs_geometrical(V, Walls)

# Locate the boundary facets on the obstacle and tag them so the marking can
# be checked in the output file
facets = dolfinx.mesh.locate_entities_boundary(
    mesh, mesh.topology.dim-1, Sphere)
mt = dolfinx.mesh.MeshTags(mesh, mesh.topology.dim-1,
                           facets, np.full(len(facets), 1, dtype=np.int32))
with dolfinx.io.XDMFFile(MPI.COMM_WORLD, "mt.xdmf", "w") as xdmf:
    xdmf.write_mesh(mesh)
    xdmf.write_meshtags(mt)

#sphere_boundary_dofs = fem.locate_dofs_geometrical(V, Sphere)
sphere_boundary_dofs = fem.locate_dofs_topological(
    V, mesh.topology.dim-1, facets)

bcu_inflow = fem.DirichletBC(u_in, inflow_boundary_dofs)
bcp_outflow = fem.DirichletBC(p_zero, outflow_boundary_dofs)
bcu_walls = fem.DirichletBC(u_zero, walls_boundary_dofs)
bcu_sphere = fem.DirichletBC(u_zero, sphere_boundary_dofs)

bcu = [bcu_inflow, bcu_walls, bcu_sphere]
bcp = [bcp_outflow]


print(f"DOLFINx version: {dolfinx.__version__} is installed")

# Define trial and test functions
u = TrialFunction(V)
v = TestFunction(V)
p = TrialFunction(Q)
q = TestFunction(Q)

# Define functions for solutions at previous and current time steps
u_n = Function(V)
u_ = Function(V)
p_n = Function(Q)
p_ = Function(Q)

# Define expressions used in variational forms
U = 0.5 * (u_n + u)
n = FacetNormal(mesh)
f = Constant(mesh, PETSc.ScalarType((0,0,0)))
k = Constant(mesh, PETSc.ScalarType(dt))
mu = Constant(mesh, PETSc.ScalarType(.001))
rho = Constant(mesh, PETSc.ScalarType(1))

# Define symmetric gradient
def epsilon(u):
    return sym(nabla_grad(u))


# Define stress tensor
def sigma(u, p):
    return 2 * mu * epsilon(u) - p * Identity(len(u))


# Define variational problem for step 1 (tentative velocity) of the
# incremental pressure-correction splitting scheme
F1 = rho * dot((u - u_n) / k, v) * dx \
     + rho * dot(dot(u_n, nabla_grad(u_n)), v) * dx \
     + inner(sigma(U, p_n), epsilon(v)) * dx \
     + dot(p_n * n, v) * ds - dot(mu * nabla_grad(U) * n, v) * ds \
     - dot(f, v) * dx
a1 = lhs(F1)
L1 = rhs(F1)

# Define variational problem for step 2
a2 = dot(nabla_grad(p), nabla_grad(q)) * dx
L2 = dot(nabla_grad(p_n), nabla_grad(q)) * dx - (1 / k) * div(u_) * q * dx

# Define variational problem for step 3
a3 = dot(u, v) * dx
L3 = dot(u_, v) * dx - k * dot(nabla_grad(p_ - p_n), v) * dx

# Assemble matrices
#A1 = assemble(a1)
#A2 = assemble(a2)
#A3 = assemble(a3)
A1 = assemble_matrix(a1, bcs=bcu)
A1.assemble()
b1 = create_vector(L1)


A2 = assemble_matrix(a2, bcs=bcp)
A2.assemble()
b2 = create_vector(L2)


A3 = assemble_matrix(a3)
A3.assemble()
b3 = create_vector(L3)



print(f"DOLFINx version: {dolfinx.__version__} is installed")



# Solver for step 1
solver1 = PETSc.KSP().create(MPI.COMM_WORLD)
solver1.setOperators(A1)
solver1.setType(PETSc.KSP.Type.BCGS)
pc1 = solver1.getPC()
pc1.setType(PETSc.PC.Type.JACOBI)

# Solver for step 2
solver2 = PETSc.KSP().create(MPI.COMM_WORLD)
solver2.setOperators(A2)
solver2.setType(PETSc.KSP.Type.MINRES)
pc2 = solver2.getPC()
pc2.setType(PETSc.PC.Type.HYPRE)
pc2.setHYPREType("boomeramg")

# Solver for step 3
solver3 = PETSc.KSP().create(MPI.COMM_WORLD)
solver3.setOperators(A3)
solver3.setType(PETSc.KSP.Type.CG)
pc3 = solver3.getPC()
pc3.setType(PETSc.PC.Type.SOR)


print(f"DOLFINx version: {dolfinx.__version__} is installed")

import dolfinx.io
xdmf = dolfinx.io.XDMFFile(MPI.COMM_WORLD, "solution-1.xdmf", "w")
xdmf.write_mesh(mesh)
xdmf.write_function(u_n, t)
xdmf.write_function(p_n, t)

for i in range(num_steps):
    # Update current time step
    t += dt

    # Step 1: Tentative velocity step
    with b1.localForm() as loc_1:
        loc_1.set(0)
    assemble_vector(b1, L1)
    apply_lifting(b1, [a1], [bcu])
    b1.ghostUpdate(addv=PETSc.InsertMode.ADD_VALUES, mode=PETSc.ScatterMode.REVERSE)
    set_bc(b1, bcu)
    solver1.solve(b1, u_.vector)
    u_.x.scatter_forward()
    
    # Step 2: Pressure correction step
    with b2.localForm() as loc_2:
        loc_2.set(0)
    assemble_vector(b2, L2)
    apply_lifting(b2, [a2], [bcp])
    b2.ghostUpdate(addv=PETSc.InsertMode.ADD_VALUES, mode=PETSc.ScatterMode.REVERSE)
    set_bc(b2, bcp)
    solver2.solve(b2, p_.vector)
    p_.x.scatter_forward()

    # Step 3: Velocity correction step
    with b3.localForm() as loc_3:
        loc_3.set(0)
    assemble_vector(b3, L3)
    b3.ghostUpdate(addv=PETSc.InsertMode.ADD_VALUES, mode=PETSc.ScatterMode.REVERSE)
    solver3.solve(b3, u_.vector)
    u_.x.scatter_forward()

    # Update variables with the solution from this time step
    u_n.x.array[:] = u_.x.array[:]
    p_n.x.array[:] = p_.x.array[:]

    # Write solutions to file
    xdmf.write_function(u_n, t)
    xdmf.write_function(p_n, t)

# Close the XDMF file
xdmf.close()

and I am getting the following error:

DOLFINx version: 0.3.1.0 is installed
  #003: H5Fint.c line 2165 in H5F__close_cb(): can't close file, there are objects still open
    major: File accessibility
    minor: Unable to close file
DOLFINx version: 0.3.1.0 is installed
Traceback (most recent call last):
  File "sphere-1.py", line 78, in <module>
    xdmf.write_meshtags(mt)
RuntimeError: Failed to write HDF5 local dataset into hyperslab.

During handling of the above exception, another exception occurred:

Could you please tell me what causes this error?

Give the file another name, e.g.

with dolfinx.io.XDMFFile(MPI.COMM_WORLD, "mt_verification.xdmf", "w") as xdmf:
    xdmf.write_mesh(mesh)
    xdmf.write_meshtags(mt)

Now I am getting an MPI error:

HDF5-DIAG: Error detected in HDF5 (1.10.7) MPI-process 24:
HDF5-DIAG: Error detected in HDF5 (1.10.7) MPI-process 27:
  #000: H5Dio.c line 335 in H5Dwrite(): can't write data
    major: Dataset
  #000: H5Dio.c line 335 in H5Dwrite(): can't write data
    major: Dataset
    minor: Write failed
  #001: H5Dio.c line 813 in H5D__write(): can't write data
    major: Dataset
    minor: Write failed
  #001: H5Dio.c line 813 in H5D__write(): can't write data
    major: Dataset
    minor: Write failed
    minor: Write failed
  #002: H5Dmpio.c line 735 in H5D__contig_collective_write(): couldn't finish shared collective MPI-IO
    major: Low-level I/O
    minor: Write failed
  #002: H5Dmpio.c line 735 in H5D__contig_collective_write(): couldn't finish shared collective MPI-IO
    major: Low-level I/O
    minor: Write failed
  #003: H5Dmpio.c line 2081 in H5D__inter_collective_io(): couldn't finish collective MPI-IO
    major: Low-level I/O
  #003: H5Dmpio.c line 2081 in H5D__inter_collective_io(): couldn't finish collective MPI-IO
    major: Low-level I/O
    minor: Can't get value
    minor: Can't get value
  #004: H5Dmpio.c line 2125 in H5D__final_collective_io(): optimized write failed
    major: Dataset
  #004: H5Dmpio.c line 2125 in H5D__final_collective_io(): optimized write failed
    major: Dataset
    minor: Write failed
    minor: Write failed
  #005: H5Dmpio.c line 491 in H5D__mpio_select_write(): can't finish collective parallel write
  #005: H5Dmpio.c line 491 in H5D__mpio_select_write(): can't finish collective parallel write
    major: Low-level I/O
    minor: Write failed
    major: Low-level I/O
    minor: Write failed


What happens if you replace:

with dolfinx.io.XDMFFile(MPI.COMM_WORLD, "mt_verification.xdmf", "w") as xdmf:
    xdmf.write_mesh(mesh)
    xdmf.write_meshtags(mt)

with:

with dolfinx.io.XDMFFile(mesh.comm, "mt_verification.xdmf", "w") as xdmf_mt:
    xdmf_mt.write_mesh(mesh)
    xdmf_mt.write_meshtags(mt)

Now I get:

  File "sphere-1.py", line 82, in <module>
    with dolfinx.io.XDMFFile(mesh.comm, "mt_verification.xdmf", "w") as xdmf_mt:
AttributeError: 'dolfinx.cpp.mesh.Mesh' object has no attribute 'comm'
Traceback (most recent call last):
  File "sphere-1.py", line 82, in <module>
    with dolfinx.io.XDMFFile(mesh.comm, "mt_verification.xdmf", "w") as xdmf_mt:
AttributeError: 'dolfinx.cpp.mesh.Mesh' object has no attribute 'comm'
Traceback (most recent call last):
  File "sphere-1.py", line 82, in <module>
    with dolfinx.io.XDMFFile(mesh.comm, "mt_verification.xdmf", "w") as xdmf_mt:
AttributeError: 'dolfinx.cpp.mesh.Mesh' object has no attribute 'comm'
DOLFINx version: 0.3.1.0 is installed
Traceback (most recent call last):
  File "sphere-1.py", line 82, in <module>
    with dolfinx.io.XDMFFile(mesh.comm, "mt_verification.xdmf", "w") as xdmf_mt:
AttributeError: 'dolfinx.cpp.mesh.Mesh' object has no attribute 'comm'
srun: error: iris-137: tasks 0-27: Exited with exit code 1

It could be due to some missing syntax, but I am importing the necessary libraries:

import dolfinx
import numpy as np
from mpi4py import MPI
from dolfinx.cpp.mesh import CellType
from dolfinx.io import (XDMFFile, extract_gmsh_geometry,
                        extract_gmsh_topology_and_markers, ufl_mesh_from_gmsh)
from dolfinx.fem import (Constant, DirichletBC, Function, FunctionSpace, apply_lifting, assemble_matrix, 
                         assemble_scalar, assemble_vector, create_vector, locate_dofs_topological, set_bc)
from petsc4py import PETSc
from ufl import (FacetNormal, FiniteElement, Identity, Measure, TestFunction, TrialFunction, VectorElement,
                 as_vector, div, dot, ds, dx, inner, lhs, grad, nabla_grad, rhs, sym)


You are running an older version of DOLFINx than I am; try mesh.mpi_comm.

Still an error, though now a different one:

  File "sphere-1.py", line 77, in <module>
    with dolfinx.io.XDMFFile(mesh.mpi_comm, "mt.xdmf", "w") as xdmf:
TypeError: __init__(): incompatible constructor arguments. The following argument types are supported:
    1. dolfinx.cpp.io.XDMFFile(comm: MPICommWrapper, filename: str, file_mode: str, encoding: dolfinx.cpp.io.XDMFFile.Encoding = <Encoding.HDF5: 0>)

Invoked with: <bound method PyCapsule.mpi_comm of <dolfinx.cpp.mesh.Mesh object at 0x2aaaca5c9360>>, 'mt.xdmf', 'w'
Traceback (most recent call last):
  File "sphere-1.py", line 77, in <module>
    with dolfinx.io.XDMFFile(mesh.mpi_comm, "mt.xdmf", "w") as xdmf:
TypeError: __init__(): incompatible constructor arguments. The following argument types are supported:
    1. dolfinx.cpp.io.XDMFFile(comm: MPICommWrapper, filename: str, file_mode: str, encoding: dolfinx.cpp.io.XDMFFile.Encoding = <Encoding.HDF5: 0>)

I tried both filenames (mt_verification.xdmf and mt.xdmf):

Traceback (most recent call last):
  File "sphere-1.py", line 77, in <module>
    with dolfinx.io.XDMFFile(mesh.mpi_comm, "mt_verification.xdmf", "w") as xdmf_mt:
TypeError: __init__(): incompatible constructor arguments. The following argument types are supported:
    1. dolfinx.cpp.io.XDMFFile(comm: MPICommWrapper, filename: str, file_mode: str, encoding: dolfinx.cpp.io.XDMFFile.Encoding = <Encoding.HDF5: 0>)

Invoked with: <bound method PyCapsule.mpi_comm of <dolfinx.cpp.mesh.Mesh object at 0x2aaaca5c9e50>>, 'mt_verification.xdmf', 'w'
DOLFINx version: 0.3.1.0 is installed
Traceback (most recent call last):
  File "sphere-1.py", line 77, in <module>
    with dolfinx.io.XDMFFile(mesh.mpi_comm, "mt_verification.xdmf", "w") as xdmf_mt:
TypeError: __init__(): incompatible constructor arguments. The following argument types are supported:
    1. dolfinx.cpp.io.XDMFFile(comm: MPICommWrapper, filename: str, file_mode: str, encoding: dolfinx.cpp.io.XDMFFile.Encoding = <Encoding.HDF5: 0>)

What about mesh.mpi_comm(), with the parentheses? The traceback shows that the bound method itself was passed as the communicator instead of its return value.
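
That is, keeping the rest of the snippet as posted:

with dolfinx.io.XDMFFile(mesh.mpi_comm(), "mt_verification.xdmf", "w") as xdmf_mt:
    xdmf_mt.write_mesh(mesh)
    xdmf_mt.write_meshtags(mt)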

Thanks! But still the same sort of error:

HDF5-DIAG: Error detected in HDF5 (1.10.7) MPI-process 6:
  #000: H5Dio.c line 335 in H5Dwrite(): can't write data
    major: Dataset
HDF5-DIAG: Error detected in HDF5 (1.10.7) MPI-process 27:
    minor: Write failed
  #000: H5Dio.c line 335 in H5Dwrite(): can't write data
  #001: H5Dio.c line 813 in H5D__write(): can't write data
    major: Dataset
    minor: Write failed
    major: Dataset
    minor: Write failed
  #002: H5Dmpio.c line 735 in H5D__contig_collective_write(): couldn't finish shared collective MPI-IO
    major: Low-level I/O
    minor: Write failed
  #001: H5Dio.c line 813 in H5D__write(): can't write data
    major: Dataset
  #003: H5Dmpio.c line 2081 in H5D__inter_collective_io(): couldn't finish collective MPI-IO
    minor: Write failed
    major: Low-level I/O
    minor: Can't get value

Just skip writing the meshtags to file for now.
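
For example, comment out the verification block and keep everything else as posted:

# with dolfinx.io.XDMFFile(mesh.mpi_comm(), "mt.xdmf", "w") as xdmf:
#     xdmf.write_mesh(mesh)
#     xdmf.write_meshtags(mt)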

Thanks! The simulation is running, but I still need to check that the results are correct.
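
One quick sanity check, as a sketch using the names already defined in the script above (and assuming assemble_scalar accepts a UFL form directly, as in DOLFINx 0.3.x): monitor the L2 norm of div(u_) at the end of each time step. It will not be exactly zero for this splitting scheme, but it should stay small and bounded.

# Inside the time loop, after the velocity correction step:
# assemble_scalar returns the process-local contribution, so sum over ranks
div_u_sq = assemble_scalar(inner(div(u_), div(u_)) * dx)
div_u_norm = np.sqrt(MPI.COMM_WORLD.allreduce(div_u_sq, op=MPI.SUM))
if MPI.COMM_WORLD.rank == 0:
    print(f"t = {t:.4f}, ||div(u)||_L2 = {div_u_norm:.3e}")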