Hi there,
I have a higher-order function solution, computed in parallel with MPI, stored as a PETSc vector, and I would like to save it to disk and read it back for postprocessing on a single process. I have tried adios4dolfinx, but I could not get it working.
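For reference, this is roughly what I tried with adios4dolfinx (a sketch from memory; the exact function signatures vary between adios4dolfinx versions, so the argument order here is an assumption, and the file name is my own choice):

import adios4dolfinx
from mpi4py import MPI
from dolfinx.fem import Function, FunctionSpace
from dolfinx.mesh import GhostMode
from ufl import FiniteElement

filename = "kappa_checkpoint.bp"  # hypothetical file name

# parallel run: checkpoint the mesh and the function together
# (mesh and kappa as defined in the MWE below)
adios4dolfinx.write_mesh(mesh, filename, engine="BP4")
adios4dolfinx.write_function(kappa, filename, engine="BP4")

# serial run: the mesh must be read back from the checkpoint,
# not recreated, before reading the function
mesh = adios4dolfinx.read_mesh(MPI.COMM_WORLD, filename, engine="BP4", ghost_mode=GhostMode.none)
Q = FunctionSpace(mesh, FiniteElement("CG", mesh.ufl_cell(), 1))
kappa = Function(Q)
adios4dolfinx.read_function(kappa, filename, engine="BP4")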
So I fell back to writing the raw PETSc vector directly and prepared an MWE to show my attempt below.
The following code saves the dolfinx function as a PETSc vector:
import numba
import numpy as np
from dolfinx.fem import Function, FunctionSpace, locate_dofs_geometrical
from dolfinx.io import VTXWriter
from dolfinx.mesh import create_unit_square
from ufl import FiniteElement
from mpi4py import MPI
from petsc4py import PETSc

mesh = create_unit_square(MPI.COMM_WORLD, 10, 10)
v_cg = FiniteElement("CG", mesh.ufl_cell(), 1)
Q = FunctionSpace(mesh, v_cg)


def Omega_0(x):
    return x[1] <= 0.3


def Omega_1(x):
    return x[1] >= 0.7


kappa = Function(Q)


@numba.njit
def unroll_dofmap(dofs, bs):
    # expand block indices into the underlying scalar dof indices
    dofs_unrolled = np.zeros(bs * len(dofs), dtype=np.int32)
    for i, dof in enumerate(dofs):
        for b in range(bs):
            dofs_unrolled[i * bs + b] = dof * bs + b
    return dofs_unrolled


dofs_0 = locate_dofs_geometrical(Q, Omega_0)
dofs_1 = locate_dofs_geometrical(Q, Omega_1)
dofmap_bs = Q.dofmap.bs
kappa.x.array[unroll_dofmap(dofs_0, dofmap_bs)] = 1
kappa.x.array[unroll_dofmap(dofs_1, dofmap_bs)] = 0.1

name = 'test'
# save the function's PETSc vector in PETSc binary format
viewer = PETSc.Viewer().createBinary(name + ".dat", mode=PETSc.Viewer.Mode.WRITE, comm=MPI.COMM_WORLD)
kappa.vector.view(viewer)

# reference output for comparison
with VTXWriter(mesh.comm, name + ".bp", [kappa], engine="BP4") as vtx:
    vtx.write(0.0)
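As far as I understand, the binary viewer writes each rank's owned slice of the global vector back-to-back in rank order, so the on-disk layout depends on the partitioning. This can be checked by appending the following lines to the script above (a small sketch):

# each rank owns a contiguous slice [r0, r1) of the global PETSc vector;
# the binary file stores these owned slices concatenated in rank order
r0, r1 = kappa.vector.getOwnershipRange()
print(f"rank {mesh.comm.rank}: owns global entries [{r0}, {r1})")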
Now I read that PETSc vector back with the code below:
from dolfinx.fem import Function, FunctionSpace
from dolfinx.io import VTXWriter
from dolfinx.mesh import create_unit_square
from ufl import FiniteElement
from mpi4py import MPI
from petsc4py import PETSc

mesh = create_unit_square(MPI.COMM_WORLD, 10, 10)
v_cg = FiniteElement("CG", mesh.ufl_cell(), 1)
Q = FunctionSpace(mesh, v_cg)
kappa = Function(Q)

# read the PETSc vector back from the binary file
name = 'test'
viewer = PETSc.Viewer().createBinary(name + ".dat", mode=PETSc.Viewer.Mode.READ, comm=MPI.COMM_WORLD)
loaded_vector = PETSc.Vec().load(viewer)
kappa.vector.setArray(loaded_vector.array)

# write the result so the round-trip can be checked
fname = 'test_petsc'
with VTXWriter(mesh.comm, fname + ".bp", [kappa], engine="BP4") as vtx:
    vtx.write(0.0)
When I run the first script on one process and read the PETSc vector back with the second script on one process, it gives the correct result.
However, when I run the first script in parallel on two processes and then read the PETSc vector back on one process, the function is corrupted.
I am aware that the distribution of dofs over multiple processes is the problem, but can anyone provide a clue or a different solution strategy? My aim is to save the result of a heavy simulation in parallel and read it back for postprocessing on a single process.
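One direction I have been considering, in case it helps the discussion: since this is a Lagrange space, every dof is a point evaluation, so the parallel run could save each rank's owned dof coordinates next to the owned values, and the serial run could match them against its own dof coordinates to put the values in the right slots. A rough, untested sketch (the scipy dependency and the file names are my own choices):

import numpy as np

# --- appended to the parallel save script ---
# save each rank's owned values together with their dof coordinates
num_owned = Q.dofmap.index_map.size_local  # block size is 1 here
coords = Q.tabulate_dof_coordinates()[:num_owned]
np.save(f"coords_rank{mesh.comm.rank}.npy", coords)
np.save(f"values_rank{mesh.comm.rank}.npy", kappa.x.array[:num_owned])

# --- appended to the serial read script (instead of the PETSc load) ---
from scipy.spatial import cKDTree

tree = cKDTree(Q.tabulate_dof_coordinates())
num_ranks = 2  # number of ranks used in the parallel run
for r in range(num_ranks):
    coords = np.load(f"coords_rank{r}.npy")
    values = np.load(f"values_rank{r}.npy")
    _, idx = tree.query(coords)  # nearest serial dof for each saved dof
    kappa.x.array[idx] = values

But I am not sure this is robust (or fast enough) for large problems, hence my question.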
Thank you in advance for your answers!