Dear all,
I have some problems using fieldsplit as a preconditioner for a Navier-Stokes problem. My code works fine with previous FEniCS versions (2017, 2018). With FEniCS 2019, the code no longer works in the parallel case once I use fieldsplit preconditioning. I wonder whether there are any changes to fieldsplit in FEniCS 2019.
I include the relevant part of my code here:
// User defined nonlinear problem
class NSequation : public NonlinearProblem
{
public:
// Constructor
NSequation(std::shared_ptr<const Form> F,
std::shared_ptr<const Form> J,
std::vector<DirichletBC*> bcs) : _F(F), _J(J), _bcs(bcs) {}
// User defined residual vector
void F(GenericVector& b, const GenericVector& x)
{
assemble(b, *_F);
for (std::size_t i = 0; i < _bcs.size(); i++)
_bcs[i]->apply(b, x);
}
// User defined assemble of Jacobian
void J(GenericMatrix& A, const GenericVector& x)
{
assemble(A, *_J);
for (std::size_t i = 0; i < _bcs.size(); i++)
_bcs[i]->apply(A);
}
void bcschange(std::vector<DirichletBC*> bcnew)
{
for (std::size_t i = 0; i < _bcs.size(); i++)
_bcs[i]=bcnew[i];
}
private:
// Forms
std::shared_ptr<const Form> _F;
std::shared_ptr<const Form> _J;
std::vector<DirichletBC*> _bcs;
};
// Entry point: set up the mixed Navier-Stokes problem, configure a PETSc
// GMRES solver with a Schur-complement fieldsplit preconditioner, and run
// a Newton iteration.  NOTE(review): this snippet is truncated — the
// closing brace of main() and some declarations (e.g. `bcs`) are not shown.
int main(int argc, char* argv[])
{
init(argc, argv);
// Backend
//parameters["linear_algebra_backend"] = "PETSc";
// Load mesh and build the mixed (velocity, pressure) function space and
// the residual/Jacobian forms generated from the UFL file `ns`.
auto mesh = std::make_shared<Mesh>("sphere.xml");
auto W = std::make_shared<ns::FunctionSpace>(mesh);
auto F = std::make_shared<ns::LinearForm>(W);
auto J = std::make_shared<ns::JacobianForm>(W, W);
auto Wn = std::make_shared<Function>(W);
// Index sets splitting the mixed space into velocity (is[0]) and
// pressure (is[1]) dofs for PCFIELDSPLIT.
IS is[2];
auto u_dofs = W->sub(0)->dofmap()->dofs();
auto p_dofs = W->sub(1)->dofmap()->dofs();
dolfin::cout << "Number of u and p dofs: " << u_dofs.size() << ", "
<< p_dofs.size() << dolfin::endl;
// NOTE(review): in parallel, each IS must hold this process's dofs in
// PETSc's global numbering — verify that dofmap()->dofs() returns that
// numbering in 2019.x (its behavior changed between releases).  Also
// assumes the dof index type matches PetscInt, and narrows size_t to
// PetscInt for the length argument — confirm for 64-bit-index builds.
ISCreateGeneral(PETSC_COMM_WORLD, u_dofs.size(), u_dofs.data(),
PETSC_COPY_VALUES, &is[0]);
ISCreateGeneral(PETSC_COMM_WORLD, p_dofs.size(), p_dofs.data(),
PETSC_COPY_VALUES, &is[1]);
// Outer Krylov solver: GMRES, reusing the previous solution as the
// initial guess.
auto solver = std::make_shared<PETScKrylovSolver>("gmres");
solver->parameters["nonzero_initial_guess"]=true;
// Drop down to the PETSc API to attach the fieldsplit preconditioner.
KSP ksp = solver->ksp();
PC pc;
KSPGetPC(ksp, &pc);
PCSetType(pc, PCFIELDSPLIT);
PCFieldSplitSetIS(pc, "u", is[0]);
PCFieldSplitSetIS(pc, "p", is[1]);
// Configure an upper Schur factorization with a "selfp" Schur
// preconditioner; Jacobi on the velocity block, BoomerAMG on the
// pressure (Schur) block.
dolfin::PETScOptions::set("ksp_view");
dolfin::PETScOptions::set("ksp_monitor_true_residual");
dolfin::PETScOptions::set("ksp_pc_side", "right");
dolfin::PETScOptions::set("pc_fieldsplit_type", "schur");
dolfin::PETScOptions::set("pc_fieldsplit_schur_fact_type", "upper");
// NOTE(review): depending on the PETSc version, this option is spelled
// -pc_fieldsplit_schur_precondition (no "ing") — confirm with -ksp_view
// that it is actually being picked up.
dolfin::PETScOptions::set("pc_fieldsplit_schur_preconditioning", "selfp");
dolfin::PETScOptions::set("fieldsplit_u_ksp_type", "preonly");
dolfin::PETScOptions::set("fieldsplit_u_pc_type", "jacobi");
dolfin::PETScOptions::set("fieldsplit_p_ksp_type", "preonly");
dolfin::PETScOptions::set("fieldsplit_p_pc_type", "hypre");
dolfin::PETScOptions::set("fieldsplit_p_pc_hypre_type", "boomeramg");
dolfin::PETScOptions::set("fieldsplit_p_pc_hypre_boomeramg_coarsen_type", "pmis");
dolfin::PETScOptions::set("fieldsplit_p_pc_hypre_boomeramg_interp_type", "FF1");
dolfin::PETScOptions::set("fieldsplit_p_pc_hypre_boomeramg_strong_threshold", "0.5");
// Pull the options set above into the KSP object.
KSPSetFromOptions(ksp);
// NOTE(review): `bcs` is used here but never declared in this snippet —
// presumably defined in omitted code.
NSequation nseq(F, J, bcs);
// Newton solver driving the nonlinear iteration with the configured
// Krylov solver as the inner linear solver.
NewtonSolver newton_solver(MPI_COMM_WORLD, solver, PETScFactory::instance());
newton_solver.parameters["relative_tolerance"] = 1.0e-4;
newton_solver.parameters["absolute_tolerance"] = 1.0e-4;
newton_solver.parameters["maximum_iterations"] = 10;
newton_solver.solve(nseq, *Wn->vector());
Any suggestions would be helpful. Thanks.