"""
run with
PYTHONUNBUFFERED=1 python3 petscsolve.py
"""
import sys
import argparse
import time
from mpi4py import MPI
import petsc4py
# petsc4py.init("-log_view", comm=MPI.COMM_WORLD)
from petsc4py import PETSc
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
    "-A", default="dolfinx-A.dat", type=str, help="Path to matrix", dest="file_A"
)
parser.add_argument(
    "-b", default="dolfinx-b.dat", type=str, help="Path to vector", dest="file_b"
)
parser.add_argument("-pc", default="lu", type=str, help="pc type", dest="pc")
args = parser.parse_args()
comm = MPI.COMM_WORLD
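# Load the matrix and right-hand side from PETSc binary files
# (produced elsewhere, e.g. dumped from a dolfinx assembly)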
viewer_A = PETSc.Viewer().createBinary(args.file_A, "r")
viewer_b = PETSc.Viewer().createBinary(args.file_b, "r")
A = PETSc.Mat(comm).load(viewer_A)
b = PETSc.Vec(comm).load(viewer_b)
ksp = PETSc.KSP().create(comm)
ksp.setOperators(A)
opts = PETSc.Options()
options = {
    "ksp_type": "preonly",
    "pc_type": "lu",
    "pc_factor_mat_solver_type": "mumps",
    # Uncomment to control MUMPS output (ICNTL 1-3: output streams, ICNTL 4: print level)
    # "mat_mumps_icntl_1": None,
    # "mat_mumps_icntl_2": None,
    # "mat_mumps_icntl_3": None,
    # "mat_mumps_icntl_4": 4,
}
# Push the solver options into the global PETSc options database
for key, val in options.items():
    opts[key] = val
ksp.setFromOptions()
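# Time PC setup (the LU factorization), KSP setup, and the solve separately;
# barriers keep the phases aligned across ranks before each timer starts.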
comm.Barrier()
start_pcsetup = time.perf_counter()
ksp.getPC().setUp()
end_pcsetup = time.perf_counter()
comm.Barrier()
start_kspsetup = time.perf_counter()
ksp.setUp()
end_kspsetup = time.perf_counter()
comm.Barrier()
start_solve = time.perf_counter()
uh = b.copy()
print("solving...")
ksp.solve(b, uh)
print("solved")
end_solve = time.perf_counter()
comm.Barrier()
msg = f"PETSc: {petsc4py.__version__} Comm: {comm.rank+1}/{comm.size}, PC Setup: {end_pcsetup-start_pcsetup}, KSP Setup: {end_kspsetup-start_kspsetup} Solve: {end_solve-start_solve}"
msg += f"\n Converged reason: {ksp.getConvergedReason()} Number of iterations: {ksp.getIterationNumber()} "
comm.Barrier()
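# Stagger printing slightly by rank so per-rank reports are less likely to interleave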
time.sleep(1e-2 * MPI.COMM_WORLD.rank + 1e-3)
print(msg, flush=True)
PETSc.garbage_cleanup()
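# A minimal sketch of how the input files could be written, assuming a Mat A and
# Vec b are already assembled in petsc4py (the files read above came from elsewhere):
#
#   viewer = PETSc.Viewer().createBinary("dolfinx-A.dat", "w", comm=comm)
#   A.view(viewer)
#   viewer = PETSc.Viewer().createBinary("dolfinx-b.dat", "w", comm=comm)
#   b.view(viewer)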