Skip to content

Instantly share code, notes, and snippets.

@minrk
Created November 2, 2023 09:07
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save minrk/ad28de876ca5a8c1b32c5380293fc092 to your computer and use it in GitHub Desktop.
#include "mpi.h"
#include "petsc.h"
#include "petscmat.h"

#include <assert.h>

#include <memory>
#include <stdexcept>
#include <string>
#include <vector>
// subset of DOLFIN's PETScBaseMatrix / PETScMatrix (refcounting Mat wrapper)
// namespace dolfin {
class PETScBaseMatrix {
public:
/// Constructor
PETScBaseMatrix() : _matA(nullptr) {}
/// Constructor
explicit PETScBaseMatrix(Mat A) : _matA(A) {
// Increase reference count, and throw error if Mat pointer is NULL
if (_matA)
PetscObjectReference((PetscObject)_matA);
else {
throw std::overflow_error("no matrix!");
// "PETScBaseMatrix.cpp", "initialize with PETSc Mat pointer",
// "Cannot wrap PETSc Mat objects that have not been initialized");
}
}
/// Copy constructor
// PETScBaseMatrix(const PETScBaseMatrix& A);
/// Destructor
~PETScBaseMatrix() {
// Decrease reference count (PETSc will destroy object once
// reference counts reached zero)
if (_matA)
MatDestroy(&_matA);
}
/// Return PETSc Mat pointer
Mat mat() const { return _matA; }
/// Return the MPI communicator
// MPI_Comm PETScBaseMatrix::mpi_comm() const {
// // dolfin_assert(_matA);
// MPI_Comm mpi_comm = MPI_COMM_NULL;
// PetscObjectGetComm((PetscObject)_matA, &mpi_comm);
// return mpi_comm;
// }
/// Return informal string representation (pretty-print)
virtual std::string str(bool verbose) const {
return "No str function for this PETSc matrix operator.";
}
protected:
// PETSc Mat pointer
Mat _matA;
};
// }
std::shared_ptr<PETScBaseMatrix> create_transfer_matrix() {
PetscErrorCode ierr;
Mat I;
PetscInt m, n, M, N;
m = n = PETSC_DECIDE;
M = N = 4;
m = n = M;
MPI_Comm mpi_comm = MPI_COMM_WORLD;
int mpi_rank, mpi_size;
MPI_Comm_size(mpi_comm, &mpi_size);
MPI_Comm_rank(mpi_comm, &mpi_rank);
std::vector<PetscInt> dnnz(M, 0);
for(int i=0; i<M; i++) {
++dnnz[i];
}
// unsigned int mpi_size = MPI::size(mpi_comm);
// Create and initialise the transfer matrix as MATMPIAIJ/MATSEQAIJ
ierr = MatCreate(mpi_comm, &I);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
if (mpi_size > 1) {
ierr = MatSetType(I, MATMPIAIJ);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
ierr = MatSetSizes(I, m, n, M, N);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
ierr = MatGetSize(I, &m, &n);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
// ierr = MatMPIAIJSetPreallocation(I, PETSC_DEFAULT, dnnz.data(),
// PETSC_DEFAULT, onnz.data());
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
} else {
ierr = MatSetType(I, MATSEQAIJ);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
ierr = MatSetSizes(I, m, n, M, N);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
ierr = MatSeqAIJSetPreallocation(I, PETSC_DEFAULT, dnnz.data());
CHKERRABORT(PETSC_COMM_WORLD, ierr);
}
// Setting transfer matrix values row by row
for (int i = 0; i < m; ++i) {
PetscInt rows[1] = {i,};
for (int j = i; j <= i; ++j) {
PetscInt cols[1] = {j,};
PetscScalar values[1];
values[0] = i * j;
ierr = MatSetValues(I, 1, rows, 1, cols, values, INSERT_VALUES);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
}
}
// for (unsigned int fine_row = 0; fine_row < m; ++fine_row)
// {
// PetscInt fine_dof[ = global_row_indices[fine_row];
// ierr = MatSetValues(I, 1, &fine_dof, eldim,
// col_indices[fine_row].data(),
// values[fine_row].data(), INSERT_VALUES);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// }
// Assemble the transfer matrix
ierr = MatAssemblyBegin(I, MAT_FINAL_ASSEMBLY);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
ierr = MatAssemblyEnd(I, MAT_FINAL_ASSEMBLY);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
// create shared pointer and return the pointer to the transfer
// matrix
std::shared_ptr<PETScBaseMatrix> ptr = std::make_shared<PETScBaseMatrix>(I);
ierr = MatDestroy(&I);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
return ptr;
}
int main(int argc, char **argv) {
PetscErrorCode ierr;
Mat matrix;
ierr = PetscInitialize(&argc, &argv, NULL, NULL);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
std::shared_ptr<PETScBaseMatrix> P = create_transfer_matrix();
matrix = P->mat();
Vec x = nullptr;
Vec y = nullptr;
ierr = MatCreateVecs(matrix, &x, &y); // this will crash if I've reproduced the bug
CHKERRABORT(PETSC_COMM_WORLD, ierr);
printf("Ierr %d\n", ierr);
ierr = MatView(matrix, PETSC_VIEWER_STDOUT_SELF);
CHKERRABORT(PETSC_COMM_WORLD, ierr);
//
// // Create and initialise the transfer matrix as MATMPIAIJ/MATSEQAIJ
// ierr = MatCreate(mpi_comm, &I);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// if (mpi_size > 1) {
// ierr = MatSetType(I, MATMPIAIJ);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// ierr = MatSetSizes(I, m, n, M, N);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// ierr = MatMPIAIJSetPreallocation(I, PETSC_DEFAULT, dnnz.data(),
// PETSC_DEFAULT, onnz.data());
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// } else {
// ierr = MatSetType(I, MATSEQAIJ);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// ierr = MatSetSizes(I, m, n, M, N);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// ierr = MatSeqAIJSetPreallocation(I, PETSC_DEFAULT, dnnz.data());
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// }
//
// // Setting transfer matrix values row by row
// // for (unsigned int fine_row = 0; fine_row < m_owned; ++fine_row) {
// // PetscInt fine_dof = global_row_indices[fine_row];
// // ierr = MatSetValues(I, 1, &fine_dof, eldim,
// col_indices[fine_row].data(),
// // values[fine_row].data(), INSERT_VALUES);
// // CHKERRABORT(PETSC_COMM_WORLD, ierr);
// // }
//
// // Assemble the transfer matrix
// ierr = MatAssemblyBegin(I, MAT_FINAL_ASSEMBLY);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// ierr = MatAssemblyEnd(I, MAT_FINAL_ASSEMBLY);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
// create shared pointer and return the pointer to the transfer
// matrix
// std::shared_ptr<PETScBaseMatrix> ptr =
// std::make_shared<PETScBaseMatrix>(I); ierr = MatDestroy(&I);
// CHKERRABORT(PETSC_COMM_WORLD, ierr);
return 0;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment