Skip to content

Instantly share code, notes, and snippets.

@zonca
Created February 16, 2011 22:15
Show Gist options
  • Save zonca/830379 to your computer and use it in GitHub Desktop.
Save zonca/830379 to your computer and use it in GitHub Desktop.
Test of Trilinos Epetra_FEVbrMatrix (inserting and summing block entries)
#include <list>
#include <string>
#include <math.h>
#include "Epetra_ConfigDefs.h"
#ifdef HAVE_MPI
#include "mpi.h"
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_Map.h"
#include "Epetra_BlockMap.h"
#include "Epetra_FEVbrMatrix.h"
#include <EpetraExt_MatrixMatrix.h>
using namespace std;
int main(int argc, char *argv[])
{
// Minimal test of accumulating a dense block into an Epetra_FEVbrMatrix.
// A block entry must first be *inserted* before it can be summed into:
// calling BeginSumIntoGlobalValues on a non-existent entry makes
// EndSubmitEntries fail with error code 2.  Therefore the first hit uses
// BeginInsertGlobalValues and only subsequent hits use
// BeginSumIntoGlobalValues.
#ifdef HAVE_MPI
MPI_Init(&argc, &argv);
Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
Epetra_SerialComm Comm;
#endif
int NSIDE = 1;
int NSTOKES = 3;                    // values per pixel: I, Q, U Stokes parameters
// Total pixel count; each map element is one pixel holding NSTOKES points.
int NPIX = 12. * NSIDE * NSIDE + 1;
Epetra_BlockMap PixMap(NPIX, NSTOKES, 0, Comm);
cout << PixMap << endl;
Epetra_FEVbrMatrix invM(Copy, PixMap, 1);
int BlockIndices[1];
BlockIndices[0] = 2;                // global block row/col that every processor hits
int err;
Epetra_SerialDenseMatrix Mpp(NSTOKES, NSTOKES);
Mpp[0][0] = 1.;                     // each hit adds 1 to the (0,0) element
cout << Mpp << endl;
int NumHits = 2 * Comm.MyPID() + 5; // different hit count per processor
for (int i = 0; i < NumHits; ++i) { // loop on local pointing
  // First hit creates the block entry; later hits accumulate into it.
  if (i == 0) {
    invM.BeginInsertGlobalValues(BlockIndices[0], 1, BlockIndices);
  } else {
    invM.BeginSumIntoGlobalValues(BlockIndices[0], 1, BlockIndices);
  }
  err = invM.SubmitBlockEntry(Mpp.A(), Mpp.LDA(), NSTOKES, NSTOKES); //FIXME check order
  if (err != 0) {
    cout << "PID:" << Comm.MyPID() << "Error in inserting values in M, error code:" << err << endl;
  }
  err = invM.EndSubmitEntries();
  if (err != 0) {
    cout << "PID:" << Comm.MyPID() << " LocalRow[i]:" << i << " Error in ending submit entries in M, error code:" << err << endl;
  }
}
invM.GlobalAssemble(); // exchange and sum contributions made to off-processor rows
cout << invM << endl;
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return 0;
}
#include <list>
#include <string>
#include <math.h>
#include "Epetra_ConfigDefs.h"
#ifdef HAVE_MPI
#include "mpi.h"
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_Map.h"
#include "Epetra_BlockMap.h"
#include "Epetra_FEVbrMatrix.h"
//#include "AztecOO.h"
#include <EpetraExt_MatrixMatrix.h>
using namespace std;
int main(int argc, char *argv[])
{
#ifdef HAVE_MPI
MPI_Init(&argc, &argv);
Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
Epetra_SerialComm Comm;
#endif
int NSIDE = 1;
int NPIX ;
int NSTOKES = 3;
int NumElements;
NumElements = 11;
NPIX = 12. * NSIDE * NSIDE +1; //total pixel size, each pixel is an element which contains 3 floats which are IQU
Epetra_BlockMap PixMap(NPIX,NSTOKES,0,Comm);
int * PixMyGlobalElements = PixMap.MyGlobalElements();
cout << PixMap << endl;
Epetra_FEVbrMatrix invM(Copy, PixMap, 1);
int BlockIndices[1];
BlockIndices[0] = 2;
Epetra_SerialDenseMatrix *Prow;
int RowDim, NumBlockEntries;
int err;
Epetra_SerialDenseMatrix Mpp(NSTOKES, NSTOKES);
Mpp[0][0] = 1.;
cout << Mpp << endl;
Epetra_SerialDenseMatrix * Zero;
for( int i=0 ; i<PixMap.NumMyElements(); ++i ) { //loop on local pixel
BlockIndices[0] = PixMyGlobalElements[i];
Zero = new Epetra_SerialDenseMatrix(NSTOKES, NSTOKES);
invM.BeginInsertGlobalValues(BlockIndices[0], 1, BlockIndices);
err = invM.SubmitBlockEntry(Zero->A(), Zero->LDA(), NSTOKES, NSTOKES);
if (err != 0) {
cout << "PID:" << Comm.MyPID() << "Error in inserting init zero values in M, error code:" << err << endl;
}
err = invM.EndSubmitEntries();
}
int debugPID = 1;
BlockIndices[0] = 2;
cout << invM << endl;
int NumHits = 2*Comm.MyPID() + 5;
for( int i=0 ; i<NumHits; ++i ) { //loop on local pointing
invM.BeginSumIntoGlobalValues(BlockIndices[0], 1, BlockIndices);
err = invM.SubmitBlockEntry(Mpp.A(), Mpp.LDA(), NSTOKES, NSTOKES); //FIXME check order
if (err != 0) {
cout << "PID:" << Comm.MyPID() << "Error in inserting values in M, error code:" << err << endl;
}
err = invM.EndSubmitEntries();
if (err != 0) {
cout << "PID:" << Comm.MyPID() << " LocalRow[i]:" << i << " Error in ending submit entries in M, error code:" << err << endl;
}
}
invM.GlobalAssemble();
cout << invM << endl;
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return(0);
};
@zonca
Copy link
Author

zonca commented Feb 16, 2011

running on two processors, the expected result should be:

on PID=0, the block entry at BlockRow 2, BlockCol 2 should be the SerialDenseMatrix:

13 0 0
0 0 0
0 0 0

instead I get:

7 0 0
0 0 0
0 0 0

@zonca
Copy link
Author

zonca commented Feb 16, 2011

OUTPUT:

Epetra::BlockMap
Number of Global Elements = 13
Number of Global Points = 39
Maximum of all GIDs = 12
Minimum of all GIDs = 0
Index Base = 0
Constant Element Size = 3

Number of Local Elements = 7
Number of Local Points = 21
Maximum of my GIDs = 6
Minimum of my GIDs = 0

     MyPID           Local Index        Global Index  
         0                 0                 0    
         0                 1                 1    
         0                 2                 2    
         0                 3                 3    
         0                 4                 4    
         0                 5                 5    
         0                 6                 6    

Epetra::BlockMap
Number of Local Elements = 6
Number of Local Points = 18
Maximum of my GIDs = 12
Minimum of my GIDs = 7

     MyPID           Local Index        Global Index  
         1                 0                 7    
         1                 1                 8    
         1                 2                 9    
         1                 3                10    
         1                 4                11    
         1                 5                12    

Epetra::SerialDenseMatrix
Data access mode: Copy
Epetra::SerialDenseMatrix
Data access mode: Copy
A_Copied: yes
Rows(M): 3
A_Copied: yes
Rows(M): 3
Columns(N): 3
LDA: 3
Columns(N): 3
LDA: 3
1 0 0
0 0 0
1 0 0
0 0 0

0 0 0
0 0 0
PID:0 LocalRow[i]:0 Error in ending submit entries in M, error code:2

PID:0 LocalRow[i]:1 Error in ending submit entries in M, error code:2
PID:0 LocalRow[i]:2 Error in ending submit entries in M, error code:2
PID:0 LocalRow[i]:3 Error in ending submit entries in M, error code:2
PID:0 LocalRow[i]:4 Error in ending submit entries in M, error code:2
Epetra::VbrMatrix
Number of Global Block Rows = 13
Number of Global Block Cols = 13
Number of Global Block Diags = 1
Number of Global Blk Entries = 1
Global Max Num Block Entries = 1

Number of Global Rows = 39
Number of Global Cols = 39
Number of Global Diagonals = 3
Number of Global Nonzeros = 9
Global Maximum Num Entries = 9
** Matrix is Lower Triangular **

Number of My Block Rows = 7
Number of My Block Cols = 1
Number of My Block Diags = 1
Number of My Blk Entries = 1
My Max Num Block Entries = 1

Number of My Rows = 21
Number of My Cols = 3
Number of My Diagonals = 3
Number of My Nonzeros = 9
My Maximum Num Entries = 1

Epetra::VbrMatrix
Number of My Block Rows = 6
Number of My Block Cols = 0
Number of My Block Diags = 0
Number of My Blk Entries = 0
My Max Num Block Entries = 0

Number of My Rows = 18
Number of My Cols = 0
Number of My Diagonals = 0
Number of My Nonzeros = 0
My Maximum Num Entries = 0

Processor Block Row Index Block Col Index
Values
0 2 2
Epetra::SerialDenseMatrix
Data access mode: View
A_Copied: no
Rows(M): 3
Columns(N): 3
LDA: 3
7 0 0
0 0 0
0 0 0

@zonca
Copy link
Author

zonca commented Feb 18, 2011

thanks to Alan Williams:

The problem is that when you start the loop, the matrix doesn't contain any entries, and so it has an error when you try to sum-into a non-existent entry. If you use BeginInsertGlobalValues on the first iteration of your loop and then use BeginSumIntoGlobalValues on subsequent loop iterations, it should work as you expect.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment