@zonca
Created March 4, 2011 21:43
# CMakeLists.txt for the testTP example
cmake_minimum_required(VERSION 2.6)
project(testTP)
# include_directories takes space-separated paths (no commas)
include_directories("/usr/include/trilinos" "/usr/include/openmpi")
add_executable(testTP testTP.cpp)
target_link_libraries(testTP trilinos_epetra)
# run the test on two MPI processes:
mpirun -n 2 ./testTP
// testTP.cpp
#include <string>
#include <vector>
#include <iostream>  // cout, endl, flush
#include <unistd.h>  // sleep()
#include "Tpetra_ConfigDefs.h"
#ifdef TPETRA_MPI
#include "Tpetra_MpiPlatform.hpp"
#include "Tpetra_MpiComm.hpp"
#else
#include "Tpetra_SerialPlatform.hpp"
#include "Tpetra_SerialComm.hpp"
#endif
// classic Tpetra class headers (names assumed from the old Tpetra tutorial this
// code follows; TPETRA_MPI and HAVE_MPI are assumed to be defined together in an MPI build)
#include "Tpetra_ElementSpace.hpp"
#include "Tpetra_VectorSpace.hpp"
#include "Tpetra_CisMatrix.hpp"
#include "Teuchos_ScalarTraits.hpp"
// Epetra headers used by the VbrMatrix part of main()
#include "Epetra_ConfigDefs.h"
#ifdef HAVE_MPI
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_BlockMap.h"
#include "Epetra_VbrMatrix.h"
using namespace std;
typedef int OrdinalType;
typedef double ScalarType;
// print a progress message from rank 0 only
void log(int MyPID, const string & message) {
if( MyPID == 0 ) {
cout << "******** " << message << endl << endl << flush;
}
}
int main(int argc, char *argv[])
{
#ifdef HAVE_MPI
MPI_Init(&argc, &argv);
Tpetra::MpiComm<OrdinalType, ScalarType> Comm(MPI_COMM_WORLD);
Epetra_MpiComm EComm(MPI_COMM_WORLD); // added: Epetra communicator for the VbrMatrix test below
#else
Tpetra::SerialComm<OrdinalType, ScalarType> Comm;
Epetra_SerialComm EComm; // added: serial Epetra communicator
#endif
int MyPID = 0; // rank of this process, used by log(); taken straight from MPI
#ifdef HAVE_MPI
MPI_Comm_rank(MPI_COMM_WORLD, &MyPID);
#endif
// Get zero and one for the OrdinalType
OrdinalType const OrdinalZero = Teuchos::ScalarTraits<OrdinalType>::zero();
OrdinalType const OrdinalOne = Teuchos::ScalarTraits<OrdinalType>::one();
// Get zero and one for the ScalarType
ScalarType const ScalarZero = Teuchos::ScalarTraits<ScalarType>::zero();
ScalarType const ScalarOne = Teuchos::ScalarTraits<ScalarType>::one();
OrdinalType length = OrdinalOne * 5000000; // five million global elements
OrdinalType indexBase = OrdinalZero;
// 1) Creation of a platform
#ifdef HAVE_MPI
const Tpetra::MpiPlatform <OrdinalType, OrdinalType> platformE(MPI_COMM_WORLD);
const Tpetra::MpiPlatform <OrdinalType, ScalarType> platformV(MPI_COMM_WORLD);
#else
const Tpetra::SerialPlatform <OrdinalType, OrdinalType> platformE;
const Tpetra::SerialPlatform <OrdinalType, ScalarType> platformV;
#endif
// 2) We can now create a map; the classic Tpetra API calls this an
// ElementSpace (the original declared a BlockMap named `Map', but the code
// below consistently uses `elementSpace', so that name is kept)
Tpetra::ElementSpace<OrdinalType> elementSpace(length, indexBase, platformE);
Tpetra::VectorSpace<OrdinalType, ScalarType> vectorSpace(elementSpace, platformV);
// 3) We now set up the matrix, which is diagonal.
// To that aim, we need to extract the list of locally owned
// IDs from elementSpace.
OrdinalType NumMyElements = elementSpace.getNumMyElements();
vector<OrdinalType> MyGlobalElements = elementSpace.getMyGlobalElements();
Tpetra::CisMatrix<OrdinalType, ScalarType> matrix(vectorSpace);
for (OrdinalType LID = OrdinalZero ; LID < NumMyElements ; ++LID)
{
OrdinalType GID = MyGlobalElements[LID];
// add the diagonal element of value `GID'
matrix.submitEntry(Tpetra::Insert, GID, (ScalarType)GID, GID);
}
matrix.fillComplete();
log(MyPID, "Declaring Map");
Epetra_BlockMap Map(5e6,1,0,Comm);
//RAM 6MB/proc
cout << MyPID << ":" << Map.MinMyGID() << " " << Map.NumMyElements() << endl;
log(MyPID, "Before declaring P");
sleep(4);
Epetra_VbrMatrix P(Copy,Map,1);
log(MyPID, "After declaring P");
sleep(4); //RAM 140MB/proc
double * Values;
Values = new double[3];
Values[0] = 1.;
Values[1] = 2.;
Values[2] = 3.;
int Indices[1];
int err = 0; // collects return codes from the Epetra insertion calls
log(MyPID, "Assembling P"); // each row contains 3 doubles
for( int i=0 ; i<Map.NumMyElements(); ++i ) { //loop on local rows
if (i%100000==0) {
cout << i << endl;
sleep(3);
}
int GlobalNode = Map.MyGlobalElements()[i];
Indices[0] = i; // note: the block-column index used here is the local index i, not GlobalNode
err = P.BeginInsertGlobalValues(GlobalNode, 1, Indices);
err = P.SubmitBlockEntry(Values, 1, 1, 3); // one 1x3 block of doubles per row
err = P.EndSubmitEntries();
if (err != 0) cout << "insertion error " << err << " at global row " << GlobalNode << endl;
}
delete[] Values;
log(MyPID, "FillComplete P");
sleep(4);//RAM 434MB/proc
P.FillComplete();
log(MyPID, "FillComplete P done");
//RAM 500MB/proc
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return 0;
}
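The //RAM x MB/proc annotations above were apparently collected by pausing the program with sleep() and reading each process's memory usage externally. As a possible alternative, here is a small self-contained sketch (not from the original gist) of a helper that prints each rank's resident set size by parsing the VmRSS line of /proc/self/status; it is Linux-specific, and the name logRSS is made up for this example.

#include <fstream>
#include <iostream>
#include <string>

// Sketch: print this process's resident memory (VmRSS) with a label.
// Linux-specific; relies only on the /proc filesystem, no extra libraries.
void logRSS(int MyPID, const std::string & label) {
    std::ifstream status("/proc/self/status");
    std::string line;
    while (std::getline(status, line)) {
        if (line.compare(0, 6, "VmRSS:") == 0) {
            std::cout << "[rank " << MyPID << "] " << label << " " << line << std::endl;
            break;
        }
    }
}

Calling such a helper right after Epetra_VbrMatrix P is declared, after the assembly loop, and after FillComplete() would record the same checkpoints as the //RAM 140MB/proc, 434MB/proc and 500MB/proc comments without the sleep() pauses.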