Installing MPI and Basic MPI Programs in C

Installing MPI on Ubuntu

Run the following command in the terminal:

$ sudo apt-get install libcr-dev mpich2 mpich2-doc
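
Note: on newer Ubuntu releases the mpich2 and libcr-dev packages have been dropped (see the comments at the end of this page); installing plain mpich is reported to work instead:

$ sudo apt-get install mpich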

Compiling MPI Programs

mpicc -o hello_world_c hello_world.c
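
The hello_world.c source referenced above is not included in the gist; a minimal sketch that matches the compile command might look like this (the file name and output text are illustrative):

#include <mpi.h>
#include <stdio.h>

int main(int argc, char **argv)
{
    int rank, size;

    MPI_Init(&argc, &argv);                  /* start the MPI part of the program */
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);    /* id of this process */
    MPI_Comm_size(MPI_COMM_WORLD, &size);    /* total number of processes */

    printf("Hello world from rank %d of %d\n", rank, size);

    MPI_Finalize();
    return 0;
}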

Executing MPI Programs

mpiexec -np 4 ./hello_world_c

-np : Number of Processes
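
With 4 processes and the hello_world.c sketch above, the output would be four lines similar to the following (the order of the ranks is not guaranteed):

Hello world from rank 0 of 4
Hello world from rank 2 of 4
Hello world from rank 1 of 4
Hello world from rank 3 of 4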

Basic MPI Programs

Basic MPI Functions

* MPI_Init(&argc, &argv) : Initializes the MPI part of the program. Compulsory for all MPI programs.

* MPI_Comm_rank(MPI_COMM_WORLD, &rank) : Returns the rank (process id) of the calling process.

* MPI_Comm_size(MPI_COMM_WORLD, &size) : Returns the total number of processes.

* MPI_Recv(void *buf, int count, MPI_Datatype datatype, int source, int tag, MPI_Comm comm, MPI_Status *status) : Receives data from the specified rank.

* MPI_Send(const void *buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm) : Sends data to the specified rank.

* MPI_Scatter(void *send_data, int send_count, MPI_Datatype send_datatype, void *recv_data, int recv_count, MPI_Datatype recv_datatype, int root, MPI_Comm communicator) : Scatters data from the specified root rank across all the processes.

* MPI_Gather(void *send_data, int send_count, MPI_Datatype send_datatype, void *recv_data, int recv_count, MPI_Datatype recv_datatype, int root, MPI_Comm communicator) : Gathers data from all processes to the specified root process.

* MPI_Reduce(const void *sendbuf, void *recvbuf, int count, MPI_Datatype datatype, MPI_Op op, int root, MPI_Comm comm) : Reduces the specified array element-wise by a specific operation across all processes (a small example follows this list).
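
As a minimal sketch of the element-wise behaviour of MPI_Reduce (the array contents here are illustrative, not from the original programs):

#include <mpi.h>
#include <stdio.h>

int main(int argc, char **argv)
{
    int rank;
    double local[3], global[3];

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    /* Each process contributes its own 3-element array. */
    local[0] = rank;
    local[1] = rank * 10.0;
    local[2] = rank * 100.0;

    /* Element-wise sum of all local arrays lands in global[] on rank 0. */
    MPI_Reduce(local, global, 3, MPI_DOUBLE, MPI_SUM, 0, MPI_COMM_WORLD);

    if (rank == 0)
        printf("Sums: %lf %lf %lf\n", global[0], global[1], global[2]);

    MPI_Finalize();
    return 0;
}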

1. MPI program to send data from 3 processes to a fourth process

#include <mpi.h>
#include <stdio.h>

int main(int argc, char **argv)
{
    int rank, size;
    char A[3][50] = {"RVCE", "COLLEGE", "SIDDA"};
    char B[50] = {0}, C[50] = {0}, D[50] = {0};

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    /* This program expects exactly 4 processes: ranks 1-3 each send one
       string and rank 0 receives all three. */
    if (rank == 0)
    {
        printf("Rank %d started\n", rank);

        MPI_Recv(B, 50, MPI_CHAR, 1, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        printf("Received message %s from rank %d\n", B, 1);
        MPI_Recv(C, 50, MPI_CHAR, 2, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        printf("Received message %s from rank %d\n", C, 2);
        MPI_Recv(D, 50, MPI_CHAR, 3, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        printf("Received message %s from rank %d\n", D, 3);
    }
    else
    {
        printf("Rank %d sends message %s\n", rank, A[rank - 1]);
        MPI_Send(A[rank - 1], 50, MPI_CHAR, 0, 0, MPI_COMM_WORLD);
    }

    MPI_Finalize();
    return 0;
}
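
Assuming the source is saved as send_recv.c (the file name is illustrative), this example must be launched with exactly 4 processes:

mpicc -o send_recv send_recv.c
mpiexec -np 4 ./send_recv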

2. MPI program to calculate the value of PI using the Monte Carlo method

#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <mpi.h>

int main(int argc, char **argv)
{
    int rank, size, i;
    double x = 0, y = 0, pi, z;
    int no = atoi(argv[1]);          /* total number of random points, given on the command line */
    int count = 0, total_count = 0, no_div = 0, fin_no = 0;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    no_div = no / size;              /* points generated by each process */
    srand(time(NULL) + rank);        /* different seed per process */

    /* Count how many random points in the unit square fall inside the quarter circle. */
    for (i = 0; i < no_div; i++)
    {
        x = (rand() % 100) / (double)100;
        y = (rand() % 100) / (double)100;
        z = x * x + y * y;
        if (z <= 1)
            count++;
    }
    printf("For rank %d count = %d iterations = %d\n", rank, count, no_div);

    /* Sum the per-process hit counts and iteration counts onto rank 0. */
    MPI_Reduce(&count, &total_count, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD);
    MPI_Reduce(&no_div, &fin_no, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD);

    if (rank == 0)
    {
        printf("Total count = %d, total iterations = %d\n", total_count, fin_no);
        pi = ((double)total_count) / fin_no * 4.0;
        printf("Pi value = %lf\n", pi);
    }

    MPI_Finalize();
    return 0;
}
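
Assuming the source is saved as monte_carlo_pi.c (the name is illustrative), the total number of sample points is passed as a command-line argument and is split evenly over the processes:

mpicc -o monte_carlo_pi monte_carlo_pi.c
mpiexec -np 4 ./monte_carlo_pi 100000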

3. MPI program to find the sum of 2 large arrays

#include <mpi.h>
#include <stdio.h>
#include <stdlib.h>

/* Fill an array of n integers with random values in [0, 50). */
void allocate(int a[], int n)
{
    int i;
    for (i = 0; i < n; i++)
        a[i] = rand() % 50;
}

int main(int argc, char **argv)
{
    int rank, size, n, count, *a = NULL, *b = NULL, *c = NULL, *d = NULL, *e = NULL, *f = NULL, i;

    MPI_Init(&argc, &argv);
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    n = atoi(argv[1]);               /* array length, given on the command line */
    printf("Rank = %d n = %d\n", rank, n);
    count = n / size;                /* elements handled by each process (n should be divisible by size) */

    /* Per-process chunks of a, b and the partial result. */
    d = (int *)malloc(count * sizeof(int));
    e = (int *)malloc(count * sizeof(int));
    f = (int *)malloc(count * sizeof(int));

    if (rank == 0)
    {
        /* Only the root allocates and fills the full arrays. */
        a = (int *)malloc(n * sizeof(int));
        b = (int *)malloc(n * sizeof(int));
        c = (int *)malloc(n * sizeof(int));
        allocate(a, n);
        allocate(b, n);
    }

    /* Distribute equal chunks of a and b to every process. */
    MPI_Scatter(a, count, MPI_INT, d, count, MPI_INT, 0, MPI_COMM_WORLD);
    MPI_Scatter(b, count, MPI_INT, e, count, MPI_INT, 0, MPI_COMM_WORLD);

    /* Each process adds its chunk element-wise. */
    for (i = 0; i < count; i++)
        f[i] = d[i] + e[i];

    /* Collect the partial sums back into c on the root. */
    MPI_Gather(f, count, MPI_INT, c, count, MPI_INT, 0, MPI_COMM_WORLD);

    if (rank == 0)
    {
        for (i = 0; i < n; i++)
            printf("%d + %d = %d\n", a[i], b[i], c[i]);
    }

    MPI_Finalize();
    return 0;
}
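
Assuming the source is saved as array_sum.c (the name is illustrative), the array length is passed on the command line and should be divisible by the number of processes:

mpicc -o array_sum array_sum.c
mpiexec -np 4 ./array_sum 16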

@Bilal-hameed

Hello! I am using Ubuntu 18.04. The very first command sudo apt-get install libcr-dev mpich2 mpich2-doc is not working.
The result is:

Package mpich2 is not available, but is referred to by another package.
This may mean that the package is missing, has been obsoleted, or
is only available from another source
However the following packages replace it:
mpich:i386 mpich

@Irreq

Irreq commented Feb 9, 2020

Hello @Bilal-hameed, I'm fairly new to this. However, I got it to work using only "sudo apt-get install mpich" (Ubuntu 18.04). mpich2 is no longer working for me either, but mpich does the job effortlessly. It now works on my two-node Ubuntu cluster using NFS.

@suvashsumon

When I run sudo apt-get install libcr-dev mpich mpich-doc instead of sudo apt-get install libcr-dev mpich2 mpich2-doc on my Ubuntu 20.04 PC, the system shows me this error: E: Unable to locate package libcr-dev.

Can you please give me a solution?
