Skip to content

Commit

Permalink
PI_Estimation using Monte Carlo
Browse files Browse the repository at this point in the history
  • Loading branch information
FerasAhmed committed Nov 9, 2012
1 parent 960ef78 commit ba9f809
Show file tree
Hide file tree
Showing 3 changed files with 256 additions and 1 deletion.
157 changes: 157 additions & 0 deletions OddEvenSort.c
@@ -0,0 +1,157 @@
#include <mpi.h>
#include <stdio.h>
#include <stdlib.h>
#include <vector>
#include <iostream>
#include <algorithm>

using namespace std;

// NOTE(review): appears intended as a local trapezoidal-rule integral estimate
// (Pacheco-style MPI exercise), but the formula does not match the trapezoid
// rule (which would be h * (f(a) + f(b)) / 2 per slice): it divides by h and
// the double result is truncated by the int return type. This function is not
// called anywhere in this file — confirm intent before relying on it.
int trap(double local_a, double local_b, int local_n, double h)
{
// NOTE(review): integer truncation of a double expression; suspect formula — verify.
return local_n * (local_a + local_b)/ h;
}

// Rank 0 reads an integer from stdin and broadcasts it to every rank.
// Collective: all ranks in MPI_COMM_WORLD must call this.
//   rank    - this process's rank
//   comm_sz - communicator size (unused, kept for interface compatibility)
//   value   - out: the value entered on rank 0, received by all ranks
//   count   - number of ints to broadcast from *value
void Get_Input(int rank, int comm_sz, int* value, int count)
{
    if (rank == 0)
    {
        printf("Enter Value: ");
        // BUG FIX: the scanf result was ignored; on bad/EOF input an
        // uninitialized int would be broadcast. Fall back to 0 so every
        // rank receives a defined value.
        if (scanf("%d", value) != 1)
            *value = 0;
    }

    MPI_Bcast(value, count, MPI_INT, 0, MPI_COMM_WORLD);
}

// Builds and commits a derived MPI struct type describing two ints located
// at their actual relative offsets, so both can travel in one message.
// Caller must eventually call MPI_Type_free(new_datatype).
void Build_MPI_Type(int* first, int* second, MPI_Datatype* new_datatype)
{
    /*
    1- Get relative addresses.
    2- Get Types array.
    */
    int count = 2;
    // BUG FIX: block lengths were {0,0}, which describes zero elements in
    // each block — the committed type would transfer nothing. Each block
    // holds exactly one int.
    int array_of_blocks_length[] = {1, 1};
    MPI_Aint array_of_displacement[2];
    MPI_Datatype array_of_datatype[] = {MPI_INT, MPI_INT};

    MPI_Aint first_addr, second_addr;
    MPI_Get_address(first, &first_addr);
    MPI_Get_address(second, &second_addr);
    // Displacements are relative to `first`.
    array_of_displacement[0] = 0;
    array_of_displacement[1] = second_addr - first_addr;

    MPI_Type_create_struct(count, array_of_blocks_length, array_of_displacement, array_of_datatype, new_datatype);
    MPI_Type_commit(new_datatype);

    // After using the datatype, call MPI_Type_free(new_datatype) to release it.
}

// Exchange the values of two ints in place (safe when both refer to the
// same object: the net effect is then a no-op).
void swap(int& a, int& b)
{
    const int saved_a = a;
    a = b;
    b = saved_a;
}

// Odd-even transposition sort: alternates "even" passes (compare pairs
// starting at index 0) and "odd" passes (pairs starting at index 1).
// After sz phases the array of sz elements is sorted ascending.
void oddEvenSort(int arr[], int sz)
{
    const int last = sz - 1;
    for (int phase = 0; phase < sz; ++phase)
    {
        for (int idx = phase % 2; idx < last; idx += 2)
        {
            if (arr[idx] > arr[idx + 1])
                std::swap(arr[idx], arr[idx + 1]);
        }
    }
}

// Merge two sorted length-n arrays and keep only one half in `low`:
//   lower == true  -> low receives the n smallest of the 2n values
//   lower == false -> low receives the n largest of the 2n values
// `workspace` is caller-provided scratch of at least n ints; `high` is
// left untouched.
void merge_low(int n, int low[], int high[], int workspace[], bool lower)
{
    std::copy(low, low + n, workspace);

    if (lower)
    {
        // Forward merge: take the n smallest values front-to-back.
        int li = 0, hi = 0;
        for (int out = 0; out < n; ++out)
            workspace[out] = (low[li] <= high[hi]) ? low[li++] : high[hi++];
    }
    else
    {
        // Backward merge: take the n largest values back-to-front.
        int li = n - 1, hi = n - 1;
        for (int out = n - 1; out >= 0; --out)
            workspace[out] = (low[li] >= high[hi]) ? low[li--] : high[hi--];
    }

    std::copy(workspace, workspace + n, low);
}

// Parallel odd-even transposition sort: each rank sorts a local block of
// nlocal random ints, then for `size` phases exchanges its whole block with
// the phase's neighbour and keeps the lower (or upper) half of the merged
// pair, leaving the data globally sorted across ranks.
int main(int argc, char** argv) {

    int rank, size, nlocal = 5, evenrank, oddrank;
    int *elements, *relements, *workspace;

    MPI_Init(NULL, NULL);
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    elements = new int[nlocal];   // this rank's keys
    relements = new int[nlocal];  // partner's keys received each phase
    workspace = new int[nlocal];  // merge scratch buffer

    // Seed per-rank so every process draws a different local block.
    srand(rank);
    for (int i = 0; i < nlocal; i++)
        elements[i] = rand();
    // Each local block must start sorted for merge_low to work.
    sort(elements, elements + nlocal);

    // Phase partners: in even phases even ranks pair "up" and odd ranks
    // "down"; odd phases are the mirror image.
    if (rank % 2 == 0)
    {
        oddrank = rank - 1;
        evenrank = rank + 1;
    }
    else
    {
        oddrank = rank + 1;
        evenrank = rank - 1;
    }

    // Edge ranks have no partner in one of the two phase kinds.
    if (oddrank == -1 || oddrank == size)
        oddrank = MPI_PROC_NULL;
    if (evenrank == -1 || evenrank == size)
        evenrank = MPI_PROC_NULL;

    MPI_Status mpi_status;
    for (int phase = 0; phase < size; phase++)
    {
        // BUG FIX: both branches previously exchanged with evenrank, so odd
        // phases never communicated with the correct neighbour.
        int partner = (phase % 2 == 0) ? evenrank : oddrank;

        // BUG FIX: with MPI_PROC_NULL nothing is received, yet the old code
        // merged anyway using stale/uninitialized relements (and
        // mpi_status.MPI_SOURCE, which is MPI_PROC_NULL in that case).
        // Idle ranks must simply skip the phase.
        if (partner == MPI_PROC_NULL)
            continue;

        MPI_Sendrecv(elements, nlocal, MPI_INT, partner, 1, relements, nlocal, MPI_INT, partner, 1, MPI_COMM_WORLD, &mpi_status);
        // The lower-ranked process of the pair keeps the smaller half.
        merge_low(nlocal, elements, relements, workspace, (rank < partner));
    }

    for (int i = 0; i < nlocal; i++)
        printf("my rank:%i value: %i\n", rank, elements[i]); // BUG FIX: missing newline

    delete [] elements;
    delete [] relements;
    delete [] workspace;

    MPI_Finalize();
    return 0;
}
94 changes: 94 additions & 0 deletions PI_Estimation.c
@@ -0,0 +1,94 @@
#include <mpi.h>
#include <stdio.h>
#include <stdlib.h>
#include <vector>
#include <iostream>
#include <algorithm>
#include <ctime>

using namespace std;

// Rank 0 reads an integer from stdin and broadcasts it to every rank.
// Collective: all ranks in MPI_COMM_WORLD must call this.
//   rank    - this process's rank
//   comm_sz - communicator size (unused, kept for interface compatibility)
//   value   - out: the value entered on rank 0, received by all ranks
//   count   - number of ints to broadcast from *value
void Get_Input(int rank, int comm_sz, int* value, int count)
{
    if (rank == 0)
    {
        printf("Enter Value: ");
        // BUG FIX: the scanf result was ignored; on bad/EOF input an
        // uninitialized int would be broadcast. Fall back to 0 so every
        // rank receives a defined value.
        if (scanf("%d", value) != 1)
            *value = 0;
    }

    MPI_Bcast(value, count, MPI_INT, 0, MPI_COMM_WORLD);
}

// Builds and commits a derived MPI struct type describing two ints located
// at their actual relative offsets, so both can travel in one message.
// Caller must eventually call MPI_Type_free(new_datatype).
void Build_MPI_Type(int* first, int* second, MPI_Datatype* new_datatype)
{
    /*
    1- Get relative addresses.
    2- Get Types array.
    */
    int count = 2;
    // BUG FIX: block lengths were {0,0}, which describes zero elements in
    // each block — the committed type would transfer nothing. Each block
    // holds exactly one int.
    int array_of_blocks_length[] = {1, 1};
    MPI_Aint array_of_displacement[2];
    MPI_Datatype array_of_datatype[] = {MPI_INT, MPI_INT};

    MPI_Aint first_addr, second_addr;
    MPI_Get_address(first, &first_addr);
    MPI_Get_address(second, &second_addr);
    // Displacements are relative to `first`.
    array_of_displacement[0] = 0;
    array_of_displacement[1] = second_addr - first_addr;

    MPI_Type_create_struct(count, array_of_blocks_length, array_of_displacement, array_of_datatype, new_datatype);
    MPI_Type_commit(new_datatype);

    // After using the datatype, call MPI_Type_free(new_datatype) to release it.
}

// Monte Carlo estimation of pi: every rank throws tossPerProcess random
// darts at the unit square, counts hits inside the unit circle, and rank 0
// reduces the counts to compute pi ~= 4 * hits / total_tosses.
int main(int argc, char** argv) {

    int rank, size;
    MPI_Init(NULL, NULL);
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    int tossPerProcess, root = 0;
    long long totalHits = 0;
    // Per-rank seed offset so processes sample independent streams.
    srand((unsigned)time(0) + rank);
    /*
    Reference algorithm (comment repaired — original text was garbled):
      number_in_circle = 0;
      for (toss = 0; toss < number_of_tosses; toss++) {
        x = random double between -1 and 1;
        y = random double between -1 and 1;
        distance_squared = x*x + y*y;
        if (distance_squared <= 1) number_in_circle++;
      }
      pi_estimate = 4 * number_in_circle / ((double) number_of_tosses);
    */

    if (rank == root)
        tossPerProcess = 10000000;

    MPI_Bcast(&tossPerProcess, 1, MPI_INT, root, MPI_COMM_WORLD);

    //printf("process %i received %i tosses\n", rank, tossPerProcess);
    long long numberInCircle = 0;
    double x, y, distance;
    for (int i = 0; i < tossPerProcess; i++)
    {
        // Map rand() from [0, RAND_MAX] onto [-1, 1] for both coordinates.
        x = (rand() - RAND_MAX / 2.0) / (RAND_MAX / 2.0);
        y = (rand() - RAND_MAX / 2.0) / (RAND_MAX / 2.0);
        distance = x * x + y * y;
        if (distance <= 1)
            numberInCircle++;
    }
    //printf("process %i made %lld hits\n", rank, numberInCircle);

    MPI_Reduce(&numberInCircle, &totalHits, 1, MPI_LONG_LONG, MPI_SUM, root, MPI_COMM_WORLD);

    if (rank == root)
    {
        // BUG FIX: totalHits is long long; printing it with "%i" is
        // undefined behavior. Use "%lld".
        printf("total hits: %lld\n", totalHits);
        // BUG FIX: tossPerProcess * size was computed in int and can
        // overflow (1e7 * size); promote to double before multiplying.
        printf("Pi Equals: %f\n", (4.0 * totalHits) / ((double)tossPerProcess * size));
    }

    MPI_Finalize();
    return 0;
}
6 changes: 5 additions & 1 deletion Readme.txt
Expand Up @@ -14,4 +14,8 @@ using MPI_Probe, MPI_Get_count to receive Dynamic Size data
1- DynamicSendRecieve.

using MPI_Scatter, MPI_Gather to scatter random array digits and calculate average
1- AverageNumbers.
1- AverageNumbers.

Second: Simple Programs:
1- OddEven Sort.
2- PI Estimation using Randomness.

0 comments on commit ba9f809

Please sign in to comment.