Monte Carlo Pi estimation
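
The estimator behind the script: sample points uniformly from the square [-1, 1]^2. The probability that a point lands inside the unit circle is the ratio of the circle's area to the square's, pi/4, so

    pi ≈ 4 * num_inside / num_samples

which is exactly what each approximate_pi_* function below computes.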
monte_carlo_pi.py

import argparse
import math
import random
import time

parser = argparse.ArgumentParser(description="Approximate digits of Pi using Monte Carlo simulation.")
parser.add_argument("--num-samples", type=int, default=1000000)
parser.add_argument("--parallel", default=False, action="store_true")
parser.add_argument("--distributed", default=False, action="store_true")

# Number of samples handled by each task submitted to the pool.
SAMPLE_BATCH_SIZE = 100000


def sample(num_samples):
    """Count how many of num_samples uniform points in [-1, 1]^2 fall inside the unit circle."""
    num_inside = 0
    for _ in range(num_samples):
        x = random.uniform(-1, 1)
        y = random.uniform(-1, 1)
        if math.hypot(x, y) <= 1:
            num_inside += 1
    return num_inside


def approximate_pi(num_samples):
    start = time.time()
    num_inside = sample(num_samples)
    print("pi ~= {}".format((4 * num_inside) / num_samples))
    print("Finished in: {:.2f}s".format(time.time() - start))


def approximate_pi_parallel(num_samples):
    from multiprocessing import Pool
    pool = Pool()

    start = time.time()
    num_inside = 0
    # Divide the work into fixed-size batches and fan them out across local processes.
    for result in pool.map(sample, [SAMPLE_BATCH_SIZE for _ in range(num_samples // SAMPLE_BATCH_SIZE)]):
        num_inside += result

    print("pi ~= {}".format((4 * num_inside) / num_samples))
    print("Finished in: {:.2f}s".format(time.time() - start))


def approximate_pi_distributed(num_samples):
    # Identical to approximate_pi_parallel except for the Pool import: Ray's
    # drop-in replacement schedules the batches across the whole cluster.
    from ray.util.multiprocessing import Pool
    pool = Pool()

    start = time.time()
    num_inside = 0
    for result in pool.map(sample, [SAMPLE_BATCH_SIZE for _ in range(num_samples // SAMPLE_BATCH_SIZE)]):
        num_inside += result

    print("pi ~= {}".format((4 * num_inside) / num_samples))
    print("Finished in: {:.2f}s".format(time.time() - start))


if __name__ == "__main__":
    args = parser.parse_args()
    if args.parallel:
        print("Estimating Pi using multiprocessing with {} samples...".format(args.num_samples))
        approximate_pi_parallel(args.num_samples)
    elif args.distributed:
        print("Estimating Pi using ray.util.multiprocessing with {} samples...".format(args.num_samples))
        approximate_pi_distributed(args.num_samples)
    else:
        print("Estimating Pi in one process with {} samples...".format(args.num_samples))
        approximate_pi(args.num_samples)
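
The single-process and multiprocessing modes need no cluster, so the script can be sanity-checked locally before provisioning anything. Illustrative invocations (sample counts are arbitrary):

> python monte_carlo_pi.py --num-samples 10_000_000
> python monte_carlo_pi.py --parallel --num-samples 100_000_000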
ray-cluster.yaml

cluster_name: monte_carlo_pi

# The number of worker nodes to launch in addition to the head node.
min_workers: 9
max_workers: 9

provider:
    type: aws
    region: us-west-2
    availability_zone: us-west-2a

head_node:
    InstanceType: m4.4xlarge
    ImageId: ami-06d51e91cea0dac8d  # Ubuntu 18.04

worker_nodes:
    InstanceType: m4.4xlarge
    ImageId: ami-06d51e91cea0dac8d  # Ubuntu 18.04

# List of shell commands to run to set up nodes.
setup_commands:
    - wget https://repo.continuum.io/archive/Anaconda3-5.0.1-Linux-x86_64.sh || true
    - bash Anaconda3-5.0.1-Linux-x86_64.sh -b -p $HOME/anaconda3 || true
    - echo 'export PATH="$HOME/anaconda3/bin:$PATH"' >> ~/.bashrc
    - pip install -U https://s3-us-west-2.amazonaws.com/ray-wheels/latest/ray-0.9.0.dev0-cp36-cp36m-manylinux1_x86_64.whl
    - pip install -U ray[dashboard]
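
For reference, ray.util.multiprocessing.Pool is a thin layer over Ray tasks. A minimal sketch of the same batching pattern written against Ray's task API directly (not part of the gist; assumes Ray is installed and a cluster is running, and the names sample_batch/batch_size are illustrative):

import math
import random

import ray

ray.init(address="auto")  # connect to the running cluster

@ray.remote
def sample_batch(num_samples):
    # Same rejection counting as sample() in monte_carlo_pi.py.
    num_inside = 0
    for _ in range(num_samples):
        x, y = random.uniform(-1, 1), random.uniform(-1, 1)
        if math.hypot(x, y) <= 1:
            num_inside += 1
    return num_inside

num_samples = 1_000_000_000
batch_size = 100_000
# Launch one task per batch; ray.get gathers the per-batch counts.
futures = [sample_batch.remote(batch_size) for _ in range(num_samples // batch_size)]
num_inside = sum(ray.get(futures))
print("pi ~= {}".format(4 * num_inside / num_samples))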
@edoakes commented Feb 8, 2020:
NOTE: As of 02/18/2020, this code sample requires installing the nightly Ray wheels.

Example usage:

# Start the cluster and copy the script to it.
> ray up -y ray-cluster.yaml
> ray rsync-up ray-cluster.yaml monte_carlo_pi.py monte_carlo_pi.py

# Attach to a remote shell in the head node and run the script.
> ray attach ray-cluster.yaml
[remote] > RAY_ADDRESS=auto python monte_carlo_pi.py --distributed --num-samples 1_000_000_000
pi ~= 3.141541
Finished in: 13.75s
[remote] > RAY_ADDRESS=auto python monte_carlo_pi.py --distributed --num-samples 10_000_000_000
pi ~= 3.141599
Finished in: 137.19s

# Clean up the cluster.
> ray down -y ray-cluster.yaml
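
Note the near-linear scaling in the output above: 10x the samples (1B → 10B) takes almost exactly 10x the wall time (13.75s → 137.19s), i.e. roughly 73M samples/sec sustained across the 10-node cluster, or about 7.3M samples/sec per m4.4xlarge node.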
