Created
February 28, 2012 10:39
-
-
Save timriffe/1931817 to your computer and use it in GitHub Desktop.
Model Thinking- Aggregation- Game of Life- Massive simulation over density, 500 iterations, parallel code
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Author: Tim Riffe tim.riffe@gmail.com | |
############################################################################### | |
# ---------------------------------- | |
# a big fat game of life simulation | |
# ---------------------------------- | |
# -------------------------------------------------------------------------- | |
# Explanation of what this is all about: | |
# -------------------------------------- | |
# This code will produce a rather large dataset to be used in figuring out the | |
# functional form of Game Of Life 'population survival', which is only an approximate
# concept, of course. Technically speaking, we can't call the proportion 'on' over time | |
# a survival function because it isn't strictly decreasing, i.e. it's not a monotonically | |
# declining function over time. But it 'is' approximately declining, and it might be | |
# useful, at least in my mind, to treat this simple output as survival data. The reason | |
# why I'm trying the test over different starting densities is because I'm curious about
# whether the populations will all converge toward the same proportion 'living', in which | |
# case we could say that the long term 'survival' behavior of a game of life (at least | |
# for this board size!) is weakly ergodic, i.e. asymptotically independent of starting | |
# conditions. I hypothesize, before taking a look at the data (still running at this writing) | |
# that there will be a threshold in starting density above which weak ergodicity applies, | |
# and below which something else happens, such as extinction, or stability at a lower level. | |
# To be specific about terminology, I'd mean that the starting densities for which weak | |
# ergodicity apply are not only independent of each other, but also independent of the | |
# particular starting configuration on the board- although there are some stable cell | |
# configurations that foil this (entire board filled with a grid of
# 3 vertical/ 3 horizontal cells that simply switch between vertical and horizontal orientation) | |
# if cells are just messily scattered, then I hypothesize long run convergence under the
# same starting density. This we're not testing here, but might be worth checking in | |
# a future big fat simulation | |
# -------------------------------------------------------------------------- | |
# Some technical details | |
# ---------------------- | |
# this code sources 2 functions from Vadim Vinichenko for the game of life. | |
# His functions were more convenient because they scale well. | |
# we only work with 500x500 GOL boards. Each randomly generated, but with the | |
# same random number sequence and different starting density thresholds. The | |
# starting density thresholds are actually approximate, but you'll be able to | |
# get the exact starting proportion 'on' by looking at the output. | |
# these are the starting population thresholds that we iterate over | |
# using a parallel version of sapply(), parSapply(), which is included in the | |
# now- default package "parallel". i.e. the ONLY thing that differs between | |
# simulation runs is the approximate proportion of cells that start off alive- | |
# the same set of random numbers is used for each starting board. | |
# Approximate starting densities to sweep over: 1% through 50% in 1% steps
rnd_threshold <- seq(from = 0.01, to = 0.5, by = 0.01)
# the function to be run length(rnd_threshold) times: | |
# Run one Game of Life simulation for a given starting density and record the
# proportion of live cells over time.
#
# Args:
#   thr:      approximate starting density; each cell starts alive where a
#             uniform(0, 1) draw falls below 'thr'
#   n_rows:   board height in cells (default 500)
#   n_cols:   board width in cells (default 500)
#   n_cycles: number of generations to iterate (default 5)
#
# Returns: a numeric vector of length n_cycles + 1. Element 1 is the exact
#   starting proportion alive; element i + 1 is the proportion after
#   generation i.
#
# The seed is fixed inside the function so that every density run starts from
# the SAME random number sequence — only 'thr' differs between runs.
ParThreshold <- function(thr, n_rows = 500, n_cols = 500, n_cycles = 5) {
  # The 2 GOL helper functions adapted from Vadim Vinichenko.
  # Note the assumption that borders are dead, rather than wrapping.

  # Shift the matrix by dr (delta rows) and dc (delta columns), padding the
  # vacated side with zeros; cells pushed past the edge are lost.
  shiftMatrix <- function(mx, dr, dc) {
    nr <- nrow(mx)
    nc <- ncol(mx)
    # Shifting by at least the full extent leaves an all-dead board
    if (abs(dr) >= nr || abs(dc) >= nc) {
      return(matrix(0, nrow = nr, ncol = nc))
    }
    # Rows: prepend/append zero rows, then trim back to nr rows.
    # matrix(0, ...) instead of mat.or.vec(): the latter returns a plain
    # vector when a dimension is 1, which would break rbind/cbind.
    # drop = FALSE keeps single-row/column results as matrices.
    if (dr > 0) {
      mx <- rbind(matrix(0, dr, nc), mx)
      mx <- mx[seq_len(nr), , drop = FALSE]
    } else if (dr < 0) {
      mx <- rbind(mx, matrix(0, -dr, nc))
      mx <- mx[(1 - dr):(nr - dr), , drop = FALSE]
    }
    # Columns: same idea
    if (dc > 0) {
      mx <- cbind(matrix(0, nr, dc), mx)
      mx <- mx[, seq_len(nc), drop = FALSE]
    } else if (dc < 0) {
      mx <- cbind(mx, matrix(0, nr, -dc))
      mx <- mx[, (1 - dc):(nc - dc), drop = FALSE]
    }
    mx
  }

  # Advance the board one generation under Conway's rules
  lifeCycle <- function(mx) {
    # Neighbor counts: sum the 8 shifted copies of the board
    neighbors <- matrix(0, nrow = nrow(mx), ncol = ncol(mx))
    for (dr in -1:1) {
      for (dc in -1:1) {
        if (dr != 0 || dc != 0) {
          neighbors <- neighbors + shiftMatrix(mx, dr, dc)
        }
      }
    }
    # Deaths (fewer than 2 or more than 3 neighbors) and births (exactly 3)
    mx[neighbors > 3 | neighbors < 2] <- 0
    mx[neighbors == 3] <- 1
    mx
  }

  # Produce a starting board
  n_cells <- n_rows * n_cols
  # Fixed seed: identical random sequence for every density run
  set.seed(1)
  board <- matrix(0, nrow = n_rows, ncol = n_cols)
  # Density of 'on' cells is controlled by the threshold 'thr'
  board[runif(n_cells, 0, 1) < thr] <- 1
  # Preallocate as numeric; the original vector(length = ...) created a
  # logical vector that was only coerced to double on first assignment
  prop_on <- numeric(n_cycles + 1)
  prop_on[1] <- sum(board) / n_cells
  # seq_len() is safe for n_cycles == 0, where 1:n_cycles would misbehave
  for (i in seq_len(n_cycles)) {
    board <- lifeCycle(board)
    prop_on[i + 1] <- sum(board) / n_cells
  }
  prop_on
}
# This is a big task, so we'll dish it out to 6 of 8 available server cores | |
# really THINK TWICE before running this code. Anyway it'll only work if you have a | |
# 6+ core machine at your disposal. You could run it on a single core using sapply() | |
# but you'd be waiting possibly more than a couple days for it to finish... | |
# Dispatch one simulation per starting density across worker processes.
# Requires a 6+ core machine with the (base-distributed) parallel package;
# a single-core sapply() equivalent could take days.
library(parallel)
n_workers <- getOption("cl.cores", 6)
cl <- makeCluster(n_workers)
# Each column of A is one density's trajectory of proportions alive
A <- parSapply(cl, rnd_threshold, ParThreshold, n_cycles = 5000)
stopCluster(cl)
# Persist the result so others can analyze it as they see fit
save(A, file = "GOL_Parallel_Densities5000.Rdata")
## to load the data, just do | |
# load("path/path/GOL_Parallel_Densities5000.Rdata") | |
## and a biggish object called A will appear in your workspace. | |
# dim(A) # gives dimensions | |
## the longer dimension are the iterations | |
## the shorter dimension are the densities, hopefully in order... |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment