Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save WinstonCampeau/800a55db17c52f460faae71d87f67593 to your computer and use it in GitHub Desktop.
Save WinstonCampeau/800a55db17c52f460faae71d87f67593 to your computer and use it in GitHub Desktop.
Naive Biodiversity Modelling
# Generates a dictionary of regional sets (id: array). Regional sets can vary
# in species composition and number of populations (each array element is one
# population; its value is a species id).
import numpy as np
import random

reg_sets = {}    # region id -> np.ndarray of population species ids
global_set = []  # running union of all species ids across regions
n_set = []
unique = {}      # region id -> count of distinct species in that region

# Adjust the number of regional sets, the species-id range, and the number of
# populations in each set here.
for i in range(0, 7):
    # NOTE(review): the original called bare `randint`/`union` (Sage builtins);
    # `random.randint` / `np.union1d` are the plain-Python equivalents.
    ran0 = random.randint(2, 6)  # species ids drawn from [0, ran0)
    ran1 = random.randint(2, 4)  # number of populations in this region
    reg_sets[i] = np.random.randint(ran0, size=ran1)
    unique[i] = len(np.unique(reg_sets[i]))
    global_set = np.union1d(global_set, reg_sets[i])
########################## MODEL ##########################
# Cycles through an integer ring the length of the dictionary (including zero).
# Finds intersections of the ith region with its (i+1)th and (i-1)th neighbours,
# simulating some degree of autocorrelation between regional sets and generating
# local sets of shared species. Can also observe how the global set changes
# through iterations.
# Although this model includes local sets (migration, effectively), it still
# randomly removes a population and randomly inserts a new one.
# The next model ought to include random mutations (e.g. 3 <- 4 -> 5), perhaps
# some barriers to migration or logic governing removal/addition.
from functools import reduce

globe = []          # current global species pool
mono_iter = []      # counter values at which some region was a monoculture
mono_iter_all = []  # counter values at which every region was a monoculture
counter = 0
done = False  # fixes original bug: `break` only exited the inner `for`,
              # so `while True` spun forever after either stop condition

while not done:
    for j, k in reg_sets.items():
        globe = reduce(np.union1d, reg_sets.values())
        # Remove one random population from region j ...
        new_k = np.delete(k, random.randint(0, len(k) - 1))
        # ... and replace it with a random species drawn from the local pool:
        # this ring position plus its two neighbours (migration).
        n_regions = len(reg_sets)
        local = reduce(np.union1d, (reg_sets.get(counter % n_regions),
                                    reg_sets.get((counter + 1) % n_regions),
                                    reg_sets.get((counter - 1) % n_regions)))
        new_k = np.insert(new_k, 0, local[random.randint(0, len(local) - 1)])
        reg_sets[j] = new_k
        unique[j] = len(np.unique(new_k))
        counter = counter + 1

        # Recompute the global pool after this region's update.
        globe = reduce(np.union1d, reg_sets.values())
        if unique[j] == 1:
            mono_iter.append(counter)
        if len(globe) == 1:
            # An iteration here is one full pass over the dictionary.
            print("Global monoculture achieved after:", counter // len(reg_sets), " iterations")
            done = True
            break
        # Safety stop if the simulation is taking too much computational time
        # (>= rather than ==, so the guard cannot be stepped over).
        elif counter >= 1000000:
            done = True
            break
        elif sum(unique.values()) == len(reg_sets):
            mono_iter_all.append(counter)

# Very hard to achieve monocultures with increasing diversity and number of regional sets!
print(len(mono_iter), " instances of individual regional monocultures before global monoculture")
print(len(mono_iter_all), " instances of complete regional monocultures before global monoculture")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment