View dump.diff
diff --git a/proc.py b/proc.py
index a5cdafb..cc061ad 100755
--- a/proc.py
+++ b/proc.py
@@ -3,6 +3,8 @@
import sys
from bs4 import BeautifulSoup
+SEEN_SEPARATE = {}
+SEEN_ALL_IN_ONE = []
View gist:735a0e50f802c16b2e700395e15be415
mysql> select person,group_concat(organization) from aiwatch.positions where organization in (select distinct(donee) from donations where donor='Open Philanthropy Project') and person in (select distinct(donor) from donations where donee='Machine
Intelligence Research Institute') group by person;
+---------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| person | group_concat(organization) |
+---------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Andrew Hay | Machine Intelligence Re
View gist:6cda8464b7feb7ff64fa14014ace5912
('c1.medium', 'us-east-1a') 0.0130209677419
('c1.medium', 'us-east-1b') 0.013
('c1.medium', 'us-east-1c') 0.013
('c1.medium', 'us-east-1d') 0.0130064516129
('c1.medium', 'us-east-1e') 0.13
('c1.xlarge', 'us-east-1a') 0.0729576651306
('c1.xlarge', 'us-east-1b') 0.0727055376344
('c1.xlarge', 'us-east-1c') 0.0739697849462
('c1.xlarge', 'us-east-1d') 0.0761150921659
('c1.xlarge', 'us-east-1e') 0.52
View object.mysql
# potential fields (from object storage):
provider # e.g. "Amazon S3"
name # name of plan
storage_cost # in $/GB/month. This has to somehow accept tiered pricing like on s3 https://aws.amazon.com/s3/pricing/?nc=sn&loc=4 and https://azure.microsoft.com/en-us/pricing/details/storage/blobs/
download_cost # in $/GB. I think this is the same as "data retrieval" https://azure.microsoft.com/en-us/pricing/details/storage/blobs/
upload_cost? # in $/GB. I think this is the same as "data write" https://azure.microsoft.com/en-us/pricing/details/storage/blobs/
transfer_cost # e.g. to another region https://azure.microsoft.com/en-us/pricing/details/storage/blobs/ This can depend on the region so it's actually a function...
write_op_cost # in $/10000 or $/1000
read_op_cost # in $/10000 or $/1000
list_op_cost # in $/10000 or $/1000
View dump.mysql
# potential fields (from block storage):
provider # e.g. "Amazon EBS"
name # name of the plan, e.g. "Azure Managed Disks P4"
storage_type # SSD/HDD, etc.
disk_size # in GB; google and amazon don't have fixed sizes
storage_cost # in $/GB/month? or $/month since we already have the disk_size? but google's disks seem to have flexible sizing so we don't know the size necessarily https://cloud.google.com/persistent-disk/
iops_cost # cost for IOPS (input/output operations per second), in $/IOPS/month; see https://aws.amazon.com/ebs/pricing/
iops # per disk https://azure.microsoft.com/en-us/pricing/details/managed-disks/
iops_burst_performance # ??? https://aws.amazon.com/ebs/previous-generation/
throughput # per disk, in MB/second https://azure.microsoft.com/en-us/pricing/details/managed-disks/
View influencerMoneyMovedList.php
<?php
// Find all donations that fit into an existing (donee, donor, date range)
// combination from the money_moved table
$query = <<<'EOD'
select
donations.donee, donations.donor, sum(donations.amount) as amount_accounted,
sum(money_moved.amount) as amount_claimed,
start_date, end_date
from
View analysis.py
#!/usr/bin/env python3
import pdb
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from sklearn.cluster import KMeans
from mpl_toolkits.mplot3d import Axes3D
View gist:fdc7830cddd98d5aa51cb1afa5b9c180
<h1>India from 20050000 to 20100000</h1>
query = select *,(select shortname from datasets where datasets.url = database_url) as shortname from data where region = ? and odate between ? and ? and metric = ? having not (shortname REGEXP '^ted')
<br/>parameters to query: region = India, start_date = 20050000, end_date = 20100000, metric = GDP
<br/>Reading a row from data
<br/>Reading a row from data
<br/>Reading a row from data
<br/>Reading a row from data
<br/>Reading a row from data
<br/>Reading a row from data
View README.md

Instructions for converting model code to a hyperlinked SVG

  1. Copy the model code in causes.txt to the "Model code" box in http://www.dagitty.net/dags.html and update the model.

  2. In the top menu, click "Model" then "Export as SVG".

  3. Open the model SVG in Vim and run the following commands:

    %!tidy -xml -q -i -w 0
View gist:278bdf870fd667bc3f324d1ba656490c
Martin_Garrix 2004_Summer_Olympics link 99
other-search 2004_Summer_Olympics_national_flag_bearers external 174
other-empty 2004_Summer_Olympics_national_flag_bearers external 311
2016_Summer_Olympics_Parade_of_Nations 2004_Summer_Olympics_national_flag_bearers link 26
2004_Summer_Olympics_opening_ceremony 2004_Summer_Olympics_national_flag_bearers link 379
2000_Summer_Olympics_national_flag_bearers 2004_Summer_Olympics_national_flag_bearers link 43
2008_Summer_Olympics_Parade_of_Nations 2004_Summer_Olympics_national_flag_bearers link 45
Main_Page 2004_Summer_Olympics other 205
June_11 2004_Summer_Olympics other 28
other-external 2004_Summer_Paralympics external 115