View Hello.g4
// define a grammar called Hello
grammar Hello;
r : 'hello' ID;
ID : [a-z]+ ;
WS : [ \t\r\n]+ -> skip ;
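A minimal way to run this grammar from Python (a sketch: it assumes the lexer/parser have been generated with antlr4 -Dlanguage=Python3 Hello.g4 and that the antlr4-python3-runtime package is installed):
from antlr4 import CommonTokenStream, InputStream
from HelloLexer import HelloLexer
from HelloParser import HelloParser

parser = HelloParser(CommonTokenStream(HelloLexer(InputStream("hello world"))))
tree = parser.r()                       # start from rule r
print(tree.toStringTree(recog=parser))  # e.g. (r hello world)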
View print_sympy_expr.py
from subprocess import run
import sympy as sp


def print_to_file(fname, expr):
    # See https://tex.stackexchange.com/questions/34054/tex-to-image-over-command-line/34058#34058
    t_start = r"""\documentclass[border=2pt]{standalone}
\usepackage{amsmath}
\usepackage{varwidth}
\begin{document}
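The preview cuts off inside the template, but the gist of the approach is that sp.latex presumably supplies the LaTeX body which this standalone document wraps before run() shells out to a TeX-to-image toolchain; for illustration:
x = sp.symbols('x')
expr = sp.Integral(sp.exp(-x**2), (x, -sp.oo, sp.oo))
print(sp.latex(expr))  # the LaTeX source that would sit between the template's begin/end document lines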
View Expr.g4
grammar Expr;
// Need to call recursive rule expr from non-recursive rule
r : expr+ ;
// ANTLR4: Left recursion!
// Operator precedence matches order of definition
expr : '-' expr                // Unary minus
     | expr ('*' | '/') expr
     | expr ('+' | '-') expr
View business_plan.json
{
  "verbs" : ["aggregate", "architect", "benchmark", "brand", "cultivate", "deliver", "deploy", "disintermediate",
             "disrupt", "drive", "e-enable", "embrace", "empower", "enable", "engage", "engineer", "enhance",
             "envisioneer", "evolve", "expedite", "exploit", "extend", "facilitate", "generate", "grow", "harness", "implement", "incentivize", "incubate", "innovate", "integrate",
             "iterate", "leverage", "matrix", "maximize", "mesh", "monetize", "morph", "optimize", "orchestrate", "productize", "recontextualize", "redefine", "reintermediate",
             "reinvent", "repurpose", "revolutionize", "scale", "seize", "strategize", "streamline", "syndicate", "synergize", "synthesize", "target",
             "transform", "transition", "unleash", "utilize", "visualize", "whiteboard"],
  "adjectives": ["24/365", "24/7", "B2B", "B2C", "back-end", "best-of-breed", "bleeding-edge", "bricks-and-clicks", "clicks-and-mortar",
                 "collaborative", "compelling", "cross-p
View kullback_leibler.py
import sympy as sp

x = sp.symbols('x', real=True)
p, q = sp.symbols('p q', positive=True)
KL = sp.Integral(p*sp.log(p) - p*sp.log(q), (x, -sp.oo, sp.oo))

mu, mu1, mu2 = sp.symbols('mu mu1 mu2', real=True)
sig, sig1, sig2 = sp.symbols('sig sig1 sig2', positive=True)
n = 1/sp.sqrt(2*sp.pi*sig**2)*sp.exp(-(x - mu)**2/(2*sig**2))

KL_n = sp.simplify(
    KL.subs({p: n.subs({mu: mu1, sig: sig1}), q: n.subs({mu: mu2, sig: sig2})}).doit()
)
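As a cross-check (not part of the original snippet), the result should match the textbook closed form for the KL divergence between two univariate Gaussians:
KL_closed = sp.log(sig2/sig1) + (sig1**2 + (mu1 - mu2)**2)/(2*sig2**2) - sp.Rational(1, 2)
print(sp.simplify(KL_n - KL_closed))  # expected to print 0 if the integral fully evaluates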
View MaskRCNN_setup.sh
# Amazon SageMaker instance
# open Jupyter
# new terminal
cd SageMaker
git clone https://github.com/matterport/Mask_RCNN.git
git clone https://github.com/waleedka/coco.git
cd coco/PythonAPI
source activate tensorflow_p36
make
python setup.py install
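A quick sanity check of the install (illustrative; run inside the activated tensorflow_p36 environment):
import tensorflow as tf
from pycocotools.coco import COCO  # installed by the coco/PythonAPI build above

print(tf.__version__)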
View lib_install.py
import boto3
import os
import sys
import zipfile

HERE = os.path.dirname(os.path.realpath(__file__))


def library_install():
    """Dynamically add libraries to path, retrieving from S3 if necessary
View Dockerfile
FROM amazonlinux:latest
MAINTAINER Matt McDonnell "matt@matt-mcdonnell.com"
RUN yum -y -q update
RUN yum -y -q install python27-pip zip
RUN pip install -q virtualenv
View s3_upload.json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "s3:PutObject",
        "s3:ListAllMyBuckets",
        "s3:ListBucket",
        "s3:CreateBucket"
View two_compartment.py
import pystan
import numpy as np

# Two compartment model from
# "Stan: A probabilistic programming language for
#  Bayesian inference and optimization" Gelman, Lee, Guo (2015)
# http://www.stat.columbia.edu/~gelman/research/published/stan_jebs_2.pdf
a = np.array([0.8, 1.0])
b = np.array([2, 0.1])
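The rest of the file presumably fits these parameters with Stan; a sketch of the synthetic data a two-compartment model like this generates (the time grid and noise level are assumptions, not taken from the preview):
sigma = 0.2
t = np.linspace(0, 10, 100)
y_true = a[0] * np.exp(-b[0] * t) + a[1] * np.exp(-b[1] * t)        # sum of two exponential decays
y_obs = y_true * np.exp(np.random.normal(0, sigma, size=t.shape))   # multiplicative log-normal noise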