Mark Needham (mneedham)

mneedham / app.py
Created July 20, 2021 16:40
Streamlit app
import streamlit as st
st.title("GitHub Events")
st.write("Welcome to our first Streamlit app")
mneedham / app.py
Last active July 20, 2021 19:52
Basic Pinot query rendered in Streamlit
import streamlit as st
from pinotdb import connect
import pandas as pd
st.title("GitHub Events")
broker_port = 8000
conn = connect(host='localhost', port=broker_port, path='/query/sql', scheme='http')
query = f"""
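    SELECT repo, count(*) AS events
    FROM pullRequestMergedEvents
    GROUP BY repo
    ORDER BY count(*) DESC
    LIMIT 10
"""
# The lines above and below are a sketch only: the gist preview stops at the opening
# of the query string, so the table name (pullRequestMergedEvents, the table created
# by Pinot's GitHubEventsQuickStart), the repo column and everything that follows are
# assumptions, not the original gist's code.
# pinotdb follows the DB-API, so the result can go straight into pandas and Streamlit.
curs = conn.cursor()
curs.execute(query)
df = pd.DataFrame(curs.fetchall(), columns=[col[0] for col in curs.description])
st.write(df)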
mneedham / docker-compose.yml
Created July 19, 2021 20:28
Apache Pinot GitHubEventsQuickStart
version: '3'
services:
  pinot:
    image: apachepinot/pinot:0.7.1
    command: "GitHubEventsQuickStart -personalAccessToken ${GITHUB_TOKEN}"
    container_name: "pinot-github-events-quick-start"
    ports:
      - "9000:9000"
      - "8000:8000"
mneedham / docker-compose.yml
Created March 5, 2020 04:27
GraphConnect 2020 - APOC Training
version: '3.7'
services:
  neo4j:
    image: neo4j:4.0.0-enterprise
    container_name: "gc2020-apoc"
    volumes:
      - ./plugins:/plugins
      - ./data:/data
      - ./import:/import
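The preview cuts off before any ports or environment settings; the enterprise image normally also needs NEO4J_ACCEPT_LICENSE_AGREEMENT=yes and a published Bolt port. Assuming the default Bolt port 7687 and a password set elsewhere, a small check with the official Python driver that the APOC plugin mounted from ./plugins is actually loaded:

from neo4j import GraphDatabase

# Both the Bolt URI and the credentials are assumptions; the compose preview
# above does not show them.
driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "letmein"))
with driver.session() as session:
    print(session.run("RETURN apoc.version() AS version").single()["version"])
driver.close()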
mneedham / blog_domain.py
Last active January 29, 2022 14:40
Altair - Setting a custom date domain for the x axis
# Code for https://markhneedham.com/blog/2020/01/14/altair-range-values-dates-axis/ blog post
import altair as alt
import pandas as pd
import datetime
df = pd.DataFrame( [
{"position": 40, "date": datetime.date(2019,9,5)},
{"position": 31, "date": datetime.date(2019,9,12)},
{"position": 19, "date": datetime.date(2019,9,19)},
mneedham / blog_domain.py
Created January 14, 2020 06:55
Altair - Setting a custom date domain for the x axis
import altair as alt
import pandas as pd
import datetime
df = pd.DataFrame( [
{"position": 40, "date": datetime.date(2019,9,5)},
{"position": 31, "date": datetime.date(2019,9,12)},
{"position": 19, "date": datetime.date(2019,9,19)},
{"position": 14, "date": datetime.date(2019,9,26)},
{"position": 7, "date": datetime.date(2019,10,3)},
CALL apoc.schema.assert(null,{Judge:['id'], Player:["name"], Country:["name"], Club:["name"]});
WITH "1f4nc8vehOiZhEB2_3L2bvXTpag29VcEKJ0PJeo5Runc" AS id
WITH "https://docs.google.com/spreadsheets/d/" + id + "/export?format=csv&id=" + id + "&gid=3" AS uri
LOAD CSV FROM uri AS row
WITH row SKIP 4 LIMIT 100
MERGE (player:Player {name: row[8]})
SET player.rawScore = toInteger(row[14]), player.score = toInteger(row[16])
MERGE (country:Country {name: row[9]})
MERGE (club:Club {name: row[10]})
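Because LOAD CSV is used here without WITH HEADERS, each row arrives as a plain list and the columns are addressed by zero-based position (row[8] for the player, row[9] for the country, row[10] for the club), while SKIP 4 jumps over what are presumably the sheet's header rows before the MERGEs run.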
mneedham / import.cql
Last active September 30, 2019 15:24
Importing Fashion MNIST
WITH "https://gist.github.com/mneedham/49aa07b8842718f5101585777e306009/raw/80b9299b26e165acc38ed0d83c9ab44c7137da3b/small_export.csv" AS uri
CALL apoc.load.csv(uri, {header: false, sep: " "})
YIELD lineNo, list
MERGE (item:MNISTItem {id: lineNo})
ON CREATE SET item.embedding = [value IN list | apoc.convert.toFloat(value)]
mneedham / 00_small_export.csv
Last active September 30, 2019 14:59
Importing Fashion MNIST into Neo4j
0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.00000
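The same space-separated file that apoc.load.csv reads with sep: " " above can be spot-checked locally with pandas; a small sketch using the raw URL from the import.cql gist:

import pandas as pd

# No header row, values separated by single spaces, matching the apoc.load.csv settings above.
uri = "https://gist.github.com/mneedham/49aa07b8842718f5101585777e306009/raw/80b9299b26e165acc38ed0d83c9ab44c7137da3b/small_export.csv"
df = pd.read_csv(uri, sep=" ", header=None)
print(df.shape)  # expect one row per Fashion MNIST item in the export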