I hereby claim:
- I am usmanm on github.
- I am usmanm (https://keybase.io/usmanm) on keybase.
- I have a public key ASD2vcsVTE1zJDTEDtiNcPwlkeRe4o41FjTaCERoJ7RLrgo
To claim this, I am signing this object:
Humanoids for the digital world.
I hereby claim:
To claim this, I am signing this object:
#!/bin/bash
# Build and install ZeroMQ 4.1.5 from source as a static library under /usr.
# FIX: the original chain ended with a dangling 'make && \' continuation and
# never installed; finish the chain with 'sudo make install' (mirrors the
# companion nanomsg script, which does install).
sudo apt-get install -y libtool autoconf automake
wget https://github.com/zeromq/zeromq4-1/releases/download/v4.1.5/zeromq-4.1.5.tar.gz && \
tar -xvf zeromq-4.1.5.tar.gz && \
cd zeromq-4.1.5/ && \
./autogen.sh && \
./configure --enable-static --prefix=/usr && \
make && \
sudo make install
#!/bin/bash
# Build and install nanomsg 1.0.0 from source as a static, position-independent
# library under /usr.
# FIX: '&&' was misplaced inside the cmake argument list
# ('-DNN_STATIC_LIB=1 && -DCMAKE_POSITION_INDEPENDENT_CODE=1 \'), which cut the
# configure step in two and dropped the PIC flag; the trailing '&& \' after the
# install step was also left dangling.
wget https://github.com/nanomsg/nanomsg/archive/1.0.0.tar.gz && \
tar -xvf 1.0.0.tar.gz && \
cd nanomsg-1.0.0 && \
mkdir build && \
cd build && \
cmake .. -DCMAKE_INSTALL_PREFIX=/usr -DNN_STATIC_LIB=1 -DCMAKE_POSITION_INDEPENDENT_CODE=1 && \
cmake --build . && \
sudo cmake --build . --target install
# pipeline_kafka Broker API
# Start ZooKeeper and Kafka, wiping any state left over from previous runs.
rm -rf /tmp/zookeeper; ./bin/zookeeper-server-start.sh config/zookeeper.properties
rm -rf /tmp/kafka-logs; ./bin/kafka-server-start.sh config/server.properties

# pipeline_kafka Consumer API
# Append the pipeline_kafka settings to the PipelineDB config, create the
# consumer topic, then produce 1000 small JSON events into it.
cat ~/snippets/pipeline_kafka.conf >> ~/pdb/data/pipelinedb.conf
./bin/kafka-topics.sh --zookeeper localhost:2181 --topic consumer_topic --create --partitions 5 --replication-factor 1
# FIX: produce to the topic created above ('consumer_topic'); the original wrote
# to an unrelated 'my_topic', so the demo data never reached the consumer topic.
# The JSON payload is also quoted explicitly rather than relying on bare-word echo.
for i in $(seq 1 1000); do echo "{ \"x\": $i }"; done | kafkacat -P -b localhost:9092 -t consumer_topic

# pipeline_kafka Producer API
-- Continuous View
-- Declare a stream, then a continuous view that maintains a running count of
-- events per distinct value of x.
CREATE STREAM s0 (x int);

CREATE CONTINUOUS VIEW cv AS
  SELECT x, count(*) FROM s0 GROUP BY x;

-- Feed the stream: 1000 rows with x cycling through 0..9, so each of the 10
-- groups in the view ends up with a count of 100.
INSERT INTO s0 (x)
  SELECT x % 10 FROM generate_series(1, 1000) AS x;

-- Continuous Transform
CREATE STREAM s1 (x int);
CREATE CONTINUOUS TRANSFORM ct AS |
package main | |
import ( | |
"database/sql" | |
"fmt" | |
"math/rand" | |
_ "github.com/lib/pq" | |
) | |
func main() { |
-- Enforce a strongly typed schema on this stream
CREATE STREAM ab_event_stream
(
  name text,              -- experiment name
  ab_group text,          -- A/B group the event belongs to
  event_type varchar(1),  -- single-character event code
  cookie varchar(32)      -- user/session identifier, at most 32 characters
);
CREATE CONTINUOUS VIEW ab_test_monitor AS |
require 'pg'

# Connect to PipelineDB (it speaks the PostgreSQL wire protocol) on its
# default port 6543, as the 'client' user against the 'pipeline' database.
pipeline = PGconn.connect("dbname='pipeline' user='client' host='localhost' port=6543")

# This continuous view will perform 3 aggregations on page view traffic, grouped by url:
#
#   total_count - count the number of total page views for each url
#   uniques     - count the number of unique users for each url
#   p99_latency - determine the 99th-percentile latency for each url
q = "" + |
import psycopg2
import random

# Connect to PipelineDB (it speaks the PostgreSQL wire protocol) on its
# default port 6543, as the 'client' user against the 'pipeline' database.
conn = psycopg2.connect("dbname='pipeline' user='client' host='localhost' port=6543")
pipeline = conn.cursor()

# This continuous view will perform 3 aggregations on page view traffic, grouped by url:
#
#   total_count - count the number of total page views for each url
#   uniques     - count the number of unique users for each url