show dbs  // mongo shell: list available databases
# on ckamaster1
sudo -i
apt install docker.io kubeadm=1.15.1-00 kubectl=1.15.1-00 kubelet=1.15.1-00
kubeadm init --config=kubeadm-config.yaml --upload-certs | tee kubeadm-init.out
## OUTPUT
# kubeadm join ckamaster:6443 --token 4gttsi.cpkvihy9jwb8o8aq \
#     --discovery-token-ca-cert-hash sha256:c70ef81c9dd7064432a255b9f617e8c3c83b4cce64db17e2518c32b0e3aaf460
exit
mkdir -p $HOME/.kube
sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
sudo chown $(id -u):$(id -g) $HOME/.kube/config   # make the kubeconfig owned by the non-root user
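With the kubeconfig in place, you can sanity-check the control plane programmatically; a minimal sketch using the official Kubernetes Python client (assumes pip install kubernetes):

from kubernetes import client, config

# Load the admin kubeconfig copied above ($HOME/.kube/config)
config.load_kube_config()

# List nodes to confirm the API server answers and the node registered
v1 = client.CoreV1Api()
for node in v1.list_node().items:
    print(node.metadata.name, node.status.node_info.kubelet_version)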
version: "2.1" | |
services: | |
namenode: | |
build: ./namenode | |
container_name: namenode | |
volumes: | |
- hadoop_namenode:/hadoop/dfs/name | |
environment: | |
- CLUSTER_NAME=test |
version: "2" | |
services: | |
namenode: | |
build: ./namenode | |
image: bde2020/hadoop-namenode:1.1.0-hadoop2.7.1-java8 | |
container_name: namenode | |
volumes: | |
- hadoop_namenode:/hadoop/dfs/name | |
environment: |
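Once a fragment like this is brought up with docker-compose up -d, a quick liveness check is possible from Python; a sketch using the Docker SDK for Python (assumes pip install docker):

import docker

# Look up the container by the container_name set in the compose file
docker_client = docker.from_env()
namenode = docker_client.containers.get("namenode")
print(namenode.status)  # "running" once the service is up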
Tools
- Kops https://github.com/kubernetes/kops/blob/master/docs/README.md
- Kubectl https://kubernetes.io/docs/tasks/tools/install-kubectl/
- kubespy https://blog.pulumi.com/kubespy-trace-a-real-time-view-into-the-heart-of-a-kubernetes-service
- kops https://icicimov.github.io/blog/virtualization/Kubernetes-Cluster-in-AWS-with-Kops/
Presentations
// PubNub Java SDK imports for a publish/subscribe example with listeners
import com.google.gson.JsonObject;
import com.pubnub.api.PNConfiguration;
import com.pubnub.api.PubNub;
import com.pubnub.api.callbacks.PNCallback;
import com.pubnub.api.callbacks.SubscribeCallback;
import com.pubnub.api.enums.PNStatusCategory;
import com.pubnub.api.models.consumer.PNPublishResult;
import com.pubnub.api.models.consumer.PNStatus;
import com.pubnub.api.models.consumer.pubsub.PNMessageResult;
import com.pubnub.api.models.consumer.pubsub.PNPresenceEventResult;
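The imports above come from a PubNub Java publish/subscribe example; as a runnable counterpart, here is a minimal publish sketched with PubNub's Python SDK (the demo keys are placeholders, substitute your own keyset):

from pubnub.pnconfiguration import PNConfiguration
from pubnub.pubnub import PubNub

pnconfig = PNConfiguration()
pnconfig.subscribe_key = "demo"  # placeholder key
pnconfig.publish_key = "demo"    # placeholder key
pnconfig.uuid = "example-client"

pubnub = PubNub(pnconfig)

# sync() blocks until the publish result envelope comes back
envelope = pubnub.publish().channel("my_channel").message({"msg": "hello"}).sync()
print(envelope.result.timetoken)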
from pymongo import MongoClient

def addUser(database, user):
    return database.customers.insert_one(user).inserted_id

client = MongoClient(port=27017)
db = client.store
# This is the object that the application would be working with anyway and
# can be inserted directly as a document (field values are illustrative):
user = {"name": "Ada Lovelace", "email": "ada@example.com"}
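A quick usage pass for the helper above; the find_one just confirms the insert round-trips:

user_id = addUser(db, user)
print(db.customers.find_one({"_id": user_id}))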
Currently, only data from the last 90 days is available via the OpenAQ API. However, there is much more data available on OpenAQ, along with a variety of access mechanisms. Note also that work is under way to bring back an API mechanism for accessing data older than 90 days; details here.
If you're looking to query across all the data, or to export the data (or a subset of it), the easiest way to do that currently is a service like Amazon Athena. I'll provide directions below, but at a high level, this lets you run any query you'd like (written in SQL) against the entire dataset. I'll also provide some sample queries so you can see what's possible.
On to the directions!
- You will need an AWS account if you don't already have one; you can start that process at htt
--
-- This will register the "planet" table within your AWS account
--
CREATE EXTERNAL TABLE planet (
  id BIGINT,
  type STRING,
  tags MAP<STRING,STRING>,
  lat DECIMAL(9,7),
  lon DECIMAL(10,7),
  nds ARRAY<STRUCT<ref: BIGINT>>,
  -- remaining columns and storage clause follow the OSM public dataset layout on AWS
  members ARRAY<STRUCT<type: STRING, ref: BIGINT, role: STRING>>,
  changeset BIGINT,
  timestamp TIMESTAMP,
  uid BIGINT,
  user STRING,
  version BIGINT
)
STORED AS ORCFILE
LOCATION 's3://osm-pds/planet/';
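With the table registered, queries can be run from the Athena console or programmatically. A sketch of one sample query via boto3 (the results bucket is a placeholder you'd replace with your own):

import boto3

athena = boto3.client("athena", region_name="us-east-1")

# Sample query: count elements in the planet table by type
response = athena.start_query_execution(
    QueryString="SELECT type, count(*) AS n FROM planet GROUP BY type",
    QueryExecutionContext={"Database": "default"},
    ResultConfiguration={"OutputLocation": "s3://my-athena-results/"},  # placeholder bucket
)
print(response["QueryExecutionId"])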
// PubNub Java SDK and java.net imports for a hand-rolled HTTPS request example
import com.pubnub.api.PubNubException;
import com.pubnub.api.PubNubUtil;
import javax.net.ssl.HttpsURLConnection;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Date;
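For comparison, the raw HTTPS fetch these imports support is a few lines in Python's standard library; the PubNub /time endpoint below is used only as a reachable example URL:

from urllib.request import urlopen

# Fetch and print a response body over HTTPS
with urlopen("https://ps.pndsn.com/time/0") as resp:
    print(resp.read().decode())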