Create a CRD — just the definition — as cluster-admin
cat >> crd.yaml << EOF
apiVersion: apiextensions.k8s.io/v1beta1
kind: CustomResourceDefinition
metadata:
  name: crontabs.stable.example.com
package main | |
import ( | |
"context" | |
"flag" | |
"fmt" | |
"log" | |
"net/http" | |
"os" | |
"os/signal" |
from __future__ import print_function | |
# | |
# Licensed to the Apache Software Foundation (ASF) under one or more | |
# contributor license agreements. See the NOTICE file distributed with | |
# this work for additional information regarding copyright ownership. | |
# The ASF licenses this file to You under the Apache License, Version 2.0 | |
# (the "License"); you may not use this file except in compliance with | |
# the License. You may obtain a copy of the License at | |
# | |
# http://www.apache.org/licenses/LICENSE-2.0 |
# Fetch the Torch tutorial notebooks.
git clone https://github.com/torch/tutorials.git
# Run the dl-docker CPU image, exposing Jupyter (8888) and TensorBoard (6006),
# and mounting the cloned tutorials into the container as a shared folder.
# $PWD is quoted so the mount still works when the path contains spaces.
docker run -it -p 8888:8888 -p 6006:6006 \
    -v "$PWD/tutorials:/root/sharedfolder" \
    floydhub/dl-docker:cpu jupyter notebook
import requests
# Fix: the original read "import zip" — there is no `zip` module (zip is a
# builtin function); the stdlib archive module is `zipfile`. It is not used
# in the visible lines, but presumably the downloaded archive is opened with
# it further down — TODO confirm against the rest of the script.
import zipfile

# Quandl file name
filename = 'wiki_prices.zip'

# Fetch the wiki_prices.zip file: the datatables API with qopts.export=true
# returns JSON describing the bulk-export download, which we parse below.
r = requests.get(
    'https://www.quandl.com/api/v3/datatables/WIKI/PRICES?qopts.export=true&api_key=YOUR-API-KEY')
resp = r.json()
package main | |
import ( | |
"crypto/tls" | |
"golang.org/x/crypto/acme/autocert" | |
"log" | |
"net" | |
"net/http" | |
) |
WHITESPACE ODDITY
by Paul Phillips, in eternal admiration of David Bowie, RIP
Bound Ctrl to Major mode
Bound Ctrl to Major mode
Read inputrc and set extdebug on
Bound Ctrl to Major mode (Ten, Nine, Eight, Seven, Six)
Connecting readline, options on (Five, Four, Three)
Check the syntax, may terminfo be with you (Two, One, Exec)
# Enable Graphite
*.sink.graphite.class=org.apache.spark.metrics.sink.GraphiteSink
*.sink.graphite.host=<graphite host>
*.sink.graphite.port=<graphite port>
*.sink.graphite.period=10
# Enable jvm source for instance master, worker, driver and executor
master.source.jvm.class=org.apache.spark.metrics.source.JvmSource
worker.source.jvm.class=org.apache.spark.metrics.source.JvmSource
driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource
. |
Added configuration parameter "fs.swift.service.<name>.trust-id".
When you use the "fs.swift.service.<name>.trust-id" parameter,
you have to set "fs.swift.service.<name>.auth.url" to a Keystone V3 URL (http://keystone-host:5000/v3/auth/tokens)
Example: | |
$ hadoop fs -ls -Dfs.swift.service.sahara.auth.url=http://localhost:5000/v3/auth/tokens \ | |
-Dfs.swift.service.sahara.username=<swift-proxy-user> \ | |
-Dfs.swift.service.sahara.password=<swift-proxy-user-password> \ | |
-Dfs.swift.service.sahara.trust-id=<trust-id> \ | |
swift://<container-name>.sahara/ |