Python Copy Blob
#!/usr/bin/env python
# Copy a blob between Azure storage containers using the legacy azure-sdk-for-python.
import sys
import datetime
import time
from datetime import timedelta

# Use a local checkout of the SDK instead of an installed package.
sys.path.append('/home/reb/local/azure-sdk-for-python/')
from azure.storage import *
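(The preview stops at the imports. A minimal sketch of a copy against the legacy SDK's BlobService API; the account name, key, container, and blob names below are illustrative placeholders, not from the original:)

blob_service = BlobService(account_name='myaccount', account_key='mykey')

# Server-side copy: the source is passed as a full blob URL.
source_url = 'https://myaccount.blob.core.windows.net/src-container/src-blob'
blob_service.copy_blob('dest-container', 'dest-blob', x_ms_copy_source=source_url)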
nginx.conf
#
# CORS header support
#
# One way to use this is by placing it into a file called "cors_support"
# under your Nginx configuration directory and placing the following
# statement inside your **location** block(s):
#
# include cors_support;
#
# As of Nginx 1.7.5, add_header supports an "always" parameter which
# adds the header regardless of the response status code.
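(The preview cuts off before the directives themselves. A minimal sketch of what a cors_support file typically contains; the wildcard origin and the method/header lists are illustrative, not from the original:)

add_header 'Access-Control-Allow-Origin' '*' always;
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
add_header 'Access-Control-Allow-Headers' 'Origin, Content-Type, Accept' always;

# Answer CORS preflight requests directly rather than proxying them.
if ($request_method = 'OPTIONS') {
    return 204;
}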
get-kube-aws.sh
#!/usr/bin/env bash
#set -v
# TODO:
# If we continue to use shell scripts here, we need a way to find the latest release; kube-aws doesn't publish a latest.txt file the way kubernetes does for other projects, so we can't just wget it.
# For now we have to know the version up front. We could require a version number as an argument, but short of opening the releases page and clicking the newest link, there's no obvious way to discover it.
kernel=$(uname -s)
case "${kernel}" in
Darwin)
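# (Sketch of how the kernel detection typically finishes; the platform
# strings, the Linux branch, and the fallback below are assumptions,
# not from the original.)
    platform="darwin-amd64"
    ;;
Linux)
    platform="linux-amd64"
    ;;
*)
    echo "Unsupported kernel: ${kernel}" >&2
    exit 1
    ;;
esac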
Readme
I am trying to enable HTTPS for the Pentaho BA Server using an nginx reverse proxy.
I have posted the Tomcat and nginx configurations below.
I am able to access the Pentaho BA Server login page at https://test.mydomain.org/pentaho-test/Login
But when I log in with valid credentials, the browser reports a mixed content error (http & https).
Please help me resolve this.
Browser error:
Mixed Content: The page at 'https://test.mydomain.org/pentaho-test/Login' was loaded over HTTPS, but requested an insecure XMLHttpRequest endpoint 'http://test.mydomain.org/pentaho-test/Home?locale=en_US'. This request has been blocked; the content must be served over HTTPS.
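(For reference, the usual cause: Tomcat never learns that the original request arrived over HTTPS, so Pentaho builds absolute http:// URLs. A minimal sketch of the standard nginx side of the fix; the upstream address is a placeholder, not from the original configs:)

location /pentaho-test/ {
    proxy_pass http://127.0.0.1:8080/pentaho-test/;
    proxy_set_header Host $host;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}

On the Tomcat side, the HTTP connector can be marked as proxied (proxyPort="443" scheme="https" secure="true"), and Pentaho's fully-qualified-server-url context-param in the webapp's WEB-INF/web.xml should point at https://test.mydomain.org/pentaho-test/.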
build.sbt
organization := "net.seratch"
name := "sandbox"
version := "0.1"
scalaVersion := "2.9.1"
libraryDependencies ++= Seq(
"junit" % "junit" % "4.9" withSources(),
0_reuse_code.js
// Use Gists to store code you would like to remember later on
console.log(window); // log the "window" object to the console
remote-ssh-kube-commands.sh
#!/usr/bin/env bash
#
# Author: Chris McConnell
#
# Summary:
# Run the given command list on all nodes of the specified kubernetes role: controllers, etcd, or workers.
#
# Why:
# We have kubernetes and want to run CM jobs / commands on the kube nodes, but CoreOS doesn't ship python etc., so we can't use CM tools here without hacking them up (which we shouldn't); shell always works.
# The plan is to keep building tools on top of this: the output of this script can be slurped into a database, fed to graylog, etc.
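(The preview stops at the header. A minimal sketch of the core loop such a script needs; the role argument, hosts files, and ssh user are assumptions, not from the original:)

role="$1"; shift
command_list="$*"

# Assumes one hostname per line in e.g. ./hosts-controllers, ./hosts-workers.
while read -r host; do
    echo "=== ${host} ==="
    ssh -o StrictHostKeyChecking=no "core@${host}" "${command_list}"
done < "./hosts-${role}"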
mysql2parquet.scala
val sqlContext = new org.apache.spark.sql.SQLContext(sc) // optional in spark-shell, which already defines sqlContext
val df = sqlContext.load("jdbc", Map(
"url" -> "jdbc:mysql://<ip.address.your.db>/<table>?user=<username>&password=<pwd>",
"dbtable" -> "<tablename>"))
df.select("<col1>","<col2>","<col3>").save("</path/to/parquet/file.parquet>","parquet")
// Alternatively, to save all the columns:
df.save("</path/to/parquet/file.parquet>", "parquet")
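(Note: load()/save() with a source string is the Spark 1.3 API. On Spark 1.4+ the equivalent, as a sketch with the same placeholders, would be:)

val df = sqlContext.read.format("jdbc").options(Map(
  "url" -> "jdbc:mysql://<ip.address.your.db>/<database>?user=<username>&password=<pwd>",
  "dbtable" -> "<tablename>")).load()
df.write.parquet("</path/to/parquet/file.parquet>")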