# One-time setup of a GitHub deploy key for Travis CI.

# Install the Travis CLI (used afterwards to encrypt the deploy key).
gem install travis

# Generate a dedicated SSH key pair with no passphrase (-N '') so CI can
# use it non-interactively.
ssh-keygen -t rsa -b 4096 -f 'github_deploy_key' -N ''

# Print the public half so it can be copied.
cat github_deploy_key.pub

# Copy the public key and add it as a deploy key on GitHub,
# e.g. https://github.com/<owner>/<repo>/settings/keys
env: | |
- tf_version=0.8.8 | |
- tf_version=0.9.11 | |
sudo: required | |
language: bash | |
before_install: | |
- wget https://releases.hashicorp.com/terraform/${tf_version}/terraform_${tf_version}_linux_amd64.zip -O /tmp/terraform.zip |
Here, I introduce two sample Node.js scripts for uploading files to Slack.
form-data
.boundary
using Buffer.concat()
.server { | |
listen 80; | |
# set root and server_name here | |
# only serve validation files for Let's Encrypt on port 80 | |
location /.well-known/acme-challenge/ { | |
try_files $uri /dev/null =404; | |
} | |
# otherwise to SSL |
library("ggplot2") | |
sample.jtl <- read.csv("~/repos/work/csv-percentiles/sample.csv") | |
sample.jtl$ts <- as.POSIXct(sample.jtl$ts / 1000, origin="1970-01-01") | |
interval_size <- '5 sec' | |
wanted_percentiles <- c(0.5, 0.9, 0.95, 0.99) | |
get_quantiles <- function(items) { | |
yy <- quantile(items, wanted_percentiles) |
@import "~normalize-scss/sass/normalize"; | |
$fa-font-path: "~font-awesome/fonts"; | |
@import "~font-awesome/scss/font-awesome.scss"; | |
@import "bourbon"; |
# Power calculation for a chi-squared test with a known effect-size vector.
# The non-centrality parameter for sample size n is n * sum(u^2),
# computed below via the inner product u %*% u.

u <- c(0.1, 0.3, 0.5)   # standardized effect sizes
chi_df <- 3             # degrees of freedom of the test statistic
target_power <- 0.9     # desired power (for reference when choosing n)

# Critical value of the central chi-squared distribution at the
# 5% significance level.
chi_base <- qchisq(0.95, chi_df)

# Power at sample size n: probability that a non-central chi-squared
# variate (df = chi_df, ncp = n * sum(u^2)) exceeds chi_base.
# drop() collapses the 1x1 matrix from u %*% u so a plain numeric
# scalar is returned instead of a 1x1 matrix.
get_power <- function(n) {
  pchisq(q = chi_base, df = chi_df, ncp = n * drop(u %*% u),
         lower.tail = FALSE)
}

get_power(20)
# Makefile for transpiling with Babel in a Node app, or in a client- or | |
# server-side shared library. | |
.PHONY: all clean | |
# Install `babel-cli` in a project to get the transpiler. | |
babel := node_modules/.bin/babel | |
# Identify modules to be transpiled by recursively searching the `src/` | |
# directory. |
# Create using kubectl: | |
# $ kubectl create -f splunk-daemonset.yaml | |
# | |
# You should also add config on your indexer to deal with the json formatted files: | |
# https://answers.splunk.com/answers/148307/how-to-parse-and-extract-json-log-files-in-splunk.html | |
# | |
apiVersion: extensions/v1beta1 | |
kind: DaemonSet | |
metadata: | |
name: splunk-forwarder |