CoreOS project: security analysis for Docker images https://github.com/coreos/clair
Modern dynamic routing (reverse proxy) written in Go https://github.com/containous/traefik
You can use select with varargs including *:

import spark.implicits._

df.select($"*" +: Seq("A", "B", "C").map(c =>
  sum(c).over(Window.partitionBy("ID").orderBy("time")).alias(s"cum$c")
): _*)

This:
- Maps column names to window expressions with Seq("A", ...).map(...)
#!/usr/bin/env node
// Replace the
// chmod +x pg-test.js
// npm install --save pg
// ./pg-test.js
package main

import (
	"encoding/json"
	"fmt"
	"github.com/ghodss/yaml"
	"io/ioutil"
	"path/filepath"
)
CoreOS project: security analysis for Docker images https://github.com/coreos/clair
Modern dynamic routing (reverse proxy) written in Go https://github.com/containous/traefik
import java.sql._

dataframe.coalesce("NUMBER OF WORKERS").mapPartitions((d) => Iterator(d)).foreach { batch =>
  val dbc: Connection = DriverManager.getConnection("JDBCURL")
  val st: PreparedStatement = dbc.prepareStatement("YOUR PREPARED STATEMENT")
  batch.grouped("# Of Rows you want per batch").foreach { session =>
    session.foreach { x =>
      st.setDouble(1, x.getDouble(1))
      st.addBatch()
    }
// Restify server cheat sheet (fragment).
// Full server API reference: http://mcavage.me/node-restify/#server-api
// Setup: npm install restify
// 1.1. Creating a server.
// See: http://mcavage.me/node-restify/#Creating-a-Server
var restify = require('restify');
#!/bin/sh
# str_in_list STR ITEM...
# Succeeds (returns 0) when STR occurs in the space-joined remaining arguments.
# NOTE(review): the ${list#*$str} prefix-strip matches STR as a *substring*
# of the joined list, not as a whole word — e.g. str_in_list "ab" "abc"
# succeeds. Confirm that is the intended semantics before relying on it.
str_in_list() {
str="$1"
shift
# "$@" joins the remaining positional parameters with single spaces.
list="$@"
# Stripping the shortest prefix ending in $str changes $list
# if and only if $str appears somewhere in it.
if test "${list#*$str}" != "$list"
then
return 0 # $str is in $list
else
http://henningpetersen.com/post/22/running-apache-spark-jobs-from-applications
// NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
private def runSparkSubmit(args: Seq[String]): Unit = {
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
val process = Utils.executeCommand(
Seq("./bin/spark-submit") ++ args,