View SparkSQLJira.scala
package com.databricks.spark.jira
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.sources.{TableScan, BaseRelation, RelationProvider}
View spark-duplicates.scala
val transactions =
.option("header", "true")
.option("inferSchema", "true")
transactions.groupBy("id", "organization").count.sort($"count".desc).show
View spark-compare-dataframes.scala
val today =
val todayTransactions =
.option("header", "true")
.option("inferSchema", "true")
val yesterdayTransactions =
.option("header", "true")
.option("inferSchema", "true")
curl -X POST http://master-host:6066/v1/submissions/create --header "Content-Type:application/json" --data '{
"action": "CreateSubmissionRequest",
"appResource": "hdfs://localhost:9000/user/spark-examples_2.11-2.0.0.jar",
"clientSparkVersion": "2.0.0",
"appArgs": [ "10" ],
"environmentVariables" : {
"mainClass": "org.apache.spark.examples.SparkPi",
"sparkProperties": {
View db-example.go
package main
import (
_ ""
func main() {
View account_main.go
func main() {
aliceAcct := OpenSavingsAccount("12345", "Alice", time.Date(1999, time.January, 03, 0, 0, 0, 0, time.UTC))
fmt.Println("Alice's account =", aliceAcct)
fmt.Println("Alice's account (after deposit) =", aliceAcct)
if err := aliceAcct.Withdraw(Money(10)); err != nil {
} else {
View account.go
package account
import (
//Account is an interface that wraps the common behavior for accounts.
type Account interface {
View Unix Shell related commands
View start-spark-master-slave-history-servers.cmd
:: Launches a Spark standalone master on Windows in its own console window.
:: NOTE(review): the TITLE promises a worker and a history server too — those
:: launch steps are presumably later in this script; not visible here.
TITLE Launcher - Spark Master, 1 Worker and History Server
:: Location of the local Spark 2.0.2 / Hadoop 2.7 installation.
set SPARK_HOME=C:\aravind\sw\spark-2.0.2-bin-hadoop2.7
:: - Find the IP Address and set it to IP_ADDR env var and reuse it while launching Worker
:: Parse `ipconfig` output: keep only the "IPv4" line, split it on ':',
:: so %%b receives the address (with a leading space from the split).
:: The ^ escapes the pipe so cmd passes it into the for /f subcommand.
for /f "tokens=1-2 delims=:" %%a in ('ipconfig^|find "IPv4"') do set ip=%%b
:: Substring from index 1 drops the leading space left by the ':' split.
set IP_ADDR=%ip:~1%
echo %IP_ADDR%
:: - Start master
:: START spawns the master in a separate titled window so this launcher
:: script can continue to the next step instead of blocking.
START "Spark Master" %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.master.Master
View spark-cluster-windows
:: Cheat-sheet of commands for running a Spark standalone cluster on Windows.
:: Fixed: '#' is not a comment marker in Windows cmd (it would be executed as
:: a command and fail); '::' is the conventional cmd comment form.
:: - Prints all options available to start the worker
%SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.worker.Worker --help
:: - Prints all options available to start the master
%SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.master.Master --help
:: - Start master - Web UI on http://localhost:8080
%SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.master.Master
:: - Start worker with 1 core and 64mb ram - Web UI on http://localhost:8081