spark-rest-submit.sh
# Submit the SparkPi example through the standalone master's REST submission server (port 6066).
# The sparkProperties block is cut off in the original preview; the API requires at least spark.app.name and spark.master, and the values below assume the same standalone master.
curl -X POST http://master-host:6066/v1/submissions/create --header "Content-Type:application/json" --data '{
  "action": "CreateSubmissionRequest",
  "appResource": "hdfs://localhost:9000/user/spark-examples_2.11-2.0.0.jar",
  "clientSparkVersion": "2.0.0",
  "appArgs": [ "10" ],
  "environmentVariables": {
    "SPARK_ENV_LOADED": "1"
  },
  "mainClass": "org.apache.spark.examples.SparkPi",
  "sparkProperties": {
    "spark.app.name": "SparkPi",
    "spark.master": "spark://master-host:6066",
    "spark.submit.deployMode": "cluster",
    "spark.jars": "hdfs://localhost:9000/user/spark-examples_2.11-2.0.0.jar",
    "spark.driver.supervise": "false"
  }
}'
db-example.go
package main

import (
	"database/sql"
	"fmt"

	_ "github.com/mattn/go-sqlite3" // blank import registers the "sqlite3" driver with database/sql
)

func main() {
	// The gist preview cuts off at this point; see the sketch below for a typical body.
	fmt.Println("see sketch below")
}
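The preview above stops at the start of main. As a rough sketch of what a go-sqlite3 example typically does, the following opens an in-memory database, creates a table, inserts a row, and reads it back; the table and column names are illustrative, not taken from the original gist.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	// Open an in-memory SQLite database.
	db, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Create a throwaway table and insert one row.
	if _, err := db.Exec(`CREATE TABLE people (name TEXT, age INT)`); err != nil {
		log.Fatal(err)
	}
	if _, err := db.Exec(`INSERT INTO people (name, age) VALUES (?, ?)`, "Alice", 30); err != nil {
		log.Fatal(err)
	}

	// Read the row back.
	var name string
	var age int
	if err := db.QueryRow(`SELECT name, age FROM people`).Scan(&name, &age); err != nil {
		log.Fatal(err)
	}
	fmt.Println(name, age)
}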
account_main.go
func main() {
	aliceAcct := OpenSavingsAccount("12345", "Alice", time.Date(1999, time.January, 03, 0, 0, 0, 0, time.UTC))
	fmt.Println("Alice's account =", aliceAcct)
	aliceAcct.Deposit(Money(100.0))
	fmt.Println("Alice's account (after deposit) =", aliceAcct)
	if err := aliceAcct.Withdraw(Money(10)); err != nil {
		fmt.Println(err)
	} else {
		// The preview is truncated here; printing the account after a successful withdrawal is the natural continuation.
		fmt.Println("Alice's account (after withdrawal) =", aliceAcct)
	}
}
account.go
package account

import (
	"errors"
	"fmt"
	"time"
)

// Account is an interface that wraps the common behavior for accounts.
// The file is truncated in this preview; the method set shown here is inferred from its use in account_main.go.
type Account interface {
	Deposit(amount Money)
	Withdraw(amount Money) error
}
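The rest of account.go is not shown in the preview. Based on the calls made in account_main.go (Money, OpenSavingsAccount, Deposit, Withdraw), the remainder plausibly looks something like the sketch below; the field names and the no-overdraft rule are illustrative assumptions, not the gist's actual code.

// Money is the currency amount used by accounts (assumed representation).
type Money float64

// SavingsAccount is one concrete Account implementation (sketch).
type SavingsAccount struct {
	number  string
	holder  string
	opened  time.Time
	balance Money
}

// OpenSavingsAccount creates a savings account for the given holder (sketch).
func OpenSavingsAccount(number, holder string, opened time.Time) *SavingsAccount {
	return &SavingsAccount{number: number, holder: holder, opened: opened}
}

// Deposit adds the amount to the balance.
func (s *SavingsAccount) Deposit(amount Money) {
	s.balance += amount
}

// Withdraw removes the amount, refusing to overdraw the account.
func (s *SavingsAccount) Withdraw(amount Money) error {
	if amount > s.balance {
		return errors.New("insufficient funds")
	}
	s.balance -= amount
	return nil
}

// String makes accounts print nicely with fmt.Println.
func (s *SavingsAccount) String() string {
	return fmt.Sprintf("%s (%s) balance: %.2f", s.holder, s.number, float64(s.balance))
}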
Unix Shell related commands
start-spark-master-slave-history-servers.cmd
TITLE Launcher - Spark Master, 1 Worker and History Server
set SPARK_HOME=C:\aravind\sw\spark-2.0.2-bin-hadoop2.7
:: - Find the IP address, store it in the IP_ADDR env var, and reuse it when launching the Worker
for /f "tokens=1-2 delims=:" %%a in ('ipconfig^|find "IPv4"') do set ip=%%b
set IP_ADDR=%ip:~1%
echo %IP_ADDR%
:: - Start master
START "Spark Master" %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.master.Master
:: - The original preview is truncated here; per the title, the worker and history server launches would follow, e.g.:
:: - Start 1 worker, registering with the master started above
START "Spark Worker" %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.worker.Worker spark://%IP_ADDR%:7077
:: - Start history server
START "Spark History Server" %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.history.HistoryServer
spark-cluster-windows
# - Prints all options available to start the worker
%SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.worker.Worker --help
# - Prints all options available to start the master
%SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.master.Master --help
# - Start master - Web UI on http://localhost:8080
%SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.master.Master
# - Start worker with 1 core and 64MB RAM - Web UI on http://localhost:8081
# (the command was cut off in the original preview; a typical invocation against the local master:)
%SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.worker.Worker --cores 1 --memory 64M spark://localhost:7077
spark-cli docopt
Apache Spark CLI

Usage:
  spark-cli apps [--completed | --running]
  spark-cli app <app-id>

Options:
  -h, --help     Show help.
  -v, --version  Show version.
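A docopt string like the one above can be handed straight to a parser. The sketch below assumes the docopt-go bindings (github.com/docopt/docopt-go) and its ParseDoc helper; what the program does with the parsed options is illustrative, not taken from the gist.

package main

import (
	"fmt"
	"log"

	docopt "github.com/docopt/docopt-go"
)

const usage = `Apache Spark CLI

Usage:
  spark-cli apps [--completed | --running]
  spark-cli app <app-id>

Options:
  -h, --help     Show help.
  -v, --version  Show version.`

func main() {
	// docopt matches os.Args against the usage string and returns a map-like Opts value.
	opts, err := docopt.ParseDoc(usage)
	if err != nil {
		log.Fatal(err)
	}
	if apps, _ := opts.Bool("apps"); apps {
		fmt.Println("listing applications")
		return
	}
	if id, err := opts.String("<app-id>"); err == nil {
		fmt.Println("showing application", id)
	}
}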
first_test.go
package example

import "testing"

func TestNameAndAge(t *testing.T) {
	n, a := NameAndAge()
	if n != "Esha" || a != 7 {
		t.Errorf("Expected 'Esha' and '7' but got %s and %d", n, a)
	}
}
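The function under test is not shown in this preview. For the test to pass, the example package would need something along these lines; the implementation below is a hypothetical stand-in, not the gist's actual source.

package example

// NameAndAge returns a name and an age; hard-coded here purely so that
// TestNameAndAge has something to exercise (hypothetical stand-in).
func NameAndAge() (string, int) {
	return "Esha", 7
}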
first.go
package main

import (
	"fmt"

	"github.com/yaravind/example"
)

func main() {
	fmt.Println("Example - Hello World")
	// (truncated in the original preview; calling the imported package is the natural continuation)
	n, a := example.NameAndAge()
	fmt.Println(n, a)
}