Last active
April 2, 2023 10:13
-
-
Save dacr/0aca63c5dec2a636b203696e8be238dd to your computer and use it in GitHub Desktop.
spark hello world. / published by https://github.com/dacr/code-examples-manager #b2bd6485-375c-4ad7-a6fc-76c5e65ca57e/59f44cff2291de272eb20d972f7c275f5c1fde7c
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// summary : spark hello world.
// keywords : scala, spark, @testable
// publish : gist
// authors : David Crosson
// license : Apache NON-AI License Version 2.0 (https://raw.githubusercontent.com/non-ai-licenses/non-ai-licenses/main/NON-AI-APACHE2)
// id : b2bd6485-375c-4ad7-a6fc-76c5e65ca57e
// created-on : 2020-05-31T19:54:52Z
// managed-by : https://github.com/dacr/code-examples-manager
// execution : scala 2.12 ammonite script (http://ammonite.io/) - run as follows: 'amm scriptname.sc'
/*
// In REPL mode use AmmoniteSparkSession instead of SparkSession
val spark =
  AmmoniteSparkSession.builder()
    .master("local[*]")
    .getOrCreate()
*/
//import $ivy.`sh.almond::ammonite-spark:0.7.2` | |
import $ivy.`org.apache.spark::spark-sql:3.1.1`, org.apache.spark.sql._ | |
import $ivy.`org.scalatest::scalatest:3.2.6`, org.scalatest._, flatspec._, matchers._ | |
/**
 * Minimal "hello world" Spark test suite: spins up a local SparkSession,
 * runs a trivial RDD computation, and checks the result.
 *
 * Fix over the original: the SparkSession was never stopped, leaving the
 * session/context (and its local executor threads) alive after the suite
 * finished. `BeforeAndAfterAll` (in scope via `org.scalatest._`) now stops
 * it in `afterAll`.
 */
class SparkHelloTest extends AnyFlatSpec with should.Matchers with BeforeAndAfterAll {
  override def suiteName = "SparkHelloTest"

  // Local-mode session using all available cores ("local[*]").
  val spark =
    SparkSession.builder()
      .master("local[*]")
      .getOrCreate()

  def sc = spark.sparkContext

  // Release the session (and underlying SparkContext) once all tests ran.
  override def afterAll(): Unit = spark.stop()

  "spark" should "be able to compute simple computation on a given RDD" in {
    // 1..100 across 10 partitions; sum of (x + 1) = 5050 + 100 = 5150.
    val rdd = sc.parallelize(1 to 100, 10)
    val n = rdd.map(_ + 1).sum()
    n shouldBe 5150
  }
}
// Launch the suite programmatically; -oDF = standard output with durations
// and full stack traces, -s selects the suite by fully-qualified name.
val runnerArguments = Array("-oDF", "-s", classOf[SparkHelloTest].getName)
org.scalatest.tools.Runner.main(runnerArguments)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment