@juanrh
Created July 7, 2015 18:50
Spark Streaming contexts can be created and destroyed very quickly
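The snippet below creates a single SparkContext and then, over 100 iterations, builds a StreamingContext on top of it with a 10 ms batch interval, attaches a ConstantInputDStream, starts the context, sleeps 50 ms, and stops it again without stopping the shared SparkContext.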
package streaming.experiments

object OpenSparkStreamingContexts extends App {
  import org.apache.spark._
  import org.apache.spark.streaming._
  import org.apache.spark.streaming.dstream.ConstantInputDStream

  // One SparkContext is shared by all the StreamingContexts created below
  val conf = new SparkConf().setMaster("local[5]").setAppName("OpenSparkStreamingContexts")
  val sc = new SparkContext(conf)

  // Repeatedly create a StreamingContext on the shared SparkContext, run it briefly,
  // and stop it without stopping the underlying SparkContext
  for (i <- 1 to 100) {
    println(s"attempt $i")
    val ssc = new StreamingContext(sc, Duration(10)) // 10 ms batch interval
    val xs = sc.parallelize(1 to 100, 4)
    val d = new ConstantInputDStream(ssc, xs) // emits the same RDD on every batch
    d.print(3) // print the first 3 elements of each batch
    ssc.start()
    Thread.sleep(50)
    ssc.stop(stopSparkContext = false, stopGracefully = false)
  }

  sc.stop()
}
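To try this as a standalone sbt project, a build definition along these lines should work. The Spark and Scala versions are assumptions (Spark 1.4.0 was current when the gist was created), not something the gist specifies.

// build.sbt -- a minimal sketch; the versions below are assumptions
name := "streaming-experiments"

scalaVersion := "2.10.5"

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "1.4.0",
  "org.apache.spark" %% "spark-streaming" % "1.4.0"
)

With that in place, `sbt run` should execute OpenSparkStreamingContexts locally, since the master is hard-coded to local[5] in the snippet above.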