Spark configuration options
{
  "home": null,
  "local_dir": null,
  "buffer_size": 65536,
  "kryo": {
    "buffer_size_mb": 10,
    "registrator": null
  },
  "parallelism": null,
  "test": {
    "use_compressed_oops": false,
    "disable_blockmanager_heartbeats": false
  },
  "driver": {
    "host": "localhost",
    "port": 7077
  },
  "hadoop": {},
  "mesos": {
    "coarse": false
  },
  "serializer": {
    "default": "spark.JavaSerializer",
    "closure": "spark.JavaSerializer"
  },
  "broadcast": {
    "compress": true,
    "driverTrackerPort": 11111,
    "block_size": 4096,
    "max_retries": 2,
    "tracker_socket_timeout": 50000,
    "server_socket_timeout": 10000,
    "knock_interval": {
      "min": 500,
      "max": 999
    },
    "max_degree": 2,
    "max_peers_in_guide_response": 4,
    "chat": {
      "max_slots": 4,
      "max_time": 500,
      "max_blocks": 1024
    },
    "end_game_fraction": 0.95
  },
  "user_name": null,
  "akka": {
    "ask_timeout": 10,
    "num_retries": 3,
    "retry_wait_ms": 3000,
    "threads": 4,
    "batch_size": 15,
    "timeout": 20,
    "frame_size": 10,
    "log_lifecycle_events": false,
    "write_timeout": 30
  },
  "cleaner_ttl": -1,
  "worker_timeout": 60,
  "deploy_spreadout": true,
  "repl.class.uri": null,
  "speculation": {
    "interval": 100,
    "quantile": 0.75,
    "multiplier": 1.5
  },
  "starvation": {
    "interval": 15000
  },
  "task": {
    "cpus": 1,
    "revive_interval": 0,
    "schedule_aggression": "NODE_LOCAL"
  },
  "cluster_scheduling": "FIFO",
  "fairscheduler_allocation_file": null,
  "executor_memory": null,
  "cores_max": -1,
  "locality_wait": 3000,
  "logging_exception_print_interval": 10000,
  "mesos_extra_cores": 0,
  "shuffle": {
    "copier_threads": 6,
    "use_netty": false,
    "sender_port": 0,
    "max_mb_inflight": 48,
    "compress": true,
    "file_buffer_kb": 100
  },
  "rdd_compress": true,
  "storage": {
    "memory_fraction": 0.66,
    "blockmanager": {
      "heartbeat_ms": 5000,
      "timeout_interval_ms": 5000,
      "slave_timeout_ms": 15000
    }
  },
  "ui_port": null,
  "disk_subdirectories": 64
}
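
The nested JSON above is just a structured view of the flat spark.* keys listed below; Spark releases of this era read each key as a JVM system property, falling back to the default shown. A minimal sketch of that lookup pattern (the helper name intProp is hypothetical, not part of Spark):

// Sketch only: configuration is read from JVM system properties, with
// defaults like those captured in the JSON above. `intProp` is a
// hypothetical helper used purely for illustration.
object ConfDefaults {
  private def intProp(key: String, default: Int): Int =
    Option(System.getProperty(key)).map(_.toInt).getOrElse(default)

  val broadcastBlockSize: Int = intProp("spark.broadcast.blockSize", 4096)
  val bufferSize: Int         = intProp("spark.buffer.size", 65536)
  val localityWaitMs: Int     = intProp("spark.locality.wait", 3000)
}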
spark.kryoserializer.buffer.mb=10
spark.kryo.registrator=
spark.default.parallelism=
spark.test.useCompressedOops=
spark.driver.host=
spark.driver.port=
spark.mesos.coarse=false
spark.hadoop.foo=
spark.buffer.size=
spark.home=
spark.hostPort=
spark.serializer=spark.JavaSerializer
spark.closure.serializer=spark.JavaSerializer
spark.driver.host=localhost
spark.driver.port=7077
spark.local.dir=${java.io.tmpdir}
spark.buffer.size=65536
spark.broadcast.compress=true
spark.httpBroadcast.uri=
spark.MultiTracker.DriverHostAddress=
spark.broadcast.driverTrackerPort=11111
spark.broadcast.blockSize=4096
spark.broadcast.maxRetryCount=2
spark.broadcast.trackerSocketTimeout=50000
spark.broadcast.serverSocketTimeout=10000
spark.broadcast.minKnockInterval=500
spark.broadcast.maxKnockInterval=999
spark.broadcast.maxDegree=2
spark.broadcast.maxPeersInGuideResponse=4
spark.broadcast.maxChatSlots=4
spark.broadcast.maxChatTime=500
spark.broadcast.maxChatBlocks=1024
spark.broadcast.endGameFraction=0.95
user.name=<unknown>
spark.akka.askTimeout=10
spark.worker.timeout=60
spark.deploy.spreadOut=true
spark.repl.class.uri=
spark.speculation.interval=100
spark.starvation.timeout=15000
spark.tasks.revive.interval=0
spark.tasks.schedule.aggression=NODE_LOCAL
spark.cluster.schedulingmode=FIFO
spark.fairscheduler.allocation.file=
spark.executor.memory=
spark.cores.max=INT_MAX
spark.locality.wait=3000
spark.task.cpus=1
spark.speculation.quantile=0.75
spark.speculation.multiplier=1.5
spark.logging.exceptionPrintInterval=10000
spark.mesos.extra.cores=0
spark.shuffle.copier.threads=6
spark.shuffle.use.netty=false
spark.shuffle.sender.port=0
spark.reducer.maxMbInFlight=48
spark.shuffle.compress=true
spark.rdd.compress=true
spark.storage.memoryFraction=0.66
spark.storage.blockManagerHeartBeatMs=5000
spark.test.disableBlockManagerHeartBeat=false
spark.akka.num.retries=3
spark.akka.retry.wait=3000
spark.storage.blockManagerSlaveTimeoutMs=3*${spark.storage.blockManagerHeartBeatMs}
spark.storage.blockManagerTimeoutIntervalMs=5000
spark.ui.port=
spark.diskStore.subDirectories=64
spark.shuffle.file.buffer.kb=100
spark.akka.threads=4
spark.akka.batchSize=15
spark.akka.timeout=20
spark.akka.frameSize=10
spark.akka.logLifecycleEvents=false
spark.akka.writeTimeout=30
spark.cleaner.ttl=-1
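
As a usage note, with pre-SparkConf Spark (note the old spark. package used by the serializer classes above) these keys are set as JVM system properties before the SparkContext is constructed. A hedged sketch, with placeholder values and master URL:

// Sketch only: override a few of the defaults listed above, then start a context.
import spark.SparkContext

object ConfigExample {
  def main(args: Array[String]): Unit = {
    System.setProperty("spark.serializer", "spark.KryoSerializer") // assumes the Kryo serializer class of this era
    System.setProperty("spark.kryoserializer.buffer.mb", "10")
    System.setProperty("spark.executor.memory", "2g")              // placeholder value
    System.setProperty("spark.cores.max", "8")                     // placeholder value

    val sc = new SparkContext("local[4]", "ConfigExample")         // placeholder master URL and app name
    println(sc.parallelize(1 to 100).count())
    sc.stop()
  }
}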