Skip to content

Instantly share code, notes, and snippets.

@kk17 kk17/.env
Last active Sep 18, 2019

Embed
What would you like to do?
Deploying Spark on Spark Standalone Cluster using Jupyter Docker Image
# .env — variables substituted into docker-compose.yml by Docker Compose.
# Pin SPARK_NOTEBOOK_VERSION to a specific image tag for reproducible builds,
# e.g. the tag below corresponds to a Spark 2.2.0 build of the notebook image:
# SPARK_NOTEBOOK_VERSION=400c69639ea5
SPARK_NOTEBOOK_VERSION=latest
# Spark installation path inside the jupyter/pyspark-notebook image;
# referenced by the compose file for volumes, working_dir and commands.
SPARK_HOME=/usr/local/spark
# docker-compose.yml — Spark standalone cluster (master + 1 worker) plus a
# Jupyter notebook service, all built from the jupyter/pyspark-notebook image.
# ${SPARK_NOTEBOOK_VERSION} and ${SPARK_HOME} are supplied by the sibling .env.
version: "3.7"

services:
  spark-master:
    image: jupyter/pyspark-notebook:${SPARK_NOTEBOOK_VERSION}
    hostname: spark-master
    user: root
    ports:
      - "18080:8080"    # master web UI (host 18080 -> container 8080)
      - "7077:7077"     # Spark master RPC endpoint
      - "10000:10000"
    volumes:
      - ./spark-apps:/opt/spark-apps
      - ./spark-data:/opt/spark-data
      - ./conf:${SPARK_HOME}/conf
      - ./jars:/opt/jars
    working_dir: ${SPARK_HOME}
    environment:
      SPARK_LOCAL_IP: spark-master
      MASTER: spark://spark-master:7077
    # Run the standalone master, advertising the service hostname.
    command: ${SPARK_HOME}/bin/spark-class org.apache.spark.deploy.master.Master -h spark-master

  spark-worker-1:
    image: jupyter/pyspark-notebook:${SPARK_NOTEBOOK_VERSION}
    container_name: spark-worker-1
    hostname: spark-worker-1
    user: root
    depends_on:
      - spark-master
    ports:
      - "8081:8081"     # worker web UI
    environment:
      SPARK_LOCAL_IP: spark-worker-1
      # Env var values are strings; quote number-looking values so the YAML
      # parser does not retype them.
      SPARK_WORKER_CORES: "3"
      SPARK_WORKER_MEMORY: "4G"
      SPARK_WORKER_PORT: "8881"
      SPARK_WORKER_WEBUI_PORT: "8081"
    volumes:
      - ./spark-apps:/opt/spark-apps
      - ./spark-data:/opt/spark-data
      - ./conf:${SPARK_HOME}/conf
      - ./jars:/opt/jars
    working_dir: ${SPARK_HOME}
    # Register this worker with the master started above.
    command: ${SPARK_HOME}/bin/spark-class org.apache.spark.deploy.worker.Worker spark://spark-master:7077

  jupyter:
    image: jupyter/pyspark-notebook:${SPARK_NOTEBOOK_VERSION}
    # image: jupyter/pyspark-notebook:latest
    ports:
      - "8888:8888"     # notebook UI
    volumes:
      - ./notebooks:/home/jovyan/work
      - ./spark-apps:/opt/spark-apps
      - ./spark-data:/opt/spark-data
      - ./conf:${SPARK_HOME}/conf
      - ./jars:/opt/jars
    environment:
      # Quoted: bare `yes` would parse as a YAML boolean; the image expects
      # the string value.
      GRANT_SUDO: "yes"
    # Empty token disables notebook auth — development use only.
    command: start.sh jupyter notebook --NotebookApp.token=''
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.