nigel.stanger / docker-analytics
Switched to docker username
Branches: master, spark3
1 parent: f93eab3
Commit: 2fe1cef5f97b3c98f3d8c8623095cfecd2649103
Authored by Nigel Stanger on 22 May 2019
Showing 2 changed files:
Makefile
docker-compose.yml
Makefile
Version with images tagged nstanger/* (the docker username referenced in the commit message):

all: spark pyspark kafka

spark: spark/Dockerfile spark/start-master.sh spark/start-worker.sh
	docker build $(BUILD_OPTS) -t nstanger/spark:latest -f $< $@

# Rebuild both the following if spark changes!
pyspark: pyspark/Dockerfile pyspark/kernel.json pyspark/pyspark-kernel.sh
	docker build $(BUILD_OPTS) -t nstanger/pyspark:latest -f $< $@

kafka: kafka/Dockerfile kafka/start-kafka.sh
	docker build $(BUILD_OPTS) -t nstanger/kafka:latest -f $< $@

Version with images tagged analytics/*:

all: spark pyspark kafka

spark: spark/Dockerfile spark/start-master.sh spark/start-worker.sh
	docker build $(BUILD_OPTS) -t analytics/spark:latest -f $< $@

# Rebuild both the following if spark changes!
pyspark: pyspark/Dockerfile pyspark/kernel.json pyspark/pyspark-kernel.sh
	docker build $(BUILD_OPTS) -t analytics/pyspark:latest -f $< $@

kafka: kafka/Dockerfile kafka/start-kafka.sh
	docker build $(BUILD_OPTS) -t analytics/kafka:latest -f $< $@
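For reference, a minimal sketch of how these targets might be invoked (assuming GNU make and Docker are installed; BUILD_OPTS is an optional variable forwarded to docker build, and --no-cache below is only an example value, not something set by this Makefile). Note the comment in the Makefile: pyspark and kafka should be rebuilt whenever the spark image changes.

    make                                  # build all three images (spark, pyspark, kafka)
    make spark                            # build only the Spark image
    make BUILD_OPTS=--no-cache pyspark    # pass extra flags to docker build through BUILD_OPTS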
docker-compose.yml
Version with images tagged nstanger/* (the docker username referenced in the commit message):

version: "3.3"

services:
  spark-master:
    image: nstanger/spark:latest
    container_name: spark-master
    hostname: spark-master
    ports:
      - "8080:8080"
      - "7077:7077"
    networks:
      - spark-network
    environment:
      - "SPARK_HOSTNAME=spark-master"
      - "SPARK_MASTER=spark://spark-master:7077"
      - "SPARK_WORKER_MEMORY=2g"
    command: "start-master.sh"
    volumes:
      - ${HOME}/tmp/sparkdata:/mnt/sparkdata

  spark-worker:
    image: nstanger/spark:latest
    depends_on:
      - spark-master
    ports:
      - 8080
    networks:
      - spark-network
    environment:
      - "SPARK_MASTER=spark://spark-master:7077"
      - "SPARK_WORKER_WEBUI_PORT=8080"
      - "SPARKMONITOR_UI_PORT=8080"
      - "SPARK_WORKER_MEMORY=2g"
    command: "start-worker.sh"
    volumes:
      - ${HOME}/tmp/sparkdata:/mnt/sparkdata

  pyspark:
    image: nstanger/pyspark:latest
    depends_on:
      - spark-master
    ports:
      - "8888:8888"
    networks:
      - spark-network
    environment:
      - "SPARK_MASTER=spark://spark-master:7077"
      - "SPARK_MASTER_WEBUI_PORT=8080"
      - "PYSPARK_SUBMIT_ARGS=--master spark://spark-master:7077 --packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.4.3 pyspark-shell"
    working_dir: /home/pyspark/work
    volumes:
      - ${HOME}/tmp/sparkdata:/mnt/sparkdata

  kafka:
    image: nstanger/kafka:latest
    hostname: kafka
    ports:
      - 9092
    networks:
      - spark-network
    volumes:
      - ${HOME}/tmp/sparkdata:/mnt/sparkdata

networks:
  spark-network:
    driver: bridge
    ipam:
      driver: default

Version with images tagged analytics/* (identical apart from the image names):

version: "3.3"

services:
  spark-master:
    image: analytics/spark:latest
    container_name: spark-master
    hostname: spark-master
    ports:
      - "8080:8080"
      - "7077:7077"
    networks:
      - spark-network
    environment:
      - "SPARK_HOSTNAME=spark-master"
      - "SPARK_MASTER=spark://spark-master:7077"
      - "SPARK_WORKER_MEMORY=2g"
    command: "start-master.sh"
    volumes:
      - ${HOME}/tmp/sparkdata:/mnt/sparkdata

  spark-worker:
    image: analytics/spark:latest
    depends_on:
      - spark-master
    ports:
      - 8080
    networks:
      - spark-network
    environment:
      - "SPARK_MASTER=spark://spark-master:7077"
      - "SPARK_WORKER_WEBUI_PORT=8080"
      - "SPARKMONITOR_UI_PORT=8080"
      - "SPARK_WORKER_MEMORY=2g"
    command: "start-worker.sh"
    volumes:
      - ${HOME}/tmp/sparkdata:/mnt/sparkdata

  pyspark:
    image: analytics/pyspark:latest
    depends_on:
      - spark-master
    ports:
      - "8888:8888"
    networks:
      - spark-network
    environment:
      - "SPARK_MASTER=spark://spark-master:7077"
      - "SPARK_MASTER_WEBUI_PORT=8080"
      - "PYSPARK_SUBMIT_ARGS=--master spark://spark-master:7077 --packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.4.3 pyspark-shell"
    working_dir: /home/pyspark/work
    volumes:
      - ${HOME}/tmp/sparkdata:/mnt/sparkdata

  kafka:
    image: analytics/kafka:latest
    hostname: kafka
    ports:
      - 9092
    networks:
      - spark-network
    volumes:
      - ${HOME}/tmp/sparkdata:/mnt/sparkdata

networks:
  spark-network:
    driver: bridge
    ipam:
      driver: default
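For reference, one possible way to run this stack (assuming docker-compose is available; the 8888:8888 mapping and the pyspark/kernel.json in the Makefile suggest, but do not confirm, that the pyspark service serves a Jupyter notebook on port 8888):

    mkdir -p ${HOME}/tmp/sparkdata                  # host directory bind-mounted into every service
    docker-compose up -d                            # start spark-master, one spark-worker, pyspark, and kafka
    docker-compose up -d --scale spark-worker=2     # run extra workers; their web UIs get ephemeral host ports
    docker-compose logs -f pyspark                  # follow the pyspark service logs (e.g. to find the notebook URL)
    docker-compose down                             # stop and remove the containers and the spark-network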