diff --git a/pyspark/Dockerfile b/pyspark/Dockerfile
index 071bf2a..292e08b 100644
--- a/pyspark/Dockerfile
+++ b/pyspark/Dockerfile
@@ -18,10 +18,13 @@
 RUN pip install --upgrade \
     pip \
     && pip install \
+    # Tornado 6 breaks sparkmonitor
+    tornado==5.1 \
     jupyter \
     kafka-python \
     pyspark \
-    sparkmonitor
+    sparkmonitor \
+    tini
 
 RUN apk del .build-deps
 
@@ -52,9 +55,11 @@
 RUN mkdir -p /home/$NB_USER/work
 WORKDIR /home/$NB_USER/work
 
+ENTRYPOINT ["/sbin/tini", "--"]
+
 # pyspark-kernel.sh will automatically set up the PySpark context when
 # the kernel is loaded.
-CMD ["jupyter", "notebook", "--ip=0.0.0.0", "--port=8888", "--no-browser"]
+CMD ["jupyter", "notebook", "--ip=0.0.0.0", "--port=8888"]
 
 # debugging
 # CMD ["bash"]
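
With this patch, tini runs as PID 1 inside the container and forwards signals to the notebook server. Docker appends CMD to ENTRYPOINT, so the container effectively starts:

    /sbin/tini -- jupyter notebook --ip=0.0.0.0 --port=8888

A minimal sketch of building and running the image (the image tag pyspark-notebook and the port mapping are assumptions for illustration, not part of the patch):

    # build the image from the pyspark/ directory and run it,
    # publishing the notebook port chosen in the CMD above
    docker build -t pyspark-notebook pyspark/
    docker run --rm -p 8888:8888 pyspark-notebook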