build & push all image configurations (python-only mode)

Christoph Schranz 2021-01-28 11:19:37 +01:00
parent 6a7acdecce
commit c833f1f9b7
5 changed files with 175 additions and 16 deletions


@@ -303,6 +303,107 @@ RUN MPLBACKEND=Agg python -c "import matplotlib.pyplot" && \
USER $NB_UID
WORKDIR $HOME
############################################################################
################ Dependency: jupyter/datascience-notebook ##################
############################################################################
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
# Set when building on Travis so that certain long-running build steps can
# be skipped to shorten build time.
ARG TEST_ONLY_BUILD
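# (usage sketch, not part of the upstream file: pass `--build-arg TEST_ONLY_BUILD=1` to
#  `docker build` to skip the long-running HDF5 installation further below)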
# Fix DL4006
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
USER root
# Julia installation
# Default values can be overridden at build time
# (ARGS are in lower case to distinguish them from ENV)
# Check https://julialang.org/downloads/
ARG julia_version="1.5.3"
# SHA256 checksum
ARG julia_checksum="f190c938dd6fed97021953240523c9db448ec0a6760b574afd4e9924ab5615f1"
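# (sketch for bumping julia_version: the matching checksum can be computed locally, e.g.
#  `wget -qO- https://julialang-s3.julialang.org/bin/linux/x64/1.5/julia-1.5.3-linux-x86_64.tar.gz | sha256sum`)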
# R pre-requisites
RUN apt-get update && \
apt-get install -y --no-install-recommends \
fonts-dejavu \
gfortran \
gcc && \
apt-get clean && rm -rf /var/lib/apt/lists/*
# Julia dependencies
# install Julia packages in /opt/julia instead of $HOME
ENV JULIA_DEPOT_PATH=/opt/julia \
JULIA_PKGDIR=/opt/julia \
JULIA_VERSION="${julia_version}"
WORKDIR /tmp
# hadolint ignore=SC2046
RUN mkdir "/opt/julia-${JULIA_VERSION}" && \
wget -q https://julialang-s3.julialang.org/bin/linux/x64/$(echo "${JULIA_VERSION}" | cut -d. -f 1,2)"/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" && \
echo "${julia_checksum} *julia-${JULIA_VERSION}-linux-x86_64.tar.gz" | sha256sum -c - && \
tar xzf "julia-${JULIA_VERSION}-linux-x86_64.tar.gz" -C "/opt/julia-${JULIA_VERSION}" --strip-components=1 && \
rm "/tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz"
RUN ln -fs /opt/julia-*/bin/julia /usr/local/bin/julia
# Show Julia where conda libraries are \
RUN mkdir /etc/julia && \
echo "push!(Libdl.DL_LOAD_PATH, \"$CONDA_DIR/lib\")" >> /etc/julia/juliarc.jl && \
# Create JULIA_PKGDIR \
mkdir "${JULIA_PKGDIR}" && \
chown "${NB_USER}" "${JULIA_PKGDIR}" && \
fix-permissions "${JULIA_PKGDIR}"
USER $NB_UID
# R packages including IRKernel which gets installed globally.
RUN conda install --quiet --yes \
'r-base=4.0.3' \
'r-caret=6.0*' \
'r-crayon=1.3*' \
'r-devtools=2.3*' \
'r-forecast=8.13*' \
'r-hexbin=1.28*' \
'r-htmltools=0.5*' \
'r-htmlwidgets=1.5*' \
'r-irkernel=1.1*' \
'r-nycflights13=1.0*' \
'r-randomforest=4.6*' \
'r-rcurl=1.98*' \
'r-rmarkdown=2.6*' \
'r-rsqlite=2.2*' \
'r-shiny=1.5*' \
'r-tidyverse=1.3*' \
'rpy2=3.3*' && \
conda clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
# Add Julia packages. Only add HDF5 if this is not a test-only build since
# it takes roughly half the entire build time of all of the images on Travis
# to add this one package and often causes Travis to timeout.
#
# Install IJulia as jovyan and then move the kernelspec out
# to the system share location. Avoids problems with runtime UID change not
# taking effect properly on the .local folder in the jovyan home dir.
RUN julia -e 'import Pkg; Pkg.update()' && \
(test $TEST_ONLY_BUILD || julia -e 'import Pkg; Pkg.add("HDF5")') && \
julia -e "using Pkg; pkg\"add IJulia\"; pkg\"precompile\"" && \
# move kernelspec out of home \
mv "${HOME}/.local/share/jupyter/kernels/julia"* "${CONDA_DIR}/share/jupyter/kernels/" && \
chmod -R go+rx "${CONDA_DIR}/share/jupyter" && \
rm -rf "${HOME}/.local" && \
fix-permissions "${JULIA_PKGDIR}" "${CONDA_DIR}/share/jupyter"
WORKDIR $HOME
############################################################################
@@ -314,7 +415,7 @@ LABEL maintainer="Christoph Schranz <christoph.schranz@salzburgresearch.at>"
# Install Tensorflow, check compatibility here: https://www.tensorflow.org/install/gpu
# installation via conda leads to errors in version 4.8.2
RUN pip install --upgrade pip && \
-pip install --no-cache-dir "tensorflow-gpu>=2.1.*" && \
+pip install --no-cache-dir "tensorflow==2.3.2" && \
pip install --no-cache-dir keras
# Install PyTorch with dependencies
@@ -361,13 +462,13 @@ RUN jupyter labextension install jupyterlab-drawio
RUN jupyter labextension install jupyter-leaflet
RUN jupyter labextension install jupyterlab-plotly@4.8.1
RUN jupyter labextension install @jupyter-widgets/jupyterlab-manager
-RUN pip install --no-cache-dir jupyter-tabnine==1.0.2 && \
-jupyter nbextension install --py jupyter_tabnine && \
-jupyter nbextension enable --py jupyter_tabnine && \
-jupyter serverextension enable --py jupyter_tabnine
+RUN pip install --no-cache-dir jupyter-tabnine==1.1.0 --user && \
+jupyter nbextension install --py jupyter_tabnine --user && \
+jupyter nbextension enable --py jupyter_tabnine --user && \
+jupyter serverextension enable --py jupyter_tabnine --user
RUN pip install --no-cache-dir jupyter_contrib_nbextensions \
-jupyter_nbextensions_configurator rise && \
-jupyter nbextension enable codefolding/main
+jupyter_nbextensions_configurator rise
+# jupyter nbextension enable codefolding/main
RUN jupyter labextension install @ijmbarr/jupyterlab_spellchecker
RUN fix-permissions /home/$NB_USER

.gitignore vendored

@@ -116,4 +116,6 @@ venv.bak/
src/jupyter_notebook_config.json
.idea
/Deployment-notes.md
-/push_tag.sh
+/push_tag_full.sh
+/push_tag_python-only.sh
+/push_tag_slim.sh


@@ -65,7 +65,7 @@ The image of this repository is available on [Dockerhub](https://hub.docker.com/
docker run --gpus all -d -it -p 8848:8888 -v $(pwd)/data:/home/jovyan/work -e GRANT_SUDO=yes -e JUPYTER_ENABLE_LAB=yes --user root cschranz/gpu-jupyter:v1.2_cuda-10.1_ubuntu-18.04_python-only
```
This starts an instance of *GPU-Jupyter* with the tag `v1.2_cuda-10.1_ubuntu-18.04_python-only` at [http://localhost:8848](http://localhost:8848) (port `8848`).
-The default password is `asdf` which should be changed as described [below](#set-password).
+The default password is `gpu-jupyter` (previously `asdf`), which should be changed as described [below](#set-password).
Furthermore, data within the host's `data` directory is shared with the container.
Other versions of GPU-Jupyter are available and listed on Dockerhub under [Tags](https://hub.docker.com/r/cschranz/gpu-jupyter/tags?page=1&ordering=last_updated).
@@ -84,13 +84,13 @@ As soon as you have access to your GPU within Docker containers
(make sure the command `docker run --gpus all nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04 nvidia-smi`
shows your GPU statistics), you can generate the Dockerfile, build and run it.
The following commands will start *GPU-Jupyter* on [localhost:8848](http://localhost:8848)
-with the default password `asdf`.
+with the default password `gpu-jupyter` (previously `asdf`).
```bash
git clone https://github.com/iot-salzburg/gpu-jupyter.git
cd gpu-jupyter
# generate a Dockerfile with python and without Julia and R
-./generate-Dockerfile.sh --no-datascience-notebook
+./generate-Dockerfile.sh --python-only
docker build -t gpu-jupyter .build/ # will take a while
docker run --gpus all -d -it -p 8848:8888 -v $(pwd)/data:/home/jovyan/work -e GRANT_SUDO=yes -e JUPYTER_ENABLE_LAB=yes -e NB_UID="$(id -u)" -e NB_GID="$(id -g)" --user root --restart always --name gpu-jupyter_1 gpu-jupyter
```
@@ -147,7 +147,7 @@ Here the `docker-stack` `scipy-notebook` is used instead of `datascience-noteboo
that comes with Julia and R.
Moreover, none of the packages within `src/Dockerfile.usefulpackages` is installed.
-* `--no-datascience-notebook`: As the name suggests, the `docker-stack` `datascience-notebook`
+* `--python-only|--no-datascience-notebook`: As the name suggests, the `docker-stack` `datascience-notebook`
is not installed
on top of the `scipy-notebook`, but the packages within `src/Dockerfile.usefulpackages` are.
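Both spellings of the flag behave the same; a quick usage sketch:
```bash
# either invocation generates a Dockerfile without the Julia/R layer,
# while src/Dockerfile.usefulpackages is still applied
./generate-Dockerfile.sh --python-only
./generate-Dockerfile.sh --no-datascience-notebook
```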
@@ -170,7 +170,8 @@ If an essential package is missing in the default stack, please let us know!
Please set a new password using `src/jupyter_notebook_config.json`.
To do so, hash your password in the form (password)(salt) using a sha1 hash generator, e.g., the sha1 generator of [sha1-online.com](http://www.sha1-online.com/).
-The input with the default password `asdf` is appended by a arbitrary salt `e49e73b0eb0e` to `asdfe49e73b0eb0e` and should yield the hash string as shown in the config below.
+The input with the default password `gpu-jupyter` (previously `asdf`) is concatenated with an arbitrary salt `3b4b6378355` to `gpu-jupyter3b4b6378355` and is hashed to `642693b20f0a33bcad27b94293d0ed7db3408322`.
**Never give away your own unhashed password!**
Then update the config file as shown below and restart the service.
@@ -178,7 +179,7 @@ Then update the config file as shown below and restart the service.
```json
{
  "NotebookApp": {
-    "password": "sha1:e49e73b0eb0e:32edae7a5fd119045e699a0bd04f90819ca90cd6"
+    "password": "sha1:3b4b6378355:642693b20f0a33bcad27b94293d0ed7db3408322"
  }
}
```
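For reference, the salted hash can also be computed locally instead of with an online generator (a small sketch using `sha1sum`; substitute your own password and salt):
```bash
# sha1 over the concatenation (password)(salt); -n avoids hashing a trailing newline
echo -n "gpu-jupyter3b4b6378355" | sha1sum
```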
@@ -303,7 +304,7 @@ e.g., here it is **elk_datastack**.
* **-r:** registry port is the port that is published by the registry service, default is `5000`.
Now, *gpu-jupyter* will be accessible on [localhost:8848](http://localhost:8848)
-with the default password `asdf` and shares the network with the other data-source, i.e.,
+with the default password `gpu-jupyter` (previously `asdf`) and shares the network with the other data-source, i.e.,
all ports of the data-source will be accessible within *GPU-Jupyter*,
even if they aren't routed in the source's `docker-compose` file.

build_push_all.sh Executable file

@@ -0,0 +1,54 @@
#!/usr/bin/env bash
cd $(cd -P -- "$(dirname -- "$0")" && pwd -P)
export TAGNAME="v1.3_cuda-10.1_ubuntu-18.04"
###################### build, run and push full image ##########################
echo
echo
echo "build, run and push full image with tag $TAGNAME."
bash generate-Dockerfile.sh
docker build -t cschranz/gpu-jupyter:$TAGNAME .build/
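# pick the ID of the freshly built full image (the first $TAGNAME entry that is neither a _python-only nor a _slim variant)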
export IMG_ID=$(docker image ls | grep $TAGNAME | grep -v _python-only | grep -v _slim | head -1 | awk '{print $3}')
echo "push image with ID $IMG_ID and Tag $TAGNAME ."
docker tag $IMG_ID cschranz/gpu-jupyter:$TAGNAME
docker rm -f gpu-jupyter_1
docker run --gpus all -d -it -p 8848:8888 -v $(pwd)/data:/home/jovyan/work -e GRANT_SUDO=yes -e JUPYTER_ENABLE_LAB=yes --user root --restart always --name gpu-jupyter_1 cschranz/gpu-jupyter:$TAGNAME
docker push cschranz/gpu-jupyter:$TAGNAME &&
docker save cschranz/gpu-jupyter:$TAGNAME | gzip > ../gpu-jupyter_tag-$TAGNAME.tar.gz
###################### build and push slim image ##########################
echo
echo
echo "build and push slim image with tag ${TAGNAME}_slim."
bash generate-Dockerfile.sh --slim
docker build -t cschranz/gpu-jupyter:${TAGNAME}_slim .build/
export IMG_ID=$(docker image ls | grep ${TAGNAME}_slim | head -1 | awk '{print $3}')
echo "push image with ID $IMG_ID and Tag ${TAGNAME}_slim."
docker tag $IMG_ID cschranz/gpu-jupyter:${TAGNAME}_slim
docker push cschranz/gpu-jupyter:${TAGNAME}_slim &&
docker save cschranz/gpu-jupyter:${TAGNAME}_slim | gzip > ../gpu-jupyter_tag-${TAGNAME}_slim.tar.gz
###################### build and push python-only image ##########################
echo
echo
echo "build and push slim image with tag ${TAGNAME}_slim."
bash generate-Dockerfile.sh --slim
docker build -t cschranz/gpu-jupyter:${TAGNAME}_slim .build/
export IMG_ID=$(docker image ls | grep ${TAGNAME}_slim | head -1 | awk '{print $3}')
echo "push image with ID $IMG_ID and Tag ${TAGNAME}_slim."
docker tag $IMG_ID cschranz/gpu-jupyter:${TAGNAME}_slim
docker push cschranz/gpu-jupyter:${TAGNAME}_slim &&
docker save cschranz/gpu-jupyter:${TAGNAME}_slim | gzip > ../gpu-jupyter_tag-${TAGNAME}_slim.tar.gz


@@ -10,6 +10,7 @@ export HEAD_COMMIT="703d8b2dcb886be2fe5aa4660a48fbcef647e7aa"
while [[ "$#" -gt 0 ]]; do case $1 in
-c|--commit) HEAD_COMMIT="$2"; shift;;
--no-datascience-notebook) no_datascience_notebook=1;;
+--python-only) no_datascience_notebook=1;;
--no-useful-packages) no_useful_packages=1;;
-s|--slim) no_datascience_notebook=1 && no_useful_packages=1;;
*) echo "Unknown parameter passed: $1" &&
@@ -87,7 +88,7 @@ if [[ "$no_datascience_notebook" != 1 ]]; then
" >> $DOCKERFILE
cat $STACKS_DIR/datascience-notebook/Dockerfile | grep -v BASE_CONTAINER >> $DOCKERFILE
else
-echo "Set 'no-datascience-notebook', not installing the datascience-notebook with Julia and R."
+echo "Set 'no-datascience-notebook' = 'python-only', not installing the datascience-notebook with Julia and R."
fi
# Note that the following step also installs the cudatoolkit, which is