Commit
Remove --quiet flag when not needed (#1887)
mathbunnyru committed Mar 9, 2023
1 parent 60a7907 commit 8dbeaa5
Showing 11 changed files with 27 additions and 27 deletions.
2 changes: 1 addition & 1 deletion all-spark-notebook/Dockerfile
@@ -27,7 +27,7 @@ RUN apt-get update --yes && \
USER ${NB_UID}

# R packages including IRKernel which gets installed globally.
-RUN mamba install --quiet --yes \
+RUN mamba install --yes \
'r-base' \
'r-ggplot2' \
'r-irkernel' \
2 changes: 1 addition & 1 deletion base-notebook/Dockerfile
@@ -35,7 +35,7 @@ USER ${NB_UID}
# Do all this in a single RUN command to avoid duplicating all of the
# files across image layers when the permissions change
WORKDIR /tmp
-RUN mamba install --quiet --yes \
+RUN mamba install --yes \
'notebook' \
'jupyterhub' \
'jupyterlab' && \
2 changes: 1 addition & 1 deletion datascience-notebook/Dockerfile
@@ -61,7 +61,7 @@ USER ${NB_UID}

# R packages including IRKernel which gets installed globally.
# r-e1071: dependency of the caret R package
-RUN mamba install --quiet --yes \
+RUN mamba install --yes \
'r-base' \
'r-caret' \
'r-crayon' \
8 changes: 4 additions & 4 deletions docs/using/common.md
@@ -245,16 +245,16 @@ You can use either `mamba`, `pip` or `conda` (`mamba` is recommended) to install
```bash
# install a package into the default (python 3.x) environment and cleanup it after
# the installation
-mamba install --quiet --yes some-package && \
+mamba install --yes some-package && \
mamba clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"

-pip install --quiet --no-cache-dir some-package && \
+pip install --no-cache-dir some-package && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"

-conda install --quiet --yes some-package && \
+conda install --yes some-package && \
conda clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
@@ -274,7 +274,7 @@ mamba install --channel defaults humanize
conda config --system --prepend channels defaults

# install a package
-mamba install --quiet --yes humanize && \
+mamba install --yes humanize && \
mamba clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
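Taken together, the documented pattern (now without `--quiet`) can be used as-is in a downstream image. The snippet below is a minimal illustrative sketch, not part of this diff: the base tag and `some-package` are placeholders.

```dockerfile
FROM jupyter/base-notebook:2023-02-28

# Install a package into the default environment and clean up in the same
# layer, following the pattern documented in docs/using/common.md
RUN mamba install --yes 'some-package' && \
    mamba clean --all -f -y && \
    fix-permissions "${CONDA_DIR}" && \
    fix-permissions "/home/${NB_USER}"
```

The same layer structure applies when `mamba install --yes` is swapped for `pip install --no-cache-dir` or `conda install --yes`, as shown in the hunk above.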
28 changes: 14 additions & 14 deletions docs/using/recipes.md
@@ -31,7 +31,7 @@ Create a new Dockerfile like the one shown below.
# Start from a core stack version
FROM jupyter/datascience-notebook:2023-02-28
# Install in the default python3 environment
-RUN pip install --quiet --no-cache-dir 'flake8==3.9.2' && \
+RUN pip install --no-cache-dir 'flake8==3.9.2' && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
```
@@ -51,7 +51,7 @@ Next, create a new Dockerfile like the one shown below.
FROM jupyter/datascience-notebook:2023-02-28
# Install from the requirements.txt file
COPY --chown=${NB_UID}:${NB_GID} requirements.txt /tmp/
-RUN pip install --quiet --no-cache-dir --requirement /tmp/requirements.txt && \
+RUN pip install --no-cache-dir --requirement /tmp/requirements.txt && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
```
@@ -85,7 +85,7 @@ ARG conda_env=python37
ARG py_ver=3.7

# you can add additional libraries you want mamba to install by listing them below the first line and ending with "&& \"
-RUN mamba create --quiet --yes -p "${CONDA_DIR}/envs/${conda_env}" python=${py_ver} ipython ipykernel && \
+RUN mamba create --yes -p "${CONDA_DIR}/envs/${conda_env}" python=${py_ver} ipython ipykernel && \
mamba clean --all -f -y

# alternatively, you can comment out the lines above and uncomment those below
@@ -102,7 +102,7 @@ RUN "${CONDA_DIR}/envs/${conda_env}/bin/python" -m ipykernel install --user --na
fix-permissions "/home/${NB_USER}"

# any additional pip installs can be added by uncommenting the following line
# RUN "${CONDA_DIR}/envs/${conda_env}/bin/pip" install --quiet --no-cache-dir
# RUN "${CONDA_DIR}/envs/${conda_env}/bin/pip" install --no-cache-dir

# if you want this environment to be the default one, uncomment the following line:
# RUN echo "conda activate ${conda_env}" >> "${HOME}/.bashrc"
@@ -118,7 +118,7 @@ Create the Dockerfile as:
FROM jupyter/scipy-notebook:latest

# Install the Dask dashboard
-RUN pip install --quiet --no-cache-dir dask-labextension && \
+RUN pip install --no-cache-dir dask-labextension && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"

@@ -160,7 +160,7 @@ notebooks, with no conversion, adding javascript Reveal.js:

```bash
# Add Live slideshows with RISE
-RUN mamba install --quiet --yes -c damianavila82 rise && \
+RUN mamba install --yes -c damianavila82 rise && \
mamba clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
@@ -175,12 +175,12 @@ You need to install conda-forge's gcc for Python xgboost to work correctly.
Otherwise, you'll get an exception about libgomp.so.1 missing GOMP_4.0.

```bash
-mamba install --quiet --yes gcc && \
+mamba install --yes gcc && \
mamba clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"

-pip install --quiet --no-cache-dir xgboost && \
+pip install --no-cache-dir xgboost && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"

@@ -284,7 +284,7 @@ version in the Hub itself.

```dockerfile
FROM jupyter/base-notebook:2023-02-28
-RUN pip install --quiet --no-cache-dir jupyterhub==1.4.1 && \
+RUN pip install --no-cache-dir jupyterhub==1.4.1 && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
```
@@ -438,9 +438,9 @@ USER ${NB_UID}
# - Dashboards
# - PyDoop
# - PyHive
-RUN pip install --quiet --no-cache-dir jupyter_dashboards faker && \
+RUN pip install --no-cache-dir jupyter_dashboards faker && \
jupyter dashboards quick-setup --sys-prefix && \
-pip2 install --quiet --no-cache-dir pyhive pydoop thrift sasl thrift_sasl faker && \
+pip2 install --no-cache-dir pyhive pydoop thrift sasl thrift_sasl faker && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"

@@ -496,7 +496,7 @@ FROM jupyter/minimal-notebook:latest

USER ${NB_UID}

-RUN pip install --quiet --no-cache-dir jupyter_contrib_nbextensions && \
+RUN pip install --no-cache-dir jupyter_contrib_nbextensions && \
jupyter contrib nbextension install --user && \
# can modify or enable additional extensions here
jupyter nbextension enable spellchecker/main --user && \
@@ -515,7 +515,7 @@ By adding the properties to `spark-defaults.conf`, the user no longer needs to e
FROM jupyter/pyspark-notebook:latest

ARG DELTA_CORE_VERSION="1.2.1"
-RUN pip install --quiet --no-cache-dir delta-spark==${DELTA_CORE_VERSION} && \
+RUN pip install --no-cache-dir delta-spark==${DELTA_CORE_VERSION} && \
fix-permissions "${HOME}" && \
fix-permissions "${CONDA_DIR}"

@@ -608,7 +608,7 @@ RUN apt-get update --yes && \
# Switch back to jovyan to avoid accidental container runs as root
USER ${NB_UID}

-RUN pip install --quiet --no-cache-dir pyodbc
+RUN pip install --no-cache-dir pyodbc
```

You can now use `pyodbc` and `sqlalchemy` to interact with the database.
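A quick way to confirm the ODBC stack imports cleanly is a throwaway check at the end of a Dockerfile like the one above. This is a hedged, illustrative sketch (not taken from the recipe), assuming `sqlalchemy` is already available in the image as the recipe text implies:

```dockerfile
# Optional build-time smoke test (illustrative only)
RUN python -c "import pyodbc, sqlalchemy; print('pyodbc', pyodbc.version)"
```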
2 changes: 1 addition & 1 deletion examples/docker-compose/notebook/Dockerfile
@@ -13,4 +13,4 @@ USER ${NB_UID}

# Switch back to jovyan to avoid accidental container runs as root
# Add permanent mamba/pip/conda installs, data files, other user libs here
-# e.g., RUN pip install --quiet --no-cache-dir flake8
+# e.g., RUN pip install --no-cache-dir flake8
2 changes: 1 addition & 1 deletion examples/make-deploy/Dockerfile
@@ -13,4 +13,4 @@ USER ${NB_UID}

# Switch back to jovyan to avoid accidental container runs as root
# Add permanent mamba/pip/conda installs, data files, other user libs here
-# e.g., RUN pip install --quiet --no-cache-dir flake8
+# e.g., RUN pip install --no-cache-dir flake8
2 changes: 1 addition & 1 deletion pyspark-notebook/Dockerfile
@@ -65,7 +65,7 @@ RUN fix-permissions "/etc/ipython/"
USER ${NB_UID}

# Install pyarrow
-RUN mamba install --quiet --yes \
+RUN mamba install --yes \
'pyarrow' && \
mamba clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
2 changes: 1 addition & 1 deletion r-notebook/Dockerfile
@@ -27,7 +27,7 @@ USER ${NB_UID}

# R packages including IRKernel which gets installed globally.
# r-e1071: dependency of the caret R package
-RUN mamba install --quiet --yes \
+RUN mamba install --yes \
'r-base' \
'r-caret' \
'r-crayon' \
2 changes: 1 addition & 1 deletion scipy-notebook/Dockerfile
@@ -26,7 +26,7 @@ RUN apt-get update --yes && \
USER ${NB_UID}

# Install Python 3 packages
-RUN mamba install --quiet --yes \
+RUN mamba install --yes \
'altair' \
'beautifulsoup4' \
'bokeh' \
2 changes: 1 addition & 1 deletion tensorflow-notebook/Dockerfile
@@ -11,7 +11,7 @@ LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install Tensorflow
-RUN mamba install --quiet --yes \
+RUN mamba install --yes \
'tensorflow' && \
mamba clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
