diff --git a/Dockerfile b/Dockerfile
index 49952746fbba434099355b887e38dafcad6917e8..6200f5402448a9b3dae72875b499913d53d81b29 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,14 +6,11 @@ USER root
 # Copy requirements and environment files to the container
 COPY requirements.txt environment.yml /tmp/
 
-# Update conda environment, install pip packages, and adjust permissions
-RUN conda env update -q -f /tmp/environment.yml && \
-    /opt/conda/bin/pip install --no-cache-dir -r /tmp/requirements.txt && \
-    conda clean -y --all && \
-    jupyter lab build && \
-    python -m nltk.downloader stopwords && \
-    python -m nltk.downloader wordnet && \
-    python -m spacy download en_core_web_sm
+# Update conda environment
+RUN conda env update -q -f /tmp/environment.yml
+
+# Install pip packages and avoid caching
+RUN /opt/conda/bin/pip install --no-cache-dir -r /tmp/requirements.txt
 
 # Fix permissions for the pip cache directory to avoid permission warnings
 RUN chown -R jovyan:users /home/jovyan/.cache
@@ -21,6 +18,17 @@ RUN chown -R jovyan:users /home/jovyan/.cache
 # Ensure numpy and scikit-learn are compatible
 RUN /opt/conda/bin/pip install --no-cache-dir numpy==1.22.0 scikit-learn==1.2.2
 
+# Clean up conda cache
+RUN conda clean -y --all
+
+# Build Jupyter Lab
+RUN jupyter lab build
+
+# Install NLTK and SpaCy data
+RUN python -m nltk.downloader stopwords
+RUN python -m nltk.downloader wordnet
+RUN python -m spacy download en_core_web_sm
+
 # Copy and install custom packages
 COPY dash_proxy /tmp/dash_proxy/
 RUN pip install --no-cache-dir /tmp/dash_proxy/
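
Reviewer note: splitting the monolithic RUN into separate layers isolates a failure in any one step (conda update, pip install, JupyterLab build, NLTK/spaCy downloads) to its own layer. A minimal smoke test for the new data-download layers is sketched below; it is not part of this change, only assumes the packages and models named in the Dockerfile, and the image tag and script path are placeholders.

# check_nlp_data.py -- hypothetical helper, not included in this PR.
# Run inside the built image, e.g.:
#   docker run --rm <image-tag> python /path/to/check_nlp_data.py
# where <image-tag> is whatever tag the image was built with.

import spacy
from nltk.corpus import stopwords, wordnet

# These raise LookupError if the nltk.downloader layers did not run
assert "the" in stopwords.words("english"), "stopwords corpus missing"
assert wordnet.synsets("test"), "wordnet corpus missing"

# spacy.load raises OSError if the en_core_web_sm download layer failed
nlp = spacy.load("en_core_web_sm")
doc = nlp("The quick brown fox jumps over the lazy dog.")
print("tokens:", [t.text for t in doc])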