# base image
FROM python:3.12-slim-bookworm AS base

WORKDIR /app/api

# Install uv
ENV UV_VERSION=0.8.9
RUN pip install --no-cache-dir uv==${UV_VERSION}

FROM base AS packages

# if you are located in China, you can use the aliyun mirror to speed up downloads
# RUN sed -i 's@deb.debian.org@mirrors.aliyun.com@g' /etc/apt/sources.list.d/debian.sources

RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        # basic environment
        g++ \
        # for building gmpy2
        libmpfr-dev libmpc-dev

# Install Python dependencies
COPY pyproject.toml uv.lock ./
RUN uv sync --locked --no-dev

# production stage
FROM base AS production

ENV FLASK_APP=app.py
ENV EDITION=SELF_HOSTED
ENV DEPLOY_ENV=PRODUCTION
ENV CONSOLE_API_URL=http://127.0.0.1:5001
ENV CONSOLE_WEB_URL=http://127.0.0.1:3000
ENV SERVICE_API_URL=http://127.0.0.1:5001
ENV APP_WEB_URL=http://127.0.0.1:3000

EXPOSE 5001

# set timezone
ENV TZ=UTC

# set UTF-8 locale
ENV LANG=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
ENV PYTHONIOENCODING=utf-8

WORKDIR /app/api

# Install runtime dependencies
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        # basic environment
        curl nodejs \
        # for gmpy2
        libgmp-dev libmpfr-dev libmpc-dev \
        # for security
        expat libldap-2.5-0=2.5.13+dfsg-5 perl libsqlite3-0=3.40.1-2+deb12u2 zlib1g=1:1.2.13.dfsg-1 \
        # install fonts to support the use of tools like pypdfium2
        fonts-noto-cjk \
        # install media-types to improve the accuracy of MIME type and file extension guessing
        media-types \
        # install libmagic to support MIME type detection with python-magic
        libmagic1 \
    && apt-get autoremove -y \
    && rm -rf /var/lib/apt/lists/*

# Copy Python environment and packages
ENV VIRTUAL_ENV=/app/api/.venv
COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV}
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"

# Download NLTK data
RUN mkdir -p /usr/local/share/nltk_data \
    && NLTK_DATA=/usr/local/share/nltk_data python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger'); nltk.download('stopwords')" \
    && chmod -R 755 /usr/local/share/nltk_data

# Pre-warm the tiktoken encoding cache
ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache
RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt2')"

# Copy source code
COPY . /app/api/

# Copy entrypoint
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

# Create non-root user and set permissions
RUN groupadd -r -g 1001 dify && \
    useradd -r -u 1001 -g 1001 -s /bin/bash dify && \
    mkdir -p /home/dify && \
    chown -R 1001:1001 /app /home/dify ${TIKTOKEN_CACHE_DIR} /entrypoint.sh

ARG COMMIT_SHA
ENV COMMIT_SHA=${COMMIT_SHA}
ENV NLTK_DATA=/usr/local/share/nltk_data

USER 1001

ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
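
# Example build invocation (a sketch, not part of the official build tooling;
# the "dify-api" image tag is an arbitrary placeholder). COMMIT_SHA is supplied
# via --build-arg and flows into the ENV COMMIT_SHA above:
#   docker build --build-arg COMMIT_SHA=$(git rev-parse HEAD) -t dify-api .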