dify: deploy ollama locally
.gitignore (vendored, 3 changes)
@@ -4,4 +4,5 @@ build_output
 # Docker volumes (runtime data, not under version control)
 docker/schoolNews/volumes/
 docker/schoolNews/crawler
 docker.zip
+docker/dify/volumes
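With docker/dify/volumes now ignored at the repository root (and the per-directory docker/dify/.gitignore removed further down), any files under it that were already tracked would stay in the index. A minimal sketch, assuming such files exist in your checkout:

    git rm -r --cached docker/dify/volumes
    git commit -m "stop tracking dify runtime volumes"   # message is illustrative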
docker/dify/.env (Normal file, 1431 changes)
File diff suppressed because it is too large.
@@ -133,6 +133,8 @@ ACCESS_TOKEN_EXPIRE_MINUTES=60
 # Refresh token expiration time in days
 REFRESH_TOKEN_EXPIRE_DAYS=30

+# The default number of active requests for the application, where 0 means unlimited, should be a non-negative integer.
+APP_DEFAULT_ACTIVE_REQUESTS=0
 # The maximum number of active requests for the application, where 0 means unlimited, should be a non-negative integer.
 APP_MAX_ACTIVE_REQUESTS=0
 APP_MAX_EXECUTION_TIME=1200
@@ -525,6 +527,7 @@ VECTOR_INDEX_NAME_PREFIX=Vector_index
 WEAVIATE_ENDPOINT=http://weaviate:8080
 WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
 WEAVIATE_GRPC_ENDPOINT=grpc://weaviate:50051
+WEAVIATE_TOKENIZATION=word

 # For OceanBase metadata database configuration, available when `DB_TYPE` is `mysql` and `COMPOSE_PROFILES` includes `oceanbase`.
 # For OceanBase vector database configuration, available when `VECTOR_STORE` is `oceanbase`
docker/dify/.gitignore (vendored, 28 changes)
@@ -1,28 +0,0 @@
-##############################
-# Dify docker volumes
-##############################
-
-# 1. Ignore everything under volumes/ by default
-volumes/**
-nginx/
-backups/*
-# 2. Keep the existing directories themselves (directory structure) so that .gitkeep / init files can still be added
-!volumes/
-!volumes/app/
-!volumes/certbot/
-!volumes/db/
-!volumes/myscale/
-!volumes/oceanbase/
-!volumes/opensearch/
-!volumes/plugin_daemon/
-!volumes/redis/
-!volumes/sandbox/
-!volumes/weaviate/
-
-# 3. To keep specific files in these directories (e.g. .gitkeep, README.md),
-#    just create the file there and add a matching !volumes/xxx/filename rule in this file.
-!volumes/myscale/config/users.d/custom_users_config.xml
-!volumes/oceanbase/init.d/vec_memory.sql
-!volumes/opensearch/opensearch_dashboards.yml
-!volumes/sandbox/conf/config.yaml
-!volumes/sandbox/conf/config.yaml.example
@@ -1,48 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# Dify docker backup script
-# What gets backed up:
-# 1) The Postgres database dify
-# 2) The entire volumes/ directory (plugins, vector store data, etc.)
-
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-cd "$SCRIPT_DIR"
-
-BACKUP_DIR="$SCRIPT_DIR/backups"
-TIMESTAMP="$(date +%Y%m%d_%H%M%S)"
-DB_FILE="$BACKUP_DIR/dify_db_${TIMESTAMP}.sql"
-VOL_FILE="$BACKUP_DIR/dify_volumes_${TIMESTAMP}.tar.gz"
-
-mkdir -p "$BACKUP_DIR"
-
-DB_USER="${POSTGRES_USER:-postgres}"
-DB_NAME="${POSTGRES_DB:-dify}"
-
-# Auto-detect the db_postgres container name, matching "<project>-db_postgres-1"
-DB_CONTAINER="${DB_CONTAINER:-}"
-if [ -z "$DB_CONTAINER" ]; then
-  DB_CONTAINER="$(docker ps --format '{{.Names}}' | grep 'db_postgres-1$' | head -n1 || true)"
-fi
-
-if [ -z "$DB_CONTAINER" ]; then
-  echo "Error: no running container named like '*-db_postgres-1' was found; make sure the db_postgres service is up." >&2
-  exit 1
-fi
-
-echo "[1/2] Dumping Postgres database ${DB_NAME} (container: ${DB_CONTAINER}) ..."
-
-docker exec -t "${DB_CONTAINER}" \
-  pg_dump -U "${DB_USER}" -d "${DB_NAME}" > "${DB_FILE}"
-
-echo "Database backed up to: ${DB_FILE}"
-
-echo "[2/2] Archiving the volumes/ directory ..."
-if [ ! -d "volumes" ]; then
-  echo "Warning: no volumes/ directory in the current directory; skipping the volume backup." >&2
-else
-  tar czf "${VOL_FILE}" volumes/
-  echo "Volume data archived to: ${VOL_FILE}"
-fi
-
-echo "Backup complete."
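The deleted script above auto-detects the running *-db_postgres-1 container and writes a timestamped SQL dump plus a volumes/ tarball into backups/. A usage sketch, assuming the script sat next to the compose files under docker/dify (its exact path is not shown in this diff):

    cd docker/dify
    ./backup.sh                                    # auto-detect the db_postgres container
    DB_CONTAINER=dify-db_postgres-1 ./backup.sh    # or pin the container name (name is illustrative)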
@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:1.10.0
+    image: langgenius/dify-api:1.10.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -41,7 +41,7 @@ services:
   # worker service
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
-    image: langgenius/dify-api:1.10.0
+    image: langgenius/dify-api:1.10.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -78,7 +78,7 @@ services:
   # worker_beat service
   # Celery beat for scheduling periodic tasks.
   worker_beat:
-    image: langgenius/dify-api:1.10.0
+    image: langgenius/dify-api:1.10.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -106,7 +106,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.10.0
+    image: langgenius/dify-web:1.10.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -131,7 +131,7 @@ services:
       ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true}
       ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true}
       ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true}

   # The PostgreSQL database.
   db_postgres:
     image: postgres:15-alpine
@@ -459,7 +459,7 @@ services:
       timeout: 10s

   # seekdb vector database
   seekdb:
     image: oceanbase/seekdb:latest
     container_name: seekdb
     profiles:
@@ -486,7 +486,7 @@ services:
   # Qdrant vector store.
   # (if used, you need to set VECTOR_STORE to qdrant in the api & worker service.)
   qdrant:
-    image: langgenius/qdrant:v1.7.3
+    image: langgenius/qdrant:v1.8.3
     profiles:
       - qdrant
     restart: always
@@ -676,7 +676,7 @@ services:

   milvus-standalone:
     container_name: milvus-standalone
-    image: milvusdb/milvus:v2.5.15
+    image: milvusdb/milvus:v2.6.3
     profiles:
       - milvus
     command: ["milvus", "run", "standalone"]
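The hunks above only bump image tags (dify-api and dify-web 1.10.0 to 1.10.1, qdrant v1.7.3 to v1.8.3, milvus v2.5.15 to v2.6.3). A sketch for rolling a running stack onto the new tags, assuming it was started with docker compose from this directory:

    docker compose pull api worker worker_beat web
    docker compose up -d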
@@ -34,6 +34,7 @@ x-shared-env: &shared-api-worker-env
   FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300}
   ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60}
   REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS:-30}
+  APP_DEFAULT_ACTIVE_REQUESTS: ${APP_DEFAULT_ACTIVE_REQUESTS:-0}
   APP_MAX_ACTIVE_REQUESTS: ${APP_MAX_ACTIVE_REQUESTS:-0}
   APP_MAX_EXECUTION_TIME: ${APP_MAX_EXECUTION_TIME:-1200}
   DIFY_BIND_ADDRESS: ${DIFY_BIND_ADDRESS:-0.0.0.0}
@@ -164,6 +165,7 @@ x-shared-env: &shared-api-worker-env
   WEAVIATE_ENDPOINT: ${WEAVIATE_ENDPOINT:-http://weaviate:8080}
   WEAVIATE_API_KEY: ${WEAVIATE_API_KEY:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
   WEAVIATE_GRPC_ENDPOINT: ${WEAVIATE_GRPC_ENDPOINT:-grpc://weaviate:50051}
+  WEAVIATE_TOKENIZATION: ${WEAVIATE_TOKENIZATION:-word}
   OCEANBASE_VECTOR_HOST: ${OCEANBASE_VECTOR_HOST:-oceanbase}
   OCEANBASE_VECTOR_PORT: ${OCEANBASE_VECTOR_PORT:-2881}
   OCEANBASE_VECTOR_USER: ${OCEANBASE_VECTOR_USER:-root@test}
@@ -532,7 +534,7 @@ x-shared-env: &shared-api-worker-env
   SSRF_POOL_MAX_CONNECTIONS: ${SSRF_POOL_MAX_CONNECTIONS:-100}
   SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: ${SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
   SSRF_POOL_KEEPALIVE_EXPIRY: ${SSRF_POOL_KEEPALIVE_EXPIRY:-5.0}
-  EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-8000}
+  EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-80}
   EXPOSE_NGINX_SSL_PORT: ${EXPOSE_NGINX_SSL_PORT:-443}
   POSITION_TOOL_PINS: ${POSITION_TOOL_PINS:-}
   POSITION_TOOL_INCLUDES: ${POSITION_TOOL_INCLUDES:-}
@@ -635,7 +637,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:1.10.0
+    image: langgenius/dify-api:1.10.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -674,7 +676,7 @@ services:
   # worker service
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
-    image: langgenius/dify-api:1.10.0
+    image: langgenius/dify-api:1.10.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -711,7 +713,7 @@ services:
   # worker_beat service
   # Celery beat for scheduling periodic tasks.
   worker_beat:
-    image: langgenius/dify-api:1.10.0
+    image: langgenius/dify-api:1.10.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -739,7 +741,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.10.0
+    image: langgenius/dify-web:1.10.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -764,12 +766,12 @@ services:
       ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true}
       ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true}
       ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true}

   # The PostgreSQL database.
   db_postgres:
     image: postgres:15-alpine
-    # profiles: # avoid starting it accidentally during a partial start-up
-    #   - postgresql
+    profiles:
+      - postgresql
     restart: always
     environment:
       POSTGRES_USER: ${POSTGRES_USER:-postgres}
@@ -1033,14 +1035,14 @@ services:
       - api
       - web
     ports:
-      - "${EXPOSE_NGINX_PORT:-8000}:${NGINX_PORT:-80}"
+      - "${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}"
       - "${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}"

   # The Weaviate vector store.
   weaviate:
     image: semitechnologies/weaviate:1.27.0
-    # profiles: # avoid starting it during a partial start-up
-    #   - weaviate
+    profiles:
+      - weaviate
     restart: always
     volumes:
       # Mount the Weaviate data directory to the container.
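With profiles re-enabled for db_postgres and weaviate, both services now start only when their profile is selected, and nginx defaults to port 80 instead of 8000. A start-up sketch under those assumptions:

    COMPOSE_PROFILES=postgresql,weaviate docker compose up -d
    # equivalent: docker compose --profile postgresql --profile weaviate up -d
    curl -I http://localhost/    # the console is now served on port 80 rather than 8000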
@@ -1092,7 +1094,7 @@ services:
       timeout: 10s

   # seekdb vector database
   seekdb:
     image: oceanbase/seekdb:latest
     container_name: seekdb
     profiles:
@@ -1119,7 +1121,7 @@ services:
   # Qdrant vector store.
   # (if used, you need to set VECTOR_STORE to qdrant in the api & worker service.)
   qdrant:
-    image: langgenius/qdrant:v1.7.3
+    image: langgenius/qdrant:v1.8.3
     profiles:
       - qdrant
     restart: always
@@ -1309,7 +1311,7 @@ services:

   milvus-standalone:
     container_name: milvus-standalone
-    image: milvusdb/milvus:v2.5.15
+    image: milvusdb/milvus:v2.6.3
     profiles:
       - milvus
     command: ["milvus", "run", "standalone"]
docker/dify/nginx/conf.d/default.conf (Normal file, 58 changes)
@@ -0,0 +1,58 @@
+# Please do not directly edit this file. Instead, modify the .env variables related to NGINX configuration.
+
+server {
+    listen 80;
+    server_name _;
+
+    location /console/api {
+        proxy_pass http://api:5001;
+        include proxy.conf;
+    }
+
+    location /api {
+        proxy_pass http://api:5001;
+        include proxy.conf;
+    }
+
+    location /v1 {
+        proxy_pass http://api:5001;
+        include proxy.conf;
+    }
+
+    location /files {
+        proxy_pass http://api:5001;
+        include proxy.conf;
+    }
+
+    location /explore {
+        proxy_pass http://web:3000;
+        include proxy.conf;
+    }
+
+    location /e/ {
+        proxy_pass http://plugin_daemon:5002;
+        proxy_set_header Dify-Hook-Url $scheme://$host$request_uri;
+        include proxy.conf;
+    }
+
+    location / {
+        proxy_pass http://web:3000;
+        include proxy.conf;
+    }
+
+    location /mcp {
+        proxy_pass http://api:5001;
+        include proxy.conf;
+    }
+
+    location /triggers {
+        proxy_pass http://api:5001;
+        include proxy.conf;
+    }
+
+    # placeholder for acme challenge location
+
+
+    # placeholder for https config defined in https.conf.template
+
+}
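This new default.conf sends the API paths (/console/api, /api, /v1, /files, /mcp, /triggers) to api:5001, /e/ to plugin_daemon:5002, and /explore plus everything else to web:3000. A sketch for validating and reloading it in the running container, assuming the compose service is named nginx as in Dify's stack:

    docker compose exec nginx nginx -t          # check the configuration
    docker compose exec nginx nginx -s reload   # apply it without recreating the container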
@@ -1,76 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# Dify docker restore script
-# Restores from the backups in backups/:
-# 1) The volumes/ directory
-# 2) The Postgres database dify
-#
-# Usage:
-#   ./restore.sh                  # use the latest backups
-#   ./restore.sh 20251124_120000  # use the backup pair with the given timestamp
-
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-cd "$SCRIPT_DIR"
-
-BACKUP_DIR="$SCRIPT_DIR/backups"
-TIMESTAMP="${1:-latest}"
-
-if [ ! -d "$BACKUP_DIR" ]; then
-  echo "Error: backup directory $BACKUP_DIR not found" >&2
-  exit 1
-fi
-
-# Pick the backup files
-if [ "$TIMESTAMP" = "latest" ]; then
-  DB_FILE="$(ls -1t "$BACKUP_DIR"/dify_db_*.sql 2>/dev/null | head -n1 || true)"
-  VOL_FILE="$(ls -1t "$BACKUP_DIR"/dify_volumes_*.tar.gz 2>/dev/null | head -n1 || true)"
-else
-  DB_FILE="$BACKUP_DIR/dify_db_${TIMESTAMP}.sql"
-  VOL_FILE="$BACKUP_DIR/dify_volumes_${TIMESTAMP}.tar.gz"
-fi
-
-if [ -z "${DB_FILE:-}" ] || [ ! -f "$DB_FILE" ]; then
-  echo "Error: no database backup file (dify_db_*.sql) found; please check $BACKUP_DIR" >&2
-  exit 1
-fi
-
-if [ -z "${VOL_FILE:-}" ] || [ ! -f "$VOL_FILE" ]; then
-  echo "Error: no volume backup file (dify_volumes_*.tar.gz) found; please check $BACKUP_DIR" >&2
-  exit 1
-fi
-
-DB_USER="${POSTGRES_USER:-postgres}"
-DB_NAME="${POSTGRES_DB:-dify}"
-
-# Auto-detect the db_postgres container name, matching "<project>-db_postgres-1"
-DB_CONTAINER="${DB_CONTAINER:-}"
-if [ -z "$DB_CONTAINER" ]; then
-  DB_CONTAINER="$(docker ps --format '{{.Names}}' | grep 'db_postgres-1$' | head -n1 || true)"
-fi
-
-if [ -z "$DB_CONTAINER" ]; then
-  echo "Error: no running container named like '*-db_postgres-1' was found; start Dify's db_postgres service first." >&2
-  exit 1
-fi
-
-echo "Backups to be used:"
-echo "  DB:  $DB_FILE"
-echo "  Vol: $VOL_FILE"
-read -rp "Restore these backups? Existing data may be overwritten [y/N]: " CONFIRM
-if [[ ! "$CONFIRM" =~ ^[Yy]$ ]]; then
-  echo "Restore cancelled."
-  exit 0
-fi
-
-echo "[1/2] Restoring the volumes/ directory ..."
-tar xzf "$VOL_FILE" -C "$SCRIPT_DIR"
-
-echo "[2/2] Restoring Postgres database ${DB_NAME} ..."
-# Optional: wipe the database first; adjust as needed
-# docker exec -i "$DB_CONTAINER" psql -U "$DB_USER" -d "$DB_NAME" -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'
-
-docker exec -i "$DB_CONTAINER" \
-  psql -U "$DB_USER" -d "$DB_NAME" < "$DB_FILE"
-
-echo "Restore complete."
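A quick post-restore sanity check sketch; the container detection mirrors the script above, and the user and database names are its defaults (postgres, dify), which may differ in your deployment:

    DB_CONTAINER="$(docker ps --format '{{.Names}}' | grep 'db_postgres-1$' | head -n1)"
    docker exec -i "$DB_CONTAINER" psql -U postgres -d dify -c '\dt' | head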
docker/dify/version (Normal file, 1 change)
@@ -0,0 +1 @@
+1.10.1