dify
19  dify/.claude/settings.json.example  Normal file
@@ -0,0 +1,19 @@
{
  "permissions": {
    "allow": [],
    "deny": []
  },
  "env": {
    "__comment": "Environment variables for MCP servers. Override in .claude/settings.local.json with actual values.",
    "GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  },
  "enabledMcpjsonServers": [
    "context7",
    "sequential-thinking",
    "github",
    "fetch",
    "playwright",
    "ide"
  ],
  "enableAllProjectMcpServers": true
}
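The `__comment` above points at `.claude/settings.local.json` as the place for real values. A minimal sketch of such an override, assuming the local file is merged over this example and using a placeholder token value:

```bash
# Hedged illustration: keep the real token out of version control by writing it
# to .claude/settings.local.json, as the __comment in the example suggests.
cat > .claude/settings.local.json <<'EOF'
{
  "env": {
    "GITHUB_PERSONAL_ACCESS_TOKEN": "<your-real-token>"
  }
}
EOF
```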
6  dify/.cursorrules  Normal file
@@ -0,0 +1,6 @@
# Cursor Rules for Dify Project

## Automated Test Generation

- Use `web/testing/testing.md` as the canonical instruction set for generating frontend automated tests.
- When proposing or saving tests, re-read that document and follow every requirement.
4  dify/.devcontainer/Dockerfile  Normal file
@@ -0,0 +1,4 @@
FROM mcr.microsoft.com/devcontainers/python:3.12-bookworm

RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
    && apt-get -y install libgmp-dev libmpfr-dev libmpc-dev
42  dify/.devcontainer/README.md  Normal file
@@ -0,0 +1,42 @@
# Development with devcontainer

This project includes a devcontainer configuration that allows you to open the project in a container with a fully configured development environment.
Both frontend and backend environments are initialized when the container is started.

## GitHub Codespaces

[Open in GitHub Codespaces](https://codespaces.new/langgenius/dify)

You can simply click the button above to open this project in GitHub Codespaces.

For more info, check out the [GitHub documentation](https://docs.github.com/en/free-pro-team@latest/github/developing-online-with-codespaces/creating-a-codespace#creating-a-codespace).

## VS Code Dev Containers

[Open in VS Code Dev Containers](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/langgenius/dify)

If you have VS Code installed, you can click the button above to open this project in VS Code Dev Containers.

You can learn more in the [Dev Containers documentation](https://code.visualstudio.com/docs/devcontainers/containers).

## Pros of Devcontainer

Unified Development Environment: By using devcontainers, you can ensure that all developers are developing in the same environment, reducing the occurrence of "it works on my machine" type of issues.

Quick Start: New developers can set up their development environment in a few simple steps, without spending a lot of time on environment configuration.

Isolation: Devcontainers isolate your project from your host operating system, reducing the chance of OS updates or other application installations impacting the development environment.

## Cons of Devcontainer

Learning Curve: For developers unfamiliar with Docker and VS Code, using devcontainers may be somewhat complex.

Performance Impact: While usually minimal, programs running inside a devcontainer may be slightly slower than those running directly on the host.

## Troubleshooting

If you see an error message like the one below when you open this project in Codespaces:

![Troubleshooting](troubleshooting.png)

a simple workaround is to change the `/signin` endpoint to another one, log in with your GitHub account, close the tab, and then change it back to the `/signin` endpoint. Everything should then work as expected.

The reason is that the `signin` endpoint is not allowed in Codespaces; details can be found [here](https://github.com/orgs/community/discussions/5204).
52  dify/.devcontainer/devcontainer.json  Normal file
@@ -0,0 +1,52 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/anaconda
{
  "name": "Python 3.12",
  "build": {
    "context": "..",
    "dockerfile": "Dockerfile"
  },
  "features": {
    "ghcr.io/devcontainers/features/node:1": {
      "nodeGypDependencies": true,
      "version": "lts"
    },
    "ghcr.io/devcontainers-extra/features/npm-package:1": {
      "package": "typescript",
      "version": "latest"
    },
    "ghcr.io/devcontainers/features/docker-in-docker:2": {
      "moby": true,
      "azureDnsAutoDetection": true,
      "installDockerBuildx": true,
      "version": "latest",
      "dockerDashComposeVersion": "v2"
    }
  },
  "customizations": {
    "vscode": {
      "extensions": [
        "ms-python.pylint",
        "GitHub.copilot",
        "ms-python.python"
      ]
    }
  },
  "postStartCommand": "./.devcontainer/post_start_command.sh",
  "postCreateCommand": "./.devcontainer/post_create_command.sh"

  // Features to add to the dev container. More info: https://containers.dev/features.
  // "features": {},

  // Use 'forwardPorts' to make a list of ports inside the container available locally.
  // "forwardPorts": [],

  // Use 'postCreateCommand' to run commands after the container is created.
  // "postCreateCommand": "python --version",

  // Configure tool-specific properties.
  // "customizations": {},

  // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
  // "remoteUser": "root"
}
3  dify/.devcontainer/noop.txt  Normal file
@@ -0,0 +1,3 @@
This file is copied into the container along with environment.yml* from the parent
folder. It is included to prevent the Dockerfile COPY instruction from
failing if no environment.yml is found.
15  dify/.devcontainer/post_create_command.sh  Normal file
@@ -0,0 +1,15 @@
#!/bin/bash
WORKSPACE_ROOT=$(pwd)

corepack enable
cd web && pnpm install
pipx install uv

echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor\"" >> ~/.bashrc
echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev\"" >> ~/.bashrc
echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc
echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc
echo "alias stop-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down\"" >> ~/.bashrc

source /home/vscode/.bashrc
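Once provisioning finishes, the aliases written to `~/.bashrc` above give a quick development loop. A sketch of a typical session, with alias names taken from the script and the ordering only illustrative:

```bash
start-containers   # bring up the middleware stack from docker/docker-compose.middleware.yaml
start-api          # Flask API with --debug on port 5001
start-web          # Next.js dev server from the web/ workspace

# shut the middleware back down when finished
stop-containers
```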
3  dify/.devcontainer/post_start_command.sh  Normal file
@@ -0,0 +1,3 @@
#!/bin/bash

cd api && uv sync
BIN  dify/.devcontainer/troubleshooting.png  Normal file
Binary file not shown (image, 14 KiB).
39  dify/.editorconfig  Normal file
@@ -0,0 +1,39 @@
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[*.py]
indent_size = 4
indent_style = space

[*.{yml,yaml}]
indent_style = space
indent_size = 2

[*.toml]
indent_size = 4
indent_style = space

# Markdown and MDX are whitespace-sensitive languages.
# Do not remove trailing spaces.
[*.{md,mdx}]
trim_trailing_whitespace = false

# Matches multiple files with brace expansion notation
# Set default charset
[*.{js,jsx,ts,tsx,mjs}]
indent_style = space
indent_size = 2

# Matches the exact file package.json
[package.json]
indent_style = space
indent_size = 2
7  dify/.gitattributes  vendored  Normal file
@@ -0,0 +1,7 @@
# Ensure that .sh scripts use LF as the line separator, even if they are checked out
# to a Windows (NTFS) file system by a user of Docker for Windows.
# These .sh scripts will be run from the container after `docker compose up -d`.
# If they appear to be CRLF style, Dash in the container will fail to execute
# them.

*.sh text eol=lf
41  dify/.github/CODE_OF_CONDUCT.md  vendored  Normal file
@@ -0,0 +1,41 @@
# Dify Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
- Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or
  advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
  address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Language Policy

To facilitate clear and effective communication, all discussions, comments, documentation, and pull requests in this project should be conducted in English. This ensures that all contributors can participate and collaborate effectively.
24  dify/.github/DISCUSSION_TEMPLATE/general.yml  vendored  Normal file
@@ -0,0 +1,24 @@
title: "General Discussion"
body:
  - type: checkboxes
    attributes:
      label: Self Checks
      description: "To make sure we get to you in time, please check the following :)"
      options:
        - label: I have searched for [existing issues](https://github.com/langgenius/dify/issues), including closed ones.
          required: true
        - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
          required: true
        - label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue,否则会被关闭。谢谢!:)"
          required: true
        - label: "Please do not modify this template :) and fill in all the required fields."
          required: true
  - type: textarea
    attributes:
      label: Content
      placeholder: Please describe the content you would like to discuss.
    validations:
      required: true
  - type: markdown
    attributes:
      value: Please limit one request per issue.
30  dify/.github/DISCUSSION_TEMPLATE/help.yml  vendored  Normal file
@@ -0,0 +1,30 @@
title: "Help"
body:
  - type: checkboxes
    attributes:
      label: Self Checks
      description: "To make sure we get to you in time, please check the following :)"
      options:
        - label: I have searched for [existing issues](https://github.com/langgenius/dify/issues), including closed ones.
          required: true
        - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
          required: true
        - label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue,否则会被关闭。谢谢!:)"
          required: true
        - label: "Please do not modify this template :) and fill in all the required fields."
          required: true
  - type: textarea
    attributes:
      label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
      placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
    validations:
      required: true
  - type: textarea
    attributes:
      label: 2. Additional context or comments
      placeholder: (Any other information, comments, documentation, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
    validations:
      required: false
  - type: markdown
    attributes:
      value: Please limit one request per issue.
37  dify/.github/DISCUSSION_TEMPLATE/suggestion.yml  vendored  Normal file
@@ -0,0 +1,37 @@
title: Suggestions for New Features
body:
  - type: checkboxes
    attributes:
      label: Self Checks
      description: "To make sure we get to you in time, please check the following :)"
      options:
        - label: I have searched for [existing issues](https://github.com/langgenius/dify/issues), including closed ones.
          required: true
        - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
          required: true
        - label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue,否则会被关闭。谢谢!:)"
          required: true
        - label: "Please do not modify this template :) and fill in all the required fields."
          required: true
  - type: textarea
    attributes:
      label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
      placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
    validations:
      required: true
  - type: textarea
    attributes:
      label: 2. Additional context or comments
      placeholder: (Any other information, comments, documentation, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
    validations:
      required: false
  - type: checkboxes
    attributes:
      label: 3. Can you help us with this feature?
      description: Let us know! This is not a commitment, but a starting point for collaboration.
      options:
        - label: I am interested in contributing to this feature.
          required: false
  - type: markdown
    attributes:
      value: Please limit one request per issue.
65  dify/.github/ISSUE_TEMPLATE/bug_report.yml  vendored  Normal file
@@ -0,0 +1,65 @@
name: "🕷️ Bug report"
description: Report errors or unexpected behavior
labels:
  - bug
body:
  - type: checkboxes
    attributes:
      label: Self Checks
      description: "To make sure we get to you in time, please check the following :)"
      options:
        - label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
          required: true
        - label: This is only for bug reports; if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
          required: true
        - label: I have searched for [existing issues](https://github.com/langgenius/dify/issues), including closed ones.
          required: true
        - label: I confirm that I am using English to submit this report, otherwise it will be closed.
          required: true
        - label: 【中文用户 & Non English User】请使用英语提交,否则会被关闭 :)
          required: true
        - label: "Please do not modify this template :) and fill in all the required fields."
          required: true

  - type: input
    attributes:
      label: Dify version
      description: See the About section in the Dify console
    validations:
      required: true

  - type: dropdown
    attributes:
      label: Cloud or Self Hosted
      description: How / where was Dify installed from?
      multiple: true
      options:
        - Cloud
        - Self Hosted (Docker)
        - Self Hosted (Source)
    validations:
      required: true

  - type: textarea
    attributes:
      label: Steps to reproduce
      description: We highly suggest including screenshots and a bug report log. Please use the right markdown syntax for code blocks.
      placeholder: Having detailed steps helps us reproduce the bug. If you have logs, please use fenced code blocks (triple backticks ```) to format them.
    validations:
      required: true

  - type: textarea
    attributes:
      label: ✔️ Expected Behavior
      description: Describe what you expected to happen.
      placeholder: What were you expecting? Please do not copy and paste the steps to reproduce here.
    validations:
      required: true

  - type: textarea
    attributes:
      label: ❌ Actual Behavior
      description: Describe what actually happened.
      placeholder: What happened instead? Please do not copy and paste the steps to reproduce here.
    validations:
      required: false
14  dify/.github/ISSUE_TEMPLATE/config.yml  vendored  Normal file
@@ -0,0 +1,14 @@
blank_issues_enabled: false
contact_links:
  - name: "\U0001F510 Security Vulnerabilities"
    url: "https://github.com/langgenius/dify/security/advisories/new"
    about: Report security vulnerabilities through GitHub Security Advisories to ensure responsible disclosure. 💡 Please do not report security vulnerabilities in public issues.
  - name: "\U0001F4A1 Model Providers & Plugins"
    url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose"
    about: Report issues with official plugins or model providers; you will need to provide the plugin version and other relevant details.
  - name: "\U0001F4AC Documentation Issues"
    url: "https://github.com/langgenius/dify-docs/issues/new"
    about: Report issues with the documentation, such as typos, outdated information, or missing content. Please provide the specific section and details of the issue.
  - name: "\U0001F4E7 Discussions"
    url: https://github.com/langgenius/dify/discussions/categories/general
    about: General discussions and seeking help from the community
40  dify/.github/ISSUE_TEMPLATE/feature_request.yml  vendored  Normal file
@@ -0,0 +1,40 @@
name: "⭐ Feature or enhancement request"
description: Propose something new.
labels:
  - enhancement
body:
  - type: checkboxes
    attributes:
      label: Self Checks
      description: "To make sure we get to you in time, please check the following :)"
      options:
        - label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
          required: true
        - label: I have searched for [existing issues](https://github.com/langgenius/dify/issues), including closed ones.
          required: true
        - label: I confirm that I am using English to submit this report, otherwise it will be closed.
          required: true
        - label: "Please do not modify this template :) and fill in all the required fields."
          required: true
  - type: textarea
    attributes:
      label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
      placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
    validations:
      required: true
  - type: textarea
    attributes:
      label: 2. Additional context or comments
      placeholder: (Any other information, comments, documentation, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
    validations:
      required: false
  - type: checkboxes
    attributes:
      label: 3. Can you help us with this feature?
      description: Let us know! This is not a commitment, but a starting point for collaboration.
      options:
        - label: I am interested in contributing to this feature.
          required: false
  - type: markdown
    attributes:
      value: Please limit one request per issue.
44  dify/.github/ISSUE_TEMPLATE/refactor.yml  vendored  Normal file
@@ -0,0 +1,44 @@
name: "✨ Refactor"
description: Refactor existing code for improved readability and maintainability.
title: "[Chore/Refactor] "
labels:
  - refactor
body:
  - type: checkboxes
    attributes:
      label: Self Checks
      description: "To make sure we get to you in time, please check the following :)"
      options:
        - label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
          required: true
        - label: This is only for refactoring; if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
          required: true
        - label: I have searched for [existing issues](https://github.com/langgenius/dify/issues), including closed ones.
          required: true
        - label: I confirm that I am using English to submit this report, otherwise it will be closed.
          required: true
        - label: 【中文用户 & Non English User】请使用英语提交,否则会被关闭 :)
          required: true
        - label: "Please do not modify this template :) and fill in all the required fields."
          required: true
  - type: textarea
    id: description
    attributes:
      label: Description
      placeholder: "Describe the refactor you are proposing."
    validations:
      required: true
  - type: textarea
    id: motivation
    attributes:
      label: Motivation
      placeholder: "Explain why this refactor is necessary."
    validations:
      required: false
  - type: textarea
    id: additional-context
    attributes:
      label: Additional Context
      placeholder: "Add any other context or screenshots about the request here."
    validations:
      required: false
13  dify/.github/ISSUE_TEMPLATE/tracker.yml  vendored  Normal file
@@ -0,0 +1,13 @@
name: "👾 Tracker"
description: For internal use only; please do not use this template.
title: "[Tracker] "
labels:
  - tracker
body:
  - type: textarea
    id: content
    attributes:
      label: Blockers
      placeholder: "- [ ] ..."
    validations:
      required: true
12  dify/.github/copilot-instructions.md  vendored  Normal file
@@ -0,0 +1,12 @@
# Copilot Instructions

GitHub Copilot must follow the unified frontend testing requirements documented in `web/testing/testing.md`.

Key reminders:

- Generate tests using the mandated tech stack, naming, and code style (AAA pattern, `fireEvent`, descriptive test names, mock cleanup).
- Cover rendering, prop combinations, and edge cases by default; extend coverage for hooks, routing, async flows, and domain-specific components when applicable.
- Target >95% line and branch coverage and 100% function/statement coverage.
- Apply the project's mocking conventions for i18n, toast notifications, and Next.js utilities.

Any suggestions from Copilot that conflict with `web/testing/testing.md` should be revised before acceptance.
12  dify/.github/dependabot.yml  vendored  Normal file
@@ -0,0 +1,12 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/web"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 2
  - package-ecosystem: "uv"
    directory: "/api"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 2
1  dify/.github/linters/.hadolint.yaml  vendored  Normal file
@@ -0,0 +1 @@
failure-threshold: "error"
2  dify/.github/linters/.isort.cfg  vendored  Normal file
@@ -0,0 +1,2 @@
[settings]
line_length=120
11  dify/.github/linters/.yaml-lint.yml  vendored  Normal file
@@ -0,0 +1,11 @@
---

extends: default

rules:
  brackets:
    max-spaces-inside: 1
  comments-indentation: disable
  document-start: disable
  line-length: disable
  truthy: disable
22  dify/.github/linters/editorconfig-checker.json  vendored  Normal file
@@ -0,0 +1,22 @@
{
  "Verbose": false,
  "Debug": false,
  "IgnoreDefaults": false,
  "SpacesAfterTabs": false,
  "NoColor": false,
  "Exclude": [
    "^web/public/vs/",
    "^web/public/pdf.worker.min.mjs$",
    "web/app/components/base/icons/src/vender/"
  ],
  "AllowedContentTypes": [],
  "PassedFiles": [],
  "Disable": {
    "EndOfLine": false,
    "Indentation": false,
    "IndentSize": true,
    "InsertFinalNewline": false,
    "TrimTrailingWhitespace": false,
    "MaxLineLength": false
  }
}
23  dify/.github/pull_request_template.md  vendored  Normal file
@@ -0,0 +1,23 @@
> [!IMPORTANT]
>
> 1. Make sure you have read our [contribution guidelines](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)
> 1. Ensure there is an associated issue and you have been assigned to it
> 1. Use the correct syntax to link this PR: `Fixes #<issue number>`.

## Summary

<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. -->

## Screenshots

| Before | After |
|--------|-------|
| ...    | ...   |

## Checklist

- [ ] This change requires a documentation update, included: [Dify Document](https://github.com/langgenius/dify-docs)
- [x] I understand that this PR may be closed in case there was no previous discussion or issues. (This doesn't apply to typos!)
- [x] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
- [x] I've updated the documentation accordingly.
- [x] I ran `dev/reformat` (backend) and `cd web && npx lint-staged` (frontend) to appease the lint gods
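The last checklist item names the repository's lint entry points; assuming you are at the repository root, running them locally looks roughly like this:

```bash
# Backend formatting/lint helper referenced by the checklist.
dev/reformat

# Frontend lint over staged files in the web workspace.
cd web && npx lint-staged
```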
97  dify/.github/workflows/api-tests.yml  vendored  Normal file
@@ -0,0 +1,97 @@
name: Run Pytest

on:
  workflow_call:

concurrency:
  group: api-tests-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  test:
    name: API Tests
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    strategy:
      matrix:
        python-version:
          - "3.11"
          - "3.12"

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
          python-version: ${{ matrix.python-version }}
          cache-dependency-glob: api/uv.lock

      - name: Check UV lockfile
        run: uv lock --project api --check

      - name: Install dependencies
        run: uv sync --project api --dev

      - name: Run pyrefly check
        run: |
          cd api
          uv add --dev pyrefly
          uv run pyrefly check || true

      - name: Run dify config tests
        run: uv run --project api dev/pytest/pytest_config_tests.py

      - name: Set up dotenvs
        run: |
          cp docker/.env.example docker/.env
          cp docker/middleware.env.example docker/middleware.env

      - name: Expose Service Ports
        run: sh .github/workflows/expose_service_ports.sh

      - name: Set up Sandbox
        uses: hoverkraft-tech/compose-action@v2.0.2
        with:
          compose-file: |
            docker/docker-compose.middleware.yaml
          services: |
            db_postgres
            redis
            sandbox
            ssrf_proxy

      - name: setup test config
        run: |
          cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env

      - name: Run Workflow
        run: uv run --project api bash dev/pytest/pytest_workflow.sh

      - name: Run Tool
        run: uv run --project api bash dev/pytest/pytest_tools.sh

      - name: Run TestContainers
        run: uv run --project api bash dev/pytest/pytest_testcontainers.sh

      - name: Run Unit tests
        run: |
          uv run --project api bash dev/pytest/pytest_unit_tests.sh

      - name: Coverage Summary
        run: |
          set -x
          # Extract coverage percentage and create a summary
          TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')

          # Create a detailed coverage summary
          echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
          echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
          uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY
dify/.github/workflows/autofix.yml
vendored
Normal file
90
dify/.github/workflows/autofix.yml
vendored
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
name: autofix.ci
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: ["main"]
|
||||||
|
push:
|
||||||
|
branches: ["main"]
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
autofix:
|
||||||
|
if: github.repository == 'langgenius/dify'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
# Use uv to ensure we have the same ruff version in CI and locally.
|
||||||
|
- uses: astral-sh/setup-uv@v6
|
||||||
|
with:
|
||||||
|
python-version: "3.11"
|
||||||
|
- run: |
|
||||||
|
cd api
|
||||||
|
uv sync --dev
|
||||||
|
# fmt first to avoid line too long
|
||||||
|
uv run ruff format ..
|
||||||
|
# Fix lint errors
|
||||||
|
uv run ruff check --fix .
|
||||||
|
# Format code
|
||||||
|
uv run ruff format ..
|
||||||
|
|
||||||
|
- name: count migration progress
|
||||||
|
run: |
|
||||||
|
cd api
|
||||||
|
./cnt_base.sh
|
||||||
|
|
||||||
|
- name: ast-grep
|
||||||
|
run: |
|
||||||
|
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
|
||||||
|
uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
|
||||||
|
uvx --from ast-grep-cli sg -p '$A = db.Column($$$B)' -r '$A = mapped_column($$$B)' -l py --update-all
|
||||||
|
uvx --from ast-grep-cli sg -p '$A : $T = db.Column($$$B)' -r '$A : $T = mapped_column($$$B)' -l py --update-all
|
||||||
|
# Convert Optional[T] to T | None (ignoring quoted types)
|
||||||
|
cat > /tmp/optional-rule.yml << 'EOF'
|
||||||
|
id: convert-optional-to-union
|
||||||
|
language: python
|
||||||
|
rule:
|
||||||
|
kind: generic_type
|
||||||
|
all:
|
||||||
|
- has:
|
||||||
|
kind: identifier
|
||||||
|
pattern: Optional
|
||||||
|
- has:
|
||||||
|
kind: type_parameter
|
||||||
|
has:
|
||||||
|
kind: type
|
||||||
|
pattern: $T
|
||||||
|
fix: $T | None
|
||||||
|
EOF
|
||||||
|
uvx --from ast-grep-cli sg scan --inline-rules "$(cat /tmp/optional-rule.yml)" --update-all
|
||||||
|
# Fix forward references that were incorrectly converted (Python doesn't support "Type" | None syntax)
|
||||||
|
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
|
||||||
|
find . -name "*.py.bak" -type f -delete
|
||||||
|
|
||||||
|
- name: mdformat
|
||||||
|
run: |
|
||||||
|
uvx mdformat .
|
||||||
|
|
||||||
|
- name: Install pnpm
|
||||||
|
uses: pnpm/action-setup@v4
|
||||||
|
with:
|
||||||
|
package_json_file: web/package.json
|
||||||
|
run_install: false
|
||||||
|
|
||||||
|
- name: Setup NodeJS
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: 22
|
||||||
|
cache: pnpm
|
||||||
|
cache-dependency-path: ./web/package.json
|
||||||
|
|
||||||
|
- name: Web dependencies
|
||||||
|
working-directory: ./web
|
||||||
|
run: pnpm install --frozen-lockfile
|
||||||
|
|
||||||
|
- name: oxlint
|
||||||
|
working-directory: ./web
|
||||||
|
run: |
|
||||||
|
pnpx oxlint --fix
|
||||||
|
|
||||||
|
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
|
||||||
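The ruff pass in this workflow can be reproduced locally before pushing. A sketch that mirrors the commands in the job above (run from the `api/` directory, with uv installed):

```bash
cd api
uv sync --dev
uv run ruff format ..       # format first, as the workflow does
uv run ruff check --fix .   # then auto-fix lint errors
uv run ruff format ..       # and format once more
```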
151  dify/.github/workflows/build-push.yml  vendored  Normal file
@@ -0,0 +1,151 @@
name: Build and Push API & Web

on:
  push:
    branches:
      - "main"
      - "deploy/**"
      - "build/**"
      - "release/e-*"
      - "hotfix/**"
    tags:
      - "*"

concurrency:
  group: build-push-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

env:
  DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
  DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
  DIFY_WEB_IMAGE_NAME: ${{ vars.DIFY_WEB_IMAGE_NAME || 'langgenius/dify-web' }}
  DIFY_API_IMAGE_NAME: ${{ vars.DIFY_API_IMAGE_NAME || 'langgenius/dify-api' }}

jobs:
  build:
    runs-on: ${{ matrix.platform == 'linux/arm64' && 'arm64_runner' || 'ubuntu-latest' }}
    if: github.repository == 'langgenius/dify'
    strategy:
      matrix:
        include:
          - service_name: "build-api-amd64"
            image_name_env: "DIFY_API_IMAGE_NAME"
            context: "api"
            platform: linux/amd64
          - service_name: "build-api-arm64"
            image_name_env: "DIFY_API_IMAGE_NAME"
            context: "api"
            platform: linux/arm64
          - service_name: "build-web-amd64"
            image_name_env: "DIFY_WEB_IMAGE_NAME"
            context: "web"
            platform: linux/amd64
          - service_name: "build-web-arm64"
            image_name_env: "DIFY_WEB_IMAGE_NAME"
            context: "web"
            platform: linux/arm64

    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ env.DOCKERHUB_USER }}
          password: ${{ env.DOCKERHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Extract metadata for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env[matrix.image_name_env] }}

      - name: Build Docker image
        id: build
        uses: docker/build-push-action@v6
        with:
          context: "{{defaultContext}}:${{ matrix.context }}"
          platforms: ${{ matrix.platform }}
          build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,name=${{ env[matrix.image_name_env] }},push-by-digest=true,name-canonical=true,push=true
          cache-from: type=gha,scope=${{ matrix.service_name }}
          cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}

      - name: Export digest
        env:
          DIGEST: ${{ steps.build.outputs.digest }}
        run: |
          mkdir -p /tmp/digests
          sanitized_digest=${DIGEST#sha256:}
          touch "/tmp/digests/${sanitized_digest}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ matrix.context }}-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  create-manifest:
    needs: build
    runs-on: ubuntu-latest
    if: github.repository == 'langgenius/dify'
    strategy:
      matrix:
        include:
          - service_name: "merge-api-images"
            image_name_env: "DIFY_API_IMAGE_NAME"
            context: "api"
          - service_name: "merge-web-images"
            image_name_env: "DIFY_WEB_IMAGE_NAME"
            context: "web"
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: digests-${{ matrix.context }}-*
          merge-multiple: true

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ env.DOCKERHUB_USER }}
          password: ${{ env.DOCKERHUB_TOKEN }}

      - name: Extract metadata for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env[matrix.image_name_env] }}
          tags: |
            type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') && !contains(github.ref, '-') }}
            type=ref,event=branch
            type=sha,enable=true,priority=100,prefix=,suffix=,format=long
            type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') }}

      - name: Create manifest list and push
        working-directory: /tmp/digests
        env:
          IMAGE_NAME: ${{ env[matrix.image_name_env] }}
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf "$IMAGE_NAME@sha256:%s " *)

      - name: Inspect image
        env:
          IMAGE_NAME: ${{ env[matrix.image_name_env] }}
          IMAGE_VERSION: ${{ steps.meta.outputs.version }}
        run: |
          docker buildx imagetools inspect "$IMAGE_NAME:$IMAGE_VERSION"
116  dify/.github/workflows/db-migration-test.yml  vendored  Normal file
@@ -0,0 +1,116 @@
name: DB Migration Test

on:
  workflow_call:

concurrency:
  group: db-migration-test-${{ github.ref }}
  cancel-in-progress: true

jobs:
  db-migration-test-postgres:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          persist-credentials: false

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
          python-version: "3.12"
          cache-dependency-glob: api/uv.lock

      - name: Install dependencies
        run: uv sync --project api
      - name: Ensure offline migrations are supported
        run: |
          # upgrade
          uv run --directory api flask db upgrade 'base:head' --sql
          # downgrade
          uv run --directory api flask db downgrade 'head:base' --sql

      - name: Prepare middleware env
        run: |
          cd docker
          cp middleware.env.example middleware.env

      - name: Set up Middlewares
        uses: hoverkraft-tech/compose-action@v2.0.2
        with:
          compose-file: |
            docker/docker-compose.middleware.yaml
          services: |
            db_postgres
            redis

      - name: Prepare configs
        run: |
          cd api
          cp .env.example .env

      - name: Run DB Migration
        env:
          DEBUG: true
        run: uv run --directory api flask upgrade-db

  db-migration-test-mysql:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          persist-credentials: false

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
          python-version: "3.12"
          cache-dependency-glob: api/uv.lock

      - name: Install dependencies
        run: uv sync --project api
      - name: Ensure offline migrations are supported
        run: |
          # upgrade
          uv run --directory api flask db upgrade 'base:head' --sql
          # downgrade
          uv run --directory api flask db downgrade 'head:base' --sql

      - name: Prepare middleware env for MySQL
        run: |
          cd docker
          cp middleware.env.example middleware.env
          sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
          sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
          sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
          sed -i 's/DB_USERNAME=postgres/DB_USERNAME=mysql/' middleware.env

      - name: Set up Middlewares
        uses: hoverkraft-tech/compose-action@v2.0.2
        with:
          compose-file: |
            docker/docker-compose.middleware.yaml
          services: |
            db_mysql
            redis

      - name: Prepare configs for MySQL
        run: |
          cd api
          cp .env.example .env
          sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' .env
          sed -i 's/DB_PORT=5432/DB_PORT=3306/' .env
          sed -i 's/DB_USERNAME=postgres/DB_USERNAME=root/' .env

      - name: Run DB Migration
        env:
          DEBUG: true
        run: uv run --directory api flask upgrade-db
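The offline-migration check in this workflow can be run locally the same way. A sketch taken from the steps above, assuming uv is installed and the api project dependencies are synced:

```bash
# Generate the upgrade SQL without touching a database.
uv run --directory api flask db upgrade 'base:head' --sql

# Generate the downgrade SQL as well, to confirm both directions stay offline-compatible.
uv run --directory api flask db downgrade 'head:base' --sql
```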
25  dify/.github/workflows/deploy-dev.yml  vendored  Normal file
@@ -0,0 +1,25 @@
name: Deploy Dev

on:
  workflow_run:
    workflows: ["Build and Push API & Web"]
    branches:
      - "deploy/dev"
    types:
      - completed

jobs:
  deploy:
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'deploy/dev'
    steps:
      - name: Deploy to server
        uses: appleboy/ssh-action@v0.1.8
        with:
          host: ${{ secrets.SSH_HOST }}
          username: ${{ secrets.SSH_USER }}
          key: ${{ secrets.SSH_PRIVATE_KEY }}
          script: |
            ${{ vars.SSH_SCRIPT || secrets.SSH_SCRIPT }}
41  dify/.github/workflows/deploy-enterprise.yml  vendored  Normal file
@@ -0,0 +1,41 @@
name: Deploy Enterprise

permissions:
  contents: read

on:
  workflow_run:
    workflows: ["Build and Push API & Web"]
    branches:
      - "deploy/enterprise"
    types:
      - completed

jobs:
  deploy:
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'deploy/enterprise'

    steps:
      - name: trigger deployments
        env:
          DEV_ENV_ADDRS: ${{ vars.DEV_ENV_ADDRS }}
          DEPLOY_SECRET: ${{ secrets.DEPLOY_SECRET }}
        run: |
          IFS=',' read -ra ENDPOINTS <<< "${DEV_ENV_ADDRS:-}"
          BODY='{"project":"dify-api","tag":"deploy-enterprise"}'

          for ENDPOINT in "${ENDPOINTS[@]}"; do
            ENDPOINT="$(echo "$ENDPOINT" | xargs)"
            [ -z "$ENDPOINT" ] && continue

            API_SIGNATURE=$(printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$DEPLOY_SECRET" | awk '{print "sha256="$2}')

            curl -sSf -X POST \
              -H "Content-Type: application/json" \
              -H "X-Hub-Signature-256: $API_SIGNATURE" \
              -d "$BODY" \
              "$ENDPOINT"
          done
28  dify/.github/workflows/deploy-trigger-dev.yml  vendored  Normal file
@@ -0,0 +1,28 @@
name: Deploy Trigger Dev

permissions:
  contents: read

on:
  workflow_run:
    workflows: ["Build and Push API & Web"]
    branches:
      - "deploy/trigger-dev"
    types:
      - completed

jobs:
  deploy:
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'deploy/trigger-dev'
    steps:
      - name: Deploy to server
        uses: appleboy/ssh-action@v0.1.8
        with:
          host: ${{ secrets.TRIGGER_SSH_HOST }}
          username: ${{ secrets.SSH_USER }}
          key: ${{ secrets.SSH_PRIVATE_KEY }}
          script: |
            ${{ vars.SSH_SCRIPT || secrets.SSH_SCRIPT }}
48  dify/.github/workflows/docker-build.yml  vendored  Normal file
@@ -0,0 +1,48 @@
name: Build docker image

on:
  pull_request:
    branches:
      - "main"
    paths:
      - api/Dockerfile
      - web/Dockerfile

concurrency:
  group: docker-build-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build-docker:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - service_name: "api-amd64"
            platform: linux/amd64
            context: "api"
          - service_name: "api-arm64"
            platform: linux/arm64
            context: "api"
          - service_name: "web-amd64"
            platform: linux/amd64
            context: "web"
          - service_name: "web-arm64"
            platform: linux/arm64
            context: "web"
    steps:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build Docker Image
        uses: docker/build-push-action@v6
        with:
          push: false
          context: "{{defaultContext}}:${{ matrix.context }}"
          file: "${{ matrix.file }}"
          platforms: ${{ matrix.platform }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
17  dify/.github/workflows/expose_service_ports.sh  vendored  Normal file
@@ -0,0 +1,17 @@
#!/bin/bash

yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml
yq eval '.services.weaviate.ports += ["50051:50051"]' -i docker/docker-compose.yaml
yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml
yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
yq eval '.services.pgvector.ports += ["5433:5432"]' -i docker/docker-compose.yaml
yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compose.yaml
yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml
yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.yaml
yq eval '.services.oceanbase.ports += ["2881:2881"]' -i docker/docker-compose.yaml
yq eval '.services.opengauss.ports += ["6600:6600"]' -i docker/docker-compose.yaml

echo "Ports exposed for sandbox, weaviate (HTTP 8080, gRPC 50051), tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss"
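In CI this script is invoked before the compose services start (see the api-tests workflow above). A hedged local equivalent, run from the repository root, might look like this; the two service names are taken from the script and are only illustrative:

```bash
# Patch docker-compose.yaml so the vector stores are reachable from the host,
# then start a couple of the patched services.
sh .github/workflows/expose_service_ports.sh
docker compose -f docker/docker-compose.yaml up -d weaviate qdrant
```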
78  dify/.github/workflows/main-ci.yml  vendored  Normal file
@@ -0,0 +1,78 @@
name: Main CI Pipeline

on:
  pull_request:
    branches: ["main"]
  push:
    branches: ["main"]

permissions:
  contents: write
  pull-requests: write
  checks: write
  statuses: write

concurrency:
  group: main-ci-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  # Check which paths were changed to determine which tests to run
  check-changes:
    name: Check Changed Files
    runs-on: ubuntu-latest
    outputs:
      api-changed: ${{ steps.changes.outputs.api }}
      web-changed: ${{ steps.changes.outputs.web }}
      vdb-changed: ${{ steps.changes.outputs.vdb }}
      migration-changed: ${{ steps.changes.outputs.migration }}
    steps:
      - uses: actions/checkout@v4
      - uses: dorny/paths-filter@v3
        id: changes
        with:
          filters: |
            api:
              - 'api/**'
              - 'docker/**'
              - '.github/workflows/api-tests.yml'
            web:
              - 'web/**'
            vdb:
              - 'api/core/rag/datasource/**'
              - 'docker/**'
              - '.github/workflows/vdb-tests.yml'
              - 'api/uv.lock'
              - 'api/pyproject.toml'
            migration:
              - 'api/migrations/**'
              - '.github/workflows/db-migration-test.yml'

  # Run tests in parallel
  api-tests:
    name: API Tests
    needs: check-changes
    if: needs.check-changes.outputs.api-changed == 'true'
    uses: ./.github/workflows/api-tests.yml

  web-tests:
    name: Web Tests
    needs: check-changes
    if: needs.check-changes.outputs.web-changed == 'true'
    uses: ./.github/workflows/web-tests.yml

  style-check:
    name: Style Check
    uses: ./.github/workflows/style.yml

  vdb-tests:
    name: VDB Tests
    needs: check-changes
    if: needs.check-changes.outputs.vdb-changed == 'true'
    uses: ./.github/workflows/vdb-tests.yml

  db-migration-test:
    name: DB Migration Test
    needs: check-changes
    if: needs.check-changes.outputs.migration-changed == 'true'
    uses: ./.github/workflows/db-migration-test.yml
30
dify/.github/workflows/stale.yml
vendored
Normal file
@@ -0,0 +1,30 @@
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Mark stale issues and pull requests

on:
  schedule:
    - cron: '0 3 * * *'

jobs:
  stale:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write

    steps:
      - uses: actions/stale@v5
        with:
          days-before-issue-stale: 15
          days-before-issue-close: 3
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: "Closed because it is no longer active; if you have any questions, you can reopen it."
          stale-pr-message: "Closed because it is no longer active; if you have any questions, you can reopen it."
          stale-issue-label: 'no-issue-activity'
          stale-pr-label: 'no-pr-activity'
          any-of-labels: 'duplicate,question,invalid,wontfix,no-issue-activity,no-pr-activity,enhancement,cant-reproduce,help-wanted'
182
dify/.github/workflows/style.yml
vendored
Normal file
@@ -0,0 +1,182 @@
name: Style check

on:
  workflow_call:

concurrency:
  group: style-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  checks: write
  statuses: write
  contents: read

jobs:
  python-style:
    name: Python Style
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Check changed files
        id: changed-files
        uses: tj-actions/changed-files@v46
        with:
          files: |
            api/**
            .github/workflows/style.yml

      - name: Setup UV and Python
        if: steps.changed-files.outputs.any_changed == 'true'
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: false
          python-version: "3.12"
          cache-dependency-glob: api/uv.lock

      - name: Install dependencies
        if: steps.changed-files.outputs.any_changed == 'true'
        run: uv sync --project api --dev

      - name: Run Import Linter
        if: steps.changed-files.outputs.any_changed == 'true'
        run: uv run --directory api --dev lint-imports

      - name: Run Basedpyright Checks
        if: steps.changed-files.outputs.any_changed == 'true'
        run: dev/basedpyright-check

      - name: Run Mypy Type Checks
        if: steps.changed-files.outputs.any_changed == 'true'
        run: uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .

      - name: Dotenv check
        if: steps.changed-files.outputs.any_changed == 'true'
        run: uv run --project api dotenv-linter ./api/.env.example ./web/.env.example

  web-style:
    name: Web Style
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./web

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Check changed files
        id: changed-files
        uses: tj-actions/changed-files@v46
        with:
          files: web/**

      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          package_json_file: web/package.json
          run_install: false

      - name: Setup NodeJS
        uses: actions/setup-node@v4
        if: steps.changed-files.outputs.any_changed == 'true'
        with:
          node-version: 22
          cache: pnpm
          cache-dependency-path: ./web/package.json

      - name: Web dependencies
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ./web
        run: pnpm install --frozen-lockfile

      - name: Web style check
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ./web
        run: |
          pnpm run lint

      - name: Web type check
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ./web
        run: pnpm run type-check

  docker-compose-template:
    name: Docker Compose Template
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Check changed files
        id: changed-files
        uses: tj-actions/changed-files@v46
        with:
          files: |
            docker/generate_docker_compose
            docker/.env.example
            docker/docker-compose-template.yaml
            docker/docker-compose.yaml

      - name: Generate Docker Compose
        if: steps.changed-files.outputs.any_changed == 'true'
        run: |
          cd docker
          ./generate_docker_compose

      - name: Check for changes
        if: steps.changed-files.outputs.any_changed == 'true'
        run: git diff --exit-code

  superlinter:
    name: SuperLinter
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          persist-credentials: false

      - name: Check changed files
        id: changed-files
        uses: tj-actions/changed-files@v46
        with:
          files: |
            **.sh
            **.yaml
            **.yml
            **Dockerfile
            dev/**
            .editorconfig

      - name: Super-linter
        uses: super-linter/super-linter/slim@v8
        if: steps.changed-files.outputs.any_changed == 'true'
        env:
          BASH_SEVERITY: warning
          DEFAULT_BRANCH: origin/main
          EDITORCONFIG_FILE_NAME: editorconfig-checker.json
          FILTER_REGEX_INCLUDE: pnpm-lock.yaml
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          IGNORE_GENERATED_FILES: true
          IGNORE_GITIGNORED_FILES: true
          VALIDATE_BASH: true
          VALIDATE_BASH_EXEC: true
          # FIXME: temporarily disabled until api-docker.yaml's run script is fixed for shellcheck
          # VALIDATE_GITHUB_ACTIONS: true
          VALIDATE_DOCKERFILE_HADOLINT: true
          VALIDATE_EDITORCONFIG: true
          VALIDATE_XML: true
          VALIDATE_YAML: true
43
dify/.github/workflows/tool-test-sdks.yaml
vendored
Normal file
@@ -0,0 +1,43 @@
name: Run Unit Test For SDKs

on:
  pull_request:
    branches:
      - main
    paths:
      - sdks/**

concurrency:
  group: sdk-tests-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build:
    name: unit test for Node.js SDK
    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [16, 18, 20, 22]

    defaults:
      run:
        working-directory: sdks/nodejs-client

    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
          cache: ''
          cache-dependency-path: 'pnpm-lock.yaml'

      - name: Install Dependencies
        run: pnpm install --frozen-lockfile

      - name: Test
        run: pnpm test
88
dify/.github/workflows/translate-i18n-base-on-english.yml
vendored
Normal file
@@ -0,0 +1,88 @@
name: Check i18n Files and Create PR

on:
  push:
    branches: [main]
    paths:
      - 'web/i18n/en-US/*.ts'

permissions:
  contents: write
  pull-requests: write

jobs:
  check-and-update:
    if: github.repository == 'langgenius/dify'
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: web
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Check for file changes in i18n/en-US
        id: check_files
        run: |
          git fetch origin "${{ github.event.before }}" || true
          git fetch origin "${{ github.sha }}" || true
          changed_files=$(git diff --name-only "${{ github.event.before }}" "${{ github.sha }}" -- 'i18n/en-US/*.ts')
          echo "Changed files: $changed_files"
          if [ -n "$changed_files" ]; then
            echo "FILES_CHANGED=true" >> $GITHUB_ENV
            file_args=""
            for file in $changed_files; do
              filename=$(basename "$file" .ts)
              file_args="$file_args --file $filename"
            done
            echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
            echo "File arguments: $file_args"
          else
            echo "FILES_CHANGED=false" >> $GITHUB_ENV
          fi

      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          package_json_file: web/package.json
          run_install: false

      - name: Set up Node.js
        if: env.FILES_CHANGED == 'true'
        uses: actions/setup-node@v4
        with:
          node-version: 'lts/*'
          cache: pnpm
          cache-dependency-path: ./web/package.json

      - name: Install dependencies
        if: env.FILES_CHANGED == 'true'
        working-directory: ./web
        run: pnpm install --frozen-lockfile

      - name: Generate i18n translations
        if: env.FILES_CHANGED == 'true'
        working-directory: ./web
        run: pnpm run auto-gen-i18n ${{ env.FILE_ARGS }}

      - name: Generate i18n type definitions
        if: env.FILES_CHANGED == 'true'
        working-directory: ./web
        run: pnpm run gen:i18n-types

      - name: Create Pull Request
        if: env.FILES_CHANGED == 'true'
        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: Update i18n files and type definitions based on en-US changes
          title: 'chore: translate i18n files and update type definitions'
          body: |
            This PR was automatically created to update i18n files and TypeScript type definitions based on changes in en-US locale.

            **Changes included:**
            - Updated translation files for all locales
            - Regenerated TypeScript type definitions for type safety
          branch: chore/automated-i18n-updates
90
dify/.github/workflows/vdb-tests.yml
vendored
Normal file
@@ -0,0 +1,90 @@
name: Run VDB Tests

on:
  workflow_call:

concurrency:
  group: vdb-tests-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  test:
    name: VDB Tests
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version:
          - "3.11"
          - "3.12"

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Free Disk Space
        uses: endersonmenezes/free-disk-space@v2
        with:
          remove_dotnet: true
          remove_haskell: true
          remove_tool_cache: true

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
          python-version: ${{ matrix.python-version }}
          cache-dependency-glob: api/uv.lock

      - name: Check UV lockfile
        run: uv lock --project api --check

      - name: Install dependencies
        run: uv sync --project api --dev

      - name: Set up dotenvs
        run: |
          cp docker/.env.example docker/.env
          cp docker/middleware.env.example docker/middleware.env

      - name: Expose Service Ports
        run: sh .github/workflows/expose_service_ports.sh

      # - name: Set up Vector Store (TiDB)
      #   uses: hoverkraft-tech/compose-action@v2.0.2
      #   with:
      #     compose-file: docker/tidb/docker-compose.yaml
      #     services: |
      #       tidb
      #       tiflash

      - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase, OceanBase)
        uses: hoverkraft-tech/compose-action@v2.0.2
        with:
          compose-file: |
            docker/docker-compose.yaml
          services: |
            weaviate
            qdrant
            couchbase-server
            etcd
            minio
            milvus-standalone
            pgvecto-rs
            pgvector
            chroma
            elasticsearch
            oceanbase

      - name: setup test config
        run: |
          echo $(pwd)
          ls -lah .
          cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env

      # - name: Check VDB Ready (TiDB)
      #   run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py

      - name: Test Vector Stores
        run: uv run --project api bash dev/pytest/pytest_vdb.sh
58
dify/.github/workflows/web-tests.yml
vendored
Normal file
@@ -0,0 +1,58 @@
name: Web Tests

on:
  workflow_call:

concurrency:
  group: web-tests-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  test:
    name: Web Tests
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./web

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Check changed files
        id: changed-files
        uses: tj-actions/changed-files@v46
        with:
          files: web/**

      - name: Install pnpm
        if: steps.changed-files.outputs.any_changed == 'true'
        uses: pnpm/action-setup@v4
        with:
          package_json_file: web/package.json
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v4
        if: steps.changed-files.outputs.any_changed == 'true'
        with:
          node-version: 22
          cache: pnpm
          cache-dependency-path: ./web/package.json

      - name: Install dependencies
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ./web
        run: pnpm install --frozen-lockfile

      - name: Check i18n types synchronization
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ./web
        run: pnpm run check:i18n-types

      - name: Run tests
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ./web
        run: pnpm test
246
dify/.gitignore
vendored
Normal file
@@ -0,0 +1,246 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# *db files
*.db

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
coverage.json
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat-schedule.db
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.env-local
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
.conda/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# type checking
.mypy_cache/
.dmypy.json
dmypy.json
pyrightconfig.json
!api/pyrightconfig.json

# Pyre type checker
.pyre/
.idea/'

.DS_Store
web/.vscode/settings.json

# Intellij IDEA Files
.idea/*
!.idea/vcs.xml
!.idea/icon.png
.ideaDataSources/
*.iml
api/.idea

api/.env
api/storage/*

docker-legacy/volumes/app/storage/*
docker-legacy/volumes/db/data/*
docker-legacy/volumes/redis/data/*
docker-legacy/volumes/weaviate/*
docker-legacy/volumes/qdrant/*
docker-legacy/volumes/etcd/*
docker-legacy/volumes/minio/*
docker-legacy/volumes/milvus/*
docker-legacy/volumes/chroma/*
docker-legacy/volumes/opensearch/data/*
docker-legacy/volumes/pgvectors/data/*
docker-legacy/volumes/pgvector/data/*

docker/volumes/app/storage/*
docker/volumes/certbot/*
docker/volumes/db/data/*
docker/volumes/redis/data/*
docker/volumes/weaviate/*
docker/volumes/qdrant/*
docker/tidb/volumes/*
docker/volumes/etcd/*
docker/volumes/minio/*
docker/volumes/milvus/*
docker/volumes/chroma/*
docker/volumes/opensearch/data/*
docker/volumes/myscale/data/*
docker/volumes/myscale/log/*
docker/volumes/unstructured/*
docker/volumes/pgvector/data/*
docker/volumes/pgvecto_rs/data/*
docker/volumes/couchbase/*
docker/volumes/oceanbase/*
docker/volumes/plugin_daemon/*
docker/volumes/matrixone/*
docker/volumes/mysql/*
docker/volumes/seekdb/*
!docker/volumes/oceanbase/init.d

docker/nginx/conf.d/default.conf
docker/nginx/ssl/*
!docker/nginx/ssl/.gitkeep
docker/middleware.env
docker/docker-compose.override.yaml

sdks/python-client/build
sdks/python-client/dist
sdks/python-client/dify_client.egg-info

.vscode/*
!.vscode/launch.json.template
!.vscode/README.md
api/.vscode
web/.vscode
# vscode Code History Extension
.history

.idea/

# pnpm
/.pnpm-store

# plugin migrate
plugins.jsonl

# mise
mise.toml

# Next.js build output
.next/

# PWA generated files
web/public/sw.js
web/public/sw.js.map
web/public/workbox-*.js
web/public/workbox-*.js.map
web/public/fallback-*.js

# AI Assistant
.roo/
api/.env.backup
/clickzetta

# Benchmark
scripts/stress-test/setup/config/
scripts/stress-test/reports/

# mcp
.playwright-mcp/
.serena/

# settings
*.local.json
34
dify/.mcp.json
Normal file
@@ -0,0 +1,34 @@
{
  "mcpServers": {
    "context7": {
      "type": "http",
      "url": "https://mcp.context7.com/mcp"
    },
    "sequential-thinking": {
      "type": "stdio",
      "command": "npx",
      "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"],
      "env": {}
    },
    "github": {
      "type": "stdio",
      "command": "npx",
      "args": ["-y", "@modelcontextprotocol/server-github"],
      "env": {
        "GITHUB_PERSONAL_ACCESS_TOKEN": "${GITHUB_PERSONAL_ACCESS_TOKEN}"
      }
    },
    "fetch": {
      "type": "stdio",
      "command": "uvx",
      "args": ["mcp-server-fetch"],
      "env": {}
    },
    "playwright": {
      "type": "stdio",
      "command": "npx",
      "args": ["-y", "@playwright/mcp@latest"],
      "env": {}
    }
  }
}
14
dify/.vscode/README.md
vendored
Normal file
@@ -0,0 +1,14 @@
# Debugging with VS Code

This `launch.json.template` file provides various debug configurations for the Dify project within VS Code / Cursor. To use these configurations, copy the contents of this file into a new file named `launch.json` in the same `.vscode` directory.

## How to Use

1. **Create `launch.json`**: If you don't have one, create a file named `launch.json` inside the `.vscode` directory.
1. **Copy Content**: Copy the entire content from `launch.json.template` into your newly created `launch.json` file (see the command sketch after this list).
1. **Select Debug Configuration**: Go to the Run and Debug view in VS Code / Cursor (Ctrl+Shift+D or Cmd+Shift+D).
1. **Start Debugging**: Select the desired configuration from the dropdown menu and click the green play button.
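On macOS or Linux, steps 1 and 2 amount to a single copy command; a minimal sketch, assuming it is run from the repository root:

```bash
# Create .vscode/launch.json from the bundled template (run from the repository root)
cp .vscode/launch.json.template .vscode/launch.json
```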
## Tips

- If you need to debug with Microsoft Edge instead of Chrome, edit the `serverReadyAction` block in the "Next.js: debug full stack" configuration and change `"debugWithChrome"` to `"debugWithEdge"`.
65
dify/.vscode/launch.json.template
vendored
Normal file
@@ -0,0 +1,65 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Python: Flask API",
      "type": "debugpy",
      "request": "launch",
      "module": "flask",
      "env": {
        "FLASK_APP": "app.py",
        "FLASK_ENV": "development"
      },
      "args": [
        "run",
        "--host=0.0.0.0",
        "--port=5001",
        "--no-debugger",
        "--no-reload"
      ],
      "jinja": true,
      "justMyCode": true,
      "cwd": "${workspaceFolder}/api",
      "python": "${workspaceFolder}/api/.venv/bin/python"
    },
    {
      "name": "Python: Celery Worker (Solo)",
      "type": "debugpy",
      "request": "launch",
      "module": "celery",
      "env": {},
      "args": [
        "-A",
        "app.celery",
        "worker",
        "-P",
        "solo",
        "-c",
        "1",
        "-Q",
        "dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor",
        "--loglevel",
        "INFO"
      ],
      "justMyCode": false,
      "cwd": "${workspaceFolder}/api",
      "python": "${workspaceFolder}/api/.venv/bin/python"
    },
    {
      "name": "Next.js: debug full stack",
      "type": "node",
      "request": "launch",
      "program": "${workspaceFolder}/web/node_modules/next/dist/bin/next",
      "runtimeArgs": ["--inspect"],
      "skipFiles": ["<node_internals>/**"],
      "serverReadyAction": {
        "action": "debugWithChrome",
        "killOnServerStop": true,
        "pattern": "- Local:.+(https?://.+)",
        "uriFormat": "%s",
        "webRoot": "${workspaceFolder}/web"
      },
      "cwd": "${workspaceFolder}/web"
    }
  ]
}
5
dify/.windsurf/rules/testing.md
Normal file
@@ -0,0 +1,5 @@
# Windsurf Testing Rules

- Use `web/testing/testing.md` as the single source of truth for frontend automated testing.
- Honor every requirement in that document when generating or accepting tests.
- When proposing or saving tests, re-read that document and follow every requirement.
54
dify/AGENTS.md
Normal file
@@ -0,0 +1,54 @@
# AGENTS.md

## Project Overview

Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.

The codebase is split into:

- **Backend API** (`/api`): Python Flask application organized with Domain-Driven Design
- **Frontend Web** (`/web`): Next.js 15 application using TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations

## Backend Workflow

- Run backend CLI commands through `uv run --project api <command>`.
- Before submission, all backend modifications must pass local checks: `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` (see the sketch after this list).
- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks.
- Integration tests are CI-only and are not expected to run in the local environment.
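Taken together, the pre-submission checks above boil down to three commands; a minimal sketch, assuming they are run from the repository root with `make` and `uv` available:

```bash
# Required backend checks before submitting changes (run from the repository root)
make lint        # ruff format + ruff check --fix + import linter
make type-check  # basedpyright
uv run --project api --dev dev/pytest/pytest_unit_tests.sh  # backend unit tests
```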
## Frontend Workflow

```bash
cd web
pnpm lint
pnpm lint:fix
pnpm test
```

## Testing & Quality Practices

- Follow TDD: red → green → refactor.
- Use `pytest` for backend tests with Arrange-Act-Assert structure.
- Enforce strong typing; avoid `Any` and prefer explicit type annotations.
- Write self-documenting code; only add comments that explain intent.

## Language Style

- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`).
- **TypeScript**: Use the strict config, lean on ESLint + Prettier workflows, and avoid `any` types.

## General Practices

- Prefer editing existing files; add new documentation only when requested.
- Inject dependencies through constructors and preserve clean architecture boundaries.
- Handle errors with domain-specific exceptions at the correct layer.

## Project Conventions

- Backend architecture adheres to DDD and Clean Architecture principles.
- Async work runs through Celery with Redis as the broker.
- Frontend user-facing strings must use `web/i18n/en-US/`; avoid hardcoded text.
6
dify/AUTHORS
Normal file
@@ -0,0 +1,6 @@
nite-knite
goocarlos
crazywoola
iamjoel
idsong
takatost
99
dify/CONTRIBUTING.md
Normal file
@@ -0,0 +1,99 @@
# CONTRIBUTING

So you're looking to contribute to Dify - that's awesome, we can't wait to see what you do. As a startup with limited headcount and funding, we have grand ambitions to design the most intuitive workflow for building and managing LLM applications. Any help from the community counts, truly.

We need to be nimble and ship fast given where we are, but we also want to make sure that contributors like you get as smooth an experience contributing as possible. We've assembled this contribution guide for that purpose, aiming to get you familiarized with the codebase and how we work with contributors, so you can quickly jump to the fun part.

This guide, like Dify itself, is a constant work in progress. We highly appreciate your understanding if at times it lags behind the actual project, and welcome any feedback for us to improve.

In terms of licensing, please take a minute to read our short [License and Contributor Agreement](./LICENSE). The community also adheres to the [code of conduct](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).

## Before you jump in

Looking for something to tackle? Browse our [good first issues](https://github.com/langgenius/dify/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22good%20first%20issue%22) and pick one to get started!

Got a cool new model runtime or tool to add? Open a PR in our [plugin repo](https://github.com/langgenius/dify-plugins) and show us what you've built.

Need to update an existing model runtime, tool, or squash some bugs? Head over to our [official plugin repo](https://github.com/langgenius/dify-official-plugins) and make your magic happen!

Join the fun, contribute, and let's build something awesome together! 💡✨

Don't forget to link an existing issue or open a new issue in the PR's description.

### Bug reports

> [!IMPORTANT]
> Please make sure to include the following information when submitting a bug report:

- A clear and descriptive title
- A detailed description of the bug, including any error messages
- Steps to reproduce the bug
- Expected behavior
- **Logs**, if available. For backend issues these are especially important; you can find them in the docker compose logs
- Screenshots or videos, if applicable

How we prioritize:

| Issue Type | Priority |
| ------------------------------------------------------------ | --------------- |
| Bugs in core functions (cloud service, cannot login, applications not working, security loopholes) | Critical |
| Non-critical bugs, performance boosts | Medium Priority |
| Minor fixes (typos, confusing but working UI) | Low Priority |

### Feature requests

> [!NOTE]
> Please make sure to include the following information when submitting a feature request:

- A clear and descriptive title
- A detailed description of the feature
- A use case for the feature
- Any other context or screenshots about the feature request

How we prioritize:

| Feature Type | Priority |
| ------------------------------------------------------------ | --------------- |
| High-priority features, as labeled by a team member | High Priority |
| Popular feature requests from our [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Medium Priority |
| Non-core features and minor enhancements | Low Priority |
| Valuable but not immediate | Future-Feature |

## Submitting your PR

### Pull Request Process

1. Fork the repository
1. Before you draft a PR, please create an issue to discuss the changes you want to make
1. Create a new branch for your changes
1. Please add tests for your changes accordingly
1. Ensure your code passes the existing tests
1. Please link the issue in the PR description, `fixes #<issue_number>`
1. Get merged!

### Setup the project

#### Frontend

For setting up the frontend service, please refer to our comprehensive [guide](https://github.com/langgenius/dify/blob/main/web/README.md) in the `web/README.md` file. This document provides detailed instructions to help you set up the frontend environment properly.

**Testing**: All React components must have comprehensive test coverage. See [web/testing/testing.md](https://github.com/langgenius/dify/blob/main/web/testing/testing.md) for the canonical frontend testing guidelines and follow every requirement described there.

#### Backend

For setting up the backend service, kindly refer to our detailed [instructions](https://github.com/langgenius/dify/blob/main/api/README.md) in the `api/README.md` file. This document contains step-by-step guidance to help you get the backend up and running smoothly.

#### Other things to note

We recommend reviewing this document carefully before proceeding with the setup, as it contains essential information about:

- Prerequisites and dependencies
- Installation steps
- Configuration details
- Common troubleshooting tips

Feel free to reach out if you encounter any issues during the setup process.

## Getting Help

If you ever get stuck or get a burning question while contributing, simply shoot your queries our way via the related GitHub issue, or hop onto our [Discord](https://discord.gg/8Tpq4AcN9c) for a quick chat.
22
dify/LICENSE
Normal file
@@ -0,0 +1,22 @@
# Open Source License

Dify is licensed under a modified version of the Apache License 2.0, with the following additional conditions:

1. Dify may be utilized commercially, including as a backend service for other applications or as an application development platform for enterprises. Should the conditions below be met, a commercial license must be obtained from the producer:

   a. Multi-tenant service: Unless explicitly authorized by Dify in writing, you may not use the Dify source code to operate a multi-tenant environment.
      - Tenant Definition: Within the context of Dify, one tenant corresponds to one workspace. The workspace provides a separated area for each tenant's data and configurations.

   b. LOGO and copyright information: In the process of using Dify's frontend, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend.
      - Frontend Definition: For the purposes of this license, the "frontend" of Dify includes all components located in the `web/` directory when running Dify from the raw source code, or the "web" image when running Dify with Docker.

2. As a contributor, you should agree that:

   a. The producer can adjust the open-source agreement to be more strict or relaxed as deemed necessary.
   b. Your contributed code may be used for commercial purposes, including but not limited to its cloud business operations.

Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache License 2.0. Detailed information about the Apache License 2.0 can be found at http://www.apache.org/licenses/LICENSE-2.0.

The interactive design of this product is protected by appearance patent.

© 2025 LangGenius, Inc.
137
dify/Makefile
Normal file
@@ -0,0 +1,137 @@
# Variables
DOCKER_REGISTRY=langgenius
WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
API_IMAGE=$(DOCKER_REGISTRY)/dify-api
VERSION=latest

# Default target - show help
.DEFAULT_GOAL := help

# Backend Development Environment Setup
.PHONY: dev-setup prepare-docker prepare-web prepare-api

# Dev setup target
dev-setup: prepare-docker prepare-web prepare-api
	@echo "✅ Backend development environment setup complete!"

# Step 1: Prepare Docker middleware
prepare-docker:
	@echo "🐳 Setting up Docker middleware..."
	@cp -n docker/middleware.env.example docker/middleware.env 2>/dev/null || echo "Docker middleware.env already exists"
	@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev up -d
	@echo "✅ Docker middleware started"

# Step 2: Prepare web environment
prepare-web:
	@echo "🌐 Setting up web environment..."
	@cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists"
	@cd web && pnpm install
	@echo "✅ Web environment prepared (not started)"

# Step 3: Prepare API environment
prepare-api:
	@echo "🔧 Setting up API environment..."
	@cp -n api/.env.example api/.env 2>/dev/null || echo "API .env already exists"
	@cd api && uv sync --dev
	@cd api && uv run flask db upgrade
	@echo "✅ API environment prepared (not started)"

# Clean dev environment
dev-clean:
	@echo "⚠️ Stopping Docker containers..."
	@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev down
	@echo "🗑️ Removing volumes..."
	@rm -rf docker/volumes/db
	@rm -rf docker/volumes/redis
	@rm -rf docker/volumes/plugin_daemon
	@rm -rf docker/volumes/weaviate
	@rm -rf api/storage
	@echo "✅ Cleanup complete"

# Backend Code Quality Commands
format:
	@echo "🎨 Running ruff format..."
	@uv run --project api --dev ruff format ./api
	@echo "✅ Code formatting complete"

check:
	@echo "🔍 Running ruff check..."
	@uv run --project api --dev ruff check ./api
	@echo "✅ Code check complete"

lint:
	@echo "🔧 Running ruff format, check with fixes, and import linter..."
	@uv run --project api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
	@uv run --directory api --dev lint-imports
	@echo "✅ Linting complete"

type-check:
	@echo "📝 Running type check with basedpyright..."
	@uv run --directory api --dev basedpyright
	@echo "✅ Type check complete"

test:
	@echo "🧪 Running backend unit tests..."
	@uv run --project api --dev dev/pytest/pytest_unit_tests.sh
	@echo "✅ Tests complete"

# Build Docker images
build-web:
	@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
	docker build -t $(WEB_IMAGE):$(VERSION) ./web
	@echo "Web Docker image built successfully: $(WEB_IMAGE):$(VERSION)"

build-api:
	@echo "Building API Docker image: $(API_IMAGE):$(VERSION)..."
	docker build -t $(API_IMAGE):$(VERSION) ./api
	@echo "API Docker image built successfully: $(API_IMAGE):$(VERSION)"

# Push Docker images
push-web:
	@echo "Pushing web Docker image: $(WEB_IMAGE):$(VERSION)..."
	docker push $(WEB_IMAGE):$(VERSION)
	@echo "Web Docker image pushed successfully: $(WEB_IMAGE):$(VERSION)"

push-api:
	@echo "Pushing API Docker image: $(API_IMAGE):$(VERSION)..."
	docker push $(API_IMAGE):$(VERSION)
	@echo "API Docker image pushed successfully: $(API_IMAGE):$(VERSION)"

# Build all images
build-all: build-web build-api

# Push all images
push-all: push-web push-api

build-push-api: build-api push-api
build-push-web: build-web push-web

# Build and push all images
build-push-all: build-all push-all
	@echo "All Docker images have been built and pushed."

# Help target
help:
	@echo "Development Setup Targets:"
	@echo "  make dev-setup      - Run all setup steps for backend dev environment"
	@echo "  make prepare-docker - Set up Docker middleware"
	@echo "  make prepare-web    - Set up web environment"
	@echo "  make prepare-api    - Set up API environment"
	@echo "  make dev-clean      - Stop Docker middleware containers"
	@echo ""
	@echo "Backend Code Quality:"
	@echo "  make format     - Format code with ruff"
	@echo "  make check      - Check code with ruff"
	@echo "  make lint       - Format and fix code with ruff"
	@echo "  make type-check - Run type checking with basedpyright"
	@echo "  make test       - Run backend unit tests"
	@echo ""
	@echo "Docker Build Targets:"
	@echo "  make build-web      - Build web Docker image"
	@echo "  make build-api      - Build API Docker image"
	@echo "  make build-all      - Build all Docker images"
	@echo "  make push-all       - Push all Docker images"
	@echo "  make build-push-all - Build and push all Docker images"

# Phony targets
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check test
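For orientation, a typical first-time backend setup using the Makefile above could look like the following; a minimal sketch, assuming Docker, uv, and pnpm are already installed and the commands are run from the repository root:

```bash
# One-time backend development setup, followed by the routine quality checks
make dev-setup     # start middleware, install web and API dependencies, run migrations
make lint          # ruff format + ruff check --fix + import linter
make type-check    # basedpyright
make test          # backend unit tests
```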
216
dify/README.md
Normal file
@@ -0,0 +1,216 @@
![cover-v5-optimized](./images/GitHub_README_if.png)

<p align="center">
  📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Introducing Dify Workflow File Upload: Recreate Google NotebookLM Podcast</a>
</p>

<p align="center">
  <a href="https://cloud.dify.ai">Dify Cloud</a> ·
  <a href="https://docs.dify.ai/getting-started/install-self-hosted">Self-hosting</a> ·
  <a href="https://docs.dify.ai">Documentation</a> ·
  <a href="https://dify.ai/pricing">Dify edition overview</a>
</p>

<p align="center">
  <a href="https://dify.ai" target="_blank">
    <img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
  <a href="https://dify.ai/pricing" target="_blank">
    <img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
  <a href="https://discord.gg/FngNHpbcY7" target="_blank">
    <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
      alt="chat on Discord"></a>
  <a href="https://reddit.com/r/difyai" target="_blank">
    <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
      alt="join Reddit"></a>
  <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
    <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
      alt="follow on X(Twitter)"></a>
  <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
    <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
      alt="follow on LinkedIn"></a>
  <a href="https://hub.docker.com/u/langgenius" target="_blank">
    <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
  <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
    <img alt="Commits last month" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
  <a href="https://github.com/langgenius/dify/" target="_blank">
    <img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
  <a href="https://github.com/langgenius/dify/discussions/" target="_blank">
    <img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
</p>

<p align="center">
  <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
  <a href="./docs/zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
  <a href="./docs/zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
  <a href="./docs/ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
  <a href="./docs/es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
  <a href="./docs/fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
  <a href="./docs/tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
  <a href="./docs/ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
  <a href="./docs/ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
  <a href="./docs/tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
  <a href="./docs/vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
  <a href="./docs/de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
  <a href="./docs/bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>

Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more—allowing you to quickly move from prototype to production.

## Quick start

> Before installing Dify, make sure your machine meets the following minimum system requirements:
>
> - CPU >= 2 Core
> - RAM >= 4 GiB

<br/>

The easiest way to start the Dify server is through [Docker Compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:

```bash
cd dify
cd docker
cp .env.example .env
docker compose up -d
```

After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.

#### Seeking help

Please refer to our [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) if you encounter problems setting up Dify. Reach out to [the community and us](#community--contact) if you are still having issues.

> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)

## Key features

**1. Workflow**:
Build and test powerful AI workflows on a visual canvas, leveraging all the following features and beyond.

**2. Comprehensive model support**:
Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

**3. Prompt IDE**:
Intuitive interface for crafting prompts, comparing model performance, and adding additional features such as text-to-speech to a chat-based app.

**4. RAG Pipeline**:
Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats.

**5. Agent capabilities**:
You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha.

**6. LLMOps**:
Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.

**7. Backend-as-a-Service**:
All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.

## Using Dify

- **Cloud <br/>**
  We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.

- **Self-hosting Dify Community Edition<br/>**
  Quickly get Dify running in your environment with this [starter guide](#quick-start).
  Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.

- **Dify for enterprise / organizations<br/>**
  We provide additional enterprise-centric features. [Send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss your enterprise needs. <br/>

> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.

## Staying ahead

Star Dify on GitHub and be instantly notified of new releases.

![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)

## Advanced Setup

### Custom configurations

If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
### Metrics Monitoring with Grafana
|
||||||
|
|
||||||
|
Import the dashboard into Grafana, using Dify's PostgreSQL database as the data source, to monitor metrics at the granularity of apps, tenants, messages, and more.
|
||||||
|
|
||||||
|
- [Grafana Dashboard by @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
|
||||||
|
|
||||||
|
### Deployment with Kubernetes
|
||||||
|
|
||||||
|
If you'd like to configure a highly available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files that allow Dify to be deployed on Kubernetes.
|
||||||
|
|
||||||
|
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
|
||||||
|
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
|
||||||
|
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
|
||||||
|
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
|
||||||
|
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
|
||||||
|
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
|
||||||
|
|
||||||
|
#### Using Terraform for Deployment
|
||||||
|
|
||||||
|
Deploy Dify to a cloud platform with a single click using [Terraform](https://www.terraform.io/).
|
||||||
|
|
||||||
|
##### Azure Global
|
||||||
|
|
||||||
|
- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
|
||||||
|
|
||||||
|
##### Google Cloud
|
||||||
|
|
||||||
|
- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
|
||||||
|
|
||||||
|
#### Using AWS CDK for Deployment
|
||||||
|
|
||||||
|
Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/).
|
||||||
|
|
||||||
|
##### AWS
|
||||||
|
|
||||||
|
- [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
|
||||||
|
- [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
|
||||||
|
|
||||||
|
#### Using Alibaba Cloud Computing Nest
|
||||||
|
|
||||||
|
Quickly deploy Dify to Alibaba Cloud with [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88).
|
||||||
|
|
||||||
|
#### Using Alibaba Cloud Data Management
|
||||||
|
|
||||||
|
Deploy Dify to Alibaba Cloud with one click using [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/).
|
||||||
|
|
||||||
|
#### Deploy to AKS with Azure DevOps Pipeline
|
||||||
|
|
||||||
|
Deploy Dify to AKS with one click using the [Azure DevOps Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS).
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
|
||||||
|
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.
|
||||||
|
|
||||||
|
> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
|
||||||
|
|
||||||
|
## Community & contact
|
||||||
|
|
||||||
|
- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
|
||||||
|
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
|
||||||
|
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
|
||||||
|
- [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
|
||||||
|
|
||||||
|
**Contributors**
|
||||||
|
|
||||||
|
<a href="https://github.com/langgenius/dify/graphs/contributors">
|
||||||
|
<img src="https://contrib.rocks/image?repo=langgenius/dify" />
|
||||||
|
</a>
|
||||||
|
|
||||||
|
## Star history
|
||||||
|
|
||||||
|
[](https://star-history.com/#langgenius/dify&Date)
|
||||||
|
|
||||||
|
## Security disclosure
|
||||||
|
|
||||||
|
To protect your privacy, please avoid posting security issues on GitHub. Instead, report issues to security@dify.ai, and our team will respond with a detailed answer.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
This repository is licensed under the [Dify Open Source License](LICENSE), based on Apache 2.0 with additional conditions.
|
||||||
19
dify/api/.dockerignore
Normal file
19
dify/api/.dockerignore
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
.env
|
||||||
|
*.env.*
|
||||||
|
|
||||||
|
storage/generate_files/*
|
||||||
|
storage/privkeys/*
|
||||||
|
storage/tools/*
|
||||||
|
storage/upload_files/*
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log*
|
||||||
|
|
||||||
|
# jetbrains
|
||||||
|
.idea
|
||||||
|
.mypy_cache
|
||||||
|
.ruff_cache
|
||||||
|
|
||||||
|
# venv
|
||||||
|
.venv
|
||||||
639
dify/api/.env.example
Normal file
639
dify/api/.env.example
Normal file
@@ -0,0 +1,639 @@
|
|||||||
|
# Your App secret key will be used for securely signing the session cookie
|
||||||
|
# Make sure you are changing this key for your deployment with a strong key.
|
||||||
|
# You can generate a strong key using `openssl rand -base64 42`.
|
||||||
|
# Alternatively you can set it with `SECRET_KEY` environment variable.
|
||||||
|
SECRET_KEY=
|
||||||
|
|
||||||
|
# Ensure UTF-8 encoding
|
||||||
|
LANG=en_US.UTF-8
|
||||||
|
LC_ALL=en_US.UTF-8
|
||||||
|
PYTHONIOENCODING=utf-8
|
||||||
|
|
||||||
|
# Console API base URL
|
||||||
|
CONSOLE_API_URL=http://localhost:5001
|
||||||
|
CONSOLE_WEB_URL=http://localhost:3000
|
||||||
|
|
||||||
|
# Service API base URL
|
||||||
|
SERVICE_API_URL=http://localhost:5001
|
||||||
|
|
||||||
|
# Web APP base URL
|
||||||
|
APP_WEB_URL=http://localhost:3000
|
||||||
|
|
||||||
|
# Files URL
|
||||||
|
FILES_URL=http://localhost:5001
|
||||||
|
|
||||||
|
# INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
|
||||||
|
# Set this to the internal Docker service URL for proper plugin file access.
|
||||||
|
# Example: INTERNAL_FILES_URL=http://api:5001
|
||||||
|
INTERNAL_FILES_URL=http://127.0.0.1:5001
|
||||||
|
|
||||||
|
# TRIGGER URL
|
||||||
|
TRIGGER_URL=http://localhost:5001
|
||||||
|
|
||||||
|
# The time in seconds after the signature is rejected
|
||||||
|
FILES_ACCESS_TIMEOUT=300
|
||||||
|
|
||||||
|
# Access token expiration time in minutes
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES=60
|
||||||
|
|
||||||
|
# Refresh token expiration time in days
|
||||||
|
REFRESH_TOKEN_EXPIRE_DAYS=30
|
||||||
|
|
||||||
|
# redis configuration
|
||||||
|
REDIS_HOST=localhost
|
||||||
|
REDIS_PORT=6379
|
||||||
|
REDIS_USERNAME=
|
||||||
|
REDIS_PASSWORD=difyai123456
|
||||||
|
REDIS_USE_SSL=false
|
||||||
|
# SSL configuration for Redis (when REDIS_USE_SSL=true)
|
||||||
|
REDIS_SSL_CERT_REQS=CERT_NONE
|
||||||
|
# Options: CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
|
||||||
|
REDIS_SSL_CA_CERTS=
|
||||||
|
# Path to CA certificate file for SSL verification
|
||||||
|
REDIS_SSL_CERTFILE=
|
||||||
|
# Path to client certificate file for SSL authentication
|
||||||
|
REDIS_SSL_KEYFILE=
|
||||||
|
# Path to client private key file for SSL authentication
|
||||||
|
REDIS_DB=0
|
||||||
|
|
||||||
|
# redis Sentinel configuration.
|
||||||
|
REDIS_USE_SENTINEL=false
|
||||||
|
REDIS_SENTINELS=
|
||||||
|
REDIS_SENTINEL_SERVICE_NAME=
|
||||||
|
REDIS_SENTINEL_USERNAME=
|
||||||
|
REDIS_SENTINEL_PASSWORD=
|
||||||
|
REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
|
||||||
|
|
||||||
|
# redis Cluster configuration.
|
||||||
|
REDIS_USE_CLUSTERS=false
|
||||||
|
REDIS_CLUSTERS=
|
||||||
|
REDIS_CLUSTERS_PASSWORD=
|
||||||
|
|
||||||
|
# celery configuration
|
||||||
|
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
|
||||||
|
CELERY_BACKEND=redis
|
||||||
|
|
||||||
|
# Database configuration
|
||||||
|
DB_TYPE=postgresql
|
||||||
|
DB_USERNAME=postgres
|
||||||
|
DB_PASSWORD=difyai123456
|
||||||
|
DB_HOST=localhost
|
||||||
|
DB_PORT=5432
|
||||||
|
DB_DATABASE=dify
|
||||||
|
|
||||||
|
SQLALCHEMY_POOL_PRE_PING=true
|
||||||
|
SQLALCHEMY_POOL_TIMEOUT=30
|
||||||
|
|
||||||
|
# Storage configuration
|
||||||
|
# used to store uploaded files, private keys...
|
||||||
|
# storage type: opendal, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase
|
||||||
|
STORAGE_TYPE=opendal
|
||||||
|
|
||||||
|
# Apache OpenDAL storage configuration, refer to https://github.com/apache/opendal
|
||||||
|
OPENDAL_SCHEME=fs
|
||||||
|
OPENDAL_FS_ROOT=storage
|
||||||
|
|
||||||
|
# S3 Storage configuration
|
||||||
|
S3_USE_AWS_MANAGED_IAM=false
|
||||||
|
S3_ENDPOINT=https://your-bucket-name.storage.s3.cloudflare.com
|
||||||
|
S3_BUCKET_NAME=your-bucket-name
|
||||||
|
S3_ACCESS_KEY=your-access-key
|
||||||
|
S3_SECRET_KEY=your-secret-key
|
||||||
|
S3_REGION=your-region
|
||||||
|
|
||||||
|
# Azure Blob Storage configuration
|
||||||
|
AZURE_BLOB_ACCOUNT_NAME=your-account-name
|
||||||
|
AZURE_BLOB_ACCOUNT_KEY=your-account-key
|
||||||
|
AZURE_BLOB_CONTAINER_NAME=your-container-name
|
||||||
|
AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
|
||||||
|
|
||||||
|
# Aliyun oss Storage configuration
|
||||||
|
ALIYUN_OSS_BUCKET_NAME=your-bucket-name
|
||||||
|
ALIYUN_OSS_ACCESS_KEY=your-access-key
|
||||||
|
ALIYUN_OSS_SECRET_KEY=your-secret-key
|
||||||
|
ALIYUN_OSS_ENDPOINT=your-endpoint
|
||||||
|
ALIYUN_OSS_AUTH_VERSION=v1
|
||||||
|
ALIYUN_OSS_REGION=your-region
|
||||||
|
# Don't start with '/'. OSS doesn't support leading slash in object names.
|
||||||
|
ALIYUN_OSS_PATH=your-path
|
||||||
|
|
||||||
|
# Google Storage configuration
|
||||||
|
GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
|
||||||
|
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
|
||||||
|
|
||||||
|
# Tencent COS Storage configuration
|
||||||
|
TENCENT_COS_BUCKET_NAME=your-bucket-name
|
||||||
|
TENCENT_COS_SECRET_KEY=your-secret-key
|
||||||
|
TENCENT_COS_SECRET_ID=your-secret-id
|
||||||
|
TENCENT_COS_REGION=your-region
|
||||||
|
TENCENT_COS_SCHEME=your-scheme
|
||||||
|
|
||||||
|
# Huawei OBS Storage Configuration
|
||||||
|
HUAWEI_OBS_BUCKET_NAME=your-bucket-name
|
||||||
|
HUAWEI_OBS_SECRET_KEY=your-secret-key
|
||||||
|
HUAWEI_OBS_ACCESS_KEY=your-access-key
|
||||||
|
HUAWEI_OBS_SERVER=your-server-url
|
||||||
|
|
||||||
|
# Baidu OBS Storage Configuration
|
||||||
|
BAIDU_OBS_BUCKET_NAME=your-bucket-name
|
||||||
|
BAIDU_OBS_SECRET_KEY=your-secret-key
|
||||||
|
BAIDU_OBS_ACCESS_KEY=your-access-key
|
||||||
|
BAIDU_OBS_ENDPOINT=your-server-url
|
||||||
|
|
||||||
|
# OCI Storage configuration
|
||||||
|
OCI_ENDPOINT=your-endpoint
|
||||||
|
OCI_BUCKET_NAME=your-bucket-name
|
||||||
|
OCI_ACCESS_KEY=your-access-key
|
||||||
|
OCI_SECRET_KEY=your-secret-key
|
||||||
|
OCI_REGION=your-region
|
||||||
|
|
||||||
|
# Volcengine tos Storage configuration
|
||||||
|
VOLCENGINE_TOS_ENDPOINT=your-endpoint
|
||||||
|
VOLCENGINE_TOS_BUCKET_NAME=your-bucket-name
|
||||||
|
VOLCENGINE_TOS_ACCESS_KEY=your-access-key
|
||||||
|
VOLCENGINE_TOS_SECRET_KEY=your-secret-key
|
||||||
|
VOLCENGINE_TOS_REGION=your-region
|
||||||
|
|
||||||
|
# Supabase Storage Configuration
|
||||||
|
SUPABASE_BUCKET_NAME=your-bucket-name
|
||||||
|
SUPABASE_API_KEY=your-access-key
|
||||||
|
SUPABASE_URL=your-server-url
|
||||||
|
|
||||||
|
# CORS configuration
|
||||||
|
WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
|
||||||
|
CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
|
||||||
|
# When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site’s top-level domain (e.g., `example.com`). Leading dots are optional.
|
||||||
|
COOKIE_DOMAIN=
|
||||||
|
|
||||||
|
# Vector database configuration
|
||||||
|
# Supported values are `weaviate`, `oceanbase`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `opengauss`, `tablestore`, `vastbase`, `tidb`, `tidb_on_qdrant`, `baidu`, `lindorm`, `huawei_cloud`, `upstash`, `matrixone`.
|
||||||
|
VECTOR_STORE=weaviate
|
||||||
|
# Prefix used to create collection name in vector database
|
||||||
|
VECTOR_INDEX_NAME_PREFIX=Vector_index
|
||||||
|
|
||||||
|
# Weaviate configuration
|
||||||
|
WEAVIATE_ENDPOINT=http://localhost:8080
|
||||||
|
WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
|
||||||
|
WEAVIATE_GRPC_ENABLED=false
|
||||||
|
WEAVIATE_BATCH_SIZE=100
|
||||||
|
WEAVIATE_TOKENIZATION=word
|
||||||
|
|
||||||
|
# OceanBase Vector configuration
|
||||||
|
OCEANBASE_VECTOR_HOST=127.0.0.1
|
||||||
|
OCEANBASE_VECTOR_PORT=2881
|
||||||
|
OCEANBASE_VECTOR_USER=root@test
|
||||||
|
OCEANBASE_VECTOR_PASSWORD=difyai123456
|
||||||
|
OCEANBASE_VECTOR_DATABASE=test
|
||||||
|
OCEANBASE_MEMORY_LIMIT=6G
|
||||||
|
OCEANBASE_ENABLE_HYBRID_SEARCH=false
|
||||||
|
OCEANBASE_FULLTEXT_PARSER=ik
|
||||||
|
SEEKDB_MEMORY_LIMIT=2G
|
||||||
|
|
||||||
|
# Qdrant configuration, use `http://localhost:6333` for local mode or `https://your-qdrant-cluster-url.qdrant.io` for remote mode
|
||||||
|
QDRANT_URL=http://localhost:6333
|
||||||
|
QDRANT_API_KEY=difyai123456
|
||||||
|
QDRANT_CLIENT_TIMEOUT=20
|
||||||
|
QDRANT_GRPC_ENABLED=false
|
||||||
|
QDRANT_GRPC_PORT=6334
|
||||||
|
QDRANT_REPLICATION_FACTOR=1
|
||||||
|
|
||||||
|
# Couchbase configuration
|
||||||
|
COUCHBASE_CONNECTION_STRING=127.0.0.1
|
||||||
|
COUCHBASE_USER=Administrator
|
||||||
|
COUCHBASE_PASSWORD=password
|
||||||
|
COUCHBASE_BUCKET_NAME=Embeddings
|
||||||
|
COUCHBASE_SCOPE_NAME=_default
|
||||||
|
|
||||||
|
# Milvus configuration
|
||||||
|
MILVUS_URI=http://127.0.0.1:19530
|
||||||
|
MILVUS_TOKEN=
|
||||||
|
MILVUS_USER=root
|
||||||
|
MILVUS_PASSWORD=Milvus
|
||||||
|
MILVUS_ANALYZER_PARAMS=
|
||||||
|
|
||||||
|
# MyScale configuration
|
||||||
|
MYSCALE_HOST=127.0.0.1
|
||||||
|
MYSCALE_PORT=8123
|
||||||
|
MYSCALE_USER=default
|
||||||
|
MYSCALE_PASSWORD=
|
||||||
|
MYSCALE_DATABASE=default
|
||||||
|
MYSCALE_FTS_PARAMS=
|
||||||
|
|
||||||
|
# Relyt configuration
|
||||||
|
RELYT_HOST=127.0.0.1
|
||||||
|
RELYT_PORT=5432
|
||||||
|
RELYT_USER=postgres
|
||||||
|
RELYT_PASSWORD=postgres
|
||||||
|
RELYT_DATABASE=postgres
|
||||||
|
|
||||||
|
# Tencent configuration
|
||||||
|
TENCENT_VECTOR_DB_URL=http://127.0.0.1
|
||||||
|
TENCENT_VECTOR_DB_API_KEY=dify
|
||||||
|
TENCENT_VECTOR_DB_TIMEOUT=30
|
||||||
|
TENCENT_VECTOR_DB_USERNAME=dify
|
||||||
|
TENCENT_VECTOR_DB_DATABASE=dify
|
||||||
|
TENCENT_VECTOR_DB_SHARD=1
|
||||||
|
TENCENT_VECTOR_DB_REPLICAS=2
|
||||||
|
TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH=false
|
||||||
|
|
||||||
|
# ElasticSearch configuration
|
||||||
|
ELASTICSEARCH_HOST=127.0.0.1
|
||||||
|
ELASTICSEARCH_PORT=9200
|
||||||
|
ELASTICSEARCH_USERNAME=elastic
|
||||||
|
ELASTICSEARCH_PASSWORD=elastic
|
||||||
|
|
||||||
|
# PGVECTO_RS configuration
|
||||||
|
PGVECTO_RS_HOST=localhost
|
||||||
|
PGVECTO_RS_PORT=5431
|
||||||
|
PGVECTO_RS_USER=postgres
|
||||||
|
PGVECTO_RS_PASSWORD=difyai123456
|
||||||
|
PGVECTO_RS_DATABASE=postgres
|
||||||
|
|
||||||
|
# PGVector configuration
|
||||||
|
PGVECTOR_HOST=127.0.0.1
|
||||||
|
PGVECTOR_PORT=5433
|
||||||
|
PGVECTOR_USER=postgres
|
||||||
|
PGVECTOR_PASSWORD=postgres
|
||||||
|
PGVECTOR_DATABASE=postgres
|
||||||
|
PGVECTOR_MIN_CONNECTION=1
|
||||||
|
PGVECTOR_MAX_CONNECTION=5
|
||||||
|
|
||||||
|
# TableStore Vector configuration
|
||||||
|
TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
|
||||||
|
TABLESTORE_INSTANCE_NAME=instance-name
|
||||||
|
TABLESTORE_ACCESS_KEY_ID=xxx
|
||||||
|
TABLESTORE_ACCESS_KEY_SECRET=xxx
|
||||||
|
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
|
||||||
|
|
||||||
|
# Tidb Vector configuration
|
||||||
|
TIDB_VECTOR_HOST=xxx.eu-central-1.xxx.aws.tidbcloud.com
|
||||||
|
TIDB_VECTOR_PORT=4000
|
||||||
|
TIDB_VECTOR_USER=xxx.root
|
||||||
|
TIDB_VECTOR_PASSWORD=xxxxxx
|
||||||
|
TIDB_VECTOR_DATABASE=dify
|
||||||
|
|
||||||
|
# Tidb on qdrant configuration
|
||||||
|
TIDB_ON_QDRANT_URL=http://127.0.0.1
|
||||||
|
TIDB_ON_QDRANT_API_KEY=dify
|
||||||
|
TIDB_ON_QDRANT_CLIENT_TIMEOUT=20
|
||||||
|
TIDB_ON_QDRANT_GRPC_ENABLED=false
|
||||||
|
TIDB_ON_QDRANT_GRPC_PORT=6334
|
||||||
|
TIDB_PUBLIC_KEY=dify
|
||||||
|
TIDB_PRIVATE_KEY=dify
|
||||||
|
TIDB_API_URL=http://127.0.0.1
|
||||||
|
TIDB_IAM_API_URL=http://127.0.0.1
|
||||||
|
TIDB_REGION=regions/aws-us-east-1
|
||||||
|
TIDB_PROJECT_ID=dify
|
||||||
|
TIDB_SPEND_LIMIT=100
|
||||||
|
|
||||||
|
# Chroma configuration
|
||||||
|
CHROMA_HOST=127.0.0.1
|
||||||
|
CHROMA_PORT=8000
|
||||||
|
CHROMA_TENANT=default_tenant
|
||||||
|
CHROMA_DATABASE=default_database
|
||||||
|
CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
|
||||||
|
CHROMA_AUTH_CREDENTIALS=difyai123456
|
||||||
|
|
||||||
|
# AnalyticDB configuration
|
||||||
|
ANALYTICDB_KEY_ID=your-ak
|
||||||
|
ANALYTICDB_KEY_SECRET=your-sk
|
||||||
|
ANALYTICDB_REGION_ID=cn-hangzhou
|
||||||
|
ANALYTICDB_INSTANCE_ID=gp-ab123456
|
||||||
|
ANALYTICDB_ACCOUNT=testaccount
|
||||||
|
ANALYTICDB_PASSWORD=testpassword
|
||||||
|
ANALYTICDB_NAMESPACE=dify
|
||||||
|
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
|
||||||
|
ANALYTICDB_HOST=gp-test.aliyuncs.com
|
||||||
|
ANALYTICDB_PORT=5432
|
||||||
|
ANALYTICDB_MIN_CONNECTION=1
|
||||||
|
ANALYTICDB_MAX_CONNECTION=5
|
||||||
|
|
||||||
|
# OpenSearch configuration
|
||||||
|
OPENSEARCH_HOST=127.0.0.1
|
||||||
|
OPENSEARCH_PORT=9200
|
||||||
|
OPENSEARCH_USER=admin
|
||||||
|
OPENSEARCH_PASSWORD=admin
|
||||||
|
OPENSEARCH_SECURE=true
|
||||||
|
OPENSEARCH_VERIFY_CERTS=true
|
||||||
|
|
||||||
|
# Baidu configuration
|
||||||
|
BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
|
||||||
|
BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
|
||||||
|
BAIDU_VECTOR_DB_ACCOUNT=root
|
||||||
|
BAIDU_VECTOR_DB_API_KEY=dify
|
||||||
|
BAIDU_VECTOR_DB_DATABASE=dify
|
||||||
|
BAIDU_VECTOR_DB_SHARD=1
|
||||||
|
BAIDU_VECTOR_DB_REPLICAS=3
|
||||||
|
BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER
|
||||||
|
BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE
|
||||||
|
|
||||||
|
# Upstash configuration
|
||||||
|
UPSTASH_VECTOR_URL=your-server-url
|
||||||
|
UPSTASH_VECTOR_TOKEN=your-access-token
|
||||||
|
|
||||||
|
# ViKingDB configuration
|
||||||
|
VIKINGDB_ACCESS_KEY=your-ak
|
||||||
|
VIKINGDB_SECRET_KEY=your-sk
|
||||||
|
VIKINGDB_REGION=cn-shanghai
|
||||||
|
VIKINGDB_HOST=api-vikingdb.xxx.volces.com
|
||||||
|
VIKINGDB_SCHEMA=http
|
||||||
|
VIKINGDB_CONNECTION_TIMEOUT=30
|
||||||
|
VIKINGDB_SOCKET_TIMEOUT=30
|
||||||
|
|
||||||
|
# Matrixone configuration
|
||||||
|
MATRIXONE_HOST=127.0.0.1
|
||||||
|
MATRIXONE_PORT=6001
|
||||||
|
MATRIXONE_USER=dump
|
||||||
|
MATRIXONE_PASSWORD=111
|
||||||
|
MATRIXONE_DATABASE=dify
|
||||||
|
|
||||||
|
# Lindorm configuration
|
||||||
|
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
|
||||||
|
LINDORM_USERNAME=admin
|
||||||
|
LINDORM_PASSWORD=admin
|
||||||
|
LINDORM_USING_UGC=True
|
||||||
|
LINDORM_QUERY_TIMEOUT=1
|
||||||
|
|
||||||
|
# AlibabaCloud MySQL Vector configuration
|
||||||
|
ALIBABACLOUD_MYSQL_HOST=127.0.0.1
|
||||||
|
ALIBABACLOUD_MYSQL_PORT=3306
|
||||||
|
ALIBABACLOUD_MYSQL_USER=root
|
||||||
|
ALIBABACLOUD_MYSQL_PASSWORD=root
|
||||||
|
ALIBABACLOUD_MYSQL_DATABASE=dify
|
||||||
|
ALIBABACLOUD_MYSQL_MAX_CONNECTION=5
|
||||||
|
ALIBABACLOUD_MYSQL_HNSW_M=6
|
||||||
|
|
||||||
|
# openGauss configuration
|
||||||
|
OPENGAUSS_HOST=127.0.0.1
|
||||||
|
OPENGAUSS_PORT=6600
|
||||||
|
OPENGAUSS_USER=postgres
|
||||||
|
OPENGAUSS_PASSWORD=Dify@123
|
||||||
|
OPENGAUSS_DATABASE=dify
|
||||||
|
OPENGAUSS_MIN_CONNECTION=1
|
||||||
|
OPENGAUSS_MAX_CONNECTION=5
|
||||||
|
|
||||||
|
# Upload configuration
|
||||||
|
UPLOAD_FILE_SIZE_LIMIT=15
|
||||||
|
UPLOAD_FILE_BATCH_LIMIT=5
|
||||||
|
UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
|
||||||
|
UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
|
||||||
|
UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
|
||||||
|
|
||||||
|
# Comma-separated list of file extensions blocked from upload for security reasons.
|
||||||
|
# Extensions should be lowercase without dots (e.g., exe,bat,sh,dll).
|
||||||
|
# Empty by default to allow all file types.
|
||||||
|
# Recommended: exe,bat,cmd,com,scr,vbs,ps1,msi,dll
|
||||||
|
UPLOAD_FILE_EXTENSION_BLACKLIST=
|
||||||
|
|
||||||
|
# Model configuration
|
||||||
|
MULTIMODAL_SEND_FORMAT=base64
|
||||||
|
PROMPT_GENERATION_MAX_TOKENS=512
|
||||||
|
CODE_GENERATION_MAX_TOKENS=1024
|
||||||
|
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
|
||||||
|
|
||||||
|
# Mail configuration, support: resend, smtp, sendgrid
|
||||||
|
MAIL_TYPE=
|
||||||
|
# If using SendGrid, use the 'from' field for authentication if necessary.
|
||||||
|
MAIL_DEFAULT_SEND_FROM=no-reply <no-reply@dify.ai>
|
||||||
|
# resend configuration
|
||||||
|
RESEND_API_KEY=
|
||||||
|
RESEND_API_URL=https://api.resend.com
|
||||||
|
# smtp configuration
|
||||||
|
SMTP_SERVER=smtp.gmail.com
|
||||||
|
SMTP_PORT=465
|
||||||
|
SMTP_USERNAME=123
|
||||||
|
SMTP_PASSWORD=abc
|
||||||
|
SMTP_USE_TLS=true
|
||||||
|
SMTP_OPPORTUNISTIC_TLS=false
|
||||||
|
# SendGrid configuration
|
||||||
|
SENDGRID_API_KEY=
|
||||||
|
# Sentry configuration
|
||||||
|
SENTRY_DSN=
|
||||||
|
|
||||||
|
# DEBUG
|
||||||
|
DEBUG=false
|
||||||
|
ENABLE_REQUEST_LOGGING=False
|
||||||
|
SQLALCHEMY_ECHO=false
|
||||||
|
|
||||||
|
# Notion import configuration, support public and internal
|
||||||
|
NOTION_INTEGRATION_TYPE=public
|
||||||
|
NOTION_CLIENT_SECRET=your-client-secret
|
||||||
|
NOTION_CLIENT_ID=your-client-id
|
||||||
|
NOTION_INTERNAL_SECRET=your-internal-secret
|
||||||
|
|
||||||
|
ETL_TYPE=dify
|
||||||
|
UNSTRUCTURED_API_URL=
|
||||||
|
UNSTRUCTURED_API_KEY=
|
||||||
|
SCARF_NO_ANALYTICS=true
|
||||||
|
|
||||||
|
# SSRF proxy configuration
|
||||||
|
SSRF_PROXY_HTTP_URL=
|
||||||
|
SSRF_PROXY_HTTPS_URL=
|
||||||
|
SSRF_DEFAULT_MAX_RETRIES=3
|
||||||
|
SSRF_DEFAULT_TIME_OUT=5
|
||||||
|
SSRF_DEFAULT_CONNECT_TIME_OUT=5
|
||||||
|
SSRF_DEFAULT_READ_TIME_OUT=5
|
||||||
|
SSRF_DEFAULT_WRITE_TIME_OUT=5
|
||||||
|
SSRF_POOL_MAX_CONNECTIONS=100
|
||||||
|
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
|
||||||
|
SSRF_POOL_KEEPALIVE_EXPIRY=5.0
|
||||||
|
|
||||||
|
BATCH_UPLOAD_LIMIT=10
|
||||||
|
KEYWORD_DATA_SOURCE_TYPE=database
|
||||||
|
|
||||||
|
# Workflow file upload limit
|
||||||
|
WORKFLOW_FILE_UPLOAD_LIMIT=10
|
||||||
|
|
||||||
|
# CODE EXECUTION CONFIGURATION
|
||||||
|
CODE_EXECUTION_ENDPOINT=http://127.0.0.1:8194
|
||||||
|
CODE_EXECUTION_API_KEY=dify-sandbox
|
||||||
|
CODE_EXECUTION_SSL_VERIFY=True
|
||||||
|
CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
|
||||||
|
CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
|
||||||
|
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
|
||||||
|
CODE_EXECUTION_CONNECT_TIMEOUT=10
|
||||||
|
CODE_EXECUTION_READ_TIMEOUT=60
|
||||||
|
CODE_EXECUTION_WRITE_TIMEOUT=10
|
||||||
|
CODE_MAX_NUMBER=9223372036854775807
|
||||||
|
CODE_MIN_NUMBER=-9223372036854775808
|
||||||
|
CODE_MAX_STRING_LENGTH=400000
|
||||||
|
TEMPLATE_TRANSFORM_MAX_LENGTH=400000
|
||||||
|
CODE_MAX_STRING_ARRAY_LENGTH=30
|
||||||
|
CODE_MAX_OBJECT_ARRAY_LENGTH=30
|
||||||
|
CODE_MAX_NUMBER_ARRAY_LENGTH=1000
|
||||||
|
|
||||||
|
# API Tool configuration
|
||||||
|
API_TOOL_DEFAULT_CONNECT_TIMEOUT=10
|
||||||
|
API_TOOL_DEFAULT_READ_TIMEOUT=60
|
||||||
|
|
||||||
|
# HTTP Node configuration
|
||||||
|
HTTP_REQUEST_MAX_CONNECT_TIMEOUT=300
|
||||||
|
HTTP_REQUEST_MAX_READ_TIMEOUT=600
|
||||||
|
HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
|
||||||
|
HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
|
||||||
|
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
|
||||||
|
HTTP_REQUEST_NODE_SSL_VERIFY=True
|
||||||
|
|
||||||
|
# Webhook request configuration
|
||||||
|
WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760
|
||||||
|
|
||||||
|
# Respect X-* headers to redirect clients
|
||||||
|
RESPECT_XFORWARD_HEADERS_ENABLED=false
|
||||||
|
|
||||||
|
# Log file path
|
||||||
|
LOG_FILE=
|
||||||
|
# Log file max size, the unit is MB
|
||||||
|
LOG_FILE_MAX_SIZE=20
|
||||||
|
# Log file max backup count
|
||||||
|
LOG_FILE_BACKUP_COUNT=5
|
||||||
|
# Log date format
|
||||||
|
LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
|
||||||
|
# Log Timezone
|
||||||
|
LOG_TZ=UTC
|
||||||
|
# Log format
|
||||||
|
LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s
|
||||||
|
|
||||||
|
# Indexing configuration
|
||||||
|
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
|
||||||
|
|
||||||
|
# Workflow runtime configuration
|
||||||
|
WORKFLOW_MAX_EXECUTION_STEPS=500
|
||||||
|
WORKFLOW_MAX_EXECUTION_TIME=1200
|
||||||
|
WORKFLOW_CALL_MAX_DEPTH=5
|
||||||
|
MAX_VARIABLE_SIZE=204800
|
||||||
|
|
||||||
|
# GraphEngine Worker Pool Configuration
|
||||||
|
# Minimum number of workers per GraphEngine instance (default: 1)
|
||||||
|
GRAPH_ENGINE_MIN_WORKERS=1
|
||||||
|
# Maximum number of workers per GraphEngine instance (default: 10)
|
||||||
|
GRAPH_ENGINE_MAX_WORKERS=10
|
||||||
|
# Queue depth threshold that triggers worker scale up (default: 3)
|
||||||
|
GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
|
||||||
|
# Seconds of idle time before scaling down workers (default: 5.0)
|
||||||
|
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
|
||||||
|
|
||||||
|
# Workflow storage configuration
|
||||||
|
# Options: rdbms, hybrid
|
||||||
|
# rdbms: Use only the relational database (default)
|
||||||
|
# hybrid: Save new data to object storage, read from both object storage and RDBMS
|
||||||
|
WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
|
||||||
|
|
||||||
|
# Repository configuration
|
||||||
|
# Core workflow execution repository implementation
|
||||||
|
CORE_WORKFLOW_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository
|
||||||
|
|
||||||
|
# Core workflow node execution repository implementation
|
||||||
|
CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository
|
||||||
|
|
||||||
|
# API workflow node execution repository implementation
|
||||||
|
API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
|
||||||
|
|
||||||
|
# API workflow run repository implementation
|
||||||
|
API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
|
||||||
|
# Workflow log cleanup configuration
|
||||||
|
# Enable automatic cleanup of workflow run logs to manage database size
|
||||||
|
WORKFLOW_LOG_CLEANUP_ENABLED=false
|
||||||
|
# Number of days to retain workflow run logs (default: 30 days)
|
||||||
|
WORKFLOW_LOG_RETENTION_DAYS=30
|
||||||
|
# Batch size for workflow log cleanup operations (default: 100)
|
||||||
|
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
|
||||||
|
|
||||||
|
# App configuration
|
||||||
|
APP_MAX_EXECUTION_TIME=1200
|
||||||
|
APP_MAX_ACTIVE_REQUESTS=0
|
||||||
|
|
||||||
|
# Celery beat configuration
|
||||||
|
CELERY_BEAT_SCHEDULER_TIME=1
|
||||||
|
|
||||||
|
# Celery schedule tasks configuration
|
||||||
|
ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
|
||||||
|
ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
|
||||||
|
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
|
||||||
|
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
|
||||||
|
ENABLE_CLEAN_MESSAGES=false
|
||||||
|
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
|
||||||
|
ENABLE_DATASETS_QUEUE_MONITOR=false
|
||||||
|
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
|
||||||
|
ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true
|
||||||
|
# Interval in minutes for polling scheduled workflows (default: 1 min)
|
||||||
|
WORKFLOW_SCHEDULE_POLLER_INTERVAL=1
|
||||||
|
WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100
|
||||||
|
# Maximum number of scheduled workflows to dispatch per tick (0 for unlimited)
|
||||||
|
WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0
|
||||||
|
|
||||||
|
# Position configuration
|
||||||
|
POSITION_TOOL_PINS=
|
||||||
|
POSITION_TOOL_INCLUDES=
|
||||||
|
POSITION_TOOL_EXCLUDES=
|
||||||
|
|
||||||
|
POSITION_PROVIDER_PINS=
|
||||||
|
POSITION_PROVIDER_INCLUDES=
|
||||||
|
POSITION_PROVIDER_EXCLUDES=
|
||||||
|
|
||||||
|
# Plugin configuration
|
||||||
|
PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
|
||||||
|
PLUGIN_DAEMON_URL=http://127.0.0.1:5002
|
||||||
|
PLUGIN_REMOTE_INSTALL_PORT=5003
|
||||||
|
PLUGIN_REMOTE_INSTALL_HOST=localhost
|
||||||
|
PLUGIN_MAX_PACKAGE_SIZE=15728640
|
||||||
|
INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
|
||||||
|
|
||||||
|
# Marketplace configuration
|
||||||
|
MARKETPLACE_ENABLED=true
|
||||||
|
MARKETPLACE_API_URL=https://marketplace.dify.ai
|
||||||
|
|
||||||
|
# Endpoint configuration
|
||||||
|
ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
|
||||||
|
|
||||||
|
# Reset password token expiry minutes
|
||||||
|
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
|
||||||
|
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
|
||||||
|
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
|
||||||
|
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
|
||||||
|
|
||||||
|
CREATE_TIDB_SERVICE_JOB_ENABLED=false
|
||||||
|
|
||||||
|
# Maximum number of threads submitted to a ThreadPool for parallel node execution
|
||||||
|
MAX_SUBMIT_COUNT=100
|
||||||
|
# Lockout duration in seconds
|
||||||
|
LOGIN_LOCKOUT_DURATION=86400
|
||||||
|
|
||||||
|
# Enable OpenTelemetry
|
||||||
|
ENABLE_OTEL=false
|
||||||
|
OTLP_TRACE_ENDPOINT=
|
||||||
|
OTLP_METRIC_ENDPOINT=
|
||||||
|
OTLP_BASE_ENDPOINT=http://localhost:4318
|
||||||
|
OTLP_API_KEY=
|
||||||
|
OTEL_EXPORTER_OTLP_PROTOCOL=
|
||||||
|
OTEL_EXPORTER_TYPE=otlp
|
||||||
|
OTEL_SAMPLING_RATE=0.1
|
||||||
|
OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
|
||||||
|
OTEL_MAX_QUEUE_SIZE=2048
|
||||||
|
OTEL_MAX_EXPORT_BATCH_SIZE=512
|
||||||
|
OTEL_METRIC_EXPORT_INTERVAL=60000
|
||||||
|
OTEL_BATCH_EXPORT_TIMEOUT=10000
|
||||||
|
OTEL_METRIC_EXPORT_TIMEOUT=30000
|
||||||
|
|
||||||
|
# Prevent Clickjacking
|
||||||
|
ALLOW_EMBED=false
|
||||||
|
|
||||||
|
# Dataset queue monitor configuration
|
||||||
|
QUEUE_MONITOR_THRESHOLD=200
|
||||||
|
# You can configure multiple addresses, separated by commas, e.g. test1@dify.ai,test2@dify.ai
|
||||||
|
QUEUE_MONITOR_ALERT_EMAILS=
|
||||||
|
# Monitor interval in minutes, default is 30 minutes
|
||||||
|
QUEUE_MONITOR_INTERVAL=30
|
||||||
|
|
||||||
|
# Swagger UI configuration
|
||||||
|
SWAGGER_UI_ENABLED=true
|
||||||
|
SWAGGER_UI_PATH=/swagger-ui.html
|
||||||
|
|
||||||
|
# Whether to encrypt dataset IDs when exporting DSL files (default: true)
|
||||||
|
# Set to false to export dataset IDs as plain text for easier cross-environment import
|
||||||
|
DSL_EXPORT_ENCRYPT_DATASET_ID=true
|
||||||
|
|
||||||
|
# Tenant isolated task queue configuration
|
||||||
|
TENANT_ISOLATED_TASK_CONCURRENCY=1
|
||||||
|
|
||||||
|
# Maximum number of segments for dataset segments API (0 for unlimited)
|
||||||
|
DATASET_MAX_SEGMENTS_PER_REQUEST=0
|
||||||
105
dify/api/.importlinter
Normal file
105
dify/api/.importlinter
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
[importlinter]
|
||||||
|
root_packages =
|
||||||
|
core
|
||||||
|
configs
|
||||||
|
controllers
|
||||||
|
models
|
||||||
|
tasks
|
||||||
|
services
|
||||||
|
|
||||||
|
[importlinter:contract:workflow]
|
||||||
|
name = Workflow
|
||||||
|
type=layers
|
||||||
|
layers =
|
||||||
|
graph_engine
|
||||||
|
graph_events
|
||||||
|
graph
|
||||||
|
nodes
|
||||||
|
node_events
|
||||||
|
entities
|
||||||
|
containers =
|
||||||
|
core.workflow
|
||||||
|
ignore_imports =
|
||||||
|
core.workflow.nodes.base.node -> core.workflow.graph_events
|
||||||
|
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_events
|
||||||
|
core.workflow.nodes.loop.loop_node -> core.workflow.graph_events
|
||||||
|
|
||||||
|
core.workflow.nodes.node_factory -> core.workflow.graph
|
||||||
|
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine
|
||||||
|
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph
|
||||||
|
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine.command_channels
|
||||||
|
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine
|
||||||
|
core.workflow.nodes.loop.loop_node -> core.workflow.graph
|
||||||
|
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine.command_channels
|
||||||
|
|
||||||
|
[importlinter:contract:rsc]
|
||||||
|
name = RSC
|
||||||
|
type = layers
|
||||||
|
layers =
|
||||||
|
graph_engine
|
||||||
|
response_coordinator
|
||||||
|
containers =
|
||||||
|
core.workflow.graph_engine
|
||||||
|
|
||||||
|
[importlinter:contract:worker]
|
||||||
|
name = Worker
|
||||||
|
type = layers
|
||||||
|
layers =
|
||||||
|
graph_engine
|
||||||
|
worker
|
||||||
|
containers =
|
||||||
|
core.workflow.graph_engine
|
||||||
|
|
||||||
|
[importlinter:contract:graph-engine-architecture]
|
||||||
|
name = Graph Engine Architecture
|
||||||
|
type = layers
|
||||||
|
layers =
|
||||||
|
graph_engine
|
||||||
|
orchestration
|
||||||
|
command_processing
|
||||||
|
event_management
|
||||||
|
error_handler
|
||||||
|
graph_traversal
|
||||||
|
graph_state_manager
|
||||||
|
worker_management
|
||||||
|
domain
|
||||||
|
containers =
|
||||||
|
core.workflow.graph_engine
|
||||||
|
|
||||||
|
[importlinter:contract:domain-isolation]
|
||||||
|
name = Domain Model Isolation
|
||||||
|
type = forbidden
|
||||||
|
source_modules =
|
||||||
|
core.workflow.graph_engine.domain
|
||||||
|
forbidden_modules =
|
||||||
|
core.workflow.graph_engine.worker_management
|
||||||
|
core.workflow.graph_engine.command_channels
|
||||||
|
core.workflow.graph_engine.layers
|
||||||
|
core.workflow.graph_engine.protocols
|
||||||
|
|
||||||
|
[importlinter:contract:worker-management]
|
||||||
|
name = Worker Management
|
||||||
|
type = forbidden
|
||||||
|
source_modules =
|
||||||
|
core.workflow.graph_engine.worker_management
|
||||||
|
forbidden_modules =
|
||||||
|
core.workflow.graph_engine.orchestration
|
||||||
|
core.workflow.graph_engine.command_processing
|
||||||
|
core.workflow.graph_engine.event_management
|
||||||
|
|
||||||
|
|
||||||
|
[importlinter:contract:graph-traversal-components]
|
||||||
|
name = Graph Traversal Components
|
||||||
|
type = layers
|
||||||
|
layers =
|
||||||
|
edge_processor
|
||||||
|
skip_propagator
|
||||||
|
containers =
|
||||||
|
core.workflow.graph_engine.graph_traversal
|
||||||
|
|
||||||
|
[importlinter:contract:command-channels]
|
||||||
|
name = Command Channels Independence
|
||||||
|
type = independence
|
||||||
|
modules =
|
||||||
|
core.workflow.graph_engine.command_channels.in_memory_channel
|
||||||
|
core.workflow.graph_engine.command_channels.redis_channel
|
||||||
113
dify/api/.ruff.toml
Normal file
113
dify/api/.ruff.toml
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
exclude = ["migrations/*"]
|
||||||
|
line-length = 120
|
||||||
|
|
||||||
|
[format]
|
||||||
|
quote-style = "double"
|
||||||
|
|
||||||
|
[lint]
|
||||||
|
preview = true
|
||||||
|
select = [
|
||||||
|
"B", # flake8-bugbear rules
|
||||||
|
"C4", # flake8-comprehensions
|
||||||
|
"E", # pycodestyle E rules
|
||||||
|
"F", # pyflakes rules
|
||||||
|
"FURB", # refurb rules
|
||||||
|
"I", # isort rules
|
||||||
|
"N", # pep8-naming
|
||||||
|
"PT", # flake8-pytest-style rules
|
||||||
|
"PLC0208", # iteration-over-set
|
||||||
|
"PLC0414", # useless-import-alias
|
||||||
|
"PLE0604", # invalid-all-object
|
||||||
|
"PLE0605", # invalid-all-format
|
||||||
|
"PLR0402", # manual-from-import
|
||||||
|
"PLR1711", # useless-return
|
||||||
|
"PLR1714", # repeated-equality-comparison
|
||||||
|
"RUF013", # implicit-optional
|
||||||
|
"RUF019", # unnecessary-key-check
|
||||||
|
"RUF100", # unused-noqa
|
||||||
|
"RUF101", # redirected-noqa
|
||||||
|
"RUF200", # invalid-pyproject-toml
|
||||||
|
"RUF022", # unsorted-dunder-all
|
||||||
|
"S506", # unsafe-yaml-load
|
||||||
|
"SIM", # flake8-simplify rules
|
||||||
|
"T201", # print-found
|
||||||
|
"TRY400", # error-instead-of-exception
|
||||||
|
"TRY401", # verbose-log-message
|
||||||
|
"UP", # pyupgrade rules
|
||||||
|
"W191", # tab-indentation
|
||||||
|
"W605", # invalid-escape-sequence
|
||||||
|
# security related linting rules
|
||||||
|
# RCE protection (sort of)
|
||||||
|
"S102", # exec-builtin, disallow use of `exec`
|
||||||
|
"S307", # suspicious-eval-usage, disallow use of `eval` and `ast.literal_eval`
|
||||||
|
"S301", # suspicious-pickle-usage, disallow use of `pickle` and its wrappers.
|
||||||
|
"S302", # suspicious-marshal-usage, disallow use of `marshal` module
|
||||||
|
"S311", # suspicious-non-cryptographic-random-usage
|
||||||
|
"G001", # don't use str format to logging messages
|
||||||
|
"G003", # don't use + in logging messages
|
||||||
|
"G004", # don't use f-strings to format logging messages
|
||||||
|
"UP042", # use StrEnum
|
||||||
|
]
|
||||||
|
|
||||||
|
ignore = [
|
||||||
|
"E402", # module-import-not-at-top-of-file
|
||||||
|
"E711", # none-comparison
|
||||||
|
"E712", # true-false-comparison
|
||||||
|
"E721", # type-comparison
|
||||||
|
"E722", # bare-except
|
||||||
|
"F821", # undefined-name
|
||||||
|
"F841", # unused-variable
|
||||||
|
"FURB113", # repeated-append
|
||||||
|
"FURB152", # math-constant
|
||||||
|
"UP007", # non-pep604-annotation
|
||||||
|
"UP032", # f-string
|
||||||
|
"UP045", # non-pep604-annotation-optional
|
||||||
|
"B005", # strip-with-multi-characters
|
||||||
|
"B006", # mutable-argument-default
|
||||||
|
"B007", # unused-loop-control-variable
|
||||||
|
"B026", # star-arg-unpacking-after-keyword-arg
|
||||||
|
"B901", # allow return in yield
|
||||||
|
"B903", # class-as-data-structure
|
||||||
|
"B904", # raise-without-from-inside-except
|
||||||
|
"B905", # zip-without-explicit-strict
|
||||||
|
"N806", # non-lowercase-variable-in-function
|
||||||
|
"N815", # mixed-case-variable-in-class-scope
|
||||||
|
"PT011", # pytest-raises-too-broad
|
||||||
|
"SIM102", # collapsible-if
|
||||||
|
"SIM103", # needless-bool
|
||||||
|
"SIM105", # suppressible-exception
|
||||||
|
"SIM107", # return-in-try-except-finally
|
||||||
|
"SIM108", # if-else-block-instead-of-if-exp
|
||||||
|
"SIM113", # enumerate-for-loop
|
||||||
|
"SIM117", # multiple-with-statements
|
||||||
|
"SIM210", # if-expr-with-true-false
|
||||||
|
]
|
||||||
|
|
||||||
|
[lint.per-file-ignores]
|
||||||
|
"__init__.py" = [
|
||||||
|
"F401", # unused-import
|
||||||
|
"F811", # redefined-while-unused
|
||||||
|
]
|
||||||
|
"configs/*" = [
|
||||||
|
"N802", # invalid-function-name
|
||||||
|
]
|
||||||
|
"core/model_runtime/callbacks/base_callback.py" = [
|
||||||
|
"T201",
|
||||||
|
]
|
||||||
|
"core/workflow/callbacks/workflow_logging_callback.py" = [
|
||||||
|
"T201",
|
||||||
|
]
|
||||||
|
"libs/gmpy2_pkcs10aep_cipher.py" = [
|
||||||
|
"N803", # invalid-argument-name
|
||||||
|
]
|
||||||
|
"tests/*" = [
|
||||||
|
"F811", # redefined-while-unused
|
||||||
|
"T201", # allow print in tests
|
||||||
|
]
|
||||||
|
|
||||||
|
[lint.pyflakes]
|
||||||
|
allowed-unused-imports = [
|
||||||
|
"_pytest.monkeypatch",
|
||||||
|
"tests.integration_tests",
|
||||||
|
"tests.unit_tests",
|
||||||
|
]
|
||||||
62
dify/api/AGENTS.md
Normal file
62
dify/api/AGENTS.md
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
# Agent Skill Index
|
||||||
|
|
||||||
|
Start with the section that best matches your need. Each entry lists the problems it solves plus key files/concepts so you know what to expect before opening it.
|
||||||
|
|
||||||
|
______________________________________________________________________
|
||||||
|
|
||||||
|
## Platform Foundations
|
||||||
|
|
||||||
|
- **[Infrastructure Overview](agent_skills/infra.md)**\
|
||||||
|
When to read this:
|
||||||
|
|
||||||
|
- You need to understand where a feature belongs in the architecture.
|
||||||
|
- You’re wiring storage, Redis, vector stores, or OTEL.
|
||||||
|
- You’re about to add CLI commands or async jobs.\
|
||||||
|
What it covers: configuration stack (`configs/app_config.py`, remote settings), storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`), Redis conventions (`extensions/ext_redis.py`), plugin runtime topology, vector-store factory (`core/rag/datasource/vdb/*`), observability hooks, SSRF proxy usage, and core CLI commands.
|
||||||
|
|
||||||
|
- **[Coding Style](agent_skills/coding_style.md)**\
|
||||||
|
When to read this:
|
||||||
|
|
||||||
|
- You’re writing or reviewing backend code and need the authoritative checklist.
|
||||||
|
- You’re unsure about Pydantic validators, SQLAlchemy session usage, or logging patterns.
|
||||||
|
- You want the exact lint/type/test commands used in PRs.\
|
||||||
|
Includes: Ruff & BasedPyright commands, no-annotation policy, session examples (`with Session(db.engine, ...)`), `@field_validator` usage, logging expectations, and the rule set for file size, helpers, and package management.
|
||||||
|
|
||||||
|
______________________________________________________________________
|
||||||
|
|
||||||
|
## Plugin & Extension Development
|
||||||
|
|
||||||
|
- **[Plugin Systems](agent_skills/plugin.md)**\
|
||||||
|
When to read this:
|
||||||
|
|
||||||
|
- You’re building or debugging a marketplace plugin.
|
||||||
|
- You need to know how manifests, providers, daemons, and migrations fit together.\
|
||||||
|
What it covers: plugin manifests (`core/plugin/entities/plugin.py`), installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands), runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent), daemon coordination (`core/plugin/entities/plugin_daemon.py`), and how provider registries surface capabilities to the rest of the platform.
|
||||||
|
|
||||||
|
- **[Plugin OAuth](agent_skills/plugin_oauth.md)**\
|
||||||
|
When to read this:
|
||||||
|
|
||||||
|
- You must integrate OAuth for a plugin or datasource.
|
||||||
|
- You’re handling credential encryption or refresh flows.\
|
||||||
|
Topics: credential storage, encryption helpers (`core/helper/provider_encryption.py`), OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`), and how console/API layers expose the flows.
|
||||||
|
|
||||||
|
______________________________________________________________________
|
||||||
|
|
||||||
|
## Workflow Entry & Execution
|
||||||
|
|
||||||
|
- **[Trigger Concepts](agent_skills/trigger.md)**\
|
||||||
|
When to read this:
|
||||||
|
- You’re debugging why a workflow didn’t start.
|
||||||
|
- You’re adding a new trigger type or hook.
|
||||||
|
- You need to trace async execution, draft debugging, or webhook/schedule pipelines.\
|
||||||
|
Details: Start-node taxonomy, webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`), async orchestration (`services/async_workflow_service.py`, Celery queues), debug event bus, and storage/logging interactions.
|
||||||
|
|
||||||
|
______________________________________________________________________
|
||||||
|
|
||||||
|
## Additional Notes for Agents
|
||||||
|
|
||||||
|
- All skill docs assume you follow the coding style guide—run Ruff/BasedPyright/tests listed there before submitting changes.
|
||||||
|
- When you cannot find an answer in these briefs, search the codebase using the paths referenced (e.g., `core/plugin/impl/tool.py`, `services/dataset_service.py`).
|
||||||
|
- If you run into cross-cutting concerns (tenancy, configuration, storage), check the infrastructure guide first; it links to most supporting modules.
|
||||||
|
- Keep multi-tenancy and configuration central: everything flows through `configs.dify_config` and `tenant_id`.
|
||||||
|
- When touching plugins or triggers, consult both the system overview and the specialised doc to ensure you adjust lifecycle, storage, and observability consistently.
|
||||||
101
dify/api/Dockerfile
Normal file
101
dify/api/Dockerfile
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
# base image
|
||||||
|
FROM python:3.12-slim-bookworm AS base
|
||||||
|
|
||||||
|
WORKDIR /app/api
|
||||||
|
|
||||||
|
# Install uv
|
||||||
|
ENV UV_VERSION=0.8.9
|
||||||
|
|
||||||
|
RUN pip install --no-cache-dir uv==${UV_VERSION}
|
||||||
|
|
||||||
|
|
||||||
|
FROM base AS packages
|
||||||
|
|
||||||
|
# if you are located in China, you can use the Aliyun mirror to speed things up
|
||||||
|
# RUN sed -i 's@deb.debian.org@mirrors.aliyun.com@g' /etc/apt/sources.list.d/debian.sources
|
||||||
|
|
||||||
|
RUN apt-get update \
|
||||||
|
&& apt-get install -y --no-install-recommends \
|
||||||
|
# basic environment
|
||||||
|
g++ \
|
||||||
|
# for building gmpy2
|
||||||
|
libmpfr-dev libmpc-dev
|
||||||
|
|
||||||
|
# Install Python dependencies
|
||||||
|
COPY pyproject.toml uv.lock ./
|
||||||
|
RUN uv sync --locked --no-dev
|
||||||
|
|
||||||
|
# production stage
|
||||||
|
FROM base AS production
|
||||||
|
|
||||||
|
ENV FLASK_APP=app.py
|
||||||
|
ENV EDITION=SELF_HOSTED
|
||||||
|
ENV DEPLOY_ENV=PRODUCTION
|
||||||
|
ENV CONSOLE_API_URL=http://127.0.0.1:5001
|
||||||
|
ENV CONSOLE_WEB_URL=http://127.0.0.1:3000
|
||||||
|
ENV SERVICE_API_URL=http://127.0.0.1:5001
|
||||||
|
ENV APP_WEB_URL=http://127.0.0.1:3000
|
||||||
|
|
||||||
|
EXPOSE 5001
|
||||||
|
|
||||||
|
# set timezone
|
||||||
|
ENV TZ=UTC
|
||||||
|
|
||||||
|
# Set UTF-8 locale
|
||||||
|
ENV LANG=en_US.UTF-8
|
||||||
|
ENV LC_ALL=en_US.UTF-8
|
||||||
|
ENV PYTHONIOENCODING=utf-8
|
||||||
|
|
||||||
|
WORKDIR /app/api
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
apt-get update \
|
||||||
|
# Install dependencies
|
||||||
|
&& apt-get install -y --no-install-recommends \
|
||||||
|
# basic environment
|
||||||
|
curl nodejs \
|
||||||
|
# for gmpy2 \
|
||||||
|
libgmp-dev libmpfr-dev libmpc-dev \
|
||||||
|
# For Security
|
||||||
|
expat libldap-2.5-0=2.5.13+dfsg-5 perl libsqlite3-0=3.40.1-2+deb12u2 zlib1g=1:1.2.13.dfsg-1 \
|
||||||
|
# install fonts to support the use of tools like pypdfium2
|
||||||
|
fonts-noto-cjk \
|
||||||
|
# install a package to improve the accuracy of guessing mime type and file extension
|
||||||
|
media-types \
|
||||||
|
# install libmagic to support the use of python-magic guess MIMETYPE
|
||||||
|
libmagic1 \
|
||||||
|
&& apt-get autoremove -y \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Copy Python environment and packages
|
||||||
|
ENV VIRTUAL_ENV=/app/api/.venv
|
||||||
|
COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
||||||
|
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
|
||||||
|
|
||||||
|
# Download nltk data
|
||||||
|
RUN mkdir -p /usr/local/share/nltk_data && NLTK_DATA=/usr/local/share/nltk_data python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger'); nltk.download('stopwords')" \
|
||||||
|
&& chmod -R 755 /usr/local/share/nltk_data
|
||||||
|
|
||||||
|
ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache
|
||||||
|
|
||||||
|
RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt2')"
|
||||||
|
|
||||||
|
# Copy source code
|
||||||
|
COPY . /app/api/
|
||||||
|
|
||||||
|
# Copy entrypoint
|
||||||
|
COPY docker/entrypoint.sh /entrypoint.sh
|
||||||
|
RUN chmod +x /entrypoint.sh
|
||||||
|
|
||||||
|
# Create non-root user and set permissions
|
||||||
|
RUN groupadd -r -g 1001 dify && \
|
||||||
|
useradd -r -u 1001 -g 1001 -s /bin/bash dify && \
|
||||||
|
mkdir -p /home/dify && \
|
||||||
|
chown -R 1001:1001 /app /home/dify ${TIKTOKEN_CACHE_DIR} /entrypoint.sh
|
||||||
|
|
||||||
|
ARG COMMIT_SHA
|
||||||
|
ENV COMMIT_SHA=${COMMIT_SHA}
|
||||||
|
ENV NLTK_DATA=/usr/local/share/nltk_data
|
||||||
|
USER 1001
|
||||||
|
|
||||||
|
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
|
||||||
116
dify/api/README.md
Normal file
116
dify/api/README.md
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
# Dify Backend API
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
> [!IMPORTANT]
|
||||||
|
>
|
||||||
|
> In the v1.3.0 release, `poetry` has been replaced with
|
||||||
|
> [`uv`](https://docs.astral.sh/uv/) as the package manager
|
||||||
|
> for Dify API backend service.
|
||||||
|
|
||||||
|
1. Start the docker-compose stack
|
||||||
|
|
||||||
|
The backend requires some middleware, including PostgreSQL, Redis, and Weaviate, which can be started together using `docker-compose`.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd ../docker
|
||||||
|
cp middleware.env.example middleware.env
|
||||||
|
# change the profile to mysql if you are not using postgres; change the profile to another vector database if you are not using weaviate
|
||||||
|
docker compose -f docker-compose.middleware.yaml --profile postgresql --profile weaviate -p dify up -d
|
||||||
|
cd ../api
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Copy `.env.example` to `.env`
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cp .env.example .env
|
||||||
|
```
|
||||||
|
|
||||||
|
> [!IMPORTANT]
|
||||||
|
>
|
||||||
|
> When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site’s top-level domain (e.g., `example.com`). The frontend and backend must be under the same top-level domain in order to share authentication cookies.
|
||||||
|
|
||||||
|
1. Generate a `SECRET_KEY` in the `.env` file.
|
||||||
|
|
||||||
|
bash for Linux
|
||||||
|
|
||||||
|
```bash for Linux
|
||||||
|
sed -i "/^SECRET_KEY=/c\SECRET_KEY=$(openssl rand -base64 42)" .env
|
||||||
|
```
|
||||||
|
|
||||||
|
bash for Mac
|
||||||
|
|
||||||
|
```bash for Mac
|
||||||
|
secret_key=$(openssl rand -base64 42)
|
||||||
|
sed -i '' "/^SECRET_KEY=/c\\
|
||||||
|
SECRET_KEY=${secret_key}" .env
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Create environment.
|
||||||
|
|
||||||
|
Dify API service uses [UV](https://docs.astral.sh/uv/) to manage dependencies.
|
||||||
|
First, install the uv package manager if you don't have it already.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install uv
|
||||||
|
# Or on macOS
|
||||||
|
brew install uv
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Install dependencies
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv sync --dev
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Run migrations
|
||||||
|
|
||||||
|
Before the first launch, migrate the database to the latest version.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv run flask db upgrade
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Start backend
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv run flask run --host 0.0.0.0 --port=5001 --debug
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Start Dify [web](../web) service.
|
||||||
|
|
||||||
|
1. Setup your application by visiting `http://localhost:3000`.
|
||||||
|
|
||||||
|
1. If you need to handle and debug async tasks (e.g. dataset importing and document indexing), start the worker service.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor
|
||||||
|
```
|
||||||
|
|
||||||
|
Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv run celery -A app.celery beat
|
||||||
|
```
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
1. Install dependencies for both the backend and the test environment
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv sync --dev
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Run the tests locally with the mocked system environment variables defined in the `tool.pytest_env` section of `pyproject.toml`; see [CLAUDE.md](../CLAUDE.md) for more details
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv run pytest # Run all tests
|
||||||
|
uv run pytest tests/unit_tests/ # Unit tests only
|
||||||
|
uv run pytest tests/integration_tests/ # Integration tests
|
||||||
|
|
||||||
|
# Code quality
|
||||||
|
../dev/reformat # Run all formatters and linters
|
||||||
|
uv run ruff check --fix ./ # Fix linting issues
|
||||||
|
uv run ruff format ./ # Format code
|
||||||
|
uv run basedpyright . # Type checking
|
||||||
|
```
|
||||||
115
dify/api/agent_skills/coding_style.md
Normal file
115
dify/api/agent_skills/coding_style.md
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
## Linter
|
||||||
|
|
||||||
|
- Always follow `.ruff.toml`.
|
||||||
|
- Run `uv run ruff check --fix --unsafe-fixes`.
|
||||||
|
- Keep each line under 100 characters (including spaces).
|
||||||
|
|
||||||
|
## Code Style
|
||||||
|
|
||||||
|
- `snake_case` for variables and functions.
|
||||||
|
- `PascalCase` for classes.
|
||||||
|
- `UPPER_CASE` for constants.
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
- Use Pydantic v2 standard.
|
||||||
|
- Use `uv` for package management.
|
||||||
|
- Do not override dunder methods like `__init__`, `__iadd__`, etc.
|
||||||
|
- Never launch services (`uv run app.py`, `flask run`, etc.); running tests under `tests/` is allowed.
|
||||||
|
- Prefer simple functions over classes for lightweight helpers.
|
||||||
|
- Keep files below 800 lines; split when necessary.
|
||||||
|
- Keep code readable—no clever hacks.
|
||||||
|
- Never use `print`; log with `logger = logging.getLogger(__name__)`.
|
||||||
|
|
||||||
|
## Guiding Principles
|
||||||
|
|
||||||
|
- Mirror the project’s layered architecture: controller → service → core/domain.
|
||||||
|
- Reuse existing helpers in `core/`, `services/`, and `libs/` before creating new abstractions.
|
||||||
|
- Optimise for observability: deterministic control flow, clear logging, actionable errors.
|
||||||
|
|
||||||
|
## SQLAlchemy Patterns
|
||||||
|
|
||||||
|
- Models inherit from `models.base.Base`; never create ad-hoc metadata or engines.
|
||||||
|
|
||||||
|
- Open sessions with context managers:
|
||||||
|
|
||||||
|
```python
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
with Session(db.engine, expire_on_commit=False) as session:
|
||||||
|
stmt = select(Workflow).where(
|
||||||
|
Workflow.id == workflow_id,
|
||||||
|
Workflow.tenant_id == tenant_id,
|
||||||
|
)
|
||||||
|
workflow = session.execute(stmt).scalar_one_or_none()
|
||||||
|
```
|
||||||
|
|
||||||
|
- Use SQLAlchemy expressions; avoid raw SQL unless necessary.
|
||||||
|
|
||||||
|
- Introduce repository abstractions only for very large tables (e.g., workflow executions) to support alternative storage strategies.
|
||||||
|
|
||||||
|
- Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.); a sketch follows below.
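
A minimal sketch of that last point, reusing the `Workflow`, `db`, `tenant_id`, and `workflow_id` names from the session example above; the mutation itself is elided, and the pattern is illustrative rather than copied from the codebase.

```python
from sqlalchemy import select
from sqlalchemy.orm import Session

with Session(db.engine, expire_on_commit=False) as session:
    stmt = (
        select(Workflow)
        .where(
            Workflow.id == workflow_id,
            Workflow.tenant_id == tenant_id,  # every query is tenant-scoped
        )
        .with_for_update()  # row lock guards the write path
    )
    workflow = session.execute(stmt).scalar_one_or_none()
    if workflow is None:
        raise ValueError("workflow not found for this tenant")
    # ... mutate the row here ...
    session.commit()
```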
|
||||||
|
|
||||||
|
## Storage & External IO
|
||||||
|
|
||||||
|
- Access storage via `extensions.ext_storage.storage`.
|
||||||
|
- Use `core.helper.ssrf_proxy` for outbound HTTP fetches (see the sketch after this list).
|
||||||
|
- Background tasks that touch storage must be idempotent and log the relevant object identifiers.
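
A hedged sketch combining these rules; the `get`/`save` call names are assumptions about the helpers' interfaces for illustration, not verified signatures, and `cache_remote_icon` is a hypothetical task.

```python
import logging

from core.helper import ssrf_proxy
from extensions.ext_storage import storage

logger = logging.getLogger(__name__)


def cache_remote_icon(tenant_id: str, icon_url: str, object_key: str) -> None:
    # Fetch through the SSRF proxy helper rather than calling an HTTP client directly.
    # NOTE: the `get`/`save` method names are assumptions used for illustration.
    response = ssrf_proxy.get(icon_url)
    response.raise_for_status()

    # Writing the same key twice yields the same object, so the task stays idempotent.
    storage.save(object_key, response.content)
    logger.info("cached icon tenant_id=%s object_key=%s", tenant_id, object_key)
```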
|
||||||
|
|
||||||
|
## Pydantic Usage
|
||||||
|
|
||||||
|
- Define DTOs with Pydantic v2 models and forbid extras by default.
|
||||||
|
|
||||||
|
- Use `@field_validator` / `@model_validator` for domain rules.
|
||||||
|
|
||||||
|
- Example:
|
||||||
|
|
||||||
|
```python
|
||||||
|
from pydantic import BaseModel, ConfigDict, HttpUrl, field_validator
|
||||||
|
|
||||||
|
class TriggerConfig(BaseModel):
|
||||||
|
endpoint: HttpUrl
|
||||||
|
secret: str
|
||||||
|
|
||||||
|
model_config = ConfigDict(extra="forbid")
|
||||||
|
|
||||||
|
@field_validator("secret")
|
||||||
|
def ensure_secret_prefix(cls, value: str) -> str:
|
||||||
|
if not value.startswith("dify_"):
|
||||||
|
raise ValueError("secret must start with dify_")
|
||||||
|
return value
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generics & Protocols
|
||||||
|
|
||||||
|
- Use `typing.Protocol` to define behavioural contracts (e.g., cache interfaces).
|
||||||
|
- Apply generics (`TypeVar`, `Generic`) for reusable utilities like caches or providers.
|
||||||
|
- Validate dynamic inputs at runtime when generics cannot enforce safety alone.
|
||||||
|
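
A minimal sketch of this pattern (the `Cache` protocol and `InMemoryCache` class are illustrative only, not existing project interfaces):

```python
from typing import Generic, Protocol, TypeVar

T = TypeVar("T")


class Cache(Protocol[T]):
    """Behavioural contract: anything with matching get/set satisfies it structurally."""

    def get(self, key: str) -> T | None: ...

    def set(self, key: str, value: T) -> None: ...


class InMemoryCache(Generic[T]):
    """Dict-backed implementation; satisfies Cache[T] without inheriting from it."""

    def __init__(self) -> None:
        self._data: dict[str, T] = {}

    def get(self, key: str) -> T | None:
        return self._data.get(key)

    def set(self, key: str, value: T) -> None:
        self._data[key] = value
```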

## Error Handling & Logging

- Raise domain-specific exceptions (`services/errors`, `core/errors`) and translate to HTTP responses in controllers; see the sketch after this list.
- Declare `logger = logging.getLogger(__name__)` at module top.
- Include tenant/app/workflow identifiers in log context.
- Log retryable events at `warning`, terminal failures at `error`.
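
A short sketch combining these rules (the error class and function are illustrative, not existing project code):

```python
import logging

logger = logging.getLogger(__name__)


class WorkflowNotFoundError(Exception):
    """Domain-specific error; real code would define this under services/errors."""


def publish_workflow(tenant_id: str, workflow_id: str, workflow: object | None) -> None:
    if workflow is None:
        # Terminal failure: log at error with tenant/workflow identifiers, then raise.
        logger.error("workflow not found, tenant_id=%s, workflow_id=%s", tenant_id, workflow_id)
        raise WorkflowNotFoundError(workflow_id)
    logger.info("publishing workflow, tenant_id=%s, workflow_id=%s", tenant_id, workflow_id)
```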

## Tooling & Checks

- Format/lint: `uv run --project api --dev ruff format ./api` and `uv run --project api --dev ruff check --fix --unsafe-fixes ./api`.
- Type checks: `uv run --directory api --dev basedpyright`.
- Tests: `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`.
- Run all of the above before submitting your work.

## Controllers & Services

- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
- Avoid repositories unless necessary; direct SQLAlchemy usage is preferred for typical tables.
- Document non-obvious behaviour with concise comments.

## Miscellaneous

- Use `configs.dify_config` for configuration—never read environment variables directly.
- Maintain tenant awareness end-to-end; `tenant_id` must flow through every layer touching shared resources.
- Queue async work through `services/async_workflow_service`; implement tasks under `tasks/` with explicit queue selection.
- Keep experimental scripts under `dev/`; do not ship them in production builds.
96
dify/api/agent_skills/infra.md
Normal file
@@ -0,0 +1,96 @@
## Configuration

- Import `configs.dify_config` for every runtime toggle. Do not read environment variables directly (see the example after this list).
- Add new settings to the proper mixin inside `configs/` (deployment, feature, middleware, etc.) so they load through `DifyConfig`.
- Remote overrides come from the optional providers in `configs/remote_settings_sources`; keep defaults in code safe when the value is missing.
- Example: logging pulls targets from `extensions/ext_logging.py`, and model provider URLs are assembled in `services/entities/model_provider_entities.py`.
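
For instance (`DEBUG` and `APPLICATION_NAME` are deployment settings defined in `configs/deploy`):

```python
import logging

from configs import dify_config

logger = logging.getLogger(__name__)

# Read runtime toggles from the typed config object instead of os.environ.
if dify_config.DEBUG:
    logger.debug("debug mode enabled for %s", dify_config.APPLICATION_NAME)
```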

## Dependencies

- Runtime dependencies live in `[project].dependencies` inside `pyproject.toml`. Optional clients go into the `storage`, `tools`, or `vdb` groups under `[dependency-groups]`.
- Always pin versions and keep the list alphabetised. Shared tooling (lint, typing, pytest) belongs in the `dev` group.
- When code needs a new package, explain why in the PR and run `uv lock` so the lockfile stays current.

## Storage & Files

- Use `extensions.ext_storage.storage` for all blob IO; it already respects the configured backend.
- Convert files for workflows with helpers in `core/file/file_manager.py`; they handle signed URLs and multimodal payloads.
- When writing controller logic, delegate upload quotas and metadata to `services/file_service.py` instead of touching storage directly.
- All outbound HTTP fetches (webhooks, remote files) must go through the SSRF-safe client in `core/helper/ssrf_proxy.py`; it wraps `httpx` with the allow/deny rules configured for the platform (see the sketch after this list).
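
A sketch of the intended call site; the `httpx`-style `get` interface is an assumption here, so check `core/helper/ssrf_proxy.py` for the actual signatures:

```python
from core.helper import ssrf_proxy  # SSRF-safe wrapper around httpx

# Assumed httpx-like interface; URL and timeout are illustrative.
response = ssrf_proxy.get("https://example.com/payload.json", timeout=10)
response.raise_for_status()
payload = response.json()
```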

## Redis & Shared State

- Access Redis through `extensions.ext_redis.redis_client`. For locking, reuse `redis_client.lock` (see the sketch after this list).
- Prefer higher-level helpers when available: rate limits use `libs.helper.RateLimiter`, provider metadata uses caches in `core/helper/provider_cache.py`.
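
A minimal sketch of the locking pattern (the lock key and the guarded function are illustrative):

```python
from extensions.ext_redis import redis_client


def sync_schedule_plans() -> None:
    """Hypothetical critical section that touches shared state."""


# redis-py style lock exposed by the shared client.
with redis_client.lock("workflow:schedule:sync", timeout=60):
    sync_schedule_plans()
```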

## Models

- SQLAlchemy models sit in `models/` and inherit from the shared declarative `Base` defined in `models/base.py` (metadata configured via `models/engine.py`).
- `models/__init__.py` exposes grouped aggregates: account/tenant models, app and conversation tables, datasets, providers, workflow runs, triggers, etc. Import from there to avoid deep path churn.
- Follow the DDD boundary: persistence objects live in `models/`, repositories under `repositories/` translate them into domain entities, and services consume those repositories.
- When adding a table, create the model class, register it in `models/__init__.py`, wire a repository if needed, and generate an Alembic migration as described below (see the sketch after this list).
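
A minimal sketch of a new model, assuming a SQLAlchemy 2.0-style declarative `Base`; the table and columns are hypothetical:

```python
from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column

from models.base import Base


class WorkflowScheduleNote(Base):
    """Hypothetical tenant-scoped table, used only for illustration."""

    __tablename__ = "workflow_schedule_notes"

    id: Mapped[str] = mapped_column(String(36), primary_key=True)
    tenant_id: Mapped[str] = mapped_column(String(36), index=True)
    note: Mapped[str] = mapped_column(String(255), default="")
```

Register the class in `models/__init__.py` and generate the Alembic migration as described in "Database & Migrations" below.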

## Vector Stores

- Vector client implementations live in `core/rag/datasource/vdb/<provider>`, with a common factory in `core/rag/datasource/vdb/vector_factory.py` and enums in `core/rag/datasource/vdb/vector_type.py`.
- Retrieval pipelines call these providers through `core/rag/datasource/retrieval_service.py` and dataset ingestion flows in `services/dataset_service.py`.
- The CLI helper `flask vdb-migrate` orchestrates bulk migrations using routines in `commands.py`; reuse that pattern when adding new backend transitions.
- To add another store, mirror the provider layout, register it with the factory, and include any schema changes in Alembic migrations.

## Observability & OTEL

- OpenTelemetry settings live under the observability mixin in `configs/observability`. Toggle exporters and sampling via `dify_config`, not ad-hoc env reads.
- HTTP, Celery, Redis, SQLAlchemy, and httpx instrumentation is initialised in `extensions/ext_app_metrics.py` and `extensions/ext_request_logging.py`; reuse these hooks when adding new workers or entrypoints.
- When creating background tasks or external calls, propagate tracing context with helpers in the existing instrumented clients (e.g. use the shared `httpx` session from `core/helper/http_client_pooling.py`).
- If you add a new external integration, ensure spans and metrics are emitted by wiring the appropriate OTEL instrumentation package in `pyproject.toml` and configuring it in `extensions/`.

## Ops Integrations

- Langfuse support and other tracing bridges live under `core/ops/opik_trace`. Config toggles sit in `configs/observability`, while exporters are initialised in the OTEL extensions mentioned above.
- External monitoring services should follow this pattern: keep client code in `core/ops`, expose switches via `dify_config`, and hook initialisation in `extensions/ext_app_metrics.py` or sibling modules.
- Before instrumenting new code paths, check whether existing context helpers (e.g. `extensions/ext_request_logging.py`) already capture the necessary metadata.

## Controllers, Services, Core

- Controllers only parse HTTP input and call a service method. Keep business rules in `services/`.
- Services enforce tenant rules, quotas, and orchestration, then call into `core/` engines (workflow execution, tools, LLMs).
- When adding a new endpoint, search for an existing service to extend before introducing a new layer. Example: workflow APIs pipe through `services/workflow_service.py` into `core/workflow`.

## Plugins, Tools, Providers

- In Dify a plugin is a tenant-installable bundle that declares one or more providers (tool, model, datasource, trigger, endpoint, agent strategy) plus its resource needs and version metadata. The manifest (`core/plugin/entities/plugin.py`) mirrors what you see in the marketplace documentation.
- Installation, upgrades, and migrations are orchestrated by `services/plugin/plugin_service.py` together with helpers such as `services/plugin/plugin_migration.py`.
- Runtime loading happens through the implementations under `core/plugin/impl/*` (tool/model/datasource/trigger/endpoint/agent). These modules normalise plugin providers so that downstream systems (`core/tools/tool_manager.py`, `services/model_provider_service.py`, `services/trigger/*`) can treat builtin and plugin capabilities the same way.
- For remote execution, plugin daemons (`core/plugin/entities/plugin_daemon.py`, `core/plugin/impl/plugin.py`) manage lifecycle hooks, credential forwarding, and background workers that keep plugin processes in sync with the main application.
- Acquire tool implementations through `core/tools/tool_manager.py`; it resolves builtin, plugin, and workflow-as-tool providers uniformly, injecting the right context (tenant, credentials, runtime config).
- To add a new plugin capability, extend the relevant `core/plugin/entities` schema and register the implementation in the matching `core/plugin/impl` module rather than importing the provider directly.

## Async Workloads

See `agent_skills/trigger.md` for more detailed documentation.

- Enqueue background work through `services/async_workflow_service.py`. It routes jobs to the tiered Celery queues defined in `tasks/`.
- Workers boot from `celery_entrypoint.py` and execute functions in `tasks/workflow_execution_tasks.py`, `tasks/trigger_processing_tasks.py`, etc.
- Scheduled workflows poll from `schedule/workflow_schedule_tasks.py`. Follow the same pattern if you need new periodic jobs.

## Database & Migrations

- SQLAlchemy models live under `models/` and map directly to migration files in `migrations/versions`.
- Generate migrations with `uv run --project api flask db revision --autogenerate -m "<summary>"`, then review the diff; never hand-edit the database outside Alembic.
- Apply migrations locally using `uv run --project api flask db upgrade`; production deploys expect the same history.
- If you add tenant-scoped data, confirm the upgrade includes tenant filters or defaults consistent with the service logic touching those tables.

## CLI Commands

- Maintenance commands from `commands.py` are registered on the Flask CLI. Run them via `uv run --project api flask <command>`.
- Use the built-in `db` commands from Flask-Migrate for schema operations (`flask db upgrade`, `flask db stamp`, etc.). Only fall back to custom helpers if you need their extra behaviour.
- Custom entries such as `flask reset-password`, `flask reset-email`, and `flask vdb-migrate` handle self-hosted account recovery and vector database migrations.
- Before adding a new command, check whether an existing service can be reused and ensure the command guards edition-specific behaviour (many enforce `SELF_HOSTED`). Document any additions in the PR.
- Ruff helpers are run directly with `uv`: `uv run --project api --dev ruff format ./api` for formatting and `uv run --project api --dev ruff check ./api` (add `--fix` if you want automatic fixes).

## When You Add Features

- Check for an existing helper or service before writing a new util.
- Uphold tenancy: every service method should receive the tenant ID from controller wrappers such as `controllers/console/wraps.py`.
- Update or create tests alongside behaviour changes (`tests/unit_tests` for fast coverage, `tests/integration_tests` when touching orchestrations).
- Run `uv run --project api --dev ruff check ./api`, `uv run --directory api --dev basedpyright`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` before submitting changes.
1
dify/api/agent_skills/plugin.md
Normal file
@@ -0,0 +1 @@
// TBD
1
dify/api/agent_skills/plugin_oauth.md
Normal file
@@ -0,0 +1 @@
// TBD
53
dify/api/agent_skills/trigger.md
Normal file
@@ -0,0 +1,53 @@
## Overview

Trigger is the collective name for the nodes we call `Start` nodes; the `Start` concept is the same as `RootNode` in the workflow engine `core/workflow/graph_engine`. A `Start` node is the entry point of a workflow: every workflow run begins at a `Start` node.

## Trigger nodes

- `UserInput`
- `Trigger Webhook`
- `Trigger Schedule`
- `Trigger Plugin`

### UserInput

Before the `Trigger` concept was introduced, this was simply the `Start` node; it has been renamed to `UserInput` to avoid confusion. It has a strong relation with `ServiceAPI` in `controllers/service_api/app`.

1. The `UserInput` node declares a list of arguments that must be provided by the user; they are ultimately converted into variables in the workflow variable pool.
1. `ServiceAPI` accepts those arguments and passes them through to the `UserInput` node.
1. For the detailed implementation, refer to `core/workflow/nodes/start`.

### Trigger Webhook

Inside the Webhook node, Dify provides a UI panel that lets the user define an HTTP manifest (`core/workflow/nodes/trigger_webhook/entities.py`.`WebhookData`). Dify also generates a random webhook id for each `Trigger Webhook` node; the implementation lives in `core/trigger/utils/endpoint.py`. `webhook-debug` is the debug mode for webhooks, and you can find it in `controllers/trigger/webhook.py`.

Finally, requests to the `webhook` endpoint are converted into variables in the workflow variable pool during workflow execution.

### Trigger Schedule

The `Trigger Schedule` node lets the user define a schedule that triggers the workflow; the detailed manifest is in `core/workflow/nodes/trigger_schedule/entities.py`. A poller and an executor handle millions of schedules; see `docker/entrypoint.sh` and `schedule/workflow_schedule_task.py` for help.

To achieve this, a `WorkflowSchedulePlan` model was introduced in `models/trigger.py`, and `events/event_handlers/sync_workflow_schedule_when_app_published.py` syncs workflow schedule plans when an app is published.

### Trigger Plugin

The `Trigger Plugin` node allows users to define their own distributed trigger plugin: whenever a request is received, Dify forwards it to the plugin and waits for the parsed variables.

1. Requests are saved to storage by `services/trigger/trigger_request_service.py` and referenced by `services/trigger/trigger_service.py`.`TriggerService`.`process_endpoint`.
1. Plugins accept those requests and parse variables from them; see `core/plugin/impl/trigger.py` for details.

Dify also introduces a `subscription` concept: an endpoint address from Dify is bound to a third-party webhook service such as `Github`, `Slack`, `Linear`, `GoogleDrive`, or `Gmail`. Once a subscription is created, Dify continually receives requests from that platform and handles them one by one.

## Worker Pool / Async Task

Every event that triggers a new workflow run is handled asynchronously; the unified entrypoint is `services/async_workflow_service.py`.`AsyncWorkflowService`.`trigger_workflow_async`.

The underlying infrastructure is `celery`, already configured in `docker/entrypoint.sh`; the consumers live in `tasks/async_workflow_tasks.py`. Three queues handle the different user tiers: `PROFESSIONAL_QUEUE`, `TEAM_QUEUE`, and `SANDBOX_QUEUE`.

## Debug Strategy

Dify divides users into two groups: builders and end users.

Builders are the users who create workflows, and for them debugging is a critical part of the development process. As the start node of a workflow, trigger nodes can `listen` to events from `WebhookDebug`, `Schedule`, and `Plugin`; the debugging flow is implemented in `controllers/console/app/workflow.py`.`DraftWorkflowTriggerNodeApi`.

A polling process can be seen as a series of single `poll` operations: each `poll` fetches events cached in `Redis` and returns `None` if no event is found. In more detail, `core/trigger/debug/event_bus.py` handles the polling process and `core/trigger/debug/event_selectors.py` selects the event poller based on the trigger type.
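
A simplified sketch of a single `poll` operation as described above; the Redis key layout and function name are illustrative, so check `core/trigger/debug/event_bus.py` for the real implementation:

```python
import json

from extensions.ext_redis import redis_client


def poll(node_id: str) -> dict | None:
    """Fetch one cached debug event for a trigger node, or return None when nothing arrived."""
    raw = redis_client.lpop(f"trigger:debug:{node_id}")  # illustrative key layout
    if raw is None:
        return None
    return json.loads(raw)
```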
29
dify/api/app.py
Normal file
@@ -0,0 +1,29 @@
import sys


def is_db_command() -> bool:
    if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
        return True
    return False


# create app
if is_db_command():
    from app_factory import create_migrations_app

    app = create_migrations_app()
else:
    # Gunicorn and Celery handle monkey patching automatically in production by
    # specifying the `gevent` worker class. Manual monkey patching is not required here.
    #
    # See `api/docker/entrypoint.sh` (lines 33 and 47) for details.
    #
    # For third-party library patching, refer to `gunicorn.conf.py` and `celery_entrypoint.py`.

    from app_factory import create_app

    app = create_app()
    celery = app.extensions["celery"]

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5001)
120
dify/api/app_factory.py
Normal file
@@ -0,0 +1,120 @@
import logging
import time

from configs import dify_config
from contexts.wrapper import RecyclableContextVar
from dify_app import DifyApp

logger = logging.getLogger(__name__)


# ----------------------------
# Application Factory Function
# ----------------------------
def create_flask_app_with_configs() -> DifyApp:
    """
    create a raw flask app
    with configs loaded from .env file
    """
    dify_app = DifyApp(__name__)
    dify_app.config.from_mapping(dify_config.model_dump())
    dify_app.config["RESTX_INCLUDE_ALL_MODELS"] = True

    # add before request hook
    @dify_app.before_request
    def before_request():
        # add an unique identifier to each request
        RecyclableContextVar.increment_thread_recycles()

    # Capture the decorator's return value to avoid pyright reportUnusedFunction
    _ = before_request

    return dify_app


def create_app() -> DifyApp:
    start_time = time.perf_counter()
    app = create_flask_app_with_configs()
    initialize_extensions(app)
    end_time = time.perf_counter()
    if dify_config.DEBUG:
        logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
    return app


def initialize_extensions(app: DifyApp):
    from extensions import (
        ext_app_metrics,
        ext_blueprints,
        ext_celery,
        ext_code_based_extension,
        ext_commands,
        ext_compress,
        ext_database,
        ext_hosting_provider,
        ext_import_modules,
        ext_logging,
        ext_login,
        ext_mail,
        ext_migrate,
        ext_orjson,
        ext_otel,
        ext_proxy_fix,
        ext_redis,
        ext_request_logging,
        ext_sentry,
        ext_set_secretkey,
        ext_storage,
        ext_timezone,
        ext_warnings,
    )

    extensions = [
        ext_timezone,
        ext_logging,
        ext_warnings,
        ext_import_modules,
        ext_orjson,
        ext_set_secretkey,
        ext_compress,
        ext_code_based_extension,
        ext_database,
        ext_app_metrics,
        ext_migrate,
        ext_redis,
        ext_storage,
        ext_celery,
        ext_login,
        ext_mail,
        ext_hosting_provider,
        ext_sentry,
        ext_proxy_fix,
        ext_blueprints,
        ext_commands,
        ext_otel,
        ext_request_logging,
    ]
    for ext in extensions:
        short_name = ext.__name__.split(".")[-1]
        is_enabled = ext.is_enabled() if hasattr(ext, "is_enabled") else True
        if not is_enabled:
            if dify_config.DEBUG:
                logger.info("Skipped %s", short_name)
            continue

        start_time = time.perf_counter()
        ext.init_app(app)
        end_time = time.perf_counter()
        if dify_config.DEBUG:
            logger.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2))


def create_migrations_app():
    app = create_flask_app_with_configs()
    from extensions import ext_database, ext_migrate

    # Initialize only required extensions
    ext_database.init_app(app)
    ext_migrate.init_app(app)

    return app
13
dify/api/celery_entrypoint.py
Normal file
@@ -0,0 +1,13 @@
import psycogreen.gevent as pscycogreen_gevent  # type: ignore
from grpc.experimental import gevent as grpc_gevent  # type: ignore

# grpc gevent
grpc_gevent.init_gevent()
print("gRPC patched with gevent.", flush=True)  # noqa: T201
pscycogreen_gevent.patch_psycopg()
print("psycopg2 patched with gevent.", flush=True)  # noqa: T201


from app import app, celery

__all__ = ["app", "celery"]
7
dify/api/cnt_base.sh
Normal file
@@ -0,0 +1,7 @@
#!/bin/bash
set -euxo pipefail

for pattern in "Base" "TypeBase"; do
    printf "%s " "$pattern"
    grep "($pattern):" -r --include='*.py' --exclude-dir=".venv" --exclude-dir="tests" . | wc -l
done
1901
dify/api/commands.py
Normal file
File diff suppressed because it is too large
3
dify/api/configs/__init__.py
Normal file
@@ -0,0 +1,3 @@
from .app_config import DifyConfig

dify_config = DifyConfig()  # type: ignore
113
dify/api/configs/app_config.py
Normal file
@@ -0,0 +1,113 @@
import logging
from pathlib import Path
from typing import Any

from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, TomlConfigSettingsSource

from libs.file_utils import search_file_upwards

from .deploy import DeploymentConfig
from .enterprise import EnterpriseFeatureConfig
from .extra import ExtraServiceConfig
from .feature import FeatureConfig
from .middleware import MiddlewareConfig
from .observability import ObservabilityConfig
from .packaging import PackagingInfo
from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName
from .remote_settings_sources.apollo import ApolloSettingsSource
from .remote_settings_sources.nacos import NacosSettingsSource

logger = logging.getLogger(__name__)


class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
    def __init__(self, settings_cls: type[BaseSettings]):
        super().__init__(settings_cls)

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        raise NotImplementedError

    def __call__(self) -> dict[str, Any]:
        current_state = self.current_state
        remote_source_name = current_state.get("REMOTE_SETTINGS_SOURCE_NAME")
        if not remote_source_name:
            return {}

        remote_source: RemoteSettingsSource | None = None
        match remote_source_name:
            case RemoteSettingsSourceName.APOLLO:
                remote_source = ApolloSettingsSource(current_state)
            case RemoteSettingsSourceName.NACOS:
                remote_source = NacosSettingsSource(current_state)
            case _:
                logger.warning("Unsupported remote source: %s", remote_source_name)
                return {}

        d: dict[str, Any] = {}

        for field_name, field in self.settings_cls.model_fields.items():
            field_value, field_key, value_is_complex = remote_source.get_field_value(field, field_name)
            field_value = remote_source.prepare_field_value(field_name, field, field_value, value_is_complex)
            if field_value is not None:
                d[field_key] = field_value

        return d


class DifyConfig(
    # Packaging info
    PackagingInfo,
    # Deployment configs
    DeploymentConfig,
    # Feature configs
    FeatureConfig,
    # Middleware configs
    MiddlewareConfig,
    # Extra service configs
    ExtraServiceConfig,
    # Observability configs
    ObservabilityConfig,
    # Remote source configs
    RemoteSettingsSourceConfig,
    # Enterprise feature configs
    # **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    EnterpriseFeatureConfig,
):
    model_config = SettingsConfigDict(
        # read from dotenv format config file
        env_file=".env",
        env_file_encoding="utf-8",
        # ignore extra attributes
        extra="ignore",
    )

    # Before adding any config,
    # please consider arranging it in the proper existing (or a new) config group
    # for better readability and maintainability.
    # Thanks for your concentration and consideration.

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        return (
            init_settings,
            env_settings,
            RemoteSettingsSourceFactory(settings_cls),
            dotenv_settings,
            file_secret_settings,
            TomlConfigSettingsSource(
                settings_cls=settings_cls,
                toml_file=search_file_upwards(
                    base_dir_path=Path(__file__).parent,
                    target_file_name="pyproject.toml",
                    max_search_parent_depth=2,
                ),
            ),
        )
34
dify/api/configs/deploy/__init__.py
Normal file
@@ -0,0 +1,34 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class DeploymentConfig(BaseSettings):
    """
    Configuration settings for application deployment
    """

    APPLICATION_NAME: str = Field(
        description="Name of the application, used for identification and logging purposes",
        default="langgenius/dify",
    )

    DEBUG: bool = Field(
        description="Enable debug mode for additional logging and development features",
        default=False,
    )

    # Request logging configuration
    ENABLE_REQUEST_LOGGING: bool = Field(
        description="Enable request and response body logging",
        default=False,
    )

    EDITION: str = Field(
        description="Deployment edition of the application (e.g., 'SELF_HOSTED', 'CLOUD')",
        default="SELF_HOSTED",
    )

    DEPLOY_ENV: str = Field(
        description="Deployment environment (e.g., 'PRODUCTION', 'DEVELOPMENT'), default to PRODUCTION",
        default="PRODUCTION",
    )
20
dify/api/configs/enterprise/__init__.py
Normal file
@@ -0,0 +1,20 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class EnterpriseFeatureConfig(BaseSettings):
    """
    Configuration for enterprise-level features.
    **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    """

    ENTERPRISE_ENABLED: bool = Field(
        description="Enable or disable enterprise-level features."
        "Before using, please contact business@dify.ai by email to inquire about licensing matters.",
        default=False,
    )

    CAN_REPLACE_LOGO: bool = Field(
        description="Allow customization of the enterprise logo.",
        default=False,
    )
10
dify/api/configs/extra/__init__.py
Normal file
@@ -0,0 +1,10 @@
from configs.extra.notion_config import NotionConfig
from configs.extra.sentry_config import SentryConfig


class ExtraServiceConfig(
    # place the configs in alphabet order
    NotionConfig,
    SentryConfig,
):
    pass
34
dify/api/configs/extra/notion_config.py
Normal file
@@ -0,0 +1,34 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class NotionConfig(BaseSettings):
    """
    Configuration settings for Notion integration
    """

    NOTION_CLIENT_ID: str | None = Field(
        description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
        default=None,
    )

    NOTION_CLIENT_SECRET: str | None = Field(
        description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
        default=None,
    )

    NOTION_INTEGRATION_TYPE: str | None = Field(
        description="Type of Notion integration."
        " Set to 'internal' for internal integrations, or None for public integrations.",
        default=None,
    )

    NOTION_INTERNAL_SECRET: str | None = Field(
        description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
        default=None,
    )

    NOTION_INTEGRATION_TOKEN: str | None = Field(
        description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
        default=None,
    )
26
dify/api/configs/extra/sentry_config.py
Normal file
@@ -0,0 +1,26 @@
from pydantic import Field, NonNegativeFloat
from pydantic_settings import BaseSettings


class SentryConfig(BaseSettings):
    """
    Configuration settings for Sentry error tracking and performance monitoring
    """

    SENTRY_DSN: str | None = Field(
        description="Sentry Data Source Name (DSN)."
        " This is the unique identifier of your Sentry project, used to send events to the correct project.",
        default=None,
    )

    SENTRY_TRACES_SAMPLE_RATE: NonNegativeFloat = Field(
        description="Sample rate for Sentry performance monitoring traces."
        " Value between 0.0 and 1.0, where 1.0 means 100% of traces are sent to Sentry.",
        default=1.0,
    )

    SENTRY_PROFILES_SAMPLE_RATE: NonNegativeFloat = Field(
        description="Sample rate for Sentry profiling."
        " Value between 0.0 and 1.0, where 1.0 means 100% of profiles are sent to Sentry.",
        default=1.0,
    )
1257
dify/api/configs/feature/__init__.py
Normal file
File diff suppressed because it is too large
254
dify/api/configs/feature/hosted_service/__init__.py
Normal file
@@ -0,0 +1,254 @@
|
|||||||
|
from pydantic import Field, NonNegativeInt
|
||||||
|
from pydantic_settings import BaseSettings
|
||||||
|
|
||||||
|
|
||||||
|
class HostedCreditConfig(BaseSettings):
|
||||||
|
HOSTED_MODEL_CREDIT_CONFIG: str = Field(
|
||||||
|
description="Model credit configuration in format 'model:credits,model:credits', e.g., 'gpt-4:20,gpt-4o:10'",
|
||||||
|
default="",
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_model_credits(self, model_name: str) -> int:
|
||||||
|
"""
|
||||||
|
Get credit value for a specific model name.
|
||||||
|
Returns 1 if model is not found in configuration (default credit).
|
||||||
|
|
||||||
|
:param model_name: The name of the model to search for
|
||||||
|
:return: The credit value for the model
|
||||||
|
"""
|
||||||
|
if not self.HOSTED_MODEL_CREDIT_CONFIG:
|
||||||
|
return 1
|
||||||
|
|
||||||
|
try:
|
||||||
|
credit_map = dict(
|
||||||
|
item.strip().split(":", 1) for item in self.HOSTED_MODEL_CREDIT_CONFIG.split(",") if ":" in item
|
||||||
|
)
|
||||||
|
|
||||||
|
# Search for matching model pattern
|
||||||
|
for pattern, credit in credit_map.items():
|
||||||
|
if pattern.strip() == model_name:
|
||||||
|
return int(credit)
|
||||||
|
return 1 # Default quota if no match found
|
||||||
|
except (ValueError, AttributeError):
|
||||||
|
return 1 # Return default quota if parsing fails
|
||||||
|
|
||||||
|
|
||||||
|
class HostedOpenAiConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for hosted OpenAI service
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_OPENAI_API_KEY: str | None = Field(
|
||||||
|
description="API key for hosted OpenAI service",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_OPENAI_API_BASE: str | None = Field(
|
||||||
|
description="Base URL for hosted OpenAI API",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_OPENAI_API_ORGANIZATION: str | None = Field(
|
||||||
|
description="Organization ID for hosted OpenAI service",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_OPENAI_TRIAL_ENABLED: bool = Field(
|
||||||
|
description="Enable trial access to hosted OpenAI service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_OPENAI_TRIAL_MODELS: str = Field(
|
||||||
|
description="Comma-separated list of available models for trial access",
|
||||||
|
default="gpt-3.5-turbo,"
|
||||||
|
"gpt-3.5-turbo-1106,"
|
||||||
|
"gpt-3.5-turbo-instruct,"
|
||||||
|
"gpt-3.5-turbo-16k,"
|
||||||
|
"gpt-3.5-turbo-16k-0613,"
|
||||||
|
"gpt-3.5-turbo-0613,"
|
||||||
|
"gpt-3.5-turbo-0125,"
|
||||||
|
"text-davinci-003",
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
|
||||||
|
description="Quota limit for hosted OpenAI service usage",
|
||||||
|
default=200,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_OPENAI_PAID_ENABLED: bool = Field(
|
||||||
|
description="Enable paid access to hosted OpenAI service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_OPENAI_PAID_MODELS: str = Field(
|
||||||
|
description="Comma-separated list of available models for paid access",
|
||||||
|
default="gpt-4,"
|
||||||
|
"gpt-4-turbo-preview,"
|
||||||
|
"gpt-4-turbo-2024-04-09,"
|
||||||
|
"gpt-4-1106-preview,"
|
||||||
|
"gpt-4-0125-preview,"
|
||||||
|
"gpt-3.5-turbo,"
|
||||||
|
"gpt-3.5-turbo-16k,"
|
||||||
|
"gpt-3.5-turbo-16k-0613,"
|
||||||
|
"gpt-3.5-turbo-1106,"
|
||||||
|
"gpt-3.5-turbo-0613,"
|
||||||
|
"gpt-3.5-turbo-0125,"
|
||||||
|
"gpt-3.5-turbo-instruct,"
|
||||||
|
"text-davinci-003",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedAzureOpenAiConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for hosted Azure OpenAI service
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_AZURE_OPENAI_ENABLED: bool = Field(
|
||||||
|
description="Enable hosted Azure OpenAI service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_AZURE_OPENAI_API_KEY: str | None = Field(
|
||||||
|
description="API key for hosted Azure OpenAI service",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_AZURE_OPENAI_API_BASE: str | None = Field(
|
||||||
|
description="Base URL for hosted Azure OpenAI API",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_AZURE_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
|
||||||
|
description="Quota limit for hosted Azure OpenAI service usage",
|
||||||
|
default=200,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedAnthropicConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for hosted Anthropic service
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_ANTHROPIC_API_BASE: str | None = Field(
|
||||||
|
description="Base URL for hosted Anthropic API",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_ANTHROPIC_API_KEY: str | None = Field(
|
||||||
|
description="API key for hosted Anthropic service",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_ANTHROPIC_TRIAL_ENABLED: bool = Field(
|
||||||
|
description="Enable trial access to hosted Anthropic service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field(
|
||||||
|
description="Quota limit for hosted Anthropic service usage",
|
||||||
|
default=600000,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field(
|
||||||
|
description="Enable paid access to hosted Anthropic service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedMinmaxConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for hosted Minmax service
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_MINIMAX_ENABLED: bool = Field(
|
||||||
|
description="Enable hosted Minmax service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedSparkConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for hosted Spark service
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_SPARK_ENABLED: bool = Field(
|
||||||
|
description="Enable hosted Spark service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedZhipuAIConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for hosted ZhipuAI service
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_ZHIPUAI_ENABLED: bool = Field(
|
||||||
|
description="Enable hosted ZhipuAI service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedModerationConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for hosted Moderation service
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_MODERATION_ENABLED: bool = Field(
|
||||||
|
description="Enable hosted Moderation service",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_MODERATION_PROVIDERS: str = Field(
|
||||||
|
description="Comma-separated list of moderation providers",
|
||||||
|
default="",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedFetchAppTemplateConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for fetching app templates
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
|
||||||
|
description="Mode for fetching app templates: remote, db, or builtin default to remote,",
|
||||||
|
default="remote",
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN: str = Field(
|
||||||
|
description="Domain for fetching remote app templates",
|
||||||
|
default="https://tmpl.dify.ai",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedFetchPipelineTemplateConfig(BaseSettings):
|
||||||
|
"""
|
||||||
|
Configuration for fetching pipeline templates
|
||||||
|
"""
|
||||||
|
|
||||||
|
HOSTED_FETCH_PIPELINE_TEMPLATES_MODE: str = Field(
|
||||||
|
description="Mode for fetching pipeline templates: remote, db, or builtin default to remote,",
|
||||||
|
default="remote",
|
||||||
|
)
|
||||||
|
|
||||||
|
HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN: str = Field(
|
||||||
|
description="Domain for fetching remote pipeline templates",
|
||||||
|
default="https://tmpl.dify.ai",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HostedServiceConfig(
|
||||||
|
# place the configs in alphabet order
|
||||||
|
HostedAnthropicConfig,
|
||||||
|
HostedAzureOpenAiConfig,
|
||||||
|
HostedFetchAppTemplateConfig,
|
||||||
|
HostedFetchPipelineTemplateConfig,
|
||||||
|
HostedMinmaxConfig,
|
||||||
|
HostedOpenAiConfig,
|
||||||
|
HostedSparkConfig,
|
||||||
|
HostedZhipuAIConfig,
|
||||||
|
# moderation
|
||||||
|
HostedModerationConfig,
|
||||||
|
# credit config
|
||||||
|
HostedCreditConfig,
|
||||||
|
):
|
||||||
|
pass
|
||||||
366
dify/api/configs/middleware/__init__.py
Normal file
@@ -0,0 +1,366 @@
|
|||||||
|
import os
|
||||||
|
from typing import Any, Literal
|
||||||
|
from urllib.parse import parse_qsl, quote_plus
|
||||||
|
|
||||||
|
from pydantic import Field, NonNegativeFloat, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
|
||||||
|
from pydantic_settings import BaseSettings
|
||||||
|
|
||||||
|
from .cache.redis_config import RedisConfig
|
||||||
|
from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
|
||||||
|
from .storage.amazon_s3_storage_config import S3StorageConfig
|
||||||
|
from .storage.azure_blob_storage_config import AzureBlobStorageConfig
|
||||||
|
from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
|
||||||
|
from .storage.clickzetta_volume_storage_config import ClickZettaVolumeStorageConfig
|
||||||
|
from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
|
||||||
|
from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
|
||||||
|
from .storage.oci_storage_config import OCIStorageConfig
|
||||||
|
from .storage.opendal_storage_config import OpenDALStorageConfig
|
||||||
|
from .storage.supabase_storage_config import SupabaseStorageConfig
|
||||||
|
from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
|
||||||
|
from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
|
||||||
|
from .vdb.alibabacloud_mysql_config import AlibabaCloudMySQLConfig
|
||||||
|
from .vdb.analyticdb_config import AnalyticdbConfig
|
||||||
|
from .vdb.baidu_vector_config import BaiduVectorDBConfig
|
||||||
|
from .vdb.chroma_config import ChromaConfig
|
||||||
|
from .vdb.clickzetta_config import ClickzettaConfig
|
||||||
|
from .vdb.couchbase_config import CouchbaseConfig
|
||||||
|
from .vdb.elasticsearch_config import ElasticsearchConfig
|
||||||
|
from .vdb.huawei_cloud_config import HuaweiCloudConfig
|
||||||
|
from .vdb.lindorm_config import LindormConfig
|
||||||
|
from .vdb.matrixone_config import MatrixoneConfig
|
||||||
|
from .vdb.milvus_config import MilvusConfig
|
||||||
|
from .vdb.myscale_config import MyScaleConfig
|
||||||
|
from .vdb.oceanbase_config import OceanBaseVectorConfig
|
||||||
|
from .vdb.opengauss_config import OpenGaussConfig
|
||||||
|
from .vdb.opensearch_config import OpenSearchConfig
|
||||||
|
from .vdb.oracle_config import OracleConfig
|
||||||
|
from .vdb.pgvector_config import PGVectorConfig
|
||||||
|
from .vdb.pgvectors_config import PGVectoRSConfig
|
||||||
|
from .vdb.qdrant_config import QdrantConfig
|
||||||
|
from .vdb.relyt_config import RelytConfig
|
||||||
|
from .vdb.tablestore_config import TableStoreConfig
|
||||||
|
from .vdb.tencent_vector_config import TencentVectorDBConfig
|
||||||
|
from .vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
|
||||||
|
from .vdb.tidb_vector_config import TiDBVectorConfig
|
||||||
|
from .vdb.upstash_config import UpstashConfig
|
||||||
|
from .vdb.vastbase_vector_config import VastbaseVectorConfig
|
||||||
|
from .vdb.vikingdb_config import VikingDBConfig
|
||||||
|
from .vdb.weaviate_config import WeaviateConfig
|
||||||
|
|
||||||
|
|
||||||
|
class StorageConfig(BaseSettings):
|
||||||
|
STORAGE_TYPE: Literal[
|
||||||
|
"opendal",
|
||||||
|
"s3",
|
||||||
|
"aliyun-oss",
|
||||||
|
"azure-blob",
|
||||||
|
"baidu-obs",
|
||||||
|
"clickzetta-volume",
|
||||||
|
"google-storage",
|
||||||
|
"huawei-obs",
|
||||||
|
"oci-storage",
|
||||||
|
"tencent-cos",
|
||||||
|
"volcengine-tos",
|
||||||
|
"supabase",
|
||||||
|
"local",
|
||||||
|
] = Field(
|
||||||
|
description="Type of storage to use."
|
||||||
|
" Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', "
|
||||||
|
"'clickzetta-volume', 'google-storage', 'huawei-obs', 'oci-storage', 'tencent-cos', "
|
||||||
|
"'volcengine-tos', 'supabase'. Default is 'opendal'.",
|
||||||
|
default="opendal",
|
||||||
|
)
|
||||||
|
|
||||||
|
STORAGE_LOCAL_PATH: str = Field(
|
||||||
|
description="Path for local storage when STORAGE_TYPE is set to 'local'.",
|
||||||
|
default="storage",
|
||||||
|
deprecated=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class VectorStoreConfig(BaseSettings):
|
||||||
|
VECTOR_STORE: str | None = Field(
|
||||||
|
description="Type of vector store to use for efficient similarity search."
|
||||||
|
" Set to None if not using a vector store.",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
VECTOR_STORE_WHITELIST_ENABLE: bool | None = Field(
|
||||||
|
description="Enable whitelist for vector store.",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
VECTOR_INDEX_NAME_PREFIX: str | None = Field(
|
||||||
|
description="Prefix used to create collection name in vector database",
|
||||||
|
default="Vector_index",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class KeywordStoreConfig(BaseSettings):
|
||||||
|
KEYWORD_STORE: str = Field(
|
||||||
|
description="Method for keyword extraction and storage."
|
||||||
|
" Default is 'jieba', a Chinese text segmentation library.",
|
||||||
|
default="jieba",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseConfig(BaseSettings):
|
||||||
|
# Database type selector
|
||||||
|
DB_TYPE: Literal["postgresql", "mysql", "oceanbase"] = Field(
|
||||||
|
description="Database type to use. OceanBase is MySQL-compatible.",
|
||||||
|
default="postgresql",
|
||||||
|
)
|
||||||
|
|
||||||
|
DB_HOST: str = Field(
|
||||||
|
description="Hostname or IP address of the database server.",
|
||||||
|
default="localhost",
|
||||||
|
)
|
||||||
|
|
||||||
|
DB_PORT: PositiveInt = Field(
|
||||||
|
description="Port number for database connection.",
|
||||||
|
default=5432,
|
||||||
|
)
|
||||||
|
|
||||||
|
DB_USERNAME: str = Field(
|
||||||
|
description="Username for database authentication.",
|
||||||
|
default="postgres",
|
||||||
|
)
|
||||||
|
|
||||||
|
DB_PASSWORD: str = Field(
|
||||||
|
description="Password for database authentication.",
|
||||||
|
default="",
|
||||||
|
)
|
||||||
|
|
||||||
|
DB_DATABASE: str = Field(
|
||||||
|
description="Name of the database to connect to.",
|
||||||
|
default="dify",
|
||||||
|
)
|
||||||
|
|
||||||
|
DB_CHARSET: str = Field(
|
||||||
|
description="Character set for database connection.",
|
||||||
|
default="",
|
||||||
|
)
|
||||||
|
|
||||||
|
DB_EXTRAS: str = Field(
|
||||||
|
description="Additional database connection parameters. Example: 'keepalives_idle=60&keepalives=1'",
|
||||||
|
default="",
|
||||||
|
)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def SQLALCHEMY_DATABASE_URI_SCHEME(self) -> str:
|
||||||
|
return "postgresql" if self.DB_TYPE == "postgresql" else "mysql+pymysql"
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def SQLALCHEMY_DATABASE_URI(self) -> str:
|
||||||
|
db_extras = (
|
||||||
|
f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
|
||||||
|
).strip("&")
|
||||||
|
db_extras = f"?{db_extras}" if db_extras else ""
|
||||||
|
return (
|
||||||
|
f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
|
||||||
|
f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
|
||||||
|
f"{db_extras}"
|
||||||
|
)
|
||||||
|
|
||||||
|
SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
|
||||||
|
description="Maximum number of database connections in the pool.",
|
||||||
|
default=30,
|
||||||
|
)
|
||||||
|
|
||||||
|
SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field(
|
||||||
|
description="Maximum number of connections that can be created beyond the pool_size.",
|
||||||
|
default=10,
|
||||||
|
)
|
||||||
|
|
||||||
|
SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field(
|
||||||
|
description="Number of seconds after which a connection is automatically recycled.",
|
||||||
|
default=3600,
|
||||||
|
)
|
||||||
|
|
||||||
|
SQLALCHEMY_POOL_USE_LIFO: bool = Field(
|
||||||
|
description="If True, SQLAlchemy will use last-in-first-out way to retrieve connections from pool.",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
SQLALCHEMY_POOL_PRE_PING: bool = Field(
|
||||||
|
description="If True, enables connection pool pre-ping feature to check connections.",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
SQLALCHEMY_ECHO: bool | str = Field(
|
||||||
|
description="If True, SQLAlchemy will log all SQL statements.",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
SQLALCHEMY_POOL_TIMEOUT: NonNegativeInt = Field(
|
||||||
|
description="Number of seconds to wait for a connection from the pool before raising a timeout error.",
|
||||||
|
default=30,
|
||||||
|
)
|
||||||
|
|
||||||
|
RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
|
||||||
|
description="Number of processes for the retrieval service, default to CPU cores.",
|
||||||
|
default=os.cpu_count() or 1,
|
||||||
|
)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
|
||||||
|
# Parse DB_EXTRAS for 'options'
|
||||||
|
db_extras_dict = dict(parse_qsl(self.DB_EXTRAS))
|
||||||
|
options = db_extras_dict.get("options", "")
|
||||||
|
connect_args = {}
|
||||||
|
# Use the dynamic SQLALCHEMY_DATABASE_URI_SCHEME property
|
||||||
|
if self.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"):
|
||||||
|
timezone_opt = "-c timezone=UTC"
|
||||||
|
if options:
|
||||||
|
merged_options = f"{options} {timezone_opt}"
|
||||||
|
else:
|
||||||
|
merged_options = timezone_opt
|
||||||
|
connect_args = {"options": merged_options}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"pool_size": self.SQLALCHEMY_POOL_SIZE,
|
||||||
|
"max_overflow": self.SQLALCHEMY_MAX_OVERFLOW,
|
||||||
|
"pool_recycle": self.SQLALCHEMY_POOL_RECYCLE,
|
||||||
|
"pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
|
||||||
|
"connect_args": connect_args,
|
||||||
|
"pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
|
||||||
|
"pool_reset_on_return": None,
|
||||||
|
"pool_timeout": self.SQLALCHEMY_POOL_TIMEOUT,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class CeleryConfig(DatabaseConfig):
|
||||||
|
CELERY_BACKEND: str = Field(
|
||||||
|
description="Backend for Celery task results. Options: 'database', 'redis', 'rabbitmq'.",
|
||||||
|
default="redis",
|
||||||
|
)
|
||||||
|
|
||||||
|
CELERY_BROKER_URL: str | None = Field(
|
||||||
|
description="URL of the message broker for Celery tasks.",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
CELERY_USE_SENTINEL: bool | None = Field(
|
||||||
|
description="Whether to use Redis Sentinel for high availability.",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
CELERY_SENTINEL_MASTER_NAME: str | None = Field(
|
||||||
|
description="Name of the Redis Sentinel master.",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
CELERY_SENTINEL_PASSWORD: str | None = Field(
|
||||||
|
description="Password of the Redis Sentinel master.",
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
|
||||||
|
description="Timeout for Redis Sentinel socket operations in seconds.",
|
||||||
|
default=0.1,
|
||||||
|
)
|
||||||
|
|
||||||
|
@computed_field
|
||||||
|
def CELERY_RESULT_BACKEND(self) -> str | None:
|
||||||
|
if self.CELERY_BACKEND in ("database", "rabbitmq"):
|
||||||
|
return f"db+{self.SQLALCHEMY_DATABASE_URI}"
|
||||||
|
elif self.CELERY_BACKEND == "redis":
|
||||||
|
return self.CELERY_BROKER_URL
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def BROKER_USE_SSL(self) -> bool:
|
||||||
|
return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False
|
||||||
|
|
||||||
|
|
||||||
|
class InternalTestConfig(BaseSettings):
    """
    Configuration settings for Internal Test
    """

    AWS_SECRET_ACCESS_KEY: str | None = Field(
        description="Internal test AWS secret access key",
        default=None,
    )

    AWS_ACCESS_KEY_ID: str | None = Field(
        description="Internal test AWS access key ID",
        default=None,
    )


class DatasetQueueMonitorConfig(BaseSettings):
    """
    Configuration settings for Dataset Queue Monitor
    """

    QUEUE_MONITOR_THRESHOLD: NonNegativeInt | None = Field(
        description="Threshold for dataset queue monitor",
        default=200,
    )
    QUEUE_MONITOR_ALERT_EMAILS: str | None = Field(
        description="Emails for dataset queue monitor alert, separated by commas",
        default=None,
    )
    QUEUE_MONITOR_INTERVAL: NonNegativeFloat | None = Field(
        description="Interval for dataset queue monitor in minutes",
        default=30,
    )


class MiddlewareConfig(
    # place the configs in alphabet order
    CeleryConfig,  # Note: CeleryConfig already inherits from DatabaseConfig
    KeywordStoreConfig,
    RedisConfig,
    # configs of storage and storage providers
    StorageConfig,
    AliyunOSSStorageConfig,
    AzureBlobStorageConfig,
    BaiduOBSStorageConfig,
    ClickZettaVolumeStorageConfig,
    GoogleCloudStorageConfig,
    HuaweiCloudOBSStorageConfig,
    OCIStorageConfig,
    OpenDALStorageConfig,
    S3StorageConfig,
    SupabaseStorageConfig,
    TencentCloudCOSStorageConfig,
    VolcengineTOSStorageConfig,
    # configs of vdb and vdb providers
    VectorStoreConfig,
    AnalyticdbConfig,
    ChromaConfig,
    ClickzettaConfig,
    HuaweiCloudConfig,
    MilvusConfig,
    AlibabaCloudMySQLConfig,
    MyScaleConfig,
    OpenSearchConfig,
    OracleConfig,
    PGVectorConfig,
    VastbaseVectorConfig,
    PGVectoRSConfig,
    QdrantConfig,
    RelytConfig,
    TencentVectorDBConfig,
    TiDBVectorConfig,
    WeaviateConfig,
    ElasticsearchConfig,
    CouchbaseConfig,
    InternalTestConfig,
    VikingDBConfig,
    UpstashConfig,
    TidbOnQdrantConfig,
    LindormConfig,
    OceanBaseVectorConfig,
    BaiduVectorDBConfig,
    OpenGaussConfig,
    TableStoreConfig,
    DatasetQueueMonitorConfig,
    MatrixoneConfig,
):
    pass
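Editor's note (not part of the diff): a minimal sketch of how the composed MiddlewareConfig is typically consumed. pydantic-settings merges the fields of every parent class, so one instance carries Celery, Redis, storage and vector-store settings together, each overridable through environment variables; the concrete values below are assumptions for illustration only.

import os

os.environ["REDIS_HOST"] = "redis.internal"   # field contributed by RedisConfig
os.environ["CELERY_BACKEND"] = "database"     # field contributed by CeleryConfig

settings = MiddlewareConfig()                 # assumes the class defined above is importable
print(settings.REDIS_HOST)                    # "redis.internal"
print(settings.CELERY_RESULT_BACKEND)         # "db+postgresql://..." because the backend is "database"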
0
dify/api/configs/middleware/cache/__init__.py
vendored
Normal file
113
dify/api/configs/middleware/cache/redis_config.py
vendored
Normal file
@@ -0,0 +1,113 @@
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
from pydantic_settings import BaseSettings


class RedisConfig(BaseSettings):
    """
    Configuration settings for Redis connection
    """

    REDIS_HOST: str = Field(
        description="Hostname or IP address of the Redis server",
        default="localhost",
    )

    REDIS_PORT: PositiveInt = Field(
        description="Port number on which the Redis server is listening",
        default=6379,
    )

    REDIS_USERNAME: str | None = Field(
        description="Username for Redis authentication (if required)",
        default=None,
    )

    REDIS_PASSWORD: str | None = Field(
        description="Password for Redis authentication (if required)",
        default=None,
    )

    REDIS_DB: NonNegativeInt = Field(
        description="Redis database number to use (0-15)",
        default=0,
    )

    REDIS_USE_SSL: bool = Field(
        description="Enable SSL/TLS for the Redis connection",
        default=False,
    )

    REDIS_SSL_CERT_REQS: str = Field(
        description="SSL certificate requirements (CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED)",
        default="CERT_NONE",
    )

    REDIS_SSL_CA_CERTS: str | None = Field(
        description="Path to the CA certificate file for SSL verification",
        default=None,
    )

    REDIS_SSL_CERTFILE: str | None = Field(
        description="Path to the client certificate file for SSL authentication",
        default=None,
    )

    REDIS_SSL_KEYFILE: str | None = Field(
        description="Path to the client private key file for SSL authentication",
        default=None,
    )

    REDIS_USE_SENTINEL: bool | None = Field(
        description="Enable Redis Sentinel mode for high availability",
        default=False,
    )

    REDIS_SENTINELS: str | None = Field(
        description="Comma-separated list of Redis Sentinel nodes (host:port)",
        default=None,
    )

    REDIS_SENTINEL_SERVICE_NAME: str | None = Field(
        description="Name of the Redis Sentinel service to monitor",
        default=None,
    )

    REDIS_SENTINEL_USERNAME: str | None = Field(
        description="Username for Redis Sentinel authentication (if required)",
        default=None,
    )

    REDIS_SENTINEL_PASSWORD: str | None = Field(
        description="Password for Redis Sentinel authentication (if required)",
        default=None,
    )

    REDIS_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
        description="Socket timeout in seconds for Redis Sentinel connections",
        default=0.1,
    )

    REDIS_USE_CLUSTERS: bool = Field(
        description="Enable Redis Clusters mode for high availability",
        default=False,
    )

    REDIS_CLUSTERS: str | None = Field(
        description="Comma-separated list of Redis Clusters nodes (host:port)",
        default=None,
    )

    REDIS_CLUSTERS_PASSWORD: str | None = Field(
        description="Password for Redis Clusters authentication (if required)",
        default=None,
    )

    REDIS_SERIALIZATION_PROTOCOL: int = Field(
        description="Redis serialization protocol (RESP) version",
        default=3,
    )

    REDIS_ENABLE_CLIENT_SIDE_CACHE: bool = Field(
        description="Enable client side cache in redis",
        default=False,
    )
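Editor's note (not part of the diff): a minimal sketch of how these fields map onto a redis-py client in plain standalone mode (no Sentinel, no cluster); Dify's actual wiring, including the SSL certificate options, may differ.

import redis

config = RedisConfig()  # reads the REDIS_* environment variables

client = redis.Redis(
    host=config.REDIS_HOST,
    port=config.REDIS_PORT,
    username=config.REDIS_USERNAME,
    password=config.REDIS_PASSWORD,
    db=config.REDIS_DB,
    ssl=config.REDIS_USE_SSL,
    protocol=config.REDIS_SERIALIZATION_PROTOCOL,  # RESP2 (2) or RESP3 (3); requires redis-py 5+
)
client.ping()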
@@ -0,0 +1,43 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class AliyunOSSStorageConfig(BaseSettings):
    """
    Configuration settings for Aliyun Object Storage Service (OSS)
    """

    ALIYUN_OSS_BUCKET_NAME: str | None = Field(
        description="Name of the Aliyun OSS bucket to store and retrieve objects",
        default=None,
    )

    ALIYUN_OSS_ACCESS_KEY: str | None = Field(
        description="Access key ID for authenticating with Aliyun OSS",
        default=None,
    )

    ALIYUN_OSS_SECRET_KEY: str | None = Field(
        description="Secret access key for authenticating with Aliyun OSS",
        default=None,
    )

    ALIYUN_OSS_ENDPOINT: str | None = Field(
        description="URL of the Aliyun OSS endpoint for your chosen region",
        default=None,
    )

    ALIYUN_OSS_REGION: str | None = Field(
        description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
        default=None,
    )

    ALIYUN_OSS_AUTH_VERSION: str | None = Field(
        description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
        default=None,
    )

    ALIYUN_OSS_PATH: str | None = Field(
        description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
        default=None,
    )
@@ -0,0 +1,45 @@
from typing import Literal

from pydantic import Field
from pydantic_settings import BaseSettings


class S3StorageConfig(BaseSettings):
    """
    Configuration settings for S3-compatible object storage
    """

    S3_ENDPOINT: str | None = Field(
        description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
        default=None,
    )

    S3_REGION: str | None = Field(
        description="Region where the S3 bucket is located (e.g., 'us-east-1')",
        default=None,
    )

    S3_BUCKET_NAME: str | None = Field(
        description="Name of the S3 bucket to store and retrieve objects",
        default=None,
    )

    S3_ACCESS_KEY: str | None = Field(
        description="Access key ID for authenticating with the S3 service",
        default=None,
    )

    S3_SECRET_KEY: str | None = Field(
        description="Secret access key for authenticating with the S3 service",
        default=None,
    )

    S3_ADDRESS_STYLE: Literal["auto", "virtual", "path"] = Field(
        description="S3 addressing style: 'auto', 'path', or 'virtual'",
        default="auto",
    )

    S3_USE_AWS_MANAGED_IAM: bool = Field(
        description="Use AWS managed IAM roles for authentication instead of access/secret keys",
        default=False,
    )
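Editor's note (not part of the diff): a minimal sketch of how these fields would feed a boto3 client for an S3-compatible store; when S3_USE_AWS_MANAGED_IAM is enabled, the key pair would be omitted so the default AWS credential chain applies instead.

import boto3
from botocore.client import Config

s3_config = S3StorageConfig()

client = boto3.client(
    "s3",
    endpoint_url=s3_config.S3_ENDPOINT,
    region_name=s3_config.S3_REGION,
    aws_access_key_id=s3_config.S3_ACCESS_KEY,
    aws_secret_access_key=s3_config.S3_SECRET_KEY,
    config=Config(s3={"addressing_style": s3_config.S3_ADDRESS_STYLE}),
)
client.head_bucket(Bucket=s3_config.S3_BUCKET_NAME)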
@@ -0,0 +1,28 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class AzureBlobStorageConfig(BaseSettings):
    """
    Configuration settings for Azure Blob Storage
    """

    AZURE_BLOB_ACCOUNT_NAME: str | None = Field(
        description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
        default=None,
    )

    AZURE_BLOB_ACCOUNT_KEY: str | None = Field(
        description="Access key for authenticating with the Azure Storage account",
        default=None,
    )

    AZURE_BLOB_CONTAINER_NAME: str | None = Field(
        description="Name of the Azure Blob container to store and retrieve objects",
        default=None,
    )

    AZURE_BLOB_ACCOUNT_URL: str | None = Field(
        description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
        default=None,
    )
@@ -0,0 +1,28 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class BaiduOBSStorageConfig(BaseSettings):
    """
    Configuration settings for Baidu Object Storage Service (OBS)
    """

    BAIDU_OBS_BUCKET_NAME: str | None = Field(
        description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
        default=None,
    )

    BAIDU_OBS_ACCESS_KEY: str | None = Field(
        description="Access Key ID for authenticating with Baidu OBS",
        default=None,
    )

    BAIDU_OBS_SECRET_KEY: str | None = Field(
        description="Secret Access Key for authenticating with Baidu OBS",
        default=None,
    )

    BAIDU_OBS_ENDPOINT: str | None = Field(
        description="URL of the Baidu OBS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
        default=None,
    )
@@ -0,0 +1,63 @@
"""ClickZetta Volume Storage Configuration"""

from pydantic import Field
from pydantic_settings import BaseSettings


class ClickZettaVolumeStorageConfig(BaseSettings):
    """Configuration for ClickZetta Volume storage."""

    CLICKZETTA_VOLUME_USERNAME: str | None = Field(
        description="Username for ClickZetta Volume authentication",
        default=None,
    )

    CLICKZETTA_VOLUME_PASSWORD: str | None = Field(
        description="Password for ClickZetta Volume authentication",
        default=None,
    )

    CLICKZETTA_VOLUME_INSTANCE: str | None = Field(
        description="ClickZetta instance identifier",
        default=None,
    )

    CLICKZETTA_VOLUME_SERVICE: str = Field(
        description="ClickZetta service endpoint",
        default="api.clickzetta.com",
    )

    CLICKZETTA_VOLUME_WORKSPACE: str = Field(
        description="ClickZetta workspace name",
        default="quick_start",
    )

    CLICKZETTA_VOLUME_VCLUSTER: str = Field(
        description="ClickZetta virtual cluster name",
        default="default_ap",
    )

    CLICKZETTA_VOLUME_SCHEMA: str = Field(
        description="ClickZetta schema name",
        default="dify",
    )

    CLICKZETTA_VOLUME_TYPE: str = Field(
        description="ClickZetta volume type (table|user|external)",
        default="user",
    )

    CLICKZETTA_VOLUME_NAME: str | None = Field(
        description="ClickZetta volume name for external volumes",
        default=None,
    )

    CLICKZETTA_VOLUME_TABLE_PREFIX: str = Field(
        description="Prefix for ClickZetta volume table names",
        default="dataset_",
    )

    CLICKZETTA_VOLUME_DIFY_PREFIX: str = Field(
        description="Directory prefix for User Volume to organize Dify files",
        default="dify_km",
    )
@@ -0,0 +1,18 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class GoogleCloudStorageConfig(BaseSettings):
    """
    Configuration settings for Google Cloud Storage
    """

    GOOGLE_STORAGE_BUCKET_NAME: str | None = Field(
        description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
        default=None,
    )

    GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: str | None = Field(
        description="Base64-encoded JSON key file for Google Cloud service account authentication",
        default=None,
    )
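Editor's note (not part of the diff): the service-account key is stored base64-encoded, so consuming code decodes it before handing it to the Google client library. A minimal sketch under that assumption:

import base64
import json

from google.cloud import storage
from google.oauth2 import service_account

gcs_config = GoogleCloudStorageConfig()

info = json.loads(base64.b64decode(gcs_config.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64))
credentials = service_account.Credentials.from_service_account_info(info)
client = storage.Client(project=info["project_id"], credentials=credentials)
bucket = client.bucket(gcs_config.GOOGLE_STORAGE_BUCKET_NAME)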
@@ -0,0 +1,28 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class HuaweiCloudOBSStorageConfig(BaseSettings):
    """
    Configuration settings for Huawei Cloud Object Storage Service (OBS)
    """

    HUAWEI_OBS_BUCKET_NAME: str | None = Field(
        description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
        default=None,
    )

    HUAWEI_OBS_ACCESS_KEY: str | None = Field(
        description="Access Key ID for authenticating with Huawei Cloud OBS",
        default=None,
    )

    HUAWEI_OBS_SECRET_KEY: str | None = Field(
        description="Secret Access Key for authenticating with Huawei Cloud OBS",
        default=None,
    )

    HUAWEI_OBS_SERVER: str | None = Field(
        description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
        default=None,
    )
33
dify/api/configs/middleware/storage/oci_storage_config.py
Normal file
@@ -0,0 +1,33 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class OCIStorageConfig(BaseSettings):
    """
    Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
    """

    OCI_ENDPOINT: str | None = Field(
        description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
        default=None,
    )

    OCI_REGION: str | None = Field(
        description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
        default=None,
    )

    OCI_BUCKET_NAME: str | None = Field(
        description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
        default=None,
    )

    OCI_ACCESS_KEY: str | None = Field(
        description="Access key (also known as API key) for authenticating with OCI Object Storage",
        default=None,
    )

    OCI_SECRET_KEY: str | None = Field(
        description="Secret key associated with the access key for authenticating with OCI Object Storage",
        default=None,
    )
@@ -0,0 +1,9 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class OpenDALStorageConfig(BaseSettings):
    OPENDAL_SCHEME: str = Field(
        default="fs",
        description="OpenDAL scheme.",
    )
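Editor's note (not part of the diff): a sketch of what the scheme controls, assuming the `opendal` Python binding; with the default scheme 'fs' objects are written to the local filesystem, and the root path below is purely illustrative.

import opendal

opendal_config = OpenDALStorageConfig()

op = opendal.Operator(opendal_config.OPENDAL_SCHEME, root="/var/lib/dify/storage")  # root is an illustrative option
op.write("hello.txt", b"hello")
print(op.read("hello.txt"))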
@@ -0,0 +1,23 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class SupabaseStorageConfig(BaseSettings):
    """
    Configuration settings for Supabase Object Storage Service
    """

    SUPABASE_BUCKET_NAME: str | None = Field(
        description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
        default=None,
    )

    SUPABASE_API_KEY: str | None = Field(
        description="API KEY for authenticating with Supabase",
        default=None,
    )

    SUPABASE_URL: str | None = Field(
        description="URL of the Supabase",
        default=None,
    )
@@ -0,0 +1,33 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class TencentCloudCOSStorageConfig(BaseSettings):
    """
    Configuration settings for Tencent Cloud Object Storage (COS)
    """

    TENCENT_COS_BUCKET_NAME: str | None = Field(
        description="Name of the Tencent Cloud COS bucket to store and retrieve objects",
        default=None,
    )

    TENCENT_COS_REGION: str | None = Field(
        description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')",
        default=None,
    )

    TENCENT_COS_SECRET_ID: str | None = Field(
        description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)",
        default=None,
    )

    TENCENT_COS_SECRET_KEY: str | None = Field(
        description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)",
        default=None,
    )

    TENCENT_COS_SCHEME: str | None = Field(
        description="Protocol scheme for COS requests: 'https' (recommended) or 'http'",
        default=None,
    )
@@ -0,0 +1,33 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class VolcengineTOSStorageConfig(BaseSettings):
    """
    Configuration settings for Volcengine Tinder Object Storage (TOS)
    """

    VOLCENGINE_TOS_BUCKET_NAME: str | None = Field(
        description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')",
        default=None,
    )

    VOLCENGINE_TOS_ACCESS_KEY: str | None = Field(
        description="Access Key ID for authenticating with Volcengine TOS",
        default=None,
    )

    VOLCENGINE_TOS_SECRET_KEY: str | None = Field(
        description="Secret Access Key for authenticating with Volcengine TOS",
        default=None,
    )

    VOLCENGINE_TOS_ENDPOINT: str | None = Field(
        description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')",
        default=None,
    )

    VOLCENGINE_TOS_REGION: str | None = Field(
        description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')",
        default=None,
    )
54
dify/api/configs/middleware/vdb/alibabacloud_mysql_config.py
Normal file
@@ -0,0 +1,54 @@
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class AlibabaCloudMySQLConfig(BaseSettings):
    """
    Configuration settings for AlibabaCloud MySQL vector database
    """

    ALIBABACLOUD_MYSQL_HOST: str = Field(
        description="Hostname or IP address of the AlibabaCloud MySQL server (e.g., 'localhost' or 'mysql.aliyun.com')",
        default="localhost",
    )

    ALIBABACLOUD_MYSQL_PORT: PositiveInt = Field(
        description="Port number on which the AlibabaCloud MySQL server is listening (default is 3306)",
        default=3306,
    )

    ALIBABACLOUD_MYSQL_USER: str = Field(
        description="Username for authenticating with AlibabaCloud MySQL (default is 'root')",
        default="root",
    )

    ALIBABACLOUD_MYSQL_PASSWORD: str = Field(
        description="Password for authenticating with AlibabaCloud MySQL (default is an empty string)",
        default="",
    )

    ALIBABACLOUD_MYSQL_DATABASE: str = Field(
        description="Name of the AlibabaCloud MySQL database to connect to (default is 'dify')",
        default="dify",
    )

    ALIBABACLOUD_MYSQL_MAX_CONNECTION: PositiveInt = Field(
        description="Maximum number of connections in the connection pool",
        default=5,
    )

    ALIBABACLOUD_MYSQL_CHARSET: str = Field(
        description="Character set for AlibabaCloud MySQL connection (default is 'utf8mb4')",
        default="utf8mb4",
    )

    ALIBABACLOUD_MYSQL_DISTANCE_FUNCTION: str = Field(
        description="Distance function used for vector similarity search in AlibabaCloud MySQL "
        "(e.g., 'cosine', 'euclidean')",
        default="cosine",
    )

    ALIBABACLOUD_MYSQL_HNSW_M: PositiveInt = Field(
        description="Maximum number of connections per layer for HNSW vector index (default is 6, range: 3-200)",
        default=6,
    )
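Editor's note (not part of the diff): the connection fields map onto a standard MySQL driver, while the distance function and HNSW parameters are consumed by the vector-store layer rather than by the driver itself. A minimal connection sketch using pymysql (an assumption, not necessarily the driver Dify uses):

import pymysql

mysql_config = AlibabaCloudMySQLConfig()

conn = pymysql.connect(
    host=mysql_config.ALIBABACLOUD_MYSQL_HOST,
    port=mysql_config.ALIBABACLOUD_MYSQL_PORT,
    user=mysql_config.ALIBABACLOUD_MYSQL_USER,
    password=mysql_config.ALIBABACLOUD_MYSQL_PASSWORD,
    database=mysql_config.ALIBABACLOUD_MYSQL_DATABASE,
    charset=mysql_config.ALIBABACLOUD_MYSQL_CHARSET,
)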
49
dify/api/configs/middleware/vdb/analyticdb_config.py
Normal file
@@ -0,0 +1,49 @@
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class AnalyticdbConfig(BaseSettings):
    """
    Configuration for connecting to Alibaba Cloud AnalyticDB for PostgreSQL.
    Refer to the following documentation for details on obtaining credentials:
    https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
    """

    ANALYTICDB_KEY_ID: str | None = Field(
        default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication."
    )
    ANALYTICDB_KEY_SECRET: str | None = Field(
        default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access."
    )
    ANALYTICDB_REGION_ID: str | None = Field(
        default=None,
        description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
    )
    ANALYTICDB_INSTANCE_ID: str | None = Field(
        default=None,
        description="The unique identifier of the AnalyticDB instance you want to connect to.",
    )
    ANALYTICDB_ACCOUNT: str | None = Field(
        default=None,
        description="The account name used to log in to the AnalyticDB instance"
        " (usually the initial account created with the instance).",
    )
    ANALYTICDB_PASSWORD: str | None = Field(
        default=None, description="The password associated with the AnalyticDB account for database authentication."
    )
    ANALYTICDB_NAMESPACE: str | None = Field(
        default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)."
    )
    ANALYTICDB_NAMESPACE_PASSWORD: str | None = Field(
        default=None,
        description="The password for accessing the specified namespace within the AnalyticDB instance"
        " (if namespace feature is enabled).",
    )
    ANALYTICDB_HOST: str | None = Field(
        default=None, description="The host of the AnalyticDB instance you want to connect to."
    )
    ANALYTICDB_PORT: PositiveInt = Field(
        default=5432, description="The port of the AnalyticDB instance you want to connect to."
    )
    ANALYTICDB_MIN_CONNECTION: PositiveInt = Field(default=1, description="Min connection of the AnalyticDB database.")
    ANALYTICDB_MAX_CONNECTION: PositiveInt = Field(default=5, description="Max connection of the AnalyticDB database.")
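Editor's note (not part of the diff): when AnalyticDB is reached over its PostgreSQL interface, the min/max connection fields naturally bound a client-side pool; the key ID/secret pair belongs to the management API rather than this SQL path. A sketch using psycopg2, with an illustrative database name:

from psycopg2 import pool

adb_config = AnalyticdbConfig()

sql_pool = pool.SimpleConnectionPool(
    adb_config.ANALYTICDB_MIN_CONNECTION,
    adb_config.ANALYTICDB_MAX_CONNECTION,
    host=adb_config.ANALYTICDB_HOST,
    port=adb_config.ANALYTICDB_PORT,
    user=adb_config.ANALYTICDB_ACCOUNT,
    password=adb_config.ANALYTICDB_PASSWORD,
    dbname="dify",  # illustrative database name, not defined by this config class
)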
53
dify/api/configs/middleware/vdb/baidu_vector_config.py
Normal file
@@ -0,0 +1,53 @@
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings


class BaiduVectorDBConfig(BaseSettings):
    """
    Configuration settings for Baidu Vector Database
    """

    BAIDU_VECTOR_DB_ENDPOINT: str | None = Field(
        description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
        default=None,
    )

    BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: PositiveInt = Field(
        description="Timeout in milliseconds for Baidu Vector Database operations (default is 30000 milliseconds)",
        default=30000,
    )

    BAIDU_VECTOR_DB_ACCOUNT: str | None = Field(
        description="Account for authenticating with the Baidu Vector Database",
        default=None,
    )

    BAIDU_VECTOR_DB_API_KEY: str | None = Field(
        description="API key for authenticating with the Baidu Vector Database service",
        default=None,
    )

    BAIDU_VECTOR_DB_DATABASE: str | None = Field(
        description="Name of the specific Baidu Vector Database to connect to",
        default=None,
    )

    BAIDU_VECTOR_DB_SHARD: PositiveInt = Field(
        description="Number of shards for the Baidu Vector Database (default is 1)",
        default=1,
    )

    BAIDU_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
        description="Number of replicas for the Baidu Vector Database (default is 3)",
        default=3,
    )

    BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER: str = Field(
        description="Analyzer type for inverted index in Baidu Vector Database (default is DEFAULT_ANALYZER)",
        default="DEFAULT_ANALYZER",
    )

    BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE: str = Field(
        description="Parser mode for inverted index in Baidu Vector Database (default is COARSE_MODE)",
        default="COARSE_MODE",
    )
38
dify/api/configs/middleware/vdb/chroma_config.py
Normal file
@@ -0,0 +1,38 @@
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class ChromaConfig(BaseSettings):
    """
    Configuration settings for Chroma vector database
    """

    CHROMA_HOST: str | None = Field(
        description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')",
        default=None,
    )

    CHROMA_PORT: PositiveInt = Field(
        description="Port number on which the Chroma server is listening (default is 8000)",
        default=8000,
    )

    CHROMA_TENANT: str | None = Field(
        description="Tenant identifier for multi-tenancy support in Chroma",
        default=None,
    )

    CHROMA_DATABASE: str | None = Field(
        description="Name of the Chroma database to connect to",
        default=None,
    )

    CHROMA_AUTH_PROVIDER: str | None = Field(
        description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)",
        default=None,
    )

    CHROMA_AUTH_CREDENTIALS: str | None = Field(
        description="Authentication credentials for Chroma (format depends on the auth provider)",
        default=None,
    )
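Editor's note (not part of the diff): a minimal sketch of how these fields map onto chromadb's HTTP client; the auth provider and credentials travel via chromadb's Settings object, and the fallback tenant/database names below are assumptions for illustration.

import chromadb
from chromadb.config import Settings

chroma_config = ChromaConfig()

client = chromadb.HttpClient(
    host=chroma_config.CHROMA_HOST,
    port=chroma_config.CHROMA_PORT,
    tenant=chroma_config.CHROMA_TENANT or "default_tenant",
    database=chroma_config.CHROMA_DATABASE or "default_database",
    settings=Settings(
        chroma_client_auth_provider=chroma_config.CHROMA_AUTH_PROVIDER,
        chroma_client_auth_credentials=chroma_config.CHROMA_AUTH_CREDENTIALS,
    ),
)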
68
dify/api/configs/middleware/vdb/clickzetta_config.py
Normal file
@@ -0,0 +1,68 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class ClickzettaConfig(BaseSettings):
    """
    Clickzetta Lakehouse vector database configuration
    """

    CLICKZETTA_USERNAME: str | None = Field(
        description="Username for authenticating with Clickzetta Lakehouse",
        default=None,
    )

    CLICKZETTA_PASSWORD: str | None = Field(
        description="Password for authenticating with Clickzetta Lakehouse",
        default=None,
    )

    CLICKZETTA_INSTANCE: str | None = Field(
        description="Clickzetta Lakehouse instance ID",
        default=None,
    )

    CLICKZETTA_SERVICE: str | None = Field(
        description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')",
        default="api.clickzetta.com",
    )

    CLICKZETTA_WORKSPACE: str | None = Field(
        description="Clickzetta workspace name",
        default="default",
    )

    CLICKZETTA_VCLUSTER: str | None = Field(
        description="Clickzetta virtual cluster name",
        default="default_ap",
    )

    CLICKZETTA_SCHEMA: str | None = Field(
        description="Database schema name in Clickzetta",
        default="public",
    )

    CLICKZETTA_BATCH_SIZE: int | None = Field(
        description="Batch size for bulk insert operations",
        default=100,
    )

    CLICKZETTA_ENABLE_INVERTED_INDEX: bool | None = Field(
        description="Enable inverted index for full-text search capabilities",
        default=True,
    )

    CLICKZETTA_ANALYZER_TYPE: str | None = Field(
        description="Analyzer type for full-text search: keyword, english, chinese, unicode",
        default="chinese",
    )

    CLICKZETTA_ANALYZER_MODE: str | None = Field(
        description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)",
        default="smart",
    )

    CLICKZETTA_VECTOR_DISTANCE_FUNCTION: str | None = Field(
        description="Distance function for vector similarity: l2_distance or cosine_distance",
        default="cosine_distance",
    )
Some files were not shown because too many files have changed in this diff.