dify
This commit is contained in:
208
dify/api/models/__init__.py
Normal file
208
dify/api/models/__init__.py
Normal file
@@ -0,0 +1,208 @@
|
||||
from .account import (
|
||||
Account,
|
||||
AccountIntegrate,
|
||||
AccountStatus,
|
||||
InvitationCode,
|
||||
Tenant,
|
||||
TenantAccountJoin,
|
||||
TenantAccountRole,
|
||||
TenantStatus,
|
||||
)
|
||||
from .api_based_extension import APIBasedExtension, APIBasedExtensionPoint
|
||||
from .dataset import (
|
||||
AppDatasetJoin,
|
||||
Dataset,
|
||||
DatasetCollectionBinding,
|
||||
DatasetKeywordTable,
|
||||
DatasetPermission,
|
||||
DatasetPermissionEnum,
|
||||
DatasetProcessRule,
|
||||
DatasetQuery,
|
||||
Document,
|
||||
DocumentSegment,
|
||||
Embedding,
|
||||
ExternalKnowledgeApis,
|
||||
ExternalKnowledgeBindings,
|
||||
TidbAuthBinding,
|
||||
Whitelist,
|
||||
)
|
||||
from .enums import (
|
||||
AppTriggerStatus,
|
||||
AppTriggerType,
|
||||
CreatorUserRole,
|
||||
UserFrom,
|
||||
WorkflowRunTriggeredFrom,
|
||||
WorkflowTriggerStatus,
|
||||
)
|
||||
from .model import (
|
||||
ApiRequest,
|
||||
ApiToken,
|
||||
App,
|
||||
AppAnnotationHitHistory,
|
||||
AppAnnotationSetting,
|
||||
AppMCPServer,
|
||||
AppMode,
|
||||
AppModelConfig,
|
||||
Conversation,
|
||||
DatasetRetrieverResource,
|
||||
DifySetup,
|
||||
EndUser,
|
||||
IconType,
|
||||
InstalledApp,
|
||||
Message,
|
||||
MessageAgentThought,
|
||||
MessageAnnotation,
|
||||
MessageChain,
|
||||
MessageFeedback,
|
||||
MessageFile,
|
||||
OperationLog,
|
||||
RecommendedApp,
|
||||
Site,
|
||||
Tag,
|
||||
TagBinding,
|
||||
TraceAppConfig,
|
||||
UploadFile,
|
||||
)
|
||||
from .oauth import DatasourceOauthParamConfig, DatasourceProvider
|
||||
from .provider import (
|
||||
LoadBalancingModelConfig,
|
||||
Provider,
|
||||
ProviderModel,
|
||||
ProviderModelSetting,
|
||||
ProviderOrder,
|
||||
ProviderQuotaType,
|
||||
ProviderType,
|
||||
TenantDefaultModel,
|
||||
TenantPreferredModelProvider,
|
||||
)
|
||||
from .source import DataSourceApiKeyAuthBinding, DataSourceOauthBinding
|
||||
from .task import CeleryTask, CeleryTaskSet
|
||||
from .tools import (
|
||||
ApiToolProvider,
|
||||
BuiltinToolProvider,
|
||||
ToolConversationVariables,
|
||||
ToolFile,
|
||||
ToolLabelBinding,
|
||||
ToolModelInvoke,
|
||||
WorkflowToolProvider,
|
||||
)
|
||||
from .trigger import (
|
||||
AppTrigger,
|
||||
TriggerOAuthSystemClient,
|
||||
TriggerOAuthTenantClient,
|
||||
TriggerSubscription,
|
||||
WorkflowSchedulePlan,
|
||||
)
|
||||
from .web import PinnedConversation, SavedMessage
|
||||
from .workflow import (
|
||||
ConversationVariable,
|
||||
Workflow,
|
||||
WorkflowAppLog,
|
||||
WorkflowAppLogCreatedFrom,
|
||||
WorkflowNodeExecutionModel,
|
||||
WorkflowNodeExecutionOffload,
|
||||
WorkflowNodeExecutionTriggeredFrom,
|
||||
WorkflowPause,
|
||||
WorkflowRun,
|
||||
WorkflowType,
|
||||
)
|
||||
|
||||
# Explicit public API of the models package, kept in ASCII-alphabetical order.
# Every name re-exported above must appear here (and vice versa).
__all__ = [
    "APIBasedExtension",
    "APIBasedExtensionPoint",
    "Account",
    "AccountIntegrate",
    "AccountStatus",
    "ApiRequest",
    "ApiToken",
    "ApiToolProvider",
    "App",
    "AppAnnotationHitHistory",
    "AppAnnotationSetting",
    "AppDatasetJoin",
    "AppMCPServer",
    "AppMode",
    "AppModelConfig",
    "AppTrigger",
    "AppTriggerStatus",
    "AppTriggerType",
    "BuiltinToolProvider",
    "CeleryTask",
    "CeleryTaskSet",
    "Conversation",
    "ConversationVariable",
    "CreatorUserRole",
    "DataSourceApiKeyAuthBinding",
    "DataSourceOauthBinding",
    "Dataset",
    "DatasetCollectionBinding",
    "DatasetKeywordTable",
    "DatasetPermission",
    "DatasetPermissionEnum",
    "DatasetProcessRule",
    "DatasetQuery",
    "DatasetRetrieverResource",
    "DatasourceOauthParamConfig",
    "DatasourceProvider",
    "DifySetup",
    "Document",
    "DocumentSegment",
    "Embedding",
    "EndUser",
    "ExternalKnowledgeApis",
    "ExternalKnowledgeBindings",
    "IconType",
    "InstalledApp",
    "InvitationCode",
    "LoadBalancingModelConfig",
    "Message",
    "MessageAgentThought",
    "MessageAnnotation",
    "MessageChain",
    "MessageFeedback",
    "MessageFile",
    "OperationLog",
    "PinnedConversation",
    "Provider",
    "ProviderModel",
    "ProviderModelSetting",
    "ProviderOrder",
    "ProviderQuotaType",
    "ProviderType",
    "RecommendedApp",
    "SavedMessage",
    "Site",
    "Tag",
    "TagBinding",
    "Tenant",
    "TenantAccountJoin",
    "TenantAccountRole",
    "TenantDefaultModel",
    "TenantPreferredModelProvider",
    "TenantStatus",
    "TidbAuthBinding",
    "ToolConversationVariables",
    "ToolFile",
    "ToolLabelBinding",
    "ToolModelInvoke",
    "TraceAppConfig",
    "TriggerOAuthSystemClient",
    "TriggerOAuthTenantClient",
    "TriggerSubscription",
    "UploadFile",
    "UserFrom",
    "Whitelist",
    "Workflow",
    "WorkflowAppLog",
    "WorkflowAppLogCreatedFrom",
    "WorkflowNodeExecutionModel",
    "WorkflowNodeExecutionOffload",
    "WorkflowNodeExecutionTriggeredFrom",
    "WorkflowPause",
    "WorkflowRun",
    "WorkflowRunTriggeredFrom",
    "WorkflowSchedulePlan",
    "WorkflowToolProvider",
    "WorkflowTriggerStatus",
    "WorkflowType",
]
|
||||
20
dify/api/models/_workflow_exc.py
Normal file
20
dify/api/models/_workflow_exc.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""All these exceptions are not meant to be caught by callers."""
|
||||
|
||||
|
||||
class WorkflowDataError(Exception):
    """Base class for all workflow data related exceptions.

    This should be used to indicate issues with workflow data integrity, such as
    no `graph` configuration, missing `nodes` field in `graph` configuration, or
    similar issues.
    """

    # NOTE: no `pass` needed — the docstring is a valid (and sufficient) class body.


class NodeNotFoundError(WorkflowDataError):
    """Raised when a node with the specified ID is not found in the workflow."""

    def __init__(self, node_id: str):
        super().__init__(f"Node with ID '{node_id}' not found in the workflow.")
        # Keep the offending node id so callers can log/inspect it.
        self.node_id = node_id
|
||||
394
dify/api/models/account.py
Normal file
394
dify/api/models/account.py
Normal file
@@ -0,0 +1,394 @@
|
||||
import enum
|
||||
import json
|
||||
from dataclasses import field
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
from uuid import uuid4
|
||||
|
||||
import sqlalchemy as sa
|
||||
from flask_login import UserMixin
|
||||
from sqlalchemy import DateTime, String, func, select
|
||||
from sqlalchemy.orm import Mapped, Session, mapped_column
|
||||
from typing_extensions import deprecated
|
||||
|
||||
from .base import TypeBase
|
||||
from .engine import db
|
||||
from .types import LongText, StringUUID
|
||||
|
||||
|
||||
class TenantAccountRole(enum.StrEnum):
|
||||
OWNER = "owner"
|
||||
ADMIN = "admin"
|
||||
EDITOR = "editor"
|
||||
NORMAL = "normal"
|
||||
DATASET_OPERATOR = "dataset_operator"
|
||||
|
||||
@staticmethod
|
||||
def is_valid_role(role: str) -> bool:
|
||||
if not role:
|
||||
return False
|
||||
return role in {
|
||||
TenantAccountRole.OWNER,
|
||||
TenantAccountRole.ADMIN,
|
||||
TenantAccountRole.EDITOR,
|
||||
TenantAccountRole.NORMAL,
|
||||
TenantAccountRole.DATASET_OPERATOR,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def is_privileged_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||
if not role:
|
||||
return False
|
||||
return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN}
|
||||
|
||||
@staticmethod
|
||||
def is_admin_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||
if not role:
|
||||
return False
|
||||
return role == TenantAccountRole.ADMIN
|
||||
|
||||
@staticmethod
|
||||
def is_non_owner_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||
if not role:
|
||||
return False
|
||||
return role in {
|
||||
TenantAccountRole.ADMIN,
|
||||
TenantAccountRole.EDITOR,
|
||||
TenantAccountRole.NORMAL,
|
||||
TenantAccountRole.DATASET_OPERATOR,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def is_editing_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||
if not role:
|
||||
return False
|
||||
return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN, TenantAccountRole.EDITOR}
|
||||
|
||||
@staticmethod
|
||||
def is_dataset_edit_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||
if not role:
|
||||
return False
|
||||
return role in {
|
||||
TenantAccountRole.OWNER,
|
||||
TenantAccountRole.ADMIN,
|
||||
TenantAccountRole.EDITOR,
|
||||
TenantAccountRole.DATASET_OPERATOR,
|
||||
}
|
||||
|
||||
|
||||
class AccountStatus(enum.StrEnum):
    """Lifecycle state of an account (stored as a string in `accounts.status`)."""

    PENDING = "pending"
    UNINITIALIZED = "uninitialized"
    ACTIVE = "active"
    BANNED = "banned"
    CLOSED = "closed"
|
||||
|
||||
|
||||
class Account(UserMixin, TypeBase):
    """A user account.

    Mixes flask-login's ``UserMixin`` into the ORM model so an instance can be
    used directly as the logged-in user. The tenant context (``role`` and
    ``_current_tenant``) is plain dataclass state — not persisted — and is set
    per request via the ``current_tenant`` setter or ``set_tenant_id``.
    """

    __tablename__ = "accounts"
    __table_args__ = (sa.PrimaryKeyConstraint("id", name="account_pkey"), sa.Index("account_email_idx", "email"))

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    name: Mapped[str] = mapped_column(String(255))
    email: Mapped[str] = mapped_column(String(255))
    # May be None; see `is_password_set`.
    password: Mapped[str | None] = mapped_column(String(255), default=None)
    password_salt: Mapped[str | None] = mapped_column(String(255), default=None)
    avatar: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
    interface_language: Mapped[str | None] = mapped_column(String(255), default=None)
    interface_theme: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
    timezone: Mapped[str | None] = mapped_column(String(255), default=None)
    last_login_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)
    last_login_ip: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
    last_active_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), nullable=False, init=False
    )
    # Stored as a raw string; convert with `get_status()` to the AccountStatus enum.
    status: Mapped[str] = mapped_column(String(16), server_default=sa.text("'active'"), default="active")
    initialized_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), nullable=False, init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), nullable=False, init=False, onupdate=func.current_timestamp()
    )

    # Per-request tenant context: plain dataclass fields (no mapped_column),
    # so they are never written to the database.
    role: TenantAccountRole | None = field(default=None, init=False)
    _current_tenant: "Tenant | None" = field(default=None, init=False)

    @property
    def is_password_set(self):
        """True when a local password exists (OAuth-only accounts may have none)."""
        return self.password is not None

    @property
    def current_tenant(self):
        """Tenant selected for the current request, or None if none was set."""
        return self._current_tenant

    @current_tenant.setter
    def current_tenant(self, tenant: "Tenant"):
        """Bind this account to *tenant*, resolving the account's role in it.

        Resets the context to None when the account is not a member of the tenant.
        """
        with Session(db.engine, expire_on_commit=False) as session:
            tenant_join_query = select(TenantAccountJoin).where(
                TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == self.id
            )
            tenant_join = session.scalar(tenant_join_query)
            tenant_query = select(Tenant).where(Tenant.id == tenant.id)
            # TODO: A workaround to reload the tenant with `expire_on_commit=False`, allowing
            # access to it after the session has been closed.
            # This prevents `DetachedInstanceError` when accessing the tenant outside
            # the session's lifecycle.
            # (The `tenant` argument is typically loaded by `db.session` without the
            # `expire_on_commit=False` flag, meaning its lifetime is tied to the web
            # request's lifecycle.)
            tenant_reloaded = session.scalars(tenant_query).one()

            if tenant_join:
                self.role = TenantAccountRole(tenant_join.role)
                self._current_tenant = tenant_reloaded
                return
            self._current_tenant = None

    @property
    def current_tenant_id(self) -> str | None:
        return self._current_tenant.id if self._current_tenant else None

    def set_tenant_id(self, tenant_id: str):
        """Like the `current_tenant` setter, but from a tenant id.

        Silently does nothing when the account is not a member of the tenant.
        """
        query = (
            select(Tenant, TenantAccountJoin)
            .where(Tenant.id == tenant_id)
            .where(TenantAccountJoin.tenant_id == Tenant.id)
            .where(TenantAccountJoin.account_id == self.id)
        )
        with Session(db.engine, expire_on_commit=False) as session:
            tenant_account_join = session.execute(query).first()
            if not tenant_account_join:
                return
            tenant, join = tenant_account_join
            self.role = TenantAccountRole(join.role)
            self._current_tenant = tenant

    @property
    def current_role(self):
        return self.role

    def get_status(self) -> AccountStatus:
        """Return the `status` column as an AccountStatus enum member."""
        status_str = self.status
        return AccountStatus(status_str)

    @classmethod
    def get_by_openid(cls, provider: str, open_id: str):
        """Look up an account through a third-party integration (provider, open_id).

        Returns None when no integration or no matching account exists.
        """
        account_integrate = (
            db.session.query(AccountIntegrate)
            .where(AccountIntegrate.provider == provider, AccountIntegrate.open_id == open_id)
            .one_or_none()
        )
        if account_integrate:
            return db.session.query(Account).where(Account.id == account_integrate.account_id).one_or_none()
        return None

    # check current_user.current_tenant.current_role in ['admin', 'owner']
    @property
    def is_admin_or_owner(self):
        return TenantAccountRole.is_privileged_role(self.role)

    @property
    def is_admin(self):
        return TenantAccountRole.is_admin_role(self.role)

    @property
    @deprecated("Use has_edit_permission instead.")
    def is_editor(self):
        """Determines if the account has edit permissions in their current tenant (workspace).

        This property checks if the current role has editing privileges, which includes:
        - `OWNER`
        - `ADMIN`
        - `EDITOR`

        Note: This checks for any role with editing permission, not just the 'EDITOR' role specifically.
        """
        return self.has_edit_permission

    @property
    def has_edit_permission(self):
        """Determines if the account has editing permissions in their current tenant (workspace).

        This property checks if the current role has editing privileges, which includes:
        - `OWNER`
        - `ADMIN`
        - `EDITOR`
        """
        return TenantAccountRole.is_editing_role(self.role)

    @property
    def is_dataset_editor(self):
        return TenantAccountRole.is_dataset_edit_role(self.role)

    @property
    def is_dataset_operator(self):
        return self.role == TenantAccountRole.DATASET_OPERATOR
|
||||
|
||||
|
||||
class TenantStatus(enum.StrEnum):
    """Lifecycle state of a tenant (stored as a string in `tenants.status`)."""

    NORMAL = "normal"
    ARCHIVE = "archive"
||||
|
||||
|
||||
class Tenant(TypeBase):
    """A workspace that groups accounts, apps, and datasets."""

    __tablename__ = "tenants"
    __table_args__ = (sa.PrimaryKeyConstraint("id", name="tenant_pkey"),)

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    name: Mapped[str] = mapped_column(String(255))
    encrypt_public_key: Mapped[str | None] = mapped_column(LongText, default=None)
    plan: Mapped[str] = mapped_column(String(255), server_default=sa.text("'basic'"), default="basic")
    # See TenantStatus for the expected values.
    status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'normal'"), default="normal")
    # JSON-encoded string; use `custom_config_dict` for dict access.
    custom_config: Mapped[str | None] = mapped_column(LongText, default=None)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), nullable=False, init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), init=False, onupdate=func.current_timestamp()
    )

    def get_accounts(self) -> list[Account]:
        """Return every account that is a member of this tenant."""
        return list(
            db.session.scalars(
                select(Account).where(
                    Account.id == TenantAccountJoin.account_id, TenantAccountJoin.tenant_id == self.id
                )
            ).all()
        )

    @property
    def custom_config_dict(self) -> dict[str, Any]:
        """Decode `custom_config` as a dict; empty dict when unset."""
        return json.loads(self.custom_config) if self.custom_config else {}

    @custom_config_dict.setter
    def custom_config_dict(self, value: dict[str, Any]) -> None:
        self.custom_config = json.dumps(value)
|
||||
|
||||
|
||||
class TenantAccountJoin(TypeBase):
    """Membership link between a tenant and an account, carrying the role."""

    __tablename__ = "tenant_account_joins"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tenant_account_join_pkey"),
        sa.Index("tenant_account_join_account_id_idx", "account_id"),
        sa.Index("tenant_account_join_tenant_id_idx", "tenant_id"),
        sa.UniqueConstraint("tenant_id", "account_id", name="unique_tenant_account_join"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID)
    account_id: Mapped[str] = mapped_column(StringUUID)
    # Marks the tenant the account last selected as active.
    current: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false"), default=False)
    # Raw string; see TenantAccountRole for the expected values.
    role: Mapped[str] = mapped_column(String(16), server_default="normal", default="normal")
    invited_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), nullable=False, init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), nullable=False, init=False, onupdate=func.current_timestamp()
    )
|
||||
|
||||
|
||||
class AccountIntegrate(TypeBase):
    """Binding between an account and a third-party identity (OAuth/OpenID)."""

    __tablename__ = "account_integrates"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="account_integrate_pkey"),
        sa.UniqueConstraint("account_id", "provider", name="unique_account_provider"),
        sa.UniqueConstraint("provider", "open_id", name="unique_provider_open_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    account_id: Mapped[str] = mapped_column(StringUUID)
    provider: Mapped[str] = mapped_column(String(16))
    open_id: Mapped[str] = mapped_column(String(255))
    encrypted_token: Mapped[str] = mapped_column(String(255))
    created_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), nullable=False, init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=func.current_timestamp(), nullable=False, init=False, onupdate=func.current_timestamp()
    )
|
||||
|
||||
|
||||
class InvitationCode(TypeBase):
    """A single-use invitation code, issued in batches."""

    __tablename__ = "invitation_codes"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="invitation_code_pkey"),
        sa.Index("invitation_codes_batch_idx", "batch"),
        sa.Index("invitation_codes_code_idx", "code", "status"),
    )

    # Integer PK (unlike the UUID keys used elsewhere in this module).
    id: Mapped[int] = mapped_column(sa.Integer, init=False)
    batch: Mapped[str] = mapped_column(String(255))
    code: Mapped[str] = mapped_column(String(32))
    status: Mapped[str] = mapped_column(String(16), server_default=sa.text("'unused'"), default="unused")
    used_at: Mapped[datetime | None] = mapped_column(DateTime, default=None)
    used_by_tenant_id: Mapped[str | None] = mapped_column(StringUUID, default=None)
    used_by_account_id: Mapped[str | None] = mapped_column(StringUUID, default=None)
    deprecated_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, server_default=sa.func.current_timestamp(), nullable=False, init=False
    )
|
||||
|
||||
|
||||
class TenantPluginPermission(TypeBase):
    """Per-tenant policy controlling who may install or debug plugins."""

    class InstallPermission(enum.StrEnum):
        EVERYONE = "everyone"
        ADMINS = "admins"
        # NOTE: stored value is "noone" (not "nobody") — keep for DB compatibility.
        NOBODY = "noone"

    class DebugPermission(enum.StrEnum):
        EVERYONE = "everyone"
        ADMINS = "admins"
        # NOTE: stored value is "noone" (not "nobody") — keep for DB compatibility.
        NOBODY = "noone"

    __tablename__ = "account_plugin_permissions"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="account_plugin_permission_pkey"),
        sa.UniqueConstraint("tenant_id", name="unique_tenant_plugin"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    install_permission: Mapped[InstallPermission] = mapped_column(
        String(16), nullable=False, server_default="everyone", default=InstallPermission.EVERYONE
    )
    debug_permission: Mapped[DebugPermission] = mapped_column(
        String(16), nullable=False, server_default="noone", default=DebugPermission.NOBODY
    )
|
||||
|
||||
|
||||
class TenantPluginAutoUpgradeStrategy(TypeBase):
    """Per-tenant configuration for automatic plugin upgrades."""

    class StrategySetting(enum.StrEnum):
        DISABLED = "disabled"
        FIX_ONLY = "fix_only"
        LATEST = "latest"

    class UpgradeMode(enum.StrEnum):
        ALL = "all"
        # PARTIAL uses `include_plugins`; EXCLUDE uses `exclude_plugins`.
        PARTIAL = "partial"
        EXCLUDE = "exclude"

    __tablename__ = "tenant_plugin_auto_upgrade_strategies"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tenant_plugin_auto_upgrade_strategy_pkey"),
        sa.UniqueConstraint("tenant_id", name="unique_tenant_plugin_auto_upgrade_strategy"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    strategy_setting: Mapped[StrategySetting] = mapped_column(
        String(16), nullable=False, server_default="fix_only", default=StrategySetting.FIX_ONLY
    )
    upgrade_mode: Mapped[UpgradeMode] = mapped_column(
        String(16), nullable=False, server_default="exclude", default=UpgradeMode.EXCLUDE
    )
    exclude_plugins: Mapped[list[str]] = mapped_column(sa.JSON, nullable=False, default_factory=list)
    include_plugins: Mapped[list[str]] = mapped_column(sa.JSON, nullable=False, default_factory=list)
    # Integer-encoded time of day; exact unit (hour/second offset) not evident here — confirm with scheduler code.
    upgrade_time_of_day: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False, onupdate=func.current_timestamp()
    )
|
||||
34
dify/api/models/api_based_extension.py
Normal file
34
dify/api/models/api_based_extension.py
Normal file
@@ -0,0 +1,34 @@
|
||||
import enum
|
||||
from datetime import datetime
|
||||
from uuid import uuid4
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import DateTime, String, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import TypeBase
|
||||
from .types import LongText, StringUUID
|
||||
|
||||
|
||||
class APIBasedExtensionPoint(enum.StrEnum):
    """Hook points an API-based extension can be invoked at."""

    APP_EXTERNAL_DATA_TOOL_QUERY = "app.external_data_tool.query"
    PING = "ping"
    APP_MODERATION_INPUT = "app.moderation.input"
    APP_MODERATION_OUTPUT = "app.moderation.output"
|
||||
|
||||
|
||||
class APIBasedExtension(TypeBase):
    """A tenant-registered external HTTP endpoint used as an extension."""

    __tablename__ = "api_based_extensions"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="api_based_extension_pkey"),
        sa.Index("api_based_extension_tenant_idx", "tenant_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    api_endpoint: Mapped[str] = mapped_column(String(255), nullable=False)
    # Stored encrypted/opaque; LongText because it can exceed 255 chars.
    api_key: Mapped[str] = mapped_column(LongText, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
|
||||
52
dify/api/models/base.py
Normal file
52
dify/api/models/base.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import DateTime, func
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column
|
||||
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.uuid_utils import uuidv7
|
||||
|
||||
from .engine import metadata
|
||||
from .types import StringUUID
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Untyped declarative base; shares the package-wide `metadata`."""

    metadata = metadata
|
||||
|
||||
|
||||
class TypeBase(MappedAsDataclass, DeclarativeBase):
    """Typed, dataclass-style declarative base (migration target).

    Models are being migrated from `Base` to this typed base. Once the
    migration is complete, this class is intended to be renamed to `Base`.
    """

    metadata = metadata
|
||||
|
||||
|
||||
class DefaultFieldsMixin:
    """Mixin supplying the common `id` / `created_at` / `updated_at` columns.

    `id` is a UUIDv7 string primary key; the timestamps default to naive UTC
    on the client side with a server-side fallback.
    """

    id: Mapped[str] = mapped_column(
        StringUUID,
        primary_key=True,
        # NOTE: The default serve as fallback mechanisms.
        # The application can generate the `id` before saving to optimize
        # the insertion process (especially for interdependent models)
        # and reduce database roundtrips.
        default=lambda: str(uuidv7()),
    )

    created_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        default=naive_utc_now,
        server_default=func.current_timestamp(),
    )

    # FIX: previously the column type was passed as `__name_pos=DateTime`,
    # i.e. via SQLAlchemy's *private* positional parameter name. Inside a
    # class body such dunder keywords are also subject to Python private-name
    # mangling, so relying on it is fragile. Pass the type positionally, as
    # the sibling columns do.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        default=naive_utc_now,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
    )

    def __repr__(self) -> str:
        """Concise debug representation: `<ClassName(id=...)>`."""
        return f"<{self.__class__.__name__}(id={self.id})>"
|
||||
1402
dify/api/models/dataset.py
Normal file
1402
dify/api/models/dataset.py
Normal file
File diff suppressed because it is too large
Load Diff
25
dify/api/models/engine.py
Normal file
25
dify/api/models/engine.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from sqlalchemy import MetaData
|
||||
|
||||
# Deterministic names for indexes/constraints (SQLAlchemy `naming_convention`),
# so DDL emitted for this metadata uses stable, PostgreSQL-style names.
POSTGRES_INDEXES_NAMING_CONVENTION = {
    "ix": "%(column_0_label)s_idx",
    "uq": "%(table_name)s_%(column_0_name)s_key",
    "ck": "%(table_name)s_%(constraint_name)s_check",
    "fk": "%(table_name)s_%(column_0_name)s_fkey",
    "pk": "%(table_name)s_pkey",
}

# Shared MetaData used by every declarative base in this package.
metadata = MetaData(naming_convention=POSTGRES_INDEXES_NAMING_CONVENTION)

# ****** IMPORTANT NOTICE ******
#
# NOTE(QuantumGhost): Avoid directly importing and using `db` in modules outside of the
# `controllers` package.
#
# Instead, import `db` within the `controllers` package and pass it as an argument to
# functions or class constructors.
#
# Directly importing `db` in other modules can make the code more difficult to read, test, and maintain.
#
# Whenever possible, avoid this pattern in new code.
db = SQLAlchemy(metadata=metadata)
|
||||
78
dify/api/models/enums.py
Normal file
78
dify/api/models/enums.py
Normal file
@@ -0,0 +1,78 @@
|
||||
from enum import StrEnum
|
||||
|
||||
from core.workflow.enums import NodeType
|
||||
|
||||
|
||||
class CreatorUserRole(StrEnum):
    """Kind of user that created a record (console account vs. end user)."""

    ACCOUNT = "account"
    END_USER = "end_user"
|
||||
|
||||
|
||||
class UserFrom(StrEnum):
    """Origin of the invoking user.

    NOTE: uses "end-user" (hyphen), while CreatorUserRole uses "end_user"
    (underscore) — the two enums are not interchangeable.
    """

    ACCOUNT = "account"
    END_USER = "end-user"
|
||||
|
||||
|
||||
class WorkflowRunTriggeredFrom(StrEnum):
    """What initiated a workflow run."""

    DEBUGGING = "debugging"
    APP_RUN = "app-run"  # webapp / service api
    RAG_PIPELINE_RUN = "rag-pipeline-run"
    RAG_PIPELINE_DEBUGGING = "rag-pipeline-debugging"
    WEBHOOK = "webhook"
    SCHEDULE = "schedule"
    PLUGIN = "plugin"
|
||||
|
||||
|
||||
class DraftVariableType(StrEnum):
    """Scope/origin of a draft variable."""

    # "node": the variable belongs to (was produced by) a specific workflow node.
    NODE = "node"
    # "sys": system-scoped variable — presumably the `sys.*` namespace; confirm with workflow engine.
    SYS = "sys"
    # "conversation": conversation-scoped variable.
    CONVERSATION = "conversation"
|
||||
|
||||
|
||||
class MessageStatus(StrEnum):
    """
    Message Status Enum
    """

    NORMAL = "normal"
    ERROR = "error"
|
||||
|
||||
|
||||
class ExecutionOffLoadType(StrEnum):
    """Which payload section of a node execution is stored out-of-band (offloaded)."""

    INPUTS = "inputs"
    PROCESS_DATA = "process_data"
    OUTPUTS = "outputs"
|
||||
|
||||
|
||||
class WorkflowTriggerStatus(StrEnum):
    """Workflow Trigger Execution Status"""

    PENDING = "pending"
    QUEUED = "queued"
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    PAUSED = "paused"
    FAILED = "failed"
    RATE_LIMITED = "rate_limited"
    RETRYING = "retrying"
|
||||
|
||||
|
||||
class AppTriggerStatus(StrEnum):
    """App Trigger Status Enum"""

    ENABLED = "enabled"
    DISABLED = "disabled"
    UNAUTHORIZED = "unauthorized"
    RATE_LIMITED = "rate_limited"
|
||||
|
||||
|
||||
class AppTriggerType(StrEnum):
    """App Trigger Type Enum.

    Values mirror the corresponding `NodeType` trigger values so the two
    stay in sync by construction.
    """

    TRIGGER_WEBHOOK = NodeType.TRIGGER_WEBHOOK.value
    TRIGGER_SCHEDULE = NodeType.TRIGGER_SCHEDULE.value
    TRIGGER_PLUGIN = NodeType.TRIGGER_PLUGIN.value

    # for backward compatibility
    UNKNOWN = "unknown"
|
||||
2009
dify/api/models/model.py
Normal file
2009
dify/api/models/model.py
Normal file
File diff suppressed because it is too large
Load Diff
79
dify/api/models/oauth.py
Normal file
79
dify/api/models/oauth.py
Normal file
@@ -0,0 +1,79 @@
|
||||
from datetime import datetime
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from libs.uuid_utils import uuidv7
|
||||
|
||||
from .base import TypeBase
|
||||
from .types import AdjustedJSON, LongText, StringUUID
|
||||
|
||||
|
||||
class DatasourceOauthParamConfig(TypeBase):
    """System-level OAuth client parameters for a datasource plugin/provider pair."""

    __tablename__ = "datasource_oauth_params"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="datasource_oauth_config_pkey"),
        sa.UniqueConstraint("plugin_id", "provider", name="datasource_oauth_config_datasource_id_provider_idx"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False)
    plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    provider: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    # JSON column with the system OAuth credentials.
    system_credentials: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False)
|
||||
|
||||
|
||||
class DatasourceProvider(TypeBase):
    """A tenant's configured datasource provider credential."""

    __tablename__ = "datasource_providers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="datasource_provider_pkey"),
        sa.UniqueConstraint("tenant_id", "plugin_id", "provider", "name", name="datasource_provider_unique_name"),
        sa.Index("datasource_provider_auth_type_provider_idx", "tenant_id", "plugin_id", "provider"),
    )
    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    name: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    provider: Mapped[str] = mapped_column(sa.String(128), nullable=False)
    plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    auth_type: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    # JSON column; values stored encrypted.
    encrypted_credentials: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False)
    avatar_url: Mapped[str] = mapped_column(LongText, nullable=True, default="default")
    is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"), default=False)
    # -1 presumably means "no expiry" — confirm against credential-refresh logic.
    expires_at: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default="-1", default=-1)

    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )
|
||||
|
||||
|
||||
class DatasourceOauthTenantParamConfig(TypeBase):
    """Per-tenant OAuth client parameters for a datasource plugin provider.

    One row per (tenant_id, plugin_id, provider), enforced by the unique
    constraint below.
    """

    __tablename__ = "datasource_oauth_tenant_params"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="datasource_oauth_tenant_config_pkey"),
        sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="datasource_oauth_tenant_config_unique"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    # OAuth client settings; presumably client_id/client_secret — confirm against callers
    client_params: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False, default_factory=dict)
    enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False)

    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )
|
||||
347
dify/api/models/provider.py
Normal file
347
dify/api/models/provider.py
Normal file
@@ -0,0 +1,347 @@
|
||||
from datetime import datetime
|
||||
from enum import StrEnum, auto
|
||||
from functools import cached_property
|
||||
from uuid import uuid4
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import DateTime, String, func, text
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from libs.uuid_utils import uuidv7
|
||||
|
||||
from .base import TypeBase
|
||||
from .engine import db
|
||||
from .types import LongText, StringUUID
|
||||
|
||||
|
||||
class ProviderType(StrEnum):
|
||||
CUSTOM = auto()
|
||||
SYSTEM = auto()
|
||||
|
||||
@staticmethod
|
||||
def value_of(value: str) -> "ProviderType":
|
||||
for member in ProviderType:
|
||||
if member.value == value:
|
||||
return member
|
||||
raise ValueError(f"No matching enum found for value '{value}'")
|
||||
|
||||
|
||||
class ProviderQuotaType(StrEnum):
|
||||
PAID = auto()
|
||||
"""hosted paid quota"""
|
||||
|
||||
FREE = auto()
|
||||
"""third-party free quota"""
|
||||
|
||||
TRIAL = auto()
|
||||
"""hosted trial quota"""
|
||||
|
||||
@staticmethod
|
||||
def value_of(value: str) -> "ProviderQuotaType":
|
||||
for member in ProviderQuotaType:
|
||||
if member.value == value:
|
||||
return member
|
||||
raise ValueError(f"No matching enum found for value '{value}'")
|
||||
|
||||
|
||||
class Provider(TypeBase):
    """
    Provider model representing the API providers and their configurations.

    One row per (tenant, provider name, provider type, quota type); uniqueness
    is enforced by ``unique_provider_name_type_quota``.
    """

    __tablename__ = "providers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="provider_pkey"),
        sa.Index("provider_tenant_id_provider_idx", "tenant_id", "provider_name"),
        sa.UniqueConstraint(
            "tenant_id", "provider_name", "provider_type", "quota_type", name="unique_provider_name_type_quota"
        ),
    )

    id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuidv7()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    # stored as a plain string; compared against ProviderType members (StrEnum) below
    provider_type: Mapped[str] = mapped_column(
        String(40), nullable=False, server_default=text("'custom'"), default="custom"
    )
    is_valid: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("false"), default=False)
    last_used: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, init=False)
    # optional pointer to a ProviderCredential row (see `credential` below)
    credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)

    quota_type: Mapped[str | None] = mapped_column(String(40), nullable=True, server_default=text("''"), default="")
    quota_limit: Mapped[int | None] = mapped_column(sa.BigInteger, nullable=True, default=None)
    quota_used: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, default=0)

    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )

    def __repr__(self):
        return (
            f"<Provider(id={self.id}, tenant_id={self.tenant_id}, provider_name='{self.provider_name}',"
            f" provider_type='{self.provider_type}')>"
        )

    @cached_property
    def credential(self):
        """Linked ProviderCredential row, cached per instance.

        Implicitly returns None when ``credential_id`` is unset or the
        referenced row no longer exists.
        """
        if self.credential_id:
            return db.session.query(ProviderCredential).where(ProviderCredential.id == self.credential_id).first()

    @property
    def credential_name(self):
        # Name of the linked credential, or None when no credential is linked.
        credential = self.credential
        return credential.credential_name if credential else None

    @property
    def encrypted_config(self):
        # Encrypted config blob of the linked credential, or None.
        credential = self.credential
        return credential.encrypted_config if credential else None

    @property
    def token_is_set(self):
        """
        Returns True if the encrypted_config is not None, indicating that the token is set.
        """
        return self.encrypted_config is not None

    @property
    def is_enabled(self):
        """
        Returns True if the provider is enabled.

        System providers only need to be valid; custom providers additionally
        require a stored credential (token_is_set).
        """
        if self.provider_type == ProviderType.SYSTEM:
            return self.is_valid
        else:
            return self.is_valid and self.token_is_set
|
||||
|
||||
|
||||
class ProviderModel(TypeBase):
    """
    Provider model representing the API provider_models and their configurations.

    One row per (tenant, provider, model name, model type); uniqueness is
    enforced by ``unique_provider_model_name``.
    """

    __tablename__ = "provider_models"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="provider_model_pkey"),
        sa.Index("provider_model_tenant_id_provider_idx", "tenant_id", "provider_name"),
        sa.UniqueConstraint(
            "tenant_id", "provider_name", "model_name", "model_type", name="unique_provider_model_name"
        ),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_type: Mapped[str] = mapped_column(String(40), nullable=False)
    # optional pointer to a ProviderModelCredential row (see `credential` below)
    credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
    is_valid: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("false"), default=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )

    @cached_property
    def credential(self):
        """Linked ProviderModelCredential row, cached per instance.

        Implicitly returns None when ``credential_id`` is unset or the
        referenced row no longer exists.
        """
        if self.credential_id:
            return (
                db.session.query(ProviderModelCredential)
                .where(ProviderModelCredential.id == self.credential_id)
                .first()
            )

    @property
    def credential_name(self):
        # Name of the linked credential, or None when no credential is linked.
        credential = self.credential
        return credential.credential_name if credential else None

    @property
    def encrypted_config(self):
        # Encrypted config blob of the linked credential, or None.
        credential = self.credential
        return credential.encrypted_config if credential else None
|
||||
|
||||
|
||||
class TenantDefaultModel(TypeBase):
    """Per-tenant default model selection for each model type of a provider."""

    __tablename__ = "tenant_default_models"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tenant_default_model_pkey"),
        sa.Index("tenant_default_model_tenant_id_provider_type_idx", "tenant_id", "provider_name", "model_type"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_type: Mapped[str] = mapped_column(String(40), nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )
|
||||
|
||||
|
||||
class TenantPreferredModelProvider(TypeBase):
    """Per-tenant preference for a provider's type (e.g. custom vs system)."""

    __tablename__ = "tenant_preferred_model_providers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tenant_preferred_model_provider_pkey"),
        sa.Index("tenant_preferred_model_provider_tenant_provider_idx", "tenant_id", "provider_name"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    # presumably one of the ProviderType values — confirm against callers
    preferred_provider_type: Mapped[str] = mapped_column(String(40), nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )
|
||||
|
||||
|
||||
class ProviderOrder(TypeBase):
    """Payment order placed by an account for a provider's hosted quota."""

    __tablename__ = "provider_orders"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="provider_order_pkey"),
        sa.Index("provider_order_tenant_provider_idx", "tenant_id", "provider_name"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    # account that placed the order
    account_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    payment_product_id: Mapped[str] = mapped_column(String(191), nullable=False)
    # external payment-processor identifiers; populated as the payment progresses
    payment_id: Mapped[str | None] = mapped_column(String(191))
    transaction_id: Mapped[str | None] = mapped_column(String(191))
    quantity: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=text("1"))
    currency: Mapped[str | None] = mapped_column(String(40))
    # presumably in the currency's minor units — confirm against billing code
    total_amount: Mapped[int | None] = mapped_column(sa.Integer)
    payment_status: Mapped[str] = mapped_column(String(40), nullable=False, server_default=text("'wait_pay'"))
    paid_at: Mapped[datetime | None] = mapped_column(DateTime)
    pay_failed_at: Mapped[datetime | None] = mapped_column(DateTime)
    refunded_at: Mapped[datetime | None] = mapped_column(DateTime)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )
|
||||
|
||||
|
||||
class ProviderModelSetting(TypeBase):
    """
    Provider model settings for record the model enabled status and load balancing status.
    """

    __tablename__ = "provider_model_settings"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="provider_model_setting_pkey"),
        sa.Index("provider_model_setting_tenant_provider_model_idx", "tenant_id", "provider_name", "model_type"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_type: Mapped[str] = mapped_column(String(40), nullable=False)
    # models are enabled by default; load balancing is opt-in
    enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("true"), default=True)
    load_balancing_enabled: Mapped[bool] = mapped_column(
        sa.Boolean, nullable=False, server_default=text("false"), default=False
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )
|
||||
|
||||
|
||||
class LoadBalancingModelConfig(TypeBase):
    """
    Configurations for load balancing models.

    Each row is one named backend configuration for a (tenant, provider,
    model name, model type) combination.
    """

    __tablename__ = "load_balancing_model_configs"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="load_balancing_model_config_pkey"),
        sa.Index("load_balancing_model_config_tenant_provider_model_idx", "tenant_id", "provider_name", "model_type"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_type: Mapped[str] = mapped_column(String(40), nullable=False)
    # user-facing name of this load-balancing entry
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    # inline credentials; alternatively a credential may be referenced by id below
    encrypted_config: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
    credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
    credential_source_type: Mapped[str | None] = mapped_column(String(40), nullable=True, default=None)
    enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("true"), default=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )
|
||||
|
||||
|
||||
class ProviderCredential(TypeBase):
    """
    Provider credential - stores multiple named credentials for each provider
    """

    __tablename__ = "provider_credentials"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="provider_credential_pkey"),
        sa.Index("provider_credential_tenant_provider_idx", "tenant_id", "provider_name"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    # user-facing name of this credential set
    credential_name: Mapped[str] = mapped_column(String(255), nullable=False)
    encrypted_config: Mapped[str] = mapped_column(LongText, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )
|
||||
|
||||
|
||||
class ProviderModelCredential(TypeBase):
    """
    Provider model credential - stores multiple named credentials for each provider model
    """

    __tablename__ = "provider_model_credentials"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="provider_model_credential_pkey"),
        sa.Index(
            "provider_model_credential_tenant_provider_model_idx",
            "tenant_id",
            "provider_name",
            "model_name",
            "model_type",
        ),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_name: Mapped[str] = mapped_column(String(255), nullable=False)
    model_type: Mapped[str] = mapped_column(String(40), nullable=False)
    # user-facing name of this credential set
    credential_name: Mapped[str] = mapped_column(String(255), nullable=False)
    encrypted_config: Mapped[str] = mapped_column(LongText, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )
|
||||
64
dify/api/models/provider_ids.py
Normal file
64
dify/api/models/provider_ids.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Provider ID entities for plugin system."""
|
||||
|
||||
import re
|
||||
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
|
||||
class GenericProviderID:
    """Parsed plugin provider identifier of the form organization/plugin_name/provider_name.

    A bare name (e.g. ``"openai"``) is normalized to
    ``langgenius/<name>/<name>``; anything else that does not match the full
    triple form is rejected.
    """

    organization: str
    plugin_name: str
    provider_name: str
    is_hardcoded: bool

    def __init__(self, value: str, is_hardcoded: bool = False) -> None:
        if not value:
            raise NotFound("plugin not found, please add plugin")
        # Accept either the full triple form, or a bare provider name that is
        # normalized into the langgenius organization.
        triple_form = re.match(r"^[a-z0-9_-]+\/[a-z0-9_-]+\/[a-z0-9_-]+$", value)
        if not triple_form:
            bare_form = re.match(r"^[a-z0-9_-]+$", value)
            if not bare_form:
                raise ValueError(f"Invalid plugin id {value}")
            value = f"langgenius/{value}/{value}"
        parts = value.split("/")
        self.organization = parts[0]
        self.plugin_name = parts[1]
        self.provider_name = parts[2]
        self.is_hardcoded = is_hardcoded

    def __str__(self) -> str:
        return f"{self.organization}/{self.plugin_name}/{self.provider_name}"

    def to_string(self) -> str:
        """Return the canonical string form (same as str(self))."""
        return str(self)

    def is_langgenius(self) -> bool:
        """True when this provider belongs to the langgenius organization."""
        return self.organization == "langgenius"

    @property
    def plugin_id(self) -> str:
        """The organization/plugin_name prefix, without the provider name."""
        return f"{self.organization}/{self.plugin_name}"
|
||||
|
||||
|
||||
class ModelProviderID(GenericProviderID):
    """Model-provider flavored ID.

    Remaps the first-party "google" provider onto the "gemini" plugin —
    presumably a historical alias; confirm against the plugin registry.
    """

    def __init__(self, value: str, is_hardcoded: bool = False) -> None:
        super().__init__(value, is_hardcoded)
        needs_remap = self.organization == "langgenius" and self.provider_name == "google"
        if needs_remap:
            self.plugin_name = "gemini"
|
||||
|
||||
|
||||
class ToolProviderID(GenericProviderID):
    """Tool-provider flavored ID.

    A few first-party providers ship under a "<provider>_tool" plugin name,
    so their plugin_name is rewritten after parsing.
    """

    def __init__(self, value: str, is_hardcoded: bool = False) -> None:
        super().__init__(value, is_hardcoded)
        if self.organization == "langgenius":
            renamed = ("jina", "siliconflow", "stepfun", "gitee_ai")
            if self.provider_name in renamed:
                self.plugin_name = f"{self.provider_name}_tool"
|
||||
|
||||
|
||||
class DatasourceProviderID(GenericProviderID):
    """Datasource-provider flavored ID.

    No provider-specific remapping is needed, so the base-class parsing is
    inherited as-is; the redundant pass-through ``__init__`` was removed
    (the inherited signature is identical, so callers are unaffected).
    """
|
||||
|
||||
|
||||
class TriggerProviderID(GenericProviderID):
    """Trigger-provider flavored ID.

    No provider-specific remapping is needed, so the base-class parsing is
    inherited as-is; the redundant pass-through ``__init__`` was removed
    (the inherited signature is identical, so callers are unaffected).
    """
|
||||
74
dify/api/models/source.py
Normal file
74
dify/api/models/source.py
Normal file
@@ -0,0 +1,74 @@
|
||||
import json
|
||||
from datetime import datetime
|
||||
from uuid import uuid4
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import DateTime, String, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import TypeBase
|
||||
from .types import AdjustedJSON, LongText, StringUUID, adjusted_json_index
|
||||
|
||||
|
||||
class DataSourceOauthBinding(TypeBase):
    """OAuth binding between a tenant and an external data source provider."""

    __tablename__ = "data_source_oauth_bindings"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="source_binding_pkey"),
        sa.Index("source_binding_tenant_id_idx", "tenant_id"),
        adjusted_json_index("source_info_idx", "source_info"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    access_token: Mapped[str] = mapped_column(String(255), nullable=False)
    provider: Mapped[str] = mapped_column(String(255), nullable=False)
    # provider-specific payload; indexed via adjusted_json_index above
    source_info: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )
    # NOTE(review): annotated bool but column is nullable — existing rows may hold NULL
    disabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"), default=False)
|
||||
|
||||
|
||||
class DataSourceApiKeyAuthBinding(TypeBase):
    """API-key-based auth binding between a tenant and a data source provider."""

    __tablename__ = "data_source_api_key_auth_bindings"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="data_source_api_key_auth_binding_pkey"),
        sa.Index("data_source_api_key_auth_binding_tenant_id_idx", "tenant_id"),
        sa.Index("data_source_api_key_auth_binding_provider_idx", "provider"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    category: Mapped[str] = mapped_column(String(255), nullable=False)
    provider: Mapped[str] = mapped_column(String(255), nullable=False)
    # serialized JSON string; parsed in to_dict()
    credentials: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)  # JSON
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )
    # NOTE(review): annotated bool but column is nullable — existing rows may hold NULL
    disabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"), default=False)

    def to_dict(self):
        """Serialize to a plain dict; credentials are JSON-decoded and
        timestamps converted to unix epoch floats."""
        return {
            "id": self.id,
            "tenant_id": self.tenant_id,
            "category": self.category,
            "provider": self.provider,
            "credentials": json.loads(self.credentials) if self.credentials else None,
            "created_at": self.created_at.timestamp(),
            "updated_at": self.updated_at.timestamp(),
            "disabled": self.disabled,
        }
|
||||
50
dify/api/models/task.py
Normal file
50
dify/api/models/task.py
Normal file
@@ -0,0 +1,50 @@
|
||||
from datetime import datetime
|
||||
|
||||
import sqlalchemy as sa
|
||||
from celery import states
|
||||
from sqlalchemy import DateTime, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
|
||||
from .base import TypeBase
|
||||
from .types import BinaryData, LongText
|
||||
|
||||
|
||||
class CeleryTask(TypeBase):
    """Task result/status.

    Mirrors Celery's database result-backend task table (``celery_taskmeta``);
    column names and widths follow that schema.
    """

    __tablename__ = "celery_taskmeta"

    # integer PK driven by a dedicated sequence, as in Celery's own schema
    id: Mapped[int] = mapped_column(
        sa.Integer, sa.Sequence("task_id_sequence"), primary_key=True, autoincrement=True, init=False
    )
    task_id: Mapped[str] = mapped_column(String(155), unique=True)
    status: Mapped[str] = mapped_column(String(50), default=states.PENDING)
    # pickled/serialized result payload — serialization format decided by Celery config
    result: Mapped[bytes | None] = mapped_column(BinaryData, nullable=True, default=None)
    # naive UTC timestamps (see naive_utc_now); refreshed on every update
    date_done: Mapped[datetime | None] = mapped_column(
        DateTime,
        default=naive_utc_now,
        onupdate=naive_utc_now,
        nullable=True,
    )
    traceback: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
    name: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None)
    args: Mapped[bytes | None] = mapped_column(BinaryData, nullable=True, default=None)
    kwargs: Mapped[bytes | None] = mapped_column(BinaryData, nullable=True, default=None)
    worker: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None)
    retries: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None)
    queue: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None)
|
||||
|
||||
|
||||
class CeleryTaskSet(TypeBase):
    """TaskSet result.

    Mirrors Celery's database result-backend taskset table
    (``celery_tasksetmeta``).
    """

    __tablename__ = "celery_tasksetmeta"

    id: Mapped[int] = mapped_column(
        sa.Integer, sa.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True, init=False
    )
    taskset_id: Mapped[str] = mapped_column(String(155), unique=True)
    result: Mapped[bytes | None] = mapped_column(BinaryData, nullable=True, default=None)
    # naive UTC timestamp (see naive_utc_now)
    date_done: Mapped[datetime | None] = mapped_column(DateTime, default=naive_utc_now, nullable=True)
|
||||
515
dify/api/models/tools.py
Normal file
515
dify/api/models/tools.py
Normal file
@@ -0,0 +1,515 @@
|
||||
import json
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from uuid import uuid4
|
||||
|
||||
import sqlalchemy as sa
|
||||
from deprecated import deprecated
|
||||
from sqlalchemy import ForeignKey, String, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from core.tools.entities.common_entities import I18nObject
|
||||
from core.tools.entities.tool_bundle import ApiToolBundle
|
||||
from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration
|
||||
|
||||
from .base import TypeBase
|
||||
from .engine import db
|
||||
from .model import Account, App, Tenant
|
||||
from .types import LongText, StringUUID
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from core.entities.mcp_provider import MCPProviderEntity
|
||||
|
||||
|
||||
# system level tool oauth client params (client_id, client_secret, etc.)
|
||||
class ToolOAuthSystemClient(TypeBase):
    """System-level (platform-wide) OAuth client params for a tool provider.

    One row per (plugin_id, provider), enforced by the unique constraint.
    """

    __tablename__ = "tool_oauth_system_clients"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_oauth_system_client_pkey"),
        sa.UniqueConstraint("plugin_id", "provider", name="tool_oauth_system_client_plugin_id_provider_idx"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    plugin_id: Mapped[str] = mapped_column(String(512), nullable=False)
    provider: Mapped[str] = mapped_column(String(255), nullable=False)
    # oauth params of the tool provider (client_id, client_secret, etc.), stored encrypted
    encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False)
|
||||
|
||||
|
||||
# tenant level tool oauth client params (client_id, client_secret, etc.)
|
||||
class ToolOAuthTenantClient(TypeBase):
    """Tenant-level OAuth client params for a tool provider.

    One row per (tenant_id, plugin_id, provider), enforced by the unique
    constraint.
    """

    __tablename__ = "tool_oauth_tenant_clients"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_oauth_tenant_client_pkey"),
        sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_tool_oauth_tenant_client"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    plugin_id: Mapped[str] = mapped_column(String(255), nullable=False)
    provider: Mapped[str] = mapped_column(String(255), nullable=False)
    enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"), init=False)
    # oauth params of the tool provider
    encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False, init=False)

    @property
    def oauth_params(self) -> dict[str, Any]:
        # Parsed directly as JSON here despite the "encrypted" name —
        # decryption presumably happens before assignment; confirm with callers.
        return cast(dict[str, Any], json.loads(self.encrypted_oauth_params or "{}"))
|
||||
|
||||
|
||||
class BuiltinToolProvider(TypeBase):
    """
    This table stores the tool provider information for built-in tools for each tenant.

    A tenant may hold multiple named credential sets per provider; uniqueness
    is enforced on (tenant_id, provider, name).
    """

    __tablename__ = "tool_builtin_providers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_builtin_provider_pkey"),
        sa.UniqueConstraint("tenant_id", "provider", "name", name="unique_builtin_tool_provider"),
    )

    # id of the tool provider
    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # user-facing name of this credential set
    name: Mapped[str] = mapped_column(
        String(256),
        nullable=False,
        server_default=sa.text("'API KEY 1'"),
    )
    # id of the tenant
    tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
    # who created this tool provider
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # name of the tool provider
    provider: Mapped[str] = mapped_column(String(256), nullable=False)
    # credential of the tool provider, serialized JSON (parsed in `credentials` below)
    encrypted_credentials: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )
    is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"), default=False)
    # credential type, e.g., "api-key", "oauth2"
    credential_type: Mapped[str] = mapped_column(
        String(32), nullable=False, server_default=sa.text("'api-key'"), default="api-key"
    )
    # -1 is a sentinel for "no expiry"
    expires_at: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, server_default=sa.text("-1"), default=-1)

    @property
    def credentials(self) -> dict[str, Any]:
        """JSON-decoded credentials; empty dict when none are stored."""
        if not self.encrypted_credentials:
            return {}
        return cast(dict[str, Any], json.loads(self.encrypted_credentials))
|
||||
|
||||
|
||||
class ApiToolProvider(TypeBase):
    """
    The table stores the api providers.

    Each row is one custom (OpenAPI-style) tool provider owned by a tenant;
    uniqueness is enforced on (name, tenant_id).
    """

    __tablename__ = "tool_api_providers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_api_provider_pkey"),
        sa.UniqueConstraint("name", "tenant_id", name="unique_api_tool_provider"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # name of the api provider
    name: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
        server_default=sa.text("'API KEY 1'"),
    )
    # icon
    icon: Mapped[str] = mapped_column(String(255), nullable=False)
    # original schema
    schema: Mapped[str] = mapped_column(LongText, nullable=False)
    # raw schema-type string; converted via ApiProviderSchemaType in `schema_type` below
    schema_type_str: Mapped[str] = mapped_column(String(40), nullable=False)
    # who created this tool
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # description of the provider
    description: Mapped[str] = mapped_column(LongText, nullable=False)
    # json format tools (parsed into ApiToolBundle objects in `tools` below)
    tools_str: Mapped[str] = mapped_column(LongText, nullable=False)
    # json format credentials (parsed in `credentials` below)
    credentials_str: Mapped[str] = mapped_column(LongText, nullable=False)
    # privacy policy
    privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
    # custom_disclaimer
    custom_disclaimer: Mapped[str] = mapped_column(LongText, default="")

    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )

    @property
    def schema_type(self) -> "ApiProviderSchemaType":
        """Typed view over schema_type_str."""
        return ApiProviderSchemaType.value_of(self.schema_type_str)

    @property
    def tools(self) -> list["ApiToolBundle"]:
        """Tool bundles parsed from the tools_str JSON column."""
        return [ApiToolBundle.model_validate(tool) for tool in json.loads(self.tools_str)]

    @property
    def credentials(self) -> dict[str, Any]:
        """Credentials parsed from the credentials_str JSON column."""
        return dict[str, Any](json.loads(self.credentials_str))

    @property
    def user(self) -> Account | None:
        """Creator account, looked up fresh from the DB on each access."""
        if not self.user_id:
            return None
        return db.session.query(Account).where(Account.id == self.user_id).first()

    @property
    def tenant(self) -> Tenant | None:
        """Owning tenant, looked up fresh from the DB on each access."""
        return db.session.query(Tenant).where(Tenant.id == self.tenant_id).first()
|
||||
|
||||
|
||||
class ToolLabelBinding(TypeBase):
    """
    The table stores the labels for tools.

    Association table mapping a tool (identified by id + type) to a label name.
    """

    __tablename__ = "tool_label_bindings"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_label_bind_pkey"),
        # each (tool, label) pair may only be bound once
        sa.UniqueConstraint("tool_id", "label_name", name="unique_tool_label_bind"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # tool id
    tool_id: Mapped[str] = mapped_column(String(64), nullable=False)
    # tool type
    tool_type: Mapped[str] = mapped_column(String(40), nullable=False)
    # label name
    label_name: Mapped[str] = mapped_column(String(40), nullable=False)
|
||||
|
||||
|
||||
class WorkflowToolProvider(TypeBase):
    """
    The table stores the workflow providers.

    Each row publishes one app's workflow as a callable tool.  A tenant may
    publish a given app at most once, and provider names are unique per tenant.
    """

    __tablename__ = "tool_workflow_providers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_workflow_provider_pkey"),
        sa.UniqueConstraint("name", "tenant_id", name="unique_workflow_tool_provider"),
        sa.UniqueConstraint("tenant_id", "app_id", name="unique_workflow_tool_provider_app_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # name of the workflow provider
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    # label of the workflow provider
    label: Mapped[str] = mapped_column(String(255), nullable=False, server_default="")
    # icon
    icon: Mapped[str] = mapped_column(String(255), nullable=False)
    # app id of the workflow provider
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # version of the workflow provider
    version: Mapped[str] = mapped_column(String(255), nullable=False, server_default="")
    # who created this tool
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # description of the provider
    description: Mapped[str] = mapped_column(LongText, nullable=False)
    # parameter configuration (JSON array; see `parameter_configurations`)
    parameter_configuration: Mapped[str] = mapped_column(LongText, nullable=False, default="[]")
    # privacy policy
    privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, server_default="", default=None)

    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )

    @property
    def user(self) -> Account | None:
        """Account that created this provider, or None if the row is missing."""
        return db.session.query(Account).where(Account.id == self.user_id).first()

    @property
    def tenant(self) -> Tenant | None:
        """Owning tenant, or None if the tenant row no longer exists."""
        return db.session.query(Tenant).where(Tenant.id == self.tenant_id).first()

    @property
    def parameter_configurations(self) -> list["WorkflowToolParameterConfiguration"]:
        """Deserialize `parameter_configuration` (JSON array) into config entities."""
        return [
            WorkflowToolParameterConfiguration.model_validate(config)
            for config in json.loads(self.parameter_configuration)
        ]

    @property
    def app(self) -> App | None:
        """The app whose workflow is exposed by this provider, or None if deleted."""
        return db.session.query(App).where(App.id == self.app_id).first()
|
||||
|
||||
|
||||
class MCPToolProvider(TypeBase):
    """
    The table stores the mcp providers.

    One row per MCP server registration in a tenant.  Server URL, credentials
    and request headers are stored encrypted; the accessors below parse the
    stored text and deliberately fall back to empty values on parse failure.
    """

    __tablename__ = "tool_mcp_providers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_mcp_provider_pkey"),
        sa.UniqueConstraint("tenant_id", "server_url_hash", name="unique_mcp_provider_server_url"),
        sa.UniqueConstraint("tenant_id", "name", name="unique_mcp_provider_name"),
        sa.UniqueConstraint("tenant_id", "server_identifier", name="unique_mcp_provider_server_identifier"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # name of the mcp provider
    name: Mapped[str] = mapped_column(String(40), nullable=False)
    # server identifier of the mcp provider
    server_identifier: Mapped[str] = mapped_column(String(64), nullable=False)
    # encrypted url of the mcp provider
    server_url: Mapped[str] = mapped_column(LongText, nullable=False)
    # hash of server_url for uniqueness check (the encrypted URL itself cannot be indexed)
    server_url_hash: Mapped[str] = mapped_column(String(64), nullable=False)
    # icon of the mcp provider
    icon: Mapped[str | None] = mapped_column(String(255), nullable=True)
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # who created this tool
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # encrypted credentials
    encrypted_credentials: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
    # authed
    authed: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False)
    # tools (JSON array of tool descriptors; see `tool_dict`)
    tools: Mapped[str] = mapped_column(LongText, nullable=False, default="[]")
    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )
    # request timeout in seconds
    timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("30"), default=30.0)
    # SSE read timeout in seconds
    sse_read_timeout: Mapped[float] = mapped_column(
        sa.Float, nullable=False, server_default=sa.text("300"), default=300.0
    )
    # encrypted headers for MCP server requests
    encrypted_headers: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)

    def load_user(self) -> Account | None:
        """Fetch the creating Account, or None if it no longer exists."""
        return db.session.query(Account).where(Account.id == self.user_id).first()

    @property
    def credentials(self) -> dict[str, Any]:
        """Parse `encrypted_credentials` as JSON; empty dict when unset or malformed."""
        if not self.encrypted_credentials:
            return {}
        try:
            return json.loads(self.encrypted_credentials)
        except Exception:
            # best-effort: treat undecodable credentials as absent rather than raising
            return {}

    @property
    def headers(self) -> dict[str, Any]:
        """Parse `encrypted_headers` as JSON; empty dict when unset or malformed."""
        if self.encrypted_headers is None:
            return {}
        try:
            return json.loads(self.encrypted_headers)
        except Exception:
            # best-effort: treat undecodable headers as absent rather than raising
            return {}

    @property
    def tool_dict(self) -> list[dict[str, Any]]:
        """Parse the `tools` JSON array; empty list when unset or malformed."""
        try:
            return json.loads(self.tools) if self.tools else []
        except (json.JSONDecodeError, TypeError):
            return []

    def to_entity(self) -> "MCPProviderEntity":
        """Convert to domain entity"""
        # imported locally to avoid a circular import between models and core entities
        from core.entities.mcp_provider import MCPProviderEntity

        return MCPProviderEntity.from_db_model(self)
|
||||
|
||||
|
||||
class ToolModelInvoke(TypeBase):
    """
    store the invoke logs from tool invoke

    One row per model invocation performed on behalf of a tool, including the
    prompt/response payloads and token/price accounting.
    """

    __tablename__ = "tool_model_invokes"
    __table_args__ = (sa.PrimaryKeyConstraint("id", name="tool_model_invoke_pkey"),)

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # who invoke this tool
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # provider
    provider: Mapped[str] = mapped_column(String(255), nullable=False)
    # type
    tool_type: Mapped[str] = mapped_column(String(40), nullable=False)
    # tool name
    tool_name: Mapped[str] = mapped_column(String(128), nullable=False)
    # invoke parameters (serialized)
    model_parameters: Mapped[str] = mapped_column(LongText, nullable=False)
    # prompt messages (serialized)
    prompt_messages: Mapped[str] = mapped_column(LongText, nullable=False)
    # invoke response (serialized)
    model_response: Mapped[str] = mapped_column(LongText, nullable=False)

    # token usage and pricing breakdown
    prompt_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0"))
    answer_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0"))
    answer_unit_price: Mapped[Decimal] = mapped_column(sa.Numeric(10, 4), nullable=False)
    answer_price_unit: Mapped[Decimal] = mapped_column(
        sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")
    )
    # latency of the provider call, in seconds
    provider_response_latency: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("0"))
    total_price: Mapped[Decimal | None] = mapped_column(sa.Numeric(10, 7))
    currency: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )
|
||||
|
||||
|
||||
@deprecated
class ToolConversationVariables(TypeBase):
    """
    store the conversation variables from tool invoke

    Deprecated: kept only for reading legacy rows; do not write new data here.
    """

    __tablename__ = "tool_conversation_variables"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_conversation_variables_pkey"),
        # add index for user_id and conversation_id
        sa.Index("user_id_idx", "user_id"),
        sa.Index("conversation_id_idx", "conversation_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # conversation user id
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # conversation id
    conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # variables pool (serialized JSON; see `variables`)
    variables_str: Mapped[str] = mapped_column(LongText, nullable=False)

    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )

    @property
    def variables(self):
        """Deserialize `variables_str` (JSON) into Python objects."""
        return json.loads(self.variables_str)
|
||||
|
||||
|
||||
class ToolFile(TypeBase):
    """This table stores file metadata generated in workflows,
    not only files created by agent.

    The file content itself lives in external storage under `file_key`;
    this row only records where it is and what it looks like.
    """

    __tablename__ = "tool_files"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tool_file_pkey"),
        sa.Index("tool_file_conversation_id_idx", "conversation_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # conversation user id
    user_id: Mapped[str] = mapped_column(StringUUID)
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID)
    # conversation id (nullable: files may be created outside any conversation)
    conversation_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
    # file key (storage location of the file content)
    file_key: Mapped[str] = mapped_column(String(255), nullable=False)
    # mime type
    mimetype: Mapped[str] = mapped_column(String(255), nullable=False)
    # original url
    original_url: Mapped[str | None] = mapped_column(String(2048), nullable=True, default=None)
    # name
    name: Mapped[str] = mapped_column(String(255), default="")
    # size in bytes; -1 means unknown
    size: Mapped[int] = mapped_column(sa.Integer, default=-1)
|
||||
|
||||
|
||||
@deprecated
class DeprecatedPublishedAppTool(TypeBase):
    """
    The table stores the apps published as a tool for each person.

    Deprecated: kept only for reading legacy rows; do not write new data here.
    """

    __tablename__ = "tool_published_apps"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="published_app_tool_pkey"),
        sa.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # id of the app
    app_id: Mapped[str] = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False)
    # who published this tool
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # description of the tool (serialized I18nObject JSON; see `description_i18n`)
    description: Mapped[str] = mapped_column(LongText, nullable=False)
    # llm_description of the tool, for LLM
    llm_description: Mapped[str] = mapped_column(LongText, nullable=False)
    # query description, query will be seem as a parameter of the tool,
    # to describe this parameter to llm, we need this field
    query_description: Mapped[str] = mapped_column(LongText, nullable=False)
    # query name, the name of the query parameter
    query_name: Mapped[str] = mapped_column(String(40), nullable=False)
    # name of the tool provider
    tool_name: Mapped[str] = mapped_column(String(40), nullable=False)
    # author
    author: Mapped[str] = mapped_column(String(40), nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        sa.DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        init=False,
    )

    @property
    def description_i18n(self) -> "I18nObject":
        """Deserialize the `description` JSON column into an I18nObject."""
        return I18nObject.model_validate(json.loads(self.description))
|
||||
476
dify/api/models/trigger.py
Normal file
476
dify/api/models/trigger.py
Normal file
@@ -0,0 +1,476 @@
|
||||
import json
|
||||
import time
|
||||
from collections.abc import Mapping
|
||||
from datetime import datetime
|
||||
from functools import cached_property
|
||||
from typing import Any, cast
|
||||
from uuid import uuid4
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import DateTime, Index, Integer, String, UniqueConstraint, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from core.plugin.entities.plugin_daemon import CredentialType
|
||||
from core.trigger.entities.api_entities import TriggerProviderSubscriptionApiEntity
|
||||
from core.trigger.entities.entities import Subscription
|
||||
from core.trigger.utils.endpoint import generate_plugin_trigger_endpoint_url, generate_webhook_trigger_endpoint
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.uuid_utils import uuidv7
|
||||
|
||||
from .base import TypeBase
|
||||
from .engine import db
|
||||
from .enums import AppTriggerStatus, AppTriggerType, CreatorUserRole, WorkflowTriggerStatus
|
||||
from .model import Account
|
||||
from .types import EnumText, LongText, StringUUID
|
||||
|
||||
|
||||
class TriggerSubscription(TypeBase):
    """
    Trigger provider model for managing credentials
    Supports multiple credential instances per provider

    Each row is one named subscription of a tenant to a plugin trigger
    provider, carrying its endpoint, parameters and (OAuth or API-key)
    credentials.
    """

    __tablename__ = "trigger_subscriptions"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="trigger_provider_pkey"),
        Index("idx_trigger_providers_tenant_provider", "tenant_id", "provider_id"),
        # Primary index for O(1) lookup by endpoint
        Index("idx_trigger_providers_endpoint", "endpoint_id", unique=True),
        # Composite index for tenant-specific queries (optional, kept for compatibility)
        Index("idx_trigger_providers_tenant_endpoint", "tenant_id", "endpoint_id"),
        UniqueConstraint("tenant_id", "provider_id", "name", name="unique_trigger_provider"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False, comment="Subscription instance name")
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_id: Mapped[str] = mapped_column(
        String(255), nullable=False, comment="Provider identifier (e.g., plugin_id/provider_name)"
    )
    endpoint_id: Mapped[str] = mapped_column(String(255), nullable=False, comment="Subscription endpoint")
    parameters: Mapped[dict[str, Any]] = mapped_column(sa.JSON, nullable=False, comment="Subscription parameters JSON")
    properties: Mapped[dict[str, Any]] = mapped_column(sa.JSON, nullable=False, comment="Subscription properties JSON")

    credentials: Mapped[dict[str, Any]] = mapped_column(
        sa.JSON, nullable=False, comment="Subscription credentials JSON"
    )
    credential_type: Mapped[str] = mapped_column(String(50), nullable=False, comment="oauth or api_key")
    credential_expires_at: Mapped[int] = mapped_column(
        Integer, default=-1, comment="OAuth token expiration timestamp, -1 for never"
    )
    expires_at: Mapped[int] = mapped_column(
        Integer, default=-1, comment="Subscription instance expiration timestamp, -1 for never"
    )

    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        server_onupdate=func.current_timestamp(),
        init=False,
    )

    def is_credential_expired(self) -> bool:
        """Check if credential is expired"""
        # -1 is the sentinel for "never expires"
        if self.credential_expires_at == -1:
            return False
        # Check if token expires in next 3 minutes
        return (self.credential_expires_at - 180) < int(time.time())

    def to_entity(self) -> Subscription:
        """Convert this row into the core `Subscription` domain entity."""
        return Subscription(
            expires_at=self.expires_at,
            endpoint=generate_plugin_trigger_endpoint_url(self.endpoint_id),
            parameters=self.parameters,
            properties=self.properties,
        )

    def to_api_entity(self) -> TriggerProviderSubscriptionApiEntity:
        """Convert this row into the API-layer subscription entity.

        `workflows_in_use` is set to -1 here; the caller is expected to fill
        in the real count when it has that information.
        """
        return TriggerProviderSubscriptionApiEntity(
            id=self.id,
            name=self.name,
            provider=self.provider_id,
            endpoint=generate_plugin_trigger_endpoint_url(self.endpoint_id),
            parameters=self.parameters,
            properties=self.properties,
            credential_type=CredentialType(self.credential_type),
            credentials=self.credentials,
            workflows_in_use=-1,
        )
|
||||
|
||||
|
||||
# system level trigger oauth client params
class TriggerOAuthSystemClient(TypeBase):
    """System-wide OAuth client configuration for a trigger provider.

    One row per (plugin, provider); shared by all tenants that have not
    configured their own client (see TriggerOAuthTenantClient).
    """

    __tablename__ = "trigger_oauth_system_clients"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="trigger_oauth_system_client_pkey"),
        sa.UniqueConstraint("plugin_id", "provider", name="trigger_oauth_system_client_plugin_id_provider_idx"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    plugin_id: Mapped[str] = mapped_column(String(255), nullable=False)
    provider: Mapped[str] = mapped_column(String(255), nullable=False)
    # oauth params of the trigger provider
    encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        server_onupdate=func.current_timestamp(),
        init=False,
    )
|
||||
|
||||
|
||||
# tenant level trigger oauth client params (client_id, client_secret, etc.)
class TriggerOAuthTenantClient(TypeBase):
    """Per-tenant OAuth client configuration for a trigger provider.

    Overrides the system-level client (TriggerOAuthSystemClient) for one
    tenant; unique per (tenant, plugin, provider).
    """

    __tablename__ = "trigger_oauth_tenant_clients"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="trigger_oauth_tenant_client_pkey"),
        sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_trigger_oauth_tenant_client"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    plugin_id: Mapped[str] = mapped_column(String(255), nullable=False)
    provider: Mapped[str] = mapped_column(String(255), nullable=False)
    enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"), default=True)
    # oauth params of the trigger provider
    encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False, default="{}")
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        server_onupdate=func.current_timestamp(),
        init=False,
    )

    @property
    def oauth_params(self) -> Mapping[str, Any]:
        # NOTE(review): the stored value is json.loads-ed directly, so despite the
        # column name it appears to be plain JSON at this layer (decryption, if
        # any, must happen elsewhere) — confirm against the service code.
        return cast(Mapping[str, Any], json.loads(self.encrypted_oauth_params or "{}"))
|
||||
|
||||
|
||||
class WorkflowTriggerLog(TypeBase):
    """
    Workflow Trigger Log

    Track async trigger workflow runs with re-invocation capability

    Attributes:
    - id (uuid) Trigger Log ID (used as workflow_trigger_log_id)
    - tenant_id (uuid) Workspace ID
    - app_id (uuid) App ID
    - workflow_id (uuid) Workflow ID
    - workflow_run_id (uuid) Optional - Associated workflow run ID when execution starts
    - root_node_id (string) Optional - Custom starting node ID for workflow execution
    - trigger_metadata (text) Optional - Trigger metadata (JSON)
    - trigger_type (string) Type of trigger: webhook, schedule, plugin
    - trigger_data (text) Full trigger data including inputs (JSON)
    - inputs (text) Input parameters (JSON)
    - outputs (text) Optional - Output content (JSON)
    - status (string) Execution status
    - error (text) Optional - Error message if failed
    - queue_name (string) Celery queue used
    - celery_task_id (string) Optional - Celery task ID for tracking
    - retry_count (int) Number of retry attempts
    - elapsed_time (float) Optional - Time consumption in seconds
    - total_tokens (int) Optional - Total tokens used
    - created_by_role (string) Creator role: account, end_user
    - created_by (string) Creator ID
    - created_at (timestamp) Creation time
    - triggered_at (timestamp) Optional - When actually triggered
    - finished_at (timestamp) Optional - Completion time
    """

    __tablename__ = "workflow_trigger_logs"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="workflow_trigger_log_pkey"),
        sa.Index("workflow_trigger_log_tenant_app_idx", "tenant_id", "app_id"),
        sa.Index("workflow_trigger_log_status_idx", "status"),
        sa.Index("workflow_trigger_log_created_at_idx", "created_at"),
        sa.Index("workflow_trigger_log_workflow_run_idx", "workflow_run_id"),
        sa.Index("workflow_trigger_log_workflow_id_idx", "workflow_id"),
    )

    # uuidv7 keeps ids roughly time-ordered, which suits a log table
    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    workflow_run_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
    root_node_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    trigger_metadata: Mapped[str] = mapped_column(LongText, nullable=False)
    trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False)
    trigger_data: Mapped[str] = mapped_column(LongText, nullable=False)  # Full TriggerData as JSON
    inputs: Mapped[str] = mapped_column(LongText, nullable=False)  # Just inputs for easy viewing
    outputs: Mapped[str | None] = mapped_column(LongText, nullable=True)

    status: Mapped[str] = mapped_column(EnumText(WorkflowTriggerStatus, length=50), nullable=False)
    error: Mapped[str | None] = mapped_column(LongText, nullable=True)

    queue_name: Mapped[str] = mapped_column(String(100), nullable=False)
    celery_task_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    created_by_role: Mapped[str] = mapped_column(String(255), nullable=False)
    created_by: Mapped[str] = mapped_column(String(255), nullable=False)
    retry_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0)
    elapsed_time: Mapped[float | None] = mapped_column(sa.Float, nullable=True, default=None)
    total_tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    triggered_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)
    finished_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)

    @property
    def created_by_account(self):
        """The creating Account, or None when the creator was an end user."""
        created_by_role = CreatorUserRole(self.created_by_role)
        return db.session.get(Account, self.created_by) if created_by_role == CreatorUserRole.ACCOUNT else None

    @property
    def created_by_end_user(self):
        """The creating EndUser, or None when the creator was an account."""
        # imported locally to avoid a circular import with .model
        from .model import EndUser

        created_by_role = CreatorUserRole(self.created_by_role)
        return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for API responses"""
        return {
            "id": self.id,
            "tenant_id": self.tenant_id,
            "app_id": self.app_id,
            "workflow_id": self.workflow_id,
            "workflow_run_id": self.workflow_run_id,
            "root_node_id": self.root_node_id,
            "trigger_metadata": json.loads(self.trigger_metadata) if self.trigger_metadata else None,
            "trigger_type": self.trigger_type,
            "trigger_data": json.loads(self.trigger_data),
            "inputs": json.loads(self.inputs),
            "outputs": json.loads(self.outputs) if self.outputs else None,
            "status": self.status,
            "error": self.error,
            "queue_name": self.queue_name,
            "celery_task_id": self.celery_task_id,
            "retry_count": self.retry_count,
            "elapsed_time": self.elapsed_time,
            "total_tokens": self.total_tokens,
            "created_by_role": self.created_by_role,
            "created_by": self.created_by,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "triggered_at": self.triggered_at.isoformat() if self.triggered_at else None,
            "finished_at": self.finished_at.isoformat() if self.finished_at else None,
        }
|
||||
|
||||
|
||||
class WorkflowWebhookTrigger(TypeBase):
    """
    Workflow Webhook Trigger

    Attributes:
    - id (uuid) Primary key
    - app_id (uuid) App ID to bind to a specific app
    - node_id (varchar) Node ID which node in the workflow
    - tenant_id (uuid) Workspace ID
    - webhook_id (varchar) Webhook ID for URL: https://api.dify.ai/triggers/webhook/:webhook_id
    - created_by (varchar) User ID of the creator
    - created_at (timestamp) Creation time
    - updated_at (timestamp) Last update time
    """

    __tablename__ = "workflow_webhook_triggers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="workflow_webhook_trigger_pkey"),
        sa.Index("workflow_webhook_trigger_tenant_idx", "tenant_id"),
        # one webhook trigger per workflow node, and webhook ids are globally unique
        sa.UniqueConstraint("app_id", "node_id", name="uniq_node"),
        sa.UniqueConstraint("webhook_id", name="uniq_webhook_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    node_id: Mapped[str] = mapped_column(String(64), nullable=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    webhook_id: Mapped[str] = mapped_column(String(24), nullable=False)
    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        server_onupdate=func.current_timestamp(),
        init=False,
    )

    # cached_property: webhook_id is immutable for the life of the row, so the
    # generated URL can be computed once per instance
    @cached_property
    def webhook_url(self):
        """
        Generated webhook url
        """
        return generate_webhook_trigger_endpoint(self.webhook_id)

    @cached_property
    def webhook_debug_url(self):
        """
        Generated debug webhook url
        """
        return generate_webhook_trigger_endpoint(self.webhook_id, True)
|
||||
|
||||
|
||||
class WorkflowPluginTrigger(TypeBase):
    """
    Workflow Plugin Trigger

    Maps plugin triggers to workflow nodes, similar to WorkflowWebhookTrigger

    Attributes:
    - id (uuid) Primary key
    - app_id (uuid) App ID to bind to a specific app
    - node_id (varchar) Node ID which node in the workflow
    - tenant_id (uuid) Workspace ID
    - provider_id (varchar) Plugin provider ID
    - event_name (varchar) trigger name
    - subscription_id (varchar) Subscription ID
    - created_at (timestamp) Creation time
    - updated_at (timestamp) Last update time
    """

    __tablename__ = "workflow_plugin_triggers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="workflow_plugin_trigger_pkey"),
        # supports dispatch: find all nodes listening to a subscription's event
        sa.Index("workflow_plugin_trigger_tenant_subscription_idx", "tenant_id", "subscription_id", "event_name"),
        sa.UniqueConstraint("app_id", "node_id", name="uniq_app_node_subscription"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    node_id: Mapped[str] = mapped_column(String(64), nullable=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    provider_id: Mapped[str] = mapped_column(String(512), nullable=False)
    event_name: Mapped[str] = mapped_column(String(255), nullable=False)
    subscription_id: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        server_onupdate=func.current_timestamp(),
        init=False,
    )
|
||||
|
||||
|
||||
class AppTrigger(TypeBase):
    """
    App Trigger

    Manages multiple triggers for an app with enable/disable and authorization states.

    Attributes:
    - id (uuid) Primary key
    - tenant_id (uuid) Workspace ID
    - app_id (uuid) App ID
    - trigger_type (string) Type: webhook, schedule, plugin
    - title (string) Trigger title
    - status (string) Status: enabled, disabled, unauthorized, error
    - node_id (string) Optional workflow node ID
    - created_at (timestamp) Creation time
    - updated_at (timestamp) Last update time
    """

    __tablename__ = "app_triggers"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="app_trigger_pkey"),
        sa.Index("app_trigger_tenant_app_idx", "tenant_id", "app_id"),
    )

    # Time-ordered UUIDv7 primary key, generated client-side.
    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # Optional per the docstring and the `str | None` annotation; the original
    # declared nullable=False, contradicting both — aligned to nullable=True.
    node_id: Mapped[str | None] = mapped_column(String(64), nullable=True)
    trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False)
    title: Mapped[str] = mapped_column(String(255), nullable=False)
    # Non-null, with an empty-string default at both the ORM and database level.
    provider_name: Mapped[str] = mapped_column(String(255), server_default="", default="")
    status: Mapped[str] = mapped_column(
        EnumText(AppTriggerStatus, length=50), nullable=False, default=AppTriggerStatus.ENABLED
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    # Fix: previously `default=naive_utc_now()` — the call is evaluated ONCE at
    # import time, so every new row would be stamped with the process start time.
    # Use database-side defaults instead, matching created_at and the sibling
    # trigger models (WorkflowPluginTrigger, WorkflowWebhookTrigger).
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        server_onupdate=func.current_timestamp(),
        init=False,
    )
|
||||
|
||||
|
||||
class WorkflowSchedulePlan(TypeBase):
    """
    Workflow Schedule Configuration

    Store schedule configurations for time-based workflow triggers.
    Uses cron expressions with timezone support for flexible scheduling.

    Attributes:
    - id (uuid) Primary key
    - app_id (uuid) App ID to bind to a specific app
    - node_id (varchar) Starting node ID for workflow execution
    - tenant_id (uuid) Workspace ID for multi-tenancy
    - cron_expression (varchar) Cron expression defining schedule pattern
    - timezone (varchar) Timezone for cron evaluation (e.g., 'Asia/Shanghai')
    - next_run_at (timestamp) Next scheduled execution time
    - created_at (timestamp) Creation timestamp
    - updated_at (timestamp) Last update timestamp
    """

    __tablename__ = "workflow_schedule_plans"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="workflow_schedule_plan_pkey"),
        # One schedule plan per (app, node) pair.
        sa.UniqueConstraint("app_id", "node_id", name="uniq_app_node"),
        # Supports the scheduler's "what is due next" scan.
        sa.Index("workflow_schedule_plan_next_idx", "next_run_at"),
    )

    # Time-ordered UUIDv7 primary key, generated client-side.
    id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuidv7()), init=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    node_id: Mapped[str] = mapped_column(String(64), nullable=False)
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)

    # Schedule configuration
    cron_expression: Mapped[str] = mapped_column(String(255), nullable=False)
    timezone: Mapped[str] = mapped_column(String(64), nullable=False)

    # Schedule control
    # NULL means the plan is not currently queued for execution.
    next_run_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
    )

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation.

        Datetime fields are serialized to ISO-8601 strings; ``next_run_at``
        becomes ``None`` when unset.
        """
        return {
            "id": self.id,
            "app_id": self.app_id,
            "node_id": self.node_id,
            "tenant_id": self.tenant_id,
            "cron_expression": self.cron_expression,
            "timezone": self.timezone,
            "next_run_at": self.next_run_at.isoformat() if self.next_run_at else None,
            "created_at": self.created_at.isoformat(),
            "updated_at": self.updated_at.isoformat(),
        }
|
||||
163
dify/api/models/types.py
Normal file
163
dify/api/models/types.py
Normal file
@@ -0,0 +1,163 @@
|
||||
import enum
|
||||
import uuid
|
||||
from typing import Any, Generic, TypeVar
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import CHAR, TEXT, VARCHAR, LargeBinary, TypeDecorator
|
||||
from sqlalchemy.dialects.mysql import LONGBLOB, LONGTEXT
|
||||
from sqlalchemy.dialects.postgresql import BYTEA, JSONB, UUID
|
||||
from sqlalchemy.engine.interfaces import Dialect
|
||||
from sqlalchemy.sql.type_api import TypeEngine
|
||||
|
||||
from configs import dify_config
|
||||
|
||||
|
||||
class StringUUID(TypeDecorator[uuid.UUID | str | None]):
    """Store UUIDs as the native ``UUID`` type on PostgreSQL and ``CHAR(36)``
    on every other backend; results always come back as ``str``.
    """

    impl = CHAR
    cache_ok = True

    def process_bind_param(self, value: uuid.UUID | str | None, dialect: Dialect) -> str | None:
        if value is None:
            return None
        if dialect.name == "postgresql":
            return str(value)
        # Non-PostgreSQL backends: UUID objects are stored in 32-char hex form,
        # while plain strings pass through untouched.
        # NOTE(review): that means the stored representation differs by input
        # type (hex vs. hyphenated) — confirm callers bind consistently.
        return value.hex if isinstance(value, uuid.UUID) else value

    def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
        descriptor = UUID() if dialect.name == "postgresql" else CHAR(36)
        return dialect.type_descriptor(descriptor)

    def process_result_value(self, value: uuid.UUID | str | None, dialect: Dialect) -> str | None:
        # PostgreSQL may hand back uuid.UUID objects; normalize to str.
        return None if value is None else str(value)
||||
|
||||
|
||||
class LongText(TypeDecorator[str | None]):
    """Unbounded text column: ``LONGTEXT`` on MySQL, plain ``TEXT`` elsewhere
    (including PostgreSQL).
    """

    impl = TEXT
    cache_ok = True

    def process_bind_param(self, value: str | None, dialect: Dialect) -> str | None:
        # No transformation on the way in.
        return value

    def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
        # MySQL's TEXT caps at 64 KiB, so it needs LONGTEXT; TEXT is already
        # unbounded on PostgreSQL and is the generic fallback elsewhere.
        if dialect.name == "mysql":
            return dialect.type_descriptor(LONGTEXT())
        return dialect.type_descriptor(TEXT())

    def process_result_value(self, value: str | None, dialect: Dialect) -> str | None:
        # No transformation on the way out.
        return value
|
||||
|
||||
|
||||
class BinaryData(TypeDecorator[bytes | None]):
    """Large binary column: ``BYTEA`` on PostgreSQL, ``LONGBLOB`` on MySQL,
    generic ``LargeBinary`` elsewhere.
    """

    impl = LargeBinary
    cache_ok = True

    def process_bind_param(self, value: bytes | None, dialect: Dialect) -> bytes | None:
        # Bytes are stored verbatim.
        return value

    def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
        if dialect.name == "postgresql":
            return dialect.type_descriptor(BYTEA())
        if dialect.name == "mysql":
            # MySQL's BLOB caps at 64 KiB, so use LONGBLOB.
            return dialect.type_descriptor(LONGBLOB())
        return dialect.type_descriptor(LargeBinary())

    def process_result_value(self, value: bytes | None, dialect: Dialect) -> bytes | None:
        # Bytes are returned verbatim.
        return value
|
||||
|
||||
|
||||
class AdjustedJSON(TypeDecorator[dict | list | None]):
    """JSON column that upgrades to ``JSONB`` on PostgreSQL and falls back to
    the generic ``sa.JSON`` type on every other backend.
    """

    impl = sa.JSON
    cache_ok = True

    def __init__(self, astext_type=None):
        # astext_type customizes the type produced by JSONB `.astext` casts;
        # it is only meaningful on PostgreSQL.
        self.astext_type = astext_type
        super().__init__()

    def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
        if dialect.name != "postgresql":
            # MySQL and any other backend use the portable JSON type.
            return dialect.type_descriptor(sa.JSON())
        if self.astext_type:
            return dialect.type_descriptor(JSONB(astext_type=self.astext_type))
        return dialect.type_descriptor(JSONB())

    def process_bind_param(self, value: dict | list | None, dialect: Dialect) -> dict | list | None:
        # Serialization is handled by the underlying dialect type.
        return value

    def process_result_value(self, value: dict | list | None, dialect: Dialect) -> dict | list | None:
        # Deserialization is handled by the underlying dialect type.
        return value
|
||||
|
||||
|
||||
_E = TypeVar("_E", bound=enum.StrEnum)


class EnumText(TypeDecorator[_E | None], Generic[_E]):
    """Persist a ``StrEnum`` as ``VARCHAR``, validating values on the way in
    and rehydrating enum members on the way out.

    The column length defaults to the longest enum value (minimum 20) and may
    be widened explicitly via ``length``.
    """

    impl = VARCHAR
    cache_ok = True

    _length: int
    _enum_class: type[_E]

    def __init__(self, enum_class: type[_E], length: int | None = None):
        self._enum_class = enum_class
        max_enum_value_len = max(len(e.value) for e in enum_class)
        if length is not None:
            if length < max_enum_value_len:
                # Message fixed: lengths equal to the longest value are accepted.
                raise ValueError("length should be greater than or equal to the longest enum value length.")
            self._length = length
        else:
            # leave some rooms for future longer enum values.
            self._length = max(max_enum_value_len, 20)
        # Fix: TypeDecorator subclasses must call super().__init__() so the
        # wrapped impl is instantiated (here: VARCHAR(self._length)); the
        # original skipped it, leaving `impl` as the bare class.
        super().__init__(self._length)

    def process_bind_param(self, value: _E | str | None, dialect: Dialect) -> str | None:
        """Validate and flatten an enum member (or raw string) for storage."""
        if value is None:
            return value
        if isinstance(value, self._enum_class):
            return value.value
        # Since _E is bound to StrEnum which inherits from str, at this point
        # value must be str; constructing the enum validates it (raises
        # ValueError for unknown values) without changing what is stored.
        self._enum_class(value)
        return value

    def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
        return dialect.type_descriptor(VARCHAR(self._length))

    def process_result_value(self, value: str | None, dialect: Dialect) -> _E | None:
        """Rehydrate the stored string into an enum member."""
        if value is None:
            return value
        # Type annotation guarantees value is str at this point.
        return self._enum_class(value)

    def compare_values(self, x: _E | None, y: _E | None) -> bool:
        """Equality check used by the ORM's change detection."""
        if x is None or y is None:
            return x is y
        return x == y
|
||||
|
||||
|
||||
def adjusted_json_index(index_name, column_name):
    """Build a GIN index for a JSON column on PostgreSQL.

    Returns None on other backends, which lack a GIN equivalent — callers
    must be prepared to skip the index in that case.
    """
    resolved_name = index_name if index_name else f"{column_name}_idx"
    if dify_config.DB_TYPE != "postgresql":
        return None
    return sa.Index(resolved_name, column_name, postgresql_using="gin")
|
||||
59
dify/api/models/web.py
Normal file
59
dify/api/models/web.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from datetime import datetime
|
||||
from uuid import uuid4
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import DateTime, String, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import TypeBase
|
||||
from .engine import db
|
||||
from .model import Message
|
||||
from .types import StringUUID
|
||||
|
||||
|
||||
class SavedMessage(TypeBase):
    """A message bookmarked ("saved") by a user within an app.

    Join record between an app and a message, tagged with the saving user's
    id and role.
    """

    __tablename__ = "saved_messages"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
        # Supports lookups of a user's saved messages within an app.
        sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
    )

    # Random UUIDv4 primary key, generated client-side.
    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    message_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # Role of the saving user; defaults to 'end_user' at the database level.
    created_by_role: Mapped[str] = mapped_column(String(255), nullable=False, server_default=sa.text("'end_user'"))
    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        init=False,
    )

    @property
    def message(self):
        """Fetch the referenced Message row (None if it no longer exists).

        Issues a fresh query on each access rather than using a relationship.
        """
        return db.session.query(Message).where(Message.id == self.message_id).first()
|
||||
|
||||
|
||||
class PinnedConversation(TypeBase):
    """A conversation pinned by a user within an app.

    Join record between an app and a conversation, tagged with the pinning
    user's id and role.
    """

    __tablename__ = "pinned_conversations"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="pinned_conversation_pkey"),
        # Supports lookups of a user's pinned conversations within an app.
        sa.Index("pinned_conversation_conversation_idx", "app_id", "conversation_id", "created_by_role", "created_by"),
    )

    # Random UUIDv4 primary key, generated client-side.
    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # Consistency fix: nullable=False made explicit, matching every sibling
    # column. No schema change — Mapped[str] (non-Optional) already implies
    # NOT NULL under SQLAlchemy 2.0 annotation-driven mapping.
    conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # Role of the pinning user; defaults to 'end_user' at the database level.
    created_by_role: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
        server_default=sa.text("'end_user'"),
    )
    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        server_default=func.current_timestamp(),
        init=False,
    )
|
||||
1730
dify/api/models/workflow.py
Normal file
1730
dify/api/models/workflow.py
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user