This commit is contained in:
2025-12-01 17:21:38 +08:00
parent 32fee2b8ab
commit fab8c13cb3
7511 changed files with 996300 additions and 0 deletions

View File

View File

@@ -0,0 +1,290 @@
from collections.abc import Mapping, Sequence
from enum import StrEnum
from typing import TYPE_CHECKING, Any, Optional
from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
if TYPE_CHECKING:
from core.ops.ops_trace_manager import TraceQueueManager
from constants import UUID_NIL
from core.app.app_config.entities import EasyUIBasedAppConfig, WorkflowUIBasedAppConfig
from core.entities.provider_configuration import ProviderModelBundle
from core.file import File, FileUploadConfig
from core.model_runtime.entities.model_entities import AIModelEntity
class InvokeFrom(StrEnum):
"""
Invoke From.
"""
# SERVICE_API indicates that this invocation is from an API call to Dify app.
#
# Description of service api in Dify docs:
# https://docs.dify.ai/en/guides/application-publishing/developing-with-apis
SERVICE_API = "service-api"
# WEB_APP indicates that this invocation is from
# the web app of the workflow (or chatflow).
#
# Description of web app in Dify docs:
# https://docs.dify.ai/en/guides/application-publishing/launch-your-webapp-quickly/README
WEB_APP = "web-app"
# TRIGGER indicates that this invocation is from a trigger.
# this is used for plugin trigger and webhook trigger.
TRIGGER = "trigger"
# EXPLORE indicates that this invocation is from
# the workflow (or chatflow) explore page.
EXPLORE = "explore"
# DEBUGGER indicates that this invocation is from
# the workflow (or chatflow) edit page.
DEBUGGER = "debugger"
PUBLISHED = "published"
# VALIDATION indicates that this invocation is from validation.
VALIDATION = "validation"
@classmethod
def value_of(cls, value: str):
"""
Get value of given mode.
:param value: mode value
:return: mode
"""
for mode in cls:
if mode.value == value:
return mode
raise ValueError(f"invalid invoke from value {value}")
def to_source(self) -> str:
"""
Get source of invoke from.
:return: source
"""
if self == InvokeFrom.WEB_APP:
return "web_app"
elif self == InvokeFrom.DEBUGGER:
return "dev"
elif self == InvokeFrom.EXPLORE:
return "explore_app"
elif self == InvokeFrom.TRIGGER:
return "trigger"
elif self == InvokeFrom.SERVICE_API:
return "api"
return "dev"
class ModelConfigWithCredentialsEntity(BaseModel):
    """
    Model Config With Credentials Entity.

    Bundles a resolved model selection (provider, model, schema) with the
    credentials and call parameters needed to invoke it.
    """

    provider: str
    model: str
    model_schema: AIModelEntity
    mode: str
    provider_model_bundle: ProviderModelBundle
    # Provider credentials for the invocation; shape depends on the provider.
    credentials: dict[str, Any] = Field(default_factory=dict)
    # Parameters passed to the model call.
    parameters: dict[str, Any] = Field(default_factory=dict)
    # Stop sequences for generation.
    stop: list[str] = Field(default_factory=list)

    # pydantic configs: allow field names starting with "model_"
    model_config = ConfigDict(protected_namespaces=())
class AppGenerateEntity(BaseModel):
    """
    App Generate Entity.

    Base payload describing one app generation task: who invoked it, with
    which inputs/files, and how results are delivered.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    task_id: str

    # app config
    app_config: Any = None
    file_upload_config: FileUploadConfig | None = None

    inputs: Mapping[str, Any]
    files: Sequence[File]

    # Unique identifier of the user initiating the execution.
    # This corresponds to `Account.id` for platform users or `EndUser.id` for end users.
    #
    # Note: The `user_id` field does not indicate whether the user is a platform user or an end user.
    user_id: str

    # extras
    # whether to stream the generation result
    stream: bool
    invoke_from: InvokeFrom

    # invoke call depth
    call_depth: int = 0

    # extra parameters, like: auto_generate_conversation_name
    extras: dict[str, Any] = Field(default_factory=dict)

    # tracing instance (forward ref; resolved by model_rebuild at module bottom)
    trace_manager: Optional["TraceQueueManager"] = None
class EasyUIBasedAppGenerateEntity(AppGenerateEntity):
    """
    Chat Application Generate Entity.

    Adds the easy-UI app config, resolved model configuration and user query.
    """

    # app config
    # NOTE(review): default None contradicts the non-optional annotation —
    # presumably kept so instances can be built without a config; confirm at call sites.
    app_config: EasyUIBasedAppConfig = None  # type: ignore
    model_conf: ModelConfigWithCredentialsEntity
    query: str = ""

    # pydantic configs: allow field names starting with "model_"
    model_config = ConfigDict(protected_namespaces=())
class ConversationAppGenerateEntity(AppGenerateEntity):
    """
    Base entity for conversation-based app generation.
    """

    conversation_id: str | None = None
    parent_message_id: str | None = Field(
        default=None,
        description=(
            "Starting from v0.9.0, parent_message_id is used to support message regeneration for internal chat API."
            "For service API, we need to ensure its forward compatibility, "
            "so passing in the parent_message_id as request arg is not supported for now. "
            "It needs to be set to UUID_NIL so that the subsequent processing will treat it as legacy messages."
        ),
    )

    @field_validator("parent_message_id")
    @classmethod
    def validate_parent_message_id(cls, v, info: ValidationInfo):
        """
        Reject any parent_message_id other than UUID_NIL for service-API calls.

        Relies on `invoke_from` (declared on the parent class, hence validated
        earlier) being present in `info.data`.
        """
        if info.data.get("invoke_from") == InvokeFrom.SERVICE_API and v != UUID_NIL:
            raise ValueError("parent_message_id should be UUID_NIL for service API")
        return v
class ChatAppGenerateEntity(ConversationAppGenerateEntity, EasyUIBasedAppGenerateEntity):
    """
    Chat Application Generate Entity.

    Conversation-aware easy-UI generation (combines both parent payloads).
    """

    pass


class CompletionAppGenerateEntity(EasyUIBasedAppGenerateEntity):
    """
    Completion Application Generate Entity.

    Single-shot easy-UI generation; carries no conversation fields.
    """

    pass


class AgentChatAppGenerateEntity(ConversationAppGenerateEntity, EasyUIBasedAppGenerateEntity):
    """
    Agent Chat Application Generate Entity.
    """

    pass
class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity):
    """
    Advanced Chat Application Generate Entity.
    """

    # app config
    # NOTE(review): default None contradicts the non-optional annotation; kept as-is.
    app_config: WorkflowUIBasedAppConfig = None  # type: ignore

    workflow_run_id: str | None = None
    query: str

    # The helper entities below are nested inside this class (the flattened
    # paste had left them — and the fields referencing them — at module scope,
    # where the self-referencing `single_iteration_run` field made no sense).
    class SingleIterationRunEntity(BaseModel):
        """
        Single Iteration Run Entity.
        """

        node_id: str
        inputs: Mapping

    # When set, only this single iteration node is executed.
    single_iteration_run: SingleIterationRunEntity | None = None

    class SingleLoopRunEntity(BaseModel):
        """
        Single Loop Run Entity.
        """

        node_id: str
        inputs: Mapping

    # When set, only this single loop node is executed.
    single_loop_run: SingleLoopRunEntity | None = None
class WorkflowAppGenerateEntity(AppGenerateEntity):
    """
    Workflow Application Generate Entity.
    """

    # app config
    # NOTE(review): default None contradicts the non-optional annotation; kept as-is.
    app_config: WorkflowUIBasedAppConfig = None  # type: ignore

    workflow_execution_id: str

    # The helper entities below are nested inside this class (the flattened
    # paste had duplicated them at module scope, colliding with the identically
    # named classes of AdvancedChatAppGenerateEntity).
    class SingleIterationRunEntity(BaseModel):
        """
        Single Iteration Run Entity.
        """

        node_id: str
        inputs: dict

    # When set, only this single iteration node is executed.
    single_iteration_run: SingleIterationRunEntity | None = None

    class SingleLoopRunEntity(BaseModel):
        """
        Single Loop Run Entity.
        """

        node_id: str
        inputs: dict

    # When set, only this single loop node is executed.
    single_loop_run: SingleLoopRunEntity | None = None
class RagPipelineGenerateEntity(WorkflowAppGenerateEntity):
    """
    RAG Pipeline Application Generate Entity.
    """

    # pipeline config
    pipeline_config: WorkflowUIBasedAppConfig

    # datasource driving the ingestion run
    datasource_type: str
    datasource_info: Mapping[str, Any]

    dataset_id: str
    # batch identifier for this ingestion run
    batch: str
    document_id: str | None = None
    original_document_id: str | None = None
    # NOTE(review): presumably the node to start execution from — confirm against the pipeline runner.
    start_node_id: str | None = None
# Import TraceQueueManager at runtime to resolve forward references
from core.ops.ops_trace_manager import TraceQueueManager

# Rebuild every model that uses forward references so pydantic resolves them.
for _entity_cls in (
    AppGenerateEntity,
    EasyUIBasedAppGenerateEntity,
    ConversationAppGenerateEntity,
    ChatAppGenerateEntity,
    CompletionAppGenerateEntity,
    AgentChatAppGenerateEntity,
    AdvancedChatAppGenerateEntity,
    WorkflowAppGenerateEntity,
    RagPipelineGenerateEntity,
):
    _entity_cls.model_rebuild()

View File

@@ -0,0 +1,511 @@
from collections.abc import Mapping, Sequence
from datetime import datetime
from enum import StrEnum, auto
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk
from core.rag.entities.citation_metadata import RetrievalSourceMetadata
from core.workflow.entities import AgentNodeStrategyInit
from core.workflow.enums import WorkflowNodeExecutionMetadataKey
from core.workflow.nodes import NodeType
class QueueEvent(StrEnum):
    """
    QueueEvent enum.

    String identifiers for the `event` field of AppQueueEvent payloads.
    """

    LLM_CHUNK = "llm_chunk"
    TEXT_CHUNK = "text_chunk"
    AGENT_MESSAGE = "agent_message"
    MESSAGE_REPLACE = "message_replace"
    MESSAGE_END = "message_end"
    ADVANCED_CHAT_MESSAGE_END = "advanced_chat_message_end"
    WORKFLOW_STARTED = "workflow_started"
    WORKFLOW_SUCCEEDED = "workflow_succeeded"
    WORKFLOW_FAILED = "workflow_failed"
    WORKFLOW_PARTIAL_SUCCEEDED = "workflow_partial_succeeded"
    ITERATION_START = "iteration_start"
    ITERATION_NEXT = "iteration_next"
    ITERATION_COMPLETED = "iteration_completed"
    LOOP_START = "loop_start"
    LOOP_NEXT = "loop_next"
    LOOP_COMPLETED = "loop_completed"
    NODE_STARTED = "node_started"
    NODE_SUCCEEDED = "node_succeeded"
    NODE_FAILED = "node_failed"
    NODE_EXCEPTION = "node_exception"
    RETRIEVER_RESOURCES = "retriever_resources"
    ANNOTATION_REPLY = "annotation_reply"
    AGENT_THOUGHT = "agent_thought"
    MESSAGE_FILE = "message_file"
    AGENT_LOG = "agent_log"
    ERROR = "error"
    PING = "ping"
    STOP = "stop"
    RETRY = "retry"
class AppQueueEvent(BaseModel):
    """
    QueueEvent abstract entity.

    Base class for all events published to the app task queue; subclasses
    pin `event` to their QueueEvent discriminator.
    """

    event: QueueEvent

    model_config = ConfigDict(arbitrary_types_allowed=True)
class QueueLLMChunkEvent(AppQueueEvent):
    """
    QueueLLMChunkEvent entity.
    Only for basic mode apps.
    """

    event: QueueEvent = QueueEvent.LLM_CHUNK
    chunk: LLMResultChunk


class QueueIterationStartEvent(AppQueueEvent):
    """
    QueueIterationStartEvent entity — an iteration node began executing.
    """

    event: QueueEvent = QueueEvent.ITERATION_START
    node_execution_id: str
    node_id: str
    node_type: NodeType
    node_title: str
    start_at: datetime
    node_run_index: int
    inputs: Mapping[str, object] = Field(default_factory=dict)
    metadata: Mapping[str, object] = Field(default_factory=dict)


class QueueIterationNextEvent(AppQueueEvent):
    """
    QueueIterationNextEvent entity — an iteration node advanced one step.
    """

    event: QueueEvent = QueueEvent.ITERATION_NEXT

    # index of the current iteration step
    index: int
    node_execution_id: str
    node_id: str
    node_type: NodeType
    node_title: str
    node_run_index: int
    output: Any = None  # output for the current iteration


class QueueIterationCompletedEvent(AppQueueEvent):
    """
    QueueIterationCompletedEvent entity — an iteration node finished.
    """

    event: QueueEvent = QueueEvent.ITERATION_COMPLETED
    node_execution_id: str
    node_id: str
    node_type: NodeType
    node_title: str
    start_at: datetime
    node_run_index: int
    inputs: Mapping[str, object] = Field(default_factory=dict)
    outputs: Mapping[str, object] = Field(default_factory=dict)
    metadata: Mapping[str, object] = Field(default_factory=dict)
    # number of steps executed
    steps: int = 0
    error: str | None = None
class QueueLoopStartEvent(AppQueueEvent):
    """
    QueueLoopStartEvent entity — a loop node began executing.
    """

    event: QueueEvent = QueueEvent.LOOP_START
    node_execution_id: str
    node_id: str
    node_type: NodeType
    node_title: str
    start_at: datetime
    node_run_index: int
    inputs: Mapping[str, object] = Field(default_factory=dict)
    metadata: Mapping[str, object] = Field(default_factory=dict)


class QueueLoopNextEvent(AppQueueEvent):
    """
    QueueLoopNextEvent entity — a loop node advanced one step.
    """

    event: QueueEvent = QueueEvent.LOOP_NEXT

    # index of the current loop step
    index: int
    node_execution_id: str
    node_id: str
    node_type: NodeType
    node_title: str
    node_run_index: int
    output: Any = None  # output for the current loop


class QueueLoopCompletedEvent(AppQueueEvent):
    """
    QueueLoopCompletedEvent entity — a loop node finished.
    """

    event: QueueEvent = QueueEvent.LOOP_COMPLETED
    node_execution_id: str
    node_id: str
    node_type: NodeType
    node_title: str
    start_at: datetime
    node_run_index: int
    inputs: Mapping[str, object] = Field(default_factory=dict)
    outputs: Mapping[str, object] = Field(default_factory=dict)
    metadata: Mapping[str, object] = Field(default_factory=dict)
    # number of steps executed
    steps: int = 0
    error: str | None = None


class QueueTextChunkEvent(AppQueueEvent):
    """
    QueueTextChunkEvent entity — a chunk of streamed text.
    """

    event: QueueEvent = QueueEvent.TEXT_CHUNK
    text: str
    from_variable_selector: list[str] | None = None
    # from variable selector
    in_iteration_id: str | None = None
    # iteration id if node is in iteration
    in_loop_id: str | None = None
    # loop id if node is in loop
class QueueAgentMessageEvent(AppQueueEvent):
    """
    QueueAgentMessageEvent entity — an agent-generated LLM chunk.
    """

    event: QueueEvent = QueueEvent.AGENT_MESSAGE
    chunk: LLMResultChunk


class QueueMessageReplaceEvent(AppQueueEvent):
    """
    QueueMessageReplaceEvent entity — replace the answer text so far.
    """

    class MessageReplaceReason(StrEnum):
        """
        Reason for message replace event.
        """

        OUTPUT_MODERATION = "output_moderation"

    event: QueueEvent = QueueEvent.MESSAGE_REPLACE
    text: str
    reason: str


class QueueRetrieverResourcesEvent(AppQueueEvent):
    """
    QueueRetrieverResourcesEvent entity — citation/retrieval metadata.
    """

    event: QueueEvent = QueueEvent.RETRIEVER_RESOURCES
    retriever_resources: Sequence[RetrievalSourceMetadata]
    # iteration id if node is in iteration
    in_iteration_id: str | None = None
    # loop id if node is in loop
    in_loop_id: str | None = None


class QueueAnnotationReplyEvent(AppQueueEvent):
    """
    QueueAnnotationReplyEvent entity — an annotation answered the query.
    """

    event: QueueEvent = QueueEvent.ANNOTATION_REPLY
    message_annotation_id: str
class QueueMessageEndEvent(AppQueueEvent):
    """
    QueueMessageEndEvent entity — generation of one message finished.
    """

    event: QueueEvent = QueueEvent.MESSAGE_END
    llm_result: LLMResult | None = None


class QueueAdvancedChatMessageEndEvent(AppQueueEvent):
    """
    QueueAdvancedChatMessageEndEvent entity.
    """

    event: QueueEvent = QueueEvent.ADVANCED_CHAT_MESSAGE_END


class QueueWorkflowStartedEvent(AppQueueEvent):
    """QueueWorkflowStartedEvent entity."""

    event: QueueEvent = QueueEvent.WORKFLOW_STARTED


class QueueWorkflowSucceededEvent(AppQueueEvent):
    """
    QueueWorkflowSucceededEvent entity.
    """

    event: QueueEvent = QueueEvent.WORKFLOW_SUCCEEDED
    outputs: Mapping[str, object] = Field(default_factory=dict)


class QueueWorkflowFailedEvent(AppQueueEvent):
    """
    QueueWorkflowFailedEvent entity.
    """

    event: QueueEvent = QueueEvent.WORKFLOW_FAILED
    error: str
    exceptions_count: int


class QueueWorkflowPartialSuccessEvent(AppQueueEvent):
    """
    QueueWorkflowPartialSuccessEvent entity — finished with some node failures.
    """

    event: QueueEvent = QueueEvent.WORKFLOW_PARTIAL_SUCCEEDED
    exceptions_count: int
    outputs: Mapping[str, object] = Field(default_factory=dict)
class QueueNodeStartedEvent(AppQueueEvent):
    """
    QueueNodeStartedEvent entity — a workflow node began executing.
    """

    event: QueueEvent = QueueEvent.NODE_STARTED
    node_execution_id: str
    node_id: str
    node_title: str
    node_type: NodeType
    node_run_index: int = 1  # FIXME(-LAN-): may not used
    in_iteration_id: str | None = None
    in_loop_id: str | None = None
    start_at: datetime
    agent_strategy: AgentNodeStrategyInit | None = None

    # FIXME(-LAN-): only for ToolNode, need to refactor
    provider_type: str  # should be a core.tools.entities.tool_entities.ToolProviderType
    provider_id: str


class QueueNodeSucceededEvent(AppQueueEvent):
    """
    QueueNodeSucceededEvent entity — a workflow node finished successfully.
    """

    event: QueueEvent = QueueEvent.NODE_SUCCEEDED
    node_execution_id: str
    node_id: str
    node_type: NodeType
    # iteration id if node is in iteration
    in_iteration_id: str | None = None
    # loop id if node is in loop
    in_loop_id: str | None = None
    start_at: datetime
    inputs: Mapping[str, object] = Field(default_factory=dict)
    process_data: Mapping[str, object] = Field(default_factory=dict)
    outputs: Mapping[str, object] = Field(default_factory=dict)
    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
    error: str | None = None


class QueueAgentLogEvent(AppQueueEvent):
    """
    QueueAgentLogEvent entity — a structured agent log entry.
    """

    event: QueueEvent = QueueEvent.AGENT_LOG
    id: str
    label: str
    node_execution_id: str
    # id of the parent log entry, if nested
    parent_id: str | None = None
    error: str | None = None
    status: str
    data: Mapping[str, Any]
    metadata: Mapping[str, object] = Field(default_factory=dict)
    node_id: str


class QueueNodeRetryEvent(QueueNodeStartedEvent):
    """QueueNodeRetryEvent entity — carries the started-event fields plus the failed attempt's results."""

    event: QueueEvent = QueueEvent.RETRY
    inputs: Mapping[str, object] = Field(default_factory=dict)
    process_data: Mapping[str, object] = Field(default_factory=dict)
    outputs: Mapping[str, object] = Field(default_factory=dict)
    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
    error: str
    retry_index: int  # retry index
class QueueNodeExceptionEvent(AppQueueEvent):
    """
    QueueNodeExceptionEvent entity — a node raised but the run continues.
    """

    event: QueueEvent = QueueEvent.NODE_EXCEPTION
    node_execution_id: str
    node_id: str
    node_type: NodeType
    # iteration id if node is in iteration
    in_iteration_id: str | None = None
    # loop id if node is in loop
    in_loop_id: str | None = None
    start_at: datetime
    inputs: Mapping[str, object] = Field(default_factory=dict)
    process_data: Mapping[str, object] = Field(default_factory=dict)
    outputs: Mapping[str, object] = Field(default_factory=dict)
    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
    error: str


class QueueNodeFailedEvent(AppQueueEvent):
    """
    QueueNodeFailedEvent entity — a node failed.
    """

    event: QueueEvent = QueueEvent.NODE_FAILED
    node_execution_id: str
    node_id: str
    node_type: NodeType
    # iteration id if node is in iteration
    in_iteration_id: str | None = None
    # loop id if node is in loop
    in_loop_id: str | None = None
    start_at: datetime
    inputs: Mapping[str, object] = Field(default_factory=dict)
    process_data: Mapping[str, object] = Field(default_factory=dict)
    outputs: Mapping[str, object] = Field(default_factory=dict)
    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
    error: str


class QueueAgentThoughtEvent(AppQueueEvent):
    """
    QueueAgentThoughtEvent entity.
    """

    event: QueueEvent = QueueEvent.AGENT_THOUGHT
    agent_thought_id: str


class QueueMessageFileEvent(AppQueueEvent):
    """
    QueueMessageFileEvent entity.
    """

    event: QueueEvent = QueueEvent.MESSAGE_FILE
    message_file_id: str
class QueueErrorEvent(AppQueueEvent):
    """
    QueueErrorEvent entity.
    """

    event: QueueEvent = QueueEvent.ERROR
    # the underlying error object; typed Any so arbitrary exceptions can be carried
    error: Any = None


class QueuePingEvent(AppQueueEvent):
    """
    QueuePingEvent entity — keep-alive with no payload.
    """

    event: QueueEvent = QueueEvent.PING
class QueueStopEvent(AppQueueEvent):
"""
QueueStopEvent entity
"""
class StopBy(StrEnum):
"""
Stop by enum
"""
USER_MANUAL = auto()
ANNOTATION_REPLY = auto()
OUTPUT_MODERATION = auto()
INPUT_MODERATION = auto()
event: QueueEvent = QueueEvent.STOP
stopped_by: StopBy
def get_stop_reason(self) -> str:
"""
To stop reason
"""
reason_mapping = {
QueueStopEvent.StopBy.USER_MANUAL: "Stopped by user.",
QueueStopEvent.StopBy.ANNOTATION_REPLY: "Stopped by annotation reply.",
QueueStopEvent.StopBy.OUTPUT_MODERATION: "Stopped by output moderation.",
QueueStopEvent.StopBy.INPUT_MODERATION: "Stopped by input moderation.",
}
return reason_mapping.get(self.stopped_by, "Stopped by unknown reason.")
class QueueMessage(BaseModel):
    """
    QueueMessage abstract entity.

    Envelope carrying one AppQueueEvent through the task queue.
    """

    task_id: str
    app_mode: str
    event: AppQueueEvent


class MessageQueueMessage(QueueMessage):
    """
    MessageQueueMessage entity — envelope for message-based apps.
    """

    message_id: str
    conversation_id: str


class WorkflowQueueMessage(QueueMessage):
    """
    WorkflowQueueMessage entity — envelope for workflow apps.
    """

    pass

View File

@@ -0,0 +1,14 @@
from typing import Any
from pydantic import BaseModel
class RagPipelineInvokeEntity(BaseModel):
    """
    Invocation payload for running a RAG pipeline workflow.
    """

    pipeline_id: str
    # NOTE(review): a plain dict — presumably a dumped generate entity; confirm against the producer.
    application_generate_entity: dict[str, Any]
    user_id: str
    tenant_id: str
    workflow_id: str
    # whether to stream results
    streaming: bool
    workflow_execution_id: str | None = None
    workflow_thread_pool_id: str | None = None

View File

@@ -0,0 +1,756 @@
from collections.abc import Mapping, Sequence
from enum import StrEnum
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage
from core.rag.entities.citation_metadata import RetrievalSourceMetadata
from core.workflow.entities import AgentNodeStrategyInit
from core.workflow.enums import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
class AnnotationReplyAccount(BaseModel):
    """Account that authored an annotation reply."""

    id: str
    name: str


class AnnotationReply(BaseModel):
    """An annotation that answered the query, with its author."""

    id: str
    account: AnnotationReplyAccount


class TaskStateMetadata(BaseModel):
    """Metadata accumulated while a task runs: annotation hit, citations, usage."""

    annotation_reply: AnnotationReply | None = None
    retriever_resources: Sequence[RetrievalSourceMetadata] = Field(default_factory=list)
    usage: LLMUsage | None = None
class TaskState(BaseModel):
    """
    TaskState entity.
    """

    metadata: TaskStateMetadata = Field(default_factory=TaskStateMetadata)


class EasyUITaskState(TaskState):
    """
    EasyUITaskState entity — adds the accumulated LLM result.
    """

    llm_result: LLMResult


class WorkflowTaskState(TaskState):
    """
    WorkflowTaskState entity — adds streamed-answer bookkeeping.
    """

    # the answer text accumulated so far
    answer: str = ""
    # timestamps (seconds) of the first/last streamed token, if streaming
    first_token_time: float | None = None
    last_token_time: float | None = None
    is_streaming_response: bool = False
class StreamEvent(StrEnum):
    """
    Stream event.

    String identifiers for the `event` field of StreamResponse payloads.
    """

    PING = "ping"
    ERROR = "error"
    MESSAGE = "message"
    MESSAGE_END = "message_end"
    TTS_MESSAGE = "tts_message"
    TTS_MESSAGE_END = "tts_message_end"
    MESSAGE_FILE = "message_file"
    MESSAGE_REPLACE = "message_replace"
    AGENT_THOUGHT = "agent_thought"
    AGENT_MESSAGE = "agent_message"
    WORKFLOW_STARTED = "workflow_started"
    WORKFLOW_FINISHED = "workflow_finished"
    NODE_STARTED = "node_started"
    NODE_FINISHED = "node_finished"
    NODE_RETRY = "node_retry"
    ITERATION_STARTED = "iteration_started"
    ITERATION_NEXT = "iteration_next"
    ITERATION_COMPLETED = "iteration_completed"
    LOOP_STARTED = "loop_started"
    LOOP_NEXT = "loop_next"
    LOOP_COMPLETED = "loop_completed"
    TEXT_CHUNK = "text_chunk"
    TEXT_REPLACE = "text_replace"
    AGENT_LOG = "agent_log"
class StreamResponse(BaseModel):
    """
    StreamResponse entity.

    Base class for every server-streamed response; subclasses pin `event`.
    """

    event: StreamEvent
    task_id: str


class ErrorStreamResponse(StreamResponse):
    """
    ErrorStreamResponse entity.
    """

    event: StreamEvent = StreamEvent.ERROR
    # the original exception; requires arbitrary_types_allowed below
    err: Exception

    model_config = ConfigDict(arbitrary_types_allowed=True)
class MessageStreamResponse(StreamResponse):
    """
    MessageStreamResponse entity — a chunk of answer text.
    """

    event: StreamEvent = StreamEvent.MESSAGE
    id: str
    answer: str
    from_variable_selector: list[str] | None = None


class MessageAudioStreamResponse(StreamResponse):
    """
    MessageAudioStreamResponse entity — a chunk of TTS audio.
    """

    event: StreamEvent = StreamEvent.TTS_MESSAGE
    audio: str


class MessageAudioEndStreamResponse(StreamResponse):
    """
    MessageAudioEndStreamResponse entity — end of the TTS audio stream.
    """

    event: StreamEvent = StreamEvent.TTS_MESSAGE_END
    audio: str


class MessageEndStreamResponse(StreamResponse):
    """
    MessageEndStreamResponse entity.
    """

    event: StreamEvent = StreamEvent.MESSAGE_END
    id: str
    metadata: Mapping[str, object] = Field(default_factory=dict)
    files: Sequence[Mapping[str, Any]] | None = None


class MessageFileStreamResponse(StreamResponse):
    """
    MessageFileStreamResponse entity — a file attached to the message.
    """

    event: StreamEvent = StreamEvent.MESSAGE_FILE
    id: str
    type: str
    belongs_to: str
    url: str


class MessageReplaceStreamResponse(StreamResponse):
    """
    MessageReplaceStreamResponse entity — replace the answer shown so far.
    """

    event: StreamEvent = StreamEvent.MESSAGE_REPLACE
    answer: str
    reason: str
class AgentThoughtStreamResponse(StreamResponse):
    """
    AgentThoughtStreamResponse entity — one step of agent reasoning.
    """

    event: StreamEvent = StreamEvent.AGENT_THOUGHT
    id: str
    # position of this thought within the message
    position: int
    thought: str | None = None
    observation: str | None = None
    tool: str | None = None
    tool_labels: Mapping[str, object] = Field(default_factory=dict)
    tool_input: str | None = None
    message_files: list[str] | None = None


class AgentMessageStreamResponse(StreamResponse):
    """
    AgentMessageStreamResponse entity — a chunk of agent answer text.
    """

    event: StreamEvent = StreamEvent.AGENT_MESSAGE
    id: str
    answer: str
class WorkflowStartStreamResponse(StreamResponse):
    """
    WorkflowStartStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity — identifies the run and echoes its inputs.
        """

        id: str
        workflow_id: str
        inputs: Mapping[str, Any]
        created_at: int

    event: StreamEvent = StreamEvent.WORKFLOW_STARTED
    workflow_run_id: str
    data: Data
class WorkflowFinishStreamResponse(StreamResponse):
    """
    WorkflowFinishStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity — final status, outputs and run statistics.
        """

        id: str
        workflow_id: str
        status: str
        outputs: Mapping[str, Any] | None = None
        error: str | None = None
        elapsed_time: float
        total_tokens: int
        total_steps: int
        created_by: Mapping[str, object] = Field(default_factory=dict)
        created_at: int
        finished_at: int
        exceptions_count: int | None = 0
        # mutable default is safe here: pydantic deep-copies field defaults per instance
        files: Sequence[Mapping[str, Any]] | None = []

    event: StreamEvent = StreamEvent.WORKFLOW_FINISHED
    workflow_run_id: str
    data: Data
class NodeStartStreamResponse(StreamResponse):
    """
    NodeStartStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        index: int
        predecessor_node_id: str | None = None
        inputs: Mapping[str, Any] | None = None
        # whether `inputs` was truncated before streaming
        inputs_truncated: bool = False
        created_at: int
        extras: dict[str, object] = Field(default_factory=dict)
        iteration_id: str | None = None
        loop_id: str | None = None
        agent_strategy: AgentNodeStrategyInit | None = None

    event: StreamEvent = StreamEvent.NODE_STARTED
    workflow_run_id: str
    data: Data

    def to_ignore_detail_dict(self):
        """
        Serialize this event with payload details blanked out
        (inputs -> None, extras -> {}), keeping identifiers intact.
        """
        return {
            "event": self.event.value,
            "task_id": self.task_id,
            "workflow_run_id": self.workflow_run_id,
            "data": {
                "id": self.data.id,
                "node_id": self.data.node_id,
                "node_type": self.data.node_type,
                "title": self.data.title,
                "index": self.data.index,
                "predecessor_node_id": self.data.predecessor_node_id,
                "inputs": None,
                "created_at": self.data.created_at,
                "extras": {},
                "iteration_id": self.data.iteration_id,
                "loop_id": self.data.loop_id,
            },
        }
class NodeFinishStreamResponse(StreamResponse):
    """
    NodeFinishStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity — node result payload with truncation flags.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        index: int
        predecessor_node_id: str | None = None
        inputs: Mapping[str, Any] | None = None
        inputs_truncated: bool = False
        process_data: Mapping[str, Any] | None = None
        process_data_truncated: bool = False
        outputs: Mapping[str, Any] | None = None
        # default False to agree with every other *_truncated flag in this file,
        # including the otherwise field-identical NodeRetryStreamResponse.Data
        # (was inconsistently True)
        outputs_truncated: bool = False
        status: str
        error: str | None = None
        elapsed_time: float
        execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
        created_at: int
        finished_at: int
        # mutable default is safe: pydantic deep-copies field defaults per instance
        files: Sequence[Mapping[str, Any]] | None = []
        iteration_id: str | None = None
        loop_id: str | None = None

    event: StreamEvent = StreamEvent.NODE_FINISHED
    workflow_run_id: str
    data: Data

    def to_ignore_detail_dict(self):
        """
        Serialize this event with payload details blanked out (inputs,
        process_data, outputs, error, execution_metadata, files), keeping
        identifiers, status and timing intact.
        """
        return {
            "event": self.event.value,
            "task_id": self.task_id,
            "workflow_run_id": self.workflow_run_id,
            "data": {
                "id": self.data.id,
                "node_id": self.data.node_id,
                "node_type": self.data.node_type,
                "title": self.data.title,
                "index": self.data.index,
                "predecessor_node_id": self.data.predecessor_node_id,
                "inputs": None,
                "process_data": None,
                "outputs": None,
                "status": self.data.status,
                "error": None,
                "elapsed_time": self.data.elapsed_time,
                "execution_metadata": None,
                "created_at": self.data.created_at,
                "finished_at": self.data.finished_at,
                "files": [],
                "iteration_id": self.data.iteration_id,
                "loop_id": self.data.loop_id,
            },
        }
class NodeRetryStreamResponse(StreamResponse):
    """
    NodeRetryStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity — failed attempt's result payload plus retry index.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        index: int
        predecessor_node_id: str | None = None
        inputs: Mapping[str, Any] | None = None
        inputs_truncated: bool = False
        process_data: Mapping[str, Any] | None = None
        process_data_truncated: bool = False
        outputs: Mapping[str, Any] | None = None
        outputs_truncated: bool = False
        status: str
        error: str | None = None
        elapsed_time: float
        execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
        created_at: int
        finished_at: int
        # mutable default is safe: pydantic deep-copies field defaults per instance
        files: Sequence[Mapping[str, Any]] | None = []
        iteration_id: str | None = None
        loop_id: str | None = None
        retry_index: int = 0

    event: StreamEvent = StreamEvent.NODE_RETRY
    workflow_run_id: str
    data: Data

    def to_ignore_detail_dict(self):
        """
        Serialize this event with payload details blanked out (inputs,
        process_data, outputs, error, execution_metadata, files), keeping
        identifiers, status, timing and retry_index intact.
        """
        return {
            "event": self.event.value,
            "task_id": self.task_id,
            "workflow_run_id": self.workflow_run_id,
            "data": {
                "id": self.data.id,
                "node_id": self.data.node_id,
                "node_type": self.data.node_type,
                "title": self.data.title,
                "index": self.data.index,
                "predecessor_node_id": self.data.predecessor_node_id,
                "inputs": None,
                "process_data": None,
                "outputs": None,
                "status": self.data.status,
                "error": None,
                "elapsed_time": self.data.elapsed_time,
                "execution_metadata": None,
                "created_at": self.data.created_at,
                "finished_at": self.data.finished_at,
                "files": [],
                "iteration_id": self.data.iteration_id,
                "loop_id": self.data.loop_id,
                "retry_index": self.data.retry_index,
            },
        }
class IterationNodeStartStreamResponse(StreamResponse):
    """
    IterationNodeStartStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        created_at: int
        extras: dict = Field(default_factory=dict)
        # mutable defaults are safe: pydantic deep-copies field defaults per instance
        metadata: Mapping = {}
        inputs: Mapping = {}
        inputs_truncated: bool = False

    event: StreamEvent = StreamEvent.ITERATION_STARTED
    workflow_run_id: str
    data: Data
class IterationNodeNextStreamResponse(StreamResponse):
    """
    IterationNodeNextStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        # index of the iteration step about to run
        index: int
        created_at: int
        extras: dict = Field(default_factory=dict)

    event: StreamEvent = StreamEvent.ITERATION_NEXT
    workflow_run_id: str
    data: Data
class IterationNodeCompletedStreamResponse(StreamResponse):
    """
    IterationNodeCompletedStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity — final iteration result and statistics.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        outputs: Mapping | None = None
        outputs_truncated: bool = False
        created_at: int
        extras: dict | None = None
        inputs: Mapping | None = None
        inputs_truncated: bool = False
        status: WorkflowNodeExecutionStatus
        error: str | None = None
        elapsed_time: float
        total_tokens: int
        execution_metadata: Mapping[str, object] = Field(default_factory=dict)
        finished_at: int
        steps: int

    event: StreamEvent = StreamEvent.ITERATION_COMPLETED
    workflow_run_id: str
    data: Data
class LoopNodeStartStreamResponse(StreamResponse):
    """
    LoopNodeStartStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        created_at: int
        extras: dict = Field(default_factory=dict)
        # mutable defaults are safe: pydantic deep-copies field defaults per instance
        metadata: Mapping = {}
        inputs: Mapping = {}
        inputs_truncated: bool = False

    event: StreamEvent = StreamEvent.LOOP_STARTED
    workflow_run_id: str
    data: Data
class LoopNodeNextStreamResponse(StreamResponse):
    """
    LoopNodeNextStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        # index of the loop step about to run
        index: int
        created_at: int
        # output of the previous loop step, if any
        pre_loop_output: Any = None
        extras: Mapping[str, object] = Field(default_factory=dict)

    event: StreamEvent = StreamEvent.LOOP_NEXT
    workflow_run_id: str
    data: Data
class LoopNodeCompletedStreamResponse(StreamResponse):
    """
    LoopNodeCompletedStreamResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity — final loop result and statistics.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        outputs: Mapping | None = None
        outputs_truncated: bool = False
        created_at: int
        extras: dict | None = None
        inputs: Mapping | None = None
        inputs_truncated: bool = False
        status: WorkflowNodeExecutionStatus
        error: str | None = None
        elapsed_time: float
        total_tokens: int
        execution_metadata: Mapping[str, object] = Field(default_factory=dict)
        finished_at: int
        steps: int

    event: StreamEvent = StreamEvent.LOOP_COMPLETED
    workflow_run_id: str
    data: Data
class TextChunkStreamResponse(StreamResponse):
    """
    TextChunkStreamResponse entity — a chunk of streamed text.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        text: str
        from_variable_selector: list[str] | None = None

    event: StreamEvent = StreamEvent.TEXT_CHUNK
    data: Data


class TextReplaceStreamResponse(StreamResponse):
    """
    TextReplaceStreamResponse entity — replace the text shown so far.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        text: str

    event: StreamEvent = StreamEvent.TEXT_REPLACE
    data: Data
class PingStreamResponse(StreamResponse):
    """
    PingStreamResponse entity — keep-alive with no payload.
    """

    event: StreamEvent = StreamEvent.PING


class AppStreamResponse(BaseModel):
    """
    AppStreamResponse entity.

    Wraps a StreamResponse with app-level context for delivery.
    """

    stream_response: StreamResponse


class ChatbotAppStreamResponse(AppStreamResponse):
    """
    ChatbotAppStreamResponse entity.
    """

    conversation_id: str
    message_id: str
    created_at: int


class CompletionAppStreamResponse(AppStreamResponse):
    """
    CompletionAppStreamResponse entity.
    """

    message_id: str
    created_at: int


class WorkflowAppStreamResponse(AppStreamResponse):
    """
    WorkflowAppStreamResponse entity.
    """

    workflow_run_id: str | None = None
class AppBlockingResponse(BaseModel):
    """
    AppBlockingResponse entity.

    Base class for non-streaming (blocking) app responses.
    """

    task_id: str


class ChatbotAppBlockingResponse(AppBlockingResponse):
    """
    ChatbotAppBlockingResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        id: str
        mode: str
        conversation_id: str
        message_id: str
        answer: str
        metadata: Mapping[str, object] = Field(default_factory=dict)
        created_at: int

    data: Data


class CompletionAppBlockingResponse(AppBlockingResponse):
    """
    CompletionAppBlockingResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        id: str
        mode: str
        message_id: str
        answer: str
        metadata: Mapping[str, object] = Field(default_factory=dict)
        created_at: int

    data: Data
class WorkflowAppBlockingResponse(AppBlockingResponse):
    """
    WorkflowAppBlockingResponse entity.
    """

    class Data(BaseModel):
        """
        Data entity — final workflow result and run statistics.
        """

        id: str
        workflow_id: str
        status: str
        outputs: Mapping[str, Any] | None = None
        error: str | None = None
        elapsed_time: float
        total_tokens: int
        total_steps: int
        created_at: int
        finished_at: int

    workflow_run_id: str
    data: Data
class AgentLogStreamResponse(StreamResponse):
    """
    AgentLogStreamResponse entity — a structured agent log entry.
    """

    class Data(BaseModel):
        """
        Data entity.
        """

        node_execution_id: str
        id: str
        label: str
        # id of the parent log entry, if nested
        parent_id: str | None = None
        error: str | None = None
        status: str
        data: Mapping[str, Any]
        metadata: Mapping[str, object] = Field(default_factory=dict)
        node_id: str

    event: StreamEvent = StreamEvent.AGENT_LOG
    data: Data