dify/api/core/trigger/debug/event_bus.py (new file)
@@ -0,0 +1,124 @@
import hashlib
import logging
from typing import TypeVar

from redis import RedisError

from core.trigger.debug.events import BaseDebugEvent
from extensions.ext_redis import redis_client

logger = logging.getLogger(__name__)

TRIGGER_DEBUG_EVENT_TTL = 300

TTriggerDebugEvent = TypeVar("TTriggerDebugEvent", bound="BaseDebugEvent")


class TriggerDebugEventBus:
    """
    Unified Redis-based trigger debug service with polling support.

    Uses {tenant_id} hash tags for Redis Cluster compatibility.
    Supports multiple event types through a generic dispatch/poll interface.
    """

    # LUA_SELECT: atomically poll for an event or register for future dispatch
    # KEYS[1] = trigger_debug_inbox:{tenant_id}:{address_id}
    # KEYS[2] = trigger_debug_waiting_pool:{tenant_id}:...
    # ARGV[1] = address_id
    LUA_SELECT = (
        "local v=redis.call('GET',KEYS[1]);"
        "if v then redis.call('DEL',KEYS[1]);return v end;"
        "redis.call('SADD',KEYS[2],ARGV[1]);"
        f"redis.call('EXPIRE',KEYS[2],{TRIGGER_DEBUG_EVENT_TTL});"
        "return false"
    )

    # LUA_DISPATCH: dispatch an event to all waiting addresses
    # KEYS[1] = trigger_debug_waiting_pool:{tenant_id}:...
    # ARGV[1] = tenant_id
    # ARGV[2] = event_json
    LUA_DISPATCH = (
        "local a=redis.call('SMEMBERS',KEYS[1]);"
        "if #a==0 then return 0 end;"
        "redis.call('DEL',KEYS[1]);"
        "for i=1,#a do "
        f"redis.call('SET','trigger_debug_inbox:'..ARGV[1]..':'..a[i],ARGV[2],'EX',{TRIGGER_DEBUG_EVENT_TTL});"
        "end;"
        "return #a"
    )
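
    # Key layout implied by the two scripts above:
    #   inbox:        trigger_debug_inbox:<tenant_id>:<address_id>  (event JSON, EX 300s)
    #   waiting pool: <pool_key>, a SET of address_ids, EXPIREd on each registration
    # Because LUA_SELECT runs atomically, a poller either consumes a pending event or
    # is parked in the pool; LUA_DISPATCH drains the pool in one step and fans the
    # event out to one inbox per address, so each waiting debugger sees it once.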

    @classmethod
    def dispatch(
        cls,
        tenant_id: str,
        event: BaseDebugEvent,
        pool_key: str,
    ) -> int:
        """
        Dispatch an event to all addresses waiting in the pool.

        Args:
            tenant_id: Tenant ID used in the Redis hash tag
            event: Event object to dispatch
            pool_key: Pool key (generate with build_webhook_pool_key(...) or
                build_plugin_pool_key(...) from core.trigger.debug.events)

        Returns:
            Number of addresses the event was dispatched to
        """
        event_data = event.model_dump_json()
        try:
            result = redis_client.eval(
                cls.LUA_DISPATCH,
                1,
                pool_key,
                tenant_id,
                event_data,
            )
            return int(result)
        except RedisError:
            logger.exception("Failed to dispatch event to pool: %s", pool_key)
            return 0

    @classmethod
    def poll(
        cls,
        event_type: type[TTriggerDebugEvent],
        pool_key: str,
        tenant_id: str,
        user_id: str,
        app_id: str,
        node_id: str,
    ) -> TTriggerDebugEvent | None:
        """
        Poll for an event, or register with the waiting pool.

        If an event is available in the inbox, return it immediately.
        Otherwise, register this address in the waiting pool for a future dispatch.

        Args:
            event_type: Event class used for deserialization and type safety
            pool_key: Pool key (generate with build_webhook_pool_key(...) or
                build_plugin_pool_key(...) from core.trigger.debug.events)
            tenant_id: Tenant ID
            user_id: User ID for address calculation
            app_id: App ID for address calculation
            node_id: Node ID for address calculation

        Returns:
            Event object if available, None otherwise
        """
        address_id: str = hashlib.sha256(f"{user_id}|{app_id}|{node_id}".encode()).hexdigest()
        address: str = f"trigger_debug_inbox:{tenant_id}:{address_id}"

        try:
            event_data = redis_client.eval(
                cls.LUA_SELECT,
                2,
                address,
                pool_key,
                address_id,
            )
            return event_type.model_validate_json(event_data) if event_data else None
        except RedisError:
            logger.exception("Failed to poll event from pool: %s", pool_key)
            return None
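
A minimal round trip through the bus (a sketch, not part of the commit). It assumes a Redis reachable through redis_client and uses the WebhookDebugEvent model and key builder from events.py below; all literal IDs are placeholders.

    from core.trigger.debug.event_bus import TriggerDebugEventBus
    from core.trigger.debug.events import WebhookDebugEvent, build_webhook_pool_key

    pool_key = build_webhook_pool_key(tenant_id="t1", app_id="a1", node_id="n1")

    # First poll: the inbox is empty, so this address is parked in the waiting pool.
    assert TriggerDebugEventBus.poll(
        event_type=WebhookDebugEvent, pool_key=pool_key,
        tenant_id="t1", user_id="u1", app_id="a1", node_id="n1",
    ) is None

    # A webhook arrival fans out to every parked address (one here).
    event = WebhookDebugEvent(timestamp=1700000000, request_id="req-1", node_id="n1")
    assert TriggerDebugEventBus.dispatch(tenant_id="t1", event=event, pool_key=pool_key) == 1

    # The next poll consumes the inbox entry and deserializes it.
    received = TriggerDebugEventBus.poll(
        event_type=WebhookDebugEvent, pool_key=pool_key,
        tenant_id="t1", user_id="u1", app_id="a1", node_id="n1",
    )
    assert received is not None and received.request_id == "req-1"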

dify/api/core/trigger/debug/event_selectors.py (new file)
@@ -0,0 +1,243 @@
"""Trigger debug service supporting plugin, webhook, and schedule trigger debugging in draft workflows."""

import hashlib
import logging
import time
from abc import ABC, abstractmethod
from collections.abc import Mapping
from datetime import datetime
from typing import Any

from pydantic import BaseModel

from core.plugin.entities.request import TriggerInvokeEventResponse
from core.trigger.debug.event_bus import TriggerDebugEventBus
from core.trigger.debug.events import (
    PluginTriggerDebugEvent,
    ScheduleDebugEvent,
    WebhookDebugEvent,
    build_plugin_pool_key,
    build_webhook_pool_key,
)
from core.workflow.enums import NodeType
from core.workflow.nodes.trigger_plugin.entities import TriggerEventNodeData
from core.workflow.nodes.trigger_schedule.entities import ScheduleConfig
from extensions.ext_redis import redis_client
from libs.datetime_utils import ensure_naive_utc, naive_utc_now
from libs.schedule_utils import calculate_next_run_at
from models.model import App
from models.provider_ids import TriggerProviderID
from models.workflow import Workflow

logger = logging.getLogger(__name__)


class TriggerDebugEvent(BaseModel):
    workflow_args: Mapping[str, Any]
    node_id: str


class TriggerDebugEventPoller(ABC):
    app_id: str
    user_id: str
    tenant_id: str
    node_config: Mapping[str, Any]
    node_id: str

    def __init__(self, tenant_id: str, user_id: str, app_id: str, node_config: Mapping[str, Any], node_id: str):
        self.tenant_id = tenant_id
        self.user_id = user_id
        self.app_id = app_id
        self.node_config = node_config
        self.node_id = node_id

    @abstractmethod
    def poll(self) -> TriggerDebugEvent | None:
        raise NotImplementedError
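
# Illustrative only (not part of this commit): the smallest concrete poller the
# ABC admits. Subclasses receive the node's identity and config via __init__ and
# implement poll() to return a ready TriggerDebugEvent, or None to keep polling.
class _NullDebugEventPoller(TriggerDebugEventPoller):
    def poll(self) -> TriggerDebugEvent | None:
        return None  # no event source; the caller will simply poll again later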

class PluginTriggerDebugEventPoller(TriggerDebugEventPoller):
    def poll(self) -> TriggerDebugEvent | None:
        from services.trigger.trigger_service import TriggerService

        plugin_trigger_data = TriggerEventNodeData.model_validate(self.node_config.get("data", {}))
        provider_id = TriggerProviderID(plugin_trigger_data.provider_id)
        pool_key: str = build_plugin_pool_key(
            name=plugin_trigger_data.event_name,
            provider_id=str(provider_id),
            tenant_id=self.tenant_id,
            subscription_id=plugin_trigger_data.subscription_id,
        )
        plugin_trigger_event: PluginTriggerDebugEvent | None = TriggerDebugEventBus.poll(
            event_type=PluginTriggerDebugEvent,
            pool_key=pool_key,
            tenant_id=self.tenant_id,
            user_id=self.user_id,
            app_id=self.app_id,
            node_id=self.node_id,
        )
        if not plugin_trigger_event:
            return None

        trigger_event_response: TriggerInvokeEventResponse = TriggerService.invoke_trigger_event(
            event=plugin_trigger_event,
            user_id=plugin_trigger_event.user_id,
            tenant_id=self.tenant_id,
            node_config=self.node_config,
        )
        if trigger_event_response.cancelled:
            return None

        return TriggerDebugEvent(
            workflow_args={
                "inputs": trigger_event_response.variables,
                "files": [],
            },
            node_id=self.node_id,
        )

class WebhookTriggerDebugEventPoller(TriggerDebugEventPoller):
    def poll(self) -> TriggerDebugEvent | None:
        pool_key = build_webhook_pool_key(
            tenant_id=self.tenant_id,
            app_id=self.app_id,
            node_id=self.node_id,
        )
        webhook_event: WebhookDebugEvent | None = TriggerDebugEventBus.poll(
            event_type=WebhookDebugEvent,
            pool_key=pool_key,
            tenant_id=self.tenant_id,
            user_id=self.user_id,
            app_id=self.app_id,
            node_id=self.node_id,
        )
        if not webhook_event:
            return None

        from services.trigger.webhook_service import WebhookService

        # Prefer pre-built inputs from the event payload; otherwise derive them
        # from the raw webhook data.
        payload = webhook_event.payload or {}
        workflow_inputs = payload.get("inputs")
        if workflow_inputs is None:
            webhook_data = payload.get("webhook_data", {})
            workflow_inputs = WebhookService.build_workflow_inputs(webhook_data)

        workflow_args: Mapping[str, Any] = {
            "inputs": workflow_inputs or {},
            "files": [],
        }
        return TriggerDebugEvent(workflow_args=workflow_args, node_id=self.node_id)

class ScheduleTriggerDebugEventPoller(TriggerDebugEventPoller):
    """
    Poller for schedule trigger debug events.

    Simulates the schedule trigger by caching a debug runtime (cron expression,
    timezone, and the computed next run time) in Redis, then firing once the
    next run time has passed.
    """

    RUNTIME_CACHE_TTL = 60 * 5

    class ScheduleDebugRuntime(BaseModel):
        cache_key: str
        timezone: str
        cron_expression: str
        next_run_at: datetime

    def schedule_debug_runtime_key(self, cron_hash: str) -> str:
        return f"schedule_debug_runtime:{self.tenant_id}:{self.user_id}:{self.app_id}:{self.node_id}:{cron_hash}"

    def get_or_create_schedule_debug_runtime(self) -> ScheduleDebugRuntime:
        from services.trigger.schedule_service import ScheduleService

        schedule_config: ScheduleConfig = ScheduleService.to_schedule_config(self.node_config)
        cron_hash = hashlib.sha256(schedule_config.cron_expression.encode()).hexdigest()
        cache_key = self.schedule_debug_runtime_key(cron_hash)
        runtime_cache = redis_client.get(cache_key)
        if runtime_cache is None:
            schedule_debug_runtime = self.ScheduleDebugRuntime(
                cron_expression=schedule_config.cron_expression,
                timezone=schedule_config.timezone,
                cache_key=cache_key,
                next_run_at=ensure_naive_utc(
                    calculate_next_run_at(schedule_config.cron_expression, schedule_config.timezone)
                ),
            )
            redis_client.setex(
                name=cache_key,
                time=self.RUNTIME_CACHE_TTL,
                value=schedule_debug_runtime.model_dump_json(),
            )
            return schedule_debug_runtime
        else:
            redis_client.expire(cache_key, self.RUNTIME_CACHE_TTL)
            runtime = self.ScheduleDebugRuntime.model_validate_json(runtime_cache)
            runtime.next_run_at = ensure_naive_utc(runtime.next_run_at)
            return runtime

    def create_schedule_event(self, schedule_debug_runtime: ScheduleDebugRuntime) -> ScheduleDebugEvent:
        # Drop the cached runtime so the next poll schedules a fresh run.
        redis_client.delete(schedule_debug_runtime.cache_key)
        return ScheduleDebugEvent(
            timestamp=int(time.time()),
            node_id=self.node_id,
            inputs={},
        )

    def poll(self) -> TriggerDebugEvent | None:
        schedule_debug_runtime = self.get_or_create_schedule_debug_runtime()
        if schedule_debug_runtime.next_run_at > naive_utc_now():
            return None

        schedule_event: ScheduleDebugEvent = self.create_schedule_event(schedule_debug_runtime)
        workflow_args: Mapping[str, Any] = {
            "inputs": schedule_event.inputs or {},
            "files": [],
        }
        return TriggerDebugEvent(workflow_args=workflow_args, node_id=self.node_id)
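
# Lifecycle of the schedule poller above, as the code implements it:
#   1. First poll(): cache miss, so next_run_at is computed from the cron
#      expression and a ScheduleDebugRuntime is stored in Redis (5-minute TTL);
#      returns None.
#   2. Later polls: a cache hit refreshes the TTL; returns None while
#      next_run_at is still in the future.
#   3. Once next_run_at has passed: the cached runtime is deleted (so the next
#      poll reschedules) and a ScheduleDebugEvent fires.
# Editing the cron expression changes cron_hash and thus the cache key, so a
# stale runtime is simply left to expire.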

def create_event_poller(
    draft_workflow: Workflow, tenant_id: str, user_id: str, app_id: str, node_id: str
) -> TriggerDebugEventPoller:
    node_config = draft_workflow.get_node_config_by_id(node_id=node_id)
    if not node_config:
        raise ValueError(f"Node data not found for node {node_id}")
    node_type = draft_workflow.get_node_type_from_node_config(node_config)
    match node_type:
        case NodeType.TRIGGER_PLUGIN:
            return PluginTriggerDebugEventPoller(
                tenant_id=tenant_id, user_id=user_id, app_id=app_id, node_config=node_config, node_id=node_id
            )
        case NodeType.TRIGGER_WEBHOOK:
            return WebhookTriggerDebugEventPoller(
                tenant_id=tenant_id, user_id=user_id, app_id=app_id, node_config=node_config, node_id=node_id
            )
        case NodeType.TRIGGER_SCHEDULE:
            return ScheduleTriggerDebugEventPoller(
                tenant_id=tenant_id, user_id=user_id, app_id=app_id, node_config=node_config, node_id=node_id
            )
        case _:
            raise ValueError(f"Unable to create event poller for node type {node_type}")

def select_trigger_debug_events(
    draft_workflow: Workflow, app_model: App, user_id: str, node_ids: list[str]
) -> TriggerDebugEvent | None:
    for node_id in node_ids:
        node_config = draft_workflow.get_node_config_by_id(node_id=node_id)
        if not node_config:
            raise ValueError(f"Node data not found for node {node_id}")
        poller: TriggerDebugEventPoller = create_event_poller(
            draft_workflow=draft_workflow,
            tenant_id=app_model.tenant_id,
            user_id=user_id,
            app_id=app_model.id,
            node_id=node_id,
        )
        event = poller.poll()
        if event is not None:
            return event
    return None
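
One plausible way a draft-run debug endpoint could drive the selector (a sketch: wait_for_trigger_event, the one-second interval, and the timeout are all invented here, not part of the commit):

    import time

    def wait_for_trigger_event(draft_workflow, app_model, user_id, node_ids, timeout=60.0):
        """Poll until one of the trigger nodes fires or the timeout elapses."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            event = select_trigger_debug_events(
                draft_workflow=draft_workflow,
                app_model=app_model,
                user_id=user_id,
                node_ids=node_ids,
            )
            if event is not None:
                # event.workflow_args feeds straight into a draft workflow run.
                return event
            time.sleep(1.0)  # cheap: a miss just re-registers the address in the pool
        return None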

dify/api/core/trigger/debug/events.py (new file)
@@ -0,0 +1,67 @@
from collections.abc import Mapping
from enum import StrEnum
from typing import Any

from pydantic import BaseModel, Field


class TriggerDebugPoolKey(StrEnum):
    """Trigger debug pool key."""

    SCHEDULE = "schedule_trigger_debug_waiting_pool"
    WEBHOOK = "webhook_trigger_debug_waiting_pool"
    PLUGIN = "plugin_trigger_debug_waiting_pool"


class BaseDebugEvent(BaseModel):
    """Base class for all debug events."""

    timestamp: int


class ScheduleDebugEvent(BaseDebugEvent):
    """Debug event for schedule triggers."""

    node_id: str
    inputs: Mapping[str, Any]


class WebhookDebugEvent(BaseDebugEvent):
    """Debug event for webhook triggers."""

    request_id: str
    node_id: str
    payload: dict[str, Any] = Field(default_factory=dict)


def build_webhook_pool_key(tenant_id: str, app_id: str, node_id: str) -> str:
    """Generate pool key for webhook events.

    Args:
        tenant_id: Tenant ID
        app_id: App ID
        node_id: Node ID
    """
    return f"{TriggerDebugPoolKey.WEBHOOK}:{tenant_id}:{app_id}:{node_id}"

class PluginTriggerDebugEvent(BaseDebugEvent):
    """Debug event for plugin triggers."""

    name: str
    user_id: str = Field(description="End user ID used only to trigger the event; unrelated to the account user ID")
    request_id: str
    subscription_id: str
    provider_id: str


def build_plugin_pool_key(tenant_id: str, provider_id: str, subscription_id: str, name: str) -> str:
    """Generate pool key for plugin trigger events.

    Args:
        name: Event name
        tenant_id: Tenant ID
        provider_id: Provider ID
        subscription_id: Subscription ID
    """
    return f"{TriggerDebugPoolKey.PLUGIN}:{tenant_id}:{provider_id}:{subscription_id}:{name}"
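
Both sides of the bus must derive identical pool keys, so these builders are the single source of truth for key shape. The expected strings below follow directly from the f-strings above; the IDs are placeholders.

    webhook_key = build_webhook_pool_key(tenant_id="t1", app_id="a1", node_id="n1")
    assert webhook_key == "webhook_trigger_debug_waiting_pool:t1:a1:n1"

    plugin_key = build_plugin_pool_key(
        tenant_id="t1", provider_id="org/plugin/provider", subscription_id="sub-1", name="on_event"
    )
    assert plugin_key == "plugin_trigger_debug_waiting_pool:t1:org/plugin/provider:sub-1:on_event"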