dify
dify/api/tests/unit_tests/repositories/__init__.py (new file)
@@ -0,0 +1,3 @@
"""
Unit tests for repositories.
"""
@@ -0,0 +1,370 @@
"""Unit tests for DifyAPISQLAlchemyWorkflowRunRepository implementation."""

from datetime import UTC, datetime
from unittest.mock import Mock, patch

import pytest
from sqlalchemy.orm import Session, sessionmaker

from core.workflow.entities.workflow_pause import WorkflowPauseEntity
from core.workflow.enums import WorkflowExecutionStatus
from models.workflow import WorkflowPause as WorkflowPauseModel
from models.workflow import WorkflowRun
from repositories.sqlalchemy_api_workflow_run_repository import (
    DifyAPISQLAlchemyWorkflowRunRepository,
    _PrivateWorkflowPauseEntity,
    _WorkflowRunError,
)


class TestDifyAPISQLAlchemyWorkflowRunRepository:
    """Test DifyAPISQLAlchemyWorkflowRunRepository implementation."""

    @pytest.fixture
    def mock_session(self):
        """Create a mock session."""
        return Mock(spec=Session)

    @pytest.fixture
    def mock_session_maker(self, mock_session):
        """Create a mock sessionmaker."""
        session_maker = Mock(spec=sessionmaker)

        # Create a context manager mock
        context_manager = Mock()
        context_manager.__enter__ = Mock(return_value=mock_session)
        context_manager.__exit__ = Mock(return_value=None)
        session_maker.return_value = context_manager

        # Mock session.begin() context manager
        begin_context_manager = Mock()
        begin_context_manager.__enter__ = Mock(return_value=None)
        begin_context_manager.__exit__ = Mock(return_value=None)
        mock_session.begin = Mock(return_value=begin_context_manager)

        # Add missing session methods
        mock_session.commit = Mock()
        mock_session.rollback = Mock()
        mock_session.add = Mock()
        mock_session.delete = Mock()
        mock_session.get = Mock()
        mock_session.scalar = Mock()
        mock_session.scalars = Mock()

        # Also support expire_on_commit parameter
        def make_session(expire_on_commit=None):
            cm = Mock()
            cm.__enter__ = Mock(return_value=mock_session)
            cm.__exit__ = Mock(return_value=None)
            return cm

        session_maker.side_effect = make_session
        return session_maker

    @pytest.fixture
    def repository(self, mock_session_maker):
        """Create repository instance with mocked dependencies."""

        # Create a testable subclass that implements the save method
        class TestableDifyAPISQLAlchemyWorkflowRunRepository(DifyAPISQLAlchemyWorkflowRunRepository):
            def __init__(self, session_maker):
                # Initialize without calling parent __init__ to avoid any instantiation issues
                self._session_maker = session_maker

            def save(self, execution):
                """Mock implementation of save method."""
                return None

        # Create repository instance
        repo = TestableDifyAPISQLAlchemyWorkflowRunRepository(mock_session_maker)

        return repo

    @pytest.fixture
    def sample_workflow_run(self):
        """Create a sample WorkflowRun model."""
        workflow_run = Mock(spec=WorkflowRun)
        workflow_run.id = "workflow-run-123"
        workflow_run.tenant_id = "tenant-123"
        workflow_run.app_id = "app-123"
        workflow_run.workflow_id = "workflow-123"
        workflow_run.status = WorkflowExecutionStatus.RUNNING
        return workflow_run

    @pytest.fixture
    def sample_workflow_pause(self):
        """Create a sample WorkflowPauseModel."""
        pause = Mock(spec=WorkflowPauseModel)
        pause.id = "pause-123"
        pause.workflow_id = "workflow-123"
        pause.workflow_run_id = "workflow-run-123"
        pause.state_object_key = "workflow-state-123.json"
        pause.resumed_at = None
        pause.created_at = datetime.now(UTC)
        return pause


class TestCreateWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository):
    """Test create_workflow_pause method."""

    def test_create_workflow_pause_success(
        self,
        repository: DifyAPISQLAlchemyWorkflowRunRepository,
        mock_session: Mock,
        sample_workflow_run: Mock,
    ):
        """Test successful workflow pause creation."""
        # Arrange
        workflow_run_id = "workflow-run-123"
        state_owner_user_id = "user-123"
        state = '{"test": "state"}'

        mock_session.get.return_value = sample_workflow_run

        with patch("repositories.sqlalchemy_api_workflow_run_repository.uuidv7") as mock_uuidv7:
            mock_uuidv7.side_effect = ["pause-123"]
            with patch("repositories.sqlalchemy_api_workflow_run_repository.storage") as mock_storage:
                # Act
                result = repository.create_workflow_pause(
                    workflow_run_id=workflow_run_id,
                    state_owner_user_id=state_owner_user_id,
                    state=state,
                )

                # Assert
                assert isinstance(result, _PrivateWorkflowPauseEntity)
                assert result.id == "pause-123"
                assert result.workflow_execution_id == workflow_run_id

                # Verify database interactions
                mock_session.get.assert_called_once_with(WorkflowRun, workflow_run_id)
                mock_storage.save.assert_called_once()
                mock_session.add.assert_called()
                # When using session.begin() context manager, commit is handled automatically
                # No explicit commit call is expected

    def test_create_workflow_pause_not_found(
        self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock
    ):
        """Test workflow pause creation when workflow run not found."""
        # Arrange
        mock_session.get.return_value = None

        # Act & Assert
        with pytest.raises(ValueError, match="WorkflowRun not found: workflow-run-123"):
            repository.create_workflow_pause(
                workflow_run_id="workflow-run-123",
                state_owner_user_id="user-123",
                state='{"test": "state"}',
            )

        mock_session.get.assert_called_once_with(WorkflowRun, "workflow-run-123")

    def test_create_workflow_pause_invalid_status(
        self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock, sample_workflow_run: Mock
    ):
        """Test workflow pause creation when workflow not in RUNNING status."""
        # Arrange
        sample_workflow_run.status = WorkflowExecutionStatus.PAUSED
        mock_session.get.return_value = sample_workflow_run

        # Act & Assert
        with pytest.raises(_WorkflowRunError, match="Only WorkflowRun with RUNNING status can be paused"):
            repository.create_workflow_pause(
                workflow_run_id="workflow-run-123",
                state_owner_user_id="user-123",
                state='{"test": "state"}',
            )


class TestResumeWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository):
    """Test resume_workflow_pause method."""

    def test_resume_workflow_pause_success(
        self,
        repository: DifyAPISQLAlchemyWorkflowRunRepository,
        mock_session: Mock,
        sample_workflow_run: Mock,
        sample_workflow_pause: Mock,
    ):
        """Test successful workflow pause resume."""
        # Arrange
        workflow_run_id = "workflow-run-123"
        pause_entity = Mock(spec=WorkflowPauseEntity)
        pause_entity.id = "pause-123"

        # Setup workflow run and pause
        sample_workflow_run.status = WorkflowExecutionStatus.PAUSED
        sample_workflow_run.pause = sample_workflow_pause
        sample_workflow_pause.resumed_at = None

        mock_session.scalar.return_value = sample_workflow_run

        with patch("repositories.sqlalchemy_api_workflow_run_repository.naive_utc_now") as mock_now:
            mock_now.return_value = datetime.now(UTC)

            # Act
            result = repository.resume_workflow_pause(
                workflow_run_id=workflow_run_id,
                pause_entity=pause_entity,
            )

            # Assert
            assert isinstance(result, _PrivateWorkflowPauseEntity)
            assert result.id == "pause-123"

            # Verify state transitions
            assert sample_workflow_pause.resumed_at is not None
            assert sample_workflow_run.status == WorkflowExecutionStatus.RUNNING

            # Verify database interactions
            mock_session.add.assert_called()
            # When using session.begin() context manager, commit is handled automatically
            # No explicit commit call is expected

    def test_resume_workflow_pause_not_paused(
        self,
        repository: DifyAPISQLAlchemyWorkflowRunRepository,
        mock_session: Mock,
        sample_workflow_run: Mock,
    ):
        """Test resume when workflow is not paused."""
        # Arrange
        workflow_run_id = "workflow-run-123"
        pause_entity = Mock(spec=WorkflowPauseEntity)
        pause_entity.id = "pause-123"

        sample_workflow_run.status = WorkflowExecutionStatus.RUNNING
        mock_session.scalar.return_value = sample_workflow_run

        # Act & Assert
        with pytest.raises(_WorkflowRunError, match="WorkflowRun is not in PAUSED status"):
            repository.resume_workflow_pause(
                workflow_run_id=workflow_run_id,
                pause_entity=pause_entity,
            )

    def test_resume_workflow_pause_id_mismatch(
        self,
        repository: DifyAPISQLAlchemyWorkflowRunRepository,
        mock_session: Mock,
        sample_workflow_run: Mock,
        sample_workflow_pause: Mock,
    ):
        """Test resume when pause ID doesn't match."""
        # Arrange
        workflow_run_id = "workflow-run-123"
        pause_entity = Mock(spec=WorkflowPauseEntity)
        pause_entity.id = "pause-456"  # Different ID

        sample_workflow_run.status = WorkflowExecutionStatus.PAUSED
        sample_workflow_pause.id = "pause-123"
        sample_workflow_run.pause = sample_workflow_pause
        mock_session.scalar.return_value = sample_workflow_run

        # Act & Assert
        with pytest.raises(_WorkflowRunError, match="different id in WorkflowPause and WorkflowPauseEntity"):
            repository.resume_workflow_pause(
                workflow_run_id=workflow_run_id,
                pause_entity=pause_entity,
            )


class TestDeleteWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository):
    """Test delete_workflow_pause method."""

    def test_delete_workflow_pause_success(
        self,
        repository: DifyAPISQLAlchemyWorkflowRunRepository,
        mock_session: Mock,
        sample_workflow_pause: Mock,
    ):
        """Test successful workflow pause deletion."""
        # Arrange
        pause_entity = Mock(spec=WorkflowPauseEntity)
        pause_entity.id = "pause-123"

        mock_session.get.return_value = sample_workflow_pause

        with patch("repositories.sqlalchemy_api_workflow_run_repository.storage") as mock_storage:
            # Act
            repository.delete_workflow_pause(pause_entity=pause_entity)

            # Assert
            mock_storage.delete.assert_called_once_with(sample_workflow_pause.state_object_key)
            mock_session.delete.assert_called_once_with(sample_workflow_pause)
            # When using session.begin() context manager, commit is handled automatically
            # No explicit commit call is expected

    def test_delete_workflow_pause_not_found(
        self,
        repository: DifyAPISQLAlchemyWorkflowRunRepository,
        mock_session: Mock,
    ):
        """Test delete when pause not found."""
        # Arrange
        pause_entity = Mock(spec=WorkflowPauseEntity)
        pause_entity.id = "pause-123"

        mock_session.get.return_value = None

        # Act & Assert
        with pytest.raises(_WorkflowRunError, match="WorkflowPause not found: pause-123"):
            repository.delete_workflow_pause(pause_entity=pause_entity)


class TestPrivateWorkflowPauseEntity(TestDifyAPISQLAlchemyWorkflowRunRepository):
    """Test _PrivateWorkflowPauseEntity class."""

    def test_from_models(self, sample_workflow_pause: Mock):
        """Test creating _PrivateWorkflowPauseEntity from models."""
        # Act
        entity = _PrivateWorkflowPauseEntity.from_models(sample_workflow_pause)

        # Assert
        assert isinstance(entity, _PrivateWorkflowPauseEntity)
        assert entity._pause_model == sample_workflow_pause

    def test_properties(self, sample_workflow_pause: Mock):
        """Test entity properties."""
        # Arrange
        entity = _PrivateWorkflowPauseEntity.from_models(sample_workflow_pause)

        # Act & Assert
        assert entity.id == sample_workflow_pause.id
        assert entity.workflow_execution_id == sample_workflow_pause.workflow_run_id
        assert entity.resumed_at == sample_workflow_pause.resumed_at

    def test_get_state(self, sample_workflow_pause: Mock):
        """Test getting state from storage."""
        # Arrange
        entity = _PrivateWorkflowPauseEntity.from_models(sample_workflow_pause)
        expected_state = b'{"test": "state"}'

        with patch("repositories.sqlalchemy_api_workflow_run_repository.storage") as mock_storage:
            mock_storage.load.return_value = expected_state

            # Act
            result = entity.get_state()

            # Assert
            assert result == expected_state
            mock_storage.load.assert_called_once_with(sample_workflow_pause.state_object_key)

    def test_get_state_caching(self, sample_workflow_pause: Mock):
        """Test state caching in get_state method."""
        # Arrange
        entity = _PrivateWorkflowPauseEntity.from_models(sample_workflow_pause)
        expected_state = b'{"test": "state"}'

        with patch("repositories.sqlalchemy_api_workflow_run_repository.storage") as mock_storage:
            mock_storage.load.return_value = expected_state

            # Act
            result1 = entity.get_state()
            result2 = entity.get_state()  # Should use cache

            # Assert
            assert result1 == expected_state
            assert result2 == expected_state
            mock_storage.load.assert_called_once()  # Only called once due to caching
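Aside: the fixtures above stub out a repository that owns its own session lifecycle. A minimal sketch of the assumed pattern follows; ExampleRepository and create_pause are hypothetical names, not part of this commit, and the sketch only illustrates why the tests assert on session.add/get/delete but never on an explicit commit().

# Illustrative sketch (editorial note), not part of the committed test files.
from sqlalchemy.orm import sessionmaker


class ExampleRepository:
    """Hypothetical repository showing the transaction pattern the fixtures mock."""

    def __init__(self, session_maker: sessionmaker):
        self._session_maker = session_maker

    def create_pause(self, pause_model):
        # The fixture's make_session() accepts expire_on_commit, mirroring this call.
        with self._session_maker(expire_on_commit=False) as session:
            with session.begin():
                # Commit (or rollback on error) happens when session.begin() exits,
                # so no explicit session.commit() call is ever made.
                session.add(pause_model)
        return pause_model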
@@ -0,0 +1,251 @@
"""Unit tests for workflow run repository with status filter."""

import uuid
from unittest.mock import MagicMock

import pytest
from sqlalchemy.orm import sessionmaker

from models import WorkflowRun, WorkflowRunTriggeredFrom
from repositories.sqlalchemy_api_workflow_run_repository import DifyAPISQLAlchemyWorkflowRunRepository


class TestDifyAPISQLAlchemyWorkflowRunRepository:
    """Test workflow run repository with status filtering."""

    @pytest.fixture
    def mock_session_maker(self):
        """Create a mock session maker."""
        return MagicMock(spec=sessionmaker)

    @pytest.fixture
    def repository(self, mock_session_maker):
        """Create repository instance with mock session."""
        return DifyAPISQLAlchemyWorkflowRunRepository(mock_session_maker)

    def test_get_paginated_workflow_runs_without_status(self, repository, mock_session_maker):
        """Test getting paginated workflow runs without status filter."""
        # Arrange
        tenant_id = str(uuid.uuid4())
        app_id = str(uuid.uuid4())
        mock_session = MagicMock()
        mock_session_maker.return_value.__enter__.return_value = mock_session

        mock_runs = [MagicMock(spec=WorkflowRun) for _ in range(3)]
        mock_session.scalars.return_value.all.return_value = mock_runs

        # Act
        result = repository.get_paginated_workflow_runs(
            tenant_id=tenant_id,
            app_id=app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            limit=20,
            last_id=None,
            status=None,
        )

        # Assert
        assert len(result.data) == 3
        assert result.limit == 20
        assert result.has_more is False

    def test_get_paginated_workflow_runs_with_status_filter(self, repository, mock_session_maker):
        """Test getting paginated workflow runs with status filter."""
        # Arrange
        tenant_id = str(uuid.uuid4())
        app_id = str(uuid.uuid4())
        mock_session = MagicMock()
        mock_session_maker.return_value.__enter__.return_value = mock_session

        mock_runs = [MagicMock(spec=WorkflowRun, status="succeeded") for _ in range(2)]
        mock_session.scalars.return_value.all.return_value = mock_runs

        # Act
        result = repository.get_paginated_workflow_runs(
            tenant_id=tenant_id,
            app_id=app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            limit=20,
            last_id=None,
            status="succeeded",
        )

        # Assert
        assert len(result.data) == 2
        assert all(run.status == "succeeded" for run in result.data)

    def test_get_workflow_runs_count_without_status(self, repository, mock_session_maker):
        """Test getting workflow runs count without status filter."""
        # Arrange
        tenant_id = str(uuid.uuid4())
        app_id = str(uuid.uuid4())
        mock_session = MagicMock()
        mock_session_maker.return_value.__enter__.return_value = mock_session

        # Mock the GROUP BY query results
        mock_results = [
            ("succeeded", 5),
            ("failed", 2),
            ("running", 1),
        ]
        mock_session.execute.return_value.all.return_value = mock_results

        # Act
        result = repository.get_workflow_runs_count(
            tenant_id=tenant_id,
            app_id=app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            status=None,
        )

        # Assert
        assert result["total"] == 8
        assert result["succeeded"] == 5
        assert result["failed"] == 2
        assert result["running"] == 1
        assert result["stopped"] == 0
        assert result["partial-succeeded"] == 0

    def test_get_workflow_runs_count_with_status_filter(self, repository, mock_session_maker):
        """Test getting workflow runs count with status filter."""
        # Arrange
        tenant_id = str(uuid.uuid4())
        app_id = str(uuid.uuid4())
        mock_session = MagicMock()
        mock_session_maker.return_value.__enter__.return_value = mock_session

        # Mock the count query for succeeded status
        mock_session.scalar.return_value = 5

        # Act
        result = repository.get_workflow_runs_count(
            tenant_id=tenant_id,
            app_id=app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            status="succeeded",
        )

        # Assert
        assert result["total"] == 5
        assert result["succeeded"] == 5
        assert result["running"] == 0
        assert result["failed"] == 0
        assert result["stopped"] == 0
        assert result["partial-succeeded"] == 0

    def test_get_workflow_runs_count_with_invalid_status(self, repository, mock_session_maker):
        """Test that invalid status is still counted in total but not in any specific status."""
        # Arrange
        tenant_id = str(uuid.uuid4())
        app_id = str(uuid.uuid4())
        mock_session = MagicMock()
        mock_session_maker.return_value.__enter__.return_value = mock_session

        # Mock count query returning 0 for invalid status
        mock_session.scalar.return_value = 0

        # Act
        result = repository.get_workflow_runs_count(
            tenant_id=tenant_id,
            app_id=app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            status="invalid_status",
        )

        # Assert
        assert result["total"] == 0
        assert all(result[status] == 0 for status in ["running", "succeeded", "failed", "stopped", "partial-succeeded"])

    def test_get_workflow_runs_count_with_time_range(self, repository, mock_session_maker):
        """Test getting workflow runs count with a time range filter, verifying SQL query construction."""
        # Arrange
        tenant_id = str(uuid.uuid4())
        app_id = str(uuid.uuid4())
        mock_session = MagicMock()
        mock_session_maker.return_value.__enter__.return_value = mock_session

        # Mock the GROUP BY query results
        mock_results = [
            ("succeeded", 3),
            ("running", 2),
        ]
        mock_session.execute.return_value.all.return_value = mock_results

        # Act
        result = repository.get_workflow_runs_count(
            tenant_id=tenant_id,
            app_id=app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            status=None,
            time_range="1d",
        )

        # Assert results
        assert result["total"] == 5
        assert result["succeeded"] == 3
        assert result["running"] == 2
        assert result["failed"] == 0

        # Verify that execute was called (which means GROUP BY query was used)
        assert mock_session.execute.called, "execute should have been called for GROUP BY query"

        # Verify SQL query includes time filter by checking the statement
        call_args = mock_session.execute.call_args
        assert call_args is not None, "execute should have been called with a statement"

        # The first argument should be the SQL statement
        stmt = call_args[0][0]
        # Convert to string to inspect the query
        query_str = str(stmt.compile(compile_kwargs={"literal_binds": True}))

        # Verify the query includes created_at filter
        # The query should have a WHERE clause with created_at comparison
        assert "created_at" in query_str.lower() or "workflow_runs.created_at" in query_str.lower(), (
            "Query should include created_at filter for time range"
        )

    def test_get_workflow_runs_count_with_status_and_time_range(self, repository, mock_session_maker):
        """Test getting workflow runs count with both status and time range filters, verifying the SQL query."""
        # Arrange
        tenant_id = str(uuid.uuid4())
        app_id = str(uuid.uuid4())
        mock_session = MagicMock()
        mock_session_maker.return_value.__enter__.return_value = mock_session

        # Mock the count query for running status within time range
        mock_session.scalar.return_value = 2

        # Act
        result = repository.get_workflow_runs_count(
            tenant_id=tenant_id,
            app_id=app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
            status="running",
            time_range="1d",
        )

        # Assert results
        assert result["total"] == 2
        assert result["running"] == 2
        assert result["succeeded"] == 0
        assert result["failed"] == 0

        # Verify that scalar was called (which means COUNT query was used)
        assert mock_session.scalar.called, "scalar should have been called for count query"

        # Verify SQL query includes both status and time filter
        call_args = mock_session.scalar.call_args
        assert call_args is not None, "scalar should have been called with a statement"

        # The first argument should be the SQL statement
        stmt = call_args[0][0]
        # Convert to string to inspect the query
        query_str = str(stmt.compile(compile_kwargs={"literal_binds": True}))

        # Verify the query includes both filters
        assert "created_at" in query_str.lower() or "workflow_runs.created_at" in query_str.lower(), (
            "Query should include created_at filter for time range"
        )
        assert "status" in query_str.lower() or "workflow_runs.status" in query_str.lower(), (
            "Query should include status filter"
        )
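Aside: these count tests mock two query shapes: session.execute(...).all() returning (status, count) rows when no status filter is given, and session.scalar(...) returning a single count when a status is supplied. A minimal sketch of how grouped rows would fold into the dictionary the assertions check; fold_status_counts is a hypothetical helper, not code from this commit.

# Illustrative sketch (editorial note), not part of the committed test files.
def fold_status_counts(rows: list[tuple[str, int]]) -> dict[str, int]:
    """Fold (status, count) rows from a GROUP BY query into the asserted dict shape."""
    counts = {"running": 0, "succeeded": 0, "failed": 0, "stopped": 0, "partial-succeeded": 0}
    total = 0
    for status, count in rows:
        total += count  # every row contributes to "total"
        if status in counts:  # unknown statuses are not broken out separately
            counts[status] = count
    return {"total": total, **counts}


# Matches test_get_workflow_runs_count_without_status above: 5 + 2 + 1 == 8.
assert fold_status_counts([("succeeded", 5), ("failed", 2), ("running", 1)])["total"] == 8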
@@ -0,0 +1,3 @@
"""
Unit tests for workflow_node_execution repositories.
"""
@@ -0,0 +1,337 @@
"""
Unit tests for the SQLAlchemy implementation of WorkflowNodeExecutionRepository.
"""

import json
import uuid
from datetime import datetime
from decimal import Decimal
from unittest.mock import MagicMock, PropertyMock

import pytest
from pytest_mock import MockerFixture
from sqlalchemy.orm import Session, sessionmaker

from core.model_runtime.utils.encoders import jsonable_encoder
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.entities import (
    WorkflowNodeExecution,
)
from core.workflow.enums import (
    NodeType,
    WorkflowNodeExecutionMetadataKey,
    WorkflowNodeExecutionStatus,
)
from core.workflow.repositories.workflow_node_execution_repository import OrderConfig
from models.account import Account, Tenant
from models.workflow import WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom


def configure_mock_execution(mock_execution):
    """Configure a mock execution with proper JSON serializable values."""
    # Configure inputs, outputs, process_data, and execution_metadata to return JSON serializable values
    type(mock_execution).inputs = PropertyMock(return_value='{"key": "value"}')
    type(mock_execution).outputs = PropertyMock(return_value='{"result": "success"}')
    type(mock_execution).process_data = PropertyMock(return_value='{"process": "data"}')
    type(mock_execution).execution_metadata = PropertyMock(return_value='{"metadata": "info"}')

    # Configure status and triggered_from to be valid enum values
    mock_execution.status = "running"
    mock_execution.triggered_from = "workflow-run"

    return mock_execution


@pytest.fixture
def session():
    """Create a mock SQLAlchemy session."""
    session = MagicMock(spec=Session)
    # Configure the session to be used as a context manager
    session.__enter__ = MagicMock(return_value=session)
    session.__exit__ = MagicMock(return_value=None)

    # Configure the session factory to return the session
    session_factory = MagicMock(spec=sessionmaker)
    session_factory.return_value = session
    return session, session_factory


@pytest.fixture
def mock_user():
    """Create a user instance for testing."""
    user = Account(name="test", email="test@example.com")
    user.id = "test-user-id"

    tenant = Tenant(name="Test Workspace")
    tenant.id = "test-tenant"
    user._current_tenant = MagicMock()
    user._current_tenant.id = "test-tenant"

    return user


@pytest.fixture
def repository(session, mock_user):
    """Create a repository instance with test data."""
    _, session_factory = session
    app_id = "test-app"
    return SQLAlchemyWorkflowNodeExecutionRepository(
        session_factory=session_factory,
        user=mock_user,
        app_id=app_id,
        triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
    )


def test_save(repository, session):
    """Test save method."""
    session_obj, _ = session
    # Create a mock execution
    execution = MagicMock(spec=WorkflowNodeExecution)
    execution.id = "test-id"
    execution.node_execution_id = "test-node-execution-id"
    execution.tenant_id = None
    execution.app_id = None
    execution.inputs = None
    execution.process_data = None
    execution.outputs = None
    execution.metadata = None
    execution.workflow_id = str(uuid.uuid4())

    # Mock the to_db_model method to return the execution itself
    # This simulates the behavior of setting tenant_id and app_id
    db_model = MagicMock(spec=WorkflowNodeExecutionModel)
    db_model.id = "test-id"
    db_model.node_execution_id = "test-node-execution-id"
    repository._to_db_model = MagicMock(return_value=db_model)

    # Mock session.get to return None (no existing record)
    session_obj.get.return_value = None

    # Call save method
    repository.save(execution)

    # Assert to_db_model was called with the execution
    repository._to_db_model.assert_called_once_with(execution)

    # Assert session.get was called to check for existing record
    session_obj.get.assert_called_once_with(WorkflowNodeExecutionModel, db_model.id)

    # Assert session.add was called for new record
    session_obj.add.assert_called_once_with(db_model)

    # Assert session.commit was called
    session_obj.commit.assert_called_once()


def test_save_with_existing_tenant_id(repository, session):
    """Test save method with existing tenant_id."""
    session_obj, _ = session
    # Create a mock execution with existing tenant_id
    execution = MagicMock(spec=WorkflowNodeExecutionModel)
    execution.id = "existing-id"
    execution.node_execution_id = "existing-node-execution-id"
    execution.tenant_id = "existing-tenant"
    execution.app_id = None
    execution.inputs = None
    execution.process_data = None
    execution.outputs = None
    execution.metadata = None

    # Create a modified execution that will be returned by _to_db_model
    modified_execution = MagicMock(spec=WorkflowNodeExecutionModel)
    modified_execution.id = "existing-id"
    modified_execution.node_execution_id = "existing-node-execution-id"
    modified_execution.tenant_id = "existing-tenant"  # Tenant ID should not change
    modified_execution.app_id = repository._app_id  # App ID should be set
    # Create a dictionary to simulate __dict__ for updating attributes
    modified_execution.__dict__ = {
        "id": "existing-id",
        "node_execution_id": "existing-node-execution-id",
        "tenant_id": "existing-tenant",
        "app_id": repository._app_id,
    }

    # Mock the to_db_model method to return the modified execution
    repository._to_db_model = MagicMock(return_value=modified_execution)

    # Mock session.get to return an existing record
    existing_model = MagicMock(spec=WorkflowNodeExecutionModel)
    session_obj.get.return_value = existing_model

    # Call save method
    repository.save(execution)

    # Assert to_db_model was called with the execution
    repository._to_db_model.assert_called_once_with(execution)

    # Assert session.get was called to check for existing record
    session_obj.get.assert_called_once_with(WorkflowNodeExecutionModel, modified_execution.id)

    # Assert session.add was NOT called since we're updating existing
    session_obj.add.assert_not_called()

    # Assert session.commit was called
    session_obj.commit.assert_called_once()


def test_get_by_workflow_run(repository, session, mocker: MockerFixture):
    """Test get_by_workflow_run method."""
    session_obj, _ = session
    # Set up mock
    mock_select = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.select")
    mock_asc = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.asc")
    mock_desc = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.desc")

    mock_WorkflowNodeExecutionModel = mocker.patch(
        "core.repositories.sqlalchemy_workflow_node_execution_repository.WorkflowNodeExecutionModel"
    )
    mock_stmt = mocker.MagicMock()
    mock_select.return_value = mock_stmt
    mock_stmt.where.return_value = mock_stmt
    mock_stmt.order_by.return_value = mock_stmt
    mock_asc.return_value = mock_stmt
    mock_desc.return_value = mock_stmt
    mock_WorkflowNodeExecutionModel.preload_offload_data_and_files.return_value = mock_stmt

    # Create a properly configured mock execution
    mock_execution = mocker.MagicMock(spec=WorkflowNodeExecutionModel)
    configure_mock_execution(mock_execution)
    session_obj.scalars.return_value.all.return_value = [mock_execution]

    # Create a mock domain model to be returned by _to_domain_model
    mock_domain_model = mocker.MagicMock()
    # Mock the _to_domain_model method to return our mock domain model
    repository._to_domain_model = mocker.MagicMock(return_value=mock_domain_model)

    # Call method
    order_config = OrderConfig(order_by=["index"], order_direction="desc")
    result = repository.get_by_workflow_run(workflow_run_id="test-workflow-run-id", order_config=order_config)

    # Assert select was called with correct parameters
    mock_select.assert_called_once()
    session_obj.scalars.assert_called_once_with(mock_stmt)
    mock_WorkflowNodeExecutionModel.preload_offload_data_and_files.assert_called_once_with(mock_stmt)
    # Assert _to_domain_model was called with the mock execution
    repository._to_domain_model.assert_called_once_with(mock_execution)
    # Assert the result contains our mock domain model
    assert len(result) == 1
    assert result[0] is mock_domain_model


def test_to_db_model(repository):
    """Test to_db_model method."""
    # Create a domain model
    domain_model = WorkflowNodeExecution(
        id="test-id",
        workflow_id="test-workflow-id",
        node_execution_id="test-node-execution-id",
        workflow_execution_id="test-workflow-run-id",
        index=1,
        predecessor_node_id="test-predecessor-id",
        node_id="test-node-id",
        node_type=NodeType.START,
        title="Test Node",
        inputs={"input_key": "input_value"},
        process_data={"process_key": "process_value"},
        outputs={"output_key": "output_value"},
        status=WorkflowNodeExecutionStatus.RUNNING,
        error=None,
        elapsed_time=1.5,
        metadata={
            WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100,
            WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: Decimal("0.0"),
        },
        created_at=datetime.now(),
        finished_at=None,
    )

    # Convert to DB model
    db_model = repository._to_db_model(domain_model)

    # Assert DB model has correct values
    assert isinstance(db_model, WorkflowNodeExecutionModel)
    assert db_model.id == domain_model.id
    assert db_model.tenant_id == repository._tenant_id
    assert db_model.app_id == repository._app_id
    assert db_model.workflow_id == domain_model.workflow_id
    assert db_model.triggered_from == repository._triggered_from
    assert db_model.workflow_run_id == domain_model.workflow_execution_id
    assert db_model.index == domain_model.index
    assert db_model.predecessor_node_id == domain_model.predecessor_node_id
    assert db_model.node_execution_id == domain_model.node_execution_id
    assert db_model.node_id == domain_model.node_id
    assert db_model.node_type == domain_model.node_type
    assert db_model.title == domain_model.title

    assert db_model.inputs_dict == domain_model.inputs
    assert db_model.process_data_dict == domain_model.process_data
    assert db_model.outputs_dict == domain_model.outputs
    assert db_model.execution_metadata_dict == jsonable_encoder(domain_model.metadata)

    assert db_model.status == domain_model.status
    assert db_model.error == domain_model.error
    assert db_model.elapsed_time == domain_model.elapsed_time
    assert db_model.created_at == domain_model.created_at
    assert db_model.created_by_role == repository._creator_user_role
    assert db_model.created_by == repository._creator_user_id
    assert db_model.finished_at == domain_model.finished_at


def test_to_domain_model(repository):
    """Test _to_domain_model method."""
    # Create input dictionaries
    inputs_dict = {"input_key": "input_value"}
    process_data_dict = {"process_key": "process_value"}
    outputs_dict = {"output_key": "output_value"}
    metadata_dict = {str(WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS): 100}

    # Create a DB model using our custom subclass
    db_model = WorkflowNodeExecutionModel()
    db_model.id = "test-id"
    db_model.tenant_id = "test-tenant-id"
    db_model.app_id = "test-app-id"
    db_model.workflow_id = "test-workflow-id"
    db_model.triggered_from = "workflow-run"
    db_model.workflow_run_id = "test-workflow-run-id"
    db_model.index = 1
    db_model.predecessor_node_id = "test-predecessor-id"
    db_model.node_execution_id = "test-node-execution-id"
    db_model.node_id = "test-node-id"
    db_model.node_type = NodeType.START
    db_model.title = "Test Node"
    db_model.inputs = json.dumps(inputs_dict)
    db_model.process_data = json.dumps(process_data_dict)
    db_model.outputs = json.dumps(outputs_dict)
    db_model.status = WorkflowNodeExecutionStatus.RUNNING
    db_model.error = None
    db_model.elapsed_time = 1.5
    db_model.execution_metadata = json.dumps(metadata_dict)
    db_model.created_at = datetime.now()
    db_model.created_by_role = "account"
    db_model.created_by = "test-user-id"
    db_model.finished_at = None

    # Convert to domain model
    domain_model = repository._to_domain_model(db_model)

    # Assert domain model has correct values
    assert isinstance(domain_model, WorkflowNodeExecution)
    assert domain_model.id == db_model.id
    assert domain_model.workflow_id == db_model.workflow_id
    assert domain_model.workflow_execution_id == db_model.workflow_run_id
    assert domain_model.index == db_model.index
    assert domain_model.predecessor_node_id == db_model.predecessor_node_id
    assert domain_model.node_execution_id == db_model.node_execution_id
    assert domain_model.node_id == db_model.node_id
    assert domain_model.node_type == NodeType(db_model.node_type)
    assert domain_model.title == db_model.title
    assert domain_model.inputs == inputs_dict
    assert domain_model.process_data == process_data_dict
    assert domain_model.outputs == outputs_dict
    assert domain_model.status == WorkflowNodeExecutionStatus(db_model.status)
    assert domain_model.error == db_model.error
    assert domain_model.elapsed_time == db_model.elapsed_time
    assert domain_model.metadata == metadata_dict
    assert domain_model.created_at == db_model.created_at
    assert domain_model.finished_at == db_model.finished_at
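Aside: the two save() tests above pin down an insert-or-update flow: convert the domain model with _to_db_model, probe for an existing row with session.get, add only when nothing is found, and finish with an explicit commit. A minimal sketch of that flow under those assumptions; save_sketch is a hypothetical stand-in, not the repository's actual implementation.

# Illustrative sketch (editorial note), not part of the committed test files.
def save_sketch(repository, session, execution):
    """Insert-or-update flow implied by test_save and test_save_with_existing_tenant_id."""
    db_model = repository._to_db_model(execution)
    existing = session.get(type(db_model), db_model.id)
    if existing is None:
        session.add(db_model)  # new record: session.add is expected
    else:
        # Update path: copy plain attributes onto the existing row, with no session.add.
        for key, value in db_model.__dict__.items():
            if not key.startswith("_"):
                setattr(existing, key, value)
    session.commit()  # both paths end with an explicit commit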
@@ -0,0 +1,106 @@
"""
Unit tests for SQLAlchemyWorkflowNodeExecutionRepository, focusing on process_data truncation functionality.
"""

from datetime import datetime
from typing import Any
from unittest.mock import MagicMock, Mock

from sqlalchemy.orm import sessionmaker

from core.repositories.sqlalchemy_workflow_node_execution_repository import (
    SQLAlchemyWorkflowNodeExecutionRepository,
)
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution
from core.workflow.enums import NodeType
from models import Account, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom


class TestSQLAlchemyWorkflowNodeExecutionRepositoryProcessData:
    """Test process_data truncation functionality in SQLAlchemyWorkflowNodeExecutionRepository."""

    def create_mock_account(self) -> Account:
        """Create a mock Account for testing."""
        account = Mock(spec=Account)
        account.id = "test-user-id"
        account.tenant_id = "test-tenant-id"
        return account

    def create_mock_session_factory(self) -> sessionmaker:
        """Create a mock session factory for testing."""
        mock_session = MagicMock()
        mock_session_factory = MagicMock(spec=sessionmaker)
        mock_session_factory.return_value.__enter__.return_value = mock_session
        mock_session_factory.return_value.__exit__.return_value = None
        return mock_session_factory

    def create_repository(self, mock_file_service=None) -> SQLAlchemyWorkflowNodeExecutionRepository:
        """Create a repository instance for testing."""
        mock_account = self.create_mock_account()
        mock_session_factory = self.create_mock_session_factory()

        repository = SQLAlchemyWorkflowNodeExecutionRepository(
            session_factory=mock_session_factory,
            user=mock_account,
            app_id="test-app-id",
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
        )

        if mock_file_service:
            repository._file_service = mock_file_service

        return repository

    def create_workflow_node_execution(
        self,
        process_data: dict[str, Any] | None = None,
        execution_id: str = "test-execution-id",
    ) -> WorkflowNodeExecution:
        """Create a WorkflowNodeExecution instance for testing."""
        return WorkflowNodeExecution(
            id=execution_id,
            workflow_id="test-workflow-id",
            index=1,
            node_id="test-node-id",
            node_type=NodeType.LLM,
            title="Test Node",
            process_data=process_data,
            created_at=datetime.now(),
        )

    def test_to_domain_model_without_offload_data(self):
        """Test _to_domain_model without offload data."""
        repository = self.create_repository()

        # Create mock database model without offload data
        db_model = Mock(spec=WorkflowNodeExecutionModel)
        db_model.id = "test-execution-id"
        db_model.node_execution_id = "test-node-execution-id"
        db_model.workflow_id = "test-workflow-id"
        db_model.workflow_run_id = None
        db_model.index = 1
        db_model.predecessor_node_id = None
        db_model.node_id = "test-node-id"
        db_model.node_type = "llm"
        db_model.title = "Test Node"
        db_model.status = "succeeded"
        db_model.error = None
        db_model.elapsed_time = 1.5
        db_model.created_at = datetime.now()
        db_model.finished_at = None

        process_data = {"normal": "data"}
        db_model.process_data_dict = process_data
        db_model.inputs_dict = None
        db_model.outputs_dict = None
        db_model.execution_metadata_dict = {}
        db_model.offload_data = None

        domain_model = repository._to_domain_model(db_model)

        # Domain model should have the data from database
        assert domain_model.process_data == process_data

        # Should not be truncated
        assert domain_model.process_data_truncated is False
        assert domain_model.get_truncated_process_data() is None
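Aside: the final assertions encode the contract these truncation tests rely on: when a row carries no offload data, process_data comes straight from the database, process_data_truncated is False, and get_truncated_process_data() returns None. A minimal stand-in that satisfies the same assertions; ProcessDataView is hypothetical and is not dify's domain model.

# Illustrative stand-in (editorial note), not part of the committed test files.
from dataclasses import dataclass
from typing import Any


@dataclass
class ProcessDataView:
    """Hypothetical view object mirroring the asserted truncation contract."""

    process_data: dict[str, Any] | None = None
    truncated_process_data: dict[str, Any] | None = None

    @property
    def process_data_truncated(self) -> bool:
        return self.truncated_process_data is not None

    def get_truncated_process_data(self) -> dict[str, Any] | None:
        return self.truncated_process_data


view = ProcessDataView(process_data={"normal": "data"})
assert view.process_data_truncated is False
assert view.get_truncated_process_data() is None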