LangBot/tests/unit_tests/pipeline/conftest.py
Junyan Qin (Chin) b6cdf18c1a feat: add comprehensive unit tests for pipeline stages (#1701)
* feat: add comprehensive unit tests for pipeline stages

* fix: deps install in ci

* ci: use venv

* ci: run run_tests.sh

* fix: resolve circular import issues in pipeline tests

Update all test files to use lazy imports via importlib.import_module() to
avoid circular-import errors. Fix the mock_conversation fixture so that it
properly mocks the list.copy() method.
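
The lazy-import pattern referred to here boils down to deferring the import to
test (or fixture) call time; a minimal sketch, with an illustrative module path
rather than one taken from the repository:

import importlib


def test_stage_module_imports_lazily():
    # Importing inside the test body instead of at module level breaks the
    # circular-import chain that a top-level 'from pkg.pipeline ... import ...'
    # would otherwise trigger during collection.
    resprule = importlib.import_module('pkg.pipeline.resprule.resprule')  # illustrative path
    assert resprule is not None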

Changes:
- Use lazy import pattern in all test files
- Fix conftest.py fixture for conversation messages
- Add integration test file for full import tests
- Update documentation with known issues and workarounds

Tests now successfully avoid circular import errors while maintaining
full test coverage of pipeline stages.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

* docs: add comprehensive testing summary

Document implementation details, challenges, solutions, and future
improvements for the pipeline unit test suite.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

* refactor: rewrite unit tests to test actual pipeline stage code

Rewrote the unit tests to exercise the real stage implementations instead of
mock logic (a condensed sketch of the pattern follows the list below):

- Test actual BanSessionCheckStage with 7 test cases (100% coverage)
- Test actual RateLimit stage with 3 test cases (70% coverage)
- Test actual PipelineManager with 5 test cases
- Use lazy imports via import_module to avoid circular dependencies
- Import pipelinemgr first to ensure proper stage registration
- Use Query.model_construct() to bypass Pydantic validation in tests
- Remove obsolete pure unit tests that didn't test real code
- All 20 tests passing with 48% overall pipeline coverage
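
A condensed sketch of that pattern, assuming pytest-asyncio is available and
treating the module path, the stage constructor, and the initialize()/process()
signatures as assumptions based on the names above rather than the repository's
actual API:

import importlib

import pytest


@pytest.mark.asyncio
async def test_ban_stage_allows_unbanned_session(mock_app, sample_query, sample_pipeline_config):
    # Import pipelinemgr first so stage-registration side effects run before
    # the concrete stage module is loaded.
    importlib.import_module('pkg.pipeline.pipelinemgr')
    bansess = importlib.import_module('pkg.pipeline.bansess.bansess')  # assumed path

    stage = bansess.BanSessionCheckStage(mock_app)   # constructor assumed to take the app
    await stage.initialize(sample_pipeline_config)   # signature assumed
    result = await stage.process(sample_query, 'BanSessionCheckStage')

    assert result.result_type is not None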

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

* test: add unit tests for GroupRespondRuleCheckStage

Added comprehensive unit tests for the resprule stage (the At-removal expectation is sketched after this list):

- Test person message skips rule check
- Test group message with no matching rules (INTERRUPT)
- Test group message with matching rule (CONTINUE)
- Test AtBotRule removes At component correctly
- Test AtBotRule when no At component present

Coverage: 100% on resprule.py and atbot.py
All 25 tests passing with 51% overall pipeline coverage
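
The At-removal case reduces to the following expectation on the message chain,
using the platform_message entities already imported in conftest.py; the
At(target=...) constructor and the rule's exact call signature are assumptions,
so this sketch only spells out the expected chain transformation rather than
invoking AtBotRule itself:

import langbot_plugin.api.entities.builtin.platform.message as platform_message


def test_expected_at_removal_shape():
    # AtBotRule is expected to turn [At(bot), Plain(text)] into [Plain(text)].
    components = [
        platform_message.At(target=10001),  # hypothetical bot account id
        platform_message.Plain(text='hello bot'),
    ]
    remaining = [c for c in components if not isinstance(c, platform_message.At)]
    chain = platform_message.MessageChain(remaining)

    assert len(remaining) == 1
    assert isinstance(remaining[0], platform_message.Plain)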

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

* refactor: restructure tests to tests/unit_tests/pipeline

Reorganized test directory structure to support multiple test categories:

- Move tests/pipeline → tests/unit_tests/pipeline
- Rename .github/workflows/pipeline-tests.yml → run-tests.yml
- Update run_tests.sh to run all unit tests, not just pipeline (the equivalent pytest invocation is sketched after this list)
- Update workflow to trigger on all pkg/** and tests/** changes
- Coverage now tracks entire pkg/ module instead of just pipeline

This structure allows for easy addition of more unit tests for other
modules in the future.

All 25 tests passing with 21% overall pkg coverage.
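
What run_tests.sh amounts to can also be expressed in Python; a minimal sketch,
assuming the pytest-cov plugin is installed (the actual script contents are not
shown here):

import sys

import pytest

if __name__ == '__main__':
    # Run every unit test and collect coverage for the whole pkg/ module,
    # writing the XML report that the Codecov upload step consumes.
    sys.exit(pytest.main(['tests/unit_tests', '--cov=pkg', '--cov-report=xml']))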

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

* ci: upload codecov report

* ci: codecov file

* ci: coverage.xml

---------

Co-authored-by: Claude <noreply@anthropic.com>
2025-10-01 10:56:59 +08:00

252 lines
7.8 KiB
Python

"""
Shared test fixtures and configuration
This file provides infrastructure for all pipeline tests, including:
- Mock object factories
- Test fixtures
- Common test helper functions
"""
from __future__ import annotations
import pytest
from unittest.mock import AsyncMock, MagicMock, Mock
from typing import Any
import langbot_plugin.api.entities.builtin.pipeline.query as pipeline_query
import langbot_plugin.api.entities.builtin.platform.message as platform_message
import langbot_plugin.api.entities.builtin.platform.events as platform_events
import langbot_plugin.api.entities.builtin.provider.session as provider_session
import langbot_plugin.api.entities.builtin.provider.message as provider_message
from pkg.pipeline import entities as pipeline_entities
class MockApplication:
"""Mock Application object providing all basic dependencies needed by stages"""
def __init__(self):
self.logger = self._create_mock_logger()
self.sess_mgr = self._create_mock_session_manager()
self.model_mgr = self._create_mock_model_manager()
self.tool_mgr = self._create_mock_tool_manager()
self.plugin_connector = self._create_mock_plugin_connector()
self.persistence_mgr = self._create_mock_persistence_manager()
self.query_pool = self._create_mock_query_pool()
self.instance_config = self._create_mock_instance_config()
self.task_mgr = self._create_mock_task_manager()
def _create_mock_logger(self):
logger = Mock()
logger.debug = Mock()
logger.info = Mock()
logger.error = Mock()
logger.warning = Mock()
return logger
def _create_mock_session_manager(self):
sess_mgr = AsyncMock()
sess_mgr.get_session = AsyncMock()
sess_mgr.get_conversation = AsyncMock()
return sess_mgr
def _create_mock_model_manager(self):
model_mgr = AsyncMock()
model_mgr.get_model_by_uuid = AsyncMock()
return model_mgr
def _create_mock_tool_manager(self):
tool_mgr = AsyncMock()
tool_mgr.get_all_tools = AsyncMock(return_value=[])
return tool_mgr
def _create_mock_plugin_connector(self):
plugin_connector = AsyncMock()
plugin_connector.emit_event = AsyncMock()
return plugin_connector
def _create_mock_persistence_manager(self):
persistence_mgr = AsyncMock()
persistence_mgr.execute_async = AsyncMock()
return persistence_mgr
def _create_mock_query_pool(self):
query_pool = Mock()
query_pool.cached_queries = {}
query_pool.queries = []
query_pool.condition = AsyncMock()
return query_pool
def _create_mock_instance_config(self):
instance_config = Mock()
instance_config.data = {
'command': {'prefix': ['/', '!'], 'enable': True},
'concurrency': {'pipeline': 10},
}
return instance_config
def _create_mock_task_manager(self):
task_mgr = Mock()
task_mgr.create_task = Mock()
return task_mgr
@pytest.fixture
def mock_app():
"""Provides Mock Application instance"""
return MockApplication()
@pytest.fixture
def mock_session():
"""Provides Mock Session object"""
session = Mock()
session.launcher_type = provider_session.LauncherTypes.PERSON
session.launcher_id = 12345
session._semaphore = AsyncMock()
session._semaphore.locked = Mock(return_value=False)
session._semaphore.acquire = AsyncMock()
session._semaphore.release = AsyncMock()
return session
@pytest.fixture
def mock_conversation():
"""Provides Mock Conversation object"""
conversation = Mock()
conversation.uuid = 'test-conversation-uuid'
# Create mock prompt with copy method
mock_prompt = Mock()
mock_prompt.messages = []
mock_prompt.copy = Mock(return_value=Mock(messages=[]))
conversation.prompt = mock_prompt
# Create mock messages list with copy method
mock_messages = Mock()
mock_messages.copy = Mock(return_value=[])
conversation.messages = mock_messages
return conversation
@pytest.fixture
def mock_model():
"""Provides Mock Model object"""
model = Mock()
model.model_entity = Mock()
model.model_entity.uuid = 'test-model-uuid'
model.model_entity.abilities = ['func_call', 'vision']
return model
@pytest.fixture
def mock_adapter():
"""Provides Mock Adapter object"""
adapter = AsyncMock()
adapter.is_stream_output_supported = AsyncMock(return_value=False)
adapter.reply_message = AsyncMock()
adapter.reply_message_chunk = AsyncMock()
return adapter
@pytest.fixture
def sample_message_chain():
"""Provides sample message chain"""
return platform_message.MessageChain(
[
platform_message.Plain(text='Hello, this is a test message'),
]
)
@pytest.fixture
def sample_message_event(sample_message_chain):
"""Provides sample message event"""
event = Mock()
event.sender = Mock()
event.sender.id = 12345
event.time = 1609459200 # 2021-01-01 00:00:00
return event
@pytest.fixture
def sample_query(sample_message_chain, sample_message_event, mock_adapter):
"""Provides sample Query object - using model_construct to bypass validation"""
import langbot_plugin.api.entities.builtin.pipeline.query as pipeline_query
# Use model_construct to bypass Pydantic validation for test purposes
query = pipeline_query.Query.model_construct(
query_id='test-query-id',
launcher_type=provider_session.LauncherTypes.PERSON,
launcher_id=12345,
sender_id=12345,
message_chain=sample_message_chain,
message_event=sample_message_event,
adapter=mock_adapter,
pipeline_uuid='test-pipeline-uuid',
bot_uuid='test-bot-uuid',
pipeline_config={
'ai': {
'runner': {'runner': 'local-agent'},
'local-agent': {'model': 'test-model-uuid', 'prompt': 'test-prompt'},
},
'output': {'misc': {'at-sender': False, 'quote-origin': False}},
'trigger': {'misc': {'combine-quote-message': False}},
},
session=None,
prompt=None,
messages=[],
user_message=None,
use_funcs=[],
use_llm_model_uuid=None,
variables={},
resp_messages=[],
resp_message_chain=None,
current_stage_name=None
)
return query
@pytest.fixture
def sample_pipeline_config():
"""Provides sample pipeline configuration"""
return {
'ai': {
'runner': {'runner': 'local-agent'},
'local-agent': {'model': 'test-model-uuid', 'prompt': 'test-prompt'},
},
'output': {'misc': {'at-sender': False, 'quote-origin': False}},
'trigger': {'misc': {'combine-quote-message': False}},
'ratelimit': {'enable': True, 'algo': 'fixwin', 'window': 60, 'limit': 10},
}
def create_stage_result(
result_type: pipeline_entities.ResultType,
query: pipeline_query.Query,
user_notice: str = '',
console_notice: str = '',
debug_notice: str = '',
error_notice: str = '',
) -> pipeline_entities.StageProcessResult:
"""Helper function to create stage process result"""
return pipeline_entities.StageProcessResult(
result_type=result_type,
new_query=query,
user_notice=user_notice,
console_notice=console_notice,
debug_notice=debug_notice,
error_notice=error_notice,
)
def assert_result_continue(result: pipeline_entities.StageProcessResult):
"""Assert result is CONTINUE type"""
assert result.result_type == pipeline_entities.ResultType.CONTINUE
def assert_result_interrupt(result: pipeline_entities.StageProcessResult):
"""Assert result is INTERRUPT type"""
assert result.result_type == pipeline_entities.ResultType.INTERRUPT
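
A minimal usage sketch of the helper functions above (a hypothetical test
module, not part of conftest.py; unlike fixtures, the helpers need an explicit
import, which assumes the tests tree is importable from the project root):

from pkg.pipeline import entities as pipeline_entities

from tests.unit_tests.pipeline.conftest import (
    assert_result_continue,
    create_stage_result,
)


def test_stage_result_helpers(sample_query):
    # create_stage_result fills all notice fields with empty strings by default.
    result = create_stage_result(pipeline_entities.ResultType.CONTINUE, sample_query)

    assert result.new_query is sample_query
    assert_result_continue(result)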