Mirror of https://github.com/langbot-app/LangBot.git (synced 2025-11-25 03:15:06 +08:00)
chore: Add PyPI package support for uvx/pip installation (#1764)
* Initial plan

* Add package structure and resource path utilities

  - Created langbot/ package with __init__.py and __main__.py entry point
  - Added paths utility to find frontend and resource files from package installation
  - Updated config loading to use resource paths
  - Updated frontend serving to use resource paths
  - Added MANIFEST.in for package data inclusion
  - Updated pyproject.toml with build system and entry points

  Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>

* Add PyPI publishing workflow and update license

  - Created GitHub Actions workflow to build frontend and publish to PyPI
  - Added license field to pyproject.toml to fix deprecation warning
  - Updated .gitignore to exclude build artifacts
  - Tested package building successfully

  Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>

* Add PyPI installation documentation

  - Created PYPI_INSTALLATION.md with detailed installation and usage instructions
  - Updated README.md to feature uvx/pip installation as recommended method
  - Updated README_EN.md with same changes for English documentation

  Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>

* Address code review feedback

  - Made package-data configuration more specific to langbot package only
  - Improved path detection with caching to avoid repeated file I/O
  - Removed sys.path searching which was incorrect for package data
  - Removed interactive input() call for non-interactive environment compatibility
  - Simplified error messages for version check

  Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>

* Fix code review issues

  - Use specific exception types instead of bare except
  - Fix misleading comments about directory levels
  - Remove redundant existence check before makedirs with exist_ok=True
  - Use context manager for file opening to ensure proper cleanup

  Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>

* Simplify package configuration and document behavioral differences

  - Removed redundant package-data configuration, relying on MANIFEST.in
  - Added documentation about behavioral differences between package and source installation
  - Clarified that include-package-data=true uses MANIFEST.in for data files

  Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>

* chore: update pyproject.toml
* chore: try pack templates in langbot/
* chore: update
* chore: update
* chore: update
* chore: update
* chore: update
* chore: adjust dir structure
* chore: fix imports
* fix: read default-pipeline-config.json
* fix: read default-pipeline-config.json
* fix: tests
* ci: publish pypi
* chore: bump version 4.6.0-beta.1 for testing
* chore: add templates/**
* fix: send adapters and requesters icons
* chore: bump version 4.6.0b2 for testing
* chore: add platform field for docker-compose.yaml

---------

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>
Co-authored-by: Junyan Qin <rockchinq@gmail.com>
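The core of the change is a small paths utility that lets config loading and frontend serving locate bundled data files whether LangBot runs from a source checkout or from an installed wheel. That module is not part of this diff excerpt; the sketch below only illustrates the cached-lookup idea described in the commit message, and the names (get_package_root, get_resource_path) are hypothetical, not the actual LangBot API.

# Hypothetical sketch of a cached resource-path helper; names and layout are illustrative only.
from functools import lru_cache
from pathlib import Path


@lru_cache(maxsize=None)
def get_package_root() -> Path:
    """Return the installed package directory (cached to avoid repeated file I/O)."""
    # The package's own __file__ is a reliable anchor when the project is installed
    # from a wheel rather than run from a source checkout.
    import langbot  # assumed top-level package created by this commit
    return Path(langbot.__file__).resolve().parent


@lru_cache(maxsize=None)
def get_resource_path(relative: str) -> Path:
    """Resolve a bundled data file (e.g. templates or the frontend build) inside the package."""
    candidate = get_package_root() / relative
    if not candidate.exists():
        raise FileNotFoundError(f'bundled resource not found: {candidate}')
    return candidate

Caching the lookups (lru_cache here) mirrors the review note about avoiding repeated file I/O, and resolving paths relative to the package rather than sys.path matches the fix called out above.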
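The commit message also mentions a langbot/__main__.py and console entry points in pyproject.toml, which is what makes launching via uvx or a pip-installed script possible (e.g. uvx langbot, assuming the published package and script name). The actual bootstrap code is not shown in this excerpt; the following is a minimal, assumed illustration of that wiring, with main() and run_app() as placeholder names.

# Illustrative __main__.py entry point; the real LangBot bootstrap differs.
# A main() function lets pyproject.toml expose a console script, while this
# module keeps `python -m langbot` working as well.
import asyncio


async def run_app() -> None:
    print('starting LangBot...')  # stand-in for the actual startup routine


def main() -> None:
    asyncio.run(run_app())


if __name__ == '__main__':
    main()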
@@ -10,16 +10,13 @@ This file provides infrastructure for all pipeline tests, including:
from __future__ import annotations

import pytest
from unittest.mock import AsyncMock, MagicMock, Mock
from typing import Any
from unittest.mock import AsyncMock, Mock

import langbot_plugin.api.entities.builtin.pipeline.query as pipeline_query
import langbot_plugin.api.entities.builtin.platform.message as platform_message
import langbot_plugin.api.entities.builtin.platform.events as platform_events
import langbot_plugin.api.entities.builtin.provider.session as provider_session
import langbot_plugin.api.entities.builtin.provider.message as provider_message

from pkg.pipeline import entities as pipeline_entities
from langbot.pkg.pipeline import entities as pipeline_entities


class MockApplication:
@@ -203,7 +200,7 @@ def sample_query(sample_message_chain, sample_message_event, mock_adapter):
        variables={},
        resp_messages=[],
        resp_message_chain=None,
        current_stage_name=None
        current_stage_name=None,
    )
    return query
@@ -5,7 +5,6 @@ Tests the actual BanSessionCheckStage implementation from pkg.pipeline.bansess
"""

import pytest
from unittest.mock import Mock
from importlib import import_module
import langbot_plugin.api.entities.builtin.provider.session as provider_session
@@ -13,9 +12,8 @@ import langbot_plugin.api.entities.builtin.provider.session as provider_session
def get_modules():
    """Lazy import to ensure proper initialization order"""
    # Import pipelinemgr first to trigger proper stage registration
    pipelinemgr = import_module('pkg.pipeline.pipelinemgr')
    bansess = import_module('pkg.pipeline.bansess.bansess')
    entities = import_module('pkg.pipeline.entities')
    bansess = import_module('langbot.pkg.pipeline.bansess.bansess')
    entities = import_module('langbot.pkg.pipeline.entities')
    return bansess, entities
@@ -26,14 +24,7 @@ async def test_whitelist_allow(mock_app, sample_query):

    sample_query.launcher_type = provider_session.LauncherTypes.PERSON
    sample_query.launcher_id = '12345'
    sample_query.pipeline_config = {
        'trigger': {
            'access-control': {
                'mode': 'whitelist',
                'whitelist': ['person_12345']
            }
        }
    }
    sample_query.pipeline_config = {'trigger': {'access-control': {'mode': 'whitelist', 'whitelist': ['person_12345']}}}

    stage = bansess.BanSessionCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -51,14 +42,7 @@ async def test_whitelist_deny(mock_app, sample_query):

    sample_query.launcher_type = provider_session.LauncherTypes.PERSON
    sample_query.launcher_id = '99999'
    sample_query.pipeline_config = {
        'trigger': {
            'access-control': {
                'mode': 'whitelist',
                'whitelist': ['person_12345']
            }
        }
    }
    sample_query.pipeline_config = {'trigger': {'access-control': {'mode': 'whitelist', 'whitelist': ['person_12345']}}}

    stage = bansess.BanSessionCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -75,14 +59,7 @@ async def test_blacklist_allow(mock_app, sample_query):

    sample_query.launcher_type = provider_session.LauncherTypes.PERSON
    sample_query.launcher_id = '12345'
    sample_query.pipeline_config = {
        'trigger': {
            'access-control': {
                'mode': 'blacklist',
                'blacklist': ['person_99999']
            }
        }
    }
    sample_query.pipeline_config = {'trigger': {'access-control': {'mode': 'blacklist', 'blacklist': ['person_99999']}}}

    stage = bansess.BanSessionCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -99,14 +76,7 @@ async def test_blacklist_deny(mock_app, sample_query):

    sample_query.launcher_type = provider_session.LauncherTypes.PERSON
    sample_query.launcher_id = '12345'
    sample_query.pipeline_config = {
        'trigger': {
            'access-control': {
                'mode': 'blacklist',
                'blacklist': ['person_12345']
            }
        }
    }
    sample_query.pipeline_config = {'trigger': {'access-control': {'mode': 'blacklist', 'blacklist': ['person_12345']}}}

    stage = bansess.BanSessionCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -123,14 +93,7 @@ async def test_wildcard_group(mock_app, sample_query):

    sample_query.launcher_type = provider_session.LauncherTypes.GROUP
    sample_query.launcher_id = '12345'
    sample_query.pipeline_config = {
        'trigger': {
            'access-control': {
                'mode': 'whitelist',
                'whitelist': ['group_*']
            }
        }
    }
    sample_query.pipeline_config = {'trigger': {'access-control': {'mode': 'whitelist', 'whitelist': ['group_*']}}}

    stage = bansess.BanSessionCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -147,14 +110,7 @@ async def test_wildcard_person(mock_app, sample_query):

    sample_query.launcher_type = provider_session.LauncherTypes.PERSON
    sample_query.launcher_id = '12345'
    sample_query.pipeline_config = {
        'trigger': {
            'access-control': {
                'mode': 'whitelist',
                'whitelist': ['person_*']
            }
        }
    }
    sample_query.pipeline_config = {'trigger': {'access-control': {'mode': 'whitelist', 'whitelist': ['person_*']}}}

    stage = bansess.BanSessionCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -172,14 +128,7 @@ async def test_user_id_wildcard(mock_app, sample_query):
    sample_query.launcher_type = provider_session.LauncherTypes.PERSON
    sample_query.launcher_id = '12345'
    sample_query.sender_id = '67890'
    sample_query.pipeline_config = {
        'trigger': {
            'access-control': {
                'mode': 'whitelist',
                'whitelist': ['*_67890']
            }
        }
    }
    sample_query.pipeline_config = {'trigger': {'access-control': {'mode': 'whitelist', 'whitelist': ['*_67890']}}}

    stage = bansess.BanSessionCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -8,19 +8,19 @@ from importlib import import_module


def get_pipelinemgr_module():
    return import_module('pkg.pipeline.pipelinemgr')
    return import_module('langbot.pkg.pipeline.pipelinemgr')


def get_stage_module():
    return import_module('pkg.pipeline.stage')
    return import_module('langbot.pkg.pipeline.stage')


def get_entities_module():
    return import_module('pkg.pipeline.entities')
    return import_module('langbot.pkg.pipeline.entities')


def get_persistence_pipeline_module():
    return import_module('pkg.entity.persistence.pipeline')
    return import_module('langbot.pkg.entity.persistence.pipeline')


@pytest.mark.asyncio
@@ -13,10 +13,9 @@ import langbot_plugin.api.entities.builtin.provider.session as provider_session
def get_modules():
    """Lazy import to ensure proper initialization order"""
    # Import pipelinemgr first to trigger proper stage registration
    pipelinemgr = import_module('pkg.pipeline.pipelinemgr')
    ratelimit = import_module('pkg.pipeline.ratelimit.ratelimit')
    entities = import_module('pkg.pipeline.entities')
    algo_module = import_module('pkg.pipeline.ratelimit.algo')
    ratelimit = import_module('langbot.pkg.pipeline.ratelimit.ratelimit')
    entities = import_module('langbot.pkg.pipeline.entities')
    algo_module = import_module('langbot.pkg.pipeline.ratelimit.algo')
    return ratelimit, entities, algo_module
@@ -44,11 +43,7 @@ async def test_require_access_allowed(mock_app, sample_query):

    assert result.result_type == entities.ResultType.CONTINUE
    assert result.new_query == sample_query
    mock_algo.require_access.assert_called_once_with(
        sample_query,
        'person',
        '12345'
    )
    mock_algo.require_access.assert_called_once_with(sample_query, 'person', '12345')


@pytest.mark.asyncio
@@ -102,8 +97,4 @@ async def test_release_access(mock_app, sample_query):

    assert result.result_type == entities.ResultType.CONTINUE
    assert result.new_query == sample_query
    mock_algo.release_access.assert_called_once_with(
        sample_query,
        'person',
        '12345'
    )
    mock_algo.release_access.assert_called_once_with(sample_query, 'person', '12345')
@@ -14,11 +14,11 @@ import langbot_plugin.api.entities.builtin.platform.message as platform_message
def get_modules():
    """Lazy import to ensure proper initialization order"""
    # Import pipelinemgr first to trigger proper stage registration
    pipelinemgr = import_module('pkg.pipeline.pipelinemgr')
    resprule = import_module('pkg.pipeline.resprule.resprule')
    entities = import_module('pkg.pipeline.entities')
    rule = import_module('pkg.pipeline.resprule.rule')
    rule_entities = import_module('pkg.pipeline.resprule.entities')
    # pipelinemgr = import_module('langbot.pkg.pipeline.pipelinemgr')
    resprule = import_module('langbot.pkg.pipeline.resprule.resprule')
    entities = import_module('langbot.pkg.pipeline.entities')
    rule = import_module('langbot.pkg.pipeline.resprule.rule')
    rule_entities = import_module('langbot.pkg.pipeline.resprule.entities')
    return resprule, entities, rule, rule_entities
@@ -28,11 +28,7 @@ async def test_person_message_skip(mock_app, sample_query):
    resprule, entities, rule, rule_entities = get_modules()

    sample_query.launcher_type = provider_session.LauncherTypes.PERSON
    sample_query.pipeline_config = {
        'trigger': {
            'group-respond-rules': {}
        }
    }
    sample_query.pipeline_config = {'trigger': {'group-respond-rules': {}}}

    stage = resprule.GroupRespondRuleCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -50,18 +46,13 @@ async def test_group_message_no_match(mock_app, sample_query):

    sample_query.launcher_type = provider_session.LauncherTypes.GROUP
    sample_query.launcher_id = '12345'
    sample_query.pipeline_config = {
        'trigger': {
            'group-respond-rules': {}
        }
    }
    sample_query.pipeline_config = {'trigger': {'group-respond-rules': {}}}

    # Create mock rule matcher that doesn't match
    mock_rule = Mock(spec=rule.GroupRespondRule)
    mock_rule.match = AsyncMock(return_value=rule_entities.RuleJudgeResult(
        matching=False,
        replacement=sample_query.message_chain
    ))
    mock_rule.match = AsyncMock(
        return_value=rule_entities.RuleJudgeResult(matching=False, replacement=sample_query.message_chain)
    )

    stage = resprule.GroupRespondRuleCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -81,23 +72,14 @@ async def test_group_message_match(mock_app, sample_query):

    sample_query.launcher_type = provider_session.LauncherTypes.GROUP
    sample_query.launcher_id = '12345'
    sample_query.pipeline_config = {
        'trigger': {
            'group-respond-rules': {}
        }
    }
    sample_query.pipeline_config = {'trigger': {'group-respond-rules': {}}}

    # Create new message chain after rule processing
    new_chain = platform_message.MessageChain([
        platform_message.Plain(text='Processed message')
    ])
    new_chain = platform_message.MessageChain([platform_message.Plain(text='Processed message')])

    # Create mock rule matcher that matches
    mock_rule = Mock(spec=rule.GroupRespondRule)
    mock_rule.match = AsyncMock(return_value=rule_entities.RuleJudgeResult(
        matching=True,
        replacement=new_chain
    ))
    mock_rule.match = AsyncMock(return_value=rule_entities.RuleJudgeResult(matching=True, replacement=new_chain))

    stage = resprule.GroupRespondRuleCheckStage(mock_app)
    await stage.initialize(sample_query.pipeline_config)
@@ -115,27 +97,21 @@ async def test_group_message_match(mock_app, sample_query):
async def test_atbot_rule_match(mock_app, sample_query):
    """Test AtBotRule removes At component"""
    resprule, entities, rule, rule_entities = get_modules()
    atbot_module = import_module('pkg.pipeline.resprule.rules.atbot')
    atbot_module = import_module('langbot.pkg.pipeline.resprule.rules.atbot')

    sample_query.launcher_type = provider_session.LauncherTypes.GROUP
    sample_query.adapter.bot_account_id = '999'

    # Create message chain with At component
    message_chain = platform_message.MessageChain([
        platform_message.At(target='999'),
        platform_message.Plain(text='Hello bot')
    ])
    message_chain = platform_message.MessageChain(
        [platform_message.At(target='999'), platform_message.Plain(text='Hello bot')]
    )
    sample_query.message_chain = message_chain

    atbot_rule = atbot_module.AtBotRule(mock_app)
    await atbot_rule.initialize()

    result = await atbot_rule.match(
        str(message_chain),
        message_chain,
        {},
        sample_query
    )
    result = await atbot_rule.match(str(message_chain), message_chain, {}, sample_query)

    assert result.matching is True
    # At component should be removed
@@ -147,25 +123,18 @@ async def test_atbot_rule_match(mock_app, sample_query):
async def test_atbot_rule_no_match(mock_app, sample_query):
    """Test AtBotRule when no At component present"""
    resprule, entities, rule, rule_entities = get_modules()
    atbot_module = import_module('pkg.pipeline.resprule.rules.atbot')
    atbot_module = import_module('langbot.pkg.pipeline.resprule.rules.atbot')

    sample_query.launcher_type = provider_session.LauncherTypes.GROUP
    sample_query.adapter.bot_account_id = '999'

    # Create message chain without At component
    message_chain = platform_message.MessageChain([
        platform_message.Plain(text='Hello')
    ])
    message_chain = platform_message.MessageChain([platform_message.Plain(text='Hello')])
    sample_query.message_chain = message_chain

    atbot_rule = atbot_module.AtBotRule(mock_app)
    await atbot_rule.initialize()

    result = await atbot_rule.match(
        str(message_chain),
        message_chain,
        {},
        sample_query
    )
    result = await atbot_rule.match(str(message_chain), message_chain, {}, sample_query)

    assert result.matching is False
@@ -4,9 +4,9 @@ Tests for storage manager and provider selection

import pytest
from unittest.mock import Mock, AsyncMock, patch
from pkg.storage.mgr import StorageMgr
from pkg.storage.providers.localstorage import LocalStorageProvider
from pkg.storage.providers.s3storage import S3StorageProvider
from langbot.pkg.storage.mgr import StorageMgr
from langbot.pkg.storage.providers.localstorage import LocalStorageProvider
from langbot.pkg.storage.providers.s3storage import S3StorageProvider


class TestStorageProviderSelection:
@@ -34,11 +34,7 @@ class TestStorageProviderSelection:
        # Mock application
        mock_app = Mock()
        mock_app.instance_config = Mock()
        mock_app.instance_config.data = {
            'storage': {
                'use': 'local'
            }
        }
        mock_app.instance_config.data = {'storage': {'use': 'local'}}
        mock_app.logger = Mock()

        storage_mgr = StorageMgr(mock_app)
@@ -62,8 +58,8 @@ class TestStorageProviderSelection:
                    'access_key_id': 'test_key',
                    'secret_access_key': 'test_secret',
                    'region': 'us-east-1',
                    'bucket': 'test-bucket'
                }
                    'bucket': 'test-bucket',
                },
            }
        }
        mock_app.logger = Mock()
@@ -81,11 +77,7 @@ class TestStorageProviderSelection:
        # Mock application
        mock_app = Mock()
        mock_app.instance_config = Mock()
        mock_app.instance_config.data = {
            'storage': {
                'use': 'invalid_type'
            }
        }
        mock_app.instance_config.data = {'storage': {'use': 'invalid_type'}}
        mock_app.logger = Mock()

        storage_mgr = StorageMgr(mock_app)