From 06c8773975bedc3099a23bfda68eb793c3824e4b Mon Sep 17 00:00:00 2001
From: RockChinQ <1010553892@qq.com>
Date: Fri, 16 Feb 2024 14:11:22 +0800
Subject: [PATCH] =?UTF-8?q?perf:=20=E4=BC=98=E5=8C=96=E6=8E=A7=E5=88=B6?=
 =?UTF-8?q?=E5=8F=B0=E8=BE=93=E5=87=BA?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 pkg/core/controller.py                        | 2 +-
 pkg/pipeline/process/process.py               | 2 ++
 pkg/platform/sources/aiocqhttp.py             | 2 +-
 pkg/provider/requester/modelmgr.py            | 2 +-
 pkg/provider/requester/tokenizers/tiktoken.py | 2 +-
 5 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/pkg/core/controller.py b/pkg/core/controller.py
index 7173939b..42ef435c 100644
--- a/pkg/core/controller.py
+++ b/pkg/core/controller.py
@@ -151,7 +151,7 @@ class Controller:
             except Exception as e:
                 self.ap.logger.error(f"处理请求时出错 query_id={query.query_id}: {e}")
                 self.ap.logger.debug(f"Traceback: {traceback.format_exc()}")
-                traceback.print_exc()
+                # traceback.print_exc()
             finally:
                 self.ap.logger.debug(f"Query {query} processed")
 
diff --git a/pkg/pipeline/process/process.py b/pkg/pipeline/process/process.py
index 29051431..c24fdac2 100644
--- a/pkg/pipeline/process/process.py
+++ b/pkg/pipeline/process/process.py
@@ -32,6 +32,8 @@ class Processor(stage.PipelineStage):
         """
         message_text = str(query.message_chain).strip()
 
+        self.ap.logger.info(f"处理 {query.launcher_type.value}_{query.launcher_id} 的请求({query.query_id}): {message_text}")
+
         if message_text.startswith('!') or message_text.startswith('!'):
             return self.cmd_handler.handle(query)
         else:
diff --git a/pkg/platform/sources/aiocqhttp.py b/pkg/platform/sources/aiocqhttp.py
index 9cbc3502..fc6b4fbe 100644
--- a/pkg/platform/sources/aiocqhttp.py
+++ b/pkg/platform/sources/aiocqhttp.py
@@ -40,7 +40,7 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
             elif type(msg) is mirai.Voice:
                 msg_list.append(aiocqhttp.MessageSegment.record(msg.path))
             elif type(msg) is forward.Forward:
-                print("aiocqhttp 暂不支持转发消息组件的转换,使用普通消息链发送")
+                # print("aiocqhttp 暂不支持转发消息组件的转换,使用普通消息链发送")
                 for node in msg.node_list:
                     msg_list.extend(AiocqhttpMessageConverter.yiri2target(node.message_chain)[0])
 
diff --git a/pkg/provider/requester/modelmgr.py b/pkg/provider/requester/modelmgr.py
index dd942970..b197c9ca 100644
--- a/pkg/provider/requester/modelmgr.py
+++ b/pkg/provider/requester/modelmgr.py
@@ -24,7 +24,7 @@ class ModelManager:
         for model in self.model_list:
             if model.name == name:
                 return model
-        raise ValueError(f"Model {name} not found")
+        raise ValueError(f"不支持模型: {name} , 请检查配置文件")
 
     async def initialize(self):
         openai_chat_completion = chatcmpl.OpenAIChatCompletion(self.ap)
diff --git a/pkg/provider/requester/tokenizers/tiktoken.py b/pkg/provider/requester/tokenizers/tiktoken.py
index 14a456c0..0bf97b17 100644
--- a/pkg/provider/requester/tokenizers/tiktoken.py
+++ b/pkg/provider/requester/tokenizers/tiktoken.py
@@ -17,7 +17,7 @@ class Tiktoken(tokenizer.LLMTokenizer):
         try:
             encoding = tiktoken.encoding_for_model(model.name)
         except KeyError:
-            print("Warning: model not found. Using cl100k_base encoding.")
+            # print("Warning: model not found. Using cl100k_base encoding.")
             encoding = tiktoken.get_encoding("cl100k_base")
 
         num_tokens = 0