feat: add model_config parameter support for Dify assistant type apps (#1796)
* Initial plan

* feat: add model_config parameter support for Dify assistant type

  - Add model_config parameter to AsyncDifyServiceClient.chat_messages method
  - Add _get_model_config helper method to DifyServiceAPIRunner
  - Pass model_config from pipeline configuration to all chat_messages calls
  - Add model-config configuration field to dify-service-api schema in ai.yaml
  - Support optional model configuration for assistant type apps in open-source Dify

  Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>

* refactor: improve model_config implementation based on code review

  - Simplify _get_model_config method logic
  - Add more descriptive comment about model_config usage
  - Clarify when model_config is used (assistant type apps)

  Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>

* feat: only modify client.py

---------

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>
Co-authored-by: Junyan Qin <rockchinq@gmail.com>
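For illustration, here is a minimal sketch of how a caller could pass the new parameter. The import path, constructor arguments, and the contents of the model_config dict are assumptions made for the example and are not part of this commit; only the chat_messages signature comes from the diff below.

    import asyncio

    from libs.dify_service_api.v1.client import AsyncDifyServiceClient  # path assumed

    async def main() -> None:
        # Constructor arguments are assumed; the diff only shows that the
        # client holds an api_key used for the Authorization header.
        client = AsyncDifyServiceClient(api_key='app-xxx', base_url='https://dify.example.com/v1')

        # Only meaningful for assistant type apps on open-source Dify; the
        # keys below are placeholders, not a documented schema.
        model_config = {
            'model': {'provider': 'openai', 'name': 'gpt-4o'},
        }

        async for chunk in client.chat_messages(
            inputs={},
            query='Hello',
            user='user-123',
            response_mode='streaming',
            model_config=model_config,
        ):
            print(chunk)

    asyncio.run(main())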
@@ -32,6 +32,7 @@ class AsyncDifyServiceClient:
         conversation_id: str = '',
         files: list[dict[str, typing.Any]] = [],
         timeout: float = 30.0,
+        model_config: dict[str, typing.Any] | None = None,
     ) -> typing.AsyncGenerator[dict[str, typing.Any], None]:
         """发送消息"""
         if response_mode != 'streaming':
@@ -42,6 +43,16 @@ class AsyncDifyServiceClient:
             trust_env=True,
             timeout=timeout,
         ) as client:
+            payload = {
+                'inputs': inputs,
+                'query': query,
+                'user': user,
+                'response_mode': response_mode,
+                'conversation_id': conversation_id,
+                'files': files,
+                'model_config': model_config or {},
+            }
+
             async with client.stream(
                 'POST',
                 '/chat-messages',
@@ -49,14 +60,7 @@ class AsyncDifyServiceClient:
                     'Authorization': f'Bearer {self.api_key}',
                     'Content-Type': 'application/json',
                 },
-                json={
-                    'inputs': inputs,
-                    'query': query,
-                    'user': user,
-                    'response_mode': response_mode,
-                    'conversation_id': conversation_id,
-                    'files': files,
-                },
+                json=payload,
             ) as r:
                 async for chunk in r.aiter_lines():
                     if r.status_code != 200:
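Earlier iterations of this PR also described a _get_model_config helper on DifyServiceAPIRunner that reads a model-config field from the pipeline configuration and forwards it to every chat_messages call, but the final commit only touches client.py, so callers must supply the value themselves. A hypothetical sketch of such a helper is shown below; the configuration layout and key names are assumptions taken from the commit message, not code from this repository.

    import typing

    def get_model_config(pipeline_config: dict[str, typing.Any]) -> dict[str, typing.Any] | None:
        """Pull the optional model-config field out of a pipeline configuration dict.

        The 'ai' / 'dify-service-api' / 'model-config' key names mirror the schema
        field mentioned in the commit message; the surrounding structure is assumed.
        """
        dify_config = pipeline_config.get('ai', {}).get('dify-service-api', {})
        model_config = dify_config.get('model-config')
        # Returning None is fine: chat_messages replaces it with an empty dict
        # via `model_config or {}`.
        return model_config or None

A caller would then pass the result straight through, e.g. model_config=get_model_config(pipeline_config), leaving the Dify app's own model settings untouched when the field is absent.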