Commit 13d3641

fix: llm model wrongly required when runner is not local-agent (#1386)

1 parent: f2e1ae4

4 files changed (+23, -12 lines)


pkg/core/entities.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -133,7 +133,7 @@ class Conversation(pydantic.BaseModel):
 
     update_time: typing.Optional[datetime.datetime] = pydantic.Field(default_factory=datetime.datetime.now)
 
-    use_llm_model: requester.RuntimeLLMModel
+    use_llm_model: typing.Optional[requester.RuntimeLLMModel] = None
 
     use_funcs: typing.Optional[list[tools_entities.LLMFunction]]
 
```
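
This is the core of the fix: `use_llm_model` on the `Conversation` entity becomes optional with a `None` default, so the entity can be constructed even when no LLM model is resolved, i.e. when the pipeline runner is not `local-agent`. A minimal standalone sketch of the same pydantic pattern, using a plain `str` as a stand-in for `requester.RuntimeLLMModel`:

```python
import typing

import pydantic


class ConversationSketch(pydantic.BaseModel):
    """Toy stand-in for the project's Conversation entity (not the real class)."""

    # Before the fix the field was required (use_llm_model: RuntimeLLMModel),
    # so building a Conversation without a model raised a ValidationError even
    # when the runner never needed one. After the fix it is optional:
    use_llm_model: typing.Optional[str] = None


# Now valid for runners that resolve no model (anything other than local-agent).
print(ConversationSketch().use_llm_model)  # None
```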

pkg/pipeline/preproc/preproc.py

Lines changed: 18 additions & 7 deletions
```diff
@@ -32,8 +32,18 @@ async def process(
         """处理"""
         session = await self.ap.sess_mgr.get_session(query)
 
+        # 非 local-agent 时,llm_model 为 None
+        llm_model = (
+            await self.ap.model_mgr.get_model_by_uuid(query.pipeline_config['ai']['local-agent']['model'])
+            if query.pipeline_config['ai']['runner'] == 'local-agent'
+            else None
+        )
+
         conversation = await self.ap.sess_mgr.get_conversation(
-            query, session, query.pipeline_config['ai']['local-agent']['prompt']
+            query,
+            session,
+            query.pipeline_config['ai']['local-agent']['prompt'],
+            llm_model,
         )
 
         # 设置query
@@ -43,16 +53,17 @@ async def process(
 
         query.use_llm_model = conversation.use_llm_model
 
-        query.use_funcs = (
-            conversation.use_funcs if query.use_llm_model.model_entity.abilities.__contains__('tool_call') else None
-        )
+        if query.pipeline_config['ai']['runner'] == 'local-agent':
+            query.use_funcs = (
+                conversation.use_funcs if query.use_llm_model.model_entity.abilities.__contains__('tool_call') else None
+            )
 
         query.variables = {
             'session_id': f'{query.session.launcher_type.value}_{query.session.launcher_id}',
             'conversation_id': conversation.uuid,
-            'msg_create_time': int(query.message_event.time)
-            if query.message_event.time
-            else int(datetime.datetime.now().timestamp()),
+            'msg_create_time': (
+                int(query.message_event.time) if query.message_event.time else int(datetime.datetime.now().timestamp())
+            ),
         }
 
         # Check if this model supports vision, if not, remove all images
```
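
The preprocessing stage now resolves the model only when the runner is `local-agent` (the added comment reads: "when not local-agent, llm_model is None"), passes the result into `get_conversation`, and only computes `use_funcs` in the local-agent case. Below is a rough, self-contained sketch of that conditional-resolution pattern; `lookup_model`, `resolve_model`, and the config dicts are illustrative stand-ins, not the project's API:

```python
import asyncio
from typing import Optional


async def lookup_model(uuid: str) -> str:
    """Illustrative stand-in for model_mgr.get_model_by_uuid()."""
    return f'model-{uuid}'


async def resolve_model(pipeline_config: dict) -> Optional[str]:
    # Mirrors the patched preproc logic: only the local-agent runner needs a
    # concrete LLM model; every other runner gets None.
    return (
        await lookup_model(pipeline_config['ai']['local-agent']['model'])
        if pipeline_config['ai']['runner'] == 'local-agent'
        else None
    )


async def main() -> None:
    local_cfg = {'ai': {'runner': 'local-agent', 'local-agent': {'model': 'abc123'}}}
    other_cfg = {'ai': {'runner': 'some-other-runner', 'local-agent': {'model': 'abc123'}}}
    print(await resolve_model(local_cfg))   # model-abc123
    print(await resolve_model(other_cfg))   # None


if __name__ == '__main__':
    asyncio.run(main())
```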

pkg/provider/modelmgr/modelmgr.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -99,7 +99,7 @@ async def get_model_by_name(self, name: str) -> entities.LLMModelInfo: # deprec
         for model in self.model_list:
             if model.name == name:
                 return model
-        raise ValueError(f'无法确定模型 {name} 的信息,请在元数据中配置')
+        raise ValueError(f'无法确定模型 {name} 的信息')
 
     async def get_model_by_uuid(self, uuid: str) -> entities.LLMModelInfo:
         """通过uuid获取模型"""
```

pkg/provider/session/sessionmgr.py

Lines changed: 3 additions & 3 deletions
```diff
@@ -4,6 +4,7 @@
 
 from ...core import app, entities as core_entities
 from ...provider import entities as provider_entities
+from ...provider.modelmgr import entities as model_entities
 
 
 class SessionManager:
@@ -41,6 +42,7 @@ async def get_conversation(
         query: core_entities.Query,
         session: core_entities.Session,
         prompt_config: list[dict],
+        llm_model: model_entities.LLMModelInfo,
     ) -> core_entities.Conversation:
         """获取对话或创建对话"""
 
@@ -62,9 +64,7 @@
             conversation = core_entities.Conversation(
                 prompt=prompt,
                 messages=[],
-                use_llm_model=await self.ap.model_mgr.get_model_by_uuid(
-                    query.pipeline_config['ai']['local-agent']['model']
-                ),
+                use_llm_model=llm_model,
                 use_funcs=await self.ap.tool_mgr.get_all_functions(
                     plugin_enabled=True,
                 ),
```
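
`get_conversation` no longer fetches the model from `model_mgr` itself; the caller resolves it (possibly to `None`) and injects it through the new `llm_model` parameter. A compact sketch of that caller-supplied-dependency shape, with simplified, hypothetical types in place of the real entities:

```python
import asyncio
from typing import Optional


class ConversationSketch:
    """Simplified stand-in for core_entities.Conversation."""

    def __init__(self, use_llm_model: Optional[str] = None) -> None:
        self.use_llm_model = use_llm_model


async def get_conversation(prompt_config: list, llm_model: Optional[str]) -> ConversationSketch:
    # After the patch the session manager just stores whatever model the caller
    # resolved (possibly None for non-local-agent runners) instead of calling
    # model_mgr.get_model_by_uuid() on its own.
    return ConversationSketch(use_llm_model=llm_model)


print(asyncio.run(get_conversation([], None)).use_llm_model)  # None
```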
