Commit f6a7b0f
Changed files (1)
src/llm/summary.py
@@ -147,8 +147,8 @@ async def ai_summary(client: Client, message: Message, summary_prefix: str | Non
await modify_progress(del_status=True, **kwargs)
return
await modify_progress(text=f"🤖**{summary_model_name}**总结中...\n{msg}", force_update=True, **kwargs)
- config = get_gpt_config(model_type="text", contexts=contexts, force_model=summary_model)
-
+ config = get_gpt_config(summary_model)
+ config["completions"]["messages"] = contexts
# set max_tokens for the model
if "o1" in summary_model or "o3" in summary_model: # o1 or newer models use `max_completion_tokens`
config["completions"]["max_completion_tokens"] = max_tokens