Commit b95a7dd

benny-dou <60535774+benny-dou@users.noreply.github.com>
2025-03-13 02:28:04
style(gpt): beautify response for stream responses
1 parent 0e965fc
Changed files (1)
src/llm/response_stream.py
@@ -12,7 +12,7 @@ from pyrogram.parser.markdown import BLOCKQUOTE_EXPANDABLE_DELIM, BLOCKQUOTE_EXP
 from pyrogram.types import Message
 
 from config import GPT, TEXT_LENGTH
-from llm.utils import BOT_TIPS, add_search_results_to_response
+from llm.utils import BOT_TIPS, add_search_results_to_response, beautify_llm_response
 from messages.progress import modify_progress
 from messages.utils import count_without_entities, smart_split
 
@@ -82,6 +82,7 @@ async def send_to_gpt_stream(client: Client, status: Message, config: dict, retr
                 is_reasoning = False
                 answers = re.sub(r",持续(.*?)秒\s*", f"💡\n{BLOCKQUOTE_EXPANDABLE_END_DELIM}", answers, count=1, flags=re.DOTALL)
 
+            answers = beautify_llm_response(answers)
             if await count_without_entities(answers) <= TEXT_LENGTH:
                 await modify_progress(message=status, text=answers, detail_progress=True)
             else:  # answers is too long, split it into multiple messages
@@ -97,7 +98,7 @@ async def send_to_gpt_stream(client: Client, status: Message, config: dict, retr
         answers = add_search_results_to_response(config.get("search_results", []), "".join(sent_answers))
         answers = (await smart_split(answers))[-1]
         # Finally, force update the message
-        await modify_progress(message=status, text=answers.strip(), force_update=True)
+        await modify_progress(message=status, text=beautify_llm_response(answers), force_update=True)
 
     except Exception as e:
         error = f"🤖{config['friendly_name']}请求失败, 重试次数: {retry + 1}/{GPT.MAX_RETRY + 1}\n{e}"