Commit aaa7804

benny-dou <60535774+benny-dou@users.noreply.github.com>
2025-03-13 14:17:00
style(gpt): remove prompt tags from the response
1 parent 5158f94
Changed files (1)
src
src/llm/utils.py
@@ -93,9 +93,18 @@ def beautify_llm_response(text: str) -> str:
     """
     if not text:
         return text
-    text = remove_pound(text)
-    text = remove_dash(text)
-    return remove_consecutive_newlines(text)
+    # Strip prompt-scaffolding tags from the response; this tag list must stay in sync with the tags defined in `contexts.py`.
+    clean_text = ""
+    for line in text.split("\n"):
+        if line.strip().startswith(("[from user]:", "[file name]:")):
+            continue
+        if line.strip() in ["[message begin]", "[message end]", "[file content begin]", "[file content end]"]:
+            continue
+        clean_text += line + "\n"
+    clean_text = clean_text.removesuffix("\n")  # remove the last newline
+    clean_text = remove_pound(clean_text)
+    clean_text = remove_dash(clean_text)
+    return remove_consecutive_newlines(clean_text)
 
 
 def extract_reasoning(text: str) -> tuple[str, str]: