Commit 10d501a

benny-dou <60535774+benny-dou@users.noreply.github.com>
2025-02-27 14:16:15
fix: do not add reasoning prompt to system prompt
The model provider has already tuned the reasoning models, so the extra system prompt is no longer needed.
1 parent 4c043b8
Changed files (1)
src/config.py
@@ -168,7 +168,7 @@ class GPT:  # see `llm/README.md`
     HELICONE_API_KEY = os.getenv("HELICONE_API_KEY", "")
 
     # comma separated reasoning models, add system prompt to the models to ensure the output format.
-    REASONING_MODELS = os.getenv("GPT_REASONING_MODELS", "deepseek-r1,o1,o3")
+    REASONING_MODELS = os.getenv("GPT_REASONING_MODELS", "")  # deprecated, we do not need this anymore
     # /gemini command
     GEMINI_MODEL = os.getenv("GPT_GEMINI_MODEL", "gemini-2.0-flash")
     GEMINI_MODEL_NAME = os.getenv("GPT_GEMINI_MODEL_NAME", "Gemini-2.0-Flash")