Commit f61f5ed
Changed files (8)
src/llm/contexts.py
@@ -2,16 +2,16 @@
# -*- coding: utf-8 -*-
import base64
import contextlib
-import re
from pathlib import Path
from typing import TYPE_CHECKING
+from google.genai.types import Part
from loguru import logger
from pyrogram.client import Client
from pyrogram.types import Message
-from config import FILE_SERVER, GPT
-from llm.utils import BOT_TIPS, clean_prefix
+from config import GPT
+from llm.utils import BOT_TIPS, clean_response, convert_md
from messages.parser import parse_msg
if TYPE_CHECKING:
@@ -28,31 +28,23 @@ def get_conversations(message: Message) -> list[Message]:
return messages
-async def get_conversation_contexts(client: Client, conversations: list[Message]) -> list[dict]:
+async def get_conversation_contexts(client: Client, conversations: list[Message], ctx_format: str = "gpt") -> list[dict]:
"""Generate contexts for GPT conversation.
From old to new messages.
-
- Returns:
- list[dict]: [
- {
- "role": "user or assistant",
- "content": [
- {'type': 'text', 'text': 'caption this img'},
- {'type': 'image_url', 'image_url': {'url': 'data:image/jpeg;base64,base64_image'}},
- {'type': 'image_url', 'image_url': {'url': 'https://server.com/dir/image.jpg'}},
- ]
- }
- ]
"""
# parse context for each message
- contexts = [await single_context(client, message) for message in conversations]
- contexts = [x for x in contexts if x.get("content")] # filter out empty context
+ if ctx_format.lower() == "gpt":
+ contexts = [await single_gpt_context(client, message) for message in conversations]
+ contexts = [x for x in contexts if x.get("content")]
+ else:
+ contexts = [await single_gemini_context(client, message) for message in conversations]
+ contexts = [x for x in contexts if x.get("parts")]
return contexts[: int(GPT.HISTORY_CONTEXT)]
-async def single_context(client: Client, message: Message) -> dict:
+async def single_gpt_context(client: Client, message: Message) -> dict:
"""Generate GPT contexts for a single message (Without considering reply message).
Returns:
@@ -65,57 +57,43 @@ async def single_context(client: Client, message: Message) -> dict:
],
}
"""
-
- def clean_text(text: str) -> str:
- if not text:
- return ""
- text = clean_prefix(text)
- # remove bot tips
- text = re.sub(rf"(.*?){BOT_TIPS}\)", "", text, flags=re.DOTALL).strip()
- # remove reasoning
- return re.sub(r"^🤔(.*?)💡", "", text, flags=re.DOTALL).strip()
-
info = parse_msg(message, silent=True)
role = "assistant" if f"{BOT_TIPS})" in info["text"] else "user"
if info["mtype"] not in ["text", "photo", "voice", "video", "document"]:
return {}
+ extra_txt_extensions = [".sh", ".json", ".xml"] # treat these as txt file
+ extra_markdown_extensions = [".pdf", ".html", ".doc", ".docx", ".ppt", ".pptx", ".xls", ".xlsx"] # convert to markdown
+
messages = await client.get_media_group(message.chat.id, message.id) if message.media_group_id else [message]
contexts = []
for msg in messages:
info = parse_msg(msg, silent=True)
- msg_text = clean_text(info["text"])
+ msg_text = clean_response(info["text"])
try:
- if GPT.MEDIA_FORMAT == "base64":
- if info["mtype"] == "photo":
- res = await base64_media(client, msg)
- contexts.append({"type": "image_url", "image_url": {"url": f"data:image/{res['ext']};base64,{res['base64']}"}})
- # elif info["mtype"] == "video":
- # media.append({"type": "video_url", "video_url": {"url": b64}})
- elif info["mtype"] == "document" and info["mime_type"] in ["text/plain", "text/markdown"]:
+ if info["mtype"] == "photo":
+ res = await base64_media(client, msg)
+ contexts.append({"type": "image_url", "image_url": {"url": f"data:image/{res['ext']};base64,{res['base64']}"}})
+ elif info["mtype"] == "document":
+ if info["mime_type"].startswith("text/") or Path(info["file_name"]).suffix in extra_txt_extensions:
res = await base64_media(client, msg)
contexts.append(
{
"type": "text",
- "text": f"[username]: {info['full_name']}\n[filename]: {info['file_name']}\n[file content]:\n{res['value'].strip()}",
+ "text": f"[fileowner]: {info['full_name']}\n[filename]: {info['file_name']}\n[file content]:\n{res['value'].strip()}",
}
)
- else:
- path: str = await client.download_media(msg) # type: ignore
- logger.debug(f"Downloaded GPT media: {path}")
- if info["mtype"] == "photo":
- contexts.append({"type": "image_url", "image_url": {"url": f"{FILE_SERVER}/{Path(path).name}"}})
- # elif info["mtype"] == "video":
- # media.append({"type": "video_url", "video_url": {"url": f"{FILE_SERVER}/{Path(path).name}"}})
- elif info["mtype"] == "document" and info["mime_type"] in ["text/plain", "text/markdown"]:
+ if Path(info["file_name"]).suffix in extra_markdown_extensions:
+ fpath: str = await client.download_media(message) # type: ignore
+ text = convert_md(fpath)
+ Path(fpath).unlink(missing_ok=True)
contexts.append(
{
"type": "text",
- "text": f"[username]: {info['full_name']}\n[filename]: {info['file_name']}\n[file content]:\n{Path(path).read_text().strip()}",
+ "text": f"[fileowner]: {info['full_name']}\n[filename]: {info['file_name']}\n[file content]:\n{text.strip()}",
}
)
- Path(path).unlink(missing_ok=True)
if msg_text:
if role == "user":
contexts.append({"type": "text", "text": f"[username]: {info['full_name']}\n[message]:\n{msg_text}"})
@@ -124,7 +102,59 @@ async def single_context(client: Client, message: Message) -> dict:
except Exception as e:
logger.warning(f"Download media from message failed: {e}")
continue
- return {"role": role, "content": contexts}
+ return {"role": role, "content": contexts} if contexts else {}
+
+
+async def single_gemini_context(client: Client, message: Message) -> dict:
+ """Generate Gemini contexts for a single message (Without considering reply message).
+
+ Returns:
+ {
+ "role": role, # model or user
+        "parts": [
+ {"inlineData": {"mimeType": "image/jpeg", "data": "base64-encoded string"}},
+ {"text": "hello"}
+ ],
+ }
+ """
+ info = parse_msg(message, silent=True)
+ role = "model" if f"{BOT_TIPS})" in info["text"] else "user"
+ if info["mtype"] not in ["text", "photo", "voice", "video", "document"]:
+ return {}
+ extra_mime_types = ["application/pdf", "application/x-javascript"] # gemini has built-in support for these
+ extra_txt_extensions = [".sh", ".json", ".xml"] # also treat these as txt file
+ extra_markdown_extensions = [".html", ".doc", ".docx", ".ppt", ".pptx", ".xls", ".xlsx"] # convert to markdown
+
+ messages = await client.get_media_group(message.chat.id, message.id) if message.media_group_id else [message]
+ parts = []
+ for msg in messages:
+ info = parse_msg(msg, silent=True)
+ msg_text = clean_response(info["text"])
+ try:
+ if info["mtype"] == "photo":
+ res = await base64_media(client, msg)
+ parts.append(Part.from_bytes(mime_type=f"image/{res['ext']}", data=res["base64"]))
+ elif info["mtype"] == "document":
+ if info["mime_type"].startswith("text/") or Path(info["file_name"]).suffix in extra_txt_extensions:
+ res = await base64_media(client, msg)
+ parts.append(Part.from_text(text=f"[fileowner]: {info['full_name']}\n[filename]: {info['file_name']}\n[file content]:\n{res['value'].strip()}"))
+ if info["mime_type"] in extra_mime_types:
+ data: BytesIO = await client.download_media(message, in_memory=True) # type: ignore
+ parts.append(Part.from_bytes(mime_type=info["mime_type"], data=bytes(data.getbuffer())))
+ if Path(info["file_name"]).suffix in extra_markdown_extensions:
+ fpath: str = await client.download_media(message) # type: ignore
+ text = convert_md(fpath)
+ Path(fpath).unlink(missing_ok=True)
+ parts.append(Part.from_text(text=f"[fileowner]: {info['full_name']}\n[filename]: {info['file_name']}\n[file content]:\n{text.strip()}"))
+ if msg_text:
+ if role == "user":
+ parts.append(Part.from_text(text=f"[username]: {info['full_name']}\n[message]:\n{msg_text}"))
+ else:
+ parts.append(Part.from_text(text=msg_text))
+ except Exception as e:
+ logger.warning(f"Download media from message failed: {e}")
+ continue
+ return {"role": role, "parts": parts} if parts else {}
async def base64_media(client: Client, message: Message) -> dict:
src/llm/gemini.py
@@ -14,7 +14,8 @@ from pyrogram.client import Client
from pyrogram.types import Message
from config import DOWNLOAD_DIR, GEMINI, PREFIX, TEXT_LENGTH
-from llm.utils import BOT_TIPS, beautify_llm_response, clean_prefix, clean_source_marks
+from llm.contexts import get_conversation_contexts
+from llm.utils import BOT_TIPS, beautify_llm_response, clean_gemini_sourcemarks, clean_prefix, clean_source_marks
from messages.parser import parse_msg
from messages.progress import modify_progress
from messages.sender import send2tg
@@ -32,41 +33,29 @@ HELP = f"""🌠**AI生图**
"""
-async def gemini_response(client: Client, message: Message, gpt_contexts: list[dict], modality: str = "image", **kwargs):
+async def gemini_response(client: Client, message: Message, conversations: list[Message], modality: str = "image", **kwargs):
r"""Get Gemini response.
- gpt_contexts: [
- {
- "role": role, # assistant or user
- "content": [
- {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,encoding"}},
- {"type": "text", "text": "[username]: Bob\n[filename]: sample.txt\n[file content]:\nhello"}
- ]
- }
- ]
-
Args:
client (Client): The Pyrogram client.
message (Message): The trigger message object.
- gpt_contexts (list[dict]): OpenAI context format parsed from chat history.
- model (str): model id.
- model_name (str): friendly model name
+ conversations (list[Message]): list of chat conversations.
modality (str): response modality
"""
info = parse_msg(message)
model = GEMINI.TEXT_MODEL if modality == "text" else GEMINI.IMG_MODEL
model_name = GEMINI.TEXT_MODEL_NAME if modality == "text" else GEMINI.IMG_MODEL_NAME
+ if not GEMINI.API_KEYS:
+        await send2tg(client, message, texts="⚠️**未配置Gemini API, 请尝试其他模型**", **kwargs)
response_modalities = ["TEXT", "IMAGE"] if modality == "image" else ["TEXT"]
tools = [Tool(google_search=GoogleSearch())] if modality == "text" else None
- keep_marks = modality == "text" # keep source marks for text response
try:
- msg = f"🤖**{model_name}**: 思考中...\n{clean_prefix(info['text'])}"
+ msg = f"🤖**{model_name}**: 思考中...\n👤**[{info['full_name']}](tg://user?id={info['uid']})**:“{clean_prefix(info['text'])}”"
status_msg = (await send2tg(client, message, texts=msg, **kwargs))[0]
kwargs["progress"] = status_msg
- contexts = [openai_context_to_gemini(context, keep_marks=keep_marks) for context in gpt_contexts]
+ contexts = await get_conversation_contexts(client, conversations, ctx_format="gemini")
gemini_logging(contexts)
- params = {}
- params |= {"model": model, "contents": contexts}
+ params = {"model": model, "contents": contexts}
genconfig = {}
genconfig |= {"response_modalities": response_modalities}
if tools:
@@ -74,7 +63,6 @@ async def gemini_response(client: Client, message: Message, gpt_contexts: list[d
if GEMINI.PREFER_LANG and modality == "text":
genconfig |= {"system_instruction": f"请优先使用{GEMINI.PREFER_LANG}回复"}
params |= {"config": GenerateContentConfig(**genconfig)}
-
if modality == "image":
return await gemini_nonstream(client, message, model_name, params, **kwargs)
return await gemini_stream(client, message, model_name, params, **kwargs)
@@ -82,62 +70,47 @@ async def gemini_response(client: Client, message: Message, gpt_contexts: list[d
logger.error(e)
-def openai_context_to_gemini(context: dict, *, keep_marks: bool = True) -> ContentUnionDict:
- r"""Convert OpenAI context to Gemini format.
-
- Args:
- context (dict): {
- "role": role, # assistant or user
- "content": [
- {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,encoding"}},
- {"type": "text", "text": "[username]: Bob\n[filename]: sample.txt\n[file content]:\nhello"}
- ]
- }
-
- Returns:
- dict: {
- "role": role, # model or user
- "parts: [
- {"inlineData": {"mimeType": "image/jpeg", "data": "base64-encoded string"}},
- {"text": "hello"}
- ]
- }
- """
- parts: list[Part] = []
- role = "model" if context["role"] == "assistant" else "user"
- for item in context["content"]:
- if item["type"] == "text":
- if keep_marks:
- parts.append(Part.from_text(text=item["text"]))
- else:
- parts.append(Part.from_text(text=clean_source_marks(item["text"])))
- elif item["type"] == "image_url":
- data = item["image_url"]["url"].split(";base64,")
- mime = data[0].removeprefix("data:")
- parts.append(Part.from_bytes(mime_type=mime, data=data[1]))
-
- return {"role": role, "parts": parts} # type: ignore
-
-
-def gemini_logging(contexts: list):
- msg = ""
- with contextlib.suppress(Exception):
- for item in contexts:
- role = item.get("role", "").upper() or "MODEL"
+async def gemini_stream(
+ client: Client,
+ message: Message,
+ model_name: str,
+ params: dict,
+ retry: int = 0,
+ **kwargs,
+):
+ prefix = f"🤖**{model_name}**: ({BOT_TIPS})\n"
+ try:
+ status = kwargs.get("progress")
+ api_keys = [x.strip() for x in GEMINI.API_KEYS.split(",") if x.strip()]
+ if retry > len(api_keys) - 1:
+ return None
+ app = genai.Client(api_key=api_keys[retry], http_options=HttpOptions(base_url=GEMINI.BASR_URL, async_client_args={"proxy": GEMINI.PROXY}))
+ answers = ""
+ async for chunk in await app.aio.models.generate_content_stream(**params):
+ resp = parse_response(chunk.model_dump())
+ answer = resp.get("texts", "")
+ answers += answer
+ answers = beautify_llm_response(answers)
+ if await count_without_entities(prefix + answers) <= TEXT_LENGTH:
+ if len(answers.removeprefix(prefix)) > 10: # start response if answer is not empty
+ await modify_progress(message=status, text=prefix + answers, detail_progress=True)
+ else: # answers is too long, split it into multiple messages
+ parts = await smart_split(prefix + answers)
+ await modify_progress(message=status, text=blockquote(parts[0]), force_update=True) # force send the first part
+ answers = parts[-1] # keep the last part
+ status = await client.send_message(message.chat.id, answers) # the new message
- # Request
- for part in item.get("parts", []):
- if part.inline_data:
- msg += f"[{role}]: Blob_Data "
- if part.text:
- msg += f"[{role}]: {part.text} "
- # Response
- if item.get("text", ""):
- msg += f"[{role}]: {item['text']} "
- if item.get("inline_data", ""):
- msg += f"[{role}]: Blob_Data "
+ # all chunks are processed
+ answers = blockquote(beautify_llm_response(answers)) # blockquote AI response
+ await modify_progress(message=status, text=prefix + answers, force_update=True)
- logger.debug(f"{msg!r}")
+ except Exception as e:
+ logger.error(e)
+ error = str(e)
+ if "resp" in locals():
+ error += f"\n{resp}"
+ await modify_progress(text=error, force_update=True, **kwargs)
+ return await gemini_stream(client, message, model_name, params, retry + 1, **kwargs) # type: ignore
async def gemini_nonstream(
@@ -148,12 +121,16 @@ async def gemini_nonstream(
retry: int = 0,
**kwargs,
):
+ """Currently, the non-stream mode is for image generation.
+
+ For other uses, please use stream mode.
+ """
try:
+ clean_gemini_sourcemarks(params["contents"])
api_keys = [x.strip() for x in GEMINI.API_KEYS.split(",") if x.strip()]
if retry > len(api_keys) - 1:
return None
app = genai.Client(api_key=api_keys[retry], http_options=HttpOptions(base_url=GEMINI.BASR_URL, async_client_args={"proxy": GEMINI.PROXY}))
-
response = await app.aio.models.generate_content(**params)
prefix = f"🤖**{model_name}**: ({BOT_TIPS})\n"
res = parse_response(response.model_dump(), prefix=prefix)
@@ -196,44 +173,60 @@ def parse_response(data: dict, prefix: str = "") -> dict:
return {"texts": prefix + beautify_llm_response(texts, newline_level=2), "media": media}
-async def gemini_stream(
- client: Client,
- message: Message,
- model_name: str,
- params: dict,
- retry: int = 0,
- **kwargs,
-):
- prefix = f"🤖**{model_name}**: ({BOT_TIPS})\n"
- answers = ""
- try:
- status = kwargs.get("progress")
- api_keys = [x.strip() for x in GEMINI.API_KEYS.split(",") if x.strip()]
- if retry > len(api_keys) - 1:
- return None
- app = genai.Client(api_key=api_keys[retry], http_options=HttpOptions(base_url=GEMINI.BASR_URL, async_client_args={"proxy": GEMINI.PROXY}))
- async for chunk in await app.aio.models.generate_content_stream(**params):
- resp = parse_response(chunk.model_dump())
- answer = resp.get("texts", "")
- answers += answer
- answers = beautify_llm_response(answers)
- if await count_without_entities(prefix + answers) <= TEXT_LENGTH:
- if len(answers.removeprefix(prefix)) > 10: # start response if answer is not empty
- await modify_progress(message=status, text=prefix + answers, detail_progress=True)
- else: # answers is too long, split it into multiple messages
- parts = await smart_split(prefix + answers)
- await modify_progress(message=status, text=blockquote(parts[0]), force_update=True) # force send the first part
- answers = parts[-1] # keep the last part
- status = await client.send_message(message.chat.id, answers) # the new message
+def gemini_logging(contexts: list):
+ msg = ""
+ with contextlib.suppress(Exception):
+ for item in contexts:
+ role = item.get("role", "").upper() or "MODEL"
- # all chunks are processed
- answers = blockquote(beautify_llm_response(answers)) # blockquote AI response
- await modify_progress(message=status, text=prefix + answers, force_update=True)
+ # Request
+ for part in item.get("parts", []):
+ if part.inline_data:
+ msg += f"[{role}]: Blob_Data "
+ if part.text:
+ msg += f"[{role}]: {part.text} "
+ # Response
+ if item.get("text", ""):
+ msg += f"[{role}]: {item['text']} "
+ if item.get("inline_data", ""):
+ msg += f"[{role}]: Blob_Data "
- except Exception as e:
- logger.error(e)
- error = str(e)
- if "resp" in locals():
- error += f"\n{resp}"
- await modify_progress(text=error, force_update=True, **kwargs)
- return await gemini_stream(client, message, model_name, params, retry + 1, **kwargs) # type: ignore
+ logger.debug(f"{msg!r}")
+
+
+def openai_context_to_gemini(context: dict, *, keep_marks: bool = True) -> ContentUnionDict:
+ r"""(Deprecated) Convert OpenAI context to Gemini format.
+
+ Not needed anymore.
+
+ Args:
+ context (dict): {
+ "role": role, # assistant or user
+ "content": [
+ {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,encoding"}},
+ {"type": "text", "text": "[username]: Bob\n[filename]: sample.txt\n[file content]:\nhello"}
+ ]
+ }
+
+ Returns:
+ dict: {
+ "role": role, # model or user
+        "parts": [
+ {"inlineData": {"mimeType": "image/jpeg", "data": "base64-encoded string"}},
+ {"text": "hello"}
+ ]
+ }
+ """
+ parts: list[Part] = []
+ role = "model" if context["role"] == "assistant" else "user"
+ for item in context["content"]:
+ if item["type"] == "text":
+ if keep_marks:
+ parts.append(Part.from_text(text=item["text"]))
+ else:
+ parts.append(Part.from_text(text=clean_source_marks(item["text"])))
+ elif item["type"] == "image_url":
+ data = item["image_url"]["url"].split(";base64,")
+ mime = data[0].removeprefix("data:")
+ parts.append(Part.from_bytes(mime_type=mime, data=data[1]))
+ return {"role": role, "parts": parts} # type: ignore
src/llm/gpt.py
@@ -9,7 +9,7 @@ from config import GEMINI, GPT, PREFIX, TEXT_LENGTH, cache
from llm.contexts import get_conversation_contexts, get_conversations
from llm.gemini import HELP as AIGC_HELP
from llm.gemini import gemini_response
-from llm.models import get_context_type, get_gpt_config, parse_force_model
+from llm.models import get_context_type, get_gpt_config, get_model_id
from llm.response import send_to_gpt
from llm.response_stream import send_to_gpt_stream
from llm.tools import merge_tools_response
@@ -25,7 +25,7 @@ HELP = f"""🤖**GPT对话**
暂不支持视频/音频, 可先用`{PREFIX.ASR}`命令转为文字后再调用`{PREFIX.GPT}`
⚙️模型配置:
-`{PREFIX.GPT}`默认模型: **{GPT.TEXT_MODEL_NAME}**
+`{PREFIX.GPT}`默认模型: **{GPT.DEFAULT_MODEL_NAME}**
🔄使用以下命令强制切换模型:
`/gpt`: **{GPT.OPENAI_MODEL_NAME}** {image_emoji(GPT.OPENAI_IMAGE_CAPABILITY)}
@@ -38,7 +38,7 @@ HELP = f"""🤖**GPT对话**
⚠️注意:
若对话历史包含图片
但模型不支持图片(无🏞图标)
-会自动切换为: **{GPT.IMAGE_MODEL_NAME}**
+会自动切换为: **{GPT.OMNI_MODEL_NAME}**
"""
@@ -59,8 +59,6 @@ def is_gpt_conversation(message: Message) -> bool:
GPT.QWEN_MODEL_NAME,
GPT.DOUBAO_MODEL_NAME,
GPT.GROK_MODEL_NAME,
- GPT.TEXT_MODEL_NAME,
- GPT.IMAGE_MODEL_NAME,
GEMINI.TEXT_MODEL_NAME,
GEMINI.IMG_MODEL_NAME,
]
@@ -91,7 +89,6 @@ async def gpt_response(client: Client, message: Message, *, gpt_stream: bool = G
reply_info = parse_msg(message.reply_to_message, silent=True)
reply_text = reply_info["text"]
- force_model, modality = parse_force_model(info["text"], reply_text)
# cache media_group message, only process once
if media_group_id := message.media_group_id:
if cache.get(f"gpt-{info['cid']}-{media_group_id}"):
@@ -99,16 +96,20 @@ async def gpt_response(client: Client, message: Message, *, gpt_stream: bool = G
cache.set(f"gpt-{info['cid']}-{media_group_id}", "1", ttl=120)
kwargs["message_info"] = info # save trigger message info
conversations = get_conversations(message)
- context_type = get_context_type(conversations)
- contexts = await get_conversation_contexts(client, conversations)
- config = get_gpt_config(context_type["type"], contexts, force_model)
- if any("gemini" in x.lower() for x in [config["completions"]["model"], config["friendly_name"]]):
- return await gemini_response(client, message, contexts, modality, **kwargs)
- if not config["client"]["api_key"]:
- logger.error(f"⚠️**{config['friendly_name']}** 未配置API Key")
+ context_type = get_context_type(conversations) # {"type": "text", "error": None} # text, image
+ model_id, resp_modality = get_model_id(info["text"], reply_text, context_type["type"])
+ if "gemini" in model_id.lower():
+ return await gemini_response(client, message, conversations, resp_modality, **kwargs)
+
+ config = get_gpt_config(model_id)
+ if not config["client"]["api_key"].strip():
return await send2tg(client, message, texts=f"⚠️**{config['friendly_name']}** 未配置API Key, 请尝试其他命令\n\n{HELP}", **kwargs)
+ if not config["completions"]["model"].strip():
+ return await send2tg(client, message, texts=f"⚠️**{config['friendly_name']}** 未配置模型ID, 请尝试其他命令\n\n{HELP}", **kwargs)
+
+ config["completions"]["messages"] = await get_conversation_contexts(client, conversations)
- msg = f"🤖**{config['friendly_name']}**: 思考中...\n{clean_prefix(info['text'])}"
+ msg = f"🤖**{config['friendly_name']}**: 思考中...\n👤**[{info['full_name']}](tg://user?id={info['uid']})**:“{clean_prefix(info['text'])}”"
status_msg = (await send2tg(client, message, texts=msg, **kwargs))[0]
kwargs["progress"] = status_msg
if context_type.get("error"):
src/llm/models.py
@@ -27,113 +27,94 @@ def get_context_type(conversations: list[Message]) -> dict:
return res
-def parse_force_model(text: str, reply_text: str) -> tuple[str, str]:
- """Parse the force model from the text or reply text.
+def get_model_id(text: str, reply_text: str, context_type: str) -> tuple[str, str]:
+ """Get model id based on the reply text, prefix command and context type.
/gpt = OpenAI, /gemini = Gemini, /ds = DeepSeek, /qwen = Qwen, /doubao = Doubao, /grok = Grok
+
+ Returns:
+ tuple[str, str]: (model_id, response_modality)
"""
- force_model = ""
- modality = "text"
- # parse from bot reply
- if reply_text.startswith(f"🤖{GPT.OPENAI_MODEL_NAME}"):
- force_model = GPT.OPENAI_MODEL
+ model_id = ""
+ response_modality = "text"
+
+ # parse from reply bot message. If reply to DeepSeek bot message, use DeepSeek model.
+ if reply_text.startswith(f"🤖{GPT.DEFAULT_MODEL_NAME}"):
+ model_id = GPT.DEFAULT_MODEL
+ elif reply_text.startswith(f"🤖{GPT.OMNI_MODEL_NAME}"):
+ model_id = GPT.OMNI_MODEL
+ elif reply_text.startswith(f"🤖{GPT.OPENAI_MODEL_NAME}"):
+ model_id = GPT.OPENAI_MODEL
elif reply_text.startswith(f"🤖{GPT.DEEPSEEK_MODEL_NAME}"):
- force_model = GPT.DEEPSEEK_MODEL
+ model_id = GPT.DEEPSEEK_MODEL
elif reply_text.startswith(f"🤖{GPT.QWEN_MODEL_NAME}"):
- force_model = GPT.QWEN_MODEL
+ model_id = GPT.QWEN_MODEL
elif reply_text.startswith(f"🤖{GPT.DOUBAO_MODEL_NAME}"):
- force_model = GPT.DOUBAO_MODEL
+ model_id = GPT.DOUBAO_MODEL
elif reply_text.startswith(f"🤖{GPT.GROK_MODEL_NAME}"):
- force_model = GPT.GROK_MODEL
- elif reply_text.startswith(f"🤖{GEMINI.IMG_MODEL_NAME}"):
- force_model = GEMINI.IMG_MODEL
- modality = "image"
- # parse from command prefix
- if startswith_prefix(text, prefix=["/gpt"]):
- force_model = GPT.OPENAI_MODEL
+ model_id = GPT.GROK_MODEL
+ elif reply_text.startswith(f"🤖{GEMINI.TEXT_MODEL_NAME}"):
+ model_id = GEMINI.TEXT_MODEL
+ elif reply_text.startswith(f"🤖{GEMINI.IMG_MODEL_NAME}"): # gemini can generate image
+ model_id = GEMINI.IMG_MODEL
+ response_modality = "image"
+
+ # parse from command prefix. If use /gemini command, force use Gemini model.
+ if startswith_prefix(text, prefix=[PREFIX.GPT]):
+ model_id = GPT.DEFAULT_MODEL
+ elif startswith_prefix(text, prefix=["/gpt"]):
+ model_id = GPT.OPENAI_MODEL
elif startswith_prefix(text, prefix=["/ds"]):
- force_model = GPT.DEEPSEEK_MODEL
+ model_id = GPT.DEEPSEEK_MODEL
elif startswith_prefix(text, prefix=["/qwen"]):
- force_model = GPT.QWEN_MODEL
+ model_id = GPT.QWEN_MODEL
elif startswith_prefix(text, prefix=["/doubao"]):
- force_model = GPT.DOUBAO_MODEL
+ model_id = GPT.DOUBAO_MODEL
elif startswith_prefix(text, prefix=["/grok"]):
- force_model = GPT.GROK_MODEL
+ model_id = GPT.GROK_MODEL
elif startswith_prefix(text, prefix=[PREFIX.GENIMG]):
- force_model = GEMINI.IMG_MODEL
- modality = "image"
+ model_id = GEMINI.IMG_MODEL
+ response_modality = "image" # /gen command is for image response.
elif startswith_prefix(text, prefix=["/gemini"]):
- force_model = GEMINI.TEXT_MODEL
- modality = "text"
- return force_model, modality
-
-
-def get_gpt_config(model_type: str, contexts: list[dict], force_model: str = "") -> dict:
- """Get GPT configurations.
-
- contexts:
- [
- {
- "role": "user",
- "content": [
- {"type": "text", "text": "text"},
- {"type": "image_url", "image_url": {"url": "https://server.com/dir/image.jpg"}},
- ]
- }
- ]
- """
- models = {"text": GPT.TEXT_MODEL, "image": GPT.IMAGE_MODEL, "video": GPT.VIDEO_MODEL}
- model_names = {"text": GPT.TEXT_MODEL_NAME, "image": GPT.IMAGE_MODEL_NAME, "video": GPT.VIDEO_MODEL_NAME}
- apis = {"text": GPT.TEXT_API_KEY, "image": GPT.IMAGE_API_KEY, "video": GPT.VIDEO_API_KEY}
- urls = {"text": GPT.TEXT_BASE_URL, "image": GPT.IMAGE_BASE_URL, "video": GPT.VIDEO_BASE_URL}
-
- model = models[model_type]
- model_name = model_names[model_type]
- force_model = force_model or model
-
- # params for OpenAI client
- client = { # this config is based on model type (text or image)
- "api_key": apis[model_type],
- "base_url": urls[model_type],
- "timeout": round(float(GPT.TIMEOUT)),
- "http_client": DefaultAsyncHttpxClient(proxy=PROXY.GPT),
- }
+ model_id = GEMINI.TEXT_MODEL
+ response_modality = "text" # /gemini command is for text response.
+
+ # fallback to omni model if needed
+ if model_id and context_type == "text": # no need to fallback if context type is text
+ return model_id, response_modality
+ if (
+ (model_id == GPT.OPENAI_MODEL and not GPT.OPENAI_IMAGE_CAPABILITY)
+ or (model_id == GPT.DEEPSEEK_MODEL and not GPT.DEEPSEEK_IMAGE_CAPABILITY)
+ or (model_id == GPT.QWEN_MODEL and not GPT.QWEN_IMAGE_CAPABILITY)
+ or (model_id == GPT.DOUBAO_MODEL and not GPT.DOUBAO_IMAGE_CAPABILITY)
+ or (model_id == GPT.GROK_MODEL and not GPT.GROK_IMAGE_CAPABILITY)
+ ):
+ model_id = GPT.OMNI_MODEL
+ response_modality = "text"
+ return model_id, response_modality
+
- # align with force model
- model_factory = {
+def get_gpt_config(model_id: str = "") -> dict:
+ """Get GPT configurations."""
+ model_factory = {GPT.DEFAULT_MODEL: {"api_key": GPT.DEFAULT_API_KEY, "base_url": GPT.DEFAULT_BASE_URL, "model_name": GPT.DEFAULT_MODEL_NAME}}
+ model_factory |= {
GPT.OPENAI_MODEL: {"api_key": GPT.OPENAI_API_KEY, "base_url": GPT.OPENAI_BASE_URL, "model_name": GPT.OPENAI_MODEL_NAME},
GPT.DEEPSEEK_MODEL: {"api_key": GPT.DEEPSEEK_API_KEY, "base_url": GPT.DEEPSEEK_BASE_URL, "model_name": GPT.DEEPSEEK_MODEL_NAME},
GPT.QWEN_MODEL: {"api_key": GPT.QWEN_API_KEY, "base_url": GPT.QWEN_BASE_URL, "model_name": GPT.QWEN_MODEL_NAME},
GPT.DOUBAO_MODEL: {"api_key": GPT.DOUBAO_API_KEY, "base_url": GPT.DOUBAO_BASE_URL, "model_name": GPT.DOUBAO_MODEL_NAME},
GPT.GROK_MODEL: {"api_key": GPT.GROK_API_KEY, "base_url": GPT.GROK_BASE_URL, "model_name": GPT.GROK_MODEL_NAME},
}
+ model_factory |= {GPT.OMNI_MODEL: {"api_key": GPT.OMNI_API_KEY, "base_url": GPT.OMNI_BASE_URL, "model_name": GPT.OMNI_MODEL_NAME}}
model_factory |= {GPT.SUMMARY_MODEL: {"api_key": GPT.SUMMARY_API_KEY, "base_url": GPT.SUMMARY_BASE_URL, "model_name": GPT.SUMMARY_MODEL_NAME}}
- force_model_config = model_factory.get(force_model, {})
-
- force_model_name = force_model_config.get("model_name", model_name)
- force_model_config.pop("model_name", None)
- # merge force model config
- if model_type == "text" or (
- model_type == "image" # check capabilities
- and (
- (force_model == GPT.OPENAI_MODEL and GPT.OPENAI_IMAGE_CAPABILITY)
- or (force_model == GPT.DEEPSEEK_MODEL and GPT.DEEPSEEK_IMAGE_CAPABILITY)
- or (force_model == GPT.QWEN_MODEL and GPT.QWEN_IMAGE_CAPABILITY)
- or (force_model == GPT.DOUBAO_MODEL and GPT.DOUBAO_IMAGE_CAPABILITY)
- or (force_model == GPT.SUMMARY_MODEL and GPT.SUMMARY_IMAGE_CAPABILITY)
- or (force_model == GPT.GROK_MODEL and GPT.GROK_IMAGE_CAPABILITY)
- )
- ):
- client |= force_model_config
- model = force_model
- model_name = force_model_name
-
- return {
- "friendly_name": model_name,
- "client": client,
- "completions": {
- "model": model,
- "messages": contexts,
- "temperature": float(GPT.TEMPERATURE),
- },
+
+ client = {
+ "timeout": round(float(GPT.TIMEOUT)),
+ "http_client": DefaultAsyncHttpxClient(proxy=PROXY.GPT),
}
+
+ model_id_config = model_factory.get(model_id, {})
+ model_name = model_id_config.get("model_name", "")
+ model_id_config.pop("model_name", None)
+ client |= model_id_config
+
+ return {"friendly_name": model_name, "client": client, "completions": {"model": model_id, "temperature": float(GPT.TEMPERATURE)}}
src/llm/utils.py
@@ -2,9 +2,11 @@
# -*- coding: utf-8 -*-
import re
from pathlib import Path
+from typing import BinaryIO
import tiktoken
from loguru import logger
+from markitdown import MarkItDown
from config import DOWNLOAD_DIR, GPT, PREFIX
from utils import number_to_emoji, remove_consecutive_newlines, remove_dash, remove_pound, zhcn
@@ -113,7 +115,7 @@ def clean_source_marks(text: str) -> str:
return text
clean_text = ""
for line in text.split("\n"):
- if line.strip().startswith(("[username]:", "[filename]:")):
+ if line.strip().startswith(("[username]:", "[filename]:", "[fileowner]:")):
continue
if line.strip() in ["[message]:", "[file content]:"]:
continue
@@ -171,3 +173,37 @@ def clean_prefix(text: str) -> str:
for prefix in [PREFIX.GPT, PREFIX.GENIMG, "/gpt", "/gemini", "/ds", "/qwen", "/grok", "/doubao"]:
text = text.removeprefix(prefix).lstrip()
return text
+
+
+def clean_response(text: str) -> str:
+ """Remove bot prefix and reasoning content."""
+ if not text:
+ return ""
+ text = clean_prefix(text)
+ # remove bot tips
+ text = re.sub(rf"(.*?){BOT_TIPS}\)", "", text, flags=re.DOTALL).strip()
+ # remove reasoning
+ return re.sub(r"^🤔(.*?)💡", "", text, flags=re.DOTALL).strip()
+
+
+def clean_gemini_sourcemarks(contexts: list[dict]) -> None:
+ """Clean Gemini source marks."""
+ for item in contexts:
+ for part in item.get("parts", []):
+ if part.text:
+ part.text = clean_source_marks(part.text)
+
+
+def convert_md(path: str | Path | BinaryIO) -> str:
+ """Convert file to markdown format."""
+ if isinstance(path, (str, Path)):
+ path = Path(path).expanduser().resolve()
+ if not path.is_file():
+ return ""
+ md = MarkItDown()
+ try:
+ result = md.convert(path)
+ except Exception as e:
+ logger.error(f"Failed to convert to markdown: {e}")
+ return ""
+ return result.text_content
src/config.py
@@ -151,15 +151,9 @@ class COOKIE: # See: https://github.com/easychen/CookieCloud
class GPT: # see `llm/README.md`
# See class GEMINI for the GEMINI configurations
STREAM_MODE = os.getenv("GPT_STREAM_MODE", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
- TEXT_MODEL = os.getenv("GPT_TEXT_MODEL", "gpt-4o")
- IMAGE_MODEL = os.getenv("GPT_IMAGE_MODEL", "gpt-4o")
- VIDEO_MODEL = os.getenv("GPT_VIDEO_MODEL", "glm-4v-plus")
TOOLS_MODEL = os.getenv("GPT_TOOLS_MODEL", "gpt-4o-mini") # this model should be fast and cheap
# comma separated fallback models for OpenRouter (e.g. openai/gpt-4o,anthropic/claude-3.5-sonnet)
FALLBACK_MODELS = os.getenv("GPT_FALLBACK_MODELS", "")
- TEXT_MODEL_NAME = os.getenv("GPT_TEXT_MODEL_NAME", "GPT-4o") # custom name
- IMAGE_MODEL_NAME = os.getenv("GPT_IMAGE_MODEL_NAME", "GPT-4o")
- VIDEO_MODEL_NAME = os.getenv("GPT_VIDEO_MODEL_NAME", "GLM-4V-Plus")
GLM_API_KEY = os.getenv("GPT_GLM_API_KEY", "")
GLM_BASE_URL = os.getenv("GPT_GLM_BASE_URL", "https://open.bigmodel.cn/api/paas/v4")
SEARCH_NUM_RESULTS = os.getenv("GPT_SEARCH_NUM_RESULTS", "10")
@@ -167,52 +161,61 @@ class GPT: # see `llm/README.md`
TIMEOUT = os.getenv("GPT_TIMEOUT", "300")
TEMPERATURE = os.getenv("GPT_TEMPERATURE", "1.0")
HISTORY_CONTEXT = os.getenv("GPT_HISTORY_CONTEXT", "20") # 最多携带多少条历史消息
- MEDIA_FORMAT = os.getenv("GPT_MEDIA_FORMAT", "base64") # base64 or http
- TEXT_API_KEY = os.getenv("GPT_TEXT_API_KEY", "")
- TEXT_BASE_URL = os.getenv("GPT_TEXT_BASE_URL", "https://api.openai.com/v1")
- IMAGE_API_KEY = os.getenv("GPT_IMAGE_API_KEY", "")
- IMAGE_BASE_URL = os.getenv("GPT_IMAGE_BASE_URL", "https://api.openai.com/v1")
- VIDEO_API_KEY = os.getenv("GPT_VIDEO_API_KEY", "")
- VIDEO_BASE_URL = os.getenv("GPT_VIDEO_BASE_URL", "https://open.bigmodel.cn/api/paas/v4")
TOOLS_API_KEY = os.getenv("GPT_TOOLS_API_KEY", "")
TOOLS_BASE_URL = os.getenv("GPT_TOOLS_BASE_URL", "https://api.openai.com/v1")
- TOKEN_ENCODING = os.getenv("GPT_TOKEN_ENCODING", "o200k_base") # https://github.com/openai/tiktoken
+ TOKEN_ENCODING = os.getenv("GPT_TOKEN_ENCODING", "o200k_base") # https://github.com/openai/tiktoken/blob/main/tiktoken/model.py
MAX_RETRY = int(os.getenv("GPT_MAX_RETRY", "2"))
HELICONE_API_KEY = os.getenv("HELICONE_API_KEY", "")
+ # default command (/ai).
+    # set a string containing "gemini" to switch to gemini (see class GEMINI below for details)
+    DEFAULT_MODEL = os.getenv("GPT_DEFAULT_MODEL", "")  # model id
+ DEFAULT_MODEL_NAME = os.getenv("GPT_DEFAULT_MODEL_NAME", "") # custom model name
+ DEFAULT_API_KEY = os.getenv("GPT_DEFAULT_API_KEY", "")
+ DEFAULT_BASE_URL = os.getenv("GPT_DEFAULT_BASE_URL", "https://api.openai.com/v1")
+ DEFAULT_IMAGE_CAPABILITY = os.getenv("GPT_DEFAULT_IMAGE_CAPABILITY", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+
+    # omni model (this should be a full-modality model, like gpt-4o).
+    # Used when the contexts contain multi-modality data (text, image) but the chosen model cannot handle it.
+    # For example, the /ds command can only handle text, but the contexts may contain an image.
+ OMNI_MODEL = os.getenv("GPT_OMNI_MODEL", "gpt-4o")
+ OMNI_MODEL_NAME = os.getenv("GPT_OMNI_MODEL_NAME", "GPT-4o")
+ OMNI_API_KEY = os.getenv("GPT_OMNI_API_KEY", "")
+ OMNI_BASE_URL = os.getenv("GPT_OMNI_BASE_URL", "https://api.openai.com/v1")
+
# /gpt command
- OPENAI_MODEL = os.getenv("GPT_OPENAI_MODEL", "gpt-4o")
- OPENAI_MODEL_NAME = os.getenv("GPT_OPENAI_MODEL_NAME", "GPT-4o")
+ OPENAI_MODEL = os.getenv("GPT_OPENAI_MODEL", "")
+ OPENAI_MODEL_NAME = os.getenv("GPT_OPENAI_MODEL_NAME", "")
OPENAI_API_KEY = os.getenv("GPT_OPENAI_API_KEY", "")
OPENAI_BASE_URL = os.getenv("GPT_OPENAI_BASE_URL", "https://api.openai.com/v1")
OPENAI_IMAGE_CAPABILITY = os.getenv("GPT_OPENAI_IMAGE_CAPABILITY", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
# /ds command
- DEEPSEEK_MODEL = os.getenv("GPT_DEEPSEEK_MODEL", "deepseek-r1")
- DEEPSEEK_MODEL_NAME = os.getenv("GPT_DEEPSEEK_MODEL_NAME", "DeepSeek-R1")
+ DEEPSEEK_MODEL = os.getenv("GPT_DEEPSEEK_MODEL", "")
+ DEEPSEEK_MODEL_NAME = os.getenv("GPT_DEEPSEEK_MODEL_NAME", "")
DEEPSEEK_API_KEY = os.getenv("GPT_DEEPSEEK_API_KEY", "")
DEEPSEEK_BASE_URL = os.getenv("GPT_DEEPSEEK_BASE_URL", "https://api.deepseek.com/v1")
DEEPSEEK_IMAGE_CAPABILITY = os.getenv("GPT_DEEPSEEK_IMAGE_CAPABILITY", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
# /qwen command
- QWEN_MODEL = os.getenv("GPT_QWEN_MODEL", "qwen-vl-max")
- QWEN_MODEL_NAME = os.getenv("GPT_QWEN_MODEL_NAME", "Qwen-VL-Max")
+ QWEN_MODEL = os.getenv("GPT_QWEN_MODEL", "")
+ QWEN_MODEL_NAME = os.getenv("GPT_QWEN_MODEL_NAME", "")
QWEN_API_KEY = os.getenv("GPT_QWEN_API_KEY", "")
QWEN_BASE_URL = os.getenv("GPT_QWEN_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1")
QWEN_IMAGE_CAPABILITY = os.getenv("GPT_QWEN_IMAGE_CAPABILITY", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
# /grok command
- GROK_MODEL = os.getenv("GPT_GROK_MODEL", "grok-3")
- GROK_MODEL_NAME = os.getenv("GPT_GROK_MODEL_NAME", "Grok-3")
+ GROK_MODEL = os.getenv("GPT_GROK_MODEL", "")
+ GROK_MODEL_NAME = os.getenv("GPT_GROK_MODEL_NAME", "")
GROK_API_KEY = os.getenv("GPT_GROK_API_KEY", "")
GROK_BASE_URL = os.getenv("GPT_GROK_BASE_URL", "https://api.x.ai/v1")
GROK_IMAGE_CAPABILITY = os.getenv("GPT_GROK_IMAGE_CAPABILITY", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
# /doubao command
- DOUBAO_MODEL = os.getenv("GPT_DOUBAO_MODEL", "doubao-1-5-vision-pro-32k-250115")
- DOUBAO_MODEL_NAME = os.getenv("GPT_DOUBAO_MODEL_NAME", "豆包-1.5-Pro")
+ DOUBAO_MODEL = os.getenv("GPT_DOUBAO_MODEL", "")
+ DOUBAO_MODEL_NAME = os.getenv("GPT_DOUBAO_MODEL_NAME", "")
DOUBAO_API_KEY = os.getenv("GPT_DOUBAO_API_KEY", "")
DOUBAO_BASE_URL = os.getenv("GPT_DOUBAO_BASE_URL", "https://ark.cn-beijing.volces.com/api/v3")
DOUBAO_IMAGE_CAPABILITY = os.getenv("GPT_DOUBAO_IMAGE_CAPABILITY", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
# /summary command
- SUMMARY_MODEL = os.getenv("GPT_SUMMARY_MODEL", "gpt-4o")
- SUMMARY_MODEL_NAME = os.getenv("GPT_SUMMARY_MODEL_NAME", "GPT-4o")
+ SUMMARY_MODEL = os.getenv("GPT_SUMMARY_MODEL", "")
+ SUMMARY_MODEL_NAME = os.getenv("GPT_SUMMARY_MODEL_NAME", "")
SUMMARY_MODEL_MAX_OUTPUT_LENGTH = os.getenv("GPT_SUMMARY_MODEL_MAX_OUTPUT_LENGTH", "8192") # 8K
SUMMARY_API_KEY = os.getenv("GPT_SUMMARY_API_KEY", "")
SUMMARY_BASE_URL = os.getenv("GPT_SUMMARY_BASE_URL", "https://api.openai.com/v1")
pyproject.toml
@@ -27,6 +27,7 @@ dependencies = [
"youtube-transcript-api>=0.6.3",
"yt-dlp>=2025.1.12rc",
"zhconv>=1.4.3",
+ "markitdown[docx,pdf,pptx,xls,xlsx]>=0.1.1",
]
name = "bennybot"
requires-python = ">=3.11"
uv.lock
@@ -1,6 +1,11 @@
version = 1
revision = 1
requires-python = ">=3.11"
+resolution-markers = [
+ "python_full_version >= '3.13'",
+ "python_full_version == '3.12.*'",
+ "python_full_version < '3.12'",
+]
[[package]]
name = "aioboto3"
@@ -223,6 +228,7 @@ dependencies = [
{ name = "httpx", extra = ["http2", "socks"] },
{ name = "httpx-curl-cffi" },
{ name = "loguru" },
+ { name = "markitdown", extra = ["docx", "pdf", "pptx", "xls", "xlsx"] },
{ name = "openai" },
{ name = "pillow" },
{ name = "pillow-heif" },
@@ -259,6 +265,7 @@ requires-dist = [
{ name = "httpx", extras = ["http2", "socks"], specifier = ">=0.28.1" },
{ name = "httpx-curl-cffi", specifier = ">=0.1.3" },
{ name = "loguru", specifier = ">=0.7.2" },
+ { name = "markitdown", extras = ["docx", "pdf", "pptx", "xls", "xlsx"], specifier = ">=0.1.1" },
{ name = "openai", specifier = ">=1.60.1" },
{ name = "pillow", specifier = ">=10.4.0" },
{ name = "pillow-heif", specifier = ">=0.18.0" },
@@ -438,6 +445,27 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 },
]
+[[package]]
+name = "click"
+version = "8.1.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 },
+]
+
+[[package]]
+name = "cobble"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/7a/a507c709be2c96e1bb6102eb7b7f4026c5e5e223ef7d745a17d239e9d844/cobble-0.1.4.tar.gz", hash = "sha256:de38be1539992c8a06e569630717c485a5f91be2192c461ea2b220607dfa78aa", size = 3805 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d5/e1/3714a2f371985215c219c2a70953d38e3eed81ef165aed061d21de0e998b/cobble-0.1.4-py3-none-any.whl", hash = "sha256:36c91b1655e599fd428e2b95fdd5f0da1ca2e9f1abb0bc871dec21a0e78a2b44", size = 3984 },
+]
+
[[package]]
name = "colorama"
version = "0.4.6"
@@ -447,6 +475,57 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
]
+[[package]]
+name = "coloredlogs"
+version = "15.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "humanfriendly" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018 },
+]
+
+[[package]]
+name = "cryptography"
+version = "44.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/cd/25/4ce80c78963834b8a9fd1cc1266be5ed8d1840785c0f2e1b73b8d128d505/cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0", size = 710807 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/92/ef/83e632cfa801b221570c5f58c0369db6fa6cef7d9ff859feab1aae1a8a0f/cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7", size = 6676361 },
+ { url = "https://files.pythonhosted.org/packages/30/ec/7ea7c1e4c8fc8329506b46c6c4a52e2f20318425d48e0fe597977c71dbce/cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1", size = 3952350 },
+ { url = "https://files.pythonhosted.org/packages/27/61/72e3afdb3c5ac510330feba4fc1faa0fe62e070592d6ad00c40bb69165e5/cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb", size = 4166572 },
+ { url = "https://files.pythonhosted.org/packages/26/e4/ba680f0b35ed4a07d87f9e98f3ebccb05091f3bf6b5a478b943253b3bbd5/cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843", size = 3958124 },
+ { url = "https://files.pythonhosted.org/packages/9c/e8/44ae3e68c8b6d1cbc59040288056df2ad7f7f03bbcaca6b503c737ab8e73/cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5", size = 3678122 },
+ { url = "https://files.pythonhosted.org/packages/27/7b/664ea5e0d1eab511a10e480baf1c5d3e681c7d91718f60e149cec09edf01/cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c", size = 4191831 },
+ { url = "https://files.pythonhosted.org/packages/2a/07/79554a9c40eb11345e1861f46f845fa71c9e25bf66d132e123d9feb8e7f9/cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a", size = 3960583 },
+ { url = "https://files.pythonhosted.org/packages/bb/6d/858e356a49a4f0b591bd6789d821427de18432212e137290b6d8a817e9bf/cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308", size = 4191753 },
+ { url = "https://files.pythonhosted.org/packages/b2/80/62df41ba4916067fa6b125aa8c14d7e9181773f0d5d0bd4dcef580d8b7c6/cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688", size = 4079550 },
+ { url = "https://files.pythonhosted.org/packages/f3/cd/2558cc08f7b1bb40683f99ff4327f8dcfc7de3affc669e9065e14824511b/cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7", size = 4298367 },
+ { url = "https://files.pythonhosted.org/packages/71/59/94ccc74788945bc3bd4cf355d19867e8057ff5fdbcac781b1ff95b700fb1/cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79", size = 2772843 },
+ { url = "https://files.pythonhosted.org/packages/ca/2c/0d0bbaf61ba05acb32f0841853cfa33ebb7a9ab3d9ed8bb004bd39f2da6a/cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa", size = 3209057 },
+ { url = "https://files.pythonhosted.org/packages/9e/be/7a26142e6d0f7683d8a382dd963745e65db895a79a280a30525ec92be890/cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3", size = 6677789 },
+ { url = "https://files.pythonhosted.org/packages/06/88/638865be7198a84a7713950b1db7343391c6066a20e614f8fa286eb178ed/cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639", size = 3951919 },
+ { url = "https://files.pythonhosted.org/packages/d7/fc/99fe639bcdf58561dfad1faa8a7369d1dc13f20acd78371bb97a01613585/cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd", size = 4167812 },
+ { url = "https://files.pythonhosted.org/packages/53/7b/aafe60210ec93d5d7f552592a28192e51d3c6b6be449e7fd0a91399b5d07/cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181", size = 3958571 },
+ { url = "https://files.pythonhosted.org/packages/16/32/051f7ce79ad5a6ef5e26a92b37f172ee2d6e1cce09931646eef8de1e9827/cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea", size = 3679832 },
+ { url = "https://files.pythonhosted.org/packages/78/2b/999b2a1e1ba2206f2d3bca267d68f350beb2b048a41ea827e08ce7260098/cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699", size = 4193719 },
+ { url = "https://files.pythonhosted.org/packages/72/97/430e56e39a1356e8e8f10f723211a0e256e11895ef1a135f30d7d40f2540/cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9", size = 3960852 },
+ { url = "https://files.pythonhosted.org/packages/89/33/c1cf182c152e1d262cac56850939530c05ca6c8d149aa0dcee490b417e99/cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23", size = 4193906 },
+ { url = "https://files.pythonhosted.org/packages/e1/99/87cf26d4f125380dc674233971069bc28d19b07f7755b29861570e513650/cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922", size = 4081572 },
+ { url = "https://files.pythonhosted.org/packages/b3/9f/6a3e0391957cc0c5f84aef9fbdd763035f2b52e998a53f99345e3ac69312/cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4", size = 4298631 },
+ { url = "https://files.pythonhosted.org/packages/e2/a5/5bc097adb4b6d22a24dea53c51f37e480aaec3465285c253098642696423/cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5", size = 2773792 },
+ { url = "https://files.pythonhosted.org/packages/33/cf/1f7649b8b9a3543e042d3f348e398a061923ac05b507f3f4d95f11938aa9/cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6", size = 3210957 },
+ { url = "https://files.pythonhosted.org/packages/d6/d7/f30e75a6aa7d0f65031886fa4a1485c2fbfe25a1896953920f6a9cfe2d3b/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d", size = 3887513 },
+ { url = "https://files.pythonhosted.org/packages/9c/b4/7a494ce1032323ca9db9a3661894c66e0d7142ad2079a4249303402d8c71/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471", size = 4107432 },
+ { url = "https://files.pythonhosted.org/packages/45/f8/6b3ec0bc56123b344a8d2b3264a325646d2dcdbdd9848b5e6f3d37db90b3/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615", size = 3891421 },
+ { url = "https://files.pythonhosted.org/packages/57/ff/f3b4b2d007c2a646b0f69440ab06224f9cf37a977a72cdb7b50632174e8a/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390", size = 4107081 },
+]
+
[[package]]
name = "curl-cffi"
version = "0.10.0"
@@ -495,6 +574,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 },
]
+[[package]]
+name = "et-xmlfile"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059 },
+]
+
[[package]]
name = "executing"
version = "2.2.0"
@@ -528,6 +616,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/d4/8c31aad9cc18f451c49f7f9cfb5799dadffc88177f7917bc90a66459b1d7/feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45", size = 81343 },
]
+[[package]]
+name = "flatbuffers"
+version = "25.2.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953 },
+]
+
[[package]]
name = "frozenlist"
version = "1.6.0"
@@ -732,6 +829,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ea/7e/34ee9a6e1ab95d042174bf106b3446788f8ff692b7c55fe5233e32ef7a6e/httpx_curl_cffi-0.1.3-py3-none-any.whl", hash = "sha256:51b7c0789f67493ee510342dbc539aa179007b1dd6c432b19404fd784da01c01", size = 8654 },
]
+[[package]]
+name = "humanfriendly"
+version = "10.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyreadline3", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 },
+]
+
[[package]]
name = "hyperframe"
version = "6.1.0"
@@ -878,6 +987,143 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595 },
]
+[[package]]
+name = "lxml"
+version = "5.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/2d/67693cc8a605a12e5975380d7ff83020dcc759351b5a066e1cced04f797b/lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9", size = 8083240 },
+ { url = "https://files.pythonhosted.org/packages/73/53/b5a05ab300a808b72e848efd152fe9c022c0181b0a70b8bca1199f1bed26/lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7", size = 4387685 },
+ { url = "https://files.pythonhosted.org/packages/d8/cb/1a3879c5f512bdcd32995c301886fe082b2edd83c87d41b6d42d89b4ea4d/lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa", size = 4991164 },
+ { url = "https://files.pythonhosted.org/packages/f9/94/bbc66e42559f9d04857071e3b3d0c9abd88579367fd2588a4042f641f57e/lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df", size = 4746206 },
+ { url = "https://files.pythonhosted.org/packages/66/95/34b0679bee435da2d7cae895731700e519a8dfcab499c21662ebe671603e/lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e", size = 5342144 },
+ { url = "https://files.pythonhosted.org/packages/e0/5d/abfcc6ab2fa0be72b2ba938abdae1f7cad4c632f8d552683ea295d55adfb/lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44", size = 4825124 },
+ { url = "https://files.pythonhosted.org/packages/5a/78/6bd33186c8863b36e084f294fc0a5e5eefe77af95f0663ef33809cc1c8aa/lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba", size = 4876520 },
+ { url = "https://files.pythonhosted.org/packages/3b/74/4d7ad4839bd0fc64e3d12da74fc9a193febb0fae0ba6ebd5149d4c23176a/lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba", size = 4765016 },
+ { url = "https://files.pythonhosted.org/packages/24/0d/0a98ed1f2471911dadfc541003ac6dd6879fc87b15e1143743ca20f3e973/lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c", size = 5362884 },
+ { url = "https://files.pythonhosted.org/packages/48/de/d4f7e4c39740a6610f0f6959052b547478107967362e8424e1163ec37ae8/lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8", size = 4902690 },
+ { url = "https://files.pythonhosted.org/packages/07/8c/61763abd242af84f355ca4ef1ee096d3c1b7514819564cce70fd18c22e9a/lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86", size = 4944418 },
+ { url = "https://files.pythonhosted.org/packages/f9/c5/6d7e3b63e7e282619193961a570c0a4c8a57fe820f07ca3fe2f6bd86608a/lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056", size = 4827092 },
+ { url = "https://files.pythonhosted.org/packages/71/4a/e60a306df54680b103348545706a98a7514a42c8b4fbfdcaa608567bb065/lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7", size = 5418231 },
+ { url = "https://files.pythonhosted.org/packages/27/f2/9754aacd6016c930875854f08ac4b192a47fe19565f776a64004aa167521/lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd", size = 5261798 },
+ { url = "https://files.pythonhosted.org/packages/38/a2/0c49ec6941428b1bd4f280650d7b11a0f91ace9db7de32eb7aa23bcb39ff/lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751", size = 4988195 },
+ { url = "https://files.pythonhosted.org/packages/7a/75/87a3963a08eafc46a86c1131c6e28a4de103ba30b5ae903114177352a3d7/lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4", size = 3474243 },
+ { url = "https://files.pythonhosted.org/packages/fa/f9/1f0964c4f6c2be861c50db380c554fb8befbea98c6404744ce243a3c87ef/lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539", size = 3815197 },
+ { url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392 },
+ { url = "https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103 },
+ { url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224 },
+ { url = "https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913 },
+ { url = "https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441 },
+ { url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165 },
+ { url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580 },
+ { url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size = 4759493 },
+ { url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679 },
+ { url = "https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691 },
+ { url = "https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075 },
+ { url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680 },
+ { url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253 },
+ { url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651 },
+ { url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315 },
+ { url = "https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149 },
+ { url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095 },
+ { url = "https://files.pythonhosted.org/packages/87/cb/2ba1e9dd953415f58548506fa5549a7f373ae55e80c61c9041b7fd09a38a/lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0", size = 8110086 },
+ { url = "https://files.pythonhosted.org/packages/b5/3e/6602a4dca3ae344e8609914d6ab22e52ce42e3e1638c10967568c5c1450d/lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de", size = 4404613 },
+ { url = "https://files.pythonhosted.org/packages/4c/72/bf00988477d3bb452bef9436e45aeea82bb40cdfb4684b83c967c53909c7/lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76", size = 5012008 },
+ { url = "https://files.pythonhosted.org/packages/92/1f/93e42d93e9e7a44b2d3354c462cd784dbaaf350f7976b5d7c3f85d68d1b1/lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d", size = 4760915 },
+ { url = "https://files.pythonhosted.org/packages/45/0b/363009390d0b461cf9976a499e83b68f792e4c32ecef092f3f9ef9c4ba54/lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422", size = 5283890 },
+ { url = "https://files.pythonhosted.org/packages/19/dc/6056c332f9378ab476c88e301e6549a0454dbee8f0ae16847414f0eccb74/lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551", size = 4812644 },
+ { url = "https://files.pythonhosted.org/packages/ee/8a/f8c66bbb23ecb9048a46a5ef9b495fd23f7543df642dabeebcb2eeb66592/lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c", size = 4921817 },
+ { url = "https://files.pythonhosted.org/packages/04/57/2e537083c3f381f83d05d9b176f0d838a9e8961f7ed8ddce3f0217179ce3/lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff", size = 4753916 },
+ { url = "https://files.pythonhosted.org/packages/d8/80/ea8c4072109a350848f1157ce83ccd9439601274035cd045ac31f47f3417/lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60", size = 5289274 },
+ { url = "https://files.pythonhosted.org/packages/b3/47/c4be287c48cdc304483457878a3f22999098b9a95f455e3c4bda7ec7fc72/lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8", size = 4874757 },
+ { url = "https://files.pythonhosted.org/packages/2f/04/6ef935dc74e729932e39478e44d8cfe6a83550552eaa072b7c05f6f22488/lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982", size = 4947028 },
+ { url = "https://files.pythonhosted.org/packages/cb/f9/c33fc8daa373ef8a7daddb53175289024512b6619bc9de36d77dca3df44b/lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61", size = 4834487 },
+ { url = "https://files.pythonhosted.org/packages/8d/30/fc92bb595bcb878311e01b418b57d13900f84c2b94f6eca9e5073ea756e6/lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54", size = 5381688 },
+ { url = "https://files.pythonhosted.org/packages/43/d1/3ba7bd978ce28bba8e3da2c2e9d5ae3f8f521ad3f0ca6ea4788d086ba00d/lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b", size = 5242043 },
+ { url = "https://files.pythonhosted.org/packages/ee/cd/95fa2201041a610c4d08ddaf31d43b98ecc4b1d74b1e7245b1abdab443cb/lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a", size = 5021569 },
+ { url = "https://files.pythonhosted.org/packages/2d/a6/31da006fead660b9512d08d23d31e93ad3477dd47cc42e3285f143443176/lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82", size = 3485270 },
+ { url = "https://files.pythonhosted.org/packages/fc/14/c115516c62a7d2499781d2d3d7215218c0731b2c940753bf9f9b7b73924d/lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f", size = 3814606 },
+]
+
+[[package]]
+name = "magika"
+version = "0.6.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "numpy" },
+ { name = "onnxruntime" },
+ { name = "python-dotenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6d/18/ea70f6abd36f455037340f12c8125918c726d08cd6e01f0b76b6884e0c38/magika-0.6.1.tar.gz", hash = "sha256:e3dd22c73936630b1cd79d0f412d6d9a53dc99ba5e3709b1ac53f56bc998e635", size = 3030234 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1f/be/c9f7bb9ee94abe8d344b660672001313e459c67b867b24abe32d5c80a9ce/magika-0.6.1-py3-none-any.whl", hash = "sha256:15838d2469f1394d8e9598bc7fceea1ede7f35aebe9675c6b45c6b5c48315931", size = 2968516 },
+ { url = "https://files.pythonhosted.org/packages/3c/b9/016b174520e81faef5edb31b6c7a73966dc84ee33acd23a2e7b775df7ba4/magika-0.6.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:dadd036296a2e4840fd48fa0712848fe122da438e8f607dc8f19ca4663c359dc", size = 12408519 },
+ { url = "https://files.pythonhosted.org/packages/02/b7/e7dfeb235823a82d676c68a748541c24db0249b854f945f6e3cec11c1b7e/magika-0.6.1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:133c0e1a844361de86ca2dd7c530e38b324e86177d30c52e36fd82101c190b5c", size = 15089294 },
+ { url = "https://files.pythonhosted.org/packages/64/f0/bec5bff0125d08c1bc3baef88beeb910121085249f67b5994ea961615b55/magika-0.6.1-py3-none-win_amd64.whl", hash = "sha256:0342b6230ea9aea7ab4b8fa92e1b46f1cc62e724d452ee8d6821a37f56738d22", size = 12378455 },
+]
+
+[[package]]
+name = "mammoth"
+version = "1.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cobble" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d6/a6/27a13ba068cf3ff764d631b8dd71dee1b33040aa8c143f66ce902b7d1da0/mammoth-1.9.0.tar.gz", hash = "sha256:74f5dae10ca240fd9b7a0e1a6deaebe0aad23bc590633ef6f5e868aa9b7042a6", size = 50906 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/ab/f8e63fcabc127c6efd68b03633c189ee799a5304fa96c036a325a2894bcb/mammoth-1.9.0-py2.py3-none-any.whl", hash = "sha256:0eea277316586f0ca65d86834aec4de5a0572c83ec54b4991f9bb520a891150f", size = 52901 },
+]
+
+[[package]]
+name = "markdownify"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "beautifulsoup4" },
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2f/78/c48fed23c7aebc2c16049062e72de1da3220c274de59d28c942acdc9ffb2/markdownify-1.1.0.tar.gz", hash = "sha256:449c0bbbf1401c5112379619524f33b63490a8fa479456d41de9dc9e37560ebd", size = 17127 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/64/11/b751af7ad41b254a802cf52f7bc1fca7cabe2388132f2ce60a1a6b9b9622/markdownify-1.1.0-py3-none-any.whl", hash = "sha256:32a5a08e9af02c8a6528942224c91b933b4bd2c7d078f9012943776fc313eeef", size = 13901 },
+]
+
+[[package]]
+name = "markitdown"
+version = "0.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "beautifulsoup4" },
+ { name = "charset-normalizer" },
+ { name = "magika" },
+ { name = "markdownify" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/cb/e8/83669ba97718bbbccd4c432b763d22783df4c8218e770717151acf01e85b/markitdown-0.1.1.tar.gz", hash = "sha256:da97a55a45a3d775ea758e88a344d5cac94ee97115fb0293f99027d32c2fc3f6", size = 31475 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0b/8a/c1f85ee609de5d45f80d0213bebf6664f76ab406e9d57709e684a4a436ba/markitdown-0.1.1-py3-none-any.whl", hash = "sha256:98ea8c009fe174b37ef933e00f4364214e8fed35691178b8521b13604d0c4a58", size = 48230 },
+]
+
+[package.optional-dependencies]
+docx = [
+ { name = "mammoth" },
+]
+pdf = [
+ { name = "pdfminer-six" },
+]
+pptx = [
+ { name = "python-pptx" },
+]
+xls = [
+ { name = "pandas" },
+ { name = "xlrd" },
+]
+xlsx = [
+ { name = "openpyxl" },
+ { name = "pandas" },
+]
+
[[package]]
name = "matplotlib-inline"
version = "0.1.7"
@@ -890,6 +1136,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 },
]
+[[package]]
+name = "mpmath"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 },
+]
+
[[package]]
name = "multidict"
version = "6.4.3"
@@ -967,6 +1222,83 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/96/10/7d526c8974f017f1e7ca584c71ee62a638e9334d8d33f27d7cdfc9ae79e4/multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9", size = 10400 },
]
+[[package]]
+name = "numpy"
+version = "2.2.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/dc/b2/ce4b867d8cd9c0ee84938ae1e6a6f7926ebf928c9090d036fc3c6a04f946/numpy-2.2.5.tar.gz", hash = "sha256:a9c0d994680cd991b1cb772e8b297340085466a6fe964bc9d4e80f5e2f43c291", size = 20273920 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f5/fb/e4e4c254ba40e8f0c78218f9e86304628c75b6900509b601c8433bdb5da7/numpy-2.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c42365005c7a6c42436a54d28c43fe0e01ca11eb2ac3cefe796c25a5f98e5e9b", size = 21256475 },
+ { url = "https://files.pythonhosted.org/packages/81/32/dd1f7084f5c10b2caad778258fdaeedd7fbd8afcd2510672811e6138dfac/numpy-2.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:498815b96f67dc347e03b719ef49c772589fb74b8ee9ea2c37feae915ad6ebda", size = 14461474 },
+ { url = "https://files.pythonhosted.org/packages/0e/65/937cdf238ef6ac54ff749c0f66d9ee2b03646034c205cea9b6c51f2f3ad1/numpy-2.2.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6411f744f7f20081b1b4e7112e0f4c9c5b08f94b9f086e6f0adf3645f85d3a4d", size = 5426875 },
+ { url = "https://files.pythonhosted.org/packages/25/17/814515fdd545b07306eaee552b65c765035ea302d17de1b9cb50852d2452/numpy-2.2.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9de6832228f617c9ef45d948ec1cd8949c482238d68b2477e6f642c33a7b0a54", size = 6969176 },
+ { url = "https://files.pythonhosted.org/packages/e5/32/a66db7a5c8b5301ec329ab36d0ecca23f5e18907f43dbd593c8ec326d57c/numpy-2.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:369e0d4647c17c9363244f3468f2227d557a74b6781cb62ce57cf3ef5cc7c610", size = 14374850 },
+ { url = "https://files.pythonhosted.org/packages/ad/c9/1bf6ada582eebcbe8978f5feb26584cd2b39f94ededeea034ca8f84af8c8/numpy-2.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:262d23f383170f99cd9191a7c85b9a50970fe9069b2f8ab5d786eca8a675d60b", size = 16430306 },
+ { url = "https://files.pythonhosted.org/packages/6a/f0/3f741863f29e128f4fcfdb99253cc971406b402b4584663710ee07f5f7eb/numpy-2.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa70fdbdc3b169d69e8c59e65c07a1c9351ceb438e627f0fdcd471015cd956be", size = 15884767 },
+ { url = "https://files.pythonhosted.org/packages/98/d9/4ccd8fd6410f7bf2d312cbc98892e0e43c2fcdd1deae293aeb0a93b18071/numpy-2.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37e32e985f03c06206582a7323ef926b4e78bdaa6915095ef08070471865b906", size = 18219515 },
+ { url = "https://files.pythonhosted.org/packages/b1/56/783237243d4395c6dd741cf16eeb1a9035ee3d4310900e6b17e875d1b201/numpy-2.2.5-cp311-cp311-win32.whl", hash = "sha256:f5045039100ed58fa817a6227a356240ea1b9a1bc141018864c306c1a16d4175", size = 6607842 },
+ { url = "https://files.pythonhosted.org/packages/98/89/0c93baaf0094bdaaaa0536fe61a27b1dce8a505fa262a865ec142208cfe9/numpy-2.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:b13f04968b46ad705f7c8a80122a42ae8f620536ea38cf4bdd374302926424dd", size = 12949071 },
+ { url = "https://files.pythonhosted.org/packages/e2/f7/1fd4ff108cd9d7ef929b8882692e23665dc9c23feecafbb9c6b80f4ec583/numpy-2.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ee461a4eaab4f165b68780a6a1af95fb23a29932be7569b9fab666c407969051", size = 20948633 },
+ { url = "https://files.pythonhosted.org/packages/12/03/d443c278348371b20d830af155ff2079acad6a9e60279fac2b41dbbb73d8/numpy-2.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec31367fd6a255dc8de4772bd1658c3e926d8e860a0b6e922b615e532d320ddc", size = 14176123 },
+ { url = "https://files.pythonhosted.org/packages/2b/0b/5ca264641d0e7b14393313304da48b225d15d471250376f3fbdb1a2be603/numpy-2.2.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:47834cde750d3c9f4e52c6ca28a7361859fcaf52695c7dc3cc1a720b8922683e", size = 5163817 },
+ { url = "https://files.pythonhosted.org/packages/04/b3/d522672b9e3d28e26e1613de7675b441bbd1eaca75db95680635dd158c67/numpy-2.2.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:2c1a1c6ccce4022383583a6ded7bbcda22fc635eb4eb1e0a053336425ed36dfa", size = 6698066 },
+ { url = "https://files.pythonhosted.org/packages/a0/93/0f7a75c1ff02d4b76df35079676b3b2719fcdfb39abdf44c8b33f43ef37d/numpy-2.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d75f338f5f79ee23548b03d801d28a505198297534f62416391857ea0479571", size = 14087277 },
+ { url = "https://files.pythonhosted.org/packages/b0/d9/7c338b923c53d431bc837b5b787052fef9ae68a56fe91e325aac0d48226e/numpy-2.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a801fef99668f309b88640e28d261991bfad9617c27beda4a3aec4f217ea073", size = 16135742 },
+ { url = "https://files.pythonhosted.org/packages/2d/10/4dec9184a5d74ba9867c6f7d1e9f2e0fb5fe96ff2bf50bb6f342d64f2003/numpy-2.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:abe38cd8381245a7f49967a6010e77dbf3680bd3627c0fe4362dd693b404c7f8", size = 15581825 },
+ { url = "https://files.pythonhosted.org/packages/80/1f/2b6fcd636e848053f5b57712a7d1880b1565eec35a637fdfd0a30d5e738d/numpy-2.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a0ac90e46fdb5649ab6369d1ab6104bfe5854ab19b645bf5cda0127a13034ae", size = 17899600 },
+ { url = "https://files.pythonhosted.org/packages/ec/87/36801f4dc2623d76a0a3835975524a84bd2b18fe0f8835d45c8eae2f9ff2/numpy-2.2.5-cp312-cp312-win32.whl", hash = "sha256:0cd48122a6b7eab8f06404805b1bd5856200e3ed6f8a1b9a194f9d9054631beb", size = 6312626 },
+ { url = "https://files.pythonhosted.org/packages/8b/09/4ffb4d6cfe7ca6707336187951992bd8a8b9142cf345d87ab858d2d7636a/numpy-2.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:ced69262a8278547e63409b2653b372bf4baff0870c57efa76c5703fd6543282", size = 12645715 },
+ { url = "https://files.pythonhosted.org/packages/e2/a0/0aa7f0f4509a2e07bd7a509042967c2fab635690d4f48c6c7b3afd4f448c/numpy-2.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059b51b658f4414fff78c6d7b1b4e18283ab5fa56d270ff212d5ba0c561846f4", size = 20935102 },
+ { url = "https://files.pythonhosted.org/packages/7e/e4/a6a9f4537542912ec513185396fce52cdd45bdcf3e9d921ab02a93ca5aa9/numpy-2.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47f9ed103af0bc63182609044b0490747e03bd20a67e391192dde119bf43d52f", size = 14191709 },
+ { url = "https://files.pythonhosted.org/packages/be/65/72f3186b6050bbfe9c43cb81f9df59ae63603491d36179cf7a7c8d216758/numpy-2.2.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:261a1ef047751bb02f29dfe337230b5882b54521ca121fc7f62668133cb119c9", size = 5149173 },
+ { url = "https://files.pythonhosted.org/packages/e5/e9/83e7a9432378dde5802651307ae5e9ea07bb72b416728202218cd4da2801/numpy-2.2.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4520caa3807c1ceb005d125a75e715567806fed67e315cea619d5ec6e75a4191", size = 6684502 },
+ { url = "https://files.pythonhosted.org/packages/ea/27/b80da6c762394c8ee516b74c1f686fcd16c8f23b14de57ba0cad7349d1d2/numpy-2.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d14b17b9be5f9c9301f43d2e2a4886a33b53f4e6fdf9ca2f4cc60aeeee76372", size = 14084417 },
+ { url = "https://files.pythonhosted.org/packages/aa/fc/ebfd32c3e124e6a1043e19c0ab0769818aa69050ce5589b63d05ff185526/numpy-2.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba321813a00e508d5421104464510cc962a6f791aa2fca1c97b1e65027da80d", size = 16133807 },
+ { url = "https://files.pythonhosted.org/packages/bf/9b/4cc171a0acbe4666f7775cfd21d4eb6bb1d36d3a0431f48a73e9212d2278/numpy-2.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4cbdef3ddf777423060c6f81b5694bad2dc9675f110c4b2a60dc0181543fac7", size = 15575611 },
+ { url = "https://files.pythonhosted.org/packages/a3/45/40f4135341850df48f8edcf949cf47b523c404b712774f8855a64c96ef29/numpy-2.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54088a5a147ab71a8e7fdfd8c3601972751ded0739c6b696ad9cb0343e21ab73", size = 17895747 },
+ { url = "https://files.pythonhosted.org/packages/f8/4c/b32a17a46f0ffbde8cc82df6d3daeaf4f552e346df143e1b188a701a8f09/numpy-2.2.5-cp313-cp313-win32.whl", hash = "sha256:c8b82a55ef86a2d8e81b63da85e55f5537d2157165be1cb2ce7cfa57b6aef38b", size = 6309594 },
+ { url = "https://files.pythonhosted.org/packages/13/ae/72e6276feb9ef06787365b05915bfdb057d01fceb4a43cb80978e518d79b/numpy-2.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:d8882a829fd779f0f43998e931c466802a77ca1ee0fe25a3abe50278616b1471", size = 12638356 },
+ { url = "https://files.pythonhosted.org/packages/79/56/be8b85a9f2adb688e7ded6324e20149a03541d2b3297c3ffc1a73f46dedb/numpy-2.2.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8b025c351b9f0e8b5436cf28a07fa4ac0204d67b38f01433ac7f9b870fa38c6", size = 20963778 },
+ { url = "https://files.pythonhosted.org/packages/ff/77/19c5e62d55bff507a18c3cdff82e94fe174957bad25860a991cac719d3ab/numpy-2.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dfa94b6a4374e7851bbb6f35e6ded2120b752b063e6acdd3157e4d2bb922eba", size = 14207279 },
+ { url = "https://files.pythonhosted.org/packages/75/22/aa11f22dc11ff4ffe4e849d9b63bbe8d4ac6d5fae85ddaa67dfe43be3e76/numpy-2.2.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:97c8425d4e26437e65e1d189d22dff4a079b747ff9c2788057bfb8114ce1e133", size = 5199247 },
+ { url = "https://files.pythonhosted.org/packages/4f/6c/12d5e760fc62c08eded0394f62039f5a9857f758312bf01632a81d841459/numpy-2.2.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:352d330048c055ea6db701130abc48a21bec690a8d38f8284e00fab256dc1376", size = 6711087 },
+ { url = "https://files.pythonhosted.org/packages/ef/94/ece8280cf4218b2bee5cec9567629e61e51b4be501e5c6840ceb593db945/numpy-2.2.5-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b4c0773b6ada798f51f0f8e30c054d32304ccc6e9c5d93d46cb26f3d385ab19", size = 14059964 },
+ { url = "https://files.pythonhosted.org/packages/39/41/c5377dac0514aaeec69115830a39d905b1882819c8e65d97fc60e177e19e/numpy-2.2.5-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f09e00d4dccd76b179c0f18a44f041e5332fd0e022886ba1c0bbf3ea4a18d0", size = 16121214 },
+ { url = "https://files.pythonhosted.org/packages/db/54/3b9f89a943257bc8e187145c6bc0eb8e3d615655f7b14e9b490b053e8149/numpy-2.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02f226baeefa68f7d579e213d0f3493496397d8f1cff5e2b222af274c86a552a", size = 15575788 },
+ { url = "https://files.pythonhosted.org/packages/b1/c4/2e407e85df35b29f79945751b8f8e671057a13a376497d7fb2151ba0d290/numpy-2.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c26843fd58f65da9491165072da2cccc372530681de481ef670dcc8e27cfb066", size = 17893672 },
+ { url = "https://files.pythonhosted.org/packages/29/7e/d0b44e129d038dba453f00d0e29ebd6eaf2f06055d72b95b9947998aca14/numpy-2.2.5-cp313-cp313t-win32.whl", hash = "sha256:1a161c2c79ab30fe4501d5a2bbfe8b162490757cf90b7f05be8b80bc02f7bb8e", size = 6377102 },
+ { url = "https://files.pythonhosted.org/packages/63/be/b85e4aa4bf42c6502851b971f1c326d583fcc68227385f92089cf50a7b45/numpy-2.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:d403c84991b5ad291d3809bace5e85f4bbf44a04bdc9a88ed2bb1807b3360bb8", size = 12750096 },
+]
+
+[[package]]
+name = "onnxruntime"
+version = "1.21.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "coloredlogs" },
+ { name = "flatbuffers" },
+ { name = "numpy" },
+ { name = "packaging" },
+ { name = "protobuf" },
+ { name = "sympy" },
+]
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/70/ba/13c46c22fb52d8fea53575da163399a7d75fe61223aba685370f047a0882/onnxruntime-1.21.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:8bee9b5ba7b88ae7bfccb4f97bbe1b4bae801b0fb05d686b28a722cb27c89931", size = 33643424 },
+ { url = "https://files.pythonhosted.org/packages/18/4f/68985138c507b6ad34061aa4f330b8fbd30b0c5c299be53f0c829420528e/onnxruntime-1.21.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b6a29a1767b92d543091349f5397a1c7619eaca746cd1bc47f8b4ec5a9f1a6c", size = 14162437 },
+ { url = "https://files.pythonhosted.org/packages/0f/76/7dfa4b63f95a17eaf881c9c464feaa59a25bbfb578db204fc22d522b5199/onnxruntime-1.21.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:982dcc04a6688e1af9e3da1d4ef2bdeb11417cf3f8dde81f8f721043c1919a4f", size = 16002403 },
+ { url = "https://files.pythonhosted.org/packages/80/85/397406e758d6c30fb6d0d0152041c6b9ee835c3584765837ce54230c8bc9/onnxruntime-1.21.1-cp311-cp311-win_amd64.whl", hash = "sha256:2b6052c04b9125319293abb9bdcce40e806db3e097f15b82242d4cd72d81fd0c", size = 12301824 },
+ { url = "https://files.pythonhosted.org/packages/a5/42/274438bbc259439fa1606d0d6d2eef4171cdbd2d7a1c3b249b4ba440424b/onnxruntime-1.21.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:f615c05869a523a94d0a4de1f0936d0199a473cf104d630fc26174bebd5759bd", size = 33658457 },
+ { url = "https://files.pythonhosted.org/packages/9c/93/76f629d4f22571b0b3a29a9d375204faae2bd2b07d557043b56df5848779/onnxruntime-1.21.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79dfb1f47386c4edd115b21015354b2f05f5566c40c98606251f15a64add3cbe", size = 14164881 },
+ { url = "https://files.pythonhosted.org/packages/1b/86/75cbaa4058758fa8ef912dfebba2d5a4e4fd6738615c15b6a2262d076198/onnxruntime-1.21.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2742935d6610fe0f58e1995018d9db7e8239d0201d9ebbdb7964a61386b5390a", size = 16019966 },
+ { url = "https://files.pythonhosted.org/packages/5f/9d/fb8895b2cb38c9965d4b4e0a9aa1398f3e3f16c4acb75cf3b61689780a65/onnxruntime-1.21.1-cp312-cp312-win_amd64.whl", hash = "sha256:a7afdb3fcb162f5536225e13c2b245018068964b1d0eee05303ea6823ca6785e", size = 12302925 },
+ { url = "https://files.pythonhosted.org/packages/6d/7e/8445eb44ba9fe0ce0bc77c4b569d79f7e3efd6da2dd87c5a04347e6c134e/onnxruntime-1.21.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:ed4f9771233a92edcab9f11f537702371d450fe6cd79a727b672d37b9dab0cde", size = 33658643 },
+ { url = "https://files.pythonhosted.org/packages/ce/46/9c4026d302f1c7e8427bf9fa3da2d7526d9c5200242bde6adee7928ef1c9/onnxruntime-1.21.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bc100fd1f4f95258e7d0f7068ec69dec2a47cc693f745eec9cf4561ee8d952a", size = 14165205 },
+ { url = "https://files.pythonhosted.org/packages/44/b2/4e4c6b5c03be752d74cb20937961c76f53fe87a9760d5b7345629d35bb31/onnxruntime-1.21.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0fea0d2b98eecf4bebe01f7ce9a265a5d72b3050e9098063bfe65fa2b0633a8e", size = 16019529 },
+ { url = "https://files.pythonhosted.org/packages/ec/1d/afca646af339cc6735f3fb7fafb9ca94b578c5b6a0ebd63a312468767bdb/onnxruntime-1.21.1-cp313-cp313-win_amd64.whl", hash = "sha256:da606061b9ed1b05b63a37be38c2014679a3e725903f58036ffd626df45c0e47", size = 12303603 },
+ { url = "https://files.pythonhosted.org/packages/a5/12/a01e38c9a6b8d7c28e04d9eb83ad9143d568b961474ba49f0f18a3eeec82/onnxruntime-1.21.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94674315d40d521952bfc28007ce9b6728e87753e1f18d243c8cd953f25903b8", size = 14176329 },
+ { url = "https://files.pythonhosted.org/packages/3a/72/5ff85c540fd6a465610ce47e4cee8fccb472952fc1d589112f51ae2520a5/onnxruntime-1.21.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c9e4571ff5b2a5d377d414bc85cd9450ba233a9a92f766493874f1093976453", size = 15990556 },
+]
+
[[package]]
name = "openai"
version = "1.76.0"
@@ -986,6 +1318,68 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/59/aa/84e02ab500ca871eb8f62784426963a1c7c17a72fea3c7f268af4bbaafa5/openai-1.76.0-py3-none-any.whl", hash = "sha256:a712b50e78cf78e6d7b2a8f69c4978243517c2c36999756673e07a14ce37dc0a", size = 661201 },
]
+[[package]]
+name = "openpyxl"
+version = "3.1.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "et-xmlfile" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910 },
+]
+
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 },
+]
+
+[[package]]
+name = "pandas"
+version = "2.2.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+ { name = "python-dateutil" },
+ { name = "pytz" },
+ { name = "tzdata" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222 },
+ { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274 },
+ { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836 },
+ { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505 },
+ { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420 },
+ { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457 },
+ { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166 },
+ { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 },
+ { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 },
+ { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 },
+ { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 },
+ { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 },
+ { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 },
+ { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 },
+ { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 },
+ { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 },
+ { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 },
+ { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 },
+ { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 },
+ { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 },
+ { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 },
+ { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 },
+ { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 },
+ { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 },
+ { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 },
+ { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 },
+ { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 },
+]
+
[[package]]
name = "parso"
version = "0.8.4"
@@ -995,6 +1389,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650 },
]
+[[package]]
+name = "pdfminer-six"
+version = "20250416"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "charset-normalizer" },
+ { name = "cryptography" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/27/1a99ce4cfce829bb91040f82a53f33b33fec4e070d2b9c1b45f6796cd8dc/pdfminer_six-20250416.tar.gz", hash = "sha256:30956a85f9d0add806a4e460ed0d67c2b6a48b53323c7ac87de23174596d3acd", size = 7384630 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/77/32/89749ba23e5020e89fb584c1b39d7da6d7c56a9048307de8a88eec79e2d3/pdfminer_six-20250416-py3-none-any.whl", hash = "sha256:dd2a9ad7bc7dd6b62d009aaa9c101ac9d069a47937724569c375a6a9078da303", size = 5619271 },
+]
+
[[package]]
name = "pexpect"
version = "4.9.0"
@@ -1183,6 +1590,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b8/d3/c3cb8f1d6ae3b37f83e1de806713a9b3642c5895f0215a62e1a4bd6e5e34/propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40", size = 12376 },
]
+[[package]]
+name = "protobuf"
+version = "6.30.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c8/8c/cf2ac658216eebe49eaedf1e06bc06cbf6a143469236294a1171a51357c3/protobuf-6.30.2.tar.gz", hash = "sha256:35c859ae076d8c56054c25b59e5e59638d86545ed6e2b6efac6be0b6ea3ba048", size = 429315 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/be/85/cd53abe6a6cbf2e0029243d6ae5fb4335da2996f6c177bb2ce685068e43d/protobuf-6.30.2-cp310-abi3-win32.whl", hash = "sha256:b12ef7df7b9329886e66404bef5e9ce6a26b54069d7f7436a0853ccdeb91c103", size = 419148 },
+ { url = "https://files.pythonhosted.org/packages/97/e9/7b9f1b259d509aef2b833c29a1f3c39185e2bf21c9c1be1cd11c22cb2149/protobuf-6.30.2-cp310-abi3-win_amd64.whl", hash = "sha256:7653c99774f73fe6b9301b87da52af0e69783a2e371e8b599b3e9cb4da4b12b9", size = 431003 },
+ { url = "https://files.pythonhosted.org/packages/8e/66/7f3b121f59097c93267e7f497f10e52ced7161b38295137a12a266b6c149/protobuf-6.30.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:0eb523c550a66a09a0c20f86dd554afbf4d32b02af34ae53d93268c1f73bc65b", size = 417579 },
+ { url = "https://files.pythonhosted.org/packages/d0/89/bbb1bff09600e662ad5b384420ad92de61cab2ed0f12ace1fd081fd4c295/protobuf-6.30.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:50f32cc9fd9cb09c783ebc275611b4f19dfdfb68d1ee55d2f0c7fa040df96815", size = 317319 },
+ { url = "https://files.pythonhosted.org/packages/28/50/1925de813499546bc8ab3ae857e3ec84efe7d2f19b34529d0c7c3d02d11d/protobuf-6.30.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4f6c687ae8efae6cf6093389a596548214467778146b7245e886f35e1485315d", size = 316212 },
+ { url = "https://files.pythonhosted.org/packages/e5/a1/93c2acf4ade3c5b557d02d500b06798f4ed2c176fa03e3c34973ca92df7f/protobuf-6.30.2-py3-none-any.whl", hash = "sha256:ae86b030e69a98e08c77beab574cbcb9fff6d031d57209f574a5aea1445f4b51", size = 167062 },
+]
+
[[package]]
name = "ptyprocess"
version = "0.7.0"
@@ -1347,6 +1768,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 },
]
+[[package]]
+name = "pyreadline3"
+version = "3.5.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 },
+]
+
[[package]]
name = "pyrotgfork"
version = "2.2.10"
@@ -1415,6 +1845,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
]
+[[package]]
+name = "python-dotenv"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 },
+]
+
[[package]]
name = "python-ffmpeg"
version = "2.0.12"
@@ -1437,6 +1876,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6c/73/9f872cb81fc5c3bb48f7227872c28975f998f3e7c2b1c16e95e6432bbb90/python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3", size = 13840 },
]
+[[package]]
+name = "python-pptx"
+version = "1.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "lxml" },
+ { name = "pillow" },
+ { name = "typing-extensions" },
+ { name = "xlsxwriter" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/52/a9/0c0db8d37b2b8a645666f7fd8accea4c6224e013c42b1d5c17c93590cd06/python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095", size = 10109297 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788 },
+]
+
+[[package]]
+name = "pytz"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 },
+]
+
[[package]]
name = "pyyaml"
version = "6.0.2"
@@ -1631,6 +2094,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 },
]
+[[package]]
+name = "sympy"
+version = "1.14.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mpmath" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 },
+]
+
[[package]]
name = "telegraph"
version = "2.2.0"
@@ -1889,6 +2364,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594 },
]
+[[package]]
+name = "xlrd"
+version = "2.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a6/b3/19a2540d21dea5f908304375bd43f5ed7a4c28a370dc9122c565423e6b44/xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88", size = 100259 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a6/0c/c2a72d51fe56e08a08acc85d13013558a2d793028ae7385448a6ccdfae64/xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd", size = 96531 },
+]
+
+[[package]]
+name = "xlsxwriter"
+version = "3.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a7/d1/e026d33dd5d552e5bf3a873dee54dad66b550230df8290d79394f09b2315/xlsxwriter-3.2.3.tar.gz", hash = "sha256:ad6fd41bdcf1b885876b1f6b7087560aecc9ae5a9cc2ba97dcac7ab2e210d3d5", size = 209135 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/37/b1/a252d499f2760b314fcf264d2b36fcc4343a1ecdb25492b210cb0db70a68/XlsxWriter-3.2.3-py3-none-any.whl", hash = "sha256:593f8296e8a91790c6d0378ab08b064f34a642b3feb787cf6738236bd0a4860d", size = 169433 },
+]
+
[[package]]
name = "yarl"
version = "1.20.0"