Commit e0f3eda
2025-01-21 15:29:54
Changed files (37)
.github
workflows
scripts
src
.github/workflows/baseimg.yml
@@ -0,0 +1,46 @@
+---
+name: baseimg
+
+on:
+ workflow_dispatch:
+ push:
+ paths:
+ - docker/base.Dockerfile
+ - .github/workflows/baseimg.yml
+ - uv.lock
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+permissions:
+ contents: read
+ packages: write
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ - name: Login to GHCR
+ uses: docker/login-action@v3
+ with:
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: ghcr.io
+
+ - name: Build
+ uses: docker/build-push-action@v6
+ with:
+ context: .
+ file: docker/base.Dockerfile
+ cache-from: type=registry,ref=ghcr.io/${{ github.repository }}:build-cache
+ cache-to: type=registry,ref=ghcr.io/${{ github.repository }}:build-cache,mode=max
+ platforms: linux/amd64
+ push: ${{ github.ref == 'refs/heads/main' }}
+ tags: ghcr.io/${{ github.repository }}:base
.github/workflows/docker.yml
@@ -0,0 +1,66 @@
+---
+name: docker
+
+on:
+ workflow_dispatch:
+ workflow_run:
+ workflows: [baseimg]
+ types:
+ - completed
+ push:
+ paths:
+ - src/**
+ - .github/workflows/docker.yml
+ - docker/Dockerfile
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+permissions:
+ contents: read
+ packages: write
+
+jobs:
+ build:
+ if: ${{ github.event.workflow_run.conclusion != 'failure' }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Docker meta
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ghcr.io/${{ github.repository }}
+ tags: |
+ type=ref,event=branch
+ type=ref,event=pr
+ type=sha,prefix=,format=long
+ type=semver,pattern=v{{version}}
+ type=semver,pattern=v{{major}}.{{minor}}
+ type=semver,pattern=v{{major}}
+ type=raw,value=latest,enable={{is_default_branch}}
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ - name: Login to GHCR
+ uses: docker/login-action@v3
+ with:
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: ghcr.io
+
+ - name: Build
+ uses: docker/build-push-action@v6
+ with:
+ context: .
+ file: docker/Dockerfile
+ build-args: |
+ IMAGE_NAME=ghcr.io/${{ github.repository }}
+ platforms: linux/amd64
+ push: ${{ github.ref == 'refs/heads/main' }}
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
.github/dependabot.yml
@@ -0,0 +1,13 @@
+---
+version: 2
+
+updates:
+ - package-ecosystem: pip
+ directory: /
+ schedule:
+ interval: monthly
+
+ - package-ecosystem: docker
+ directory: /docker
+ schedule:
+ interval: monthly
+
+ - package-ecosystem: github-actions
+ directory: /
+ schedule:
+ interval: monthly
docker/base.Dockerfile
@@ -0,0 +1,27 @@
+FROM ghcr.io/benny-dou/ffmpeg:latest@sha256:f184d2518e6e5a0986393420a42ef965dd03f51f083635951545acf0b3dcba5d AS ffmpeg
+FROM shinsenter/s6-overlay:latest@sha256:048acd13b6221dd1eea0d6677bca7f463b86474a5257f4154582884098a7a4ea AS s6
+FROM python:3.13-slim@sha256:23a81be7b258c8f516f7a60e80943cace4350deb8204cf107c7993e343610d47 AS python
+
+FROM python AS venv
+RUN --mount=type=bind,source=uv.lock,target=uv.lock \
+ --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+ --mount=from=ghcr.io/astral-sh/uv:0.5,source=/uv,target=/bin/uv \
+ uv venv --relocatable --no-python-downloads --no-cache && \
+ uv sync --frozen --no-dev --no-python-downloads --no-cache --no-install-project --no-editable
+COPY --from=ffmpeg /ffmpeg /.venv/bin/ffmpeg
+COPY --from=ffmpeg /ffprobe /.venv/bin/ffprobe
+
+FROM python
+COPY --link --from=s6 / /
+
+## add PUID and PGID support
+COPY docker/fix-permission /etc/cont-init.d/10-adduser
+# copy venv
+COPY --from=venv /.venv /venv
+
+RUN groupadd --gid 1000 abc && \
+ useradd -u 1000 -g 1000 --create-home -d /app -s /bin/false abc
+
+ENV PATH=/command:/venv/bin:$PATH
+# important: sets s6-overlay entrypoint
+ENTRYPOINT ["/init"]
docker/Dockerfile
@@ -0,0 +1,5 @@
+ARG IMAGE_NAME
+FROM $IMAGE_NAME:base
+COPY --chown=abc src /app
+WORKDIR /app
+CMD ["with-contenv", "s6-setuidgid", "abc", "python", "/app/main.py"]
docker/fix-permission
@@ -0,0 +1,16 @@
+#!/command/with-contenv bash
+
+PUID=${PUID:-1000}
+PGID=${PGID:-1000}
+
+groupmod -o -g "$PGID" abc
+usermod -o -u "$PUID" abc
+
+
+echo "
+User uid: $(id -u abc)
+User gid: $(id -g abc)
+-------------------------------------
+"
+
+chown abc:abc /app
scripts/auth.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""此脚本用于生成 pyrogram 的 session string.
+
+APP ID 和 APP HASH 请在此链接申请: https://my.telegram.org/apps
+
+如果是个人账号 (用户账号), 只需要提供 APP ID 和 APP HASH
+如果是机器人账号, 还需要提供 BOT TOKEN. (BOT TOKEN可以在 @BotFather 那里获取)
+"""
+
+import argparse
+from pathlib import Path
+
+from pyrogram.client import Client
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--appid", type=str, required=True, help="APP ID")
+parser.add_argument("--apphash", type=str, required=True, help="APP HASH")
+parser.add_argument("--bot-token", type=str, required=False, help="BOT TOKEN (Optional, for bot account only)")
+args = parser.parse_args()
+
+proxy = {
+ "scheme": "socks5", # "socks4", "socks5" or "http"
+ "hostname": "127.0.0.1",
+ "port": 7890,
+ # "username": "username",
+ # "password": "password",
+}
+session_path = Path(__file__).parent / "account.session_string"
+session_path.unlink(missing_ok=True)
+with Client("account", api_id=int(args.appid), api_hash=args.apphash, bot_token=args.bot_token, in_memory=True, proxy=proxy) as app:
+ session_str = app.export_session_string() # type: ignore
+ session_path.write_text(session_str)
+
+print(session_path.read_text())
src/asr/tecent_asr.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# From: https://github.com/TencentCloud/tencentcloud-speech-sdk-python
+import base64
+import hashlib
+import hmac
+import time
+from json import JSONDecodeError
+
+from httpx import AsyncClient, RequestError
+from loguru import logger
+
+from config import PROXY
+
+
+# 录音识别极速版
+class FlashRecognitionRequest:
+ def __init__(self, engine_type):
+ self.engine_type = engine_type
+ self.speaker_diarization = 0
+ self.hotword_id = ""
+ self.hotword_list = ""
+ self.input_sample_rate = 0
+ self.customization_id = ""
+ self.filter_dirty = 0
+ self.filter_modal = 0
+ self.filter_punc = 0
+ self.convert_num_mode = 1
+ self.word_info = 0
+ self.voice_format = ""
+ self.first_channel_only = 1
+ self.reinforce_hotword = 0
+ self.sentence_max_length = 0
+
+ def set_first_channel_only(self, first_channel_only):
+ self.first_channel_only = first_channel_only
+
+ def set_speaker_diarization(self, speaker_diarization):
+ self.speaker_diarization = speaker_diarization
+
+ def set_filter_dirty(self, filter_dirty):
+ self.filter_dirty = filter_dirty
+
+ def set_filter_modal(self, filter_modal):
+ self.filter_modal = filter_modal
+
+ def set_filter_punc(self, filter_punc):
+ self.filter_punc = filter_punc
+
+ def set_convert_num_mode(self, convert_num_mode):
+ self.convert_num_mode = convert_num_mode
+
+ def set_word_info(self, word_info):
+ self.word_info = word_info
+
+ def set_hotword_id(self, hotword_id):
+ self.hotword_id = hotword_id
+
+ def set_hotword_list(self, hotword_list):
+ self.hotword_list = hotword_list
+
+ def set_input_sample_rate(self, input_sample_rate):
+ self.input_sample_rate = input_sample_rate
+
+ def set_customization_id(self, customization_id):
+ self.customization_id = customization_id
+
+ def set_voice_format(self, voice_format):
+ self.voice_format = voice_format
+
+ def set_sentence_max_length(self, sentence_max_length):
+ self.sentence_max_length = sentence_max_length
+
+ def set_reinforce_hotword(self, reinforce_hotword):
+ self.reinforce_hotword = reinforce_hotword
+
+
+class FlashRecognizer:
+ def __init__(self, appid, credential):
+ self.credential = credential
+ self.appid = appid
+
+ def _format_sign_string(self, param):
+ signstr = "POSTasr.cloud.tencent.com/asr/flash/v1/"
+ for t in param:
+ if "appid" in t:
+ signstr += str(t[1])
+ break
+ signstr += "?"
+ for x in param:
+ tmp = x
+ if "appid" in x:
+ continue
+ for t in tmp:
+ signstr += str(t)
+ signstr += "="
+ signstr = signstr[:-1]
+ signstr += "&"
+ return signstr[:-1]
+
+ def _build_header(self):
+ header = {}
+ header["Host"] = "asr.cloud.tencent.com"
+ return header
+
+ def _sign(self, signstr, secret_key):
+ hmacstr = hmac.new(secret_key.encode("utf-8"), signstr.encode("utf-8"), hashlib.sha1).digest()
+ s = base64.b64encode(hmacstr)
+ return s.decode("utf-8")
+
+ def _build_req_with_signature(self, secret_key, params, header):
+ query = sorted(params.items(), key=lambda d: d[0])
+ signstr = self._format_sign_string(query)
+ signature = self._sign(signstr, secret_key)
+ header["authorization"] = signature
+ requrl = "https://"
+ requrl += signstr[4::]
+ return requrl
+
+ def _create_query_arr(self, req):
+ query_arr = {}
+ query_arr["appid"] = self.appid
+ query_arr["secretid"] = self.credential.secret_id
+ query_arr["timestamp"] = str(int(time.time()))
+ query_arr["engine_type"] = req.engine_type
+ query_arr["voice_format"] = req.voice_format
+ query_arr["speaker_diarization"] = req.speaker_diarization
+ if req.hotword_id != "":
+ query_arr["hotword_id"] = req.hotword_id
+ if req.hotword_list != "":
+ query_arr["hotword_list"] = req.hotword_list
+ if req.input_sample_rate != 0:
+ query_arr["input_sample_rate"] = req.input_sample_rate
+ query_arr["customization_id"] = req.customization_id
+ query_arr["filter_dirty"] = req.filter_dirty
+ query_arr["filter_modal"] = req.filter_modal
+ query_arr["filter_punc"] = req.filter_punc
+ query_arr["convert_num_mode"] = req.convert_num_mode
+ query_arr["word_info"] = req.word_info
+ query_arr["first_channel_only"] = req.first_channel_only
+ query_arr["reinforce_hotword"] = req.reinforce_hotword
+ query_arr["sentence_max_length"] = req.sentence_max_length
+ return query_arr
+
+ async def recognize(self, req, data) -> dict:
+ header = self._build_header()
+ query_arr = self._create_query_arr(req)
+ req_url = self._build_req_with_signature(self.credential.secret_key, query_arr, header)
+ async with AsyncClient(http2=True, proxy=PROXY.TENCENT, follow_redirects=True) as hx:
+ try:
+ resp = await hx.post(req_url, headers=header, data=data, timeout=30)
+ resp.raise_for_status()
+ if resp.json().get("code") != 0:
+ logger.warning(f"ASR failed: {resp.json()}")
+ return resp.json()
+ except JSONDecodeError:
+ logger.error(f"ASR Json decode failed: {resp}")
+ except RequestError as exc:
+ logger.warning(f"ASR failed while requesting {exc.request.url!r}.")
+ except Exception as e:
+ logger.warning(f"ASR failed: {e}")
+ # on any failure return an empty dict: the old trailing `return resp.json()`
+ # raised UnboundLocalError when the request itself failed (resp never bound)
+ return {}
+
+
+class Credential:
+ def __init__(self, secret_id, secret_key, token=""):
+ self.secret_id = secret_id
+ self.secret_key = secret_key
+ self.token = token
src/asr/voice_recognition.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import re
+from pathlib import Path
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from asr.tecent_asr import Credential, FlashRecognitionRequest, FlashRecognizer
+from config import ASR_MAX_DURATION, ENABLE, PREFIX, TOKEN, cache
+from message_utils import modify_progress, send2tg
+from multimedia import convert_to_audio, parse_media_info
+
+# ruff: noqa: RUF001
+
+# https://cloud.tencent.com/document/product/1093/52097
+HELP = f"""🗣**语音转文字**
+使用说明: 以 `{PREFIX.ASR}` 回复包含音频的消息 (如语音, 视频, 音乐)
+默认可以识别普通话、粤语、英语三种语言。
+识别其他语种可在`{PREFIX.ASR}`后加上语种代码, 如:
+以`{PREFIX.ASR} ja`回复音频消息识别日语
+以`{PREFIX.ASR} fr`回复音频消息识别法语
+
+**目前支持以下语种:**
+fy: 多种方言, 上海话、四川话、武汉话、贵阳话、昆明话、西安话、郑州话、太原话、兰州话、银川话、西宁话、南京话、合肥话、南昌话、长沙话、苏州话、杭州话、济南话、天津话、石家庄话、黑龙江话、吉林话、辽宁话
+ja: 日语
+ko: 韩语
+vi: 越南语
+ms: 马来语
+id: 印度尼西亚语
+fil: 菲律宾语
+th: 泰语
+pt: 葡萄牙语
+tr: 土耳其语
+ar: 阿拉伯语
+es: 西班牙语
+hi: 印地语
+fr: 法语
+de: 德语
+"""
+
+ENGINE_MAP = {
+ "16k_zh-PY": "中英粤",
+ "16k_fy": "多种方言, 上海话、四川话、武汉话、贵阳话、昆明话、西安话、郑州话、太原话、兰州话、银川话、西宁话、南京话、合肥话、南昌话、长沙话、苏州话、杭州话、济南话、天津话、石家庄话、黑龙江话、吉林话、辽宁话",
+ "16k_ja": "日语",
+ "16k_ko": "韩语",
+ "16k_vi": "越南语",
+ "16k_ms": "马来语",
+ "16k_id": "印度尼西亚语",
+ "16k_fil": "菲律宾语",
+ "16k_th": "泰语",
+ "16k_pt": "葡萄牙语",
+ "16k_tr": "土耳其语",
+ "16k_ar": "阿拉伯语",
+ "16k_es": "西班牙语",
+ "16k_hi": "印地语",
+ "16k_fr": "法语",
+ "16k_de": "德语",
+}
+
+
+@cache.memoize(ttl=30)
+async def voice_to_text(
+ client: Client,
+ message: Message,
+ *,
+ asr_need_prefix: bool | None = None,
+ asr_skip_voice: bool | None = None,
+ asr_skip_audio: bool | None = None,
+ asr_skip_video: bool | None = None,
+ **kwargs,
+) -> None:
+ """Voice, audio, video message to text.
+
+ By default, "/asr" prefix is needed in Group & Channel & Bot chats to trigger this function.
+ In private chat, no need to add "/asr" prefix for voice message, but the video & audio message still need it.
+
+ Args:
+ client (Client): The Pyrogram client.
+ message (Message): The trigger message object.
+ asr_need_prefix (bool, optional): If True, must use "/asr" prefix to reply a audio message.
+ asr_skip_voice (bool, optional): If True, skip voice message.
+ asr_skip_audio (bool, optional): If True, skip audio message.
+ asr_skip_video (bool, optional): If True, skip video message.
+ """
+ # send docs if message == "/asr", without reply
+ if str(message.text).lower().strip() == PREFIX.ASR and not message.reply_to_message:
+ await send2tg(client, message, texts=HELP, **kwargs)
+ return
+
+ if not (trigger_message := get_trigger_message(message, asr_need_prefix, asr_skip_voice, asr_skip_audio, asr_skip_video)):
+ return
+
+ asr_engine = "16k_zh-PY" # default: 中英粤
+ if matched := re.match(r"/asr\s+([^.。,,/\s]+)", str(message.text)): # /asr yue
+ asr_engine = f"16k_{matched.group(1)}"
+ asr_engine = asr_engine.replace("16k_fy", "16k_zh_dialect") # fix dialect engine code
+
+ msg = f"Received {trigger_message.media.name} message, start recognizing by {ENGINE_MAP.get(asr_engine, 'Unknown')}..."
+ logger.info(msg)
+ if kwargs.get("show_progress"):
+ res = await send2tg(client, message, texts=msg, **kwargs)
+ kwargs["progress"] = res[0]
+ if asr_engine not in ENGINE_MAP:
+ await modify_progress(text=f"Unsupported ASR engine: {asr_engine}", force_update=True, **kwargs)
+ return
+ voice_format = ""
+ path: str | Path = await trigger_message.download() # type: ignore
+ if trigger_message.media.name == "VOICE": # audio/ogg
+ voice_format = str(trigger_message.voice.mime_type).split("/")[-1] # set voice format
+ elif trigger_message.media.name in ["AUDIO", "VIDEO"]:
+ path = convert_to_audio(path, ext="m4a")
+ voice_format = "m4a"
+
+ if not Path(path).expanduser().resolve().is_file():
+ msg = "Failed to download audio, please try again later."
+ logger.error(msg)
+ await modify_progress(text=msg, force_update=True, **kwargs)
+ return
+ path = Path(path).expanduser().resolve()
+
+ if not voice_format:
+ voice_format = path.suffix.removeprefix(".")
+
+ # fix format code
+ if voice_format in ["oga", "ogg", "opus"]:
+ voice_format = "ogg-opus"
+ if voice_format == "mp4":
+ voice_format = "m4a"
+ if voice_format not in ["m4a", "ogg-opus", "wav", "pcm", "speex", "silk", "mp3", "aac", "amr"]:
+ msg = f"Unsupported audio format: {voice_format}"
+ logger.error(msg)
+ await modify_progress(text=msg, force_update=True, **kwargs)
+ return
+
+ # 音频长度
+ duration = parse_media_info(path).get("duration", 0)
+ if duration > ASR_MAX_DURATION:
+ msg = f"无法识别时长超过{ASR_MAX_DURATION}秒的音频, 当前音频时长: {duration}秒"
+ logger.error(msg)
+ await modify_progress(text=msg, force_update=True, **kwargs)
+ return
+
+ logger.debug(f"Recognizing {voice_format} audio by {asr_engine}: {path.as_posix()}")
+ credential_var = Credential(TOKEN.TENCENT_ASR_SECRET_ID, TOKEN.TENCENT_ASR_SECRET_KEY)
+ recognizer = FlashRecognizer(TOKEN.TENCENT_ASR_APPID, credential_var)
+ req = FlashRecognitionRequest(engine_type=asr_engine)
+ req.set_voice_format(voice_format)
+
+ final = ""
+ try:
+ with path.open("rb") as f:
+ resp = await recognizer.recognize(req, f.read())
+ logger.trace(resp)
+ texts = [channel.get("text", "") for channel in resp.get("flash_result", [])]
+ if len(set(texts)) == 1: # single channel
+ final = texts[0]
+ else:
+ for cid, text in enumerate(texts):
+ final += f"通道{cid + 1}: {text}\n"
+ if final:
+ final = f"🗣语音转文字:\n{final}"
+ logger.success(f"Recognized text: {final}")
+ await send2tg(client, trigger_message, texts=final, **kwargs)
+ await modify_progress(del_status=True, **kwargs)
+ except Exception as e:
+ logger.error(f"Failed to recognize audio: {e}")
+ finally:
+ path.unlink(missing_ok=True)
+
+
+@cache.memoize(ttl=10)
+def get_trigger_message(
+ message: Message,
+ asr_need_prefix: bool | None = None,
+ asr_skip_voice: bool | None = None,
+ asr_skip_audio: bool | None = None,
+ asr_skip_video: bool | None = None,
+) -> Message | None:
+ """Check if the message is triggerable for voice recognition.
+
+ By default, "/asr" prefix is needed in Group & Channel & Bot chats to trigger this function.
+ In private chat, no need to add "/asr" prefix for voice message, but the video & audio message still need it.
+ """
+ if not ENABLE.ASR:
+ return None
+ if message.chat.type.name in ["GROUP", "SUPERGROUP", "CHANNEL", "BOT"]:
+ asr_need_prefix = True if asr_need_prefix is None else asr_need_prefix
+ asr_skip_voice = asr_skip_voice or False
+ asr_skip_audio = asr_skip_audio or False
+ asr_skip_video = asr_skip_video or False
+ else: # private chat
+ asr_need_prefix = asr_need_prefix or False
+ asr_skip_voice = asr_skip_voice or False
+ asr_skip_audio = asr_skip_audio or False
+ asr_skip_video = asr_skip_video or False
+
+ # only trigger if msg has "/asr" prefix
+ if asr_need_prefix and not str(message.text).lower().strip().startswith(PREFIX.ASR):
+ return None
+
+ # treat the reply_to_message as the real message need to be recognized
+ trigger_msg = message.reply_to_message if asr_need_prefix or str(message.text).lower().strip().startswith(PREFIX.ASR) else message
+
+ if not trigger_msg:
+ return None
+ if not trigger_msg.media or trigger_msg.media.name not in ["VOICE", "AUDIO", "VIDEO"]:
+ return None
+ if asr_skip_voice and trigger_msg.media.name == "VOICE":
+ return None
+ if asr_skip_audio and trigger_msg.media.name == "AUDIO":
+ return None
+ if asr_skip_video and trigger_msg.media.name == "VIDEO":
+ return None
+ return trigger_msg
src/bridge/miaomiao.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import re
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message, ReplyParameters
+
+from config import ENABLE, PREFIX, cache
+from message_utils import send2tg
+from utils import i_am_bot
+
+BOT_NAME = "GLBetabot"
+
+
+@cache.memoize(ttl=10)
+async def ocr_to_miaomiao(client: Client, message: Message, **kwargs):
+ """Send photo to miaomiao bot for OCR.
+
+ See docs in `bridge/README.md` for details.
+ """
+ if not ENABLE.WGET:
+ return
+ # send docs if message == "/ocr", without reply
+ if str(message.text).lower().strip() == PREFIX.OCR and not message.reply_to_message:
+ await send2tg(client, message, texts=f"**图片转文字**: 以`{PREFIX.OCR}`回复图片消息即可提取文字", **kwargs)
+ return
+ msg = message.text or message.caption or "" # /ocr args
+ if not msg.startswith(PREFIX.OCR):
+ return
+ if await i_am_bot(client): # bot can't send message to other bots
+ return
+ # get the img file_id
+ if message.photo:
+ img = message.photo.file_id
+ elif message.reply_to_message and message.reply_to_message.photo:
+ img = message.reply_to_message.photo.file_id
+ else:
+ return
+
+ cid = kwargs.get("target_chat", message.chat.id) # MSG-A's cid
+ mid = kwargs.get("reply_msg_id", message.id) # MSG-A's mid
+ msg += f" \n#ID=({cid},{mid})".replace("None", "0")
+ logger.warning(f"OCR via 妙妙小工具 (@{BOT_NAME}): {msg!r}")
+ await client.send_photo(chat_id=f"@{BOT_NAME}", photo=img, caption=msg)
+
+
+@cache.memoize(ttl=10)
+async def forward_results_from_miaomiao(client: Client, message: Message):
+ """See docs in `bridge/README.md` for details."""
+ if message.from_user.username != BOT_NAME or not message.reply_to_message:
+ return
+ reply_msg = message.reply_to_message
+ reply_msg_text = reply_msg.text or reply_msg.caption or ""
+
+ # forward ocr (result should be a photo)
+ if message.photo and message.caption and reply_msg_text.startswith("/ocr") and (matched := re.search(r"#ID=\((-?\d+),(-?\d+)\)", str(reply_msg_text))):
+ target_cid = matched.group(1) # MSG-A's cid
+ target_mid = int(matched.group(2)) if int(matched.group(2)) != 0 else None # MSG-A's mid
+ cid = message.chat.id # result's cid
+ mid = message.id # result's mid
+ logger.info(f"Forwarding chat=@{BOT_NAME}, id={mid} -> chat={target_cid}, id={target_mid}")
+ await client.copy_message(chat_id=target_cid, from_chat_id=cid, message_id=mid, reply_parameters=ReplyParameters(message_id=target_mid)) # type: ignore
src/bridge/parsehub.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import re
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message, ReplyParameters
+
+from config import cache
+from message_utils import parse_msg
+from utils import i_am_bot
+
+BOT_NAME = "ParsehubBot"
+
+
+@cache.memoize(ttl=10)
+async def send_to_parsehub(client: Client, message: Message, text: str, **kwargs):
+ """See docs in `bridge/README.md` for details."""
+ if await i_am_bot(client): # bot can't send message to other bots
+ return
+ cid = kwargs.get("target_chat", message.chat.id) # MSG-A's cid
+ mid = kwargs.get("reply_msg_id", message.id) # MSG-A's mid
+ msg = f"#ID=({cid},{mid})\n{text}".replace("None", "0")
+ logger.warning(f"Trying parsehub (@{BOT_NAME}): {msg!r}")
+ await client.send_message(chat_id=f"@{BOT_NAME}", text=msg)
+
+
+@cache.memoize(ttl=10)
+async def forward_results_from_parsehub(client: Client, message: Message):
+ """See docs in `bridge/README.md` for details."""
+ if message.from_user.username != BOT_NAME or not message.media:
+ return
+ parse_msg(message)
+ if message.reply_to_message and (matched := re.search(r"#ID=\((-?\d+),(-?\d+)\)", str(message.reply_to_message.text))):
+ target_cid = matched.group(1) # MSG-A's cid
+ target_mid = int(matched.group(2)) if int(matched.group(2)) != 0 else None # MSG-A's mid
+ cid = message.chat.id # result's cid
+ mid = message.id # result's mid
+ logger.info(f"Forwarding chat=@{BOT_NAME}, id={mid} -> chat={target_cid}, id={target_mid}")
+ await client.copy_message(chat_id=target_cid, from_chat_id=cid, message_id=mid, reply_parameters=ReplyParameters(message_id=target_mid)) # type: ignore
src/bridge/README.md
@@ -0,0 +1,46 @@
+# BridgeBot
+
+Use third-party bots to finish some tasks.
+
+## Preliminary
+
+cid: chat_id
+mid: message_id
+
+## Scenario
+
+1. A user sends us a message, named MSG-A
+
+2. We first reply to MSG-A with a progress updating message, named MSG-UPDATE. (Like "Processing your request...")
+
+3. Then we construct a special message (Named MSG-B) which can trigger the function of the third-party bot (bridgebot).
+ We log the chat_id (MSG-A's cid) and message_id (MSG-A's mid) to #ID tag in the MSG-B, format: #ID=(MSG-A's cid, MSG-A's mid)
+ And log the MSG-UPDATE tag in this format: #PROGRESS=(MSG-UPDATE's cid, MSG-UPDATE's mid) to MSG-B
+
+4. Send MSG-B to bridgebot to finish the task.
+
+5. After the task is finished, the bridgebot will reply to MSG-B with a new message containing the task results, named MSG-RES.
+So, we can parse the reply message of MSG-RES (i.e. MSG-B) to get the #ID tag and MSG-UPDATE tag, and then forward MSG-RES to MSG-A.
+
+6. Finally, we delete MSG-UPDATE.
+
+For example, a user send us a douyin link which need to previewed. (MSG-A)
+
+We want to use @ParsehubBot to finish this task.
+
+First, we reply to MSG-A with a progress updating message. (MSG-UPDATE)
+
+Then we construct a special message (MSG-B) which can trigger the function of @ParsehubBot:
+
+```txt
+#ID=(-1001234455, 2385)
+#PROGRESS=(-1001234455, 2386)
+https://v.douyin.com/helloworld
+```
+
+Send MSG-B to @ParsehubBot to finish the task.
+
+After the task is finished, @ParsehubBot will reply to MSG-B with a new message containing the video, named MSG-RES
+We parse the reply message of MSG-RES (i.e. MSG-B) to get cid= -1001234455, mid= 2385, and then forward MSG-RES to MSG-A.
+
+Finally, we delete MSG-UPDATE (cid= -1001234455, mid= 2386).
src/others/download_external.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+import re
+from pathlib import Path
+
+import puremagic
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message, ReplyParameters
+
+from config import ENABLE, MAX_FILE_BYTES, PREFIX
+from message_utils import modify_progress, send2tg
+from multimedia import is_valid_video, validate_img
+from networking import download_file
+from utils import https_url, readable_size
+
+
+async def download_url_in_message(client: Client, message: Message, **kwargs):
+ """Download the url from the message."""
+ if not ENABLE.WGET:
+ return
+ if not str(message.text).strip().lower().startswith(PREFIX.WGET):
+ return
+ target_chat = kwargs["target_chat"] if kwargs.get("target_chat") else message.chat.id
+ reply_msg_id = kwargs.get("reply_msg_id", 0)
+ if reply_msg_id == 0:
+ reply_to = message.id
+ elif reply_msg_id == -1:
+ reply_to = None
+ else:
+ reply_to = reply_msg_id
+ reply_parameters = ReplyParameters(message_id=reply_to) # type: ignore
+
+ regex = r"(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))" # noqa: RUF001
+ if matched := re.findall(regex, message.text):
+ url = https_url(matched[0][0])
+ logger.debug(f"URL found from message text: {url}")
+
+ msg = f"⏬开始下载:\n{url}"
+ if kwargs.get("show_progress"):
+ res = await send2tg(client, message, texts=msg, **kwargs)
+ kwargs["progress"] = res[0]
+ success = False
+ try:
+ path = await download_file(url, workers_proxy=True, **kwargs)
+ suffix = puremagic.from_file(path) # guess the file type
+ if Path(path).suffix != suffix:
+ Path(path).rename(Path(path).with_suffix(suffix))
+ path = Path(path).with_suffix(suffix)
+ if img := validate_img(path, delete=False):
+ await modify_progress(text=f"🖼图片下载成功: {readable_size(path=img)}", force_update=True, **kwargs)
+ success = await send2tg(client, message, target_chat, reply_msg_id, texts=url, media=[{"photo": img}])
+ elif Path(path).suffix in [".m4a", ".mp3", ".wav", ".ogg", ".opus", ".flac", ".aac"]:
+ await modify_progress(text=f"🎧音频下载成功: {readable_size(path=path)}", force_update=True, **kwargs)
+ success = await client.send_audio(target_chat, Path(path).as_posix(), caption=url, reply_parameters=reply_parameters)
+ elif is_valid_video(path, delete=False):
+ await modify_progress(text=f"🎬视频下载成功: {readable_size(path=path)}", force_update=True, **kwargs)
+ success = await send2tg(client, message, target_chat, reply_msg_id, texts=url, media=[{"video": path}])
+ elif Path(path).stat().st_size < MAX_FILE_BYTES:
+ await modify_progress(text=f"💾文件下载成功: {readable_size(path=path)}", force_update=True, **kwargs)
+ success = await client.send_document(target_chat, Path(path).as_posix(), caption=url, reply_parameters=reply_parameters)
+ else:
+ await modify_progress(text=f"❌文件大小: {readable_size(path=path)} 超出限制\nTelegram只允许上传小于{round(MAX_FILE_BYTES / 1024 / 1024)}MB的文件", force_update=True, **kwargs)
+ except Exception as e:
+ logger.error(e)
+ await modify_progress(text=f"❌下载失败: {url}", force_update=True, **kwargs)
+
+ if success:
+ await modify_progress(del_status=True, **kwargs)
+ # `path` is unbound when download_file() raised before assignment
+ if "path" in locals():
+ Path(path).unlink(missing_ok=True)
src/others/emoji.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import re
+
+# From: https://github.com/Rongronggg9/RSS-to-Telegram-Bot/blob/84ce41fd31819d83e146507d50a95e28a5b58afe/src/parsing/weibo_emojify_map.py
+COMMON = {
+ "[微笑]": "🙂",
+ "[可爱]": "😊",
+ "[太开心]": "😆",
+ "[鼓掌]": "👏",
+ "[嘻嘻]": "😁",
+ "[哈哈]": "😄",
+ "[笑cry]": "😂",
+ "[挤眼]": "😜",
+ "[馋嘴]": "😋",
+ "[黑线]": "😑",
+ "[汗]": "😓",
+ "[哼]": "😠",
+ "[怒]": "😡",
+ "[可怜]": "🥺",
+ "[失望]": "😞",
+ "[悲伤]": "😢",
+ "[泪]": "😭",
+ "[害羞]": "😳",
+ "[爱你]": "🥰",
+ "[亲亲]": "😚",
+ "[色]": "😍",
+ "[阴险]": "😏",
+ "[偷笑]": "🤭",
+ "[酷]": "😎",
+ "[并不简单]": "🧐",
+ "[思考]": "🤔",
+ "[晕]": "😵",
+ "[骷髅]": "💀",
+ "[嘘]": "🤫",
+ "[闭嘴]": "🤐",
+ "[傻眼]": "😮",
+ "[吃惊]": "😲",
+ "[吐]": "🤮",
+ "[感冒]": "😷",
+ "[生病]": "🤒",
+ "[拜拜]": "👋",
+ "[鄙视]": "🖕",
+ "[白眼]": "🙄",
+ "[抓狂]": "😖",
+ "[怒骂]": "🤬",
+ "[钱]": "🤑",
+ "[哈欠]": "🥱",
+ "[困]": "😴",
+ "[睡]": "😪",
+ "[吃瓜]": "🍉",
+ "[酸]": "🍋",
+ "[喵喵]": "🐱",
+ "[抱抱]": "🤗",
+ "[摊手]": "🤷",
+ "[跪了]": "🧎",
+ "[鲜花]": "🌹",
+ "[给你小心心]": "💝",
+ "[心]": "❤",
+ "[伤心]": "💔",
+ "[握手]": "🤝",
+ "[赞]": "👍",
+ "[good]": "👍",
+ "[弱]": "👎",
+ "[NO]": "✋",
+ "[耶]": "✌",
+ "[拳头]": "✊",
+ "[ok]": "👌",
+ "[加油]": "💪",
+ "[haha]": "🤟",
+ "[熊猫]": "🐼",
+ "[兔子]": "🐰",
+ "[猪头]": "🐷",
+ "[太阳]": "🌞",
+ "[月亮]": "🌙",
+ "[浮云]": "☁",
+ "[下雨]": "🌧",
+ "[微风]": "🍃",
+ "[围观]": "👨‍👧‍👦",
+ "[飞机]": "✈",
+ "[照相机]": "📷",
+ "[话筒]": "🎙",
+ "[蜡烛]": "🕯",
+ "[音乐]": "🎵",
+ "[可乐]": "🥤",
+ "[干杯]": "🍻",
+ "[蛋糕]": "🎂",
+ "[礼物]": "🎁",
+ "[钟]": "⏰",
+ "[肥皂]": "🧼",
+ "[绿丝带]": "🎗",
+ "[围脖]": "🧣",
+ "[圣诞老人]": "🎅",
+ "[文明遛狗]": "🐕",
+ "[最右]": " →_→ ",
+ "[五仁月饼]": "🥮",
+ "[弗莱见钱眼开]": "🤑",
+ "[棒棒糖]": "🍭",
+ "[炸鸡腿]": "🍗",
+ "[点亮平安灯]": "🏮",
+ "[点亮橙色]": "🖐",
+ "[看涨]": "📈",
+ "[看跌]": "📉",
+ "[星星]": "⭐",
+ "[空星]": "★",
+ "[全家福]": "👪",
+ "[圆月]": "🌕",
+ # customization
+ "[大笑]": "😆",
+}
+
+XHS = {
+ "[零R]": "0️⃣",
+ "[一R]": "1️⃣",
+ "[二R]": "2️⃣",
+ "[三R]": "3️⃣",
+ "[四R]": "4️⃣",
+ "[五R]": "5️⃣",
+ "[六R]": "6️⃣",
+ "[七R]": "7️⃣",
+ "[八R]": "8️⃣",
+ "[九R]": "9️⃣",
+ "[背包R]": "😏",
+ "[满月R]": "😏",
+ "[生气R]": "😡",
+ "[失望R]": "😞",
+ "[抓狂R]": "😖",
+ "[萌萌哒R]": "😊",
+ "[红色心形R]": "❤",
+ "[爆炸R]": "💥",
+ "[炸弹R]": "💣",
+}
+
+DOUYIN = {
+ "[苦涩]": "😭",
+ "[doge]": "🐶",
+ "[挖鼻]": "👃",
+ "[泪]": "😭",
+ "[眼含热泪]": "🥺",
+ "[呲牙]": "😁",
+ "[玫瑰]": "🌹",
+ "[一起加油]": "💪",
+ "[比心]": "💖",
+ "[看]": "🐶",
+ "[宕机]": "😧",
+ "[捂脸]": "🤦",
+ "[泪奔]": "😭",
+ "[尬笑]": "😅",
+ "[抱一抱]": "🤗",
+ "[打call]": "👏",
+ "[哇]": "🥰",
+}
+BILIBILI = {"[笑哭]": "🤦"}
+
+
+def emojify(text: str, platform: str = "all") -> str:
+ """Replace the text emojis with actual emojis.
+
+ [微笑]Hello[爱你] -> 🙂Hello🥰
+ """
+ if platform == "all":
+ EMOJI_MAP = COMMON | BILIBILI | XHS | DOUYIN
+ elif platform == "xhs":
+ EMOJI_MAP = COMMON | XHS
+ elif platform == "douyin":
+ EMOJI_MAP = COMMON | DOUYIN
+ elif platform == "bilibili":
+ EMOJI_MAP = COMMON | BILIBILI
+ else: # unknown platform: fall back to the common map (avoids NameError)
+ EMOJI_MAP = COMMON
+ pattern = re.compile("|".join(re.escape(k) for k in EMOJI_MAP))
+
+ return pattern.sub(lambda match: EMOJI_MAP[match.group(0)], text)
+
+
+if __name__ == "__main__":
+ text = "[微笑]Hello[爱你]"
+ print(emojify(text))
src/others/extract_audio.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from pathlib import Path
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message, ReplyParameters
+
+from config import PREFIX, cache
+from message_utils import modify_progress, parse_msg, send2tg
+from multimedia import convert_to_audio, parse_media_info
+
+# ruff: noqa: RUF001
+
+HELP = f"""
+🎧**视频转音频**
+使用方式:
+1. 以 `{PREFIX.AUDIO}` 回复视频消息提取出音频
+2. 发送视频时, 添加`{PREFIX.AUDIO}`文本描述会同时提取视频音频
+"""
+
+
+@cache.memoize(ttl=60)
+async def extract_audio_file(client: Client, message: Message, **kwargs) -> None:
+ """Extract audio from video message."""
+ # send docs if message == "/audio", without reply
+ if str(message.text).lower().strip() == PREFIX.AUDIO and not message.reply_to_message:
+ await send2tg(client, message, texts=HELP, **kwargs)
+ return
+
+ msg_text = message.text or message.caption or ""
+ msg_text = msg_text.lower().strip()
+
+ if not msg_text.startswith(PREFIX.AUDIO):
+ return
+
+ # 以/audio命令回复一条消息
+ if message.reply_to_message:
+ message = message.reply_to_message
+
+ if not message.media or message.media.name != "VIDEO":
+ return
+ parse_msg(message, verbose=True)
+
+ msg = "🎬收到视频消息, 开始提取🎧音频..."
+ if kwargs.get("show_progress"):
+ res = await send2tg(client, message, texts=msg, **kwargs)
+ kwargs["progress"] = res[0]
+ video: str = await message.download() # type: ignore
+ if Path(video).expanduser().resolve().is_file():
+ await modify_progress(text="🎬视频下载完成, 提取🎧音频中...", force_update=True, **kwargs)
+ else:
+ await modify_progress(text="🎬视频下载失败, 请稍后重试...", force_update=True, **kwargs)
+ return
+ path = convert_to_audio(video, ext="m4a")
+ if not Path(path).expanduser().resolve().is_file():
+ logger.trace(f"File not found: {path}")
+ return
+ path = Path(path).expanduser().resolve()
+ if path.name.endswith(".final.m4a"): # remove final suffix
+ path.rename(path.with_name(path.name.replace(".final.m4a", ".m4a")))
+ path = path.with_name(path.name.replace(".final.m4a", ".m4a"))
+ await modify_progress(text="🎧音频提取已完成, 开始上传...", force_update=True, **kwargs)
+ duration = parse_media_info(path).get("duration", 0)
+
+ target_chat = kwargs["target_chat"] if kwargs.get("target_chat") else message.chat.id
+ reply_msg_id = kwargs.get("reply_msg_id", 0)
+ if reply_msg_id == 0:
+ reply_to = message.id
+ elif reply_msg_id == -1:
+ reply_to = None
+ else:
+ reply_to = reply_msg_id
+ reply_parameters = ReplyParameters(message_id=reply_to) # type: ignore
+ await client.send_audio(target_chat, Path(path).as_posix(), duration=duration, reply_parameters=reply_parameters)
+ Path(video).unlink(missing_ok=True)
+ path.unlink(missing_ok=True)
+ await modify_progress(del_status=True, **kwargs)
src/others/gpt.py
@@ -0,0 +1,400 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+import asyncio
+import base64
+import copy
+import re
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from config import DOWNLOAD_DIR, ENABLE, GPT, PREFIX, PROXY, cache
+from message_utils import modify_progress, send2tg
+from multimedia import convert_to_audio
+from networking import hx_req
+
+if TYPE_CHECKING:
+ from io import BytesIO
+
+clean_name = lambda x: x.removesuffix("-mini").removesuffix("-latest").removesuffix("-turbo").removesuffix("-exp").removesuffix("-chat")
+HELP = f"""🤖**GPT对话**
+当前模型:
+- 文本模型: **{clean_name(GPT.TEXT_MODEL)}**
+- 图片模型: **{clean_name(GPT.VISION_MODEL)}**
+- 视频模型: **{clean_name(GPT.VIDEO_MODEL)}**
+- 音频模型: **{clean_name(GPT.AUDIO_MODEL)}** (30秒以内)
+
+使用说明:
+1. 在 `{PREFIX.GPT}` 后接提示词即可与GPT对话
+2. 以 `{PREFIX.GPT}` 回复消息可将其加入上下文
+3. 音频模型仅支持30秒以内的音频文件, 如需更长可以先用 `{PREFIX.ASR}` 命令转为文字后再使用 `{PREFIX.GPT}`
+"""
+BOT_TIPS = "回复此消息以继续对话"
+
+
+@cache.memoize(ttl=60)
+async def gpt_response(client: Client, message: Message, **kwargs):
+ """Get GPT response from Various API.
+
+ Args:
+ client (Client): The Pyrogram client.
+ message (Message): The trigger message object.
+ """
+ if not ENABLE.GPT:
+ return
+
+ # send docs if message == "/ai", without reply
+ texts = message.text or message.caption or ""
+ if str(texts).lower().strip() == PREFIX.GPT and not message.reply_to_message:
+ await send2tg(client, message, texts=HELP, **kwargs)
+ return
+
+ if not is_valid_conversation(message):
+ return
+
+ # cache media_group message, only process once
+ if media_group_id := message.media_group_id:
+ if cache.get(f"gpt-{message.chat.id}-{media_group_id}"):
+ return
+ cache.set(f"gpt-{message.chat.id}-{media_group_id}", "1", ttl=120)
+
+ contexts = await generate_contexts(client, message)
+ model_conf = get_model_config(contexts)
+ if model_conf["friendly_name"] == "豆包":
+ contexts = fix_doubao(contexts)
+ msg = f"🤖{model_conf['friendly_name']}: 思考中..."
+ if kwargs.get("show_progress"):
+ res = await send2tg(client, message, texts=msg, **kwargs)
+ kwargs["progress"] = res[0]
+ headers = {"authorization": f"Bearer {model_conf['key']}"}
+ if model_conf["model_type"] == "audio":
+ resp = await hx_req(model_conf["url"], "POST", headers=headers, post_json=model_conf["payload"], proxy=PROXY.GPT, check_has_kv=["output.choices"], timeout=model_conf["timeout"])
+ choices = resp.json()["output"]["choices"]
+ ai_response = choices[0].get("message", {}).get("content", [{}])[0].get("text", "")
+ else:
+ resp = await hx_req(model_conf["url"], "POST", headers=headers, post_json=model_conf["payload"], proxy=PROXY.GPT, check_has_kv=["choices"], timeout=model_conf["timeout"])
+ choices = resp.json()["choices"]
+ ai_response = choices[0].get("message", {}).get("content")
+ cleanup(contexts)
+ if not ai_response:
+ await modify_progress(text=f"🤖{model_conf['friendly_name']}未响应, 请稍后重试...", force_update=True, **kwargs)
+ await asyncio.sleep(3)
+ await modify_progress(del_status=True, **kwargs)
+ return
+ texts = f"{model_conf['bot_msg_prefix']}\n\n{ai_response}"
+ logger.debug(texts)
+ await send2tg(client, message, texts=texts, **kwargs)
+ await modify_progress(del_status=True, **kwargs)
+
+
+def get_model_config(contexts: list[dict]):
+ """Clean downloaded files.
+
+ [
+ {
+ "role": "user",
+ "content": [
+ {"type": "text", "text": "text"},
+ {"type": "image_url", "image_url": {"url": "https://server.com/dir/image.jpg"}},
+ {"type": "video_url", "video_url": {"url": "https://server.com/dir/video.mp4"}}, # 智谱
+ {"type": "video", "video": ["https://server.com/dir/1.jpg","https://server.com/dir/2.jpg","https://server.com/dir/3.jpg"]}, # 千问
+ {"audio": "https://server.com/dir/audio.mp3"},
+ ]
+ }
+ ]
+ """
+ model = GPT.TEXT_MODEL
+ model_type = "text"
+ timeout = GPT.TEXT_TIMEOUT
+ for item in contexts:
+ content = item.get("content", [])
+ for x in content:
+ if x.get("image_url"):
+ model = GPT.VISION_MODEL
+ model_type = "vision"
+ timeout = GPT.VISION_TIMEOUT
+ break
+ if x.get("video_url") or x.get("video"):
+ model = GPT.VIDEO_MODEL
+ model_type = "video"
+ timeout = GPT.VIDEO_TIMEOUT
+ break
+ if x.get("audio"):
+ model = GPT.AUDIO_MODEL
+ model_type = "audio"
+ timeout = GPT.AUDIO_TIMEOUT
+ break
+
+ common = {"model_type": model_type, "timeout": int(timeout)}
+ openai = {
+ "url": GPT.OPENAI_BASE_URL + "/chat/completions",
+ "key": GPT.OPENAI_API_KEY,
+ }
+ gemini = {
+ "url": GPT.GEMINI_BASE_URL + "/chat/completions",
+ "key": GPT.GEMINI_API_KEY,
+ }
+ hunyuan = {
+ "url": GPT.HUNYUAN_BASE_URL + "/chat/completions",
+ "key": GPT.HUNYUAN_API_KEY,
+ }
+ qwen = {
+ "url": GPT.DASHSCOPE_BASE_URL + "/chat/completions",
+ "key": GPT.DASHSCOPE_API_KEY,
+ }
+ openrouter = {
+ "url": GPT.OPENROUTER_BASE_URL + "/chat/completions",
+ "key": GPT.OPENROUTER_API_KEY,
+ }
+ glm = {
+ "url": GPT.GLM_BASE_URL + "/chat/completions",
+ "key": GPT.GLM_API_KEY,
+ }
+ doubao = {
+ "url": GPT.ARK_BASE_URL + "/chat/completions",
+ "key": GPT.ARK_API_KEY,
+ }
+ if model.startswith("gpt"):
+ openai["friendly_name"] = clean_name(model.replace("gpt", "GPT"))
+ config = common | openai
+
+ if model.startswith("gemini"):
+ gemini["friendly_name"] = clean_name(model.capitalize())
+ config = common | gemini
+
+ if model.startswith("hunyuan"):
+ hunyuan["friendly_name"] = clean_name(model.replace("hunyuan", "混元"))
+ config = common | hunyuan
+
+ if model.startswith("qwen"):
+ qwen["friendly_name"] = clean_name(model.replace("qwen", "通义千问"))
+ if model_type == "audio": # qwen-audio
+ qwen["url"] = "https://dashscope.aliyuncs.com/api/v1/services/aigc/multimodal-generation/generation"
+ config = common | qwen
+
+ if model.startswith("deepseek"): # via openrouter: deepseek/deepseek-chat
+ openrouter["friendly_name"] = clean_name(model.split("/")[-1].capitalize())
+ config = common | openrouter
+
+ if model.startswith("glm"):
+ glm["friendly_name"] = model.upper().removesuffix("-FLASH")
+ config = common | glm
+
+ if model.startswith("doubao"):
+ doubao["friendly_name"] = "豆包"
+ if model_type == "text":
+ model = GPT.DOUBAO_TEXT_ENTRYPOINT
+ elif model_type == "vision":
+ model = GPT.DOUBAO_VISION_ENTRYPOINT
+ config = common | doubao
+
+ config["bot_msg_prefix"] = f"🤖**{config['friendly_name']}**: ({BOT_TIPS})"
+ payload = {"model": model, "input": {"messages": contexts}} if model_type == "audio" else {"model": model, "messages": contexts, "temperature": float(GPT.TEMPERATURE)}
+ config["payload"] = payload
+ logger.trace(config)
+ return config
+
+
+async def generate_contexts(client: Client, message: Message) -> list[dict]:
+ """Generate contexts for GPT conversation.
+
+ Returns:
+ list[dict]: [
+ {
+ "role": "user or assistant",
+ "content": [
+ {'type': 'text', 'text': 'caption this img'},
+ {'type': 'image_url', 'image_url': {'url': 'data:image/jpeg;base64,base64_image'}},
+ {'type': 'image_url', 'image_url': {'url': 'https://server.com/dir/image.jpg'}},
+ ]
+ }
+ ]
+ """
+ # 按时间顺序如下
+ reply_msg = copy.deepcopy(message.reply_to_message)
+ contexts = []
+ if context := await generate_single_msg_context(client, message): # this message
+ contexts.append(context)
+ while reply_msg:
+ if context := await generate_single_msg_context(client, reply_msg): # this message
+ contexts.append(context)
+ reply_msg = reply_msg.reply_to_message
+ contexts.reverse()
+ contexts = combine_consecutive_role_contexts(contexts)
+    return contexts[-int(GPT.HISTORY_CONTEXT) :]  # list is oldest-first here; keep the most recent N
+
+
+def combine_consecutive_role_contexts(contexts: list[dict]) -> list[dict]:
+ """Combine consecutive user and assistant contexts into one message.
+
+ Some GPT models don't support consecutive user and assistant contexts. (e.g. Hunyuan)
+
+ Args:
+ contexts (list[dict]): [
+ {
+ "role": "user or assistant",
+ "content": [
+ {'type': 'text', 'text': 'caption this img'},
+ {'type': 'image_url', 'image_url': {'url': 'data:image/jpeg;base64,base64_image'}},
+ {'type': 'image_url', 'image_url': {'url': 'https://server.com/dir/image.jpg'}},
+ ]
+ }
+ ]
+ """
+ combined_contexts = []
+ for i, msg in enumerate(contexts):
+ if i == 0:
+ combined_contexts.append(msg)
+ continue
+ if msg["role"] == combined_contexts[-1]["role"]:
+ combined_contexts[-1]["content"].extend(msg["content"])
+ else:
+ combined_contexts.append(msg)
+ return combined_contexts
+
+
+def fix_doubao(contexts: list[dict]) -> list[dict]:
+ """Fix doubao context format.
+
+ Doubao do not support this content for:
+ [{'text': 'hi', 'type': 'text'}], 'role': 'user'}]
+ It support:
+ [{'content': 'hi', 'role': 'user'}]
+
+ Args:
+ contexts (list[dict]): [
+ {
+ "role": "user or assistant",
+ "content": [
+ {'type': 'text', 'text': 'caption this img'},
+ {'type': 'image_url', 'image_url': {'url': 'data:image/jpeg;base64,base64_image'}},
+ {'type': 'image_url', 'image_url': {'url': 'https://server.com/dir/image.jpg'}},
+ ]
+ }
+ ]
+ """
+ fixed_contexts = []
+ for msg in contexts:
+ if (lst := msg.get("content", [])) and all(x.get("type") == "text" for x in lst):
+ msg["content"] = "\n".join([x.get("text") for x in lst])
+ fixed_contexts.append(msg)
+ return fixed_contexts
+
+
+def is_valid_conversation(message: Message) -> bool:
+ # match commands: /ai
+    if str(message.text).strip().lower().startswith(PREFIX.GPT):
+        return True
+    if str(message.caption).strip().lower().startswith(PREFIX.GPT):
+        return True
+ # is replying to gpt-bot response message?
+ if not message.reply_to_message:
+ return False
+
+ reply_msg = message.reply_to_message
+ reply_text = reply_msg.text or reply_msg.caption or ""
+ return reply_text.startswith("🤖")
+
+
+async def generate_single_msg_context(client: Client, message: Message) -> dict:
+ """Generate GPT contexts for a single message (Without consider reply message).
+
+ Returns:
+ {
+ "role": "user or assistant",
+ "content": [
+ {"type": "image_url", "image_url": {"url": "https://server.com/dir/image.jpg"}},
+ {"type": "video_url", "video_url": {"url": "https://server.com/dir/video.mp4"}}, # 智谱
+ {"type": "video", "video": ["https://server.com/dir/1.jpg","https://server.com/dir/2.jpg","https://server.com/dir/3.jpg"]}, # 千问
+ {"audio": "https://server.com/dir/audio.mp3"},
+ ],
+ }
+ """
+
+ def clean_text(text: str) -> str:
+ if not text:
+ return ""
+ return re.sub(rf"(.*?){BOT_TIPS}\)", "", text.removeprefix(PREFIX.GPT)).strip()
+
+ role = "assistant" if any(BOT_TIPS in texts for texts in [str(message.text), str(message.caption)]) else "user"
+ # only text
+ if text := clean_text(message.text):
+ return {"role": role, "content": [{"type": "text", "text": text}]}
+
+ if not message.media or message.media.name not in ["PHOTO", "VOICE", "AUDIO", "VIDEO", "DOCUMENT"]:
+ return {}
+
+ # has media
+ messages = await client.get_media_group(message.chat.id, message.id) if message.media_group_id else [message]
+ media = []
+ for msg in messages:
+ try:
+ if GPT.MEDIA_FORMAT == "base64":
+ res: BytesIO = await client.download_media(msg, in_memory=True) # type: ignore
+ logger.debug(f"Downloaded GPT media: {res.name}")
+ ext = Path(res.name).suffix.removeprefix(".").replace("jpg", "jpeg")
+ b64 = base64.b64encode(res.getvalue()).decode("utf-8")
+                if msg.media.name == "PHOTO":
+                    media.append({"type": "image_url", "image_url": {"url": f"data:image/{ext};base64,{b64}"}})
+                elif msg.media.name == "VIDEO":
+                    media.append({"type": "video_url", "video_url": {"url": b64}})
+                elif msg.media.name == "DOCUMENT" and msg.document.mime_type == "text/plain":
+                    media.append({"type": "text", "text": res.getvalue().decode("utf-8")})
+                else:
+                    logger.warning("Audio do not support base64, please use http")
+ else:
+ path: str = await client.download_media(msg) # type: ignore
+ logger.debug(f"Downloaded GPT media: {path}")
+                if msg.media.name == "PHOTO":
+                    media.append({"type": "image_url", "image_url": {"url": f"{GPT.MEDIA_SERVER}/{Path(path).name}"}})
+                elif msg.media.name == "VIDEO":
+                    media.append({"type": "video_url", "video_url": {"url": f"{GPT.MEDIA_SERVER}/{Path(path).name}"}})
+                elif msg.media.name in ["AUDIO", "VOICE"]:
+                    mp3 = convert_to_audio(path, ext="mp3", codec="libmp3lame")
+                    media.append({"audio": f"{GPT.MEDIA_SERVER}/{mp3.name}"})
+                elif msg.media.name == "DOCUMENT" and msg.document.mime_type == "text/plain":
+                    media.append({"type": "text", "text": Path(path).read_text()})
+ Path(path).unlink(missing_ok=True)
+ if caption := msg.caption:
+ media.append({"type": "text", "text": caption})
+ except Exception as e:
+ logger.warning(f"Download image from message failed: {e}")
+ continue
+ return {"role": role, "content": media}
+
+
+def cleanup(messages: list[dict]):
+ """Clean downloaded files.
+
+ [
+ {
+ 'role': 'user',
+ 'content': [
+ {'type': 'text', 'text': 'text'},
+ {'type': 'image_url', 'image_url': {'url': 'https://server.com/dir/image.jpg'}},
+ {"type": "video_url", "video_url": {"url": "https://server.com/dir/video.mp4"}}, # 智谱
+ {"type": "video", "video": ["https://server.com/dir/1.jpg","https://server.com/dir/2.jpg","https://server.com/dir/3.jpg"]}, # 千问
+ {"audio": "https://server.com/dir/audio.mp3"},
+ ]
+ }
+ ]
+ """
+ for item in messages:
+ content = item.get("content", [])
+ if not isinstance(content, list):
+ continue
+ for x in content:
+ if url := x.get("image_url", {}).get("url"):
+ (Path(DOWNLOAD_DIR) / Path(url).name).unlink(missing_ok=True)
+ if url := x.get("video_url", {}).get("url"):
+ (Path(DOWNLOAD_DIR) / Path(url).name).unlink(missing_ok=True)
+ if urls := x.get("video", []):
+ for url in urls:
+ (Path(DOWNLOAD_DIR) / Path(url).name).unlink(missing_ok=True)
+ if url := x.get("audio"):
+ (Path(DOWNLOAD_DIR) / Path(url).name).unlink(missing_ok=True)
src/others/raw_img_file.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from config import ENABLE, PREFIX
+from message_utils import parse_msg, send2tg
+from multimedia import split_long_img
+
+
+async def convert_raw_img_file(client: Client, message: Message, **kwargs):
+ if not ENABLE.RAW_IMG_CONVERT:
+ return
+ mime_type = message.document.mime_type if message.document else ""
+ if mime_type not in ["image/png", "image/jpeg", "image/heic"]:
+ return
+ try:
+ info = parse_msg(message)
+ logger.info(f"Splitting raw image: {info['file_name']}")
+ path: str = await message.download() # type: ignore
+        logger.trace(f"Image {info['file_name']} downloaded to {path}")
+ photos = split_long_img(path)
+ media = [{"photo": photo.as_posix()} for photo in photos if photo.is_file()]
+ # send splits
+ if len(media) == 1:
+ await send2tg(client, message, media=media, texts=f"🔁【原图转换】已完成\n发送 **{PREFIX.MAIN}** 命令查看更多功能", **kwargs)
+ else:
+ await send2tg(client, message, media=media, texts=f"✂️【自动切原图】已完成\n发送 **{PREFIX.MAIN}** 命令查看更多功能", **kwargs)
+ except Exception as e:
+ logger.exception(e)
src/others/subtitle.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+import asyncio
+import io
+from datetime import timedelta
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.enums import MessageEntityType
+from pyrogram.types import Message
+from youtube_transcript_api import YouTubeTranscriptApi
+
+from config import API, ENABLE, PREFIX, PROXY, TOKEN
+from database import cache
+from message_utils import modify_progress, send2tg
+from networking import hx_req, match_social_media_link
+
+HELP = f"""📃**提取字幕**
+使用说明:
+1. `{PREFIX.SUBTITLE} URL` 下载该链接的字幕
+2. 以 `{PREFIX.SUBTITLE}` 回复消息可下载消息中链接的字幕
+
+当前只支持YouTube
+"""
+
+
+@cache.memoize(ttl=30)
+async def get_subtitle(client: Client, message: Message, **kwargs):
+ """Get YouTube Subtitle."""
+ if not ENABLE.SUBTITLE:
+ return
+ target_chat = kwargs["target_chat"] if kwargs.get("target_chat") else message.chat.id
+ # send docs if message == "/ai", without reply
+ texts = message.text or message.caption or ""
+ if str(texts).lower().strip() == PREFIX.SUBTITLE and not message.reply_to_message:
+ await send2tg(client, message, texts=HELP, **kwargs)
+ return
+
+ if not (vid := await find_yt_vid(client, message)):
+ return
+ yt_url = f"https://www.youtube.com/watch?v={vid}"
+ msg = f"🔍**正在获取字幕**\n{yt_url}"
+ if kwargs.get("show_progress"):
+ res = await send2tg(client, message, texts=msg, **kwargs)
+ kwargs["progress"] = res[0]
+ # cache media_group message
+ if media_group_id := message.media_group_id:
+ if cache.get(f"subtitle-{message.chat.id}-{media_group_id}"):
+ return
+ cache.set(f"subtitle-{message.chat.id}-{media_group_id}", "1", ttl=120)
+
+ if res := await fetch_subtitle(vid):
+ logger.success(res)
+ if subtitles := res.get("subtitle", ""):
+ length = len(subtitles)
+ with io.BytesIO(subtitles.encode("utf-8")) as f:
+ await client.send_document(target_chat, f, file_name=f"{vid}.vtt-{length}字符.txt", caption=yt_url)
+ elif error := res.get("error", ""):
+ await modify_progress(text=error, force_update=True, **kwargs)
+ await asyncio.sleep(3)
+ else:
+ await modify_progress(text="❌获取字幕失败", force_update=True, **kwargs)
+ await asyncio.sleep(3)
+ await modify_progress(del_status=True, **kwargs)
+
+
+async def find_yt_vid(client: Client, message: Message) -> str:
+ msg_text = message.text or message.caption or ""
+ if not msg_text.strip().lower().startswith(PREFIX.SUBTITLE):
+ return ""
+ url = find_url_in_message(message)
+ # /subtitle "link"
+ info = await match_social_media_link(url, flatten_first=True)
+ if info["platform"] == "youtube":
+ return info["vid"]
+
+ # is replying to message?
+ if not message.reply_to_message:
+ return ""
+ reply_message = message.reply_to_message
+ # if reply to a media_group, fetch all messages in the group
+    reply_messages = await client.get_media_group(message.chat.id, reply_message.id) if reply_message.media_group_id else [reply_message]
+ for msg in reply_messages:
+ url = find_url_in_message(msg)
+ info = await match_social_media_link(url, flatten_first=True)
+ if info["platform"] == "youtube":
+ return info["vid"]
+ return ""
+
+
+def find_url_in_message(message: Message) -> str:
+ # check first url in entities
+ if message.entities:
+ for entity in message.entities:
+ if entity.type == MessageEntityType.TEXT_LINK:
+ return entity.url
+ if message.caption_entities:
+ for entity in message.caption_entities:
+ if entity.type == MessageEntityType.TEXT_LINK:
+ return entity.url
+ return str(message.text).strip() if message.text else str(message.caption).strip()
+
+
+async def fetch_subtitle(video_id: str) -> dict:
+ proxy = {"http": PROXY.SUBTITLE, "https": PROXY.SUBTITLE} if PROXY.SUBTITLE else None
+ logger.info(f"Fetch Subtitle for {video_id=}, {proxy=}")
+ res = {}
+ try:
+ subtitles: list[dict] = YouTubeTranscriptApi.get_transcript(video_id=video_id, languages=["zh-CN", "zh-Hans", "zh", "zh-HK", "zh-TW", "zh-Hant", "en"], proxies=proxy)
+ res["subtitle"] = to_webvtt(subtitles)
+ except Exception as e:
+ logger.error(f"Failed to get subtitle: {e}")
+ return await fetch_subtitle_tikhub(video_id)
+ return res
+
+
+async def fetch_subtitle_tikhub(video_id: str) -> dict:
+ logger.info(f"Fetch Subtitle for {video_id=}")
+ api_url = f"{API.TIKHUB}/api/v1/youtube/web/get_video_subtitles?video_id={video_id}"
+ headers = {"authorization": f"Bearer {TOKEN.TIKHUB}", "accept": "application/json"}
+ resp = await hx_req(api_url, headers=headers, check_has_kv=["data"], check_kv={"code": 200})
+ if resp.status_code != 200:
+ logger.warning(f"Subtitle API failed: {resp}")
+ return {}
+ if subtitles := resp.json()["data"].get("subtitles", []):
+ return {"subtitle": to_webvtt(subtitles)}
+ if error := resp.json()["data"].get("detail", []):
+ return {"error": error}
+ return {}
+
+
+def to_webvtt(subtitles: list[dict]) -> str:
+ """Converts subtitles to WebVTT format.
+
+ sample subtitles = [
+ {'text': 'hello', 'start': 0.056, 'duration': 2.88},
+ {'text': 'world!', 'start': 2.983, 'duration': 3.244},
+ ]
+ """
+
+ def format_timestamp(seconds: str | float) -> str:
+ """Converts seconds to WebVTT timestamp format (hh:mm:ss.mmm)."""
+ ms = int((float(seconds) % 1) * 1000)
+        time = timedelta(seconds=int(float(seconds)))  # int("2.983") would raise ValueError; seconds may be str
+ total_seconds = int(time.total_seconds())
+ hours, remainder = divmod(total_seconds, 3600)
+ minutes, seconds = divmod(remainder, 60)
+ return f"{hours:02}:{minutes:02}:{seconds:02}.{ms:03}"
+
+ vtt_output = ["WEBVTT", ""] # WebVTT header
+ for subtitle in subtitles:
+ start = format_timestamp(subtitle["start"])
+ end = format_timestamp(subtitle["start"] + subtitle["duration"])
+ text = subtitle["text"]
+ vtt_output.append(f"{start} --> {end}")
+ vtt_output.append(text)
+ vtt_output.append("") # Add blank line between subtitles
+
+ return "\n".join(vtt_output)
src/preview/douyin.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from datetime import datetime
+from pathlib import Path
+from zoneinfo import ZoneInfo
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from bridge.parsehub import send_to_parsehub
+from config import API, DB, TOKEN, TZ, cache
+from database import get_db
+from message_utils import copy_messages_from_db, modify_progress, save_messages, send2tg, summay_media
+from networking import download_first_success_urls, download_media, hx_req
+from others.emoji import emojify
+
+
+@cache.memoize(ttl=30)
+async def preview_douyin(
+ client: Client,
+ message: Message,
+ url: str = "",
+ platform: str = "douyin",
+ douyin_extractor: str | None = None,
+ douyin_comments_extractor: str | None = None,
+ *,
+ fetch_douyin_comments: bool = True,
+ fallback: bool = True,
+ **kwargs,
+):
+ """Preview douyin or tiktok link in the message.
+
+ Args:
+ client (Client): The Pyrogram client.
+ message (Message): The trigger message object.
+ url (str, optional): The douyin or tiktok link.
+ platform(str, optional): The platform name. Defaults to "douyin".
+ douyin_extractor (str, optional): The douyin extractor: "free" or "tikhub". Defaults to "free".
+ douyin_comments_extractor (str, optional): The douyin comments extractor: "free" or "tikhub". Defaults to "free".
+ fetch_douyin_comments (bool, optional): Fetch douyin comments. Defaults to True.
+ fallback (bool, optional): Fallback to other bots. Defaults to True.
+ """
+ if kwargs.get("show_progress") and "progress" not in kwargs:
+ res = await send2tg(client, message, texts=f"🔗正在解析抖音链接\n{url}", **kwargs)
+ kwargs["progress"] = res[0]
+ db_key = url
+ if kv := await get_db(db_key):
+ logger.debug(f"{platform} preview {DB.ENGINE} cache hit for key={db_key}")
+ if await copy_messages_from_db(client, message, key=db_key, kv=kv, **kwargs):
+ return
+ await modify_progress(text=f"❌从{DB.ENGINE}缓存中转发失败, 尝试重新解析...", **kwargs)
+
+ logger.info(f"{platform} link preview for {url}")
+ succ = False
+ if douyin_extractor is None or douyin_extractor == "free": # try free first
+ api_url = f"{API.TIKHUB_FREE}/api/hybrid/video_data?url={url}"
+ headers = {"accept": "application/json"} if douyin_extractor == "tikhub" else {}
+ try:
+ resp = await hx_req(api_url, headers=headers, check_has_kv=["data"], check_kv={"code": 200})
+ data = resp.json()["data"]
+ succ = True
+        except Exception as e:  # resp may be unbound if hx_req itself raised
+            logger.warning(f"{platform} API [free] failed: {e}")
+ if not succ: # try tikhub
+ api_url = f"{API.TIKHUB}/api/v1/hybrid/video_data?url={url}"
+ headers = {"authorization": f"Bearer {TOKEN.TIKHUB}", "accept": "application/json"}
+ try:
+ resp = await hx_req(api_url, headers=headers, check_has_kv=["data"], check_kv={"code": 200})
+ data = resp.json()["data"]
+        except Exception as e:  # resp may be unbound if hx_req itself raised
+            logger.warning(f"{platform} API [tikhub] failed: {e}")
+ if fallback:
+ await modify_progress(text="❌抖音解析失败, 尝试第三方Bot...", **kwargs)
+ await send_to_parsehub(client, message, text=url, **kwargs)
+ await modify_progress(del_status=True, **kwargs)
+ return
+
+ aweme_id = data.get("aweme_id", Path(url).stem)
+ if int(data.get("media_type", 4)) == 2: # image post
+ media = [{"photo": download_first_success_urls(x.get("url_list", []), workers_proxy=True, **kwargs)} for x in data.get("images", [])]
+ else: # video post
+ video = data.get("video", {})
+ video_urls = []
+ for key in ["play_addr_h264", "play_addr_265", "play_addr"]:
+ video_urls.extend(video.get(key, {}).get("url_list", []))
+ media = [{"video": download_first_success_urls(video_urls, suffix=".mp4", workers_proxy=True, **kwargs)}]
+ await modify_progress(text=f"⏬正在下载:\n{summay_media(media)}", force_update=True, **kwargs)
+ media = await download_media(media, **kwargs)
+ texts = ""
+ if author := data.get("author", {}).get("nickname", ""):
+ texts += f"\n🎶**[{author}]({url})**"
+ if ts := data.get("create_time"):
+ dt = datetime.fromtimestamp(ts).astimezone(ZoneInfo(TZ))
+ texts += f"\n🕒{dt:%Y-%m-%d %H:%M:%S}"
+ if decs := data.get("desc"):
+ texts += f"\n{decs}"
+
+ comments = []
+ if fetch_douyin_comments and (comments_list := await get_comments(aweme_id, platform, douyin_comments_extractor)):
+ comments.append("\n**> 💬**点此展开评论区**:")
+ for idx, cmt in enumerate(comments_list):
+ cmt_text = cmt["text"].replace("\n", "\n> ")
+ if idx == len(comments_list) - 1: # last cmt
+ comments.append(f"\n> 💬**{cmt['name']}**{cmt['region']}: {cmt_text}||")
+ else:
+ comments.append(f"\n> 💬**{cmt['name']}**{cmt['region']}: {cmt_text}")
+
+ sent_messages = await send2tg(client, message, texts=emojify(texts), media=media, comments=comments, **kwargs)
+ await modify_progress(del_status=True, **kwargs)
+ await save_messages(messages=sent_messages, key=db_key)
+
+
+async def get_comments(aweme_id: str = "", platform: str = "douyin", douyin_comments_extractor: str | None = None) -> list[dict]:
+ """Fetch douyin or tiktok comments.
+
+ Args:
+ aweme_id (str, optional): post id.
+ platform (str, optional): douyin or tiktok. Defaults to "douyin".
+ douyin_comments_extractor (str | None, optional): The douyin comments extractor: "free" or "tikhub". Defaults to "free".
+
+ Returns:
+ list[dict]: comments list.
+ """
+ comments = []
+ api_urls = {
+ "douyin_tikhub": f"{API.TIKHUB}/api/v1/douyin/web/fetch_video_comments?aweme_id={aweme_id}",
+ "douyin_free": f"{API.TIKHUB_FREE}/api/douyin/web/fetch_video_comments?aweme_id={aweme_id}",
+ "tiktok_tikhub": f"{API.TIKHUB}/api/v1/tiktok/app/v3/fetch_video_comments?aweme_id={aweme_id}",
+ "tiktok_free": f"{API.TIKHUB_FREE}/api/tiktok/web/fetch_post_comment?aweme_id={aweme_id}",
+ }
+ succ = False
+ if douyin_comments_extractor is None or douyin_comments_extractor == "free": # try free first
+ api_url = api_urls.get(f"{platform}_free")
+ headers = {"accept": "application/json"}
+ try:
+ resp = await hx_req(api_url, headers=headers, check_has_kv=["data.comments"], check_kv={"code": 200})
+ data = resp.json()["data"]["comments"]
+ succ = True
+        except Exception as e:  # resp may be unbound if hx_req itself raised
+            logger.warning(f"{platform} comments API [free] failed: {e}")
+ if not succ: # try tikhub
+ api_url = api_urls.get(f"{platform}_tikhub")
+ headers = {"authorization": f"Bearer {TOKEN.TIKHUB}", "accept": "application/json"}
+ try:
+ resp = await hx_req(api_url, headers=headers, check_has_kv=["data.comments"], check_kv={"code": 200})
+ data = resp.json()["data"]["comments"]
+        except Exception as e:  # resp may be unbound if hx_req itself raised
+            logger.warning(f"{platform} comments API [tikhub] failed: {e}")
+ return []
+ try:
+ for node in data:
+ name = node.get("user", {}).get("nickname", "")
+ text = node.get("text", "")
+ region = f"({node['ip_label']})" if node.get("ip_label") else ""
+ if name and text:
+ comments.append({"name": name, "text": emojify(text.strip()), "region": region})
+ except Exception as e:
+ logger.error(e)
+ return []
+ return comments
src/preview/instagram.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import asyncio
+from datetime import datetime
+from zoneinfo import ZoneInfo
+
+from bs4 import BeautifulSoup
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from bridge.parsehub import send_to_parsehub
+from config import API, DB, DOWNLOAD_DIR, TOKEN, TZ, UA, cache
+from database import get_db
+from message_utils import copy_messages_from_db, modify_progress, save_messages, send2tg, summay_media
+from multimedia import is_valid_video, validate_img
+from networking import download_file, download_media, hx_req
+
+
@cache.memoize(ttl=30)
async def preview_instagram(client: Client, message: Message, url: str = "", *, fallback: bool = True, **kwargs):
    """Preview instagram link in the message.

    Args:
        client (Client): The Pyrogram client.
        message (Message): The trigger message object.
        url (str, optional): Instagram link.
        fallback (bool, optional): Fallback to other bots. Defaults to True.
    """
    if kwargs.get("show_progress") and "progress" not in kwargs:
        res = await send2tg(client, message, texts=f"🔗正在解析Instagram链接\n{url}", **kwargs)
        kwargs["progress"] = res[0]
    db_key = url
    if kv := await get_db(db_key):
        logger.debug(f"Instagram preview {DB.ENGINE} cache hit for key={db_key}")
        if await copy_messages_from_db(client, message, key=db_key, kv=kv, **kwargs):
            return
        await modify_progress(text=f"❌从{DB.ENGINE}缓存中转发失败, 尝试重新解析...", **kwargs)

    api_url = API.TIKHUB_INSTAGRAM + url
    logger.info(f"Preview Instagram TikHub for {api_url}")
    headers = {"authorization": f"Bearer {TOKEN.TIKHUB}", "accept": "application/json"}
    resp = await hx_req(api_url, headers=headers, check_has_kv=["data"], check_kv={"code": 200})
    if resp.status_code != 200:
        await modify_progress(text="❌Instagram解析失败, 使用DDInstagram预览", **kwargs)
        # NOTE(review): preview_ddinstagram also takes url/post_type/post_id;
        # confirm they arrive via kwargs here, otherwise this call needs them
        await preview_ddinstagram(client, message, fallback=fallback, **kwargs)
        await asyncio.sleep(2)
        await modify_progress(del_status=True, **kwargs)
        return

    data = resp.json()["data"]

    # parse media; download_file calls are deferred and awaited later by download_media
    media = []
    if data.get("video_url"):  # reel
        media.append({"video": download_file(data.get("video_url", ""), **kwargs)})
    elif media_nodes := data.get("edge_sidecar_to_children", {}).get("edges", []):
        for node in media_nodes:
            ftype = "photo" if not node.get("node", {}).get("is_video") else "video"
            # use a dedicated name here: reusing `url` clobbered the post link
            # that the author header below relies on
            media_url = node.get("node", {}).get("display_url", "") if ftype == "photo" else node.get("node", {}).get("video_url", "")
            media.append({ftype: download_file(media_url, **kwargs)})
    elif data.get("display_url"):
        media.append({"photo": download_file(data.get("display_url"), **kwargs)})

    texts = ""
    if fullname := data.get("owner", {}).get("full_name"):
        texts += f"🏞**[{fullname}]({url})**\n"

    if metadata_nodes := data.get("edge_media_to_caption", {}).get("edges"):
        if ts := metadata_nodes[0].get("node", {}).get("created_at"):
            dt = datetime.fromtimestamp(float(ts)).astimezone(ZoneInfo(TZ))
            create_time = f"{dt:%Y-%m-%d %H:%M:%S}"
            texts += f"🕒{create_time}\n"
        if description := metadata_nodes[0].get("node", {}).get("text", ""):
            texts += f"{description}\n"

    # parse comments (oldest first) and wrap them into a Telegram expandable quote
    comment_nodes = data.get("edge_media_to_parent_comment", {}).get("edges", [])
    comment_nodes = sorted(comment_nodes, key=lambda x: x.get("node", {}).get("created_at", 0))
    comment_list = [{"author": node.get("node", {}).get("owner", {}).get("username", "user"), "text": node.get("node", {}).get("text", "")} for node in comment_nodes]
    comment_list = [x for x in comment_list if x["text"]]
    comments: list[str] = []
    for idx, cmt in enumerate(comment_list):
        cmt_text = cmt["text"].replace("\n", "\n> ")
        if idx == 0:
            comments.append("\n**> 💬**点此展开评论区**:")
        if idx == len(comment_list) - 1:  # last cmt closes the expandable quote
            comments.append(f"\n> 💬**{cmt['author']}**: {cmt_text}||")
        else:
            comments.append(f"\n> 💬**{cmt['author']}**: {cmt_text}")

    await modify_progress(text=f"⏬正在下载:\n{summay_media(media)}", force_update=True, **kwargs)
    media = await download_media(media, **kwargs)
    sent_messages = await send2tg(client, message, texts=texts.strip(), media=media, comments=comments, **kwargs)
    await modify_progress(del_status=True, **kwargs)
    await save_messages(messages=sent_messages, key=db_key)
+
+
@cache.memoize(ttl=30)
async def preview_ddinstagram(client: Client, message: Message, url: str = "", post_type: str = "p", post_id: str = "", *, fallback: bool = True, **kwargs):
    """Preview instagram link in the message via DDInstagram.

    https://ddinstagram.com/

    Args:
        client (Client): The Pyrogram client.
        message (Message): The trigger message object.
        url (str, optional): Instagram link.
        post_type (str, optional): post type: "p" or "reel". Defaults to "p".
            (Defaults keep the fallback call from preview_instagram from raising
            TypeError when these are not supplied.)
        post_id (str, optional): post id.
        fallback (bool, optional): Fallback to other bots. Defaults to True.
    """
    api_url = f"{API.DDINSTAGRAM}/{post_type}/{post_id}"
    logger.info(f"Instagram link preview for {api_url}")
    headers = {"user-agent": UA.TELEGRAM}
    resp = await hx_req(api_url, headers=headers)
    if not resp.text:
        if fallback:
            await send_to_parsehub(client, message, text=url, **kwargs)
        return

    soup = BeautifulSoup(resp.text, "html.parser")
    logger.trace(soup.prettify())

    texts = ""
    media = {}
    if tag := soup.find("meta", attrs={"name": "twitter:title"}):
        author = tag.get("content", "Unknown")  # type: ignore
        texts += f"🏞**[{author}]({url})**\n"  # close the bold markup
    if tag := soup.find("meta", attrs={"property": "og:description"}):
        # append the caption; a plain assignment here discarded the author line above
        texts += tag.get("content", "")  # type: ignore
    if tag := soup.find("meta", attrs={"name": "twitter:image"}):
        img_url = tag.get("content", "")  # type: ignore
        if img_url:
            raw_url = f"{API.DDINSTAGRAM}{img_url}"
            media["photo"] = await download_file(raw_url, path=f"{DOWNLOAD_DIR}/{post_id}.jpg", workers_proxy=True, **kwargs)
            if not bool(validate_img(media["photo"])):
                await send_to_parsehub(client, message, text=url, **kwargs)
                return

    if tag := soup.find("meta", attrs={"property": "og:video"}):
        video_url = tag.get("content", "")  # type: ignore
        if video_url:
            raw_url = f"{API.DDINSTAGRAM}{video_url}"
            media["video"] = await download_file(raw_url, path=f"{DOWNLOAD_DIR}/{post_id}.mp4", workers_proxy=True, **kwargs)
            if not is_valid_video(media["video"]):
                await send_to_parsehub(client, message, text=url, **kwargs)
                return

    await send2tg(client, message, texts=texts, media=[media], **kwargs)
src/preview/twitter.py
@@ -0,0 +1,363 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import asyncio
+import copy
+import re
+from datetime import UTC, datetime
+from zoneinfo import ZoneInfo
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from config import API, DB, TOKEN, TZ, UA, cache
+from database import get_db
+from message_utils import copy_messages_from_db, modify_progress, save_messages, send2tg, summay_media
+from networking import download_file, download_media, flatten_rediercts, hx_req
+from utils import remove_none_values, split_parts, true
+
+
@cache.memoize(ttl=30)
async def preview_twitter(client: Client, message: Message, domain: str = "", handle: str = "", post_id: str = "", twitter_extractor: str | None = None, **kwargs):
    """Preview twitter link in the message.

    Args:
        client (Client): The Pyrogram client.
        message (Message): The trigger message object.
        domain (str): The domain of the link: twitter.com, x.com, fxtwitter.com, fixupx.com.
        handle (str): The twitter handle.
        post_id (str): The twitter post id.
        twitter_extractor (str | None): The extractor to use: fxtwitter or tikhub.
            None behaves like "tikhub" (tried first, fxtwitter as fallback).

    If skip_fxtwitter is set to True, and the domain is fxtwitter or fixupx, this function is skipped.
    """
    if true(kwargs.get("skip_fxtwitter")) and domain in ["fxtwitter", "fixupx"]:
        return
    db_key = f"https://x.com/{handle}/status/{post_id}"
    if kwargs.get("show_progress") and "progress" not in kwargs:
        res = await send2tg(client, message, texts=f"🔗正在解析推特链接\n{db_key}", **kwargs)
        kwargs["progress"] = res[0]
    if kv := await get_db(db_key):
        logger.debug(f"Twitter preview {DB.ENGINE} cache hit for key={db_key}")
        if await copy_messages_from_db(client, message, key=db_key, kv=kv, **kwargs):
            return
        await modify_progress(text=f"❌从{DB.ENGINE}缓存中转发失败, 尝试重新解析...", **kwargs)
    # pre-initialize so the assembly step below never sees unbound names
    this_info: dict = {}
    master_info: dict = {}
    quote_info: dict = {}
    succ = False
    if twitter_extractor is None or twitter_extractor == "tikhub":  # try tikhub first
        try:
            this_info = await get_tweet_info_via_tikhub(post_id=post_id, **kwargs)
            if not this_info:
                await modify_progress(text="❌推特解析失败", **kwargs)
                await asyncio.sleep(1)
                await modify_progress(del_status=True, **kwargs)
                # explicit failure; caught below so the fxtwitter fallback runs
                # (the original relied on an accidental KeyError on {} for this)
                raise ValueError("empty TikHub response")
            quote_info = await get_tweet_info_via_tikhub(quote_info=this_info["quote_info"], **kwargs) if this_info["has_quote"] else {}
            master_info = await get_tweet_info_via_tikhub(post_id=this_info["master_thread_id"], **kwargs) if this_info["has_master"] else {}
            succ = True
        except Exception as e:
            logger.warning(f"Twitter API [tikhub] failed: {e}")

    if not succ:  # try fxtwitter
        try:
            this_info = await get_tweet_info_via_fxtwitter(handle=handle, post_id=post_id)
            if not this_info:
                await modify_progress(text="❌推特解析失败", **kwargs)
                await asyncio.sleep(1)
                await modify_progress(del_status=True, **kwargs)
                return
            master_info = await get_tweet_info_via_fxtwitter(handle=this_info["replying_to_user"], post_id=this_info["replying_post_id"]) if this_info["has_master"] else {}
            quote_info = await get_tweet_info_via_fxtwitter(quote_info=this_info["quote_info"]) if this_info["has_quote"] else {}
        except Exception as e:
            logger.warning(f"Twitter API [fxtwitter] failed: {e}")
            return

    media = []
    media_ids = set()  # deduplicate media across master/this/quote tweets
    master_media = []
    for x in master_info.get("media", []):
        if x["id"] in media_ids:
            continue
        media_ids.add(x["id"])
        x[x["type"]] = download_file(x["url"], **kwargs)  # deferred; awaited by download_media
        master_media.append(x)

    this_media = []
    for x in this_info.get("media", []):
        if x["id"] in media_ids:
            continue
        media_ids.add(x["id"])
        x[x["type"]] = download_file(x["url"], **kwargs)
        this_media.append(x)

    quote_media = []
    for x in quote_info.get("media", []):
        if x["id"] in media_ids:
            continue
        media_ids.add(x["id"])
        x[x["type"]] = download_file(x["url"], **kwargs)
        quote_media.append(x)

    # build the media-count captions (which media belong to which tweet)
    n_media_this = len(this_media)
    n_media_master = len(master_media) if this_info["has_master"] else 0
    n_media_quote = len(quote_media) if this_info["has_quote"] else 0
    part_strs = split_parts(n_media_master, n_media_this, n_media_quote)
    for k, v in part_strs.items():
        if v == "所有":
            part_strs[k] = "图片及视频均"

    msg = ""
    master_handle = master_info.get("handle", "")
    # the master tweet being replied to
    if master_info:
        if not (kwargs.get("target_chat") and kwargs.get("reply_msg_id") is None):
            # not forwarding to another chat, and not replying to a message that contains the parsed link
            msg += "🔗**该链接推文为主推下的一条跟推**\n"

        msg += "🕊**主推文内容:**"
        if author := master_info.get("author"):
            msg += f"\n🕊[{author}](https://x.com/{master_info['handle']}/status/{master_info['post_id']})"
        if time_str := master_info.get("time"):
            msg += f"\n🕒{time_str}"
        if device := master_info.get("device"):
            msg += f"📱{device}"
        if part_strs["first"]:
            msg += f"\n🖼{part_strs['first']}属于主推"
        if texts := master_info.get("texts"):
            msg += f"\n{texts}"
        if comments := master_info.get("comments"):
            msg += "\n**> 💬**点此展开评论区**:"
            # NOTE(review): if the last comment is the one skipped below, the
            # closing "||" is never appended -- confirm this cannot happen
            for idx, cmt in enumerate(comments):
                if str(cmt["post_id"]) == str(this_info["post_id"]):
                    continue
                cmt_texts = cmt["text"].replace("\n", "\n> ")
                if idx == len(comments) - 1:  # last cmt
                    msg += f"\n> 💬**{cmt['author']}**: {cmt_texts}||"
                else:
                    msg += f"\n> 💬**{cmt['author']}**: {cmt_texts}"

        media.extend(master_media)

    # the tweet of the parsed link itself
    media.extend(this_media)
    this_tweet_type = "跟推" if this_info["has_master"] else "本推"
    if master_info:
        msg += f"\n🕊**{this_tweet_type}内容:**"

    if author := this_info.get("author"):
        msg += f"\n🕊[{author}](https://x.com/{handle}/status/{post_id})"

    if time_str := this_info.get("time"):
        msg += f"\n🕒{time_str}"

    if device := this_info.get("device"):
        msg += f"📱{device}"
    if part_strs["middle"] and (this_info["has_master"] or this_info["has_quote"]):  # attach the media-count note only when other tweets contribute media
        msg += f"\n🖼{part_strs['middle']}属于{this_tweet_type}"

    if texts := this_info.get("texts"):
        msg += f"\n{texts}"

    if comments := this_info.get("comments"):
        msg += "\n**> 💬**点此展开评论区**:"
        for idx, cmt in enumerate(comments):
            # a reply's comment sometimes starts with the replied-to handle; drop it
            cmt_texts = cmt["text"].strip().removeprefix(f"@{master_handle}").strip().replace("\n", "\n> ")
            if idx == len(comments) - 1:  # last cmt
                msg += f"\n> 💬**{cmt['author']}**: {cmt_texts}||"
            else:
                msg += f"\n> 💬**{cmt['author']}**: {cmt_texts}"

    # the quoted tweet
    if quote_info:
        # the body sometimes ends with a link to the quoted tweet; drop it
        quote_x_url = f"https://x.com/{quote_info.get('handle', '')}/status/{quote_info.get('post_id', '')}"
        msg = remove_twitter_suffix(msg, post_id=quote_info["post_id"], same_id_only=True)
        msg += "\n🔁**本推文还引用下述推文:**"
        if author := quote_info.get("author"):
            msg += f"\n🕊[{author}]({quote_x_url})"

        if time_str := quote_info.get("time"):
            msg += f"\n🕒{time_str}"

        if device := quote_info.get("device"):
            msg += f"📱{device}"

        if part_strs["last"]:
            msg += f"\n🖼{part_strs['last']}属于引推"

        if texts := quote_info.get("texts"):
            msg += f"\n{texts}"
        media.extend(quote_media)

    await modify_progress(text=f"⏬正在下载:\n{summay_media(media)}", force_update=True, **kwargs)
    media = await download_media(media, **kwargs)
    sent_messages = await send2tg(client, message, texts=msg.strip(), media=media, **kwargs)
    await modify_progress(del_status=True, **kwargs)
    await save_messages(messages=sent_messages, key=db_key)
+
+
@cache.memoize(ttl=30)
async def get_tweet_info_via_tikhub(post_id: str = "", quote_info: dict | None = None, **kwargs) -> dict:  # type: ignore
    """Get a single tweet info via the TikHub API.

    Args:
        post_id (str, optional): tweet id to fetch.
        quote_info (dict | None, optional): pre-parsed quoted-tweet payload taken
            from the parent tweet's response; when given, no HTTP request is made.

    Returns:
        dict: normalized tweet info (handle, post_id, media, author, time, texts,
            comments, has_master/master_thread_id, quote_info/has_quote);
            empty dict on failure.
    """
    api_url = f"{API.TIKHUB_TWITTER}{post_id}"
    logger.info(f"Twitter preview via TikHub: {api_url}")
    data = {}
    if quote_info:  # quote_info is directly parsed from the this_info
        data = copy.deepcopy(quote_info)
        post_id = quote_info.get("tweet_id", "")  # NOTE(review): confirm the quoted payload uses "tweet_id" (not "id")
        data["id"] = post_id
        await modify_progress(text="✅正在解析引用推文...", **kwargs)
    else:
        headers = {"authorization": f"Bearer {TOKEN.TIKHUB}", "accept": "application/json"}
        resp = await hx_req(api_url, headers=headers, check_has_kv=["data.author.screen_name"], check_kv={"data": {"id": post_id}})
        if resp.status_code != 200:
            logger.error("Failed to get tweet info via TikHub")
            return {}
        data: dict = resp.json()["data"]
        await modify_progress(text=f"✅推文{post_id}解析成功, 正在处理...", **kwargs)

    data = remove_none_values(data)
    handle = data.get("author", {}).get("screen_name", "")
    post_id = data.get("id", post_id)
    info = {"handle": handle, "post_id": post_id}
    media_info = data.get("media", {})
    # the master thread media may be repeated in the reply tweet
    # so we do not download the media file here but record media "id" for de-duplication
    media = [{"type": "photo", "url": x.get("media_url_https", ""), "id": x.get("id", "0")} for x in media_info.get("photo", [])]
    for x in media_info.get("video", []):
        # pick the highest-bitrate mp4 variant
        if variants := [v for v in x.get("variants", []) if "mp4" in v.get("content_type", "")]:
            mp4_url = sorted(variants, key=lambda v: v.get("bitrate", 0), reverse=True)[0]["url"]
            media.append({"type": "video", "url": mp4_url, "id": x.get("id", "0")})
    info["media"] = media
    info["author"] = data.get("author", {}).get("name", "")
    if date_string := data.get("created_at", ""):
        dt = datetime.strptime(date_string, "%a %b %d %H:%M:%S %z %Y").astimezone(ZoneInfo(TZ))
        info["time"] = f"{dt:%Y-%m-%d %H:%M:%S}"
    texts = await remove_tco_suffix(data.get("text", ""), post_id=post_id)
    texts = await flatten_rediercts(texts)
    info["texts"] = texts

    conversation_id = data.get("conversation_id", "0")
    if int(conversation_id) != int(post_id):
        info["has_master"] = True
        info["master_thread_id"] = conversation_id
    else:
        info["has_master"] = False

    # parse comments: same conversation, excluding the tweet itself
    threads = [x for x in data.get("thread", []) if int(x.get("conversation_id", "0")) == int(conversation_id) and int(x.get("id", "0")) != int(post_id)]
    # sort numerically (snowflake ids grow over time); a lexicographic string
    # sort with a `{}` default would misorder ids and risk a TypeError
    threads = sorted(threads, key=lambda x: int(x.get("id", "0") or 0))
    comments = []
    for node in threads:
        comment_handle = node.get("author", {}).get("screen_name", "")
        comment_text = node.get("text", "").removeprefix(f"@{handle}")
        comment_text = re.sub(r"https?://t\.co/\w+$", "", comment_text)  # remove t.co link suffix
        comment_text = await remove_tco_suffix(comment_text, post_id=node.get("id", ""))
        comment_text = await flatten_rediercts(comment_text)
        comment_text = comment_text.strip()
        if comment_handle and comment_text:
            comments.append({"author": comment_handle, "text": comment_text, "post_id": node.get("id", "")})

    info["comments"] = comments
    info["quote_info"] = data.get("quoted", {})
    info["has_quote"] = bool(info["quote_info"])
    return info
+
+
@cache.memoize(ttl=30)
async def get_tweet_info_via_fxtwitter(handle: str = "", post_id: str = "", quote_info: dict | None = None) -> dict:  # type: ignore
    """Get a single tweet info via the fxtwitter API.

    Args:
        handle (str, optional): twitter handle.
        post_id (str, optional): tweet id.
        quote_info (dict | None, optional): pre-parsed quoted-tweet payload taken
            from the parent tweet's response; when given, no HTTP request is made.

    Returns:
        dict: normalized tweet info; empty dict on failure.
    """
    data = {}
    if quote_info:
        data = copy.deepcopy(quote_info)
        handle = data.get("author", {}).get("name", "")
        post_id = data.get("id", "")
    else:
        api_url = f"{API.FXTWITTER}/{handle}/status/{post_id}"
        logger.info(f"Twitter preview via fxtwitter: {api_url}")
        headers = {"user-agent": UA.TELEGRAM}
        resp = await hx_req(api_url, headers=headers)
        payload = resp.json()  # parse once instead of twice
        if str(payload.get("tweet", {}).get("id")) != str(post_id):
            logger.error("Failed to get tweet info via fxtwitter")
            return {}
        data: dict = payload["tweet"]

    info = {"handle": data.get("author", {}).get("screen_name", handle), "post_id": data.get("id", post_id)}
    media = data.get("media", {}).get("all", [])
    for x in media:
        if x.get("type", "") == "video" and "mp4" not in x.get("format", ""):  # this is a m3u8 url, choose mp4 instead
            m3u8_url = x.get("url", "")
            mp4_url = ""
            if variants := [v for v in x.get("variants", []) if "mp4" in v.get("content_type", "")]:
                mp4_url = sorted(variants, key=lambda v: v.get("bitrate", 0), reverse=True)[0]["url"]
            x["url"] = mp4_url or m3u8_url
        x["id"] = x["url"]  # record media "id" for de-duplication

    info["media"] = media
    info["author"] = data.get("author", {}).get("name", "")
    if ts := data.get("created_timestamp", ""):
        dt = datetime.fromtimestamp(round(float(ts)), tz=UTC).astimezone(ZoneInfo(TZ))
        info["time"] = f"{dt:%Y-%m-%d %H:%M:%S}"
    info["texts"] = data.get("text", "")
    info["device"] = data.get("source", "").removeprefix("Twitter for").removeprefix("Twitter").removesuffix("App").strip().removesuffix("Web")
    info["replying_to_user"] = data.get("replying_to", "")
    info["replying_post_id"] = data.get("replying_to_status", "")
    info["quote_info"] = data.get("quote", {})
    info["has_master"] = bool(data.get("replying_to"))
    # use the normalized key: `data` has no "quote_info" key (the API field is
    # "quote"), so the original data.get("quote_info") was always falsy and
    # quoted tweets were never rendered on the fxtwitter path
    info["has_quote"] = bool(info["quote_info"])
    return info
+
+
def remove_twitter_suffix(text: str, post_id: str = "", *, same_id_only: bool = True) -> str:
    """Remove a trailing twitter/x status link from a tweet's text.

    Some tweets end with a twitter link pointing to the tweet itself.

    Args:
        text (str): The tweet text.
        post_id (str): The post id the text belongs to.
        same_id_only (bool): Only remove the suffix when the linked status id
            matches ``post_id``.

    Returns:
        str: The stripped text, with the trailing self-link removed when it
            matches ``post_id``.
    """
    text = str(text).strip()

    match_url = ""
    match_post_id = ""
    # `(?: ...)` is a non-capturing group (the original `(:?` matched an
    # optional literal colon), so the status id is group(2)
    if matched := re.search(r"https?://(?:twitter|x|fxtwitter|fixupx)\.com/(\w+)/status/(\d+)$", text):
        match_url = matched.group(0)
        match_post_id = matched.group(2)

    if same_id_only and post_id and str(post_id) == str(match_post_id):
        return text.removesuffix(match_url).strip()

    return text
+
+
async def remove_tco_suffix(text: str, post_id: str = "") -> str:
    """Remove a trailing t.co link when it redirects to the tweet itself.

    Some tweet texts end with a t.co link in TikHub parsed info (this is a bug
    of TikHub). The t.co link may be a redirect back to the tweet itself;
    resolve it and drop it when the target's status id equals ``post_id``.

    Args:
        text (str): The text to be parsed.
        post_id (str): The post id the text belongs to.

    Returns:
        str: The stripped text, without the t.co suffix when it was a self-link.
    """
    text = str(text).strip()
    # not ending with a t.co link: nothing to do
    if not (matched := re.search(r"https?://t\.co/\w+$", text)):
        return text

    # the t.co link at the end of the text
    t_co_url: str = matched.group(0)

    # resolve the t.co redirect chain
    raw_url = await flatten_rediercts(t_co_url)

    # check whether the redirect target is a twitter link with the same post_id
    match_post_id = ""
    # `(?: ...)` is a non-capturing group (the original `(:?` matched an
    # optional literal colon), so the status id is group(2)
    if matched := re.search(r"https?://(?:twitter|x|fxtwitter|fixupx)\.com/(\w+)/status/(\d+)", raw_url):
        match_post_id = matched.group(2)

    if str(post_id) == str(match_post_id):
        return text.removesuffix(t_co_url).strip()

    return text
src/preview/weibo.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import asyncio
+import contextlib
+import json
+import re
+from datetime import datetime
+from urllib.parse import quote_plus
+from zoneinfo import ZoneInfo
+
+from bs4 import BeautifulSoup
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from bridge.parsehub import send_to_parsehub
+from config import API, DB, DOWNLOAD_DIR, TOKEN, TZ, cache
+from cookies import get_weibo_cookies
+from database import get_db
+from message_utils import copy_messages_from_db, modify_progress, save_messages, send2tg, summay_media
+from networking import download_file, download_first_success_urls, download_media, hx_req
+from others.emoji import emojify
+from utils import https_url, rand_string, soup_to_text, split_parts, ts_to_dt
+
+
@cache.memoize(ttl=30)
async def preview_weibo(client: Client, message: Message, url: str, post_id: str = "", *, fetch_weibo_comments: bool = True, fallback: bool = True, **kwargs):
    """Preview weibo link in the message.

    Args:
        client (Client): The Pyrogram client.
        message (Message): The trigger message object.
        url (str, optional): Weibo link.
        post_id (str, optional): Weibo post ID.
        fetch_weibo_comments (bool, optional): Fetch weibo comments. Defaults to True.
        fallback (bool, optional): Fallback to other bots. Defaults to True.
    """
    if post_id.startswith("weibovideo"):  # disable comments for weibo video
        fetch_weibo_comments = False
    else:
        url = f"https://m.weibo.cn/detail/{post_id}"
    if kwargs.get("show_progress") and "progress" not in kwargs:
        res = await send2tg(client, message, texts=f"🔗正在解析微博链接\n{url}", **kwargs)
        kwargs["progress"] = res[0]
    if kv := await get_db(url):
        logger.debug(f"Weibo preview {DB.ENGINE} cache hit for key={url}")
        if await copy_messages_from_db(client, message, key=url, kv=kv, **kwargs):
            return
        await modify_progress(text=f"❌从{DB.ENGINE}缓存中转发失败, 尝试重新解析...", **kwargs)

    this_info = await parse_weibo_info(post_id, **kwargs)
    if error_msg := this_info.get("error_msg"):
        if this_info.get("fallback", fallback):
            await modify_progress(text=f"❌微博解析失败: {error_msg}\n\n尝试第三方Bot...", **kwargs)
            await send_to_parsehub(client, message, text=url, **kwargs)
        else:
            await modify_progress(text=f"❌微博解析失败: {error_msg}", **kwargs)
            await asyncio.sleep(3)
        await modify_progress(del_status=True, **kwargs)
        return
    quote_info = await parse_weibo_info(post_id, this_info["reply_data"], **kwargs) if this_info.get("reply_data") else {}
    if quote_info.get("error_msg"):
        # a failed retweet parse yields a truthy error dict without a "media"
        # key; discard it so it is neither rendered nor crashes below
        logger.warning(f"Weibo retweet parse failed: {quote_info['error_msg']}")
        quote_info = {}

    # build the media-count captions
    num_this = len(this_info["media"])
    num_quote = len(quote_info.get("media", []))
    part_strs = split_parts(num_this, last=num_quote)
    for k, v in part_strs.items():
        if v == "所有":
            part_strs[k] = "图片及视频均"

    msg = ""
    if author := this_info.get("author"):
        msg += f"\n🧣**[{author}]({this_info.get('author_url', 'weibo.com')})**"

    if dt := this_info["dt"]:
        msg += f"\n🕒{dt}"

    if region := this_info.get("region"):
        msg += f" 📍{region}"

    if device := this_info.get("device"):
        msg += f"\n📱{device}"
    if part_strs["first"] and quote_info:  # attach the media-count note only when a retweet exists
        msg += f"\n🖼{part_strs['first']}属于本帖"

    if texts := this_info.get("texts"):
        msg += f"\n{texts}"

    media = this_info.get("media", [])
    if quote_info:
        msg += "\n🔁转帖内容:"
        if author := quote_info.get("author"):
            msg += f"\n🧣[{author}](https://m.weibo.cn/detail/{quote_info['post_id']})"

        if dt := quote_info.get("dt"):
            msg += f"\n🕒{dt}"

        if region := quote_info.get("region"):
            msg += f" 📍{region}"

        if device := quote_info.get("device"):
            msg += f"\n📱{device}"

        if part_strs["last"]:
            msg += f"\n🖼{part_strs['last']}属于转帖"

        if texts := quote_info.get("texts"):
            msg += f"\n{texts}"

        media.extend(quote_info.get("media", []))  # .get: defensive against partial dicts
    comments = await parse_weibo_comments(post_id) if fetch_weibo_comments else []
    sent_messages = await send2tg(client, message, texts=emojify(msg.strip()), media=media, comments=comments, **kwargs)
    await modify_progress(del_status=True, **kwargs)
    await save_messages(messages=sent_messages, key=url)
+
+
@cache.memoize(ttl=30)
async def parse_weibo_info(post_id: str, data: dict | None = None, **kwargs) -> dict:  # type: ignore
    """Parse one weibo post into a normalized info dict.

    Args:
        post_id (str): Weibo post id; an id starting with "weibovideo" is
            delegated to parse_weibo_video.
        data (dict | None, optional): Pre-fetched post payload (used for the
            retweeted post). When None, the mobile post page is fetched and
            the embedded JSON is extracted from it.

    Returns:
        dict: on success, keys post_id/author/author_url/region/dt/device/
            texts/reply_data/media; on failure, a dict with "error_msg"
            (and optionally "fallback": False).
    """
    info = {}
    if not data:
        if post_id.startswith("weibovideo"):
            # video-show links use a different API entirely
            return await parse_weibo_video(post_id, **kwargs)
        weibo_url = f"https://m.weibo.cn/detail/{post_id}"
        logger.info(f"Weibo link preview for {weibo_url}")
        headers = {"referer": "https://m.weibo.cn"}
        try:
            resp = await hx_req(weibo_url, headers=headers)
            # the mobile page embeds the post JSON in a `$render_data` script variable
            if not (matched := re.search(r"var \$render_data = (\[.*?\])\[0\]", str(resp.text), re.DOTALL)):
                info["error_msg"] = "Weibo API empty response"
                if "微博不存在" in str(resp.text) or "暂无查看权限" in str(resp.text):
                    info["error_msg"] = "微博不存在或暂无查看权限!"
                    info["fallback"] = False  # a third-party bot cannot see it either; skip fallback
                logger.error(info["error_msg"])
                return info
            json_data: list = json.loads(matched.group(1))
            if not json_data:
                logger.error(f"Weibo API response cannot be parsed: {matched.group(1)}")
                info["error_msg"] = "Weibo API response cannot be parsed"
                return info
            data: dict = json_data[0].get("status", {})
            # strip HTML from the post body
            # NOTE(review): soup_to_text runs again on this already-plain text
            # further below -- presumably harmless; confirm it is intentional
            data["text"] = soup_to_text(soup=BeautifulSoup(data.get("text", ""), "html.parser"))
            await modify_progress(text="✅解析成功, 正在处理...", **kwargs)
        except Exception as e:
            info["error_msg"] = f"Weibo API failed: {e}"
            logger.error(info["error_msg"])
            return info
    else:
        await modify_progress(text="✅正在解析转发微博...", **kwargs)

    logger.trace(data)
    # collect media; download_file calls are deferred and awaited by download_media below
    media = []
    for x in data.get("pics", []):
        pid = x.get("pid", rand_string())
        mtype = x.get("type", "photo")
        photo_url = x.get("large", {}).get("url", x.get("url"))
        video_url = x.get("videoSrc")
        if mtype == "livephoto":
            # a live photo yields both a still image and a short .mov clip
            media.append({"photo": download_file(photo_url, **kwargs)})
            media.append({"video": download_file(video_url, path=f"{DOWNLOAD_DIR}/{pid}.mov", **kwargs)})
        elif mtype in ["video", "gifvideos"]:
            media.append({"video": download_file(video_url, suffix=".mp4", **kwargs)})
        else:
            media.append({"photo": download_file(photo_url, **kwargs)})
    if page_info := data.get("page_info", {}):
        videos = page_info.get("urls", {})
        if url := videos.get("mp4_720p_mp4"):
            # This maybe already downloaded by the above loop (for loop in data['pics'])
            media.append({"video": download_file(url, skip_exist=True, suffix=".mp4", **kwargs)})
    info["post_id"] = data.get("id", post_id)
    info["author"] = data.get("user", {}).get("screen_name", "")
    info["author_url"] = f"https://m.weibo.cn/detail/{post_id}"  # for weibo post, use post url as author url
    info["region"] = data.get("region_name", "").removeprefix("发布于").strip()
    info["dt"] = ""
    with contextlib.suppress(Exception):
        # e.g. "Mon Jan 20 12:00:00 +0800 2025"; leave "" if the format differs
        dt = datetime.strptime(data["created_at"], "%a %b %d %H:%M:%S %z %Y").astimezone(ZoneInfo(TZ))
        info["dt"] = f"{dt:%Y-%m-%d %H:%M:%S}"
    info["device"] = data.get("source", "")
    info["texts"] = soup_to_text(BeautifulSoup(data.get("text", ""), "html.parser"))
    info["reply_data"] = data.get("retweeted_status", {})
    await modify_progress(text=f"⏬正在下载:\n{summay_media(media)}", force_update=True, **kwargs)
    media = await download_media(media, **kwargs)
    # de-duplicate media
    media_paths = set()
    final_media = []
    for x in media:
        if x.get("photo") and x["photo"] not in media_paths:
            final_media.append(x)
            media_paths.add(x["photo"])
        elif x.get("video") and x["video"] not in media_paths:
            final_media.append(x)
            media_paths.add(x["video"])
    info["media"] = final_media
    return info
+
+
async def parse_weibo_video(post_id: str, **kwargs) -> dict:
    """Resolve a "video.weibo.com/show" post into a normalized info dict.

    Args:
        post_id (str): id prefixed with "weibovideo"; the remainder is the fid.

    Returns:
        dict: info with media/dt/author/author_url/texts (and region when
            available) on success, or {"error_msg": ...} on failure.
    """
    result = {}
    try:
        fid = post_id.removeprefix("weibovideo")
        url = f"https://video.weibo.com/show?fid={fid}"
        api_url = f"{API.TIKHUB_WEIBO_VIDEO}{quote_plus(url)}"
        headers = {"authorization": f"Bearer {TOKEN.TIKHUB}", "accept": "application/json"}
        resp = await hx_req(api_url, headers=headers, check_kv={"code": 200}, check_has_kv=["data.data.Component_Play_Playinfo"])
        play_info = resp.json()["data"]["data"]["Component_Play_Playinfo"]

        # try every advertised quality until one download succeeds
        candidate_urls = [https_url(u) for u in play_info.get("urls", {}).values()]
        result["media"] = [{"video": await download_first_success_urls(candidate_urls, suffix=".mp4", **kwargs)}]

        result["dt"] = ""
        if dt := ts_to_dt(play_info.get("real_date")):
            result["dt"] = f"{dt:%Y-%m-%d %H:%M:%S}"
        elif play_info.get("date"):
            result["dt"] = play_info.get("date")

        result["author"] = play_info.get("author", "")
        result["author_url"] = f"https://m.weibo.cn/u/{play_info['author_id']}" if play_info.get("author_id") else url  # for weibo video, use author profile as author url
        if region := play_info.get("ip_info_str"):  # maybe empty
            result["region"] = region.removeprefix("发布于").strip()

        texts = ""
        if title := play_info.get("title"):
            texts += f"\n📝[{title}]({url})"
        if desc := play_info.get("text"):
            texts += f"\n{soup_to_text(BeautifulSoup(desc, 'html.parser'))}"
        result["texts"] = texts.strip()
        await modify_progress(text="✅解析成功, 正在处理...", **kwargs)
    except Exception as e:
        msg = f"Weibo Video API failed: {e}"
        logger.error(msg)
        return {"error_msg": msg}
    return result
+
+
@cache.memoize(ttl=30)
async def parse_weibo_comments(post_id: str) -> list[str]:
    """Fetch weibo comments for a post and format them as Telegram quote lines.

    Args:
        post_id (str): Weibo post id.

    Returns:
        list[str]: a header line plus one line per comment, the last of which
            closes the expandable quote with "||"; empty list on failure or
            when there are no usable comments.
    """
    headers = {
        "cookie": await get_weibo_cookies(),
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "cache-control": "no-cache",
        "client-version": "v2.47.17",
        "dnt": "1",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://weibo.com",
        "sec-ch-ua": '"Not;A=Brand";v="24", "Chromium";v="128"',
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": '"Linux"',
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "server-version": "v2024.12.30.2",
        "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest",
    }
    params = {
        "id": post_id,
        "is_show_bulletin": 2,
        "is_mix": 0,
        "count": 10,
        "fetch_level": 0,
        "locale": "zh-CN",
        "max_id": 0,
    }
    api = "https://weibo.com/ajax/statuses/buildComments"
    resp = await hx_req(api, headers=headers, params=params, check_kv={"ok": 1})
    if resp.status_code != 200:
        logger.error(f"Weibo Comments API failed: {resp}")
        return []

    comments = ["\n**> 💬**点此展开评论区**:"]
    for info in resp.json().get("data", []):
        if not info.get("text"):
            continue
        cmt = ""
        if author := info.get("user", {}).get("screen_name"):
            cmt += f"💬**{author}**"
        if region := info.get("source", "").removeprefix("来自"):
            cmt += f"({region})"
        cmt += ":"
        if text := info.get("text"):
            cmt += f" {soup_to_text(BeautifulSoup(text, 'html.parser'))}"
        cmt = emojify(cmt)
        comments.append(f"\n> {cmt}")
    if len(comments) == 1:  # header only: no usable comments
        return []
    # always terminate the expandable quote on the last comment; the original
    # `len(comments) > 2` left a single-comment list unterminated
    comments[-1] += "||"
    return comments
src/preview/xiaohongshu.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import asyncio
+import json
+from datetime import datetime
+from zoneinfo import ZoneInfo
+
+from bs4 import BeautifulSoup
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from bridge.parsehub import send_to_parsehub
+from config import DB, PROXY, TZ, UA, cache
+from database import get_db
+from message_utils import copy_messages_from_db, modify_progress, save_messages, send2tg, summay_media
+from networking import download_file, download_first_success_urls, download_media, hx_req
+from others.emoji import emojify
+
+
+@cache.memoize(ttl=30)
+async def preview_xhs(client: Client, message: Message, xhslink: str = "", post_id: str = "", xsec_token: str = "", *, fallback: bool = True, **kwargs):
+ """Preview xiaohongshu link in the message.
+
+ Args:
+ client (Client): The Pyrogram client.
+ message (Message): The trigger message object.
+ xhslink (str, optional): xiaohongshu link in xhslink.com domain.
+ post_id (str, optional): xhs post ID
+ xsec_token (str, optional): xhs xsec_token. (This is mandatory for links are not xhslink.com)
+ fallback (bool, optional): Fallback to other bots. Defaults to True.
+ """
+ db_key = f"https://www.xiaohongshu.com/explore/{post_id}"
+ if kwargs.get("show_progress") and "progress" not in kwargs:
+ res = await send2tg(client, message, texts=f"🔗正在解析小红书链接\n{db_key}", **kwargs)
+ kwargs["progress"] = res[0]
+ if kv := await get_db(db_key):
+ logger.debug(f"Xiaohongshu preview {DB.ENGINE} cache hit for key={db_key}")
+ if await copy_messages_from_db(client, message, key=db_key, kv=kv, **kwargs):
+ return
+ await modify_progress(text=f"❌从{DB.ENGINE}缓存中转发失败, 尝试重新解析...", **kwargs)
+
+ if not xhslink and not xsec_token:
+ msg = "链接格式错误: 缺少 xsec_token 参数, 请发送完整链接"
+ msg += "\n或者使用手机APP分享的链接 (xhslink.com域名)"
+ await send2tg(client, message, texts=msg, **kwargs)
+ await modify_progress(del_status=True, **kwargs)
+ return
+
+ url = xhslink if xhslink else f"https://www.xiaohongshu.com/explore/{post_id}?xsec_token={xsec_token}"
+ logger.info(f"Xiaohongshu link preview for {url}")
+ xhs_info = await get_xhs_info(url)
+ note = xhs_info.get("note", {})
+ if not note:
+ if fallback:
+ await modify_progress(text="❌小红书解析失败, 尝试第三方Bot...", **kwargs)
+ await send_to_parsehub(client, message, text=url, **kwargs)
+ await asyncio.sleep(3)
+ else:
+ await modify_progress(text="❌小红书解析失败, 请稍候再尝试", **kwargs)
+
+ await modify_progress(text="✅解析成功, 正在处理...", **kwargs)
+ media: list[dict] = []
+ if note.get("type") == "video":
+ video_urls = [] # Extract all urls, but prefer H264
+ for vcodec in ["h264", "h265", "av1", "h266"]:
+ format_list = note.get("video", {}).get("media", {}).get("stream").get(vcodec, [])
+ for x in format_list:
+ if x.get("masterUrl"):
+ video_urls.append(x["masterUrl"])
+ if x.get("backupUrls"):
+ video_urls.extend(x.get("backupUrls", []))
+ media.append({"video": download_first_success_urls(video_urls, suffix=".mp4", **kwargs)})
+ else:
+ for img_info in note.get("imageList", []):
+ img_url = img_info.get("urlDefault", "")
+ if img_info.get("livePhoto"):
+ video_urls = []
+ for vcodec in ["h264", "h265", "av1", "h266"]:
+ format_list = img_info.get("stream", {}).get(vcodec, [])
+ for x in format_list:
+ if x.get("masterUrl"):
+ video_urls.append(x["masterUrl"])
+ if x.get("backupUrls"):
+ video_urls.extend(x.get("backupUrls", []))
+ media.append({"livephoto": download_first_success_urls(video_urls, suffix=".mp4", **kwargs)})
+ else:
+ media.append({"photo": download_file(img_url, suffix=".jpg", **kwargs)})
+
+ title = note.get("title", "")
+ author = note.get("user", {}).get("nickname", "")
+ ts = note.get("time", 0) / 1000
+
+ texts = ""
+ if author := note.get("user", {}).get("nickname", ""):
+ texts += f"🍠[{author}]({url})\n"
+ if ts := note.get("time"):
+ dt = datetime.fromtimestamp(float(ts) / 1000).astimezone(ZoneInfo(TZ))
+ texts += f"🕒{dt:%Y-%m-%d %H:%M:%S}"
+ if region := note.get("ipLocation"):
+ texts += f"📍{region}\n"
+ else:
+ texts += "\n"
+ if title := note.get("title", ""):
+ texts += f"📝**{title}**\n"
+ desc = note.get("desc", "").replace("[话题]#", "")
+ texts += desc
+ comments = get_xhs_comments(xhs_info.get("soup")) # Not implemented yet
+ await modify_progress(text=f"⏬正在下载:\n{summay_media(media)}", force_update=True, **kwargs)
+ media = await download_media(media, **kwargs)
+ sent_messages = await send2tg(client, message, texts=emojify(texts), media=media, comments=comments, **kwargs)
+ await modify_progress(del_status=True, **kwargs)
+ await save_messages(messages=sent_messages, key=db_key)
+
+
+async def get_xhs_info(url: str, retry: int = 0) -> dict:
+    """Get xiaohongshu post info.
+
+    XHS banned VPS IP, so we need to use residential proxy.
+
+    Args:
+        url (str): Full xiaohongshu post url.
+        retry (int, optional): Current recursion depth; gives up once it exceeds 3.
+
+    Returns:
+        dict: {"soup": BeautifulSoup, "note": dict} on success, {} after retries are exhausted.
+    """
+    headers = {"user-agent": UA.TELEGRAM, "referer": "https://www.xiaohongshu.com/"}
+    if retry > 3:
+        return {}
+    data = {}
+    try:
+        resp = await hx_req(url, headers=headers, cookies=None, proxy=PROXY.XHS)
+        soup = BeautifulSoup(resp.text, "html.parser")
+        data["soup"] = soup
+        # The post payload is embedded in a <script> tag as `window.__INITIAL_STATE__=<json>`.
+        script_info = next((str(x.text).removeprefix("window.__INITIAL_STATE__=") for x in soup.find_all("script") if str(x.text).startswith("window.__INITIAL_STATE__=")), "{}")
+        # The embedded JS may contain bare `undefined`, which json.loads rejects.
+        info = json.loads(script_info.replace("undefined", '""'))  # or use yaml.safe_load(script_info)
+        if not info:
+            retry += 1
+            logger.warning(f"XHS empty response, maybe need to adjust the proxy. Retrying: {retry} / 3")
+            return await get_xhs_info(url, retry=retry)
+    except Exception as e:
+        logger.error(f"XHS parsing response failed: {e}, Retrying: {retry} / 3")
+        retry += 1
+        return await get_xhs_info(url, retry=retry)
+
+    # noteDetailMap maps post-id -> wrapper; the first entry is the requested post.
+    if notes := list(info.get("note", {}).get("noteDetailMap", {}).values()):
+        data["note"] = notes[0].get("note", {})
+        return data
+    retry += 1
+    logger.error(f"Parsed info has no post, Retrying: {retry} / 3")
+    return await get_xhs_info(url, retry=retry)
+
+
+def get_xhs_comments(soup: BeautifulSoup | None) -> list[str]:
+    """Not implemented yet.
+
+    Placeholder that always returns []; kept so callers can pass comments uniformly.
+    """
+    if not soup:
+        return []
+    return []
src/preview/ytdlp.py
@@ -0,0 +1,475 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import asyncio
+import contextlib
+import re
+import threading
+import time
+import warnings
+from collections import Counter
+from pathlib import Path
+
+from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message, ReplyParameters
+from yt_dlp import YoutubeDL
+from yt_dlp.utils import DownloadError, ExtractorError, YoutubeDLError
+
+from config import API, CAPTION_LENGTH, DB, DOWNLOAD_DIR, MAX_FILE_BYTES, PROXY, TID, TOKEN, cache
+from database import get_db
+from message_utils import copy_messages_from_db, modify_progress, preprocess_media, save_messages, send2tg, telegram_uploading
+from multimedia import generate_cover
+from networking import hx_req
+from others.emoji import emojify
+from utils import readable_size, readable_time, soup_to_text, true, ts_to_dt, unicode_to_ascii
+
+
+class ProxyError(Exception):
+    """Raised when extraction hit a login wall, signalling the caller to retry with the fallback proxy."""
+
+    pass
+
+
+@cache.memoize(ttl=30)
+async def preview_ytdlp(
+ client: Client,
+ message: Message,
+ url: str = "",
+ platform: str = "",
+ *,
+ ytdlp_audio_only: bool = False,
+ ytdlp_send_video: bool = True,
+ ytdlp_send_audio: bool = False,
+ proxy: str | None = None,
+ **kwargs,
+):
+ """Preview ytdlp link in the message.
+
+ Args:
+ client (Client): The Pyrogram client.
+ message (Message): The trigger message object.
+ url (str, optional): ytdlp link.
+ platform (str, optional): Platform of the link, e.g. "bilibili", "youtube".
+ ytdlp_audio_only (bool, optional): Download audio only. Defaults to True.
+ ytdlp_send_video (bool, optional): Send video. Defaults to True.
+ ytdlp_send_audio (bool, optional): Send audio. Defaults to False.
+ proxy (str, optional): Proxy to use. Defaults to None.
+ """
+ logger.trace(f"url: {url} kwargs: {kwargs}")
+ if kwargs.get("show_progress") and "progress" not in kwargs:
+ res = await send2tg(client, message, texts=f"🔗正在解析{platform}链接\n{url}", **kwargs)
+ kwargs["progress"] = res[0]
+ db_key = url
+ if kv := await get_db(db_key):
+ logger.debug(f"{platform} preview {DB.ENGINE} cache hit for key={db_key}")
+ if await copy_messages_from_db(client, message, key=db_key, kv=kv, **kwargs):
+ return
+ await modify_progress(text=f"❌从{DB.ENGINE}缓存中转发失败, 尝试重新解析...", **kwargs)
+
+ # set download & upload options
+ if ytdlp_audio_only:
+ ytdlp_send_video = False
+ if not ytdlp_send_video:
+ ytdlp_audio_only = True
+ ytdlp_send_audio = True
+
+ ydl_opts = {
+ "paths": {"home": DOWNLOAD_DIR},
+ "cachedir": DOWNLOAD_DIR,
+ "simulate": False,
+ "skip_download": False,
+ "keepvideo": True,
+ "format": "m4a/bestaudio/best" if ytdlp_audio_only else video_selector,
+ "writethumbnail": True,
+ "trim_file_name": 60, # filesystem limit for filename is 255 bytes. UFT-8 char is 1-4 bytes.
+ "proxy": proxy,
+ "extractor_args": {"youtube": {"lang": ["zh-CN", "zh-HK", "zh-TW", "en", "en-GB"]}},
+ "ignore_no_formats_error": False,
+ "live_from_start": False,
+ "retries": 10,
+ "retry_sleep_functions": {"http": lambda _: 1}, # sleep 1 second between retries
+ "nocheckcertificate": True,
+ "source_address": "0.0.0.0", # force-ipv4 # noqa: S104
+ "outtmpl": "%(title)s.%(ext)s",
+ "noplaylist": True,
+ "color": "no_color-tty",
+ "logger": logger,
+ }
+ if kwargs.get("show_progress"):
+ loop = asyncio.get_running_loop()
+ hook = create_hook(kwargs.get("progress"), loop, detail_progress=true(kwargs.get("detail_progress")))
+ ydl_opts["progress_hooks"] = [hook]
+
+ await modify_progress(text="⏬正在下载, 请稍候...", **kwargs)
+ error_msg, info = await download_video_async(url, ydl_opts)
+ if error_msg:
+ if proxy != PROXY.YTDLP_FALLBACK and ("Sign in" in error_msg or "请登录" in error_msg):
+ raise ProxyError(error_msg)
+ cache.delete("modify_progress")
+ await modify_progress(text=error_msg, **kwargs)
+ return
+ logger.trace(info)
+ download_info = info["requested_downloads"][0]
+ if not download_info:
+ await modify_progress(text="❌下载失败, 请重试", **kwargs)
+ return
+
+ final_path = Path(download_info.get("filepath", "")) # maybe video or audio
+ if not final_path.is_file():
+ return
+ # only save messages when both video and audio are uploaded
+ save_to_db = False
+ if ytdlp_audio_only: # audio only
+ ytdlp_send_video = False
+ video_path = Path("")
+ audio_path = final_path
+ else: # video and audio
+ video_path = final_path
+ audio_info = next((x for x in download_info.get("requested_formats", []) if x["audio_ext"].lower() != "none"), {})
+ audio_format_id = audio_info.get("format_id", "")
+ audio_ext = audio_info.get("audio_ext", "")
+ audio_path = video_path.with_suffix(f".f{audio_format_id}.{audio_ext}")
+ if video_path.is_file() and audio_path.is_file():
+ save_to_db = True
+ msg = "✅下载成功:"
+ if video_path.is_file():
+ msg += f"\n🎬视频大小: {readable_size(path=video_path)}"
+ if audio_path.is_file():
+ msg += f"\n🎧音频大小: {readable_size(path=audio_path)}"
+ title = info.get("title", "")
+ msg += f"\n📝{title}"
+ logger.success(f"{msg!r}")
+ await modify_progress(text=msg.strip(), **kwargs)
+
+ author = info.get("uploader", info.get("series", info.get("extractor", "")))
+ author_url = info.get("uploader_url", "")
+ if not author_url:
+ author_id = info.get("uploader_id", "")
+ author_url = f"https://www.youtube.com/{author_id}" if platform == "youtube" else f"https://space.bilibili.com/{author_id}"
+
+ duration = round(float(info.get("duration", "0")))
+ texts = kwargs.get("send_from_user") or ""
+ platform_emoji = "🅱️" if platform == "bilibili" else "🔴"
+ # author
+ if true(kwargs.get("no_author")):
+ pass
+ elif author and author_url:
+ texts += f"\n{platform_emoji}[{author}]({author_url})"
+ elif author:
+ texts += f"\n{platform_emoji}{author}"
+
+ # date
+ create_time = ""
+ if dt := ts_to_dt(info.get("timestamp")):
+ create_time = f"{dt:%Y-%m-%d %H:%M:%S}"
+ elif info.get("upload_date"):
+ create_time = info["update_date"]
+ if not true(kwargs.get("no_date")):
+ texts += f"\n🕒{create_time}"
+
+ # title
+ if not true(kwargs.get("no_title")) and title:
+ texts += f"\n📝[{title}]({url})"
+
+ # desc
+ if not true(kwargs.get("no_description")) and (desc := info.get("description")) and (desc != "-"):
+ warnings.simplefilter("ignore", MarkupResemblesLocatorWarning)
+ soup = BeautifulSoup(desc, "html.parser")
+ texts += f"\n{soup_to_text(soup)}"
+ comments = await get_bilibili_comments(kwargs.get("bvid")) if platform == "bilibili" else await get_youtube_comments(kwargs.get("vid"))
+ for comment in comments:
+ if len(f"{texts}{comment}") < CAPTION_LENGTH:
+ texts += comment
+ texts = texts.strip()
+ sent_messages: list[Message | None] = [] # 把发送的消息都记录下来
+ target_chat = kwargs["target_chat"] if kwargs.get("target_chat") else message.chat.id
+ # split large videos into multiple parts (less than 2GB)
+ if video_path.is_file():
+ if video_path.stat().st_size < MAX_FILE_BYTES:
+ await modify_progress(text=f"🎬视频大小: {readable_size(path=video_path)}", **kwargs)
+ else:
+ await modify_progress(text="🎬视频大小超过Telegram限制(2000MB), 正在切分...", **kwargs)
+ videos = preprocess_media([{"video": video_path}])
+ if len(videos) > 1:
+ await modify_progress(text=f"🎬视频已切分为{len(videos)}份, 开始上传...", **kwargs)
+ await asyncio.sleep(1)
+
+ for idx, video in enumerate(videos):
+ caption = texts.replace("📝[", f"📝[P{idx + 1}-") if len(videos) > 1 else texts
+ await modify_progress(text=f"⏫视频上传中-P{idx + 1}: {readable_size(path=video['video'])}\n🎬{Path(video['video']).name}", force_update=True, **kwargs)
+ with contextlib.suppress(ValueError):
+ target_chat = int(target_chat)
+ sent_messages.append(
+ await client.send_video(
+ chat_id=target_chat,
+ caption=caption[:CAPTION_LENGTH],
+ reply_parameters=ReplyParameters(message_id=kwargs.get("reply_msg_id", message.id)),
+ progress=telegram_uploading,
+ progress_args=(kwargs.get("progress", False), video["video"], true(kwargs.get("detail_progress"))), # message, path, detail_progress
+ **video,
+ )
+ )
+ if audio_path.is_file():
+ target_chat = target_chat if ytdlp_send_audio else TID.CHANNEL_YTDLP_BACKUP # backup to channel if not send audio, so we can save it to db
+ await modify_progress(text=f"⏫音频上传中: {readable_size(path=audio_path)}\n🎧{audio_path.name}", force_update=True, **kwargs)
+ thumb = generate_cover(final_path) # generate cover based on final_path
+ with contextlib.suppress(ValueError):
+ target_chat = int(target_chat)
+ sent_messages.append(
+ await client.send_audio(
+ chat_id=target_chat,
+ audio=audio_path.as_posix(),
+ caption=texts[:CAPTION_LENGTH],
+ performer=author,
+ title=title,
+ duration=duration,
+ reply_parameters=ReplyParameters(message_id=kwargs.get("reply_msg_id", message.id)),
+ progress=telegram_uploading,
+ progress_args=(kwargs.get("progress", False), audio_path, true(kwargs.get("detail_progress"))), # message, path, detail_progress
+ thumb=thumb, # type: ignore
+ )
+ )
+ await modify_progress(del_status=True, **kwargs)
+ if save_to_db:
+ metadata = {}
+ for k in ["author", "author_url", "title", "url", "create_time", "duration", "description"]:
+ if v := locals().get(k):
+ metadata[k] = unicode_to_ascii(v)
+ await save_messages(messages=sent_messages, key=url, metadata=metadata)
+
+ cleanup_ytdlp(title)
+
+
+def get_ytdlp_proxy(platform: str) -> str | None:
+ if platform == "bilibili":
+ proxy = PROXY.BILIBILI
+ elif platform == "youtube":
+ proxy = PROXY.YOUTUBE
+ else:
+ proxy = None
+ logger.debug(f"YTDLP Proxy of {platform}: {proxy}")
+ return proxy
+
+
+def video_selector(ctx):
+ """Select the best format.
+
+ For the best compatibility, we choose .mp4 extension with AVC codec for video, .m4a extension for audio.
+ """
+ # formats are already sorted worst to best
+ formats = ctx.get("formats")[::-1]
+ if not formats:
+ msg = "No format found."
+ raise YoutubeDLError(msg)
+
+ logger.trace(f"Choose best format from {len(formats)} extracted formats")
+ # acodec='none' means there is no audio
+ # find compatible extension, VP9 is not supported by iOS, use AVC instead
+ all_videos = [f for f in formats if f.get("video_ext", "").lower() != "none"]
+ all_audios = [f for f in formats if f.get("audio_ext", "").lower() != "none"]
+ videos = [f for f in all_videos if f.get("video_ext", "").lower() == "mp4" and f.get("acodec", "").lower() == "none" and f.get("vcodec", "").lower().startswith("avc")]
+ audios = [f for f in all_audios if (f.get("resolution", "").lower() == "audio only" and f.get("audio_ext", "").lower() == "m4a")]
+ logger.trace(f"Found {len(videos)} video formats")
+ logger.trace(f"Found {len(audios)} video formats")
+
+ # if no compatible format found, fallback to the best format
+ if not videos:
+ videos = all_videos
+ if not audios:
+ audios = all_audios
+
+ if not videos and not audios:
+ msg = "No video and audio format found."
+ raise YoutubeDLError(msg)
+ elif not videos:
+ best_audio = audios[0]
+ logger.debug(f"Use audio format: {best_audio['format']}")
+ yield {
+ "format_id": f"{best_audio['format_id']}",
+ "ext": best_audio["ext"],
+ "requested_formats": [best_audio],
+ "protocol": f"{best_audio['protocol']}",
+ }
+ elif not audios:
+ best_video = videos[0]
+ logger.debug(f"Use video format: {best_video['format']}")
+ yield {
+ "format_id": f"{best_video['format_id']}",
+ "ext": best_video["ext"],
+ "requested_formats": [best_video],
+ "protocol": f"{best_video['protocol']}",
+ }
+ else:
+ best_video = videos[0]
+ best_audio = audios[0]
+ logger.debug(f"Use video format: {best_video['format']}")
+ logger.debug(f"Use audio format: {best_audio['format']}")
+ yield {
+ "format_id": f"{best_video['format_id']}+{best_audio['format_id']}",
+ "ext": best_video["ext"],
+ "requested_formats": [best_video, best_audio],
+ "protocol": f"{best_video['protocol']}+{best_audio['protocol']}",
+ }
+
+
+def create_hook(message: Message | None, loop, *, detail_progress: bool):
+    """Hook to show downloading progress.
+
+    Builds a yt-dlp progress hook that forwards status text to ``modify_progress`` on the
+    given asyncio ``loop`` — yt-dlp calls the hook from a worker thread, hence the
+    run_coroutine_threadsafe hand-off.
+    """
+
+    def hook(d):
+        msg = ""
+        title = d.get("info_dict", {}).get("title", "")
+        # classify current download as video (视频) vs audio (音频) from its video extension
+        ftype = "视频" if d.get("info_dict", {}).get("video_ext", "").lower() != "none" else "音频"
+        emoji = "🎬" if ftype == "视频" else "🎧"
+        status = d.get("status", "")
+        if status == "downloading":
+            downloaded_bytes = float(d.get("downloaded_bytes")) if d.get("downloaded_bytes") else 0
+            total_bytes = float(d.get("total_bytes")) if d.get("total_bytes") else 0
+            total_bytes_estimate = float(d.get("total_bytes_estimate")) if d.get("total_bytes_estimate") else 0
+            total = max(total_bytes, total_bytes_estimate)
+            eta = float(d.get("eta")) if d.get("eta") else 0  # seconds
+            speed = float(d.get("speed")) if d.get("speed") else 0  # bytes/second
+            finished = downloaded_bytes / total if total > 0 else 0
+            msg += f"⏬{ftype}下载: {readable_size(downloaded_bytes)} / {readable_size(total)} ({finished:.2%})\n"
+            msg += f"⚡️当前网速: {readable_size(speed)}/s\n"
+            msg += f"🕒剩余时长: {readable_time(eta)}\n"
+            msg += f"{emoji}{title}"
+        elif status == "finished":
+            msg = f"✅{ftype}下载完成\n{emoji}{title}"
+        elif status == "error":
+            msg = f"❌{ftype}下载失败\n{emoji}{title}"
+        # thread-safe scheduling of the progress update back on the event loop
+        asyncio.run_coroutine_threadsafe(modify_progress(message, msg.strip(), detail_progress=detail_progress), loop)
+
+    return hook
+
+
+def retry(func, max_retries=5):
+    """Decorator: retry ``func`` up to ``max_retries`` times, sleeping 1s between attempts.
+
+    yt-dlp exceptions are converted to a readable message. NOTE(review): on failure the
+    message is written into ``args[2]["error_msg"]`` — the wrapped function must take a
+    shared result dict as its third positional argument (see download_video).
+    """
+
+    def wrapper(*args, **kwargs):
+        retries = 0
+        msg = ""
+        while retries < max_retries:
+            try:
+                return func(*args, **kwargs)
+            except ExtractorError as e:
+                msg = f"ExtractorError: {str(e.orig_msg).removeprefix('ERROR: ')}"
+            except DownloadError as e:
+                msg = f"DownloadError: {str(e.msg).removeprefix('ERROR: ')}"
+                # these failures will not resolve by retrying (auth walls, deleted or geo-blocked media)
+                if any(x in msg for x in ["Sign in", "请登录", "deleted", "geo-restricted"]):
+                    retries += 1
+                    break
+            except YoutubeDLError as e:
+                msg = f"YoutubeDLError: {str(e.msg).removeprefix('ERROR: ')}"
+            except Exception as e:
+                msg = f"{type(e).__name__}: {e} (Retrying {retries}/{max_retries})"
+            retries += 1
+            time.sleep(1)
+        logger.error(f"Failed after {retries} retries: {msg}")
+        if msg:
+            # swap angle brackets — presumably to keep the message safe for Telegram rendering; TODO confirm
+            args[2]["error_msg"] = msg.replace("<", "[").replace(">", "]")
+            return args[2]
+        return {}
+
+    return wrapper
+
+
+@retry
+def download_video(url: str, ydl_opts: dict, result: dict) -> dict:
+    """Blocking yt-dlp download; stores the extracted info into ``result["info"]``.
+
+    Wrapped by @retry, which records failures into ``result["error_msg"]``.
+    """
+    with YoutubeDL(ydl_opts) as ydl:
+        info: dict = ydl.extract_info(url, download=True, process=True)  # type: ignore
+        result["info"] = info
+        return result
+
+
+async def download_video_async(url: str, ydl_opts: dict) -> tuple[str, dict]:
+ """Wrapper to run the download function in a thread.
+
+ Generated by GPT-4o.
+ """
+ # Shared dictionary to hold the results
+ result = {}
+ # Create and start the thread
+ download_thread = threading.Thread(target=download_video, args=(url, ydl_opts, result))
+ download_thread.start()
+ # Wait for the thread to finish
+ await asyncio.to_thread(download_thread.join)
+ # Return the result
+ return result.get("error_msg", ""), result.get("info", {})
+
+
+@cache.memoize(ttl=60)
+async def get_bilibili_comments(bvid: str | None) -> list[str]:
+    """Fetch Bilibili comments (plus one level of replies) formatted as quoted Markdown lines.
+
+    Returns [] when bvid is missing or on any API/parsing failure.
+    """
+    if not bvid:
+        return []
+    comments = []
+    try:
+        api = f"{API.TIKHUB_FREE}/api/bilibili/web/fetch_video_comments?bv_id={bvid}"
+        resp = await hx_req(api, check_has_kv=["data.data"], check_kv={"code": 200})
+        if resp.status_code != 200:
+            logger.warning(f"Bilibili Comments API failed: {resp}")
+            return []
+        data = resp.json()["data"]["data"].get("replies", [])
+        for idx, x in enumerate(data):
+            name = x.get("member", {}).get("uname", "匿名")
+            if cmt := x.get("content", {}).get("message"):
+                cmt = cmt.replace("\n", "\n> ")  # keep multi-line comments inside the blockquote
+                if idx == 0:
+                    comments.append("\n**> 💬**点此展开评论区**:")
+                comments.append(f"\n> 💬**{name}**: {emojify(cmt)}")
+            if replies := x.get("replies"):
+                for r in replies:
+                    name = r.get("member", {}).get("uname", "匿名")
+                    if cmt := r.get("content", {}).get("message"):
+                        cmt = cmt.replace("\n", "\n> ")
+                        comments.append(f"\n> ↪️**{name}**: {emojify(cmt)}")
+        if len(comments) > 2:
+            comments[-1] += "||"  # close the expandable blockquote
+    except Exception as e:
+        logger.error(f"Failed to get Bilibili comments: {e}")
+        return []
+    return comments
+
+
+@cache.memoize(ttl=60)
+async def get_youtube_comments(vid: str | None) -> list[str]:
+    """Fetch top-level YouTube comments via the Data API v3, formatted as quoted Markdown lines.
+
+    Returns [] when vid is missing or on any API/parsing failure.
+    """
+    if not vid:
+        return []
+    api = "https://www.googleapis.com/youtube/v3/commentThreads"
+    params = {"key": TOKEN.YOUTUBE_API_KEY, "maxResults": 100, "textFormat": "plainText", "part": "snippet", "videoId": vid}
+    comments = []
+    try:
+        resp = await hx_req(api, proxy=PROXY.YOUTUBE, params=params, check_has_kv=["items"])
+        if resp.status_code != 200:
+            logger.warning(f"YouTube Comments API failed: {resp}")
+            return []
+        data = resp.json().get("items", [])
+        for idx, x in enumerate(data):
+            name = x.get("snippet", {}).get("topLevelComment", {}).get("snippet", {}).get("authorDisplayName", "匿名")
+            name = name.removeprefix("@")
+            if cmt := x.get("snippet", {}).get("topLevelComment", {}).get("snippet", {}).get("textDisplay"):
+                cmt = cmt.replace("\n", "\n> ")  # keep multi-line comments inside the blockquote
+                if idx == 0:
+                    comments.append("\n**> 💬**点此展开评论区**:")
+                if idx == len(data) - 1:  # last cmt
+                    comments.append(f"\n> 💬**{name}**: {cmt}||")
+                else:
+                    comments.append(f"\n> 💬**{name}**: {cmt}")
+    except Exception as e:
+        logger.error(f"Failed to get YouTube comments: {e}")
+        return []
+    return comments
+
+
+def cleanup_ytdlp(title: str):
+    """Clean up ytdlp files.
+
+    Some unicode characters can't be matched with title, so we use common characters to match:
+    any file in DOWNLOAD_DIR whose (format-id-stripped) stem shares >80% of its characters
+    with ``title`` (multiset intersection) is deleted.
+    """
+    if not title:
+        return
+    logger.debug(f"Cleaning up: {title}")
+    for p in Path(DOWNLOAD_DIR).glob("*"):
+        if not p.is_file():
+            continue
+        fname = re.sub(r"(.*)\.f\d+$", r"\1", p.stem)  # remove format id ( title.f137.m4a -> title.m4a )
+        common_char = sum((Counter(fname) & Counter(title)).values())
+        if common_char / len(fname) > 0.8:  # filename overlaps more than 80%
+            logger.trace(f"Deleting ytdlp files: {p}")
+            p.unlink(missing_ok=True)
src/config.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import asyncio
+import os
+from pathlib import Path
+
+from cacheout import Cache
+
+cache = Cache(ttl=0, maxsize=2048)  # shared in-process cache; ttl=0 means no default expiry
+semaphore = asyncio.Semaphore(8)  # max 8 concurrent downloads
+
+DOWNLOAD_DIR = os.getenv("DOWNLOAD_DIR", Path(__file__).parent.joinpath("downloads").as_posix())
+TZ = os.getenv("TZ", "Asia/Shanghai")  # timezone used when rendering timestamps
+DEVICE_NAME = os.getenv("DEVICE_NAME", "BennyBot")
+TEXT_LENGTH = int(os.getenv("TEXT_LENGTH", "4096"))  # Maximum length of text message
+CAPTION_LENGTH = int(os.getenv("CAPTION_LENGTH", "1024"))  # 4096 for Premium user
+MAX_FILE_BYTES = int(os.getenv("MAX_FILE_BYTES", "2000")) * 1024 * 1024  # 4000 MB for Premium user
+ASR_MAX_DURATION = int(os.getenv("ASR_MAX_DURATION", "600"))  # presumably seconds — TODO confirm
+
+
+class ENABLE:
+ ASR = os.getenv("ENABLE_ASR", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ AUDIO = os.getenv("ENABLE_AUDIO", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ BILIBILI = os.getenv("ENABLE_BILIBILI", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ CRONTAB = os.getenv("ENABLE_CRONTAB", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ DOUYIN = os.getenv("ENABLE_DOUYIN", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ GPT = os.getenv("ENABLE_GPT", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ INSTAGRAM = os.getenv("ENABLE_INSTAGRAM", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ OCR = os.getenv("ENABLE_OCR", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ SPLITIMG = os.getenv("ENABLE_SPLITIMG", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ SUBTITLE = os.getenv("ENABLE_SUBTITLE", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ TIKTOK = os.getenv("ENABLE_TIKTOK", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ TWITTER = os.getenv("ENABLE_TWITTER", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ VIDEO_TO_AUDIO = os.getenv("ENABLE_VIDEO_TO_AUDIO", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ WEIBO = os.getenv("ENABLE_WEIBO", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ WGET = os.getenv("ENABLE_WGET", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ XHS = os.getenv("ENABLE_XHS", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ YOUTUBE = os.getenv("ENABLE_YOUTUBE", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ RAW_IMG_CONVERT = os.getenv("ENABLE_RAW_IMG_CONVERT", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ GROUPS = os.getenv("ENABLE_GROUPS", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ CHANNELS = os.getenv("ENABLE_CHANNELS", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ BOTS = os.getenv("ENABLE_BOTS", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+ USERS = os.getenv("ENABLE_USERS", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+
+
+class PREFIX:
+    """Lower-cased command prefixes that trigger each bot feature."""
+
+    MAIN = os.getenv("PREFIX_MAIN", "/benny").lower()
+    ASR = os.getenv("PREFIX_ASR", "/asr").lower()
+    AUDIO = os.getenv("PREFIX_AUDIO", "/audio").lower()
+    GPT = os.getenv("PREFIX_GPT", "/ai").lower()
+    SUBTITLE = os.getenv("PREFIX_SUBTITLE", "/subtitle").lower()
+    WGET = os.getenv("PREFIX_WGET", "/wget").lower()
+    OCR = os.getenv("PREFIX_OCR", "/ocr").lower()
+
+
+class UA:
+    """User-agent strings used to impersonate different clients when making requests."""
+
+    TELEGRAM = os.getenv("UA_TG", "TelegramBot (like TwitterBot)")
+    IPHONE = os.getenv("UA_IPHONE", "Mozilla/5.0 (iPhone; CPU iPhone OS 17_7_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.4.1 Mobile/15E148 Safari/604.1")
+    IPAD = os.getenv("UA_IPAD", "Mozilla/5.0 (iPad; CPU OS 17_7_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.4.1 Mobile/15E148 Safari/604.1")
+    MACOS = os.getenv("UA_MACOS", "Mozilla/5.0 (Macintosh; Intel Mac OS X 14_7_2) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.4.1 Safari/605.1.15")
+    CHROME = os.getenv("UA_CHROME", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36")
+
+
+class API:
+    """Base URLs / endpoints of external third-party APIs, overridable via environment variables."""
+
+    FXTWITTER = os.getenv("FXTWITTER_API", "https://api.fxtwitter.com")
+    DDINSTAGRAM = os.getenv("DDINSTAGRAM_API", "https://www.ddinstagram.com")
+    TIKHUB = os.getenv("TIKHUB", "https://api.tikhub.io")
+    TIKHUB_FREE = os.getenv("TIKHUB_FREE", "https://api.douyin.wtf")
+    TIKHUB_INSTAGRAM = os.getenv("TIKHUB_INSTAGRAM_API", "https://api.tikhub.io/api/v1/instagram/web_app/fetch_post_info_by_url?url=")
+    TIKHUB_TWITTER = os.getenv("TIKHUB_TWITTER_API", "https://api.tikhub.io/api/v1/twitter/web/fetch_post_comments?tweet_id=")
+    TIKHUB_WEIBO_VIDEO = os.getenv("TIKHUB_WEIBO_VIDEO_API", "https://api.tikhub.io/api/v1/weibo/web/fetch_short_video_data?share_text=")
+
+
+class TOKEN:
+    """Credentials loaded from environment variables (empty string when unset)."""
+
+    SESSION_STRING = os.getenv("SESSION_STRING", "")
+    TIKHUB = os.getenv("TIKHUB_TOKEN", "")
+    TENCENT_ASR_APPID = os.getenv("TENCENT_ASR_APPID", "")
+    TENCENT_ASR_SECRET_ID = os.getenv("TENCENT_ASR_SECRET_ID", "")
+    TENCENT_ASR_SECRET_KEY = os.getenv("TENCENT_ASR_SECRET_KEY", "")
+    YOUTUBE_API_KEY = os.getenv("YOUTUBE_API_KEY", "")
+
+
+class PROXY:  # format: socks5://127.0.0.1:7890
+    """Per-service proxy URLs; None/empty means connect directly."""
+
+    TELEGRAM = os.getenv("TELEGRAM_PROXY", None)  # Telegram
+    WORKERS = os.getenv("WORKERS_PROXY", "")  # https://github.com/netnr/workers
+    XHS = os.getenv("XHS_PROXY", None)  # Banned VPS IP, need residential proxy
+    TENCENT = os.getenv("TENCENT_PROXY", None)  # Banned oversea IP, need a back to China proxy
+    GPT = os.getenv("GPT_PROXY", None)
+    SUBTITLE = os.getenv("SUBTITLE_PROXY", None)
+    DOWNLOAD = os.getenv("DOWNLOAD_PROXY", None)
+    WEIBO_COOKIE = os.getenv("WEIBO_COOKIE_PROXY", None)  # Weibo visitor cookie
+    BILIBILI = os.getenv("BILIBILI_PROXY", None)
+    YOUTUBE = os.getenv("YOUTUBE_PROXY", None)
+    YTDLP_FALLBACK = os.getenv("YTDLP_FALLBACK_PROXY", None)  # last-resort proxy for yt-dlp login walls
+
+
+class COOKIE:  # See: https://github.com/easychen/CookieCloud
+    """CookieCloud credentials used to sync browser cookies into the bot."""
+
+    CLOUD_SERVER = os.getenv("COOKIE_CLOUD_SERVER", "")
+    CLOUD_KEY = os.getenv("COOKIE_CLOUD_KEY", "")
+    CLOUD_PASS = os.getenv("COOKIE_CLOUD_PASS", "")
+
+
+class GPT:
+    """Model names, timeouts (seconds, kept as strings) and provider credentials for LLM features."""
+
+    TEXT_MODEL = os.getenv("GPT_TEXT_MODEL", "gpt-4o")
+    VISION_MODEL = os.getenv("GPT_VISION_MODEL", "gpt-4o")
+    AUDIO_MODEL = os.getenv("GPT_AUDIO_MODEL", "qwen-audio-turbo-latest")
+    VIDEO_MODEL = os.getenv("GPT_VIDEO_MODEL", "glm-4v-plus")
+    TEXT_TIMEOUT = os.getenv("GPT_TEXT_TIMEOUT", "15")
+    VISION_TIMEOUT = os.getenv("GPT_VISION_TIMEOUT", "30")
+    AUDIO_TIMEOUT = os.getenv("GPT_AUDIO_TIMEOUT", "30")
+    VIDEO_TIMEOUT = os.getenv("GPT_VIDEO_TIMEOUT", "30")
+    TEMPERATURE = os.getenv("GPT_TEMPERATURE", "0.5")
+    HISTORY_CONTEXT = os.getenv("GPT_HISTORY_CONTEXT", "20")  # max number of history messages to carry
+    MEDIA_FORMAT = os.getenv("GPT_MEDIA_FORMAT", "base64")  # base64 or http
+    MEDIA_SERVER = os.getenv("GPT_MEDIA_SERVER", "https://server.com/dir")
+    OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
+    OPENAI_BASE_URL = os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1")
+    GEMINI_API_KEY = os.getenv("GEMINI_API_KEY", "")
+    GEMINI_BASE_URL = os.getenv("GEMINI_BASE_URL", "https://generativelanguage.googleapis.com/v1beta/openai")
+    HUNYUAN_API_KEY = os.getenv("HUNYUAN_API_KEY", "")
+    HUNYUAN_BASE_URL = os.getenv("HUNYUAN_BASE_URL", "https://api.hunyuan.cloud.tencent.com/v1")
+    DASHSCOPE_API_KEY = os.getenv("DASHSCOPE_API_KEY", "")
+    DASHSCOPE_BASE_URL = os.getenv("DASHSCOPE_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1")
+    GLM_API_KEY = os.getenv("GLM_API_KEY", "")
+    GLM_BASE_URL = os.getenv("GLM_BASE_URL", "https://open.bigmodel.cn/api/paas/v4")
+    ARK_API_KEY = os.getenv("ARK_API_KEY", "")
+    ARK_BASE_URL = os.getenv("ARK_BASE_URL", "https://ark.cn-beijing.volces.com/api/v3")
+    DOUBAO_TEXT_ENTRYPOINT = os.getenv("GPT_DOUBAO_TEXT_ENTRYPOINT", "")
+    DOUBAO_VISION_ENTRYPOINT = os.getenv("GPT_DOUBAO_VISION_ENTRYPOINT", "")
+    OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY", "")
+    OPENROUTER_BASE_URL = os.getenv("OPENROUTER_BASE_URL", "https://openrouter.ai/api/v1")
+
+
+class TID:
+    """Telegram chat/channel IDs used as message destinations ("me" = Saved Messages)."""
+
+    ADMIN = os.getenv("TID_ADMIN", "me")
+    ADMIN_GROUP = os.getenv("TID_ADMIN_GROUP", "me")
+    CHANNEL_YTDLP_BACKUP = os.getenv("TID_CHANNEL_YTDLP_BACKUP", "me")  # channel where yt-dlp audio is backed up
+    GROUP67373 = os.getenv("TID_GROUP67373", "")
+
+
+class DB:
+    """Persistence settings; ENGINE selects the active backend (Cloudflare KV or R2)."""
+
+    ENGINE = os.getenv("DB_ENGINE", "Cloudflare-R2")
+    CF_KV_ENABLED = os.getenv("CF_KV_ENABLED", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+    CF_ACCOUNT_ID = os.getenv("CF_ACCOUNT_ID", "")
+    CF_API_TOKEN = os.getenv("CF_API_TOKEN", "")
+    CF_KV_NAMESPACE_ID = os.getenv("CF_KV_NAMESPACE_ID", "")
+    CF_R2_ENABLED = os.getenv("CF_R2_ENABLED", "1").lower() in ["1", "y", "yes", "t", "true", "on"]
+    CF_R2_BUCKET_NAME = os.getenv("CF_R2_BUCKET_NAME", "bennybot")
+    CF_R2_ACCESS_KEY_ID = os.getenv("CF_R2_ACCESS_KEY_ID", "")
+    CF_R2_SECRET_ACCESS_KEY = os.getenv("CF_R2_SECRET_ACCESS_KEY", "")
src/database.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""Currently, Memory cache, Cloudflare KV and Cloudflare R2 are supported.
+
+Note: Memory Cache is always enabled.
+"""
+
+import json
+from urllib.parse import quote_plus, unquote_plus
+
+from aioboto3 import Session
+from botocore.exceptions import ClientError
+from httpx import AsyncClient, AsyncHTTPTransport
+from loguru import logger
+
+from config import DB, cache
+from utils import plain_url, stringfy
+
+
+async def get_db(key: str) -> dict:
+    """Get KV.
+
+    Checks the in-memory cache first, then falls back to the configured
+    engine (Cloudflare KV or Cloudflare R2). Returns {} for an empty key
+    or when nothing is found.
+
+    Args:
+        key (str): Raw key; it is quote_plus-encoded before any lookup.
+
+    Returns:
+        dict: The stored value, or {} on miss/disabled backend.
+    """
+    if not key:
+        return {}
+    key = quote_plus(key)
+    # NOTE(review): get_cf_kv() applies quote_plus() again to the already-encoded
+    # key, so the CF-KV lookup uses a double-encoded key — confirm this matches
+    # how keys are written by set_db()/set_cf_kv() (which follow the same path).
+    if kv := get_memory_kv(key):
+        return kv
+    if DB.ENGINE == "Cloudflare-KV":
+        return await get_cf_kv(key)
+    if DB.ENGINE == "Cloudflare-R2":
+        return await get_cf_r2(key)
+    return {}
+
+
+def get_memory_kv(key: str) -> dict:
+    """Get from memory cache.
+
+    Returns {} on a cache miss (or when the cached value is falsy).
+    """
+    if kv := cache.get(key):
+        logger.trace(f"GET KV from memory cache for {key}: {kv}")
+        return kv
+    return {}
+
+
+async def get_cf_kv(key: str, *, log_success: bool = True) -> dict:
+    """Get from Cloudflare KV.
+
+    Args:
+        key (str): Storage key; quote_plus-encoded before being placed in the API URL.
+        log_success (bool): Emit a success log line when a value is found.
+
+    Returns:
+        dict: The decoded JSON value, or {} on 404 / error / disabled KV.
+    """
+    if not DB.CF_KV_ENABLED:
+        logger.warning("SKIP GET CF-KV: Cloudflare KV disabled")
+        return {}
+    key = quote_plus(key)
+    api = f"https://api.cloudflare.com/client/v4/accounts/{DB.CF_ACCOUNT_ID}/storage/kv/namespaces/{DB.CF_KV_NAMESPACE_ID}/values/{key}"
+    headers = {"authorization": f"Bearer {DB.CF_API_TOKEN}", "content-type": "application/json"}
+    async with AsyncClient(http2=True, follow_redirects=True, transport=AsyncHTTPTransport(retries=3, http2=True)) as hx:
+        try:
+            resp = await hx.get(api, headers=headers, timeout=30)
+            # 404 is an expected miss, not an error — return an empty dict quietly.
+            if resp.status_code == 404:
+                logger.trace(f"404 Not Found for CF-KV key={key}")
+                return {}
+            resp.raise_for_status()
+            if data := resp.json():
+                if log_success:
+                    logger.success(f"GET CF-KV for {key}: {data}")
+                return data
+        except Exception as e:
+            logger.warning(f"GET CF-KV failed for {key}: {e}")
+    return {}
+
+
+async def list_cf_r2(prefix: str = "", continuation_token: str | None = None) -> dict:
+    """List objects in the Cloudflare R2 bucket (up to 1000 per page).
+
+    Args:
+        prefix (str): Only list keys starting with this prefix.
+        continuation_token (str | None): Pagination token from a previous ListObjectsV2 response.
+
+    Returns:
+        dict: Raw S3 ListObjectsV2 response, or {} on error / disabled R2.
+    """
+    if not DB.CF_R2_ENABLED:
+        logger.warning("SKIP LIST CF-R2: Cloudflare R2 disabled")
+        return {}
+    async with Session().client(
+        service_name="s3",
+        endpoint_url=f"https://{DB.CF_ACCOUNT_ID}.r2.cloudflarestorage.com",
+        aws_access_key_id=DB.CF_R2_ACCESS_KEY_ID,
+        aws_secret_access_key=DB.CF_R2_SECRET_ACCESS_KEY,
+        region_name="auto",
+    ) as s3:  # type: ignore
+        payload = {"Bucket": DB.CF_R2_BUCKET_NAME, "MaxKeys": 1000}
+        if continuation_token:
+            payload["ContinuationToken"] = continuation_token
+        if prefix:
+            payload["Prefix"] = prefix
+        try:
+            return await s3.list_objects_v2(**payload)
+        except Exception as e:
+            logger.warning(f"List CF-R2 failed for {prefix=}: {e}")
+            return {}
+
+
+async def get_cf_r2(key: str) -> dict:
+    """Get from Cloudflare R2.
+
+    The key is unquoted and stripped of its http(s):// prefix via plain_url()
+    before the lookup, mirroring how set_cf_r2() writes keys.
+
+    Returns:
+        dict: The JSON-decoded object body, or {} on 404 / error / disabled R2.
+    """
+    if not DB.CF_R2_ENABLED:
+        logger.warning("SKIP GET CF-R2: Cloudflare R2 disabled")
+        return {}
+
+    key = plain_url(unquote_plus(key))  # remove http(s):// prefix
+    async with Session().client(
+        service_name="s3",
+        endpoint_url=f"https://{DB.CF_ACCOUNT_ID}.r2.cloudflarestorage.com",
+        aws_access_key_id=DB.CF_R2_ACCESS_KEY_ID,
+        aws_secret_access_key=DB.CF_R2_SECRET_ACCESS_KEY,
+        region_name="auto",
+    ) as s3:  # type: ignore
+        try:
+            obj = await s3.get_object(Bucket=DB.CF_R2_BUCKET_NAME, Key=key)
+            if obj.get("Body"):
+                data = await obj["Body"].read()
+                data = json.loads(data)
+                logger.success(f"GET CF-R2 for {key}: {data}")
+                return data
+        except ClientError as e:
+            # A 404 means "key not present" — only warn on other client errors.
+            if e.response["Error"]["Code"] != "404":
+                logger.warning(f"GET CF-R2 failed for {key}: {e}")
+        except Exception as e:
+            logger.warning(f"GET CF-R2 failed for {key}: {e}")
+    return {}
+
+
+async def set_db(key: str, data: dict | list, ttl: int = 86400, metadata: dict | None = None) -> bool:
+    """Set KV.
+
+    Writes to the configured remote engine; the in-memory cache is only updated
+    after the remote write succeeds.
+
+    Args:
+        key (str): Raw key; quote_plus-encoded before any write.
+        data (dict | list): JSON-serializable value to store.
+        ttl (int): TTL in seconds for the in-memory cache only (CF-KV gets no expiration).
+        metadata (dict | None): Only used by the Cloudflare R2 engine.
+
+    Returns:
+        bool: True when the remote write succeeded.
+    """
+    key = quote_plus(key)
+    success = False
+    if DB.ENGINE == "Cloudflare-KV":
+        success = await set_cf_kv(key, data, ttl=None)  # no expiration for CF-KV
+    if DB.ENGINE == "Cloudflare-R2":
+        success = await set_cf_r2(key, data, metadata)
+    if success:
+        set_memory_kv(key, data, ttl)
+    return success
+
+
+def set_memory_kv(key: str, data: dict | list | str, ttl: int = 86400) -> None:
+    """Set to memory cache with a TTL (seconds, default one day)."""
+    cache.set(key, data, ttl=ttl)
+    logger.trace(f"SET KV to memory cache for {key}: {data}")
+
+
+async def set_cf_kv(key: str, data: dict | list | str, ttl: int | None = None, *, skip_in_memory: bool = True, log_success: bool = True) -> bool:
+    """Set to Cloudflare KV.
+
+    If `skip_in_memory` is True, it will skip setting to CF-KV if the key is already in memory cache.
+
+    Args:
+        key (str): Storage key; quote_plus-encoded before being placed in the API URL.
+        data: JSON-serializable value sent as the PUT body.
+        ttl (int | None): If set, appended to the API call as `expiration_ttl` (seconds).
+        skip_in_memory (bool): Skip the remote write when the key is already cached locally.
+        log_success (bool): Emit a success log line on a successful write.
+
+    Returns:
+        bool: True on success or skip; False on failure.
+    """
+    # NOTE(review): the memory-cache check below uses `key` as passed in, while the
+    # API URL uses the quote_plus-encoded form; set_db() passes an already-encoded
+    # key, so both forms match on that path — confirm for direct callers.
+    if skip_in_memory and cache.get(key):
+        logger.trace(f"SKIP SET CF-KV: key is already in memory cache for {key}: {cache.get(key)}")
+        return True
+    if not DB.CF_KV_ENABLED:
+        logger.warning("SKIP SET CF-KV: Cloudflare KV disabled")
+        return True
+    key = quote_plus(key)
+    api = f"https://api.cloudflare.com/client/v4/accounts/{DB.CF_ACCOUNT_ID}/storage/kv/namespaces/{DB.CF_KV_NAMESPACE_ID}/values/{key}"
+    if ttl is not None:
+        api = f"{api}?expiration_ttl={ttl}"
+    headers = {"authorization": f"Bearer {DB.CF_API_TOKEN}", "content-type": "*/*"}
+    async with AsyncClient(http2=True, follow_redirects=True, transport=AsyncHTTPTransport(retries=3, http2=True)) as hx:
+        try:
+            resp = await hx.put(api, headers=headers, json=data, timeout=30)
+            resp.raise_for_status()
+        except Exception as e:
+            logger.warning(f"Failed to SET CF-KV for key={key}: {e}")
+            return False
+    if resp.json().get("success"):
+        if log_success:
+            logger.success(f"Successfully SET CF-KV for key={key}: {data}")
+        return True
+    return False
+
+
+async def set_cf_r2(key: str, data: dict | list | str | None = None, metadata: dict | None = None, *, skip_in_memory: bool = True) -> bool:
+    """Set to Cloudflare R2 via boto3.
+
+    We do not put data to R2, just use metadata to store data.
+
+    If `skip_in_memory` is True, it will skip setting to CF-R2 if the key is already in memory cache.
+
+    Args:
+        key (str): Storage key; unquoted and stripped of http(s):// via plain_url().
+        data: Optional JSON-serializable object body (despite the note above, a body IS sent when given).
+        metadata (dict | None): Stringified and stored as S3 object metadata.
+        skip_in_memory (bool): Skip the remote write when the key is already cached locally.
+
+    Returns:
+        bool: True on success or skip; False on failure.
+    """
+    if skip_in_memory and cache.get(key):
+        logger.trace(f"SKIP SET CF-R2: key is already in memory cache for {key}: {cache.get(key)}")
+        return True
+    if not DB.CF_R2_ENABLED:
+        logger.warning("SKIP SET CF-R2: Cloudflare R2 disabled")
+        return True
+    key = plain_url(unquote_plus(key))  # remove http(s):// prefix
+    payload = {
+        "CacheControl": "no-cache",
+        "Bucket": DB.CF_R2_BUCKET_NAME,
+        "Key": key,
+        "ContentType": "application/json",
+    }
+    if data:
+        payload |= {"Body": json.dumps(data).encode("utf-8")}
+    if metadata:
+        payload |= {"Metadata": stringfy(metadata)}
+    async with Session().client(
+        service_name="s3",
+        endpoint_url=f"https://{DB.CF_ACCOUNT_ID}.r2.cloudflarestorage.com",
+        aws_access_key_id=DB.CF_R2_ACCESS_KEY_ID,
+        aws_secret_access_key=DB.CF_R2_SECRET_ACCESS_KEY,
+        region_name="auto",
+    ) as s3:  # type: ignore
+        try:
+            await s3.put_object(**payload)
+            logger.success(f"Successfully SET CF-R2 for {key}: {data=}, {metadata=}")
+        except Exception as e:
+            logger.warning(f"SET CF-R2 failed for {key}: {e}")
+            return False
+    return True
+
+
+async def del_db(key: str):
+    """Delete KV from the memory cache and the configured remote engine."""
+    del_memory_kv(key)
+    if DB.ENGINE == "Cloudflare-KV":
+        await del_cf_kv(key)
+    if DB.ENGINE == "Cloudflare-R2":
+        await del_cf_r2(key)
+
+
+def del_memory_kv(key: str):
+    """Delete from memory cache (key is quote_plus-encoded first, matching how set_db stores it)."""
+    key = quote_plus(key)
+    if cache.get(key):
+        cache.delete(key)
+
+
+async def del_cf_kv(key: str):
+ """Delete from Cloudflare KV."""
+ key = quote_plus(key)
+ if not DB.CF_KV_ENABLED:
+ logger.warning("SKIP SET CF-KV: Cloudflare KV disabled")
+ return
+ api = f"https://api.cloudflare.com/client/v4/accounts/{DB.CF_ACCOUNT_ID}/storage/kv/namespaces/{DB.CF_KV_NAMESPACE_ID}/values/{key}"
+ headers = {"authorization": f"Bearer {DB.CF_API_TOKEN}", "content-type": "application/json"}
+ async with AsyncClient(http2=True, follow_redirects=True, transport=AsyncHTTPTransport(retries=3, http2=True)) as hx:
+ try:
+ resp = await hx.delete(api, headers=headers, timeout=30)
+ resp.raise_for_status()
+ except Exception as e:
+ logger.warning(f"DEL CF-KV failed for key={key}: {e}")
+ return
+ if resp.json().get("success"):
+ logger.success(f"DEL CF-KV for key={key}")
+ return
+
+
+async def del_cf_r2(key: str):
+ """Delete from Cloudflare R2."""
+ if not DB.CF_R2_ENABLED:
+ logger.warning("SKIP SET CF-R2: Cloudflare R2 disabled")
+ return
+ key = plain_url(unquote_plus(key)) # remove http(s):// prefix
+ async with Session().client(
+ service_name="s3",
+ endpoint_url=f"https://{DB.CF_ACCOUNT_ID}.r2.cloudflarestorage.com",
+ aws_access_key_id=DB.CF_R2_ACCESS_KEY_ID,
+ aws_secret_access_key=DB.CF_R2_SECRET_ACCESS_KEY,
+ region_name="auto",
+ ) as s3: # type: ignore
+ try:
+ await s3.delete_object(Bucket=DB.CF_R2_BUCKET_NAME, Key=key)
+ logger.success(f"DEL CF-R2 for key={key}")
+ except Exception as e:
+ logger.warning(f"DEL CF-R2 failed for key={key}: {e}")
+ return
+ return
+
+
+if __name__ == "__main__":
+    import asyncio
+
+    # Ad-hoc manual smoke test: first a metadata-only write, then a body-carrying write.
+    asyncio.run(set_cf_r2("test2", metadata={"finished": "1"}))
+    asyncio.run(set_cf_r2("test2", data={"finished": "1"}))
src/handler.py
@@ -0,0 +1,208 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.types import Message
+
+from asr.voice_recognition import voice_to_text
+from bridge.miaomiao import ocr_to_miaomiao
+from config import ENABLE, PREFIX, PROXY, cache
+from message_utils import equal_prefix, parse_msg, send2tg, startswith_prefix
+from networking import flatten_rediercts, match_social_media_link
+from others.download_external import download_url_in_message
+from others.extract_audio import extract_audio_file
+from others.gpt import gpt_response
+from others.subtitle import get_subtitle
+from preview.douyin import preview_douyin
+from preview.instagram import preview_instagram
+from preview.twitter import preview_twitter
+from preview.weibo import preview_weibo
+from preview.xiaohongshu import preview_xhs
+from preview.ytdlp import ProxyError, get_ytdlp_proxy, preview_ytdlp
+
+
+@cache.memoize(ttl=60)
+# NOTE(review): memoize on an async function caches the coroutine/result per args — confirm the cache backend supports awaitables.
+async def handle_utilities(
+    client: Client,
+    message: Message,
+    target_chat: int | str | None = None,
+    reply_msg_id: int = 0,
+    *,
+    asr: bool = True,
+    audio: bool = True,
+    ai: bool = True,
+    subtitle: bool = True,
+    wget: bool = True,
+    ocr: bool = True,
+    show_progress: bool = True,
+    detail_progress: bool = False,
+    **kwargs,
+):
+    """Call utility functions to handle the message.
+
+    Args:
+        client (Client): The Pyrogram client.
+        message (Message): The trigger message object.
+        target_chat (int | str, optional): Send result to this telegram target chat. If not set, send to the trigger message's chat.
+        reply_msg_id (int, optional): If set to integer > 0, the result is sent as a reply message to this message_id.
+            If set to 0, reply to the trigger message itself.
+            If set to -1, do not send as a reply message.
+        asr (bool, optional): Enable ASR. Defaults to True.
+        audio (bool, optional): Enable Video -> Audio. Defaults to True.
+        ai (bool, optional): Enable GPT. Defaults to True.
+        subtitle (bool, optional): Enable YouTube subtitle. Defaults to True.
+        wget (bool, optional): Enable WGET. Defaults to True.
+        ocr (bool, optional): Enable OCR. Defaults to True.
+        show_progress (bool, optional): Show a progress message on Telegram. Defaults to True.
+        detail_progress (bool, optional): Show detailed progress (Only if show_progress is set to True). Defaults to False.
+    """
+    kwargs |= {"target_chat": target_chat, "reply_msg_id": reply_msg_id, "show_progress": show_progress, "detail_progress": detail_progress}
+    if asr:
+        await voice_to_text(client, message, **kwargs)  # /asr
+    if audio:
+        await extract_audio_file(client, message, **kwargs)  # /audio
+    if ai:
+        await gpt_response(client, message, **kwargs)  # /ai
+    if subtitle:
+        await get_subtitle(client, message, **kwargs)  # /subtitle
+    if wget:
+        await download_url_in_message(client, message, **kwargs)  # /wget
+    if ocr:
+        await ocr_to_miaomiao(client, message)  # /ocr
+
+
+@cache.memoize(ttl=60)
+async def handle_social_media(
+    client: Client,
+    message: Message,
+    target_chat: int | str | None = None,
+    reply_msg_id: int = 0,
+    *,
+    need_prefix: bool = True,
+    extra_prefix: list[str] | None = None,
+    ignore_prefix: list[str] | None = None,
+    prepend_sender_user: bool = False,
+    douyin: bool = True,
+    tiktok: bool = True,
+    instagram: bool = True,
+    twitter: bool = True,
+    weibo: bool = True,
+    xhs: bool = True,
+    bilibili: bool = True,
+    youtube: bool = True,
+    show_progress: bool = True,
+    detail_progress: bool = False,
+    **kwargs,
+):
+    """Preview social media link in the message.
+
+    Args:
+        client (Client): The Pyrogram client.
+        message (Message): The trigger message object.
+        target_chat (int | str, optional): Send result to this telegram target chat. If not set, send to the trigger message's chat.
+        reply_msg_id (int, optional): If set to integer > 0, the result is sent as a reply message to this message_id.
+            If set to 0, reply to the trigger message itself.
+            If set to -1, do not send as a reply message.
+        need_prefix (bool, optional): Need to start with PREFIX to call this function. Defaults to True.
+        extra_prefix (list[str], optional): Extra prefix to call this function. Defaults to None.
+        ignore_prefix (list[str], optional): Ignore prefix to call this function. Defaults to None.
+        prepend_sender_user (bool, optional): Prepend the sender's username to the message. Defaults to False.
+        show_progress (bool, optional): Show a progress message on Telegram. Defaults to True.
+        detail_progress (bool, optional): Show detailed progress (Only if show_progress is set to True). Defaults to False.
+    """
+    kwargs |= {"target_chat": target_chat, "reply_msg_id": reply_msg_id, "show_progress": show_progress, "detail_progress": detail_progress}
+    info = parse_msg(message)
+    if need_prefix and not startswith_prefix(message, extra_prefix=extra_prefix, ignore_prefix=ignore_prefix):
+        return
+    # send docs if message only contains prefix command, without reply
+    if equal_prefix(message, extra_prefix=extra_prefix, ignore_prefix=ignore_prefix) and not message.reply_to_message:
+        help_msg = get_social_media_help(extra_prefix=extra_prefix, ignore_prefix=ignore_prefix)
+        await send2tg(client, message, texts=help_msg, **kwargs)
+        return
+
+    # use /PREFIX to reply a message, treat the reply_msg as the trigger to preview social media link
+    if equal_prefix(message, extra_prefix=extra_prefix, ignore_prefix=ignore_prefix) and message.reply_to_message:
+        message = message.reply_to_message
+
+    warn_msg = None
+    if need_prefix and startswith_prefix(message):
+        warn_msg = await send2tg(client, message, texts="⚠️本会话中可直接发送链接, 无需添加命令前缀\n⚠️No need to add command prefix in this chat.", **kwargs)
+        warn_msg = warn_msg[0]
+
+    # add send_from_user.
+    if prepend_sender_user:
+        # Caution: this format should be consistent with `save_messages` function in `message_utils.py`
+        kwargs["send_from_user"] = f"👤[@{info['full_name']}](tg://user?id={info['uid']})//"
+    try:
+        texts = message.text or message.caption or ""
+        texts = await flatten_rediercts(texts)
+        matched = await match_social_media_link(texts)  # match "platform" and "url" (and other info)
+        kwargs |= matched
+
+        if douyin and matched["platform"] == "douyin" and ENABLE.DOUYIN:
+            await preview_douyin(client, message, **kwargs)
+        # TikTok reuses the douyin previewer — presumably the same backend handles both; confirm.
+        if tiktok and matched["platform"] == "tiktok" and ENABLE.TIKTOK:
+            await preview_douyin(client, message, **kwargs)
+        if instagram and matched["platform"] == "instagram" and ENABLE.INSTAGRAM:
+            await preview_instagram(client, message, **kwargs)
+        if twitter and matched["platform"] == "twitter" and ENABLE.TWITTER:
+            await preview_twitter(client, message, **kwargs)
+        if weibo and matched["platform"] == "weibo" and ENABLE.WEIBO:
+            await preview_weibo(client, message, **kwargs)
+        if xhs and matched["platform"] == "xiaohongshu" and ENABLE.XHS:
+            await preview_xhs(client, message, **kwargs)
+        try:
+            if bilibili and matched["platform"] == "bilibili" and ENABLE.BILIBILI:
+                await preview_ytdlp(client, message, proxy=get_ytdlp_proxy("bilibili"), **kwargs)
+            if youtube and matched["platform"] == "youtube" and ENABLE.YOUTUBE:
+                await preview_ytdlp(client, message, proxy=get_ytdlp_proxy("youtube"), **kwargs)
+        except ProxyError:
+            logger.error(f"🚫{matched['platform']}代理错误")
+            if PROXY.YTDLP_FALLBACK:
+                logger.warning(f"🔄使用备用代理{PROXY.YTDLP_FALLBACK}")
+                await preview_ytdlp(client, message, proxy=PROXY.YTDLP_FALLBACK, **kwargs)
+        if warn_msg:
+            await warn_msg.delete()
+
+    except Exception as e:
+        logger.exception(e)
+
+
+def get_social_media_help(extra_prefix: list[str] | None = None, ignore_prefix: list[str] | None = None):
+    """Get the help message for social media preview.
+
+    Builds a Markdown help text listing the effective command prefixes and the
+    features currently enabled via the ENABLE config flags.
+
+    Args:
+        extra_prefix (list[str], optional): Extra prefixes to advertise alongside PREFIX.MAIN.
+        ignore_prefix (list[str], optional): Prefixes to exclude from the advertised set.
+
+    Returns:
+        str: The assembled help message.
+    """
+    extra_prefix = extra_prefix or []
+    ignore_prefix = ignore_prefix or []
+    # Effective prefix set: main + extras, minus the ignored ones (order is unspecified).
+    prefixes = {PREFIX.MAIN, *extra_prefix} - set(ignore_prefix)
+    msg = "🔗**链接解析**"
+    if prefixes:
+        msg += f"\n🔗命令前缀: {', '.join(prefixes)}"
+    if ENABLE.YOUTUBE:
+        msg += "\n✅油管"
+    if ENABLE.BILIBILI:
+        msg += "\n✅哔哩哔哩"
+    if ENABLE.TWITTER:
+        msg += "\n✅推特"
+    if ENABLE.WEIBO:
+        msg += "\n✅微博"
+    if ENABLE.XHS:
+        msg += "\n✅小红书"
+    if ENABLE.DOUYIN:
+        msg += "\n✅抖音"
+    if ENABLE.TIKTOK:
+        msg += "\n✅TikTok"
+    if ENABLE.INSTAGRAM:
+        msg += "\n✅Instagram"
+    if ENABLE.ASR:
+        msg += f"\n🗣**语音转文字**: `{PREFIX.ASR}` 回复语音消息"
+    if ENABLE.AUDIO:
+        msg += f"\n🎧**视频转音频**: `{PREFIX.AUDIO}` 回复视频消息"
+    if ENABLE.GPT:
+        msg += f"\n🤖**GPT对话**: `{PREFIX.GPT}` + 提示词"
+    if ENABLE.SUBTITLE:
+        msg += f"\n📃**提取字幕**: `{PREFIX.SUBTITLE}` + 油管链接 (或回复油管链接)"
+    if ENABLE.WGET:
+        msg += f"\n⏬**下载文件**: `{PREFIX.WGET}` + URL"
+    if ENABLE.OCR:
+        msg += f"\n🔁**图片转文字**: `{PREFIX.OCR}` 回复图片消息"
+    return msg
src/main.py
@@ -0,0 +1,140 @@
+#!/venv/bin/python
+# -*- coding: utf-8 -*-
+import argparse
+import asyncio
+import logging
+import os
+import platform
+import sys
+from urllib.parse import urlparse
+
+from apscheduler.schedulers.asyncio import AsyncIOScheduler
+from loguru import logger
+from pyrogram import filters
+from pyrogram.client import Client
+from pyrogram.sync import idle
+from pyrogram.types import LinkPreviewOptions, Message
+
+from bridge.miaomiao import forward_results_from_miaomiao
+from bridge.parsehub import forward_results_from_parsehub
+from config import DEVICE_NAME, ENABLE, PROXY, TID, TOKEN, cache
+from handler import handle_social_media, handle_utilities
+from message_utils import parse_msg
+from others.raw_img_file import convert_raw_img_file
+from utils import cleanup_old_files
+
+# ruff: noqa: RUF001
+
+
+async def main():
+    """Build the Pyrogram client, register message handlers, start the scheduler, and run until idle.
+
+    Relies on module-level `session_string` and `proxy` prepared in the __main__ block.
+    """
+    app = Client(
+        "bot",
+        session_string=session_string,
+        in_memory=True,
+        proxy=proxy,
+        device_model=DEVICE_NAME,  # A friendly name can be viewed in "Active Sessions" in Telegram settings
+        app_version=f"{Client.APP_VERSION}, Python {platform.python_version()}",
+        skip_updates=False,  # handle messages while client is offline
+        fetch_replies=-1,  # fetch all replies
+        link_preview_options=LinkPreviewOptions(is_disabled=True),
+        max_concurrent_transmissions=2,
+        max_business_user_connection_cache_size=100,  # reduce memory usage
+        max_message_cache_size=100,  # reduce memory usage
+    )
+
+    @app.on_message(filters.group)
+    async def groups(client: Client, message: Message):
+        # Group chats: special-cased chat list (TID.GROUP67373) gets terser progress
+        # and sender attribution; every other group gets detailed progress.
+        if not ENABLE.GROUPS:
+            return
+        parse_msg(message)
+        if TID.GROUP67373 and message.chat.id in [int(x.strip()) for x in TID.GROUP67373.split(",")]:
+            await handle_utilities(client, message, detail_progress=False)
+            # NOTE(review): "!dl" and "!下载" appear twice in this list — harmless (any()-style prefix checks) but redundant.
+            await handle_social_media(client, message, extra_prefix=["/dl", "!dl", "!dl", "!下载", "!下载"], ignore_prefix=["/dl4dw"], prepend_sender_user=True)
+        else:
+            await handle_utilities(client, message, detail_progress=True)
+            await handle_social_media(client, message, extra_prefix=["/dl", "!dl", "!dl"], detail_progress=True)
+
+    @app.on_message(filters.channel)
+    async def channels(client: Client, message: Message):
+        # Channel posts: utilities + social-media preview with detailed progress.
+        if not ENABLE.CHANNELS:
+            return
+
+        parse_msg(message)
+        await handle_utilities(client, message, detail_progress=True)
+        await handle_social_media(client, message, extra_prefix=["/dl", "!dl", "!dl"], detail_progress=True)
+
+    @app.on_message(filters.bot)
+    async def bots(client: Client, message: Message):
+        # Messages from bots: forward bridge results first, then the normal pipeline.
+        if not ENABLE.BOTS:
+            return
+        parse_msg(message, verbose=True)
+        await forward_results_from_parsehub(client, message)
+        await forward_results_from_miaomiao(client, message)
+        await handle_utilities(client, message, detail_progress=True)
+        await handle_social_media(client, message, extra_prefix=["/dl", "!dl", "!dl"], detail_progress=True)
+
+    # filters.private = {user chats + bot chats}
+    # so the private handler should be placed after the bot handler
+    @app.on_message(filters.private)
+    async def private(client: Client, message: Message):
+        # Private user chats only (chat.type re-checked because filters.private includes bots).
+        if not ENABLE.USERS or message.chat.type.name != "PRIVATE":
+            return
+        parse_msg(message, verbose=True)
+        await convert_raw_img_file(client, message, show_progress=True, detail_progress=True)
+        await handle_utilities(client, message, detail_progress=True)
+        # No prefix required in private chats.
+        await handle_social_media(client, message, need_prefix=False, detail_progress=True)
+
+    if ENABLE.CRONTAB:
+        scheduler = AsyncIOScheduler()
+        scheduler.add_job(scheduling, "interval", args=[app], seconds=60)  # run crontab jobs every 60 seconds
+        logging.getLogger("apscheduler.scheduler").setLevel(logging.ERROR)
+        scheduler.start()
+
+    await app.start()
+    await idle()
+    await app.stop()
+
+
+async def scheduling(client: Client):  # noqa: ARG001
+    """Periodic housekeeping, invoked every 60s by APScheduler: evict expired cache entries and clean old files.
+
+    The `client` parameter is unused for now but kept so custom crontab jobs
+    (see the commented examples below) can send messages.
+    """
+    cache.evict()  # delete expired cache
+    cleanup_old_files()
+
+    # custom crontab jobs
+    # now = nowdt()
+    # if now.minute == 0:
+    #     await client.send_message(TID.ADMIN_GROUP, "I'm still alive.")
+    # if now.hour == 8 and now.minute == 0:
+    #     weather = await get_weather(location="shanghai")
+    #     await client.send_message(TID.ADMIN, weather)
+
+
+if __name__ == "__main__":
+    # CLI flags override environment-based configuration (TOKEN.SESSION_STRING, PROXY.TELEGRAM, LOG_LEVEL).
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--log-level", type=str, help="Log level")
+    parser.add_argument("--session-str", type=str, help="Telegram SESSION STRING")
+    parser.add_argument("--proxy", type=str, help="Telegram proxy (e.g. socks5://127.0.0.1:7890)")
+    args = parser.parse_args()
+
+    logger.remove()  # Remove default handler.
+    logger.add(
+        sys.stderr,
+        level=args.log_level.upper() if args.log_level else os.getenv("LOG_LEVEL", "TRACE").upper(),
+        colorize=True,
+        backtrace=True,
+        diagnose=True,
+        format="<green>{time:YYYY-MM-DD HH:mm:ss}</green>| <level>{level: <7}</level> |<cyan>{name: <12}</cyan>:<cyan>{function: ^20}</cyan>:<cyan>{line: >4}</cyan> - <level>{message}</level>",
+    )
+    # settings
+    session_string = args.session_str if args.session_str else TOKEN.SESSION_STRING
+    if not session_string:
+        logger.error("No session string, you should run python scripts/auth.py first")
+        # os._exit skips cleanup handlers; acceptable here since nothing has started yet.
+        os._exit(1)
+
+    if args.proxy or PROXY.TELEGRAM:
+        info = urlparse(args.proxy) if args.proxy else urlparse(PROXY.TELEGRAM)
+        proxy = {"scheme": info.scheme, "hostname": info.hostname, "port": info.port}
+        logger.warning(f"Using proxy: {proxy}")
+    else:
+        proxy = {}
+
+    asyncio.run(main())
src/message_utils.py
@@ -0,0 +1,703 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import asyncio
+import contextlib
+import json
+import re
+from pathlib import Path
+
+from loguru import logger
+from pyrogram.client import Client
+from pyrogram.enums import MessageEntityType
+from pyrogram.errors import Flood
+from pyrogram.types import InputMediaPhoto, InputMediaVideo, Message, ReplyParameters
+
+from config import CAPTION_LENGTH, DB, PREFIX, TEXT_LENGTH, cache
+from database import del_db, get_db, set_db
+from multimedia import fix_video_rotation, generate_cover, is_valid_video, parse_media_info, split_large_video, split_long_img, validate_img
+from utils import readable_size, smart_split
+
+
+# ruff: noqa: RUF001
+def parse_msg(message: Message, *, verbose: bool = False) -> dict:
+    """Normalize a Pyrogram message into a plain dict of typed fields and log a one-line summary.
+
+    Results are cached per (chat_id, message_id) for 120 seconds so repeated
+    handlers on the same message are cheap.
+
+    Args:
+        message (Message): The incoming Pyrogram message.
+        verbose (bool): Also trace-log the full repr of the message.
+
+    Returns:
+        dict: Chat/user/media metadata — see the `info` literal below for the exact keys.
+    """
+    # NOTE(review): the cache key dereferences message.chat/message.id unguarded,
+    # while the field extraction below guards against None — confirm chat is always set.
+    if cached := cache.get(f"parse_msg-{message.chat.id}-{message.id}"):
+        return cached
+    if verbose:
+        logger.trace(f"{message!r}")
+    chat_type = message.chat.type.name if message.chat and message.chat.type else ""
+    chat_title = message.chat.title if message.chat and message.chat.title else ""
+    uid = message.from_user.id if message.from_user else 0
+    cid = message.chat.id if message.chat else 0
+    mid = message.id if message.id else 0
+    is_bot = bool(message.from_user and message.from_user.is_bot)
+    text = message.text if message.text else ""
+    first_name = message.from_user.first_name if message.from_user and message.from_user.first_name else ""
+    last_name = message.from_user.last_name if message.from_user and message.from_user.last_name else ""
+    handle = message.from_user.username if message.from_user and message.from_user.username else ""
+    full_name = f"{first_name} {last_name}".strip() if message.from_user else ""
+    video_name = message.video.file_name if message.video else ""
+    photo_id = message.photo.file_unique_id if message.photo else ""
+    caption = message.caption if message.caption else ""
+    gif = message.animation.file_name if message.animation else ""
+    sticker = message.sticker.set_name if message.sticker else ""
+    file_name = message.document.file_name if message.document else ""
+
+    # Parse URL from message entities (Only the first one)
+    entity_url = ""
+    if message.entities:
+        for entity in message.entities:
+            if entity.type == MessageEntityType.TEXT_LINK:
+                logger.trace(f"URL found from message entity: {entity.url}")
+                entity_url = entity.url
+                break
+
+    # log the summary to console
+    chat_type_emoji = {
+        "BOT": "🤖",
+        "GROUP": "👥",
+        "SUPERGROUP": "👥",
+        "CHANNEL": "📡",
+        "PRIVATE": "🔴",
+    }.get(chat_type, "")
+    summary = ""
+    if chat_title:
+        summary += f"{chat_type_emoji}{chat_title}[{mid}]"
+    if first_name:
+        summary += f"🤖{full_name}[{uid}]" if is_bot else f"👤{full_name}[{uid}]"
+    if video_name:
+        summary += f" 🎬{video_name}"
+    if photo_id:
+        summary += f" 🏞{photo_id}"
+    if sticker:
+        summary += f" 🎨{sticker}"
+    if gif:
+        summary += f" ✨{gif}"
+    if file_name:
+        summary += f" 📔{file_name}"
+    if text:
+        summary += f" 📝{text}"
+    if caption:
+        summary += f" 📝{caption}"
+    logger.info(f"{summary!r}")
+
+    info = {  # ensure the type of each field
+        "chat_type": str(chat_type),
+        "chat_title": str(chat_title),
+        "uid": int(uid),
+        "cid": int(cid),
+        "mid": int(mid),
+        "is_bot": bool(is_bot),
+        "text": str(text),
+        "first_name": str(first_name),
+        "last_name": str(last_name),
+        "full_name": str(full_name),
+        "handle": str(handle),
+        "video_name": str(video_name),
+        "file_name": str(file_name),
+        "photo_id": str(photo_id),
+        "caption": str(caption),
+        "gif": str(gif),
+        "sticker": str(sticker),
+        "summary": str(summary),
+        "entity_url": str(entity_url),
+    }
+    cache.set(f"parse_msg-{message.chat.id}-{message.id}", info, ttl=120)  # cache the same msg for 2 minutes
+    return info
+
+
+@cache.memoize(ttl=60)
+def startswith_prefix(message: Message, extra_prefix: list[str] | None = None, ignore_prefix: list[str] | None = None) -> bool:
+    """Check if the message starts with the given command prefixes.
+
+    Comparison is done on the stripped, lower-cased text (or caption).
+    `ignore_prefix` wins over `extra_prefix`, which wins over PREFIX.MAIN.
+
+    Args:
+        message (Message): The message object.
+        extra_prefix (list[str], optional): Extra command prefixes that are effective.
+        ignore_prefix (list[str], optional): Ignore these command prefixes.
+
+    Returns:
+        bool: True when the text starts with an effective prefix.
+    """
+    text = message.text or message.caption or ""
+    if ignore_prefix and any(text.strip().lower().startswith(prefix) for prefix in ignore_prefix):
+        return False
+    if extra_prefix and any(text.strip().lower().startswith(prefix) for prefix in extra_prefix):
+        return True
+    return text.strip().lower().startswith(PREFIX.MAIN)
+
+
+@cache.memoize(ttl=60)
+def equal_prefix(message: Message, extra_prefix: list[str] | None = None, ignore_prefix: list[str] | None = None) -> bool:
+    """Check if the message equals one of the given command prefixes (i.e. a bare command with no arguments).
+
+    Comparison is done on the stripped, lower-cased text (or caption).
+    `ignore_prefix` wins over `extra_prefix`, which wins over PREFIX.MAIN.
+
+    Args:
+        message (Message): The message object.
+        extra_prefix (list[str], optional): Extra command prefixes that are effective.
+        ignore_prefix (list[str], optional): Ignore these command prefixes.
+
+    Returns:
+        bool: True when the text is exactly an effective prefix.
+    """
+    text = message.text or message.caption or ""
+    if ignore_prefix and text.strip().lower() in ignore_prefix:
+        return False
+    if extra_prefix and text.strip().lower() in extra_prefix:
+        return True
+    return text.strip().lower() == PREFIX.MAIN
+
+
+def warp_media_group(media: list[dict], caption: str = "") -> list:
+    """Wrap media files into a list of InputMediaPhoto or InputMediaVideo objects.
+
+    Telegram media-group constraints enforced here: 2–10 items, caption at most
+    CAPTION_LENGTH characters, caption attached only to the first item.
+
+    item in media:
+        {
+            "photo": "path/to/photo.jpg",
+        }
+        or
+        {
+            "video": "path/to/video.mp4",
+            "width": int,
+            "height": int,
+            "duration": int,
+            "thumb": "path/to/thumbnail.jpg" | None,
+        }
+
+    Returns:
+        list: The InputMedia objects, or [] when fewer than 2 items are given.
+    """
+    group = []
+    if len(media) < 2:
+        logger.error(f"Media group requires at least 2 items, number of media: {len(media)}")
+        return []
+    if len(caption) > CAPTION_LENGTH:
+        logger.warning(f"Caption too long, length: {len(caption)}, caption: {caption}")
+        caption = caption[:CAPTION_LENGTH]
+    if len(media) > 10:
+        logger.warning(f"Too many media files, number of media: {len(media)}")
+        media = media[:10]
+    # add caption to the first item
+    if media[0].get("photo"):
+        group.append(InputMediaPhoto(media[0]["photo"], caption=caption))
+    elif media[0].get("video"):
+        # InputMediaVideo takes the path via its `media` kwarg; rename in place.
+        media[0]["media"] = media[0].pop("video")
+        group.append(InputMediaVideo(caption=caption, **media[0]))
+
+    # DO NOT add captions for remaining media
+    for x in media[1:]:
+        if x.get("photo"):
+            group.append(InputMediaPhoto(x["photo"]))
+        elif x.get("video"):
+            x["media"] = x.pop("video")
+            group.append(InputMediaVideo(**x))
+    return group
+
+
+def preprocess_media(media: list[dict]) -> list[dict]:
+ """Filter out invalid media files.
+
+ - photo must be at most 10 MB in size.
+ - photo's width and height must not exceed 10000 in total.
+ - photo's width and height ratio must be at most 20.
+ - filesize < 2GB for video
+
+ Return a list of valid media info. The format must be:
+ {
+ "photo": "path/to/photo.jpg",
+ }
+ or
+ {
+ "video": "path/to/video.mp4",
+ "width": int,
+ "height": int,
+ "duration": int,
+ "thumb": "path/to/thumbnail.jpg" | None,
+ }
+ """
+ num_before = len(media)
+ logger.trace(f"{num_before} media info before preprocess: {media}")
+ results = []
+
+ # Step-1: Photos
+ step1_res = []
+ for data in media:
+ if photo_path := data.get("photo"):
+ valid_photos = [validate_img(photo) for photo in split_long_img(photo_path) if validate_img(photo)]
+ step1_res.extend({"photo": valid_photo} for valid_photo in valid_photos)
+ continue
+ step1_res.append(data) # other type
+
+ # Step-2: Videos
+ for data in step1_res:
+ if video_path := data.get("video"):
+ video_path = fix_video_rotation(video_path)
+ if not is_valid_video(video_path):
+ logger.warning(f"Video is invalid: {video_path}")
+ continue
+
+ # split large video files ( < 2GB)
+ valid_videos = [x for x in split_large_video(video_path) if is_valid_video(x)]
+ thumbs = [generate_cover(x) for x in valid_videos]
+ for vpath, tpath in zip(valid_videos, thumbs, strict=True):
+ video_info = parse_media_info(vpath)
+ thumb = valid_thumb if (valid_thumb := validate_img(tpath)) else None
+ results.append({"video": vpath.as_posix(), "width": video_info["width"], "height": video_info["height"], "duration": video_info["duration"], "thumb": thumb})
+ continue
+ results.append(data) # other type
+
+ logger.debug(f"Filtered out {num_before - len(results)} invalid media files")
+ logger.trace(f"{len(results)} media info after preprocess: {results}")
+ return results
+
+
async def send2tg(
    client: Client,
    message: Message,
    target_chat: int | str = "",
    reply_msg_id: int = 0,
    *,
    texts: str = "",
    media: list[dict] | None = None,
    comments: list[str] | None = None,  # append after texts
    send_from_user: str | None = None,
    cooldown: float = 0,
    **kwargs,
) -> list[Message | None]:
    """Send unlimited number of texts and media to Telegram.

    Long texts are split into 4096-char chunks, media is sent in groups of
    up to 10, and captions are attached where they fit; oversized inputs
    recurse into this function with `reply_msg_id=-1` so only the first
    message replies to the trigger.

    Telegram Message Limitation:
    - 4096 characters for pure texts
    - 10 media in a single message
    - 1024 characters for caption (4096 for premium user)

    Args:
        client (Client): The Pyrogram client.
        message (Message): The trigger message object.
        target_chat (int | str, optional): The chat ID to send the message.
        reply_msg_id (int, optional): If set to integer > 0, the result is sent as a reply message to this message_id.
            If set to 0, reply to the trigger message itself.
            If set to -1, do not send as a reply message.
        texts (str, optional): The texts to send.
        media (list[dict], optional): The media files to send.
        comments (list[str], optional): The comments to append after texts.
        send_from_user (str, optional): The user name to prefix the texts.
        cooldown (float, optional): The interval between each media message. Defaults to 0.
        kwargs: Other keyword arguments. In this function, we use:
            progress (Message, optional): In-chat progress message forwarded to modify_progress.
            detail_progress (bool, optional): Show detailed progress for uploads.
                NOTE(review): the upload callbacks below pass kwargs.get("detail_progress", True),
                so the effective default appears to be True — confirm intended default.

    Returns:
        list[Message | None]: All messages sent, in send order.

    media item format:
    [
        {
            "photo": "path/to/photo.jpg",
        },
        {
            "video": "path/to/video.mp4",
        }
    ]
    TODO: Support to send audio and document
    """
    if kwargs:
        logger.debug(f"kwargs: {kwargs}")

    # Resolve target chat: explicit arg > kwargs override > trigger chat.
    if not target_chat:
        target_chat = kwargs["target_chat"] if kwargs.get("target_chat") else message.chat.id
    # Resolve reply target per the reply_msg_id contract documented above.
    if reply_msg_id == 0:
        reply_to = message.id
    elif reply_msg_id == -1:
        reply_to = None
    else:
        reply_to = reply_msg_id
    reply_parameters = ReplyParameters(message_id=reply_to)  # type: ignore
    sent_messages: list[Message | None] = []  # save sent messages results

    if media is None:
        media = []
    # Validate/split photos and videos before any send attempt.
    media = preprocess_media(media)
    if comments is None:
        comments = []

    # no text, but has comments. treat comments as texts
    texts = texts if texts else "".join(comments).strip()

    if send_from_user:  # prefix send_from_user
        texts = f"{send_from_user}{texts.strip()}"

    if kwargs.get("progress") and len(media) > 0:
        await modify_progress(text=f"⏫正在上传:\n{summay_media(media)}", force_update=True, **kwargs)

    # only media
    if media and not texts:
        logger.trace(f"Sending {len(media)} media without any texts")
        if len(media) == 1:
            if media[0].get("photo"):
                sent_messages.append(await client.send_photo(chat_id=target_chat, photo=media[0]["photo"], reply_parameters=reply_parameters))
            elif media[0].get("video"):
                sent_messages.append(
                    await client.send_video(
                        chat_id=target_chat,
                        reply_parameters=reply_parameters,
                        progress=telegram_uploading,
                        progress_args=(kwargs.get("progress", False), media[0]["video"], kwargs.get("detail_progress", True)),
                        **media[0],
                    )
                )
        elif 1 < len(media) <= 10:
            group = warp_media_group(media)
            sent_messages.extend(await client.send_media_group(target_chat, media=group, reply_parameters=reply_parameters))
        else:
            # More than 10 media: send in chunks of 10 via recursion.
            media_chunks = [media[i : i + 10] for i in range(0, len(media), 10)]
            for idx, chunk in enumerate(media_chunks):
                if idx == 0:
                    sent_messages.extend(await send2tg(client, message, target_chat, reply_msg_id, media=chunk, **kwargs))
                else:
                    sent_messages.extend(await send2tg(client, message, target_chat, reply_msg_id=-1, media=chunk, **kwargs))  # disable reply
                await asyncio.sleep(cooldown)  # cool down

    # append comments to texts
    # For len(texts) < 1024 , ensure the combined texts and comments remains below 1024 characters to avoid sending a subsequent message containing only the comments.
    # For long texts, keep all comments
    if len(texts) < CAPTION_LENGTH:
        for comment in comments:
            if len(f"{texts}{comment}") < CAPTION_LENGTH:
                texts += comment
    else:
        texts = texts + "".join(comments)

    videos = [x for x in media if x.get("video")]
    photos = [x for x in media if x.get("photo")]
    logger.trace(f"{len(texts)} texts, {len(comments)} comments, {len(videos)} videos, {len(photos)} photos: {texts!r}")

    # only texts
    if texts and not media:
        logger.trace(f"Sending {len(texts)} texts without any media")
        for idx, msg in enumerate(smart_split(texts)):
            if idx == 0:
                sent_messages.append(await client.send_message(target_chat, msg, reply_parameters=reply_parameters))
            else:
                sent_messages.append(await client.send_message(target_chat, msg, reply_parameters=ReplyParameters()))
        return sent_messages
    # both texts and media
    if texts and media:
        logger.trace(f"Sending {len(media)} media + {len(texts)} texts")
        # short text, single media
        if len(texts) < CAPTION_LENGTH and len(media) == 1:
            if media[0].get("photo"):
                sent_messages.append(await client.send_photo(target_chat, photo=media[0]["photo"], caption=texts, reply_parameters=reply_parameters))
            elif media[0].get("video"):
                sent_messages.append(
                    await client.send_video(
                        chat_id=target_chat,
                        caption=texts,
                        reply_parameters=reply_parameters,
                        progress=telegram_uploading,
                        progress_args=(kwargs.get("progress", False), media[0]["video"], kwargs.get("detail_progress", True)),
                        **media[0],
                    )
                )
        # long text, single media
        elif len(texts) >= CAPTION_LENGTH and len(media) == 1:
            # Caption gets the first <1024-char chunk; the rest is sent as plain text after.
            caption_text = smart_split(texts, CAPTION_LENGTH)[0]
            if media[0].get("photo"):
                sent_messages.append(await client.send_photo(target_chat, photo=media[0]["photo"], caption=caption_text, reply_parameters=reply_parameters))
            elif media[0].get("video"):
                sent_messages.append(
                    await client.send_video(
                        chat_id=target_chat,
                        caption=caption_text,
                        reply_parameters=reply_parameters,
                        progress=telegram_uploading,
                        progress_args=(kwargs.get("progress", False), media[0]["video"], kwargs.get("detail_progress", True)),
                        **media[0],
                    )
                )
            remaining_texts = texts.removeprefix(caption_text)
            sent_messages.extend(await send2tg(client, message, target_chat, reply_msg_id=-1, texts=remaining_texts, **kwargs))

        # short text, multiple media [1, 10]
        elif len(texts) < CAPTION_LENGTH and 1 < len(media) <= 10:
            group = warp_media_group(media, caption=texts)
            sent_messages.extend(await client.send_media_group(target_chat, media=group, reply_parameters=reply_parameters))

        # short text, multiple media (10, inf)
        elif len(texts) < CAPTION_LENGTH and len(media) > 10:
            media_chunks = [media[i : i + 10] for i in range(0, len(media), 10)]
            num_chunk = len(media_chunks)
            # send pure media first, and append captions at the last chunk
            for idx, batch in enumerate(media_chunks):
                if idx == 0:  # first chunk
                    group = warp_media_group(batch)
                    sent_messages.extend(await client.send_media_group(target_chat, media=group, reply_parameters=reply_parameters))
                elif idx != num_chunk - 1:  # disable reply if not the last chunk
                    group = warp_media_group(batch)
                    sent_messages.extend(await client.send_media_group(target_chat, media=group, reply_parameters=ReplyParameters()))
                else:  # last chunk (media <= 10, texts < 1024)
                    sent_messages.extend(await send2tg(client, message, target_chat, reply_msg_id=-1, texts=texts, media=batch, **kwargs))
                await asyncio.sleep(cooldown)

        # long text, multiple media [1, 10]
        elif len(texts) >= CAPTION_LENGTH and 1 < len(media) <= 10:
            caption_text = smart_split(texts, CAPTION_LENGTH)[0]
            remaining_texts = texts.removeprefix(caption_text)
            group = warp_media_group(media, caption=caption_text)
            sent_messages.extend(await client.send_media_group(target_chat, media=group, reply_parameters=reply_parameters))
            sent_messages.extend(await send2tg(client, message, target_chat, reply_msg_id=-1, texts=remaining_texts, **kwargs))

        # long text, multiple media (10, inf)
        else:
            media_chunks = [media[i : i + 10] for i in range(0, len(media), 10)]
            num_chunk = len(media_chunks)
            # send pure media first, and append captions at the last chunk
            for idx, batch in enumerate(media_chunks):
                if idx == 0:  # first chunk
                    group = warp_media_group(batch)
                    sent_messages.extend(await client.send_media_group(target_chat, media=group, reply_parameters=reply_parameters))
                elif idx != num_chunk - 1:  # disable reply if not the last chunk
                    group = warp_media_group(batch)
                    sent_messages.extend(await client.send_media_group(target_chat, media=group, reply_parameters=ReplyParameters()))
                else:  # last chunk (media <= 10, texts >= 1024)
                    sent_messages.extend(await send2tg(client, message, target_chat, reply_msg_id=-1, texts=texts, media=batch, **kwargs))
                await asyncio.sleep(cooldown)

    # clean up
    # Local media files are one-shot uploads; delete them after all sends.
    logger.trace("Cleaning up media files")
    for x in media:
        for key in ["path", "thumb", "audio", "photo", "video"]:
            if x.get(key) and Path(x[key]).is_file():
                logger.trace(f"Deleting: {x[key]}")
                Path(x[key]).unlink(missing_ok=True)
    return sent_messages
+
+
def summay_media(media: list[dict]) -> str:
    """Build a one-line-per-item summary of the media list (emoji + index + size)."""

    def _size_suffix(path) -> str:
        # Only string paths can be sized; anything else gets no suffix.
        return f": {readable_size(path=path)}" if isinstance(path, str) else ""

    lines: list[str] = []
    for pos, item in enumerate(media, start=1):
        if photo := item.get("photo"):
            lines.append(f"🏞P{pos}{_size_suffix(photo)}")
        elif clip := (item.get("video") or item.get("livephoto")):
            lines.append(f"🎬P{pos}{_size_suffix(clip)}")
        elif audio := item.get("audio"):
            lines.append(f"🎧P{pos}{_size_suffix(audio)}")
    return "\n".join(lines).strip()
+
+
async def save_messages(messages: list[Message | None], key: str, metadata: dict | None = None) -> bool:
    """Save the messages to DB.

    Every message must be a valid pyrogram Message; otherwise nothing is
    saved and False is returned. A media group is saved only once (keyed by
    its media_group_id).

    Args:
        messages: Messages returned by a send; None entries make the save fail.
        key: DB key to store the serialized message list under.
        metadata: Extra metadata to store; a "time" field is added from the
            first message's date.

    Returns:
        bool: True if at least one message was serialized and stored.

    data format:
    {
        "data": [
            {
                "cid": 111, # chat id
                "type": "text", # photo, video, media_group, etc.
                "mid": 222, # message id
                "text": "html format",
            },
            ...
        ]
    }
    """
    if not metadata:
        metadata = {}
    if not messages:
        logger.error(f"Skip save messages to {DB.ENGINE} due to empty message list")
        return False
    valid_messages = [x for x in messages if isinstance(x, Message)]
    if len(valid_messages) != len(messages):
        # All-or-nothing: a single failed send invalidates the whole cache entry.
        logger.warning(f"Skip save messages to {DB.ENGINE} due to invalid message type")
        return False
    time_str = valid_messages[0].date.isoformat()
    metadata["time"] = time_str
    data = []
    media_group_ids = set()  # save once
    for msg in valid_messages:
        text = ""
        if msg.text:
            text = msg.text
        if msg.caption:
            # Caption wins over text when both are set.
            text = msg.caption
        if hasattr(text, "html"):  # DO NOT use markdown, because this format has some bugs
            # NOTE(review): pyrogram text/caption objects expose an .html view — confirm.
            text = text.html  # type: ignore
        # Caution: this format should be consistent with `handle_social_media` function in `handler.py`
        # text = re.sub(r"^👤\[@.*?\]\(tg://user\?id=\d+\)//", "", text) # remove markdown send_from_user
        text = re.sub(r"^👤\<a.*?tg://user\?id=\d+.*?@.*?</a>//", "", text)  # remove markdown send_from_user
        msg_extra = {"text": text} if text else {}
        if msg.media_group_id:
            if msg.media_group_id not in media_group_ids:
                logger.trace(f"Saving media group message {msg.id}")
                media_group_ids.add(msg.media_group_id)
                data.append({"type": "media_group", "cid": msg.chat.id, "mid": msg.id} | msg_extra)
            continue
        if msg.video:
            logger.trace(f"Saving video message {msg.id}")
            data.append({"type": "video", "cid": msg.chat.id, "mid": msg.id} | msg_extra)
            continue
        if msg.photo:
            logger.trace(f"Saving photo message {msg.id}")
            data.append({"type": "photo", "cid": msg.chat.id, "mid": msg.id} | msg_extra)
            continue
        if msg.audio:
            logger.trace(f"Saving audio message {msg.id}")
            data.append({"type": "audio", "cid": msg.chat.id, "mid": msg.id} | msg_extra)
            continue
        if msg.text:
            logger.trace(f"Saving text message {msg.id}")
            data.append({"type": "text", "cid": msg.chat.id, "mid": msg.id} | msg_extra)
            continue
        logger.warning(f"Skip save message {msg.id} to {DB.ENGINE} due to unknown type: {msg}")
    if data:
        return await set_db(key, metadata=metadata, data={"data": data})
    return False
+
+
async def copy_messages_from_db(client: Client, message: Message, key: str, kv: dict | None = None, **kwargs) -> bool:
    """Copy messages from database.

    Looks up the cached message descriptors under `key` and re-sends/copies
    them to the target chat. On any copy failure the cache entry is deleted
    so the next request regenerates it.

    Args:
        client: The Pyrogram client.
        message: The trigger message (fallback target chat / reply anchor).
        key: DB key of the cached entry.
        kv: Pre-fetched DB value; fetched via get_db(key) when None.
        kwargs: May carry target_chat, reply_msg_id, send_from_user and
            progress-related options forwarded to modify_progress.

    Returns:
        bool: True when every cached message was copied successfully.

    data format:
    {
        "data": [
            {
                "cid": 111, # chat id
                "type": "text", # photo, video, media_group, etc.
                "mid": 222, # message id
                "text": "html format",
            },
            ...
        ]
    }
    OR:
    { "is_parted": True }

    """
    if kwargs:
        logger.debug(f"kwargs: {kwargs}")
    target_chat = kwargs["target_chat"] if kwargs.get("target_chat") else message.chat.id
    reply_parameters = ReplyParameters(message_id=kwargs.get("reply_msg_id", message.id))

    if kv is None:
        kv = await get_db(key)
    if kv.get("is_parted"):
        # Large payloads are stored in numbered parts under suffixed keys.
        logger.warning(f"Parted messages found for key={key}")
        return await copy_parted_msgs_from_db(client, message, key, **kwargs)

    if not kv.get("data"):
        logger.error(f"Wrong {DB.ENGINE} data for key={key}: {kv}")
        return False
    data: list[dict] = kv.get("data", [])
    if isinstance(data, str):
        data = json.loads(data)
    logger.debug(f"Sending {len(data)} messages from {DB.ENGINE}: {data}")
    await modify_progress(text=f"💾在{DB.ENGINE}中查到缓存, 正在转发{len(data)}条消息...", **kwargs)
    results = []
    try:
        for idx, item in enumerate(sorted(data, key=lambda x: x["mid"])):
            # BUG FIX: the original wrapped int() in contextlib.suppress(ValueError),
            # which left `cid` undefined (first item) or stale (later items) when the
            # conversion failed. Skip such items explicitly instead.
            try:
                cid = int(item["cid"])
            except ValueError:
                logger.warning(f"Skipping cached item with invalid chat id: {item}")
                continue
            if idx != 0:
                reply_parameters = ReplyParameters()  # only send as reply of the first message
            logger.debug(f"Copying {item['type']} message: ({cid}, {item['mid']}) -> target_chat={target_chat}")
            text = item.get("text")  # str or None
            if text and kwargs.get("send_from_user"):
                text = f"{sender_markdown_to_html(kwargs['send_from_user'])}{text}"
            if item["type"] == "text":
                if text:
                    results.append(await client.send_message(chat_id=target_chat, text=text, reply_parameters=reply_parameters))
                else:
                    # No cached text: fetch the original message to recover it.
                    db_msg: Message = await client.get_messages(chat_id=cid, message_ids=int(item["mid"]), replies=0)  # type: ignore
                    results.append(await client.send_message(chat_id=target_chat, text=db_msg.text, reply_parameters=reply_parameters))
            elif item["type"] in ["photo", "audio", "video"]:
                results.append(await client.copy_message(chat_id=target_chat, caption=text, from_chat_id=cid, message_id=int(item["mid"]), reply_parameters=reply_parameters))  # type: ignore
            elif item["type"] == "media_group":
                results.extend(await client.copy_media_group(chat_id=target_chat, captions=text, from_chat_id=cid, message_id=int(item["mid"]), reply_parameters=reply_parameters))  # type: ignore
            else:
                logger.warning(f"Unknown message type: {item}")
    except Exception as e:
        logger.error(f"Failed to copy messages for key={key} from {DB.ENGINE}: {e}")
        await del_db(key)
        return False
    if all(isinstance(x, Message) for x in results):
        logger.success(f"Successfully copied {len(results)} messages for key={key} from {DB.ENGINE}")
        await modify_progress(del_status=True, **kwargs)
        return True
    # Partial/invalid result: drop the cache so it can be rebuilt.
    await del_db(key)
    return False
+
+
async def copy_parted_msgs_from_db(client: Client, message: Message, key: str, suffix: str = "-P", **kwargs) -> bool:
    """Copy parted messages from database.

    For some large video files, they can be sent in parts.
    The primary key is the video link, and the parted key is suffixed with "P".
    """
    part_no = 1
    while part_no < 100:  # hard cap on the number of parts probed
        part_key = f"{key}{suffix}{part_no}"
        logger.trace(f"Checking parted message for key={part_key}")
        kv = await get_db(part_key)
        if not kv:
            # First missing part marks the end of the sequence.
            logger.debug(f"No more parted messages found for key={part_key}")
            return True
        if not await copy_messages_from_db(client, message, key=part_key, kv=kv, **kwargs):
            logger.error(f"Failed to copy parted messages for key={part_key}")
            return False
        part_no += 1
    logger.warning(f"Too many parted messages found for key={key}")
    return False
+
+
def sender_markdown_to_html(sender: str) -> str:
    """Convert markdown to html.

    👤[@username](tg://user?id=123456789)// ->
    👤<a href="tg://user?id=123456789">@username</a>//
    """
    if not sender:
        return ""
    matched = re.match(r"^👤\[@(.*?)\]\(tg://user\?id=(\d+)\)", sender)
    if matched is None:
        # Not a sender prefix — pass through unchanged.
        return sender
    username, user_id = matched.group(1), matched.group(2)
    return f'👤<a href="tg://user?id={user_id}">@{username}</a>' + sender[matched.end():]
+
+
async def modify_progress(message: Message | None = None, text: str = "", *, detail_progress: bool = False, del_status: bool = False, force_update: bool = False, **kwargs):
    """Modify the progress message.

    Edits (or deletes) the in-chat progress message. Updates are throttled
    via a 2-second cache key so Telegram isn't spammed with edits; a
    force_update bypasses the throttle.

    Args:
        message (Message): The progress message object. Falls back to
            kwargs["progress"] when not given.
        text (str): The new text to update.
        detail_progress(bool): Whether to show the detail progress.
        del_status (bool): Whether the progress is done.
        force_update (bool): Force update the message.
    """
    if message is None:
        message = kwargs.get("progress")
    if not isinstance(message, Message):
        # No progress message configured — silently do nothing.
        return
    try:
        if del_status:
            logger.info("Deleting progress message")
            await message.delete()
            return
        if not text:
            return
        if cache.get("modify_progress"):  # DO NOT update too frequently
            detail_progress = False
        if force_update:
            # force_update wins over the throttle above.
            detail_progress = True
        if not detail_progress:
            return
        logger.trace(f"Progress: {text!r}")
        await message.edit_text(text[:TEXT_LENGTH])
        cache.set("modify_progress", "1", ttl=2)  # throttle window for the next edit
    except Flood as e:
        # Telegram flood-wait: log and drop this update rather than crash.
        logger.warning(f"modify_progress: {e}")
+
+
async def telegram_uploading(current: int, total: int, *args):
    """Show video uploading progress."""
    progress_line = f"上传中: {current / 1024 / 1024:.1f} / {total / 1024 / 1024:.1f} MB ({current / total:.2%})"
    if len(args) != 3:
        # Expected (progress_message, file_path, detail_progress) — bail out otherwise.
        return
    progress_message, file_path, detail_progress = args
    file = Path(file_path)
    if not file.is_file():
        logger.error(f"File not found: {file_path}")
        return
    is_video = file.suffix in [".mp4", ".mkv", ".mov", ".webm", ".avi", ".flv", ".wmv", ".m4v"]
    media_type = "视频" if is_video else "音频"
    emoji = "🎬" if is_video else "🎧"
    await modify_progress(message=progress_message, text=f"⏫{media_type}{progress_line}\n{emoji}{file.name}", detail_progress=detail_progress)
+
+
if __name__ == "__main__":
    # Manual smoke test for the markdown -> HTML sender-prefix conversion.
    print(sender_markdown_to_html("👤[@username](tg://user?id=123456789)//"))
src/multimedia.py
@@ -0,0 +1,441 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import contextlib
+import json
+import math
+from pathlib import Path
+
+from ffmpeg import FFmpeg, FFmpegError, Progress
+from loguru import logger
+from PIL import Image
+
+from config import MAX_FILE_BYTES
+from utils import readable_size
+
+
def parse_media_info(path: str | Path | None) -> dict:
    """Given a media filepath, parse necessary information.

    Runs ffprobe with JSON output and extracts name, path, duration,
    dimensions, codecs, rotation and a human-readable filesize.

    Args:
        path: Media file path (str/Path); None or missing files return {}.

    Returns:
        dict: Parsed info, or {} when the file is missing or ffprobe fails.
    """
    if path is None or not Path(path).expanduser().resolve().is_file():
        logger.error(f"File not found: {path}")
        return {}
    path = Path(path).expanduser().resolve()
    logger.trace(f"Parsing media info: {path.name} [{readable_size(path=path)}]")
    ffprobe = FFmpeg(executable="ffprobe").input(path.as_posix(), print_format="json", show_streams=None)
    info = {}
    try:
        metadata = json.loads(ffprobe.execute())
        streams = metadata.get("streams", [])
        # First stream of each type that actually reports a codec name.
        audio_stream = next((x for x in streams if x.get("codec_name") and x.get("codec_type", "") == "audio"), {})
        video_stream = next((x for x in streams if x.get("codec_name") and x.get("codec_type", "") == "video"), {})
        audio_codec = audio_stream.get("codec_name", "")
        video_codec = video_stream.get("codec_name", "")
        durations = [x.get("duration", 0) for x in streams]  # all channels duration (some file embed the duration in subtitle stream)
        duration = max(map(float, durations))  # raises on empty streams -> caught below
        # Defaults are "0" strings; normalized via round(float(...)) below.
        width = video_stream.get("width", "0")
        height = video_stream.get("height", "0")
        side_data = video_stream.get("side_data_list", [])
        info = {
            "name": path.stem,
            "path": path.resolve().as_posix(),
            "duration": math.ceil(float(duration)),
            "width": round(float(width)),
            "height": round(float(height)),
            "audio_codec": audio_codec,
            "video_codec": video_codec,
            # Display rotation in degrees; 0 when no side data is present.
            "rotation": round(side_data[0].get("rotation", 0)) if side_data else 0,
            "filesize": readable_size(path=path),
        }
    except Exception as e:
        # Covers ffprobe launch failure, invalid JSON, and empty stream lists.
        logger.error(f"Failed to parse media file info: {e}")
    logger.debug(f"Parsed media info: {info}")
    return info
+
+
def split_long_img(path: str | Path | None, max_height: float = 2500, max_ratio: float = 3, target_ratio: float = 2.17, overlap: float = 15, *, delete: bool = True) -> list[Path]:
    """Split a very tall image into overlapping crops Telegram can display.

    Images taller than max_height AND with height/width ratio above max_ratio
    are cut into slices of roughly width * target_ratio pixels high, each
    overlapping the previous by `overlap` pixels.

    Args:
        path: Image path; None/missing returns [].
        max_height: Height threshold (px) below which no split happens.
        max_ratio: Height/width ratio threshold below which no split happens.
        target_ratio: Target height/width ratio of each slice.
        overlap: Vertical overlap between consecutive slices (px).
        delete: Delete the original after a successful split.

    Returns:
        list[Path]: Slice paths (or the single converted path when no split).
    """
    if path is None or not Path(path).expanduser().resolve().is_file():
        return []
    path = Path(path).expanduser().resolve()
    logger.debug(f"Checking long image: {path.name} [{readable_size(path=path)}]")
    photos = []
    # Normalize exotic formats (heic/webp/...) to something Telegram accepts first.
    path = convert_img_to_telegram_format(path, delete=delete)
    try:
        img = Image.open(path)
        img_width, img_height = img.size
        hw_ratio = img_height / img_width
        logger.trace(f"Height: {img_height}, Width: {img_width}, H/W Ratio: {hw_ratio:.2f}")
        if img_height <= float(max_height) or hw_ratio <= float(max_ratio):
            photos.append(path)
        else:
            logger.warning(f"Long image detected: {path.name}, Splitting ...")
            # Calculate the effective number of splits
            split_height = round(img_width * target_ratio)
            step = split_height - overlap
            num_splits = math.ceil((img_height - overlap) / step)

            # Adjust step to distribute remaining height across splits
            total_overlap = (num_splits - 1) * overlap
            adjusted_step = round((img_height - total_overlap) / num_splits)
            logger.debug(f"Splitting {path} into {num_splits} splits!")

            for idx in range(num_splits):
                top = idx * (adjusted_step)
                bottom = top + split_height
                if bottom > img_height:  # Adjust for the last split
                    bottom = img_height
                    top = max(0, bottom - split_height)
                box = (0, top, img_width, bottom)
                cropped_image = img.crop(box)
                logger.trace(cropped_image)
                save_path = Path(path).with_suffix(f".s{idx}.jpg")
                cropped_image.convert("RGB").save(save_path)
                photos.append(save_path)
                logger.debug(f"split {idx} saved to {save_path}")
            if delete:
                path.unlink(missing_ok=True)
    except Exception as e:
        logger.error(f"Failed to split long image: {e}")
        # NOTE(review): on a mid-split failure the original may already be
        # deleted, so this fallback path could point at a missing file — verify.
        return [path]
    return photos
+
+
def split_large_video(path: str | Path | None, *, delete: bool = True) -> list[Path]:
    """Split a video exceeding Telegram's upload size limit into parts.

    Parts are produced with stream-copy (no re-encode) using ffmpeg's `fs`
    output-size cap; each part's actual duration (from ffprobe) advances the
    next part's seek offset.

    Args:
        path: Video path; None/missing returns [].
        delete: Delete the original file after splitting.

    Returns:
        list[Path]: The original path when under the limit, else part paths.
    """
    if path is None or not Path(path).expanduser().resolve().is_file():
        return []
    path = Path(path).expanduser().resolve()
    logger.trace(f"Checking large video: {path.name} [{readable_size(path=path)}]")
    file_size = path.stat().st_size
    if file_size <= MAX_FILE_BYTES:
        logger.trace(f"Video is already under 2GB limit: {path.name}")
        return [path]
    split_size = MAX_FILE_BYTES - 20 * 1024 * 1024  # reduce a little bit (20 MB) for container overhead
    videos = []

    num_split = (file_size // split_size) + 1
    logger.warning(f"Split video file: {path.name} into {num_split} parts.")
    start_time = 0
    for idx in range(num_split):
        out_path = path.with_stem(f"{path.stem}_{idx + 1:02}")
        try:
            logger.debug(f"Splitting P{idx + 1}: {path.name} -> {out_path.name}")
            # -ss in milliseconds; copy both streams; `fs` caps the output size.
            ffmpeg = FFmpeg().option("y").input(path, ss=f"{start_time * 1000:.0f}ms").output(out_path, acodec="copy", vcodec="copy", fs=split_size)
            ffmpeg.execute()
            if probe := parse_media_info(out_path):
                videos.append(out_path)
                start_time += probe["duration"]  # next part starts where this one ended
        except Exception as e:
            logger.error(f"Failed to split P{idx + 1}: {path.name} -> {out_path.name} : {e}")
    if delete:
        path.unlink(missing_ok=True)
    return videos
+
+
def convert_to_h264(path: str | Path | None, *, re_encoding: bool = False, delete: bool = True) -> Path:
    """Remux or re-encode a video into an H.264/AAC mp4.

    If the source is already h264+aac (and re_encoding is False) the streams
    are copied with +faststart; otherwise the video is re-encoded with
    libx264/aac.

    Args:
        path: Source video path; None/missing returns Path("").
        re_encoding: Force a full re-encode even when codecs already match.
        delete: Delete the original file on success.

    Returns:
        Path: The "<name>.h264.mp4" output on success, otherwise the input path.
    """
    if path is None or not Path(path).expanduser().resolve().is_file():
        return Path("")
    path = Path(path).expanduser().resolve()
    logger.debug(f"Checking H264 mp4: {path.name}")
    info = parse_media_info(path)  # {} on probe failure -> KeyError below, caught
    tmp_path = path.with_suffix(".tmp.mp4")
    mp4_path = path.with_suffix(".h264.mp4")
    success = True
    try:
        if not re_encoding and info["video_codec"] == "h264" and info["audio_codec"] == "aac":
            logger.debug(f"Video is already H264, without re-encoding: {path.name} -> {tmp_path.name}")
            ffmpeg = FFmpeg().option("y").input(path).output(tmp_path, codec="copy", movflags="+faststart", f="mp4")
            ffmpeg.execute()
        else:
            logger.warning(f"Re-encoding video: {path.name} -> {tmp_path.name}")
            ffmpeg = FFmpeg().option("y").input(path).output(tmp_path, acodec="aac", vcodec="libx264", f="mp4")

            # Progress callbacks are attached only on the (slow) re-encode path.
            @ffmpeg.on("progress")
            def on_progress(progress: Progress):
                logger.debug(progress)

            @ffmpeg.on("completed")
            def on_completed():
                logger.debug("completed")

            ffmpeg.execute()
            if delete:
                path.unlink(missing_ok=True)
        tmp_path.rename(mp4_path)
    except FFmpegError as e:
        logger.error(f"Failed to convert mp4 {path.name}: {e.message}")
        success = False
    except Exception as e:
        logger.error(f"Failed to convert mp4 {path.name}: {e}")
        success = False
    finally:  # always delete tmp file
        tmp_path.unlink(missing_ok=True)
    if success:
        # delete original file
        # (no-op on the re-encode path where it was already unlinked above)
        if delete:
            path.unlink(missing_ok=True)
        return mp4_path if mp4_path.is_file() else path
    return path
+
+
def convert_to_audio(path: str | Path | None, ext: str = "m4a", *, codec: str = "aac", delete: bool = True) -> Path:
    """Extract/convert the audio stream of a media file.

    Streams are copied when the source audio codec already matches `codec`,
    otherwise the audio is re-encoded; video streams are dropped (-vn).

    Args:
        path: Source media path; None/missing returns Path("").
        ext: Output container extension (default "m4a").
        codec: Target audio codec (default "aac").
        delete: Delete the original file on success.

    Returns:
        Path: The "<name>.final.<ext>" output on success, otherwise the input path.
    """
    if path is None or not Path(path).expanduser().resolve().is_file():
        return Path("")
    path = Path(path).expanduser().resolve()
    logger.debug(f"Converting to audio {ext}: {path.name}")
    info = parse_media_info(path)  # {} on probe failure -> KeyError below, caught
    tmp_path = path.with_suffix(f".tmp.{ext}")
    final_path = path.with_suffix(f".final.{ext}")
    success = True
    try:
        if info["audio_codec"] == codec:
            logger.debug(f"Audio stream is already {codec}, without re-encoding: {path.name} -> {tmp_path.name}")
            ffmpeg = FFmpeg().option("y").input(path).output(tmp_path, vn=None, acodec="copy")
            ffmpeg.execute()
        else:
            logger.warning(f"Re-encoding audio: {path.name} -> {tmp_path.name}")
            ffmpeg = FFmpeg().option("y").input(path).output(tmp_path, vn=None, acodec=codec)

            # Progress callbacks are attached only on the (slow) re-encode path.
            @ffmpeg.on("progress")
            def on_progress(progress: Progress):
                logger.debug(progress)

            @ffmpeg.on("completed")
            def on_completed():
                logger.debug("completed")

            ffmpeg.execute()
            if delete:
                path.unlink(missing_ok=True)
        tmp_path.rename(final_path)
    except FFmpegError as e:
        # NOTE(review): message hard-codes "m4a" even when `ext` differs.
        logger.error(f"Failed to convert m4a {path.name}: {e.message}")
        success = False
    except Exception as e:
        logger.error(f"Failed to convert m4a {path.name}: {e}")
        success = False
    finally:  # always delete tmp file
        tmp_path.unlink(missing_ok=True)
    if success:
        if delete:
            path.unlink(missing_ok=True)
        return final_path if final_path.is_file() else path
    return path
+
+
def generate_cover(path: Path | str) -> str:
    """Generate cover image base on media file path.

    Must be jpg format.

    Lookup order: an existing sibling .jpg/.jpeg; a sibling
    .webp/.png/.heic/.bmp converted to jpg; otherwise the first video frame
    extracted via ffmpeg.

    Args:
        path (Path): media file path

    Returns:
        str: cover path ("" when nothing could be produced)
    """
    logger.debug(f"Generate cover for: {path}")
    jpg_path = Path(path).with_suffix(".jpg")
    for ext in [".jpg", ".jpeg"]:
        cover_path = Path(path).with_suffix(ext)
        if cover_path.is_file():
            logger.debug(f"JPG cover image already exists: {cover_path.as_posix()}")
            return cover_path.as_posix()
    for ext in [".webp", ".png", ".heic", ".bmp"]:
        cover_path = Path(path).with_suffix(ext)
        if cover_path.is_file():
            converted = convert_img_to_telegram_format(cover_path, force_jpg=True)
            logger.debug(f"Converted cover image: {cover_path.name} -> {converted.name}")
            return converted.as_posix()

    logger.debug(f"Generate cover image from the first frame of {path}")
    with contextlib.suppress(Exception):
        # vframes=1 grabs only the first frame as the cover.
        ffmpeg = FFmpeg().option("y").option("loglevel", "warning").input(path).output(jpg_path, vframes=1)
        ffmpeg.execute()
        return jpg_path.as_posix() if jpg_path.is_file() else ""

    # Reached only when the ffmpeg extraction above raised.
    logger.error(f"Failed to generate cover image for: {path}")
    return ""
+
+
def convert_jpg_via_pillow(path: str | Path | None, *, delete: bool = True) -> tuple[bool, Path]:
    """Returns: is_success, out_path."""
    if path is None or not Path(path).expanduser().resolve().is_file():
        return False, Path("")
    src = Path(path).expanduser().resolve()

    if src.suffix.lower() == ".heic":
        # HEIC needs the optional pillow_heif plugin registered first.
        try:
            from pillow_heif import register_heif_opener  # type: ignore
        except ModuleNotFoundError:
            logger.warning("Please install 'pillow_heif' package for PIL's heic support.")
            logger.warning("Docs: https://pillow-heif.readthedocs.io/en/latest/installation.html")
            return False, src
        register_heif_opener()

    dst = src.with_suffix(".jpg")
    logger.debug(f"Converting {src.name} -> {dst.name}")
    try:
        Image.open(src).convert("RGB").save(dst)
    except Exception as e:
        logger.error(f"Failed convert {src.name} -> {dst.name}: {e}")
        return False, src
    if delete:
        src.unlink(missing_ok=True)
    return True, dst
+
+
def convert_jpg_via_ffmpeg(path: str | Path | None, *, delete: bool = True) -> tuple[bool, Path]:
    """Returns: is_success, out_path."""
    if path is None or not Path(path).expanduser().resolve().is_file():
        return False, Path("")
    src = Path(path).expanduser().resolve()
    dst = src.with_suffix(".jpg")
    logger.debug(f"Converting {src.name} -> {dst.name}")
    try:
        # Single-frame output produces a JPEG from the (first frame of the) input.
        FFmpeg().option("y").option("loglevel", "warning").input(src).output(dst, vframes=1).execute()
    except Exception as e:
        logger.error(f"Failed convert {src.name} -> {dst.name}: {e}")
        return False, src
    if delete:
        src.unlink(missing_ok=True)
    return True, dst
+
+
def convert_img_to_telegram_format(path: str | Path | None, *, force_jpg: bool = False, delete: bool = True) -> Path:
    """Convert an image to a Telegram-accepted format, preferring PIL over FFmpeg.

    JPEGs pass through untouched; PNG/BMP/GIF pass through unless force_jpg.
    """
    if path is None or not Path(path).expanduser().resolve().is_file():
        return Path("")
    src = Path(path).expanduser().resolve()
    ext = src.suffix.lower()
    if ext in [".jpg", ".jpeg"]:
        return src

    if force_jpg is False and ext in [".png", ".bmp", ".gif"]:
        return src

    ok, out_path = convert_jpg_via_pillow(src, delete=delete)
    if ok:
        logger.success(f"Converted {src.name} via PIL: {out_path.name}")
        return out_path
    logger.warning(f"Failed to convert {src.name} via PIL, try FFmpeg ...")

    ok, out_path = convert_jpg_via_ffmpeg(src, delete=delete)
    if ok:
        logger.success(f"Converted {src.name} via FFmpeg: {out_path.name}")
        return out_path
    logger.error(f"Failed to convert {src.name} via FFmpeg.")

    # Both converters failed — hand the original back.
    return src
+
+
def convert_img_match_telegram_rules(path: str | Path, num_bytes: int = 10485760, wh_total: int = 10000, max_ratio: float = 20, *, delete: bool = True) -> Path:
    """Convert image to meet Telegram photo requirements.

    1. photo must be at most 10 MB in size.
    2. photo's width and height must not exceed 10000 in total.
    3. photo's width and height ratio must be at most 20.

    The image is shrunk ~10% per pass (with the aspect ratio clamped into
    bounds) and the function recurses until all limits are satisfied.

    Args:
        path: Source image path.
        num_bytes: Maximum file size in bytes.
        wh_total: Maximum width + height in pixels.
        max_ratio: Maximum width/height ratio (minimum is 1/max_ratio).
        delete: Delete the previous file after each successful resize.

    Returns:
        Path: Path to a conforming image, or the last usable path on error.
    """
    path = Path(path).expanduser().resolve()
    if not path.is_file():
        return Path("")
    min_ratio = 1 / max_ratio
    filesize = path.stat().st_size
    save_path = path.with_stem(f"{path.stem}_reduced")
    try:
        img = Image.open(path)
        width, height = img.size
        ratio = width / height
        logger.trace(f"{path.name}: {width}x{height} (r={ratio:.2f}), {filesize} bytes ({readable_size(path=path)})")
        # BUG FIX: the ratio bounds are inclusive now ("at most 20"); the
        # original strict check could recurse forever once a clamped resize
        # landed exactly on the boundary ratio.
        if filesize < num_bytes and width + height < wh_total and min_ratio <= ratio <= max_ratio:
            logger.debug(f"Image is already under limit: {path.name}")
            return path
        new_width = round(0.9 * width)
        if ratio > max_ratio:  # too wide: clamp height up so ratio == max_ratio
            new_height = round(new_width / max_ratio)
        elif ratio < min_ratio:  # too tall: clamp height down so ratio == min_ratio
            # BUG FIX: the original used `new_width * min_ratio`, which shrank the
            # height far below the width and pushed the ratio past max_ratio.
            new_height = round(new_width / min_ratio)
        else:
            new_height = round(new_width / ratio)  # keep aspect, just shrink

        img = img.resize((new_width, new_height))
        img.save(save_path)
    except Exception as e:
        logger.error(f"Failed to reduce image size: {path}, {e}")
        return path
    if delete:
        path.unlink(missing_ok=True)
    logger.debug(f"Reduced image size: {save_path.stat().st_size} bytes ({readable_size(path=save_path)})")
    return convert_img_match_telegram_rules(save_path, num_bytes, wh_total, max_ratio, delete=delete)
+
+
def validate_img(path: str | Path | None, *, delete: bool = True) -> str:
    """Check if the image is valid.

    0. format must be in ["heic", "jpg", "jpeg", "png", "webp"]
    1. photo must be at most 10 MB in size.
    2. photo's width and height must not exceed 10000 in total.
    3. photo's width and height ratio must be at most 20.

    Args:
        path: Image path; None/missing returns "".
        delete: Delete broken/oversized originals while normalizing.

    Returns:
        str: POSIX path of a valid, Telegram-compliant image, or "".
    """
    if path is None or not Path(path).expanduser().resolve().is_file():
        logger.warning(f"Image path not found: {path}")
        return ""
    path = Path(path).expanduser().resolve()
    logger.trace(f"Checking image: {path.name} [{readable_size(path=path)}]")
    # Telegram support image format: JPEG, PNG, BMP, and GIF
    # For other formats, we should convert them to .jpg
    if path.suffix.lower() not in [".heic", ".jpg", ".jpeg", ".png", ".webp", ".bmp", ".gif"]:
        logger.warning(f"Invalid image format: {path.name}")
        return ""
    path = convert_img_to_telegram_format(path, delete=delete)

    if not path.is_file():
        logger.warning(f"Invalid image: {path}")
        return ""
    try:
        # verify() detects truncated/corrupt files without decoding fully.
        img = Image.open(path)
        img.verify()
        img.close()
        valid_path = convert_img_match_telegram_rules(path, delete=delete)
        if valid_path.as_posix() != path.as_posix():
            # Keep the original filename: move the resized result over it.
            valid_path.rename(path)
    except Exception as e:
        logger.error(f"Broken image: {path}, {e}")
        if delete:
            path.unlink(missing_ok=True)
        return ""
    return path.as_posix() if path.is_file() else ""
+
+
def is_valid_video(path: str | Path | None, *, delete: bool = True) -> bool:
    """Check if the video is valid (i.e. ffprobe can parse it)."""
    if not parse_media_info(path):
        logger.error(f"Invalid video: {path}")
        if delete:
            # str() guards against path being None here.
            Path(str(path)).unlink(missing_ok=True)
        return False
    return True
+
+
def fix_video_rotation(path: str | Path | None) -> Path:
    """Fix video rotation for iOS devcies.

    Some videos (Weibo's livephotos) are displayed in the wrong direction on the Telegram iOS client.

    Re-encodes the video (baking the rotation into the pixels) when the
    rotation side data is ±90 degrees.
    NOTE(review): 180/270-degree rotations are not handled — confirm intended.

    Returns:
        Path: The fixed (re-encoded) path, or the original path when no fix applies.
    """
    if path is None or not Path(path).expanduser().resolve().is_file():
        return Path("")
    path = Path(path).expanduser().resolve()
    logger.trace(f"Checking video rotation: {path.name} [{readable_size(path=path)}]")
    probe_info = parse_media_info(path)
    if not probe_info:  # video can't parse by ffprobe
        logger.warning(f"Invalid video: {path}")
        return path
    if probe_info.get("rotation") in [-90, 90]:
        logger.warning(f"Fixing video rotation from {probe_info['height']}x{probe_info['width']}")
        # Force a full re-encode so the rotation is applied to the frames.
        path = convert_to_h264(path, re_encoding=True)
    return path
+
+
if __name__ == "__main__":
    # Manual smoke tests against developer-local sample files.
    # print(convert_to_h264("~/tests/test.mov"))
    # is_valid_video("~/tests/test.jpg")
    # convert_img_match_telegram_rules("~/tests/test.large.jpg")
    print(convert_img_to_telegram_format("~/tests/test.heic"))
src/networking.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import asyncio
+import copy
+import inspect
+import json
+import re
+import time
+from pathlib import Path
+from urllib.parse import parse_qs, quote_plus, urlparse
+
+import anyio
+from httpx import AsyncClient, AsyncHTTPTransport, HTTPStatusError, RequestError, Response
+from loguru import logger
+
+from config import DOWNLOAD_DIR, PROXY, UA, cache, semaphore
+from message_utils import modify_progress, summay_media
+from utils import https_url, readable_size
+
+# ruff: noqa: RUF001
+# Default request headers mimicking mobile Safari on iOS.
+# hx_req() uses these as the base and, when merge_headers is True, overlays
+# caller-supplied headers on top of them.
+MOBILE_HEADERS = {
+    "accept": "text/html,application/xhtml+xml,application/xml,application/json;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
+    "accept-language": "en-US,en;q=0.9",
+    "cache-control": "no-cache",
+    "dnt": "1",
+    "pragma": "no-cache",
+    "priority": "u=0, i",
+    "sec-fetch-dest": "empty",
+    "sec-fetch-mode": "navigate",
+    "sec-fetch-site": "same-origin",
+    "upgrade-insecure-requests": "1",
+    "user-agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 16_7_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.6 Mobile/15E148 Safari/604.1",
+}
+
+
+def retry(max_retries=3, delay=2):
+    """Retry decorator supporting both sync and async callables.
+
+    On exhaustion the wrapper does NOT raise: it returns a sentinel
+    ``Response(500)`` whose ``extensions`` dict carries the last exception
+    and a human-readable message.
+
+    Args:
+        max_retries: Maximum number of attempts. Defaults to 3.
+        delay: Seconds to sleep between attempts. Defaults to 2.
+    """
+
+    def decorator(func):
+        # NOTE(review): the sync and async wrappers are near-identical copies;
+        # consider extracting the shared exception-to-message logic.
+        async def async_wrapper(*args, **kwargs):
+            retries = 0
+            resp = Response(500)
+            while retries < max_retries:
+                try:
+                    return await func(*args, **kwargs)
+                except RequestError as e:
+                    # NOTE(review): logs "Retrying 0/N" on the first failure since
+                    # retries is incremented only after the message is built.
+                    msg = f"RequestError: {e.request.url!r} {e} (Retrying {retries}/{max_retries})"
+                    resp.extensions = {"exception": e, "msg": msg}
+                except HTTPStatusError as e:
+                    msg = f"HTTPStatusError: {e.response.status_code} while requesting {e.request.url!r} {e} (Retrying {retries}/{max_retries})"
+                    resp.extensions = {"exception": e, "msg": msg}
+                except Exception as e:
+                    msg = f"{type(e).__name__}: {e} (Retrying {retries}/{max_retries})"
+                    resp.extensions = {"exception": e, "msg": msg}
+                logger.error(msg)
+                retries += 1
+                await asyncio.sleep(delay)
+            logger.error(f"Failed after {max_retries} retries")
+            return resp
+
+        def sync_wrapper(*args, **kwargs):
+            retries = 0
+            resp = Response(500)
+            while retries < max_retries:
+                try:
+                    return func(*args, **kwargs)
+                except RequestError as e:
+                    msg = f"RequestError: {e.request.url!r} {e} (Retrying {retries}/{max_retries})"
+                    resp.extensions = {"exception": e, "msg": msg}
+                except HTTPStatusError as e:
+                    msg = f"HTTPStatusError: {e.response.status_code} while requesting {e.request.url!r} {e} (Retrying {retries}/{max_retries})"
+                    resp.extensions = {"exception": e, "msg": msg}
+                except Exception as e:
+                    msg = f"{type(e).__name__}: {e} (Retrying {retries}/{max_retries})"
+                    resp.extensions = {"exception": e, "msg": msg}
+                logger.error(msg)
+                retries += 1
+                time.sleep(delay)
+            logger.error(f"Failed after {max_retries} retries")
+            return resp
+
+        def wrapper(*args, **kwargs):
+            # Dispatch at call time: coroutine functions get the awaitable wrapper.
+            if inspect.iscoroutinefunction(func):
+                return async_wrapper(*args, **kwargs)
+            return sync_wrapper(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
+
+
+async def log_req(request):
+    """httpx event hook: log each outgoing request at DEBUG level."""
+    logger.debug(f"{request.method} {request.url}")
+
+
+async def log_resp(response):
+    """httpx event hook: log each response (with status code) at DEBUG level."""
+    request = response.request
+    logger.debug(f"[{response.status_code}] {request.method} {request.url}")
+
+
+@retry()
+async def hx_req(
+    url,
+    method: str = "GET",
+    *,
+    headers: dict | None = None,
+    merge_headers: bool = True,
+    cookies: dict | None = None,
+    params: dict | None = None,
+    post_data: dict | None = None,
+    post_json: dict | None = None,
+    transport: AsyncHTTPTransport | None = None,
+    proxy: str | None = None,
+    follow_redirects: bool = True,
+    check_has_kv: list[str] | None = None,
+    check_kv: dict | None = None,
+    timeout: int = 10,  # noqa: ASYNC109
+) -> Response:
+    """Perform an HTTP request with default mobile headers and JSON response checks.
+
+    Args:
+        url: Target URL.
+        method: "GET" or "POST".
+        headers: Extra headers; defaults to MOBILE_HEADERS.
+        merge_headers: When True, overlay the given headers on MOBILE_HEADERS.
+        check_has_kv: Keys (dot-notation for nesting) that must exist with truthy
+            values in the JSON response; raises ValueError/TypeError otherwise.
+        check_kv: Key/value pairs (one level of nesting allowed) that must match
+            (string-compared) in the JSON response; raises ValueError otherwise.
+
+    Returns:
+        Response: The httpx response (retry() returns a sentinel Response(500) on failure).
+    """
+    # headers
+    if headers is None:
+        headers = MOBILE_HEADERS
+    elif merge_headers:
+        headers = MOBILE_HEADERS | headers
+    if transport is None:
+        transport = AsyncHTTPTransport(proxy=proxy, http2=True, retries=3)
+    logger.trace(f"Headers: {headers}, Params: {params}")
+    async with AsyncClient(http2=True, proxy=proxy, transport=transport, event_hooks={"request": [log_req], "response": [log_resp]}) as client:
+        if method == "GET":
+            response = await client.get(url, cookies=cookies, headers=headers, params=params, follow_redirects=follow_redirects, timeout=timeout)
+        elif method == "POST":
+            response = await client.post(url, cookies=cookies, headers=headers, data=post_data, json=post_json, params=params, follow_redirects=follow_redirects, timeout=timeout)
+        # NOTE(review): any method other than "GET"/"POST" leaves `response`
+        # unbound and raises UnboundLocalError below — consider validating method.
+        data = response.text
+        try:
+            logger.trace(json.loads(data))
+        except json.JSONDecodeError:
+            logger.trace(data)
+        response.raise_for_status()
+        if check_has_kv:
+            data = json.loads(response.text)
+            for key in check_has_kv:
+                if "." not in key:
+                    if not data.get(key):
+                        msg = f"Value of Key={key} not found in json response"
+                        logger.error(data)
+                        raise ValueError(msg)
+                else:
+                    # Dotted key: walk the nested dicts one segment at a time.
+                    keys = key.split(".")
+                    data_copy = copy.deepcopy(data)
+                    error_msg = f"Value of Key={key} not found in json response"
+                    for k in keys:
+                        if not isinstance(data_copy, dict):
+                            raise TypeError(error_msg)
+                        data_copy = data_copy.get(k, {})
+                    if not data_copy:
+                        raise ValueError(error_msg)
+
+        if check_kv:
+            data = json.loads(response.text)
+            for key, value in check_kv.items():
+                if isinstance(value, dict):
+                    for k, v in value.items():
+                        if str(data.get(key, {}).get(k)) != str(v):  # convert to str to compare
+                            msg = f"Key={key}.{k} got {data.get(key, {}).get(k)} in response, but required: {v}"
+                            logger.error(data)
+                            raise ValueError(msg)
+                elif str(data.get(key)) != str(value):  # convert to str to compare
+                    msg = f"Key={key} got {data.get(key)} in response, but required: {value}"
+                    logger.error(data)
+                    raise ValueError(msg)
+    return response
+
+
+async def download_file(
+    link: str,
+    path: str | Path | None = None,
+    *,
+    suffix: str = "",
+    skip_exist: bool = False,
+    workers_proxy: bool = False,
+    headers: dict | None = None,
+    **kwargs,
+) -> str:
+    """Download a file from the given link and save it to the specified path.
+
+    Args:
+        link (str): URL to download the file.
+        path (str | Path, optional): The path to save the downloaded file. Defaults to auto detect.
+        suffix (str, optional): The suffix to append to the file name. Defaults to auto detect.
+        skip_exist (bool, optional): Skip downloading if the file already exists. Defaults to False.
+        workers_proxy (bool, optional): Use workers proxy. Defaults to False.
+        headers (dict, optional): The headers to use for the request. Defaults to Telegram UA.
+
+    Returns:
+        str: Download file path ("" on failure).
+    """
+    if not link:
+        return ""
+    if path is None:
+        path = Path(DOWNLOAD_DIR) / Path(urlparse(link).path).name
+    path = Path(path).expanduser().resolve()
+    if path.suffix != suffix:
+        path = path.with_suffix(f"{path.suffix}{suffix}")  # append suffix, not replace
+
+    if path.is_file() and skip_exist:
+        logger.info(f"File already exists, skipping download: {path}")
+        return path.as_posix()
+    if workers_proxy and PROXY.WORKERS:
+        # Route through the Cloudflare-Workers-style proxy by URL-encoding the target.
+        link = PROXY.WORKERS + quote_plus(link)
+    if headers is None:
+        headers = {"user-agent": UA.TELEGRAM}
+    path.parent.mkdir(parents=True, exist_ok=True)
+    logger.trace(f"Downloading {link} to {path}")
+    # NOTE(review): `hx` is created outside an `async with` and is never closed —
+    # only the stream context is; consider `async with AsyncClient(...) as hx`.
+    hx = AsyncClient(transport=AsyncHTTPTransport(retries=3), proxy=PROXY.DOWNLOAD, timeout=60, follow_redirects=True, event_hooks={"request": [log_req], "response": [log_resp]})
+    async with semaphore, hx.stream("GET", link, headers=headers, timeout=60) as response:
+        total = int(response.headers.get("Content-Length", 0))
+        async with await anyio.open_file(path, "wb") as f:
+            num_bytes_downloaded = response.num_bytes_downloaded
+            async for chunk in response.aiter_bytes():
+                await f.write(chunk)
+                # NOTE(review): the progress message uses the count captured at the
+                # end of the PREVIOUS iteration, so it lags one chunk behind.
+                msg = f"⏬下载中: {readable_size(num_bytes_downloaded)} / {readable_size(total)}\n💾{path.name}"
+                msg += f" ({num_bytes_downloaded / total:.2%})" if total and total > 0 else ""
+                await modify_progress(text=msg, **kwargs)
+                num_bytes_downloaded = response.num_bytes_downloaded
+
+    if path.is_file():
+        logger.info(f"Downloaded file saved to {path}")
+        await modify_progress(text=f"🎉下载成功\n{path.name}", **kwargs)
+        return path.as_posix()
+    return ""
+
+
+async def download_first_success_urls(links: list[str], **kwargs) -> str:
+    """Download the first successfully downloaded file from a list of links.
+
+    Note: This will only download a single file from the list of links.
+
+    Returns:
+        str: Path of the first successful download, or "" when all links fail.
+    """
+    if not links:
+        return ""
+    for link in links:
+        res = await download_file(link, **kwargs)
+        if Path(res).is_file():
+            return res
+    return ""
+
+
+async def download_media(media: list[dict], **kwargs) -> list[dict]:
+    """Run the per-item download coroutines and replace them with their result paths.
+
+    Each media dict is expected to hold AT MOST ONE of the keys
+    photo/video/livephoto/audio, whose value is an awaitable download task.
+
+    Returns:
+        list[dict]: Items whose download succeeded, with the task replaced by the path.
+    """
+    tasks = []
+    for item in media:
+        if task := item.get("photo"):  # async function
+            tasks.append(task)
+        if task := item.get("video"):
+            tasks.append(task)
+        if task := item.get("livephoto"):
+            tasks.append(task)
+        if task := item.get("audio"):
+            tasks.append(task)
+    # run all tasks
+    # NOTE(review): zip(..., strict=True) below assumes exactly one task per item;
+    # an item carrying two media keys appends two tasks here and makes the zip
+    # raise ValueError at runtime — confirm upstream guarantees a single key.
+    results = await asyncio.gather(*tasks, return_exceptions=True)
+
+    final_media = []
+    for item, result in zip(media, results, strict=True):
+        if isinstance(result, Exception):
+            logger.error(f"Failed to download: {result}")
+        else:
+            # NOTE(review): the `task :=` walrus assignments below are leftovers —
+            # `task` is never used in this loop. A livephoto result is intentionally
+            # stored under the "video" key.
+            if item.get("photo"):  # async function
+                item["photo"] = result
+                final_media.append(item)
+            if task := item.get("video"):
+                item["video"] = result
+                final_media.append(item)
+            if task := item.get("livephoto"):
+                item["video"] = result
+                final_media.append(item)
+            if task := item.get("audio"):
+                item["audio"] = result
+                final_media.append(item)
+            logger.success(f"Downloaded: {result}")
+    await modify_progress(text=f"✅下载成功:\n{summay_media(final_media)}", **kwargs)
+    return final_media
+
+
+@cache.memoize(ttl=60)
+async def match_social_media_link(text: str, *, flatten_first: bool = False) -> dict:
+    """Matches social media links in the given text and returns a dictionary with the matched information.
+
+    Args:
+        text (str): The text to search for social media links.
+        flatten_first (bool): Resolve short-link redirects before matching.
+
+    Returns:
+        dict: A dictionary containing the matched information. (must have a key named "platform")
+
+    Note:
+        Patterns are evaluated top to bottom and each match overwrites
+        matched_info, so the LAST matching pattern wins.
+
+    #! TODO: Handle multiple links in one message.
+    """
+    if flatten_first:
+        text = await flatten_rediercts(text)
+    matched_info = {"platform": ""}
+    # NOTE(review): several patterns below use "(:?...)", which is almost certainly
+    # a typo for the non-capturing group "(?:...)" — as written it allows a stray
+    # leading ":" (and an empty alternative in the instagram pattern). Verify intent.
+    # https://www.douyin.com/video/7398813386827468041
+    if matched := re.search(r"(https?://)?(www\.)?douyin\.com/video/(\d+)", text):
+        matched_info = {"url": https_url(matched.group(0)), "platform": "douyin"}
+
+    # https://www.douyin.com/user/MS4wLjABAAAAXgBuOEcyavDhrRBqnD8x7d4pj7RIL5QFRlLehCnem8couoAg8yXR-MGhUK0i4riF?modal_id=7451543857952492810
+    if matched := re.search(r"(https?://)?(www\.)?douyin\.com/user/(.*?)\?(.*?)modal_id=(\d+)", text):
+        matched_info = {"url": f"https://www.douyin.com/video/{matched.group(5)}", "platform": "douyin"}
+
+    # https://www.douyin.com/note/7458195074434846004
+    if matched := re.search(r"(https?://)?(www\.)?douyin\.com/note/(\d+)", text):
+        matched_info = {"url": f"https://www.douyin.com/note/{matched.group(3)}", "platform": "douyin"}
+
+    # https://www.tiktok.com/@baymermel/video/7460653893941267755\?_t\=ZS-8t8YbVWqv5k\&_r\=1
+    if matched := re.search(r"(https?://)?(www\.)?tiktok\.com/(.*?)/(\d+)", text):
+        matched_info = {"url": https_url(matched.group(0)), "platform": "tiktok"}
+
+    # https://www.instagram.com/p/C7P3jN8vmEN
+    # https://www.instagram.com/reel/DBBEGXpvwNF
+    if matched := re.search(r"(https?://)?(www\.)?instagram\.com/(:?|p|reel)/([^.。,,/\s]+)", text):
+        matched_info = {"post_type": matched.group(3), "post_id": matched.group(4), "url": https_url(matched.group(0)), "platform": "instagram"}
+    # https://www.instagram.com/yifaer_chen/p/DEzv9x-vzOn/
+    if matched := re.search(r"(https?://)?(www\.)?instagram\.com/\w+/(:?|p|reel)/([^.。,,/\s]+)", text):
+        matched_info = {"post_type": matched.group(3), "post_id": matched.group(4), "url": https_url(matched.group(0)), "platform": "instagram"}
+
+    # https://x.com/taylorswift13/status/1794805688696275131
+    # https://twitter.com/taylorswift13/status/1794805688696275131
+    # https://fixupx.com/taylorswift13/status/1794805688696275131
+    # https://fxtwitter.com/taylorswift13/status/1794805688696275131
+    if matched := re.search(r"(https?://)?(:?twitter|x|fxtwitter|fixupx)\.com\/(\w+)\/status/(\d+)", text):
+        domain = matched.group(2)
+        handle = matched.group(3)
+        post_id = matched.group(4)
+        url = f"https://x.com/{handle}/status/{post_id}"
+        matched_info = {"domain": domain, "handle": handle, "post_id": post_id, "url": url, "platform": "twitter"}
+
+    # https://weibo.com/1736562685/P6lhSjRnI
+    if matched := re.search(r"(https?://)?(www\.)?weibo\.com/(.*?)/(\w+)", text):
+        matched_info = {"post_id": matched.group(4), "url": https_url(matched.group(0)), "platform": "weibo"}
+    # https://m.weibo.cn/detail/5113333048938691
+    # https://m.weibo.cn/status/5113333048938691
+    if matched := re.search(r"(https?://)?m\.weibo\.cn/(:?detail|status)/(\w+)", text):
+        matched_info = {"post_id": matched.group(3), "url": https_url(matched.group(0)), "platform": "weibo"}
+    # https://video.weibo.com/show?fid=1034:5123779299311660
+    if matched := re.search(r"(https?://)?video\.weibo\.(:?com|cn)/show\?fid=(\d+):(\d+)", text):
+        matched_info = {"post_id": f"weibovideo{matched.group(3)}:{matched.group(4)}", "url": https_url(matched.group(0)), "platform": "weibo"}
+    # https://weibo.com/tv/show/1034:5123779299311660?from=old_pc_videoshow
+    if matched := re.search(r"(https?://)?(www\.)?weibo\.(:?com|cn)/tv/show/(\d+):(\d+)", text):
+        matched_info = {"post_id": f"weibovideo{matched.group(4)}:{matched.group(5)}", "url": https_url(matched.group(0)), "platform": "weibo"}
+
+    # http://xhslink.com/a/Z3VPXAReU1Y1
+    xhs_pattern = r"(https?://)?xhslink\.com/(\w?/?)([^,,.。?\s]+)"
+    if matched := re.search(xhs_pattern, text):
+        # Short links must be flattened to recover the post id and xsec_token.
+        flatten = await flatten_rediercts(https_url(matched.group(0)), pattern=xhs_pattern, proxy=PROXY.XHS)
+        base_url = flatten.split("?")[0]
+        post_id = Path(base_url).stem
+        queries = parse_qs(urlparse(flatten).query)
+        xsec_token = queries.get("xsec_token", [""])[0]
+        matched_info = {"url": https_url(matched.group(0)), "xhslink": https_url(matched.group(0)), "post_id": post_id, "xsec_token": xsec_token, "platform": "xiaohongshu"}
+    # https://www.xiaohongshu.com/explore/671a3dfe00000000240161db?xsec_token=ABY-b1JKuAlIm2dX1OSdIFHD7cQFHEdThv5aMyccvmbJo=
+    if matched := re.search(r"(https?://)?(www\.)?xiaohongshu\.com/([^.。,,\s]+)", text):
+        base_url = matched.group(0).split("?")[0]
+        post_id = Path(base_url).stem
+        queries = parse_qs(urlparse(matched.group(0)).query)
+        xsec_token = queries.get("xsec_token", [""])[0]
+        matched_info = {"post_id": post_id, "xsec_token": xsec_token, "url": f"https://www.xiaohongshu.com/explore/{post_id}?xsec_token={xsec_token}", "platform": "xiaohongshu"}
+
+    # https://www.bilibili.com/video/BV1TC411J7PK
+    if matched := re.search(r"(https?://)?(:?m\.|www\.)?bilibili\.com/video/([^,,.。\s]+)", str(text)):
+        base_url = matched.group(0).split("?")[0]
+        bvid = Path(base_url).stem
+        queries = parse_qs(urlparse(matched.group(0)).query)
+        pid = queries.get("p", ["1"])[0]
+        url = f"https://www.bilibili.com/video/{bvid}?p={pid}".removesuffix("?p=1")
+        matched_info = {"url": url, "bvid": bvid, "pid": pid, "platform": "bilibili"}
+
+    # https://www.youtube.com/watch?v=D6aE2E0RHTc
+    if matched := re.search(r"(https?://)?(:?m\.|www\.)?youtube\.com/watch([^,,.。\s]+)", str(text)):
+        queries = parse_qs(urlparse(matched.group(0)).query)
+        if vid := queries.get("v", [""])[0]:
+            matched_info = {"url": f"https://www.youtube.com/watch?v={vid}", "vid": vid, "platform": "youtube"}
+    # https://youtube.com/shorts/lFKHbluAlJw
+    if matched := re.search(r"(https?://)?(:?m\.|www\.)?youtube\.com/shorts/([^,,.。?\s]+)", str(text)):
+        vid = matched.group(3)
+        matched_info = {"url": f"https://www.youtube.com/watch?v={vid}", "vid": vid, "platform": "youtube"}
+
+    if matched_info["platform"]:
+        logger.success(f"Matched: {matched_info}")
+    return matched_info
+
+
+@cache.memoize(ttl=60)
+async def flatten_rediercts(texts: str | None = None, pattern: str | None = None, headers: dict | None = None, proxy: str | None = None, method: str = "HEAD") -> str:
+    """Expand known short links inside `texts` to their final redirect target.
+
+    NOTE(review): the function name (and `rediercted_url` below) misspell
+    "redirects"; renaming would require updating all call sites.
+
+    Args:
+        texts: Text possibly containing a short link; returned unchanged when no link is found.
+        pattern: Extra custom regex to recognize a short link.
+        headers: Request headers; defaults to the Telegram UA.
+        proxy: Optional proxy URL for the resolving request.
+        method: "HEAD" (follow redirects automatically) or "GET" (follow 3xx manually; forced for t.cn).
+
+    Returns:
+        str: `texts` with the short link replaced by the resolved URL, or unchanged on failure.
+    """
+    if not texts:
+        return ""
+
+    url = ""
+    # youtu.be
+    if matched := re.search(r"(https?://)?youtu\.be/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+    # v.douyin.com
+    if matched := re.search(r"(https?://)?v\.douyin\.com/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+    # vt.tiktok.com
+    if matched := re.search(r"(https?://)?vt\.tiktok\.com/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+    # b23.tv
+    if matched := re.search(r"(https?://)?b23\.tv/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+    # bili2233.cn
+    if matched := re.search(r"(https?://)?bili2233\.cn/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+    # t.co
+    if matched := re.search(r"(https?://)?t\.co/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+    # t.cn
+    if matched := re.search(r"(https?://)?t\.cn/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+        # t.cn does not resolve via HEAD; fall back to manual GET redirect-chasing.
+        method = "GET"
+    # bit.ly
+    if matched := re.search(r"(https?://)?bit\.ly/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+    # shorturl.at
+    if matched := re.search(r"(https?://)?shorturl\.at/([^.。,,?&/\s]+)", texts):
+        url = matched.group(0)
+
+    # custom pattern
+    if pattern and (matched := re.search(pattern, texts)):
+        url = matched.group(0)
+
+    if not url:
+        return texts
+    # parse redirect
+    headers = {"user-agent": UA.TELEGRAM} if headers is None else headers
+    try:
+        new_url = https_url(url)
+        # NOTE(review): `resp` is only bound for method in {"HEAD", "GET"}; any
+        # other caller-supplied method would hit a NameError at `resp.url` below.
+        if method == "HEAD":
+            async with AsyncClient(http2=True, proxy=proxy, follow_redirects=True, event_hooks={"request": [log_req], "response": [log_resp]}) as hx:
+                resp = await hx.head(new_url, headers=headers, timeout=3)
+        elif method == "GET":
+            status_code = 302
+            while str(status_code).startswith("3"):
+                async with AsyncClient(http2=True, proxy=proxy, follow_redirects=False, event_hooks={"request": [log_req], "response": [log_resp]}) as hx:
+                    resp = await hx.get(new_url, headers=headers, timeout=3)
+                    status_code = resp.status_code
+                    new_url = resp.headers.get("Location", new_url)
+    except Exception as e:
+        logger.error(f"Failed to parse redirect for {url}: {e}")
+        return texts
+    rediercted_url = str(resp.url)
+    logger.info(f"Flatten redirect: {url} -> {rediercted_url}")
+    return texts.replace(url, rediercted_url)
+
+
+if __name__ == "__main__":
+    # Ad-hoc manual smoke tests; uncomment a line to exercise a single helper.
+    import asyncio
+
+    # asyncio.run(match_social_media_link("https://www.instagram.com/yifaer_chen/p/DEzv9x-vzOn/"))
+    asyncio.run(flatten_rediercts("http://t.cn/A6ukIuVn"))
+    # asyncio.run(flatten_rediercts("shorturl.at/fuyrt"))
+    # asyncio.run(flatten_rediercts("https://v.douyin.com/CeiJfJMQG/"))
+    # asyncio.run(flatten_rediercts("https://t.co/Wwo3x69CQz"))
+    # res = asyncio.run(hx_req("https://httpbin.org/delay/5000"))
+    # asyncio.run(hx_req("https://httpbin.org/get", check_kv={"url": "https://httpbin.org/"}))
+    # resp = asyncio.run(hx_req("https://httpbin.org/get", check_kv={"headers": {"Accept-Language": "en-US,en;q=0.8"}}))
+    # resp = asyncio.run(hx_req("https://httpbin.org/status/404"))
+    # asyncio.run(download_file("https://httpbin.org/image/jpeg", suffix=".jpg"))
+    # asyncio.run(match_social_media_link("https://www.instagram.com/p/C7P3jN8vmEN"))
src/utils.py
@@ -0,0 +1,245 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import json
+import random
+import string
+from datetime import UTC, datetime
+from pathlib import Path
+from zoneinfo import ZoneInfo
+
+from bs4 import PageElement
+from loguru import logger
+from pyrogram.client import Client
+
+from config import DOWNLOAD_DIR, TEXT_LENGTH, TZ, cache
+
+
+def nowdt(tz: str = "UTC") -> datetime:
+    """Return the current timezone-aware datetime in the given IANA timezone."""
+    return datetime.now(ZoneInfo(tz))
+
+
+def smart_split(text: str, chars_per_string: int = TEXT_LENGTH) -> list[str]:
+    """Splits one string into multiple strings, with a maximum amount of `chars_per_string` characters per string.
+
+    Prefers to break at a newline, then at ". ", then at a space.
+    """
+
+    def _text_before_last(substr: str) -> str:
+        # Closes over `part` from the enclosing loop; keeps everything up to and
+        # including the last occurrence of `substr`.
+        return substr.join(part.split(substr)[:-1]) + substr
+
+    parts = []
+    while True:
+        if len(text) < chars_per_string:
+            parts.append(text)
+            return parts
+
+        part = text[:chars_per_string]
+
+        if "\n" in part:
+            part = _text_before_last("\n")
+        elif ". " in part:
+            part = _text_before_last(". ")
+        elif " " in part:
+            part = _text_before_last(" ")
+
+        parts.append(part)
+        text = text[len(part) :]
+
+
+def split_parts(first: int = 0, middle: int = 0, last: int = 0) -> dict:
+    """Split a list of items into three parts: first, middle, and last.
+
+    Useful for determining the number of media files in master / reply / quote posts.
+
+    Returns:
+        dict: "first"/"middle"/"last" mapped to a "P<i>-P<j>" range label,
+            a single "P<i>", "" for an empty part, or "所有" when one part covers everything.
+    """
+    total = first + middle + last
+    data = {
+        "first": f"P1-P{first}",
+        "middle": f"P{first + 1}-P{first + middle}",
+        "last": f"P{first + middle + 1}-P{first + middle + last}",
+    }
+    for k, v in data.items():
+        idx1, idx2 = (s.removeprefix("P") for s in v.split("-"))
+        if int(idx1) > int(idx2):
+            # Empty range — this part holds no items.
+            data[k] = ""
+        elif int(idx1) == int(idx2):
+            data[k] = f"P{idx1}"
+        if total > 1 and int(idx2) - int(idx1) == total - 1:
+            data[k] = "所有"
+    return data
+
+
+def rand_string(length: int = 48) -> str:
+    """Random alphanumeric string of the given length (not cryptographically secure)."""
+    return "".join(random.choices(string.ascii_letters + string.digits, k=length))
+
+
+def rand_number(length: int = 8) -> int:
+    """Random integer built from `length` random digits (leading zeros collapse)."""
+    return int("".join(random.choices(string.digits, k=length)))
+
+
+def true(value: str | int | bool | None) -> bool:
+    """Loosely interpret a config-style value as boolean truthy."""
+    return str(value).lower() in ["1", "y", "yes", "t", "true", "on"]
+
+
+def remove_none_values(d: dict | list) -> dict:
+    """Recursively removes keys with None values from a nested dictionary.
+
+    Cleans None values from lists and processes nested structures.
+    Note: empty cleaned sub-dicts/lists are dropped entirely, not kept as {} / [].
+
+    Args:
+        d (dict | list): The input dict or list
+
+    Returns:
+        dict | list: A cleaned dictionary or list with None values removed.
+    """
+    if isinstance(d, dict):  # If the input is a dictionary
+        cleaned_dict = {}
+        for key, value in d.items():
+            if isinstance(value, dict):
+                # Recursively clean nested dictionaries
+                nested_cleaned = remove_none_values(value)
+                if nested_cleaned:  # Only add non-empty cleaned dict
+                    cleaned_dict[key] = nested_cleaned
+            elif isinstance(value, list):
+                # Clean lists recursively
+                cleaned_list = [remove_none_values(item) if isinstance(item, dict | list) else item for item in value if item is not None]
+                if cleaned_list:  # Only add non-empty cleaned lists
+                    cleaned_dict[key] = cleaned_list
+            elif value is not None:
+                cleaned_dict[key] = value
+        return cleaned_dict
+    if isinstance(d, list):  # If the input is a list
+        return [remove_none_values(item) if isinstance(item, dict | list) else item for item in d if item is not None]  # type: ignore
+
+    return d  # Return non-dict, non-list values as is
+
+
+def soup_to_text(soup: PageElement) -> str:
+    """Recursively flatten a BeautifulSoup element to plain text.
+
+    <img> tags contribute their alt text and <br> tags become newlines;
+    everything else contributes its text content.
+    """
+    text = ""
+    if not hasattr(soup, "children"):
+        return soup.text
+    for tag in soup.children:  # type: ignore
+        if tag.name == "img" and "alt" in tag.attrs:
+            text += tag["alt"]
+        elif tag.name == "br":
+            text += "\n"
+        elif hasattr(tag, "children"):
+            text += soup_to_text(tag)
+        else:
+            text += tag.text
+    return text
+
+
+def stringfy(d: dict) -> dict:
+    """Convert dict values to string.
+
+    Container values are JSON-encoded; scalars go through unicode_to_ascii.
+    Non-dict input is returned unchanged (the annotation says dict, but the
+    code tolerates any input).
+
+    Args:
+        d (dict): The input dict
+
+    Returns:
+        dict: A dictionary with stringified values (or the input as-is when not a dict).
+    """
+    if isinstance(d, dict):  # If the input is a dictionary
+        stringfy_dict = {}
+        for key, value in d.items():
+            if isinstance(value, dict | list | set):
+                stringfy_dict[key] = json.dumps(value)
+            else:
+                stringfy_dict[key] = unicode_to_ascii(value)
+        return stringfy_dict
+    return d  # Return non-dict, non-list values as is
+
+
+def readable_time(seconds: str | float) -> str:
+    """Human readable time duration, e.g. 90 -> "1m30s", 90061 -> "1d1h1m1s"."""
+    seconds = float(seconds)
+    if seconds < 60:
+        return f"{seconds:.0f}s"
+    if seconds < 3600:
+        minutes, seconds = divmod(seconds, 60)
+        return f"{minutes:.0f}m{seconds:.0f}s"
+    if seconds < 86400:
+        hours, seconds = divmod(seconds, 3600)
+        minutes, seconds = divmod(seconds, 60)
+        return f"{hours:.0f}h{minutes:.0f}m{seconds:.0f}s"
+    days, seconds = divmod(seconds, 86400)
+    hours, seconds = divmod(seconds, 3600)
+    minutes, seconds = divmod(seconds, 60)
+    return f"{days:.0f}d{hours:.0f}h{minutes:.0f}m{seconds:.0f}s"
+
+
+def readable_size(num_bytes: str | float = 0, path: str | Path | None = None) -> str:
+    """Human readable file size.
+
+    When `path` is given, its on-disk size is used instead of `num_bytes`.
+    Units are deliberately capped at MB (see the commented-out full unit list),
+    so large values render as e.g. "2000.0 MB" rather than "2.0 GB".
+    """
+    num_bytes = Path(path).stat().st_size if path is not None else float(num_bytes)
+    # for unit in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]:
+    for unit in ["B", "KB"]:
+        if abs(num_bytes) < 1024:
+            return f"{num_bytes:.1f} {unit}"
+        num_bytes /= 1024
+    return f"{num_bytes:.1f} MB"
+
+
+def https_url(url: str) -> str:
+    """Normalize a URL to https:// with no leading/trailing slashes on the remainder."""
+    return "https://" + str(url).removeprefix("https://").removeprefix("http://").lstrip("/").rstrip("/")
+
+
+def plain_url(url: str) -> str:
+    """Strip the scheme and surrounding slashes, leaving only host/path."""
+    return str(url).removeprefix("https://").removeprefix("http://").lstrip("/").rstrip("/")
+
+
+def ts_to_dt(ts: str | float | None) -> datetime | None:
+    """Convert a Unix timestamp to a timezone-aware datetime in the configured TZ.
+
+    Returns None for falsy input (including timestamp 0) or on parse failure.
+    """
+    if not ts:
+        return None
+    try:
+        return datetime.fromtimestamp(float(ts), tz=UTC).astimezone(ZoneInfo(TZ))
+    except Exception as e:
+        logger.error(e)
+        return None
+
+
+@cache.memoize(ttl=0)
+async def i_am_bot(client: Client) -> bool:
+    """Check if this client is a bot or not.
+
+    Returns False when get_me() fails (e.g. the client is not connected).
+    """
+    try:
+        me = await client.get_me()
+    except Exception as e:
+        logger.error(e)
+        return False
+    return me.is_bot
+
+
+def unicode_to_ascii(text: str | float) -> str:
+    """Escape non-ASCII characters to \\uXXXX sequences; "" for falsy input."""
+    if not text:
+        return ""
+    return str(text).encode("unicode_escape").decode("ascii")
+
+
+def ascii_to_unicode(text: str) -> str:
+    """Inverse of unicode_to_ascii: decode \\uXXXX escape sequences back to text."""
+    if not text:
+        return ""
+    return bytes(str(text), "ascii").decode("unicode_escape")
+
+
+def cleanup_old_files(root: Path | str | None = None, duration: int = 7200) -> None:
+    """Clean up files older than duration seconds.
+
+    Only the top level of `root` is scanned (glob("*") is non-recursive);
+    a file is deleted only when atime, ctime AND mtime are all stale.
+    """
+    if root is None:
+        root = DOWNLOAD_DIR
+    root = Path(root).expanduser().resolve()
+    if not root.is_dir():
+        return
+    now = datetime.now(UTC).timestamp()
+    for path in root.glob("*"):
+        if not path.is_file():
+            continue
+        if all(now - x > duration for x in [path.stat().st_atime, path.stat().st_ctime, path.stat().st_mtime]):
+            logger.warning(f"Deleting old file: {path}")
+            path.unlink(missing_ok=True)
+
+
+if __name__ == "__main__":
+    # Ad-hoc manual smoke tests for the helpers above.
+    print(rand_string())
+    print(rand_number())
+    print(cleanup_old_files())
+    print(readable_size(2000 * 1024 * 1024))
+    print(unicode_to_ascii("你好"))
+    print(unicode_to_ascii(1.1))
+    print(unicode_to_ascii("test"))
+    print(ascii_to_unicode("1.1"))
+    print(ascii_to_unicode("test"))
.gitignore
@@ -0,0 +1,168 @@
+*.session_string
+dev/
+scripts/*
+!scripts/auth.py
+debug.py
+### Generated by gibo (https://github.com/simonwhitaker/gibo)
+### https://raw.github.com/github/gitignore/4488915eec0b3a45b5c63ead28f286819c0917de/Python.gitignore
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
.pre-commit-config.yaml
@@ -0,0 +1,159 @@
+---
+default_install_hook_types: [pre-commit, commit-msg]
+default_stages: [pre-commit, pre-push]
+
+repos:
+ - repo: local
+ hooks:
+ - id: gitleaks
+ name: 'gitleaks: Detect hardcoded secrets'
+ description: Detect hardcoded secrets using Gitleaks
+ language: system
+ entry: gitleaks protect --verbose --redact --staged
+ pass_filenames: false
+
+ - id: actionlint
+ name: 'actionlint: Lint GitHub Actions workflow files'
+ description: Runs actionlint to lint GitHub Actions workflow files
+ language: system
+ types: [yaml]
+ files: ^\.github/workflows/
+ entry: actionlint
+
+ - id: hadolint
+ name: 'hadolint: Lint Dockerfiles'
+ description: Runs hadolint to lint Dockerfiles
+ language: system
+ types: [dockerfile]
+ entry: hadolint
+ args: [--failure-threshold, warning, --info, DL3007, --info, DL3008, --info, DL3013, --info, DL3016, --info, DL3018, --info, DL3028]
+
+ - id: yamlfix
+ name: 'yamlfix: fix yaml file'
+ language: system
+ types: [yaml]
+ entry: yamlfix
+
+ - id: taplo
+ name: taplo-cli
+ description: A TOML toolkit written in Rust.
+ entry: taplo format
+ language: system
+ types: [toml]
+
+ - id: ruff
+ name: 'ruff: Python linter'
+ description: Run 'ruff' for extremely fast Python linting
+ language: system
+ minimum_pre_commit_version: 2.9.2
+ types_or: [python, pyi]
+ require_serial: true
+ entry: ruff check --fix --force-exclude --exit-zero
+
+ - id: ruff-format
+ name: 'ruff: Python formatter'
+      description: Run 'ruff format' for extremely fast Python formatting
+ language: system
+ minimum_pre_commit_version: 2.9.2
+ types_or: [python, pyi]
+ require_serial: true
+ entry: ruff format --force-exclude
+
+ - id: gitlint
+ name: gitlint
+ description: Checks your git commit messages for style.
+ language: system
+ entry: gitlint --staged
+ args: [--msg-filename]
+ stages: [commit-msg]
+
+ - repo: https://github.com/pre-commit/pre-commit-hooks.git
+ rev: v5.0.0
+ hooks:
+ - id: check-added-large-files
+ name: check for added large files
+ description: prevents giant files from being committed.
+
+ - id: check-ast
+ name: check python ast
+ description: simply checks whether the files parse as valid python.
+ types: [python]
+
+ - id: check-case-conflict
+ name: check for case conflicts
+ description: checks for files that would conflict in case-insensitive filesystems.
+ - id: check-docstring-first
+ name: check docstring is first
+ description: checks a common error of defining a docstring after code.
+ types: [python]
+
+ - id: check-executables-have-shebangs
+ name: check that executables have shebangs
+ description: ensures that (non-binary) executables have a shebang.
+ types: [text, executable]
+ stages: [pre-commit, pre-push, manual]
+
+ - id: check-merge-conflict
+ name: check for merge conflicts
+ description: checks for files that contain merge conflict strings.
+ types: [text]
+
+ - id: check-symlinks
+ name: check for broken symlinks
+ description: checks for symlinks which do not point to anything.
+ types: [symlink]
+
+ - id: check-vcs-permalinks
+ name: check vcs permalinks
+ description: ensures that links to vcs websites are permalinks.
+ types: [text]
+
+ - id: check-xml
+ name: check xml
+ description: checks xml files for parseable syntax.
+ types: [xml]
+
+ - id: check-yaml
+ name: check yaml
+ description: checks yaml files for parseable syntax.
+ types: [yaml]
+
+ - id: debug-statements
+ name: debug statements (python)
+ description: checks for debugger imports and py37+ `breakpoint()` calls in python source.
+ types: [python]
+
+ - id: detect-private-key
+ name: detect private key
+ description: detects the presence of private keys.
+ types: [text]
+
+ - id: end-of-file-fixer
+ name: fix end of files
+ description: ensures that a file is either empty, or ends with one newline.
+ types: [text]
+ stages: [pre-commit, pre-push, manual]
+
+ - id: mixed-line-ending
+ name: mixed line ending
+ description: replaces or checks mixed line ending.
+ args: [--fix=no]
+ types: [text]
+
+ - id: name-tests-test
+ name: python tests naming
+ description: this verifies that test files are named correctly.
+ files: (^|/)tests/.+\.py$
+
+ - id: requirements-txt-fixer
+ name: fix requirements.txt
+ description: sorts entries in requirements.txt.
+ files: requirements.*\.txt$
+
+ - id: trailing-whitespace
+ name: trim trailing whitespace
+ description: trims trailing whitespace.
+ types: [python, json, yaml, toml]
+ stages: [pre-commit, pre-push, manual]
+
+# vim: set filetype=yaml :
pyproject.toml
@@ -0,0 +1,84 @@
+[project]
+dependencies = [
+ "aioboto3>=13.2.0",
+ "apscheduler>=3.11.0,<4.0.0",
+ "beautifulsoup4>=4.12.3",
+ "cacheout>=0.16.0",
+ "feedparser>=6.0.11",
+ "httpx[http2,socks]>=0.28.1",
+ "loguru>=0.7.2",
+ "pillow-heif>=0.18.0",
+ "pillow>=10.4.0",
+ "puremagic>=1.28",
+ "pyrotgfork>=2.2.4",
+ "pysocks>=1.7.1",
+ "pytgcrypto>=1.2.9.2",
+ "python-ffmpeg>=2.0.12",
+ "uvloop>=0.21.0",
+ "youtube-transcript-api>=0.6.3",
+    "yt-dlp>=2025.1.12rc0",
+]
+name = "bennybot"
+requires-python = ">=3.11"
+version = "0.1.0"
+
+[dependency-groups]
+dev = ["ipdb>=0.13.13"]
+
+[tool.uv]
+environments = ["sys_platform == 'darwin'", "sys_platform == 'linux'"]
+
+[tool.ruff]
+cache-dir = "~/.cache/ruff"
+force-exclude = true
+indent-width = 4
+line-length = 200
+output-format = "grouped"
+respect-gitignore = true
+show-fixes = true
+target-version = "py311"
+
+[tool.ruff.format]
+indent-style = "space"
+line-ending = "lf"
+quote-style = "double"
+
+[tool.ruff.lint]
+ignore = [
+ "ANN",
+ "PTH",
+ "D417",
+ "BLE001",
+ "T20",
+ "ERA",
+ "C90",
+ "D1",
+ "INP001",
+ "E203",
+ "E266",
+ "E501",
+ "E731",
+ "E741",
+ "N806",
+ "EXE001",
+ "PD901",
+ "PGH003",
+ "S101",
+ "S301",
+ "UP009",
+ "COM812",
+ "ISC001",
+ "PLR2004",
+ "S311",
+]
+select = ["ALL"]
+
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[tool.ruff.lint.pylint]
+max-args = 999
+max-branches = 999
+max-locals = 999
+max-returns = 999
+max-statements = 999
uv.lock
@@ -0,0 +1,1186 @@
+version = 1
+requires-python = ">=3.11"
+resolution-markers = [
+ "sys_platform == 'darwin'",
+ "sys_platform == 'linux'",
+]
+supported-markers = [
+ "sys_platform == 'darwin'",
+ "sys_platform == 'linux'",
+]
+
+[[package]]
+name = "aioboto3"
+version = "13.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiobotocore", extra = ["boto3"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "aiofiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e2/29/4684abe9c9f60620576292fe2fcc26da618e20185f0ec3c2cb8d941e5aa6/aioboto3-13.3.0.tar.gz", hash = "sha256:74c2ee3018dcf5714b92bbbe4ce6b78b6dde1e1804de42c784555e40634f8872", size = 32511 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f2/f8/66d12f3d0b7f6df3e3a4797c223b3d750ef88af2d6002f56bf2d2a7810d1/aioboto3-13.3.0-py3-none-any.whl", hash = "sha256:a97d58fa84dc91030be7820724daea59a1603987b535a1d15613eff78c3b3781", size = 34755 },
+]
+
+[[package]]
+name = "aiobotocore"
+version = "2.16.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "aioitertools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "botocore", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/dc/5a44e1cd5e206b11abf67754d47dabcde4f927bb281b93dabdbf77eba3fd/aiobotocore-2.16.0.tar.gz", hash = "sha256:6d6721961a81570e9b920b98778d95eec3d52a9f83b7844c6c5cfdbf2a2d6a11", size = 107433 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c5/63/c03db9dafb0b3b8a90a1714a1949bc1e7db1d0e2c4062400901da35678fe/aiobotocore-2.16.0-py3-none-any.whl", hash = "sha256:eb3641a7b9c51113adbc33a029441de6201ebb026c64ff2e149c7fa802c9abfc", size = 77781 },
+]
+
+[package.optional-dependencies]
+boto3 = [
+ { name = "boto3", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+
+[[package]]
+name = "aiofiles"
+version = "24.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 },
+]
+
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.4.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7f/55/e4373e888fdacb15563ef6fa9fa8c8252476ea071e96fb46defac9f18bf2/aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745", size = 21977 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b9/74/fbb6559de3607b3300b9be3cc64e97548d55678e44623db17820dbd20002/aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8", size = 14756 },
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.11.11"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohappyeyeballs", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "aiosignal", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "attrs", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "frozenlist", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "multidict", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "propcache", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "yarl", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fe/ed/f26db39d29cd3cb2f5a3374304c713fe5ab5a0e4c8ee25a0c45cc6adf844/aiohttp-3.11.11.tar.gz", hash = "sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e", size = 7669618 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/34/ae/e8806a9f054e15f1d18b04db75c23ec38ec954a10c0a68d3bd275d7e8be3/aiohttp-3.11.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76", size = 708624 },
+ { url = "https://files.pythonhosted.org/packages/c7/e0/313ef1a333fb4d58d0c55a6acb3cd772f5d7756604b455181049e222c020/aiohttp-3.11.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538", size = 468507 },
+ { url = "https://files.pythonhosted.org/packages/a9/60/03455476bf1f467e5b4a32a465c450548b2ce724eec39d69f737191f936a/aiohttp-3.11.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204", size = 455571 },
+ { url = "https://files.pythonhosted.org/packages/be/f9/469588603bd75bf02c8ffb8c8a0d4b217eed446b49d4a767684685aa33fd/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9", size = 1685694 },
+ { url = "https://files.pythonhosted.org/packages/88/b9/1b7fa43faf6c8616fa94c568dc1309ffee2b6b68b04ac268e5d64b738688/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03", size = 1743660 },
+ { url = "https://files.pythonhosted.org/packages/2a/8b/0248d19dbb16b67222e75f6aecedd014656225733157e5afaf6a6a07e2e8/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287", size = 1785421 },
+ { url = "https://files.pythonhosted.org/packages/c4/11/f478e071815a46ca0a5ae974651ff0c7a35898c55063305a896e58aa1247/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e", size = 1675145 },
+ { url = "https://files.pythonhosted.org/packages/26/5d/284d182fecbb5075ae10153ff7374f57314c93a8681666600e3a9e09c505/aiohttp-3.11.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665", size = 1619804 },
+ { url = "https://files.pythonhosted.org/packages/1b/78/980064c2ad685c64ce0e8aeeb7ef1e53f43c5b005edcd7d32e60809c4992/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b", size = 1654007 },
+ { url = "https://files.pythonhosted.org/packages/21/8d/9e658d63b1438ad42b96f94da227f2e2c1d5c6001c9e8ffcc0bfb22e9105/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34", size = 1650022 },
+ { url = "https://files.pythonhosted.org/packages/85/fd/a032bf7f2755c2df4f87f9effa34ccc1ef5cea465377dbaeef93bb56bbd6/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d", size = 1732899 },
+ { url = "https://files.pythonhosted.org/packages/c5/0c/c2b85fde167dd440c7ba50af2aac20b5a5666392b174df54c00f888c5a75/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2", size = 1755142 },
+ { url = "https://files.pythonhosted.org/packages/bc/78/91ae1a3b3b3bed8b893c5d69c07023e151b1c95d79544ad04cf68f596c2f/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773", size = 1692736 },
+ { url = "https://files.pythonhosted.org/packages/69/cf/4bda538c502f9738d6b95ada11603c05ec260807246e15e869fc3ec5de97/aiohttp-3.11.11-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886", size = 704666 },
+ { url = "https://files.pythonhosted.org/packages/46/7b/87fcef2cad2fad420ca77bef981e815df6904047d0a1bd6aeded1b0d1d66/aiohttp-3.11.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2", size = 464057 },
+ { url = "https://files.pythonhosted.org/packages/5a/a6/789e1f17a1b6f4a38939fbc39d29e1d960d5f89f73d0629a939410171bc0/aiohttp-3.11.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c", size = 455996 },
+ { url = "https://files.pythonhosted.org/packages/b7/dd/485061fbfef33165ce7320db36e530cd7116ee1098e9c3774d15a732b3fd/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a", size = 1682367 },
+ { url = "https://files.pythonhosted.org/packages/e9/d7/9ec5b3ea9ae215c311d88b2093e8da17e67b8856673e4166c994e117ee3e/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231", size = 1736989 },
+ { url = "https://files.pythonhosted.org/packages/d6/fb/ea94927f7bfe1d86178c9d3e0a8c54f651a0a655214cce930b3c679b8f64/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e", size = 1793265 },
+ { url = "https://files.pythonhosted.org/packages/40/7f/6de218084f9b653026bd7063cd8045123a7ba90c25176465f266976d8c82/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8", size = 1691841 },
+ { url = "https://files.pythonhosted.org/packages/77/e2/992f43d87831cbddb6b09c57ab55499332f60ad6fdbf438ff4419c2925fc/aiohttp-3.11.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8", size = 1619317 },
+ { url = "https://files.pythonhosted.org/packages/96/74/879b23cdd816db4133325a201287c95bef4ce669acde37f8f1b8669e1755/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c", size = 1641416 },
+ { url = "https://files.pythonhosted.org/packages/30/98/b123f6b15d87c54e58fd7ae3558ff594f898d7f30a90899718f3215ad328/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab", size = 1646514 },
+ { url = "https://files.pythonhosted.org/packages/d7/38/257fda3dc99d6978ab943141d5165ec74fd4b4164baa15e9c66fa21da86b/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da", size = 1702095 },
+ { url = "https://files.pythonhosted.org/packages/0c/f4/ddab089053f9fb96654df5505c0a69bde093214b3c3454f6bfdb1845f558/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853", size = 1734611 },
+ { url = "https://files.pythonhosted.org/packages/c3/d6/f30b2bc520c38c8aa4657ed953186e535ae84abe55c08d0f70acd72ff577/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e", size = 1694576 },
+ { url = "https://files.pythonhosted.org/packages/49/d1/d8af164f400bad432b63e1ac857d74a09311a8334b0481f2f64b158b50eb/aiohttp-3.11.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9", size = 697982 },
+ { url = "https://files.pythonhosted.org/packages/92/d1/faad3bf9fa4bfd26b95c69fc2e98937d52b1ff44f7e28131855a98d23a17/aiohttp-3.11.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194", size = 460662 },
+ { url = "https://files.pythonhosted.org/packages/db/61/0d71cc66d63909dabc4590f74eba71f91873a77ea52424401c2498d47536/aiohttp-3.11.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f", size = 452950 },
+ { url = "https://files.pythonhosted.org/packages/07/db/6d04bc7fd92784900704e16b745484ef45b77bd04e25f58f6febaadf7983/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104", size = 1665178 },
+ { url = "https://files.pythonhosted.org/packages/54/5c/e95ade9ae29f375411884d9fd98e50535bf9fe316c9feb0f30cd2ac8f508/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff", size = 1717939 },
+ { url = "https://files.pythonhosted.org/packages/6f/1c/1e7d5c5daea9e409ed70f7986001b8c9e3a49a50b28404498d30860edab6/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3", size = 1775125 },
+ { url = "https://files.pythonhosted.org/packages/5d/66/890987e44f7d2f33a130e37e01a164168e6aff06fce15217b6eaf14df4f6/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1", size = 1677176 },
+ { url = "https://files.pythonhosted.org/packages/8f/dc/e2ba57d7a52df6cdf1072fd5fa9c6301a68e1cd67415f189805d3eeb031d/aiohttp-3.11.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4", size = 1603192 },
+ { url = "https://files.pythonhosted.org/packages/6c/9e/8d08a57de79ca3a358da449405555e668f2c8871a7777ecd2f0e3912c272/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d", size = 1618296 },
+ { url = "https://files.pythonhosted.org/packages/56/51/89822e3ec72db352c32e7fc1c690370e24e231837d9abd056490f3a49886/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87", size = 1616524 },
+ { url = "https://files.pythonhosted.org/packages/2c/fa/e2e6d9398f462ffaa095e84717c1732916a57f1814502929ed67dd7568ef/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2", size = 1685471 },
+ { url = "https://files.pythonhosted.org/packages/ae/5f/6bb976e619ca28a052e2c0ca7b0251ccd893f93d7c24a96abea38e332bf6/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12", size = 1715312 },
+ { url = "https://files.pythonhosted.org/packages/79/c1/756a7e65aa087c7fac724d6c4c038f2faaa2a42fe56dbc1dd62a33ca7213/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5", size = 1672783 },
+]
+
+[[package]]
+name = "aioitertools"
+version = "0.12.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/06/de/38491a84ab323b47c7f86e94d2830e748780525f7a10c8600b67ead7e9ea/aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b", size = 19369 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/85/13/58b70a580de00893223d61de8fea167877a3aed97d4a5e1405c9159ef925/aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796", size = 24345 },
+]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "frozenlist", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597 },
+]
+
+[[package]]
+name = "anyio"
+version = "4.8.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux')" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 },
+]
+
+[[package]]
+name = "apscheduler"
+version = "3.11.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "tzlocal", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4e/00/6d6814ddc19be2df62c8c898c4df6b5b1914f3bd024b780028caa392d186/apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133", size = 107347 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/ae/9a053dd9229c0fde6b1f1f33f609ccff1ee79ddda364c756a924c6d8563b/APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da", size = 64004 },
+]
+
+[[package]]
+name = "asttokens"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918 },
+]
+
+[[package]]
+name = "attrs"
+version = "24.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/48/c8/6260f8ccc11f0917360fc0da435c5c9c7504e3db174d5a12a1494887b045/attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff", size = 805984 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397 },
+]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "soupsieve", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 },
+]
+
+[[package]]
+name = "bennybot"
+version = "0.1.0"
+source = { virtual = "." }
+dependencies = [
+ { name = "aioboto3", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "apscheduler", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "beautifulsoup4", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "cacheout", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "feedparser", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "httpx", extra = ["http2", "socks"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "loguru", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pillow", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pillow-heif", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "puremagic", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pyrotgfork", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pysocks", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pytgcrypto", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "python-ffmpeg", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "uvloop", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "youtube-transcript-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "yt-dlp", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+
+[package.dev-dependencies]
+dev = [
+ { name = "ipdb", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "aioboto3", specifier = ">=13.2.0" },
+ { name = "apscheduler", specifier = ">=3.11.0,<4.0.0" },
+ { name = "beautifulsoup4", specifier = ">=4.12.3" },
+ { name = "cacheout", specifier = ">=0.16.0" },
+ { name = "feedparser", specifier = ">=6.0.11" },
+ { name = "httpx", extras = ["http2", "socks"], specifier = ">=0.28.1" },
+ { name = "loguru", specifier = ">=0.7.2" },
+ { name = "pillow", specifier = ">=10.4.0" },
+ { name = "pillow-heif", specifier = ">=0.18.0" },
+ { name = "puremagic", specifier = ">=1.28" },
+ { name = "pyrotgfork", specifier = ">=2.2.4" },
+ { name = "pysocks", specifier = ">=1.7.1" },
+ { name = "pytgcrypto", specifier = ">=1.2.9.2" },
+ { name = "python-ffmpeg", specifier = ">=2.0.12" },
+ { name = "uvloop", specifier = ">=0.21.0" },
+ { name = "youtube-transcript-api", specifier = ">=0.6.3" },
+ { name = "yt-dlp", specifier = ">=2025.1.12rc0" },
+]
+
+[package.metadata.requires-dev]
+dev = [{ name = "ipdb", specifier = ">=0.13.13" }]
+
+[[package]]
+name = "boto3"
+version = "1.35.81"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "botocore", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "jmespath", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "s3transfer", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d9/a5/8e610a7c230326b6a766758ce290233a8d0ec88bef4f5afe09e2313d2def/boto3-1.35.81.tar.gz", hash = "sha256:d2e95fa06f095b8e0c545dd678c6269d253809b2997c30f5ce8a956c410b4e86", size = 111013 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b4/db/e6bf2a34d7e8440800fcd11f2b42efd4ba18cce56d5a213bb93bd62aaa0e/boto3-1.35.81-py3-none-any.whl", hash = "sha256:742941b2424c0223d2d94a08c3485462fa7c58d816b62ca80f08e555243acee1", size = 139178 },
+]
+
+[[package]]
+name = "botocore"
+version = "1.35.81"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jmespath", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/a8/b44d94c14ee4eb13db6dc549269c79199b43bddd70982e192aefd6ca6279/botocore-1.35.81.tar.gz", hash = "sha256:564c2478e50179e0b766e6a87e5e0cdd35e1bc37eb375c1cf15511f5dd13600d", size = 13460205 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1a/ad/00dfec368dd4e957063ed1126b5511238b0900c1014dfe539af93fc0ac29/botocore-1.35.81-py3-none-any.whl", hash = "sha256:a7b13bbd959bf2d6f38f681676aab408be01974c46802ab997617b51399239f7", size = 13265330 },
+]
+
+[[package]]
+name = "cacheout"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d1/60/ed4c4b27b2131a0b2cc461789be2cf06866644ca462cb34a5d8fca114c15/cacheout-0.16.0.tar.gz", hash = "sha256:ee264897cbaa089ae5f406da11952697d99fa7f3583cfab69fe8a00ff8e1952d", size = 42050 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/72/14/a89bb55107b8a9b586c8878f47d0b7750c3688c209f05f915e70de74880d/cacheout-0.16.0-py3-none-any.whl", hash = "sha256:1a52d9aa8b1e9720d8453b061348f15795578231f9ec4ad376fec49e717d0ed8", size = 21837 },
+]
+
+[[package]]
+name = "certifi"
+version = "2024.12.14"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 },
+ { url = "https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 },
+ { url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 },
+ { url = "https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 },
+ { url = "https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 },
+ { url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 },
+ { url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 },
+ { url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 },
+ { url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 },
+ { url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 },
+ { url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 },
+ { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 },
+ { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 },
+ { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 },
+ { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 },
+ { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 },
+ { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 },
+ { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 },
+ { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 },
+ { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 },
+ { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 },
+ { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 },
+ { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 },
+ { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 },
+ { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 },
+ { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 },
+ { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 },
+ { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 },
+ { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 },
+ { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 },
+ { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 },
+ { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 },
+ { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 },
+ { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 },
+]
+
+[[package]]
+name = "decorator"
+version = "5.1.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 },
+]
+
+[[package]]
+name = "defusedxml"
+version = "0.7.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 },
+]
+
+[[package]]
+name = "executing"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/7d45f492c2c4a0e8e0fad57d081a7c8a0286cdd86372b070cca1ec0caa1e/executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab", size = 977485 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b5/fd/afcd0496feca3276f509df3dbd5dae726fcc756f1a08d9e25abe1733f962/executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf", size = 25805 },
+]
+
+[[package]]
+name = "feedparser"
+version = "6.0.11"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "sgmllib3k", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ff/aa/7af346ebeb42a76bf108027fe7f3328bb4e57a3a96e53e21fd9ef9dd6dd0/feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5", size = 286197 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7c/d4/8c31aad9cc18f451c49f7f9cfb5799dadffc88177f7917bc90a66459b1d7/feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45", size = 81343 },
+]
+
+[[package]]
+name = "frozenlist"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8f/ed/0f4cec13a93c02c47ec32d81d11c0c1efbadf4a471e3f3ce7cad366cbbd3/frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817", size = 39930 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/43/0bed28bf5eb1c9e4301003b74453b8e7aa85fb293b31dde352aac528dafc/frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30", size = 94987 },
+ { url = "https://files.pythonhosted.org/packages/bb/bf/b74e38f09a246e8abbe1e90eb65787ed745ccab6eaa58b9c9308e052323d/frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5", size = 54584 },
+ { url = "https://files.pythonhosted.org/packages/2c/31/ab01375682f14f7613a1ade30149f684c84f9b8823a4391ed950c8285656/frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778", size = 52499 },
+ { url = "https://files.pythonhosted.org/packages/98/a8/d0ac0b9276e1404f58fec3ab6e90a4f76b778a49373ccaf6a563f100dfbc/frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a", size = 276357 },
+ { url = "https://files.pythonhosted.org/packages/ad/c9/c7761084fa822f07dac38ac29f841d4587570dd211e2262544aa0b791d21/frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869", size = 287516 },
+ { url = "https://files.pythonhosted.org/packages/a1/ff/cd7479e703c39df7bdab431798cef89dc75010d8aa0ca2514c5b9321db27/frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d", size = 283131 },
+ { url = "https://files.pythonhosted.org/packages/59/a0/370941beb47d237eca4fbf27e4e91389fd68699e6f4b0ebcc95da463835b/frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45", size = 261320 },
+ { url = "https://files.pythonhosted.org/packages/b8/5f/c10123e8d64867bc9b4f2f510a32042a306ff5fcd7e2e09e5ae5100ee333/frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d", size = 274877 },
+ { url = "https://files.pythonhosted.org/packages/fa/79/38c505601ae29d4348f21706c5d89755ceded02a745016ba2f58bd5f1ea6/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3", size = 269592 },
+ { url = "https://files.pythonhosted.org/packages/19/e2/39f3a53191b8204ba9f0bb574b926b73dd2efba2a2b9d2d730517e8f7622/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a", size = 265934 },
+ { url = "https://files.pythonhosted.org/packages/d5/c9/3075eb7f7f3a91f1a6b00284af4de0a65a9ae47084930916f5528144c9dd/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9", size = 283859 },
+ { url = "https://files.pythonhosted.org/packages/05/f5/549f44d314c29408b962fa2b0e69a1a67c59379fb143b92a0a065ffd1f0f/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2", size = 287560 },
+ { url = "https://files.pythonhosted.org/packages/9d/f8/cb09b3c24a3eac02c4c07a9558e11e9e244fb02bf62c85ac2106d1eb0c0b/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf", size = 277150 },
+ { url = "https://files.pythonhosted.org/packages/79/73/fa6d1a96ab7fd6e6d1c3500700963eab46813847f01ef0ccbaa726181dd5/frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21", size = 94026 },
+ { url = "https://files.pythonhosted.org/packages/ab/04/ea8bf62c8868b8eada363f20ff1b647cf2e93377a7b284d36062d21d81d1/frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d", size = 54150 },
+ { url = "https://files.pythonhosted.org/packages/d0/9a/8e479b482a6f2070b26bda572c5e6889bb3ba48977e81beea35b5ae13ece/frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e", size = 51927 },
+ { url = "https://files.pythonhosted.org/packages/e3/12/2aad87deb08a4e7ccfb33600871bbe8f0e08cb6d8224371387f3303654d7/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a", size = 282647 },
+ { url = "https://files.pythonhosted.org/packages/77/f2/07f06b05d8a427ea0060a9cef6e63405ea9e0d761846b95ef3fb3be57111/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a", size = 289052 },
+ { url = "https://files.pythonhosted.org/packages/bd/9f/8bf45a2f1cd4aa401acd271b077989c9267ae8463e7c8b1eb0d3f561b65e/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee", size = 291719 },
+ { url = "https://files.pythonhosted.org/packages/41/d1/1f20fd05a6c42d3868709b7604c9f15538a29e4f734c694c6bcfc3d3b935/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6", size = 267433 },
+ { url = "https://files.pythonhosted.org/packages/af/f2/64b73a9bb86f5a89fb55450e97cd5c1f84a862d4ff90d9fd1a73ab0f64a5/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e", size = 283591 },
+ { url = "https://files.pythonhosted.org/packages/29/e2/ffbb1fae55a791fd6c2938dd9ea779509c977435ba3940b9f2e8dc9d5316/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9", size = 273249 },
+ { url = "https://files.pythonhosted.org/packages/2e/6e/008136a30798bb63618a114b9321b5971172a5abddff44a100c7edc5ad4f/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039", size = 271075 },
+ { url = "https://files.pythonhosted.org/packages/ae/f0/4e71e54a026b06724cec9b6c54f0b13a4e9e298cc8db0f82ec70e151f5ce/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784", size = 285398 },
+ { url = "https://files.pythonhosted.org/packages/4d/36/70ec246851478b1c0b59f11ef8ade9c482ff447c1363c2bd5fad45098b12/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631", size = 294445 },
+ { url = "https://files.pythonhosted.org/packages/37/e0/47f87544055b3349b633a03c4d94b405956cf2437f4ab46d0928b74b7526/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f", size = 280569 },
+ { url = "https://files.pythonhosted.org/packages/da/3b/915f0bca8a7ea04483622e84a9bd90033bab54bdf485479556c74fd5eaf5/frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953", size = 91538 },
+ { url = "https://files.pythonhosted.org/packages/c7/d1/a7c98aad7e44afe5306a2b068434a5830f1470675f0e715abb86eb15f15b/frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0", size = 52849 },
+ { url = "https://files.pythonhosted.org/packages/3a/c8/76f23bf9ab15d5f760eb48701909645f686f9c64fbb8982674c241fbef14/frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2", size = 50583 },
+ { url = "https://files.pythonhosted.org/packages/1f/22/462a3dd093d11df623179d7754a3b3269de3b42de2808cddef50ee0f4f48/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f", size = 265636 },
+ { url = "https://files.pythonhosted.org/packages/80/cf/e075e407fc2ae7328155a1cd7e22f932773c8073c1fc78016607d19cc3e5/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608", size = 270214 },
+ { url = "https://files.pythonhosted.org/packages/a1/58/0642d061d5de779f39c50cbb00df49682832923f3d2ebfb0fedf02d05f7f/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b", size = 273905 },
+ { url = "https://files.pythonhosted.org/packages/ab/66/3fe0f5f8f2add5b4ab7aa4e199f767fd3b55da26e3ca4ce2cc36698e50c4/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840", size = 250542 },
+ { url = "https://files.pythonhosted.org/packages/f6/b8/260791bde9198c87a465224e0e2bb62c4e716f5d198fc3a1dacc4895dbd1/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439", size = 267026 },
+ { url = "https://files.pythonhosted.org/packages/2e/a4/3d24f88c527f08f8d44ade24eaee83b2627793fa62fa07cbb7ff7a2f7d42/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de", size = 257690 },
+ { url = "https://files.pythonhosted.org/packages/de/9a/d311d660420b2beeff3459b6626f2ab4fb236d07afbdac034a4371fe696e/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641", size = 253893 },
+ { url = "https://files.pythonhosted.org/packages/c6/23/e491aadc25b56eabd0f18c53bb19f3cdc6de30b2129ee0bc39cd387cd560/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e", size = 267006 },
+ { url = "https://files.pythonhosted.org/packages/08/c4/ab918ce636a35fb974d13d666dcbe03969592aeca6c3ab3835acff01f79c/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9", size = 276157 },
+ { url = "https://files.pythonhosted.org/packages/c0/29/3b7a0bbbbe5a34833ba26f686aabfe982924adbdcafdc294a7a129c31688/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03", size = 264642 },
+ { url = "https://files.pythonhosted.org/packages/c6/c8/a5be5b7550c10858fcf9b0ea054baccab474da77d37f1e828ce043a3a5d4/frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3", size = 11901 },
+]
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 },
+]
+
+[[package]]
+name = "h2"
+version = "4.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "hpack", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "hyperframe", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2a/32/fec683ddd10629ea4ea46d206752a95a2d8a48c22521edd70b142488efe1/h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb", size = 2145593 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/e5/db6d438da759efbb488c4f3fbdab7764492ff3c3f953132efa6b9f0e9e53/h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d", size = 57488 },
+]
+
+[[package]]
+name = "hpack"
+version = "4.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3e/9b/fda93fb4d957db19b0f6b370e79d586b3e8528b20252c729c476a2c02954/hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095", size = 49117 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d5/34/e8b383f35b77c402d28563d2b8f83159319b509bc5f760b15d60b0abf165/hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c", size = 32611 },
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "h11", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 },
+]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "httpcore", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 },
+]
+
+[package.optional-dependencies]
+http2 = [
+ { name = "h2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+socks = [
+ { name = "socksio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+
+[[package]]
+name = "hyperframe"
+version = "6.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5a/2a/4747bff0a17f7281abe73e955d60d80aae537a5d203f417fa1c2e7578ebb/hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914", size = 25008 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d7/de/85a784bcc4a3779d1753a7ec2dee5de90e18c7bcf402e71b51fcf150b129/hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15", size = 12389 },
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
+]
+
+[[package]]
+name = "ipdb"
+version = "0.13.13"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "decorator", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "ipython", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/1b/7e07e7b752017f7693a0f4d41c13e5ca29ce8cbcfdcc1fd6c4ad8c0a27a0/ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726", size = 17042 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0c/4c/b075da0092003d9a55cf2ecc1cae9384a1ca4f650d51b00fc59875fe76f6/ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4", size = 12130 },
+]
+
+[[package]]
+name = "ipython"
+version = "8.31.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "decorator", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "jedi", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "matplotlib-inline", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pexpect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "prompt-toolkit", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "stack-data", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "typing-extensions", marker = "(python_full_version < '3.12' and sys_platform == 'darwin') or (python_full_version < '3.12' and sys_platform == 'linux')" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/01/35/6f90fdddff7a08b7b715fccbd2427b5212c9525cd043d26fdc45bee0708d/ipython-8.31.0.tar.gz", hash = "sha256:b6a2274606bec6166405ff05e54932ed6e5cfecaca1fc05f2cacde7bb074d70b", size = 5501011 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/60/d0feb6b6d9fe4ab89fe8fe5b47cbf6cd936bfd9f1e7ffa9d0015425aeed6/ipython-8.31.0-py3-none-any.whl", hash = "sha256:46ec58f8d3d076a61d128fe517a51eb730e3aaf0c184ea8c17d16e366660c6a6", size = 821583 },
+]
+
+[[package]]
+name = "jedi"
+version = "0.19.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "parso", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278 },
+]
+
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256 },
+]
+
+[[package]]
+name = "loguru"
+version = "0.7.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595 },
+]
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.1.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 },
+]
+
+[[package]]
+name = "multidict"
+version = "6.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/be/504b89a5e9ca731cd47487e91c469064f8ae5af93b7259758dcfc2b9c848/multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a", size = 64002 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/93/13/df3505a46d0cd08428e4c8169a196131d1b0c4b515c3649829258843dde6/multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6", size = 48570 },
+ { url = "https://files.pythonhosted.org/packages/f0/e1/a215908bfae1343cdb72f805366592bdd60487b4232d039c437fe8f5013d/multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156", size = 29316 },
+ { url = "https://files.pythonhosted.org/packages/70/0f/6dc70ddf5d442702ed74f298d69977f904960b82368532c88e854b79f72b/multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb", size = 29640 },
+ { url = "https://files.pythonhosted.org/packages/d8/6d/9c87b73a13d1cdea30b321ef4b3824449866bd7f7127eceed066ccb9b9ff/multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b", size = 131067 },
+ { url = "https://files.pythonhosted.org/packages/cc/1e/1b34154fef373371fd6c65125b3d42ff5f56c7ccc6bfff91b9b3c60ae9e0/multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72", size = 138507 },
+ { url = "https://files.pythonhosted.org/packages/fb/e0/0bc6b2bac6e461822b5f575eae85da6aae76d0e2a79b6665d6206b8e2e48/multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304", size = 133905 },
+ { url = "https://files.pythonhosted.org/packages/ba/af/73d13b918071ff9b2205fcf773d316e0f8fefb4ec65354bbcf0b10908cc6/multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351", size = 129004 },
+ { url = "https://files.pythonhosted.org/packages/74/21/23960627b00ed39643302d81bcda44c9444ebcdc04ee5bedd0757513f259/multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb", size = 121308 },
+ { url = "https://files.pythonhosted.org/packages/8b/5c/cf282263ffce4a596ed0bb2aa1a1dddfe1996d6a62d08842a8d4b33dca13/multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3", size = 132608 },
+ { url = "https://files.pythonhosted.org/packages/d7/3e/97e778c041c72063f42b290888daff008d3ab1427f5b09b714f5a8eff294/multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399", size = 127029 },
+ { url = "https://files.pythonhosted.org/packages/47/ac/3efb7bfe2f3aefcf8d103e9a7162572f01936155ab2f7ebcc7c255a23212/multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423", size = 137594 },
+ { url = "https://files.pythonhosted.org/packages/42/9b/6c6e9e8dc4f915fc90a9b7798c44a30773dea2995fdcb619870e705afe2b/multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3", size = 134556 },
+ { url = "https://files.pythonhosted.org/packages/1d/10/8e881743b26aaf718379a14ac58572a240e8293a1c9d68e1418fb11c0f90/multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753", size = 130993 },
+ { url = "https://files.pythonhosted.org/packages/fd/16/92057c74ba3b96d5e211b553895cd6dc7cc4d1e43d9ab8fafc727681ef71/multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa", size = 48713 },
+ { url = "https://files.pythonhosted.org/packages/94/3d/37d1b8893ae79716179540b89fc6a0ee56b4a65fcc0d63535c6f5d96f217/multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436", size = 29516 },
+ { url = "https://files.pythonhosted.org/packages/a2/12/adb6b3200c363062f805275b4c1e656be2b3681aada66c80129932ff0bae/multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761", size = 29557 },
+ { url = "https://files.pythonhosted.org/packages/47/e9/604bb05e6e5bce1e6a5cf80a474e0f072e80d8ac105f1b994a53e0b28c42/multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e", size = 130170 },
+ { url = "https://files.pythonhosted.org/packages/7e/13/9efa50801785eccbf7086b3c83b71a4fb501a4d43549c2f2f80b8787d69f/multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef", size = 134836 },
+ { url = "https://files.pythonhosted.org/packages/bf/0f/93808b765192780d117814a6dfcc2e75de6dcc610009ad408b8814dca3ba/multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95", size = 133475 },
+ { url = "https://files.pythonhosted.org/packages/d3/c8/529101d7176fe7dfe1d99604e48d69c5dfdcadb4f06561f465c8ef12b4df/multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925", size = 131049 },
+ { url = "https://files.pythonhosted.org/packages/ca/0c/fc85b439014d5a58063e19c3a158a889deec399d47b5269a0f3b6a2e28bc/multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966", size = 120370 },
+ { url = "https://files.pythonhosted.org/packages/db/46/d4416eb20176492d2258fbd47b4abe729ff3b6e9c829ea4236f93c865089/multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305", size = 125178 },
+ { url = "https://files.pythonhosted.org/packages/5b/46/73697ad7ec521df7de5531a32780bbfd908ded0643cbe457f981a701457c/multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2", size = 119567 },
+ { url = "https://files.pythonhosted.org/packages/cd/ed/51f060e2cb0e7635329fa6ff930aa5cffa17f4c7f5c6c3ddc3500708e2f2/multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2", size = 129822 },
+ { url = "https://files.pythonhosted.org/packages/df/9e/ee7d1954b1331da3eddea0c4e08d9142da5f14b1321c7301f5014f49d492/multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6", size = 128656 },
+ { url = "https://files.pythonhosted.org/packages/77/00/8538f11e3356b5d95fa4b024aa566cde7a38aa7a5f08f4912b32a037c5dc/multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3", size = 125360 },
+ { url = "https://files.pythonhosted.org/packages/22/67/1c7c0f39fe069aa4e5d794f323be24bf4d33d62d2a348acdb7991f8f30db/multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008", size = 48771 },
+ { url = "https://files.pythonhosted.org/packages/3c/25/c186ee7b212bdf0df2519eacfb1981a017bda34392c67542c274651daf23/multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f", size = 29533 },
+ { url = "https://files.pythonhosted.org/packages/67/5e/04575fd837e0958e324ca035b339cea174554f6f641d3fb2b4f2e7ff44a2/multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28", size = 29595 },
+ { url = "https://files.pythonhosted.org/packages/d3/b2/e56388f86663810c07cfe4a3c3d87227f3811eeb2d08450b9e5d19d78876/multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b", size = 130094 },
+ { url = "https://files.pythonhosted.org/packages/6c/ee/30ae9b4186a644d284543d55d491fbd4239b015d36b23fea43b4c94f7052/multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c", size = 134876 },
+ { url = "https://files.pythonhosted.org/packages/84/c7/70461c13ba8ce3c779503c70ec9d0345ae84de04521c1f45a04d5f48943d/multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3", size = 133500 },
+ { url = "https://files.pythonhosted.org/packages/4a/9f/002af221253f10f99959561123fae676148dd730e2daa2cd053846a58507/multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44", size = 131099 },
+ { url = "https://files.pythonhosted.org/packages/82/42/d1c7a7301d52af79d88548a97e297f9d99c961ad76bbe6f67442bb77f097/multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2", size = 120403 },
+ { url = "https://files.pythonhosted.org/packages/68/f3/471985c2c7ac707547553e8f37cff5158030d36bdec4414cb825fbaa5327/multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3", size = 125348 },
+ { url = "https://files.pythonhosted.org/packages/67/2c/e6df05c77e0e433c214ec1d21ddd203d9a4770a1f2866a8ca40a545869a0/multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa", size = 119673 },
+ { url = "https://files.pythonhosted.org/packages/c5/cd/bc8608fff06239c9fb333f9db7743a1b2eafe98c2666c9a196e867a3a0a4/multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa", size = 129927 },
+ { url = "https://files.pythonhosted.org/packages/44/8e/281b69b7bc84fc963a44dc6e0bbcc7150e517b91df368a27834299a526ac/multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4", size = 128711 },
+ { url = "https://files.pythonhosted.org/packages/12/a4/63e7cd38ed29dd9f1881d5119f272c898ca92536cdb53ffe0843197f6c85/multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6", size = 125519 },
+ { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051 },
+]
+
+[[package]]
+name = "parso"
+version = "0.8.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650 },
+]
+
+[[package]]
+name = "pexpect"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "ptyprocess", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 },
+]
+
+[[package]]
+name = "pillow"
+version = "11.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f3/af/c097e544e7bd278333db77933e535098c259609c4eb3b85381109602fb5b/pillow-11.1.0.tar.gz", hash = "sha256:368da70808b36d73b4b390a8ffac11069f8a5c85f29eff1f1b01bcf3ef5b2a20", size = 46742715 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dd/d6/2000bfd8d5414fb70cbbe52c8332f2283ff30ed66a9cde42716c8ecbe22c/pillow-11.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e06695e0326d05b06833b40b7ef477e475d0b1ba3a6d27da1bb48c23209bf457", size = 3229968 },
+ { url = "https://files.pythonhosted.org/packages/d9/45/3fe487010dd9ce0a06adf9b8ff4f273cc0a44536e234b0fad3532a42c15b/pillow-11.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96f82000e12f23e4f29346e42702b6ed9a2f2fea34a740dd5ffffcc8c539eb35", size = 3101806 },
+ { url = "https://files.pythonhosted.org/packages/e3/72/776b3629c47d9d5f1c160113158a7a7ad177688d3a1159cd3b62ded5a33a/pillow-11.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3cd561ded2cf2bbae44d4605837221b987c216cff94f49dfeed63488bb228d2", size = 4322283 },
+ { url = "https://files.pythonhosted.org/packages/e4/c2/e25199e7e4e71d64eeb869f5b72c7ddec70e0a87926398785ab944d92375/pillow-11.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f189805c8be5ca5add39e6f899e6ce2ed824e65fb45f3c28cb2841911da19070", size = 4402945 },
+ { url = "https://files.pythonhosted.org/packages/c1/ed/51d6136c9d5911f78632b1b86c45241c712c5a80ed7fa7f9120a5dff1eba/pillow-11.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dd0052e9db3474df30433f83a71b9b23bd9e4ef1de13d92df21a52c0303b8ab6", size = 4361228 },
+ { url = "https://files.pythonhosted.org/packages/48/a4/fbfe9d5581d7b111b28f1d8c2762dee92e9821bb209af9fa83c940e507a0/pillow-11.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:837060a8599b8f5d402e97197d4924f05a2e0d68756998345c829c33186217b1", size = 4484021 },
+ { url = "https://files.pythonhosted.org/packages/39/db/0b3c1a5018117f3c1d4df671fb8e47d08937f27519e8614bbe86153b65a5/pillow-11.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa8dd43daa836b9a8128dbe7d923423e5ad86f50a7a14dc688194b7be5c0dea2", size = 4287449 },
+ { url = "https://files.pythonhosted.org/packages/d9/58/bc128da7fea8c89fc85e09f773c4901e95b5936000e6f303222490c052f3/pillow-11.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0a2f91f8a8b367e7a57c6e91cd25af510168091fb89ec5146003e424e1558a96", size = 4419972 },
+ { url = "https://files.pythonhosted.org/packages/95/20/9ce6ed62c91c073fcaa23d216e68289e19d95fb8188b9fb7a63d36771db8/pillow-11.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2062ffb1d36544d42fcaa277b069c88b01bb7298f4efa06731a7fd6cc290b81a", size = 3226818 },
+ { url = "https://files.pythonhosted.org/packages/b9/d8/f6004d98579a2596c098d1e30d10b248798cceff82d2b77aa914875bfea1/pillow-11.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a85b653980faad27e88b141348707ceeef8a1186f75ecc600c395dcac19f385b", size = 3101662 },
+ { url = "https://files.pythonhosted.org/packages/08/d9/892e705f90051c7a2574d9f24579c9e100c828700d78a63239676f960b74/pillow-11.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9409c080586d1f683df3f184f20e36fb647f2e0bc3988094d4fd8c9f4eb1b3b3", size = 4329317 },
+ { url = "https://files.pythonhosted.org/packages/8c/aa/7f29711f26680eab0bcd3ecdd6d23ed6bce180d82e3f6380fb7ae35fcf3b/pillow-11.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fdadc077553621911f27ce206ffcbec7d3f8d7b50e0da39f10997e8e2bb7f6a", size = 4412999 },
+ { url = "https://files.pythonhosted.org/packages/c8/c4/8f0fe3b9e0f7196f6d0bbb151f9fba323d72a41da068610c4c960b16632a/pillow-11.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:93a18841d09bcdd774dcdc308e4537e1f867b3dec059c131fde0327899734aa1", size = 4368819 },
+ { url = "https://files.pythonhosted.org/packages/38/0d/84200ed6a871ce386ddc82904bfadc0c6b28b0c0ec78176871a4679e40b3/pillow-11.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9aa9aeddeed452b2f616ff5507459e7bab436916ccb10961c4a382cd3e03f47f", size = 4496081 },
+ { url = "https://files.pythonhosted.org/packages/84/9c/9bcd66f714d7e25b64118e3952d52841a4babc6d97b6d28e2261c52045d4/pillow-11.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3cdcdb0b896e981678eee140d882b70092dac83ac1cdf6b3a60e2216a73f2b91", size = 4296513 },
+ { url = "https://files.pythonhosted.org/packages/db/61/ada2a226e22da011b45f7104c95ebda1b63dcbb0c378ad0f7c2a710f8fd2/pillow-11.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36ba10b9cb413e7c7dfa3e189aba252deee0602c86c309799da5a74009ac7a1c", size = 4431298 },
+ { url = "https://files.pythonhosted.org/packages/b3/31/9ca79cafdce364fd5c980cd3416c20ce1bebd235b470d262f9d24d810184/pillow-11.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae98e14432d458fc3de11a77ccb3ae65ddce70f730e7c76140653048c71bfcbc", size = 3226640 },
+ { url = "https://files.pythonhosted.org/packages/ac/0f/ff07ad45a1f172a497aa393b13a9d81a32e1477ef0e869d030e3c1532521/pillow-11.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cc1331b6d5a6e144aeb5e626f4375f5b7ae9934ba620c0ac6b3e43d5e683a0f0", size = 3101437 },
+ { url = "https://files.pythonhosted.org/packages/08/2f/9906fca87a68d29ec4530be1f893149e0cb64a86d1f9f70a7cfcdfe8ae44/pillow-11.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:758e9d4ef15d3560214cddbc97b8ef3ef86ce04d62ddac17ad39ba87e89bd3b1", size = 4326605 },
+ { url = "https://files.pythonhosted.org/packages/b0/0f/f3547ee15b145bc5c8b336401b2d4c9d9da67da9dcb572d7c0d4103d2c69/pillow-11.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b523466b1a31d0dcef7c5be1f20b942919b62fd6e9a9be199d035509cbefc0ec", size = 4411173 },
+ { url = "https://files.pythonhosted.org/packages/b1/df/bf8176aa5db515c5de584c5e00df9bab0713548fd780c82a86cba2c2fedb/pillow-11.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:9044b5e4f7083f209c4e35aa5dd54b1dd5b112b108648f5c902ad586d4f945c5", size = 4369145 },
+ { url = "https://files.pythonhosted.org/packages/de/7c/7433122d1cfadc740f577cb55526fdc39129a648ac65ce64db2eb7209277/pillow-11.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:3764d53e09cdedd91bee65c2527815d315c6b90d7b8b79759cc48d7bf5d4f114", size = 4496340 },
+ { url = "https://files.pythonhosted.org/packages/25/46/dd94b93ca6bd555588835f2504bd90c00d5438fe131cf01cfa0c5131a19d/pillow-11.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31eba6bbdd27dde97b0174ddf0297d7a9c3a507a8a1480e1e60ef914fe23d352", size = 4296906 },
+ { url = "https://files.pythonhosted.org/packages/a8/28/2f9d32014dfc7753e586db9add35b8a41b7a3b46540e965cb6d6bc607bd2/pillow-11.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b5d658fbd9f0d6eea113aea286b21d3cd4d3fd978157cbf2447a6035916506d3", size = 4431759 },
+ { url = "https://files.pythonhosted.org/packages/79/30/77f54228401e84d6791354888549b45824ab0ffde659bafa67956303a09f/pillow-11.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:70ca5ef3b3b1c4a0812b5c63c57c23b63e53bc38e758b37a951e5bc466449861", size = 3230060 },
+ { url = "https://files.pythonhosted.org/packages/ce/b1/56723b74b07dd64c1010fee011951ea9c35a43d8020acd03111f14298225/pillow-11.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8000376f139d4d38d6851eb149b321a52bb8893a88dae8ee7d95840431977081", size = 3106192 },
+ { url = "https://files.pythonhosted.org/packages/e1/cd/7bf7180e08f80a4dcc6b4c3a0aa9e0b0ae57168562726a05dc8aa8fa66b0/pillow-11.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ee85f0696a17dd28fbcfceb59f9510aa71934b483d1f5601d1030c3c8304f3c", size = 4446805 },
+ { url = "https://files.pythonhosted.org/packages/97/42/87c856ea30c8ed97e8efbe672b58c8304dee0573f8c7cab62ae9e31db6ae/pillow-11.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:dd0e081319328928531df7a0e63621caf67652c8464303fd102141b785ef9547", size = 4530623 },
+ { url = "https://files.pythonhosted.org/packages/ff/41/026879e90c84a88e33fb00cc6bd915ac2743c67e87a18f80270dfe3c2041/pillow-11.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e63e4e5081de46517099dc30abe418122f54531a6ae2ebc8680bcd7096860eab", size = 4465191 },
+]
+
+[[package]]
+name = "pillow-heif"
+version = "0.21.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pillow", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/65/f5/993804c7c626256e394f2dcb90ee739862ae22151bd7df00e014f5206573/pillow_heif-0.21.0.tar.gz", hash = "sha256:07aee1bff05e5d61feb989eaa745ae21b367011fd66ee48f7732931f8a12b49b", size = 16178019 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3e/e3/d076206933ab11a402b820a7bf0590363c19af0f3edb98c16b3741ad174d/pillow_heif-0.21.0-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:0c3ffa486f56f52fe790d3b1bd522d93d2f59e22ce86045641cd596adc3c5273", size = 5400646 },
+ { url = "https://files.pythonhosted.org/packages/8c/44/6ca01ea0889de09915dc6ee9b2c4f99b55910492d791996c1e72790829ed/pillow_heif-0.21.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c46be20058d72a5a158ffc65e6158279a4bcb337707a29b312c5293846bd5b8a", size = 3967415 },
+ { url = "https://files.pythonhosted.org/packages/a7/61/bd32d0c275f0d204a33b5ab7693c557ae35a5b631e5ab77f34d3d70642d9/pillow_heif-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06663c825a3d71779e51df02080467761b74d515e59fce9d780220cd75de7dd0", size = 6967014 },
+ { url = "https://files.pythonhosted.org/packages/de/a7/0d3ea500a0ea2ec2df8fab34c6fe85bdf1a4107ea4b9717af64be48edfc5/pillow_heif-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23efab69a03a9a3a9ff07043d8c8bf0d15ffd661ecc5c7bff59b386eb25f0466", size = 7803272 },
+ { url = "https://files.pythonhosted.org/packages/3e/97/ef8807224d61ea1d310ccbd504ce88dff1aa0f5349b3c9b4c5df81548581/pillow_heif-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5eebb73268b806d3c801271126382da4f556b756990f87590c843c5a8ec14e2", size = 8302588 },
+ { url = "https://files.pythonhosted.org/packages/93/f2/d7e02240d71eb11fc79fe90d6eccc420768657903f9d805b6c325a05f79f/pillow_heif-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3456b4cdb4da485f27c53a91c81f0488b44dc99c0be6870f6a1dc5ac85709894", size = 9051892 },
+ { url = "https://files.pythonhosted.org/packages/66/e0/5fc46d46c564cc955e83eb7ba7de4686270f88c242529673b4b30084a364/pillow_heif-0.21.0-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:0aaea6ea45257cf74e76666b80b6109f8f56217009534726fa7f6a5694ebd563", size = 5400784 },
+ { url = "https://files.pythonhosted.org/packages/f1/ab/9f7095e8c66d6cbb8a9dfeaf107c06e4b570d5fe8cbb8388196499d1578e/pillow_heif-0.21.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f28c2c934f547823de3e204e48866c571d81ebb6b3e8646c32fe2104c570c7b2", size = 3967392 },
+ { url = "https://files.pythonhosted.org/packages/85/1b/37817bc4ab5f58386ce335851807d97ed407c81dabed0281cd2941ed5647/pillow_heif-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e10ab63559346fc294b9612502221ddd6bfac8cd74091ace7328fefc1163a167", size = 6965387 },
+ { url = "https://files.pythonhosted.org/packages/07/db/1107f9a5c7c8d627367b4741f3bdb67917f9e6bb10ffd76498d2b828432e/pillow_heif-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2a015cfe4afec75551190d93c99dda13410aec89dc468794885b90f870f657", size = 7802240 },
+ { url = "https://files.pythonhosted.org/packages/5c/be/2e05c9038236933091be982894e1098fea6e00a0951b923fa6876516c1e5/pillow_heif-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:41693f5d87ed2b5fd01df4a6215045aff14d148a750aa0708c77e71139698154", size = 8301374 },
+ { url = "https://files.pythonhosted.org/packages/c5/e6/0ea6a7596c0d1bee265b51f233b4858cb2fe87fb6fa715a31bc412efe4c5/pillow_heif-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8b27031c561ee3485a119c769fc2ef41d81fae1de530857beef935683e09615e", size = 9051087 },
+ { url = "https://files.pythonhosted.org/packages/c1/87/229c4c3f558e9fd827f5ac7716e190c71ff94440a9dcb41576240aaff55f/pillow_heif-0.21.0-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:9e67aae3c22a90bc7dfd42c9f0033c53a7d358e0f0d5d29aa42f2f193162fb01", size = 5400786 },
+ { url = "https://files.pythonhosted.org/packages/91/07/825f0ada5976faa92fdadd837522d907e01b39e6aed096178eaeda6b2f5f/pillow_heif-0.21.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ee2d68cbc0df8ba6fd9103ac6b550ebafcaa3a179416737a96becf6e5f079586", size = 3967393 },
+ { url = "https://files.pythonhosted.org/packages/3f/de/e5e50e0debb5765aa6b1ea0eaad19c347972b9f7954a4ef37f1dc2304317/pillow_heif-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e5c0df7b8c84e4a8c249ba45ceca2453f205028d8a6525612ec6dd0553d925d", size = 6965345 },
+ { url = "https://files.pythonhosted.org/packages/ac/c6/3683070be2a9f3ac5c58058fcd91687dea652613b4358ddce6b687012617/pillow_heif-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaedb7f16f3f18fbb315648ba576d0d7bb26b18b50c16281665123c38f73101e", size = 7802164 },
+ { url = "https://files.pythonhosted.org/packages/a1/cf/06a9e7e7c24b12f1109f26afa17f4969175ef8e0b98be8d524458914c0fb/pillow_heif-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6724d6a2561f36b06e14e1cd396c004d32717e81528cb03565491ac8679ed760", size = 8301428 },
+ { url = "https://files.pythonhosted.org/packages/bf/c5/d3f8d90577085682183028ccc4cb2010d8accd0c5efab0e96146bb480acf/pillow_heif-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf2e2b0abad455a0896118856e82a8d5358dfe5480bedd09ddd6a04b23773899", size = 9051141 },
+]
+
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.48"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "wcwidth", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2d/4f/feb5e137aff82f7c7f3248267b97451da3644f6cdc218edfe549fb354127/prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90", size = 424684 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a9/6a/fd08d94654f7e67c52ca30523a178b3f8ccc4237fce4be90d39c938a831a/prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e", size = 386595 },
+]
+
+[[package]]
+name = "propcache"
+version = "0.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/c8/2a13f78d82211490855b2fb303b6721348d0787fdd9a12ac46d99d3acde1/propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64", size = 41735 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/0f/2913b6791ebefb2b25b4efd4bb2299c985e09786b9f5b19184a88e5778dd/propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16", size = 79297 },
+ { url = "https://files.pythonhosted.org/packages/cf/73/af2053aeccd40b05d6e19058419ac77674daecdd32478088b79375b9ab54/propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717", size = 45611 },
+ { url = "https://files.pythonhosted.org/packages/3c/09/8386115ba7775ea3b9537730e8cf718d83bbf95bffe30757ccf37ec4e5da/propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3", size = 45146 },
+ { url = "https://files.pythonhosted.org/packages/03/7a/793aa12f0537b2e520bf09f4c6833706b63170a211ad042ca71cbf79d9cb/propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9", size = 232136 },
+ { url = "https://files.pythonhosted.org/packages/f1/38/b921b3168d72111769f648314100558c2ea1d52eb3d1ba7ea5c4aa6f9848/propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787", size = 239706 },
+ { url = "https://files.pythonhosted.org/packages/14/29/4636f500c69b5edea7786db3c34eb6166f3384b905665ce312a6e42c720c/propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465", size = 238531 },
+ { url = "https://files.pythonhosted.org/packages/85/14/01fe53580a8e1734ebb704a3482b7829a0ef4ea68d356141cf0994d9659b/propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af", size = 231063 },
+ { url = "https://files.pythonhosted.org/packages/33/5c/1d961299f3c3b8438301ccfbff0143b69afcc30c05fa28673cface692305/propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7", size = 220134 },
+ { url = "https://files.pythonhosted.org/packages/00/d0/ed735e76db279ba67a7d3b45ba4c654e7b02bc2f8050671ec365d8665e21/propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f", size = 220009 },
+ { url = "https://files.pythonhosted.org/packages/75/90/ee8fab7304ad6533872fee982cfff5a53b63d095d78140827d93de22e2d4/propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54", size = 212199 },
+ { url = "https://files.pythonhosted.org/packages/eb/ec/977ffaf1664f82e90737275873461695d4c9407d52abc2f3c3e24716da13/propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505", size = 214827 },
+ { url = "https://files.pythonhosted.org/packages/57/48/031fb87ab6081764054821a71b71942161619549396224cbb242922525e8/propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82", size = 228009 },
+ { url = "https://files.pythonhosted.org/packages/1a/06/ef1390f2524850838f2390421b23a8b298f6ce3396a7cc6d39dedd4047b0/propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca", size = 231638 },
+ { url = "https://files.pythonhosted.org/packages/38/2a/101e6386d5a93358395da1d41642b79c1ee0f3b12e31727932b069282b1d/propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e", size = 222788 },
+ { url = "https://files.pythonhosted.org/packages/4c/28/1d205fe49be8b1b4df4c50024e62480a442b1a7b818e734308bb0d17e7fb/propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a", size = 79588 },
+ { url = "https://files.pythonhosted.org/packages/21/ee/fc4d893f8d81cd4971affef2a6cb542b36617cd1d8ce56b406112cb80bf7/propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0", size = 45825 },
+ { url = "https://files.pythonhosted.org/packages/4a/de/bbe712f94d088da1d237c35d735f675e494a816fd6f54e9db2f61ef4d03f/propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d", size = 45357 },
+ { url = "https://files.pythonhosted.org/packages/7f/14/7ae06a6cf2a2f1cb382586d5a99efe66b0b3d0c6f9ac2f759e6f7af9d7cf/propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4", size = 241869 },
+ { url = "https://files.pythonhosted.org/packages/cc/59/227a78be960b54a41124e639e2c39e8807ac0c751c735a900e21315f8c2b/propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d", size = 247884 },
+ { url = "https://files.pythonhosted.org/packages/84/58/f62b4ffaedf88dc1b17f04d57d8536601e4e030feb26617228ef930c3279/propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5", size = 248486 },
+ { url = "https://files.pythonhosted.org/packages/1c/07/ebe102777a830bca91bbb93e3479cd34c2ca5d0361b83be9dbd93104865e/propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24", size = 243649 },
+ { url = "https://files.pythonhosted.org/packages/ed/bc/4f7aba7f08f520376c4bb6a20b9a981a581b7f2e385fa0ec9f789bb2d362/propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff", size = 229103 },
+ { url = "https://files.pythonhosted.org/packages/fe/d5/04ac9cd4e51a57a96f78795e03c5a0ddb8f23ec098b86f92de028d7f2a6b/propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f", size = 226607 },
+ { url = "https://files.pythonhosted.org/packages/e3/f0/24060d959ea41d7a7cc7fdbf68b31852331aabda914a0c63bdb0e22e96d6/propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec", size = 221153 },
+ { url = "https://files.pythonhosted.org/packages/77/a7/3ac76045a077b3e4de4859a0753010765e45749bdf53bd02bc4d372da1a0/propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348", size = 222151 },
+ { url = "https://files.pythonhosted.org/packages/e7/af/5e29da6f80cebab3f5a4dcd2a3240e7f56f2c4abf51cbfcc99be34e17f0b/propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6", size = 233812 },
+ { url = "https://files.pythonhosted.org/packages/8c/89/ebe3ad52642cc5509eaa453e9f4b94b374d81bae3265c59d5c2d98efa1b4/propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6", size = 238829 },
+ { url = "https://files.pythonhosted.org/packages/e9/2f/6b32f273fa02e978b7577159eae7471b3cfb88b48563b1c2578b2d7ca0bb/propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518", size = 230704 },
+ { url = "https://files.pythonhosted.org/packages/0f/2a/329e0547cf2def8857157f9477669043e75524cc3e6251cef332b3ff256f/propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc", size = 77002 },
+ { url = "https://files.pythonhosted.org/packages/12/2d/c4df5415e2382f840dc2ecbca0eeb2293024bc28e57a80392f2012b4708c/propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9", size = 44639 },
+ { url = "https://files.pythonhosted.org/packages/d0/5a/21aaa4ea2f326edaa4e240959ac8b8386ea31dedfdaa636a3544d9e7a408/propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439", size = 44049 },
+ { url = "https://files.pythonhosted.org/packages/4e/3e/021b6cd86c0acc90d74784ccbb66808b0bd36067a1bf3e2deb0f3845f618/propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536", size = 224819 },
+ { url = "https://files.pythonhosted.org/packages/3c/57/c2fdeed1b3b8918b1770a133ba5c43ad3d78e18285b0c06364861ef5cc38/propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629", size = 229625 },
+ { url = "https://files.pythonhosted.org/packages/9d/81/70d4ff57bf2877b5780b466471bebf5892f851a7e2ca0ae7ffd728220281/propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b", size = 232934 },
+ { url = "https://files.pythonhosted.org/packages/3c/b9/bb51ea95d73b3fb4100cb95adbd4e1acaf2cbb1fd1083f5468eeb4a099a8/propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052", size = 227361 },
+ { url = "https://files.pythonhosted.org/packages/f1/20/3c6d696cd6fd70b29445960cc803b1851a1131e7a2e4ee261ee48e002bcd/propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce", size = 213904 },
+ { url = "https://files.pythonhosted.org/packages/a1/cb/1593bfc5ac6d40c010fa823f128056d6bc25b667f5393781e37d62f12005/propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d", size = 212632 },
+ { url = "https://files.pythonhosted.org/packages/6d/5c/e95617e222be14a34c709442a0ec179f3207f8a2b900273720501a70ec5e/propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce", size = 207897 },
+ { url = "https://files.pythonhosted.org/packages/8e/3b/56c5ab3dc00f6375fbcdeefdede5adf9bee94f1fab04adc8db118f0f9e25/propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95", size = 208118 },
+ { url = "https://files.pythonhosted.org/packages/86/25/d7ef738323fbc6ebcbce33eb2a19c5e07a89a3df2fded206065bd5e868a9/propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf", size = 217851 },
+ { url = "https://files.pythonhosted.org/packages/b3/77/763e6cef1852cf1ba740590364ec50309b89d1c818e3256d3929eb92fabf/propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f", size = 222630 },
+ { url = "https://files.pythonhosted.org/packages/4f/e9/0f86be33602089c701696fbed8d8c4c07b6ee9605c5b7536fd27ed540c5b/propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30", size = 216269 },
+ { url = "https://files.pythonhosted.org/packages/41/b6/c5319caea262f4821995dca2107483b94a3345d4607ad797c76cb9c36bcc/propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", size = 11818 },
+]
+
+[[package]]
+name = "ptyprocess"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 },
+]
+
+[[package]]
+name = "pure-eval"
+version = "0.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 },
+]
+
+[[package]]
+name = "puremagic"
+version = "1.28"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/09/2d/40599f25667733e41bbc3d7e4c7c36d5e7860874aa5fe9c584e90b34954d/puremagic-1.28.tar.gz", hash = "sha256:195893fc129657f611b86b959aab337207d6df7f25372209269ed9e303c1a8c0", size = 314945 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c5/53/200a97332d10ed3edd7afcbc5f5543920ac59badfe5762598327999f012e/puremagic-1.28-py3-none-any.whl", hash = "sha256:e16cb9708ee2007142c37931c58f07f7eca956b3472489106a7245e5c3aa1241", size = 43241 },
+]
+
+[[package]]
+name = "pyaes"
+version = "1.6.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/44/66/2c17bae31c906613795711fc78045c285048168919ace2220daa372c7d72/pyaes-1.6.1.tar.gz", hash = "sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f", size = 28536 }
+
+[[package]]
+name = "pyee"
+version = "12.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0a/37/8fb6e653597b2b67ef552ed49b438d5398ba3b85a9453f8ada0fd77d455c/pyee-12.1.1.tar.gz", hash = "sha256:bbc33c09e2ff827f74191e3e5bbc6be7da02f627b7ec30d86f5ce1a6fb2424a3", size = 30915 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/25/68/7e150cba9eeffdeb3c5cecdb6896d70c8edd46ce41c0491e12fb2b2256ff/pyee-12.1.1-py3-none-any.whl", hash = "sha256:18a19c650556bb6b32b406d7f017c8f513aceed1ef7ca618fb65de7bd2d347ef", size = 15527 },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 },
+]
+
+[[package]]
+name = "pyrotgfork"
+version = "2.2.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyaes", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pysocks", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/53/97/158074afc76f5d84d2fd2599e1d209a8dc21583133da7985b8f259f0402e/pyrotgfork-2.2.4.tar.gz", hash = "sha256:5b452ca122e7c79e22c6d7c6808f0b7e2baf9fe17f247e9aaca1ed5bdfa28d6f", size = 471278 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e2/76/794a30de49ee21c075aa6742a863c555013cd97ca129a09dd8a314151adc/pyrotgfork-2.2.4-py3-none-any.whl", hash = "sha256:625105eb31e33fcb09a6165178e016e33f62f2deccccb973f06adc706a2f6fd6", size = 4906960 },
+]
+
+[[package]]
+name = "pysocks"
+version = "1.7.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725 },
+]
+
+[[package]]
+name = "pytgcrypto"
+version = "1.2.9.2"
+source = { registry = "https://pypi.org/simple" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dd/06/15ffcf87f7333b713783cf9cb52ee403251db7ac0088f44a0aab22527531/PyTgCrypto-1.2.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ecb9baa53724df644f1425f439145212050b15eb6f600eb64a567c284646c7e0", size = 50247 },
+ { url = "https://files.pythonhosted.org/packages/bd/f5/25d0663bb5aaf88d7614977aeb3ff2581ae4bd0ee797c2277b3f65408f2b/PyTgCrypto-1.2.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d46a21a11805771d6d3b1fc08a72fc6cb86f82084853317b722ae8768c0a2a66", size = 34635 },
+ { url = "https://files.pythonhosted.org/packages/16/9d/8c8af51c441e89e4dd97bf22432e03c6cac2ed5eb6f6f4485063cfd71dc6/PyTgCrypto-1.2.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d45c3f0c1bceee6fef48716fabe40bbde0e2c1ac53b453abfac6abd0d29e41ec", size = 34582 },
+ { url = "https://files.pythonhosted.org/packages/fb/3a/fd7039eb6fc686b891a1d41bbecdf14755d09bf0d4466e44e57e62b5c701/PyTgCrypto-1.2.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cd40cfeb09c38a9cfab337cec92f5c898e5b4cb4bfcd40e78bcc33e94d31c2c", size = 51541 },
+ { url = "https://files.pythonhosted.org/packages/42/c4/62f388585a4fc5c60da96332ca63ff69dcb1026d93ccd581504cd6071787/PyTgCrypto-1.2.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66b85bf465c3a48ea6c95e044df81ea47264f398b1972c3b07930e77ad8d6a62", size = 51251 },
+ { url = "https://files.pythonhosted.org/packages/f1/69/9268c4860ce950e69682064b9768999e2379db7fc6b0a5515a247b0308c8/PyTgCrypto-1.2.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:85e13dc274c37ef9b1e4eedb28bc973aa764f69a1d4807a9abf076bacccdaf73", size = 49551 },
+ { url = "https://files.pythonhosted.org/packages/be/5c/63741c8217bf0a6f891efebf7474b661eb3937eb73a15047fe4e1017751f/PyTgCrypto-1.2.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8a2c78be8c552d25c82043cec485ae0d2782edd77fa3c6012bfda527512d8a1f", size = 51198 },
+ { url = "https://files.pythonhosted.org/packages/d8/36/50e1d735a22713f3a97efc6db283ebf8ae849c8487fdb32d3d8edf7ae17b/PyTgCrypto-1.2.9.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a52173ef90209667a91b77ad5c422258b9b3a110703a3f1f900934d0f5571570", size = 50252 },
+ { url = "https://files.pythonhosted.org/packages/3b/d7/6baa4ab884a4d9fd58c7630016a849ceed2f2e3d2cb8f4cb699d47bcf85d/PyTgCrypto-1.2.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ceed07fc67c9b04fd84435227243d4037224da9450e06f58cd91c7b588072804", size = 34644 },
+ { url = "https://files.pythonhosted.org/packages/4f/b5/8c91f186618260e9fc867255ae6fb68791456a266af8f187b84793d053b1/PyTgCrypto-1.2.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5380008cd8d8c4cb686ba49b97df0c198b64ff0f4ba233d5cbe3774f5fd3b40b", size = 34580 },
+ { url = "https://files.pythonhosted.org/packages/8e/33/c2b007e2284e24812cd19135e875e989231140c1bd1dd2be66255438e7cb/PyTgCrypto-1.2.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d277e1cf7ee0bd1e1abbf4c8f1afd48944fbf78d744a413388c2e4320f35824", size = 51159 },
+ { url = "https://files.pythonhosted.org/packages/b3/21/59a9dcc108e85aeed5e703f33b6a1ef114fa4f68983ddcf8517c2190a0ca/PyTgCrypto-1.2.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bf10c7ca656ceee9053cd22fcfdce08cfa74be13536c204bee77397b72dede2", size = 50896 },
+ { url = "https://files.pythonhosted.org/packages/c8/87/7f1cba6f9ac2779afde7bddb8480f1722a2026cb72801c07c098ea9d51ae/PyTgCrypto-1.2.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:82e700c769502fb6c3dc431f70b695f766cc0d3fc648b9eeabd5e1c77fd4505e", size = 49155 },
+ { url = "https://files.pythonhosted.org/packages/37/07/5b8017d4b758027b23804b43292068cfc7ddd83af6d57e3ab81c4244ba46/PyTgCrypto-1.2.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2a3a0859df764274955f728ce69224fe7e0fa758061eaf8db4a33e31b0ba6a0", size = 50852 },
+ { url = "https://files.pythonhosted.org/packages/08/f6/0592f61ce097bf983eb9a9bdc90e68b6b24153e9c6efeafd2f4e9bd98cb8/PyTgCrypto-1.2.9.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ad1fda2e119d8eb8df714949838b7bbb7088e14c8578c64e02145730f492bd9a", size = 50259 },
+ { url = "https://files.pythonhosted.org/packages/4d/62/9b2b210b1101680d7cb477176c5b99eb912dea817438f4f72f6d862cc187/PyTgCrypto-1.2.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2652733ca67f45bd7fb9d7030b999868f3dba0945129b028bd4a18e75ed8355f", size = 34651 },
+ { url = "https://files.pythonhosted.org/packages/7f/4c/e36958dee327fbd16f40db588aeb87d2e8f34bf7368fdfd63d2a12daf50b/PyTgCrypto-1.2.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bd42e1664cd2f4daf7a4162a2b320a2bcb7eee56244fc6779e022a862c7407dd", size = 34579 },
+ { url = "https://files.pythonhosted.org/packages/05/62/79de1aa96794bf3b360a89bc465bf19390d5a802d38399cb48d19edb8e31/PyTgCrypto-1.2.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c73167eb24cee2dbc3d746f7f327d212d73561fdcf034b70c41beedebdbb7311", size = 51069 },
+ { url = "https://files.pythonhosted.org/packages/38/5b/c78aea27cc0d0dd040b3f36b800d88eee7b6c9a399d984c7b23eff912a66/PyTgCrypto-1.2.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a5cd4cbf239e4336f1471460cb03a70d3e9888af5c7f80c77b35e5dbce3113", size = 50828 },
+ { url = "https://files.pythonhosted.org/packages/72/ef/0559df6fdb5350b1ab94b43817659a1d02fcd2a92012be985b79346d279c/PyTgCrypto-1.2.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a82cda67b463514843aa81658b9d299ab0a45c2c975d89c1151e84f0919fbefc", size = 49209 },
+ { url = "https://files.pythonhosted.org/packages/1d/41/f4f69b6779c13e93cfee3b7dadb6bf43dd2dede780d003d2802141013b25/PyTgCrypto-1.2.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:655e6ef9a249c31198f66b4cda472edaf026edcc2f4db1d71a2ed0a78828d193", size = 50913 },
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
+]
+
+[[package]]
+name = "python-ffmpeg"
+version = "2.0.12"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyee", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/dd/4d/7ecffb341d646e016be76e36f5a42cb32f409c9ca21a57b68f067fad3fc7/python_ffmpeg-2.0.12.tar.gz", hash = "sha256:19ac80af5a064a2f53c245af1a909b2d7648ea045500d96d3bcd507b88d43dc7", size = 14126292 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7f/6d/02e817aec661defe148cb9eb0c4eca2444846305f625c2243fb9f92a9045/python_ffmpeg-2.0.12-py3-none-any.whl", hash = "sha256:d86697da8dfb39335183e336d31baf42fb217468adf5ac97fd743898240faae3", size = 14411 },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "charset-normalizer", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
+]
+
+[[package]]
+name = "s3transfer"
+version = "0.10.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "botocore", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c0/0a/1cdbabf9edd0ea7747efdf6c9ab4e7061b085aa7f9bfc36bb1601563b069/s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7", size = 145287 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/66/05/7957af15543b8c9799209506df4660cba7afc4cf94bfb60513827e96bed6/s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e", size = 83175 },
+]
+
+[[package]]
+name = "sgmllib3k"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/9e/bd/3704a8c3e0942d711c1299ebf7b9091930adae6675d7c8f476a7ce48653c/sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9", size = 5750 }
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 },
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
+]
+
+[[package]]
+name = "socksio"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f8/5c/48a7d9495be3d1c651198fd99dbb6ce190e2274d0f28b9051307bdec6b85/socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac", size = 19055 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/37/c3/6eeb6034408dac0fa653d126c9204ade96b819c936e136c5e8a6897eee9c/socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3", size = 12763 },
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 },
+]
+
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "asttokens", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "executing", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "pure-eval", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 },
+]
+
+[[package]]
+name = "traitlets"
+version = "5.14.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
+]
+
+[[package]]
+name = "tzlocal"
+version = "5.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/04/d3/c19d65ae67636fe63953b20c2e4a8ced4497ea232c43ff8d01db16de8dc0/tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e", size = 30201 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/97/3f/c4c51c55ff8487f2e6d0e618dba917e3c3ee2caae6cf0fbb59c9b1876f2e/tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8", size = 17859 },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 },
+]
+
+[[package]]
+name = "uvloop"
+version = "0.21.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410 },
+ { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476 },
+ { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855 },
+ { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185 },
+ { url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256 },
+ { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323 },
+ { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284 },
+ { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349 },
+ { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089 },
+ { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770 },
+ { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321 },
+ { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022 },
+ { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 },
+ { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 },
+ { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 },
+ { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 },
+ { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 },
+ { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 },
+]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.13"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 },
+]
+
+[[package]]
+name = "wrapt"
+version = "1.17.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308 },
+ { url = "https://files.pythonhosted.org/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488 },
+ { url = "https://files.pythonhosted.org/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776 },
+ { url = "https://files.pythonhosted.org/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776 },
+ { url = "https://files.pythonhosted.org/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420 },
+ { url = "https://files.pythonhosted.org/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199 },
+ { url = "https://files.pythonhosted.org/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307 },
+ { url = "https://files.pythonhosted.org/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025 },
+ { url = "https://files.pythonhosted.org/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879 },
+ { url = "https://files.pythonhosted.org/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799 },
+ { url = "https://files.pythonhosted.org/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821 },
+ { url = "https://files.pythonhosted.org/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919 },
+ { url = "https://files.pythonhosted.org/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721 },
+ { url = "https://files.pythonhosted.org/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899 },
+ { url = "https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222 },
+ { url = "https://files.pythonhosted.org/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707 },
+ { url = "https://files.pythonhosted.org/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685 },
+ { url = "https://files.pythonhosted.org/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567 },
+ { url = "https://files.pythonhosted.org/packages/ce/b9/0ffd557a92f3b11d4c5d5e0c5e4ad057bd9eb8586615cdaf901409920b14/wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125", size = 53800 },
+ { url = "https://files.pythonhosted.org/packages/c0/ef/8be90a0b7e73c32e550c73cfb2fa09db62234227ece47b0e80a05073b375/wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998", size = 38824 },
+ { url = "https://files.pythonhosted.org/packages/36/89/0aae34c10fe524cce30fe5fc433210376bce94cf74d05b0d68344c8ba46e/wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5", size = 38920 },
+ { url = "https://files.pythonhosted.org/packages/3b/24/11c4510de906d77e0cfb5197f1b1445d4fec42c9a39ea853d482698ac681/wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8", size = 88690 },
+ { url = "https://files.pythonhosted.org/packages/71/d7/cfcf842291267bf455b3e266c0c29dcb675b5540ee8b50ba1699abf3af45/wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6", size = 80861 },
+ { url = "https://files.pythonhosted.org/packages/d5/66/5d973e9f3e7370fd686fb47a9af3319418ed925c27d72ce16b791231576d/wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc", size = 89174 },
+ { url = "https://files.pythonhosted.org/packages/a7/d3/8e17bb70f6ae25dabc1aaf990f86824e4fd98ee9cadf197054e068500d27/wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2", size = 86721 },
+ { url = "https://files.pythonhosted.org/packages/6f/54/f170dfb278fe1c30d0ff864513cff526d624ab8de3254b20abb9cffedc24/wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b", size = 79763 },
+ { url = "https://files.pythonhosted.org/packages/4a/98/de07243751f1c4a9b15c76019250210dd3486ce098c3d80d5f729cba029c/wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504", size = 87585 },
+ { url = "https://files.pythonhosted.org/packages/3d/bc/30f903f891a82d402ffb5fda27ec1d621cc97cb74c16fea0b6141f1d4e87/wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192", size = 56312 },
+ { url = "https://files.pythonhosted.org/packages/8a/04/c97273eb491b5f1c918857cd26f314b74fc9b29224521f5b83f872253725/wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b", size = 40062 },
+ { url = "https://files.pythonhosted.org/packages/4e/ca/3b7afa1eae3a9e7fefe499db9b96813f41828b9fdb016ee836c4c379dadb/wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0", size = 40155 },
+ { url = "https://files.pythonhosted.org/packages/89/be/7c1baed43290775cb9030c774bc53c860db140397047cc49aedaf0a15477/wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306", size = 113471 },
+ { url = "https://files.pythonhosted.org/packages/32/98/4ed894cf012b6d6aae5f5cc974006bdeb92f0241775addad3f8cd6ab71c8/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb", size = 101208 },
+ { url = "https://files.pythonhosted.org/packages/ea/fd/0c30f2301ca94e655e5e057012e83284ce8c545df7661a78d8bfca2fac7a/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681", size = 109339 },
+ { url = "https://files.pythonhosted.org/packages/75/56/05d000de894c4cfcb84bcd6b1df6214297b8089a7bd324c21a4765e49b14/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6", size = 110232 },
+ { url = "https://files.pythonhosted.org/packages/53/f8/c3f6b2cf9b9277fb0813418e1503e68414cd036b3b099c823379c9575e6d/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6", size = 100476 },
+ { url = "https://files.pythonhosted.org/packages/a7/b1/0bb11e29aa5139d90b770ebbfa167267b1fc548d2302c30c8f7572851738/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f", size = 106377 },
+ { url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594 },
+]
+
+[[package]]
+name = "yarl"
+version = "1.18.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "multidict", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "propcache", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/40/93/282b5f4898d8e8efaf0790ba6d10e2245d2c9f30e199d1a85cae9356098c/yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069", size = 141555 },
+ { url = "https://files.pythonhosted.org/packages/6d/9c/0a49af78df099c283ca3444560f10718fadb8a18dc8b3edf8c7bd9fd7d89/yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193", size = 94351 },
+ { url = "https://files.pythonhosted.org/packages/5a/a1/205ab51e148fdcedad189ca8dd587794c6f119882437d04c33c01a75dece/yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889", size = 92286 },
+ { url = "https://files.pythonhosted.org/packages/ed/fe/88b690b30f3f59275fb674f5f93ddd4a3ae796c2b62e5bb9ece8a4914b83/yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8", size = 340649 },
+ { url = "https://files.pythonhosted.org/packages/07/eb/3b65499b568e01f36e847cebdc8d7ccb51fff716dbda1ae83c3cbb8ca1c9/yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca", size = 356623 },
+ { url = "https://files.pythonhosted.org/packages/33/46/f559dc184280b745fc76ec6b1954de2c55595f0ec0a7614238b9ebf69618/yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8", size = 354007 },
+ { url = "https://files.pythonhosted.org/packages/af/ba/1865d85212351ad160f19fb99808acf23aab9a0f8ff31c8c9f1b4d671fc9/yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae", size = 344145 },
+ { url = "https://files.pythonhosted.org/packages/94/cb/5c3e975d77755d7b3d5193e92056b19d83752ea2da7ab394e22260a7b824/yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3", size = 336133 },
+ { url = "https://files.pythonhosted.org/packages/19/89/b77d3fd249ab52a5c40859815765d35c91425b6bb82e7427ab2f78f5ff55/yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb", size = 347967 },
+ { url = "https://files.pythonhosted.org/packages/35/bd/f6b7630ba2cc06c319c3235634c582a6ab014d52311e7d7c22f9518189b5/yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e", size = 346397 },
+ { url = "https://files.pythonhosted.org/packages/18/1a/0b4e367d5a72d1f095318344848e93ea70da728118221f84f1bf6c1e39e7/yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59", size = 350206 },
+ { url = "https://files.pythonhosted.org/packages/b5/cf/320fff4367341fb77809a2d8d7fe75b5d323a8e1b35710aafe41fdbf327b/yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d", size = 362089 },
+ { url = "https://files.pythonhosted.org/packages/57/cf/aadba261d8b920253204085268bad5e8cdd86b50162fcb1b10c10834885a/yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e", size = 366267 },
+ { url = "https://files.pythonhosted.org/packages/54/58/fb4cadd81acdee6dafe14abeb258f876e4dd410518099ae9a35c88d8097c/yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a", size = 359141 },
+ { url = "https://files.pythonhosted.org/packages/33/85/bd2e2729752ff4c77338e0102914897512e92496375e079ce0150a6dc306/yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", size = 142644 },
+ { url = "https://files.pythonhosted.org/packages/ff/74/1178322cc0f10288d7eefa6e4a85d8d2e28187ccab13d5b844e8b5d7c88d/yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", size = 94962 },
+ { url = "https://files.pythonhosted.org/packages/be/75/79c6acc0261e2c2ae8a1c41cf12265e91628c8c58ae91f5ff59e29c0787f/yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", size = 92795 },
+ { url = "https://files.pythonhosted.org/packages/6b/32/927b2d67a412c31199e83fefdce6e645247b4fb164aa1ecb35a0f9eb2058/yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", size = 332368 },
+ { url = "https://files.pythonhosted.org/packages/19/e5/859fca07169d6eceeaa4fde1997c91d8abde4e9a7c018e371640c2da2b71/yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", size = 342314 },
+ { url = "https://files.pythonhosted.org/packages/08/75/76b63ccd91c9e03ab213ef27ae6add2e3400e77e5cdddf8ed2dbc36e3f21/yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", size = 341987 },
+ { url = "https://files.pythonhosted.org/packages/1a/e1/a097d5755d3ea8479a42856f51d97eeff7a3a7160593332d98f2709b3580/yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", size = 336914 },
+ { url = "https://files.pythonhosted.org/packages/0b/42/e1b4d0e396b7987feceebe565286c27bc085bf07d61a59508cdaf2d45e63/yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", size = 325765 },
+ { url = "https://files.pythonhosted.org/packages/7e/18/03a5834ccc9177f97ca1bbb245b93c13e58e8225276f01eedc4cc98ab820/yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", size = 344444 },
+ { url = "https://files.pythonhosted.org/packages/c8/03/a713633bdde0640b0472aa197b5b86e90fbc4c5bc05b727b714cd8a40e6d/yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", size = 340760 },
+ { url = "https://files.pythonhosted.org/packages/eb/99/f6567e3f3bbad8fd101886ea0276c68ecb86a2b58be0f64077396cd4b95e/yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", size = 346484 },
+ { url = "https://files.pythonhosted.org/packages/8e/a9/84717c896b2fc6cb15bd4eecd64e34a2f0a9fd6669e69170c73a8b46795a/yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", size = 359864 },
+ { url = "https://files.pythonhosted.org/packages/1e/2e/d0f5f1bef7ee93ed17e739ec8dbcb47794af891f7d165fa6014517b48169/yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", size = 364537 },
+ { url = "https://files.pythonhosted.org/packages/97/8a/568d07c5d4964da5b02621a517532adb8ec5ba181ad1687191fffeda0ab6/yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", size = 357861 },
+ { url = "https://files.pythonhosted.org/packages/30/c7/c790513d5328a8390be8f47be5d52e141f78b66c6c48f48d241ca6bd5265/yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb", size = 140789 },
+ { url = "https://files.pythonhosted.org/packages/30/aa/a2f84e93554a578463e2edaaf2300faa61c8701f0898725842c704ba5444/yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa", size = 94144 },
+ { url = "https://files.pythonhosted.org/packages/c6/fc/d68d8f83714b221a85ce7866832cba36d7c04a68fa6a960b908c2c84f325/yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782", size = 91974 },
+ { url = "https://files.pythonhosted.org/packages/56/4e/d2563d8323a7e9a414b5b25341b3942af5902a2263d36d20fb17c40411e2/yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0", size = 333587 },
+ { url = "https://files.pythonhosted.org/packages/25/c9/cfec0bc0cac8d054be223e9f2c7909d3e8442a856af9dbce7e3442a8ec8d/yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482", size = 344386 },
+ { url = "https://files.pythonhosted.org/packages/ab/5d/4c532190113b25f1364d25f4c319322e86232d69175b91f27e3ebc2caf9a/yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186", size = 345421 },
+ { url = "https://files.pythonhosted.org/packages/23/d1/6cdd1632da013aa6ba18cee4d750d953104a5e7aac44e249d9410a972bf5/yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58", size = 339384 },
+ { url = "https://files.pythonhosted.org/packages/9a/c4/6b3c39bec352e441bd30f432cda6ba51681ab19bb8abe023f0d19777aad1/yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53", size = 326689 },
+ { url = "https://files.pythonhosted.org/packages/23/30/07fb088f2eefdc0aa4fc1af4e3ca4eb1a3aadd1ce7d866d74c0f124e6a85/yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2", size = 345453 },
+ { url = "https://files.pythonhosted.org/packages/63/09/d54befb48f9cd8eec43797f624ec37783a0266855f4930a91e3d5c7717f8/yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8", size = 341872 },
+ { url = "https://files.pythonhosted.org/packages/91/26/fd0ef9bf29dd906a84b59f0cd1281e65b0c3e08c6aa94b57f7d11f593518/yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1", size = 347497 },
+ { url = "https://files.pythonhosted.org/packages/d9/b5/14ac7a256d0511b2ac168d50d4b7d744aea1c1aa20c79f620d1059aab8b2/yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a", size = 359981 },
+ { url = "https://files.pythonhosted.org/packages/ca/b3/d493221ad5cbd18bc07e642894030437e405e1413c4236dd5db6e46bcec9/yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10", size = 366229 },
+ { url = "https://files.pythonhosted.org/packages/04/56/6a3e2a5d9152c56c346df9b8fb8edd2c8888b1e03f96324d457e5cf06d34/yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8", size = 360383 },
+ { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109 },
+]
+
+[[package]]
+name = "youtube-transcript-api"
+version = "0.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "defusedxml", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+ { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d7/f1/55ff16f7198bdf5204fd7be3c49122e07092a3da47bf4e1560989a4c0255/youtube_transcript_api-0.6.3.tar.gz", hash = "sha256:4d1f6451ae508390a5279f98519efb45e091bf60d3cca5ea0bb122800ab6a011", size = 612052 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/80/d4/be6fd091d29ae49d93813e598769e7ab453419a4de640e1755bf20911cce/youtube_transcript_api-0.6.3-py3-none-any.whl", hash = "sha256:297a74c1863d9df88f6885229f33a7eda61493d73ecb13ec80e876b65423e9b4", size = 622293 },
+]
+
+[[package]]
+name = "yt-dlp"
+version = "2025.1.20.232744.dev0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/82/ae/bb5cb898da06e2a626a16a127cdd693e095ff0ff4b7c94f2f9e4a28fd15c/yt_dlp-2025.1.20.232744.dev0.tar.gz", hash = "sha256:f5d2eb93cb5777b61a0b892917c5f6181f77cc7d1f44499796b91a203c041ef3", size = 2924057 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/43/ab169c500012777180cc30d4c4006b6aa2c572ebbe6c857ba14ccdd3fbbe/yt_dlp-2025.1.20.232744.dev0-py3-none-any.whl", hash = "sha256:3e40abe33e979db99467126fb0ac03206995b4cd6bcf1f100ca4adba5367c799", size = 3185868 },
+]