Mirror of https://github.com/anasty17/mirror-leech-telegram-bot.git
Commit 885f9d913d (parent: da31ec629a)

Rclone Download

- Fix aria2c selection
- Other bug fixes

Fixes #1397

Signed-off-by: mltb <e.anastayyar@gmail.com>
.gitignore (vendored, 2 changes)

@@ -9,9 +9,11 @@ data*
 log.txt
 accounts/*
 Thumbnails/*
+rclone/*
 list_drives.txt
 cookies.txt
 downloads
 bot.session
 user.session
 terabox.txt
+rclone.conf
README.md (13 changes)

@@ -53,10 +53,10 @@ In each single file there is a major change from base code, it's almost totaly d
 ### Database
 - Mongo Database support
 - Store bot settings
-- Store user settings including thumbnails in database
+- Store user settings including thumbnails and rclone config in database
 - Store private files
 - Store RSS data
-- Store incomplete task notifier to get incomplete task messages after restart
+- Store incompleted task messages
 ### Torrents Search
 - Torrent search support
 - Search on torrents with Torrent Search API
@@ -73,8 +73,11 @@ In each single file there is a major change from base code, it's almost totaly d
 - Rss for each user with tag
 - Sudo settings to control users feeds
 - All functions have been improved using buttons from one command.
+### Rclone
+- Download using rclone
+- Ability to choose config, remote and path from list with buttons
 ### Overall
-- Docker image support for linux `amd64, arm64/v8, arm/v7, s390x`
+- Docker image support for linux `amd64, arm64/v8, arm/v7`
 - Switch from sync to async
 - SWitch from python-telegram-bot to pyrogram
 - Edit variables and overwrite the private files while bot running
@@ -107,7 +110,7 @@ In each single file there is a major change from base code, it's almost totaly d
 - Extract these filetypes
 > ZIP, RAR, TAR, 7z, ISO, WIM, CAB, GZIP, BZIP2, APM, ARJ, CHM, CPIO, CramFS, DEB, DMG, FAT, HFS, LZH, LZMA, LZMA2, MBR, MSI, MSLZ, NSIS, NTFS, RPM, SquashFS, UDF, VHD, XAR, Z, TAR.XZ
 - Direct links Supported:
-> mediafire, letsupload.io, hxfile.co, antfiles, fembed.com, fembed.net, femax20.com, layarkacaxxi.icu, fcdn.stream, sbplay.org, naniplay.com, naniplay.nanime.in, naniplay.nanime.biz, sbembed.com, streamtape.com, streamsb.net, feurl.com, upload.ee, pixeldrain.com, racaty.net, 1fichier.com, 1drv.ms (Only works for file not folder or business account), uptobox.com and solidfiles.com, linkbox.to, shrdsk.me (sharedisk.io), akmfiles.com, wetransfer.com, mdisk.me (with ytdl), terabox.com (you need to add cookies txt with name) [terabox.txt](https://chrome.google.com/webstore/detail/get-cookiestxt/bgaddhkoddajcdgocldbbfleckgcbcid) and almost every anonfiles based sites
+> mediafire, letsupload.io, hxfile.co, antfiles, fembed.com, fembed.net, femax20.com, layarkacaxxi.icu, fcdn.stream, sbplay.org, naniplay.com, naniplay.nanime.in, naniplay.nanime.biz, sbembed.com, streamtape.com, streamsb.net, feurl.com, upload.ee, pixeldrain.com, racaty.net, 1fichier.com, 1drv.ms (Only works for file not folder or business account), uptobox.com and solidfiles.com, linkbox.to, shrdsk.me (sharedisk.io), akmfiles.com, wetransfer.com, mdisk.me (with ytdl), terabox.com (you need to add cookies txt with name) [terabox.txt](https://github.com/ytdl-org/youtube-dl#how-do-i-pass-cookies-to-youtube-dl) and almost every anonfiles based sites
 
 # How to deploy?
 
@@ -170,7 +173,7 @@ Fill up rest of the fields. Meaning of each field is discussed below. **NOTE**:
 - `SUDO_USERS`: Fill user_id of users whom you want to give sudo permission. Separate them by space. `Int`
 - `USE_SERVICE_ACCOUNTS`: Whether to use Service Accounts or not. For this to work see [Using Service Accounts](https://github.com/anasty17/mirror-leech-telegram-bot#generate-service-accounts-what-is-service-account) section below. Default is `False`. `Bool`
 - `INDEX_URL`: Refer to https://gitlab.com/ParveenBhadooOfficial/Google-Drive-Index. `Str`
-- `STATUS_LIMIT`: Limit the no. of tasks shown in status message with buttons. **NOTE**: Recommended limit is `4` tasks. `Int`
+- `STATUS_LIMIT`: Limit the no. of tasks shown in status message with buttons. Default is `10`. **NOTE**: Recommended limit is `4` tasks. `Int`
 - `STOP_DUPLICATE`: Bot will check file in Drive, if it is present in Drive, downloading or cloning will be stopped. (**NOTE**: File will be checked using filename not file hash, so this feature is not perfect yet). Default is `False`. `Bool`
 - `CMD_SUFFIX`: commands index number. This number will added at the end all commands. `Str`|`Int`
 - `TORRENT_TIMEOUT`: Timeout of dead torrents downloading with qBittorrent and Aria2c in seconds. `Int`
@@ -224,7 +224,7 @@ DUMP_CHAT = environ.get('DUMP_CHAT', '')
 DUMP_CHAT = '' if len(DUMP_CHAT) == 0 else int(DUMP_CHAT)
 
 STATUS_LIMIT = environ.get('STATUS_LIMIT', '')
-STATUS_LIMIT = '' if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT)
+STATUS_LIMIT = 10 if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT)
 
 CMD_SUFFIX = environ.get('CMD_SUFFIX', '')
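The hunk above changes the fallback for an unset `STATUS_LIMIT` from an empty string to `10`. A standalone sketch of the same parsing, runnable outside the bot (the `pop` is only there to simulate an unset variable):

```python
from os import environ

environ.pop('STATUS_LIMIT', None)  # simulate an unset variable
STATUS_LIMIT = environ.get('STATUS_LIMIT', '')
STATUS_LIMIT = 10 if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT)
print(STATUS_LIMIT)  # -> 10
```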
@@ -18,7 +18,7 @@ from .helper.telegram_helper.bot_commands import BotCommands
 from .helper.telegram_helper.message_utils import sendMessage, editMessage, sendFile
 from .helper.telegram_helper.filters import CustomFilters
 from .helper.telegram_helper.button_build import ButtonMaker
-from .modules import authorize, list, cancel_mirror, mirror_status, mirror_leech, clone, ytdlp, rss, shell, eval, delete, count, users_settings, search, bt_select, bot_settings
+from .modules import authorize, list, cancel_mirror, mirror_leech, clone, status, ytdlp, rss, shell, eval, delete, count, users_settings, search, bt_select, bot_settings
 
 
 async def stats(client, message):
@@ -73,7 +73,7 @@ async def restart(client, message):
         QbInterval[0].cancel()
         QbInterval.clear()
     await sync_to_async(clean_all)
-    await (await create_subprocess_exec('pkill', '-9', '-f', 'gunicorn|aria2c|qbittorrent-nox|ffmpeg')).wait()
+    await (await create_subprocess_exec('pkill', '-9', '-f', 'gunicorn|aria2c|qbittorrent-nox|ffmpeg|rclone')).wait()
     await (await create_subprocess_exec('python3', 'update.py')).wait()
     async with aiopen(".restartmsg", "w") as f:
         await f.truncate(0)
@@ -86,7 +86,7 @@ async def getAllDownload(req_status):
                 return dl
     return None
 
-def bt_selection_buttons(id_: str):
+def bt_selection_buttons(id_):
     gid = id_[:12] if len(id_) > 20 else id_
     pincode = ""
     for n in id_:
@@ -105,12 +105,10 @@ def bt_selection_buttons(id_: str):
     buttons.ibutton("Done Selecting", f"btsel done {gid} {id_}")
     return buttons.build_menu(2)
 
-def get_progress_bar_string(status):
-    completed = status.processed_bytes() / 8
-    total = status.size_raw() / 8
-    p = 0 if total == 0 else round(completed * 100 / total)
-    p = min(max(p, 0), 100)
-    cFull = p // 8
+def get_progress_bar_string(pct):
+    pct = float(pct.split('%')[0])
+    p = min(max(pct, 0), 100)
+    cFull = int(p // 8)
     p_str = '■' * cFull
     p_str += '□' * (12 - cFull)
     return f"[{p_str}]"
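The rewrite changes the helper's input from a status object to a percentage string. To show what the new version produces, here is a standalone run with a made-up value (the function body is copied from the new side of the hunk above):

```python
def get_progress_bar_string(pct):
    pct = float(pct.split('%')[0])   # '43.5%' -> 43.5
    p = min(max(pct, 0), 100)
    cFull = int(p // 8)              # one filled cell per 8 percent, 12 cells total
    p_str = '■' * cFull
    p_str += '□' * (12 - cFull)
    return f"[{p_str}]"

print(get_progress_bar_string('43.5%'))  # -> [■■■■■□□□□□□□]
```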
@@ -118,21 +116,21 @@ def get_progress_bar_string(status):
 def get_readable_message():
     msg = ""
     button = None
-    if STATUS_LIMIT := config_dict['STATUS_LIMIT']:
+    STATUS_LIMIT = config_dict['STATUS_LIMIT']
     tasks = len(download_dict)
     globals()['PAGES'] = ceil(tasks/STATUS_LIMIT)
     if PAGE_NO > PAGES and PAGES != 0:
         globals()['COUNT'] -= STATUS_LIMIT
         globals()['PAGE_NO'] -= 1
-    for index, download in enumerate(list(download_dict.values())[COUNT:], start=1):
+    for download in list(download_dict.values())[COUNT:STATUS_LIMIT+COUNT]:
         if download.message.chat.type.name in ['SUPERGROUP', 'CHANNEL']:
             msg += f"<b><a href='{download.message.link}'>{download.status()}</a>: </b>"
         else:
             msg += f"<b>{download.status()}: </b>"
         msg += f"<code>{escape(str(download.name()))}</code>"
         if download.status() not in [MirrorStatus.STATUS_SPLITTING, MirrorStatus.STATUS_SEEDING]:
-            msg += f"\n{get_progress_bar_string(download)} {download.progress()}"
-            msg += f"\n<b>Processed:</b> {get_readable_file_size(download.processed_bytes())} of {download.size()}"
+            msg += f"\n{get_progress_bar_string(download.progress())} {download.progress()}"
+            msg += f"\n<b>Processed:</b> {download.processed_bytes()} of {download.size()}"
         msg += f"\n<b>Speed:</b> {download.speed()} | <b>ETA:</b> {download.eta()}"
         if hasattr(download, 'seeders_num'):
             try:
@@ -148,8 +146,6 @@ def get_readable_message():
         else:
             msg += f"\n<b>Size: </b>{download.size()}"
         msg += f"\n<code>/{BotCommands.CancelMirror} {download.gid()}</code>\n\n"
-        if index == STATUS_LIMIT:
-            break
     if len(msg) == 0:
         return None, None
     dl_speed = 0
@@ -173,7 +169,7 @@ def get_readable_message():
             up_speed += float(spd.split('K')[0]) * 1024
         elif 'M' in spd:
             up_speed += float(spd.split('M')[0]) * 1048576
-    if STATUS_LIMIT and tasks > STATUS_LIMIT:
+    if tasks > STATUS_LIMIT:
         msg += f"<b>Page:</b> {PAGE_NO}/{PAGES} | <b>Tasks:</b> {tasks}\n"
         buttons = ButtonMaker()
         buttons.ibutton("<<", "status pre")
@@ -243,6 +239,9 @@ def is_share_link(url):
 def is_mega_link(url):
     return "mega.nz" in url or "mega.co.nz" in url
 
+def is_rclone_path(path):
+    return bool(re_match(r'^(mrcc:)?(?!magnet:)\w+:(?!.*\/\/).+$|^rcd$', path))
+
 def get_mega_link_type(url):
     if "folder" in url:
         return "folder"
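A quick standalone check of the new `is_rclone_path` pattern; the regex is the one added above, while the sample strings and remote names are made up for illustration:

```python
from re import match as re_match

def is_rclone_path(path):
    return bool(re_match(r'^(mrcc:)?(?!magnet:)\w+:(?!.*\/\/).+$|^rcd$', path))

print(is_rclone_path('gdrive:backups/movies'))     # True  (remote:path)
print(is_rclone_path('mrcc:gdrive:backups'))       # True  (mrcc: prefix allowed)
print(is_rclone_path('rcd'))                       # True  (bare keyword)
print(is_rclone_path('magnet:?xt=urn:btih:abc'))   # False (magnet links excluded)
print(is_rclone_path('https://example.com/file'))  # False (anything with // excluded)
```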
@@ -275,12 +274,9 @@ async def cmd_exec(cmd, shell=False):
         proc = await create_subprocess_shell(cmd, stdout=PIPE, stderr=PIPE)
     else:
         proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
-
     stdout, stderr = await proc.communicate()
-
     stdout = stdout.decode().strip()
     stderr = stderr.decode().strip()
-
     return stdout, stderr, proc.returncode
 
 def new_task(func):
@@ -13,7 +13,6 @@ class DbManger:
         self.__conn = None
         self.__connect()
-
 
     def __connect(self):
         try:
             self.__conn = AsyncIOMotorClient(DATABASE_URL)
@@ -36,17 +35,24 @@ class DbManger:
         # User Data
         if await self.__db.users.find_one():
             rows = self.__db.users.find({})
-            # return a dict ==> {_id, is_sudo, is_auth, as_doc, thumb, yt_ql, media_group, equal_splits, split_size}
+            # return a dict ==> {_id, is_sudo, is_auth, as_doc, thumb, yt_ql, media_group, equal_splits, split_size, rclone}
            async for row in rows:
                uid = row['_id']
                del row['_id']
-                path = f"Thumbnails/{uid}.jpg"
+                thumb_path = f'Thumbnails/{uid}.jpg'
+                rclone_path = f'rclone/{uid}.conf'
                if row.get('thumb'):
                    if not await aiopath.exists('Thumbnails'):
                        await makedirs('Thumbnails')
-                    async with aiopen(path, 'wb+') as f:
+                    async with aiopen(thumb_path, 'wb+') as f:
                        await f.write(row['thumb'])
-                    row['thumb'] = path
+                    row['thumb'] = thumb_path
+                if row.get('rclone'):
+                    if not await aiopath.exists('rclone'):
+                        await makedirs('rclone')
+                    async with aiopen(rclone_path, 'wb+') as f:
+                        await f.write(row['rclone'])
+                    row['rclone'] = rclone_path
                user_data[uid] = row
            LOGGER.info("Users data has been imported from Database")
        # Rss Data
@@ -95,18 +101,21 @@ class DbManger:
             data = user_data[user_id]
             if data.get('thumb'):
                 del data['thumb']
+            if data.get('rclone'):
+                del data['rclone']
         await self.__db.users.replace_one({'_id': user_id}, data, upsert=True)
         self.__conn.close
 
-    async def update_thumb(self, user_id, path=None):
+    async def update_user_doc(self, user_id, path=None):
         if self.__err:
             return
+        key = 'rclone' if path.endswith('.conf') else 'thumb'
         if path is not None:
-            async with aiopen(path, 'rb+') as image:
-                image_bin = await image.read()
+            async with aiopen(path, 'rb+') as doc:
+                doc_bin = await doc.read()
         else:
-            image_bin = ''
-        await self.__db.users.update_one({'_id': user_id}, {'$set': {'thumb': image_bin}}, upsert=True)
+            doc_bin = ''
+        await self.__db.users.update_one({'_id': user_id}, {'$set': {key: doc_bin}}, upsert=True)
         self.__conn.close
 
     async def rss_update_all(self):
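The renamed `update_user_doc` now stores either a thumbnail or an rclone config under one code path, picking the database key from the file extension. A tiny standalone check of that dispatch (the paths are illustrative, following the `Thumbnails/{uid}.jpg` and `rclone/{uid}.conf` naming seen above):

```python
for path in ('Thumbnails/123456.jpg', 'rclone/123456.conf'):
    key = 'rclone' if path.endswith('.conf') else 'thumb'
    print(path, '->', key)
# Thumbnails/123456.jpg -> thumb
# rclone/123456.conf -> rclone
```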
@@ -4,6 +4,7 @@ from bot.helper.mirror_utils.download_utils.gd_downloader import add_gd_download
 from bot.helper.mirror_utils.download_utils.mega_downloader import add_mega_download
 from bot.helper.mirror_utils.download_utils.telegram_downloader import TelegramDownloadHelper
 from bot.helper.mirror_utils.download_utils.yt_dlp_download_helper import YoutubeDLHelper
+from bot.helper.mirror_utils.download_utils.rclone_downloader import RcloneDownloadHelper
 
 
 def start_dl_from_queued(uid):
@@ -18,6 +19,9 @@ def start_dl_from_queued(uid):
     elif dl[0] == 'tg':
         tg = TelegramDownloadHelper(dl[4])
         bot_loop.create_task(tg.add_download(dl[1], dl[2], dl[3], True))
+    elif dl[0] == 'rcd':
+        rc = RcloneDownloadHelper(dl[5])
+        bot_loop.create_task(rc.add_download(dl[1], dl[2], dl[3], dl[4], True))
     del queued_dl[uid]
 
 def start_up_from_queued(uid):
@@ -244,7 +244,7 @@ class MirrorLeechListener:
                 size = size - s
             LOGGER.info(f"Leech Name: {up_name}")
             tg = TgUploader(up_name, up_dir, size, self)
-            tg_upload_status = TgUploadStatus(tg, size, gid, self)
+            tg_upload_status = TgUploadStatus(tg, size, gid, self.message)
             async with download_dict_lock:
                 download_dict[self.uid] = tg_upload_status
             await update_all_messages()
@@ -254,7 +254,7 @@ class MirrorLeechListener:
             size = await get_path_size(up_path)
             LOGGER.info(f"Upload Name: {up_name}")
             drive = GoogleDriveHelper(up_name, up_dir, size, self)
-            upload_status = UploadStatus(drive, size, gid, self)
+            upload_status = UploadStatus(drive, size, gid, self.message)
             async with download_dict_lock:
                 download_dict[self.uid] = upload_status
             await update_all_messages()
@@ -25,7 +25,7 @@ async def __onDownloadStarted(api, gid):
             while True:
                 await sleep(0.5)
                 if download.is_removed or download.followed_by_ids:
-                    await deleteMessage(listener.message, meta)
+                    await deleteMessage(meta)
                     break
                 download = download.live
         return
@@ -191,11 +191,17 @@ async def add_aria2c_download(link, path, listener, filename, auth, ratio, seed_
         LOGGER.info(f"Download Error: {error}")
         await sendMessage(listener.message, error)
         return
+    gid = download.gid
     async with download_dict_lock:
-        download_dict[listener.uid] = AriaDownloadStatus(download.gid, listener)
-    LOGGER.info(f"Aria2Download started: {download.gid}")
+        download_dict[listener.uid] = AriaDownloadStatus(gid, listener)
+    LOGGER.info(f"Aria2Download started: {gid}")
     await listener.onDownloadStart()
-    if not listener.select:
+    if not listener.select or not config_dict['BASE_URL']:
         await sendStatusMessage(listener.message)
+    elif download.is_torrent and not download.is_metadata:
+        await sync_to_async(aria2.client.force_pause, gid)
+        SBUTTONS = bt_selection_buttons(gid)
+        msg = "Your download paused. Choose files then press Done Selecting button to start downloading."
+        await sendMessage(listener.message, msg, SBUTTONS)
 
 start_listener()
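This hunk is the "Fix aria2c selection" from the commit message: status is sent immediately unless selection was requested and a `BASE_URL` is configured; otherwise the torrent is force-paused and selection buttons go out. A minimal recreation of that gating condition with stand-in objects (both `config_dict` and `Listener` here are made-up substitutes for the bot's real ones):

```python
config_dict = {'BASE_URL': ''}   # stand-in: no web server configured

class Listener:                  # stand-in for the real listener
    select = True

listener = Listener()
if not listener.select or not config_dict['BASE_URL']:
    print('send status message')  # selection is impossible without BASE_URL
else:
    print('pause download and send file-selection buttons')
```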
@@ -17,7 +17,6 @@ from re import findall, match, search, sub
 from time import sleep
 from urllib.parse import quote, unquote, urlparse
 from uuid import uuid4
 
 from bs4 import BeautifulSoup
 from cfscrape import create_scraper
 from lk21 import Bypass
@@ -48,13 +48,13 @@ async def add_gd_download(link, path, listener, newname, from_queue=False):
     if added_to_queue:
         LOGGER.info(f"Added to Queue/Download: {name}")
         async with download_dict_lock:
-            download_dict[listener.uid] = QueueStatus(name, size, gid, listener, 'Dl')
+            download_dict[listener.uid] = QueueStatus(name, size, gid, listener.message, 'Dl')
         await listener.onDownloadStart()
         await sendStatusMessage(listener.message)
         return
     drive = GoogleDriveHelper(name, path, size, listener)
     async with download_dict_lock:
-        download_dict[listener.uid] = GdDownloadStatus(drive, size, listener, gid)
+        download_dict[listener.uid] = GdDownloadStatus(drive, size, listener.message, gid)
     async with queue_dict_lock:
         non_queued_dl.add(listener.uid)
     if not from_queue:
@@ -59,7 +59,7 @@ class MegaAppListener(MegaListener):
     def onRequestFinish(self, api, request, error):
         if str(error).lower() != "no error":
             self.error = error.copy()
-            LOGGER.error(self.error)
+            LOGGER.error(f'Mega onRequestFinishError: {self.error}')
             async_to_sync(self.event_setter)
             return
         request_type = request.getType()
@@ -195,7 +195,7 @@ async def add_mega_download(mega_link, path, listener, name, from_queue=False):
     if added_to_queue:
         LOGGER.info(f"Added to Queue/Download: {mname}")
         async with download_dict_lock:
-            download_dict[listener.uid] = QueueStatus(mname, size, gid, listener, 'Dl')
+            download_dict[listener.uid] = QueueStatus(mname, size, gid, listener.message, 'Dl')
         await listener.onDownloadStart()
         await sendStatusMessage(listener.message)
         await sync_to_async(api.removeListener, mega_listener)
@@ -203,7 +203,7 @@ async def add_mega_download(mega_link, path, listener, name, from_queue=False):
         await sync_to_async(folder_api.removeListener, mega_listener)
         return
     async with download_dict_lock:
-        download_dict[listener.uid] = MegaDownloadStatus(mega_listener, listener)
+        download_dict[listener.uid] = MegaDownloadStatus(mega_listener, listener.message)
     async with queue_dict_lock:
         non_queued_dl.add(listener.uid)
     await makedirs(path, exist_ok=True)
@@ -37,7 +37,7 @@ def __get_hash_file(path):
     """
 
 async def add_qb_torrent(link, path, listener, ratio, seed_time):
-    client = get_client()
+    client = await sync_to_async(get_client)
     ADD_TIME = time()
     try:
         url = link
@@ -57,13 +57,11 @@ async def add_qb_torrent(link, path, listener, ratio, seed_time):
             elif time() - ADD_TIME >= 120:
                 msg = "Not added! Check if the link is valid or not. If it's torrent file then report, this happens if torrent file size above 10mb."
                 await sendMessage(listener.message, msg)
-                await sync_to_async(client.auth_log_out)
                 return
         tor_info = tor_info[0]
         ext_hash = tor_info.hash
         if await getDownloadByGid(ext_hash[:12]):
             await sendMessage(listener.message, "This Torrent already added!")
-            await sync_to_async(client.auth_log_out)
             return
     else:
         await sendMessage(listener.message, "This is an unsupported/invalid link.")
@@ -107,7 +105,7 @@ async def add_qb_torrent(link, path, listener, ratio, seed_time):
         await aioremove(link)
     await sync_to_async(client.auth_log_out)
 
-async def __remove_torrent(client, hash_):
+async def __remove_torrent(client, hash_, tag):
     await sync_to_async(client.torrents_delete, torrent_hashes=hash_, delete_files=True)
     async with qb_download_lock:
         if hash_ in STALLED_TIME:
@@ -120,34 +118,34 @@ async def __remove_torrent(client, hash_):
             UPLOADED.remove(hash_)
         if hash_ in SEEDING:
             SEEDING.remove(hash_)
+    await sync_to_async(client.torrents_delete_tags, tags=tag)
     await sync_to_async(client.auth_log_out)
 
-async def __onDownloadError(err, client, tor, button=None):
+async def __onDownloadError(err, tor, button=None):
+    client = await sync_to_async(get_client)
     LOGGER.info(f"Cancelling Download: {tor.name}")
     await sync_to_async(client.torrents_pause, torrent_hashes=tor.hash)
     await sleep(0.3)
     download = await getDownloadByGid(tor.hash[:12])
     try:
         if hasattr(download, 'listener'):
             listener = download.listener()
             await listener.onDownloadError(err, button)
     except:
         pass
-    await __remove_torrent(client, tor.hash)
+    await __remove_torrent(client, tor.hash, tor.tags)
 
 @new_task
-async def __onSeedFinish(client, tor):
+async def __onSeedFinish(tor):
+    client = await sync_to_async(get_client)
     LOGGER.info(f"Cancelling Seed: {tor.name}")
     download = await getDownloadByGid(tor.hash[:12])
     try:
         if hasattr(download, 'listener'):
             listener = download.listener()
             await listener.onUploadError(f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {get_readable_time(tor.seeding_time)}")
     except:
         pass
     await __remove_torrent(client, tor.hash)
 
 @new_task
-async def __stop_duplicate(client, tor):
+async def __stop_duplicate(tor):
     download = await getDownloadByGid(tor.hash[:12])
     try:
         if hasattr(download, 'listener'):
             listener = download.listener()
             if not listener.select and not listener.isLeech:
                 LOGGER.info('Checking File/Folder if already in Drive')
@@ -163,12 +161,11 @@ async def __stop_duplicate(client, tor):
                 qbmsg, button = await sync_to_async(GoogleDriveHelper().drive_list, qbname, True)
                 if qbmsg:
                     qbmsg = 'File/Folder is already available in Drive.\nHere are the search results:'
-                    await __onDownloadError(qbmsg, client, tor, button)
-    except:
-        pass
+                    await __onDownloadError(qbmsg, tor, button)
 
 @new_task
-async def __onDownloadComplete(client, tor):
+async def __onDownloadComplete(tor):
+    client = await sync_to_async(get_client)
     await sleep(2)
     download = await getDownloadByGid(tor.hash[:12])
     try:
@@ -188,21 +185,23 @@ async def __onDownloadComplete(client, tor):
             else:
                 removed = True
             if removed:
-                await __remove_torrent(client, tor.hash)
+                await __remove_torrent(client, tor.hash, tor.tags)
                 return
             async with qb_download_lock:
                 SEEDING.add(tor.hash)
             await update_all_messages()
             LOGGER.info(f"Seeding started: {tor.name} - Hash: {tor.hash}")
+            await sync_to_async(client.auth_log_out)
         else:
-            await __remove_torrent(client, tor.hash)
+            await __remove_torrent(client, tor.hash, tor.tags)
 
 async def __qb_listener():
-    client = get_client()
+    client = await sync_to_async(get_client)
     async with qb_download_lock:
         if len(await sync_to_async(client.torrents_info)) == 0:
             QbInterval[0].cancel()
             QbInterval.clear()
+            await sync_to_async(client.auth_log_out)
             return
     try:
         for tor_info in await sync_to_async(client.torrents_info):
@@ -210,14 +209,14 @@ async def __qb_listener():
                 TORRENT_TIMEOUT = config_dict['TORRENT_TIMEOUT']
                 STALLED_TIME[tor_info.hash] = time()
                 if TORRENT_TIMEOUT and time() - tor_info.added_on >= TORRENT_TIMEOUT:
-                    bot_loop.create_task(__onDownloadError("Dead Torrent!", client, tor_info))
+                    bot_loop.create_task(__onDownloadError("Dead Torrent!", tor_info))
                 else:
                     await sync_to_async(client.torrents_reannounce, torrent_hashes=tor_info.hash)
             elif tor_info.state == "downloading":
                 STALLED_TIME[tor_info.hash] = time()
                 if config_dict['STOP_DUPLICATE'] and tor_info.hash not in STOP_DUP_CHECK:
                     STOP_DUP_CHECK.add(tor_info.hash)
-                    __stop_duplicate(client, tor_info)
+                    __stop_duplicate(tor_info)
             elif tor_info.state == "stalledDL":
                 TORRENT_TIMEOUT = config_dict['TORRENT_TIMEOUT']
                 if tor_info.hash not in RECHECKED and 0.99989999999999999 < tor_info.progress < 1:
@@ -225,10 +224,10 @@ async def __qb_listener():
                     msg += f"{tor_info.hash} Downloaded Bytes: {tor_info.downloaded} "
                     msg += f"Size: {tor_info.size} Total Size: {tor_info.total_size}"
                     LOGGER.error(msg)
-                    client.torrents_recheck(torrent_hashes=tor_info.hash)
+                    await sync_to_async(client.torrents_recheck, torrent_hashes=tor_info.hash)
                     RECHECKED.add(tor_info.hash)
                 elif TORRENT_TIMEOUT and time() - STALLED_TIME.get(tor_info.hash, 0) >= TORRENT_TIMEOUT:
-                    bot_loop.create_task(__onDownloadError("Dead Torrent!", client, tor_info))
+                    bot_loop.create_task(__onDownloadError("Dead Torrent!", tor_info))
                 else:
                     await sync_to_async(client.torrents_reannounce, torrent_hashes=tor_info.hash)
             elif tor_info.state == "missingFiles":
@@ -238,9 +237,11 @@ async def __qb_listener():
             elif tor_info.completion_on != 0 and tor_info.hash not in UPLOADED and \
                 tor_info.state not in ['checkingUP', 'checkingDL', 'checkingResumeData']:
                 UPLOADED.add(tor_info.hash)
-                __onDownloadComplete(client, tor_info)
+                __onDownloadComplete(tor_info)
             elif tor_info.state in ['pausedUP', 'pausedDL'] and tor_info.hash in SEEDING:
                 SEEDING.remove(tor_info.hash)
-                __onSeedFinish(client, tor_info)
+                __onSeedFinish(tor_info)
     except Exception as e:
         LOGGER.error(str(e))
     finally:
         await sync_to_async(client.auth_log_out)
bot/helper/mirror_utils/download_utils/rclone_downloader.py (new file, 131 lines)

@@ -0,0 +1,131 @@
from asyncio import create_subprocess_exec
from asyncio.subprocess import PIPE
from random import SystemRandom
from string import ascii_letters, digits
from re import findall as re_findall
from json import loads

from bot import download_dict, download_dict_lock, config_dict, queue_dict_lock, non_queued_dl, \
    non_queued_up, queued_dl, LOGGER, GLOBAL_EXTENSION_FILTER
from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async, new_task
from bot.helper.mirror_utils.status_utils.rclone_status import RcloneStatus
from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage
from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
from bot.helper.ext_utils.fs_utils import get_base_name


class RcloneDownloadHelper:
    def __init__(self, listener):
        self.__listener = listener
        self.__proc = None
        self.__transferred_size = '0 B'
        self.__eta = '-'
        self.__percentage = '0%'
        self.__speed = '0 B/s'
        self.__is_cancelled = False
        self.name = ''
        self.size = 0
        self.gid = ''

    @property
    def transferred_size(self):
        return self.__transferred_size

    @property
    def percentage(self):
        return self.__percentage

    @property
    def speed(self):
        return self.__speed

    @property
    def eta(self):
        return self.__eta

    @new_task
    async def __progress(self):
        while not (self.__proc is None or self.__is_cancelled):
            data = (await self.__proc.stdout.readline()).decode()
            if not data:
                break
            if data := re_findall(r'Transferred:\s+([\d.]+\s*\w+)\s+/\s+([\d.]+\s*\w+),\s+([\d.]+%)\s*,\s+([\d.]+\s*\w+/s),\s+ETA\s+([\dwdhms]+)', data):
                self.__transferred_size, _, self.__percentage, self.__speed, self.__eta = data[0]

    async def add_download(self, link, config_path, path, name, from_queue=False):
        if not name:
            pre_name = link.rsplit('/', 1)
            name = pre_name[1] if len(pre_name) > 1 else link.split(':', 1)[1]
            if not name:
                name = link.strip(':')
        self.name = name
        path += name
        cmd = ['rclone', 'size', '--json', f'--config={config_path}', link]
        res = (await cmd_exec(cmd))[0]
        try:
            rdict = loads(res)
        except:
            await self.__listener.onDownloadError('Invalid Path!')
            return
        self.size = rdict['bytes']
        if config_dict['STOP_DUPLICATE'] and not self.__listener.isLeech:
            LOGGER.info('Checking File/Folder if already in Drive')
            rname = name
            if self.__listener.isZip:
                rname = f"{rname}.zip"
            elif self.__listener.extract:
                try:
                    rname = get_base_name(rname)
                except:
                    rname = None
            if rname is not None:
                smsg, button = await sync_to_async(GoogleDriveHelper().drive_list, rname, True)
                if smsg:
                    msg = "File/Folder is already available in Drive.\nHere are the search results:"
                    await sendMessage(self.__listener.message, msg, button)
                    return
        self.gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
        all_limit = config_dict['QUEUE_ALL']
        dl_limit = config_dict['QUEUE_DOWNLOAD']
        if all_limit or dl_limit:
            added_to_queue = False
            async with queue_dict_lock:
                dl = len(non_queued_dl)
                up = len(non_queued_up)
                if (all_limit and dl + up >= all_limit and (not dl_limit or dl >= dl_limit)) or (dl_limit and dl >= dl_limit):
                    added_to_queue = True
                    queued_dl[self.__listener.uid] = ['rcd', link, config_path, path, name, self.__listener]
            if added_to_queue:
                LOGGER.info(f"Added to Queue/Download: {name}")
                async with download_dict_lock:
                    download_dict[self.__listener.uid] = QueueStatus(name, self.size, self.gid, self.__listener.message, 'Dl')
                await self.__listener.onDownloadStart()
                await sendStatusMessage(self.__listener.message)
                return
        async with download_dict_lock:
            download_dict[self.__listener.uid] = RcloneStatus(self, self.__listener.message, 'dl')
        async with queue_dict_lock:
            non_queued_dl.add(self.__listener.uid)
        if not from_queue:
            await self.__listener.onDownloadStart()
            await sendStatusMessage(self.__listener.message)
            LOGGER.info(f"Download with rclone: {link}")
        else:
            LOGGER.info(f'Start Queued Download with rclone: {link}')
        ext = ','.join([f'*.{ext}' for ext in GLOBAL_EXTENSION_FILTER])
        cmd = ['rclone', 'copy', f'--config={config_path}', '-P', link, path, '--exclude', ext]
        self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE)
        self.__progress()
        await self.__proc.wait()
        if self.__is_cancelled:
            return
        return_code = self.__proc.returncode
        if return_code == 0:
            await self.__listener.onDownloadComplete()
        elif return_code != -9:
            await self.__listener.onDownloadError('Internal Error, Report..!')

    async def cancel_download(self):
        self.__is_cancelled = True
        self.__proc.kill()
        await self.__listener.onDownloadError('Download stopped by user!')
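`__progress` above scrapes rclone's `-P` stdout line by line. To illustrate the five capture groups, here is the same regex run against a made-up progress line in the shape rclone emits:

```python
from re import findall as re_findall

line = 'Transferred:       65.843 MiB / 1.500 GiB, 4%, 10.250 MiB/s, ETA 2m23s'
data = re_findall(r'Transferred:\s+([\d.]+\s*\w+)\s+/\s+([\d.]+\s*\w+),\s+([\d.]+%)\s*,\s+([\d.]+\s*\w+/s),\s+ETA\s+([\dwdhms]+)', line)
transferred, total, percentage, speed, eta = data[0]
print(transferred, total, percentage, speed, eta)
# 65.843 MiB 1.500 GiB 4% 10.250 MiB/s 2m23s
```

The helper discards `total` (it already knows the size from `rclone size --json`) and exposes the other four fields through its properties.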
@@ -38,7 +38,7 @@ class TelegramDownloadHelper:
         self.size = size
         self.__id = file_id
         async with download_dict_lock:
-            download_dict[self.__listener.uid] = TelegramDownloadStatus(self, self.__listener, file_id[:12])
+            download_dict[self.__listener.uid] = TelegramDownloadStatus(self, self.__listener.message, file_id[:12])
         async with queue_dict_lock:
             non_queued_dl.add(self.__listener.uid)
         if not from_queue:
@@ -127,7 +127,7 @@ class TelegramDownloadHelper:
         if added_to_queue:
             LOGGER.info(f"Added to Queue/Download: {name}")
             async with download_dict_lock:
-                download_dict[self.__listener.uid] = QueueStatus(name, size, gid, self.__listener, 'Dl')
+                download_dict[self.__listener.uid] = QueueStatus(name, size, gid, self.__listener.message, 'Dl')
             await self.__listener.onDownloadStart()
             await sendStatusMessage(self.__listener.message)
         async with global_lock:
@@ -25,11 +25,11 @@ class AriaDownloadStatus:
     def __update(self):
         if self.__download is None:
             self.__download = get_download(self.__gid)
-        elif self.__download.followed_by_ids:
-            self.__gid = self.__download.followed_by_ids[0]
-            self.__download = get_download(self.__gid)
         else:
             self.__download = self.__download.live
+            if self.__download.followed_by_ids:
+                self.__gid = self.__download.followed_by_ids[0]
+                self.__download = get_download(self.__gid)
 
     def progress(self):
         """
@@ -38,15 +38,8 @@ class AriaDownloadStatus:
         """
         return self.__download.progress_string()
 
-    def size_raw(self):
-        """
-        Gets total size of the mirror file/folder
-        :return: total size of mirror
-        """
-        return self.__download.total_length
-
     def processed_bytes(self):
-        return self.__download.completed_length
+        return self.__download.completed_length_string()
 
     def speed(self):
         self.__update()
@@ -10,10 +10,7 @@ class CloneStatus:
         self.message = message
 
     def processed_bytes(self):
-        return self.__obj.transferred_size
-
-    def size_raw(self):
-        return self.__size
+        return get_readable_file_size(self.__obj.transferred_size)
 
     def size(self):
         return get_readable_file_size(self.__size)
@@ -36,18 +33,12 @@ class CloneStatus:
     def progress(self):
         return f'{round(self.progress_raw(), 2)}%'
 
-    def speed_raw(self):
-        """
-        :return: Download speed in Bytes/Seconds
-        """
-        return self.__obj.cspeed()
-
     def speed(self):
-        return f'{get_readable_file_size(self.speed_raw())}/s'
+        return f'{get_readable_file_size(self.__obj.cspeed())}/s'
 
     def eta(self):
         try:
-            seconds = (self.__size - self.__obj.transferred_size) / self.speed_raw()
+            seconds = (self.__size - self.__obj.transferred_size) / self.__obj.cspeed()
             return f'{get_readable_time(seconds)}'
         except:
             return '-'
@@ -20,11 +20,11 @@ class ExtractStatus:
         return self.__gid
 
     def speed_raw(self):
-        return self.processed_bytes() / (time() - self.__start_time)
+        return self.processed_raw() / (time() - self.__start_time)
 
     def progress_raw(self):
         try:
-            return self.processed_bytes() / self.__size * 100
+            return self.processed_raw() / self.__size * 100
         except:
             return 0
 
@@ -37,15 +37,12 @@ class ExtractStatus:
     def name(self):
         return self.__name
 
-    def size_raw(self):
-        return self.__size
-
     def size(self):
         return get_readable_file_size(self.__size)
 
     def eta(self):
         try:
-            seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
+            seconds = (self.__size - self.processed_raw()) / self.speed_raw()
             return f'{get_readable_time(seconds)}'
         except:
             return '-'
@@ -54,6 +51,9 @@ class ExtractStatus:
         return MirrorStatus.STATUS_EXTRACTING
 
+    def processed_bytes(self):
+        return get_readable_file_size(self.processed_raw())
+
     def processed_raw(self):
         if self.__listener.newDir:
             return async_to_sync(get_path_size, f"{DOWNLOAD_DIR}{self.__uid}10000")
         else:
@@ -3,17 +3,14 @@ from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size,
 
 
 class GdDownloadStatus:
-    def __init__(self, obj, size, listener, gid):
+    def __init__(self, obj, size, message, gid):
         self.__obj = obj
         self.__size = size
         self.__gid = gid
-        self.message = listener.message
+        self.message = message
 
     def processed_bytes(self):
-        return self.__obj.processed_bytes
-
-    def size_raw(self):
-        return self.__size
+        return get_readable_file_size(self.__obj.processed_bytes)
 
     def size(self):
         return get_readable_file_size(self.__size)
@@ -36,18 +33,12 @@ class GdDownloadStatus:
     def progress(self):
         return f'{round(self.progress_raw(), 2)}%'
 
-    def speed_raw(self):
-        """
-        :return: Download speed in Bytes/Seconds
-        """
-        return self.__obj.speed()
-
     def speed(self):
-        return f'{get_readable_file_size(self.speed_raw())}/s'
+        return f'{get_readable_file_size(self.__obj.speed())}/s'
 
     def eta(self):
         try:
-            seconds = (self.__size - self.__obj.processed_bytes) / self.speed_raw()
+            seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed()
             return f'{get_readable_time(seconds)}'
         except:
             return '-'
@@ -4,52 +4,42 @@ from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus,
 
 class MegaDownloadStatus:
 
-    def __init__(self, obj, listener):
+    def __init__(self, obj, message):
         self.__obj = obj
-        self.message = listener.message
+        self.message = message
 
-    def name(self) -> str:
+    def name(self):
         return self.__obj.name
 
     def progress_raw(self):
         try:
-            return round(self.processed_bytes() / self.__obj.size * 100,2)
+            return round(self.__obj.downloaded_bytes / self.__obj.size * 100, 2)
         except:
             return 0.0
 
     def progress(self):
         """Progress of download in percentage"""
         return f"{self.progress_raw()}%"
 
-    def status(self) -> str:
+    def status(self):
         return MirrorStatus.STATUS_DOWNLOADING
 
     def processed_bytes(self):
-        return self.__obj.downloaded_bytes
+        return get_readable_file_size(self.__obj.downloaded_bytes)
 
     def eta(self):
         try:
-            seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
+            seconds = (self.__obj.size - self.__obj.downloaded_bytes) / self.__obj.speed
             return f'{get_readable_time(seconds)}'
         except ZeroDivisionError:
             return '-'
 
-    def size_raw(self):
-        return self.__obj.size
+    def size(self):
+        return get_readable_file_size(self.__obj.size)
 
-    def size(self) -> str:
-        return get_readable_file_size(self.size_raw())
+    def speed(self):
+        return f'{get_readable_file_size(self.__obj.speed)}/s'
 
-    def downloaded(self) -> str:
-        return get_readable_file_size(self.__obj.downloadedBytes)
-
-    def speed_raw(self):
-        return self.__obj.speed
-
-    def speed(self) -> str:
-        return f'{get_readable_file_size(self.speed_raw())}/s'
-
-    def gid(self) -> str:
+    def gid(self):
         return self.__obj.gid
 
     def download(self):
@@ -33,15 +33,8 @@ class QbDownloadStatus:
         """
         return f'{round(self.__info.progress*100, 2)}%'
 
-    def size_raw(self):
-        """
-        Gets total size of the mirror file/folder
-        :return: total size of mirror
-        """
-        return self.__info.size
-
     def processed_bytes(self):
-        return self.__info.downloaded
+        return get_readable_file_size(self.__info.downloaded)
 
     def speed(self):
         self.__update()
@@ -114,3 +107,5 @@ class QbDownloadStatus:
         await sleep(0.3)
         await self.__listener.onDownloadError('Download stopped by user!')
         await sync_to_async(self.__client.torrents_delete, torrent_hashes=self.__hash, delete_files=True)
+        await sync_to_async(self.__client.torrents_delete_tags, tags=self.__info.tags)
+        await sync_to_async(self.__client.auth_log_out)
@@ -18,9 +18,6 @@ class QueueStatus:
     def name(self):
         return self.__name
 
-    def size_raw(self):
-        return self.__size
-
     def size(self):
         return get_readable_file_size(self.__size)
 
bot/helper/mirror_utils/status_utils/rclone_status.py (new file, 38 lines)

@@ -0,0 +1,38 @@
from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus


class RcloneStatus:
    def __init__(self, obj, message, status):
        self.__obj = obj
        self.__status = status
        self.message = message

    def gid(self):
        return self.__obj.gid

    def progress(self):
        return self.__obj.percentage

    def speed(self):
        return self.__obj.speed

    def name(self):
        return self.__obj.name

    def size(self):
        return get_readable_file_size(self.__obj.size)

    def eta(self):
        return self.__obj.eta

    def status(self):
        if self.__status == 'dl':
            return MirrorStatus.STATUS_DOWNLOADING
        else:
            return MirrorStatus.STATUS_UPLOADING

    def processed_bytes(self):
        return self.__obj.transferred_size

    def download(self):
        return self.__obj
@@ -3,22 +3,19 @@ from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size,
 
 
 class TelegramDownloadStatus:
-    def __init__(self, obj, listener, gid):
+    def __init__(self, obj, message, gid):
         self.__obj = obj
         self.__gid = gid
-        self.message = listener.message
+        self.message = message
 
     def gid(self):
         return self.__gid
 
     def processed_bytes(self):
-        return self.__obj.downloaded_bytes
-
-    def size_raw(self):
-        return self.__obj.size
+        return get_readable_file_size(self.__obj.downloaded_bytes)
 
     def size(self):
-        return get_readable_file_size(self.size_raw())
+        return get_readable_file_size(self.__obj.size)
 
     def status(self):
         return MirrorStatus.STATUS_DOWNLOADING
@@ -32,18 +29,12 @@ class TelegramDownloadStatus:
     def progress(self):
         return f'{round(self.progress_raw(), 2)}%'
 
-    def speed_raw(self):
-        """
-        :return: Download speed in Bytes/Seconds
-        """
-        return self.__obj.download_speed
-
     def speed(self):
-        return f'{get_readable_file_size(self.speed_raw())}/s'
+        return f'{get_readable_file_size(self.__obj.download_speed)}/s'
 
     def eta(self):
         try:
-            seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
+            seconds = (self.__obj.size - self.__obj.downloaded_bytes) / self.__obj.download_speed
             return f'{get_readable_time(seconds)}'
         except:
             return '-'
@@ -3,17 +3,14 @@ from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size,
 
 
 class TgUploadStatus:
-    def __init__(self, obj, size, gid, listener):
+    def __init__(self, obj, size, gid, message):
         self.__obj = obj
         self.__size = size
         self.__gid = gid
-        self.message = listener.message
+        self.message = message
 
     def processed_bytes(self):
-        return self.__obj.uploaded_bytes
-
-    def size_raw(self):
-        return self.__size
+        return get_readable_file_size(self.__obj.uploaded_bytes)
 
     def size(self):
         return get_readable_file_size(self.__size)
@@ -33,18 +30,12 @@ class TgUploadStatus:
     def progress(self):
         return f'{round(self.progress_raw(), 2)}%'
 
-    def speed_raw(self):
-        """
-        :return: Upload speed in Bytes/Seconds
-        """
-        return self.__obj.speed
-
     def speed(self):
-        return f'{get_readable_file_size(self.speed_raw())}/s'
+        return f'{get_readable_file_size(self.__obj.speed)}/s'
 
     def eta(self):
         try:
-            seconds = (self.__size - self.__obj.uploaded_bytes) / self.speed_raw()
+            seconds = (self.__size - self.__obj.uploaded_bytes) / self.__obj.speed
             return f'{get_readable_time(seconds)}'
         except ZeroDivisionError:
             return '-'
@@ -3,17 +3,14 @@ from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size,
 
 
 class UploadStatus:
-    def __init__(self, obj, size, gid, listener):
+    def __init__(self, obj, size, gid, message):
         self.__obj = obj
         self.__size = size
         self.__gid = gid
-        self.message = listener.message
+        self.message = message
 
     def processed_bytes(self):
-        return self.__obj.processed_bytes
-
-    def size_raw(self):
-        return self.__size
+        return get_readable_file_size(self.__obj.processed_bytes)
 
     def size(self):
         return get_readable_file_size(self.__size)
@@ -33,18 +30,12 @@ class UploadStatus:
     def progress(self):
         return f'{round(self.progress_raw(), 2)}%'
 
-    def speed_raw(self):
-        """
-        :return: Upload speed in Bytes/Seconds
-        """
-        return self.__obj.speed()
-
     def speed(self):
-        return f'{get_readable_file_size(self.speed_raw())}/s'
+        return f'{get_readable_file_size(self.__obj.speed())}/s'
 
     def eta(self):
         try:
-            seconds = (self.__size - self.__obj.processed_bytes) / self.speed_raw()
+            seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed()
             return f'{get_readable_time(seconds)}'
         except ZeroDivisionError:
             return '-'
@@ -14,16 +14,16 @@ class YtDlpDownloadStatus:
         return self.__gid
 
     def processed_bytes(self):
         return get_readable_file_size(self.processed_raw())
 
     def processed_raw(self):
         if self.__obj.downloaded_bytes != 0:
             return self.__obj.downloaded_bytes
         else:
             return async_to_sync(get_path_size, f"{DOWNLOAD_DIR}{self.__uid}")
 
-    def size_raw(self):
-        return self.__obj.size
-
     def size(self):
-        return get_readable_file_size(self.size_raw())
+        return get_readable_file_size(self.__obj.size)
 
     def status(self):
         return MirrorStatus.STATUS_DOWNLOADING
@@ -31,26 +31,17 @@ class YtDlpDownloadStatus:
     def name(self):
         return self.__obj.name
 
-    def progress_raw(self):
-        return self.__obj.progress
-
     def progress(self):
-        return f'{round(self.progress_raw(), 2)}%'
-
-    def speed_raw(self):
-        """
-        :return: Download speed in Bytes/Seconds
-        """
-        return self.__obj.download_speed
+        return f'{round(self.__obj.progress, 2)}%'
 
     def speed(self):
-        return f'{get_readable_file_size(self.speed_raw())}/s'
+        return f'{get_readable_file_size(self.__obj.download_speed)}/s'
 
     def eta(self):
         if self.__obj.eta != '-':
             return f'{get_readable_time(self.__obj.eta)}'
         try:
-            seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
+            seconds = (self.__obj.size - self.processed_raw()) / self.__obj.download_speed
             return f'{get_readable_time(seconds)}'
         except:
             return '-'
@@ -37,15 +37,12 @@ class ZipStatus:
     def name(self):
         return self.__name
 
-    def size_raw(self):
-        return self.__size
-
     def size(self):
         return get_readable_file_size(self.__size)
 
     def eta(self):
         try:
-            seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
+            seconds = (self.__size - self.processed_bytes()) / self.speed_raw()
             return f'{get_readable_time(seconds)}'
         except:
             return '-'
@@ -55,9 +52,10 @@ class ZipStatus:
 
     def processed_bytes(self):
         if self.__listener.newDir:
-            return async_to_sync(get_path_size, f"{DOWNLOAD_DIR}{self.__uid}10000")
+            size = async_to_sync(get_path_size, f"{DOWNLOAD_DIR}{self.__uid}10000")
         else:
-            return async_to_sync(get_path_size, f"{DOWNLOAD_DIR}{self.__uid}") - self.__size
+            size = async_to_sync(get_path_size, f"{DOWNLOAD_DIR}{self.__uid}") - self.__size
+        return get_readable_file_size(size)
 
     def download(self):
         return self
@@ -159,7 +159,7 @@ class GoogleDriveHelper:
                 break
         return files
 
-    async def _progress(self):
+    async def __progress(self):
         if self.__status is not None:
             chunk_size = self.__status.total_size * self.__status.progress() - self._file_processed_bytes
             self._file_processed_bytes = self.__status.total_size * self.__status.progress()
@@ -198,7 +198,7 @@ class GoogleDriveHelper:
         file_path = f"{self.__path}/{file_name}"
         size = get_readable_file_size(self.__size)
         LOGGER.info(f"Uploading: {file_path}")
-        self.__updater = setInterval(self.__update_interval, self._progress)
+        self.__updater = setInterval(self.__update_interval, self.__progress)
         try:
             if ospath.isfile(file_path):
                 mime_type = get_mime_type(file_path)
@@ -722,7 +722,7 @@ class GoogleDriveHelper:
         self.__service = self.__authorize()
         self.__is_downloading = True
         file_id = self.__getIdFromUrl(link)
-        self.__updater = setInterval(self.__update_interval, self._progress)
+        self.__updater = setInterval(self.__update_interval, self.__progress)
         try:
             meta = self.__getFileMetadata(file_id)
             if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE:
205
bot/helper/rclone_utils/list_utils.py
Normal file
205
bot/helper/rclone_utils/list_utils.py
Normal file
@ -0,0 +1,205 @@
|
||||
#!/usr/bin/env python3
|
||||
from asyncio import wait_for, Event, wrap_future
|
||||
from aiofiles.os import path as aiopath
|
||||
from aiofiles import open as aiopen
|
||||
from configparser import ConfigParser
|
||||
from pyrogram.handlers import CallbackQueryHandler
|
||||
from pyrogram.filters import regex, user
|
||||
from functools import partial
|
||||
from json import loads
|
||||
from math import ceil
|
||||
|
||||
from bot.helper.telegram_helper.button_build import ButtonMaker
|
||||
from bot.helper.telegram_helper.message_utils import sendMessage, editMessage
|
||||
from ..ext_utils.bot_utils import cmd_exec, new_thread, get_readable_file_size, new_task
|
||||
|
||||
LIST_LIMIT = 6
|
||||
|
||||
@new_task
|
||||
async def path_updates(client, query, obj):
|
||||
await query.answer()
|
||||
message = query.message
|
||||
data = query.data.split()
|
||||
if data[1] == 'pre':
|
||||
obj.iter_start -= LIST_LIMIT * obj.page_step
|
||||
await obj.get_path_buttons()
|
||||
elif data[1] == 'nex':
|
||||
obj.iter_start += LIST_LIMIT * obj.page_step
|
||||
await obj.get_path_buttons()
|
||||
elif data[1] == 'cancel':
|
||||
obj.path = ''
|
||||
obj.event.set()
|
||||
await message.delete()
|
||||
elif data[1] == 'back':
|
||||
if data[2] == 're':
|
||||
await obj.list_config()
|
||||
else:
|
||||
await obj.back_from_path()
|
||||
elif data[1] == 're':
|
||||
obj.path = data[2]
|
||||
await obj.get_path()
|
||||
elif data[1] == 'pa':
|
||||
index = int(data[3])
|
||||
obj.path = f"{obj.path}{obj.path_list[index]['Path']}" if obj.path.endswith(':') else f"{obj.path}/{obj.path_list[index]['Path']}"
|
||||
if data[2] == 'fo':
|
||||
await obj.get_path()
|
||||
else:
|
||||
await message.delete()
|
||||
obj.event.set()
|
||||
elif data[1] == 'ps':
|
||||
if obj.page_step == int(data[2]):
|
||||
return
|
||||
obj.page_step = int(data[2])
|
||||
await obj.get_path_buttons()
|
||||
elif data[1] == 'root':
|
||||
path = obj.path.split(':', 1)
|
||||
if '' in path:
|
||||
path.remove('')
|
||||
if len(path) > 1:
|
||||
obj.path = f"{path[0]}:"
|
||||
await obj.get_path()
|
||||
elif data[1] == 'cur':
|
||||
await message.delete()
|
||||
obj.event.set()
|
||||
elif data[1] == 'owner':
|
||||
obj.config_path = 'rclone.conf'
|
||||
await obj.list_remotes()
|
||||
elif data[1] == 'user':
|
||||
obj.config_path = obj.user_rcc_path
|
||||
await obj.list_remotes()
|
||||
|
||||
class RcloneHelper:
|
||||
def __init__(self, client, message):
|
||||
self.__user_id = message.from_user.id
|
||||
self.__rc_user = False
|
||||
self.__rc_owner = False
|
||||
self.__client = client
|
||||
self.__message = message
|
||||
self.__sections = []
|
||||
self.__reply_to = None
|
||||
self.event = Event()
|
||||
self.user_rcc_path = f'rclone/{self.__user_id}.conf'
|
||||
self.config_path = ''
|
||||
self.path = ''
|
||||
self.path_list = []
|
||||
self.iter_start = 0
|
||||
self.page_step = 1
|
||||
|
||||
@new_thread
|
||||
async def __event_handler(self):
|
||||
pfunc = partial(path_updates, obj=self)
|
||||
handler = self.__client.add_handler(CallbackQueryHandler(pfunc, filters=regex('^rcq') & user(self.__user_id)), group=-1)
|
||||
try:
|
||||
await wait_for(self.event.wait(), timeout=240)
|
||||
except:
|
||||
self.path = ''
|
||||
self.event.set()
|
||||
self.__client.remove_handler(*handler)
|
||||
|
||||
async def get_path_buttons(self):
|
||||
items_no = len(self.path_list)
|
||||
pages = ceil(items_no/LIST_LIMIT)
|
||||
if items_no <= self.iter_start:
|
||||
self.iter_start = 0
|
||||
elif self.iter_start < 0:
|
||||
self.iter_start = LIST_LIMIT * (pages - 1)
|
||||
page = (self.iter_start/LIST_LIMIT) + 1 if self.iter_start != 0 else 1
|
||||
buttons = ButtonMaker()
|
||||
for index, idict in enumerate(self.path_list[self.iter_start:LIST_LIMIT+self.iter_start]):
|
||||
orig_index = index + self.iter_start
|
||||
ptype = 'fo' if idict['IsDir'] else 'fi'
|
||||
name = idict['Path'] if idict['IsDir'] else f"[{get_readable_file_size(idict['Size'])}] {idict['Path']}"
|
||||
buttons.ibutton(name, f'rcq pa {ptype} {orig_index}')
|
||||
if items_no > LIST_LIMIT:
|
||||
for i in [1, 2, 4, 6, 8, 10]:
|
||||
buttons.ibutton(i, f'rcq ps {i}', position='header')
|
||||
buttons.ibutton('Previous', 'rcq pre', position='footer')
|
||||
buttons.ibutton('Next', 'rcq nex', position='footer')
|
||||
buttons.ibutton('Choose Current Path', 'rcq cur', position='footer')
|
||||
buttons.ibutton('Back', 'rcq back pa', position='footer')
|
||||
if len(self.path.split(':', 1)) > 1 and len(self.__sections) > 1 or self.__rc_user and self.__rc_owner:
|
||||
buttons.ibutton('Back To Root', 'rcq root', position='footer')
|
||||
buttons.ibutton('Cancel', 'rcq cancel', position='footer')
|
||||
button = buttons.build_menu(f_cols=2)
|
||||
msg = f'Choose Path:\n\nItems: {items_no} | Page: {int(page)}/{pages} | Page Step: {self.page_step}'
|
||||
msg += f'\n\nCurrent Path: <code>{self.path}</code>'
|
||||
if self.__reply_to is None:
|
||||
self.__reply_to = await sendMessage(self.__message, msg, button)
|
||||
else:
|
||||
await editMessage(self.__reply_to, msg, button)
|
||||
|
||||
async def get_path(self):
|
||||
cmd = ['rclone', 'lsjson', f'--config={self.config_path}', self.path]
|
||||
res, _, code = await cmd_exec(cmd)
|
||||
if code != 0:
|
||||
self.path = 'Internal Error!'
|
||||
self.event.set()
|
||||
else:
|
||||
self.path_list = sorted(loads(res), key=lambda x: not x['IsDir'])
|
||||
self.iter_start = 0
|
||||
await self.get_path_buttons()
|
||||
|
||||
async def list_remotes(self):
|
||||
config = ConfigParser()
|
||||
async with aiopen(self.config_path, 'r') as f:
|
||||
contents = await f.read()
|
||||
config.read_string(contents)
|
||||
self.__sections = config.sections()
|
||||
if len(self.__sections) == 1:
|
||||
self.path = f'{self.__sections[0]}:'
|
||||
await self.get_path()
|
||||
else:
|
||||
buttons = ButtonMaker()
|
||||
for remote in self.__sections:
|
||||
buttons.ibutton(remote, f'rcq re {remote}:')
|
||||
if self.__rc_user and self.__rc_owner:
|
||||
buttons.ibutton('Back', 'rcq back re')
|
||||
buttons.ibutton('Cancel', 'rcq cancel')
|
||||
button = buttons.build_menu(2)
|
||||
if self.__reply_to is None:
|
||||
self.__reply_to = await sendMessage(self.__message, 'Choose Rclone remote:', button)
|
||||
else:
|
||||
await editMessage(self.__reply_to, 'Choose Rclone remote:', button)
|
||||
|
    async def list_config(self):
        if not self.__rc_owner and not self.__rc_user:
            return None
        elif self.__rc_user and self.__rc_owner:
            buttons = ButtonMaker()
            buttons.ibutton('Owner Config', 'rcq owner')
            buttons.ibutton('My Config', 'rcq user')
            buttons.ibutton('Cancel', 'rcq cancel')
            button = buttons.build_menu(2)
            if self.__reply_to is None:
                self.__reply_to = await sendMessage(self.__message, 'Choose Rclone config:', button)
            else:
                await editMessage(self.__reply_to, 'Choose Rclone config:', button)
        else:
            self.config_path = 'rclone.conf' if self.__rc_owner else self.user_rcc_path
            await self.list_remotes()
        return ''

    async def back_from_path(self):
        re_path = self.path.split(':', 1)
        if '' in re_path:
            re_path.remove('')
        if len(re_path) > 1:
            path = self.path.rsplit('/', 1)
            self.path = path[0] if len(path) > 1 else f'{re_path[0]}:'
            await self.get_path()
        elif len(self.__sections) > 1:
            await self.list_remotes()
        else:
            await self.list_config()

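The rsplit fallback above is what distinguishes "go up one folder" from "back to the remote root"; a quick illustration of both cases:

# Mirrors back_from_path: strip one segment, or fall back to the remote root
def parent(path):
    remote = path.split(':', 1)[0]
    head = path.rsplit('/', 1)
    return head[0] if len(head) > 1 else f'{remote}:'

print(parent('main:dump/iso'))  # -> 'main:dump'
print(parent('main:dump'))      # -> 'main:' (back at the remote root)
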
    async def get_rclone_path(self):
        future = self.__event_handler()
        self.__rc_user = await aiopath.exists(self.user_rcc_path)
        self.__rc_owner = await aiopath.exists('rclone.conf')
        path = await self.list_config()
        if path is None:
            self.event.set()
            return None, None
        await wrap_future(future)
        await self.__reply_to.delete()
        return self.config_path, self.path
@@ -23,13 +23,18 @@ class ButtonMaker:
         elif position == 'footer':
             self.__footer_button.append(InlineKeyboardButton(text=key, callback_data=data))

-    def build_menu(self, n_cols):
-        menu = [self.__button[i:i + n_cols] for i in range(0, len(self.__button), n_cols)]
+    def build_menu(self, b_cols=1, h_cols=8, f_cols=8):
+        menu = [self.__button[i:i+b_cols] for i in range(0, len(self.__button), b_cols)]
         if self.__header_button:
+            h_cnt = len(self.__header_button)
+            if h_cnt > h_cols:
+                header_buttons = [self.__header_button[i:i+h_cols] for i in range(0, len(self.__header_button), h_cols)]
+                menu = header_buttons + menu
+            else:
                 menu.insert(0, self.__header_button)
         if self.__footer_button:
-            if len(self.__footer_button) > 8:
-                [menu.append(self.__footer_button[i:i+8]) for i in range(0, len(self.__footer_button), 8)]
+            if len(self.__footer_button) > f_cols:
+                [menu.append(self.__footer_button[i:i+f_cols]) for i in range(0, len(self.__footer_button), f_cols)]
             else:
                 menu.append(self.__footer_button)
         return InlineKeyboardMarkup(menu)
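The new signature lets each region of the keyboard get its own column count; a minimal sketch of the row-chunking it performs, with plain values standing in for InlineKeyboardButton objects:

def chunk(buttons, cols):
    # Same slicing pattern build_menu uses to split a flat list into rows
    return [buttons[i:i + cols] for i in range(0, len(buttons), cols)]

body = ['file1', 'file2', 'file3']
header = [1, 2, 4, 6, 8, 10]      # the page-step buttons from get_path_buttons
footer = ['Previous', 'Next', 'Cancel']

# build_menu(f_cols=2): body one per row, six header buttons fit one
# row (6 <= h_cols=8), footer overflows f_cols=2 and is split into rows
menu = chunk(header, 8) + chunk(body, 1) + chunk(footer, 2)
print(menu)
# [[1, 2, 4, 6, 8, 10], ['file1'], ['file2'], ['file3'], ['Previous', 'Next'], ['Cancel']]
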
@@ -28,4 +28,3 @@ class CustomFilters:
         return bool(uid == OWNER_ID or uid in user_data and user_data[uid].get('is_sudo'))

     sudo = create(sudo_user)
-
@@ -5,10 +5,11 @@ from functools import partial
 from asyncio import create_subprocess_exec, create_subprocess_shell, sleep
 from aiofiles.os import remove, rename, path as aiopath
 from aiofiles import open as aiopen
-from os import environ
+from os import environ, getcwd
 from dotenv import load_dotenv
 from time import time
 from io import BytesIO
+from aioshutil import rmtree as aiormtree

 from bot import config_dict, user_data, DATABASE_URL, MAX_SPLIT_SIZE, DRIVES_IDS, DRIVES_NAMES, INDEX_URLS, aria2, GLOBAL_EXTENSION_FILTER, status_reply_dict_lock, Interval, aria2_options, aria2c_global, IS_PREMIUM_USER, download_dict, qbit_options, get_client, LOGGER, bot
 from bot.helper.telegram_helper.message_utils import sendMessage, sendFile, editMessage, update_all_messages
@@ -153,7 +154,7 @@ async def load_config():
     DUMP_CHAT = '' if len(DUMP_CHAT) == 0 else int(DUMP_CHAT)

     STATUS_LIMIT = environ.get('STATUS_LIMIT', '')
-    STATUS_LIMIT = '' if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT)
+    STATUS_LIMIT = 10 if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT)

     RSS_CHAT_ID = environ.get('RSS_CHAT_ID', '')
     RSS_CHAT_ID = '' if len(RSS_CHAT_ID) == 0 else int(RSS_CHAT_ID)
@@ -524,7 +525,7 @@ async def update_private_file(client, message, pre_message):
         await remove(fn)
         if fn == 'accounts':
             if await aiopath.exists('accounts'):
-                await (await create_subprocess_exec("rm", "-rf", "accounts")).wait()
+                await aiormtree('accounts')
             config_dict['USE_SERVICE_ACCOUNTS'] = False
             if DATABASE_URL:
                 await DbManger().update_config({'USE_SERVICE_ACCOUNTS': False})
@@ -535,10 +536,10 @@ async def update_private_file(client, message, pre_message):
         await message.delete()
     elif doc := message.document:
         file_name = doc.file_name
-        await message.download(file_name=f'/usr/src/app/{file_name}')
+        await message.download(file_name=f'{getcwd()}/{file_name}')
         if file_name == 'accounts.zip':
             if await aiopath.exists('accounts'):
-                await (await create_subprocess_exec("rm", "-rf", "accounts")).wait()
+                await aiormtree('accounts')
             await (await create_subprocess_exec("7z", "x", "-o.", "-aoa", "accounts.zip", "accounts/*.json")).wait()
             await (await create_subprocess_exec("chmod", "-R", "777", "accounts")).wait()
         elif file_name == 'list_drives.txt':
@@ -587,8 +588,8 @@ async def event_handler(client, query, pfunc, rfunc, document=False):
     handler_dict[chat_id] = True
     start_time = time()
     async def event_filter(_, __, event):
-        return bool(event.from_user.id == query.from_user.id and event.chat.id == chat_id and
-                    (event.text or event.document and document))
+        return bool(event.from_user.id or event.sender_chat.id == query.from_user.id and event.chat.id == chat_id
+                    and (event.text or event.document and document))
     handler = client.add_handler(MessageHandler(pfunc, filters=create(event_filter)), group=-1)
     while handler_dict[chat_id]:
         await sleep(0.5)
@@ -787,12 +788,12 @@ async def edit_bot_settings(client, query):
         if await aiopath.exists(filename):
             await (await create_subprocess_shell(f"git add -f {filename} \
                                                    && git commit -sm botsettings -q \
-                                                   && git push origin {config_dict['UPSTREAM_BRANCH']} -q")).wait()
+                                                   && git push origin {config_dict['UPSTREAM_BRANCH']} -qf")).wait()
         else:
             await (await create_subprocess_shell(f"git rm -r --cached {filename} \
                                                    && git commit -sm botsettings -q \
-                                                   && git push origin {config_dict['UPSTREAM_BRANCH']} -q")).wait()
-        await message.reply_to_mssage.delete()
+                                                   && git push origin {config_dict['UPSTREAM_BRANCH']} -qf")).wait()
+        await message.reply_to_message.delete()
         await message.delete()

 async def bot_settings(client, message):
@@ -7,7 +7,7 @@ from bot import bot, aria2, download_dict, download_dict_lock, OWNER_ID, user_da
 from bot.helper.telegram_helper.bot_commands import BotCommands
 from bot.helper.telegram_helper.filters import CustomFilters
 from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage
-from bot.helper.ext_utils.bot_utils import async_to_sync, getDownloadByGid, MirrorStatus, bt_selection_buttons, sync_to_async
+from bot.helper.ext_utils.bot_utils import getDownloadByGid, MirrorStatus, bt_selection_buttons, sync_to_async


 async def select(client, message):
@@ -110,7 +110,7 @@ async def get_confirm(client, query):
         except:
             pass
         try:
-            await async_to_sync(aria2.client.unpause, id_)
+            await sync_to_async(aria2.client.unpause, id_)
         except Exception as e:
             LOGGER.error(f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!")
         await sendStatusMessage(message)
@@ -7,18 +7,20 @@ from asyncio import sleep
 from aiofiles.os import path as aiopath

 from bot import bot, DOWNLOAD_DIR, LOGGER, config_dict
-from bot.helper.ext_utils.bot_utils import is_url, is_magnet, is_mega_link, is_gdrive_link, get_content_type, new_task, sync_to_async
+from bot.helper.ext_utils.bot_utils import is_url, is_magnet, is_mega_link, is_gdrive_link, get_content_type, new_task, sync_to_async, is_rclone_path
 from bot.helper.ext_utils.exceptions import DirectDownloadLinkException
 from bot.helper.mirror_utils.download_utils.aria2_download import add_aria2c_download
 from bot.helper.mirror_utils.download_utils.gd_downloader import add_gd_download
 from bot.helper.mirror_utils.download_utils.qbit_downloader import add_qb_torrent
 from bot.helper.mirror_utils.download_utils.mega_downloader import add_mega_download
+from bot.helper.mirror_utils.download_utils.rclone_downloader import RcloneDownloadHelper
 from bot.helper.mirror_utils.download_utils.direct_link_generator import direct_link_generator
 from bot.helper.mirror_utils.download_utils.telegram_downloader import TelegramDownloadHelper
 from bot.helper.telegram_helper.bot_commands import BotCommands
 from bot.helper.telegram_helper.filters import CustomFilters
 from bot.helper.telegram_helper.message_utils import sendMessage
 from bot.helper.listener import MirrorLeechListener
+from bot.helper.rclone_utils.list_utils import RcloneHelper


 @new_task
@@ -28,7 +30,6 @@ async def _mirror_leech(client, message, isZip=False, extract=False, isQbit=Fals
         return
     mesg = message.text.split('\n')
     message_args = mesg[0].split(maxsplit=1)
-    index = 1
     ratio = None
     seed_time = None
     select = False
@@ -38,6 +39,7 @@ async def _mirror_leech(client, message, isZip=False, extract=False, isQbit=Fals
     folder_name = ''

     if len(message_args) > 1:
+        index = 1
         args = mesg[0].split(maxsplit=4)
         args.pop(0)
         for x in args:
@@ -136,7 +138,7 @@ async def _mirror_leech(client, message, isZip=False, extract=False, isQbit=Fals
             await TelegramDownloadHelper(listener).add_download(reply_to, f'{path}/', name)
         return

-    if not is_url(link) and not is_magnet(link) and not await aiopath.exists(link):
+    if not is_url(link) and not is_magnet(link) and not await aiopath.exists(link) and not is_rclone_path(link):
         help_msg = '''
 <code>/cmd</code> link n: newname pswd: xx(zip/unzip)

@@ -165,18 +167,26 @@ Number should be always before n: or pswd:
 <code>/cmd</code> 10(number of links/files) m:folder_name
 Number and m:folder_name (folder_name without space) should be always before n: or pswd:

+<b>Rclone Download</b>:
+Treat rclone paths exactly like links
+<code>/cmd</code> main:dump/ubuntu.iso or <code>rcd</code> (To select config, remote and path)
+Users can add their own rclone from user settings
+If you want to add path manually from your config add <code>mrcc:</code> before the path without space
+<code>/cmd</code> <code>mrcc:</code>main:dump/ubuntu.iso
+
 <b>NOTES:</b>
 1. When use cmd by reply don't add any option in link msg! Always add them after cmd msg!
 2. Options (<b>n: and pswd:</b>) should be added randomly after the link if link along with the cmd and after any other option
 3. Options (<b>d, s, m: and multi</b>) should be added randomly before the link and before any other option.
 4. Commands that start with <b>qb</b> are ONLY for torrents.
 5. (n:) option doesn't work with torrents.
 '''
         await sendMessage(message, help_msg)
         return

     LOGGER.info(link)

-    if not is_mega_link(link) and not isQbit and not is_magnet(link) \
+    if not is_mega_link(link) and not isQbit and not is_magnet(link) and not is_rclone_path(link) \
         and not is_gdrive_link(link) and not link.endswith('.torrent'):
         content_type = await sync_to_async(get_content_type, link)
         if content_type is None or re_match(r'text/html|text/plain', content_type):
@@ -192,8 +202,24 @@ Number and m:folder_name (folder_name without space) should be always before n:
     __run_multi()

     listener = MirrorLeechListener(message, isZip, extract, isQbit, isLeech, pswd, tag, select, seed, sameDir)

-    if is_gdrive_link(link):
+    if is_rclone_path(link):
+        config_path = 'rclone.conf'
+        if link == 'rcd':
+            config_path, link = await RcloneHelper(client, message).get_rclone_path()
+        elif link.startswith('mrcc:'):
+            link = link.split('mrcc:', 1)[1]
+            config_path = f'rclone/{message.from_user.id}.conf'
+        if link is None or not await aiopath.exists(config_path):
+            await sendMessage(message, "Rclone Config not Exists!")
+            return
+        if link == '':
+            await sendMessage(message, "Task has been cancelled!")
+            return
+        if not is_rclone_path(link):
+            await sendMessage(message, link)
+            return
+        await RcloneDownloadHelper(listener).add_download(link, config_path, f'{path}/', name)
+    elif is_gdrive_link(link):
         if not isZip and not extract and not isLeech:
             gmsg = f"Use /{BotCommands.CloneCommand} to clone Google Drive file/folder\n\n"
             gmsg += f"Use /{BotCommands.ZipMirrorCommand[0]} to make zip of Google Drive folder\n\n"
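`is_rclone_path` comes from bot_utils and its body is not part of this diff; a hypothetical sketch of such a check (the pattern here is an assumption for illustration, not the upstream implementation):

from re import match

def is_rclone_path(path):
    # Hypothetical: accepts "remote:sub/path" plus the special forms used
    # above (the "rcd" selector and the user-config "mrcc:" prefix)
    return bool(match(r'^(mrcc:)?(?!magnet:)[\w-]+:(?!//)[^\s]*$|^rcd$', path))

print(is_rclone_path('main:dump/ubuntu.iso'))       # True
print(is_rclone_path('mrcc:main:dump/ubuntu.iso'))  # True
print(is_rclone_path('https://example.com/file'))   # False (URL scheme, not a remote)
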
@@ -340,7 +340,8 @@ async def event_handler(client, query, pfunc):
     handler_dict[user_id] = True
     start_time = time()
     async def event_filter(_, __, event):
-        return bool(event.from_user.id == user_id and event.chat.id == query.message.chat.id and event.text)
+        return bool(event.from_user.id or event.sender_chat.id == user_id and
+                    event.chat.id == query.message.chat.id and event.text)
     handler = client.add_handler(MessageHandler(pfunc, create(event_filter)), group=-1)
     while handler_dict[user_id]:
         await sleep(0.5)
@@ -19,7 +19,7 @@ TELEGRAPH_LIMIT = 300


 async def initiate_search_tools():
-    qbclient = get_client()
+    qbclient = await sync_to_async(get_client)
     qb_plugins = await sync_to_async(qbclient.search_plugins)
     if SEARCH_PLUGINS := config_dict['SEARCH_PLUGINS']:
         globals()['PLUGINS'] = []
@@ -28,7 +28,6 @@ async def initiate_search_tools():
             for plugin in qb_plugins:
                 await sync_to_async(qbclient.search_uninstall_plugin, names=plugin['name'])
         await sync_to_async(qbclient.search_install_plugin, src_plugins)
-        await sync_to_async(qbclient.auth_log_out)
     elif qb_plugins:
         for plugin in qb_plugins:
             await sync_to_async(qbclient.search_uninstall_plugin, names=plugin['name'])
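`sync_to_async` offloads the blocking qbittorrent-api calls so the event loop keeps serving updates; a minimal sketch of such a wrapper, assuming the usual thread-pool approach (the project's own helper lives in bot_utils and may differ):

from asyncio import get_running_loop, run
from functools import partial

async def sync_to_async(func, *args, **kwargs):
    # Run a blocking callable on the default executor and await its result
    return await get_running_loop().run_in_executor(None, partial(func, *args, **kwargs))

def search_plugins():  # stand-in for the blocking qbclient.search_plugins call
    return [{'name': 'plugin-a'}, {'name': 'plugin-b'}]

async def main():
    print(await sync_to_async(search_plugins))

run(main())
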
@@ -89,7 +88,7 @@ async def __search(key, site, message, method):
         return
     else:
         LOGGER.info(f"PLUGINS Searching: {key} from {site}")
-        client = get_client()
+        client = await sync_to_async(get_client)
         search = await sync_to_async(client.search_start, pattern=key, plugins=site, category='all')
         search_id = search.id
         while True:
@@ -112,6 +111,7 @@ async def __search(key, site, message, method):
     await editMessage(message, msg, button)
     if not method.startswith('api'):
         await sync_to_async(client.search_delete, search_id=search_id)
+        await sync_to_async(client.auth_log_out)

 async def __getResult(search_results, key, message, method):
     telegraph_content = []
@@ -191,7 +191,7 @@ def __api_buttons(user_id, method):
 async def __plugin_buttons(user_id):
     buttons = ButtonMaker()
     if not PLUGINS:
-        qbclient = get_client()
+        qbclient = await sync_to_async(get_client)
         pl = await sync_to_async(qbclient.search_plugins)
         for name in pl:
             PLUGINS.append(name['name'])
|
@ -17,7 +17,7 @@ async def shell(client, message):
|
||||
await sendMessage(message, 'No command to execute was given.')
|
||||
return
|
||||
cmd = cmd[1]
|
||||
stdout, stderr, returncode = await cmd_exec(cmd, shell=True)
|
||||
stdout, stderr, _ = await cmd_exec(cmd, shell=True)
|
||||
reply = ''
|
||||
if len(stdout) != 0:
|
||||
reply += f"*Stdout*\n<code>{stdout}</code>\n"
|
||||
|
@ -3,7 +3,7 @@ from pyrogram.handlers import MessageHandler, CallbackQueryHandler
|
||||
from pyrogram.filters import command, regex, create
|
||||
from aiofiles import open as aiopen
|
||||
from aiofiles.os import remove as aioremove, path as aiopath, mkdir
|
||||
from os import path as ospath
|
||||
from os import path as ospath, getcwd
|
||||
from PIL import Image
|
||||
from time import time
|
||||
from functools import partial
|
||||
@ -26,6 +26,7 @@ async def get_user_settings(from_user):
|
||||
name = from_user.mention
|
||||
buttons = ButtonMaker()
|
||||
thumbpath = f"Thumbnails/{user_id}.jpg"
|
||||
rclone_path = f'rclone/{user_id}.conf'
|
||||
user_dict = user_data.get(user_id, {})
|
||||
if user_dict.get('as_doc', False) or 'as_doc' not in user_dict and config_dict['AS_DOCUMENT']:
|
||||
ltype = "DOCUMENT"
|
||||
@@ -62,10 +63,14 @@ async def get_user_settings(from_user):
     buttons.ibutton("Thumbnail", f"userset {user_id} sthumb")
     thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists"

+    buttons.ibutton("Rclone", f"userset {user_id} rcc")
+    rccmsg = "Exists" if await aiopath.exists(rclone_path) else "Not Exists"
+
     buttons.ibutton("Close", f"userset {user_id} close")
     text = f"<u>Settings for {name}</u>\n"\
            f"Leech Type is <b>{ltype}</b>\n"\
            f"Custom Thumbnail <b>{thumbmsg}</b>\n"\
+           f"Rclone Config <b>{rccmsg}</b>\n"\
            f"Leech Split Size is <b>{split_size}</b>\n"\
            f"Equal Splits is <b>{equal_splits}</b>\n"\
            f"Media Group is <b>{media_group}</b>\n"\
@@ -104,7 +109,21 @@ async def set_thumb(client, message, pre_event):
     await message.delete()
     await update_user_settings(pre_event)
     if DATABASE_URL:
-        await DbManger().update_thumb(user_id, des_dir)
+        await DbManger().update_doc(user_id, des_dir)
+
+async def add_rclone(client, message, pre_event):
+    user_id = message.from_user.id
+    handler_dict[user_id] = False
+    path = f'{getcwd()}/rclone/'
+    if not await aiopath.isdir(path):
+        await mkdir(path)
+    des_dir = ospath.join(path, f'{user_id}.conf')
+    await message.download(file_name=des_dir)
+    update_user_ldata(user_id, 'rclone', f'rclone/{user_id}.conf')
+    await message.delete()
+    await update_user_settings(pre_event)
+    if DATABASE_URL:
+        await DbManger().update_user_doc(user_id, des_dir)

 async def leech_split_size(client, message, pre_event):
     user_id = message.from_user.id
@@ -116,13 +135,13 @@ async def leech_split_size(client, message, pre_event):
     if DATABASE_URL:
         await DbManger().update_user_data(user_id)

-async def event_handler(client, query, pfunc, photo=False):
+async def event_handler(client, query, pfunc, photo=False, document=False):
     user_id = query.from_user.id
     handler_dict[user_id] = True
     start_time = time()
     async def event_filter(_, __, event):
-        return bool(event.from_user.id == user_id and event.chat.id == query.message.chat.id and
-                    (event.text or event.photo and photo))
+        return bool(event.from_user.id or event.sender_chat.id == user_id and event.chat.id == query.message.chat.id and
+                    (event.text or event.photo and photo or event.document and document))
     handler = client.add_handler(MessageHandler(pfunc, filters=create(event_filter)), group=-1)
     while handler_dict[user_id]:
         await sleep(0.5)
@@ -137,7 +156,8 @@ async def edit_user_settings(client, query):
     user_id = from_user.id
     message = query.message
     data = query.data.split()
-    thumb_path = f"Thumbnails/{user_id}.jpg"
+    thumb_path = f'Thumbnails/{user_id}.jpg'
+    rclone_path = f'rclone/{user_id}.conf'
     user_dict = user_data.get(user_id, {})
     if user_id != int(data[1]):
         await query.answer("Not Yours!", show_alert=True)
@@ -160,7 +180,7 @@ async def edit_user_settings(client, query):
             update_user_ldata(user_id, 'thumb', '')
             await update_user_settings(query)
             if DATABASE_URL:
-                await DbManger().update_thumb(user_id)
+                await DbManger().update_user_doc(user_id)
         else:
             await query.answer("Old Settings", show_alert=True)
             await update_user_settings(query)
@@ -205,7 +225,7 @@ Check all available qualities options <a href="https://github.com/yt-dlp/yt-dlp#
         if user_dict.get('split_size', False):
             buttons.ibutton("Reset Split Size", f"userset {user_id} rlss")
-        if user_dict.get('equal_splits', False) or 'equal_splits' not in user_dict and config_dict['EQUAL_SPLITS']:
+        ES = config_dict['EQUAL_SPLITS']
+        if user_dict.get('equal_splits', False) or 'equal_splits' not in user_dict and ES:
             buttons.ibutton("Disable Equal Splits", f"userset {user_id} esplits")
         else:
             buttons.ibutton("Enable Equal Splits", f"userset {user_id} esplits")
@@ -239,6 +259,28 @@ Check all available qualities options <a href="https://github.com/yt-dlp/yt-dlp#
         await update_user_settings(query)
         if DATABASE_URL:
             await DbManger().update_user_data(user_id)
+    elif data[2] == 'rcc':
+        await query.answer()
+        buttons = ButtonMaker()
+        if await aiopath.exists(rclone_path):
+            buttons.ibutton("Delete rclone.conf", f"userset {user_id} drcc")
+        buttons.ibutton("Back", f"userset {user_id} back")
+        buttons.ibutton("Close", f"userset {user_id} close")
+        await editMessage(message, 'Send rclone.conf. Timeout: 60 sec', buttons.build_menu(1))
+        pfunc = partial(add_rclone, pre_event=query)
+        await event_handler(client, query, pfunc, document=True)
+    elif data[2] == 'drcc':
+        handler_dict[user_id] = False
+        if await aiopath.exists(rclone_path):
+            await query.answer()
+            await aioremove(rclone_path)
+            update_user_ldata(user_id, 'rclone', '')
+            await update_user_settings(query)
+            if DATABASE_URL:
+                await DbManger().update_user_doc(user_id)
+        else:
+            await query.answer("Old Settings", show_alert=True)
+            await update_user_settings(query)
     elif data[2] == 'back':
         handler_dict[user_id] = False
         await query.answer()
@@ -3,8 +3,8 @@ from pyrogram.handlers import MessageHandler, CallbackQueryHandler
 from pyrogram.filters import command, regex
 from asyncio import sleep
 from re import split as re_split
-from requests import request
+from aiohttp import ClientSession

 from bot import DOWNLOAD_DIR, bot, config_dict, user_data, LOGGER
 from bot.helper.telegram_helper.message_utils import sendMessage, editMessage
 from bot.helper.telegram_helper.button_build import ButtonMaker
@@ -17,14 +17,15 @@ from bot.helper.listener import MirrorLeechListener
 listener_dict = {}


-def _mdisk(link, name):
+async def _mdisk(link, name):
     key = link.split('/')[-1]
-    resp = request('GET', f'https://diskuploader.entertainvideo.com/v1/file/cdnurl?param={key}')
-    if resp.ok:
-        resp = resp.json()
-        link = resp['source']
+    async with ClientSession() as session:
+        async with session.get(f'https://diskuploader.entertainvideo.com/v1/file/cdnurl?param={key}') as resp:
+            if resp.status == 200:
+                resp_json = await resp.json()
+                link = resp_json['source']
     if not name:
-        name = resp['filename']
+        name = resp_json['filename']
     return name, link

 async def _auto_cancel(msg, task_id):
@@ -46,13 +47,13 @@ async def _ytdl(client, message, isZip=False, isLeech=False, sameDir={}):
     qual = ''
     select = False
     multi = 0
-    index = 1
     link = ''
     folder_name = ''

     args = mssg.split(maxsplit=3)
     args.pop(0)
     if len(args) > 0:
+        index = 1
         for x in args:
             x = x.strip()
             if x == 's':
@@ -158,7 +159,7 @@ Check all yt-dlp api options from this <a href='https://github.com/yt-dlp/yt-dlp

     listener = MirrorLeechListener(message, isZip, isLeech=isLeech, pswd=pswd, tag=tag, sameDir=sameDir)
     if 'mdisk.me' in link:
-        name, link = await sync_to_async(_mdisk, link, name)
+        name, link = await _mdisk(link, name)
     ydl = YoutubeDLHelper(listener)
     try:
         result = await sync_to_async(ydl.extractMetaData, link, name, opt, True)