Added Custom leech destination id/username

DUMP_CHAT_ID ——> LEECH_DUMP_CHAT id/username
RSS_CHAT_ID ——> RSS_CHAT id/username
Other minor fixes

Signed-off-by: anasty17 <e.anastayyar@gmail.com>
This commit is contained in:
anasty17 2023-06-24 04:27:15 +03:00
parent 6fe0526b5d
commit 5bb35aa68d
14 changed files with 436 additions and 376 deletions

View File

@ -36,6 +36,7 @@ In each single file there is a major change from base code, it's almost totaly d
- Ability to leech split file parts in a media group. Configurable per user
- Download using premium account if available
- Download restricted messages (document or link) by tg private/public/super links
- Custom upload destination for each task or user
### Google
@ -257,7 +258,7 @@ Fill up rest of the fields. Meaning of each field is discussed below. **NOTE**:
- `EQUAL_SPLITS`: Split files larger than **LEECH_SPLIT_SIZE** into equal parts size (Not working with zip cmd). Default is `False`. `Bool`
- `MEDIA_GROUP`: View uploaded split file parts in a media group. Default is `False`. `Bool`.
- `LEECH_FILENAME_PREFIX`: Add custom word to leeched file name. `Str`
- `DUMP_CHAT_ID`: Chat ID to where leeched files would be uploaded. `Int`. **NOTE**: Only available for superGroup/channel. Add `-100` before channel/superGroup id. In short don't add bot id or your id!
- `LEECH_DUMP_CHAT`: Chat ID or USERNAME where leeched files will be uploaded. `Int`|`Str`. **NOTE**: Only available for superGroup/channel. Add `-100` before the channel/superGroup id. In short, don't add a bot or account id!
### qBittorrent/Aria2c
@ -270,8 +271,8 @@ Fill up rest of the fields. Meaning of each field is discussed below. **NOTE**:
### RSS
- `RSS_DELAY`: Time in seconds for rss refresh interval. Recommended `900` second at least. Default is `900` in sec. `Int`
- `RSS_CHAT_ID`: Chat ID where rss links will be sent. If you want message to be sent to the channel then add channel id. Add `-100` before channel id. `Int`
- **RSS NOTES**: `RSS_CHAT_ID` is required, otherwise monitor will not work. You must use `USER_STRING_SESSION` --OR-- *CHANNEL*. If using channel then bot should be added in both channel and group(linked to channel) and `RSS_CHAT_ID` is the channel id, so messages sent by the bot to channel will be forwarded to group. Otherwise with `USER_STRING_SESSION` add group id for `RSS_CHAT_ID`. If `DATABASE_URL` not added you will miss the feeds while bot offline.
- `RSS_CHAT`: Chat ID/USERNAME where RSS links will be sent. If you want messages to be sent to a channel, add the channel id. Add `-100` before the channel id. `Int`|`Str`
- **RSS NOTES**: `RSS_CHAT` is required, otherwise monitor will not work. You must use `USER_STRING_SESSION` --OR-- *CHANNEL*. If using channel then bot should be added in both channel and group(linked to channel) and `RSS_CHAT` is the channel id, so messages sent by the bot to channel will be forwarded to group. Otherwise with `USER_STRING_SESSION` add group id for `RSS_CHAT`. If `DATABASE_URL` not added you will miss the feeds while bot offline.
### MEGA

View File

@ -165,8 +165,7 @@ EXTENSION_FILTER = environ.get('EXTENSION_FILTER', '')
if len(EXTENSION_FILTER) > 0:
fx = EXTENSION_FILTER.split()
for x in fx:
if x.strip().startswith('.'):
x = x.lstrip('.')
x = x.lstrip('.')
GLOBAL_EXTENSION_FILTER.append(x.strip().lower())
IS_PREMIUM_USER = False
@ -232,16 +231,20 @@ if len(YT_DLP_OPTIONS) == 0:
SEARCH_LIMIT = environ.get('SEARCH_LIMIT', '')
SEARCH_LIMIT = 0 if len(SEARCH_LIMIT) == 0 else int(SEARCH_LIMIT)
DUMP_CHAT_ID = environ.get('DUMP_CHAT_ID', '')
DUMP_CHAT_ID = '' if len(DUMP_CHAT_ID) == 0 else int(DUMP_CHAT_ID)
LEECH_DUMP_CHAT = environ.get('LEECH_DUMP_CHAT', '')
LEECH_DUMP_CHAT = '' if len(LEECH_DUMP_CHAT) == 0 else LEECH_DUMP_CHAT
if LEECH_DUMP_CHAT.isdigit() or LEECH_DUMP_CHAT.startswith('-'):
LEECH_DUMP_CHAT = int(LEECH_DUMP_CHAT)
STATUS_LIMIT = environ.get('STATUS_LIMIT', '')
STATUS_LIMIT = 10 if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT)
CMD_SUFFIX = environ.get('CMD_SUFFIX', '')
RSS_CHAT_ID = environ.get('RSS_CHAT_ID', '')
RSS_CHAT_ID = '' if len(RSS_CHAT_ID) == 0 else int(RSS_CHAT_ID)
RSS_CHAT = environ.get('RSS_CHAT', '')
RSS_CHAT = '' if len(RSS_CHAT) == 0 else RSS_CHAT
if RSS_CHAT.isdigit() or RSS_CHAT.startswith('-'):
RSS_CHAT = int(RSS_CHAT)
RSS_DELAY = environ.get('RSS_DELAY', '')
RSS_DELAY = 900 if len(RSS_DELAY) == 0 else int(RSS_DELAY)
@ -324,13 +327,13 @@ config_dict = {'AS_DOCUMENT': AS_DOCUMENT,
'DATABASE_URL': DATABASE_URL,
'DEFAULT_UPLOAD': DEFAULT_UPLOAD,
'DOWNLOAD_DIR': DOWNLOAD_DIR,
'DUMP_CHAT_ID': DUMP_CHAT_ID,
'EQUAL_SPLITS': EQUAL_SPLITS,
'EXTENSION_FILTER': EXTENSION_FILTER,
'GDRIVE_ID': GDRIVE_ID,
'INCOMPLETE_TASK_NOTIFIER': INCOMPLETE_TASK_NOTIFIER,
'INDEX_URL': INDEX_URL,
'IS_TEAM_DRIVE': IS_TEAM_DRIVE,
'LEECH_DUMP_CHAT': LEECH_DUMP_CHAT,
'LEECH_FILENAME_PREFIX': LEECH_FILENAME_PREFIX,
'LEECH_SPLIT_SIZE': LEECH_SPLIT_SIZE,
'MEDIA_GROUP': MEDIA_GROUP,
@ -346,7 +349,7 @@ config_dict = {'AS_DOCUMENT': AS_DOCUMENT,
'RCLONE_SERVE_USER': RCLONE_SERVE_USER,
'RCLONE_SERVE_PASS': RCLONE_SERVE_PASS,
'RCLONE_SERVE_PORT': RCLONE_SERVE_PORT,
'RSS_CHAT_ID': RSS_CHAT_ID,
'RSS_CHAT': RSS_CHAT,
'RSS_DELAY': RSS_DELAY,
'SEARCH_API_LINK': SEARCH_API_LINK,
'SEARCH_LIMIT': SEARCH_LIMIT,

View File

@ -249,7 +249,7 @@ def arg_parser(items, arg_base):
t = len(items)
i = 0
while i + 1 <= t:
part = items[i]
part = items[i].strip()
if part in arg_base:
if part in ['-s', '-j']:
arg_base[part] = True
@ -259,7 +259,7 @@ def arg_parser(items, arg_base):
else:
sub_list = []
for j in range(i+1, t):
item = items[j]
item = items[j].strip()
if item in arg_base:
if part in ['-b', '-e', '-z', '-s', '-j', '-d']:
arg_base[part] = True

View File

@ -118,7 +118,7 @@ async def split_file(path, size, file_, dirpath, split_size, listener, start_tim
leech_split_size = user_dict.get(
'split_size') or config_dict['LEECH_SPLIT_SIZE']
parts = -(-size // leech_split_size)
if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS']) and not inLoop:
if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS'] and 'equal_splits' not in user_dict) and not inLoop:
split_size = ((size + parts - 1) // parts) + 1000
if (await get_document_type(path))[0]:
if multi_streams:

View File

@ -118,9 +118,8 @@ class MirrorLeechListener:
await start_from_queued()
user_dict = user_data.get(self.message.from_user.id, {})
if self.join:
if await aiopath.isdir(dl_path):
await join_files(dl_path)
if self.join and await aiopath.isdir(dl_path):
await join_files(dl_path)
if self.extract:
pswd = self.extract if isinstance(self.extract, str) else ''

View File

@ -111,7 +111,7 @@ class YoutubeDLHelper:
elif d.get('total_bytes_estimate'):
self.__size = d['total_bytes_estimate']
self.__downloaded_bytes = d['downloaded_bytes']
self.__eta = d.get('eta', '-')
self.__eta = d.get('eta', '-') or '-'
try:
self.__progress = (self.__downloaded_bytes / self.__size) * 100
except:

View File

@ -42,6 +42,7 @@ class TgUploader:
self.__lprefix = ''
self.__as_doc = False
self.__media_group = False
self.__upload_dest = ''
async def __upload_progress(self, current, total):
if self.__is_cancelled:
@ -56,23 +57,30 @@ class TgUploader:
async def __user_settings(self):
user_id = self.__listener.message.from_user.id
user_dict = user_data.get(user_id, {})
self.__as_doc = user_dict.get('as_doc') or config_dict['AS_DOCUMENT']
self.__as_doc = user_dict.get(
'as_doc', False) or config_dict['AS_DOCUMENT'] if 'as_doc' not in user_dict else False
self.__media_group = user_dict.get(
'media_group') or config_dict['MEDIA_GROUP']
'media_group') or config_dict['MEDIA_GROUP'] if 'media_group' not in user_dict else False
self.__lprefix = user_dict.get(
'lprefix') or config_dict['LEECH_FILENAME_PREFIX']
'lprefix') or config_dict['LEECH_FILENAME_PREFIX'] if 'lprefix' not in user_dict else ''
if not await aiopath.exists(self.__thumb):
self.__thumb = None
self.__upload_dest = self.__listener.upPath or user_dict.get(
'leech_dest') or config_dict['LEECH_DUMP_CHAT']
async def __msg_to_reply(self):
if DUMP_CHAT_ID := config_dict['DUMP_CHAT_ID']:
msg = self.__listener.message.link if self.__listener.isSuperGroup else self.__listener.message.text
if IS_PREMIUM_USER:
self.__sent_msg = await user.send_message(chat_id=DUMP_CHAT_ID, text=msg,
disable_web_page_preview=False, disable_notification=True)
else:
self.__sent_msg = await bot.send_message(chat_id=DUMP_CHAT_ID, text=msg,
disable_web_page_preview=False, disable_notification=True)
if self.__upload_dest:
msg = self.__listener.message.link if self.__listener.isSuperGroup else self.__listener.message.text.lstrip('/')
try:
if IS_PREMIUM_USER:
self.__sent_msg = await user.send_message(chat_id=self.__upload_dest, text=msg,
disable_web_page_preview=False, disable_notification=True)
else:
self.__sent_msg = await bot.send_message(chat_id=self.__upload_dest, text=msg,
disable_web_page_preview=False, disable_notification=True)
except Exception as e:
await self.__listener.onUploadError(str(e))
return False
elif IS_PREMIUM_USER:
if not self.__listener.isSuperGroup:
await self.__listener.onUploadError('Use SuperGroup to leech with User!')
@ -148,16 +156,16 @@ class TgUploader:
del self.__msgs_dict[msg.link]
await msg.delete()
del self.__media_dict[key][subkey]
if self.__listener.isSuperGroup or config_dict['DUMP_CHAT_ID']:
if self.__listener.isSuperGroup or self.__upload_dest:
for m in msgs_list:
self.__msgs_dict[m.link] = m.caption
self.__sent_msg = msgs_list[-1]
async def upload(self, o_files, m_size, size):
await self.__user_settings()
res = await self.__msg_to_reply()
if not res:
return
await self.__user_settings()
for dirpath, _, files in sorted(await sync_to_async(walk, self.__path)):
if dirpath.endswith('/yt-dlp-thumb'):
continue
@ -192,7 +200,7 @@ class TgUploader:
await self.__upload_file(cap_mono, file_)
if self.__is_cancelled:
return
if not self.__is_corrupted and (self.__listener.isSuperGroup or config_dict['DUMP_CHAT_ID']):
if not self.__is_corrupted and (self.__listener.isSuperGroup or self.__upload_dest):
self.__msgs_dict[self.__sent_msg.link] = file_
await sleep(1)
except Exception as err:

View File

@ -49,10 +49,10 @@ async def sendFile(message, file, caption=None):
async def sendRss(text):
try:
if user:
return await user.send_message(chat_id=config_dict['RSS_CHAT_ID'], text=text, disable_web_page_preview=True,
return await user.send_message(chat_id=config_dict['RSS_CHAT'], text=text, disable_web_page_preview=True,
disable_notification=True)
else:
return await bot.send_message(chat_id=config_dict['RSS_CHAT_ID'], text=text, disable_web_page_preview=True,
return await bot.send_message(chat_id=config_dict['RSS_CHAT'], text=text, disable_web_page_preview=True,
disable_notification=True)
except FloodWait as f:
LOGGER.warning(str(f))

View File

@ -2,7 +2,7 @@
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
from pyrogram.filters import command, regex, create
from functools import partial
from asyncio import create_subprocess_exec, create_subprocess_shell, sleep
from asyncio import create_subprocess_exec, create_subprocess_shell, sleep, gather
from aiofiles.os import remove, rename, path as aiopath
from aiofiles import open as aiopen
from os import environ, getcwd
@ -35,324 +35,6 @@ default_values = {'AUTO_DELETE_MESSAGE_DURATION': 30,
'UPSTREAM_BRANCH': 'master'}
async def load_config():
    """Reload the bot configuration from environment variables.

    Rebuilds the module-level ``config_dict`` and the related globals
    (``user_data``, ``GLOBAL_EXTENSION_FILTER``, ``DRIVES_IDS``,
    ``DRIVES_NAMES``, ``INDEX_URLS``), applies side effects (aria2 torrent
    timeout, status-update timer, gunicorn web server restart), persists
    the result to the database when ``DATABASE_URL`` is set, and refreshes
    dependent services.

    Takes no arguments and returns ``None``; all results are published via
    module-level state.
    """
    # Telegram credentials: keep the previously loaded value when unset.
    BOT_TOKEN = environ.get('BOT_TOKEN', '')
    if len(BOT_TOKEN) == 0:
        BOT_TOKEN = config_dict['BOT_TOKEN']
    TELEGRAM_API = environ.get('TELEGRAM_API', '')
    if len(TELEGRAM_API) == 0:
        TELEGRAM_API = config_dict['TELEGRAM_API']
    else:
        TELEGRAM_API = int(TELEGRAM_API)
    TELEGRAM_HASH = environ.get('TELEGRAM_HASH', '')
    if len(TELEGRAM_HASH) == 0:
        TELEGRAM_HASH = config_dict['TELEGRAM_HASH']
    OWNER_ID = environ.get('OWNER_ID', '')
    OWNER_ID = config_dict['OWNER_ID'] if len(OWNER_ID) == 0 else int(OWNER_ID)
    DATABASE_URL = environ.get('DATABASE_URL', '')
    if len(DATABASE_URL) == 0:
        DATABASE_URL = ''
    # Download directory must always carry a trailing slash.
    DOWNLOAD_DIR = environ.get('DOWNLOAD_DIR', '')
    if len(DOWNLOAD_DIR) == 0:
        DOWNLOAD_DIR = '/usr/src/app/downloads/'
    elif not DOWNLOAD_DIR.endswith("/"):
        DOWNLOAD_DIR = f'{DOWNLOAD_DIR}/'
    GDRIVE_ID = environ.get('GDRIVE_ID', '')
    if len(GDRIVE_ID) == 0:
        GDRIVE_ID = ''
    RCLONE_PATH = environ.get('RCLONE_PATH', '')
    if len(RCLONE_PATH) == 0:
        RCLONE_PATH = ''
    # Default upload target is either 'rc' (rclone) or 'gd' (Google Drive).
    DEFAULT_UPLOAD = environ.get('DEFAULT_UPLOAD', '')
    if DEFAULT_UPLOAD != 'rc':
        DEFAULT_UPLOAD = 'gd'
    RCLONE_FLAGS = environ.get('RCLONE_FLAGS', '')
    if len(RCLONE_FLAGS) == 0:
        RCLONE_FLAGS = ''
    # Space-separated id lists; access flags are stored per user in user_data.
    AUTHORIZED_CHATS = environ.get('AUTHORIZED_CHATS', '')
    if len(AUTHORIZED_CHATS) != 0:
        aid = AUTHORIZED_CHATS.split()
        for id_ in aid:
            user_data[int(id_.strip())] = {'is_auth': True}
    SUDO_USERS = environ.get('SUDO_USERS', '')
    if len(SUDO_USERS) != 0:
        aid = SUDO_USERS.split()
        for id_ in aid:
            user_data[int(id_.strip())] = {'is_sudo': True}
    # Rebuild the global extension blacklist; 'aria2' control files are always
    # filtered, user entries are stored lowercase without a leading dot.
    EXTENSION_FILTER = environ.get('EXTENSION_FILTER', '')
    if len(EXTENSION_FILTER) > 0:
        fx = EXTENSION_FILTER.split()
        GLOBAL_EXTENSION_FILTER.clear()
        GLOBAL_EXTENSION_FILTER.append('aria2')
        for x in fx:
            if x.strip().startswith('.'):
                x = x.lstrip('.')
            GLOBAL_EXTENSION_FILTER.append(x.strip().lower())
    # Mega credentials are only usable as a pair; drop both if one is missing.
    MEGA_EMAIL = environ.get('MEGA_EMAIL', '')
    MEGA_PASSWORD = environ.get('MEGA_PASSWORD', '')
    if len(MEGA_EMAIL) == 0 or len(MEGA_PASSWORD) == 0:
        MEGA_EMAIL = ''
        MEGA_PASSWORD = ''
    UPTOBOX_TOKEN = environ.get('UPTOBOX_TOKEN', '')
    if len(UPTOBOX_TOKEN) == 0:
        UPTOBOX_TOKEN = ''
    INDEX_URL = environ.get('INDEX_URL', '').rstrip("/")
    if len(INDEX_URL) == 0:
        INDEX_URL = ''
    SEARCH_API_LINK = environ.get('SEARCH_API_LINK', '').rstrip("/")
    if len(SEARCH_API_LINK) == 0:
        SEARCH_API_LINK = ''
    LEECH_FILENAME_PREFIX = environ.get('LEECH_FILENAME_PREFIX', '')
    if len(LEECH_FILENAME_PREFIX) == 0:
        LEECH_FILENAME_PREFIX = ''
    SEARCH_PLUGINS = environ.get('SEARCH_PLUGINS', '')
    if len(SEARCH_PLUGINS) == 0:
        SEARCH_PLUGINS = ''
    # Telegram upload cap: ~4 GB with a premium session, ~2 GB otherwise;
    # the configured split size may never exceed it.
    MAX_SPLIT_SIZE = 4194304000 if IS_PREMIUM_USER else 2097152000
    LEECH_SPLIT_SIZE = environ.get('LEECH_SPLIT_SIZE', '')
    if len(LEECH_SPLIT_SIZE) == 0 or int(LEECH_SPLIT_SIZE) > MAX_SPLIT_SIZE:
        LEECH_SPLIT_SIZE = MAX_SPLIT_SIZE
    else:
        LEECH_SPLIT_SIZE = int(LEECH_SPLIT_SIZE)
    STATUS_UPDATE_INTERVAL = environ.get('STATUS_UPDATE_INTERVAL', '')
    if len(STATUS_UPDATE_INTERVAL) == 0:
        STATUS_UPDATE_INTERVAL = 10
    else:
        STATUS_UPDATE_INTERVAL = int(STATUS_UPDATE_INTERVAL)
    # Restart the status-refresh timer with the new interval when tasks exist.
    if len(download_dict) != 0:
        async with status_reply_dict_lock:
            if Interval:
                Interval[0].cancel()
                Interval.clear()
                Interval.append(setInterval(
                    STATUS_UPDATE_INTERVAL, update_all_messages))
    AUTO_DELETE_MESSAGE_DURATION = environ.get(
        'AUTO_DELETE_MESSAGE_DURATION', '')
    if len(AUTO_DELETE_MESSAGE_DURATION) == 0:
        AUTO_DELETE_MESSAGE_DURATION = 30
    else:
        AUTO_DELETE_MESSAGE_DURATION = int(AUTO_DELETE_MESSAGE_DURATION)
    YT_DLP_OPTIONS = environ.get('YT_DLP_OPTIONS', '')
    if len(YT_DLP_OPTIONS) == 0:
        YT_DLP_OPTIONS = ''
    SEARCH_LIMIT = environ.get('SEARCH_LIMIT', '')
    SEARCH_LIMIT = 0 if len(SEARCH_LIMIT) == 0 else int(SEARCH_LIMIT)
    # Numeric-only chat ids ('' means disabled).
    DUMP_CHAT_ID = environ.get('DUMP_CHAT_ID', '')
    DUMP_CHAT_ID = '' if len(DUMP_CHAT_ID) == 0 else int(DUMP_CHAT_ID)
    STATUS_LIMIT = environ.get('STATUS_LIMIT', '')
    STATUS_LIMIT = 10 if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT)
    RSS_CHAT_ID = environ.get('RSS_CHAT_ID', '')
    RSS_CHAT_ID = '' if len(RSS_CHAT_ID) == 0 else int(RSS_CHAT_ID)
    RSS_DELAY = environ.get('RSS_DELAY', '')
    RSS_DELAY = 900 if len(RSS_DELAY) == 0 else int(RSS_DELAY)
    CMD_SUFFIX = environ.get('CMD_SUFFIX', '')
    USER_SESSION_STRING = environ.get('USER_SESSION_STRING', '')
    # Push the torrent stop-timeout to aria2 defaults and to every
    # still-incomplete download; '0' disables the timeout.
    TORRENT_TIMEOUT = environ.get('TORRENT_TIMEOUT', '')
    downloads = aria2.get_downloads()
    if len(TORRENT_TIMEOUT) == 0:
        for download in downloads:
            if not download.is_complete:
                try:
                    await sync_to_async(aria2.client.change_option, download.gid, {'bt-stop-timeout': '0'})
                except Exception as e:
                    LOGGER.error(e)
        aria2_options['bt-stop-timeout'] = '0'
        if DATABASE_URL:
            await DbManger().update_aria2('bt-stop-timeout', '0')
        TORRENT_TIMEOUT = ''
    else:
        for download in downloads:
            if not download.is_complete:
                try:
                    await sync_to_async(aria2.client.change_option, download.gid, {'bt-stop-timeout': TORRENT_TIMEOUT})
                except Exception as e:
                    LOGGER.error(e)
        aria2_options['bt-stop-timeout'] = TORRENT_TIMEOUT
        if DATABASE_URL:
            await DbManger().update_aria2('bt-stop-timeout', TORRENT_TIMEOUT)
        TORRENT_TIMEOUT = int(TORRENT_TIMEOUT)
    # Queue limits: '' means unlimited.
    QUEUE_ALL = environ.get('QUEUE_ALL', '')
    QUEUE_ALL = '' if len(QUEUE_ALL) == 0 else int(QUEUE_ALL)
    QUEUE_DOWNLOAD = environ.get('QUEUE_DOWNLOAD', '')
    QUEUE_DOWNLOAD = '' if len(QUEUE_DOWNLOAD) == 0 else int(QUEUE_DOWNLOAD)
    QUEUE_UPLOAD = environ.get('QUEUE_UPLOAD', '')
    QUEUE_UPLOAD = '' if len(QUEUE_UPLOAD) == 0 else int(QUEUE_UPLOAD)
    # Boolean flags: any value other than 'true' (case-insensitive) is False.
    INCOMPLETE_TASK_NOTIFIER = environ.get('INCOMPLETE_TASK_NOTIFIER', '')
    INCOMPLETE_TASK_NOTIFIER = INCOMPLETE_TASK_NOTIFIER.lower() == 'true'
    if not INCOMPLETE_TASK_NOTIFIER and DATABASE_URL:
        await DbManger().trunc_table('tasks')
    STOP_DUPLICATE = environ.get('STOP_DUPLICATE', '')
    STOP_DUPLICATE = STOP_DUPLICATE.lower() == 'true'
    IS_TEAM_DRIVE = environ.get('IS_TEAM_DRIVE', '')
    IS_TEAM_DRIVE = IS_TEAM_DRIVE.lower() == 'true'
    USE_SERVICE_ACCOUNTS = environ.get('USE_SERVICE_ACCOUNTS', '')
    USE_SERVICE_ACCOUNTS = USE_SERVICE_ACCOUNTS.lower() == 'true'
    WEB_PINCODE = environ.get('WEB_PINCODE', '')
    WEB_PINCODE = WEB_PINCODE.lower() == 'true'
    AS_DOCUMENT = environ.get('AS_DOCUMENT', '')
    AS_DOCUMENT = AS_DOCUMENT.lower() == 'true'
    EQUAL_SPLITS = environ.get('EQUAL_SPLITS', '')
    EQUAL_SPLITS = EQUAL_SPLITS.lower() == 'true'
    MEDIA_GROUP = environ.get('MEDIA_GROUP', '')
    MEDIA_GROUP = MEDIA_GROUP.lower() == 'true'
    BASE_URL_PORT = environ.get('BASE_URL_PORT', '')
    BASE_URL_PORT = 80 if len(BASE_URL_PORT) == 0 else int(BASE_URL_PORT)
    RCLONE_SERVE_URL = environ.get('RCLONE_SERVE_URL', '')
    if len(RCLONE_SERVE_URL) == 0:
        RCLONE_SERVE_URL = ''
    RCLONE_SERVE_PORT = environ.get('RCLONE_SERVE_PORT', '')
    RCLONE_SERVE_PORT = 8080 if len(
        RCLONE_SERVE_PORT) == 0 else int(RCLONE_SERVE_PORT)
    RCLONE_SERVE_USER = environ.get('RCLONE_SERVE_USER', '')
    if len(RCLONE_SERVE_USER) == 0:
        RCLONE_SERVE_USER = ''
    RCLONE_SERVE_PASS = environ.get('RCLONE_SERVE_PASS', '')
    if len(RCLONE_SERVE_PASS) == 0:
        RCLONE_SERVE_PASS = ''
    # Restart the gunicorn web server on the (possibly changed) URL/port.
    await (await create_subprocess_exec("pkill", "-9", "-f", "gunicorn")).wait()
    BASE_URL = environ.get('BASE_URL', '').rstrip("/")
    if len(BASE_URL) == 0:
        BASE_URL = ''
    else:
        await create_subprocess_shell(f"gunicorn web.wserver:app --bind 0.0.0.0:{BASE_URL_PORT} --worker-class gevent")
    UPSTREAM_REPO = environ.get('UPSTREAM_REPO', '')
    if len(UPSTREAM_REPO) == 0:
        UPSTREAM_REPO = ''
    UPSTREAM_BRANCH = environ.get('UPSTREAM_BRANCH', '')
    if len(UPSTREAM_BRANCH) == 0:
        UPSTREAM_BRANCH = 'master'
    # Rebuild the drive lists: "Main" drive first, then list_drives.txt
    # entries ("name id [index_url]" per line).
    DRIVES_IDS.clear()
    DRIVES_NAMES.clear()
    INDEX_URLS.clear()
    if GDRIVE_ID:
        DRIVES_NAMES.append("Main")
        DRIVES_IDS.append(GDRIVE_ID)
        INDEX_URLS.append(INDEX_URL)
    if await aiopath.exists('list_drives.txt'):
        async with aiopen('list_drives.txt', 'r+') as f:
            lines = await f.readlines()
            for line in lines:
                temp = line.strip().split()
                DRIVES_IDS.append(temp[1])
                DRIVES_NAMES.append(temp[0].replace("_", " "))
                if len(temp) > 2:
                    INDEX_URLS.append(temp[2])
                else:
                    INDEX_URLS.append('')
    # Publish everything at once into the shared config mapping.
    config_dict.update({'AS_DOCUMENT': AS_DOCUMENT,
                        'AUTHORIZED_CHATS': AUTHORIZED_CHATS,
                        'AUTO_DELETE_MESSAGE_DURATION': AUTO_DELETE_MESSAGE_DURATION,
                        'BASE_URL': BASE_URL,
                        'BASE_URL_PORT': BASE_URL_PORT,
                        'BOT_TOKEN': BOT_TOKEN,
                        'CMD_SUFFIX': CMD_SUFFIX,
                        'DATABASE_URL': DATABASE_URL,
                        'DEFAULT_UPLOAD': DEFAULT_UPLOAD,
                        'DOWNLOAD_DIR': DOWNLOAD_DIR,
                        'DUMP_CHAT_ID': DUMP_CHAT_ID,
                        'EQUAL_SPLITS': EQUAL_SPLITS,
                        'EXTENSION_FILTER': EXTENSION_FILTER,
                        'GDRIVE_ID': GDRIVE_ID,
                        'INCOMPLETE_TASK_NOTIFIER': INCOMPLETE_TASK_NOTIFIER,
                        'INDEX_URL': INDEX_URL,
                        'IS_TEAM_DRIVE': IS_TEAM_DRIVE,
                        'LEECH_FILENAME_PREFIX': LEECH_FILENAME_PREFIX,
                        'LEECH_SPLIT_SIZE': LEECH_SPLIT_SIZE,
                        'MEDIA_GROUP': MEDIA_GROUP,
                        'MEGA_EMAIL': MEGA_EMAIL,
                        'MEGA_PASSWORD': MEGA_PASSWORD,
                        'OWNER_ID': OWNER_ID,
                        'QUEUE_ALL': QUEUE_ALL,
                        'QUEUE_DOWNLOAD': QUEUE_DOWNLOAD,
                        'QUEUE_UPLOAD': QUEUE_UPLOAD,
                        'RCLONE_FLAGS': RCLONE_FLAGS,
                        'RCLONE_PATH': RCLONE_PATH,
                        'RCLONE_SERVE_URL': RCLONE_SERVE_URL,
                        'RCLONE_SERVE_USER': RCLONE_SERVE_USER,
                        'RCLONE_SERVE_PASS': RCLONE_SERVE_PASS,
                        'RCLONE_SERVE_PORT': RCLONE_SERVE_PORT,
                        'RSS_CHAT_ID': RSS_CHAT_ID,
                        'RSS_DELAY': RSS_DELAY,
                        'SEARCH_API_LINK': SEARCH_API_LINK,
                        'SEARCH_LIMIT': SEARCH_LIMIT,
                        'SEARCH_PLUGINS': SEARCH_PLUGINS,
                        'STATUS_LIMIT': STATUS_LIMIT,
                        'STATUS_UPDATE_INTERVAL': STATUS_UPDATE_INTERVAL,
                        'STOP_DUPLICATE': STOP_DUPLICATE,
                        'SUDO_USERS': SUDO_USERS,
                        'TELEGRAM_API': TELEGRAM_API,
                        'TELEGRAM_HASH': TELEGRAM_HASH,
                        'TORRENT_TIMEOUT': TORRENT_TIMEOUT,
                        'UPSTREAM_REPO': UPSTREAM_REPO,
                        'UPSTREAM_BRANCH': UPSTREAM_BRANCH,
                        'UPTOBOX_TOKEN': UPTOBOX_TOKEN,
                        'USER_SESSION_STRING': USER_SESSION_STRING,
                        'USE_SERVICE_ACCOUNTS': USE_SERVICE_ACCOUNTS,
                        'WEB_PINCODE': WEB_PINCODE,
                        'YT_DLP_OPTIONS': YT_DLP_OPTIONS})
    if DATABASE_URL:
        await DbManger().update_config(config_dict)
    # Refresh dependent services with the new configuration.
    await initiate_search_tools()
    await start_from_queued()
    await rclone_serve_booter()
async def get_buttons(key=None, edit_type=None):
buttons = ButtonMaker()
if key is None:
@ -378,7 +60,7 @@ async def get_buttons(key=None, edit_type=None):
elif key == 'private':
buttons.ibutton('Back', "botset back")
buttons.ibutton('Close', "botset close")
msg = '''Send private file: config.env, token.pickle, accounts.zip, list_drives.txt, cookies.txt, terabox.txt, .netrc or any other file!
msg = '''Send private file: config.env, token.pickle, rclone.conf, accounts.zip, list_drives.txt, cookies.txt, terabox.txt, .netrc or any other private file!
To delete private file send only the file name as text message.
Note: Changing .netrc will not take effect for aria2c until restart.
Timeout: 60 sec'''
@ -458,8 +140,9 @@ async def edit_variable(_, message, pre_message, key):
elif key == 'DOWNLOAD_DIR':
if not value.endswith('/'):
value += '/'
elif key in ['DUMP_CHAT_ID', 'RSS_CHAT_ID']:
value = int(value)
elif key in ['LEECH_DUMP_CHAT', 'RSS_CHAT']:
if value.isdigit() or value.startswith('-'):
value = int(value)
elif key == 'STATUS_UPDATE_INTERVAL':
value = int(value)
if len(download_dict) != 0:
@ -488,10 +171,9 @@ async def edit_variable(_, message, pre_message, key):
elif key == 'EXTENSION_FILTER':
fx = value.split()
GLOBAL_EXTENSION_FILTER.clear()
GLOBAL_EXTENSION_FILTER.append('.aria2')
GLOBAL_EXTENSION_FILTER.append(['aria2', '!qB'])
for x in fx:
if x.strip().startswith('.'):
x = x.lstrip('.')
x = x.lstrip('.')
GLOBAL_EXTENSION_FILTER.append(x.strip().lower())
elif key == 'GDRIVE_ID':
if DRIVES_NAMES and DRIVES_NAMES[0] == 'Main':
@ -858,11 +540,331 @@ async def edit_bot_settings(client, query):
async def bot_settings(_, message):
    """Handle the bot-settings command: open the root settings menu.

    Cancels any pending per-chat input handler, renders the top-level
    menu, resets menu pagination, and sends the menu to the chat.
    """
    # Drop any stale input prompt registered for this chat.
    handler_dict[message.chat.id] = False
    text, buttons = await get_buttons()
    # Rewind menu pagination to the first page for the next render.
    globals()['START'] = 0
    await sendMessage(message, text, buttons)
async def load_config():
    """Reload the bot configuration from environment variables.

    Rebuilds the module-level ``config_dict`` and the related globals
    (``user_data``, ``GLOBAL_EXTENSION_FILTER``, ``DRIVES_IDS``,
    ``DRIVES_NAMES``, ``INDEX_URLS``), applies side effects (aria2 torrent
    timeout, status-update timer, gunicorn web server restart), persists
    the result to the database when ``DATABASE_URL`` is set, and refreshes
    dependent services concurrently.

    Takes no arguments and returns ``None``; all results are published via
    module-level state.
    """
    def _to_chat_id(value):
        # Convert numeric chat ids (including '-100...' channel/supergroup
        # ids) to int; leave @usernames and other strings untouched.
        # FIX: the previous check (isdigit() or startswith('-')) passed any
        # string beginning with '-' straight to int(), raising ValueError
        # for non-numeric values such as '-foo'.
        if value and (value.isdigit() or (value.startswith('-') and value[1:].isdigit())):
            return int(value)
        return value

    # Telegram credentials: keep the previously loaded value when unset.
    BOT_TOKEN = environ.get('BOT_TOKEN', '')
    if len(BOT_TOKEN) == 0:
        BOT_TOKEN = config_dict['BOT_TOKEN']
    TELEGRAM_API = environ.get('TELEGRAM_API', '')
    if len(TELEGRAM_API) == 0:
        TELEGRAM_API = config_dict['TELEGRAM_API']
    else:
        TELEGRAM_API = int(TELEGRAM_API)
    TELEGRAM_HASH = environ.get('TELEGRAM_HASH', '')
    if len(TELEGRAM_HASH) == 0:
        TELEGRAM_HASH = config_dict['TELEGRAM_HASH']
    OWNER_ID = environ.get('OWNER_ID', '')
    OWNER_ID = config_dict['OWNER_ID'] if len(OWNER_ID) == 0 else int(OWNER_ID)
    DATABASE_URL = environ.get('DATABASE_URL', '')
    if len(DATABASE_URL) == 0:
        DATABASE_URL = ''
    # Download directory must always carry a trailing slash.
    DOWNLOAD_DIR = environ.get('DOWNLOAD_DIR', '')
    if len(DOWNLOAD_DIR) == 0:
        DOWNLOAD_DIR = '/usr/src/app/downloads/'
    elif not DOWNLOAD_DIR.endswith("/"):
        DOWNLOAD_DIR = f'{DOWNLOAD_DIR}/'
    GDRIVE_ID = environ.get('GDRIVE_ID', '')
    if len(GDRIVE_ID) == 0:
        GDRIVE_ID = ''
    RCLONE_PATH = environ.get('RCLONE_PATH', '')
    if len(RCLONE_PATH) == 0:
        RCLONE_PATH = ''
    # Default upload target is either 'rc' (rclone) or 'gd' (Google Drive).
    DEFAULT_UPLOAD = environ.get('DEFAULT_UPLOAD', '')
    if DEFAULT_UPLOAD != 'rc':
        DEFAULT_UPLOAD = 'gd'
    RCLONE_FLAGS = environ.get('RCLONE_FLAGS', '')
    if len(RCLONE_FLAGS) == 0:
        RCLONE_FLAGS = ''
    # Space-separated id lists; access flags are stored per user in user_data.
    AUTHORIZED_CHATS = environ.get('AUTHORIZED_CHATS', '')
    if len(AUTHORIZED_CHATS) != 0:
        aid = AUTHORIZED_CHATS.split()
        for id_ in aid:
            user_data[int(id_.strip())] = {'is_auth': True}
    SUDO_USERS = environ.get('SUDO_USERS', '')
    if len(SUDO_USERS) != 0:
        aid = SUDO_USERS.split()
        for id_ in aid:
            user_data[int(id_.strip())] = {'is_sudo': True}
    # Rebuild the global extension blacklist; 'aria2' control files are always
    # filtered, user entries are stored lowercase without a leading dot.
    EXTENSION_FILTER = environ.get('EXTENSION_FILTER', '')
    if len(EXTENSION_FILTER) > 0:
        fx = EXTENSION_FILTER.split()
        GLOBAL_EXTENSION_FILTER.clear()
        GLOBAL_EXTENSION_FILTER.append('aria2')
        for x in fx:
            # lstrip is a no-op when there is no leading dot, so drop the
            # startswith guard (consistent with the same logic in config.py).
            x = x.lstrip('.')
            GLOBAL_EXTENSION_FILTER.append(x.strip().lower())
    # Mega credentials are only usable as a pair; drop both if one is missing.
    MEGA_EMAIL = environ.get('MEGA_EMAIL', '')
    MEGA_PASSWORD = environ.get('MEGA_PASSWORD', '')
    if len(MEGA_EMAIL) == 0 or len(MEGA_PASSWORD) == 0:
        MEGA_EMAIL = ''
        MEGA_PASSWORD = ''
    UPTOBOX_TOKEN = environ.get('UPTOBOX_TOKEN', '')
    if len(UPTOBOX_TOKEN) == 0:
        UPTOBOX_TOKEN = ''
    INDEX_URL = environ.get('INDEX_URL', '').rstrip("/")
    if len(INDEX_URL) == 0:
        INDEX_URL = ''
    SEARCH_API_LINK = environ.get('SEARCH_API_LINK', '').rstrip("/")
    if len(SEARCH_API_LINK) == 0:
        SEARCH_API_LINK = ''
    LEECH_FILENAME_PREFIX = environ.get('LEECH_FILENAME_PREFIX', '')
    if len(LEECH_FILENAME_PREFIX) == 0:
        LEECH_FILENAME_PREFIX = ''
    SEARCH_PLUGINS = environ.get('SEARCH_PLUGINS', '')
    if len(SEARCH_PLUGINS) == 0:
        SEARCH_PLUGINS = ''
    # Telegram upload cap: ~4 GB with a premium session, ~2 GB otherwise;
    # the configured split size may never exceed it.
    MAX_SPLIT_SIZE = 4194304000 if IS_PREMIUM_USER else 2097152000
    LEECH_SPLIT_SIZE = environ.get('LEECH_SPLIT_SIZE', '')
    if len(LEECH_SPLIT_SIZE) == 0 or int(LEECH_SPLIT_SIZE) > MAX_SPLIT_SIZE:
        LEECH_SPLIT_SIZE = MAX_SPLIT_SIZE
    else:
        LEECH_SPLIT_SIZE = int(LEECH_SPLIT_SIZE)
    STATUS_UPDATE_INTERVAL = environ.get('STATUS_UPDATE_INTERVAL', '')
    if len(STATUS_UPDATE_INTERVAL) == 0:
        STATUS_UPDATE_INTERVAL = 10
    else:
        STATUS_UPDATE_INTERVAL = int(STATUS_UPDATE_INTERVAL)
    # Restart the status-refresh timer with the new interval when tasks exist.
    if len(download_dict) != 0:
        async with status_reply_dict_lock:
            if Interval:
                Interval[0].cancel()
                Interval.clear()
                Interval.append(setInterval(
                    STATUS_UPDATE_INTERVAL, update_all_messages))
    AUTO_DELETE_MESSAGE_DURATION = environ.get(
        'AUTO_DELETE_MESSAGE_DURATION', '')
    if len(AUTO_DELETE_MESSAGE_DURATION) == 0:
        AUTO_DELETE_MESSAGE_DURATION = 30
    else:
        AUTO_DELETE_MESSAGE_DURATION = int(AUTO_DELETE_MESSAGE_DURATION)
    YT_DLP_OPTIONS = environ.get('YT_DLP_OPTIONS', '')
    if len(YT_DLP_OPTIONS) == 0:
        YT_DLP_OPTIONS = ''
    SEARCH_LIMIT = environ.get('SEARCH_LIMIT', '')
    SEARCH_LIMIT = 0 if len(SEARCH_LIMIT) == 0 else int(SEARCH_LIMIT)
    # Leech/RSS destinations accept an int chat id or a @username string.
    LEECH_DUMP_CHAT = _to_chat_id(environ.get('LEECH_DUMP_CHAT', ''))
    STATUS_LIMIT = environ.get('STATUS_LIMIT', '')
    STATUS_LIMIT = 10 if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT)
    RSS_CHAT = _to_chat_id(environ.get('RSS_CHAT', ''))
    RSS_DELAY = environ.get('RSS_DELAY', '')
    RSS_DELAY = 900 if len(RSS_DELAY) == 0 else int(RSS_DELAY)
    CMD_SUFFIX = environ.get('CMD_SUFFIX', '')
    USER_SESSION_STRING = environ.get('USER_SESSION_STRING', '')
    # Push the torrent stop-timeout to aria2 defaults and to every
    # still-incomplete download; '0' disables the timeout.
    TORRENT_TIMEOUT = environ.get('TORRENT_TIMEOUT', '')
    downloads = aria2.get_downloads()
    if len(TORRENT_TIMEOUT) == 0:
        for download in downloads:
            if not download.is_complete:
                try:
                    await sync_to_async(aria2.client.change_option, download.gid, {'bt-stop-timeout': '0'})
                except Exception as e:
                    LOGGER.error(e)
        aria2_options['bt-stop-timeout'] = '0'
        if DATABASE_URL:
            await DbManger().update_aria2('bt-stop-timeout', '0')
        TORRENT_TIMEOUT = ''
    else:
        for download in downloads:
            if not download.is_complete:
                try:
                    await sync_to_async(aria2.client.change_option, download.gid, {'bt-stop-timeout': TORRENT_TIMEOUT})
                except Exception as e:
                    LOGGER.error(e)
        aria2_options['bt-stop-timeout'] = TORRENT_TIMEOUT
        if DATABASE_URL:
            await DbManger().update_aria2('bt-stop-timeout', TORRENT_TIMEOUT)
        TORRENT_TIMEOUT = int(TORRENT_TIMEOUT)
    # Queue limits: '' means unlimited.
    QUEUE_ALL = environ.get('QUEUE_ALL', '')
    QUEUE_ALL = '' if len(QUEUE_ALL) == 0 else int(QUEUE_ALL)
    QUEUE_DOWNLOAD = environ.get('QUEUE_DOWNLOAD', '')
    QUEUE_DOWNLOAD = '' if len(QUEUE_DOWNLOAD) == 0 else int(QUEUE_DOWNLOAD)
    QUEUE_UPLOAD = environ.get('QUEUE_UPLOAD', '')
    QUEUE_UPLOAD = '' if len(QUEUE_UPLOAD) == 0 else int(QUEUE_UPLOAD)
    # Boolean flags: any value other than 'true' (case-insensitive) is False.
    INCOMPLETE_TASK_NOTIFIER = environ.get('INCOMPLETE_TASK_NOTIFIER', '')
    INCOMPLETE_TASK_NOTIFIER = INCOMPLETE_TASK_NOTIFIER.lower() == 'true'
    if not INCOMPLETE_TASK_NOTIFIER and DATABASE_URL:
        await DbManger().trunc_table('tasks')
    STOP_DUPLICATE = environ.get('STOP_DUPLICATE', '')
    STOP_DUPLICATE = STOP_DUPLICATE.lower() == 'true'
    IS_TEAM_DRIVE = environ.get('IS_TEAM_DRIVE', '')
    IS_TEAM_DRIVE = IS_TEAM_DRIVE.lower() == 'true'
    USE_SERVICE_ACCOUNTS = environ.get('USE_SERVICE_ACCOUNTS', '')
    USE_SERVICE_ACCOUNTS = USE_SERVICE_ACCOUNTS.lower() == 'true'
    WEB_PINCODE = environ.get('WEB_PINCODE', '')
    WEB_PINCODE = WEB_PINCODE.lower() == 'true'
    AS_DOCUMENT = environ.get('AS_DOCUMENT', '')
    AS_DOCUMENT = AS_DOCUMENT.lower() == 'true'
    EQUAL_SPLITS = environ.get('EQUAL_SPLITS', '')
    EQUAL_SPLITS = EQUAL_SPLITS.lower() == 'true'
    MEDIA_GROUP = environ.get('MEDIA_GROUP', '')
    MEDIA_GROUP = MEDIA_GROUP.lower() == 'true'
    BASE_URL_PORT = environ.get('BASE_URL_PORT', '')
    BASE_URL_PORT = 80 if len(BASE_URL_PORT) == 0 else int(BASE_URL_PORT)
    RCLONE_SERVE_URL = environ.get('RCLONE_SERVE_URL', '')
    if len(RCLONE_SERVE_URL) == 0:
        RCLONE_SERVE_URL = ''
    RCLONE_SERVE_PORT = environ.get('RCLONE_SERVE_PORT', '')
    RCLONE_SERVE_PORT = 8080 if len(
        RCLONE_SERVE_PORT) == 0 else int(RCLONE_SERVE_PORT)
    RCLONE_SERVE_USER = environ.get('RCLONE_SERVE_USER', '')
    if len(RCLONE_SERVE_USER) == 0:
        RCLONE_SERVE_USER = ''
    RCLONE_SERVE_PASS = environ.get('RCLONE_SERVE_PASS', '')
    if len(RCLONE_SERVE_PASS) == 0:
        RCLONE_SERVE_PASS = ''
    # Restart the gunicorn web server on the (possibly changed) URL/port.
    await (await create_subprocess_exec("pkill", "-9", "-f", "gunicorn")).wait()
    BASE_URL = environ.get('BASE_URL', '').rstrip("/")
    if len(BASE_URL) == 0:
        BASE_URL = ''
    else:
        await create_subprocess_shell(f"gunicorn web.wserver:app --bind 0.0.0.0:{BASE_URL_PORT} --worker-class gevent")
    UPSTREAM_REPO = environ.get('UPSTREAM_REPO', '')
    if len(UPSTREAM_REPO) == 0:
        UPSTREAM_REPO = ''
    UPSTREAM_BRANCH = environ.get('UPSTREAM_BRANCH', '')
    if len(UPSTREAM_BRANCH) == 0:
        UPSTREAM_BRANCH = 'master'
    # Rebuild the drive lists: "Main" drive first, then list_drives.txt
    # entries ("name id [index_url]" per line).
    DRIVES_IDS.clear()
    DRIVES_NAMES.clear()
    INDEX_URLS.clear()
    if GDRIVE_ID:
        DRIVES_NAMES.append("Main")
        DRIVES_IDS.append(GDRIVE_ID)
        INDEX_URLS.append(INDEX_URL)
    if await aiopath.exists('list_drives.txt'):
        async with aiopen('list_drives.txt', 'r+') as f:
            lines = await f.readlines()
            for line in lines:
                temp = line.strip().split()
                DRIVES_IDS.append(temp[1])
                DRIVES_NAMES.append(temp[0].replace("_", " "))
                if len(temp) > 2:
                    INDEX_URLS.append(temp[2])
                else:
                    INDEX_URLS.append('')
    # Publish everything at once into the shared config mapping.
    config_dict.update({'AS_DOCUMENT': AS_DOCUMENT,
                        'AUTHORIZED_CHATS': AUTHORIZED_CHATS,
                        'AUTO_DELETE_MESSAGE_DURATION': AUTO_DELETE_MESSAGE_DURATION,
                        'BASE_URL': BASE_URL,
                        'BASE_URL_PORT': BASE_URL_PORT,
                        'BOT_TOKEN': BOT_TOKEN,
                        'CMD_SUFFIX': CMD_SUFFIX,
                        'DATABASE_URL': DATABASE_URL,
                        'DEFAULT_UPLOAD': DEFAULT_UPLOAD,
                        'DOWNLOAD_DIR': DOWNLOAD_DIR,
                        'EQUAL_SPLITS': EQUAL_SPLITS,
                        'EXTENSION_FILTER': EXTENSION_FILTER,
                        'GDRIVE_ID': GDRIVE_ID,
                        'INCOMPLETE_TASK_NOTIFIER': INCOMPLETE_TASK_NOTIFIER,
                        'INDEX_URL': INDEX_URL,
                        'IS_TEAM_DRIVE': IS_TEAM_DRIVE,
                        'LEECH_DUMP_CHAT': LEECH_DUMP_CHAT,
                        'LEECH_FILENAME_PREFIX': LEECH_FILENAME_PREFIX,
                        'LEECH_SPLIT_SIZE': LEECH_SPLIT_SIZE,
                        'MEDIA_GROUP': MEDIA_GROUP,
                        'MEGA_EMAIL': MEGA_EMAIL,
                        'MEGA_PASSWORD': MEGA_PASSWORD,
                        'OWNER_ID': OWNER_ID,
                        'QUEUE_ALL': QUEUE_ALL,
                        'QUEUE_DOWNLOAD': QUEUE_DOWNLOAD,
                        'QUEUE_UPLOAD': QUEUE_UPLOAD,
                        'RCLONE_FLAGS': RCLONE_FLAGS,
                        'RCLONE_PATH': RCLONE_PATH,
                        'RCLONE_SERVE_URL': RCLONE_SERVE_URL,
                        'RCLONE_SERVE_USER': RCLONE_SERVE_USER,
                        'RCLONE_SERVE_PASS': RCLONE_SERVE_PASS,
                        'RCLONE_SERVE_PORT': RCLONE_SERVE_PORT,
                        'RSS_CHAT': RSS_CHAT,
                        'RSS_DELAY': RSS_DELAY,
                        'SEARCH_API_LINK': SEARCH_API_LINK,
                        'SEARCH_LIMIT': SEARCH_LIMIT,
                        'SEARCH_PLUGINS': SEARCH_PLUGINS,
                        'STATUS_LIMIT': STATUS_LIMIT,
                        'STATUS_UPDATE_INTERVAL': STATUS_UPDATE_INTERVAL,
                        'STOP_DUPLICATE': STOP_DUPLICATE,
                        'SUDO_USERS': SUDO_USERS,
                        'TELEGRAM_API': TELEGRAM_API,
                        'TELEGRAM_HASH': TELEGRAM_HASH,
                        'TORRENT_TIMEOUT': TORRENT_TIMEOUT,
                        'UPSTREAM_REPO': UPSTREAM_REPO,
                        'UPSTREAM_BRANCH': UPSTREAM_BRANCH,
                        'UPTOBOX_TOKEN': UPTOBOX_TOKEN,
                        'USER_SESSION_STRING': USER_SESSION_STRING,
                        'USE_SERVICE_ACCOUNTS': USE_SERVICE_ACCOUNTS,
                        'WEB_PINCODE': WEB_PINCODE,
                        'YT_DLP_OPTIONS': YT_DLP_OPTIONS})
    if DATABASE_URL:
        await DbManger().update_config(config_dict)
    # Refresh dependent services concurrently with the new configuration.
    await gather(initiate_search_tools(), start_from_queued(), rclone_serve_booter())
bot.add_handler(MessageHandler(bot_settings, filters=command(
BotCommands.BotSetCommand) & CustomFilters.sudo))
bot.add_handler(CallbackQueryHandler(edit_bot_settings,

View File

@ -211,6 +211,9 @@ async def _mirror_leech(client, message, isQbit=False, isLeech=False, sameDir=No
if up != 'gd' and not is_rclone_path(up):
await sendMessage(message, 'Wrong Rclone Upload Destination!')
return
elif up.isdigit() or up.startswith('-'):
up = int(up)
if link == 'rcl':
link = await RcloneList(client, message).get_rclone_path('rcd')

View File

@ -554,8 +554,8 @@ Timeout: 60 sec. Argument -c for command and options
async def rssMonitor():
if not config_dict['RSS_CHAT_ID']:
LOGGER.warning('RSS_CHAT_ID not added! Shutting down rss scheduler...')
if not config_dict['RSS_CHAT']:
LOGGER.warning('RSS_CHAT not added! Shutting down rss scheduler...')
scheduler.shutdown(wait=False)
return
if len(rss_dict) == 0:

View File

@ -36,6 +36,9 @@ async def get_user_settings(from_user):
ltype = "MEDIA"
buttons.ibutton("Send As Document", f"userset {user_id} doc")
buttons.ibutton("Thumbnail", f"userset {user_id} sthumb")
thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists"
buttons.ibutton("Leech Splits", f"userset {user_id} lss")
if user_dict.get('split_size', False):
split_size = user_dict['split_size']
@ -52,14 +55,6 @@ async def get_user_settings(from_user):
else:
media_group = 'Disabled'
buttons.ibutton("YT-DLP Options", f"userset {user_id} yto")
if user_dict.get('yt_opt', False):
ytopt = user_dict['yt_opt']
elif 'yt_opt' not in user_dict and (YTO := config_dict['YT_DLP_OPTIONS']):
ytopt = YTO
else:
ytopt = 'None'
buttons.ibutton("Leech Prefix", f"userset {user_id} lprefix")
if user_dict.get('lprefix', False):
lprefix = user_dict['lprefix']
@ -68,22 +63,36 @@ async def get_user_settings(from_user):
else:
lprefix = 'None'
buttons.ibutton("Thumbnail", f"userset {user_id} sthumb")
thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists"
buttons.ibutton("Leech Destination", f"userset {user_id} ldest")
if user_dict.get('leech_dest', False):
leech_dest = user_dict['leech_dest']
elif 'leech_dest' not in user_dict and (LD := config_dict['LEECH_DUMP_CHAT']):
leech_dest = LD
else:
leech_dest = 'None'
buttons.ibutton("Rclone", f"userset {user_id} rcc")
rccmsg = "Exists" if await aiopath.exists(rclone_path) else "Not Exists"
buttons.ibutton("YT-DLP Options", f"userset {user_id} yto")
if user_dict.get('yt_opt', False):
ytopt = user_dict['yt_opt']
elif 'yt_opt' not in user_dict and (YTO := config_dict['YT_DLP_OPTIONS']):
ytopt = YTO
else:
ytopt = 'None'
buttons.ibutton("Close", f"userset {user_id} close")
text = f"""<u>Settings for {name}</u>
Leech Type is <b>{ltype}</b>
Custom Thumbnail <b>{thumbmsg}</b>
Rclone Config <b>{rccmsg}</b>
Leech Split Size is <b>{split_size}</b>
Equal Splits is <b>{equal_splits}</b>
Media Group is <b>{media_group}</b>
Leech Prefix is <code>{escape(lprefix)}</code>
Leech Destination is <code>{leech_dest}</code>
Rclone Config <b>{rccmsg}</b>
YT-DLP Options is <b><code>{escape(ytopt)}</code></b>"""
return text, buttons.build_menu(1)
@ -95,7 +104,9 @@ async def update_user_settings(query):
async def user_settings(_, message):
msg, button = await get_user_settings(message.from_user)
from_user = message.from_user
handler_dict[from_user.id] = False
msg, button = await get_user_settings(from_user)
await sendMessage(message, msg, button)
@ -164,6 +175,19 @@ async def leech_split_size(_, message, pre_event):
await DbManger().update_user_data(user_id)
async def set_leech_destination(_, message, pre_event):
    """Save the user's custom leech destination from their reply message.

    Called by the user-settings conversation handler after the user is
    prompted for a destination. The reply text is either a chat id
    (possibly negative, e.g. -100xxxxxxxxxx for channels/supergroups)
    or a chat username; numeric values are stored as int, usernames as str.

    Args:
        _: unused client argument (handler signature).
        message: the user's reply containing the destination id/username.
        pre_event: the original settings query, used to refresh the menu.
    """
    user_id = message.from_user.id
    # Mark this user's pending-input handler as consumed.
    handler_dict[user_id] = False
    value = message.text
    # Convert to int only when the text is a valid (possibly negative)
    # integer; previously a bare startswith('-') check made int() raise
    # ValueError on inputs like '-abc'. Usernames remain strings.
    if value.isdigit() or (value.startswith('-') and value[1:].isdigit()):
        value = int(value)
    update_user_ldata(user_id, 'leech_dest', value)
    await message.delete()
    await update_user_settings(pre_event)
    if DATABASE_URL:
        await DbManger().update_user_data(user_id)
async def event_handler(client, query, pfunc, photo=False, document=False):
user_id = query.from_user.id
handler_dict[user_id] = True
@ -328,7 +352,7 @@ Check all yt-dlp api options from this <a href='https://github.com/yt-dlp/yt-dlp
elif data[2] == 'lprefix':
await query.answer()
buttons = ButtonMaker()
if user_dict.get('lprefix', False) or config_dict['LEECH_FILENAME_PREFIX']:
if user_dict.get('lprefix', False) or 'lprefix' not in user_dict and config_dict['LEECH_FILENAME_PREFIX']:
buttons.ibutton("Remove Leech Prefix",
f"userset {user_id} rlprefix")
buttons.ibutton("Back", f"userset {user_id} back")
@ -343,6 +367,24 @@ Check all yt-dlp api options from this <a href='https://github.com/yt-dlp/yt-dlp
await update_user_settings(query)
if DATABASE_URL:
await DbManger().update_user_data(user_id)
elif data[2] == 'ldest':
await query.answer()
buttons = ButtonMaker()
if user_dict.get('leech_dest', False) or 'leech_dest' not in user_dict and config_dict['LEECH_DUMP_CHAT']:
buttons.ibutton("Remove Leech Destination",
f"userset {user_id} rldest")
buttons.ibutton("Back", f"userset {user_id} back")
buttons.ibutton("Close", f"userset {user_id} close")
await editMessage(message, 'Send leech destination ID/USERNAME. Timeout: 60 sec', buttons.build_menu(1))
pfunc = partial(set_leech_destination, pre_event=query)
await event_handler(client, query, pfunc)
elif data[2] == 'rldest':
handler_dict[user_id] = False
await query.answer()
update_user_ldata(user_id, 'leech_dest', '')
await update_user_settings(query)
if DATABASE_URL:
await DbManger().update_user_data(user_id)
elif data[2] == 'back':
handler_dict[user_id] = False
await query.answer()

View File

@ -366,6 +366,8 @@ async def _ytdl(client, message, isLeech=False, sameDir=None, bulk=[]):
if up != 'gd' and not is_rclone_path(up):
await sendMessage(message, 'Wrong Rclone Upload Destination!')
return
elif up.isdigit() or up.startswith('-'):
up = int(up)
if up == 'rcl' and not isLeech:
up = await RcloneList(client, message).get_rclone_path('rcu')

View File

@ -42,7 +42,7 @@ AS_DOCUMENT = "False"
EQUAL_SPLITS = "False"
MEDIA_GROUP = "False"
LEECH_FILENAME_PREFIX = ""
DUMP_CHAT_ID = ""
LEECH_DUMP_CHAT = ""
# qBittorrent/Aria2c
TORRENT_TIMEOUT = ""
BASE_URL = ""
@ -54,7 +54,7 @@ QUEUE_DOWNLOAD = ""
QUEUE_UPLOAD = ""
# RSS
RSS_DELAY = "900"
RSS_CHAT_ID = ""
RSS_CHAT = ""
# Mega
MEGA_EMAIL = ""
MEGA_PASSWORD = ""