diff --git a/README.md b/README.md
index c4c56d43..67fc208e 100644
--- a/README.md
+++ b/README.md
@@ -30,7 +30,8 @@ programming in Python.
- Equal split size settings (global and user option)
- Ability to leech split file parts in a media group (global and user option)
- Download restricted messages (document or link) by tg private/public/super links (task option)
-- Choose transfer by bot or user session in case you have a premium plan (global and user option)
+- Choose transfer by bot or user session in case you have a premium plan (global, user option and task option)
+- Mix upload between user and bot session with respect to file size (global, user option and task option)
## Google Drive
@@ -55,6 +56,7 @@ programming in Python.
- Status buttons to get specific tasks for the chosen status regarding transfer type if the number of tasks is more than
30 (global and user option)
- Steps buttons for how much next/previous buttons should step backward/forward (global and user option)
+- Status for each user (no auto refresh)
## Yt-dlp
@@ -68,10 +70,10 @@ programming in Python.
## JDownloader
- Synchronize Settings (global option)
-- Wating to select (enable/disable files or change variants) before download start
+- Waiting to select (enable/disable files or change variants) before download start
- DLC file support
- All settings can be edited from the remote access to your JDownloader with Web Interface, Android App, iPhone App or
- Browser Extensions.
+ Browser Extensions
## Mongo Database
@@ -296,7 +298,8 @@ quotes, even if it's `Int`, `Bool` or `List`.
- `EQUAL_SPLITS`: Split files larger than **LEECH_SPLIT_SIZE** into equal parts size (Not working with zip cmd). Default
is `False`. `Bool`
- `MEDIA_GROUP`: View Uploaded splitted file parts in media group. Default is `False`. `Bool`.
-- `USER_TRANSMISSION`: Upload/Download by user session. Default is `False`. `Bool`
+- `USER_TRANSMISSION`: Upload/Download by user session. Only in superChat. Default is `False`. `Bool`
+- `MIXED_LEECH`: Upload by user and bot session with respect to file size. Only in superChat. Default is `False`. `Bool`
- `LEECH_FILENAME_PREFIX`: Add custom word to leeched file name. `Str`
- `LEECH_DUMP_CHAT`: ID or USERNAME or PM(private message) to where files would be uploaded. `Int`|`Str`. Add `-100` before channel/superGroup id.
diff --git a/add_to_team_drive.py b/add_to_team_drive.py
index 2e2d461d..fdab364b 100644
--- a/add_to_team_drive.py
+++ b/add_to_team_drive.py
@@ -15,16 +15,27 @@ from google_auth_oauthlib.flow import InstalledAppFlow
stt = time.time()
parse = argparse.ArgumentParser(
- description='A tool to add service accounts to a shared drive from a folder containing credential files.')
-parse.add_argument('--path', '-p', default='accounts',
- help='Specify an alternative path to the service accounts folder.')
-parse.add_argument('--credentials', '-c', default='./credentials.json',
- help='Specify the relative path for the credentials file.')
-parse.add_argument('--yes', '-y', default=False,
- action='store_true', help='Skips the sanity prompt.')
-parsereq = parse.add_argument_group('required arguments')
-parsereq.add_argument('--drive-id', '-d',
- help='The ID of the Shared Drive.', required=True)
+ description="A tool to add service accounts to a shared drive from a folder containing credential files."
+)
+parse.add_argument(
+ "--path",
+ "-p",
+ default="accounts",
+ help="Specify an alternative path to the service accounts folder.",
+)
+parse.add_argument(
+ "--credentials",
+ "-c",
+ default="./credentials.json",
+ help="Specify the relative path for the credentials file.",
+)
+parse.add_argument(
+ "--yes", "-y", default=False, action="store_true", help="Skips the sanity prompt."
+)
+parsereq = parse.add_argument_group("required arguments")
+parsereq.add_argument(
+ "--drive-id", "-d", help="The ID of the Shared Drive.", required=True
+)
args = parse.parse_args()
acc_dir = args.path
@@ -32,56 +43,62 @@ did = args.drive_id
credentials = glob.glob(args.credentials)
try:
- open(credentials[0], 'r')
- print('>> Found credentials.')
+ open(credentials[0], "r")
+ print(">> Found credentials.")
except IndexError:
- print('>> No credentials found.')
+ print(">> No credentials found.")
sys.exit(0)
if not args.yes:
# input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
# credentials[0],'r').read()))['installed']['client_id'])
- input('>> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive '
- '(shared drive) as Manager\n>> (Press any key to continue)')
+ input(
+ ">> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive "
+ "(shared drive) as Manager\n>> (Press any key to continue)"
+ )
creds = None
-if os.path.exists('token_sa.pickle'):
- with open('token_sa.pickle', 'rb') as token:
+if os.path.exists("token_sa.pickle"):
+ with open("token_sa.pickle", "rb") as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
- flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
- 'https://www.googleapis.com/auth/admin.directory.group',
- 'https://www.googleapis.com/auth/admin.directory.group.member'
- ])
+ flow = InstalledAppFlow.from_client_secrets_file(
+ credentials[0],
+ scopes=[
+ "https://www.googleapis.com/auth/admin.directory.group",
+ "https://www.googleapis.com/auth/admin.directory.group.member",
+ ],
+ )
# creds = flow.run_local_server(port=0)
creds = flow.run_console()
# Save the credentials for the next run
- with open('token_sa.pickle', 'wb') as token:
+ with open("token_sa.pickle", "wb") as token:
pickle.dump(creds, token)
drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
batch = drive.new_batch_http_request()
-aa = glob.glob(f'{acc_dir}/*.json')
+aa = glob.glob(f"{acc_dir}/*.json")
pbar = progress.bar.Bar("Readying accounts", max=len(aa))
for i in aa:
- ce = json.loads(open(i, 'r').read())['client_email']
- batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
- "role": "organizer",
- "type": "user",
- "emailAddress": ce
- }))
+ ce = json.loads(open(i, "r").read())["client_email"]
+ batch.add(
+ drive.permissions().create(
+ fileId=did,
+ supportsAllDrives=True,
+ body={"role": "organizer", "type": "user", "emailAddress": ce},
+ )
+ )
pbar.next()
pbar.finish()
-print('Adding...')
+print("Adding...")
batch.execute()
-print('Complete.')
+print("Complete.")
hours, rem = divmod((time.time() - stt), 3600)
minutes, sec = divmod(rem, 60)
-print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(
- int(hours), int(minutes), sec))
+print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))
diff --git a/bot/__init__.py b/bot/__init__.py
index 84bdd53d..43469d3d 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -367,6 +367,9 @@ if len(RCLONE_SERVE_PASS) == 0:
NAME_SUBSTITUTE = environ.get("NAME_SUBSTITUTE", "")
NAME_SUBSTITUTE = "" if len(NAME_SUBSTITUTE) == 0 else NAME_SUBSTITUTE
+MIXED_LEECH = environ.get("MIXED_LEECH", "")
+MIXED_LEECH = MIXED_LEECH.lower() == "true" and IS_PREMIUM_USER
+
config_dict = {
"AS_DOCUMENT": AS_DOCUMENT,
"AUTHORIZED_CHATS": AUTHORIZED_CHATS,
@@ -390,6 +393,7 @@ config_dict = {
"LEECH_FILENAME_PREFIX": LEECH_FILENAME_PREFIX,
"LEECH_SPLIT_SIZE": LEECH_SPLIT_SIZE,
"MEDIA_GROUP": MEDIA_GROUP,
+ "MIXED_LEECH": MIXED_LEECH,
"NAME_SUBSTITUTE": NAME_SUBSTITUTE,
"OWNER_ID": OWNER_ID,
"QUEUE_ALL": QUEUE_ALL,
diff --git a/bot/helper/common.py b/bot/helper/common.py
index cf650788..c45518fe 100644
--- a/bot/helper/common.py
+++ b/bot/helper/common.py
@@ -94,6 +94,7 @@ class TaskConfig:
self.isYtDlp = False
self.equalSplits = False
self.userTransmission = False
+ self.mixedLeech = False
self.extract = False
self.compress = False
self.select = False
@@ -157,7 +158,8 @@ class TaskConfig:
async def beforeStart(self):
self.nameSub = (
self.nameSub
- or self.userDict.get("name_sub", False) or config_dict["NAME_SUBSTITUTE"]
+ or self.userDict.get("name_sub", False)
+ or config_dict["NAME_SUBSTITUTE"]
if "name_sub" not in self.userDict
else ""
)
@@ -257,14 +259,23 @@ class TaskConfig:
or self.userDict.get("leech_dest")
or config_dict["LEECH_DUMP_CHAT"]
)
+ self.mixedLeech = IS_PREMIUM_USER and (
+ self.userDict.get("mixed_leech")
+ or config_dict["MIXED_LEECH"]
+ and "mixed_leech" not in self.userDict
+ )
if self.upDest:
if not isinstance(self.upDest, int):
if self.upDest.startswith("b:"):
self.upDest = self.upDest.replace("b:", "", 1)
self.userTransmission = False
+ self.mixedLeech = False
elif self.upDest.startswith("u:"):
self.upDest = self.upDest.replace("u:", "", 1)
self.userTransmission = IS_PREMIUM_USER
+                elif self.upDest.startswith("m:"):
+                    self.upDest = self.upDest.replace("m:", "", 1)
+                    self.userTransmission = self.mixedLeech = IS_PREMIUM_USER
if self.upDest.isdigit() or self.upDest.startswith("-"):
self.upDest = int(self.upDest)
elif self.upDest.lower() == "pm":
@@ -297,8 +308,9 @@ class TaskConfig:
)
except:
raise ValueError("Start the bot and try again!")
- elif self.userTransmission and not self.isSuperChat:
+ elif (self.userTransmission or self.mixedLeech) and not self.isSuperChat:
self.userTransmission = False
+ self.mixedLeech = False
if self.splitSize:
if self.splitSize.isdigit():
self.splitSize = int(self.splitSize)
@@ -907,9 +919,9 @@ class TaskConfig:
up_dir, name = dl_path.rsplit("/", 1)
for l in self.nameSub:
pattern = l[0]
- res = l[1] if len(l) > 1 and l[1] else ""
+ res = l[1] if len(l) > 1 and l[1] else ""
sen = len(l) > 2 and l[2] == "s"
- new_name = sub(fr"{pattern}", res, name, flags=I if sen else 0)
+ new_name = sub(rf"{pattern}", res, name, flags=I if sen else 0)
new_path = ospath.join(up_dir, new_name)
await move(dl_path, new_path)
return new_path
@@ -919,7 +931,7 @@ class TaskConfig:
f_path = ospath.join(dirpath, file_)
for l in self.nameSub:
pattern = l[0]
- res = l[1] if len(l) > 1 and l[1] else ""
+ res = l[1] if len(l) > 1 and l[1] else ""
sen = len(l) > 2 and l[2] == "s"
new_name = sub(rf"{pattern}", res, file_, flags=I if sen else 0)
await move(f_path, ospath.join(dirpath, new_name))
diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index 8b2b4796..9f2b76b7 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -105,6 +105,7 @@ def arg_parser(items, arg_base):
"-fd",
"-fu",
"-sync",
+ "-ml"
}
t = len(items)
i = 0
@@ -118,7 +119,7 @@ def arg_parser(items, arg_base):
if (
i + 1 == t
and part in bool_arg_set
- or part in ["-s", "-j", "-f", "-fd", "-fu", "-sync"]
+ or part in ["-s", "-j", "-f", "-fd", "-fu", "-sync", "-ml"]
):
arg_base[part] = True
else:
@@ -220,4 +221,4 @@ def new_thread(func):
future = run_coroutine_threadsafe(func(*args, **kwargs), bot_loop)
return future.result() if wait else future
- return wrapper
\ No newline at end of file
+ return wrapper
diff --git a/bot/helper/ext_utils/help_messages.py b/bot/helper/ext_utils/help_messages.py
index 282dc4a4..5538c3e1 100644
--- a/bot/helper/ext_utils/help_messages.py
+++ b/bot/helper/ext_utils/help_messages.py
@@ -52,7 +52,7 @@ If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tool
If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH.
If you want to add path or gdrive manually from your config/token (uploaded from usetting) add mrcc: for rclone and mtp: before the path/gdrive_id without space.
-/cmd link -up mrcc:main:dump or -up mtp:gdrive_id or -up b:id/@username/pm(leech by bot) or -up u:id/@username(leech by user)
+/cmd link -up mrcc:main:dump or -up mtp:gdrive_id or -up b:id/@username/pm(leech by bot) or -up u:id/@username(leech by user) or -up m:id/@username(mixed leech)
Incase you want to specify whether using token.pickle or service accounts you can add tp:gdrive_id or sa:gdrive_id or mtp:gdrive_id.
DEFAULT_UPLOAD doesn't effect on leech cmds.
@@ -189,6 +189,9 @@ This will effect on all files. Formate: wordToReplace : wordToReplaceWith : sens
3. mP4 will get removed because I have added nothing to replace with
"""
+mixed_leech = """Mixed leech: -ml
+/cmd link -ml (leech by user and bot session with respect to size)"""
+
YT_HELP_DICT = {
"main": yt,
"New-Name": f"{new_name}\nNote: Don't add file extension",
@@ -207,6 +210,7 @@ YT_HELP_DICT = {
"Convert-Media": convert_media,
"Force-Start": force_start,
"Name-Substitute": name_sub,
+ "Mixed-Leech": mixed_leech,
}
MIRROR_HELP_DICT = {
@@ -233,6 +237,7 @@ MIRROR_HELP_DICT = {
"Force-Start": force_start,
"User-Download": user_download,
"Name-Substitute": name_sub,
+ "Mixed-Leech": mixed_leech,
}
CLONE_HELP_DICT = {
diff --git a/bot/helper/ext_utils/status_utils.py b/bot/helper/ext_utils/status_utils.py
index be2770f3..0e419c05 100644
--- a/bot/helper/ext_utils/status_utils.py
+++ b/bot/helper/ext_utils/status_utils.py
@@ -64,9 +64,7 @@ async def getTaskByGid(gid: str):
def getSpecificTasks(status, userId):
if status == "All":
if userId:
- return [
- tk for tk in task_dict.values() if tk.listener.userId == userId
- ]
+ return [tk for tk in task_dict.values() if tk.listener.userId == userId]
else:
return list(task_dict.values())
elif userId:
@@ -75,7 +73,8 @@ def getSpecificTasks(status, userId):
for tk in task_dict.values()
if tk.listener.userId == userId
and (
- (st := tk.status()) and st == status
+ (st := tk.status())
+ and st == status
or status == MirrorStatus.STATUS_DOWNLOADING
and st not in STATUSES.values()
)
@@ -84,7 +83,8 @@ def getSpecificTasks(status, userId):
return [
tk
for tk in task_dict.values()
- if (st := tk.status()) and st == status
+ if (st := tk.status())
+ and st == status
or status == MirrorStatus.STATUS_DOWNLOADING
and st not in STATUSES.values()
]
diff --git a/bot/helper/listeners/qbit_listener.py b/bot/helper/listeners/qbit_listener.py
index 99039aa7..24b5eaf9 100644
--- a/bot/helper/listeners/qbit_listener.py
+++ b/bot/helper/listeners/qbit_listener.py
@@ -116,7 +116,7 @@ async def _onDownloadComplete(tor):
async def _qb_listener():
client = await sync_to_async(get_qb_client)
while True:
- async with qb_listener_lock:
+ async with qb_listener_lock:
try:
torrents = await sync_to_async(client.torrents_info)
if len(torrents) == 0:
diff --git a/bot/helper/mirror_leech_utils/download_utils/jd_download.py b/bot/helper/mirror_leech_utils/download_utils/jd_download.py
index fceb30e3..97b26f2d 100644
--- a/bot/helper/mirror_leech_utils/download_utils/jd_download.py
+++ b/bot/helper/mirror_leech_utils/download_utils/jd_download.py
@@ -23,7 +23,9 @@ from bot.helper.ext_utils.task_manager import (
stop_duplicate_check,
)
from bot.helper.listeners.jdownloader_listener import onDownloadStart
-from bot.helper.mirror_leech_utils.status_utils.jdownloader_status import JDownloaderStatus
+from bot.helper.mirror_leech_utils.status_utils.jdownloader_status import (
+ JDownloaderStatus,
+)
from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus
from bot.helper.telegram_helper.button_build import ButtonMaker
from bot.helper.telegram_helper.message_utils import (
@@ -133,15 +135,15 @@ async def add_jd_download(listener, path):
)
else:
await retry_function(
- jdownloader.device.linkgrabber.add_links,
- [
- {
- "autoExtract": False,
- "links": listener.link,
- "packageName": listener.name or None,
- }
- ],
- )
+ jdownloader.device.linkgrabber.add_links,
+ [
+ {
+ "autoExtract": False,
+ "links": listener.link,
+ "packageName": listener.name or None,
+ }
+ ],
+ )
await sleep(0.5)
while await retry_function(jdownloader.device.linkgrabber.is_collecting):
diff --git a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
index 8390b8b3..2910bc89 100644
--- a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
+++ b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
@@ -436,7 +436,9 @@ class RcloneTransferHelper:
)
return None, destination
- def _getUpdatedCommand(self, config_path, source, destination, method, unwanted_files=None):
+ def _getUpdatedCommand(
+ self, config_path, source, destination, method, unwanted_files=None
+ ):
if unwanted_files is None:
unwanted_files = []
ext = "*.{" + ",".join(self._listener.extensionFilter) + "}"
diff --git a/bot/helper/mirror_leech_utils/telegram_uploader.py b/bot/helper/mirror_leech_utils/telegram_uploader.py
index f7b36872..093c95fd 100644
--- a/bot/helper/mirror_leech_utils/telegram_uploader.py
+++ b/bot/helper/mirror_leech_utils/telegram_uploader.py
@@ -54,10 +54,11 @@ class TgUploader:
self._lprefix = ""
self._media_group = False
self._is_private = False
+ self._user_session = self._listener.userTransmission
async def _upload_progress(self, current, _):
if self._listener.isCancelled:
- if self._listener.userTransmission:
+ if self._user_session:
user.stop_transmission()
else:
self._listener.client.stop_transmission()
@@ -87,7 +88,7 @@ class TgUploader:
else self._listener.message.text.lstrip("/")
)
try:
- if self._listener.userTransmission:
+ if self._user_session:
self._sent_msg = await user.send_message(
chat_id=self._listener.upDest,
text=msg,
@@ -105,7 +106,7 @@ class TgUploader:
except Exception as e:
await self._listener.onUploadError(str(e))
return False
- elif self._listener.userTransmission:
+ elif self._user_session:
self._sent_msg = await user.get_messages(
chat_id=self._listener.message.chat.id, message_ids=self._listener.mid
)
@@ -200,6 +201,15 @@ class TgUploader:
)[-1]
async def _send_media_group(self, subkey, key, msgs):
+ for index, msg in enumerate(msgs):
+            if self._listener.mixedLeech or not self._user_session:
+ msgs[index] = await self._listener.client.get_messages(
+ chat_id=msg[0], message_ids=msg[1]
+ )
+ else:
+ msgs[index] = await user.get_messages(
+ chat_id=msg[0], message_ids=msg[1]
+ )
msgs_list = await msgs[0].reply_to_message.reply_media_group(
media=self._get_input_media(subkey, key),
quote=True,
@@ -240,6 +250,18 @@ class TgUploader:
continue
try:
f_size = await aiopath.getsize(self._up_path)
+ if self._listener.mixedLeech:
+ self._user_session = f_size > 2097152000
+ if self._user_session:
+ self._sent_msg = await user.get_messages(
+ chat_id=self._sent_msg.chat.id,
+ message_ids=self._sent_msg.id,
+ )
+ else:
+ self._sent_msg = await self._listener.client.get_messages(
+ chat_id=self._sent_msg.chat.id,
+ message_ids=self._sent_msg.id,
+ )
self._total_files += 1
if f_size == 0:
LOGGER.error(
@@ -427,25 +449,31 @@ class TgUploader:
if match := re_match(r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+$)", o_path):
pname = match.group(0)
if pname in self._media_dict[key].keys():
- self._media_dict[key][pname].append(self._sent_msg)
+ self._media_dict[key][pname].append(
+ [self._sent_msg.chat.id, self._sent_msg.id]
+ )
else:
- self._media_dict[key][pname] = [self._sent_msg]
+ self._media_dict[key][pname] = [
+ [self._sent_msg.chat.id, self._sent_msg.id]
+ ]
msgs = self._media_dict[key][pname]
if len(msgs) == 10:
await self._send_media_group(pname, key, msgs)
else:
self._last_msg_in_group = True
- if (self._thumb is None
+ if (
+ self._thumb is None
and thumb is not None
and await aiopath.exists(thumb)
):
await remove(thumb)
- except FloodWait as f: # for later
+ except FloodWait as f:
LOGGER.warning(str(f))
await sleep(f.value)
except Exception as err:
- if (self._thumb is None
+ if (
+ self._thumb is None
and thumb is not None
and await aiopath.exists(thumb)
):
diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py
index 4ad07de5..472fab40 100644
--- a/bot/helper/telegram_helper/message_utils.py
+++ b/bot/helper/telegram_helper/message_utils.py
@@ -61,7 +61,7 @@ async def sendFile(message, file, caption=None):
async def sendRss(text):
try:
- app = user if user else bot
+ app = user or bot
return await app.send_message(
chat_id=config_dict["RSS_CHAT"],
text=text,
diff --git a/bot/modules/bot_settings.py b/bot/modules/bot_settings.py
index 65d8f8c5..87a1affc 100644
--- a/bot/modules/bot_settings.py
+++ b/bot/modules/bot_settings.py
@@ -921,6 +921,9 @@ async def load_config():
NAME_SUBSTITUTE = environ.get("NAME_SUBSTITUTE", "")
NAME_SUBSTITUTE = "" if len(NAME_SUBSTITUTE) == 0 else NAME_SUBSTITUTE
+ MIXED_LEECH = environ.get("MIXED_LEECH", "")
+ MIXED_LEECH = MIXED_LEECH.lower() == "true" and IS_PREMIUM_USER
+
await (await create_subprocess_exec("pkill", "-9", "-f", "gunicorn")).wait()
BASE_URL = environ.get("BASE_URL", "").rstrip("/")
if len(BASE_URL) == 0:
@@ -983,6 +986,7 @@ async def load_config():
"LEECH_FILENAME_PREFIX": LEECH_FILENAME_PREFIX,
"LEECH_SPLIT_SIZE": LEECH_SPLIT_SIZE,
"MEDIA_GROUP": MEDIA_GROUP,
+ "MIXED_LEECH": MIXED_LEECH,
"NAME_SUBSTITUTE": NAME_SUBSTITUTE,
"OWNER_ID": OWNER_ID,
"QUEUE_ALL": QUEUE_ALL,
diff --git a/bot/modules/help.py b/bot/modules/help.py
index ecb5e06c..8200d1cd 100644
--- a/bot/modules/help.py
+++ b/bot/modules/help.py
@@ -25,7 +25,9 @@ async def argUsage(_, query):
elif data[2] == "y":
await editMessage(message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][1])
elif data[2] == "c":
- await editMessage(message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][1])
+ await editMessage(
+ message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][1]
+ )
elif data[1] == "mirror":
buttons = ButtonMaker()
buttons.ibutton("Back", "help back m")
diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py
index 4cbe3fbf..12b65388 100644
--- a/bot/modules/mirror_leech.py
+++ b/bot/modules/mirror_leech.py
@@ -85,6 +85,7 @@ class Mirror(TaskListener):
"-f": False,
"-fd": False,
"-fu": False,
+            "-ml": False,
"-i": 0,
"-sp": 0,
"link": "",
@@ -122,6 +123,7 @@ class Mirror(TaskListener):
self.convertAudio = args["-ca"]
self.convertVideo = args["-cv"]
self.nameSub = args["-ns"]
+ self.mixedLeech = args["-ml"]
headers = args["-h"]
isBulk = args["-b"]
diff --git a/bot/modules/rss.py b/bot/modules/rss.py
index c2410814..ef719519 100644
--- a/bot/modules/rss.py
+++ b/bot/modules/rss.py
@@ -7,7 +7,6 @@ from functools import partial
from io import BytesIO
from pyrogram.filters import command, regex, create
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
-from re import split as re_split
from time import time
from bot import scheduler, rss_dict, LOGGER, DATABASE_URL, config_dict, bot
diff --git a/bot/modules/users_settings.py b/bot/modules/users_settings.py
index 499d4a45..a939d996 100644
--- a/bot/modules/users_settings.py
+++ b/bot/modules/users_settings.py
@@ -100,6 +100,16 @@ async def get_user_settings(from_user):
else:
leech_method = "bot"
+ if (
+ IS_PREMIUM_USER
+ and user_dict.get("mixed_leech", False)
+ or "mixed_leech" not in user_dict
+ and config_dict["MIXED_LEECH"]
+ ):
+ mixed_leech = "Enabled"
+ else:
+ mixed_leech = "Disabled"
+
buttons.ibutton("Leech", f"userset {user_id} leech")
buttons.ibutton("Rclone", f"userset {user_id} rclone")
@@ -172,6 +182,7 @@ Media Group is {media_group}
Leech Prefix is {escape(lprefix)}
Leech Destination is {leech_dest}
Leech by {leech_method} session
+Mixed Leech is {mixed_leech}
Rclone Config {rccmsg}
Rclone Path is {rccpath}
Gdrive Token {tokenmsg}
@@ -335,6 +346,7 @@ async def edit_user_settings(client, query):
"media_group",
"user_transmission",
"stop_duplicate",
+ "mixed_leech",
]:
update_user_ldata(user_id, data[2], data[3] == "true")
await query.answer()
@@ -457,6 +469,25 @@ async def edit_user_settings(client, query):
)
else:
leech_method = "bot"
+
+ if (
+ IS_PREMIUM_USER
+ and user_dict.get("mixed_leech", False)
+ or "mixed_leech" not in user_dict
+ and config_dict["MIXED_LEECH"]
+ ):
+ mixed_leech = "Enabled"
+ buttons.ibutton(
+ "Disable Mixed Leech", f"userset {user_id} mixed_leech false"
+ )
+ elif IS_PREMIUM_USER:
+ mixed_leech = "Disabled"
+ buttons.ibutton(
+ "Enable Mixed Leech", f"userset {user_id} mixed_leech true"
+ )
+ else:
+ mixed_leech = "Disabled"
+
buttons.ibutton("Back", f"userset {user_id} back")
buttons.ibutton("Close", f"userset {user_id} close")
text = f"""Leech Settings for {name}
@@ -468,6 +499,7 @@ Media Group is {media_group}
Leech Prefix is {escape(lprefix)}
Leech Destination is {leech_dest}
Leech by {leech_method} session
+Mixed Leech is {mixed_leech}
"""
await editMessage(message, text, buttons.build_menu(2))
elif data[2] == "rclone":
diff --git a/bot/modules/ytdlp.py b/bot/modules/ytdlp.py
index f530653b..9faa3b94 100644
--- a/bot/modules/ytdlp.py
+++ b/bot/modules/ytdlp.py
@@ -298,6 +298,7 @@ class YtDlp(TaskListener):
"-f": False,
"-fd": False,
"-fu": False,
+            "-ml": False,
"-i": 0,
"-sp": 0,
"link": "",
@@ -335,6 +336,7 @@ class YtDlp(TaskListener):
self.convertAudio = args["-ca"]
self.convertVideo = args["-cv"]
self.nameSub = args["-ns"]
+ self.mixedLeech = args["-ml"]
isBulk = args["-b"]
folder_name = args["-m"]
diff --git a/config_sample.env b/config_sample.env
index 0fe19c1b..7943c36f 100644
--- a/config_sample.env
+++ b/config_sample.env
@@ -46,6 +46,7 @@ AS_DOCUMENT = "False"
EQUAL_SPLITS = "False"
MEDIA_GROUP = "False"
USER_TRANSMISSION = "False"
+MIXED_LEECH = "False"
LEECH_FILENAME_PREFIX = ""
LEECH_DUMP_CHAT = ""
# qBittorrent/Aria2c
diff --git a/gen_sa_accounts.py b/gen_sa_accounts.py
index 81d4d869..50d8eb96 100644
--- a/gen_sa_accounts.py
+++ b/gen_sa_accounts.py
@@ -13,8 +13,11 @@ from json import loads
from random import choice
from time import sleep
-SCOPES = ['https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/cloud-platform',
- 'https://www.googleapis.com/auth/iam']
+SCOPES = [
+ "https://www.googleapis.com/auth/drive",
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/iam",
+]
project_create_ops = []
current_key_dump = []
sleep_time = 30
@@ -24,15 +27,15 @@ sleep_time = 30
def _create_accounts(service, project, count):
batch = service.new_batch_http_request(callback=_def_batch_resp)
for _ in range(count):
- aid = _generate_id('mfc-')
+ aid = _generate_id("mfc-")
batch.add(
service.projects()
.serviceAccounts()
.create(
- name=f'projects/{project}',
+ name=f"projects/{project}",
body={
- 'accountId': aid,
- 'serviceAccount': {'displayName': aid},
+ "accountId": aid,
+ "serviceAccount": {"displayName": aid},
},
)
)
@@ -41,7 +44,7 @@ def _create_accounts(service, project, count):
# Create accounts needed to fill project
def _create_remaining_accounts(iam, project):
- print(f'Creating accounts in {project}')
+ print(f"Creating accounts in {project}")
sa_count = len(_list_sas(iam, project))
while sa_count != 100:
_create_accounts(iam, project, 100 - sa_count)
@@ -49,20 +52,20 @@ def _create_remaining_accounts(iam, project):
# Generate a random id
-def _generate_id(prefix='saf-'):
- chars = '-abcdefghijklmnopqrstuvwxyz1234567890'
- return prefix + ''.join(choice(chars) for _ in range(25)) + choice(chars[1:])
+def _generate_id(prefix="saf-"):
+ chars = "-abcdefghijklmnopqrstuvwxyz1234567890"
+ return prefix + "".join(choice(chars) for _ in range(25)) + choice(chars[1:])
# List projects using service
def _get_projects(service):
- return [i['projectId'] for i in service.projects().list().execute()['projects']]
+ return [i["projectId"] for i in service.projects().list().execute()["projects"]]
# Default batch callback handler
def _def_batch_resp(id, resp, exception):
if exception is not None:
- if str(exception).startswith(' 0:
current_count = len(_get_projects(cloud))
if current_count + create_projects <= max_projects:
- print('Creating %d projects' % (create_projects))
+ print("Creating %d projects" % (create_projects))
nprjs = _create_projects(cloud, create_projects)
selected_projects = nprjs
else:
- sys.exit('No, you cannot create %d new project (s).\n'
- 'Please reduce value of --quick-setup.\n'
- 'Remember that you can totally create %d projects (%d already).\n'
- 'Please do not delete existing projects unless you know what you are doing' % (
- create_projects, max_projects, current_count))
+ sys.exit(
+ "No, you cannot create %d new project (s).\n"
+ "Please reduce value of --quick-setup.\n"
+ "Remember that you can totally create %d projects (%d already).\n"
+ "Please do not delete existing projects unless you know what you are doing"
+ % (create_projects, max_projects, current_count)
+ )
else:
- print('Will overwrite all service accounts in existing projects.\n'
- 'So make sure you have some projects already.')
+ print(
+ "Will overwrite all service accounts in existing projects.\n"
+ "So make sure you have some projects already."
+ )
input("Press Enter to continue...")
if enable_services:
ste = [enable_services]
- if enable_services == '~':
+ if enable_services == "~":
ste = selected_projects
- elif enable_services == '*':
+ elif enable_services == "*":
ste = _get_projects(cloud)
- services = [f'{i}.googleapis.com' for i in services]
- print('Enabling services')
+ services = [f"{i}.googleapis.com" for i in services]
+ print("Enabling services")
_enable_services(serviceusage, ste, services)
if create_sas:
stc = [create_sas]
- if create_sas == '~':
+ if create_sas == "~":
stc = selected_projects
- elif create_sas == '*':
+ elif create_sas == "*":
stc = _get_projects(cloud)
for i in stc:
_create_remaining_accounts(iam, i)
@@ -270,81 +281,119 @@ def serviceaccountfactory(
if e.errno != errno.EEXIST:
raise
std = [download_keys]
- if download_keys == '~':
+ if download_keys == "~":
std = selected_projects
- elif download_keys == '*':
+ elif download_keys == "*":
std = _get_projects(cloud)
_create_sa_keys(iam, std, path)
if delete_sas:
std = []
std.append(delete_sas)
- if delete_sas == '~':
+ if delete_sas == "~":
std = selected_projects
- elif delete_sas == '*':
+ elif delete_sas == "*":
std = _get_projects(cloud)
for i in std:
- print(f'Deleting service accounts in {i}')
+ print(f"Deleting service accounts in {i}")
_delete_sas(iam, i)
-if __name__ == '__main__':
- parse = ArgumentParser(
- description='A tool to create Google service accounts.')
- parse.add_argument('--path', '-p', default='accounts',
- help='Specify an alternate directory to output the credential files.')
- parse.add_argument('--token', default='token_sa.pickle',
- help='Specify the pickle token file path.')
- parse.add_argument('--credentials', default='credentials.json',
- help='Specify the credentials file path.')
- parse.add_argument('--list-projects', default=False, action='store_true',
- help='List projects viewable by the user.')
- parse.add_argument('--list-sas', default=False,
- help='List service accounts in a project.')
- parse.add_argument('--create-projects', type=int,
- default=None, help='Creates up to N projects.')
- parse.add_argument('--max-projects', type=int, default=12,
- help='Max amount of project allowed. Default: 12')
- parse.add_argument('--enable-services', default=None,
- help='Enables services on the project. Default: IAM and Drive')
- parse.add_argument('--services', nargs='+', default=['iam', 'drive'],
- help='Specify a different set of services to enable. Overrides the default.')
- parse.add_argument('--create-sas', default=None,
- help='Create service accounts in a project.')
- parse.add_argument('--delete-sas', default=None,
- help='Delete service accounts in a project.')
- parse.add_argument('--download-keys', default=None,
- help='Download keys for all the service accounts in a project.')
- parse.add_argument('--quick-setup', default=None, type=int,
- help='Create projects, enable services, create service accounts and download keys. ')
- parse.add_argument('--new-only', default=False,
- action='store_true', help='Do not use exisiting projects.')
+if __name__ == "__main__":
+ parse = ArgumentParser(description="A tool to create Google service accounts.")
+ parse.add_argument(
+ "--path",
+ "-p",
+ default="accounts",
+ help="Specify an alternate directory to output the credential files.",
+ )
+ parse.add_argument(
+ "--token", default="token_sa.pickle", help="Specify the pickle token file path."
+ )
+ parse.add_argument(
+ "--credentials",
+ default="credentials.json",
+ help="Specify the credentials file path.",
+ )
+ parse.add_argument(
+ "--list-projects",
+ default=False,
+ action="store_true",
+ help="List projects viewable by the user.",
+ )
+ parse.add_argument(
+ "--list-sas", default=False, help="List service accounts in a project."
+ )
+ parse.add_argument(
+ "--create-projects", type=int, default=None, help="Creates up to N projects."
+ )
+ parse.add_argument(
+ "--max-projects",
+ type=int,
+ default=12,
+ help="Max amount of project allowed. Default: 12",
+ )
+ parse.add_argument(
+ "--enable-services",
+ default=None,
+ help="Enables services on the project. Default: IAM and Drive",
+ )
+ parse.add_argument(
+ "--services",
+ nargs="+",
+ default=["iam", "drive"],
+ help="Specify a different set of services to enable. Overrides the default.",
+ )
+ parse.add_argument(
+ "--create-sas", default=None, help="Create service accounts in a project."
+ )
+ parse.add_argument(
+ "--delete-sas", default=None, help="Delete service accounts in a project."
+ )
+ parse.add_argument(
+ "--download-keys",
+ default=None,
+ help="Download keys for all the service accounts in a project.",
+ )
+ parse.add_argument(
+ "--quick-setup",
+ default=None,
+ type=int,
+        help="Create projects, enable services, create service accounts and download keys.",
+ )
+ parse.add_argument(
+ "--new-only",
+ default=False,
+ action="store_true",
+        help="Do not use existing projects.",
+ )
args = parse.parse_args()
# If credentials file is invalid, search for one.
if not os.path.exists(args.credentials):
- options = glob('*.json')
- print('No credentials found at %s. Please enable the Drive API in:\n'
- 'https://developers.google.com/drive/api/v3/quickstart/python\n'
- 'and save the json file as credentials.json' % args.credentials)
+ options = glob("*.json")
+ print(
+ "No credentials found at %s. Please enable the Drive API in:\n"
+ "https://developers.google.com/drive/api/v3/quickstart/python\n"
+ "and save the json file as credentials.json" % args.credentials
+ )
if not options:
exit(-1)
else:
- print('Select a credentials file below.')
- inp_options = [str(i) for i in list(
- range(1, len(options) + 1))] + options
+ print("Select a credentials file below.")
+ inp_options = [str(i) for i in list(range(1, len(options) + 1))] + options
for i in range(len(options)):
- print(' %d) %s' % (i + 1, options[i]))
+ print(" %d) %s" % (i + 1, options[i]))
inp = None
while True:
- inp = input('> ')
+ inp = input("> ")
if inp in inp_options:
break
args.credentials = inp if inp in options else options[int(inp) - 1]
print(
- f'Use --credentials {args.credentials} next time to use this credentials file.'
+ f"Use --credentials {args.credentials} next time to use this credentials file."
)
if args.quick_setup:
- opt = '~' if args.new_only else '*'
- args.services = ['iam', 'drive']
+ opt = "~" if args.new_only else "*"
+ args.services = ["iam", "drive"]
args.create_projects = args.quick_setup
args.enable_services = opt
args.create_sas = opt
@@ -361,21 +410,20 @@ if __name__ == '__main__':
delete_sas=args.delete_sas,
enable_services=args.enable_services,
services=args.services,
- download_keys=args.download_keys
+ download_keys=args.download_keys,
)
if resp is not None:
if args.list_projects:
if resp:
- print('Projects (%d):' % len(resp))
+ print("Projects (%d):" % len(resp))
for i in resp:
- print(f' {i}')
+ print(f" {i}")
else:
- print('No projects.')
+ print("No projects.")
elif args.list_sas:
if resp:
- print('Service accounts in %s (%d):' %
- (args.list_sas, len(resp)))
+ print("Service accounts in %s (%d):" % (args.list_sas, len(resp)))
for i in resp:
print(f" {i['email']} ({i['uniqueId']})")
else:
- print('No service accounts.')
+ print("No service accounts.")
diff --git a/generate_drive_token.py b/generate_drive_token.py
index 68fb922f..fc6d25c7 100644
--- a/generate_drive_token.py
+++ b/generate_drive_token.py
@@ -10,10 +10,10 @@ if os.path.exists(__G_DRIVE_TOKEN_FILE):
with open(__G_DRIVE_TOKEN_FILE, "rb") as f:
credentials = pickle.load(f)
if (
- (credentials is None or not credentials.valid)
- and credentials
- and credentials.expired
- and credentials.refresh_token
+ (credentials is None or not credentials.valid)
+ and credentials
+ and credentials.expired
+ and credentials.refresh_token
):
credentials.refresh(Request())
else:
diff --git a/update.py b/update.py
index aaf69a07..2cbe5f10 100644
--- a/update.py
+++ b/update.py
@@ -53,9 +53,9 @@ if DATABASE_URL is not None:
if old_config is not None:
del old_config["_id"]
if (
- old_config is not None
- and old_config == dict(dotenv_values("config.env"))
- or old_config is None
+ old_config is not None
+ and old_config == dict(dotenv_values("config.env"))
+ or old_config is None
) and config_dict is not None:
environ["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"]
environ["UPSTREAM_BRANCH"] = config_dict["UPSTREAM_BRANCH"]