Add mixed leech

- Upload with both bot and user sessions, chosen per file according to file size (see the sketch below)
- Handle message object expiry while sending media groups for split file parts
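A minimal sketch of the per-file selection rule, assuming the 2097152000-byte (2000 MiB) threshold used in the TgUploader change below; the helper name is illustrative:

def pick_session(file_size: int, mixed_leech: bool, user_transmission: bool) -> str:
    # Mixed leech decides per file: parts above the bot-session upload cap
    # go through the premium user session, smaller ones through the bot.
    if mixed_leech:
        return "user" if file_size > 2097152000 else "bot"
    # Otherwise honour the configured transmission mode.
    return "user" if user_transmission else "bot"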

Signed-off-by: anasty17 <e.anastayyar@gmail.com>
This commit is contained in:
anasty17 2024-03-19 19:08:54 +02:00
parent 13e67f9520
commit 6fe2510dca
22 changed files with 363 additions and 199 deletions

View File

@ -30,7 +30,8 @@ programming in Python.
- Equal split size settings (global and user option)
- Ability to leech split file parts in a media group (global and user option)
- Download restricted messages (document or link) by tg private/public/super links (task option)
- Choose transfer by bot or user session in case you have a premium plan (global and user option)
- Choose transfer by bot or user session in case you have a premium plan (global, user option and task option)
- Mix upload between user and bot session with respect to file size (global, user option and task option)
## Google Drive
@ -55,6 +56,7 @@ programming in Python.
- Status buttons to get specific tasks for the chosen status regarding transfer type if the number of tasks is more than
30 (global and user option)
- Steps buttons for how much next/previous buttons should step backward/forward (global and user option)
- Status for each user (no auto refresh)
## Yt-dlp
@ -68,10 +70,10 @@ programming in Python.
## JDownloader
- Synchronize Settings (global option)
- Wating to select (enable/disable files or change variants) before download start
- Waiting to select (enable/disable files or change variants) before download start
- DLC file support
- All settings can be edited from the remote access to your JDownloader with Web Interface, Android App, iPhone App or
Browser Extensions.
Browser Extensions
## Mongo Database
@ -296,7 +298,8 @@ quotes, even if it's `Int`, `Bool` or `List`.
- `EQUAL_SPLITS`: Split files larger than **LEECH_SPLIT_SIZE** into equal parts size (Not working with zip cmd). Default
is `False`. `Bool`
- `MEDIA_GROUP`: View uploaded split file parts in a media group. Default is `False`. `Bool`.
- `USER_TRANSMISSION`: Upload/Download by user session. Default is `False`. `Bool`
- `USER_TRANSMISSION`: Upload/Download by user session. Works only in a superChat (superGroup/channel). Default is `False`. `Bool`
- `MIXED_LEECH`: Upload by user and bot session with respect to file size. Works only in a superChat (superGroup/channel). Default is `False`. `Bool` (see the example after this list)
- `LEECH_FILENAME_PREFIX`: Add custom word to leeched file name. `Str`
- `LEECH_DUMP_CHAT`: ID or USERNAME or PM(private message) to where files would be uploaded. `Int`|`Str`. Add `-100` before channel/superGroup id.
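For example, a config.env enabling size-based mixed uploads might contain (illustrative values, in the same style as the sample config further below):

USER_TRANSMISSION = "True"
MIXED_LEECH = "True"
LEECH_DUMP_CHAT = "-100123456789"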

View File

@ -15,16 +15,27 @@ from google_auth_oauthlib.flow import InstalledAppFlow
stt = time.time()
parse = argparse.ArgumentParser(
description='A tool to add service accounts to a shared drive from a folder containing credential files.')
parse.add_argument('--path', '-p', default='accounts',
help='Specify an alternative path to the service accounts folder.')
parse.add_argument('--credentials', '-c', default='./credentials.json',
help='Specify the relative path for the credentials file.')
parse.add_argument('--yes', '-y', default=False,
action='store_true', help='Skips the sanity prompt.')
parsereq = parse.add_argument_group('required arguments')
parsereq.add_argument('--drive-id', '-d',
help='The ID of the Shared Drive.', required=True)
description="A tool to add service accounts to a shared drive from a folder containing credential files."
)
parse.add_argument(
"--path",
"-p",
default="accounts",
help="Specify an alternative path to the service accounts folder.",
)
parse.add_argument(
"--credentials",
"-c",
default="./credentials.json",
help="Specify the relative path for the credentials file.",
)
parse.add_argument(
"--yes", "-y", default=False, action="store_true", help="Skips the sanity prompt."
)
parsereq = parse.add_argument_group("required arguments")
parsereq.add_argument(
"--drive-id", "-d", help="The ID of the Shared Drive.", required=True
)
args = parse.parse_args()
acc_dir = args.path
@ -32,56 +43,62 @@ did = args.drive_id
credentials = glob.glob(args.credentials)
try:
open(credentials[0], 'r')
print('>> Found credentials.')
open(credentials[0], "r")
print(">> Found credentials.")
except IndexError:
print('>> No credentials found.')
print(">> No credentials found.")
sys.exit(0)
if not args.yes:
# input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
# credentials[0],'r').read()))['installed']['client_id'])
input('>> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive '
'(shared drive) as Manager\n>> (Press any key to continue)')
input(
">> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive "
"(shared drive) as Manager\n>> (Press any key to continue)"
)
creds = None
if os.path.exists('token_sa.pickle'):
with open('token_sa.pickle', 'rb') as token:
if os.path.exists("token_sa.pickle"):
with open("token_sa.pickle", "rb") as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
'https://www.googleapis.com/auth/admin.directory.group',
'https://www.googleapis.com/auth/admin.directory.group.member'
])
flow = InstalledAppFlow.from_client_secrets_file(
credentials[0],
scopes=[
"https://www.googleapis.com/auth/admin.directory.group",
"https://www.googleapis.com/auth/admin.directory.group.member",
],
)
# creds = flow.run_local_server(port=0)
creds = flow.run_console()
# Save the credentials for the next run
with open('token_sa.pickle', 'wb') as token:
with open("token_sa.pickle", "wb") as token:
pickle.dump(creds, token)
drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
batch = drive.new_batch_http_request()
aa = glob.glob(f'{acc_dir}/*.json')
aa = glob.glob(f"{acc_dir}/*.json")
pbar = progress.bar.Bar("Readying accounts", max=len(aa))
for i in aa:
ce = json.loads(open(i, 'r').read())['client_email']
batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
"role": "organizer",
"type": "user",
"emailAddress": ce
}))
ce = json.loads(open(i, "r").read())["client_email"]
batch.add(
drive.permissions().create(
fileId=did,
supportsAllDrives=True,
body={"role": "organizer", "type": "user", "emailAddress": ce},
)
)
pbar.next()
pbar.finish()
print('Adding...')
print("Adding...")
batch.execute()
print('Complete.')
print("Complete.")
hours, rem = divmod((time.time() - stt), 3600)
minutes, sec = divmod(rem, 60)
print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(
int(hours), int(minutes), sec))
print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))

View File

@ -367,6 +367,9 @@ if len(RCLONE_SERVE_PASS) == 0:
NAME_SUBSTITUTE = environ.get("NAME_SUBSTITUTE", "")
NAME_SUBSTITUTE = "" if len(NAME_SUBSTITUTE) == 0 else NAME_SUBSTITUTE
MIXED_LEECH = environ.get("MIXED_LEECH", "")
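# The flag only takes effect when the logged-in user session is premium.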
MIXED_LEECH = MIXED_LEECH.lower() == "true" and IS_PREMIUM_USER
config_dict = {
"AS_DOCUMENT": AS_DOCUMENT,
"AUTHORIZED_CHATS": AUTHORIZED_CHATS,
@ -390,6 +393,7 @@ config_dict = {
"LEECH_FILENAME_PREFIX": LEECH_FILENAME_PREFIX,
"LEECH_SPLIT_SIZE": LEECH_SPLIT_SIZE,
"MEDIA_GROUP": MEDIA_GROUP,
"MIXED_LEECH": MIXED_LEECH,
"NAME_SUBSTITUTE": NAME_SUBSTITUTE,
"OWNER_ID": OWNER_ID,
"QUEUE_ALL": QUEUE_ALL,

View File

@ -94,6 +94,7 @@ class TaskConfig:
self.isYtDlp = False
self.equalSplits = False
self.userTransmission = False
self.mixedLeech = False
self.extract = False
self.compress = False
self.select = False
@ -157,7 +158,8 @@ class TaskConfig:
async def beforeStart(self):
self.nameSub = (
self.nameSub
or self.userDict.get("name_sub", False) or config_dict["NAME_SUBSTITUTE"]
or self.userDict.get("name_sub", False)
or config_dict["NAME_SUBSTITUTE"]
if "name_sub" not in self.userDict
else ""
)
@ -257,14 +259,23 @@ class TaskConfig:
or self.userDict.get("leech_dest")
or config_dict["LEECH_DUMP_CHAT"]
)
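# The user's mixed_leech setting takes priority; the global MIXED_LEECH default
# applies only when the user has not set it, and a premium session is required either way.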
self.mixedLeech = IS_PREMIUM_USER and (
self.userDict.get("mixed_leech")
or config_dict["MIXED_LEECH"]
and "mixed_leech" not in self.userDict
)
if self.upDest:
if not isinstance(self.upDest, int):
if self.upDest.startswith("b:"):
self.upDest = self.upDest.replace("b:", "", 1)
self.userTransmission = False
self.mixedLeech = False
elif self.upDest.startswith("u:"):
self.upDest = self.upDest.replace("u:", "", 1)
self.userTransmission = IS_PREMIUM_USER
elif self.upDest.startswith("m:"):
self.userTransmission = IS_PREMIUM_USER
self.mixedLeech = self.userTransmission
if self.upDest.isdigit() or self.upDest.startswith("-"):
self.upDest = int(self.upDest)
elif self.upDest.lower() == "pm":
@ -297,8 +308,9 @@ class TaskConfig:
)
except:
raise ValueError("Start the bot and try again!")
elif self.userTransmission and not self.isSuperChat:
elif (self.userTransmission or self.mixedLeech) and not self.isSuperChat:
self.userTransmission = False
self.mixedLeech = False
if self.splitSize:
if self.splitSize.isdigit():
self.splitSize = int(self.splitSize)
@ -907,9 +919,9 @@ class TaskConfig:
up_dir, name = dl_path.rsplit("/", 1)
for l in self.nameSub:
pattern = l[0]
res = l[1] if len(l) > 1 and l[1] else ""
res = l[1] if len(l) > 1 and l[1] else ""
sen = len(l) > 2 and l[2] == "s"
new_name = sub(fr"{pattern}", res, name, flags=I if sen else 0)
new_name = sub(rf"{pattern}", res, name, flags=I if sen else 0)
new_path = ospath.join(up_dir, new_name)
await move(dl_path, new_path)
return new_path
@ -919,7 +931,7 @@ class TaskConfig:
f_path = ospath.join(dirpath, file_)
for l in self.nameSub:
pattern = l[0]
res = l[1] if len(l) > 1 and l[1] else ""
res = l[1] if len(l) > 1 and l[1] else ""
sen = len(l) > 2 and l[2] == "s"
new_name = sub(rf"{pattern}", res, file_, flags=I if sen else 0)
await move(f_path, ospath.join(dirpath, new_name))

View File

@ -105,6 +105,7 @@ def arg_parser(items, arg_base):
"-fd",
"-fu",
"-sync",
"-ml"
}
t = len(items)
i = 0
@ -118,7 +119,7 @@ def arg_parser(items, arg_base):
if (
i + 1 == t
and part in bool_arg_set
or part in ["-s", "-j", "-f", "-fd", "-fu", "-sync"]
or part in ["-s", "-j", "-f", "-fd", "-fu", "-sync", "-ml"]
):
arg_base[part] = True
else:
@ -220,4 +221,4 @@ def new_thread(func):
future = run_coroutine_threadsafe(func(*args, **kwargs), bot_loop)
return future.result() if wait else future
return wrapper
return wrapper

View File

@ -52,7 +52,7 @@ If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tool
If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH.
If you want to add path or gdrive manually from your config/token (uploaded from usetting) add mrcc: for rclone and mtp: before the path/gdrive_id without space.
/cmd link -up mrcc:main:dump or -up mtp:gdrive_id or -up b:id/@username/pm(leech by bot) or -up u:id/@username(leech by user)
/cmd link -up mrcc:main:dump or -up mtp:gdrive_id or -up b:id/@username/pm(leech by bot) or -up u:id/@username(leech by user) or -up m:id/@username(mixed leech)
In case you want to specify whether to use token.pickle or service accounts you can add tp:gdrive_id or sa:gdrive_id or mtp:gdrive_id.
DEFAULT_UPLOAD doesn't affect leech cmds.
@ -189,6 +189,9 @@ This will effect on all files. Formate: wordToReplace : wordToReplaceWith : sens
3. mP4 will get removed because I have added nothing to replace with
"""
mixed_leech = """Mixed leech: -ml
/cmd link -ml (leech by user and bot session with respect to size)"""
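# Illustrative usage: /cmd link -ml -up m:@username (parts above the bot-session size cap go via the user session).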
YT_HELP_DICT = {
"main": yt,
"New-Name": f"{new_name}\nNote: Don't add file extension",
@ -207,6 +210,7 @@ YT_HELP_DICT = {
"Convert-Media": convert_media,
"Force-Start": force_start,
"Name-Substitute": name_sub,
"Mixed-Leech": mixed_leech,
}
MIRROR_HELP_DICT = {
@ -233,6 +237,7 @@ MIRROR_HELP_DICT = {
"Force-Start": force_start,
"User-Download": user_download,
"Name-Substitute": name_sub,
"Mixed-Leech": mixed_leech,
}
CLONE_HELP_DICT = {

View File

@ -64,9 +64,7 @@ async def getTaskByGid(gid: str):
def getSpecificTasks(status, userId):
if status == "All":
if userId:
return [
tk for tk in task_dict.values() if tk.listener.userId == userId
]
return [tk for tk in task_dict.values() if tk.listener.userId == userId]
else:
return list(task_dict.values())
elif userId:
@ -75,7 +73,8 @@ def getSpecificTasks(status, userId):
for tk in task_dict.values()
if tk.listener.userId == userId
and (
(st := tk.status()) and st == status
(st := tk.status())
and st == status
or status == MirrorStatus.STATUS_DOWNLOADING
and st not in STATUSES.values()
)
@ -84,7 +83,8 @@ def getSpecificTasks(status, userId):
return [
tk
for tk in task_dict.values()
if (st := tk.status()) and st == status
if (st := tk.status())
and st == status
or status == MirrorStatus.STATUS_DOWNLOADING
and st not in STATUSES.values()
]

View File

@ -116,7 +116,7 @@ async def _onDownloadComplete(tor):
async def _qb_listener():
client = await sync_to_async(get_qb_client)
while True:
async with qb_listener_lock:
async with qb_listener_lock:
try:
torrents = await sync_to_async(client.torrents_info)
if len(torrents) == 0:

View File

@ -23,7 +23,9 @@ from bot.helper.ext_utils.task_manager import (
stop_duplicate_check,
)
from bot.helper.listeners.jdownloader_listener import onDownloadStart
from bot.helper.mirror_leech_utils.status_utils.jdownloader_status import JDownloaderStatus
from bot.helper.mirror_leech_utils.status_utils.jdownloader_status import (
JDownloaderStatus,
)
from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus
from bot.helper.telegram_helper.button_build import ButtonMaker
from bot.helper.telegram_helper.message_utils import (
@ -133,15 +135,15 @@ async def add_jd_download(listener, path):
)
else:
await retry_function(
jdownloader.device.linkgrabber.add_links,
[
{
"autoExtract": False,
"links": listener.link,
"packageName": listener.name or None,
}
],
)
jdownloader.device.linkgrabber.add_links,
[
{
"autoExtract": False,
"links": listener.link,
"packageName": listener.name or None,
}
],
)
await sleep(0.5)
while await retry_function(jdownloader.device.linkgrabber.is_collecting):

View File

@ -436,7 +436,9 @@ class RcloneTransferHelper:
)
return None, destination
def _getUpdatedCommand(self, config_path, source, destination, method, unwanted_files=None):
def _getUpdatedCommand(
self, config_path, source, destination, method, unwanted_files=None
):
if unwanted_files is None:
unwanted_files = []
ext = "*.{" + ",".join(self._listener.extensionFilter) + "}"

View File

@ -54,10 +54,11 @@ class TgUploader:
self._lprefix = ""
self._media_group = False
self._is_private = False
self._user_session = self._listener.userTransmission
async def _upload_progress(self, current, _):
if self._listener.isCancelled:
if self._listener.userTransmission:
if self._user_session:
user.stop_transmission()
else:
self._listener.client.stop_transmission()
@ -87,7 +88,7 @@ class TgUploader:
else self._listener.message.text.lstrip("/")
)
try:
if self._listener.userTransmission:
if self._user_session:
self._sent_msg = await user.send_message(
chat_id=self._listener.upDest,
text=msg,
@ -105,7 +106,7 @@ class TgUploader:
except Exception as e:
await self._listener.onUploadError(str(e))
return False
elif self._listener.userTransmission:
elif self._user_session:
self._sent_msg = await user.get_messages(
chat_id=self._listener.message.chat.id, message_ids=self._listener.mid
)
@ -200,6 +201,15 @@ class TgUploader:
)[-1]
async def _send_media_group(self, subkey, key, msgs):
for index, msg in enumerate(msgs):
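# msg holds a stored [chat_id, message_id] pair; re-fetch a fresh Message object,
# since one cached across a long upload can expire before the media group is sent.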
if self._listener.mixedLeech or not self._user_session:
msgs[index] = await self._listener.client.get_messages(
chat_id=msg[0], message_ids=msg[1]
)
else:
msgs[index] = await user.get_messages(
chat_id=msg[0], message_ids=msg[1]
)
msgs_list = await msgs[0].reply_to_message.reply_media_group(
media=self._get_input_media(subkey, key),
quote=True,
@ -240,6 +250,18 @@ class TgUploader:
continue
try:
f_size = await aiopath.getsize(self._up_path)
if self._listener.mixedLeech:
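# 2097152000 bytes = 2000 MiB: above the bot-session upload cap, so switch to the premium user session.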
self._user_session = f_size > 2097152000
if self._user_session:
self._sent_msg = await user.get_messages(
chat_id=self._sent_msg.chat.id,
message_ids=self._sent_msg.id,
)
else:
self._sent_msg = await self._listener.client.get_messages(
chat_id=self._sent_msg.chat.id,
message_ids=self._sent_msg.id,
)
self._total_files += 1
if f_size == 0:
LOGGER.error(
@ -427,25 +449,31 @@ class TgUploader:
if match := re_match(r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+$)", o_path):
pname = match.group(0)
if pname in self._media_dict[key].keys():
self._media_dict[key][pname].append(self._sent_msg)
self._media_dict[key][pname].append(
[self._sent_msg.chat.id, self._sent_msg.id]
)
else:
self._media_dict[key][pname] = [self._sent_msg]
self._media_dict[key][pname] = [
[self._sent_msg.chat.id, self._sent_msg.id]
]
msgs = self._media_dict[key][pname]
if len(msgs) == 10:
await self._send_media_group(pname, key, msgs)
else:
self._last_msg_in_group = True
if (self._thumb is None
if (
self._thumb is None
and thumb is not None
and await aiopath.exists(thumb)
):
await remove(thumb)
except FloodWait as f: # for later
except FloodWait as f:
LOGGER.warning(str(f))
await sleep(f.value)
except Exception as err:
if (self._thumb is None
if (
self._thumb is None
and thumb is not None
and await aiopath.exists(thumb)
):

View File

@ -61,7 +61,7 @@ async def sendFile(message, file, caption=None):
async def sendRss(text):
try:
app = user if user else bot
app = user or bot
return await app.send_message(
chat_id=config_dict["RSS_CHAT"],
text=text,

View File

@ -921,6 +921,9 @@ async def load_config():
NAME_SUBSTITUTE = environ.get("NAME_SUBSTITUTE", "")
NAME_SUBSTITUTE = "" if len(NAME_SUBSTITUTE) == 0 else NAME_SUBSTITUTE
MIXED_LEECH = environ.get("MIXED_LEECH", "")
MIXED_LEECH = MIXED_LEECH.lower() == "true" and IS_PREMIUM_USER
await (await create_subprocess_exec("pkill", "-9", "-f", "gunicorn")).wait()
BASE_URL = environ.get("BASE_URL", "").rstrip("/")
if len(BASE_URL) == 0:
@ -983,6 +986,7 @@ async def load_config():
"LEECH_FILENAME_PREFIX": LEECH_FILENAME_PREFIX,
"LEECH_SPLIT_SIZE": LEECH_SPLIT_SIZE,
"MEDIA_GROUP": MEDIA_GROUP,
"MIXED_LEECH": MIXED_LEECH,
"NAME_SUBSTITUTE": NAME_SUBSTITUTE,
"OWNER_ID": OWNER_ID,
"QUEUE_ALL": QUEUE_ALL,

View File

@ -25,7 +25,9 @@ async def argUsage(_, query):
elif data[2] == "y":
await editMessage(message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][1])
elif data[2] == "c":
await editMessage(message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][1])
await editMessage(
message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][1]
)
elif data[1] == "mirror":
buttons = ButtonMaker()
buttons.ibutton("Back", "help back m")

View File

@ -85,6 +85,7 @@ class Mirror(TaskListener):
"-f": False,
"-fd": False,
"-fu": False,
"-mu": False,
"-i": 0,
"-sp": 0,
"link": "",
@ -122,6 +123,7 @@ class Mirror(TaskListener):
self.convertAudio = args["-ca"]
self.convertVideo = args["-cv"]
self.nameSub = args["-ns"]
self.mixedLeech = args["-ml"]
headers = args["-h"]
isBulk = args["-b"]

View File

@ -7,7 +7,6 @@ from functools import partial
from io import BytesIO
from pyrogram.filters import command, regex, create
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
from re import split as re_split
from time import time
from bot import scheduler, rss_dict, LOGGER, DATABASE_URL, config_dict, bot

View File

@ -100,6 +100,16 @@ async def get_user_settings(from_user):
else:
leech_method = "bot"
if (
IS_PREMIUM_USER
and user_dict.get("mixed_leech", False)
or "mixed_leech" not in user_dict
and config_dict["MIXED_LEECH"]
):
mixed_leech = "Enabled"
else:
mixed_leech = "Disabled"
buttons.ibutton("Leech", f"userset {user_id} leech")
buttons.ibutton("Rclone", f"userset {user_id} rclone")
@ -172,6 +182,7 @@ Media Group is <b>{media_group}</b>
Leech Prefix is <code>{escape(lprefix)}</code>
Leech Destination is <code>{leech_dest}</code>
Leech by <b>{leech_method}</b> session
Mixed Leech is <b>{mixed_leech}</b>
Rclone Config <b>{rccmsg}</b>
Rclone Path is <code>{rccpath}</code>
Gdrive Token <b>{tokenmsg}</b>
@ -335,6 +346,7 @@ async def edit_user_settings(client, query):
"media_group",
"user_transmission",
"stop_duplicate",
"mixed_leech",
]:
update_user_ldata(user_id, data[2], data[3] == "true")
await query.answer()
@ -457,6 +469,25 @@ async def edit_user_settings(client, query):
)
else:
leech_method = "bot"
if (
IS_PREMIUM_USER
and user_dict.get("mixed_leech", False)
or "mixed_leech" not in user_dict
and config_dict["MIXED_LEECH"]
):
mixed_leech = "Enabled"
buttons.ibutton(
"Disable Mixed Leech", f"userset {user_id} mixed_leech false"
)
elif IS_PREMIUM_USER:
mixed_leech = "Disabled"
buttons.ibutton(
"Enable Mixed Leech", f"userset {user_id} mixed_leech true"
)
else:
mixed_leech = "Disabled"
buttons.ibutton("Back", f"userset {user_id} back")
buttons.ibutton("Close", f"userset {user_id} close")
text = f"""<u>Leech Settings for {name}</u>
@ -468,6 +499,7 @@ Media Group is <b>{media_group}</b>
Leech Prefix is <code>{escape(lprefix)}</code>
Leech Destination is <code>{leech_dest}</code>
Leech by <b>{leech_method}</b> session
Mixed Leech is <b>{mixed_leech}</b>
"""
await editMessage(message, text, buttons.build_menu(2))
elif data[2] == "rclone":

View File

@ -298,6 +298,7 @@ class YtDlp(TaskListener):
"-f": False,
"-fd": False,
"-fu": False,
"-mu": False,
"-i": 0,
"-sp": 0,
"link": "",
@ -335,6 +336,7 @@ class YtDlp(TaskListener):
self.convertAudio = args["-ca"]
self.convertVideo = args["-cv"]
self.nameSub = args["-ns"]
self.mixedLeech = args["-ml"]
isBulk = args["-b"]
folder_name = args["-m"]

View File

@ -46,6 +46,7 @@ AS_DOCUMENT = "False"
EQUAL_SPLITS = "False"
MEDIA_GROUP = "False"
USER_TRANSMISSION = "False"
MIXED_LEECH = "False"
LEECH_FILENAME_PREFIX = ""
LEECH_DUMP_CHAT = ""
# qBittorrent/Aria2c

View File

@ -13,8 +13,11 @@ from json import loads
from random import choice
from time import sleep
SCOPES = ['https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/cloud-platform',
'https://www.googleapis.com/auth/iam']
SCOPES = [
"https://www.googleapis.com/auth/drive",
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/iam",
]
project_create_ops = []
current_key_dump = []
sleep_time = 30
@ -24,15 +27,15 @@ sleep_time = 30
def _create_accounts(service, project, count):
batch = service.new_batch_http_request(callback=_def_batch_resp)
for _ in range(count):
aid = _generate_id('mfc-')
aid = _generate_id("mfc-")
batch.add(
service.projects()
.serviceAccounts()
.create(
name=f'projects/{project}',
name=f"projects/{project}",
body={
'accountId': aid,
'serviceAccount': {'displayName': aid},
"accountId": aid,
"serviceAccount": {"displayName": aid},
},
)
)
@ -41,7 +44,7 @@ def _create_accounts(service, project, count):
# Create accounts needed to fill project
def _create_remaining_accounts(iam, project):
print(f'Creating accounts in {project}')
print(f"Creating accounts in {project}")
sa_count = len(_list_sas(iam, project))
while sa_count != 100:
_create_accounts(iam, project, 100 - sa_count)
@ -49,20 +52,20 @@ def _create_remaining_accounts(iam, project):
# Generate a random id
def _generate_id(prefix='saf-'):
chars = '-abcdefghijklmnopqrstuvwxyz1234567890'
return prefix + ''.join(choice(chars) for _ in range(25)) + choice(chars[1:])
def _generate_id(prefix="saf-"):
chars = "-abcdefghijklmnopqrstuvwxyz1234567890"
return prefix + "".join(choice(chars) for _ in range(25)) + choice(chars[1:])
# List projects using service
def _get_projects(service):
return [i['projectId'] for i in service.projects().list().execute()['projects']]
return [i["projectId"] for i in service.projects().list().execute()["projects"]]
# Default batch callback handler
def _def_batch_resp(id, resp, exception):
if exception is not None:
if str(exception).startswith('<HttpError 429'):
if str(exception).startswith("<HttpError 429"):
sleep(sleep_time / 100)
else:
print(exception)
@ -86,13 +89,13 @@ def _create_projects(cloud, count):
for _ in range(count):
new_proj = _generate_id()
new_projs.append(new_proj)
batch.add(cloud.projects().create(body={'project_id': new_proj}))
batch.add(cloud.projects().create(body={"project_id": new_proj}))
batch.execute()
for i in project_create_ops:
while True:
resp = cloud.operations().get(name=i).execute()
if 'done' in resp and resp['done']:
if "done" in resp and resp["done"]:
break
sleep(3)
return new_projs
@ -103,7 +106,7 @@ def _enable_services(service, projects, ste):
batch = service.new_batch_http_request(callback=_def_batch_resp)
for i in projects:
for j in ste:
batch.add(service.services().enable(name=f'projects/{i}/services/{j}'))
batch.add(service.services().enable(name=f"projects/{i}/services/{j}"))
batch.execute()
@ -112,10 +115,10 @@ def _list_sas(iam, project):
resp = (
iam.projects()
.serviceAccounts()
.list(name=f'projects/{project}', pageSize=100)
.list(name=f"projects/{project}", pageSize=100)
.execute()
)
return resp['accounts'] if 'accounts' in resp else []
return resp["accounts"] if "accounts" in resp else []
# Create Keys Batch Handler
@ -127,10 +130,12 @@ def _batch_keys_resp(id, resp, exception):
elif current_key_dump is None:
sleep(sleep_time / 100)
else:
current_key_dump.append((
resp['name'][resp['name'].rfind('/'):],
b64decode(resp['privateKeyData']).decode('utf-8')
))
current_key_dump.append(
(
resp["name"][resp["name"].rfind("/") :],
b64decode(resp["privateKeyData"]).decode("utf-8"),
)
)
# Create Keys
@ -138,7 +143,7 @@ def _create_sa_keys(iam, projects, path):
global current_key_dump
for i in projects:
current_key_dump = []
print(f'Downloading keys from {i}')
print(f"Downloading keys from {i}")
while current_key_dump is None or len(current_key_dump) != 100:
batch = iam.new_batch_http_request(callback=_batch_keys_resp)
total_sas = _list_sas(iam, i)
@ -150,18 +155,18 @@ def _create_sa_keys(iam, projects, path):
.create(
name=f"projects/{i}/serviceAccounts/{j['uniqueId']}",
body={
'privateKeyType': 'TYPE_GOOGLE_CREDENTIALS_FILE',
'keyAlgorithm': 'KEY_ALG_RSA_2048',
"privateKeyType": "TYPE_GOOGLE_CREDENTIALS_FILE",
"keyAlgorithm": "KEY_ALG_RSA_2048",
},
)
)
batch.execute()
if current_key_dump is None:
print(f'Redownloading keys from {i}')
print(f"Redownloading keys from {i}")
current_key_dump = []
else:
for index, j in enumerate(current_key_dump):
with open(f'{path}/{index}.json', 'w+') as f:
with open(f"{path}/{index}.json", "w+") as f:
f.write(j[1])
@ -170,59 +175,61 @@ def _delete_sas(iam, project):
sas = _list_sas(iam, project)
batch = iam.new_batch_http_request(callback=_def_batch_resp)
for i in sas:
batch.add(iam.projects().serviceAccounts().delete(name=i['name']))
batch.add(iam.projects().serviceAccounts().delete(name=i["name"]))
batch.execute()
def serviceaccountfactory(
credentials='credentials.json',
token='token_sa.pickle',
path=None,
list_projects=False,
list_sas=None,
create_projects=None,
max_projects=12,
enable_services=None,
services=['iam', 'drive'],
create_sas=None,
delete_sas=None,
download_keys=None
credentials="credentials.json",
token="token_sa.pickle",
path=None,
list_projects=False,
list_sas=None,
create_projects=None,
max_projects=12,
enable_services=None,
services=["iam", "drive"],
create_sas=None,
delete_sas=None,
download_keys=None,
):
selected_projects = []
proj_id = loads(open(credentials, 'r').read())['installed']['project_id']
proj_id = loads(open(credentials, "r").read())["installed"]["project_id"]
creds = None
if os.path.exists(token):
with open(token, 'rb') as t:
with open(token, "rb") as t:
creds = pickle.load(t)
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
credentials, SCOPES)
flow = InstalledAppFlow.from_client_secrets_file(credentials, SCOPES)
creds = flow.run_local_server(port=0, open_browser=False)
with open(token, 'wb') as t:
with open(token, "wb") as t:
pickle.dump(creds, t)
cloud = build('cloudresourcemanager', 'v1', credentials=creds)
iam = build('iam', 'v1', credentials=creds)
serviceusage = build('serviceusage', 'v1', credentials=creds)
cloud = build("cloudresourcemanager", "v1", credentials=creds)
iam = build("iam", "v1", credentials=creds)
serviceusage = build("serviceusage", "v1", credentials=creds)
projs = None
while projs is None:
try:
projs = _get_projects(cloud)
except HttpError as e:
if loads(e.content.decode('utf-8'))['error']['status'] == 'PERMISSION_DENIED':
if (
loads(e.content.decode("utf-8"))["error"]["status"]
== "PERMISSION_DENIED"
):
try:
serviceusage.services().enable(
name=f'projects/{proj_id}/services/cloudresourcemanager.googleapis.com'
name=f"projects/{proj_id}/services/cloudresourcemanager.googleapis.com"
).execute()
except HttpError as e:
print(e._get_reason())
input('Press Enter to retry.')
input("Press Enter to retry.")
if list_projects:
return _get_projects(cloud)
if list_sas:
@ -232,34 +239,38 @@ def serviceaccountfactory(
if create_projects > 0:
current_count = len(_get_projects(cloud))
if current_count + create_projects <= max_projects:
print('Creating %d projects' % (create_projects))
print("Creating %d projects" % (create_projects))
nprjs = _create_projects(cloud, create_projects)
selected_projects = nprjs
else:
sys.exit('No, you cannot create %d new project (s).\n'
'Please reduce value of --quick-setup.\n'
'Remember that you can totally create %d projects (%d already).\n'
'Please do not delete existing projects unless you know what you are doing' % (
create_projects, max_projects, current_count))
sys.exit(
"No, you cannot create %d new project (s).\n"
"Please reduce value of --quick-setup.\n"
"Remember that you can totally create %d projects (%d already).\n"
"Please do not delete existing projects unless you know what you are doing"
% (create_projects, max_projects, current_count)
)
else:
print('Will overwrite all service accounts in existing projects.\n'
'So make sure you have some projects already.')
print(
"Will overwrite all service accounts in existing projects.\n"
"So make sure you have some projects already."
)
input("Press Enter to continue...")
if enable_services:
ste = [enable_services]
if enable_services == '~':
if enable_services == "~":
ste = selected_projects
elif enable_services == '*':
elif enable_services == "*":
ste = _get_projects(cloud)
services = [f'{i}.googleapis.com' for i in services]
print('Enabling services')
services = [f"{i}.googleapis.com" for i in services]
print("Enabling services")
_enable_services(serviceusage, ste, services)
if create_sas:
stc = [create_sas]
if create_sas == '~':
if create_sas == "~":
stc = selected_projects
elif create_sas == '*':
elif create_sas == "*":
stc = _get_projects(cloud)
for i in stc:
_create_remaining_accounts(iam, i)
@ -270,81 +281,119 @@ def serviceaccountfactory(
if e.errno != errno.EEXIST:
raise
std = [download_keys]
if download_keys == '~':
if download_keys == "~":
std = selected_projects
elif download_keys == '*':
elif download_keys == "*":
std = _get_projects(cloud)
_create_sa_keys(iam, std, path)
if delete_sas:
std = []
std.append(delete_sas)
if delete_sas == '~':
if delete_sas == "~":
std = selected_projects
elif delete_sas == '*':
elif delete_sas == "*":
std = _get_projects(cloud)
for i in std:
print(f'Deleting service accounts in {i}')
print(f"Deleting service accounts in {i}")
_delete_sas(iam, i)
if __name__ == '__main__':
parse = ArgumentParser(
description='A tool to create Google service accounts.')
parse.add_argument('--path', '-p', default='accounts',
help='Specify an alternate directory to output the credential files.')
parse.add_argument('--token', default='token_sa.pickle',
help='Specify the pickle token file path.')
parse.add_argument('--credentials', default='credentials.json',
help='Specify the credentials file path.')
parse.add_argument('--list-projects', default=False, action='store_true',
help='List projects viewable by the user.')
parse.add_argument('--list-sas', default=False,
help='List service accounts in a project.')
parse.add_argument('--create-projects', type=int,
default=None, help='Creates up to N projects.')
parse.add_argument('--max-projects', type=int, default=12,
help='Max amount of project allowed. Default: 12')
parse.add_argument('--enable-services', default=None,
help='Enables services on the project. Default: IAM and Drive')
parse.add_argument('--services', nargs='+', default=['iam', 'drive'],
help='Specify a different set of services to enable. Overrides the default.')
parse.add_argument('--create-sas', default=None,
help='Create service accounts in a project.')
parse.add_argument('--delete-sas', default=None,
help='Delete service accounts in a project.')
parse.add_argument('--download-keys', default=None,
help='Download keys for all the service accounts in a project.')
parse.add_argument('--quick-setup', default=None, type=int,
help='Create projects, enable services, create service accounts and download keys. ')
parse.add_argument('--new-only', default=False,
action='store_true', help='Do not use exisiting projects.')
if __name__ == "__main__":
parse = ArgumentParser(description="A tool to create Google service accounts.")
parse.add_argument(
"--path",
"-p",
default="accounts",
help="Specify an alternate directory to output the credential files.",
)
parse.add_argument(
"--token", default="token_sa.pickle", help="Specify the pickle token file path."
)
parse.add_argument(
"--credentials",
default="credentials.json",
help="Specify the credentials file path.",
)
parse.add_argument(
"--list-projects",
default=False,
action="store_true",
help="List projects viewable by the user.",
)
parse.add_argument(
"--list-sas", default=False, help="List service accounts in a project."
)
parse.add_argument(
"--create-projects", type=int, default=None, help="Creates up to N projects."
)
parse.add_argument(
"--max-projects",
type=int,
default=12,
help="Max amount of project allowed. Default: 12",
)
parse.add_argument(
"--enable-services",
default=None,
help="Enables services on the project. Default: IAM and Drive",
)
parse.add_argument(
"--services",
nargs="+",
default=["iam", "drive"],
help="Specify a different set of services to enable. Overrides the default.",
)
parse.add_argument(
"--create-sas", default=None, help="Create service accounts in a project."
)
parse.add_argument(
"--delete-sas", default=None, help="Delete service accounts in a project."
)
parse.add_argument(
"--download-keys",
default=None,
help="Download keys for all the service accounts in a project.",
)
parse.add_argument(
"--quick-setup",
default=None,
type=int,
help="Create projects, enable services, create service accounts and download keys. ",
)
parse.add_argument(
"--new-only",
default=False,
action="store_true",
help="Do not use exisiting projects.",
)
args = parse.parse_args()
# If credentials file is invalid, search for one.
if not os.path.exists(args.credentials):
options = glob('*.json')
print('No credentials found at %s. Please enable the Drive API in:\n'
'https://developers.google.com/drive/api/v3/quickstart/python\n'
'and save the json file as credentials.json' % args.credentials)
options = glob("*.json")
print(
"No credentials found at %s. Please enable the Drive API in:\n"
"https://developers.google.com/drive/api/v3/quickstart/python\n"
"and save the json file as credentials.json" % args.credentials
)
if not options:
exit(-1)
else:
print('Select a credentials file below.')
inp_options = [str(i) for i in list(
range(1, len(options) + 1))] + options
print("Select a credentials file below.")
inp_options = [str(i) for i in list(range(1, len(options) + 1))] + options
for i in range(len(options)):
print(' %d) %s' % (i + 1, options[i]))
print(" %d) %s" % (i + 1, options[i]))
inp = None
while True:
inp = input('> ')
inp = input("> ")
if inp in inp_options:
break
args.credentials = inp if inp in options else options[int(inp) - 1]
print(
f'Use --credentials {args.credentials} next time to use this credentials file.'
f"Use --credentials {args.credentials} next time to use this credentials file."
)
if args.quick_setup:
opt = '~' if args.new_only else '*'
args.services = ['iam', 'drive']
opt = "~" if args.new_only else "*"
args.services = ["iam", "drive"]
args.create_projects = args.quick_setup
args.enable_services = opt
args.create_sas = opt
@ -361,21 +410,20 @@ if __name__ == '__main__':
delete_sas=args.delete_sas,
enable_services=args.enable_services,
services=args.services,
download_keys=args.download_keys
download_keys=args.download_keys,
)
if resp is not None:
if args.list_projects:
if resp:
print('Projects (%d):' % len(resp))
print("Projects (%d):" % len(resp))
for i in resp:
print(f' {i}')
print(f" {i}")
else:
print('No projects.')
print("No projects.")
elif args.list_sas:
if resp:
print('Service accounts in %s (%d):' %
(args.list_sas, len(resp)))
print("Service accounts in %s (%d):" % (args.list_sas, len(resp)))
for i in resp:
print(f" {i['email']} ({i['uniqueId']})")
else:
print('No service accounts.')
print("No service accounts.")

View File

@ -10,10 +10,10 @@ if os.path.exists(__G_DRIVE_TOKEN_FILE):
with open(__G_DRIVE_TOKEN_FILE, "rb") as f:
credentials = pickle.load(f)
if (
(credentials is None or not credentials.valid)
and credentials
and credentials.expired
and credentials.refresh_token
(credentials is None or not credentials.valid)
and credentials
and credentials.expired
and credentials.refresh_token
):
credentials.refresh(Request())
else:

View File

@ -53,9 +53,9 @@ if DATABASE_URL is not None:
if old_config is not None:
del old_config["_id"]
if (
old_config is not None
and old_config == dict(dotenv_values("config.env"))
or old_config is None
old_config is not None
and old_config == dict(dotenv_values("config.env"))
or old_config is None
) and config_dict is not None:
environ["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"]
environ["UPSTREAM_BRANCH"] = config_dict["UPSTREAM_BRANCH"]