Mirror of https://github.com/anasty17/mirror-leech-telegram-bot.git (synced 2025-01-08 12:07:33 +08:00)
Improve jdownloader
- Improve queue
- Handle invalid directory in JDownloader in case the file name is too long
- Other minor fixes

Signed-off-by: anasty17 <e.anastayyar@gmail.com>
Commit 010fb337a7 (parent 862831a68a)
@@ -264,7 +264,7 @@ async def main():
     if config_dict["DATABASE_URL"]:
         await database.db_load()
     await gather(
-        jdownloader.initiate(),
+        jdownloader.boot(),
         sync_to_async(clean_all),
         bot_settings.initiate_search_tools(),
         restart_notification(),
@@ -1053,6 +1053,9 @@ class TaskConfig:
             else:
                 res = ""
             name = sub(rf"{pattern}", res, name, flags=I if sen else 0)
+        if len(name.encode()) > 255:
+            LOGGER.error(f"Substitute: {name} is too long")
+            return dl_path
         new_path = ospath.join(up_dir, name)
         await move(dl_path, new_path)
         return new_path
@@ -1074,5 +1077,8 @@ class TaskConfig:
                 else:
                     res = ""
                 file_ = sub(rf"{pattern}", res, file_, flags=I if sen else 0)
+                if len(file_.encode()) > 255:
+                    LOGGER.error(f"Substitute: {file_} is too long")
+                    continue
                 await move(f_path, ospath.join(dirpath, file_))
         return dl_path
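Note: both hunks above add the same guard, logging and skipping the substitution when the resulting name would exceed 255 bytes, the usual filesystem limit for a single path component. A minimal standalone sketch of that byte-length check (the helper name is illustrative, not from the repo):

    def exceeds_name_limit(name: str, limit: int = 255) -> bool:
        # Filesystems cap a single path component at 255 *bytes*, so the check
        # must encode the string rather than rely on len(name).
        return len(name.encode()) > limit


    # A multi-byte UTF-8 name can pass len() but still fail the byte check.
    long_name = "α" * 200  # 200 characters, 400 bytes in UTF-8
    print(len(long_name), len(long_name.encode()), exceeds_name_limit(long_name))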
@@ -171,14 +171,6 @@ def update_user_ldata(id_, key, value):
     user_data[id_][key] = value


-async def retry_function(func, *args, **kwargs):
-    try:
-        return await func(*args, **kwargs)
-    except:
-        await sleep(0.2)
-        return await retry_function(func, *args, **kwargs)
-
-
 async def cmd_exec(cmd, shell=False):
     if shell:
         proc = await create_subprocess_shell(cmd, stdout=PIPE, stderr=PIPE)
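Note: the deleted helper retried forever with a fixed 0.2 s pause and recursed without bound when the device never came back, which is why the callers below now invoke the device methods directly. If a retry wrapper is still wanted somewhere, a bounded sketch like the following avoids that failure mode (not part of this commit; names are illustrative):

    from asyncio import sleep


    async def retry_with_limit(func, *args, attempts=5, delay=0.2, **kwargs):
        # Bounded replacement for the removed retry_function: give up after
        # `attempts` tries instead of recursing until the call succeeds.
        for attempt in range(1, attempts + 1):
            try:
                return await func(*args, **kwargs)
            except Exception:
                if attempt == attempts:
                    raise
                await sleep(delay)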
@@ -2,19 +2,11 @@ from aiofiles.os import path, makedirs, listdir, rename
 from aioshutil import rmtree
 from json import dump
 from random import randint
-from asyncio import sleep, wait_for
 from re import match

-from bot import config_dict, LOGGER, jd_lock, bot_name
-from .bot_utils import cmd_exec, new_task, retry_function
+from bot import config_dict, LOGGER, bot_name
+from .bot_utils import cmd_exec, new_task
 from myjd import MyJdApi
-from myjd.exception import (
-    MYJDException,
-    MYJDAuthFailedException,
-    MYJDEmailForbiddenException,
-    MYJDEmailInvalidException,
-    MYJDErrorEmailNotConfirmedException,
-)


 class JDownloader(MyJdApi):
@@ -23,23 +15,16 @@ class JDownloader(MyJdApi):
         self._username = ""
         self._password = ""
         self._device_name = ""
+        self.is_connected = False
         self.error = "JDownloader Credentials not provided!"
-        self.device = None
-        self.set_app_key("mltb")
-
-    @new_task
-    async def initiate(self):
-        self.device = None
-        async with jd_lock:
-            is_connected = await self.jdconnect()
-            if is_connected:
-                await self.boot()
-                await self.connectToDevice()

     @new_task
     async def boot(self):
         await cmd_exec(["pkill", "-9", "-f", "java"])
-        self.device = None
+        if not config_dict["JD_EMAIL"] or not config_dict["JD_PASS"]:
+            self.is_connected = False
+            self.error = "JDownloader Credentials not provided!"
+            return
         self.error = "Connecting... Try agin after couple of seconds"
         self._device_name = f"{randint(0, 1000)}@{bot_name}"
         if await path.exists("/JDownloader/logs"):
@@ -56,6 +41,20 @@ class JDownloader(MyJdApi):
             "devicename": f"{self._device_name}",
             "email": config_dict["JD_EMAIL"],
         }
+        remote_data = {
+            "localapiserverheaderaccesscontrollalloworigin": "",
+            "deprecatedapiport": 3128,
+            "localapiserverheaderxcontenttypeoptions": "nosniff",
+            "localapiserverheaderxframeoptions": "DENY",
+            "externinterfaceenabled": True,
+            "deprecatedapilocalhostonly": True,
+            "localapiserverheaderreferrerpolicy": "no-referrer",
+            "deprecatedapienabled": True,
+            "localapiserverheadercontentsecuritypolicy": "default-src 'self'",
+            "jdanywhereapienabled": True,
+            "externinterfacelocalhostonly": False,
+            "localapiserverheaderxxssprotection": "1; mode=block",
+        }
         await makedirs("/JDownloader/cfg", exist_ok=True)
         with open(
             "/JDownloader/cfg/org.jdownloader.api.myjdownloader.MyJDownloaderSettings.json",
@@ -63,6 +62,12 @@ class JDownloader(MyJdApi):
         ) as sf:
             sf.truncate(0)
             dump(jdata, sf)
+        with open(
+            "/JDownloader/cfg/org.jdownloader.api.RemoteAPIConfig.json",
+            "w",
+        ) as rf:
+            rf.truncate(0)
+            dump(remote_data, rf)
         if not await path.exists("/JDownloader/JDownloader.jar"):
             pattern = r"JDownloader\.jar\.backup.\d$"
             for filename in await listdir("/JDownloader"):
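Note: the new remote_data block enables JDownloader's deprecated local API on port 3128 and is written next to the MyJDownloader settings file before the client starts. A standalone sketch of the same overwrite-and-dump pattern, with a placeholder path and a shortened dict (the real keys are the ones in the hunk above):

    import json


    def write_settings(path: str, data: dict) -> None:
        # Opening for writing truncates any previous content before the dump,
        # which is the effect the diff achieves with truncate(0) + dump().
        with open(path, "w") as f:
            json.dump(data, f)


    write_settings(
        "/tmp/org.jdownloader.api.RemoteAPIConfig.json",
        {"deprecatedapienabled": True, "deprecatedapiport": 3128},
    )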
@@ -74,72 +79,11 @@ class JDownloader(MyJdApi):
                     await rmtree("/JDownloader/update")
                     await rmtree("/JDownloader/tmp")
         cmd = "java -Dsun.jnu.encoding=UTF-8 -Dfile.encoding=UTF-8 -Djava.awt.headless=true -jar /JDownloader/JDownloader.jar"
+        self.is_connected = True
         _, __, code = await cmd_exec(cmd, shell=True)
+        self.is_connected = False
         if code != -9:
             await self.boot()
-
-    async def jdconnect(self):
-        if not config_dict["JD_EMAIL"] or not config_dict["JD_PASS"]:
-            return False
-        try:
-            await self.connect(config_dict["JD_EMAIL"], config_dict["JD_PASS"])
-            LOGGER.info("MYJDownloader is connected!")
-            return True
-        except (
-            MYJDAuthFailedException,
-            MYJDEmailForbiddenException,
-            MYJDEmailInvalidException,
-            MYJDErrorEmailNotConfirmedException,
-        ) as err:
-            self.error = f"{err}".strip()
-            LOGGER.error(f"Failed to connect with jdownloader! ERROR: {self.error}")
-            self.device = None
-            return False
-        except MYJDException as e:
-            self.error = f"{e}".strip()
-            LOGGER.error(
-                f"Failed to connect with jdownloader! Retrying... ERROR: {self.error}"
-            )
-            return await self.jdconnect()
-
-    async def connectToDevice(self):
-        self.error = "Connecting to device..."
-        await sleep(0.5)
-        while True:
-            self.device = None
-            if not config_dict["JD_EMAIL"] or not config_dict["JD_PASS"]:
-                self.error = "JDownloader Credentials not provided!"
-                await cmd_exec(["pkill", "-9", "-f", "java"])
-                return False
-            try:
-                await self.update_devices()
-                if not (devices := self.list_devices()):
-                    continue
-                for device in devices:
-                    if self._device_name == device["name"]:
-                        self.device = self.get_device(f"{self._device_name}")
-                        break
-                else:
-                    continue
-            except:
-                continue
-            break
-        await self.device.enable_direct_connection()
-        self.error = ""
-        LOGGER.info("JDownloader Device have been Connected!")
-        return True
-
-    async def check_jdownloader_state(self):
-        try:
-            await wait_for(retry_function(self.device.jd.version), timeout=10)
-        except:
-            is_connected = await self.jdconnect()
-            if not is_connected:
-                raise MYJDException(self.error)
-            await self.boot()
-            isDeviceConnected = await self.connectToDevice()
-            if not isDeviceConnected:
-                raise MYJDException(self.error)


 jdownloader = JDownloader()
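Note: with jdconnect, connectToDevice and check_jdownloader_state gone, boot() now tracks availability through the plain is_connected flag: it is set just before the Java process is launched, cleared when the process exits, and boot() relaunches unless the process was killed deliberately (exit code -9 from pkill). A hedged sketch of that restart behaviour with the recursion flattened into a loop (names are illustrative):

    from asyncio import create_subprocess_shell
    from asyncio.subprocess import PIPE


    async def run_until_killed(cmd: str) -> None:
        # Relaunch the process whenever it dies on its own; exit code -9 means
        # it was killed explicitly (pkill -9), which is treated as a requested stop.
        while True:
            proc = await create_subprocess_shell(cmd, stdout=PIPE, stderr=PIPE)
            await proc.communicate()
            if proc.returncode == -9:
                break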
@@ -1,4 +1,4 @@
-from asyncio import Event, sleep
+from asyncio import Event

 from bot import (
     config_dict,
@@ -95,13 +95,13 @@ async def check_running_tasks(listener, state="dl"):
 async def start_dl_from_queued(mid: int):
     queued_dl[mid].set()
     del queued_dl[mid]
-    await sleep(0.7)
+    non_queued_dl.add(mid)


 async def start_up_from_queued(mid: int):
     queued_up[mid].set()
     del queued_up[mid]
-    await sleep(0.7)
+    non_queued_up.add(mid)


 async def start_from_queued():
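Note: the queue release now adds the task id to the running set directly instead of sleeping 0.7 s and relying on the woken coroutine to do it, so the scheduler never sees the slot as free twice. A minimal standalone sketch of that handoff pattern under the same assumption (names are illustrative):

    from asyncio import Event, create_task, run, sleep

    queued = {}      # task id -> Event the queued coroutine waits on
    running = set()  # ids currently counted against the concurrency limit


    async def worker(mid: int) -> None:
        event = queued.setdefault(mid, Event())
        await event.wait()  # parked until the queue releases this task
        print(f"task {mid} running, slots in use: {running}")


    def release(mid: int) -> None:
        # Mark the slot as used *before* the worker wakes up, so nothing that
        # inspects `running` in between can hand the slot to another task.
        queued.pop(mid).set()
        running.add(mid)


    async def main() -> None:
        task = create_task(worker(1))
        await sleep(0)  # let the worker park on its event
        release(1)
        await task


    run(main())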
@@ -1,7 +1,7 @@
 from asyncio import sleep

 from bot import intervals, jd_lock, jd_downloads
-from ..ext_utils.bot_utils import new_task, retry_function
+from ..ext_utils.bot_utils import new_task
 from ..ext_utils.jdownloader_booter import jdownloader
 from ..ext_utils.status_utils import get_task_by_gid

@@ -10,9 +10,8 @@ from ..ext_utils.status_utils import get_task_by_gid
 async def remove_download(gid):
     if intervals["stopAll"]:
         return
-    await retry_function(
-        jdownloader.device.downloads.remove_links,
-        package_ids=jd_downloads[gid]["ids"],
+    await jdownloader.device.downloads.remove_links(
+        package_ids=jd_downloads[gid]["ids"]
     )
     if task := await get_task_by_gid(gid):
         await task.listener.on_download_error("Download removed manually!")
@@ -25,8 +24,7 @@ async def _on_download_complete(gid):
     if task := await get_task_by_gid(gid):
         if task.listener.select:
             async with jd_lock:
-                await retry_function(
-                    jdownloader.device.downloads.cleanup,
+                await jdownloader.device.downloads.cleanup(
                     "DELETE_DISABLED",
                     "REMOVE_LINKS_AND_DELETE_FILES",
                     "SELECTED",
@@ -37,8 +35,7 @@ async def _on_download_complete(gid):
             return
     async with jd_lock:
         if gid in jd_downloads:
-            await retry_function(
-                jdownloader.device.downloads.remove_links,
+            await jdownloader.device.downloads.remove_links(
                 package_ids=jd_downloads[gid]["ids"],
             )
             del jd_downloads[gid]
@@ -52,10 +49,6 @@ async def _jd_listener():
             if len(jd_downloads) == 0:
                 intervals["jd"] = ""
                 break
-            try:
-                await jdownloader.check_jdownloader_state()
-            except:
-                continue
             try:
                 packages = await jdownloader.device.downloads.query_packages(
                     [{"finished": True, "saveTo": True}]
@@ -64,8 +57,6 @@ async def _jd_listener():
                 continue

             all_packages = {pack["uuid"]: pack for pack in packages}
-            if not all_packages:
-                continue
             for d_gid, d_dict in list(jd_downloads.items()):
                 if d_dict["status"] == "down":
                     for index, pid in enumerate(d_dict["ids"]):
@@ -228,8 +228,6 @@ class TaskListener(TaskConfig):
             await event.wait()
             if self.is_cancelled:
                 return
-            async with queue_dict_lock:
-                non_queued_up.add(self.mid)
             LOGGER.info(f"Start from Queued/Upload: {self.name}")

         self.size = await get_path_size(up_dir)
@@ -8,8 +8,6 @@ from bot import (
     config_dict,
     aria2_options,
     aria2c_global,
-    non_queued_dl,
-    queue_dict_lock,
 )
 from ...ext_utils.bot_utils import bt_selection_buttons, sync_to_async
 from ...ext_utils.task_manager import check_running_tasks
@@ -83,8 +81,6 @@ async def add_aria2c_download(listener, dpath, header, ratio, seed_time):
         await event.wait()
         if listener.is_cancelled:
             return
-        async with queue_dict_lock:
-            non_queued_dl.add(listener.mid)
         async with task_dict_lock:
             task = task_dict[listener.mid]
             task.queued = False
@@ -6,8 +6,6 @@ from bot import (
     aria2c_global,
     task_dict,
     task_dict_lock,
-    non_queued_dl,
-    queue_dict_lock,
 )
 from ...ext_utils.bot_utils import sync_to_async
 from ...ext_utils.task_manager import check_running_tasks, stop_duplicate_check
@@ -45,8 +43,6 @@ async def add_direct_download(listener, path):
         await event.wait()
         if listener.is_cancelled:
             return
-        async with queue_dict_lock:
-            non_queued_dl.add(listener.mid)

     a2c_opt = {**aria2_options}
     [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options]
@@ -1,6 +1,6 @@
 from secrets import token_urlsafe

-from bot import task_dict, task_dict_lock, LOGGER, non_queued_dl, queue_dict_lock
+from bot import task_dict, task_dict_lock, LOGGER
 from ...ext_utils.bot_utils import sync_to_async
 from ...ext_utils.task_manager import check_running_tasks, stop_duplicate_check
 from ...mirror_leech_utils.gdrive_utils.count import GoogleDriveCount
@@ -38,8 +38,6 @@ async def add_gd_download(listener, path):
         await event.wait()
         if listener.is_cancelled:
             return
-        async with queue_dict_lock:
-            non_queued_dl.add(listener.mid)

     drive = GoogleDriveDownload(listener, path)
     async with task_dict_lock:
@@ -13,12 +13,10 @@ from bot import (
     task_dict,
     task_dict_lock,
     LOGGER,
-    non_queued_dl,
-    queue_dict_lock,
     jd_lock,
     jd_downloads,
 )
-from ...ext_utils.bot_utils import retry_function, new_task
+from ...ext_utils.bot_utils import new_task
 from ...ext_utils.jdownloader_booter import jdownloader
 from ...ext_utils.task_manager import check_running_tasks, stop_duplicate_check
 from ...listeners.jdownloader_listener import on_download_start
@@ -86,14 +84,13 @@ class JDownloaderHelper:

 async def get_online_packages(path, state="grabbing"):
     if state == "grabbing":
-        queued_downloads = await retry_function(
-            jdownloader.device.linkgrabber.query_packages, [{"saveTo": True}]
+        queued_downloads = await jdownloader.device.linkgrabber.query_packages(
+            [{"saveTo": True}]
         )
         return [qd["uuid"] for qd in queued_downloads if qd["saveTo"].startswith(path)]
     else:
-        download_packages = await retry_function(
-            jdownloader.device.downloads.query_packages,
-            [{"saveTo": True}],
+        download_packages = await jdownloader.device.downloads.query_packages(
+            [{"saveTo": True}]
         )
         return [dl["uuid"] for dl in download_packages if dl["saveTo"].startswith(path)]

@@ -109,35 +106,35 @@ def trim_path(path):
     return "/".join(trimmed_components)


+async def get_jd_download_directory():
+    res = await jdownloader.device.config.get(
+        "org.jdownloader.settings.GeneralSettings", None, "DefaultDownloadFolder"
+    )
+    return f'/{res.strip("/")}/'
+
+
 async def add_jd_download(listener, path):
     try:
         async with jd_lock:
-            if jdownloader.device is None:
+            if not jdownloader.is_connected:
                 raise MYJDException(jdownloader.error)

-            await jdownloader.check_jdownloader_state()
+            default_path = await get_jd_download_directory()

             if not jd_downloads:
-                await retry_function(jdownloader.device.linkgrabber.clear_list)
-                if odl := await retry_function(
-                    jdownloader.device.downloads.query_packages, [{}]
-                ):
+                await jdownloader.device.linkgrabber.clear_list()
+                if odl := await jdownloader.device.downloads.query_packages([{}]):
                     odl_list = [od["uuid"] for od in odl]
-                    await retry_function(
-                        jdownloader.device.downloads.remove_links,
-                        package_ids=odl_list,
+                    await jdownloader.device.downloads.remove_links(
+                        package_ids=odl_list
                     )
-                elif odl := await retry_function(
-                    jdownloader.device.linkgrabber.query_packages, [{}]
-                ):
+                elif odl := await jdownloader.device.linkgrabber.query_packages([{}]):
                     if odl_list := [
                         od["uuid"]
                         for od in odl
-                        if od.get("saveTo", "").startswith("/root/Downloads/")
+                        if od.get("saveTo", "").startswith(default_path)
                     ]:
-                        await retry_function(
-                            jdownloader.device.linkgrabber.remove_links,
-                            package_ids=odl_list,
+                        await jdownloader.device.linkgrabber.remove_links(
+                            package_ids=odl_list
                         )

             gid = token_urlsafe(12)
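Note: instead of hard-coding /root/Downloads/, the code now asks the running JDownloader for its DefaultDownloadFolder (the device call is the one shown in the hunk above) and normalizes it to exactly one leading and one trailing slash. A standalone sketch of that normalization step only:

    def normalize_download_dir(raw: str) -> str:
        # "/root/Downloads", "root/Downloads/" and "/root/Downloads/" all become
        # "/root/Downloads/", so later startswith()/replace() checks stay consistent.
        return f'/{raw.strip("/")}/'


    assert normalize_download_dir("/root/Downloads") == "/root/Downloads/"
    assert normalize_download_dir("root/Downloads/") == "/root/Downloads/"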
@@ -147,14 +144,11 @@ async def add_jd_download(listener, path):
             async with aiopen(listener.link, "rb") as dlc:
                 content = await dlc.read()
             content = b64encode(content)
-            await retry_function(
-                jdownloader.device.linkgrabber.add_container,
-                "DLC",
-                f";base64,{content.decode()}",
+            await jdownloader.device.linkgrabber.add_container(
+                "DLC", f";base64,{content.decode()}"
             )
         else:
-            await retry_function(
-                jdownloader.device.linkgrabber.add_links,
+            await jdownloader.device.linkgrabber.add_links(
                 [
                     {
                         "autoExtract": False,
@@ -165,7 +159,7 @@ async def add_jd_download(listener, path):
             )

         await sleep(1)
-        while await retry_function(jdownloader.device.linkgrabber.is_collecting):
+        while await jdownloader.device.linkgrabber.is_collecting():
             pass
         start_time = time()
         online_packages = []
@@ -174,8 +168,7 @@ async def add_jd_download(listener, path):
         name = ""
         error = ""
         while (time() - start_time) < 60:
-            queued_downloads = await retry_function(
-                jdownloader.device.linkgrabber.query_packages,
+            queued_downloads = await jdownloader.device.linkgrabber.query_packages(
                 [
                     {
                         "bytesTotal": True,
@@ -189,8 +182,7 @@ async def add_jd_download(listener, path):
             )

             if not online_packages and corrupted_packages and error:
-                await retry_function(
-                    jdownloader.device.linkgrabber.remove_links,
+                await jdownloader.device.linkgrabber.remove_links(
                     package_ids=corrupted_packages,
                 )
                 raise MYJDException(error)
@@ -203,10 +195,8 @@ async def add_jd_download(listener, path):
                     continue
                 save_to = pack["saveTo"]
                 if not name:
-                    if save_to.startswith("/root/Downloads/"):
-                        name = save_to.replace("/root/Downloads/", "", 1).split(
-                            "/", 1
-                        )[0]
+                    if save_to.startswith(default_path):
+                        name = save_to.replace(default_path, "", 1).split("/", 1)[0]
                     else:
                         name = save_to.replace(f"{path}/", "", 1).split("/", 1)[0]
                     name = name[:255]
@@ -220,19 +210,17 @@ async def add_jd_download(listener, path):

                 listener.size += pack.get("bytesTotal", 0)
                 online_packages.append(pack["uuid"])
-                if save_to.startswith("/root/Downloads/"):
+                if save_to.startswith(default_path):
                     save_to = trim_path(save_to)
-                    await retry_function(
-                        jdownloader.device.linkgrabber.set_download_directory,
-                        save_to.replace("/root/Downloads", path, 1),
+                    await jdownloader.device.linkgrabber.set_download_directory(
+                        save_to.replace(default_path, f"{path}/", 1),
                         [pack["uuid"]],
                     )

             if online_packages:
                 if listener.join and len(online_packages) > 1:
                     listener.name = listener.folder_name
-                    await retry_function(
-                        jdownloader.device.linkgrabber.move_to_new_package,
+                    await jdownloader.device.linkgrabber.move_to_new_package(
                         listener.name,
                         f"{path}/{listener.name}",
                         package_ids=online_packages,
@@ -246,8 +234,7 @@ async def add_jd_download(listener, path):
                 )
             if corrupted_packages or online_packages:
                 packages_to_remove = corrupted_packages + online_packages
-                await retry_function(
-                    jdownloader.device.linkgrabber.remove_links,
+                await jdownloader.device.linkgrabber.remove_links(
                     package_ids=packages_to_remove,
                 )
                 raise MYJDException(error)
@@ -256,8 +243,7 @@ async def add_jd_download(listener, path):

         corrupted_links = []
         if remove_unknown:
-            links = await retry_function(
-                jdownloader.device.linkgrabber.query_links,
+            links = await jdownloader.device.linkgrabber.query_links(
                 [{"packageUUIDs": online_packages, "availability": True}],
             )
             corrupted_links = [
@@ -266,8 +252,7 @@ async def add_jd_download(listener, path):
                 if link["availability"].lower() != "online"
             ]
         if corrupted_packages or corrupted_links:
-            await retry_function(
-                jdownloader.device.linkgrabber.remove_links,
+            await jdownloader.device.linkgrabber.remove_links(
                 corrupted_links,
                 corrupted_packages,
             )
@@ -276,8 +261,8 @@ async def add_jd_download(listener, path):

         msg, button = await stop_duplicate_check(listener)
         if msg:
-            await retry_function(
-                jdownloader.device.linkgrabber.remove_links, package_ids=online_packages
+            await jdownloader.device.linkgrabber.remove_links(
+                package_ids=online_packages
             )
             await listener.on_download_error(msg, button)
             async with jd_lock:
@@ -286,9 +271,8 @@ async def add_jd_download(listener, path):

         if listener.select:
             if not await JDownloaderHelper(listener).wait_for_configurations():
-                await retry_function(
-                    jdownloader.device.linkgrabber.remove_links,
-                    package_ids=online_packages,
+                await jdownloader.device.linkgrabber.remove_links(
+                    package_ids=online_packages
                 )
                 await listener.remove_from_same_dir()
                 async with jd_lock:
@@ -297,7 +281,9 @@ async def add_jd_download(listener, path):
             else:
                 online_packages = await get_online_packages(path)
                 if not online_packages:
-                    raise MYJDException("This Download have been removed manually!")
+                    raise MYJDException(
+                        "Select: This Download have been removed manually!"
+                    )
                 async with jd_lock:
                     jd_downloads[gid]["ids"] = online_packages

@@ -312,45 +298,41 @@ async def add_jd_download(listener, path):
             await event.wait()
             if listener.is_cancelled:
                 return
-            async with queue_dict_lock:
-                non_queued_dl.add(listener.mid)

-        await jdownloader.check_jdownloader_state()
         online_packages = await get_online_packages(path)
         if not online_packages:
-            raise MYJDException("This Download have been removed manually!")
+            raise MYJDException("Queue: This Download have been removed manually!")
         async with jd_lock:
             jd_downloads[gid]["ids"] = online_packages

-        await retry_function(
-            jdownloader.device.linkgrabber.move_to_downloadlist,
-            package_ids=online_packages,
+        await jdownloader.device.linkgrabber.move_to_downloadlist(
+            package_ids=online_packages
         )

-        await sleep(1)
+        await sleep(0.5)

         online_packages = await get_online_packages(path, "down")
         if not online_packages:
             online_packages = await get_online_packages(path)
             if not online_packages:
-                raise MYJDException("This Download have been removed manually!")
-            await retry_function(
-                jdownloader.device.linkgrabber.move_to_downloadlist,
-                package_ids=online_packages,
+                raise MYJDException(
+                    "Linkgrabber: This Download have been removed manually!"
+                )
+            await jdownloader.device.linkgrabber.move_to_downloadlist(
+                package_ids=online_packages
             )
-            await sleep(1)
+            await sleep(0.5)
             online_packages = await get_online_packages(path, "down")
             if not online_packages:
-                raise MYJDException("This Download have been removed manually!")
+                raise MYJDException(
+                    "Download List: This Download have been removed manually!"
+                )

         async with jd_lock:
             jd_downloads[gid]["status"] = "down"
             jd_downloads[gid]["ids"] = online_packages

-        await retry_function(
-            jdownloader.device.downloads.force_download,
-            package_ids=online_packages,
-        )
+        await jdownloader.device.downloads.force_download(package_ids=online_packages)

         async with task_dict_lock:
             task_dict[listener.mid] = JDownloaderStatus(listener, gid)
@@ -371,3 +353,30 @@ async def add_jd_download(listener, path):
     finally:
         if await aiopath.exists(listener.link):
             await remove(listener.link)
+
+    await sleep(2)
+
+    links = await jdownloader.device.downloads.query_links(
+        [
+            {
+                "packageUUIDs": online_packages,
+                "status": True,
+            }
+        ],
+    )
+    links_to_remove = []
+    force_download = False
+    for dlink in links:
+        if dlink["status"] == "Invalid download directory":
+            force_download = True
+            new_name, ext = dlink["name"].rsplit(".", 1)
+            new_name = new_name[: 250 - len(f".{ext}".encode())]
+            new_name = f"{new_name}.{ext}"
+            await jdownloader.device.downloads.rename_link(dlink["uuid"], new_name)
+        elif dlink["status"] == "HLS stream broken?":
+            links_to_remove.append(dlink["uuid"])
+
+    if links_to_remove:
+        await jdownloader.device.downloads.remove_links(links_to_remove)
+    if force_download:
+        await jdownloader.device.downloads.force_download(package_ids=online_packages)
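Note: the block added after the finally clause is the invalid-directory handling named in the commit message: links whose status reports an invalid download directory are renamed so that name plus extension fits a roughly 250-byte budget, broken HLS links are dropped, and the package is force-started again. A standalone sketch of just the truncation rule (helper name is mine, not from the repo):

    def shorten_filename(name: str, max_bytes: int = 250) -> str:
        # Keep the extension and trim the stem so the whole name fits the budget;
        # slicing the str by a byte count is the same approximation the diff uses.
        stem, ext = name.rsplit(".", 1)
        stem = stem[: max_bytes - len(f".{ext}".encode())]
        return f"{stem}.{ext}"


    print(shorten_filename("x" * 300 + ".mkv"))  # 250-character result, ".mkv" preserved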
@@ -8,8 +8,6 @@ from bot import (
     sabnzbd_client,
     LOGGER,
     config_dict,
-    non_queued_dl,
-    queue_dict_lock,
 )
 from ...ext_utils.bot_utils import bt_selection_buttons
 from ...ext_utils.task_manager import check_running_tasks
@@ -153,8 +151,6 @@ async def add_nzb(listener, path):
         await event.wait()
         if listener.is_cancelled:
             return
-        async with queue_dict_lock:
-            non_queued_dl.add(listener.mid)
         async with task_dict_lock:
             task_dict[listener.mid].queued = False

@@ -8,8 +8,6 @@ from bot import (
     qbittorrent_client,
     LOGGER,
     config_dict,
-    non_queued_dl,
-    queue_dict_lock,
 )
 from ...ext_utils.bot_utils import bt_selection_buttons, sync_to_async
 from ...ext_utils.task_manager import check_running_tasks
@@ -65,6 +63,8 @@ async def add_qb_torrent(listener, path, ratio, seed_time):
         start_time = time()
         if len(tor_info) == 0:
             while (time() - start_time) <= 60:
+                if add_to_queue and event.is_set():
+                    add_to_queue = False
                 tor_info = await sync_to_async(
                     qbittorrent_client.torrents_info, tag=f"{listener.mid}"
                 )
@@ -130,21 +130,20 @@ async def add_qb_torrent(listener, path, ratio, seed_time):
         elif listener.multi <= 1:
             await send_status_message(listener.message)

-        if add_to_queue:
-            await event.wait()
+        if event is not None:
+            if not event.is_set():
+                await event.wait()
             if listener.is_cancelled:
                 return
-            async with queue_dict_lock:
-                non_queued_dl.add(listener.mid)
             async with task_dict_lock:
                 task_dict[listener.mid].queued = False

-            await sync_to_async(
-                qbittorrent_client.torrents_resume, torrent_hashes=ext_hash
-            )
             LOGGER.info(
                 f"Start Queued Download from Qbittorrent: {tor_info.name} - Hash: {ext_hash}"
             )
+            await sync_to_async(
+                qbittorrent_client.torrents_resume, torrent_hashes=ext_hash
+            )

     except Exception as e:
         await listener.on_download_error(f"{e}")
     finally:
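Note: queued qBittorrent tasks now keep their Event around (hence the `event is not None` check) and only await it when it has not already been set, then resume the torrent after logging. A small standalone sketch of that gate under the same assumption:

    from asyncio import Event, run


    async def wait_if_queued(event) -> None:
        # `None` means the task was never queued; an already-set event means the
        # queue released it while the torrent was still being added, so skip the wait.
        if event is not None:
            if not event.is_set():
                await event.wait()
            print("resuming torrent")


    async def main() -> None:
        await wait_if_queued(None)       # never queued
        released = Event()
        released.set()
        await wait_if_queued(released)   # already released, returns immediately


    run(main())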
@@ -3,7 +3,7 @@ from json import loads
 from secrets import token_urlsafe
 from aiofiles.os import remove

-from bot import task_dict, task_dict_lock, queue_dict_lock, non_queued_dl, LOGGER
+from bot import task_dict, task_dict_lock, LOGGER
 from ...ext_utils.bot_utils import cmd_exec
 from ...ext_utils.task_manager import check_running_tasks, stop_duplicate_check
 from ...mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper
@@ -115,8 +115,6 @@ async def add_rclone_download(listener, path):
         await event.wait()
         if listener.is_cancelled:
             return
-        async with queue_dict_lock:
-            non_queued_dl.add(listener.mid)

     RCTransfer = RcloneTransferHelper(listener)
     async with task_dict_lock:
@@ -6,8 +6,6 @@ from bot import (
     LOGGER,
     task_dict,
     task_dict_lock,
-    non_queued_dl,
-    queue_dict_lock,
     bot,
     user,
 )
@@ -151,8 +149,6 @@ class TelegramDownloadHelper:
             await event.wait()
             if self._listener.is_cancelled:
                 return
-            async with queue_dict_lock:
-                non_queued_dl.add(self._listener.mid)

         await self._on_download_start(gid, add_to_queue)
         await self._download(message, path)
@@ -4,7 +4,7 @@ from re import search as re_search
 from secrets import token_urlsafe
 from yt_dlp import YoutubeDL, DownloadError

-from bot import task_dict_lock, task_dict, non_queued_dl, queue_dict_lock
+from bot import task_dict_lock, task_dict
 from ...ext_utils.bot_utils import sync_to_async, async_to_sync
 from ...ext_utils.task_manager import check_running_tasks, stop_duplicate_check
 from ...mirror_leech_utils.status_utils.queue_status import QueueStatus
@@ -325,8 +325,6 @@ class YoutubeDLHelper:
             await event.wait()
             if self._listener.is_cancelled:
                 return
-            async with queue_dict_lock:
-                non_queued_dl.add(self._listener.mid)
             LOGGER.info(f"Start Queued Download from YT_DLP: {self._listener.name}")
             await self._on_download_start(True)

@@ -1,5 +1,5 @@
 from bot import LOGGER, jd_lock, jd_downloads
-from ...ext_utils.bot_utils import retry_function, async_to_sync
+from ...ext_utils.bot_utils import async_to_sync
 from ...ext_utils.jdownloader_booter import jdownloader
 from ...ext_utils.status_utils import (
     MirrorStatus,
@@ -106,10 +106,7 @@ class JDownloaderStatus:
     async def cancel_task(self):
         self.listener.is_cancelled = True
         LOGGER.info(f"Cancelling Download: {self.name()}")
-        await retry_function(
-            jdownloader.device.downloads.remove_links,
-            package_ids=jd_downloads[self._gid]["ids"],
-        )
+        await jdownloader.device.downloads.remove_links(package_ids=jd_downloads[self._gid]["ids"])
         async with jd_lock:
             del jd_downloads[self._gid]
         await self.listener.on_download_error("Download cancelled by user!")
@@ -6,7 +6,6 @@ from asyncio import (
     create_subprocess_shell,
     sleep,
     gather,
-    wait_for,
 )
 from dotenv import load_dotenv
 from functools import partial
@@ -35,7 +34,6 @@ from bot import (
     qbittorrent_client,
     sabnzbd_client,
     bot,
-    jd_downloads,
     nzb_options,
     get_nzb_options,
     get_qb_options,
@@ -44,7 +42,6 @@ from bot import (
 from ..helper.ext_utils.bot_utils import (
     SetInterval,
     sync_to_async,
-    retry_function,
     new_task,
 )
 from ..helper.ext_utils.db_handler import database
@@ -334,7 +331,7 @@ async def edit_variable(_, message, pre_message, key):
     ]:
         await rclone_serve_booter()
     elif key in ["JD_EMAIL", "JD_PASS"]:
-        await jdownloader.initiate()
+        await jdownloader.boot()
     elif key == "RSS_DELAY":
         add_job()
     elif key == "USET_SERVERS":
@@ -444,27 +441,9 @@ async def edit_nzb_server(_, message, pre_message, key, index=0):

 async def sync_jdownloader():
     async with jd_lock:
-        if not config_dict["DATABASE_URL"] or jdownloader.device is None:
-            return
-        try:
-            await wait_for(retry_function(jdownloader.update_devices), timeout=10)
-        except:
-            is_connected = await jdownloader.jdconnect()
-            if not is_connected:
-                LOGGER.error(jdownloader.error)
-                return
-            isDeviceConnected = await jdownloader.connectToDevice()
-            if not isDeviceConnected:
-                LOGGER.error(jdownloader.error)
+        if not config_dict["DATABASE_URL"] or not jdownloader.is_connected:
             return
         await jdownloader.device.system.exit_jd()
-        is_connected = await jdownloader.jdconnect()
-        if not is_connected:
-            LOGGER.error(jdownloader.error)
-            return
-        isDeviceConnected = await jdownloader.connectToDevice()
-        if not isDeviceConnected:
-            LOGGER.error(jdownloader.error)
     if await aiopath.exists("cfg.zip"):
         await remove("cfg.zip")
     await (
@@ -664,8 +643,6 @@ async def edit_bot_settings(client, query):
     elif data[2] == "INCOMPLETE_TASK_NOTIFIER" and config_dict["DATABASE_URL"]:
         await database.trunc_table("tasks")
     elif data[2] in ["JD_EMAIL", "JD_PASS"]:
-        jdownloader.device = None
-        jdownloader.error = "JDownloader Credentials not provided!"
         await create_subprocess_exec("pkill", "-9", "-f", "java")
     elif data[2] == "USENET_SERVERS":
         for s in config_dict["USENET_SERVERS"]:
@@ -38,7 +38,7 @@ RCLONE_SERVE_PASS = ""
 JD_EMAIL = ""
 JD_PASS = ""
 # Sabnzbd
-USENET_SERVERS = [{'name': 'main', 'host': '', 'port': 5126, 'timeout': 60, 'username': '', 'password': '', 'connections': 8, 'ssl': 1, 'ssl_verify': 2, 'ssl_ciphers': '', 'enable': 1, 'required': 0, 'optional': 0, 'retention': 0, 'send_group': 0, 'priority': 0}]
+USENET_SERVERS = "[{'name': 'main', 'host': '', 'port': 5126, 'timeout': 60, 'username': '', 'password': '', 'connections': 8, 'ssl': 1, 'ssl_verify': 2, 'ssl_ciphers': '', 'enable': 1, 'required': 0, 'optional': 0, 'retention': 0, 'send_group': 0, 'priority': 0}]"
# Update
 UPSTREAM_REPO = ""
 UPSTREAM_BRANCH = ""
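Note: config_sample now stores USENET_SERVERS as a quoted string rather than a Python list, presumably so it can be supplied the same way as the other string options; the bot then has to parse it back before use. A hedged illustration with ast.literal_eval and a shortened server dict (the actual parsing helper in the bot may differ):

    from ast import literal_eval

    USENET_SERVERS = "[{'name': 'main', 'host': '', 'port': 5126, 'enable': 1}]"

    # literal_eval safely turns the quoted literal back into Python objects.
    servers = literal_eval(USENET_SERVERS)
    print(servers[0]["port"])  # 5126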
myjd/myjdapi.py (453 changed lines)
@@ -1,34 +1,15 @@
-# -*- encoding: utf-8 -*-
-from Crypto.Cipher import AES
-from base64 import b64encode, b64decode
-from hashlib import sha256
-from hmac import new
 from json import dumps, loads, JSONDecodeError
 from httpx import AsyncClient, RequestError
 from httpx import AsyncHTTPTransport
-from time import time
-from urllib.parse import quote
 from functools import wraps

 from .exception import (
     MYJDApiException,
     MYJDConnectionException,
     MYJDDecodeException,
-    MYJDDeviceNotFoundException,
 )


-BS = 16
-
-
-def PAD(s):
-    return s + ((BS - len(s) % BS) * chr(BS - len(s) % BS)).encode()
-
-
-def UNPAD(s):
-    return s[: -s[-1]]
-
-
 class System:
     def __init__(self, device):
         self.device = device
@@ -254,7 +235,7 @@ class Linkgrabber:
         self.url = "/linkgrabberv2"

     async def clear_list(self):
-        return await self.device.action(f"{self.url}/clearList", http_action="POST")
+        return await self.device.action(f"{self.url}/clearList")

     async def move_to_downloadlist(self, link_ids=None, package_ids=None):
         """
@@ -679,9 +660,13 @@ class Downloads:
     async def move_to_new_package(
         self, link_ids, package_ids, new_pkg_name, download_path
     ):
-        params = link_ids, package_ids, new_pkg_name, download_path
+        params = [link_ids, package_ids, new_pkg_name, download_path]
         return await self.device.action(f"{self.url}/movetoNewPackage", params)

+    async def rename_link(self, link_id: list, new_name: str):
+        params = [link_id, new_name]
+        return await self.device.action(f"{self.url}/renameLink", params)
+

 class Captcha:

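Note: rename_link is the wrapper the new invalid-directory handling relies on; it posts [link_id, new_name] to the renameLink endpoint built from the Downloads class's own self.url. A hedged usage sketch against an already connected device object (the caller name is illustrative):

    # Sketch only: `device` is assumed to be a connected Jddevice instance from myjd.
    async def fix_link_name(device, link_uuid, new_name: str):
        # Mirrors the call added above: pass the raw link uuid and the new file name.
        return await device.downloads.rename_link(link_uuid, new_name)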
@@ -701,15 +686,12 @@ class Captcha:

 class Jddevice:

-    def __init__(self, jd, device_dict):
+    def __init__(self, jd):
         """This functions initializates the device instance.
         It uses the provided dictionary to create the device.

         :param device_dict: Device dictionary
         """
-        self.name = device_dict["name"]
-        self.device_id = device_dict["id"]
-        self.device_type = device_dict["type"]
         self.myjd = jd
         self.config = Config(self)
         self.linkgrabber = Linkgrabber(self)
@@ -719,105 +701,22 @@ class Jddevice:
         self.extensions = Extension(self)
         self.jd = Jd(self)
         self.system = System(self)
-        self.__direct_connection_info = None
-        self.__direct_connection_enabled = False
-        self.__direct_connection_cooldown = 0
-        self.__direct_connection_consecutive_failures = 0
-
-    async def __refresh_direct_connections(self):
-        response = await self.myjd.request_api(
-            "/device/getDirectConnectionInfos", "POST", None, self.__action_url()
-        )
-        if (
-            response is not None
-            and "data" in response
-            and "infos" in response["data"]
-            and len(response["data"]["infos"]) != 0
-        ):
-            self.__update_direct_connections(response["data"]["infos"])
-
-    def __update_direct_connections(self, direct_info):
-        """
-        Updates the direct_connections info keeping the order.
-        """
-        tmp = []
-        if self.__direct_connection_info is None:
-            tmp.extend({"conn": conn, "cooldown": 0} for conn in direct_info)
-            self.__direct_connection_info = tmp
-            return
-        # We remove old connections not available anymore.
-        for i in self.__direct_connection_info:
-            if i["conn"] not in direct_info:
-                tmp.remove(i)
-            else:
-                direct_info.remove(i["conn"])
-        # We add new connections
-        tmp.extend({"conn": conn, "cooldown": 0} for conn in direct_info)
-        self.__direct_connection_info = tmp

     async def ping(self):
         return await self.action("/device/ping")

-    async def enable_direct_connection(self):
-        self.__direct_connection_enabled = True
-        await self.__refresh_direct_connections()
-
-    def disable_direct_connection(self):
-        self.__direct_connection_enabled = False
-        self.__direct_connection_info = None
-
-    async def action(self, path, params=(), http_action="POST"):
-        action_url = self.__action_url()
-        if (
-            self.__direct_connection_enabled
-            and self.__direct_connection_info is not None
-            and time() >= self.__direct_connection_cooldown
-        ):
-            return await self.__direct_connect(path, http_action, params, action_url)
-        response = await self.myjd.request_api(path, http_action, params, action_url)
+    async def action(self, path, params=()):
+        response = await self.myjd.request_api(path, params)
         if response is None:
             raise (MYJDConnectionException("No connection established\n"))
-        if (
-            self.__direct_connection_enabled
-            and time() >= self.__direct_connection_cooldown
-        ):
-            await self.__refresh_direct_connections()
         return response["data"]

-    async def __direct_connect(self, path, http_action, params, action_url):
-        for conn in self.__direct_connection_info:
-            if time() > conn["cooldown"]:
-                connection = conn["conn"]
-                api = "http://" + connection["ip"] + ":" + str(connection["port"])
-                response = await self.myjd.request_api(
-                    path, http_action, params, action_url, api
-                )
-                if response is not None:
-                    self.__direct_connection_info.remove(conn)
-                    self.__direct_connection_info.insert(0, conn)
-                    self.__direct_connection_consecutive_failures = 0
-                    return response["data"]
-                else:
-                    conn["cooldown"] = time() + 60
-        self.__direct_connection_consecutive_failures += 1
-        self.__direct_connection_cooldown = time() + (
-            60 * self.__direct_connection_consecutive_failures
-        )
-        response = await self.myjd.request_api(path, http_action, params, action_url)
-        if response is None:
-            raise (MYJDConnectionException("No connection established\n"))
-        await self.__refresh_direct_connections()
-        return response["data"]
-
-    def __action_url(self):
-        return f"/t_{self.myjd.get_session_token()}_{self.device_id}"
-

 class clientSession(AsyncClient):

     @wraps(AsyncClient.request)
     async def request(self, method: str, url: str, **kwargs):
-        kwargs.setdefault("timeout", 1.5)
+        kwargs.setdefault("timeout", 3)
         kwargs.setdefault("follow_redirects", True)
         return await super().request(method, url, **kwargs)

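Note: with the MyJDownloader transport removed, Jddevice.action is reduced to a single request against the local deprecated API that boot() enables on 127.0.0.1:3128; there is no action URL, request signing, or direct-connection bookkeeping left. A hedged sketch of what a call against such a local endpoint looks like at the HTTP level (endpoint path and payload shape are illustrative, not taken from the library):

    from httpx import AsyncClient


    async def local_jd_call(path: str, params=None):
        # Illustrative only: the local deprecated API accepts plain JSON, without
        # the encryption layer the removed code implemented for api.jdownloader.org.
        async with AsyncClient(base_url="http://127.0.0.1:3128", timeout=3) as client:
            response = await client.post(path, json={"params": params or []})
            response.raise_for_status()
            return response.json()


    # Example (requires a local JDownloader with the deprecated API enabled):
    # asyncio.run(local_jd_call("/device/ping"))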
@@ -825,19 +724,9 @@ class clientSession(AsyncClient):
 class MyJdApi:

     def __init__(self):
-        self.__request_id = int(time() * 1000)
-        self.__api_url = "https://api.jdownloader.org"
-        self.__app_key = "mltb"
-        self.__api_version = 1
-        self.__devices = None
-        self.__login_secret = None
-        self.__device_secret = None
-        self.__session_token = None
-        self.__regain_token = None
-        self.__server_encryption_token = None
-        self.__device_encryption_token = None
-        self.__connected = False
+        self.__api_url = "http://127.0.0.1:3128"
         self._http_session = None
+        self.device = Jddevice(self)

     def _session(self):
         if self._http_session is not None:
@@ -851,305 +740,37 @@ class MyJdApi:

         return self._http_session

-    def get_session_token(self):
-        return self.__session_token
-
-    def is_connected(self):
-        """
-        Indicates if there is a connection established.
-        """
-        return self.__connected
-
-    def set_app_key(self, app_key):
-        """
-        Sets the APP Key.
-        """
-        self.__app_key = app_key
-
-    def __secret_create(self, email, password, domain):
-        """
-        Calculates the login_secret and device_secret
-
-        :param email: My.Jdownloader User email
-        :param password: My.Jdownloader User password
-        :param domain: The domain , if is for Server (login_secret) or Device (device_secret)
-        :return: secret hash
-
-        """
-        secret_hash = sha256()
-        secret_hash.update(
-            email.lower().encode("utf-8")
-            + password.encode("utf-8")
-            + domain.lower().encode("utf-8")
-        )
-        return secret_hash.digest()
-
-    def __update_encryption_tokens(self):
-        """
-        Updates the server_encryption_token and device_encryption_token
-
-        """
-        if self.__server_encryption_token is None:
-            old_token = self.__login_secret
-        else:
-            old_token = self.__server_encryption_token
-        new_token = sha256()
-        new_token.update(old_token + bytearray.fromhex(self.__session_token))
-        self.__server_encryption_token = new_token.digest()
-        new_token = sha256()
-        new_token.update(self.__device_secret + bytearray.fromhex(self.__session_token))
-        self.__device_encryption_token = new_token.digest()
-
-    def __signature_create(self, key, data):
-        """
-        Calculates the signature for the data given a key.
-
-        :param key:
-        :param data:
-        """
-        signature = new(key, data.encode("utf-8"), sha256)
-        return signature.hexdigest()
-
-    def __decrypt(self, secret_token, data):
-        """
-        Decrypts the data from the server using the provided token
-
-        :param secret_token:
-        :param data:
-        """
-        init_vector = secret_token[: len(secret_token) // 2]
-        key = secret_token[len(secret_token) // 2 :]
-        decryptor = AES.new(key, AES.MODE_CBC, init_vector)
-        return UNPAD(decryptor.decrypt(b64decode(data)))
-
-    def __encrypt(self, secret_token, data):
-        """
-        Encrypts the data from the server using the provided token
-
-        :param secret_token:
-        :param data:
-        """
-        data = PAD(data.encode("utf-8"))
-        init_vector = secret_token[: len(secret_token) // 2]
-        key = secret_token[len(secret_token) // 2 :]
-        encryptor = AES.new(key, AES.MODE_CBC, init_vector)
-        encrypted_data = b64encode(encryptor.encrypt(data))
-        return encrypted_data.decode("utf-8")
-
-    def update_request_id(self):
-        """
-        Updates Request_Id
-        """
-        self.__request_id = int(time())
-
-    async def connect(self, email, password):
-        """Establish connection to api
-
-        :param email: My.Jdownloader User email
-        :param password: My.Jdownloader User password
-        :returns: boolean -- True if succesful, False if there was any error.
-
-        """
-        self.__clean_resources()
-        self.__login_secret = self.__secret_create(email, password, "server")
-        self.__device_secret = self.__secret_create(email, password, "device")
-        response = await self.request_api(
-            "/my/connect", "GET", [("email", email), ("appkey", self.__app_key)]
-        )
-        self.__connected = True
-        self.update_request_id()
-        self.__session_token = response["sessiontoken"]
-        self.__regain_token = response["regaintoken"]
-        self.__update_encryption_tokens()
-        return response
-
-    async def reconnect(self):
-        """
-        Reestablish connection to API.
-
-        :returns: boolean -- True if successful, False if there was any error.
-
-        """
-        response = await self.request_api(
-            "/my/reconnect",
-            "GET",
-            [
-                ("sessiontoken", self.__session_token),
-                ("regaintoken", self.__regain_token),
-            ],
-        )
-        self.update_request_id()
-        self.__session_token = response["sessiontoken"]
-        self.__regain_token = response["regaintoken"]
-        self.__update_encryption_tokens()
-        return response
-
-    async def disconnect(self):
-        """
-        Disconnects from API
-
-        :returns: boolean -- True if successful, False if there was any error.
-
-        """
-        response = await self.request_api(
-            "/my/disconnect", "GET", [("sessiontoken", self.__session_token)]
-        )
-        self.__clean_resources()
-        if self._http_session is not None:
-            self._http_session = None
-            await self._http_session.aclose()
-        return response
-
-    def __clean_resources(self):
-        self.update_request_id()
-        self.__login_secret = None
-        self.__device_secret = None
-        self.__session_token = None
-        self.__regain_token = None
-        self.__server_encryption_token = None
-        self.__device_encryption_token = None
-        self.__devices = None
-        self.__connected = False
-
-    async def update_devices(self):
-        """
-        Updates available devices. Use list_devices() to get the devices list.
-
-        :returns: boolean -- True if successful, False if there was any error.
-        """
-        response = await self.request_api(
-            "/my/listdevices", "GET", [("sessiontoken", self.__session_token)]
-        )
-        self.update_request_id()
-        self.__devices = response["list"]
-
-    def list_devices(self):
-        """
-        Returns available devices. Use getDevices() to update the devices list.
-        Each device in the list is a dictionary like this example:
-
-        {
-            'name': 'Device',
-            'id': 'af9d03a21ddb917492dc1af8a6427f11',
-            'type': 'jd'
-        }
-
-        :returns: list -- list of devices.
-        """
-        return self.__devices
-
-    def get_device(self, device_name=None, device_id=None):
-        """
-        Returns a jddevice instance of the device
-
-        :param deviceid:
-        """
-        if not self.is_connected():
-            raise (MYJDConnectionException("No connection established\n"))
-        if device_id is not None:
-            for device in self.__devices:
-                if device["id"] == device_id:
-                    return Jddevice(self, device)
-        elif device_name is not None:
-            for device in self.__devices:
-                if device["name"] == device_name:
-                    return Jddevice(self, device)
-        raise (MYJDDeviceNotFoundException("Device not found\n"))
-
-    async def request_api(
-        self, path, http_method="GET", params=None, action=None, api=None
-    ):
-        """
-        Makes a request to the API to the 'path' using the 'http_method' with parameters,'params'.
-        Ex:
-        http_method=GET
-        params={"test":"test"}
-        post_params={"test2":"test2"}
-        action=True
-        This would make a request to "https://api.jdownloader.org"
-        """
+    async def request_api(self, path, params=None):
         session = self._session()
-        if not api:
-            api = self.__api_url
-        data = None
-        if not self.is_connected() and path != "/my/connect":
-            raise (MYJDConnectionException("No connection established\n"))
-        if http_method == "GET":
-            query = [f"{path}?"]
-            if params is not None:
-                for param in params:
-                    if param[0] != "encryptedLoginSecret":
-                        query += [f"{param[0]}={quote(param[1])}"]
-                    else:
-                        query += [f"&{param[0]}={param[1]}"]
-            query += [f"rid={str(self.__request_id)}"]
-            if self.__server_encryption_token is None:
-                query += [
-                    "signature="
-                    + str(
-                        self.__signature_create(
-                            self.__login_secret, query[0] + "&".join(query[1:])
-                        )
-                    )
-                ]
-            else:
-                query += [
-                    "signature="
-                    + str(
-                        self.__signature_create(
-                            self.__server_encryption_token,
-                            query[0] + "&".join(query[1:]),
-                        )
-                    )
-                ]
-            query = query[0] + "&".join(query[1:])
-            res = await session.request(http_method, api + query)
-            encrypted_response = res.text
-        else:
-            params_request = []
-            if params is not None:
-                for param in params:
-                    if isinstance(param, (str, list)):
-                        params_request += [param]
-                    elif isinstance(param, (dict, bool)):
-                        params_request += [dumps(param)]
-                    else:
-                        params_request += [str(param)]
+        # Prepare params_request based on the input params
+        params_request = params if params is not None else []
+        # Construct the request payload
         params_request = {
-            "apiVer": self.__api_version,
-            "url": path,
             "params": params_request,
-            "rid": self.__request_id,
         }
         data = dumps(params_request)
         # Removing quotes around null elements.
         data = data.replace('"null"', "null")
         data = data.replace("'null'", "null")
-        encrypted_data = self.__encrypt(self.__device_encryption_token, data)
-        request_url = api + action + path if action is not None else api + path
+        request_url = self.__api_url + path
         try:
             res = await session.request(
-                http_method,
+                "POST",
                 request_url,
-                headers={"Content-Type": "application/aesjson-jd; charset=utf-8"},
-                content=encrypted_data,
+                headers={"Content-Type": "application/json; charset=utf-8"},
+                content=data,
             )
-            encrypted_response = res.text
+            response = res.text
         except RequestError:
             return None
         if res.status_code != 200:
             try:
-                error_msg = loads(encrypted_response)
-            except JSONDecodeError:
-                try:
-                    error_msg = loads(
-                        self.__decrypt(
-                            self.__device_encryption_token, encrypted_response
-                        )
-                    )
+                error_msg = loads(response)
             except JSONDecodeError as exc:
                 raise MYJDDecodeException(
-                    "Failed to decode response: {}", encrypted_response
+                    "Failed to decode response: {}", response
                 ) from exc
             msg = (
                 "\n\tSOURCE: "
@@ -1157,31 +778,13 @@ class MyJdApi:
                 + "\n\tTYPE: "
                 + error_msg["type"]
                 + "\n------\nREQUEST_URL: "
-                + api
-                + (path if http_method != "GET" else "")
+                + self.__api_url
+                + path
             )
-            if http_method == "GET":
-                msg += query
             msg += "\n"
             if data is not None:
                 msg += "DATA:\n" + data
             raise (
                 MYJDApiException.get_exception(error_msg["src"], error_msg["type"], msg)
             )
-        if action is None:
-            if not self.__server_encryption_token:
-                response = self.__decrypt(self.__login_secret, encrypted_response)
-            else:
-                response = self.__decrypt(
-                    self.__server_encryption_token, encrypted_response
-                )
-        else:
-            response = self.__decrypt(
-                self.__device_encryption_token, encrypted_response
-            )
-        jsondata = loads(response.decode("utf-8"))
-        if jsondata["rid"] != self.__request_id:
-            self.update_request_id()
-            return None
-        self.update_request_id()
-        return jsondata
+        return loads(response)

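Taken together, the two hunks above reduce request_api to a plain JSON POST against the local endpoint: no AES encrypt/decrypt, no request signature, no rid bookkeeping, and API errors are parsed straight from the response body. That is also why pycryptodome can be dropped from the requirements in the next hunk. A standalone, hedged illustration of the resulting wire format using httpx directly; the concrete path is an example, and the payload shape is simply the one shown in the diff, not a documented contract:

from json import dumps, loads

from httpx import AsyncClient


async def call_local_jd(path, params=None):
    # Plain JSON payload, mirroring the simplified request_api above.
    payload = dumps({"params": params if params is not None else []})
    async with AsyncClient(timeout=3, follow_redirects=True) as client:
        res = await client.post(
            "http://127.0.0.1:3128" + path,
            headers={"Content-Type": "application/json; charset=utf-8"},
            content=payload,
        )
    if res.status_code != 200:
        # The real module maps this onto MYJDApiException subclasses.
        raise RuntimeError(f"JDownloader API error: {res.text}")
    return loads(res.text)
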
@@ -19,7 +19,6 @@ motor
 natsort
 pillow
 psutil
-pycryptodome
 pymongo
 pyrofork
 python-dotenv