Some minor fixes

Signed-off-by: anasty17 <e.anastayyar@gmail.com>
anasty17 2024-01-04 01:38:13 +02:00
parent ee60a514ed
commit d3f59314db
14 changed files with 165 additions and 431 deletions

View File

@@ -618,10 +618,9 @@ class TaskConfig:
if not checked:
checked = True
LOGGER.info(f"Creating Sample video: {self.name}")
res = await createSampleVideo(
return await createSampleVideo(
self, dl_path, sample_duration, part_duration, True
)
return res
else:
for dirpath, _, files in await sync_to_async(
walk, dl_path, topdown=False

View File

@@ -100,14 +100,12 @@ class JDownloader(Myjdapi):
return
try:
self.update_devices()
devices = self.list_devices()
if devices:
for device in devices:
if self._device_name == device["name"]:
self.device = self.get_device(f"{self._device_name}")
break
else:
continue
if not (devices := self.list_devices()):
continue
for device in devices:
if self._device_name == device["name"]:
self.device = self.get_device(f"{self._device_name}")
break
else:
continue
except:

View File

@@ -146,8 +146,7 @@ async def get_document_type(path):
async def take_ss(video_file, ss_nb) -> list:
if ss_nb > 10:
ss_nb = 10
ss_nb = min(ss_nb, 10)
duration = (await get_media_info(video_file))[0]
if duration != 0:
dirpath, name = video_file.rsplit("/", 1)
@@ -432,13 +431,7 @@ async def createSampleVideo(
code = listener.suproc.returncode
if code == -9:
return False
elif code != 0:
stderr = stderr.decode().strip()
LOGGER.error(
f"{stderr}. Something went wrong while creating sample video, mostly file is corrupted. Path: {video_file}"
)
return video_file
else:
elif code == 0:
if oneFile:
newDir, _ = ospath.splitext(video_file)
await makedirs(newDir, exist_ok=True)
@ -448,3 +441,9 @@ async def createSampleVideo(
)
return newDir
return True
else:
stderr = stderr.decode().strip()
LOGGER.error(
f"{stderr}. Something went wrong while creating sample video, mostly file is corrupted. Path: {video_file}"
)
return video_file

View File

@@ -129,10 +129,11 @@ class TaskListener(TaskConfig):
up_path = f"{self.dir}/{self.name}"
size = await get_path_size(up_path)
async with queue_dict_lock:
if self.mid in non_queued_dl:
non_queued_dl.remove(self.mid)
await start_from_queued()
if not config_dict["QUEUE_ALL"]:
async with queue_dict_lock:
if self.mid in non_queued_dl:
non_queued_dl.remove(self.mid)
await start_from_queued()
if self.join and await aiopath.isdir(up_path):
await join_files(up_path)
@@ -171,6 +172,8 @@ class TaskListener(TaskConfig):
all_limit = config_dict["QUEUE_ALL"]
add_to_queue = False
async with queue_dict_lock:
if self.mid in non_queued_dl:
non_queued_dl.remove(self.mid)
dl = len(non_queued_dl)
up = len(non_queued_up)
if (

View File

@@ -204,7 +204,7 @@ def mediafire(url, session=None):
html = HTML(session.get(url).text)
except Exception as e:
session.close()
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if error := html.xpath('//p[@class="notranslate"]/text()'):
session.close()
raise DirectDownloadLinkException(f"ERROR: {error[0]}")
@@ -224,7 +224,7 @@ def osdn(url):
try:
html = HTML(session.get(url).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if not (direct_link := html.xapth('//a[@class="mirror_link"]/@href')):
raise DirectDownloadLinkException("ERROR: Direct link not found")
return f"https://osdn.net{direct_link[0]}"
@@ -234,8 +234,8 @@ def github(url):
"""GitHub direct links generator"""
try:
findall(r"\bhttps?://.*github\.com.*releases\S+", url)[0]
except IndexError:
raise DirectDownloadLinkException("No GitHub Releases links found")
except IndexError as e:
raise DirectDownloadLinkException("No GitHub Releases links found") from e
with create_scraper() as session:
_res = session.get(url, stream=True, allow_redirects=False)
if "location" in _res.headers:
@@ -251,7 +251,7 @@ def hxfile(url):
session.post(url, data={"op": "download2", "id": file_code}).text
)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if direct_link := html.xpath('//a[@class="btn btn-dow"]/@href'):
return direct_link[0]
raise DirectDownloadLinkException("ERROR: Direct download link not found")
@@ -287,7 +287,7 @@ def onedrive(link):
data=data,
).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if "@content.downloadUrl" not in resp:
raise DirectDownloadLinkException("ERROR: Direct link not found")
return resp["@content.downloadUrl"]
@@ -307,7 +307,7 @@ def pixeldrain(url):
try:
resp = session.get(info_link).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if resp["success"]:
return dl_link
else:
@@ -323,7 +323,7 @@ def streamtape(url):
with Session() as session:
html = HTML(session.get(url).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if not (script := html.xpath("//script[contains(text(),'ideoooolink')]/text()")):
raise DirectDownloadLinkException("ERROR: requeries script not found")
if not (link := findall(r"(&expires\S+)'", script[0])):
@@ -338,7 +338,7 @@ def racaty(url):
json_data = {"op": "download2", "id": url.split("/")[-1]}
html = HTML(session.post(url, data=json_data).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if direct_link := html.xpath("//a[@id='uniqueExpirylink']/@href"):
return direct_link[0]
else:
@@ -367,7 +367,7 @@ def fichier(link):
pw = {"pass": pswd}
req = cget("post", url, data=pw)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if req.status_code == 404:
raise DirectDownloadLinkException(
"ERROR: File not found/The link you entered is wrong!"
@@ -434,7 +434,7 @@ def solidfiles(url):
)
return loads(mainOptions)["downloadUrl"]
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
def krakenfiles(url):
@@ -442,7 +442,7 @@ def krakenfiles(url):
try:
_res = session.get(url)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
html = HTML(_res.text)
if post_url := html.xpath('//form[@id="dl-form"]/@action'):
post_url = f"https:{post_url[0]}"
@@ -457,7 +457,7 @@ def krakenfiles(url):
except Exception as e:
raise DirectDownloadLinkException(
f"ERROR: {e.__class__.__name__} While send post request"
)
) from e
if _json["status"] != "ok":
raise DirectDownloadLinkException(
"ERROR: Unable to find download after post request"
@@ -470,7 +470,7 @@ def uploadee(url):
try:
html = HTML(session.get(url).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if link := html.xpath("//a[@id='d_l']/@href"):
return link[0]
else:
@@ -589,7 +589,7 @@ def filepress(url):
json=json_data2,
).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if "data" not in res:
raise DirectDownloadLinkException(f'ERROR: {res["statusText"]}')
return f'https://drive.google.com/uc?id={res["data"]}&export=download'
@@ -600,7 +600,7 @@ def gdtot(url):
try:
res = cget("GET", f'https://gdtot.pro/file/{url.split("/")[-1]}')
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
token_url = HTML(res.text).xpath(
"//a[contains(@class,'inline-flex items-center justify-center')]/@href"
)
@@ -612,7 +612,7 @@ def gdtot(url):
"GET", f"{p_url.scheme}://{p_url.hostname}/ddl/{url.split('/')[-1]}"
)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if (
drive_link := findall(r"myDl\('(.*?)'\)", res.text)
) and "drive.google.com" in drive_link[0]:
@@ -627,7 +627,7 @@ def gdtot(url):
except Exception as e:
raise DirectDownloadLinkException(
f"ERROR: {e.__class__.__name__} with {token_url}"
)
) from e
path = findall('\("(.*?)"\)', token_page.text)
if not path:
raise DirectDownloadLinkException("ERROR: Cannot bypass this")
@@ -647,7 +647,7 @@ def sharer_scraper(url):
}
res = cget("GET", url, headers=header)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
key = findall('"key",\s+"(.*?)"', res.text)
if not key:
raise DirectDownloadLinkException("ERROR: Key not found!")
@@ -672,7 +672,7 @@ def sharer_scraper(url):
try:
res = cget("POST", url, cookies=res.cookies, headers=headers, data=data).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if "url" not in res:
raise DirectDownloadLinkException(
"ERROR: Drive Link not found, Try in your broswer"
@@ -682,7 +682,7 @@ def sharer_scraper(url):
try:
res = cget("GET", res["url"])
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if (
drive_link := HTML(res.text).xpath("//a[contains(@class,'btn')]/@href")
) and "drive.google.com" in drive_link[0]:
@@ -704,7 +704,7 @@ def wetransfer(url):
json=json_data,
).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if "direct_link" in res:
return res["direct_link"]
elif "message" in res:
@@ -724,7 +724,7 @@ def akmfiles(url):
).text
)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if direct_link := html.xpath("//a[contains(@class,'btn btn-dow')]/@href"):
return direct_link[0]
else:
@@ -738,7 +738,7 @@ def shrdsk(url):
f'https://us-central1-affiliate2apk.cloudfunctions.net/get_data?shortid={url.split("/")[-1]}'
).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if "download_data" not in _json:
raise DirectDownloadLinkException("ERROR: Download data not found")
try:
@@ -749,7 +749,7 @@ def shrdsk(url):
if "Location" in _res.headers:
return _res.headers["Location"]
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
raise DirectDownloadLinkException("ERROR: cannot find direct link in headers")
@@ -768,7 +768,7 @@ def linkBox(url: str):
"https://www.linkbox.to/api/file/detail", params={"itemId": itemId}
).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
data = _json["data"]
if not data:
if "msg" in _json:
@@ -806,7 +806,7 @@ def linkBox(url: str):
"https://www.linkbox.to/api/file/share_out_list", params=params
).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
data = _json["data"]
if not data:
if "msg" in _json:
@@ -1103,7 +1103,9 @@ def send_cm_file(url, file_id=None):
try:
html = HTML(session.get(url).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(
f"ERROR: {e.__class__.__name__}"
) from e
if html.xpath("//input[@name='password']"):
_passwordNeed = True
if not (file_id := html.xpath("//input[@name='id']/@value")):
@@ -1116,7 +1118,7 @@ def send_cm_file(url, file_id=None):
if "Location" in _res.headers:
return (_res.headers["Location"], "Referer: https://send.cm/")
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if _passwordNeed:
raise DirectDownloadLinkException(
f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}"
@@ -1235,7 +1237,7 @@ def doods(url):
except Exception as e:
raise DirectDownloadLinkException(
f"ERROR: {e.__class__.__name__} While fetching token link"
)
) from e
if not (link := html.xpath("//div[@class='download-content']//a/@href")):
raise DirectDownloadLinkException(
"ERROR: Token Link not found or maybe not allow to download! open in browser."
@@ -1247,7 +1249,7 @@ def doods(url):
except Exception as e:
raise DirectDownloadLinkException(
f"ERROR: {e.__class__.__name__} While fetching download link"
)
) from e
if not (link := search(r"window\.open\('(\S+)'", _res.text)):
raise DirectDownloadLinkException("ERROR: Download link not found try again")
return (link.group(1), f"Referer: {parsed_url.scheme}://{parsed_url.hostname}/")
@@ -1304,7 +1306,7 @@ def easyupload(url):
}
json_resp = session.post(url=action_url, data=data).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if "download_link" in json_resp:
return json_resp["download_link"]
elif "data" in json_resp:
@@ -1363,7 +1365,7 @@ def filelions_and_streamwish(url):
params={"key": apiKey, "file_code": file_code, "hls": "1"},
).json()
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if _res["status"] != 200:
raise DirectDownloadLinkException(f"ERROR: {_res['msg']}")
result = _res["result"]
@@ -1394,7 +1396,7 @@ def streamvid(url: str):
try:
html = HTML(session.get(url).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if quality_defined:
data = {}
if not (inputs := html.xpath('//form[@id="F1"]//input')):
@@ -1405,7 +1407,9 @@ def streamvid(url: str):
try:
html = HTML(session.post(url, data=data).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(
f"ERROR: {e.__class__.__name__}"
) from e
if not (
script := html.xpath(
'//script[contains(text(),"document.location.href")]/text()'
@@ -1443,7 +1447,7 @@ def streamhub(url):
try:
html = HTML(session.get(url).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if not (inputs := html.xpath('//form[@name="F1"]//input')):
raise DirectDownloadLinkException("ERROR: No inputs found")
data = {}
@@ -1455,7 +1459,7 @@ def streamhub(url):
try:
html = HTML(session.post(url, data=data).text)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if directLink := html.xpath(
'//a[@class="btn btn-primary btn-go downloadbtn"]/@href'
):
@@ -1470,7 +1474,7 @@ def pcloud(url):
try:
res = session.get(url)
except Exception as e:
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
if link := findall(r".downloadlink.:..(https:.*)..", res.text):
return link[0].replace("\/", "/")
raise DirectDownloadLinkException("ERROR: Direct link not found")
@@ -1478,9 +1482,7 @@ def pcloud(url):
def tmpsend(url):
pattern = r"https://tmpsend.com/(\w+)$"
match = search(pattern, url)
if match:
if match := search(pattern, url):
file_id = match.group(1)
referer_url = f"https://tmpsend.com/thank-you?d={file_id}"
header = f"Referer: {referer_url}"

View File

@@ -91,9 +91,7 @@ async def add_jd_download(listener, path):
if odl := await retry_function(
jdownloader.device.downloads.query_packages, [{}]
):
odl_list = []
for od in odl:
odl_list.append(od["uuid"])
odl_list = [od["uuid"] for od in odl]
await retry_function(
jdownloader.device.downloads.remove_links,
package_ids=odl_list,
@@ -122,15 +120,23 @@ async def add_jd_download(listener, path):
"bytesTotal": True,
"saveTo": True,
"availableOnlineCount": True,
"availableTempUnknownCount": True,
"availableUnknownCount": True,
}
],
)
packages = []
online = 0
remove_unknown = False
for pack in queued_downloads:
save_to = pack["saveTo"]
if save_to.startswith(path):
if len(packages) == 0:
if not packages:
if (
pack.get("tempUnknownCount", 0) > 0
or pack.get("unknownCount", 0) > 0
):
remove_unknown = True
name = pack["name"]
gid = pack["uuid"]
size = pack.get("bytesTotal", 0)
@@ -155,6 +161,17 @@ async def add_jd_download(listener, path):
)
if len(packages) == 1:
if remove_unknown:
links = await retry_function(
jdownloader.device.linkgrabber.query_links,
[{"packageUUIDs": packages, "availability": True}],
)
if to_remove := [
link["uuid"]
for link in links
if link["availability"].lower() != "online"
]:
await retry_function(jdownloader.device.linkgrabber.remove_links, to_remove)
break
listener.name = listener.name or name
@@ -164,14 +181,13 @@ async def add_jd_download(listener, path):
await listener.onDownloadError(msg, button)
return
if listener.select:
if await JDownloaderHelper(listener).waitForConfigurations():
await retry_function(
jdownloader.device.linkgrabber.remove_links,
package_ids=[gid],
)
listener.removeFromSameDir()
return
if listener.select and await JDownloaderHelper(listener).waitForConfigurations():
await retry_function(
jdownloader.device.linkgrabber.remove_links,
package_ids=[gid],
)
listener.removeFromSameDir()
return
add_to_queue, event = await is_queued(listener.mid)
if add_to_queue:

View File

@@ -1,221 +0,0 @@
from secrets import token_urlsafe
from aiofiles.os import makedirs
from threading import Event
from mega import (
MegaApi,
MegaListener,
MegaRequest,
MegaTransfer,
MegaError,
)
from bot import (
LOGGER,
config_dict,
task_dict_lock,
task_dict,
non_queued_dl,
queue_dict_lock,
)
from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage
from bot.helper.ext_utils.bot_utils import sync_to_async
from bot.helper.ext_utils.links_utils import get_mega_link_type
from bot.helper.mirror_utils.status_utils.mega_download_status import MegaDownloadStatus
from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
from bot.helper.ext_utils.task_manager import is_queued, stop_duplicate_check
class MegaAppListener(MegaListener):
_NO_EVENT_ON = (MegaRequest.TYPE_LOGIN, MegaRequest.TYPE_FETCH_NODES)
NO_ERROR = "no error"
def __init__(self, continue_event: Event, listener):
self.continue_event = continue_event
self.node = None
self.public_node = None
self.listener = listener
self.is_cancelled = False
self.error = None
self.completed = False
self.isFile = False
self._bytes_transferred = 0
self._speed = 0
self._name = ""
super().__init__()
@property
def speed(self):
return self._speed
@property
def downloaded_bytes(self):
return self._bytes_transferred
def onRequestFinish(self, api: MegaApi, request: MegaRequest, error):
if self.is_cancelled:
return
if str(error).lower() != "no error":
self.error = error.copy()
LOGGER.error(f"Mega onRequestFinishError: {self.error}")
self.continue_event.set()
return
request_type = request.getType()
if request_type == MegaRequest.TYPE_LOGIN:
api.fetchNodes()
elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE:
self.public_node = request.getPublicMegaNode()
self._name = self.public_node.getName()
elif request_type == MegaRequest.TYPE_FETCH_NODES:
LOGGER.info("Fetching Root Node.")
self.node = api.getRootNode()
self._name = self.node.getName()
LOGGER.info(f"Node Name: {self.node.getName()}")
if (
request_type not in self._NO_EVENT_ON
or self.node
and "cloud drive" not in self._name.lower()
):
self.continue_event.set()
def onRequestTemporaryError(self, api, request, error: MegaError):
LOGGER.error(f"Mega Request error in {error}")
if not self.is_cancelled:
self.is_cancelled = True
self.error = f"RequestTempError: {error.toString()}"
self.continue_event.set()
def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer):
if self.is_cancelled:
api.cancelTransfer(transfer, None)
self.continue_event.set()
return
self._speed = transfer.getSpeed()
self._bytes_transferred = transfer.getTransferredBytes()
def onTransferFinish(self, api, transfer: MegaTransfer, error):
try:
if self.is_cancelled:
self.continue_event.set()
elif transfer.isFinished() and (transfer.isFolderTransfer() or self.isFile):
self.completed = True
self.continue_event.set()
except Exception as e:
LOGGER.error(e)
def onTransferTemporaryError(self, api, transfer: MegaTransfer, error: MegaError):
filen = transfer.getFileName()
state = transfer.getState()
errStr = error.toString()
LOGGER.error(f"Mega download error in file {transfer} {filen}: {error}")
if state in [1, 4]:
# Sometimes MEGA (offical client) can't stream a node either and raises a temp failed error.
# Don't break the transfer queue if transfer's in queued (1) or retrying (4) state [causes seg fault]
return
self.error = f"TransferTempError: {errStr} ({filen}"
if not self.is_cancelled:
self.is_cancelled = True
self.continue_event.set()
async def cancel_task(self):
self.is_cancelled = True
await self.listener.onDownloadError("Download Canceled by user")
class AsyncExecutor:
def __init__(self):
self.continue_event = Event()
def do(self, function, args):
self.continue_event.clear()
function(*args)
self.continue_event.wait()
async def add_mega_download(listener, path):
MEGA_EMAIL = config_dict["MEGA_EMAIL"]
MEGA_PASSWORD = config_dict["MEGA_PASSWORD"]
executor = AsyncExecutor()
api = MegaApi(None, None, None, "mirror-leech-telegram-bot")
folder_api = None
mega_listener = MegaAppListener(executor.continue_event, listener)
api.addListener(mega_listener)
if MEGA_EMAIL and MEGA_PASSWORD:
await sync_to_async(executor.do, api.login, (MEGA_EMAIL, MEGA_PASSWORD))
if get_mega_link_type(listener.link) == "file":
await sync_to_async(executor.do, api.getPublicNode, (listener.link,))
node = mega_listener.public_node
mega_listener.isFile = True
else:
folder_api = MegaApi(None, None, None, "mirror-leech-telegram-bot")
folder_api.addListener(mega_listener)
await sync_to_async(executor.do, folder_api.loginToFolder, (listener.link,))
node = await sync_to_async(folder_api.authorizeNode, mega_listener.node)
if mega_listener.error is not None:
await sendMessage(listener.message, str(mega_listener.error))
await sync_to_async(executor.do, api.logout, ())
if folder_api is not None:
await sync_to_async(executor.do, folder_api.logout, ())
return
listener.name = listener.name or node.getName()
msg, button = await stop_duplicate_check(listener)
if msg:
await sendMessage(listener.message, msg, button)
await sync_to_async(executor.do, api.logout, ())
if folder_api is not None:
await sync_to_async(executor.do, folder_api.logout, ())
return
gid = token_urlsafe(8)
size = api.getSize(node)
add_to_queue, event = await is_queued(listener.mid)
if add_to_queue:
LOGGER.info(f"Added to Queue/Download: {listener.name}")
async with task_dict_lock:
task_dict[listener.mid] = QueueStatus(listener, size, gid, "Dl")
await listener.onDownloadStart()
if listener.multi <= 1:
await sendStatusMessage(listener.message)
await event.wait()
async with task_dict_lock:
if listener.mid not in task_dict:
await sync_to_async(executor.do, api.logout, ())
if folder_api is not None:
await sync_to_async(executor.do, folder_api.logout, ())
return
from_queue = True
LOGGER.info(f"Start Queued Download from Mega: {listener.name}")
else:
from_queue = False
async with task_dict_lock:
task_dict[listener.mid] = MegaDownloadStatus(listener, mega_listener, size, gid)
async with queue_dict_lock:
non_queued_dl.add(listener.mid)
if from_queue:
LOGGER.info(f"Start Queued Download from Mega: {listener.name}")
else:
await listener.onDownloadStart()
if listener.multi <= 1:
await sendStatusMessage(listener.message)
LOGGER.info(f"Download from Mega: {listener.name}")
await makedirs(path, exist_ok=True)
await sync_to_async(
executor.do, api.startDownload, (node, path, listener.name, None, False, None)
)
await sync_to_async(executor.do, api.logout, ())
if folder_api is not None:
await sync_to_async(executor.do, folder_api.logout, ())
if mega_listener.completed:
await listener.onDownloadComplete()
elif (error := mega_listener.error) and mega_listener.is_cancelled:
await listener.onDownloadError(error)

View File

@@ -56,19 +56,12 @@ class JDownloaderStatus:
return get_readable_file_size(self._info.get("bytesTotal", 0))
def eta(self):
eta = self._info.get("eta", False)
if eta:
return get_readable_time(eta)
else:
return "-"
return get_readable_time(eta) if (eta := self._info.get("eta", False)) else "-"
def status(self):
self._update()
state = self._info.get("status", "paused")
if state == "paused":
return MirrorStatus.STATUS_PAUSED
else:
return state
return MirrorStatus.STATUS_PAUSED if state == "paused" else state
def task(self):
return self

View File

@@ -1,50 +0,0 @@
from bot.helper.ext_utils.status_utils import (
get_readable_file_size,
MirrorStatus,
get_readable_time,
)
class MegaDownloadStatus:
def __init__(self, listener, obj, size, gid):
self._obj = obj
self._size = size
self._gid = gid
self.listener = listener
def name(self):
return self.listener.name
def progress_raw(self):
try:
return round(self._obj.downloaded_bytes / self._size * 100, 2)
except:
return 0.0
def progress(self):
return f"{self.progress_raw()}%"
def status(self):
return MirrorStatus.STATUS_DOWNLOADING
def processed_bytes(self):
return get_readable_file_size(self._obj.downloaded_bytes)
def eta(self):
try:
seconds = (self._size - self._obj.downloaded_bytes) / self._obj.speed
return get_readable_time(seconds)
except ZeroDivisionError:
return "-"
def size(self):
return get_readable_file_size(self._size)
def speed(self):
return f"{get_readable_file_size(self._obj.speed)}/s"
def gid(self):
return self._gid
def task(self):
return self._obj

View File

@@ -104,66 +104,66 @@ async def get_buttons(key=None, edit_type=None):
buttons.ibutton("Default", f"botset resetaria {key}")
buttons.ibutton("Empty String", f"botset emptyaria {key}")
buttons.ibutton("Close", "botset close")
if key == "newkey":
msg = "Send a key with value. Example: https-proxy-user:value"
else:
msg = f"Send a valid value for {key}. Current value is '{aria2_options[key]}'. Timeout: 60 sec"
msg = (
"Send a key with value. Example: https-proxy-user:value"
if key == "newkey"
else f"Send a valid value for {key}. Current value is '{aria2_options[key]}'. Timeout: 60 sec"
)
elif edit_type == "qbitvar":
buttons.ibutton("Back", "botset qbit")
buttons.ibutton("Empty String", f"botset emptyqbit {key}")
buttons.ibutton("Close", "botset close")
msg = f"Send a valid value for {key}. Current value is '{qbit_options[key]}'. Timeout: 60 sec"
elif key is not None:
if key == "var":
for k in list(config_dict.keys())[START : 10 + START]:
buttons.ibutton(k, f"botset botvar {k}")
if STATE == "view":
buttons.ibutton("Edit", "botset edit var")
else:
buttons.ibutton("View", "botset view var")
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
for x in range(0, len(config_dict), 10):
buttons.ibutton(
f"{int(x/10)}", f"botset start var {x}", position="footer"
)
msg = f"Config Variables | Page: {int(START/10)} | State: {STATE}"
elif key == "private":
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
msg = """Send private file: config.env, token.pickle, rclone.conf, accounts.zip, list_drives.txt, cookies.txt, terabox.txt, .netrc or any other private file!
elif key == "var":
for k in list(config_dict.keys())[START : 10 + START]:
buttons.ibutton(k, f"botset botvar {k}")
if STATE == "view":
buttons.ibutton("Edit", "botset edit var")
else:
buttons.ibutton("View", "botset view var")
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
for x in range(0, len(config_dict), 10):
buttons.ibutton(
f"{int(x/10)}", f"botset start var {x}", position="footer"
)
msg = f"Config Variables | Page: {int(START/10)} | State: {STATE}"
elif key == "private":
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
msg = """Send private file: config.env, token.pickle, rclone.conf, accounts.zip, list_drives.txt, cookies.txt, terabox.txt, .netrc or any other private file!
To delete private file send only the file name as text message.
Note: Changing .netrc will not take effect for aria2c until restart.
Timeout: 60 sec"""
elif key == "aria":
for k in list(aria2_options.keys())[START : 10 + START]:
buttons.ibutton(k, f"botset ariavar {k}")
if STATE == "view":
buttons.ibutton("Edit", "botset edit aria")
else:
buttons.ibutton("View", "botset view aria")
buttons.ibutton("Add new key", "botset ariavar newkey")
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
for x in range(0, len(aria2_options), 10):
buttons.ibutton(
f"{int(x/10)}", f"botset start aria {x}", position="footer"
)
msg = f"Aria2c Options | Page: {int(START/10)} | State: {STATE}"
elif key == "qbit":
for k in list(qbit_options.keys())[START : 10 + START]:
buttons.ibutton(k, f"botset qbitvar {k}")
if STATE == "view":
buttons.ibutton("Edit", "botset edit qbit")
else:
buttons.ibutton("View", "botset view qbit")
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
for x in range(0, len(qbit_options), 10):
buttons.ibutton(
f"{int(x/10)}", f"botset start qbit {x}", position="footer"
)
msg = f"Qbittorrent Options | Page: {int(START/10)} | State: {STATE}"
elif key == "aria":
for k in list(aria2_options.keys())[START : 10 + START]:
buttons.ibutton(k, f"botset ariavar {k}")
if STATE == "view":
buttons.ibutton("Edit", "botset edit aria")
else:
buttons.ibutton("View", "botset view aria")
buttons.ibutton("Add new key", "botset ariavar newkey")
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
for x in range(0, len(aria2_options), 10):
buttons.ibutton(
f"{int(x/10)}", f"botset start aria {x}", position="footer"
)
msg = f"Aria2c Options | Page: {int(START/10)} | State: {STATE}"
elif key == "qbit":
for k in list(qbit_options.keys())[START : 10 + START]:
buttons.ibutton(k, f"botset qbitvar {k}")
if STATE == "view":
buttons.ibutton("Edit", "botset edit qbit")
else:
buttons.ibutton("View", "botset view qbit")
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
for x in range(0, len(qbit_options), 10):
buttons.ibutton(
f"{int(x/10)}", f"botset start qbit {x}", position="footer"
)
msg = f"Qbittorrent Options | Page: {int(START/10)} | State: {STATE}"
button = buttons.build_menu(1) if key is None else buttons.build_menu(2)
return msg, button
@@ -309,16 +309,15 @@ async def edit_qbit(_, message, pre_message, key):
async def sync_jdownloader():
if DATABASE_URL:
if jdownloader.device is not None:
await sync_to_async(jdownloader.device.system.exit_jd)
if await aiopath.exists("cfg.zip"):
await remove("cfg.zip")
await (
await create_subprocess_exec("7z", "a", "cfg.zip", "/JDownloader/cfg")
).wait()
await DbManger().update_private_file("cfg.zip")
await sync_to_async(jdownloader.connectToDevice)
if DATABASE_URL and jdownloader.device is not None:
await sync_to_async(jdownloader.device.system.exit_jd)
if await aiopath.exists("cfg.zip"):
await remove("cfg.zip")
await (
await create_subprocess_exec("7z", "a", "cfg.zip", "/JDownloader/cfg")
).wait()
await DbManger().update_private_file("cfg.zip")
await sync_to_async(jdownloader.connectToDevice)
async def update_private_file(_, message, pre_message):

View File

@@ -85,10 +85,7 @@ async def do(func, message):
try:
with redirect_stdout(stdout):
if func == "exec":
func_return = await sync_to_async(rfunc)
else:
func_return = await rfunc()
func_return = await sync_to_async(rfunc) if func == "exec" else await rfunc()
except Exception as e:
value = stdout.getvalue()
return f"{value}{format_exc()}"

View File

@@ -22,7 +22,7 @@ async def argUsage(_, query):
await editMessage(message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][1])
elif data[1] == "m":
buttons = ButtonMaker()
buttons.ibutton("Back", f"help back m")
buttons.ibutton("Back", "help back m")
button = buttons.build_menu()
await editMessage(message, MIRROR_HELP_DICT[data[2]], button)
elif data[1] == "yt":

View File

@@ -138,7 +138,7 @@ ODLS: {get_readable_file_size(dl_speed)}/s
OULS: {get_readable_file_size(up_speed)}/s
OSDS: {get_readable_file_size(seed_speed)}/s
"""
await query.answer(msg, show_alert=True)
await query.answer(msg, show_alert=True, cache_time=30)
bot.add_handler(

View File

@@ -392,13 +392,12 @@ class YtDlp(TaskListener):
key, value = map(str.strip, ytopt.split(":", 1))
if key == "postprocessors":
continue
if key == "format":
if not self.select:
if value.startswith("ba/b-"):
qual = value
continue
else:
qual = value
if key == "format" and not self.select:
if value.startswith("ba/b-"):
qual = value
continue
else:
qual = value
if value.startswith("^"):
if "." in value or value == "^inf":
value = float(value.split("^")[1])