Add NZB Download (developing)

close #1514

Signed-off-by: anasty17 <e.anastayyar@gmail.com>
This commit is contained in:
anasty17 2024-05-13 18:53:03 +03:00
parent e805ef3b0b
commit 09cc1b1f10
36 changed files with 1658 additions and 100 deletions

View File

@ -7,19 +7,24 @@ programming in Python.
## qBittorrent
- Select files from a Torrent before and during downloading (Requires Base URL) (task option)
- Select files from a Torrent before and during download (Requires Base URL) (task option)
- Seed torrents to a specific ratio and time (task option)
- Edit Global Options while the bot is running from bot settings (global option)
## Aria2c
- Select files from a Torrent before and during downloading (Requires Base URL) (task option)
- Select files from a Torrent before and during download (Requires Base URL) (task option)
- Seed torrents to a specific ratio and time (task option)
- Netrc support (global option)
- Direct link authentication for a specific link while using the bot (it will work even if only the username or password
is provided) (task option)
- Edit Global Options while the bot is running from bot settings (global option)
## Sabnzbd
- Remove files from job before and during download (Requires Base URL) (task option)
- Edit Global Options while the bot is running from bot settings (global option)
## TG Upload/Download
- Split size (global, user, and task option)
@ -321,7 +326,14 @@ quotes, even if it's `Int`, `Bool` or `List`.
- `JD_EMAIL`: JDownloader email, sign up at [JDownloader](https://my.jdownloader.org/)
- `JD_PASS`: JDownloader password
**9. RSS**
**9. Sabnzbd**
- `USENET_HOST`: Usenet provider host to grant access
- `USENET_USERNAME`: Usenet username
- `USENET_PASSWORD`: Usenet password
- **NOTE**: You can add more servers from bsetting -> nzb settings -> Add Server. A minimal example of these variables is shown below.
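A sketch of these variables in `config.env`, following the format of `config_sample.env` (host and credentials are placeholders):

```
USENET_HOST = "news.yourprovider.example"
USENET_USERNAME = "your_username"
USENET_PASSWORD = "your_password"
```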
**10. RSS**
- `RSS_DELAY`: Time in seconds for the RSS refresh interval. Recommended at least `600` seconds. Default is `600` seconds. `Int`
@ -333,7 +345,7 @@ quotes, even if it's `Int`, `Bool` or `List`.
with `USER_STRING_SESSION` add a group id for `RSS_CHAT`. If `DATABASE_URL` is not added, you will miss the feeds while
the bot is offline.
**10. Queue System**
**11. Queue System**
- `QUEUE_ALL`: Number of parallel download and upload tasks. For example, if 20 tasks are added and `QUEUE_ALL` is `8`,
then the sum of uploading and downloading tasks is 8 and the rest stay in queue. `Int`. **NOTE**: if you want to
@ -342,7 +354,7 @@ quotes, even if it's `Int`, `Bool` or `List`.
- `QUEUE_DOWNLOAD`: Number of all parallel downloading tasks. `Int`
- `QUEUE_UPLOAD`: Number of all parallel uploading tasks. `Int`
**11. Torrent Search**
**12. Torrent Search**
- `SEARCH_API_LINK`: Search API app link. Get your API by deploying
this [repository](https://github.com/Ryuk-me/Torrent-Api-py). `Str`
@ -465,10 +477,12 @@ sudo docker compose logs --follow
mirror - or /m Mirror
qbmirror - or /qm Mirror torrent using qBittorrent
jdmirror - or /jm Mirror using jdownloader
nzbmirror - or /nm Mirror using sabnzbd
ytdl - or /y Mirror yt-dlp supported links
leech - or /l Upload to telegram
qbleech - or /ql Leech torrent using qBittorrent
jdleech - or /jl Leech using jdownloader
nzbleech - or /nl Leech using sabnzbd
ytdlleech - or /yl Leech yt-dlp supported links
clone - Copy file/folder to Drive
count - Count file/folder from GDrive

View File

@ -7,4 +7,5 @@ aria2c --allow-overwrite=true --auto-file-renaming=true --bt-enable-lpd=true --b
--http-accept-gzip=true --max-file-not-found=0 --max-tries=20 --peer-id-prefix=-qB4520- --reuse-uri=true \
--content-disposition-default-utf8=true --user-agent=Wget/1.12 --peer-agent=qBittorrent/4.5.2 --quiet=true \
--summary-interval=0 --max-upload-limit=1K
qbittorrent-nox -d --profile="$(pwd)"
qbittorrent-nox -d --profile="$(pwd)"
sabnzbdplus -f sabnzbd/SABnzbd.ini -s 127.0.0.1:8070 -b 0 -d -c -l 0 --console

View File

@ -18,11 +18,13 @@ from pymongo.mongo_client import MongoClient
from pymongo.server_api import ServerApi
from pyrogram import Client as tgClient, enums
from qbittorrentapi import Client as qbClient
from sabnzbdapi import sabnzbdClient
from socket import setdefaulttimeout
from subprocess import Popen, run
from time import time
from tzlocal import get_localzone
from uvloop import install
from asyncio import run as aiorun
# from faulthandler import enable as faulthandler_enable
# faulthandler_enable()
@ -49,9 +51,10 @@ LOGGER = getLogger(__name__)
load_dotenv("config.env", override=True)
Intervals = {"status": {}, "qb": "", "jd": "", "stopAll": False}
Intervals = {"status": {}, "qb": "", "jd": "", "nzb": "", "stopAll": False}
QbTorrents = {}
jd_downloads = {}
nzb_jobs = {}
DRIVES_NAMES = []
DRIVES_IDS = []
INDEX_URLS = []
@ -59,6 +62,7 @@ GLOBAL_EXTENSION_FILTER = ["aria2", "!qB"]
user_data = {}
aria2_options = {}
qbit_options = {}
nzb_options = {}
queued_dl = {}
queued_up = {}
non_queued_dl = set()
@ -75,6 +79,7 @@ except:
task_dict_lock = Lock()
queue_dict_lock = Lock()
qb_listener_lock = Lock()
nzb_listener_lock = Lock()
jd_lock = Lock()
cpu_eater_lock = Lock()
subprocess_lock = Lock()
@ -130,6 +135,12 @@ if DATABASE_URL:
if qbit_opt := db.settings.qbittorrent.find_one({"_id": bot_id}):
del qbit_opt["_id"]
qbit_options = qbit_opt
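# Restore the Sabnzbd config saved in the database; "__" in the stored key maps back
# to "." in the filename (e.g. "SABnzbd__ini" -> "sabnzbd/SABnzbd.ini").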
if nzb_opt := db.settings.nzb.find_one({"_id": bot_id}):
del nzb_opt["_id"]
(key, value), = nzb_opt.items()
file_ = key.replace("__", ".")
with open(f"sabnzbd/{file_}", "wb+") as f:
f.write(value)
conn.close()
BOT_TOKEN = environ.get("BOT_TOKEN", "")
bot_id = BOT_TOKEN.split(":", 1)[0]
@ -143,7 +154,7 @@ if not ospath.exists(".netrc"):
with open(".netrc", "w"):
pass
run(
"chmod 600 .netrc && cp .netrc /root/.netrc && chmod +x aria-nox.sh && ./aria-nox.sh",
"chmod 600 .netrc && cp .netrc /root/.netrc && chmod +x aria-nox-nzb.sh && ./aria-nox-nzb.sh",
shell=True,
)
@ -184,7 +195,7 @@ if DEFAULT_UPLOAD != "rc":
DOWNLOAD_DIR = environ.get("DOWNLOAD_DIR", "")
if len(DOWNLOAD_DIR) == 0:
DOWNLOAD_DIR = "/usr/src/app/downloads/"
DOWNLOAD_DIR = "/usr/src/app/Downloads/"
elif not DOWNLOAD_DIR.endswith("/"):
DOWNLOAD_DIR = f"{DOWNLOAD_DIR}/"
@ -234,6 +245,14 @@ if len(JD_EMAIL) == 0 or len(JD_PASS) == 0:
JD_EMAIL = ""
JD_PASS = ""
USENET_HOST = environ.get("USENET_HOST", "")
USENET_USERNAME = environ.get("USENET_USERNAME", "")
USENET_PASSWORD = environ.get("USENET_PASSWORD", "")
if len(USENET_HOST) == 0 or len(USENET_USERNAME) == 0 or len(USENET_PASSWORD) == 0:
USENET_HOST = ""
USENET_USERNAME = ""
USENET_PASSWORD = ""
FILELION_API = environ.get("FILELION_API", "")
if len(FILELION_API) == 0:
FILELION_API = ""
@ -428,6 +447,9 @@ config_dict = {
"USER_TRANSMISSION": USER_TRANSMISSION,
"UPSTREAM_REPO": UPSTREAM_REPO,
"UPSTREAM_BRANCH": UPSTREAM_BRANCH,
"USENET_HOST": USENET_HOST,
"USENET_USERNAME": USENET_USERNAME,
"USENET_PASSWORD": USENET_PASSWORD,
"USER_SESSION_STRING": USER_SESSION_STRING,
"USE_SERVICE_ACCOUNTS": USE_SERVICE_ACCOUNTS,
"WEB_PINCODE": WEB_PINCODE,
@ -476,6 +498,15 @@ def get_qb_client():
)
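# Client for the local Sabnzbd instance started by aria-nox-nzb.sh
# (see sabnzbd/SABnzbd.ini: host 127.0.0.1, port 8070, api_key mltb).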
def get_sabnzb_client():
return sabnzbdClient(
host="http://localhost",
api_key="mltb",
port="8070",
HTTPX_REQUETS_ARGS={"timeout": 10},
)
aria2c_global = [
"bt-max-open-files",
"download-result",
@ -526,3 +557,13 @@ if not aria2_options:
else:
a2c_glo = {op: aria2_options[op] for op in aria2c_global if op in aria2_options}
aria2.set_global_options(a2c_glo)
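# Cache Sabnzbd's [misc] section once at startup so bot settings can list and edit it.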
async def get_nzb_options():
global nzb_options
zclient = get_sabnzb_client()
nzb_options = (await zclient.get_config())["config"]["misc"]
await zclient.log_out()
aiorun(get_nzb_options())

View File

@ -1,6 +1,6 @@
from aiofiles import open as aiopen
from aiofiles.os import path as aiopath, remove
from asyncio import gather, create_subprocess_exec, sleep
from asyncio import gather, create_subprocess_exec
from os import execl as osexecl
from psutil import (
disk_usage,
@ -25,6 +25,7 @@ from bot import (
DATABASE_URL,
INCOMPLETE_TASK_NOTIFIER,
scheduler,
get_sabnzb_client,
)
from .helper.ext_utils.bot_utils import cmd_exec, sync_to_async, create_help_buttons
from .helper.ext_utils.db_handler import DbManager
@ -43,13 +44,13 @@ from .modules import (
cancel_task,
clone,
exec,
file_selector,
gd_count,
gd_delete,
gd_search,
mirror_leech,
status,
torrent_search,
torrent_select,
ytdlp,
rss,
shell,
@ -123,11 +124,17 @@ async def restart(_, message):
if st := Intervals["status"]:
for intvl in list(st.values()):
intvl.cancel()
await sleep(1)
nzb_client = get_sabnzb_client()
if nzb_client.LOGGED_IN:
await nzb_client.pause_all()
await nzb_client.purge_all(True)
await nzb_client.shutdown()
await sync_to_async(clean_all)
await sleep(1)
proc1 = await create_subprocess_exec(
"pkill", "-9", "-f", "gunicorn|aria2c|qbittorrent-nox|ffmpeg|rclone|java"
"pkill",
"-9",
"-f",
"gunicorn|aria2c|qbittorrent-nox|ffmpeg|rclone|java|sabnzbdplus",
)
proc2 = await create_subprocess_exec("python3", "update.py")
await gather(proc1.wait(), proc2.wait())
@ -149,20 +156,22 @@ async def log(_, message):
help_string = f"""
NOTE: Try each command without any argument to see more details.
/{BotCommands.MirrorCommand[0]} or /{BotCommands.MirrorCommand[1]}: Start mirroring to Google Drive.
/{BotCommands.QbMirrorCommand[0]} or /{BotCommands.QbMirrorCommand[1]}: Start Mirroring to Google Drive using qBittorrent.
/{BotCommands.JdMirrorCommand[0]} or /{BotCommands.JdMirrorCommand[1]}: Start Mirroring to Google Drive using JDownloader.
/{BotCommands.MirrorCommand[0]} or /{BotCommands.MirrorCommand[1]}: Start mirroring to cloud.
/{BotCommands.QbMirrorCommand[0]} or /{BotCommands.QbMirrorCommand[1]}: Start Mirroring to cloud using qBittorrent.
/{BotCommands.JdMirrorCommand[0]} or /{BotCommands.JdMirrorCommand[1]}: Start Mirroring to cloud using JDownloader.
/{BotCommands.NzbMirrorCommand[0]} or /{BotCommands.NzbMirrorCommand[1]}: Start Mirroring to cloud using Sabnzbd.
/{BotCommands.YtdlCommand[0]} or /{BotCommands.YtdlCommand[1]}: Mirror yt-dlp supported link.
/{BotCommands.LeechCommand[0]} or /{BotCommands.LeechCommand[1]}: Start leeching to Telegram.
/{BotCommands.QbLeechCommand[0]} or /{BotCommands.QbLeechCommand[1]}: Start leeching using qBittorrent.
/{BotCommands.JdLeechCommand[0]} or /{BotCommands.JdLeechCommand[1]}: Start leeching using JDownloader.
/{BotCommands.NzbLeechCommand[0]} or /{BotCommands.NzbLeechCommand[1]}: Start leeching using Sabnzbd.
/{BotCommands.YtdlLeechCommand[0]} or /{BotCommands.YtdlLeechCommand[1]}: Leech yt-dlp supported link.
/{BotCommands.CloneCommand} [drive_url]: Copy file/folder to Google Drive.
/{BotCommands.CountCommand} [drive_url]: Count file/folder of Google Drive.
/{BotCommands.DeleteCommand} [drive_url]: Delete file/folder from Google Drive (Only Owner & Sudo).
/{BotCommands.UserSetCommand[0]} or /{BotCommands.UserSetCommand[1]} [query]: Users settings.
/{BotCommands.BotSetCommand[0]} or /{BotCommands.BotSetCommand[1]} [query]: Bot settings.
/{BotCommands.BtSelectCommand}: Select files from torrents by gid or reply.
/{BotCommands.SelectCommand}: Select files from torrents or nzb by gid or reply.
/{BotCommands.CancelTaskCommand[0]} or /{BotCommands.CancelTaskCommand[1]} [gid]: Cancel task by gid or reply.
/{BotCommands.ForceStartCommand[0]} or /{BotCommands.ForceStartCommand[1]} [gid]: Force start task by gid or reply.
/{BotCommands.CancelAllCommand} [query]: Cancel all [status] tasks.

View File

@ -89,6 +89,7 @@ class TaskConfig:
self.size = 0
self.isLeech = False
self.isQbit = False
self.isNzb = False
self.isJd = False
self.isClone = False
self.isYtDlp = False
@ -406,6 +407,7 @@ class TaskConfig:
self.isQbit,
self.isLeech,
self.isJd,
self.isNzb,
self.sameDir,
self.bulk,
self.multiTag,

View File

@ -57,7 +57,7 @@ def create_help_buttons():
def bt_selection_buttons(id_):
gid = id_[:12] if len(id_) > 20 else id_
gid = id_[:12] if len(id_) > 25 else id_
pincode = "".join([n for n in id_ if n.isdigit()][:4])
buttons = ButtonMaker()
BASE_URL = config_dict["BASE_URL"]

View File

@ -55,6 +55,13 @@ class DbManager:
await self._db.settings.qbittorrent.update_one(
{"_id": bot_id}, {"$set": qbit_options}, upsert=True
)
# Save nzb config
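# MongoDB field names can't contain dots, so "SABnzbd.ini" is stored under the key
# "SABnzbd__ini" and mapped back to a filename on load in bot/__init__.py.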
if await self._db.settings.nzb.find_one({"_id": bot_id}) is None:
async with aiopen("sabnzbd/SABnzbd.ini", "rb+") as pf:
nzb_conf = await pf.read()
await self._db.settings.nzb.update_one(
{"_id": bot_id}, {"$set": {"SABnzbd__ini": nzb_conf}}, upsert=True
)
# User Data
if await self._db.users.find_one():
rows = self._db.users.find({})
@ -146,6 +153,13 @@ class DbManager:
else:
self._conn.close
async def update_nzb_config(self):
async with aiopen("sabnzbd/SABnzbd.ini", "rb+") as pf:
nzb_conf = await pf.read()
await self._db.settings.nzb.update_one(
{"_id": bot_id}, {"$set": {"SABnzbd__ini": nzb_conf}}, upsert=True
)
async def update_user_data(self, user_id):
if self._err:
return

View File

@ -219,7 +219,7 @@ MIRROR_HELP_DICT = {
"DL-Auth": "<b>Direct link authorization</b>: -au -ap\n\n/cmd link -au username -ap password",
"Headers": "<b>Direct link custom headers</b>: -h\n\n/cmd link -h key: value key1: value1",
"Extract/Zip": extract_zip,
"Torrent-Files": "<b>Bittorrent/JDownloader File Selection</b>: -s\n\n/cmd link -s or by replying to file/link",
"Select-Files": "<b>Bittorrent/JDownloader/Sabnzbd File Selection</b>: -s\n\n/cmd link -s or by replying to file/link",
"Torrent-Seed": seed,
"Multi-Link": multi_link,
"Same-Directory": same_dir,

View File

@ -119,6 +119,11 @@ def get_readable_time(seconds: int):
return result
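# Convert a Sabnzbd-style "H:MM:SS" duration (e.g. timeleft "0:16:44") to seconds.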
def time_to_seconds(time_duration):
hours, minutes, seconds = map(int, time_duration.split(":"))
return hours * 3600 + minutes * 60 + seconds
def speed_string_to_bytes(size_text: str):
size = 0
size_text = size_text.lower()

View File

@ -0,0 +1,124 @@
from asyncio import sleep, gather
from bot import (
Intervals,
get_sabnzb_client,
nzb_jobs,
nzb_listener_lock,
task_dict_lock,
LOGGER,
bot_loop,
)
from bot.helper.ext_utils.bot_utils import new_task
from bot.helper.ext_utils.status_utils import getTaskByGid
from bot.helper.ext_utils.task_manager import stop_duplicate_check
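# _nzb_listener() below polls Sabnzbd's queue and history every 3 seconds and dispatches
# complete/failed/status-change handlers for the nzo_ids tracked in nzb_jobs.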
async def _remove_job(client, nzo_id, mid):
res1, _ = await gather(
client.delete_history(nzo_id, del_files=True), client.delete_category(f"{mid}")
)
if not res1:
await client.delete_job(nzo_id, True)
async with nzb_listener_lock:
if nzo_id in nzb_jobs:
del nzb_jobs[nzo_id]
await client.log_out()
@new_task
async def _onDownloadError(err, nzo_id, button=None):
task = await getTaskByGid(nzo_id)
LOGGER.info(f"Cancelling Download: {task.name()}")
await gather(
task.listener.onDownloadError(err, button),
_remove_job(task.client, nzo_id, task.listener.mid),
)
@new_task
async def _change_status(nzo_id, status):
task = await getTaskByGid(nzo_id)
async with task_dict_lock:
task.cstatus = status
@new_task
async def _stop_duplicate(nzo_id):
task = await getTaskByGid(nzo_id)
if not hasattr(task, "listener"):
return
task.listener.name = task.name()
msg, button = await stop_duplicate_check(task.listener)
if msg:
_onDownloadError(msg, nzo_id, button)
@new_task
async def _onDownloadComplete(nzo_id):
task = await getTaskByGid(nzo_id)
await task.listener.onDownloadComplete()
if Intervals["stopAll"]:
return
await _remove_job(task.client, nzo_id, task.listener.mid)
async def _nzb_listener():
client = get_sabnzb_client()
while True:
if Intervals["stopAll"]:
break
async with nzb_listener_lock:
try:
jobs = (await client.get_history())["history"]["slots"]
downloads = (await client.get_downloads())["queue"]["slots"]
if len(jobs) == 0 and len(downloads) == 0:
Intervals["nzb"] = ""
await client.log_out()
break
for job in jobs:
nzo_id = job["nzo_id"]
if nzo_id not in nzb_jobs:
continue
if job["status"] == "Completed":
if not nzb_jobs[nzo_id]["uploaded"]:
_onDownloadComplete(nzo_id)
nzb_jobs[nzo_id]["uploaded"] = True
nzb_jobs[nzo_id]["status"] = "Completed"
elif job["status"] == "Failed":
_onDownloadError(job["fail_message"], nzo_id)
elif job["status"] in [
"QuickCheck",
"Verifying",
"Repairing",
"Fetching",
"Moving",
"Extracting",
]:
if job["status"] != nzb_jobs[nzo_id]["status"]:
_change_status(nzo_id, job["status"])
for dl in downloads:
nzo_id = dl["nzo_id"]
if nzo_id not in nzb_jobs:
continue
if (
dl["status"] == "Downloading"
and not nzb_jobs[nzo_id]["stop_dup_check"]
and not dl["filename"].startswith("Trying")
):
nzb_jobs[nzo_id]["stop_dup_check"] = True
_stop_duplicate(nzo_id)
except Exception as e:
LOGGER.error(str(e))
await sleep(3)
async def onDownloadStart(nzo_id):
async with nzb_listener_lock:
nzb_jobs[nzo_id] = {
"uploaded": False,
"stop_dup_check": False,
"status": "Downloading",
}
if not Intervals["nzb"]:
Intervals["nzb"] = bot_loop.create_task(_nzb_listener())

View File

@ -62,27 +62,24 @@ async def _stop_duplicate(tor):
task = await getTaskByGid(tor.hash[:12])
if not hasattr(task, "listener"):
return
task.listener.name = tor.content_path.rsplit("/", 1)[-1].rsplit(".!qB", 1)[0]
msg, button = await stop_duplicate_check(task.listener)
if msg:
_onDownloadError(msg, tor, button)
if task.listener.stopDuplicate:
task.listener.name = tor.content_path.rsplit("/", 1)[-1].rsplit(".!qB", 1)[0]
msg, button = await stop_duplicate_check(task.listener)
if msg:
_onDownloadError(msg, tor, button)
@new_task
async def _onDownloadComplete(tor):
ext_hash = tor.hash
tag = tor.tags
await sleep(2)
task = await getTaskByGid(ext_hash[:12])
if not hasattr(task, "client"):
return
client = task.qbclient()
if not task.listener.seed:
await sync_to_async(client.torrents_pause, torrent_hashes=ext_hash)
await sync_to_async(task.client.torrents_pause, torrent_hashes=ext_hash)
if task.listener.select:
await clean_unwanted(task.listener.dir)
path = tor.content_path.rsplit("/", 1)[0]
res = await sync_to_async(client.torrents_files, torrent_hash=ext_hash)
res = await sync_to_async(task.client.torrents_files, torrent_hash=ext_hash)
for f in res:
if f.priority == 0 and await aiopath.exists(f"{path}/{f.name}"):
try:
@ -100,7 +97,7 @@ async def _onDownloadComplete(tor):
else:
removed = True
if removed:
await _remove_torrent(client, ext_hash, tag)
await _remove_torrent(task.client, ext_hash, tag)
return
async with qb_listener_lock:
if tag in QbTorrents:
@ -110,11 +107,11 @@ async def _onDownloadComplete(tor):
await update_status_message(task.listener.message.chat.id)
LOGGER.info(f"Seeding started: {tor.name} - Hash: {ext_hash}")
else:
await _remove_torrent(client, ext_hash, tag)
await _remove_torrent(task.client, ext_hash, tag)
async def _qb_listener():
client = await sync_to_async(get_qb_client)
client = get_qb_client()
while True:
async with qb_listener_lock:
try:
@ -142,10 +139,7 @@ async def _qb_listener():
)
elif state == "downloading":
QbTorrents[tag]["stalled_time"] = time()
if (
config_dict["STOP_DUPLICATE"]
and not QbTorrents[tag]["stop_dup_check"]
):
if not QbTorrents[tag]["stop_dup_check"]:
QbTorrents[tag]["stop_dup_check"] = True
_stop_duplicate(tor_info)
elif state == "stalledDL":

View File

@ -145,7 +145,7 @@ class TaskListener(TaskConfig):
if self.join and await aiopath.isdir(up_path):
await join_files(up_path)
if self.extract:
if self.extract and not self.isNzb:
up_path = await self.proceedExtract(up_path, gid)
if self.isCancelled:
return

View File

@ -40,7 +40,7 @@ def _get_hash_file(fpath):
async def add_qb_torrent(listener, path, ratio, seed_time):
client = await sync_to_async(get_qb_client)
client = get_qb_client()
try:
url = listener.link
tpath = None
@ -138,6 +138,6 @@ async def add_qb_torrent(listener, path, ratio, seed_time):
except Exception as e:
await listener.onDownloadError(f"{e}")
finally:
if await aiopath.exists(listener.link):
if tpath and await aiopath.exists(listener.link):
await remove(listener.link)
await sync_to_async(client.auth_log_out)

View File

@ -47,7 +47,7 @@ async def add_rclone_download(listener, path):
err = (
res1[1]
or res2[1]
or "Use '/shell cat rlog.txt' to see more information"
or "Use <code>/shell cat rlog.txt</code> to see more information"
)
msg = f"Error: While getting rclone stat/size. Path: {remote}:{listener.link}. Stderr: {err[:4000]}"
await listener.onDownloadError(msg)
@ -57,7 +57,7 @@ async def add_rclone_download(listener, path):
rsize = loads(res2[0])
except Exception as err:
if not str(err):
err = "Use '/shell cat rlog.txt' to see more information"
err = "Use <code>/shell cat rlog.txt</code> to see more information"
await listener.onDownloadError(f"RcloneDownload JsonLoad: {err}")
return
if rstat["IsDir"]:

View File

@ -0,0 +1,129 @@
from aiofiles.os import remove, path as aiopath
from asyncio import gather
from bot import (
task_dict,
task_dict_lock,
get_sabnzb_client,
LOGGER,
config_dict,
non_queued_dl,
queue_dict_lock,
)
from bot.helper.ext_utils.bot_utils import bt_selection_buttons
from bot.helper.ext_utils.task_manager import check_running_tasks
from bot.helper.listeners.nzb_listener import onDownloadStart
from bot.helper.mirror_leech_utils.status_utils.nzb_status import SabnzbdStatus
from bot.helper.telegram_helper.message_utils import (
sendMessage,
sendStatusMessage,
)
async def add_nzb(listener, path):
client = get_sabnzb_client()
if not client.LOGGED_IN:
res = await client.check_login()
if not res and (
not config_dict["USENET_HOST"]
or not config_dict["USENET_PASSWORD"]
or not config_dict["USENET_USERNAME"]
):
await listener.onDownloadError(
"Set USENET_HOST, USENET_USERNAME and USENET_PASSWORD in bsetting or config!"
)
return
else:
try:
await client.login(
"main",
config_dict["USENET_HOST"],
config_dict["USENET_USERNAME"],
config_dict["USENET_PASSWORD"],
)
except Exception as e:
await listener.onDownloadError(str(e))
return
try:
await client.create_category(f"{listener.mid}", path)
url = listener.link
nzbpath = None
if await aiopath.exists(listener.link):
url = None
nzbpath = listener.link
add_to_queue, event = await check_running_tasks(listener)
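# Per the SABnzbd API: priority -2 adds the job paused while it waits for a slot in the
# bot's queue; pp 3 = +Repair/Unpack/Delete (used when extract is requested), pp 1 = +Repair only.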
res = await client.add_uri(
url,
nzbpath,
listener.name,
listener.extract if isinstance(listener.extract, str) else "",
f"{listener.mid}",
priority=-2 if add_to_queue else 0,
pp=3 if listener.extract else 1,
)
if not res["status"]:
await listener.onDownloadError(
"Not added! Mostly issue in the link",
)
return
job_id = res["nzo_ids"][0]
downloads = await client.get_downloads(nzo_ids=job_id)
if not downloads["queue"]["slots"]:
history = await client.get_history(nzo_ids=job_id)
if history["history"]["slots"][0]["status"] == "Failed":
err = (
history["slots"][0]["fail_message"]
or "Link not added, unknown error!"
)
await gather(
listener.onDownloadError(err),
client.delete_history(job_id, del_files=True),
)
return
name = history["history"]["slots"][0]["name"]
else:
name = downloads["queue"]["slots"][0]["filename"]
async with task_dict_lock:
task_dict[listener.mid] = SabnzbdStatus(
listener, job_id, queued=add_to_queue
)
await onDownloadStart(job_id)
if add_to_queue:
LOGGER.info(f"Added to Queue/Download: {name} - Job_id: {job_id}")
else:
LOGGER.info(f"NzbDownload started: {name} - Job_id: {job_id}")
await listener.onDownloadStart()
if config_dict["BASE_URL"] and listener.select:
if not add_to_queue:
await client.pause_job(job_id)
SBUTTONS = bt_selection_buttons(job_id)
msg = "Your download paused. Choose files then press Done Selecting button to start downloading."
await sendMessage(listener.message, msg, SBUTTONS)
elif listener.multi <= 1:
await sendStatusMessage(listener.message)
if add_to_queue:
await event.wait()
if listener.isCancelled:
return
async with queue_dict_lock:
non_queued_dl.add(listener.mid)
async with task_dict_lock:
task_dict[listener.mid].queued = False
await client.resume_job(job_id)
LOGGER.info(
f"Start Queued Download from Sabnzbd: {name} - Job_id: {job_id}"
)
except Exception as e:
await listener.onDownloadError(f"{e}")
finally:
if nzbpath and await aiopath.exists(listener.link):
await remove(listener.link)
await client.log_out()

View File

@ -232,7 +232,7 @@ class RcloneList:
res, err, code = await cmd_exec(cmd)
if code not in [0, -9]:
if not err:
err = "Use '/shell cat rlog.txt' to see more information"
err = "Use <code>/shell cat rlog.txt</code> to see more information"
LOGGER.error(
f"While rclone listing. Path: {self.remote}{self.path}. Stderr: {err}"
)

View File

@ -124,7 +124,7 @@ class RcloneTransferHelper:
if not error and remote_type == "drive" and self._use_service_accounts:
error = "Mostly your service accounts don't have access to this drive!"
elif not error:
error = "Use '/shell cat rlog.txt' to see more information"
error = "Use <code>/shell cat rlog.txt</code> to see more information"
LOGGER.error(error)
if (
@ -221,7 +221,7 @@ class RcloneTransferHelper:
)
elif code != -9:
if not err:
err = "Use '/shell cat rlog.txt' to see more information"
err = "Use <code>/shell cat rlog.txt</code> to see more information"
LOGGER.error(
f"while getting drive link. Path: {destination}. Stderr: {err}"
)
@ -242,7 +242,7 @@ class RcloneTransferHelper:
if not error and remote_type == "drive" and self._use_service_accounts:
error = "Mostly your service accounts don't have access to this drive or RATE_LIMIT_EXCEEDED"
elif not error:
error = "Use '/shell cat rlog.txt' to see more information"
error = "Use <code>/shell cat rlog.txt</code> to see more information"
LOGGER.error(error)
if (
self._sa_number != 0
@ -352,7 +352,7 @@ class RcloneTransferHelper:
link = res
elif code != -9:
if not err:
err = "Use '/shell cat rlog.txt' to see more information"
err = "Use <code>/shell cat rlog.txt</code> to see more information"
LOGGER.error(f"while getting link. Path: {destination} | Stderr: {err}")
link = ""
if self._listener.isCancelled:
@ -400,7 +400,9 @@ class RcloneTransferHelper:
if return_code == -9:
return None, None
elif return_code != 0:
error = (await self._proc.stderr.read()).decode().strip() or "Use '/shell cat rlog.txt' to see more information"
error = (
await self._proc.stderr.read()
).decode().strip() or "Use <code>/shell cat rlog.txt</code> to see more information"
LOGGER.error(error)
await self._listener.onUploadError(error[:4000])
return None, None
@ -428,7 +430,7 @@ class RcloneTransferHelper:
return res, destination
elif code != -9:
if not err:
err = "Use '/shell cat rlog.txt' to see more information"
err = "Use <code>/shell cat rlog.txt</code> to see more information"
LOGGER.error(
f"while getting link. Path: {destination} | Stderr: {err}"
)

View File

@ -0,0 +1,97 @@
from asyncio import gather
from bot import LOGGER, get_sabnzb_client, nzb_jobs, nzb_listener_lock
from bot.helper.ext_utils.bot_utils import async_to_sync
from bot.helper.ext_utils.status_utils import (
MirrorStatus,
get_readable_file_size,
get_readable_time,
time_to_seconds,
)
async def get_download(client, nzo_id, old_info=None):
try:
res = await client.get_downloads(nzo_ids=nzo_id)
slot = res["queue"]["slots"][0]
if msg := slot["labels"]:
LOGGER.warning(msg.join(" | "))
return slot or old_info
except Exception as e:
LOGGER.error(f"{e}: Sabnzbd, while getting job info. ID: {nzo_id}")
return old_info
class SabnzbdStatus:
def __init__(self, listener, gid, queued=False, status=None):
self.client = get_sabnzb_client()
self.queued = queued
self.listener = listener
self.cstatus = status
self._gid = gid
self._info = None
async def update(self):
self._info = await get_download(self.client, self._gid, self._info)
def progress(self):
return f"{self._info['percentage']}%"
def processed_raw(self):
return (float(self._info["mb"]) - float(self._info["mbleft"])) * 1048576
def processed_bytes(self):
return get_readable_file_size(self.processed_raw())
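# The queue slot carries no per-job speed field, so speed is derived from the
# remaining bytes and the reported timeleft.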
def speed_raw(self):
try:
return int(float(self._info["mbleft"]) * 1048576) / self.eta_raw()
except:
return 0
def speed(self):
return f"{get_readable_file_size(self.speed_raw())}/s"
def name(self):
return self._info["filename"]
def size(self):
return self._info["size"]
def eta_raw(self):
return time_to_seconds(self._info["timeleft"])
def eta(self):
return get_readable_time(self.eta_raw())
def status(self):
async_to_sync(self.update)
state = self._info["status"]
if state == "Paused" and self.queued:
return MirrorStatus.STATUS_QUEUEDL
elif self.cstatus:
return self.cstatus
elif state == "Paused":
return MirrorStatus.STATUS_PAUSED
else:
return MirrorStatus.STATUS_DOWNLOADING
def task(self):
return self
def gid(self):
return self._gid
async def cancel_task(self):
self.listener.isCancelled = True
await self.update()
LOGGER.info(f"Cancelling Download: {self.name()}")
await gather(
self.listener.onDownloadError("Download stopped by user!"),
self.client.delete_job(self._gid, delete_files=True),
self.client.delete_category(f"{self.listener.mid}"),
)
await self.client.log_out()
async with nzb_listener_lock:
if self._gid in nzb_jobs:
del nzb_jobs[self._gid]

View File

@ -93,9 +93,6 @@ class QbittorrentStatus:
def hash(self):
return self._info.hash
def qbclient(self):
return self.client
async def cancel_task(self):
self.listener.isCancelled = True
await sync_to_async(self.update)

View File

@ -8,10 +8,12 @@ class _BotCommands:
self.QbMirrorCommand = [f"qbmirror{CMD_SUFFIX}", f"qm{CMD_SUFFIX}"]
self.JdMirrorCommand = [f"jdmirror{CMD_SUFFIX}", f"jm{CMD_SUFFIX}"]
self.YtdlCommand = [f"ytdl{CMD_SUFFIX}", f"y{CMD_SUFFIX}"]
self.NzbMirrorCommand = [f"nzbmirror{CMD_SUFFIX}", f"nm{CMD_SUFFIX}"]
self.LeechCommand = [f"leech{CMD_SUFFIX}", f"l{CMD_SUFFIX}"]
self.QbLeechCommand = [f"qbleech{CMD_SUFFIX}", f"ql{CMD_SUFFIX}"]
self.JdLeechCommand = [f"jdLeech{CMD_SUFFIX}", f"jl{CMD_SUFFIX}"]
self.YtdlLeechCommand = [f"ytdlleech{CMD_SUFFIX}", f"yl{CMD_SUFFIX}"]
self.NzbLeechCommand = [f"nzbleech{CMD_SUFFIX}", f"nl{CMD_SUFFIX}"]
self.CloneCommand = f"clone{CMD_SUFFIX}"
self.CountCommand = f"count{CMD_SUFFIX}"
self.DeleteCommand = f"del{CMD_SUFFIX}"
@ -37,7 +39,7 @@ class _BotCommands:
self.ClearLocalsCommand = f"clearlocals{CMD_SUFFIX}"
self.BotSetCommand = [f"bsetting{CMD_SUFFIX}", f"bs{CMD_SUFFIX}"]
self.UserSetCommand = [f"usetting{CMD_SUFFIX}", f"us{CMD_SUFFIX}"]
self.BtSelectCommand = f"btsel{CMD_SUFFIX}"
self.SelectCommand = f"sel{CMD_SUFFIX}"
self.RssCommand = f"rss{CMD_SUFFIX}"

View File

@ -33,9 +33,11 @@ from bot import (
task_dict,
qbit_options,
get_qb_client,
get_sabnzb_client,
LOGGER,
bot,
jd_downloads,
nzb_options,
)
from bot.helper.ext_utils.bot_utils import (
setInterval,
@ -64,7 +66,7 @@ START = 0
STATE = "view"
handler_dict = {}
default_values = {
"DOWNLOAD_DIR": "/usr/src/app/downloads/",
"DOWNLOAD_DIR": "/usr/src/app/Downloads/",
"LEECH_SPLIT_SIZE": MAX_SPLIT_SIZE,
"RSS_DELAY": 600,
"STATUS_UPDATE_INTERVAL": 15,
@ -81,6 +83,7 @@ async def get_buttons(key=None, edit_type=None):
buttons.ibutton("Private Files", "botset private")
buttons.ibutton("Qbit Settings", "botset qbit")
buttons.ibutton("Aria2c Settings", "botset aria")
buttons.ibutton("Sabnzbd Settings", "botset nzb")
buttons.ibutton("JDownloader Sync", "botset syncjd")
buttons.ibutton("Close", "botset close")
msg = "Bot Settings:"
@ -112,7 +115,7 @@ async def get_buttons(key=None, edit_type=None):
buttons.ibutton("Empty String", f"botset emptyaria {key}")
buttons.ibutton("Close", "botset close")
msg = (
"Send a key with value. Example: https-proxy-user:value"
"Send a key with value. Example: https-proxy-user:value. Timeout: 60 sec"
if key == "newkey"
else f"Send a valid value for {key}. Current value is '{aria2_options[key]}'. Timeout: 60 sec"
)
@ -121,6 +124,16 @@ async def get_buttons(key=None, edit_type=None):
buttons.ibutton("Empty String", f"botset emptyqbit {key}")
buttons.ibutton("Close", "botset close")
msg = f"Send a valid value for {key}. Current value is '{qbit_options[key]}'. Timeout: 60 sec"
elif edit_type == "nzbvar":
buttons.ibutton("Back", "botset nzb")
if key != "newserver":
buttons.ibutton("Default", f"botset resetnzb {key}")
buttons.ibutton("Empty String", f"botset emptynzb {key}")
buttons.ibutton("Close", "botset close")
if key == "newserver":
msg = "Send host : user : password. Example: xxx.xxx.xxx : myuser : my password. Timeout: 60 sec"
else:
msg = f"Send a valid value for {key}. Current value is '{nzb_options[key]}.\nIf the value is list then seperate them by space or ,\nExample: .exe,info or .exe .info\nTimeout: 60 sec"
elif key == "var":
for k in list(config_dict.keys())[START : 10 + START]:
buttons.ibutton(k, f"botset botvar {k}")
@ -171,6 +184,21 @@ Timeout: 60 sec"""
f"{int(x / 10)}", f"botset start qbit {x}", position="footer"
)
msg = f"Qbittorrent Options | Page: {int(START / 10)} | State: {STATE}"
elif key == "nzb":
for k in list(nzb_options.keys())[START : 10 + START]:
buttons.ibutton(k, f"botset nzbvar {k}")
if STATE == "view":
buttons.ibutton("Edit", "botset edit nzb")
else:
buttons.ibutton("View", "botset view nzb")
buttons.ibutton("Add Server", "botset nzbvar newserver")
buttons.ibutton("Back", "botset back")
buttons.ibutton("Close", "botset close")
for x in range(0, len(nzb_options), 10):
buttons.ibutton(
f"{int(x / 10)}", f"botset start nzb {x}", position="footer"
)
msg = f"Sabnzbd Options | Page: {int(START / 10)} | State: {STATE}"
button = buttons.build_menu(1) if key is None else buttons.build_menu(2)
return msg, button
@ -265,6 +293,21 @@ async def edit_variable(_, message, pre_message, key):
jdownloader.initiate()
elif key == "RSS_DELAY":
addJob()
elif key in ["USENET_HOST", "USENET_USERNAME", "USENET_PASSWORD"]:
nzb_client = get_sabnzb_client()
if key == "USENET_HOST":
items = {
"name": "main",
"displayname": value,
"host": value,
"connections": 8,
}
elif key == "USENET_USERNAME":
items = {"name": "main", "username": value}
elif key == "USENET_PASSWORD":
items = {"name": "main", "password": value}
await nzb_client.set_special_config("servers", items)
await nzb_client.log_out()
async def edit_aria(_, message, pre_message, key):
@ -314,6 +357,28 @@ async def edit_qbit(_, message, pre_message, key):
await DbManager().update_qbittorrent(key, value)
async def edit_nzb(_, message, pre_message, key):
handler_dict[message.chat.id] = False
value = message.text
nzb_client = get_sabnzb_client()
if key == "newserver":
host, username, password = [x.strip() for x in value.split(" : ", 2)]
await nzb_client.login(host, host, username, password)
else:
if value.isdigit():
value = int(value)
elif value.startswith("[") and value.endswith("]"):
value = f"{eval(value).join(',')}"
res = await nzb_client.set_config("misc", key, value)
value = res["misc"][key]
nzb_options[key] = value
await nzb_client.log_out()
await update_buttons(pre_message, "nzb")
await deleteMessage(message)
if DATABASE_URL:
await DbManager().update_nzb_config()
async def sync_jdownloader():
if not DATABASE_URL or jdownloader.device is None:
return
@ -478,7 +543,7 @@ async def edit_bot_settings(client, query):
show_alert=True,
)
await sync_jdownloader()
elif data[1] in ["var", "aria", "qbit"]:
elif data[1] in ["var", "aria", "qbit", "nzb"]:
await query.answer()
await update_buttons(message, data[1])
elif data[1] == "resetvar":
@ -554,6 +619,10 @@ async def edit_bot_settings(client, query):
"RCLONE_SERVE_PASS",
]:
await rclone_serve_booter()
elif data[2] in ["USENET_HOST", "USENET_USERNAME", "USENET_PASSWORD"]:
nzb_client = get_sabnzb_client()
await nzb_client.delete_config("servers", "main")
await nzb_client.log_out()
elif data[1] == "resetaria":
aria2_defaults = await sync_to_async(aria2.client.get_global_option)
if aria2_defaults[data[2]] == aria2_options[data[2]]:
@ -574,6 +643,15 @@ async def edit_bot_settings(client, query):
LOGGER.error(e)
if DATABASE_URL:
await DbManager().update_aria2(data[2], value)
elif data[1] == "resetnzb":
await query.answer()
nzb_client = get_sabnzb_client()
res = await nzb_client.set_config_default(data[2])
nzb_options[data[2]] = res["misc"][data[2]]
await nzb_client.log_out()
await update_buttons(message, "nzb")
if DATABASE_URL:
await DbManager().update_nzb_config()
elif data[1] == "emptyaria":
await query.answer()
aria2_options[data[2]] = ""
@ -596,6 +674,15 @@ async def edit_bot_settings(client, query):
await update_buttons(message, "qbit")
if DATABASE_URL:
await DbManager().update_qbittorrent(data[2], "")
elif data[1] == "emptynzb":
await query.answer()
nzb_client = get_sabnzb_client()
res = await nzb_client.set_config("misc", data[2], "")
nzb_options[data[2]] = res["misc"][data[2]]
await nzb_client.log_out()
await update_buttons(message, "nzb")
if DATABASE_URL:
await DbManager().update_nzb_config()
elif data[1] == "private":
await query.answer()
await update_buttons(message, data[1])
@ -640,7 +727,7 @@ async def edit_bot_settings(client, query):
await query.answer()
await update_buttons(message, data[2], data[1])
pfunc = partial(edit_qbit, pre_message=message, key=data[2])
rfunc = partial(update_buttons, message, "var")
rfunc = partial(update_buttons, message, "qbit")
await event_handler(client, query, pfunc, rfunc)
elif data[1] == "qbitvar" and STATE == "view":
value = qbit_options[data[2]]
@ -653,6 +740,23 @@ async def edit_bot_settings(client, query):
elif value == "":
value = None
await query.answer(f"{value}", show_alert=True)
elif data[1] == "nzbvar" and (STATE == "edit" or data[2] == "newserver"):
await query.answer()
await update_buttons(message, data[2], data[1])
pfunc = partial(edit_nzb, pre_message=message, key=data[2])
rfunc = partial(update_buttons, message, "nzb")
await event_handler(client, query, pfunc, rfunc)
elif data[1] == "nzbvar" and STATE == "view":
value = nzb_options[data[2]]
if len(str(value)) > 200:
await query.answer()
with BytesIO(str.encode(value)) as out_file:
out_file.name = f"{data[2]}.txt"
await sendFile(message, out_file)
return
elif value == "":
value = None
await query.answer(f"{value}", show_alert=True)
elif data[1] == "edit":
await query.answer()
globals()["STATE"] = "edit"
@ -720,7 +824,7 @@ async def load_config():
DOWNLOAD_DIR = environ.get("DOWNLOAD_DIR", "")
if len(DOWNLOAD_DIR) == 0:
DOWNLOAD_DIR = "/usr/src/app/downloads/"
DOWNLOAD_DIR = "/usr/src/app/Downloads/"
elif not DOWNLOAD_DIR.endswith("/"):
DOWNLOAD_DIR = f"{DOWNLOAD_DIR}/"
@ -768,6 +872,14 @@ async def load_config():
JD_EMAIL = ""
JD_PASS = ""
USENET_HOST = environ.get("USENET_HOST", "")
USENET_USERNAME = environ.get("USENET_USERNAME", "")
USENET_PASSWORD = environ.get("USENET_PASSWORD", "")
if len(USENET_HOST) == 0 or len(USENET_USERNAME) == 0 or len(USENET_PASSWORD) == 0:
USENET_HOST = ""
USENET_USERNAME = ""
USENET_PASSWORD = ""
FILELION_API = environ.get("FILELION_API", "")
if len(FILELION_API) == 0:
FILELION_API = ""
@ -1024,6 +1136,9 @@ async def load_config():
"USER_TRANSMISSION": USER_TRANSMISSION,
"UPSTREAM_REPO": UPSTREAM_REPO,
"UPSTREAM_BRANCH": UPSTREAM_BRANCH,
"USENET_HOST": USENET_HOST,
"USENET_USERNAME": USENET_USERNAME,
"USENET_PASSWORD": USENET_PASSWORD,
"USER_SESSION_STRING": USER_SESSION_STRING,
"USE_SERVICE_ACCOUNTS": USE_SERVICE_ACCOUNTS,
"WEB_PINCODE": WEB_PINCODE,

View File

@ -48,6 +48,7 @@ class Clone(TaskListener):
__=None,
___=None,
____=None,
_____=None,
bulk=None,
multiTag=None,
options="",

View File

@ -43,8 +43,8 @@ async def select(_, message):
return
elif len(msg) == 1:
msg = (
"Reply to an active /cmd which was used to start the qb-download or add gid along with cmd\n\n"
+ "This command mainly for selection incase you decided to select files from already added torrent. "
"Reply to an active /cmd which was used to start the download or add gid along with cmd\n\n"
+ "This command mainly for selection incase you decided to select files from already added torrent/nzb. "
+ "But you can always use /cmd with arg `s` to select files before download start."
)
await sendMessage(message, msg)
@ -64,7 +64,7 @@ async def select(_, message):
]:
await sendMessage(
message,
"Task should be in download or pause (incase message deleted by wrong) or queued (status incase you used torrent file)!",
"Task should be in download or pause (incase message deleted by wrong) or queued status (incase you have used torrent or nzb file)!",
)
return
if task.name().startswith("[METADATA]"):
@ -72,14 +72,16 @@ async def select(_, message):
return
try:
await sync_to_async(task.update)
if task.listener.isQbit:
id_ = task.hash()
if not task.queued:
id_ = task.gid()
if not task.queued:
if task.listener.isNzb:
await task.client.pause_job(id_)
elif task.listener.isQbit:
await sync_to_async(task.update)
id_ = task.hash()
await sync_to_async(task.client.torrents_pause, torrent_hashes=id_)
else:
id_ = task.gid()
if not task.queued:
else:
await sync_to_async(task.update)
try:
await sync_to_async(aria2.client.force_pause, id_)
except Exception as e:
@ -88,7 +90,7 @@ async def select(_, message):
)
task.listener.select = True
except:
await sendMessage(message, "This is not a bittorrent task!")
await sendMessage(message, "This is not a bittorrent or sabnzbd task!")
return
SBUTTONS = bt_selection_buttons(id_)
@ -111,9 +113,9 @@ async def get_confirm(_, query):
await query.answer(data[3], show_alert=True)
elif data[1] == "done":
await query.answer()
id_ = data[3]
if hasattr(task, "seeding"):
id_ = data[3]
if len(id_) > 20:
if task.listener.isQbit:
tor_info = (
await sync_to_async(task.client.torrents_info, torrent_hash=id_)
)[0]
@ -145,17 +147,18 @@ async def get_confirm(_, query):
LOGGER.error(
f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!"
)
elif task.listener.isNzb:
await task.client.resume_job(id_)
await sendStatusMessage(message)
await deleteMessage(message)
else:
await deleteMessage(message)
obj = task.task()
await obj.cancel_task()
await task.cancel_task()
bot.add_handler(
MessageHandler(
select, filters=command(BotCommands.BtSelectCommand) & CustomFilters.authorized
select, filters=command(BotCommands.SelectCommand) & CustomFilters.authorized
)
)
bot.add_handler(CallbackQueryHandler(get_confirm, filters=regex("^btsel")))
bot.add_handler(CallbackQueryHandler(get_confirm, filters=regex("^sel")))

View File

@ -48,8 +48,7 @@ async def remove_from_queue(_, message):
):
await sendMessage(message, "This task is not for you!")
return
obj = task.task()
listener = obj.listener
listener = task.listener
msg = ""
async with queue_dict_lock:
if status == "fu":

View File

@ -34,6 +34,7 @@ from bot.helper.mirror_leech_utils.download_utils.direct_link_generator import (
from bot.helper.mirror_leech_utils.download_utils.gd_download import add_gd_download
from bot.helper.mirror_leech_utils.download_utils.jd_download import add_jd_download
from bot.helper.mirror_leech_utils.download_utils.qbit_download import add_qb_torrent
from bot.helper.mirror_leech_utils.download_utils.sabnzbd_downloader import add_nzb
from bot.helper.mirror_leech_utils.download_utils.rclone_download import (
add_rclone_download,
)
@ -54,6 +55,7 @@ class Mirror(TaskListener):
isQbit=False,
isLeech=False,
isJd=False,
isNzb=False,
sameDir=None,
bulk=None,
multiTag=None,
@ -73,6 +75,7 @@ class Mirror(TaskListener):
self.isQbit = isQbit
self.isLeech = isLeech
self.isJd = isJd
self.isNzb = isNzb
@new_task
async def newEvent(self):
@ -249,7 +252,7 @@ class Mirror(TaskListener):
reply_to = None
elif reply_to.document and (
file_.mime_type == "application/x-bittorrent"
or file_.file_name.endswith((".torrent", ".dlc"))
or file_.file_name.endswith((".torrent", ".dlc", ".nzb"))
):
self.link = await reply_to.download()
file_ = None
@ -285,6 +288,7 @@ class Mirror(TaskListener):
if (
not self.isJd
and not self.isNzb
and not self.isQbit
and not is_magnet(self.link)
and not is_rclone_path(self.link)
@ -328,6 +332,8 @@ class Mirror(TaskListener):
await remove(self.link)
elif self.isQbit:
await add_qb_torrent(self, path, ratio, seed_time)
elif self.isNzb:
await add_nzb(self, path)
elif is_rclone_path(self.link):
await add_rclone_download(self, f"{path}/")
elif is_gdrive_link(self.link) or is_gdrive_id(self.link):
@ -351,6 +357,14 @@ async def qb_mirror(client, message):
Mirror(client, message, isQbit=True).newEvent()
async def jd_mirror(client, message):
Mirror(client, message, isJd=True).newEvent()
async def nzb_mirror(client, message):
Mirror(client, message, isNzb=True).newEvent()
async def leech(client, message):
Mirror(client, message, isLeech=True).newEvent()
@ -359,14 +373,14 @@ async def qb_leech(client, message):
Mirror(client, message, isQbit=True, isLeech=True).newEvent()
async def jd_mirror(client, message):
Mirror(client, message, isJd=True).newEvent()
async def jd_leech(client, message):
Mirror(client, message, isLeech=True, isJd=True).newEvent()
async def nzb_leech(client, message):
Mirror(client, message, isLeech=True, isNzb=True).newEvent()
bot.add_handler(
MessageHandler(
mirror, filters=command(BotCommands.MirrorCommand) & CustomFilters.authorized
@ -378,6 +392,18 @@ bot.add_handler(
filters=command(BotCommands.QbMirrorCommand) & CustomFilters.authorized,
)
)
bot.add_handler(
MessageHandler(
jd_mirror,
filters=command(BotCommands.JdMirrorCommand) & CustomFilters.authorized,
)
)
bot.add_handler(
MessageHandler(
nzb_mirror,
filters=command(BotCommands.NzbMirrorCommand) & CustomFilters.authorized,
)
)
bot.add_handler(
MessageHandler(
leech, filters=command(BotCommands.LeechCommand) & CustomFilters.authorized
@ -390,12 +416,12 @@ bot.add_handler(
)
bot.add_handler(
MessageHandler(
jd_mirror,
filters=command(BotCommands.JdMirrorCommand) & CustomFilters.authorized,
jd_leech, filters=command(BotCommands.JdLeechCommand) & CustomFilters.authorized
)
)
bot.add_handler(
MessageHandler(
jd_leech, filters=command(BotCommands.JdLeechCommand) & CustomFilters.authorized
nzb_leech,
filters=command(BotCommands.NzbLeechCommand) & CustomFilters.authorized,
)
)

View File

@ -19,7 +19,7 @@ TELEGRAPH_LIMIT = 300
async def initiate_search_tools():
qbclient = await sync_to_async(get_qb_client)
qbclient = get_qb_client()
qb_plugins = await sync_to_async(qbclient.search_plugins)
if SEARCH_PLUGINS := config_dict["SEARCH_PLUGINS"]:
globals()["PLUGINS"] = []
@ -100,7 +100,7 @@ async def _search(key, site, message, method):
return
else:
LOGGER.info(f"PLUGINS Searching: {key} from {site}")
client = await sync_to_async(get_qb_client)
client = get_qb_client()
search = await sync_to_async(
client.search_start, pattern=key, plugins=site, category="all"
)
@ -224,7 +224,7 @@ def _api_buttons(user_id, method):
async def _plugin_buttons(user_id):
buttons = ButtonMaker()
if not PLUGINS:
qbclient = await sync_to_async(get_qb_client)
qbclient = get_qb_client()
pl = await sync_to_async(qbclient.search_plugins)
for name in pl:
PLUGINS.append(name["name"])

View File

@ -262,6 +262,7 @@ class YtDlp(TaskListener):
_=None,
isLeech=False,
__=None,
___=None,
sameDir=None,
bulk=None,
multiTag=None,

View File

@ -8,7 +8,7 @@ TELEGRAM_HASH = "" # Require restart after changing it
# OPTIONAL CONFIG
USER_SESSION_STRING = "" # Require restart after changing it while bot running
DATABASE_URL = "" # Require restart after changing it while bot running
DOWNLOAD_DIR = "/usr/src/app/downloads/" # Require restart after changing it while bot running
DOWNLOAD_DIR = "/usr/src/app/Downloads/" # Require restart after changing it while bot running
CMD_SUFFIX = "" # Require restart after changing it while bot running
AUTHORIZED_CHATS = "" # Require restart after changing it while bot running
SUDO_USERS = "" # Require restart after changing it while bot running
@ -37,6 +37,10 @@ RCLONE_SERVE_PASS = ""
# JDownloader
JD_EMAIL = ""
JD_PASS = ""
# Sabnzbd
USENET_HOST = ""
USENET_USERNAME = ""
USENET_PASSWORD = ""
# Update
UPSTREAM_REPO = ""
UPSTREAM_BRANCH = ""

sabnzbd/SABnzbd.ini Normal file
View File

@ -0,0 +1,287 @@
__version__ = 19
__encoding__ = utf-8
[misc]
helpful_warnings = 1
queue_complete = ""
queue_complete_pers = 0
bandwidth_perc = 100
refresh_rate = 0
interface_settings = ""
queue_limit = 20
config_lock = 0
fixed_ports = 1
notified_new_skin = 0
direct_unpack_tested = 0
sorters_converted = 1
check_new_rel = 1
auto_browser = 1
language = en
enable_https_verification = 1
host = 127.0.0.1
port = 8070
https_port = ""
username = ""
password = ""
bandwidth_max = ""
cache_limit = 1G
web_dir = Glitter
web_color = Auto
https_cert = server.cert
https_key = server.key
https_chain = ""
enable_https = 0
inet_exposure = 0
api_key = mltb
nzb_key = ""
socks5_proxy_url = ""
permissions = ""
download_dir = /usr/src/app/Downloads/incomplete
download_free = ""
complete_dir = /usr/src/app/Downloads/complete
complete_free = ""
fulldisk_autoresume = 0
script_dir = ""
nzb_backup_dir = ""
admin_dir = admin
backup_dir = ""
dirscan_dir = ""
dirscan_speed = 5
password_file = ""
log_dir = logs
max_art_tries = 3
top_only = 0
sfv_check = 1
script_can_fail = 0
enable_recursive = 1
flat_unpack = 0
par_option = ""
pre_check = 0
nice = ""
win_process_prio = 3
ionice = ""
fail_hopeless_jobs = 1
fast_fail = 1
auto_disconnect = 1
pre_script = None
end_queue_script = None
no_dupes = 0
no_series_dupes = 0
no_smart_dupes = 0
dupes_propercheck = 1
pause_on_pwrar = 1
ignore_samples = 0
deobfuscate_final_filenames = 1
auto_sort = ""
direct_unpack = 0
propagation_delay = 0
folder_rename = 1
replace_spaces = 0
replace_underscores = 0
replace_dots = 0
safe_postproc = 1
pause_on_post_processing = 0
enable_all_par = 0
sanitize_safe = 0
cleanup_list = ,
unwanted_extensions = ,
action_on_unwanted_extensions = 0
unwanted_extensions_mode = 0
new_nzb_on_failure = 0
history_retention = 0
quota_size = ""
quota_day = ""
quota_resume = 0
quota_period = m
enable_tv_sorting = 0
tv_sort_string = ""
tv_categories = ,
enable_movie_sorting = 0
movie_sort_string = ""
movie_sort_extra = -cd%1
movie_categories = ,
enable_date_sorting = 0
date_sort_string = ""
date_categories = ,
schedlines = ,
rss_rate = 60
ampm = 0
start_paused = 0
preserve_paused_state = 0
enable_par_cleanup = 1
process_unpacked_par2 = 1
enable_multipar = 1
enable_unrar = 1
enable_7zip = 1
enable_filejoin = 1
enable_tsjoin = 1
overwrite_files = 0
ignore_unrar_dates = 0
backup_for_duplicates = 1
empty_postproc = 0
wait_for_dfolder = 0
rss_filenames = 0
api_logging = 1
html_login = 1
warn_dupl_jobs = 0
keep_awake = 1
tray_icon = 1
allow_incomplete_nzb = 0
enable_broadcast = 1
ipv6_hosting = 0
api_warnings = 1
no_penalties = 0
x_frame_options = 1
allow_old_ssl_tls = 0
enable_season_sorting = 1
verify_xff_header = 0
rss_odd_titles = nzbindex.nl/, nzbindex.com/, nzbclub.com/
quick_check_ext_ignore = nfo, sfv, srr
req_completion_rate = 100.2
selftest_host = self-test.sabnzbd.org
movie_rename_limit = 100M
episode_rename_limit = 20M
size_limit = 0
direct_unpack_threads = 3
history_limit = 10
wait_ext_drive = 5
max_foldername_length = 246
nomedia_marker = ""
ipv6_servers = 1
url_base = /sabnzbd
host_whitelist = ,
local_ranges = ,
max_url_retries = 10
downloader_sleep_time = 10
receive_threads = 2
switchinterval = 0.005
ssdp_broadcast_interval = 15
ext_rename_ignore = ,
email_server = ""
email_to = ,
email_from = ""
email_account = ""
email_pwd = ""
email_endjob = 0
email_full = 0
email_dir = ""
email_rss = 0
email_cats = *,
[logging]
log_level = 1
max_log_size = 5242880
log_backups = 5
[ncenter]
ncenter_enable = 0
ncenter_cats = *,
ncenter_prio_startup = 0
ncenter_prio_download = 0
ncenter_prio_pause_resume = 0
ncenter_prio_pp = 0
ncenter_prio_complete = 1
ncenter_prio_failed = 1
ncenter_prio_disk_full = 1
ncenter_prio_new_login = 0
ncenter_prio_warning = 0
ncenter_prio_error = 0
ncenter_prio_queue_done = 0
ncenter_prio_other = 1
[acenter]
acenter_enable = 0
acenter_cats = *,
acenter_prio_startup = 0
acenter_prio_download = 0
acenter_prio_pause_resume = 0
acenter_prio_pp = 0
acenter_prio_complete = 1
acenter_prio_failed = 1
acenter_prio_disk_full = 1
acenter_prio_new_login = 0
acenter_prio_warning = 0
acenter_prio_error = 0
acenter_prio_queue_done = 0
acenter_prio_other = 1
[ntfosd]
ntfosd_enable = 1
ntfosd_cats = *,
ntfosd_prio_startup = 0
ntfosd_prio_download = 0
ntfosd_prio_pause_resume = 0
ntfosd_prio_pp = 0
ntfosd_prio_complete = 1
ntfosd_prio_failed = 1
ntfosd_prio_disk_full = 1
ntfosd_prio_new_login = 0
ntfosd_prio_warning = 0
ntfosd_prio_error = 0
ntfosd_prio_queue_done = 0
ntfosd_prio_other = 1
[prowl]
prowl_enable = 0
prowl_cats = *,
prowl_apikey = ""
prowl_prio_startup = -3
prowl_prio_download = -3
prowl_prio_pause_resume = -3
prowl_prio_pp = -3
prowl_prio_complete = 0
prowl_prio_failed = 1
prowl_prio_disk_full = 1
prowl_prio_new_login = -3
prowl_prio_warning = -3
prowl_prio_error = -3
prowl_prio_queue_done = -3
prowl_prio_other = 0
[pushover]
pushover_token = ""
pushover_userkey = ""
pushover_device = ""
pushover_emergency_expire = 3600
pushover_emergency_retry = 60
pushover_enable = 0
pushover_cats = *,
pushover_prio_startup = -3
pushover_prio_download = -2
pushover_prio_pause_resume = -2
pushover_prio_pp = -3
pushover_prio_complete = -1
pushover_prio_failed = -1
pushover_prio_disk_full = 1
pushover_prio_new_login = -3
pushover_prio_warning = 1
pushover_prio_error = 1
pushover_prio_queue_done = -3
pushover_prio_other = -1
[pushbullet]
pushbullet_enable = 0
pushbullet_cats = *,
pushbullet_apikey = ""
pushbullet_device = ""
pushbullet_prio_startup = 0
pushbullet_prio_download = 0
pushbullet_prio_pause_resume = 0
pushbullet_prio_pp = 0
pushbullet_prio_complete = 1
pushbullet_prio_failed = 1
pushbullet_prio_disk_full = 1
pushbullet_prio_new_login = 0
pushbullet_prio_warning = 0
pushbullet_prio_error = 0
pushbullet_prio_queue_done = 0
pushbullet_prio_other = 1
[nscript]
nscript_enable = 0
nscript_cats = *,
nscript_script = ""
nscript_parameters = ""
nscript_prio_startup = 0
nscript_prio_download = 0
nscript_prio_pause_resume = 0
nscript_prio_pp = 0
nscript_prio_complete = 1
nscript_prio_failed = 1
nscript_prio_disk_full = 1
nscript_prio_new_login = 0
nscript_prio_warning = 0
nscript_prio_error = 0
nscript_prio_queue_done = 0
nscript_prio_other = 1

sabnzbdapi/__init__.py Normal file
View File

@ -0,0 +1 @@
from sabnzbdapi.requests import sabnzbdClient

View File

@ -0,0 +1,23 @@
class SubFunctions:
def __init__(self):
pass
async def login(self, name: str, host: str, username: str, password: str):
return await self.set_special_config(
"servers",
{
"name": name,
"displayname": host,
"host": host,
"connections": 8,
"username": username,
"password": password,
},
)
async def create_category(self, name: str, dir: str):
return await self.set_special_config("categories", {"name": name, "dir": dir})
async def delete_category(self, name: str):
return await self.delete_config("categories", name)

sabnzbdapi/exception.py Normal file
View File

@ -0,0 +1,18 @@
from httpx import RequestError, DecodingError
class APIError(Exception):
"""Base error for all exceptions from this Client."""
class APIConnectionError(RequestError, APIError):
"""Base class for all communications errors including HTTP errors."""
class LoginFailed(DecodingError, APIConnectionError):
"""This can technically be raised with any request since log in may be attempted for
any request and could fail."""
class NotLoggedIn(APIConnectionError):
"""Raised when login is not successful."""

sabnzbdapi/job_functions.py Normal file
View File

@ -0,0 +1,510 @@
from sabnzbdapi.bound_methods import SubFunctions
class JobFunctions(SubFunctions):
def __init__(self):
pass
async def add_uri(
self,
url: str = "",
file: str = "",
nzbname: str = "",
password: str = "",
cat: str = "*",
script: list = None,
priority: int = 0,
pp: int = 1,
):
"""return {"status": True, "nzo_ids": ["SABnzbd_nzo_kyt1f0"]}"""
if file:
name = file
mode = "addlocalfile"
else:
name = url
mode = "addurl"
return await self.call(
{
"mode": mode,
"name": name,
"nzbname": nzbname,
"password": password,
"cat": cat,
"script": script,
"priority": priority,
"pp": pp,
}
)
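# Hypothetical usage sketch (names and URL below are placeholders, not part of this PR):
#   client = sabnzbdClient(host="http://localhost", api_key="mltb", port="8070")
#   res = await client.add_uri(url="https://example.org/some.nzb", nzbname="some", cat="mycat")
#   queue = await client.get_downloads(nzo_ids=res["nzo_ids"][0])
#   await client.log_out()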
async def get_downloads(
self,
start: int | None = None,
limit: int | None = None,
search: str | None = None,
category: str | list[str] | None = None,
priority: int | list[str] | None = None,
status: str | list[str] | None = None,
nzo_ids: str | list[str] | None = None,
):
"""return {
"queue": {
"status": "Downloading",
"speedlimit": "9",
"speedlimit_abs": "4718592.0",
"paused": false,
"noofslots_total": 2,
"noofslots": 2,
"limit": 10,
"start": 0,
"timeleft": "0:16:44",
"speed": "1.3 M",
"kbpersec": "1296.02",
"size": "1.2 GB",
"sizeleft": "1.2 GB",
"mb": "1277.65",
"mbleft": "1271.58",
"slots": [
{
"status": "Downloading",
"index": 0,
"password": "",
"avg_age": "2895d",
"script": "None",
"direct_unpack": "10/30",
"mb": "1277.65",
"mbleft": "1271.59",
"mbmissing": "0.0",
"size": "1.2 GB",
"sizeleft": "1.2 GB",
"filename": "TV.Show.S04E11.720p.HDTV.x264",
"labels": [],
"priority": "Normal",
"cat": "tv",
"timeleft": "0:16:44",
"percentage": "0",
"nzo_id": "SABnzbd_nzo_p86tgx",
"unpackopts": "3"
},
{
"status": "Paused",
"index": 1,
"password": "",
"avg_age": "2895d",
"script": "None",
"direct_unpack": null,
"mb": "1277.76",
"mbleft": "1277.76",
"mbmissing": "0.0",
"size": "1.2 GB",
"sizeleft": "1.2 GB",
"filename": "TV.Show.S04E12.720p.HDTV.x264",
"labels": [
"TOO LARGE",
"DUPLICATE"
],
"priority": "Normal",
"cat": "tv",
"timeleft": "0:00:00",
"percentage": "0",
"nzo_id": "SABnzbd_nzo_ksfai6",
"unpackopts": "3"
}
],
"diskspace1": "161.16",
"diskspace2": "161.16",
"diskspacetotal1": "465.21",
"diskspacetotal2": "465.21",
"diskspace1_norm": "161.2 G",
"diskspace2_norm": "161.2 G",
"have_warnings": "0",
"pause_int": "0",
"left_quota": "0 ",
"version": "3.x.x",
"finish": 2,
"cache_art": "16",
"cache_size": "6 MB",
"finishaction": null,
"paused_all": false,
"quota": "0 ",
"have_quota": false,
}
}"""
if nzo_ids:
nzo_ids = nzo_ids if isinstance(nzo_ids, str) else ",".join(nzo_ids)
if status:
status = status if isinstance(status, str) else ",".join(status)
if category:
category = category if isinstance(category, str) else ",".join(category)
        if priority is not None and not isinstance(priority, (str, int)):
            priority = ",".join(map(str, priority))
return await self.call(
{
"mode": "queue",
"start": start,
"limit": limit,
"search": search,
"category": category,
"priority": priority,
"status": status,
"nzo_ids": nzo_ids,
},
)
async def pause_job(self, nzo_id: str):
"""return {"status": True, "nzo_ids": ["all effected ids"]}"""
return await self.call({"mode": "queue", "name": "pause", "value": nzo_id})
async def resume_job(self, nzo_id: str):
"""return {"status": True, "nzo_ids": ["all effected ids"]}"""
return await self.call({"mode": "queue", "name": "resume", "value": nzo_id})
async def delete_job(self, nzo_id: str | list[str], delete_files: bool = False):
"""return {"status": True, "nzo_ids": ["all effected ids"]}"""
return await self.call(
{
"mode": "queue",
"name": "delete",
"value": nzo_id if isinstance(nzo_id, str) else ",".join(nzo_id),
"del_files": 1 if delete_files else 0,
}
)
async def pause_all(self):
"""return {"status": True}"""
return await self.call({"mode": "pause"})
async def resume_all(self):
"""return {"status": True}"""
return await self.call({"mode": "resume"})
async def purge_all(self, delete_files: bool = False):
"""return {"status": True, "nzo_ids": ["all effected ids"]}"""
return await self.call(
{"mode": "queue", "name": "purge", "del_files": 1 if delete_files else 0}
)
async def get_files(self, nzo_id: str):
"""
return {
"files": [
{
"status": "finished",
"mbleft": "0.00",
"mb": "0.05",
"age": "25d",
"bytes": "52161.00",
"filename": "93a4ec7c37752640deab48dabb46b164.par2",
"nzf_id": "SABnzbd_nzf_1lk0ij",
},
...,
]
}
"""
return await self.call({"mode": "get_files", "value": nzo_id})
async def remove_file(self, nzo_id: str, file_ids: str | list[str]):
return await self.call(
{
"mode": "queue",
"name": "delete_nzf",
"value": nzo_id,
"value2": file_ids if isinstance(file_ids, str) else ",".join(file_ids),
}
        )  # returns the nzf_ids of the removed files; exact response format not yet confirmed
async def get_history(
self,
start: int | None = None,
limit: int | None = None,
search: str | None = None,
category: str | list[str] | None = None,
archive: int | None = None,
status: str | list[str] | None = None,
nzo_ids: str | list[str] | None = None,
failed_only: bool = False,
last_history_update: int | None = None,
):
"""{
"history": {
"noofslots": 220,
"ppslots": 1,
"day_size": "1.9 G",
"week_size": "30.4 G",
"month_size": "167.3 G",
"total_size": "678.1 G",
"last_history_update": 1469210913,
"slots": [
{
"action_line": "",
"duplicate_key": "TV.Show/4/2",
"meta": null,
"fail_message": "",
"loaded": false,
"size": "2.3 GB",
"category": "tv",
"pp": "D",
"retry": 0,
"script": "None",
"nzb_name": "TV.Show.S04E02.720p.BluRay.x264-xHD.nzb",
"download_time": 64,
"storage": "C:\\Users\\xxx\\Videos\\Complete\\TV.Show.S04E02.720p.BluRay.x264-xHD",
"has_rating": false,
"status": "Completed",
"script_line": "",
"completed": 1469172988,
"nzo_id": "SABnzbd_nzo_sdkoun",
"downloaded": 2436906376,
"report": "",
"password": "",
"path": "\\\\?\\C:\\SABnzbd\\TV.Show.S04E02.720p.BluRay.x264-xHD",
"postproc_time": 40,
"name": "TV.Show.S04E02.720p.BluRay.x264-xHD",
"url": "TV.Show.S04E02.720p.BluRay.x264-xHD.nzb",
"md5sum": "d2c16aeecbc1b1921d04422850e93013",
"archive": false,
"bytes": 2436906376,
"url_info": "",
"stage_log": [
{
"name": "Source",
"actions": [
"TV.Show.S04E02.720p.BluRay.x264-xHD.nzb"
]
},
{
"name": "Download",
"actions": [
"Downloaded in 1 min 4 seconds at an average of 36.2 MB/s<br/>Age: 550d<br/>10 articles were malformed"
]
},
{
"name": "Servers",
"actions": [
"Frugal=2.3 GB"
]
},
{
"name": "Repair",
"actions": [
"[pA72r5Ac6lW3bmpd20T7Hj1Zg2bymUsINBB50skrI] Repaired in 19 seconds"
]
},
{
"name": "Unpack",
"actions": [
"[pA72r5Ac6lW3bmpd20T7Hj1Zg2bymUsINBB50skrI] Unpacked 1 files/folders in 6 seconds"
]
}
]
},
{
"action_line": "",
"duplicate_key": "TV.Show/4/13",
"meta": null,
"fail_message": "",
"loaded": false,
"size": "2.3 GB",
"category": "tv",
"pp": "D",
"retry": 0,
"script": "None",
"nzb_name": "TV.Show.S04E13.720p.BluRay.x264-xHD.nzb",
"download_time": 60,
"storage": "C:\\Users\\xxx\\Videos\\Complete\\TV.Show.S04E13.720p.BluRay.x264-xHD",
"has_rating": false,
"status": "Completed",
"script_line": "",
"completed": 1469172947,
"nzo_id": "SABnzbd_nzo_gqhp63",
"downloaded": 2491255137,
"report": "",
"password": "",
"path": "\\\\?\\C:\\SABnzbd\\TV.Show.S04E13.720p.BluRay.x264-xHD",
"postproc_time": 82,
"name": "TV.Show.S04E13.720p.BluRay.x264-xHD",
"url": "TV.Show.S04E13.720p.BluRay.x264-xHD.nzb",
"md5sum": "85baf55ec0de0dc732c2af6537c5c01b",
"archive": true,
"bytes": 2491255137,
"url_info": "",
"stage_log": [
{
"name": "Source",
"actions": [
"TV.Show.S04E13.720p.BluRay.x264-xHD.nzb"
]
},
{
"name": "Download",
"actions": [
"Downloaded in 1 min at an average of 39.4 MB/s<br/>Age: 558d<br/>15 articles were malformed"
]
},
{
"name": "Servers",
"actions": [
"Frugal=2.3 GB"
]
},
{
"name": "Repair",
"actions": [
"[m0vklMEMKIT5L5XH9z5YTmuquoitCQ3F5LISTLFjT] Repaired in 47 seconds"
]
},
{
"name": "Unpack",
"actions": [
"[m0vklMEMKIT5L5XH9z5YTmuquoitCQ3F5LISTLFjT] Unpacked 1 files/folders in 6 seconds"
]
}
]
}
]
}
}"""
if nzo_ids:
nzo_ids = nzo_ids if isinstance(nzo_ids, str) else ",".join(nzo_ids)
if status:
status = status if isinstance(status, str) else ",".join(status)
if category:
category = category if isinstance(category, str) else ",".join(category)
return await self.call(
{
"mode": "history",
"start": start,
"limit": limit,
"archive": archive,
"search": search,
"category": category,
"status": status,
"nzo_ids": nzo_ids,
"failed_only": failed_only,
"last_history_update": last_history_update,
},
)
async def retry_item(self, nzo_id: str, password: str = ""):
"""return {"status": True}"""
return await self.call({"mode": "retry", "value": nzo_id, "password": password})
async def retry_all(self):
"""return {"status": True}"""
return await self.call({"mode": "retry_all"})
async def delete_history(
self, nzo_ids: str | list[str], archive: int = 0, del_files: int = 0
):
"""return {"status": True}"""
return await self.call(
{
"mode": "history",
"value": nzo_ids if isinstance(nzo_ids, str) else ",".join(nzo_ids),
"archive": archive,
"del_files": del_files,
}
)
async def change_job_pp(self, nzo_id: str, pp: int):
"""return {"status": True}"""
return await self.call({"mode": "change_opts", "value": nzo_id, "value2": pp})
async def set_speedlimit(self, limit: str | int):
"""return {"status": True}"""
return await self.call({"mode": "config", "name": "speedlimit", "value": limit})
async def delete_config(self, section: str, keyword: str):
"""return {"status": True}"""
return await self.call(
{"mode": "del_config", "section": section, "keyword": keyword}
)
async def set_config_default(self, keyword: str | list[str]):
"""return {"status": True}"""
return await self.call({"mode": "set_config_default", "keyword": keyword})
    async def get_config(self, section: str | None = None, keyword: str | None = None):
        """return the configuration as a dict"""
return await self.call(
{"mode": "get_config", "section": section, "keyword": keyword}
)
async def set_config(self, section: str, keyword: str, value: str):
"""Returns the new setting when saved successfully"""
return await self.call(
{
"mode": "set_config",
"section": section,
"keyword": keyword,
"value": value,
}
)
async def set_special_config(self, section: str, items: dict):
"""Returns the new setting when saved successfully"""
return await self.call(
{
"mode": "set_config",
"section": section,
**items,
}
)
async def server_stats(self):
"""return {
"day": 2352634799,
"week": 32934490677,
"month": 179983557488,
"total": 728426161290,
"servers": {
"eunews.server.com": {
"week": 19783288936,
"total": 163741252273,
"day": 2352634799,
"month": 90478917031,
"daily": {
"2017-01-28": 1234,
"2017-01-29": 4567
},
"articles_tried": 929299,
"articles_success": 8299
},
"News.server.net": {
"week": 13151201741,
"total": 165783396295,
"day": 0,
"month": 89499300889,
"daily": {
"2017-01-28": 1234,
"2017-01-29": 4567
},
"articles_tried": 520400,
"articles_success": 78881
}
}
}"""
return await self.call({"mode": "server_stats"})
async def version(self):
"""return {'version': '4.2.2'}"""
return await self.call({"mode": "version"})
async def restart(self):
"""return {"status": True}"""
return await self.call({"mode": "restart"})
async def restart_repair(self):
"""return {"status": True}"""
return await self.call({"mode": "restart_repair"})
async def shutdown(self):
"""return {"status": True}"""
return await self.call({"mode": "shutdown"})

101 sabnzbdapi/requests.py Normal file
View File

@ -0,0 +1,101 @@
from httpx import AsyncClient, AsyncHTTPTransport, DecodingError, Response
from urllib3 import disable_warnings
from urllib3.exceptions import InsecureRequestWarning
from functools import wraps

from .job_functions import JobFunctions


class sabnzbdSession(AsyncClient):
@wraps(AsyncClient.request)
async def request(self, method: str, url: str, **kwargs) -> Response:
kwargs.setdefault("timeout", 15.1)
kwargs.setdefault("follow_redirects", True)
data = kwargs.get("data") or {}
is_data = any(x is not None for x in data.values())
if method.lower() == "post" and not is_data:
kwargs.setdefault("headers", {}).update({"Content-Length": "0"})
return await super().request(method, url, **kwargs)
class sabnzbdClient(JobFunctions):
LOGGED_IN = False
def __init__(
self,
host: str,
api_key: str,
port: str = "8070",
VERIFY_CERTIFICATE: bool = False,
RETRIES: int = 3,
HTTPX_REQUETS_ARGS: dict = None,
):
if HTTPX_REQUETS_ARGS is None:
HTTPX_REQUETS_ARGS = {}
self._base_url = f"{host.rstrip('/')}:{port}/sabnzbd/api"
self._default_params = {"apikey": api_key, "output": "json"}
self._VERIFY_CERTIFICATE = VERIFY_CERTIFICATE
self._RETRIES = RETRIES
self._HTTPX_REQUETS_ARGS = HTTPX_REQUETS_ARGS
self._http_session = None
if not self._VERIFY_CERTIFICATE:
disable_warnings(InsecureRequestWarning)
super().__init__()
def _session(self):
if self._http_session is not None:
return self._http_session
        transport = AsyncHTTPTransport(
            retries=self._RETRIES, verify=self._VERIFY_CERTIFICATE
        )
        # certificate verification is configured on the transport above;
        # httpx clients do not expose a settable `verify` attribute
        self._http_session = sabnzbdSession(transport=transport)
        return self._http_session
async def call(
self,
params: dict = None,
api_method: str = "GET",
requests_args: dict = None,
**kwargs,
    ):
        if params is None:
            params = {}
        if requests_args is None:
            requests_args = {}
session = self._session()
params |= kwargs
requests_kwargs = {**self._HTTPX_REQUETS_ARGS, **requests_args}
retries = 3
for retry_count in range(retries):
try:
res = await session.request(
method=api_method,
url=self._base_url,
params={**self._default_params, **params},
**requests_kwargs,
)
response = res.json()
            except ValueError as e:
                # Response.json() raises a ValueError (JSONDecodeError) on invalid JSON
                raise DecodingError(f"Failed to decode response!: {res.text}") from e
except Exception:
if retry_count >= (retries - 1):
raise
return response
async def check_login(self):
res = await self.get_config("servers")
if res["config"]:
self.LOGGED_IN = True
return True
else:
return False
async def log_out(self):
if self._http_session is not None:
await self._http_session.aclose()
self._http_session = None
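
A minimal end-to-end sketch of the client defined above, assuming a local SABnzbd instance and the placeholder API key used elsewhere in this commit; every helper ultimately goes through `call()`, which merges the default `apikey`/`output=json` parameters into a request against `<host>:<port>/sabnzbd/api`.

```python
from asyncio import run

from sabnzbdapi import sabnzbdClient


async def main():
    client = sabnzbdClient(
        host="http://localhost",
        api_key="mltb",  # placeholder API key
        port="8070",
        VERIFY_CERTIFICATE=False,
    )
    # check_login() fetches the "servers" config section and sets LOGGED_IN
    if await client.check_login():
        info = await client.version()        # e.g. {"version": "4.2.2"}
        stats = await client.server_stats()  # per-server byte counters
        print("SABnzbd", info["version"], "-", stats["day"], "bytes today")
    await client.log_out()  # closes the underlying httpx session


run(main())
```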

View File

@ -4,7 +4,7 @@ from re import findall as re_findall
DOWNLOAD_DIR = environ.get("DOWNLOAD_DIR", "")
if len(DOWNLOAD_DIR) == 0:
DOWNLOAD_DIR = "/usr/src/app/downloads/"
DOWNLOAD_DIR = "/usr/src/app/Downloads/"
elif not DOWNLOAD_DIR.endswith("/"):
DOWNLOAD_DIR += "/"
@ -47,9 +47,9 @@ def get_folders(path):
return fs.split("/")
def make_tree(res, aria2=False):
parent = TorNode("Torrent")
if not aria2:
def make_tree(res, tool=False):
if tool == "qbit":
parent = TorNode("Torrent")
for i in res:
folders = qb_get_folders(i.name)
if len(folders) > 1:
@ -84,7 +84,8 @@ def make_tree(res, aria2=False):
file_id=i.id,
progress=round(i.progress * 100, 5),
)
else:
elif tool == "aria":
parent = TorNode("Torrent")
for i in res:
folders = get_folders(i["path"])
priority = 1
@ -126,6 +127,24 @@ def make_tree(res, aria2=False):
(int(i["completedLength"]) / int(i["length"])) * 100, 5
),
)
else:
parent = TorNode("Torrent")
priority = 1
for i in res["files"]:
TorNode(
i["filename"],
is_file=True,
parent=parent,
size=float(i["mb"]) * 1024,
priority=priority,
file_id=i["nzf_id"],
progress=round(
((float(i["mb"]) - float(i["mbleft"])) / float(i["mb"])) * 100,
5,
),
)
return create_list(parent, ["", 0])

View File

@ -3,6 +3,8 @@ from flask import Flask, request
from logging import getLogger, FileHandler, StreamHandler, INFO, basicConfig
from qbittorrentapi import NotFound404Error, Client as qbClient
from time import sleep
from sabnzbdapi import sabnzbdClient
from asyncio import run
from web.nodes import make_tree
@ -713,14 +715,19 @@ def list_torrent_contents(id_):
if request.args["pin_code"] != pincode:
return "<h1>Incorrect pin code</h1>"
if len(id_) > 20:
if id_.startswith("SABnzbd_nzo"):
client = sabnzbdClient(host="http://localhost", api_key="mltb", port="8070")
res = run(client.get_files(id_))
cont = make_tree(res, "nzb")
run(client.log_out())
elif len(id_) > 20:
client = qbClient(host="localhost", port="8090")
res = client.torrents_files(torrent_hash=id_)
cont = make_tree(res)
cont = make_tree(res, "qbit")
client.auth_log_out()
else:
res = aria2.client.get_files(id_)
cont = make_tree(res, True)
cont = make_tree(res, "aria")
return page.replace("{My_content}", cont[0]).replace(
"{form_url}", f"/app/files/{id_}?pin_code={pincode}"
)
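
Outside Flask, the SABnzbd file-selection round trip added here reduces to the calls below; the endpoint values mirror the hard-coded ones used by the web server, while the `nzo_id`/`nzf_id` values are purely illustrative and the snippet assumes it runs inside this repository.

```python
from asyncio import run

from sabnzbdapi import sabnzbdClient
from web.nodes import make_tree


async def select_files(nzo_id: str, unchecked_nzf_ids: list[str]) -> str:
    client = sabnzbdClient(host="http://localhost", api_key="mltb", port="8070")
    files = await client.get_files(nzo_id)  # {"files": [...]} for the job
    html = make_tree(files, "nzb")[0]       # rendered selection tree
    if unchecked_nzf_ids:
        await client.remove_file(nzo_id, unchecked_nzf_ids)
    await client.log_out()
    return html


# illustrative ids taken from the sample payloads above
run(select_files("SABnzbd_nzo_p86tgx", ["SABnzbd_nzf_1lk0ij"]))
```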
@ -730,10 +737,21 @@ def list_torrent_contents(id_):
def set_priority(id_):
data = dict(request.form)
resume = ""
if len(id_) > 20:
pause = ""
if id_.startswith("SABnzbd_nzo"):
client = sabnzbdClient(host="http://localhost", api_key="mltb", port="8070")
to_remove = []
for i, value in data.items():
if "filenode" in i and value != "on":
node_no = i.split("_")[-1]
to_remove.append(node_no)
run(client.remove_file(id_, to_remove))
LOGGER.info(f"Verified! nzo_id: {id_}")
run(client.log_out())
elif len(id_) > 20:
resume = ""
pause = ""
for i, value in data.items():
if "filenode" in i:
node_no = i.split("_")[-1]
@ -765,6 +783,7 @@ def set_priority(id_):
LOGGER.error(f"Verification Failed! Hash: {id_}")
client.auth_log_out()
else:
resume = ""
for i, value in data.items():
if "filenode" in i and value == "on":
node_no = i.split("_")[-1]