TG_SPLIT_SIZE changed to LEECH_SPLIT_SIZE

- Fix minor bug

Signed-off-by: anasty17 <e.anastayyar@gmail.com>
This commit is contained in:
anasty17 2022-07-23 06:06:22 +03:00
parent 578960ebf9
commit c7cbf7a59e
9 changed files with 54 additions and 46 deletions

View File

@ -22,7 +22,7 @@ This is a Telegram Bot written in Python for mirroring files on the Internet to
- Mirror/Leech/Watch/Clone/Count/Del by reply.
- YT-DLP quality buttons.
- Search on torrents with Torrent Search API or with variable plugins using qBittorrent search engine
- Docker image support for linux `amd64, arm64/v8, arm/v7, s390x, riscv64, ppc64le`.
- Docker image support for linux `amd64, arm64/v8, arm/v7, s390x`.
- Update bot at startup and with restart command using `UPSTREAM_REPO`.
- Qbittorrent seed until reaching specific ratio or time.
- Rss feed and filter. Based on this repository [rss-chan](https://github.com/hyPnOtICDo0g/rss-chan).
@ -53,12 +53,16 @@ This is a Telegram Bot written in Python for mirroring files on the Internet to
> ZIP, RAR, TAR, 7z, ISO, WIM, CAB, GZIP, BZIP2, APM, ARJ, CHM, CPIO, CramFS, DEB, DMG, FAT, HFS, LZH, LZMA, LZMA2, MBR, MSI, MSLZ, NSIS, NTFS, RPM, SquashFS, UDF, VHD, XAR, Z, TAR.XZ
- Direct links Supported:
>letsupload.io, hxfile.co, anonfiles.com, bayfiles.com, antfiles, fembed.com, fembed.net, femax20.com, layarkacaxxi.icu, fcdn.stream, sbplay.org, naniplay.com, naniplay.nanime.in, naniplay.nanime.biz, sbembed.com, streamtape.com, streamsb.net, feurl.com, pixeldrain.com, racaty.net, 1fichier.com, 1drv.ms (Only works for file not folder or business account), uptobox.com (Uptobox account must be premium) and solidfiles.com
>mediafire, zippyshare, letsupload.io, hxfile.co, anonfiles.com, bayfiles.com, antfiles, fembed.com, fembed.net, femax20.com, layarkacaxxi.icu, fcdn.stream, sbplay.org, naniplay.com, naniplay.nanime.in, naniplay.nanime.biz, sbembed.com, streamtape.com, streamsb.net, feurl.com, pixeldrain.com, racaty.net, 1fichier.com, 1drv.ms (Only works for file not folder or business account), uptobox.com (Uptobox account must be premium) and solidfiles.com
# How to deploy?
## Prerequisites
- Tutorial Video from A to Z:
- Thanks to [Wiszky](https://github.com/vishnoe115)
<p><a href="https://youtu.be/IUmq1paCiHI"> <img src="https://img.shields.io/badge/See%20Video-black?style=for-the-badge&logo=YouTube" width="160"/></a></p>
### 1. Installing requirements
- Clone this repo:
@ -127,9 +131,9 @@ Fill up rest of the fields. Meaning of each field is discussed below:
### Leech
- `USER_SESSION_STRING`: To download/upload from your telegram account. To generate session string use this command `python3 generate_string_session.py` after mounting repo folder for sure. `Str`. **NOTE**: You can't use bot with private message, use it with supergroup or channel.
- `TG_SPLIT_SIZE`: Size of split in bytes. Default is `2GB`. Default is `4GB` if your account is premium. `Str`
- `LEECH_SPLIT_SIZE`: Size of split in bytes. Default is `2GB`. Default is `4GB` if your account is premium. `Str`
- `AS_DOCUMENT`: Default type of Telegram file upload. Default is `False` mean as media. `Bool`
- `EQUAL_SPLITS`: Split files larger than **TG_SPLIT_SIZE** into equal parts size (Not working with zip cmd). Default is `False`. `Bool`
- `EQUAL_SPLITS`: Split files larger than **LEECH_SPLIT_SIZE** into equal parts size (Not working with zip cmd). Default is `False`. `Bool`
- `CUSTOM_FILENAME`: Add custom word to leeched file name. `Str`
### qBittorrent
@ -209,7 +213,7 @@ sudo docker image prune -a
```
4. Check the number of processing units of your machine with the `nproc` cmd and multiply it by 4, then edit `AsyncIOThreadsCount` in qBittorrent.conf.
5. You can add `CONFIG_FILE_URL` variable using docker and docker-compose, google it.
6. Only `amd64` and `arm64/v8` are tested, if you faced any error while deploying with other images then you can open issue.
6. Only `amd64` and `arm64/v8` are tested; if you face a missing-package error while installing requirements with other images, you can open an issue.
------

View File

@ -207,15 +207,15 @@ try:
except:
DB_URI = None
try:
TG_SPLIT_SIZE = getConfig('TG_SPLIT_SIZE')
if len(TG_SPLIT_SIZE) == 0 or (not IS_PREMIUM_USER and TG_SPLIT_SIZE > 2097152000) or TG_SPLIT_SIZE > 4194304000:
LEECH_SPLIT_SIZE = getConfig('LEECH_SPLIT_SIZE')
if len(LEECH_SPLIT_SIZE) == 0 or (not IS_PREMIUM_USER and LEECH_SPLIT_SIZE > 2097152000) or LEECH_SPLIT_SIZE > 4194304000:
raise KeyError
TG_SPLIT_SIZE = int(TG_SPLIT_SIZE)
LEECH_SPLIT_SIZE = int(LEECH_SPLIT_SIZE)
except:
if not IS_PREMIUM_USER:
TG_SPLIT_SIZE = 2097152000
LEECH_SPLIT_SIZE = 2097152000
else:
TG_SPLIT_SIZE = 4194304000
LEECH_SPLIT_SIZE = 4194304000
try:
STATUS_LIMIT = getConfig('STATUS_LIMIT')
if len(STATUS_LIMIT) == 0:

View File

@ -10,7 +10,7 @@ from math import ceil
from re import split as re_split, I
from .exceptions import NotSupportedExtractionArchive
from bot import aria2, app, LOGGER, DOWNLOAD_DIR, get_client, TG_SPLIT_SIZE, EQUAL_SPLITS, IS_PREMIUM_USER
from bot import aria2, app, LOGGER, DOWNLOAD_DIR, get_client, LEECH_SPLIT_SIZE, EQUAL_SPLITS, IS_PREMIUM_USER
if IS_PREMIUM_USER:
MAX_SPLIT_SIZE = 4194304000
@ -117,7 +117,7 @@ def take_ss(video_file):
return des_dir
def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False):
parts = ceil(size/TG_SPLIT_SIZE)
parts = ceil(size/LEECH_SPLIT_SIZE)
if EQUAL_SPLITS and not inLoop:
split_size = ceil(size/parts) + 1000
if file_.upper().endswith(VIDEO_SUFFIXES):

View File

@ -60,5 +60,6 @@ class ExtractStatus:
def cancel_download(self):
LOGGER.info(f'Cancelling Extract: {self.__name}')
self.__listener.ext_proc.kill()
if self.__listener.suproc is not None:
self.__listener.suproc.kill()
self.__listener.onUploadError('extracting stopped by user!')

View File

@ -39,5 +39,6 @@ class SplitStatus:
def cancel_download(self):
LOGGER.info(f'Cancelling Split: {self.__name}')
self.__listener.split_proc.kill()
if self.__listener.suproc is not None:
self.__listener.suproc.kill()
self.__listener.onUploadError('splitting stopped by user!')

View File

@ -60,5 +60,6 @@ class ZipStatus:
def cancel_download(self):
LOGGER.info(f'Cancelling Archive: {self.__name}')
self.__listener.arch_proc.kill()
if self.__listener.suproc is not None:
self.__listener.suproc.kill()
self.__listener.onUploadError('archiving stopped by user!')

View File

@ -379,9 +379,9 @@ class GoogleDriveHelper:
LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
if "User rate limit exceeded" in str(err):
if "User rate limit exceeded" in err:
msg = "User rate limit exceeded."
elif "File not found" in str(err):
elif "File not found" in err:
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service
@ -701,7 +701,7 @@ class GoogleDriveHelper:
LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
if "File not found" in str(err):
if "File not found" in err:
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service
@ -757,7 +757,7 @@ class GoogleDriveHelper:
LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
if "File not found" in str(err):
if "File not found" in err:
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service
@ -785,9 +785,9 @@ class GoogleDriveHelper:
LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
if "downloadQuotaExceeded" in str(err):
if "downloadQuotaExceeded" in err:
err = "Download Quota Exceeded."
elif "File not found" in str(err):
elif "File not found" in err:
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service

View File

@ -11,7 +11,7 @@ from telegram.ext import CommandHandler
from telegram import InlineKeyboardMarkup
from bot import Interval, INDEX_URL, VIEW_LINK, aria2, QB_SEED, dispatcher, DOWNLOAD_DIR, \
download_dict, download_dict_lock, TG_SPLIT_SIZE, LOGGER, DB_URI, INCOMPLETE_TASK_NOTIFIER
download_dict, download_dict_lock, LEECH_SPLIT_SIZE, LOGGER, DB_URI, INCOMPLETE_TASK_NOTIFIER
from bot.helper.ext_utils.bot_utils import is_url, is_magnet, is_mega_link, is_gdrive_link, get_content_type
from bot.helper.ext_utils.fs_utils import get_base_name, get_path_size, split_file, clean_download
from bot.helper.ext_utils.exceptions import DirectDownloadLinkException, NotSupportedExtractionArchive
@ -48,6 +48,7 @@ class MirrorListener:
self.tag = tag
self.seed = any([seed, QB_SEED])
self.isPrivate = self.message.chat.type in ['private', 'group']
self.suproc = None
def clean(self):
try:
@ -77,25 +78,25 @@ class MirrorListener:
with download_dict_lock:
download_dict[self.uid] = ZipStatus(name, size, gid, self)
if self.pswd is not None:
if self.isLeech and int(size) > TG_SPLIT_SIZE:
if self.isLeech and int(size) > LEECH_SPLIT_SIZE:
LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}.0*')
self.arch_proc = Popen(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", f"-p{self.pswd}", path, m_path])
self.suproc = Popen(["7z", f"-v{LEECH_SPLIT_SIZE}b", "a", "-mx=0", f"-p{self.pswd}", path, m_path])
else:
LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
self.arch_proc = Popen(["7z", "a", "-mx=0", f"-p{self.pswd}", path, m_path])
elif self.isLeech and int(size) > TG_SPLIT_SIZE:
self.suproc = Popen(["7z", "a", "-mx=0", f"-p{self.pswd}", path, m_path])
elif self.isLeech and int(size) > LEECH_SPLIT_SIZE:
LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}.0*')
self.arch_proc = Popen(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
self.suproc = Popen(["7z", f"-v{LEECH_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
else:
LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
self.arch_proc = Popen(["7z", "a", "-mx=0", path, m_path])
self.arch_proc.wait()
if self.arch_proc.returncode == -9:
self.suproc = Popen(["7z", "a", "-mx=0", path, m_path])
self.suproc.wait()
if self.suproc.returncode == -9:
return
elif self.arch_proc.returncode != 0:
elif self.suproc.returncode != 0:
LOGGER.error('An error occurred while zipping! Uploading anyway')
path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
if self.arch_proc.returncode == 0 and (not self.isQbit or not self.seed or self.isLeech):
if self.suproc.returncode == 0 and (not self.isQbit or not self.seed or self.isLeech):
try:
rmtree(m_path)
except:
@ -114,15 +115,15 @@ class MirrorListener:
or (file_.endswith(".rar") and not re_search(r'\.part\d+\.rar$', file_)):
m_path = ospath.join(dirpath, file_)
if self.pswd is not None:
self.ext_proc = Popen(["7z", "x", f"-p{self.pswd}", m_path, f"-o{dirpath}", "-aot"])
self.suproc = Popen(["7z", "x", f"-p{self.pswd}", m_path, f"-o{dirpath}", "-aot"])
else:
self.ext_proc = Popen(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
self.ext_proc.wait()
if self.ext_proc.returncode == -9:
self.suproc = Popen(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
self.suproc.wait()
if self.suproc.returncode == -9:
return
elif self.ext_proc.returncode != 0:
elif self.suproc.returncode != 0:
LOGGER.error('Unable to extract archive splits! Uploading anyway')
if self.ext_proc.returncode == 0:
if self.suproc is not None and self.suproc.returncode == 0:
for file_ in files:
if file_.endswith((".rar", ".zip", ".7z")) or \
re_search(r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$', file_):
@ -131,13 +132,13 @@ class MirrorListener:
path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
else:
if self.pswd is not None:
self.ext_proc = Popen(["bash", "pextract", m_path, self.pswd])
self.suproc = Popen(["bash", "pextract", m_path, self.pswd])
else:
self.ext_proc = Popen(["bash", "extract", m_path])
self.ext_proc.wait()
if self.ext_proc.returncode == -9:
self.suproc = Popen(["bash", "extract", m_path])
self.suproc.wait()
if self.suproc.returncode == -9:
return
elif self.ext_proc.returncode == 0:
elif self.suproc.returncode == 0:
LOGGER.info(f"Extracted Path: {path}")
osremove(m_path)
else:
@ -155,13 +156,13 @@ class MirrorListener:
for file_ in files:
f_path = ospath.join(dirpath, file_)
f_size = ospath.getsize(f_path)
if int(f_size) > TG_SPLIT_SIZE:
if int(f_size) > LEECH_SPLIT_SIZE:
if not checked:
checked = True
with download_dict_lock:
download_dict[self.uid] = SplitStatus(up_name, size, gid, self)
LOGGER.info(f"Splitting: {up_name}")
res = split_file(f_path, f_size, file_, dirpath, TG_SPLIT_SIZE, self)
res = split_file(f_path, f_size, file_, dirpath, LEECH_SPLIT_SIZE, self)
if not res:
return
osremove(f_path)

View File

@ -29,7 +29,7 @@ UPSTREAM_REPO = ""
UPSTREAM_BRANCH = ""
# Leech
USER_SESSION_STRING = ""
TG_SPLIT_SIZE = ""
LEECH_SPLIT_SIZE = ""
AS_DOCUMENT = ""
EQUAL_SPLITS = ""
CUSTOM_FILENAME = ""