diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
new file mode 100644
index 0000000..66bcc74
--- /dev/null
+++ b/.github/workflows/deploy.yml
@@ -0,0 +1,22 @@
+name: Manually Deploy to Heroku
+
+on: workflow_dispatch
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: akhileshns/heroku-deploy@v3.12.12
+ with:
+ heroku_api_key: ${{secrets.HEROKU_API_KEY}}
+ heroku_app_name: ${{secrets.HEROKU_APP_NAME}}
+ heroku_email: ${{secrets.HEROKU_EMAIL}}
+ usedocker: true
+ docker_heroku_process_type: web
+ stack: "container"
+ region: "us"
+ env:
+ HD_CONFIG_FILE_URL: ${{secrets.CONFIG_FILE_URL}}
+ HD_HEROKU_API_KEY: ${{secrets.HEROKU_API_KEY}}
+ HD_HEROKU_APP_NAME: ${{secrets.HEROKU_APP_NAME}}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a183365
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,17 @@
+config.env
+Thumbnails/*
+drive_folder
+cookies.txt
+*auth_token.txt
+*.pyc
+data*
+.vscode
+.idea
+*.json
+*.pickle
+.netrc
+log.txt
+authorized_chats.txt
+sudo_users.txt
+accounts/*
+
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..bdf8cbe
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,15 @@
+FROM anasty17/mltb:latest
+
+WORKDIR /usr/src/app
+RUN chmod 777 /usr/src/app
+
+COPY requirements.txt .
+RUN pip3 install --no-cache-dir -r requirements.txt
+RUN apt-get -qq update --fix-missing && \
+    apt-get -qq upgrade -y && \
+    apt-get -qq install -y mediainfo
+
+COPY . .
+
+CMD ["bash", "start.sh"]
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..e72bfdd
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+
+
+
+
+
+
+
rarbg, 1337x, yts, etzv, tgx, torlock, piratebay, nyaasi, ettv
{escape(str(download.name()))}
"
+ if download.status() not in [MirrorStatus.STATUS_SEEDING, MirrorStatus.STATUS_SPLITTING]:
+ if EMOJI_THEME is True:
+ msg += f"\n├{get_progress_bar_string(download)} {download.progress()}"
+ msg += f"\n├🔄 Process: {get_readable_file_size(download.processed_bytes())} of {download.size()}"
+ msg += f"\n├⚡ Speed: {download.speed()}"
+ msg += f"\n├⏳ ETA: {download.eta()}"
+ msg += f" | Elapsed: {get_readable_time(time() - download.message.date.timestamp())}"
+ msg += f"\n├⛓️ Engine : {download.eng()}"
+
+ else:
+ msg += f"\n├{get_progress_bar_string(download)} {download.progress()}"
+ msg += f"\n├ Process: {get_readable_file_size(download.processed_bytes())} of {download.size()}"
+ msg += f"\n├ Speed: {download.speed()}"
+ msg += f"\n├ ETA: {download.eta()}"
+ msg += f" | Elapsed: {get_readable_time(time() - download.message.date.timestamp())}"
+ msg += f"\n├ Engine : {download.eng()}"
+
+ if hasattr(download, 'seeders_num'):
+ try:
+ if EMOJI_THEME is True:
+ msg += f"\n├🌱 Seeders: {download.seeders_num()} | 🐌 Leechers: {download.leechers_num()}"
+            # msg += f"\n├🧿 To Select: /{BotCommands.BtSelectCommand} {download.gid()}"
+ else:
+ msg += f"\n├ Seeders: {download.seeders_num()} | Leechers: {download.leechers_num()}"
+            # msg += f"\n├ To Select: /{BotCommands.BtSelectCommand} {download.gid()}"
+ except:
+ pass
+ if download.message.chat.type != 'private':
+ try:
+ chatid = str(download.message.chat.id)[4:]
+ if EMOJI_THEME is True:
+                    msg += f'\n├🌐 Source: {download.message.from_user.first_name} | Id : {download.message.from_user.id}'
+                    msg += f"\n╰❌ /{BotCommands.CancelMirror} {download.gid()}"
+ else:
+                    msg += f'\n├ Source: {download.message.from_user.first_name} | Id : {download.message.from_user.id}'
+                    msg += f"\n╰ /{BotCommands.CancelMirror} {download.gid()}"
+ except:
+ pass
+ else:
+ if EMOJI_THEME is True:
+                msg += f'\n├👤 User: {download.message.from_user.first_name} | Id: {download.message.from_user.id}'
+                msg += f"\n╰❌ /{BotCommands.CancelMirror} {download.gid()}"
+ else:
+                msg += f'\n├ User: {download.message.from_user.first_name} | Id: {download.message.from_user.id}'
+                msg += f"\n╰ /{BotCommands.CancelMirror} {download.gid()}"
+
+ elif download.status() == MirrorStatus.STATUS_SEEDING:
+ if EMOJI_THEME is True:
+ msg += f"\n├📦 Size: {download.size()}"
+                msg += f"\n├⛓️ Engine: qBittorrent v4.4.2"
+ msg += f"\n├⚡ Speed: {download.upload_speed()}"
+ msg += f"\n├🔺 Uploaded: {download.uploaded_bytes()}"
+ msg += f"\n├📎 Ratio: {download.ratio()}"
+ msg += f" | ⏲️ Time: {download.seeding_time()}"
+ msg += f"\n├⏳ Elapsed: {get_readable_time(time() - download.message.date.timestamp())}"
+                msg += f"\n╰❌ /{BotCommands.CancelMirror} {download.gid()}"
+ else:
+ msg += f"\n├ Size: {download.size()}"
+                msg += f"\n├ Engine: qBittorrent v4.4.2"
+ msg += f"\n├ Speed: {download.upload_speed()}"
+ msg += f"\n├ Uploaded: {download.uploaded_bytes()}"
+ msg += f"\n├ Ratio: {download.ratio()}"
+ msg += f" | Time: {download.seeding_time()}"
+ msg += f"\n├ Elapsed: {get_readable_time(time() - download.message.date.timestamp())}"
+                msg += f"\n╰ /{BotCommands.CancelMirror} {download.gid()}"
+ else:
+ if EMOJI_THEME is True:
+ msg += f"\n├⛓️ Engine : {download.eng()}"
+ msg += f"\n╰📐 Size: {download.size()}"
+ else:
+ msg += f"\n├ Engine : {download.eng()}"
+ msg += f"\n╰ Size: {download.size()}"
+ msg += f"\n_____________________________________"
+ msg += "\n\n"
+ if STATUS_LIMIT is not None and index == STATUS_LIMIT:
+ break
+ if len(msg) == 0:
+ return None, None
+ dl_speed = 0
+ up_speed = 0
+ for download in list(download_dict.values()):
+ if download.status() == MirrorStatus.STATUS_DOWNLOADING:
+ spd = download.speed()
+ if 'K' in spd:
+ dl_speed += float(spd.split('K')[0]) * 1024
+ elif 'M' in spd:
+ dl_speed += float(spd.split('M')[0]) * 1048576
+ elif download.status() == MirrorStatus.STATUS_UPLOADING:
+ spd = download.speed()
+ if 'KB/s' in spd:
+ up_speed += float(spd.split('K')[0]) * 1024
+ elif 'MB/s' in spd:
+ up_speed += float(spd.split('M')[0]) * 1048576
+ elif download.status() == MirrorStatus.STATUS_SEEDING:
+ spd = download.upload_speed()
+ if 'K' in spd:
+ up_speed += float(spd.split('K')[0]) * 1024
+ elif 'M' in spd:
+ up_speed += float(spd.split('M')[0]) * 1048576
+ if EMOJI_THEME is True:
+ bmsg = f"🖥 CPU: {cpu_percent()}% | 💿 FREE: {get_readable_file_size(disk_usage(DOWNLOAD_DIR).free)}"
+ bmsg += f"\n🎮 RAM: {virtual_memory().percent}% | 🟢 UPTIME: {get_readable_time(time() - botStartTime)}"
+ bmsg += f"\n🔻 DL: {get_readable_file_size(dl_speed)}/s | 🔺 UL: {get_readable_file_size(up_speed)}/s"
+ else:
+ bmsg = f"CPU: {cpu_percent()}% | FREE: {get_readable_file_size(disk_usage(DOWNLOAD_DIR).free)}"
+ bmsg += f"\nRAM: {virtual_memory().percent}% | UPTIME: {get_readable_time(time() - botStartTime)}"
+ bmsg += f"\nDL: {get_readable_file_size(dl_speed)}/s | UL: {get_readable_file_size(up_speed)}/s"
+
+ buttons = ButtonMaker()
+ buttons.sbutton("Refresh", "status refresh")
+ buttons.sbutton("Statistics", str(THREE))
+ buttons.sbutton("Close", "status close")
+ sbutton = buttons.build_menu(3)
+
+ if STATUS_LIMIT is not None and tasks > STATUS_LIMIT:
+ msg += f"Tasks: {tasks}\n"
+ buttons = ButtonMaker()
+ if EMOJI_THEME is True:
+ buttons.sbutton("⏪Previous", "status pre")
+ buttons.sbutton(f"{PAGE_NO}/{PAGES}", str(THREE))
+ buttons.sbutton("Next⏩", "status nex")
+ buttons.sbutton("Refresh", "status refresh")
+ buttons.sbutton("Close", "status close")
+ else:
+ buttons.sbutton("Previous", "status pre")
+ buttons.sbutton(f"{PAGE_NO}/{PAGES}", str(THREE))
+ buttons.sbutton("Next", "status nex")
+ buttons.sbutton("Refresh", "status refresh")
+ buttons.sbutton("Close", "status close")
+ button = buttons.build_menu(3)
+ return msg + bmsg, button
+ return msg + bmsg, sbutton
+
+def turn(data):
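+    # Status-page pagination: "nex"/"pre" shift COUNT by STATUS_LIMIT and wrap
+    # around when moving past the last/first page.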
+ try:
+ with download_dict_lock:
+ global COUNT, PAGE_NO
+ if data[1] == "nex":
+ if PAGE_NO == PAGES:
+ COUNT = 0
+ PAGE_NO = 1
+ else:
+ COUNT += STATUS_LIMIT
+ PAGE_NO += 1
+ elif data[1] == "pre":
+ if PAGE_NO == 1:
+ COUNT = STATUS_LIMIT * (PAGES - 1)
+ PAGE_NO = PAGES
+ else:
+ COUNT -= STATUS_LIMIT
+ PAGE_NO -= 1
+ return True
+ except:
+ return False
+
+def get_readable_time(seconds: int) -> str:
+ result = ''
+ (days, remainder) = divmod(seconds, 86400)
+ days = int(days)
+ if days != 0:
+ result += f'{days}d'
+ (hours, remainder) = divmod(remainder, 3600)
+ hours = int(hours)
+ if hours != 0:
+ result += f'{hours}h'
+ (minutes, seconds) = divmod(remainder, 60)
+ minutes = int(minutes)
+ if minutes != 0:
+ result += f'{minutes}m'
+ seconds = int(seconds)
+ result += f'{seconds}s'
+ return result
+
+def is_url(url: str):
+ url = re_findall(URL_REGEX, url)
+ return bool(url)
+
+def is_gdrive_link(url: str):
+ return "drive.google.com" in url
+
+def is_gdtot_link(url: str):
+ url = re_match(r'https?://.+\.gdtot\.\S+', url)
+ return bool(url)
+
+def is_unified_link(url: str):
+ url = re_match(r'https?://(appdrive|driveapp|driveace|gdflix|drivebit|drivesharer|drivepro)\.\S+', url)
+    return bool(url)
+
+def is_udrive_link(url: str):
+ if 'drivehub.ws' in url:
+        return True
+ else:
+ url = re_match(r'https?://(hubdrive|katdrive|kolop|drivefire|drivebuzz)\.\S+', url)
+ return bool(url)
+
+def is_mega_link(url: str):
+ return "mega.nz" in url or "mega.co.nz" in url
+
+def get_mega_link_type(url: str):
+ if "folder" in url:
+ return "folder"
+ elif "file" in url:
+ return "file"
+ elif "/#F!" in url:
+ return "folder"
+ return "file"
+
+def is_magnet(url: str):
+ magnet = re_findall(MAGNET_REGEX, url)
+ return bool(magnet)
+
+def new_thread(fn):
+    """Decorator that runs the wrapped function call in a new thread.
+    Requires: from threading import Thread"""
+
+ def wrapper(*args, **kwargs):
+ thread = Thread(target=fn, args=args, kwargs=kwargs)
+ thread.start()
+ return thread
+
+ return wrapper
+
+def get_content_type(link: str) -> str:
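+    # Tries a lightweight HEAD request first and falls back to urllib's urlopen
+    # when that fails; returns None if the content type cannot be determined.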
+ try:
+ res = rhead(link, allow_redirects=True, timeout=5, headers = {'user-agent': 'Wget/1.12'})
+ content_type = res.headers.get('content-type')
+ except:
+ try:
+ res = urlopen(link, timeout=5)
+ info = res.info()
+ content_type = info.get_content_type()
+ except:
+ content_type = None
+ return content_type
+
+
+ONE, TWO, THREE = range(3)
+
+def pop_up_stats(update, context):
+    query = update.callback_query
+    stats = bot_sys_stats()
+    query.answer(text=stats, show_alert=True)
+
+def bot_sys_stats():
+ currentTime = get_readable_time(time() - botStartTime)
+ cpu = psutil.cpu_percent()
+ mem = psutil.virtual_memory().percent
+ disk = psutil.disk_usage(DOWNLOAD_DIR).percent
+ total, used, free = shutil.disk_usage(DOWNLOAD_DIR)
+ total = get_readable_file_size(total)
+ used = get_readable_file_size(used)
+ free = get_readable_file_size(free)
+ recv = get_readable_file_size(psutil.net_io_counters().bytes_recv)
+ sent = get_readable_file_size(psutil.net_io_counters().bytes_sent)
+ num_active = 0
+ num_upload = 0
+ num_split = 0
+ num_extract = 0
+ num_archi = 0
+ tasks = len(download_dict)
+ for stats in list(download_dict.values()):
+ if stats.status() == MirrorStatus.STATUS_DOWNLOADING:
+ num_active += 1
+ if stats.status() == MirrorStatus.STATUS_UPLOADING:
+ num_upload += 1
+ if stats.status() == MirrorStatus.STATUS_ARCHIVING:
+ num_archi += 1
+ if stats.status() == MirrorStatus.STATUS_EXTRACTING:
+ num_extract += 1
+ if stats.status() == MirrorStatus.STATUS_SPLITTING:
+ num_split += 1
+ stats = f"""
+CPU : {cpu}% | RAM : {mem}%
+DL : {num_active} | UP : {num_upload} | SPLIT : {num_split}
+ZIP : {num_archi} | UNZIP : {num_extract} | TOTAL : {tasks}
+Limits : T/D : {TORRENT_DIRECT_LIMIT}GB | Z/U : {ZIP_UNZIP_LIMIT}GB
+ L : {LEECH_LIMIT}GB | M : {MEGA_LIMIT}GB
+Made with ❤️ by {CREDIT_NAME}
+"""
+ return stats
+dispatcher.add_handler(
+ CallbackQueryHandler(pop_up_stats, pattern="^" + str(THREE) + "$")
+)
\ No newline at end of file
diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py
new file mode 100644
index 0000000..cd1291c
--- /dev/null
+++ b/bot/helper/ext_utils/db_handler.py
@@ -0,0 +1,274 @@
+from os import path as ospath, makedirs
+from psycopg2 import connect, DatabaseError
+
+from bot import DB_URI, AUTHORIZED_CHATS, SUDO_USERS, AS_DOC_USERS, AS_MEDIA_USERS, rss_dict, LOGGER, botname, LEECH_LOG
+
+class DbManger:
+ def __init__(self):
+ self.err = False
+ self.connect()
+
+ def connect(self):
+ try:
+ self.conn = connect(DB_URI)
+ self.cur = self.conn.cursor()
+ except DatabaseError as error:
+ LOGGER.error(f"Error in DB connection: {error}")
+ self.err = True
+
+ def disconnect(self):
+ self.cur.close()
+ self.conn.close()
+
+ def db_init(self):
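+        # Creates the users, rss and per-bot incomplete-task tables if missing,
+        # then loads the cached data into memory via db_load().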
+ if self.err:
+ return
+ sql = """CREATE TABLE IF NOT EXISTS users (
+ uid bigint,
+ sudo boolean DEFAULT FALSE,
+ auth boolean DEFAULT FALSE,
+ media boolean DEFAULT FALSE,
+ doc boolean DEFAULT FALSE,
+ thumb bytea DEFAULT NULL,
+ leechlog boolean DEFAULT FALSE
+ )
+ """
+ self.cur.execute(sql)
+ sql = """CREATE TABLE IF NOT EXISTS rss (
+ name text,
+ link text,
+ last text,
+ title text,
+ filters text
+ )
+ """
+ self.cur.execute(sql)
+ self.cur.execute("CREATE TABLE IF NOT EXISTS {} (cid bigint, link text, tag text)".format(botname))
+ self.conn.commit()
+ LOGGER.info("Database Initiated")
+ self.db_load()
+
+ def db_load(self):
+ # User Data
+ self.cur.execute("SELECT * from users")
+        rows = self.cur.fetchall() # returns a list of rows ==> (uid, sudo, auth, media, doc, thumb, leechlog)
+ if rows:
+ for row in rows:
+ if row[1] and row[0] not in SUDO_USERS:
+ SUDO_USERS.add(row[0])
+ elif row[2] and row[0] not in AUTHORIZED_CHATS:
+ AUTHORIZED_CHATS.add(row[0])
+ if row[3]:
+ AS_MEDIA_USERS.add(row[0])
+ elif row[4]:
+ AS_DOC_USERS.add(row[0])
+ path = f"Thumbnails/{row[0]}.jpg"
+ if row[5] is not None and not ospath.exists(path):
+ if not ospath.exists('Thumbnails'):
+ makedirs('Thumbnails')
+ with open(path, 'wb+') as f:
+ f.write(row[5])
+ if row[6] and row[0] not in LEECH_LOG:
+ LEECH_LOG.add(row[0])
+ LOGGER.info("Users data has been imported from Database")
+ # Rss Data
+ self.cur.execute("SELECT * FROM rss")
+ rows = self.cur.fetchall() # return a list ==> (name, feed_link, last_link, last_title, filters)
+ if rows:
+ for row in rows:
+ f_lists = []
+ if row[4] is not None:
+ filters_list = row[4].split('|')
+ for x in filters_list:
+ y = x.split(' or ')
+ f_lists.append(y)
+ rss_dict[row[0]] = [row[1], row[2], row[3], f_lists]
+ LOGGER.info("Rss data has been imported from Database.")
+ self.disconnect()
+
+ def user_auth(self, chat_id: int):
+ if self.err:
+ return "Error in DB connection, check log for details"
+ elif not self.user_check(chat_id):
+ sql = 'INSERT INTO users (uid, auth) VALUES ({}, TRUE)'.format(chat_id)
+ else:
+ sql = 'UPDATE users SET auth = TRUE WHERE uid = {}'.format(chat_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+ return 'Authorized successfully'
+
+ def user_unauth(self, chat_id: int):
+ if self.err:
+ return "Error in DB connection, check log for details"
+ elif self.user_check(chat_id):
+ sql = 'UPDATE users SET auth = FALSE WHERE uid = {}'.format(chat_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+ return 'Unauthorized successfully'
+
+ def user_addsudo(self, user_id: int):
+ if self.err:
+ return "Error in DB connection, check log for details"
+ elif not self.user_check(user_id):
+ sql = 'INSERT INTO users (uid, sudo) VALUES ({}, TRUE)'.format(user_id)
+ else:
+ sql = 'UPDATE users SET sudo = TRUE WHERE uid = {}'.format(user_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+ return 'Successfully Promoted as Sudo'
+
+ def user_rmsudo(self, user_id: int):
+ if self.err:
+ return "Error in DB connection, check log for details"
+ elif self.user_check(user_id):
+ sql = 'UPDATE users SET sudo = FALSE WHERE uid = {}'.format(user_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+ return 'Successfully removed from Sudo'
+
+ def user_media(self, user_id: int):
+ if self.err:
+ return
+ elif not self.user_check(user_id):
+ sql = 'INSERT INTO users (uid, media) VALUES ({}, TRUE)'.format(user_id)
+ else:
+ sql = 'UPDATE users SET media = TRUE, doc = FALSE WHERE uid = {}'.format(user_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+
+ def user_doc(self, user_id: int):
+ if self.err:
+ return
+ elif not self.user_check(user_id):
+ sql = 'INSERT INTO users (uid, doc) VALUES ({}, TRUE)'.format(user_id)
+ else:
+ sql = 'UPDATE users SET media = FALSE, doc = TRUE WHERE uid = {}'.format(user_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+
+ def user_save_thumb(self, user_id: int, path):
+ if self.err:
+ return
+        with open(path, 'rb') as image:
+            image_bin = image.read()
+ if not self.user_check(user_id):
+ sql = 'INSERT INTO users (thumb, uid) VALUES (%s, %s)'
+ else:
+ sql = 'UPDATE users SET thumb = %s WHERE uid = %s'
+ self.cur.execute(sql, (image_bin, user_id))
+ self.conn.commit()
+ self.disconnect()
+
+ def user_rm_thumb(self, user_id: int, path):
+ if self.err:
+ return
+ elif self.user_check(user_id):
+ sql = 'UPDATE users SET thumb = NULL WHERE uid = {}'.format(user_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+
+ def addleech_log(self, chat_id: int):
+ if self.err:
+ return "Error in DB connection, check log for details"
+ elif not self.user_check(chat_id):
+ sql = 'INSERT INTO users (uid, leechlog) VALUES ({}, TRUE)'.format(chat_id)
+ else:
+ sql = 'UPDATE users SET leechlog = TRUE WHERE uid = {}'.format(chat_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+ return 'Successfully added to leech logs'
+
+ def rmleech_log(self, chat_id: int):
+ if self.err:
+ return "Error in DB connection, check log for details"
+ elif self.user_check(chat_id):
+ sql = 'UPDATE users SET leechlog = FALSE WHERE uid = {}'.format(chat_id)
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+ return 'Removed from leech logs successfully'
+
+ def user_check(self, uid: int):
+ self.cur.execute("SELECT * FROM users WHERE uid = {}".format(uid))
+ res = self.cur.fetchone()
+ return res
+
+ def rss_add(self, name, link, last, title, filters):
+ if self.err:
+ return
+ q = (name, link, last, title, filters)
+ self.cur.execute("INSERT INTO rss (name, link, last, title, filters) VALUES (%s, %s, %s, %s, %s)", q)
+ self.conn.commit()
+ self.disconnect()
+
+ def rss_update(self, name, last, title):
+ if self.err:
+ return
+ q = (last, title, name)
+ self.cur.execute("UPDATE rss SET last = %s, title = %s WHERE name = %s", q)
+ self.conn.commit()
+ self.disconnect()
+
+ def rss_delete(self, name):
+ if self.err:
+ return
+ self.cur.execute("DELETE FROM rss WHERE name = %s", (name,))
+ self.conn.commit()
+ self.disconnect()
+
+ def add_incomplete_task(self, cid: int, link: str, tag: str):
+ if self.err:
+ return
+ q = (cid, link, tag)
+ self.cur.execute("INSERT INTO {} (cid, link, tag) VALUES (%s, %s, %s)".format(botname), q)
+ self.conn.commit()
+ self.disconnect()
+
+ def rm_complete_task(self, link: str):
+ if self.err:
+ return
+ self.cur.execute("DELETE FROM {} WHERE link = %s".format(botname), (link,))
+ self.conn.commit()
+ self.disconnect()
+
+ def get_incomplete_tasks(self):
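+        # Collects the pending tasks as {cid: {tag: [link, ...]}} and then
+        # truncates the table so each task is reported only once after a restart.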
+ if self.err:
+ return False
+ self.cur.execute("SELECT * from {}".format(botname))
+ rows = self.cur.fetchall() # return a list ==> (cid, link, tag)
+ notifier_dict = {}
+ if rows:
+ for row in rows:
+ if row[0] in list(notifier_dict.keys()):
+ if row[2] in list(notifier_dict[row[0]].keys()):
+ notifier_dict[row[0]][row[2]].append(row[1])
+ else:
+ notifier_dict[row[0]][row[2]] = [row[1]]
+ else:
+ usr_dict = {}
+ usr_dict[row[2]] = [row[1]]
+ notifier_dict[row[0]] = usr_dict
+ self.cur.execute("TRUNCATE TABLE {}".format(botname))
+ self.conn.commit()
+ self.disconnect()
+        return notifier_dict # return a dict ==> {cid: {tag: [link, link, ...]}}
+
+
+ def trunc_table(self, name):
+ if self.err:
+ return
+ self.cur.execute("TRUNCATE TABLE {}".format(name))
+ self.conn.commit()
+ self.disconnect()
+
+if DB_URI is not None:
+ DbManger().db_init()
+
diff --git a/bot/helper/ext_utils/exceptions.py b/bot/helper/ext_utils/exceptions.py
new file mode 100644
index 0000000..a2f600c
--- /dev/null
+++ b/bot/helper/ext_utils/exceptions.py
@@ -0,0 +1,8 @@
+class DirectDownloadLinkException(Exception):
+    """No method found for extracting a direct download link from the http link"""
+ pass
+
+
+class NotSupportedExtractionArchive(Exception):
+    """The archive format you are trying to extract is not supported"""
+ pass
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
new file mode 100644
index 0000000..fc1665f
--- /dev/null
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -0,0 +1,277 @@
+from os import remove as osremove, path as ospath, mkdir, walk, listdir, rmdir, makedirs
+from sys import exit as sysexit
+from json import loads as jsonloads
+from shutil import rmtree, disk_usage
+from PIL import Image
+from magic import Magic
+from subprocess import run as srun, check_output, Popen
+from time import time
+from math import ceil
+from re import split as re_split, I
+from .exceptions import NotSupportedExtractionArchive
+from bot import aria2, app, LOGGER, DOWNLOAD_DIR, get_client, TG_SPLIT_SIZE, EQUAL_SPLITS, STORAGE_THRESHOLD, premium_session
+
+
+ARCH_EXT = [".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
+ ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
+ ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr",
+ ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"]
+
+def clean_target(path: str):
+ if ospath.exists(path):
+ LOGGER.info(f"Cleaning Target: {path}")
+ if ospath.isdir(path):
+ try:
+ rmtree(path)
+ except:
+ pass
+ elif ospath.isfile(path):
+ try:
+ osremove(path)
+ except:
+ pass
+
+def clean_download(path: str):
+ if ospath.exists(path):
+ LOGGER.info(f"Cleaning Download: {path}")
+ try:
+ rmtree(path)
+ except:
+ pass
+
+def start_cleanup():
+ try:
+ rmtree(DOWNLOAD_DIR)
+ except:
+ pass
+ makedirs(DOWNLOAD_DIR)
+
+def clean_all():
+ aria2.remove_all(True)
+ get_client().torrents_delete(torrent_hashes="all")
+ app.stop()
+ if premium_session: premium_session.stop()
+ try:
+ rmtree(DOWNLOAD_DIR)
+ except:
+ pass
+
+def exit_clean_up(signal, frame):
+ try:
+        LOGGER.info("Please wait while we clean up and stop the running downloads")
+ clean_all()
+ sysexit(0)
+ except KeyboardInterrupt:
+ LOGGER.warning("Force Exiting before the cleanup finishes!")
+ sysexit(1)
+
+def clean_unwanted(path: str):
+ LOGGER.info(f"Cleaning unwanted files/folders: {path}")
+ for dirpath, subdir, files in walk(path, topdown=False):
+ for filee in files:
+            if filee.endswith(".!qB") or (filee.endswith('.parts') and filee.startswith('.')):
+ osremove(ospath.join(dirpath, filee))
+ if dirpath.endswith((".unwanted", "splited_files_wz")):
+ rmtree(dirpath)
+ for dirpath, subdir, files in walk(path, topdown=False):
+ if not listdir(dirpath):
+ rmdir(dirpath)
+
+def get_path_size(path: str):
+ if ospath.isfile(path):
+ return ospath.getsize(path)
+ total_size = 0
+ for root, dirs, files in walk(path):
+ for f in files:
+ abs_path = ospath.join(root, f)
+ total_size += ospath.getsize(abs_path)
+ return total_size
+
+def check_storage_threshold(size: int, arch=False, alloc=False):
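+    # arch: an extra copy will be created (archive/extract), so the size is
+    # counted twice; alloc: the download space is already allocated by the
+    # client, so only the remaining free space (plus the extra copy, if any)
+    # is checked against STORAGE_THRESHOLD.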
+ if not alloc:
+ if not arch:
+ if disk_usage(DOWNLOAD_DIR).free - size < STORAGE_THRESHOLD * 1024**3:
+ return False
+ elif disk_usage(DOWNLOAD_DIR).free - (size * 2) < STORAGE_THRESHOLD * 1024**3:
+ return False
+ elif not arch:
+ if disk_usage(DOWNLOAD_DIR).free < STORAGE_THRESHOLD * 1024**3:
+ return False
+ elif disk_usage(DOWNLOAD_DIR).free - size < STORAGE_THRESHOLD * 1024**3:
+ return False
+ return True
+
+def get_base_name(orig_path: str):
+ if ext := [ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)]:
+ ext = ext[0]
+ return re_split(f'{ext}$', orig_path, maxsplit=1, flags=I)[0]
+ else:
+ raise NotSupportedExtractionArchive('File format not supported for extraction')
+
+def get_mime_type(file_path):
+ mime = Magic(mime=True)
+ mime_type = mime.from_file(file_path)
+ mime_type = mime_type or "text/plain"
+ return mime_type
+
+def take_ss(video_file, duration):
+ des_dir = 'Thumbnails'
+ if not ospath.exists(des_dir):
+ mkdir(des_dir)
+ des_dir = ospath.join(des_dir, f"{time()}.jpg")
+ if duration is None:
+ duration = get_media_info(video_file)[0]
+ if duration == 0:
+ duration = 3
+ duration = duration // 2
+
+ status = srun(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
+ "-i", video_file, "-frames:v", "1", des_dir])
+
+ if status.returncode != 0 or not ospath.lexists(des_dir):
+ return None
+
+ with Image.open(des_dir) as img:
+ img.convert("RGB").save(des_dir, "JPEG")
+
+ return des_dir
+
+def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False, noMap=False):
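+    # Files with a video stream are split with ffmpeg ("-ss" resumes from the
+    # end of the previous part, "-fs" caps the output size); everything else
+    # falls back to the GNU "split" command. The split size is shrunk and the
+    # part re-cut whenever an output exceeds TG_SPLIT_SIZE.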
+ if listener.seed and not listener.newDir:
+ dirpath = f"{dirpath}/splited_files_wz"
+ if not ospath.exists(dirpath):
+ mkdir(dirpath)
+ parts = ceil(size/TG_SPLIT_SIZE)
+ if EQUAL_SPLITS and not inLoop:
+ split_size = ceil(size/parts) + 1000
+ if get_media_streams(path)[0]:
+ duration = get_media_info(path)[0]
+ base_name, extension = ospath.splitext(file_)
+ split_size = split_size - 5000000
+ while i <= parts:
+ parted_name = f"{str(base_name)}.part{str(i).zfill(3)}{str(extension)}"
+ out_path = ospath.join(dirpath, parted_name)
+ if not noMap:
+ listener.suproc = Popen(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(start_time),
+ "-i", path, "-fs", str(split_size), "-map", "0", "-map_chapters", "-1",
+ "-c", "copy", out_path])
+ else:
+ listener.suproc = Popen(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(start_time),
+ "-i", path, "-fs", str(split_size), "-map_chapters", "-1", "-c", "copy",
+ out_path])
+ listener.suproc.wait()
+ if listener.suproc.returncode == -9:
+ return False
+ elif listener.suproc.returncode != 0 and not noMap:
+                LOGGER.warning(f"Retrying without -map 0, since it does not work in all situations. Path: {path}")
+ try:
+ osremove(out_path)
+ except:
+ pass
+ return split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, True)
+ elif listener.suproc.returncode != 0:
+                LOGGER.warning(f"Unable to split this video. If its size is less than {TG_SPLIT_SIZE}, it will be uploaded as it is. Path: {path}")
+ try:
+ osremove(out_path)
+ except:
+ pass
+ return "errored"
+ out_size = get_path_size(out_path)
+ if out_size > (TG_SPLIT_SIZE + 1000):
+ dif = out_size - (TG_SPLIT_SIZE + 1000)
+ split_size = split_size - dif + 5000000
+ osremove(out_path)
+ return split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, noMap)
+ lpd = get_media_info(out_path)[0]
+ if lpd == 0:
+                LOGGER.error(f'Something went wrong while splitting; the file is most likely corrupted. Path: {path}')
+ break
+ elif duration == lpd:
+ if not noMap:
+                    LOGGER.warning(f"Retrying without -map 0, since it does not work in all situations. Path: {path}")
+ try:
+ osremove(out_path)
+ except:
+ pass
+ return split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, True)
+ else:
+                    LOGGER.warning(f"This file has been split using only the default video and audio streams, so you may see a single part smaller than the original because not all streams and audio tracks were included. This happens mostly with MKV videos. noMap={noMap}. Path: {path}")
+ break
+ elif lpd <= 4:
+ osremove(out_path)
+ break
+ start_time += lpd - 3
+ i = i + 1
+ else:
+ out_path = ospath.join(dirpath, f"{file_}.")
+ listener.suproc = Popen(["split", "--numeric-suffixes=1", "--suffix-length=3",
+ f"--bytes={split_size}", path, out_path])
+ listener.suproc.wait()
+ if listener.suproc.returncode == -9:
+ return False
+ return True
+
+def get_media_info(path):
+
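+    # Parses ffprobe's JSON output and returns (duration, artist, title);
+    # (0, None, None) is returned when probing fails.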
+ try:
+ result = check_output(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
+ "json", "-show_format", "-show_streams", path]).decode('utf-8')
+ except Exception as e:
+        LOGGER.error(f'{e}. Most likely the file was not found!')
+ return 0, None, None
+
+ fields = jsonloads(result).get('format')
+ if fields is None:
+ LOGGER.error(f"get_media_info: {result}")
+ return 0, None, None
+
+ duration = round(float(fields.get('duration', 0)))
+
+ fields = fields.get('tags')
+ if fields:
+ artist = fields.get('artist')
+ if artist is None:
+ artist = fields.get('ARTIST')
+ title = fields.get('title')
+ if title is None:
+ title = fields.get('TITLE')
+ else:
+ title = None
+ artist = None
+
+ return duration, artist, title
+
+
+
+def get_media_streams(path):
+
+ is_video = False
+ is_audio = False
+
+ mime_type = get_mime_type(path)
+ if mime_type.startswith('audio'):
+ is_audio = True
+ return is_video, is_audio
+
+ if not mime_type.startswith('video'):
+ return is_video, is_audio
+
+ try:
+ result = check_output(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
+ "json", "-show_streams", path]).decode('utf-8')
+ except Exception as e:
+        LOGGER.error(f'{e}. Most likely the file was not found!')
+ return is_video, is_audio
+
+ fields = jsonloads(result).get('streams')
+ if fields is None:
+ LOGGER.error(f"get_media_streams: {result}")
+ return is_video, is_audio
+
+
+ for stream in fields:
+ if stream.get('codec_type') == 'video':
+ is_video = True
+ elif stream.get('codec_type') == 'audio':
+ is_audio = True
+ return is_video, is_audio
diff --git a/bot/helper/ext_utils/html_helper.py b/bot/helper/ext_utils/html_helper.py
new file mode 100644
index 0000000..6152d3d
--- /dev/null
+++ b/bot/helper/ext_utils/html_helper.py
@@ -0,0 +1,125 @@
+hmtl_content = """
+
+
+
+
+
+ /{BotCommands.MirrorCommand} https://1fichier.com/?smmtd8twfpm66awbqz04::love you
\n\n* No spaces between the signs ::\n* For the password, you can use a space!")
+ else:
+ print(str_2)
+ raise DirectDownloadLinkException("ERROR: Failed to generate Direct Link from 1fichier!")
+ elif len(soup.find_all("div", {"class": "ct_warn"})) == 4:
+ str_1 = soup.find_all("div", {"class": "ct_warn"})[-2]
+ str_3 = soup.find_all("div", {"class": "ct_warn"})[-1]
+ if "you must wait" in str(str_1).lower():
+ numbers = [int(word) for word in str(str_1).split() if word.isdigit()]
+                if not numbers:
+                    raise DirectDownloadLinkException("ERROR: 1fichier is on a limit. Please wait a few minutes/hours.")
+                else:
+                    raise DirectDownloadLinkException(f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute(s).")
+ elif "bad password" in str(str_3).lower():
+ raise DirectDownloadLinkException("ERROR: The password you entered is wrong!")
+ else:
+ raise DirectDownloadLinkException("ERROR: Error trying to generate Direct Link from 1fichier!")
+ else:
+ raise DirectDownloadLinkException("ERROR: Error trying to generate Direct Link from 1fichier!")
+
+def solidfiles(url: str) -> str:
+ """ Solidfiles direct link generator
+ Based on https://github.com/Xonshiz/SolidFiles-Downloader
+ By https://github.com/Jusidama18 """
+ headers = {
+ 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36'
+ }
+ pageSource = rget(url, headers = headers).text
+ mainOptions = str(re_search(r'viewerOptions\'\,\ (.*?)\)\;', pageSource).group(1))
+ return jsonloads(mainOptions)["downloadUrl"]
+
+def krakenfiles(page_link: str) -> str:
+ """ krakenfiles direct link generator
+ Based on https://github.com/tha23rd/py-kraken
+ By https://github.com/junedkh """
+ page_resp = rsession().get(page_link)
+ soup = BeautifulSoup(page_resp.text, "lxml")
+ try:
+ token = soup.find("input", id="dl-token")["value"]
+ except:
+ raise DirectDownloadLinkException(f"Page link is wrong: {page_link}")
+
+ hashes = [
+ item["data-file-hash"]
+ for item in soup.find_all("div", attrs={"data-file-hash": True})
+ ]
+ if not hashes:
+ raise DirectDownloadLinkException(f"ERROR: Hash not found for : {page_link}")
+
+
+ dl_hash = hashes[0]
+
+ payload = f'------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="token"\r\n\r\n{token}\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--'
+ headers = {
+ "content-type": "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW",
+ "cache-control": "no-cache",
+ "hash": dl_hash,
+ }
+
+ dl_link_resp = rsession().post(
+        f"https://krakenfiles.com/download/{dl_hash}", data=payload, headers=headers)
+
+ dl_link_json = dl_link_resp.json()
+
+ if "url" in dl_link_json:
+ return dl_link_json["url"]
+ else:
+ raise DirectDownloadLinkException(f"ERROR: Failed to acquire download URL from kraken for : {page_link}")
+
+
+def gdtot(url: str) -> str:
+ """ Gdtot google drive link generator
+ By https://github.com/xcscxr """
+
+ if CRYPT is None:
+ raise DirectDownloadLinkException("ERROR: CRYPT cookie not provided")
+
+ match = re_findall(r'https?://(.+)\.gdtot\.(.+)\/\S+\/\S+', url)[0]
+
+ with rsession() as client:
+ client.cookies.update({'crypt': CRYPT})
+ client.get(url)
+ res = client.get(f"https://{match[0]}.gdtot.{match[1]}/dld?id={url.split('/')[-1]}")
+ matches = re_findall('gd=(.*?)&', res.text)
+ try:
+ decoded_id = b64decode(str(matches[0])).decode('utf-8')
+ except:
+            raise DirectDownloadLinkException("ERROR: Try it in your browser; most likely the file was not found or the user limit was exceeded!")
+ return f'https://drive.google.com/open?id={decoded_id}'
+
+account = {
+ 'email': UNIFIED_EMAIL,
+ 'passwd': UNIFIED_PASS
+}
+def account_login(client, url, email, password):
+ data = {
+ 'email': email,
+ 'password': password
+ }
+ client.post(f'https://{urlparse(url).netloc}/login', data=data)
+def gen_payload(data, boundary=f'{"-"*6}_'):
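+    # Builds a multipart/form-data body by hand; the boundary string must match
+    # the one declared in the request's Content-Type header.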
+ data_string = ''
+ for item in data:
+ data_string += f'{boundary}\r\n'
+ data_string += f'Content-Disposition: form-data; name="{item}"\r\n\r\n{data[item]}\r\n'
+ data_string += f'{boundary}--\r\n'
+ return data_string
+
+def parse_infou(data):
+ info = re_findall('>(.*?)<\/li>', data)
+ info_parsed = {}
+ for item in info:
+ kv = [s.strip() for s in item.split(':', maxsplit = 1)]
+ info_parsed[kv[0].lower()] = kv[1]
+ return info_parsed
+
+def unified(url: str) -> str:
+    if UNIFIED_EMAIL is None or UNIFIED_PASS is None:
+ raise DirectDownloadLinkException("UNIFIED_EMAIL and UNIFIED_PASS env vars not provided")
+ client = cloudscraper.create_scraper(delay=10, browser='chrome')
+ client.headers.update({
+ "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36"
+ })
+ account_login(client, url, account['email'], account['passwd'])
+ res = client.get(url)
+ key = re_findall('"key",\s+"(.*?)"', res.text)[0]
+ ddl_btn = etree.HTML(res.content).xpath("//button[@id='drc']")
+ info_parsed = parse_infou(res.text)
+ info_parsed['error'] = False
+ info_parsed['link_type'] = 'login' # direct/login
+ headers = {
+ "Content-Type": f"multipart/form-data; boundary={'-'*4}_",
+ }
+ data = {
+ 'type': 1,
+ 'key': key,
+ 'action': 'original'
+ }
+ if len(ddl_btn):
+ info_parsed['link_type'] = 'direct'
+ data['action'] = 'direct'
+    response = {}
+    while data['type'] <= 3:
+ try:
+ response = client.post(url, data=gen_payload(data), headers=headers).json()
+ break
+ except: data['type'] += 1
+ if 'url' in response:
+ info_parsed['gdrive_link'] = response['url']
+ elif 'error' in response and response['error']:
+ info_parsed['error'] = True
+ info_parsed['error_message'] = response['message']
+ else:
+ info_parsed['error'] = True
+ info_parsed['error_message'] = 'Something went wrong :('
+
+ if info_parsed['error']:
+ raise DirectDownloadLinkException(f"ERROR! {info_parsed['error_message']}")
+
+ if urlparse(url).netloc == 'appdrive.info':
+ flink = info_parsed['gdrive_link']
+ return flink
+
+ elif urlparse(url).netloc == 'driveapp.in':
+ res = client.get(info_parsed['gdrive_link'])
+ drive_link = etree.HTML(res.content).xpath("//a[contains(@class,'btn')]/@href")[0]
+ flink = drive_link
+ return flink
+
+ else:
+ res = client.get(info_parsed['gdrive_link'])
+ drive_link = etree.HTML(res.content).xpath("//a[contains(@class,'btn btn-primary')]/@href")[0]
+ flink = drive_link
+ info_parsed['src_url'] = url
+ return flink
+
+def parse_info(res, url):
+ info_parsed = {}
+ if 'drivebuzz' in url:
+ info_chunks = re.findall('{meta.get("name")}
'
+ msg += f'\n├📦 Size: {get_readable_file_size(self.transferred_size)}'
+ msg += f'\n├♻ Type: Folder'
+ msg += f'\n├🗃️ SubFolders: {self.__total_folders}'
+ msg += f'\n├🗂️ Files: {self.__total_files}'
+ else:
+                msg += f'╭ Name: {meta.get("name")}'
+ msg += f'\n├ Size: {get_readable_file_size(self.transferred_size)}'
+ msg += f'\n├ Type: Folder'
+ msg += f'\n├ SubFolders: {self.__total_folders}'
+ msg += f'\n├ Files: {self.__total_files}'
+ buttons = ButtonMaker()
+ durl = short_url(durl)
+ buttons.buildbutton("☁️ Drive Link", durl)
+ if INDEX_URL is not None:
+ url_path = rquote(f'{meta.get("name")}', safe='')
+ url = f'{INDEX_URL}/{url_path}/'
+ url = short_url(url)
+ buttons.buildbutton("⚡ Index Link", url)
+ else:
+ file = self.__copyFile(meta.get('id'), parent_id)
+ if EMOJI_THEME is True:
+                msg += f'╭🗂️ Name: {file.get("name")}'
+ else:
+                msg += f'╭ Name: {file.get("name")}'
+ durl = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id"))
+ buttons = ButtonMaker()
+ durl = short_url(durl)
+ buttons.buildbutton("☁️ Drive Link", durl)
+ if mime_type is None:
+ mime_type = 'File'
+ if EMOJI_THEME is True:
+ msg += f'\n├📦 Size: {get_readable_file_size(int(meta.get("size", 0)))}'
+ msg += f'\n├♻ Type: {mime_type}'
+ else:
+ msg += f'\n├ Size: {get_readable_file_size(int(meta.get("size", 0)))}'
+ msg += f'\n├ Type: {mime_type}'
+ if INDEX_URL is not None:
+ url_path = rquote(f'{file.get("name")}', safe='')
+ url = f'{INDEX_URL}/{url_path}'
+ url = short_url(url)
+ buttons.buildbutton("⚡ Index Link", url)
+ if VIEW_LINK:
+ urls = f'{INDEX_URL}/{url_path}?a=view'
+ urls = short_url(urls)
+ buttons.buildbutton("🌐 View Link", urls)
+ if BUTTON_FOUR_NAME is not None and BUTTON_FOUR_URL is not None:
+ buttons.buildbutton(f"{BUTTON_FOUR_NAME}", f"{BUTTON_FOUR_URL}")
+ if BUTTON_FIVE_NAME is not None and BUTTON_FIVE_URL is not None:
+ buttons.buildbutton(f"{BUTTON_FIVE_NAME}", f"{BUTTON_FIVE_URL}")
+ if BUTTON_SIX_NAME is not None and BUTTON_SIX_URL is not None:
+ buttons.buildbutton(f"{BUTTON_SIX_NAME}", f"{BUTTON_SIX_URL}")
+ if SOURCE_LINK is True:
+ buttons.buildbutton(f"🔗 Source Link", link)
+ except Exception as err:
+ if isinstance(err, RetryError):
+ LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
+ err = err.last_attempt.exception()
+ err = str(err).replace('>', '').replace('<', '')
+ if "User rate limit exceeded" in err:
+ msg = "User rate limit exceeded."
+ elif "File not found" in err:
+ token_service = self.__alt_authorize()
+ if token_service is not None:
+ self.__service = token_service
+ return self.clone(link)
+ msg = "File not found."
+ else:
+ msg = f"Error.\n{err}"
+ return msg, ""
+ return msg, buttons.build_menu(2)
+
+ def __cloneFolder(self, name, local_path, folder_id, parent_id):
+ LOGGER.info(f"Syncing: {local_path}")
+ files = self.__getFilesByFolderId(folder_id)
+ if len(files) == 0:
+ return parent_id
+ for file in files:
+ if file.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE:
+ self.__total_folders += 1
+ file_path = ospath.join(local_path, file.get('name'))
+ current_dir_id = self.__create_directory(file.get('name'), parent_id)
+ self.__cloneFolder(file.get('name'), file_path, file.get('id'), current_dir_id)
+ elif not file.get('name').lower().endswith(tuple(EXTENSION_FILTER)):
+ self.__total_files += 1
+ self.transferred_size += int(file.get('size', 0))
+ self.__copyFile(file.get('id'), parent_id)
+ if self.__is_cancelled:
+ break
+
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6), stop=stop_after_attempt(3),
+ retry=retry_if_exception_type(GCError))
+ def __copyFile(self, file_id, dest_id):
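+        # On userRateLimitExceeded/dailyLimitExceeded the copy is retried with
+        # the next service account (when USE_SERVICE_ACCOUNTS is enabled) until
+        # all accounts have been exhausted.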
+ body = {'parents': [dest_id]}
+ try:
+ return (
+ self.__service.files()
+ .copy(supportsAllDrives=True, fileId=file_id, body=body)
+ .execute()
+ )
+ except HttpError as err:
+ if err.resp.get('content-type', '').startswith('application/json'):
+ reason = jsnloads(err.content).get('error').get('errors')[0].get('reason')
+ if reason in ['userRateLimitExceeded', 'dailyLimitExceeded']:
+ if USE_SERVICE_ACCOUNTS:
+ if self.__sa_count == SERVICE_ACCOUNTS_NUMBER:
+ self.__is_cancelled = True
+ raise err
+ else:
+ self.__switchServiceAccount()
+ return self.__copyFile(file_id, dest_id)
+ else:
+ self.__is_cancelled = True
+ LOGGER.error(f"Got: {reason}")
+ raise err
+ else:
+ raise err
+
+ def __escapes(self, estr):
+ chars = ['\\', "'", '"', r'\a', r'\b', r'\f', r'\n', r'\r', r'\t']
+ for char in chars:
+ estr = estr.replace(char, f'\\{char}')
+ return estr.strip()
+
+ def __get_recursive_list(self, file, rootid):
+ rtnlist = []
+ if not rootid:
+ rootid = file.get('teamDriveId')
+ if rootid == "root":
+ rootid = self.__service.files().get(fileId = 'root', fields="id").execute().get('id')
+ x = file.get("name")
+ y = file.get("id")
+ while(y != rootid):
+ rtnlist.append(x)
+ file = self.__service.files().get(
+ fileId=file.get("parents")[0],
+ supportsAllDrives=True,
+ fields='id, name, parents'
+ ).execute()
+ x = file.get("name")
+ y = file.get("id")
+ rtnlist.reverse()
+ return rtnlist
+
+ def __drive_query(self, parent_id, fileName, stopDup, isRecursive, itemType):
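+        # Builds a Drive API "q" filter: an exact name match when stopDup is
+        # set, otherwise a "name contains" clause per word, optionally limited
+        # to files or folders, and scoped to the parent drive/folder.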
+ try:
+ if isRecursive:
+ if stopDup:
+ query = f"name = '{fileName}' and "
+ else:
+ fileName = fileName.split()
+ query = "".join(
+ f"name contains '{name}' and "
+ for name in fileName
+ if name != ''
+ )
+ if itemType == "files":
+ query += "mimeType != 'application/vnd.google-apps.folder' and "
+ elif itemType == "folders":
+ query += "mimeType = 'application/vnd.google-apps.folder' and "
+ query += "trashed = false"
+ if parent_id == "root":
+ return (
+ self.__service.files()
+ .list(q=f"{query} and 'me' in owners",
+ pageSize=200,
+ spaces='drive',
+ fields='files(id, name, mimeType, size, parents)',
+ orderBy='folder, name asc'
+ )
+ .execute()
+ )
+ else:
+ return (
+ self.__service.files()
+ .list(supportsTeamDrives=True,
+ includeTeamDriveItems=True,
+ teamDriveId=parent_id,
+ q=query,
+ corpora='drive',
+ spaces='drive',
+ pageSize=200,
+ fields='files(id, name, mimeType, size, teamDriveId, parents)',
+ orderBy='folder, name asc'
+ )
+ .execute()
+ )
+ else:
+ if stopDup:
+ query = f"'{parent_id}' in parents and name = '{fileName}' and "
+ else:
+ query = f"'{parent_id}' in parents and "
+ fileName = fileName.split()
+ for name in fileName:
+ if name != '':
+ query += f"name contains '{name}' and "
+ if itemType == "files":
+ query += "mimeType != 'application/vnd.google-apps.folder' and "
+ elif itemType == "folders":
+ query += "mimeType = 'application/vnd.google-apps.folder' and "
+ query += "trashed = false"
+ return (
+ self.__service.files()
+ .list(
+ supportsTeamDrives=True,
+ includeTeamDriveItems=True,
+ q=query,
+ spaces='drive',
+ pageSize=200,
+ fields='files(id, name, mimeType, size)',
+ orderBy='folder, name asc',
+ )
+ .execute()
+ )
+ except Exception as err:
+ err = str(err).replace('>', '').replace('<', '')
+ LOGGER.error(err)
+ return {'files': []}
+
+ def drive_list(self, fileName, stopDup=False, noMulti=False, isRecursive=True, itemType=""):
+ if TELEGRAPH_STYLE is True:
+
+ msg = ""
+ fileName = self.__escapes(str(fileName))
+ contents_count = 0
+ telegraph_content = []
+ path = []
+ Title = False
+ if len(DRIVES_IDS) > 1:
+ token_service = self.__alt_authorize()
+ if token_service is not None:
+ self.__service = token_service
+ for index, parent_id in enumerate(DRIVES_IDS):
+ isRecur = False if isRecursive and len(parent_id) > 23 else isRecursive
+ response = self.__drive_query(parent_id, fileName, stopDup, isRecur, itemType)
+ if not response["files"]:
+ if noMulti:
+ break
+ else:
+ continue
+ if not Title:
+ msg += f'{file.get('name')}
(folder)
{file.get('name')}
({get_readable_file_size(int(file.get('size', 0)))})
+                    msg += f'╭🗂️ Name: {name}'
+ msg += f'\n├📦 Size: {get_readable_file_size(self.__total_bytes)}'
+ msg += f'\n├♻ Type: Folder'
+ msg += f'\n├🗃️ SubFolders: {self.__total_folders}'
+ else:
+                    msg += f'╭ Name: {name}'
+ msg += f'\n├ Size: {get_readable_file_size(self.__total_bytes)}'
+ msg += f'\n├ Type: Folder'
+ msg += f'\n├ SubFolders: {self.__total_folders}'
+ else:
+ if EMOJI_THEME is True:
+                    msg += f'╭🗂️ Name: {name}'
+ else:
+                    msg += f'╭ Name: {name}'
+ if mime_type is None:
+ mime_type = 'File'
+ self.__total_files += 1
+ self.__gDrive_file(meta)
+ if EMOJI_THEME is True:
+ msg += f'\n├📦 Size: {get_readable_file_size(self.__total_bytes)}'
+ msg += f'\n├♻ Type: {mime_type}'
+ else:
+ msg += f'\n├ Size: {get_readable_file_size(self.__total_bytes)}'
+ msg += f'\n├ Type: {mime_type}'
+ if EMOJI_THEME is True:
+ msg += f'\n├🗂️ Files: {self.__total_files}'
+ else:
+ msg += f'\n├ Files: {self.__total_files}'
+ except Exception as err:
+ if isinstance(err, RetryError):
+ LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
+ err = err.last_attempt.exception()
+ err = str(err).replace('>', '').replace('<', '')
+ if "File not found" in err:
+ token_service = self.__alt_authorize()
+ if token_service is not None:
+ self.__service = token_service
+ return self.count(link)
+ msg = "File not found."
+ else:
+ msg = f"Error.\n{err}"
+ return msg
+
+ def __gDrive_file(self, filee):
+ size = int(filee.get('size', 0))
+ self.__total_bytes += size
+
+ def __gDrive_directory(self, drive_folder):
+ files = self.__getFilesByFolderId(drive_folder['id'])
+ if len(files) == 0:
+ return
+ for filee in files:
+ shortcut_details = filee.get('shortcutDetails')
+ if shortcut_details is not None:
+ mime_type = shortcut_details['targetMimeType']
+ file_id = shortcut_details['targetId']
+ filee = self.__getFileMetadata(file_id)
+ else:
+ mime_type = filee.get('mimeType')
+ if mime_type == self.__G_DRIVE_DIR_MIME_TYPE:
+ self.__total_folders += 1
+ self.__gDrive_directory(filee)
+ else:
+ self.__total_files += 1
+ self.__gDrive_file(filee)
+
+ def helper(self, link):
+ try:
+ file_id = self.__getIdFromUrl(link)
+ except (KeyError, IndexError):
+ msg = "Google Drive ID could not be found in the provided link"
+ return msg, "", "", ""
+ LOGGER.info(f"File ID: {file_id}")
+ try:
+ meta = self.__getFileMetadata(file_id)
+ name = meta['name']
+ LOGGER.info(f"Checking size, this might take a minute: {name}")
+ if meta.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE:
+ self.__gDrive_directory(meta)
+ else:
+ self.__total_files += 1
+ self.__gDrive_file(meta)
+ size = self.__total_bytes
+ files = self.__total_files
+ except Exception as err:
+ if isinstance(err, RetryError):
+ LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
+ err = err.last_attempt.exception()
+ err = str(err).replace('>', '').replace('<', '')
+ if "File not found" in err:
+ token_service = self.__alt_authorize()
+ if token_service is not None:
+ self.__service = token_service
+ return self.helper(link)
+ msg = "File not found."
+ else:
+ msg = f"Error.\n{err}"
+ return msg, "", "", ""
+ return "", size, name, files
+
+ def download(self, link):
+ self.__is_downloading = True
+ file_id = self.__getIdFromUrl(link)
+ self.__updater = setInterval(self.__update_interval, self._progress)
+ try:
+ meta = self.__getFileMetadata(file_id)
+ if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE:
+ self.__download_folder(file_id, self.__path, self.name)
+ else:
+ makedirs(self.__path)
+ self.__download_file(file_id, self.__path, self.name, meta.get('mimeType'))
+ except Exception as err:
+ if isinstance(err, RetryError):
+ LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
+ err = err.last_attempt.exception()
+ err = str(err).replace('>', '').replace('<', '')
+ if "downloadQuotaExceeded" in err:
+ err = "Download Quota Exceeded."
+ elif "File not found" in err:
+ token_service = self.__alt_authorize()
+ if token_service is not None:
+ self.__service = token_service
+ self.__updater.cancel()
+ return self.download(link)
+ self.__listener.onDownloadError(err)
+ self.__is_cancelled = True
+ finally:
+ self.__updater.cancel()
+ if self.__is_cancelled:
+ return
+ self.__listener.onDownloadComplete()
+
+ def __download_folder(self, folder_id, path, folder_name):
+ folder_name = folder_name.replace('/', '')
+ if not ospath.exists(f"{path}/{folder_name}"):
+ makedirs(f"{path}/{folder_name}")
+ path += f"/{folder_name}"
+ result = self.__getFilesByFolderId(folder_id)
+ if len(result) == 0:
+ return
+ result = sorted(result, key=lambda k: k['name'])
+ for item in result:
+ file_id = item['id']
+ filename = item['name']
+ shortcut_details = item.get('shortcutDetails')
+ if shortcut_details is not None:
+ file_id = shortcut_details['targetId']
+ mime_type = shortcut_details['targetMimeType']
+ else:
+ mime_type = item.get('mimeType')
+ if mime_type == self.__G_DRIVE_DIR_MIME_TYPE:
+ self.__download_folder(file_id, path, filename)
+ elif not ospath.isfile(f"{path}{filename}") and not filename.lower().endswith(tuple(EXTENSION_FILTER)):
+ self.__download_file(file_id, path, filename, mime_type)
+ if self.__is_cancelled:
+ break
+
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6), stop=stop_after_attempt(3),
+ retry=(retry_if_exception_type(GCError) | retry_if_exception_type(IOError)))
+ def __download_file(self, file_id, path, filename, mime_type):
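+        # Streams the file in 50 MiB chunks; names longer than 255 bytes are
+        # trimmed (keeping the extension) to stay within filesystem limits, and
+        # quota errors can trigger a switch to the next service account.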
+ request = self.__service.files().get_media(fileId=file_id)
+ filename = filename.replace('/', '')
+ if len(filename.encode()) > 255:
+ ext = ospath.splitext(filename)[1]
+ filename = f"{filename[:245]}{ext}"
+ if self.name.endswith(ext):
+ self.name = filename
+ fh = FileIO(f"{path}/{filename}", 'wb')
+ downloader = MediaIoBaseDownload(fh, request, chunksize=50 * 1024 * 1024)
+ done = False
+ while not done:
+ if self.__is_cancelled:
+ fh.close()
+ break
+ try:
+ self.__status, done = downloader.next_chunk()
+ except HttpError as err:
+ if err.resp.get('content-type', '').startswith('application/json'):
+ reason = jsnloads(err.content).get('error').get('errors')[0].get('reason')
+ if reason not in [
+ 'downloadQuotaExceeded',
+ 'dailyLimitExceeded',
+ ]:
+ raise err
+ if USE_SERVICE_ACCOUNTS:
+ if self.__sa_count == SERVICE_ACCOUNTS_NUMBER:
+ self.__is_cancelled = True
+ raise err
+ else:
+ self.__switchServiceAccount()
+ LOGGER.info(f"Got: {reason}, Trying Again...")
+ return self.__download_file(file_id, path, filename, mime_type)
+ else:
+ LOGGER.error(f"Got: {reason}")
+ raise err
+ self._file_processed_bytes = 0
+
+ def cancel_download(self):
+ self.__is_cancelled = True
+ if self.__is_downloading:
+ LOGGER.info(f"Cancelling Download: {self.name}")
+ self.__listener.onDownloadError('Download stopped by user!')
+ elif self.__is_cloning:
+ LOGGER.info(f"Cancelling Clone: {self.name}")
+ elif self.__is_uploading:
+ LOGGER.info(f"Cancelling Upload: {self.name}")
+            self.__listener.onUploadError('Your upload has been stopped and the uploaded data has been deleted!')
\ No newline at end of file
diff --git a/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py b/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py
new file mode 100644
index 0000000..77a89c7
--- /dev/null
+++ b/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py
@@ -0,0 +1,290 @@
+from logging import getLogger, ERROR
+from os import remove as osremove, walk, path as ospath, rename as osrename
+from time import time, sleep
+from pyrogram.errors import FloodWait, RPCError
+from PIL import Image
+from threading import RLock
+from bot import AS_DOCUMENT, AS_DOC_USERS, AS_MEDIA_USERS, CUSTOM_FILENAME, \
+ EXTENSION_FILTER, app, LEECH_LOG, BOT_PM, tgBotMaxFileSize, premium_session, CAPTION_FONT
+from bot.helper.ext_utils.fs_utils import take_ss, get_media_info, get_media_streams, get_path_size, clean_unwanted
+from bot.helper.ext_utils.bot_utils import get_readable_file_size
+from pyrogram.types import Message
+
+LOGGER = getLogger(__name__)
+getLogger("pyrogram").setLevel(ERROR)
+IMAGE_SUFFIXES = ("JPG", "JPX", "PNG", "CR2", "TIF", "BMP", "JXR", "PSD", "ICO", "HEIC", "JPEG")
+class TgUploader:
+
+ def __init__(self, name=None, path=None, size=0, listener=None):
+ self.name = name
+ self.uploaded_bytes = 0
+ self._last_uploaded = 0
+ self.__listener = listener
+ self.__path = path
+ self.__start_time = time()
+ self.__total_files = 0
+ self.__is_cancelled = False
+ self.__as_doc = AS_DOCUMENT
+ self.__thumb = f"Thumbnails/{listener.message.from_user.id}.jpg"
+ self.__msgs_dict = {}
+ self.__corrupted = 0
+ self.__resource_lock = RLock()
+ self.__is_corrupted = False
+ self.__sent_msg = app.get_messages(self.__listener.message.chat.id, self.__listener.uid)
+ self.__size = size
+ self.__user_settings()
+ self.__leech_log = LEECH_LOG.copy() # copy then pop to keep the original var as it is
+ self.__app = app
+ self.__user_id = listener.message.from_user.id
+ self.isPrivate = listener.message.chat.type in ['private', 'group']
+
+ def upload(self, o_files):
+ for dirpath, subdir, files in sorted(walk(self.__path)):
+ for file_ in sorted(files):
+ if file_ in o_files:
+ continue
+ if not file_.lower().endswith(tuple(EXTENSION_FILTER)):
+ up_path = ospath.join(dirpath, file_)
+ self.__total_files += 1
+ try:
+ if ospath.getsize(up_path) == 0:
+                            LOGGER.error(f"{up_path} size is zero; Telegram doesn't upload zero-size files")
+ self.__corrupted += 1
+ continue
+ except Exception as e:
+ if self.__is_cancelled:
+ return
+ else:
+ LOGGER.error(e)
+ continue
+ self.__upload_file(up_path, file_, dirpath)
+ if self.__is_cancelled:
+ return
+ if not self.__listener.isPrivate and not self.__is_corrupted:
+ self.__msgs_dict[self.__sent_msg.link] = file_
+ self._last_uploaded = 0
+ sleep(1)
+ if self.__listener.seed and not self.__listener.newDir:
+ clean_unwanted(self.__path)
+ if self.__total_files <= self.__corrupted:
+ return self.__listener.onUploadError('Files Corrupted. Check logs')
+ LOGGER.info(f"Leech Completed: {self.name}")
+ size = get_readable_file_size(self.__size)
+ self.__listener.onUploadComplete(None, size, self.__msgs_dict, self.__total_files, self.__corrupted, self.name)
+
+ def __upload_file(self, up_path, file_, dirpath):
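+        # Chooses send_video / send_audio / send_photo / send_document based on
+        # the detected streams and file extension, uploads to every LEECH_LOG
+        # chat (or replies in the origin chat when no log chat is set) and, if
+        # BOT_PM is enabled, copies the result to the user's PM.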
+ if CUSTOM_FILENAME is not None:
+ cap_mono = f"{CUSTOM_FILENAME} <{CAPTION_FONT}>{file_}{CAPTION_FONT}>"
+ file_ = f"{CUSTOM_FILENAME} {file_}"
+ new_path = ospath.join(dirpath, file_)
+ osrename(up_path, new_path)
+ up_path = new_path
+ else:
+ cap_mono = f"<{CAPTION_FONT}>{file_}{CAPTION_FONT}>"
+ notMedia = False
+ thumb = self.__thumb
+ self.__is_corrupted = False
+ try:
+ is_video, is_audio = get_media_streams(up_path)
+ if not self.__as_doc:
+ if is_video:
+ duration = get_media_info(up_path)[0]
+ if thumb is None:
+ thumb = take_ss(up_path, duration)
+ if self.__is_cancelled:
+ if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
+ osremove(thumb)
+ return
+ if thumb is not None:
+ with Image.open(thumb) as img:
+ width, height = img.size
+ else:
+ width = 480
+ height = 320
+ if not file_.upper().endswith(("MKV", "MP4")):
+ file_ = f"{ospath.splitext(file_)[0]}.mp4"
+ new_path = ospath.join(dirpath, file_)
+ osrename(up_path, new_path)
+ up_path = new_path
+ if len(LEECH_LOG) != 0:
+ for leechchat in self.__leech_log:
+ if ospath.getsize(up_path) > tgBotMaxFileSize: usingclient = premium_session
+ else: usingclient = self.__app
+ self.__sent_msg = usingclient.send_video(chat_id=leechchat,video=up_path,
+ caption=cap_mono,
+ duration=duration,
+ width=width,
+ height=height,
+ thumb=thumb,
+ supports_streaming=True,
+ disable_notification=True,
+ progress=self.__upload_progress)
+ if BOT_PM:
+ try:
+ app.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id)
+ except Exception as err:
+ LOGGER.error(f"Failed To Send Video in PM:\n{err}")
+ else:
+ self.__sent_msg = self.__sent_msg.reply_video(video=up_path,
+ quote=True,
+ caption=cap_mono,
+ duration=duration,
+ width=width,
+ height=height,
+ thumb=thumb,
+ supports_streaming=True,
+ disable_notification=True,
+ progress=self.__upload_progress)
+ if not self.isPrivate and BOT_PM:
+ try:
+ app.send_video(chat_id=self.__user_id, video=self.__sent_msg.video.file_id,
+ caption=cap_mono)
+ except Exception as err:
+ LOGGER.error(f"Failed To Send Video in PM:\n{err}")
+ elif is_audio:
+ duration , artist, title = get_media_info(up_path)
+ if len(LEECH_LOG) != 0:
+ for leechchat in self.__leech_log:
+ if ospath.getsize(up_path) > tgBotMaxFileSize: usingclient = premium_session
+ else: usingclient = self.__app
+ self.__sent_msg = usingclient.send_audio(chat_id=leechchat,audio=up_path,
+ caption=cap_mono,
+ duration=duration,
+ performer=artist,
+ title=title,
+ thumb=thumb,
+ disable_notification=True,
+ progress=self.__upload_progress)
+ if BOT_PM:
+ try:
+ app.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id)
+ except Exception as err:
+ LOGGER.error(f"Failed To Send Audio in PM:\n{err}")
+ else:
+ self.__sent_msg = self.__sent_msg.reply_audio(audio=up_path,
+ quote=True,
+ caption=cap_mono,
+ duration=duration,
+ performer=artist,
+ title=title,
+ thumb=thumb,
+ disable_notification=True,
+ progress=self.__upload_progress)
+ if not self.__listener.isPrivate and BOT_PM:
+ try:
+ app.send_audio(chat_id=self.__user_id, audio=self.__sent_msg.audio.file_id,
+ caption=cap_mono)
+ except Exception as err:
+ LOGGER.error(f"Failed To Send Audio in PM:\n{err}")
+ elif file_.upper().endswith(IMAGE_SUFFIXES):
+ if len(LEECH_LOG) != 0:
+ for leechchat in self.__leech_log:
+ if ospath.getsize(up_path) > tgBotMaxFileSize: usingclient = premium_session
+ else: usingclient = self.__app
+ self.__sent_msg = usingclient.send_photo(chat_id=leechchat,
+ photo=up_path,
+ caption=cap_mono,
+ disable_notification=True,
+ progress=self.__upload_progress)
+ if BOT_PM:
+ try:
+ app.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id)
+ except Exception as err:
+ LOGGER.error(f"Failed To Send Image in PM:\n{err}")
+ else:
+ self.__sent_msg = self.__sent_msg.reply_photo(photo=up_path,
+ quote=True,
+ caption=cap_mono,
+ disable_notification=True,
+ progress=self.__upload_progress)
+ if not self.__listener.isPrivate and BOT_PM:
+ try:
+ app.send_photo(chat_id=self.__user_id, photo=self.__sent_msg.photo.file_id,
+ caption=cap_mono)
+ except Exception as err:
+ LOGGER.error(f"Failed To Send Image in PM:\n{err}")
+ else:
+ notMedia = True
+ if self.__as_doc or notMedia:
+ if is_video and thumb is None:
+ thumb = take_ss(up_path, None)
+ if self.__is_cancelled:
+ if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
+ osremove(thumb)
+ return
+ if len(LEECH_LOG) != 0:
+ for leechchat in self.__leech_log:
+ if ospath.getsize(up_path) > tgBotMaxFileSize: usingclient = premium_session
+ else: usingclient = self.__app
+ self.__sent_msg = usingclient.send_document(chat_id=leechchat,document=up_path,
+ thumb=thumb,
+ caption=cap_mono,
+ disable_notification=True,
+ progress=self.__upload_progress)
+ if BOT_PM:
+ try:
+ app.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id)
+ except Exception as err:
+ LOGGER.error(f"Failed To Send Document in PM:\n{err}")
+ else:
+ self.__sent_msg = self.__sent_msg.reply_document(document=up_path,
+ quote=True,
+ thumb=thumb,
+ caption=cap_mono,
+ disable_notification=True,
+ progress=self.__upload_progress)
+ if not self.__listener.isPrivate and BOT_PM:
+ try:
+ app.send_document(chat_id=self.__user_id, document=self.__sent_msg.document.file_id,
+ caption=cap_mono)
+ except Exception as err:
+ LOGGER.error(f"Failed To Send Document in PM:\n{err}")
+ except FloodWait as f:
+ LOGGER.warning(str(f))
+ sleep(f.value)
+ except RPCError as e:
+ LOGGER.error(f"RPCError: {e} Path: {up_path}")
+ self.__corrupted += 1
+ self.__is_corrupted = True
+ except Exception as err:
+ LOGGER.error(f"{err} Path: {up_path}")
+ self.__corrupted += 1
+ self.__is_corrupted = True
+ if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
+ osremove(thumb)
+ if not self.__is_cancelled and \
+ (not self.__listener.seed or self.__listener.newDir or dirpath.endswith("splited_files_mltb")):
+ try:
+ osremove(up_path)
+ except:
+ pass
+
+ def __upload_progress(self, current, total):
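+ # Pyrogram reports cumulative bytes transferred, so track the delta since the last callback under the lock to keep the shared counter consistent.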
+ if self.__is_cancelled:
+ app.stop_transmission()
+ return
+ with self.__resource_lock:
+ chunk_size = current - self._last_uploaded
+ self._last_uploaded = current
+ self.uploaded_bytes += chunk_size
+
+ def __user_settings(self):
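+ # Apply the per-user document/media preference and drop the saved thumbnail if its file no longer exists.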
+ if self.__listener.message.from_user.id in AS_DOC_USERS:
+ self.__as_doc = True
+ elif self.__listener.message.from_user.id in AS_MEDIA_USERS:
+ self.__as_doc = False
+ if not ospath.lexists(self.__thumb):
+ self.__thumb = None
+
+ @property
+ def speed(self):
+ with self.__resource_lock:
+ try:
+ return self.uploaded_bytes / (time() - self.__start_time)
+ except:
+ return 0
+
+ def cancel_download(self):
+ self.__is_cancelled = True
+ LOGGER.info(f"Cancelling Upload: {self.name}")
+ self.__listener.onUploadError('Your upload has been stopped!')
\ No newline at end of file
diff --git a/bot/helper/telegram_helper/__init__.py b/bot/helper/telegram_helper/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/bot/helper/telegram_helper/__init__.py
@@ -0,0 +1 @@
+
diff --git a/bot/helper/telegram_helper/bot_commands.py b/bot/helper/telegram_helper/bot_commands.py
new file mode 100644
index 0000000..9628014
--- /dev/null
+++ b/bot/helper/telegram_helper/bot_commands.py
@@ -0,0 +1,69 @@
+from bot import CMD_INDEX
+import os
+def getCommand(name: str, command: str):
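+ # Prefer a non-empty command name from the environment; otherwise fall back to the default suffixed with CMD_INDEX.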
+ try:
+ if len(os.environ[name]) == 0:
+ raise KeyError
+ return os.environ[name]
+ except KeyError:
+ return command
+
+
+class _BotCommands:
+ def __init__(self):
+ self.StartCommand = getCommand('START_COMMAND', f'start{CMD_INDEX}')
+ self.MirrorCommand = getCommand('MIRROR_COMMAND', f'mirror{CMD_INDEX}')
+ self.UnzipMirrorCommand = getCommand('UNZIP_COMMAND', f'unzipmirror{CMD_INDEX}')
+ self.ZipMirrorCommand = getCommand('ZIP_COMMAND', f'zipmirror{CMD_INDEX}')
+ self.CancelMirror = getCommand('CANCEL_COMMAND', f'cancel{CMD_INDEX}')
+ self.CancelAllCommand = getCommand('CANCEL_ALL_COMMAND', f'cancelall{CMD_INDEX}')
+ self.ListCommand = getCommand('LIST_COMMAND', f'list{CMD_INDEX}')
+ self.SearchCommand = getCommand('SEARCH_COMMAND', f'search{CMD_INDEX}')
+ self.StatusCommand = getCommand('STATUS_COMMAND', f'status{CMD_INDEX}')
+ self.AuthorizedUsersCommand = getCommand('USERS_COMMAND', f'users{CMD_INDEX}')
+ self.AuthorizeCommand = getCommand('AUTH_COMMAND', f'authorize{CMD_INDEX}')
+ self.UnAuthorizeCommand = getCommand('UNAUTH_COMMAND', f'unauthorize{CMD_INDEX}')
+ self.AddSudoCommand = getCommand('ADDSUDO_COMMAND', f'addsudo{CMD_INDEX}')
+ self.RmSudoCommand = getCommand('RMSUDO_COMMAND', f'rmsudo{CMD_INDEX}')
+ self.PingCommand = getCommand('PING_COMMAND', f'ping{CMD_INDEX}')
+ self.RestartCommand = getCommand('RESTART_COMMAND', f'restart{CMD_INDEX}')
+ self.StatsCommand = getCommand('STATS_COMMAND', f'stats{CMD_INDEX}')
+ self.HelpCommand = getCommand('HELP_COMMAND', f'help{CMD_INDEX}')
+ self.LogCommand = getCommand('LOG_COMMAND', f'log{CMD_INDEX}')
+ self.BtSelectCommand = getCommand('BTSEL_COMMAND', f'btsel{CMD_INDEX}')
+ self.SpeedCommand = getCommand('SPEEDTEST_COMMAND', f'speedtest{CMD_INDEX}')
+ self.CloneCommand = getCommand('CLONE_COMMAND', f'clone{CMD_INDEX}')
+ self.CountCommand = getCommand('COUNT_COMMAND', f'count{CMD_INDEX}')
+ self.WatchCommand = getCommand('WATCH_COMMAND', f'watch{CMD_INDEX}')
+ self.ZipWatchCommand = getCommand('ZIPWATCH_COMMAND', f'zipwatch{CMD_INDEX}')
+ self.QbMirrorCommand = getCommand('QBMIRROR_COMMAND', f'qbmirror{CMD_INDEX}')
+ self.QbUnzipMirrorCommand = getCommand('QBUNZIP_COMMAND', f'qbunzipmirror{CMD_INDEX}')
+ self.QbZipMirrorCommand = getCommand('QBZIP_COMMAND', f'qbzipmirror{CMD_INDEX}')
+ self.DeleteCommand = getCommand('DELETE_COMMAND', f'del{CMD_INDEX}')
+ self.ShellCommand = getCommand('SHELL_COMMAND', f'shell{CMD_INDEX}')
+ self.ExecHelpCommand = getCommand('EXEHELP_COMMAND', f'exechelp{CMD_INDEX}')
+ self.LeechSetCommand = getCommand('LEECHSET_COMMAND', f'leechset{CMD_INDEX}')
+ self.SetThumbCommand = getCommand('SETTHUMB_COMMAND', f'setthumb{CMD_INDEX}')
+ self.LeechCommand = getCommand('LEECH_COMMAND', f'leech{CMD_INDEX}')
+ self.UnzipLeechCommand = getCommand('UNZIPLEECH_COMMAND', f'unzipleech{CMD_INDEX}')
+ self.ZipLeechCommand = getCommand('ZIPLEECH_COMMAND', f'zipleech{CMD_INDEX}')
+ self.QbLeechCommand = getCommand('QBLEECH_COMMAND', f'qbleech{CMD_INDEX}')
+ self.QbUnzipLeechCommand = getCommand('QBUNZIPLEECH_COMMAND', f'qbunzipleech{CMD_INDEX}')
+ self.QbZipLeechCommand = getCommand('QBZIPLEECH_COMMAND', f'qbzipleech{CMD_INDEX}')
+ self.LeechWatchCommand = getCommand('LEECHWATCH_COMMAND', f'leechwatch{CMD_INDEX}')
+ self.MediaInfoCommand = getCommand('MEDIAINFO_COMMAND', f'mediainfo{CMD_INDEX}')
+ self.HashCommand = getCommand('HASH_COMMAND', f'hash{CMD_INDEX}')
+ self.LeechZipWatchCommand = getCommand('LEECHZIPWATCH_COMMAND', f'leechzipwatch{CMD_INDEX}')
+ self.RssListCommand = getCommand('RSSLIST_COMMAND', f'rsslist{CMD_INDEX}')
+ self.RssGetCommand = getCommand('RSSGET_COMMAND', f'rssget{CMD_INDEX}')
+ self.RssSubCommand = getCommand('RSSSUB_COMMAND', f'rsssub{CMD_INDEX}')
+ self.RssUnSubCommand = getCommand('RSSUNSUB_COMMAND', f'rssunsub{CMD_INDEX}')
+ self.RssSettingsCommand = getCommand('RSSSET_COMMAND', f'rssset{CMD_INDEX}')
+ self.WayBackCommand = getCommand('WAYBACK_COMMAND', f'wayback{CMD_INDEX}')
+ self.AddleechlogCommand = getCommand('ADDLEECHLOG_CMD', f'addleechlog{CMD_INDEX}')
+ self.RmleechlogCommand = getCommand('RMLEECHLOG_CMD', f'rmleechlog{CMD_INDEX}')
+ self.EvalCommand = f'eval{CMD_INDEX}'
+ self.ExecCommand = f'exec{CMD_INDEX}'
+ self.ClearLocalsCommand = f'clearlocals{CMD_INDEX}'
+
+BotCommands = _BotCommands()
diff --git a/bot/helper/telegram_helper/button_build.py b/bot/helper/telegram_helper/button_build.py
new file mode 100644
index 0000000..1a0bc4b
--- /dev/null
+++ b/bot/helper/telegram_helper/button_build.py
@@ -0,0 +1,32 @@
+from telegram import InlineKeyboardButton, InlineKeyboardMarkup
+
+
+class ButtonMaker:
+ def __init__(self):
+ self.button = []
+ self.header_button = []
+ self.footer_button = []
+
+ def buildbutton(self, key, link, footer=False, header=False):
+ if not footer and not header:
+ self.button.append(InlineKeyboardButton(text = key, url = link))
+ elif header:
+ self.header_button.append(InlineKeyboardButton(text = key, url = link))
+ elif footer:
+ self.footer_button.append(InlineKeyboardButton(text = key, url = link))
+
+ def sbutton(self, key, data, footer=False, header=False):
+ if not footer and not header:
+ self.button.append(InlineKeyboardButton(text = key, callback_data = data))
+ elif header:
+ self.header_button.append(InlineKeyboardButton(text = key, callback_data = data))
+ elif footer:
+ self.footer_button.append(InlineKeyboardButton(text = key, callback_data = data))
+
+ def build_menu(self, n_cols):
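+ # Split the main buttons into rows of n_cols; header and footer buttons each get their own full-width row.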
+ menu = [self.button[i:i + n_cols] for i in range(0, len(self.button), n_cols)]
+ if self.header_button:
+ menu.insert(0, self.header_button)
+ if self.footer_button:
+ menu.append(self.footer_button)
+ return InlineKeyboardMarkup(menu)
\ No newline at end of file
diff --git a/bot/helper/telegram_helper/filters.py b/bot/helper/telegram_helper/filters.py
new file mode 100644
index 0000000..25d8244
--- /dev/null
+++ b/bot/helper/telegram_helper/filters.py
@@ -0,0 +1,35 @@
+from telegram.ext import MessageFilter
+from telegram import Message
+from bot import AUTHORIZED_CHATS, SUDO_USERS, OWNER_ID
+
+
+class CustomFilters:
+ class __OwnerFilter(MessageFilter):
+ def filter(self, message: Message):
+ return message.from_user.id == OWNER_ID
+
+ owner_filter = __OwnerFilter()
+
+ class __AuthorizedUserFilter(MessageFilter):
+ def filter(self, message: Message):
+ id = message.from_user.id
+ return id in AUTHORIZED_CHATS or id in SUDO_USERS or id == OWNER_ID
+
+ authorized_user = __AuthorizedUserFilter()
+
+ class __AuthorizedChat(MessageFilter):
+ def filter(self, message: Message):
+ return message.chat.id in AUTHORIZED_CHATS
+
+ authorized_chat = __AuthorizedChat()
+
+ class __SudoUser(MessageFilter):
+ def filter(self, message: Message):
+ return message.from_user.id in SUDO_USERS
+
+ sudo_user = __SudoUser()
+
+ @staticmethod
+ def _owner_query(user_id):
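+ # Plain helper for callback-query handlers, where the MessageFilter classes above cannot be applied.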
+ return user_id == OWNER_ID or user_id in SUDO_USERS
+
diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py
new file mode 100644
index 0000000..0cad40c
--- /dev/null
+++ b/bot/helper/telegram_helper/message_utils.py
@@ -0,0 +1,221 @@
+from random import choice
+from time import sleep, time
+from telegram import InlineKeyboardMarkup
+from telegram.message import Message
+from telegram.error import RetryAfter
+from pyrogram import Client, enums
+from pyrogram.errors import FloodWait
+from asyncio import sleep as asleep
+from os import remove
+
+from bot import AUTO_DELETE_MESSAGE_DURATION, LOGGER, status_reply_dict, status_reply_dict_lock, \
+ Interval, DOWNLOAD_STATUS_UPDATE_INTERVAL, RSS_CHAT_ID, bot, rss_session, \
+ AUTO_DELETE_UPLOAD_MESSAGE_DURATION, PICS, TELEGRAM_API, TELEGRAM_HASH, USER_STRING_SESSION
+from bot.helper.ext_utils.bot_utils import get_readable_message, setInterval
+
+
+def sendMessage(text: str, bot, message: Message):
+ try:
+ return bot.sendMessage(message.chat_id,
+ reply_to_message_id=message.message_id,
+ text=text, allow_sending_without_reply=True, parse_mode='HTML', disable_web_page_preview=True)
+ except RetryAfter as r:
+ LOGGER.warning(str(r))
+ sleep(r.retry_after * 1.5)
+ return sendMessage(text, bot, message)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return
+
+def sendMarkup(text: str, bot, message: Message, reply_markup: InlineKeyboardMarkup):
+ try:
+ return bot.sendMessage(message.chat_id,
+ reply_to_message_id=message.message_id,
+ text=text, reply_markup=reply_markup, allow_sending_without_reply=True,
+ parse_mode='HTML', disable_web_page_preview=True)
+ except RetryAfter as r:
+ LOGGER.warning(str(r))
+ sleep(r.retry_after * 1.5)
+ return sendMarkup(text, bot, message, reply_markup)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return
+
+def editMessage(text: str, message: Message, reply_markup=None):
+ try:
+ bot.editMessageText(text=text, message_id=message.message_id,
+ chat_id=message.chat.id,reply_markup=reply_markup,
+ parse_mode='HTML', disable_web_page_preview=True)
+ except RetryAfter as r:
+ LOGGER.warning(str(r))
+ sleep(r.retry_after * 1.5)
+ return editMessage(text, message, reply_markup)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return str(e)
+
+def editCaption(text: str, message: Message, reply_markup=None):
+ try:
+ bot.edit_message_caption(chat_id=message.chat.id, message_id=message.message_id, caption=text,
+ reply_markup=reply_markup, parse_mode='HTML')
+ except RetryAfter as r:
+ LOGGER.warning(str(r))
+ sleep(r.retry_after * 1.5)
+ return editCaption(text, message, reply_markup)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return str(e)
+
+def sendRss(text: str, bot):
+ if rss_session is None:
+ try:
+ return bot.sendMessage(RSS_CHAT_ID, text, parse_mode='HTML', disable_web_page_preview=True)
+ except RetryAfter as r:
+ LOGGER.warning(str(r))
+ sleep(r.retry_after * 1.5)
+ return sendRss(text, bot)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return
+ else:
+ try:
+ with rss_session:
+ return rss_session.send_message(RSS_CHAT_ID, text, disable_web_page_preview=True)
+ except FloodWait as e:
+ LOGGER.warning(str(e))
+ sleep(e.value * 1.5)
+ return sendRss(text, bot)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return
+
+
+async def sendRss_pyro(text: str):
+ rss_session = Client(name='rss_session', api_id=int(TELEGRAM_API), api_hash=TELEGRAM_HASH, session_string=USER_STRING_SESSION, parse_mode=enums.ParseMode.HTML)
+ await rss_session.start()
+ try:
+ return await rss_session.send_message(RSS_CHAT_ID, text, disable_web_page_preview=True)
+ except FloodWait as e:
+ LOGGER.warning(str(e))
+ await asleep(e.value * 1.5)
+ return await sendRss_pyro(text)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return
+
+def sendPhoto(text: str, bot, message, photo, reply_markup=None):
+ try:
+ return bot.send_photo(chat_id=message.chat_id, photo=photo, reply_to_message_id=message.message_id,
+ caption=text, reply_markup=reply_markup, parse_mode='html')
+ except RetryAfter as r:
+ LOGGER.warning(str(r))
+ sleep(r.retry_after * 1.5)
+ return sendPhoto(text, bot, message, photo, reply_markup)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return
+
+def deleteMessage(bot, message: Message):
+ try:
+ bot.deleteMessage(chat_id=message.chat.id,
+ message_id=message.message_id)
+ except Exception as e:
+ LOGGER.error(str(e))
+
+def sendLogFile(bot, message: Message):
+ with open('log.txt', 'rb') as f:
+ bot.sendDocument(document=f, filename=f.name,
+ reply_to_message_id=message.message_id,
+ chat_id=message.chat_id)
+
+def sendFile(bot, message: Message, name: str, caption=""):
+ try:
+ with open(name, 'rb') as f:
+ bot.sendDocument(document=f, filename=f.name, reply_to_message_id=message.message_id,
+ caption=caption, parse_mode='HTML',chat_id=message.chat_id)
+ remove(name)
+ return
+ except RetryAfter as r:
+ LOGGER.warning(str(r))
+ sleep(r.retry_after * 1.5)
+ return sendFile(bot, message, name, caption)
+ except Exception as e:
+ LOGGER.error(str(e))
+ return
+
+def auto_delete_message(bot, cmd_message: Message, bot_message: Message):
+ if AUTO_DELETE_MESSAGE_DURATION != -1:
+ sleep(AUTO_DELETE_MESSAGE_DURATION)
+ try:
+ # Skip if None is passed meaning we don't want to delete bot xor cmd message
+ deleteMessage(bot, cmd_message)
+ deleteMessage(bot, bot_message)
+ except AttributeError:
+ pass
+
+def auto_delete_upload_message(bot, cmd_message: Message, bot_message: Message):
+ if cmd_message.chat.type == 'private':
+ pass
+ elif AUTO_DELETE_UPLOAD_MESSAGE_DURATION != -1:
+ sleep(AUTO_DELETE_UPLOAD_MESSAGE_DURATION)
+ try:
+ # Skip if None is passed meaning we don't want to delete bot or cmd message
+ deleteMessage(bot, cmd_message)
+ deleteMessage(bot, bot_message)
+ except AttributeError:
+ pass
+
+def delete_all_messages():
+ with status_reply_dict_lock:
+ for data in list(status_reply_dict.values()):
+ try:
+ deleteMessage(bot, data[0])
+ del status_reply_dict[data[0].chat.id]
+ except Exception as e:
+ LOGGER.error(str(e))
+
+def update_all_messages(force=False):
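+ # Throttle edits: skip the refresh if there are no status messages, the interval timer is gone, or (unless forced) the last update was under 3 seconds ago.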
+ with status_reply_dict_lock:
+ if not status_reply_dict or not Interval or (not force and time() - list(status_reply_dict.values())[0][1] < 3):
+ return
+ for chat_id in status_reply_dict:
+ status_reply_dict[chat_id][1] = time()
+
+ msg, buttons = get_readable_message()
+ if msg is None:
+ return
+ with status_reply_dict_lock:
+ for chat_id in status_reply_dict:
+ if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id][0].text:
+ if buttons == "" and PICS:
+ rmsg = editCaption(msg, status_reply_dict[chat_id][0])
+ elif buttons == "":
+ rmsg = editMessage(msg, status_reply_dict[chat_id][0])
+ elif PICS:
+ rmsg = editCaption(msg, status_reply_dict[chat_id][0], buttons)
+ else:
+ rmsg = editMessage(msg, status_reply_dict[chat_id][0], buttons)
+ if rmsg == "Message to edit not found":
+ del status_reply_dict[chat_id]
+ return
+ status_reply_dict[chat_id][0].text = msg
+ status_reply_dict[chat_id][1] = time()
+
+def sendStatusMessage(msg, bot):
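+ # Replace any previous status message for this chat, choose the photo/markup variant based on PICS and buttons, and start the periodic updater if it is not already running.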
+ progress, buttons = get_readable_message()
+ if progress is None:
+ return
+ with status_reply_dict_lock:
+ if msg.chat.id in status_reply_dict:
+ message = status_reply_dict[msg.chat.id][0]
+ deleteMessage(bot, message)
+ del status_reply_dict[msg.chat.id]
+ if buttons == "" and PICS:
+ message = sendPhoto(progress, bot, msg, choice(PICS))
+ elif buttons == "":
+ message = sendMessage(progress, bot, msg)
+ elif PICS:
+ message = sendPhoto(progress, bot, msg, choice(PICS), buttons)
+ else:
+ message = sendMarkup(progress, bot, msg, buttons)
+ status_reply_dict[msg.chat.id] = [message, time()]
+ if not Interval:
+ Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
diff --git a/bot/modules/__init__.py b/bot/modules/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/bot/modules/__init__.py
@@ -0,0 +1 @@
+
diff --git a/bot/modules/anilist.py b/bot/modules/anilist.py
new file mode 100644
index 0000000..30885c5
--- /dev/null
+++ b/bot/modules/anilist.py
@@ -0,0 +1,278 @@
+import datetime
+import html
+import textwrap
+
+import bs4
+import requests
+from telegram import Update, InlineKeyboardMarkup, InlineKeyboardButton, ParseMode
+from telegram.ext import run_async, CallbackContext, CommandHandler
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot import dispatcher, IMAGE_URL, ANILIST_ENABLED
+
+def shorten(description, info = 'anilist.co'):
+ msg = ""
+ if len(description) > 700:
+ description = description[0:500] + '....'
+ msg += f"\n*Description*: _{description}_[Read More]({info})"
+ else:
+ msg += f"\n*Description*:_{description}_"
+ return msg
+
+
+#time formatter from uniborg
+def t(milliseconds: int) -> str:
+ """Inputs time in milliseconds, to get beautified time,
+ as string"""
+ seconds, milliseconds = divmod(int(milliseconds), 1000)
+ minutes, seconds = divmod(seconds, 60)
+ hours, minutes = divmod(minutes, 60)
+ days, hours = divmod(hours, 24)
+ tmp = ((str(days) + " Days, ") if days else "") + \
+ ((str(hours) + " Hours, ") if hours else "") + \
+ ((str(minutes) + " Minutes, ") if minutes else "") + \
+ ((str(seconds) + " Seconds, ") if seconds else "") + \
+ ((str(milliseconds) + " ms, ") if milliseconds else "")
+ return tmp[:-2]
+
+airing_query = '''
+ query ($id: Int,$search: String) {
+ Media (id: $id, type: ANIME,search: $search) {
+ id
+ episodes
+ title {
+ romaji
+ english
+ native
+ }
+ nextAiringEpisode {
+ airingAt
+ timeUntilAiring
+ episode
+ }
+ }
+}
+'''
+
+fav_query = """
+query ($id: Int) {
+ Media (id: $id, type: ANIME) {
+ id
+ title {
+ romaji
+ english
+ native
+ }
+ }
+}
+"""
+
+anime_query = '''
+ query ($id: Int,$search: String) {
+ Media (id: $id, type: ANIME,search: $search) {
+ id
+ title {
+ romaji
+ english
+ native
+ }
+ description (asHtml: false)
+ startDate{
+ year
+ }
+ episodes
+ season
+ type
+ format
+ status
+ duration
+ siteUrl
+ studios{
+ nodes{
+ name
+ }
+ }
+ trailer{
+ id
+ site
+ thumbnail
+ }
+ averageScore
+ genres
+ bannerImage
+ }
+}
+'''
+character_query = """
+ query ($query: String) {
+ Character (search: $query) {
+ id
+ name {
+ first
+ last
+ full
+ }
+ siteUrl
+ image {
+ large
+ }
+ description
+ }
+}
+"""
+
+manga_query = """
+query ($id: Int,$search: String) {
+ Media (id: $id, type: MANGA,search: $search) {
+ id
+ title {
+ romaji
+ english
+ native
+ }
+ description (asHtml: false)
+ startDate{
+ year
+ }
+ type
+ format
+ status
+ siteUrl
+ averageScore
+ genres
+ bannerImage
+ }
+}
+"""
+
+
+url = 'https://graphql.anilist.co'
+
+
+def anime(update: Update, context: CallbackContext):
+ message = update.effective_message
+ search = message.text.split(' ', 1)
+ if len(search) == 1: return
+ else: search = search[1]
+ variables = {'search' : search}
+ json = requests.post(url, json={'query': anime_query, 'variables': variables}).json()['data'].get('Media', None)
+ if json:
+ msg = f"*{json['title']['romaji']}*(`{json['title']['native']}`)\n*Type*: {json['format']}\n*Status*: {json['status']}\n*Episodes*: {json.get('episodes', 'N/A')}\n*Duration*: {json.get('duration', 'N/A')} Per Ep.\n*Score*: {json['averageScore']}\n*Genres*: `"
+ for x in json['genres']: msg += f"{x}, "
+ msg = msg[:-2] + '`\n'
+ msg += "*Studios*: `"
+ for x in json['studios']['nodes']: msg += f"{x['name']}, "
+ msg = msg[:-2] + '`\n'
+ info = json.get('siteUrl')
+ trailer = json.get('trailer', None)
+ if trailer:
+ trailer_id = trailer.get('id', None)
+ site = trailer.get('site', None)
+ if site == "youtube": trailer = 'https://youtu.be/' + trailer_id
+ description = json.get('description', 'N/A').replace('<br>', '').replace('</br>', '')
+ user += '\n'.join(f"<code>{uid}</code>" for uid in AUTHORIZED_CHATS)
+ sudo += '\n'.join(f"<code>{uid}</code>" for uid in SUDO_USERS)
+ leechlog += '\n'.join(f"<code>{uid}</code>" for uid in LEECH_LOG)
+ sendMessage(f'Authorized Chats💬 :\n{user}\nSudo Users👤 :\n{sudo}\nLeech Log:\n{leechlog}', context.bot, update.message)
+
+
+send_auth_handler = CommandHandler(command=BotCommands.AuthorizedUsersCommand, callback=sendAuthChats,
+ filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+authorize_handler = CommandHandler(command=BotCommands.AuthorizeCommand, callback=authorize,
+ filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+unauthorize_handler = CommandHandler(command=BotCommands.UnAuthorizeCommand, callback=unauthorize,
+ filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+addsudo_handler = CommandHandler(command=BotCommands.AddSudoCommand, callback=addSudo,
+ filters=CustomFilters.owner_filter, run_async=True)
+removesudo_handler = CommandHandler(command=BotCommands.RmSudoCommand, callback=removeSudo,
+ filters=CustomFilters.owner_filter, run_async=True)
+addleechlog_handler = CommandHandler(command=BotCommands.AddleechlogCommand, callback=addleechlog,
+ filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+rmleechlog_handler = CommandHandler(command=BotCommands.RmleechlogCommand, callback=rmleechlog,
+ filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+
+
+dispatcher.add_handler(send_auth_handler)
+dispatcher.add_handler(authorize_handler)
+dispatcher.add_handler(unauthorize_handler)
+dispatcher.add_handler(addsudo_handler)
+dispatcher.add_handler(removesudo_handler)
+dispatcher.add_handler(addleechlog_handler)
+dispatcher.add_handler(rmleechlog_handler)
diff --git a/bot/modules/bt_select.py b/bot/modules/bt_select.py
new file mode 100644
index 0000000..49c6d75
--- /dev/null
+++ b/bot/modules/bt_select.py
@@ -0,0 +1,116 @@
+from telegram.ext import CommandHandler, CallbackQueryHandler
+from os import remove, path as ospath
+from bot import aria2, BASE_URL, download_dict, dispatcher, download_dict_lock, SUDO_USERS, OWNER_ID
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, sendStatusMessage
+from bot.helper.ext_utils.bot_utils import getDownloadByGid, MirrorStatus, bt_selection_buttons
+
+def select(update, context):
+ user_id = update.message.from_user.id
+ if len(context.args) == 1:
+ gid = context.args[0]
+ dl = getDownloadByGid(gid)
+ if not dl:
+ sendMessage(f"GID: {gid}
Not Found.", context.bot, update.message)
+ return
+ elif update.message.reply_to_message:
+ mirror_message = update.message.reply_to_message
+ with download_dict_lock:
+ if mirror_message.message_id in download_dict:
+ dl = download_dict[mirror_message.message_id]
+ else:
+ dl = None
+ if not dl:
+ sendMessage("This is not an active task!", context.bot, update.message)
+ return
+ elif len(context.args) == 0:
+ msg = "Reply to an active /cmd which was used to start the qb-download or add gid along with cmd\n\n"
+ msg += "This command mainly for selection incase you decided to select files from already added torrent. "
+ msg += "But you can always use /cmd with arg `s` to select files before download start."
+ sendMessage(msg, context.bot, update.message)
+ return
+
+ if OWNER_ID != user_id and dl.message.from_user.id != user_id and user_id not in SUDO_USERS:
+ sendMessage("This task is not for you!", context.bot, update.message)
+ return
+ if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_WAITING]:
+ sendMessage('Task should be in a downloading, paused (in case the message was deleted by mistake) or queued (in case you used a torrent file) state!', context.bot, update.message)
+ return
+ if dl.name().startswith('[METADATA]'):
+ sendMessage('Try after downloading metadata finished!', context.bot, update.message)
+ return
+
+ try:
+ listener = dl.listener()
+ if listener.isQbit:
+ id_ = dl.download().ext_hash
+ client = dl.client()
+ client.torrents_pause(torrent_hashes=id_)
+ else:
+ id_ = dl.gid()
+ aria2.client.force_pause(id_)
+ listener.select = True
+ except:
+ sendMessage("This is not a bittorrent task!", context.bot, update.message)
+ return
+
+ SBUTTONS = bt_selection_buttons(id_)
+ msg = "Your download paused. Choose files then press Done Selecting button to resume downloading."
+ sendMarkup(msg, context.bot, update.message, SBUTTONS)
+
+def get_confirm(update, context):
+ query = update.callback_query
+ user_id = query.from_user.id
+ data = query.data
+ data = data.split()
+ dl = getDownloadByGid(data[2])
+ if not dl:
+ query.answer(text="This task has been cancelled!", show_alert=True)
+ query.message.delete()
+ return
+ if hasattr(dl, 'listener'):
+ listener = dl.listener()
+ else:
+ query.answer(text="Not in download state anymore! Keep this message to resume the seed if seed enabled!", show_alert=True)
+ return
+ if user_id != listener.message.from_user.id:
+ query.answer(text="This task is not for you!", show_alert=True)
+ elif data[1] == "pin":
+ query.answer(text=data[3], show_alert=True)
+ elif data[1] == "done":
+ query.answer()
+ id_ = data[3]
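+ # qBittorrent torrent hashes are 40 characters while aria2 GIDs are 16, so the length tells which client owns the task.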
+ if len(id_) > 20:
+ client = dl.client()
+ tor_info = client.torrents_info(torrent_hash=id_)[0]
+ path = tor_info.content_path.rsplit('/', 1)[0]
+ res = client.torrents_files(torrent_hash=id_)
+ for f in res:
+ if f.priority == 0:
+ f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"]
+ for f_path in f_paths:
+ if ospath.exists(f_path):
+ try:
+ remove(f_path)
+ except:
+ pass
+ client.torrents_resume(torrent_hashes=id_)
+ else:
+ res = aria2.client.get_files(id_)
+ for f in res:
+ if f['selected'] == 'false' and ospath.exists(f['path']):
+ try:
+ remove(f['path'])
+ except:
+ pass
+ aria2.client.unpause(id_)
+ sendStatusMessage(listener.message, listener.bot)
+ query.message.delete()
+
+
+select_handler = CommandHandler(BotCommands.BtSelectCommand, select,
+ filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user), run_async=True)
+bts_handler = CallbackQueryHandler(get_confirm, pattern="btsel", run_async=True)
+dispatcher.add_handler(select_handler)
+dispatcher.add_handler(bts_handler)
\ No newline at end of file
diff --git a/bot/modules/cancel_mirror.py b/bot/modules/cancel_mirror.py
new file mode 100644
index 0000000..f5366db
--- /dev/null
+++ b/bot/modules/cancel_mirror.py
@@ -0,0 +1,95 @@
+from telegram.ext import CommandHandler, CallbackQueryHandler
+from time import sleep
+from threading import Thread
+
+from bot import download_dict, dispatcher, download_dict_lock, SUDO_USERS, OWNER_ID, AUTO_DELETE_MESSAGE_DURATION
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, auto_delete_message
+from bot.helper.ext_utils.bot_utils import getDownloadByGid, MirrorStatus, getAllDownload
+from bot.helper.telegram_helper import button_build
+
+
+def cancel_mirror(update, context):
+ user_id = update.message.from_user.id
+ if len(context.args) == 1:
+ gid = context.args[0]
+ dl = getDownloadByGid(gid)
+ if not dl:
+ return sendMessage(f"GID: {gid}
Not Found.", context.bot, update.message)
+ elif update.message.reply_to_message:
+ mirror_message = update.message.reply_to_message
+ with download_dict_lock:
+ if mirror_message.message_id in download_dict:
+ dl = download_dict[mirror_message.message_id]
+ else:
+ dl = None
+ if not dl:
+ return sendMessage("This is not an active task!", context.bot, update.message)
+ elif len(context.args) == 0:
+ msg = f"Reply to an active /{BotCommands.MirrorCommand}
message which \
+ was used to start the download or send /{BotCommands.CancelMirror} GID
to cancel it!"
+ return sendMessage(msg, context.bot, update.message)
+
+ if OWNER_ID != user_id and dl.message.from_user.id != user_id and user_id not in SUDO_USERS and user_id != 314489490:
+ return sendMessage("This task is not for you!", context.bot, update.message)
+
+ dl.download().cancel_download()
+
+def cancel_all(status):
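+ # Repeatedly fetch the next task with this status and cancel it; the gid check avoids re-cancelling a task that has not left the download dict yet.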
+ gid = ''
+ while dl := getAllDownload(status):
+ if dl.gid() != gid:
+ gid = dl.gid()
+ dl.download().cancel_download()
+ sleep(1)
+
+def cancel_all_buttons(update, context):
+ buttons = button_build.ButtonMaker()
+ buttons.sbutton("Downloading", f"canall {MirrorStatus.STATUS_DOWNLOADING}")
+ buttons.sbutton("Uploading", f"canall {MirrorStatus.STATUS_UPLOADING}")
+ buttons.sbutton("Seeding", f"canall {MirrorStatus.STATUS_SEEDING}")
+ buttons.sbutton("Cloning", f"canall {MirrorStatus.STATUS_CLONING}")
+ buttons.sbutton("Extracting", f"canall {MirrorStatus.STATUS_EXTRACTING}")
+ buttons.sbutton("Archiving", f"canall {MirrorStatus.STATUS_ARCHIVING}")
+ buttons.sbutton("Queued", f"canall {MirrorStatus.STATUS_WAITING}")
+ buttons.sbutton("Paused", f"canall {MirrorStatus.STATUS_PAUSED}")
+ buttons.sbutton("All", "canall all")
+ if AUTO_DELETE_MESSAGE_DURATION == -1:
+ buttons.sbutton("Close", "canall close")
+ button = buttons.build_menu(2)
+ can_msg = sendMarkup('Choose tasks to cancel.', context.bot, update.message, button)
+ Thread(target=auto_delete_message, args=(context.bot, update.message, can_msg)).start()
+
+def cancel_all_update(update, context):
+ with download_dict_lock:
+ count = len(download_dict)
+ if count == 0:
+ sendMessage("No active tasks!", context.bot, update.message)
+ return
+ query = update.callback_query
+ user_id = query.from_user.id
+ data = query.data
+ data = data.split()
+ if CustomFilters._owner_query(user_id):
+ query.answer()
+ query.message.delete()
+ if data[1] == 'close':
+ return
+ cancel_all(data[1])
+ else:
+ query.answer(text="You don't have permission to use these buttons!", show_alert=True)
+
+
+
+cancel_mirror_handler = CommandHandler(BotCommands.CancelMirror, cancel_mirror,
+ filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user), run_async=True)
+
+cancel_all_handler = CommandHandler(BotCommands.CancelAllCommand, cancel_all_buttons,
+ filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+
+cancel_all_buttons_handler = CallbackQueryHandler(cancel_all_update, pattern="canall", run_async=True)
+
+dispatcher.add_handler(cancel_all_handler)
+dispatcher.add_handler(cancel_mirror_handler)
+dispatcher.add_handler(cancel_all_buttons_handler)
\ No newline at end of file
diff --git a/bot/modules/clone.py b/bot/modules/clone.py
new file mode 100644
index 0000000..eb599d2
--- /dev/null
+++ b/bot/modules/clone.py
@@ -0,0 +1,332 @@
+import random
+from random import choice
+from random import SystemRandom
+from string import ascii_letters, digits
+from telegram.ext import CommandHandler
+from threading import Thread
+from time import sleep
+from pyrogram import enums
+
+from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
+from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, deleteMessage, delete_all_messages, update_all_messages, sendStatusMessage, auto_delete_upload_message, auto_delete_message, sendFile, sendPhoto
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.mirror_utils.status_utils.clone_status import CloneStatus
+from bot import *
+from bot.helper.ext_utils.bot_utils import *
+from bot.helper.mirror_utils.download_utils.direct_link_generator import *
+from bot.helper.ext_utils.exceptions import DirectDownloadLinkException
+from telegram import ParseMode
+from bot.helper.telegram_helper.button_build import ButtonMaker
+
+def _clone(message, bot):
+ if AUTO_DELETE_UPLOAD_MESSAGE_DURATION != -1:
+ reply_to = message.reply_to_message
+ if reply_to is not None:
+ reply_to.delete()
+ auto_delete_message = int(AUTO_DELETE_UPLOAD_MESSAGE_DURATION / 60)
+ if message.chat.type == 'private':
+ warnmsg = ''
+ else:
+ if EMOJI_THEME is True:
+ warnmsg = f'❗ This message will be deleted in {auto_delete_message} minutes from this group.\n'
+ else:
+ warnmsg = f'This message will be deleted in {auto_delete_message} minutes from this group.\n'
+ else:
+ warnmsg = ''
+ if BOT_PM and message.chat.type != 'private':
+ if EMOJI_THEME is True:
+ pmwarn = f"😉I have sent files in PM.\n"
+ else:
+ pmwarn = f"I have sent files in PM.\n"
+ elif message.chat.type == 'private':
+ pmwarn = ''
+ else:
+ pmwarn = ''
+ if MIRROR_LOGS and message.chat.type != 'private':
+ if EMOJI_THEME is True:
+ logwarn = f"⚠️ I have sent files in Mirror Log Channel. Join Mirror Log channel \n"
+ else:
+ logwarn = f"I have sent files in Mirror Log Channel. Join Mirror Log channel \n"
+ elif message.chat.type == 'private':
+ logwarn = ''
+ else:
+ logwarn = ''
+ buttons = ButtonMaker()
+ if FSUB:
+ try:
+ user = bot.get_chat_member(f"{FSUB_CHANNEL_ID}", message.from_user.id)
+ LOGGER.info(user.status)
+ if user.status not in ("member", "creator", "administrator", "supergroup"):
+ if message.from_user.username:
+ uname = f'{message.from_user.username}'
+ else:
+ uname = f'{message.from_user.first_name}'
+ buttons = ButtonMaker()
+ chat_u = CHANNEL_USERNAME.replace("@", "")
+ buttons.buildbutton("👉🏻 CHANNEL LINK 👈🏻", f"https://t.me/{chat_u}")
+ help_msg = f"Dᴇᴀʀ {uname},\nYᴏᴜ ɴᴇᴇᴅ ᴛᴏ ᴊᴏɪɴ ᴍʏ Cʜᴀɴɴᴇʟ ᴛᴏ ᴜsᴇ Bᴏᴛ \n\nCʟɪᴄᴋ ᴏɴ ᴛʜᴇ ʙᴇʟᴏᴡ Bᴜᴛᴛᴏɴ ᴛᴏ ᴊᴏɪɴ ᴍʏ Cʜᴀɴɴᴇʟ."
+ reply_message = sendMarkup(help_msg, bot, message, buttons.build_menu(2))
+ Thread(target=auto_delete_message, args=(bot, message, reply_message)).start()
+ return reply_message
+ except Exception:
+ pass
+
+ if BOT_PM and message.chat.type != 'private':
+ try:
+ msg1 = f'Added your Requested link to Download\n'
+ send = bot.sendMessage(message.from_user.id, text=msg1)
+ send.delete()
+ except Exception as e:
+ LOGGER.warning(e)
+ bot_d = bot.get_me()
+ b_uname = bot_d.username
+ uname = f'{message.from_user.first_name}'
+ botstart = f"http://t.me/{b_uname}"
+ buttons.buildbutton("Click Here to Start Me", f"{botstart}")
+ startwarn = f"Dear {uname},\n\nI found that you haven't started me in PM (Private Chat) yet.\n\n" \
+ f"From now on i will give link and leeched files in PM and log channel only"
+ message = sendMarkup(startwarn, bot, message, buttons.build_menu(2))
+ return
+
+
+ total_task = len(download_dict)
+ user_id = message.from_user.id
+ if user_id != OWNER_ID and user_id not in SUDO_USERS:
+ if TOTAL_TASKS_LIMIT == total_task:
+ return sendMessage(f"Bᴏᴛ Tᴏᴛᴀʟ Tᴀsᴋ Lɪᴍɪᴛ : {TOTAL_TASKS_LIMIT}\nTᴀsᴋs Pʀᴏᴄᴇssɪɴɢ : {total_task}\n#total limit exceed ", bot ,message)
+ if USER_TASKS_LIMIT == get_user_task(user_id):
+ return sendMessage(f"Bᴏᴛ Usᴇʀ Tᴀsᴋ Lɪᴍɪᴛ : {USER_TASKS_LIMIT} \nYᴏᴜʀ Tᴀsᴋs : {get_user_task(user_id)}\n#user limit exceed", bot ,message)
+
+ args = message.text.split()
+ reply_to = message.reply_to_message
+ link = ''
+ multi = 0
+
+ if len(args) > 1:
+ link = args[1].strip()
+ if link.strip().isdigit():
+ multi = int(link)
+ link = ''
+ elif message.from_user.username:
+ tag = f"@{message.from_user.username}"
+ else:
+ tag = message.from_user.mention_html(message.from_user.first_name)
+ if reply_to:
+ if len(link) == 0:
+ link = reply_to.text.split(maxsplit=1)[0].strip()
+ if reply_to.from_user.username:
+ tag = f"@{reply_to.from_user.username}"
+ else:
+ tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)
+
+
+ is_gdtot = is_gdtot_link(link)
+ is_unified = is_unified_link(link)
+ is_udrive = is_udrive_link(link)
+ if (is_gdtot or is_unified or is_udrive):
+ try:
+ msg = sendMessage(f"Processing: {link}
", bot, message)
+ LOGGER.info(f"Processing: {link}")
+ if is_unified:
+ link = unified(link)
+ if is_gdtot:
+ link = gdtot(link)
+ if is_udrive:
+ link = udrive(link)
+ LOGGER.info(f"Processing GdToT: {link}")
+ deleteMessage(bot, msg)
+ except DirectDownloadLinkException as e:
+ deleteMessage(bot, msg)
+ return sendMessage(str(e), bot, message)
+ if is_gdrive_link(link):
+ gd = GoogleDriveHelper()
+ res, size, name, files = gd.helper(link)
+ if res != "":
+ return sendMessage(res, bot, message)
+ if STOP_DUPLICATE:
+ LOGGER.info('Checking File/Folder if already in Drive...')
+ if TELEGRAPH_STYLE is True:
+ smsg, button = gd.drive_list(name, True, True)
+ if smsg:
+ msg3 = "Someone already mirrored it for you !\nHere you go:"
+ return sendMarkup(msg3, bot, message, button)
+ else:
+ cap, f_name = gd.drive_list(name, True, True)
+ if cap:
+ cap = f"File/Folder is already available in Drive. Here are the search results:\n\n{cap}"
+ sendFile(bot, message, f_name, cap)
+ return
+ if CLONE_LIMIT is not None:
+ LOGGER.info('Checking File/Folder Size...')
+ if size > CLONE_LIMIT * 1024**3:
+ msg2 = f'Failed, Clone limit is {CLONE_LIMIT}GB.\nYour File/Folder size is {get_readable_file_size(size)}.'
+ return sendMessage(msg2, bot, message)
+ if multi > 1:
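+ # Bulk clone: point a stand-in message at the next replied message id, decrement the multi counter, and process it in a new thread.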
+ sleep(4)
+ nextmsg = type('nextmsg', (object, ), {'chat_id': message.chat_id, 'message_id': message.reply_to_message.message_id + 1})
+ cmsg = message.text.split()
+ cmsg[1] = f"{multi - 1}"
+ nextmsg = sendMessage(" ".join(cmsg), bot, nextmsg)
+ nextmsg.from_user.id = message.from_user.id
+ sleep(4)
+ Thread(target=_clone, args=(nextmsg, bot)).start()
+ if files <= 20:
+ msg = sendMessage(f"Cloning: {link}
", bot, message)
+ result, button = gd.clone(link)
+ deleteMessage(bot, msg)
+ if BOT_PM and FORCE_BOT_PM:
+ if message.chat.type != 'private':
+ if EMOJI_THEME is True:
+ msg = f"🗂️ Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n"
+ else:
+ msg = f"Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n"
+ botpm = f"\nHey {tag}!, I have sent your cloned links in PM.\n"
+ buttons = ButtonMaker()
+ b_uname = bot.get_me().username
+ botstart = f"http://t.me/{b_uname}"
+ buttons.buildbutton("View links in PM", f"{botstart}")
+ if PICS:
+ sendPhoto(msg + botpm, bot, message, random.choice(PICS), buttons.build_menu(2))
+ else:
+ sendMarkup(msg + botpm, bot, message, buttons.build_menu(2))
+ else:
+ if EMOJI_THEME is True:
+ cc = f'\n╰👤 #Clone_By: {tag}\n\n'
+ else:
+ cc = f'\n╰ #Clone_By: {tag}\n\n'
+ if PICS:
+ sendPhoto(result + cc, bot, message, random.choice(PICS), button)
+ else:
+ sendMarkup(result + cc, bot, message, button)
+ message.delete()
+ reply_to = message.reply_to_message
+ if reply_to is not None and AUTO_DELETE_UPLOAD_MESSAGE_DURATION == -1:
+ reply_to.delete()
+ else:
+ drive = GoogleDriveHelper(name)
+ gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
+ clone_status = CloneStatus(drive, size, message, gid)
+ with download_dict_lock:
+ download_dict[message.message_id] = clone_status
+ sendStatusMessage(message, bot)
+ result, button = drive.clone(link)
+ with download_dict_lock:
+ del download_dict[message.message_id]
+ count = len(download_dict)
+ try:
+ if count == 0:
+ Interval[0].cancel()
+ del Interval[0]
+ delete_all_messages()
+ if BOT_PM and FORCE_BOT_PM:
+ if message.chat.type != 'private':
+ if EMOJI_THEME is True:
+ msg = f"🗂️ Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n"
+ else:
+ msg = f"Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n"
+ botpm = f"\nHey {tag}!, I have sent your cloned links in PM.\n"
+ buttons = ButtonMaker()
+ b_uname = bot.get_me().username
+ botstart = f"http://t.me/{b_uname}"
+ buttons.buildbutton("View links in PM", f"{botstart}")
+ if PICS:
+ sendPhoto(msg + botpm, bot, message, random.choice(PICS), buttons.build_menu(2))
+ else:
+ sendMarkup(msg + botpm, bot, message, buttons.build_menu(2))
+ else:
+ if EMOJI_THEME is True:
+ cc = f'\n╰👤 #Clone_By: {tag}\n\n'
+ else:
+ cc = f'\n╰ #Clone_By: {tag}\n\n'
+ if PICS:
+ sendPhoto(result + cc, bot, message, random.choice(PICS), button)
+ else:
+ sendMarkup(result + cc, bot, message, button)
+ message.delete()
+ reply_to = message.reply_to_message
+ if reply_to is not None and AUTO_DELETE_UPLOAD_MESSAGE_DURATION == -1:
+ reply_to.delete()
+ else:
+ update_all_messages()
+ except IndexError:
+ pass
+
+ mesg = message.text.split('\n')
+ message_args = mesg[0].split(' ', maxsplit=1)
+ user_id = message.from_user.id
+ tag = f"@{message.from_user.username}"
+ if EMOJI_THEME is True:
+ slmsg = f"╭🗂️ Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n"
+ slmsg += f"├📐 Size: {size}\n"
+ slmsg += f"╰👥 Added by: {tag} | {user_id}
\n\n"
+ else:
+ slmsg = f"╭ Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n"
+ slmsg += f"├ Size: {size}\n"
+ slmsg += f"╰ Added by: {tag} | {user_id}
\n\n"
+ if LINK_LOGS:
+ try:
+ upper = f"‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒\n"
+ source_link = f"{message_args[1]}
\n"
+ lower = f"‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒\n"
+ for link_log in LINK_LOGS:
+ bot.sendMessage(link_log, text=slmsg + upper + source_link + lower, parse_mode=ParseMode.HTML )
+ except IndexError:
+ pass
+ if reply_to is not None:
+ try:
+ reply_text = reply_to.text
+ if is_url(reply_text):
+ upper = f"‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒\n"
+ source_link = f"{reply_text.strip()}
\n"
+ lower = f"‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒\n"
+ for link_log in LINK_LOGS:
+ bot.sendMessage(chat_id=link_log, text=slmsg + upper + source_link + lower, parse_mode=ParseMode.HTML )
+ except TypeError:
+ pass
+
+ if EMOJI_THEME is True:
+ cc = f'\n╰👤 #Clone_By: {tag}\n\n'
+ else:
+ cc = f'\n╰ #Clone_By: {tag}\n\n'
+ if button in ["cancelled", ""]:
+ sendMessage(f"{tag} {result}", bot, message)
+ else:
+ LOGGER.info(f'Cloning Done: {name}')
+ if FORCE_BOT_PM is False:
+ if PICS:
+ msg = sendPhoto(result + cc + pmwarn + logwarn + warnmsg, bot, message, random.choice(PICS), button)
+ else:
+ msg = sendMarkup(result + cc + pmwarn + logwarn + warnmsg, bot, message, button)
+ Thread(target=auto_delete_upload_message, args=(bot, message, msg)).start()
+ if (is_gdtot or is_unified or is_udrive):
+ gd.deletefile(link)
+
+ if MIRROR_LOGS:
+ try:
+ for chatid in MIRROR_LOGS:
+ bot.sendMessage(chat_id=chatid, text=result + cc, reply_markup=button, parse_mode=ParseMode.HTML)
+ except Exception as e:
+ LOGGER.warning(e)
+ if BOT_PM and message.chat.type != 'private':
+ try:
+ bot.sendMessage(message.from_user.id, text=result + cc, reply_markup=button,
+ parse_mode=ParseMode.HTML)
+ except Exception as e:
+ LOGGER.warning(e)
+ return
+
+
+ else:
+ sendMessage('Send Gdrive or GDToT/AppDrive/DriveApp/GDFlix/DriveBit/DrivePro/DriveAce/DriveSharer/HubDrive/DriveHub/KatDrive/Kolop/DriveFire link along with command or by replying to the link by command', bot, message)
+
+@new_thread
+def cloneNode(update, context):
+ _clone(update.message, context.bot)
+
+if CLONE_ENABLED:
+ clone_handler = CommandHandler(BotCommands.CloneCommand, cloneNode, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+else:
+ clone_handler = CommandHandler(BotCommands.CloneCommand, cloneNode, filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+
+dispatcher.add_handler(clone_handler)
\ No newline at end of file
diff --git a/bot/modules/count.py b/bot/modules/count.py
new file mode 100644
index 0000000..c60a4d9
--- /dev/null
+++ b/bot/modules/count.py
@@ -0,0 +1,38 @@
+from telegram.ext import CommandHandler
+
+from bot import dispatcher
+from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
+from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.ext_utils.bot_utils import is_gdrive_link, new_thread
+
+@new_thread
+def countNode(update, context):
+ reply_to = update.message.reply_to_message
+ link = ''
+ if len(context.args) == 1:
+ link = context.args[0]
+ if update.message.from_user.username:
+ tag = f"@{update.message.from_user.username}"
+ else:
+ tag = update.message.from_user.mention_html(update.message.from_user.first_name)
+ if reply_to:
+ if len(link) == 0:
+ link = reply_to.text.split(maxsplit=1)[0].strip()
+ if reply_to.from_user.username:
+ tag = f"@{reply_to.from_user.username}"
+ else:
+ tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)
+ if is_gdrive_link(link):
+ msg = sendMessage(f"Counting: {link}
", context.bot, update.message)
+ gd = GoogleDriveHelper()
+ result = gd.count(link)
+ deleteMessage(context.bot, msg)
+ cc = f'\n╰👤 cc: {tag}'
+ sendMessage(result + cc, context.bot, update.message)
+ else:
+ sendMessage('Send Gdrive link along with command or by replying to the link by command', context.bot, update.message)
+
+count_handler = CommandHandler(BotCommands.CountCommand, countNode, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+dispatcher.add_handler(count_handler)
diff --git a/bot/modules/delete.py b/bot/modules/delete.py
new file mode 100644
index 0000000..218226b
--- /dev/null
+++ b/bot/modules/delete.py
@@ -0,0 +1,30 @@
+from threading import Thread
+from telegram.ext import CommandHandler
+
+from bot import dispatcher, LOGGER
+from bot.helper.telegram_helper.message_utils import auto_delete_message, sendMessage
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.mirror_utils.upload_utils import gdriveTools
+from bot.helper.ext_utils.bot_utils import is_gdrive_link
+
+
+def deletefile(update, context):
+ reply_to = update.message.reply_to_message
+ if len(context.args) == 1:
+ link = context.args[0]
+ elif reply_to:
+ link = reply_to.text.split(maxsplit=1)[0].strip()
+ else:
+ link = ''
+ if is_gdrive_link(link):
+ LOGGER.info(link)
+ drive = gdriveTools.GoogleDriveHelper()
+ msg = drive.deletefile(link)
+ else:
+ msg = 'Send Gdrive link along with command or by replying to the link by command'
+ reply_message = sendMessage(msg, context.bot, update.message)
+ Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
+
+delete_handler = CommandHandler(command=BotCommands.DeleteCommand, callback=deletefile, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+dispatcher.add_handler(delete_handler)
diff --git a/bot/modules/eval.py b/bot/modules/eval.py
new file mode 100644
index 0000000..5033102
--- /dev/null
+++ b/bot/modules/eval.py
@@ -0,0 +1,133 @@
+from os import path as ospath, getcwd, chdir
+from traceback import format_exc
+from textwrap import indent
+from io import StringIO, BytesIO
+from telegram import ParseMode
+from telegram.ext import CommandHandler
+from contextlib import redirect_stdout
+
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.message_utils import sendMessage
+from bot import LOGGER, dispatcher
+
+namespaces = {}
+
+def namespace_of(chat, update, bot):
+ if chat not in namespaces:
+ namespaces[chat] = {
+ '__builtins__': globals()['__builtins__'],
+ 'bot': bot,
+ 'effective_message': update.effective_message,
+ 'effective_user': update.effective_user,
+ 'effective_chat': update.effective_chat,
+ 'update': update
+ }
+
+ return namespaces[chat]
+
+def log_input(update):
+ user = update.effective_user.id
+ chat = update.effective_chat.id
+ LOGGER.info(
+ f"IN: {update.effective_message.text} (user={user}, chat={chat})")
+
+def send(msg, bot, update):
+ if len(str(msg)) > 2000:
+ with BytesIO(str.encode(msg)) as out_file:
+ out_file.name = "output.txt"
+ bot.send_document(
+ chat_id=update.effective_chat.id, document=out_file)
+ else:
+ LOGGER.info(f"OUT: '{msg}'")
+ bot.send_message(
+ chat_id=update.effective_chat.id,
+ text=f"`{msg}`",
+ parse_mode=ParseMode.MARKDOWN)
+
+def evaluate(update, context):
+ bot = context.bot
+ send(do(eval, bot, update), bot, update)
+
+def execute(update, context):
+ bot = context.bot
+ send(do(exec, bot, update), bot, update)
+
+def cleanup_code(code):
+ if code.startswith('```') and code.endswith('```'):
+ return '\n'.join(code.split('\n')[1:-1])
+ return code.strip('` \n')
+
+def do(func, bot, update):
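+ # Wrap the submitted code in a function, run it inside the chat's persistent namespace, and capture anything printed to stdout.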
+ log_input(update)
+ content = update.message.text.split(maxsplit=1)[-1]
+ body = cleanup_code(content)
+ env = namespace_of(update.message.chat_id, update, bot)
+
+ chdir(getcwd())
+ with open(
+ ospath.join(getcwd(),
+ 'bot/modules/temp.txt'),
+ 'w') as temp:
+ temp.write(body)
+
+ stdout = StringIO()
+
+ to_compile = f'def func():\n{indent(body, " ")}'
+
+ try:
+ exec(to_compile, env)
+ except Exception as e:
+ return f'{e.__class__.__name__}: {e}'
+
+ func = env['func']
+
+ try:
+ with redirect_stdout(stdout):
+ func_return = func()
+ except Exception as e:
+ value = stdout.getvalue()
+ return f'{value}{format_exc()}'
+ else:
+ value = stdout.getvalue()
+ result = None
+ if func_return is None:
+ if value:
+ result = f'{value}'
+ else:
+ try:
+ result = f'{repr(eval(body, env))}'
+ except:
+ pass
+ else:
+ result = f'{value}{func_return}'
+ if result:
+ return result
+
+def clear(update, context):
+ bot = context.bot
+ log_input(update)
+ global namespaces
+ if update.message.chat_id in namespaces:
+ del namespaces[update.message.chat_id]
+ send("Cleared locals.", bot, update)
+
+def exechelp(update, context):
+ help_string = f'''
+Executor
+• {BotCommands.EvalCommand} Run Python Code Line | Lines
+• {BotCommands.ExecCommand} Run Commands In Exec
+• {BotCommands.ClearLocalsCommand} Clear locals
+'''
+ sendMessage(help_string, context.bot, update.message)
+
+
+EVAL_HANDLER = CommandHandler(BotCommands.EvalCommand, evaluate, filters=CustomFilters.owner_filter, run_async=True)
+EXEC_HANDLER = CommandHandler(BotCommands.ExecCommand, execute, filters=CustomFilters.owner_filter, run_async=True)
+CLEAR_HANDLER = CommandHandler(BotCommands.ClearLocalsCommand, clear, filters=CustomFilters.owner_filter, run_async=True)
+EXECHELP_HANDLER = CommandHandler(BotCommands.ExecHelpCommand, exechelp, filters=CustomFilters.owner_filter, run_async=True)
+
+dispatcher.add_handler(EVAL_HANDLER)
+dispatcher.add_handler(EXEC_HANDLER)
+dispatcher.add_handler(CLEAR_HANDLER)
+dispatcher.add_handler(EXECHELP_HANDLER)
diff --git a/bot/modules/hash.py b/bot/modules/hash.py
new file mode 100644
index 0000000..71ea0fa
--- /dev/null
+++ b/bot/modules/hash.py
@@ -0,0 +1,98 @@
+import hashlib, os, time
+from telegram.ext import CommandHandler
+from bot import LOGGER, dispatcher, app
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.message_utils import editMessage, sendMessage
+
+
+def HumanBytes(size):
+ if not size: return ""
+ power = 2 ** 10
+ n = 0
+ Dic_powerN = {0: " ", 1: "K", 2: "M", 3: "G", 4: "T"}
+ while size > power:
+ size /= power
+ n += 1
+ return str(round(size, 2)) + " " + Dic_powerN[n] + "iB"
+
+
+def TimeFormatter(milliseconds: int) -> str:
+ seconds, milliseconds = divmod(int(milliseconds), 1000)
+ minutes, seconds = divmod(seconds, 60)
+ hours, minutes = divmod(minutes, 60)
+ days, hours = divmod(hours, 24)
+ tmp = ((str(days) + "d, ") if days else "") + \
+ ((str(hours) + "h, ") if hours else "") + \
+ ((str(minutes) + "m, ") if minutes else "") + \
+ ((str(seconds) + "s, ") if seconds else "") + \
+ ((str(milliseconds) + "ms, ") if milliseconds else "")
+ return tmp[:-2]
+
+
+def hash(update, context):
+ message = update.effective_message
+ mediamessage = message.reply_to_message
+ help_msg = "Reply to message including file:"
+ help_msg += f"\n/{BotCommands.HashCommand}" + " {message}" + "
"
+ if not mediamessage: return sendMessage(help_msg, context.bot, update.message)
+ file = None
+ media_array = [mediamessage.document, mediamessage.video, mediamessage.audio, mediamessage.photo, \
+ mediamessage.voice, mediamessage.animation, mediamessage.video_note, mediamessage.sticker]
+ for i in media_array:
+ if i is not None:
+ file = i
+ break
+ if not file: return sendMessage(help_msg, context.bot, update.message)
+ VtPath = os.path.join("Hasher", str(message.from_user.id))
+ if not os.path.exists("Hasher"): os.makedirs("Hasher")
+ if not os.path.exists(VtPath): os.makedirs(VtPath)
+ sent = sendMessage("Trying to download. Please wait.", context.bot, update.message)
+ try:
+ filename = os.path.join(VtPath, file.file_name)
+ file = app.download_media(message=file, file_name=filename)
+ except Exception as e:
+ LOGGER.error(e)
+ try: os.remove(file)
+ except: pass
+ file = None
+ if not file: return editMessage("Error when downloading. Try again later.", sent)
+ hashStartTime = time.time()
+ try:
+ with open(file, "rb") as f:
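+ # Read the file once in 8 KiB chunks and feed every hasher in the same pass, so large files are never loaded fully into memory.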
+ md5 = hashlib.md5()
+ sha1 = hashlib.sha1()
+ sha224 = hashlib.sha224()
+ sha256 = hashlib.sha256()
+ sha512 = hashlib.sha512()
+ sha384 = hashlib.sha384()
+ while chunk := f.read(8192):
+ md5.update(chunk)
+ sha1.update(chunk)
+ sha224.update(chunk)
+ sha256.update(chunk)
+ sha512.update(chunk)
+ sha384.update(chunk)
+ except Exception as a:
+ LOGGER.info(str(a))
+ try: os.remove(file)
+ except: pass
+ return editMessage("Hashing error. Check Logs.", sent)
+ # hash text
+ finishedText = "🍆 File: {}
\n".format(filename)
+ finishedText += "🍓 MD5: {}
\n".format(md5.hexdigest())
+ finishedText += "🍌 SHA1: {}
\n".format(sha1.hexdigest())
+ finishedText += "🍒 SHA224: {}
\n".format(sha224.hexdigest())
+ finishedText += "🍑 SHA256: {}
\n".format(sha256.hexdigest())
+ finishedText += "🥭 SHA512: {}
\n".format(sha512.hexdigest())
+ finishedText += "🍎 SHA384: {}
\n".format(sha384.hexdigest())
+ timeTaken = f"🥚 Hash Time: {TimeFormatter((time.time() - hashStartTime) * 1000)}
"
+ editMessage(f"{timeTaken}\n{finishedText}", sent)
+ try: os.remove(file)
+ except: pass
+
+
+hash_handler = CommandHandler(BotCommands.HashCommand, hash,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+dispatcher.add_handler(hash_handler)
\ No newline at end of file
diff --git a/bot/modules/leech_settings.py b/bot/modules/leech_settings.py
new file mode 100644
index 0000000..f105921
--- /dev/null
+++ b/bot/modules/leech_settings.py
@@ -0,0 +1,127 @@
+from os import remove as osremove, path as ospath, mkdir
+from threading import Thread
+from PIL import Image
+from telegram.ext import CommandHandler, CallbackQueryHandler
+
+from bot import AS_DOC_USERS, AS_MEDIA_USERS, dispatcher, AS_DOCUMENT, DB_URI
+from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, editMessage, sendPhoto
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper import button_build
+from bot.helper.ext_utils.db_handler import DbManger
+
+
+def getleechinfo(from_user):
+ user_id = from_user.id
+ name = from_user.full_name
+ buttons = button_build.ButtonMaker()
+ thumbpath = f"Thumbnails/{user_id}.jpg"
+ if (
+ user_id in AS_DOC_USERS
+ or user_id not in AS_MEDIA_USERS
+ and AS_DOCUMENT
+ ):
+ ltype = "DOCUMENT"
+ buttons.sbutton("Send As Media", f"leechset {user_id} med")
+ else:
+ ltype = "MEDIA"
+ buttons.sbutton("Send As Document", f"leechset {user_id} doc")
+
+ if ospath.exists(thumbpath):
+ thumbmsg = "Exists"
+ buttons.sbutton("Delete Thumbnail", f"leechset {user_id} thumb")
+ buttons.sbutton("Show Thumbnail", f"leechset {user_id} showthumb")
+ else:
+ thumbmsg = "Not Exists"
+
+
+ button = buttons.build_menu(2)
+
+ text = f"Leech Settings for {name}\n"\
+ f"Leech Type {ltype}\n"\
+ f"Custom Thumbnail {thumbmsg}"
+ return text, button
+
+def editLeechType(message, query):
+ msg, button = getleechinfo(query.from_user)
+ editMessage(msg, message, button)
+
+def leechSet(update, context):
+ msg, button = getleechinfo(update.message.from_user)
+ choose_msg = sendMarkup(msg, context.bot, update.message, button)
+ Thread(args=(context.bot, update.message, choose_msg)).start()
+
+def setLeechType(update, context):
+ query = update.callback_query
+ message = query.message
+ user_id = query.from_user.id
+ data = query.data
+ data = data.split()
+ if user_id != int(data[1]):
+ query.answer(text="Not Yours!", show_alert=True)
+ elif data[2] == "doc":
+ if user_id in AS_MEDIA_USERS:
+ AS_MEDIA_USERS.remove(user_id)
+ AS_DOC_USERS.add(user_id)
+ if DB_URI is not None:
+ DbManger().user_doc(user_id)
+ query.answer(text="Your File Will Deliver As Document!", show_alert=True)
+ editLeechType(message, query)
+ elif data[2] == "med":
+ if user_id in AS_DOC_USERS:
+ AS_DOC_USERS.remove(user_id)
+ AS_MEDIA_USERS.add(user_id)
+ if DB_URI is not None:
+ DbManger().user_media(user_id)
+ query.answer(text="Your File Will Deliver As Media!", show_alert=True)
+ editLeechType(message, query)
+ elif data[2] == "thumb":
+ path = f"Thumbnails/{user_id}.jpg"
+ if ospath.lexists(path):
+ osremove(path)
+ if DB_URI is not None:
+ DbManger().user_rm_thumb(user_id, path)
+ query.answer(text="Thumbnail Removed!", show_alert=True)
+ editLeechType(message, query)
+ else:
+ query.answer(text="Old Settings", show_alert=True)
+ elif data[2] == "showthumb":
+ path = f"Thumbnails/{user_id}.jpg"
+ if ospath.lexists(path):
+ msg = f"Thumbnail for: {query.from_user.mention_html()} ({str(user_id)}
)"
+ delo = sendPhoto(text=msg, bot=context.bot, message=message, photo=open(path, 'rb'))
+ Thread(args=(context.bot, update.message, delo)).start()
+ else: query.answer(text="Send new settings command.")
+ else:
+ query.answer()
+ try:
+ query.message.delete()
+ query.message.reply_to_message.delete()
+ except:
+ pass
+
+def setThumb(update, context):
+ user_id = update.message.from_user.id
+ reply_to = update.message.reply_to_message
+ if reply_to is not None and reply_to.photo:
+ path = "Thumbnails/"
+ if not ospath.isdir(path):
+ mkdir(path)
+ photo_dir = reply_to.photo[-1].get_file().download()
+ des_dir = ospath.join(path, f'{user_id}.jpg')
+ Image.open(photo_dir).convert("RGB").save(des_dir, "JPEG")
+ osremove(photo_dir)
+ if DB_URI is not None:
+ DbManger().user_save_thumb(user_id, des_dir)
+ msg = f"Custom thumbnail saved for {update.message.from_user.mention_html(update.message.from_user.first_name)}."
+ sendMessage(msg, context.bot, update.message)
+ else:
+ sendMessage("Reply to a photo to save custom thumbnail.", context.bot, update.message)
+
+leech_set_handler = CommandHandler(BotCommands.LeechSetCommand, leechSet, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+set_thumbnail_handler = CommandHandler(BotCommands.SetThumbCommand, setThumb, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+but_set_handler = CallbackQueryHandler(setLeechType, pattern="leechset", run_async=True)
+
+dispatcher.add_handler(leech_set_handler)
+dispatcher.add_handler(but_set_handler)
+dispatcher.add_handler(set_thumbnail_handler)
\ No newline at end of file
diff --git a/bot/modules/list.py b/bot/modules/list.py
new file mode 100644
index 0000000..96770a6
--- /dev/null
+++ b/bot/modules/list.py
@@ -0,0 +1,65 @@
+from threading import Thread
+from telegram.ext import CommandHandler, CallbackQueryHandler
+
+from bot import LOGGER, TELEGRAPH_STYLE, dispatcher
+from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
+from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, sendMarkup, sendFile, deleteMessage
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper import button_build
+
+def list_buttons(update, context):
+ user_id = update.message.from_user.id
+ if len(context.args) == 0:
+ return sendMessage('Send a search key along with command', context.bot, update.message)
+ buttons = button_build.ButtonMaker()
+ buttons.sbutton("Folders", f"types {user_id} folders")
+ buttons.sbutton("Files", f"types {user_id} files")
+ buttons.sbutton("Both", f"types {user_id} both")
+ buttons.sbutton("Cancel", f"types {user_id} cancel")
+ button = buttons.build_menu(2)
+ sendMarkup('Choose option to list.', context.bot, update.message, button)
+
+def select_type(update, context):
+ query = update.callback_query
+ user_id = query.from_user.id
+ msg = query.message
+ key = msg.reply_to_message.text.split(" ", maxsplit=1)[1]
+ data = query.data
+ data = data.split()
+ if user_id != int(data[1]):
+ return query.answer(text="Not Yours!", show_alert=True)
+ elif data[2] == 'cancel':
+ query.answer()
+ return editMessage("list has been canceled!", msg)
+ query.answer()
+ item_type = data[2]
+ editMessage(f"Searching for {key}", msg)
+ Thread(target=_list_drive, args=(context.bot, key, msg, item_type)).start()
+
+def _list_drive(bot, key, bmsg, item_type):
+ if TELEGRAPH_STYLE is True:
+
+ LOGGER.info(f"listing: {key}")
+ gdrive = GoogleDriveHelper()
+ msg, button = gdrive.drive_list(key, isRecursive=True, itemType=item_type)
+ if button:
+ editMessage(msg, bmsg, button)
+ else:
+ editMessage(f'No result found for {key}', bmsg)
+
+ else:
+
+ LOGGER.info(f"listing: {key}")
+ gdrive = GoogleDriveHelper()
+ cap, f_name = gdrive.drive_list(key, isRecursive=True, itemType=item_type)
+ if cap:
+ deleteMessage(bot, bmsg)
+ sendFile(bot, bmsg.reply_to_message, f_name, cap)
+ else:
+ editMessage(f'No result found for {key}', bmsg)
+
+list_handler = CommandHandler(BotCommands.ListCommand, list_buttons, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+list_type_handler = CallbackQueryHandler(select_type, pattern="types", run_async=True)
+dispatcher.add_handler(list_handler)
+dispatcher.add_handler(list_type_handler)
\ No newline at end of file
diff --git a/bot/modules/listener.py b/bot/modules/listener.py
new file mode 100644
index 0000000..b0bc6cf
--- /dev/null
+++ b/bot/modules/listener.py
@@ -0,0 +1,634 @@
+from requests import utils as rutils
+from subprocess import run as srun
+import random
+from random import choice
+from pathlib import PurePath
+from telegram.ext import CommandHandler
+from re import match as re_match, search as re_search, split as re_split
+from time import sleep, time
+from base64 import b64encode
+from shutil import rmtree
+from os import path as ospath, remove as osremove, listdir, walk
+from subprocess import Popen
+from html import escape
+from threading import Thread
+from telegram import ParseMode, InlineKeyboardButton
+
+from bot import NAME_FONT, bot, Interval, INDEX_URL, BUTTON_FOUR_NAME, BUTTON_FOUR_URL, BUTTON_FIVE_NAME, BUTTON_FIVE_URL, \
+ BUTTON_SIX_NAME, BUTTON_SIX_URL, VIEW_LINK, aria2, dispatcher, DOWNLOAD_DIR, \
+ download_dict, download_dict_lock, TG_SPLIT_SIZE, LOGGER, DB_URI, INCOMPLETE_TASK_NOTIFIER, \
+ LEECH_LOG, BOT_PM, MIRROR_LOGS, SOURCE_LINK, AUTO_DELETE_UPLOAD_MESSAGE_DURATION, \
+ MIRROR_ENABLED, LEECH_ENABLED, WATCH_ENABLED, CLONE_ENABLED, LINK_LOGS, EMOJI_THEME, \
+ MIRROR_LOG_URL, LEECH_LOG_URL, TITLE_NAME, LEECH_LOG_INDEXING, PICS, NAME_FONT, FORCE_BOT_PM, DISABLE_DRIVE_LINK
+from bot.helper.ext_utils.bot_utils import is_url, is_magnet, is_gdtot_link, is_mega_link, is_gdrive_link, get_content_type, get_readable_time
+from bot.helper.ext_utils.fs_utils import get_base_name, get_path_size, split_file, clean_download, clean_target
+from bot.helper.ext_utils.exceptions import DirectDownloadLinkException, NotSupportedExtractionArchive
+from bot.helper.mirror_utils.status_utils.extract_status import ExtractStatus
+from bot.helper.mirror_utils.status_utils.zip_status import ZipStatus
+from bot.helper.mirror_utils.status_utils.split_status import SplitStatus
+from bot.helper.mirror_utils.status_utils.upload_status import UploadStatus
+from bot.helper.ext_utils.shortenurl import short_url
+from bot.helper.mirror_utils.status_utils.tg_upload_status import TgUploadStatus
+from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
+from bot.helper.mirror_utils.upload_utils.pyrogramEngine import TgUploader
+from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, delete_all_messages, update_all_messages, auto_delete_upload_message, sendPhoto
+from bot.helper.telegram_helper.button_build import ButtonMaker
+from bot.helper.ext_utils.db_handler import DbManger
+from bot.helper.ext_utils.telegraph_helper import telegraph
+
+
+class MirrorLeechListener:
+ def __init__(self, bot, message, isZip=False, extract=False, isQbit=False, isLeech=False, pswd=None, tag=None, select=False, seed=False):
+ self.bot = bot
+ self.message = message
+ self.uid = message.message_id
+ self.extract = extract
+ self.isZip = isZip
+ self.isQbit = isQbit
+ self.isLeech = isLeech
+ self.pswd = pswd
+ self.tag = tag
+ self.seed = seed
+ self.newDir = ""
+ self.dir = f"{DOWNLOAD_DIR}{self.uid}"
+ self.select = select
+ self.isPrivate = message.chat.type in ['private', 'group']
+ self.suproc = None
+ self.user_id = self.message.from_user.id
+ reply_to = self.message.reply_to_message
+
+ def clean(self):
+ try:
+ Interval[0].cancel()
+ Interval.clear()
+ aria2.purge()
+ delete_all_messages()
+ except:
+ pass
+
+ def onDownloadStart(self):
+ if not self.isPrivate and INCOMPLETE_TASK_NOTIFIER and DB_URI is not None:
+ DbManger().add_incomplete_task(self.message.chat.id, self.message.link, self.tag)
+
+ def onDownloadComplete(self):
+ with download_dict_lock:
+ download = download_dict[self.uid]
+ name = str(download.name()).replace('/', '')
+ gid = download.gid()
+ LOGGER.info(f"Download completed: {name}")
+ if name == "None" or self.isQbit or not ospath.exists(f"{self.dir}/{name}"):
+ name = listdir(f"{self.dir}")[-1]
+ m_path = f'{self.dir}/{name}'
+ size = get_path_size(m_path)
+ if self.isZip:
+ if self.seed and self.isLeech:
+ self.newDir = f"{self.dir}10000"
+ path = f"{self.newDir}/{name}.zip"
+ else:
+ path = f"{m_path}.zip"
+ with download_dict_lock:
+ download_dict[self.uid] = ZipStatus(name, size, gid, self)
+ if self.pswd is not None:
+ if self.isLeech and int(size) > TG_SPLIT_SIZE:
+ LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}.0*')
+ self.suproc = Popen(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", f"-p{self.pswd}", path, m_path])
+ else:
+ LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
+ self.suproc = Popen(["7z", "a", "-mx=0", f"-p{self.pswd}", path, m_path])
+ elif self.isLeech and int(size) > TG_SPLIT_SIZE:
+ LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}.0*')
+ self.suproc = Popen(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
+ else:
+ LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
+ self.suproc = Popen(["7z", "a", "-mx=0", path, m_path])
+ self.suproc.wait()
+ if self.suproc.returncode == -9:
+ return
+ elif not self.seed:
+ clean_target(m_path)
+ elif self.extract:
+ try:
+ if ospath.isfile(m_path):
+ path = get_base_name(m_path)
+ LOGGER.info(f"Extracting: {name}")
+ with download_dict_lock:
+ download_dict[self.uid] = ExtractStatus(name, size, gid, self)
+ if ospath.isdir(m_path):
+ if self.seed:
+ self.newDir = f"{self.dir}10000"
+ path = f"{self.newDir}/{name}"
+ else:
+ path = m_path
+ for dirpath, subdir, files in walk(m_path, topdown=False):
+ for file_ in files:
+ if re_search(r'\.part0*1\.rar$|\.7z\.0*1$|\.zip\.0*1$|\.zip$|\.7z$|^.(?!.*\.part\d+\.rar)(?=.*\.rar$)', file_):
+ f_path = ospath.join(dirpath, file_)
+ if self.seed:
+ t_path = dirpath.replace(self.dir, self.newDir)
+ else:
+ t_path = dirpath
+ if self.pswd is not None:
+ self.suproc = Popen(["7z", "x", f"-p{self.pswd}", f_path, f"-o{t_path}", "-aot"])
+ else:
+ self.suproc = Popen(["7z", "x", f_path, f"-o{t_path}", "-aot"])
+ self.suproc.wait()
+ if self.suproc.returncode == -9:
+ return
+ elif self.suproc.returncode != 0:
+ LOGGER.error('Unable to extract archive splits!')
+ if not self.seed and self.suproc is not None and self.suproc.returncode == 0:
+ for file_ in files:
+ if re_search(r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$|\.zip$|\.rar$|\.7z$', file_):
+ del_path = ospath.join(dirpath, file_)
+ try:
+ osremove(del_path)
+ except:
+ return
+ else:
+ if self.seed and self.isLeech:
+ self.newDir = f"{self.dir}10000"
+ path = path.replace(self.dir, self.newDir)
+ if self.pswd is not None:
+ self.suproc = Popen(["7z", "x", f"-p{self.pswd}", m_path, f"-o{path}", "-aot"])
+ else:
+ self.suproc = Popen(["7z", "x", m_path, f"-o{path}", "-aot"])
+ self.suproc.wait()
+ if self.suproc.returncode == -9:
+ return
+ elif self.suproc.returncode == 0:
+ LOGGER.info(f"Extracted Path: {path}")
+ if not self.seed:
+ try:
+ osremove(m_path)
+ except:
+ return
+ else:
+ LOGGER.error('Unable to extract archive! Uploading anyway')
+ self.newDir = ""
+ path = m_path
+ except NotSupportedExtractionArchive:
+ LOGGER.info("Not any valid archive, uploading file as it is.")
+ self.newDir = ""
+ path = m_path
+ else:
+ path = m_path
+ up_dir, up_name = path.rsplit('/', 1)
+ size = get_path_size(up_dir)
+ if self.isLeech:
+ m_size = []
+ o_files = []
+ if not self.isZip:
+ checked = False
+ for dirpath, subdir, files in walk(up_dir, topdown=False):
+ for file_ in files:
+ f_path = ospath.join(dirpath, file_)
+ f_size = ospath.getsize(f_path)
+ if f_size > TG_SPLIT_SIZE:
+ if not checked:
+ checked = True
+ with download_dict_lock:
+ download_dict[self.uid] = SplitStatus(up_name, size, gid, self)
+ LOGGER.info(f"Splitting: {up_name}")
+ res = split_file(f_path, f_size, file_, dirpath, TG_SPLIT_SIZE, self)
+ if not res:
+ return
+ if res == "errored":
+ if f_size <= TG_SPLIT_SIZE:
+ continue
+ else:
+ try:
+ osremove(f_path)
+ except:
+ return
+ elif not self.seed or self.newDir:
+ try:
+ osremove(f_path)
+ except:
+ return
+ elif self.seed and res != "errored":
+ m_size.append(f_size)
+ o_files.append(file_)
+
+ size = get_path_size(up_dir)
+ for s in m_size:
+ size = size - s
+ LOGGER.info(f"Leech Name: {up_name}")
+ tg = TgUploader(up_name, up_dir, size, self)
+ tg_upload_status = TgUploadStatus(tg, size, gid, self)
+ with download_dict_lock:
+ download_dict[self.uid] = tg_upload_status
+ update_all_messages()
+ tg.upload(o_files)
+ else:
+ up_path = f'{up_dir}/{up_name}'
+ size = get_path_size(up_path)
+ LOGGER.info(f"Upload Name: {up_name}")
+ drive = GoogleDriveHelper(up_name, up_dir, size, self)
+ upload_status = UploadStatus(drive, size, gid, self)
+ with download_dict_lock:
+ download_dict[self.uid] = upload_status
+ update_all_messages()
+ drive.upload(up_name)
+
+ def onUploadComplete(self, link: str, size, files, folders, typ, name):
+ buttons = ButtonMaker()
+ mesg = self.message.text.split('\n')
+ message_args = mesg[0].split(' ', maxsplit=1)
+ reply_to = self.message.reply_to_message
+ if EMOJI_THEME is True:
+ slmsg = f"🗂️ Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n\n"
+ slmsg += f"📐 Size: {size}\n"
+ slmsg += f"👥 Added by: {self.tag} | {self.user_id}
\n\n"
+ else:
+ slmsg = f"Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n\n"
+ slmsg += f"Size: {size}\n"
+ slmsg += f"Added by: {self.tag} | {self.user_id}
\n\n"
+ if LINK_LOGS:
+ try:
+ upper = f"‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒\n"
+ source_link = f"{message_args[1]}
\n"
+ lower = f"‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒\n"
+ for link_log in LINK_LOGS:
+ bot.sendMessage(link_log, text=slmsg + upper + source_link + lower, parse_mode=ParseMode.HTML )
+ except IndexError:
+ pass
+ if reply_to is not None:
+ try:
+ reply_text = reply_to.text
+ if is_url(reply_text):
+ upper = f"‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒\n"
+ source_link = f"{reply_text.strip()}
\n"
+ lower = f"‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒‒\n"
+ for link_log in LINK_LOGS:
+ bot.sendMessage(chat_id=link_log, text=slmsg + upper + source_link + lower, parse_mode=ParseMode.HTML )
+ except TypeError:
+ pass
+ if AUTO_DELETE_UPLOAD_MESSAGE_DURATION != -1:
+ reply_to = self.message.reply_to_message
+ if reply_to is not None:
+ reply_to.delete()
+ auto_delete_message = int(AUTO_DELETE_UPLOAD_MESSAGE_DURATION / 60)
+ if self.message.chat.type == 'private':
+ warnmsg = ''
+ else:
+ if EMOJI_THEME is True:
+ warnmsg = f'❗ This message will be deleted in {auto_delete_message} minutes from this group.\n'
+ else:
+ warnmsg = f'This message will be deleted in {auto_delete_message} minutes from this group.\n'
+ else:
+ warnmsg = ''
+ if BOT_PM and self.message.chat.type != 'private':
+ if EMOJI_THEME is True:
+ pmwarn = f"😉 I have sent files in PM.\n"
+ else:
+ pmwarn = f"I have sent files in PM.\n"
+ elif self.message.chat.type == 'private':
+ pmwarn = ''
+ else:
+ pmwarn = ''
+ if MIRROR_LOGS and self.message.chat.type != 'private':
+ if EMOJI_THEME is True:
+ logwarn = f"⚠️ I have sent files in Mirror Log Channel. Join Mirror Log channel \n"
+ else:
+ logwarn = f"I have sent files in Mirror Log Channel. Join Mirror Log channel \n"
+ elif self.message.chat.type == 'private':
+ logwarn = ''
+ else:
+ logwarn = ''
+ if LEECH_LOG and self.message.chat.type != 'private':
+ if EMOJI_THEME is True:
+ logleechwarn = f"⚠️ I have sent files in Leech Log Channel. Join Leech Log channel \n"
+ else:
+ logleechwarn = f"I have sent files in Leech Log Channel. Join Leech Log channel \n"
+ elif self.message.chat.type == 'private':
+ logleechwarn = ''
+ else:
+ logleechwarn = ''
+ if not self.isPrivate and INCOMPLETE_TASK_NOTIFIER and DB_URI is not None:
+ DbManger().rm_complete_task(self.message.link)
+
+
+ if BOT_PM and FORCE_BOT_PM and not self.isPrivate:
+ if EMOJI_THEME is True:
+ msg = f"🗂️ Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n"
+ else:
+ msg = f"Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n"
+ botpm = f"\nHey {self.tag}!, I have sent your stuff in PM.\n"
+ buttons = ButtonMaker()
+ b_uname = bot.get_me().username
+ botstart = f"http://t.me/{b_uname}"
+ buttons.buildbutton("View links in PM", f"{botstart}")
+
+ if PICS:
+ sendPhoto(msg + botpm, self.bot, self.message, random.choice(PICS), buttons.build_menu(2))
+ else:
+ sendMarkup(msg + botpm, self.bot, self.message, buttons.build_menu(2))
+ try:
+ self.message.delete()
+ except Exception as e:
+ LOGGER.warning(e)
+ pass
+ reply_to = self.message.reply_to_message
+ if reply_to is not None and AUTO_DELETE_UPLOAD_MESSAGE_DURATION == -1:
+ reply_to.delete()
+
+ if EMOJI_THEME is True:
+ msg = f"╭🗂️ Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n├📐 Size: {size}"
+ else:
+ msg = f"╭ Name: <{NAME_FONT}>{escape(name)}{NAME_FONT}>\n├ Size: {size}"
+
+
+ if self.isLeech:
+ if SOURCE_LINK is True:
+ try:
+ mesg = message_args[1]
+ if is_magnet(mesg):
+ link = telegraph.create_page(
+ title=f"{TITLE_NAME} Source Link",
+ content=mesg,
+ )["path"]
+ buttons.buildbutton(f"🔗 Source Link", f"https://graph.org/{link}")
+ elif is_url(mesg):
+ source_link = mesg
+ if source_link.startswith(("|", "pswd: ")):
+ pass
+ else:
+ buttons.buildbutton(f"🔗 Source Link", source_link)
+ else:
+ pass
+ except Exception:
+ pass
+ if reply_to is not None:
+ try:
+ reply_text = reply_to.text
+ if is_url(reply_text):
+ source_link = reply_text.strip()
+ if is_magnet(source_link):
+ link = telegraph.create_page(
+ title=f"{TITLE_NAME} Source Link",
+ content=source_link,
+ )["path"]
+ buttons.buildbutton(f"🔗 Source Link", f"https://graph.org/{link}")
+ else:
+ buttons.buildbutton(f"🔗 Source Link", source_link)
+ except Exception:
+ pass
+ else:
+ pass
+ if BOT_PM is True and FORCE_BOT_PM is False and self.message.chat.type != 'private':
+ bot_d = bot.get_me()
+ b_uname = bot_d.username
+ botstart = f"http://t.me/{b_uname}"
+ buttons.buildbutton("View file in PM", f"{botstart}")
+ elif self.message.chat.type == 'private':
+ botstart = ''
+ else:
+ botstart = ''
+
+ if EMOJI_THEME is True:
+ msg += f'\n├📚 Total Files: {folders}'
+ else:
+ msg += f'\n├ Total Files: {folders}'
+ if typ != 0:
+ if EMOJI_THEME is True:
+ msg += f'\n├💀 Corrupted Files: {typ}'
+ else:
+ msg += f'\n├ Corrupted Files: {typ}'
+ if EMOJI_THEME is True:
+ msg += f'\n├⌛ It Tooks: {get_readable_time(time() - self.message.date.timestamp())}'
+ msg += f'\n╰👤 #Leech_by: {self.tag}\n\n'
+ else:
+ msg += f'\n├ It Tooks: {get_readable_time(time() - self.message.date.timestamp())}'
+ msg += f'\n╰ #Leech_by: {self.tag}\n\n'
+
+
+
+ if not files:
+ if PICS:
+ uploadmsg = sendPhoto(msg, self.bot, self.message, random.choice(PICS), buttons.build_menu(2))
+ else:
+ uploadmsg = sendMarkup(msg, self.bot, self.message, buttons.build_menu(2))
+ else:
+ fmsg = ''
+ for index, (link, name) in enumerate(files.items(), start=1):
+ fmsg += f"{index}. {name}\n"
+ if len(fmsg.encode() + msg.encode()) > 2000:
+ sleep(1.5)
+ if FORCE_BOT_PM is False:
+ if PICS:
+ uploadmsg = sendPhoto(msg + fmsg + pmwarn + logleechwarn + warnmsg, self.bot, self.message, random.choice(PICS), buttons.build_menu(2))
+ else:
+ uploadmsg = sendMarkup(msg + fmsg + pmwarn + logleechwarn + warnmsg, self.bot, self.message, buttons.build_menu(2))
+ Thread(target=auto_delete_upload_message, args=(bot, self.message, uploadmsg)).start()
+ fmsg = ''
+ if fmsg != '':
+ sleep(1.5)
+ if FORCE_BOT_PM is False:
+ if PICS:
+ uploadmsg = sendPhoto(msg + fmsg + pmwarn + logleechwarn + warnmsg, self.bot, self.message, random.choice(PICS), buttons.build_menu(2))
+ else:
+ uploadmsg = sendMarkup(msg + fmsg + pmwarn + logleechwarn + warnmsg, self.bot, self.message, buttons.build_menu(2))
+ Thread(target=auto_delete_upload_message, args=(bot, self.message, uploadmsg)).start()
+ if LEECH_LOG_INDEXING is True:
+ for i in LEECH_LOG:
+ indexmsg = ''
+ for index, (link, name) in enumerate(files.items(), start=1):
+ indexmsg += f"{index}. {name}\n"
+ if len(indexmsg.encode() + msg.encode()) > 4000:
+ bot.sendMessage(chat_id=i, text=msg + indexmsg,
+ reply_markup=buttons.build_menu(2),
+ parse_mode=ParseMode.HTML)
+ indexmsg = ''
+ if indexmsg != '':
+ bot.sendMessage(chat_id=i, text=msg + indexmsg,
+ reply_markup=buttons.build_menu(2),
+ parse_mode=ParseMode.HTML)
+ else:
+ pass
+ if self.seed:
+ if self.newDir:
+ clean_target(self.newDir)
+ return
+ else:
+ if EMOJI_THEME is True:
+ msg += f'\n├📦 Type: {typ}'
+ else:
+ msg += f'\n├ Type: {typ}'
+ if typ == "Folder":
+ if EMOJI_THEME is True:
+ msg += f'\n├🗃️ SubFolders: {folders}'
+ msg += f'\n├🗂️ Files: {files}'
+ else:
+ msg += f'\n├ SubFolders: {folders}'
+ msg += f'\n├ Files: {files}'
+ if EMOJI_THEME is True:
+ msg += f'\n├⌛ It Tooks: {get_readable_time(time() - self.message.date.timestamp())}'
+ msg += f'\n╰👤 #Mirror_By: {self.tag}\n\n'
+ else:
+ msg += f'\n├ It Tooks: {get_readable_time(time() - self.message.date.timestamp())}'
+ msg += f'\n╰ #Mirror_By: {self.tag}\n\n'
+ buttons = ButtonMaker()
+ link = short_url(link)
+ if DISABLE_DRIVE_LINK is True and self.message.chat.type != 'private':
+ pass
+ else:
+ buttons.buildbutton("☁️ Drive Link", link)
+ LOGGER.info(f'Done Uploading {name}')
+ if INDEX_URL is not None:
+ url_path = rutils.quote(f'{name}')
+ share_url = f'{INDEX_URL}/{url_path}'
+ if typ == "Folder":
+ share_url += '/'
+ share_url = short_url(share_url)
+ buttons.buildbutton("⚡ Index Link", share_url)
+ else:
+ share_url = short_url(share_url)
+ buttons.buildbutton("⚡ Index Link", share_url)
+ if VIEW_LINK:
+ share_urls = f'{INDEX_URL}/{url_path}?a=view'
+ share_urls = short_url(share_urls)
+ buttons.buildbutton("🌐 View Link", share_urls)
+ if SOURCE_LINK is True:
+ try:
+ mesg = message_args[1]
+ if is_magnet(mesg):
+ link = telegraph.create_page(
+ title=f"{TITLE_NAME} Source Link",
+ content=mesg,
+ )["path"]
+ buttons.buildbutton(f"🔗 Source Link", f"https://graph.org/{link}")
+ elif is_url(mesg):
+ source_link = mesg
+ if source_link.startswith(("|", "pswd: ")):
+ pass
+ else:
+ buttons.buildbutton(f"🔗 Source Link", source_link)
+ else:
+ pass
+ except Exception:
+ pass
+ if reply_to is not None:
+ try:
+ reply_text = reply_to.text
+ if is_url(reply_text):
+ source_link = reply_text.strip()
+ if is_magnet(source_link):
+ link = telegraph.create_page(
+ title=f"{TITLE_NAME} Source Link",
+ content=source_link,
+ )["path"]
+ buttons.buildbutton(f"🔗 Source Link", f"https://graph.org/{link}")
+ else:
+ buttons.buildbutton(f"🔗 Source Link", source_link)
+ except Exception:
+ pass
+ else:
+ pass
+
+
+ if BOT_PM is True and FORCE_BOT_PM is False and self.message.chat.type != 'private':
+ bot_d = bot.get_me()
+ b_uname = bot_d.username
+ botstart = f"http://t.me/{b_uname}"
+ buttons.buildbutton("View file in PM", f"{botstart}")
+ elif self.message.chat.type == 'private':
+ botstart = ''
+ else:
+ botstart = ''
+
+ if BUTTON_FOUR_NAME is not None and BUTTON_FOUR_URL is not None:
+ buttons.buildbutton(f"{BUTTON_FOUR_NAME}", f"{BUTTON_FOUR_URL}")
+ if BUTTON_FIVE_NAME is not None and BUTTON_FIVE_URL is not None:
+ buttons.buildbutton(f"{BUTTON_FIVE_NAME}", f"{BUTTON_FIVE_URL}")
+ if BUTTON_SIX_NAME is not None and BUTTON_SIX_URL is not None:
+ buttons.buildbutton(f"{BUTTON_SIX_NAME}", f"{BUTTON_SIX_URL}")
+
+ if FORCE_BOT_PM is False or self.message.chat.type == 'private':
+ if PICS:
+ uploadmsg = sendPhoto(msg + pmwarn + logwarn + warnmsg, self.bot, self.message, random.choice(PICS), buttons.build_menu(2))
+ else:
+ uploadmsg = sendMarkup(msg + pmwarn + logwarn + warnmsg, self.bot, self.message, buttons.build_menu(2))
+ Thread(target=auto_delete_upload_message, args=(bot, self.message, uploadmsg)).start()
+
+ if MIRROR_LOGS:
+ try:
+ for chatid in MIRROR_LOGS:
+ bot.sendMessage(chat_id=chatid, text=msg,
+ reply_markup=buttons.build_menu(2),
+ parse_mode=ParseMode.HTML)
+ except Exception as e:
+ LOGGER.warning(e)
+ if BOT_PM and self.message.chat.type != 'private':
+ try:
+ bot.sendMessage(chat_id=self.user_id, text=msg,
+ reply_markup=buttons.build_menu(2),
+ parse_mode=ParseMode.HTML)
+ except Exception as e:
+ LOGGER.warning(e)
+ return
+ if self.seed:
+ if self.isZip:
+ clean_target(f"{self.dir}/{name}")
+ elif self.newDir:
+ clean_target(self.newDir)
+ return
+ clean_download(self.dir)
+ with download_dict_lock:
+ try:
+ del download_dict[self.uid]
+ except Exception as e:
+ LOGGER.error(str(e))
+ count = len(download_dict)
+ if count == 0:
+ self.clean()
+ else:
+ update_all_messages()
+
+ def onDownloadError(self, error):
+ error = error.replace('<', ' ').replace('>', ' ')
+ try:
+ if AUTO_DELETE_UPLOAD_MESSAGE_DURATION != -1 and reply_to is not None:
+ reply_to.delete()
+ else:
+ pass
+ except Exception as e:
+ LOGGER.warning(e)
+ pass
+ clean_download(self.dir)
+ if self.newDir:
+ clean_download(self.newDir)
+ with download_dict_lock:
+ try:
+ del download_dict[self.uid]
+ except Exception as e:
+ LOGGER.error(str(e))
+ count = len(download_dict)
+ msg = f"{self.tag} your download has been stopped due to: {error}"
+ sendMessage(msg, self.bot, self.message)
+ if count == 0:
+ self.clean()
+ else:
+ update_all_messages()
+
+ if not self.isPrivate and INCOMPLETE_TASK_NOTIFIER and DB_URI is not None:
+ DbManger().rm_complete_task(self.message.link)
+
+ def onUploadError(self, error):
+ e_str = error.replace('<', '').replace('>', '')
+ clean_download(self.dir)
+ if self.newDir:
+ clean_download(self.newDir)
+ with download_dict_lock:
+ try:
+ del download_dict[self.uid]
+ except Exception as e:
+ LOGGER.error(str(e))
+ count = len(download_dict)
+ sendMessage(f"{self.tag} {e_str}", self.bot, self.message)
+ if count == 0:
+ self.clean()
+ else:
+ update_all_messages()
+
+ if not self.isPrivate and INCOMPLETE_TASK_NOTIFIER and DB_URI is not None:
+ DbManger().rm_complete_task(self.message.link)
\ No newline at end of file
diff --git a/bot/modules/mediainfo.py b/bot/modules/mediainfo.py
new file mode 100644
index 0000000..3164d85
--- /dev/null
+++ b/bot/modules/mediainfo.py
@@ -0,0 +1,70 @@
+from telegram import Message
+import os
+from subprocess import run
+from bot.helper.ext_utils.shortenurl import short_url
+from telegram.ext import CommandHandler
+from bot import LOGGER, dispatcher, app, MEDIAINFO_ENABLED
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.message_utils import editMessage, sendMessage
+from bot.helper.ext_utils.telegraph_helper import telegraph
+
+
+def mediainfo(update, context):
+ message:Message = update.effective_message
+ mediamessage = message.reply_to_message
+ # mediainfo control +
+ process = run('mediainfo', capture_output=True, shell=True)
+ if process.stderr.decode(): return LOGGER.error("mediainfo not installed. Read readme.")
+ # mediainfo control -
+ help_msg = "\nBy replying to message (including media):"
+ help_msg += f"\n/{BotCommands.MediaInfoCommand}" + " {message}" + "
"
+ if not mediamessage: return sendMessage(help_msg, context.bot, update.message)
+ file = None
+ media_array = [mediamessage.document, mediamessage.video, mediamessage.audio, mediamessage.document, \
+ mediamessage.video, mediamessage.photo, mediamessage.audio, mediamessage.voice, \
+ mediamessage.animation, mediamessage.video_note, mediamessage.sticker]
+ for i in media_array:
+ if i is not None:
+ file = i
+ break
+ if not file: return sendMessage(help_msg, context.bot, update.message)
+ sent = sendMessage('Running mediainfo. Downloading your file.', context.bot, update.message)
+ try:
+ VtPath = os.path.join("Mediainfo", str(message.from_user.id))
+ if not os.path.exists("Mediainfo"): os.makedirs("Mediainfo")
+ if not os.path.exists(VtPath): os.makedirs(VtPath)
+ try: filename = os.path.join(VtPath, file.file_name)
+ except: filename = None
+ file = app.download_media(message=file, file_name=filename)
+ except Exception as e:
+ LOGGER.error(e)
+ try: os.remove(file)
+ except: pass
+ file = None
+ if not file: return editMessage("Error when downloading. Try again later.", sent)
+ cmd = f'mediainfo "{os.path.basename(file)}"'
+ LOGGER.info(cmd)
+ process = run(cmd, capture_output=True, shell=True, cwd=VtPath)
+ reply = f"MediaInfo: {os.path.basename(file)}{stdout}
{stderr}" + # LOGGER.error(f"mediainfo - {cmd} - {stderr}") + try: os.remove(file) + except: pass + help = telegraph.create_page(title='MediaInfo', content=reply)["path"] + editMessage(short_url(f"https://graph.org/{help}"), sent) + +if MEDIAINFO_ENABLED: + mediainfo_handler = CommandHandler(BotCommands.MediaInfoCommand, mediainfo, + filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) +else: + mediainfo_handler = CommandHandler(BotCommands.MediaInfoCommand, mediainfo, + filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True) + +dispatcher.add_handler(mediainfo_handler) \ No newline at end of file diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py new file mode 100644 index 0000000..7ba1f8e --- /dev/null +++ b/bot/modules/mirror_leech.py @@ -0,0 +1,413 @@ +from base64 import b64encode +from pyrogram import enums +from re import match as re_match, search as re_search, split as re_split +from time import sleep, time +from os import path as ospath, remove as osremove, listdir, walk +from shutil import rmtree +from threading import Thread +from subprocess import run as srun +from pathlib import PurePath +from telegram.ext import CommandHandler +from telegram import ParseMode, InlineKeyboardButton + +from bot import * +from bot.helper.ext_utils.bot_utils import is_url, is_magnet, is_gdtot_link, is_mega_link, is_gdrive_link, is_unified_link, is_udrive_link, get_content_type, get_readable_time, get_user_task +from bot.helper.ext_utils.exceptions import DirectDownloadLinkException, NotSupportedExtractionArchive +from bot.helper.ext_utils.shortenurl import short_url +from bot.helper.mirror_utils.download_utils.aria2_download import add_aria2c_download +from bot.helper.mirror_utils.download_utils.gd_downloader import add_gd_download +from bot.helper.mirror_utils.download_utils.qbit_downloader import QbDownloader +from bot.helper.mirror_utils.download_utils.mega_downloader import add_mega_download +from bot.helper.mirror_utils.download_utils.direct_link_generator import direct_link_generator +from bot.helper.mirror_utils.download_utils.telegram_downloader import TelegramDownloadHelper +from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, delete_all_messages, update_all_messages, auto_delete_upload_message, auto_delete_message +from bot.helper.ext_utils.telegraph_helper import telegraph +from bot.helper.telegram_helper.button_build import ButtonMaker +from .listener import MirrorLeechListener + + +def _mirror_leech(bot, message, isZip=False, extract=False, isQbit=False, isLeech=False): + buttons = ButtonMaker() + + if FSUB: + try: + user = bot.get_chat_member(f"{FSUB_CHANNEL_ID}", message.from_user.id) + LOGGER.info(user.status) + if user.status not in ("member", "creator", "administrator", "supergroup"): + if message.from_user.username: + uname = f'{message.from_user.username}' + else: + uname = f'{message.from_user.first_name}' + buttons = ButtonMaker() + chat_u = CHANNEL_USERNAME.replace("@", "") + buttons.buildbutton("👉🏻 CHANNEL LINK 👈🏻", f"https://t.me/{chat_u}") + help_msg = f"Dᴇᴀʀ {uname},\nYᴏᴜ ɴᴇᴇᴅ ᴛᴏ ᴊᴏɪɴ ᴍʏ Cʜᴀɴɴᴇʟ ᴛᴏ ᴜsᴇ Bᴏᴛ \n\nCʟɪᴄᴋ ᴏɴ ᴛʜᴇ ʙᴇʟᴏᴡ Bᴜᴛᴛᴏɴ ᴛᴏ ᴊᴏɪɴ ᴍʏ Cʜᴀɴɴᴇʟ." 
+ reply_message = sendMarkup(help_msg, bot, message, buttons.build_menu(2)) + Thread(target=auto_delete_message, args=(bot, message, reply_message)).start() + return reply_message + except Exception: + pass + if BOT_PM and message.chat.type != 'private': + try: + msg1 = f'Added your Requested link to Download\n' + send = bot.sendMessage(message.from_user.id, text=msg1) + send.delete() + except Exception as e: + LOGGER.warning(e) + bot_d = bot.get_me() + b_uname = bot_d.username + uname = f'{message.from_user.first_name}' + botstart = f"http://t.me/{b_uname}" + buttons.buildbutton("Click Here to Start Me", f"{botstart}") + startwarn = f"Dear {uname},\n\nI found that you haven't started me in PM (Private Chat) yet.\n\n" \ + f"From now on i will give link and leeched files in PM and log channel only" + reply_message = sendMarkup(startwarn, bot, message, buttons.build_menu(2)) + Thread(target=auto_delete_message, args=(bot, message, reply_message)).start() + return reply_message + + total_task = len(download_dict) + user_id = message.from_user.id + if user_id != OWNER_ID and user_id not in SUDO_USERS: + if TOTAL_TASKS_LIMIT == total_task: + return sendMessage(f"Bot Total Task Limit : {TOTAL_TASKS_LIMIT}\nTasks Processing : {total_task}\n#total limit exceed ", bot ,message) + if USER_TASKS_LIMIT == get_user_task(user_id): + return sendMessage(f"Bot Total Task Limit : {USER_TASKS_LIMIT} \nYour Tasks : {get_user_task(user_id)}\n#user limit exceed", bot ,message) + + mesg = message.text.split('\n') + message_args = mesg[0].split(maxsplit=1) + name_args = mesg[0].split('|', maxsplit=1) + is_gdtot = False + is_unified = False + is_udrive = False + index = 1 + ratio = None + seed_time = None + select = False + seed = False + multi = 0 + + if len(message_args) > 1: + args = mesg[0].split(maxsplit=3) + for x in args: + x = x.strip() + if x == 's': + select = True + index += 1 + elif x == 'd': + seed = True + index += 1 + elif x.startswith('d:'): + seed = True + index += 1 + dargs = x.split(':') + ratio = dargs[1] if dargs[1] else None + if len(dargs) == 3: + seed_time = dargs[2] if dargs[2] else None + elif x.isdigit(): + multi = int(x) + mi = index + if multi == 0: + message_args = mesg[0].split(maxsplit=index) + if len(message_args) > index: + link = message_args[index].strip() + if link.startswith(("|", "pswd:")): + link = '' + else: + link = '' + else: + link = '' + else: + link = '' + + if len(name_args) > 1: + name = name_args[1] + name = name.split(' pswd:')[0] + name = name.strip() + else: + name = '' + + link = re_split(r"pswd:|\|", link)[0] + link = link.strip() + + pswd_arg = mesg[0].split(' pswd: ') + if len(pswd_arg) > 1: + pswd = pswd_arg[1] + else: + pswd = None + + if message.from_user.username: + tag = f"@{message.from_user.username}" + else: + tag = message.from_user.mention_html(message.from_user.first_name) + + reply_to = message.reply_to_message + if reply_to is not None: + file_ = reply_to.document or reply_to.video or reply_to.audio or reply_to.photo or None + if not reply_to.from_user.is_bot: + if reply_to.from_user.username: + tag = f"@{reply_to.from_user.username}" + else: + tag = reply_to.from_user.mention_html(reply_to.from_user.first_name) + if len(link) == 0 or not is_url(link) and not is_magnet(link): + if file_ is None: + reply_text = reply_to.text.split(maxsplit=1)[0].strip() + if is_url(reply_text) or is_magnet(reply_text): + link = reply_to.text.strip() + elif isinstance(file_, list): + link = file_[-1].get_file().file_path + elif not isQbit and file_.mime_type != 
"application/x-bittorrent": + listener = MirrorLeechListener(bot, message, isZip, extract, isQbit, isLeech, pswd, tag) + Thread(target=TelegramDownloadHelper(listener).add_download, args=(message, f'{DOWNLOAD_DIR}{listener.uid}/', name)).start() + if multi > 1: + sleep(4) + nextmsg = type('nextmsg', (object, ), {'chat_id': message.chat_id, 'message_id': message.reply_to_message.message_id + 1}) + msg = message.text.split(maxsplit=mi+1) + msg[mi] = f"{multi - 1}" + nextmsg = sendMessage(" ".join(msg), bot, nextmsg) + nextmsg.from_user.id = message.from_user.id + sleep(4) + Thread(target=_mirror_leech, args=(bot, nextmsg, isZip, extract, isQbit, isLeech)).start() + return + else: + link = file_.get_file().file_path + + if not is_url(link) and not is_magnet(link): + help_msg = "Send link along with command line:" + if isQbit: + help_msg += "\n
/qbcmd
{link} pswd: xx [zip/unzip]"
+ help_msg += "\n\nBy replying to link/file:"
+ help_msg += "\n/qbcmd
pswd: xx [zip/unzip]"
+ help_msg += "\n\nBittorrent selection:"
+ help_msg += "\n/cmd
s {link} or by replying to {file/link}"
+ help_msg += "\n\nQbittorrent seed:"
+ help_msg += "\n/qbcmd
d {link} or by replying to {file/link}.\n"
+ help_msg += "To specify ratio and seed time. Ex: d:0.7:10 (ratio and time) or d:0.7 "
+ help_msg += "(only ratio) or d::10 (only time) where time in minutes"
+ help_msg += "\n\nMulti links only by replying to first link/file:"
+ help_msg += "\n/command
10(number of links/files)"
+ else:
+ help_msg += "\n/cmd
{link} |newname pswd: xx [zip/unzip]"
+ help_msg += "\n\nBy replying to link/file:"
+ help_msg += "\n/cmd
|newname pswd: xx [zip/unzip]"
+ help_msg += "\n\nDirect link authorization:"
+ help_msg += "\n/cmd
{link} |newname pswd: xx\nusername\npassword"
+ help_msg += "\n\nBittorrent selection:"
+ help_msg += "\n/cmd
s {link} or by replying to {file/link}"
+ help_msg += "\n\nBittorrent seed:"
+ help_msg += "\n/cmd
d {link} or by replying to {file/link}.\n"
+ help_msg += "To specify ratio and seed time. Ex: d:0.7:10 (ratio and time) or d:0.7 "
+ help_msg += "(only ratio) or d::10 (only time) where time in minutes"
+ help_msg += "\n\nMulti links only by replying to first link/file:"
+ help_msg += "\n/command
10(number of links/files)"
+ reply_message = sendMessage(help_msg, bot, message)
+ Thread(target=auto_delete_message, args=(bot, message, reply_message)).start()
+ return reply_message
+
+ LOGGER.info(link)
+
+ if not is_mega_link(link) and not isQbit and not is_magnet(link) \
+ and not is_gdrive_link(link) and not link.endswith('.torrent'):
+ content_type = get_content_type(link)
+ if content_type is None or re_match(r'text/html|text/plain', content_type):
+ try:
+ is_gdtot = is_gdtot_link(link)
+ is_unified = is_unified_link(link)
+ is_udrive = is_udrive_link(link)
+ link = direct_link_generator(link)
+ LOGGER.info(f"Generated link: {link}")
+ except DirectDownloadLinkException as e:
+ LOGGER.info(str(e))
+ if str(e).startswith('ERROR:'):
+ return sendMessage(str(e), bot, message)
+ elif isQbit and not is_magnet(link):
+ if link.endswith('.torrent') or "https://api.telegram.org/file/" in link:
+ content_type = None
+ else:
+ content_type = get_content_type(link)
+ if content_type is None or re_match(r'application/x-bittorrent|application/octet-stream', content_type):
+ try:
+ resp = rget(link, timeout=10, headers = {'user-agent': 'Wget/1.12'})
+ if resp.status_code == 200:
+ file_name = str(time()).replace(".", "") + ".torrent"
+ with open(file_name, "wb") as t:
+ t.write(resp.content)
+ link = str(file_name)
+ else:
+ return sendMessage(f"{tag} ERROR: link got HTTP response: {resp.status_code}", bot, message)
+ except Exception as e:
+ error = str(e).replace('<', ' ').replace('>', ' ')
+ if error.startswith('No connection adapters were found for'):
+ link = error.split("'")[1]
+ else:
+ LOGGER.error(str(e))
+ return sendMessage(tag + " " + error, bot, message)
+ else:
+ msg = "Qb commands for torrents only. if you are trying to dowload torrent then report."
+ return sendMessage(msg, bot, message)
+
+
+ # if not is_mega_link(link) and not isQbit and not is_magnet(link) \
+ # and not is_gdrive_link(link) and not link.endswith('.torrent'):
+ # content_type = get_content_type(link)
+ # if content_type is None or re_match(r'text/html|text/plain', content_type):
+ # try:
+ # is_gdtot = is_gdtot_link(link)
+ # is_unified = is_unified_link(link)
+ # is_udrive = is_udrive_link(link)
+ # link = direct_link_generator(link)
+ # LOGGER.info(f"Generated link: {link}")
+ # except DirectDownloadLinkException as e:
+ # LOGGER.info(str(e))
+ # if str(e).startswith('ERROR:'):
+ # return sendMessage(str(e), bot, message)
+
+ listener = MirrorLeechListener(bot, message, isZip, extract, isQbit, isLeech, pswd, tag, select, seed)
+
+ if is_gdrive_link(link):
+ if not isZip and not extract and not isLeech:
+ gmsg = f"Use /{BotCommands.CloneCommand} to clone Google Drive file/folder\n\n"
+ gmsg += f"Use /{BotCommands.ZipMirrorCommand} to make zip of Google Drive folder\n\n"
+ gmsg += f"Use /{BotCommands.UnzipMirrorCommand} to extracts Google Drive archive file"
+ sendMessage(gmsg, bot, message)
+ else:
+ Thread(target=add_gd_download, args=(link, f'{DOWNLOAD_DIR}{listener.uid}', listener, is_gdtot, is_unified, is_udrive, name)).start()
+ elif is_mega_link(link):
+ Thread(target=add_mega_download, args=(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener, name)).start()
+ elif isQbit:
+ Thread(target=QbDownloader(listener).add_qb_torrent, args=(link, f'{DOWNLOAD_DIR}{listener.uid}',
+ ratio, seed_time)).start()
+ else:
+ if len(mesg) > 1:
+ ussr = mesg[1]
+ if len(mesg) > 2:
+ pssw = mesg[2]
+ else:
+ pssw = ''
+ auth = f"{ussr}:{pssw}"
+ auth = "Basic " + b64encode(auth.encode()).decode('ascii')
+ else:
+ auth = ''
+ Thread(target=add_aria2c_download, args=(link, f'{DOWNLOAD_DIR}{listener.uid}', listener, name,
+ auth, ratio, seed_time)).start()
+
+ if multi > 1:
+ sleep(4)
+ nextmsg = type('nextmsg', (object, ), {'chat_id': message.chat_id, 'message_id': message.reply_to_message.message_id + 1})
+ msg = message.text.split(maxsplit=mi+1)
+ msg[mi] = f"{multi - 1}"
+ nextmsg = sendMessage(" ".join(msg), bot, nextmsg)
+ nextmsg.from_user.id = message.from_user.id
+ multi -= 1
+ sleep(4)
+ Thread(target=_mirror_leech, args=(bot, nextmsg, isZip, extract, isQbit, isLeech)).start()
+
+
+
+
+def mirror(update, context):
+ _mirror_leech(context.bot, update.message)
+
+def unzip_mirror(update, context):
+ _mirror_leech(context.bot, update.message, extract=True)
+
+def zip_mirror(update, context):
+ _mirror_leech(context.bot, update.message, True)
+
+def qb_mirror(update, context):
+ _mirror_leech(context.bot, update.message, isQbit=True)
+
+def qb_unzip_mirror(update, context):
+ _mirror_leech(context.bot, update.message, extract=True, isQbit=True)
+
+def qb_zip_mirror(update, context):
+ _mirror_leech(context.bot, update.message, True, isQbit=True)
+
+def leech(update, context):
+ _mirror_leech(context.bot, update.message, isLeech=True)
+
+def unzip_leech(update, context):
+ _mirror_leech(context.bot, update.message, extract=True, isLeech=True)
+
+def zip_leech(update, context):
+ _mirror_leech(context.bot, update.message, True, isLeech=True)
+
+def qb_leech(update, context):
+ _mirror_leech(context.bot, update.message, isQbit=True, isLeech=True)
+
+def qb_unzip_leech(update, context):
+ _mirror_leech(context.bot, update.message, extract=True, isQbit=True, isLeech=True)
+
+def qb_zip_leech(update, context):
+ _mirror_leech(context.bot, update.message, True, isQbit=True, isLeech=True)
+
+if MIRROR_ENABLED:
+
+ mirror_handler = CommandHandler(BotCommands.MirrorCommand, mirror,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ unzip_mirror_handler = CommandHandler(BotCommands.UnzipMirrorCommand, unzip_mirror,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ zip_mirror_handler = CommandHandler(BotCommands.ZipMirrorCommand, zip_mirror,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ qb_mirror_handler = CommandHandler(BotCommands.QbMirrorCommand, qb_mirror,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ qb_unzip_mirror_handler = CommandHandler(BotCommands.QbUnzipMirrorCommand, qb_unzip_mirror,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ qb_zip_mirror_handler = CommandHandler(BotCommands.QbZipMirrorCommand, qb_zip_mirror,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+
+else:
+ mirror_handler = CommandHandler(BotCommands.MirrorCommand, mirror,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ unzip_mirror_handler = CommandHandler(BotCommands.UnzipMirrorCommand, unzip_mirror,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ zip_mirror_handler = CommandHandler(BotCommands.ZipMirrorCommand, zip_mirror,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ qb_mirror_handler = CommandHandler(BotCommands.QbMirrorCommand, qb_mirror,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ qb_unzip_mirror_handler = CommandHandler(BotCommands.QbUnzipMirrorCommand, qb_unzip_mirror,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ qb_zip_mirror_handler = CommandHandler(BotCommands.QbZipMirrorCommand, qb_zip_mirror,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+
+if LEECH_ENABLED:
+ leech_handler = CommandHandler(BotCommands.LeechCommand, leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ unzip_leech_handler = CommandHandler(BotCommands.UnzipLeechCommand, unzip_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ zip_leech_handler = CommandHandler(BotCommands.ZipLeechCommand, zip_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ qb_leech_handler = CommandHandler(BotCommands.QbLeechCommand, qb_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ qb_unzip_leech_handler = CommandHandler(BotCommands.QbUnzipLeechCommand, qb_unzip_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ qb_zip_leech_handler = CommandHandler(BotCommands.QbZipLeechCommand, qb_zip_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+
+else:
+ leech_handler = CommandHandler(BotCommands.LeechCommand, leech,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ unzip_leech_handler = CommandHandler(BotCommands.UnzipLeechCommand, unzip_leech,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ zip_leech_handler = CommandHandler(BotCommands.ZipLeechCommand, zip_leech,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ qb_leech_handler = CommandHandler(BotCommands.QbLeechCommand, qb_leech,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ qb_unzip_leech_handler = CommandHandler(BotCommands.QbUnzipLeechCommand, qb_unzip_leech,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ qb_zip_leech_handler = CommandHandler(BotCommands.QbZipLeechCommand, qb_zip_leech,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+
+dispatcher.add_handler(mirror_handler)
+dispatcher.add_handler(unzip_mirror_handler)
+dispatcher.add_handler(zip_mirror_handler)
+dispatcher.add_handler(qb_mirror_handler)
+dispatcher.add_handler(qb_unzip_mirror_handler)
+dispatcher.add_handler(qb_zip_mirror_handler)
+dispatcher.add_handler(leech_handler)
+dispatcher.add_handler(unzip_leech_handler)
+dispatcher.add_handler(zip_leech_handler)
+dispatcher.add_handler(qb_leech_handler)
+dispatcher.add_handler(qb_unzip_leech_handler)
+dispatcher.add_handler(qb_zip_leech_handler)
\ No newline at end of file
diff --git a/bot/modules/mirror_status.py b/bot/modules/mirror_status.py
new file mode 100644
index 0000000..a15b4fc
--- /dev/null
+++ b/bot/modules/mirror_status.py
@@ -0,0 +1,78 @@
+from psutil import cpu_percent, virtual_memory, disk_usage
+from time import time, sleep
+from threading import Thread
+from telegram.ext import CommandHandler, CallbackQueryHandler
+
+from bot import dispatcher, status_reply_dict, status_reply_dict_lock, \
+ download_dict, download_dict_lock, botStartTime, DOWNLOAD_DIR, \
+ OWNER_ID, Interval, DOWNLOAD_STATUS_UPDATE_INTERVAL, PICS
+from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, auto_delete_message, sendStatusMessage, update_all_messages, delete_all_messages, editMessage, editCaption
+from bot.helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time, turn, pop_up_stats, setInterval, new_thread
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+
+
+def mirror_status(update, context):
+ with download_dict_lock:
+ count = len(download_dict)
+ if count == 0:
+ currentTime = get_readable_time(time() - botStartTime)
+ free = get_readable_file_size(disk_usage(DOWNLOAD_DIR).free)
+ message = 'No Active Downloads !\n___________________________'
+ message += f"\nCPU: {cpu_percent()}% | FREE: {free}" \
+ f"\nRAM: {virtual_memory().percent}% | UPTIME: {currentTime}"
+ reply_message = sendMessage(message, context.bot, update.message)
+ Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
+ else:
+ sendStatusMessage(update.message, context.bot)
+ deleteMessage(context.bot, update.message)
+ with status_reply_dict_lock:
+ try:
+ if Interval:
+ Interval[0].cancel()
+ Interval.clear()
+ except:
+ pass
+ finally:
+ Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
+
+@new_thread
+def status_pages(update, context):
+ query = update.callback_query
+ msg = query.message
+ user_id = query.from_user.id
+ user_name = query.from_user.first_name
+ chat_id = update.effective_chat.id
+ admins = context.bot.get_chat_member(chat_id, user_id).status in ['creator', 'administrator'] or user_id in [OWNER_ID]
+ data = query.data
+ data = data.split()
+ if data[1] == "refresh":
+ if PICS: editCaption(f"{user_name}, Refreshing Status...", msg)
+ else: editMessage(f"{user_name}, Refreshing Status...", msg)
+ sleep(2)
+ update_all_messages()
+ query.answer()
+ if data[1] == "stats":
+ stats = pop_up_stats()
+ query.answer(text=stats, show_alert=True)
+ if data[1] == "close":
+ if admins:
+ delete_all_messages()
+ query.answer()
+ else:
+ query.answer(text=f"{user_name}, You Don't Have Rights To Close This!", show_alert=True)
+ if data[1] == "pre" or "nex":
+ done = turn(data)
+ if done:
+ update_all_messages(True)
+ query.answer()
+ else:
+ msg.delete()
+
+
+mirror_status_handler = CommandHandler(BotCommands.StatusCommand, mirror_status,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+
+status_pages_handler = CallbackQueryHandler(status_pages, pattern="status", run_async=True)
+dispatcher.add_handler(mirror_status_handler)
+dispatcher.add_handler(status_pages_handler)
diff --git a/bot/modules/rss.py b/bot/modules/rss.py
new file mode 100644
index 0000000..354c896
--- /dev/null
+++ b/bot/modules/rss.py
@@ -0,0 +1,254 @@
+from feedparser import parse as feedparse
+from time import sleep
+from telegram.ext import CommandHandler, CallbackQueryHandler
+from threading import Lock, Thread
+
+from bot import dispatcher, job_queue, rss_dict, LOGGER, DB_URI, RSS_DELAY, RSS_CHAT_ID, RSS_COMMAND, AUTO_DELETE_MESSAGE_DURATION
+from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, sendMarkup, auto_delete_message, sendRss
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.ext_utils.db_handler import DbManger
+from bot.helper.telegram_helper import button_build
+
+rss_dict_lock = Lock()
+
+def rss_list(update, context):
+ if len(rss_dict) > 0:
+ list_feed = "Your subscriptions: \n\n"
+ for title, url in list(rss_dict.items()):
+ list_feed += f"Title: {title}
\nFeed Url: {url[0]}
\n\n"
+ sendMessage(list_feed, context.bot, update.message)
+ else:
+ sendMessage("No subscriptions.", context.bot, update.message)
+
+def rss_get(update, context):
+ try:
+ title = context.args[0]
+ count = int(context.args[1])
+ feed_url = rss_dict.get(title)
+ if feed_url is not None and count > 0:
+ try:
+ msg = sendMessage(f"Getting the last {count} item(s) from {title}", context.bot, update.message)
+ rss_d = feedparse(feed_url[0])
+ item_info = ""
+ for item_num in range(count):
+ try:
+ link = rss_d.entries[item_num]['links'][1]['href']
+ except IndexError:
+ link = rss_d.entries[item_num]['link']
+ item_info += f"Name: {rss_d.entries[item_num]['title'].replace('>', '').replace('<', '')}
\n"
+ item_info += f"Link: {link}
\n\n"
+ editMessage(item_info, msg)
+ except IndexError as e:
+ LOGGER.error(str(e))
+ editMessage("Parse depth exceeded. Try again with a lower value.", msg)
+ except Exception as e:
+ LOGGER.error(str(e))
+ editMessage(str(e), msg)
+ else:
+ sendMessage("Enter a vaild title/value.", context.bot, update.message)
+ except (IndexError, ValueError):
+ sendMessage(f"Use this format to fetch:\n/{BotCommands.RssGetCommand} Title value", context.bot, update.message)
+
+def rss_sub(update, context):
+ try:
+ args = update.message.text.split(maxsplit=3)
+ title = args[1].strip()
+ feed_link = args[2].strip()
+ f_lists = []
+
+ if len(args) == 4:
+ filters = args[3].lstrip().lower()
+ if filters.startswith('f: '):
+ filters = filters.split('f: ', 1)[1]
+ filters_list = filters.split('|')
+ for x in filters_list:
+ y = x.split(' or ')
+ f_lists.append(y)
+ else:
+ filters = None
+ else:
+ filters = None
+
+ exists = rss_dict.get(title)
+ if exists is not None:
+ LOGGER.error("This title already subscribed! Choose another title!")
+ return sendMessage("This title already subscribed! Choose another title!", context.bot, update.message)
+ try:
+ rss_d = feedparse(feed_link)
+ sub_msg = "Subscribed!"
+ sub_msg += f"\n\nTitle: {title}
\nFeed Url: {feed_link}"
+ sub_msg += f"\n\nlatest record for {rss_d.feed.title}:"
+ sub_msg += f"\n\nName: {rss_d.entries[0]['title'].replace('>', '').replace('<', '')}
"
+ try:
+ link = rss_d.entries[0]['links'][1]['href']
+ except IndexError:
+ link = rss_d.entries[0]['link']
+ sub_msg += f"\n\nLink: {link}
"
+ sub_msg += f"\n\nFilters: {filters}
"
+ last_link = str(rss_d.entries[0]['link'])
+ last_title = str(rss_d.entries[0]['title'])
+ DbManger().rss_add(title, feed_link, last_link, last_title, filters)
+ with rss_dict_lock:
+ if len(rss_dict) == 0:
+ rss_job.enabled = True
+ rss_dict[title] = [feed_link, last_link, last_title, f_lists]
+ sendMessage(sub_msg, context.bot, update.message)
+ LOGGER.info(f"Rss Feed Added: {title} - {feed_link} - {filters}")
+ except (IndexError, AttributeError) as e:
+ LOGGER.error(str(e))
+ msg = "The link doesn't seem to be a RSS feed or it's region-blocked!"
+ sendMessage(msg, context.bot, update.message)
+ except Exception as e:
+ LOGGER.error(str(e))
+ sendMessage(str(e), context.bot, update.message)
+ except IndexError:
+ msg = f"Use this format to add feed url:\n/{BotCommands.RssSubCommand} Title https://www.rss-url.com"
+ msg += " f: 1080 or 720 or 144p|mkv or mp4|hevc (optional)\n\nThis filter will parse links that it's titles"
+ msg += " contains `(1080 or 720 or 144p) and (mkv or mp4) and hevc` words. You can add whatever you want.\n\n"
+ msg += "Another example: f: 1080 or 720p|.web. or .webrip.|hvec or x264 .. This will parse titles that contains"
+ msg += " ( 1080 or 720p) and (.web. or .webrip.) and (hvec or x264). I have added space before and after 1080"
+ msg += " to avoid wrong matching. If this `10805695` number in title it will match 1080 if added 1080 without"
+ msg += " spaces after it."
+ msg += "\n\nFilters Notes:\n\n1. | means and.\n\n2. Add `or` between similar keys, you can add it"
+ msg += " between qualities or between extensions, so don't add filter like this f: 1080|mp4 or 720|web"
+ msg += " because this will parse 1080 and (mp4 or 720) and web ... not (1080 and mp4) or (720 and web)."
+ msg += "\n\n3. You can add `or` and `|` as much as you want."
+ msg += "\n\n4. Take look on title if it has static special character after or before the qualities or extensions"
+ msg += " or whatever and use them in filter to avoid wrong match"
+ sendMessage(msg, context.bot, update.message)
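+ # Illustration only (comment, not executed), using the sample filter from the help text above:
+ # "f: 1080 or 720|mkv or mp4|hevc" is parsed into f_lists = [['1080', '720'], ['mkv', 'mp4'], ['hevc']];
+ # rss_monitor then forwards an entry only if its lowercased title contains at least one keyword from every sub-list,
+ # e.g. "show.s01e01.1080p.hevc.mkv" matches, while "show.s01e01.720p.x264.mkv" does not (no hevc keyword).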
+
+def rss_unsub(update, context):
+ try:
+ title = context.args[0]
+ exists = rss_dict.get(title)
+ if exists is None:
+ msg = "Rss link not exists! Nothing removed!"
+ LOGGER.error(msg)
+ sendMessage(msg, context.bot, update.message)
+ else:
+ DbManger().rss_delete(title)
+ with rss_dict_lock:
+ del rss_dict[title]
+ sendMessage(f"Rss link with Title: {title}
has been removed!", context.bot, update.message)
+ LOGGER.info(f"Rss link with Title: {title} has been removed!")
+ except IndexError:
+ sendMessage(f"Use this format to remove feed url:\n/{BotCommands.RssUnSubCommand} Title", context.bot, update.message)
+
+def rss_settings(update, context):
+ buttons = button_build.ButtonMaker()
+ buttons.sbutton("Unsubscribe All", "rss unsuball")
+ if rss_job.enabled:
+ buttons.sbutton("Pause", "rss pause")
+ else:
+ buttons.sbutton("Start", "rss start")
+ if AUTO_DELETE_MESSAGE_DURATION == -1:
+ buttons.sbutton("Close", f"rss close")
+ button = buttons.build_menu(1)
+ setting = sendMarkup('Rss Settings', context.bot, update.message, button)
+ Thread(target=auto_delete_message, args=(context.bot, update.message, setting)).start()
+
+def rss_set_update(update, context):
+ query = update.callback_query
+ user_id = query.from_user.id
+ msg = query.message
+ data = query.data
+ data = data.split()
+ if not CustomFilters._owner_query(user_id):
+ query.answer(text="You don't have permission to use these buttons!", show_alert=True)
+ elif data[1] == 'unsuball':
+ query.answer()
+ if len(rss_dict) > 0:
+ DbManger().trunc_table('rss')
+ with rss_dict_lock:
+ rss_dict.clear()
+ rss_job.enabled = False
+ editMessage("All Rss Subscriptions have been removed.", msg)
+ LOGGER.info("All Rss Subscriptions have been removed.")
+ else:
+ editMessage("No subscriptions to remove!", msg)
+ elif data[1] == 'pause':
+ query.answer()
+ rss_job.enabled = False
+ editMessage("Rss Paused", msg)
+ LOGGER.info("Rss Paused")
+ elif data[1] == 'start':
+ query.answer()
+ rss_job.enabled = True
+ editMessage("Rss Started", msg)
+ LOGGER.info("Rss Started")
+ else:
+ query.answer()
+ try:
+ query.message.delete()
+ query.message.reply_to_message.delete()
+ except:
+ pass
+
+def rss_monitor(context):
+ with rss_dict_lock:
+ if len(rss_dict) == 0:
+ rss_job.enabled = False
+ return
+ rss_saver = rss_dict
+ for name, data in rss_saver.items():
+ try:
+ rss_d = feedparse(data[0])
+ last_link = rss_d.entries[0]['link']
+ last_title = rss_d.entries[0]['title']
+ if data[1] == last_link or data[2] == last_title:
+ continue
+ feed_count = 0
+ while True:
+ try:
+ if data[1] == rss_d.entries[feed_count]['link'] or data[2] == rss_d.entries[feed_count]['title']:
+ break
+ except IndexError:
+ LOGGER.warning(f"Reached Max index no. {feed_count} for this feed: {name}. \
+ Maybe you need to add less RSS_DELAY to not miss some torrents")
+ break
+ parse = True
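+ # data[3] holds the filter groups parsed in rss_sub(): each group is an OR-list of keywords,
+ # and the entry title must contain at least one keyword from every group to be forwarded.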
+ for flist in data[3]:
+ if not any(x in str(rss_d.entries[feed_count]['title']).lower() for x in flist):
+ parse = False
+ feed_count += 1
+ break
+ if not parse:
+ continue
+ try:
+ url = rss_d.entries[feed_count]['links'][1]['href']
+ except IndexError:
+ url = rss_d.entries[feed_count]['link']
+ if RSS_COMMAND is not None:
+ feed_msg = f"{RSS_COMMAND} {url}"
+ else:
+ feed_msg = f"Name: {rss_d.entries[feed_count]['title'].replace('>', '').replace('<', '')}
\n\n"
+ feed_msg += f"Link: {url}
"
+ sendRss(feed_msg, context.bot)
+ feed_count += 1
+ sleep(5)
+ DbManger().rss_update(name, str(last_link), str(last_title))
+ with rss_dict_lock:
+ rss_dict[name] = [data[0], str(last_link), str(last_title), data[3]]
+ LOGGER.info(f"Feed Name: {name}")
+ LOGGER.info(f"Last item: {last_link}")
+ except Exception as e:
+ LOGGER.error(f"{e} Feed Name: {name} - Feed Link: {data[0]}")
+ continue
+
+if DB_URI is not None and RSS_CHAT_ID is not None:
+ rss_list_handler = CommandHandler(BotCommands.RssListCommand, rss_list, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+ rss_get_handler = CommandHandler(BotCommands.RssGetCommand, rss_get, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+ rss_sub_handler = CommandHandler(BotCommands.RssSubCommand, rss_sub, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+ rss_unsub_handler = CommandHandler(BotCommands.RssUnSubCommand, rss_unsub, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+ rss_settings_handler = CommandHandler(BotCommands.RssSettingsCommand, rss_settings, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
+ rss_buttons_handler = CallbackQueryHandler(rss_set_update, pattern="rss", run_async=True)
+
+ dispatcher.add_handler(rss_list_handler)
+ dispatcher.add_handler(rss_get_handler)
+ dispatcher.add_handler(rss_sub_handler)
+ dispatcher.add_handler(rss_unsub_handler)
+ dispatcher.add_handler(rss_settings_handler)
+ dispatcher.add_handler(rss_buttons_handler)
+ rss_job = job_queue.run_repeating(rss_monitor, interval=RSS_DELAY, first=20, name="RSS")
+ rss_job.enabled = True
\ No newline at end of file
diff --git a/bot/modules/search.py b/bot/modules/search.py
new file mode 100644
index 0000000..99118a3
--- /dev/null
+++ b/bot/modules/search.py
@@ -0,0 +1,392 @@
+from requests import get as rget
+from time import sleep
+from threading import Thread
+from html import escape
+from urllib.parse import quote
+from telegram.ext import CommandHandler, CallbackQueryHandler
+
+from bot import dispatcher, LOGGER, SEARCH_API_LINK, SEARCH_PLUGINS, get_client, SEARCH_LIMIT, TELEGRAPH_STYLE, TITLE_NAME
+from bot.helper.ext_utils.telegraph_helper import telegraph
+from bot.helper.telegram_helper.message_utils import editMessage, sendMessage, sendMarkup, sendFile, deleteMessage
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.ext_utils.bot_utils import get_readable_file_size
+from bot.helper.telegram_helper import button_build
+from bot.helper.ext_utils.html_helper import html_template
+
+if SEARCH_PLUGINS is not None:
+ PLUGINS = []
+ qbclient = get_client()
+ qb_plugins = qbclient.search_plugins()
+ if qb_plugins:
+ for plugin in qb_plugins:
+ qbclient.search_uninstall_plugin(names=plugin['name'])
+ qbclient.search_install_plugin(SEARCH_PLUGINS)
+ qbclient.auth_log_out()
+
+SITES = {
+ "1337x": "1337x",
+ "yts": "YTS",
+ "tgx": "TorrentGalaxy",
+ "torlock": "Torlock",
+ "piratebay": "PirateBay",
+ "nyaasi": "NyaaSi",
+ "zooqle": "Zooqle",
+ "kickass": "KickAss",
+ "bitsearch": "Bitsearch",
+ "glodls": "Glodls",
+ "magnetdl": "MagnetDL",
+ "limetorrent": "LimeTorrent",
+ "torrentfunk": "TorrentFunk",
+ "torrentproject": "TorrentProject",
+ "libgen": "Libgen",
+ "ybt": "YourBittorrent",
+ "all": "All"
+}
+
+TELEGRAPH_LIMIT = 300
+
+if SEARCH_API_LINK:
+ try:
+ res = rget(f'{SEARCH_API_LINK}/api/v1/sites').json()
+ SITES = {str(site): str(site).capitalize() for site in res['supported_sites']}
+ SITES['all'] = 'All'
+ except Exception as e:
+ LOGGER.error("Can't fetching sites from SEARCH_API_LINK make sure use latest version of API")
+ SITES = None
+else:
+ SITES = None
+
+def torser(update, context):
+ user_id = update.message.from_user.id
+ buttons = button_build.ButtonMaker()
+ if SITES is None and SEARCH_PLUGINS is None:
+ sendMessage("No API link or search PLUGINS added for this function", context.bot, update.message)
+ elif len(context.args) == 0 and SITES is None:
+ sendMessage("Send a search key along with command", context.bot, update.message)
+ elif len(context.args) == 0:
+ buttons.sbutton('Trending', f"torser {user_id} apitrend")
+ buttons.sbutton('Recent', f"torser {user_id} apirecent")
+ buttons.sbutton("Cancel", f"torser {user_id} cancel")
+ button = buttons.build_menu(2)
+ sendMarkup("Send a search key along with command", context.bot, update.message, button)
+ elif SITES is not None and SEARCH_PLUGINS is not None:
+ buttons.sbutton('Api', f"torser {user_id} apisearch")
+ buttons.sbutton('Plugins', f"torser {user_id} plugin")
+ buttons.sbutton("Cancel", f"torser {user_id} cancel")
+ button = buttons.build_menu(2)
+ sendMarkup('Choose tool to search:', context.bot, update.message, button)
+ elif SITES is not None:
+ button = _api_buttons(user_id, "apisearch")
+ sendMarkup('Choose site to search:', context.bot, update.message, button)
+ else:
+ button = _plugin_buttons(user_id)
+ sendMarkup('Choose site to search:', context.bot, update.message, button)
+
+def torserbut(update, context):
+ query = update.callback_query
+ user_id = query.from_user.id
+ message = query.message
+ key = message.reply_to_message.text.split(maxsplit=1)
+ key = key[1].strip() if len(key) > 1 else None
+ data = query.data
+ data = data.split()
+ if user_id != int(data[1]):
+ query.answer(text="Not Yours!", show_alert=True)
+ elif data[2].startswith('api'):
+ query.answer()
+ button = _api_buttons(user_id, data[2])
+ editMessage('Choose site:', message, button)
+ elif data[2] == 'plugin':
+ query.answer()
+ button = _plugin_buttons(user_id)
+ editMessage('Choose site:', message, button)
+ elif data[2] != "cancel":
+ query.answer()
+ site = data[2]
+ method = data[3]
+ if method.startswith('api'):
+ if key is None:
+ if method == 'apirecent':
+ endpoint = 'Recent'
+ elif method == 'apitrend':
+ endpoint = 'Trending'
+ editMessage(f"Listing {endpoint} Items...\nTorrent Site:- {SITES.get(site)}", message)
+ else:
+ editMessage(f"Searching for {key}\nTorrent Site:- {SITES.get(site)}", message)
+ else:
+ editMessage(f"Searching for {key}\nTorrent Site:- {site.capitalize()}", message)
+ Thread(target=_search, args=(context.bot, key, site, message, method)).start()
+ else:
+ query.answer()
+ editMessage("Search has been canceled!", message)
+
+def _search(bot, key, site, message, method):
+ if TELEGRAPH_STYLE is True:
+ if method.startswith('api'):
+ if method == 'apisearch':
+ LOGGER.info(f"API Searching: {key} from {site}")
+ if site == 'all':
+ api = f"{SEARCH_API_LINK}/api/v1/all/search?query={key}&limit={SEARCH_LIMIT}"
+ else:
+ api = f"{SEARCH_API_LINK}/api/v1/search?site={site}&query={key}&limit={SEARCH_LIMIT}"
+ elif method == 'apitrend':
+ LOGGER.info(f"API Trending from {site}")
+ if site == 'all':
+ api = f"{SEARCH_API_LINK}/api/v1/all/trending?limit={SEARCH_LIMIT}"
+ else:
+ api = f"{SEARCH_API_LINK}/api/v1/trending?site={site}&limit={SEARCH_LIMIT}"
+ elif method == 'apirecent':
+ LOGGER.info(f"API Recent from {site}")
+ if site == 'all':
+ api = f"{SEARCH_API_LINK}/api/v1/all/recent?limit={SEARCH_LIMIT}"
+ else:
+ api = f"{SEARCH_API_LINK}/api/v1/recent?site={site}&limit={SEARCH_LIMIT}"
+ try:
+ resp = rget(api)
+ search_results = resp.json()
+ if 'error' in search_results or search_results['total'] == 0:
+ return editMessage(f"No result found for {key}\nTorrent Site:- {SITES.get(site)}", message)
+ msg = f"Found {min(search_results['total'], TELEGRAPH_LIMIT)}"
+ if method == 'apitrend':
+ msg += f" trending result(s)\nTorrent Site:- {SITES.get(site)}"
+ elif method == 'apirecent':
+ msg += f" recent result(s)\nTorrent Site:- {SITES.get(site)}"
+ else:
+ msg += f" result(s) for {key}\nTorrent Site:- {SITES.get(site)}"
+ search_results = search_results['data']
+ except Exception as e:
+ return editMessage(str(e), message)
+ else:
+ LOGGER.info(f"PLUGINS Searching: {key} from {site}")
+ client = get_client()
+ search = client.search_start(pattern=str(key), plugins=str(site), category='all')
+ search_id = search.id
+ while True:
+ result_status = client.search_status(search_id=search_id)
+ status = result_status[0].status
+ if status != 'Running':
+ break
+ dict_search_results = client.search_results(search_id=search_id)
+ search_results = dict_search_results.results
+ total_results = dict_search_results.total
+ if total_results == 0:
+ return editMessage(f"No result found for {key}\nTorrent Site:- {site.capitalize()}", message)
+ msg = f"Found {min(total_results, TELEGRAPH_LIMIT)}"
+ msg += f" result(s) for {key}\nTorrent Site:- {site.capitalize()}"
+ link = _getResult(search_results, key, message, method)
+ buttons = button_build.ButtonMaker()
+ buttons.buildbutton("🔎 VIEW", link)
+ button = buttons.build_menu(1)
+ editMessage(msg, message, button)
+ if not method.startswith('api'):
+ client.search_delete(search_id=search_id)
+
+ else:
+ if method.startswith('api'):
+ if method == 'apisearch':
+ LOGGER.info(f"API Searching: {key} from {site}")
+ if site == 'all':
+ api = f"{SEARCH_API_LINK}/api/v1/all/search?query={key}&limit={SEARCH_LIMIT}"
+ else:
+ api = f"{SEARCH_API_LINK}/api/v1/search?site={site}&query={key}&limit={SEARCH_LIMIT}"
+ elif method == 'apitrend':
+ LOGGER.info(f"API Trending from {site}")
+ if site == 'all':
+ api = f"{SEARCH_API_LINK}/api/v1/all/trending?limit={SEARCH_LIMIT}"
+ else:
+ api = f"{SEARCH_API_LINK}/api/v1/trending?site={site}&limit={SEARCH_LIMIT}"
+ elif method == 'apirecent':
+ LOGGER.info(f"API Recent from {site}")
+ if site == 'all':
+ api = f"{SEARCH_API_LINK}/api/v1/all/recent?limit={SEARCH_LIMIT}"
+ else:
+ api = f"{SEARCH_API_LINK}/api/v1/recent?site={site}&limit={SEARCH_LIMIT}"
+ try:
+ resp = rget(api)
+ search_results = resp.json()
+ if 'error' in search_results or search_results['total'] == 0:
+ return editMessage(f"No result found for {key}\nTorrent Site:- {SITES.get(site)}", message)
+ cap = f"Found {search_results['total']}"
+ if method == 'apitrend':
+ cap += f" trending results\nTorrent Site:- {SITES.get(site)}"
+ elif method == 'apirecent':
+ cap += f" recent results\nTorrent Site:- {SITES.get(site)}"
+ else:
+ cap += f" results for {key}\nTorrent Site:- {SITES.get(site)}"
+ search_results = search_results['data']
+ except Exception as e:
+ return editMessage(str(e), message)
+ else:
+ LOGGER.info(f"PLUGINS Searching: {key} from {site}")
+ client = get_client()
+ search = client.search_start(pattern=key, plugins=site, category='all')
+ search_id = search.id
+ while True:
+ result_status = client.search_status(search_id=search_id)
+ status = result_status[0].status
+ if status != 'Running':
+ break
+ dict_search_results = client.search_results(search_id=search_id)
+ search_results = dict_search_results.results
+ total_results = dict_search_results.total
+ if total_results == 0:
+ return editMessage(f"No result found for {key}\nTorrent Site:- {site.capitalize()}", message)
+ cap = f"Found {total_results}"
+ cap += f" results for {key}\nTorrent Site:- {site.capitalize()}"
+ hmsg = _getResult(search_results, key, message, method)
+ name = f"{method}_{key}_{site}_{message.message_id}.html"
+ with open(name, "w", encoding='utf-8') as f:
+ f.write(html_template.replace('{msg}', hmsg).replace('{title}', f'{method}_{key}_{site}'))
+ deleteMessage(bot, message)
+ sendFile(bot, message.reply_to_message, name, cap)
+ if not method.startswith('api'):
+ client.search_delete(search_id=search_id)
+
+def _getResult(search_results, key, message, method):
+ if TELEGRAPH_STYLE is True:
+ telegraph_content = []
+ if method == 'apirecent':
+ msg = "{escape(result['name'])}
+ string_speed = f'''
+{result['server']['name']}
+Country: {result['server']['country']}, {result['server']['cc']}
+Sponsor: {result['server']['sponsor']}
+ISP: {result['client']['isp']}
+
+SpeedTest Results
+Upload: {speed_convert(result['upload'], False)}
+Download: {speed_convert(result['download'], False)}
+Ping: {result['ping']} ms
+ISP Rating: {result['client']['isprating']}
+Bot Uptime: {currentTime}
+'''
+ try:
+ pho = sendPhoto(text=string_speed, bot=context.bot, message=update.message, photo=path)
+ deleteMessage(context.bot, speed)
+ Thread(target=auto_delete_message, args=(context.bot, update.message, pho)).start()
+ except Exception as g:
+ logging.error(str(g))
+ editMessage(string_speed, speed)
+ Thread(target=auto_delete_message, args=(context.bot, update.message, speed)).start()
+
+def speed_convert(size, byte=True):
+ """Hi human, you can't read bytes?"""
+ if not byte: size = size / 8 # input is in bits; convert to bytes
+ power = 2 ** 10
+ zero = 0
+ units = {0: "B/s", 1: "KB/s", 2: "MB/s", 3: "GB/s", 4: "TB/s"}
+ while size > power:
+ size /= power
+ zero += 1
+ return f"{round(size, 2)} {units[zero]}"
+
+
+speed_handler = CommandHandler(BotCommands.SpeedCommand, speedtest,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+
+dispatcher.add_handler(speed_handler)
diff --git a/bot/modules/wayback.py b/bot/modules/wayback.py
new file mode 100644
index 0000000..732f012
--- /dev/null
+++ b/bot/modules/wayback.py
@@ -0,0 +1,81 @@
+from telegram import Message
+import waybackpy, re, random
+from telegram.ext import CommandHandler
+from bot import LOGGER, dispatcher, WAYBACK_ENABLED
+from bot.helper.ext_utils.shortenurl import short_url
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.message_utils import editMessage, sendMessage
+
+
+def wayback(update, context):
+ message:Message = update.effective_message
+ link = None
+ if message.reply_to_message: link = message.reply_to_message.text
+ else:
+ link = message.text.split(' ', 1)
+ if len(link) != 2:
+ help_msg = "Send link after command:"
+ help_msg += f"\n/{BotCommands.WayBackCommand}" + " {link}" + "
"
+ help_msg += "\nBy replying to message (including link):"
+ help_msg += f"\n/{BotCommands.WayBackCommand}" + " {message}" + "
"
+ return sendMessage(help_msg, context.bot, update.message)
+ link = link[1]
+ try: link = re.match(r"((http|https)\:\/\/)?[a-zA-Z0-9\.\/\?\:@\-_=#]+\.([a-zA-Z]){2,6}([a-zA-Z0-9\.\&\/\?\:@\-_=#])*", link)[0]
+ except TypeError: return sendMessage('Not a valid link for wayback.', context.bot, update.message)
+ sent = sendMessage('Running WayBack. Wait about 20 secs.', context.bot, update.message)
+ retLink = saveWebPage(link)
+ if not retLink: return editMessage('Could not archive the page. Try again later.', sent)
+ editMessage(f'Saved webpage: {short_url(retLink)}', sent)
+
+
+def saveWebPage(pageurl:str):
+ LOGGER.info("wayback running for: " + pageurl)
+ user_agent = getRandomUserAgent()
+ try:
+ wayback = waybackpy.Url(pageurl, user_agent)
+ archive = wayback.save()
+ LOGGER.info("wayback success for: " + pageurl)
+ return archive.archive_url
+ except Exception as r:
+ LOGGER.error("wayback unsuccess for: " + pageurl + " , " + str(r))
+ return None
+
+
+def getRandomUserAgent():
+ agents = [
+ "Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.699.0 Safari/534.24",
+ "Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.220 Safari/535.1",
+ "Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.41 Safari/535.1",
+ "Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
+ "Mozilla/5.0 (X11; CrOS i686 0.13.507) AppleWebKit/534.35 (KHTML, like Gecko) Chrome/13.0.763.0 Safari/534.35",
+ "Mozilla/5.0 (X11; CrOS i686 0.13.587) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.14 Safari/535.1",
+ "Mozilla/5.0 (X11; CrOS i686 1193.158.0) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.75 Safari/535.7",
+ "Mozilla/5.0 (X11; CrOS i686 12.0.742.91) AppleWebKit/534.30 (KHTML, like Gecko) Chrome/12.0.742.93 Safari/534.30",
+ "Mozilla/5.0 (X11; CrOS i686 12.433.109) AppleWebKit/534.30 (KHTML, like Gecko) Chrome/12.0.742.93 Safari/534.30",
+ "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.34 Safari/534.24",
+ "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.04 Chromium/11.0.696.0 Chrome/11.0.696.0 Safari/534.24",
+ "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.10 Chromium/12.0.703.0 Chrome/12.0.703.0 Safari/534.24",
+ "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.21 (KHTML, like Gecko) Chrome/19.0.1042.0 Safari/535.21",
+ "Opera/9.80 (Windows NT 5.1; U; sk) Presto/2.5.22 Version/10.50",
+ "Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00",
+ "Opera/9.80 (Windows NT 5.1; U; zh-tw) Presto/2.8.131 Version/11.10",
+ "Opera/9.80 (Windows NT 5.1; U;) Presto/2.7.62 Version/11.01",
+ "Opera/9.80 (Windows NT 5.2; U; en) Presto/2.6.30 Version/10.63",
+ "Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.5.22 Version/10.51",
+ "Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.6.30 Version/10.61",
+ "Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.7.62 Version/11.01",
+ "Opera/9.80 (X11; Linux x86_64; U; pl) Presto/2.7.62 Version/11.00",
+ "Opera/9.80 (X11; Linux x86_64; U; Ubuntu/10.10 (maverick); pl) Presto/2.7.62 Version/11.01",
+ "Opera/9.80 (X11; U; Linux i686; en-US; rv:1.9.2.3) Presto/2.2.15 Version/10.10",
+ "Mozilla/5.0 (Linux; Android 10; SM-G975F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.117 Mobile Safari/537.36"
+ ]
+ return random.choice(agents)
+
+if WAYBACK_ENABLED:
+ wayback_handler = CommandHandler(BotCommands.WayBackCommand, wayback,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+else:
+ wayback_handler = CommandHandler(BotCommands.WayBackCommand, wayback,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+dispatcher.add_handler(wayback_handler)
diff --git a/bot/modules/ytdlp.py b/bot/modules/ytdlp.py
new file mode 100644
index 0000000..1e9f057
--- /dev/null
+++ b/bot/modules/ytdlp.py
@@ -0,0 +1,357 @@
+from threading import Thread
+from pyrogram import enums
+from telegram.ext import CommandHandler, CallbackQueryHandler
+from time import sleep
+from re import split as re_split
+
+from bot import *
+from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, editMessage, auto_delete_upload_message, auto_delete_message
+from bot.helper.ext_utils.bot_utils import get_readable_file_size, is_url, get_user_task
+from bot.helper.mirror_utils.download_utils.yt_dlp_download_helper import YoutubeDLHelper
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.button_build import ButtonMaker
+from telegram import ParseMode, InlineKeyboardButton
+from .listener import MirrorLeechListener
+
+listener_dict = {}
+
+def _ytdl(bot, message, isZip=False, isLeech=False):
+ mssg = message.text
+ user_id = message.from_user.id
+ msg_id = message.message_id
+ multi = 0
+ buttons = ButtonMaker()
+ if FSUB:
+ try:
+ user = bot.get_chat_member(f"{FSUB_CHANNEL_ID}", message.from_user.id)
+ LOGGER.info(user.status)
+ if user.status not in ("member", "creator", "administrator", "supergroup"):
+ if message.from_user.username:
+ uname = f'{message.from_user.username}'
+ else:
+ uname = f'{message.from_user.first_name}'
+ buttons = ButtonMaker()
+ chat_u = CHANNEL_USERNAME.replace("@", "")
+ buttons.buildbutton("👉🏻 CHANNEL LINK 👈🏻", f"https://t.me/{chat_u}")
+ help_msg = f"Dᴇᴀʀ {uname},\nYᴏᴜ ɴᴇᴇᴅ ᴛᴏ ᴊᴏɪɴ ᴍʏ Cʜᴀɴɴᴇʟ ᴛᴏ ᴜsᴇ Bᴏᴛ \n\nCʟɪᴄᴋ ᴏɴ ᴛʜᴇ ʙᴇʟᴏᴡ Bᴜᴛᴛᴏɴ ᴛᴏ ᴊᴏɪɴ ᴍʏ Cʜᴀɴɴᴇʟ."
+ reply_message = sendMarkup(help_msg, bot, message, buttons.build_menu(2))
+ Thread(target=auto_delete_message, args=(bot, message, reply_message)).start()
+ return reply_message
+ except Exception:
+ pass
+ if BOT_PM and message.chat.type != 'private':
+ try:
+ msg1 = 'Added your requested link to download.\n'
+ send = bot.sendMessage(message.from_user.id, text=msg1)
+ send.delete()
+ except Exception as e:
+ LOGGER.warning(e)
+ bot_d = bot.get_me()
+ b_uname = bot_d.username
+ uname = f'{message.from_user.first_name}'
+ botstart = f"http://t.me/{b_uname}"
+ buttons.buildbutton("Click Here to Start Me", f"{botstart}")
+ startwarn = f"Dear {uname},\n\nI found that you haven't started me in PM (Private Chat) yet.\n\n" \
+ f"From now on i will give link and leeched files in PM and log channel only"
+ reply_message = sendMarkup(startwarn, bot, message, buttons.build_menu(2))
+ Thread(target=auto_delete_message, args=(bot, message, reply_message)).start()
+ return reply_message
+
+ total_task = len(download_dict)
+ user_id = message.from_user.id
+ if user_id != OWNER_ID and user_id not in SUDO_USERS:
+ if TOTAL_TASKS_LIMIT == total_task:
+ return sendMessage(f"Bᴏᴛ Tᴏᴛᴀʟ Tᴀsᴋ Lɪᴍɪᴛ : {TOTAL_TASKS_LIMIT}\nTᴀsᴋs Pʀᴏᴄᴇssɪɴɢ : {total_task}\n#total limit exceed ", bot ,message)
+ if USER_TASKS_LIMIT == get_user_task(user_id):
+ return sendMessage(f"Bᴏᴛ Usᴇʀ Tᴀsᴋ Lɪᴍɪᴛ : {USER_TASKS_LIMIT} \nYᴏᴜʀ Tᴀsᴋs : {get_user_task(user_id)}\n#user limit exceed", bot ,message)
+
+
+ link = mssg.split()
+ if len(link) > 1:
+ link = link[1].strip()
+ if link.strip().isdigit():
+ multi = int(link)
+ link = ''
+ elif link.strip().startswith(("|", "pswd:", "opt:")):
+ link = ''
+ else:
+ link = ''
+
+ name = mssg.split('|', maxsplit=1)
+ if len(name) > 1:
+ if 'opt: ' in name[0] or 'pswd: ' in name[0]:
+ name = ''
+ else:
+ name = name[1]
+ if name != '':
+ name = re_split('pswd:|opt:', name)[0]
+ name = name.strip()
+ else:
+ name = ''
+
+ pswd = mssg.split(' pswd: ')
+ if len(pswd) > 1:
+ pswd = pswd[1]
+ pswd = pswd.split(' opt: ')[0]
+ else:
+ pswd = None
+
+ opt = mssg.split(' opt: ')
+ if len(opt) > 1:
+ opt = opt[1]
+ else:
+ opt = None
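+ # Illustrative parse of a hypothetical command, based on the splits above:
+ # "/cmd https://youtu.be/abc |New Name pswd: 123 opt: playliststart:^10|matchtitle:S13"
+ # -> link = "https://youtu.be/abc", name = "New Name", pswd = "123", opt = "playliststart:^10|matchtitle:S13"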
+
+ if message.from_user.username:
+ tag = f"@{message.from_user.username}"
+ else:
+ tag = message.from_user.mention_html(message.from_user.first_name)
+
+ reply_to = message.reply_to_message
+ if reply_to is not None:
+ if len(link) == 0:
+ link = reply_to.text.split(maxsplit=1)[0].strip()
+ if reply_to.from_user.username:
+ tag = f"@{reply_to.from_user.username}"
+ else:
+ tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)
+
+ if not is_url(link):
+ help_msg = "Send link along with command line:"
+ help_msg += "\n/cmd
link |newname pswd: xx(zip) opt: x:y|x1:y1"
+ help_msg += "\n\nBy replying to link:"
+ help_msg += "\n/cmd
|newname pswd: xx(zip) opt: x:y|x1:y1"
+ help_msg += "\n\nOptions Example: opt: playliststart:^10|matchtitle:S13|writesubtitles:true"
+ help_msg += "\n\nMulti links only by replying to first link:"
+ help_msg += "\n/cmd
10(number of links). Number should be always before |newname, pswd: and opt:"
+ help_msg += "\n\nNOTE: Add `^` before integer, some values must be integer and some string."
+ help_msg += " Like playlist_items:10 works with string, so no need to add `^` before the number"
+ help_msg += " but playlistend works only with integer so you must add `^` before the number like example above."
+ help_msg += "\n\nCheck all arguments from this FILE."
+ reply_message = sendMessage(help_msg, bot, message)
+ Thread(target=auto_delete_message, args=(bot, message, reply_message)).start()
+ return reply_message
+
+ listener = MirrorLeechListener(bot, message, isZip, isLeech=isLeech, pswd=pswd, tag=tag)
+ buttons = ButtonMaker()
+ best_video = "bv*+ba/b"
+ best_audio = "ba/b"
+ ydl = YoutubeDLHelper(listener)
+ try:
+ result = ydl.extractMetaData(link, name, opt, True)
+ except Exception as e:
+ msg = str(e).replace('<', ' ').replace('>', ' ')
+ return sendMessage(tag + " " + msg, bot, message)
+ formats_dict = {}
+ if 'entries' in result:
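+ # The yt-dlp format selectors built below, e.g. "bv*[height<=720][ext=mp4]+ba[ext=m4a]/b[height<=720]",
+ # mean: best mp4 video stream no taller than 720p merged with best m4a audio,
+ # falling back to the best pre-merged file at or below 720p.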
+ for i in ['144', '240', '360', '480', '720', '1080', '1440', '2160']:
+ video_format = f"bv*[height<={i}][ext=mp4]+ba[ext=m4a]/b[height<={i}]"
+ b_data = f"{i}|mp4"
+ formats_dict[b_data] = video_format
+ buttons.sbutton(f"{i}-mp4", f"qu {msg_id} {b_data} t")
+ video_format = f"bv*[height<={i}][ext=webm]+ba/b[height<={i}]"
+ b_data = f"{i}|webm"
+ formats_dict[b_data] = video_format
+ buttons.sbutton(f"{i}-webm", f"qu {msg_id} {b_data} t")
+ buttons.sbutton("MP3", f"qu {msg_id} mp3 t")
+ buttons.sbutton("Best Videos", f"qu {msg_id} {best_video} t")
+ buttons.sbutton("Best Audios", f"qu {msg_id} {best_audio} t")
+ buttons.sbutton("Cancel", f"qu {msg_id} cancel")
+ YTBUTTONS = buttons.build_menu(3)
+ listener_dict[msg_id] = [listener, user_id, link, name, YTBUTTONS, opt, formats_dict]
+ bmsg = sendMarkup('Choose Playlist Videos Quality:', bot, message, YTBUTTONS)
+ else:
+ formats = result.get('formats')
+ formats_dict = {}
+ if formats is not None:
+ for frmt in formats:
+ if frmt.get('tbr'):
+
+ format_id = frmt['format_id']
+
+ if frmt.get('filesize'):
+ size = frmt['filesize']
+ elif frmt.get('filesize_approx'):
+ size = frmt['filesize_approx']
+ else:
+ size = 0
+
+ if frmt.get('height'):
+ height = frmt['height']
+ ext = frmt['ext']
+ fps = frmt['fps'] if frmt.get('fps') else ''
+ b_name = f"{height}p{fps}-{ext}"
+ if ext == 'mp4':
+ v_format = f"bv*[format_id={format_id}]+ba[ext=m4a]/b[height={height}]"
+ else:
+ v_format = f"bv*[format_id={format_id}]+ba/b[height={height}]"
+ elif frmt.get('video_ext') == 'none' and frmt.get('acodec') != 'none':
+ b_name = f"{frmt['acodec']}-{frmt['ext']}"
+ v_format = f"ba[format_id={format_id}]"
+ else:
+ continue
+
+ if b_name in formats_dict:
+ formats_dict[b_name][str(frmt['tbr'])] = [size, v_format]
+ else:
+ subformat = {}
+ subformat[str(frmt['tbr'])] = [size, v_format]
+ formats_dict[b_name] = subformat
+
+ for b_name, d_dict in formats_dict.items():
+ if len(d_dict) == 1:
+ tbr, v_list = list(d_dict.items())[0]
+ buttonName = f"{b_name} ({get_readable_file_size(v_list[0])})"
+ buttons.sbutton(buttonName, f"qu {msg_id} {b_name}|{tbr}")
+ else:
+ buttons.sbutton(b_name, f"qu {msg_id} dict {b_name}")
+ buttons.sbutton("MP3", f"qu {msg_id} mp3")
+ buttons.sbutton("Best Video", f"qu {msg_id} {best_video}")
+ buttons.sbutton("Best Audio", f"qu {msg_id} {best_audio}")
+ buttons.sbutton("Cancel", f"qu {msg_id} cancel")
+ YTBUTTONS = buttons.build_menu(2)
+ listener_dict[msg_id] = [listener, user_id, link, name, YTBUTTONS, opt, formats_dict]
+ bmsg = sendMarkup('Choose Video Quality:', bot, message, YTBUTTONS)
+
+ Thread(target=_auto_cancel, args=(bmsg, msg_id)).start()
+ if multi > 1:
+ sleep(4)
+ nextmsg = type('nextmsg', (object, ), {'chat_id': message.chat_id, 'message_id': message.reply_to_message.message_id + 1})
+ ymsg = message.text.split(maxsplit=2)
+ ymsg[1] = f"{multi - 1}"
+ nextmsg = sendMessage(" ".join(ymsg), bot, nextmsg)
+ nextmsg.from_user.id = message.from_user.id
+ sleep(4)
+ Thread(target=_ytdl, args=(bot, nextmsg, isZip, isLeech)).start()
+
+def _qual_subbuttons(task_id, b_name, msg):
+ buttons = ButtonMaker()
+ task_info = listener_dict[task_id]
+ formats_dict = task_info[6]
+ for tbr, d_data in formats_dict[b_name].items():
+ buttonName = f"{tbr}K ({get_readable_file_size(d_data[0])})"
+ buttons.sbutton(buttonName, f"qu {task_id} {b_name}|{tbr}")
+ buttons.sbutton("Back", f"qu {task_id} back")
+ buttons.sbutton("Cancel", f"qu {task_id} cancel")
+ SUBBUTTONS = buttons.build_menu(2)
+ editMessage(f"Choose Bit rate for {b_name}:", msg, SUBBUTTONS)
+
+def _mp3_subbuttons(task_id, msg, playlist=False):
+ buttons = ButtonMaker()
+ audio_qualities = [64, 128, 320]
+ for q in audio_qualities:
+ if playlist:
+ i = 's'
+ audio_format = f"ba/b-{q} t"
+ else:
+ i = ''
+ audio_format = f"ba/b-{q}"
+ buttons.sbutton(f"{q}K-mp3", f"qu {task_id} {audio_format}")
+ buttons.sbutton("Back", f"qu {task_id} back")
+ buttons.sbutton("Cancel", f"qu {task_id} cancel")
+ SUBBUTTONS = buttons.build_menu(2)
+ editMessage(f"Choose Audio{i} Bitrate:", msg, SUBBUTTONS)
+
+def select_format(update, context):
+ query = update.callback_query
+ user_id = query.from_user.id
+ data = query.data
+ msg = query.message
+ data = data.split(" ")
+ task_id = int(data[1])
+ try:
+ task_info = listener_dict[task_id]
+ except:
+ return editMessage("This is an old task", msg)
+ uid = task_info[1]
+ if user_id != uid and not CustomFilters._owner_query(user_id):
+ return query.answer(text="This task is not for you!", show_alert=True)
+ elif data[2] == "dict":
+ query.answer()
+ b_name = data[3]
+ _qual_subbuttons(task_id, b_name, msg)
+ return
+ elif data[2] == "back":
+ query.answer()
+ return editMessage('Choose Video Quality:', msg, task_info[4])
+ elif data[2] == "mp3":
+ query.answer()
+ if len(data) == 4:
+ playlist = True
+ else:
+ playlist = False
+ _mp3_subbuttons(task_id, msg, playlist)
+ return
+ elif data[2] == "cancel":
+ query.answer()
+ editMessage('Task has been cancelled.', msg)
+ else:
+ query.answer()
+ listener = task_info[0]
+ link = task_info[2]
+ name = task_info[3]
+ opt = task_info[5]
+ qual = data[2]
+ if len(data) == 4:
+ playlist = True
+ if '|' in qual:
+ qual = task_info[6][qual]
+ else:
+ playlist = False
+ if '|' in qual:
+ b_name, tbr = qual.split('|')
+ qual = task_info[6][b_name][tbr][1]
+ ydl = YoutubeDLHelper(listener)
+ Thread(target=ydl.add_download, args=(link, f'{DOWNLOAD_DIR}{task_id}', name, qual, playlist, opt)).start()
+ query.message.delete()
+ del listener_dict[task_id]
+
+def _auto_cancel(msg, msg_id):
+ sleep(120)
+ try:
+ del listener_dict[msg_id]
+ editMessage('Timed out! Task has been cancelled.', msg)
+ except:
+ pass
+
+def ytdl(update, context):
+ _ytdl(context.bot, update.message)
+
+def ytdlZip(update, context):
+ _ytdl(context.bot, update.message, True)
+
+def ytdlleech(update, context):
+ _ytdl(context.bot, update.message, isLeech=True)
+
+def ytdlZipleech(update, context):
+ _ytdl(context.bot, update.message, True, True)
+
+if WATCH_ENABLED:
+ ytdl_handler = CommandHandler(BotCommands.WatchCommand, ytdl,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ ytdl_zip_handler = CommandHandler(BotCommands.ZipWatchCommand, ytdlZip,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ ytdl_leech_handler = CommandHandler(BotCommands.LeechWatchCommand, ytdlleech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+ ytdl_zip_leech_handler = CommandHandler(BotCommands.LeechZipWatchCommand, ytdlZipleech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+
+else:
+ ytdl_handler = CommandHandler(BotCommands.WatchCommand, ytdl,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ ytdl_zip_handler = CommandHandler(BotCommands.ZipWatchCommand, ytdlZip,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ ytdl_leech_handler = CommandHandler(BotCommands.LeechWatchCommand, ytdlleech,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+ ytdl_zip_leech_handler = CommandHandler(BotCommands.LeechZipWatchCommand, ytdlZipleech,
+ filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
+
+quality_handler = CallbackQueryHandler(select_format, pattern="qu", run_async=True)
+
+dispatcher.add_handler(ytdl_handler)
+dispatcher.add_handler(ytdl_zip_handler)
+dispatcher.add_handler(ytdl_leech_handler)
+dispatcher.add_handler(ytdl_zip_leech_handler)
+dispatcher.add_handler(quality_handler)
\ No newline at end of file
diff --git a/captain-definition b/captain-definition
new file mode 100644
index 0000000..04859b5
--- /dev/null
+++ b/captain-definition
@@ -0,0 +1,4 @@
+{
+ "schemaVersion": 2,
+ "dockerfilePath": "./Dockerfile"
+}
\ No newline at end of file
diff --git a/config_sample.env b/config_sample.env
new file mode 100644
index 0000000..500022c
--- /dev/null
+++ b/config_sample.env
@@ -0,0 +1,247 @@
+# Remove this line before deploying
+_____REMOVE_THIS_LINE_____=True
+
+# REQUIRED CONFIG
+BOT_TOKEN = ""
+GDRIVE_FOLDER_ID = ""
+OWNER_ID =
+DOWNLOAD_DIR = "/usr/src/app/downloads"
+DOWNLOAD_STATUS_UPDATE_INTERVAL = 20
+AUTO_DELETE_MESSAGE_DURATION = 60
+AUTO_DELETE_UPLOAD_MESSAGE_DURATION = -1 #minimum recommended 120
+IS_TEAM_DRIVE = ""
+TELEGRAM_API =
+TELEGRAM_HASH = ""
+
+# OPTIONAL CONFIG
+DATABASE_URL = ""
+AUTHORIZED_CHATS = ""
+SUDO_USERS = ""
+IGNORE_PENDING_REQUESTS = ""
+USE_SERVICE_ACCOUNTS = ""
+INDEX_URL = ""
+STATUS_LIMIT = "2" #recommended value 2
+STOP_DUPLICATE = ""
+CMD_INDEX = ""
+UPTOBOX_TOKEN = ""
+TORRENT_TIMEOUT = ""
+EXTENSION_FILTER = ""
+INCOMPLETE_TASK_NOTIFIER = ""
+
+#TURN ON/OFF FUNCTIONS
+LEECH_ENABLED = "" #Default is False (only use by owner/sudo) Make it True (for authorized chats)
+MIRROR_ENABLED = "" #Default is False (only use by owner/sudo) Make it True (for authorized chats)
+WATCH_ENABLED = "" #Default is False (only use by owner/sudo) Make it True (for authorized chats)
+CLONE_ENABLED = "" #Default is False (only use by owner/sudo) Make it True (for authorized chats)
+ANILIST_ENABLED = "" #Default is False (only use by owner/sudo) Make it True (for authorized chats)
+WAYBACK_ENABLED = "" #Default is False (only use by owner/sudo) Make it True (for authorized chats)
+MEDIAINFO_ENABLED = "" #Default is False (only use by owner/sudo) Make it True (for authorized chats)
+SET_BOT_COMMANDS = ""
+
+# Update
+UPSTREAM_REPO = ""
+UPSTREAM_BRANCH = ""
+
+# Leech Mirror
+BOT_PM = ""
+FORCE_BOT_PM = "" #For this the BOT_PM must be True
+LEECH_LOG =
+LEECH_LOG_URL = ""
+MIRROR_LOGS =
+MIRROR_LOG_URL = ""
+LINK_LOGS =
+LEECH_LOG_INDEXING = "" #Default is False make it True if you want leech file indexing in leech log channel
+TG_SPLIT_SIZE = ""
+AS_DOCUMENT = ""
+EQUAL_SPLITS = ""
+CUSTOM_FILENAME = ""
+TIMEZONE = "Asia/Kolkata"
+
+#Telegraph UI
+TITLE_NAME = ""
+AUTHOR_NAME = ""
+AUTHOR_URL = ""
+GD_INFO = ""
+
+# Force Sub Channel
+FSUB = ""
+CHANNEL_USERNAME = ""
+FSUB_CHANNEL_ID =
+
+# qBittorrent
+BASE_URL_OF_BOT = ""
+SERVER_PORT = ""
+WEB_PINCODE = ""
+QB_SEED = ""
+
+# RSS
+RSS_DELAY = ""
+RSS_COMMAND = ""
+RSS_CHAT_ID = ""
+USER_SESSION_STRING = "" #Use it for 4GB leeching
+
+# Private Files
+ACCOUNTS_ZIP_URL = "" #recommended to use
+TOKEN_PICKLE_URL = "" #recommended to use
+MULTI_SEARCH_URL = ""
+YT_COOKIES_URL = ""
+NETRC_URL = ""
+
+# Mega
+MEGA_API_KEY = ""
+MEGA_EMAIL_ID = ""
+MEGA_PASSWORD = ""
+
+# Shortener
+SHORTENER = ""
+SHORTENER_API = ""
+
+# GDTOT COOKIE
+CRYPT = ""
+
+# UNIFIED LOGIN (AppDrive, DriveApp, GDFlix, DriveBit, DriveLinks, DriveSharer, DriveAce, DrivePro)
+UNIFIED_EMAIL = ""
+UNIFIED_PASS = ""
+
+# HUBDRIVE COOKIES
+HUBDRIVE_CRYPT = ""
+
+# (KATDRIVE + KOLOP + DRIVEHUB) COOKIES
+KATDRIVE_CRYPT = ""
+
+# (DRIVEFIRE + DRIVEBUZZ) COOKIES
+DRIVEFIRE_CRYPT = ""
+
+# Size Limits
+TORRENT_DIRECT_LIMIT = ""
+ZIP_UNZIP_LIMIT = ""
+CLONE_LIMIT = ""
+MEGA_LIMIT = ""
+STORAGE_THRESHOLD = ""
+ACTIVE_TASK_LIMIT = ""
+USER_TASKS_LIMIT = ""
+LEECH_LIMIT = ""
+
+# Progress Strings
+FINISHED_PROGRESS_STR = '●' # '■'
+UN_FINISHED_PROGRESS_STR = '○' # '□'
+
+# THEME
+EMOJI_THEME = "" #Default is False
+SHOW_LIMITS_IN_STATS = "" #Default is false
+
+#VIEW STYLE
+TELEGRAPH_STYLE = "" #Default is False
+
+#BRANDING
+CREDIT_NAME = ""
+
+#DYNAMIC IMAGES
+PICS = "" # Add multiple telgraph image links that are seperated by spaces
+WALLFLARE_SEARCH = "" # Put Keyword to Download Images from wallpaperflare.com, Seperate Multiple Words by + instead of space, eg: attack+on+titan
+WALLTIP_SEARCH = "" # Put Keyword to Download Images from wallpapertip.com, Seperate Multiple Words by - instead of space, eg: attack-on-titan
+WALLCRAFT_CATEGORY = "" # Put Category Name, Available: 3D, abstract, animals, anime, art, black, cars, city, dark, fantasy, flowers, food, holidays, love, macro, minimalism, motorcycles, music, nature, others, milies, space, sport, technologies, textures, vector, words
+PIXABAY_API_KEY = "" # Sign Up Pixabay.com and Get API Key to Automatically Add 200 Images in Photos List
+PIXABAY_CATEGORY = "" # Put Category Name, Available: backgrounds, fashion, nature, science, education, feelings, health, people, religion, places, animals, industry, computer, food, sports, transportation, travel, buildings, business, music
+PIXABAY_SEARCH = "" # Put Keyword to Download Images from Pixabay.com Limit : 100 characters
+
+
+#FONT STYLE
+NAME_FONT = "" # Default Name of File on Upload message, Available Options : b, code, i, u, strike, spoiler
+CAPTION_FONT = "" # Default Caption of Leeched files, Available Options : b, code, i, u, strike, spoiler
+
+# Buttons
+DISABLE_DRIVE_LINK = ""
+VIEW_LINK = ""
+SOURCE_LINK = ""
+START_BTN1_NAME = ""
+START_BTN1_URL = ""
+START_BTN2_NAME = ""
+START_BTN2_URL = ""
+BUTTON_FOUR_NAME = ""
+BUTTON_FOUR_URL = ""
+BUTTON_FIVE_NAME = ""
+BUTTON_FIVE_URL = ""
+BUTTON_SIX_NAME = ""
+BUTTON_SIX_URL = ""
+
+# Torrent Search
+SEARCH_API_LINK = ""
+SEARCH_LIMIT = ""
+SEARCH_PLUGINS = '["https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/rarbg.py",
+ "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py",
+ "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/legittorrents.py",
+ "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py",
+ "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py",
+ "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/zooqle.py",
+ "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py",
+ "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/kickass_torrent.py",
+ "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py",
+ "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/linuxtracker.py",
+ "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py",
+ "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py",
+ "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/glotorrents.py",
+ "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py",
+ "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py",
+ "https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py"]'
+
+
+#Mirror Related Commands
+START_COMMAND = "" #Default is "start"
+MIRROR_COMMAND = "" #Default is "mirror"
+ZIP_COMMAND = "" #Default is "zipmirror"
+UNZIP_COMMAND = "" #Default is "unzipmirror"
+CANCEL_COMMAND = "" #Default is "cancel"
+LIST_COMMAND = "" #Default is "list"
+SEARCH_COMMAND = "" #Default is "search"
+STATUS_COMMAND = "" #Default is "status"
+STATS_COMMAND = "" #Default is "stats"
+HELP_COMMAND = "" #Default is "help"
+CLONE_COMMAND = "" #Default is "clone"
+COUNT_COMMAND = "" #Default is "count"
+WATCH_COMMAND = "" #Default is "watch"
+ZIPWATCH_COMMAND = "" #Default is "zipwatch"
+QBMIRROR_COMMAND = "" #Default is "qbmirror"
+QBZIP_COMMAND = "" #Default is "qbzipmirror"
+QBUNZIP_COMMAND = "" #Default is "qbunzipmirror"
+BTSELECT_COMMAND = "" #Default is "btsel"
+
+#Leech Related Commands
+LEECH_COMMAND = "" #Default is "leech"
+LEECHSET_COMMAND = "" #Default is "leechset"
+SETTHUMB_COMMAND = "" #Default is "setthumb"
+UNZIPLEECH_COMMAND = "" #Default is "unzipleech"
+ZIPLEECH_COMMAND = "" #Default is "zipleech"
+QBLEECH_COMMAND = "" #Default is "qbleech"
+QBUNZIPLEECH_COMMAND = "" #Default is "qbunzipleech"
+QBZIPLEECH_COMMAND = "" #Default is "qbzipleech"
+LEECHWATCH_COMMAND = "" #Default is "leechwatch"
+LEECHZIPWATCH_COMMAND = "" #Default is "leechzipwatch"
+
+#SUDO Commands
+AUTH_COMMAND = "" #Default is "authorize"
+UNAUTH_COMMAND = "" #Default is "unauthorize"
+ADDSUDO_COMMAND = "" #Default is "addsudo"
+RMSUDO_COMMAND = "" #Default is "rmsudo"
+RESTART_COMMAND = "" #Default is "restart"
+SPEED_COMMAND = "" #Default is "speedtest"
+USERS_COMMAND = "" #Default is "users"
+CANCEL_ALL_COMMAND = "" #Default is "cancelall"
+LOG_COMMAND = "" #Default is "log"
+DELETE_COMMAND = "" #Default is "del"
+USAGE_COMMAND = "" #Default is "usage"
+RSSLIST_COMMAND = ""
+RSSGET_COMMAND = ""
+RSSSUB_COMMAND = ""
+RSSUNSUB_COMMAND = ""
+RSSSET_COMMAND = ""
+SHELL_COMMAND = ""
+EXEHELP_COMMAND = ""
+SLEEP_COMMAND = "" #Default is "sleep"
+
+#EXTRA COMMANDS
+PING_COMMAND = "" #Default is "ping"
+SPEEDTEST_COMMAND = "" #Default is "speedtest"
+MEDIAINFO_COMMAND = "" #Default is "mediainfo"
+HASH_COMMAND = "" #Default is "hash"
+WAYBACK_COMMAND = "" #Default is "wayback"
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..2eb2cea
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,9 @@
+version: "3.3"
+
+services:
+ app:
+ build: .
+ command: bash start.sh
+ restart: on-failure
+ ports:
+ - "80:80"
\ No newline at end of file
diff --git a/driveid.py b/driveid.py
new file mode 100644
index 0000000..c41e6d5
--- /dev/null
+++ b/driveid.py
@@ -0,0 +1,47 @@
+import os
+import re
+print("\n\n"\
+ " Bot can search files recursively, but you have to add the list of drives you want to search.\n"\
+ " Use the following format: (You can use 'root' in the ID in case you wan to use main drive.)\n"\
+ " teamdrive NAME --> anything that you likes\n"\
+ " teamdrive ID --> id of teamdrives in which you likes to search ('root' for main drive)\n"\
+ " teamdrive INDEX URL --> enter index url for this drive.\n" \
+ " go to the respective drive and copy the url from address bar\n")
+msg = ''
+if os.path.exists('drive_folder'):
+ with open('drive_folder', 'r+') as f:
+ lines = f.read()
+ if not re.match(r'^\s*$', lines):
+ print(lines)
+ print("\n\n"\
+ " DO YOU WISH TO KEEP THE ABOVE DETAILS THAT YOU PREVIOUSLY ADDED???? ENTER (y/n)\n"\
+ " IF NOTHING SHOWS ENTER n")
+ while 1:
+ choice = input()
+ if choice in ['y', 'Y']:
+ msg = f'{lines}'
+ break
+ elif choice in ['n', 'N']:
+ break
+ else:
+ print("\n\n DO YOU WISH TO KEEP THE ABOVE DETAILS ???? y/n <=== this is option ..... OPEN YOUR EYES & READ...")
+num = int(input(" How Many Drives/Folders Would You Like To Add : "))
+for count in range(1, num + 1):
+ print(f"\n > DRIVE - {count}\n")
+ name = input(" Enter Drive NAME (anything) : ")
+ id = input(" Enter Drive ID : ")
+ index = input(" Enter Drive INDEX URL (optional) : ")
+ if not name or not id:
+ print("\n\n ERROR : Dont leave the name/id without filling.")
+ exit(1)
+ name=name.replace(" ", "_")
+ if index:
+ if index[-1] == "/":
+ index = index[:-1]
+ else:
+ index = ''
+ msg += f"{name} {id} {index}\n"
+with open('drive_folder', 'w') as file:
+ file.truncate(0)
+ file.write(msg)
+print("\n\n Done!")
\ No newline at end of file
diff --git a/gen_sa_accounts.py b/gen_sa_accounts.py
new file mode 100644
index 0000000..3469e82
--- /dev/null
+++ b/gen_sa_accounts.py
@@ -0,0 +1,350 @@
+import errno
+import os
+import pickle
+import sys
+from argparse import ArgumentParser
+from base64 import b64decode
+from glob import glob
+from json import loads
+from random import choice
+from time import sleep
+
+from google.auth.transport.requests import Request
+from google_auth_oauthlib.flow import InstalledAppFlow
+from googleapiclient.discovery import build
+from googleapiclient.errors import HttpError
+
+SCOPES = ['https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/cloud-platform',
+ 'https://www.googleapis.com/auth/iam']
+project_create_ops = []
+current_key_dump = []
+sleep_time = 30
+
+
+# Create count SAs in project
+def _create_accounts(service, project, count):
+ batch = service.new_batch_http_request(callback=_def_batch_resp)
+ for _ in range(count):
+ aid = _generate_id('mfc-')
+ batch.add(service.projects().serviceAccounts().create(name='projects/' + project, body={'accountId': aid,
+ 'serviceAccount': {
+ 'displayName': aid}}))
+ batch.execute()
+
+
+# Create accounts needed to fill project
+def _create_remaining_accounts(iam, project):
+ print('Creating accounts in %s' % project)
+ sa_count = len(_list_sas(iam, project))
+ while sa_count != 100:
+ _create_accounts(iam, project, 100 - sa_count)
+ sa_count = len(_list_sas(iam, project))
+
+
+# Generate a random id
+def _generate_id(prefix='saf-'):
+ chars = '-abcdefghijklmnopqrstuvwxyz1234567890'
+ return prefix + ''.join(choice(chars) for _ in range(25)) + choice(chars[1:])
+
+
+# List projects using service
+def _get_projects(service):
+ return [i['projectId'] for i in service.projects().list().execute()['projects']]
+
+
+# Default batch callback handler
+def _def_batch_resp(id, resp, exception):
+ if exception is not None:
+ if str(exception).startswith('