forked from MaxxRider/Leech-Pro
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Showing 6 changed files with 735 additions and 0 deletions.
@@ -0,0 +1,75 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) Shrimadhav U K | gautamajay52

# the logging things
import logging
import os
import shutil

logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
LOGGER = logging.getLogger(__name__)

from pyrogram.types import CallbackQuery
from tobrot.helper_funcs.admin_check import AdminCheck
from tobrot.helper_funcs.download_aria_p_n import aria_start
from tobrot.helper_funcs.youtube_dl_button import youtube_dl_call_back
from tobrot.plugins.status_message_fn import cancel_message_f
from tobrot import (
    MAX_MESSAGE_LENGTH,
    AUTH_CHANNEL
)


async def button(bot, update: CallbackQuery):
    cb_data = update.data
    # default to False so g is always defined, even if the admin check fails
    g = False
    try:
        g = await AdminCheck(bot, update.message.chat.id, update.from_user.id)
        LOGGER.info(g)
    except Exception:
        pass
    if "|" in cb_data:
        await youtube_dl_call_back(bot, update)
    elif (update.from_user.id == update.message.reply_to_message.from_user.id) or g:
        LOGGER.info(cb_data)
        if cb_data.startswith("cancel"):
            # expect "cancel <gid>"; without a GID just drop the message
            if len(cb_data.split()) > 1:
                i_m_s_e_g = await update.message.reply_text("checking..?", quote=True)
                aria_i_p = await aria_start()
                g_id = cb_data.split()[-1]
                LOGGER.info(g_id)
                try:
                    downloads = aria_i_p.get_download(g_id)
                    file_name = downloads.name
                    LOGGER.info(downloads)
                    LOGGER.info(downloads.remove(force=True))
                    # clean up whatever was already written to disk
                    if os.path.exists(file_name):
                        if os.path.isdir(file_name):
                            shutil.rmtree(file_name)
                        else:
                            os.remove(file_name)
                    await i_m_s_e_g.edit_text(
                        f"Leech cancelled by <a href='tg://user?id={update.from_user.id}'>"
                        f"{update.from_user.first_name}</a>"
                    )
                except Exception as e:
                    await i_m_s_e_g.edit_text("<i>FAILED</i>\n\n" + str(e) + "\n#error")
            else:
                await update.message.delete()
        elif cb_data == "fuckingdo":
            if update.from_user.id in AUTH_CHANNEL:
                # files and folders that must survive the cleanup
                g_d_list = [
                    'app.json', 'venv', 'rclone.conf', '.gitignore', '_config.yml',
                    'COPYING', 'Dockerfile', 'DOWNLOADS', 'Procfile', '.heroku',
                    '.profile.d', 'rclone.jpg', 'README.md', 'requirements.txt',
                    'runtime.txt', 'start.sh', 'tobrot', 'gautam',
                    'Torrentleech-Gdrive.log', 'vendor'
                ]
                LOGGER.info(g_d_list)
                g_list = os.listdir()
                LOGGER.info(g_list)
                # everything in the working directory that is not whitelisted
                g_del_list = list(set(g_list) - set(g_d_list))
                LOGGER.info(g_del_list)
                if len(g_del_list) != 0:
                    for f in g_del_list:
                        if os.path.isfile(f):
                            os.remove(f)
                        else:
                            shutil.rmtree(f)
                    await update.message.edit_text(f"Deleted {len(g_del_list)} objects 😬")
                else:
                    await update.message.edit_text("Nothing to clear 🙄")
            else:
                await update.message.edit_text("You are not allowed to do that 🤭")
        elif cb_data == "fuckoff":
            await update.message.edit_text("Okay! fine 🤬")
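The cancel branch above recovers the aria2 GID by splitting update.data on whitespace, so the inline button that triggers it has to carry that GID in its callback data. A minimal sketch of how such a button could be attached to a status message; the helper name, button label, and layout are assumptions for illustration, not code from this commit:

# Hypothetical illustration: attach a "cancel <gid>" button so button() can find the GID.
from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton


async def send_cancel_keyboard(status_message, gid):
    # callback_data must match what button() parses: "cancel", a space, then the aria2 GID
    keyboard = InlineKeyboardMarkup([
        [InlineKeyboardButton("🚫 Cancel", callback_data=f"cancel {gid}")]
    ])
    await status_message.edit_text("Downloading ...", reply_markup=keyboard)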
@@ -0,0 +1,66 @@
"""ThumbNail utilities, © @AnyDLBot"""

import os

from tobrot import DOWNLOAD_LOCATION

from hachoir.metadata import extractMetadata
from hachoir.parser import createParser
from PIL import Image


async def save_thumb_nail(client, message):
    thumbnail_location = os.path.join(
        DOWNLOAD_LOCATION,
        "thumbnails"
    )
    thumb_image_path = os.path.join(
        thumbnail_location,
        str(message.from_user.id) + ".jpg"
    )
    ismgs = await message.reply_text("processing ...")
    if message.reply_to_message is not None:
        if not os.path.isdir(thumbnail_location):
            os.makedirs(thumbnail_location)
        download_location = thumbnail_location + "/"
        downloaded_file_name = await client.download_media(
            message=message.reply_to_message,
            file_name=download_location
        )
        # https://stackoverflow.com/a/21669827/4723940
        Image.open(downloaded_file_name).convert("RGB").save(downloaded_file_name)
        metadata = extractMetadata(createParser(downloaded_file_name))
        height = 0
        if metadata is not None and metadata.has("height"):
            height = metadata.get("height")
        # resize image
        # ref: https://t.me/PyrogramChat/44663
        img = Image.open(downloaded_file_name)
        if height <= 0:
            # fall back to the image's own height if metadata is unavailable
            height = img.height
        # https://stackoverflow.com/a/37631799/4723940
        # Image.resize() returns a new image, so keep the result
        img = img.resize((320, height))
        img.save(thumb_image_path, "JPEG")
        # https://pillow.readthedocs.io/en/3.1.x/reference/Image.html#create-thumbnails
        os.remove(downloaded_file_name)
        await ismgs.edit(
            "Custom video / file thumbnail saved. "
            "This image will be used in the upload, till /clearthumbnail."
        )
    else:
        await ismgs.edit("Reply to a photo to save a custom thumbnail.")


async def clear_thumb_nail(client, message):
    thumbnail_location = os.path.join(
        DOWNLOAD_LOCATION,
        "thumbnails"
    )
    thumb_image_path = os.path.join(
        thumbnail_location,
        str(message.from_user.id) + ".jpg"
    )
    ismgs = await message.reply_text("processing ...")
    if os.path.exists(thumb_image_path):
        os.remove(thumb_image_path)
    await ismgs.edit("✅ Custom thumbnail cleared successfully.")
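The JPEG written by save_thumb_nail is keyed by user id, which is how an uploader can pick it up later. A minimal sketch of that consumption, assuming the upload path uses Pyrogram's send_document and its thumb parameter; the helper name and call site are illustrative, and the real upload code lives elsewhere in the repository, not in this commit:

# Hypothetical illustration: use the saved per-user thumbnail (if any) when uploading.
import os

from tobrot import DOWNLOAD_LOCATION


async def upload_with_thumb(client, chat_id, file_path, user_id):
    thumb_image_path = os.path.join(DOWNLOAD_LOCATION, "thumbnails", f"{user_id}.jpg")
    # a local file path is accepted for thumb; None simply means "no custom thumbnail"
    thumb = thumb_image_path if os.path.exists(thumb_image_path) else None
    await client.send_document(chat_id=chat_id, document=file_path, thumb=thumb)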
@@ -0,0 +1,243 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) Shrimadhav U K | gautamajay52 | Akshay C

# the logging things
import logging

logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logging.getLogger("pyrogram").setLevel(logging.WARNING)
LOGGER = logging.getLogger(__name__)


import os
import requests

from tobrot import (
    DOWNLOAD_LOCATION
)

import time
import aria2p
import asyncio

from tobrot.helper_funcs.extract_link_from_message import extract_link
from tobrot.helper_funcs.download_aria_p_n import (
    call_apropriate_function,
    call_apropriate_function_g,
    aria_start
)
from tobrot.helper_funcs.download_from_link import request_download
from tobrot.helper_funcs.display_progress import progress_for_pyrogram
from tobrot.helper_funcs.youtube_dl_extractor import extract_youtube_dl_formats
from tobrot.helper_funcs.admin_check import AdminCheck
from tobrot.helper_funcs.ytplaylist import yt_playlist_downg
from tobrot.helper_funcs.cloneHelper import CloneHelper


async def incoming_purge_message_f(client, message):
    """/purge command"""
    i_m_sefg2 = await message.reply_text("Purging...", quote=True)
    if await AdminCheck(client, message.chat.id, message.from_user.id):
        aria_i_p = await aria_start()
        # remove every download currently known to aria2
        downloads = aria_i_p.get_downloads()
        for download in downloads:
            LOGGER.info(download.remove(force=True))
    await i_m_sefg2.delete()


async def incoming_message_f(client, message):
    """/leech command"""
    i_m_sefg = await message.reply_text("processing", quote=True)
    is_zip = False
    is_unzip = False
    is_unrar = False
    is_untar = False
    if len(message.command) > 1:
        if message.command[1] == "archive":
            is_zip = True
        elif message.command[1] == "unzip":
            is_unzip = True
        elif message.command[1] == "unrar":
            is_unrar = True
        elif message.command[1] == "untar":
            is_untar = True
    # get the link from the incoming message
    dl_url, cf_name, _, _ = await extract_link(message.reply_to_message, "LEECH")
    LOGGER.info(dl_url)
    LOGGER.info(cf_name)
    if dl_url is not None:
        await i_m_sefg.edit_text("extracting links")
        # start the aria2c daemon
        aria_i_p = await aria_start()
        LOGGER.info(aria_i_p)
        current_user_id = message.from_user.id
        # create a unique download directory per user and per request
        new_download_location = os.path.join(
            DOWNLOAD_LOCATION,
            str(current_user_id),
            str(time.time())
        )
        if not os.path.isdir(new_download_location):
            os.makedirs(new_download_location)
        await i_m_sefg.edit_text("trying to download")
        # try to download the "link"
        sagtus, err_message = await call_apropriate_function(
            aria_i_p,
            dl_url,
            new_download_location,
            i_m_sefg,
            is_zip,
            cf_name,
            is_unzip,
            is_unrar,
            is_untar,
            message
        )
        if not sagtus:
            # if the download FAILED, display the error message
            await i_m_sefg.edit_text(err_message)
    else:
        await i_m_sefg.edit_text(
            "**FCUK**! What have you entered? \nPlease read /help \n"
            f"<b>API Error</b>: {cf_name}"
        )


#
async def incoming_gdrive_message_f(client, message):
    """/gleech command"""
    i_m_sefg = await message.reply_text("processing", quote=True)
    is_zip = False
    is_unzip = False
    is_unrar = False
    is_untar = False
    if len(message.command) > 1:
        if message.command[1] == "archive":
            is_zip = True
        elif message.command[1] == "unzip":
            is_unzip = True
        elif message.command[1] == "unrar":
            is_unrar = True
        elif message.command[1] == "untar":
            is_untar = True
    # get the link from the incoming message
    dl_url, cf_name, _, _ = await extract_link(message.reply_to_message, "GLEECH")
    LOGGER.info(dl_url)
    LOGGER.info(cf_name)
    if dl_url is not None:
        await i_m_sefg.edit_text("extracting links")
        # start the aria2c daemon
        aria_i_p = await aria_start()
        LOGGER.info(aria_i_p)
        current_user_id = message.from_user.id
        # create a unique download directory per user and per request
        new_download_location = os.path.join(
            DOWNLOAD_LOCATION,
            str(current_user_id),
            str(time.time())
        )
        if not os.path.isdir(new_download_location):
            os.makedirs(new_download_location)
        await i_m_sefg.edit_text("trying to download")
        # try to download the "link"
        await call_apropriate_function_g(
            aria_i_p,
            dl_url,
            new_download_location,
            i_m_sefg,
            is_zip,
            cf_name,
            is_unzip,
            is_unrar,
            is_untar,
            message
        )
    else:
        await i_m_sefg.edit_text(
            "**FCUK**! What have you entered? \nPlease read /help \n"
            f"<b>API Error</b>: {cf_name}"
        )


async def incoming_youtube_dl_f(client, message):
    """/ytdl command"""
    i_m_sefg = await message.reply_text("processing", quote=True)
    # LOGGER.info(message)
    # extract the link from the replied-to message
    dl_url, cf_name, yt_dl_user_name, yt_dl_pass_word = await extract_link(
        message.reply_to_message, "YTDL"
    )
    LOGGER.info(dl_url)
    # if len(message.command) > 1:
    #     if message.command[1] == "gdrive":
    #         with open('blame_my_knowledge.txt', 'w+') as gg:
    #             gg.write("I am noob and don't know what to do that's why I have did this")
    LOGGER.info(cf_name)
    if dl_url is not None:
        await i_m_sefg.edit_text("extracting links")
        current_user_id = message.from_user.id
        # create a unique working directory per user
        user_working_dir = os.path.join(DOWNLOAD_LOCATION, str(current_user_id))
        if not os.path.isdir(user_working_dir):
            os.makedirs(user_working_dir)
        # list the available formats and display them as button markup
        thumb_image, text_message, reply_markup = await extract_youtube_dl_formats(
            dl_url,
            cf_name,
            yt_dl_user_name,
            yt_dl_pass_word,
            user_working_dir
        )
        LOGGER.info(thumb_image)
        if thumb_image is not None:
            # fetch the thumbnail only when one was actually extracted
            req = requests.get(f"{thumb_image}")
            gau_tam = f"{current_user_id}.jpg"
            with open(gau_tam, 'wb') as thumb_file:
                thumb_file.write(req.content)
            await message.reply_photo(
                photo=gau_tam,
                quote=True,
                caption=text_message,
                reply_markup=reply_markup
            )
            await i_m_sefg.delete()
        else:
            await i_m_sefg.edit_text(
                text=text_message,
                reply_markup=reply_markup
            )
    else:
        await i_m_sefg.edit_text(
            "**FCUK**! What have you entered? \nPlease read /help \n"
            f"<b>API Error</b>: {cf_name}"
        )


# playlist
async def g_yt_playlist(client, message):
    """/pytdl command"""
    # i_m_sefg = await message.reply_text("Processing...you should wait🤗", quote=True)
    usr_id = message.from_user.id
    G_DRIVE = False
    if len(message.command) > 1:
        if message.command[1] == "gdrive":
            G_DRIVE = True
    if message.reply_to_message is not None and \
            'youtube.com/playlist' in (message.reply_to_message.text or ""):
        i_m_sefg = await message.reply_text("Downloading...you should wait🤗", quote=True)
        await yt_playlist_downg(message.reply_to_message, i_m_sefg, G_DRIVE)
    else:
        await message.reply_text("Reply to a YouTube playlist link only 🙄")


#
async def g_clonee(client, message):
    """/gclone command"""
    g_id = message.from_user.id
    if message.reply_to_message is not None:
        LOGGER.info(message.reply_to_message.text)
        gclone = CloneHelper(message)
        gclone.config()
        a, h = gclone.get_id()
        LOGGER.info(a)
        LOGGER.info(h)
        await gclone.gcl()
        await gclone.link_gen_size()
    else:
        await message.reply_text(
            "<b>😡 FCUK! What have you entered? 😒 "
            "Reply to a message in the format "
            "[ID of the Gdrive file/folder] [name of the file/folder], "
            "or read the GitHub README for detailed information.</b>"
        )
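None of the diffs shown here include the plugin registration, so for orientation, this is the usual Pyrogram wiring for coroutines like these. The client name, the /savethumbnail command string, and the commented import line are assumptions; the command names /leech, /gleech, /ytdl, /pytdl, /gclone, /purge, and /clearthumbnail come from the docstrings and messages above:

# Hypothetical illustration: registering the handlers added in this commit.
from pyrogram import Client, filters
from pyrogram.handlers import MessageHandler, CallbackQueryHandler

# from tobrot.plugins.<module> import incoming_message_f, ...  (module paths are not shown in this commit)

app = Client("leech-bot")  # api_id / api_hash / bot_token come from the existing session or config

app.add_handler(MessageHandler(incoming_message_f, filters.command("leech")))
app.add_handler(MessageHandler(incoming_gdrive_message_f, filters.command("gleech")))
app.add_handler(MessageHandler(incoming_youtube_dl_f, filters.command("ytdl")))
app.add_handler(MessageHandler(g_yt_playlist, filters.command("pytdl")))
app.add_handler(MessageHandler(g_clonee, filters.command("gclone")))
app.add_handler(MessageHandler(incoming_purge_message_f, filters.command("purge")))
app.add_handler(MessageHandler(save_thumb_nail, filters.command("savethumbnail")))  # command name assumed
app.add_handler(MessageHandler(clear_thumb_nail, filters.command("clearthumbnail")))
app.add_handler(CallbackQueryHandler(button))

app.run()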