Skip to content

Commit

Permalink
Added Yiffer Support, Fixed time issue on E621 & E926
Browse files Browse the repository at this point in the history
Yiffer is now supported
Fixed long sleep time on E621 and E926
  • Loading branch information
Official-Husko committed Jul 18, 2022
1 parent 97c042e commit f0edc0b
Show file tree
Hide file tree
Showing 7 changed files with 68 additions and 10 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,5 @@ modules/__pycache__/
dist/

build/

media/
6 changes: 4 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

Welcome to the successor of the [multporn image downloader][1] [v1][2] & [v2][1] and most downloaders out there regarding "NSFW" material. The NN-Downloader or Naughty-Naughty-Downloader (yes, very creative, I know) supports multiple sites with their official API (if available), supports proxies, and it's also portable.

This is not the complete version and it only works on a [few][13] sites currently. The other parts are WIP and will be complete in the near future (few days to a couple of weeks probably). More Documentation and other gibberish coming soo.
This is not the complete version and it only works on a [few][13] sites currently. The other parts are WIP and will be complete in the near future. More documentation and other gibberish coming soon.

[Download][14]

Expand All @@ -14,12 +14,13 @@ This is not the complete version and it only works on a [few][13] sites currentl
- [E926][5] (API)
- [Furbooru][6] (API)
- [Multporn][7]
- [Yiffer][8]

#### Planned:
- [Yiffer][8]
- [YiffGallery][9]
- ~~[FurryBooru][10]~~ Currently not possible due to cloudflare issues.
- [BooruPlus][11]
- [nHentai][15]

[1]:https://github.com/Official-Husko/multporn-image-downloader-v2
[2]:https://github.com/Official-Husko/multporn-image-downloader
Expand All @@ -35,6 +36,7 @@ This is not the complete version and it only works on a [few][13] sites currentl
[12]:https://booru.plus/
[13]:https://github.com/Official-Husko/NN-Downloader#currently-supported=
[14]:https://github.com/Official-Husko/NN-Downloader/releases/latest
[15]:https://nhentai.net/

Further sites can be added. Just open a [support ticket][11] with the url to the site.

Expand Down
10 changes: 6 additions & 4 deletions main.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
from modules import E621, RULE34, ProxyScraper, FURBOORU, E926
from modules import E621, RULE34, ProxyScraper, FURBOORU, E926, Multporn, Yiffer
import json
import os
from termcolor import colored
from ctypes import windll
from time import sleep
from sys import exit

from modules.multporn import Multporn


version = "1.1.0"
windll.kernel32.SetConsoleTitleW(f"NN-Downloader | v{version}")
Expand Down Expand Up @@ -93,7 +91,7 @@ def main_startup():
Main.main_startup()
print("")

if site == "multporn":
if site in ["multporn", "yiffer"]:
pass
else:
print(colored("Please enter the tags you want to use", "green"))
Expand Down Expand Up @@ -137,6 +135,10 @@ def main_startup():
print(colored("Please enter the link. (e.g. https://multporn.net/comics/double_trouble_18)", "green"))
URL = input(">> ")
Multporn.Fetcher(proxy_list=proxy_list, user_proxies=user_proxies, header=header, URL=URL)
elif site == "yiffer":
print(colored("Please enter the link. (e.g. https://yiffer.xyz/Howl & Jasper)", "green"))
URL = input(">> ")
Yiffer.Fetcher(proxy_list=proxy_list, user_proxies=user_proxies, header=header, URL=URL)


else:
Expand Down
3 changes: 2 additions & 1 deletion modules/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@
from .proxyscraper import ProxyScraper
from .furbooru import FURBOORU
from .e926 import E926
from .multporn import Multporn
from .multporn import Multporn
from .yiffer import Yiffer
2 changes: 1 addition & 1 deletion modules/e621.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def Fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, apiU

if req["posts"] == []:
print(colored("No images found! Try different tags.", "yellow"))
sleep(999)
sleep(5)
break

elif page == max_sites:
Expand Down
2 changes: 1 addition & 1 deletion modules/e926.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def Fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, apiU

if req["posts"] == []:
print(colored("No images found! Try different tags.", "yellow"))
sleep(999)
sleep(5)
break

elif page == max_sites:
Expand Down
53 changes: 52 additions & 1 deletion modules/yiffer.py
Original file line number Diff line number Diff line change
@@ -1 +1,52 @@
# Phew a bit empty in here isn't it?
import requests
import random
from termcolor import colored
from time import sleep
from alive_progress import alive_bar
import os

class Yiffer():
    def Fetcher(proxy_list, user_proxies, header, URL):
        """Download every page of a yiffer.xyz comic into media/<title>/.

        Args:
            proxy_list: list of proxy dicts usable by requests (consulted only
                when user_proxies is True).
            user_proxies: when True, each HTTP request goes through a random
                proxy from proxy_list; otherwise requests are direct and
                rate-limited with a 1 s sleep per page.
            header: HTTP header dict sent with the metadata API request.
            URL: comic URL, e.g. https://yiffer.xyz/Howl%20%26%20Jasper
        """
        # Decode percent-escapes so the title segment matches what the API expects.
        URL = requests.utils.unquote(URL, encoding='utf-8', errors='replace')
        title = URL.split("/")[3]

        # Ask the API how many pages the comic has.
        api_url = f"https://yiffer.xyz/api/comics/{title}"
        if user_proxies == True:
            proxy = random.choice(proxy_list)
            req = requests.get(api_url, headers=header, proxies=proxy).json()
        else:
            req = requests.get(api_url, headers=header).json()
        pages = req["numberOfPages"]

        # Create the output directory once, up front. makedirs also creates the
        # parent "media/" folder, which a bare os.mkdir would trip over on a
        # fresh checkout (media/ is gitignored, so it usually doesn't exist).
        os.makedirs(f"media/{title}", exist_ok=True)

        with alive_bar(pages, calibrate=1, dual_line=True, title='Downloading') as bar:
            bar.text = f'-> Downloading: {title}, please wait...'
            for page in range(1, pages + 1):
                # Static host names pages with 3-digit zero padding: 001.jpg ... 123.jpg
                img_url = f"https://static.yiffer.xyz/comics/{title}/{page:03d}.jpg"
                if user_proxies == True:
                    proxy = random.choice(proxy_list)
                    img_data = requests.get(img_url, proxies=proxy).content
                else:
                    # Be polite to the static host when not rotating proxies.
                    sleep(1)
                    img_data = requests.get(img_url).content
                with open(f"media/{title}/{str(page)}.jpg", "wb") as handler:
                    handler.write(img_data)
                bar()
        print("[ " + colored("i","blue") + " ] " + f"Completed downloading {title}!")
        sleep(5)

0 comments on commit f0edc0b

Please sign in to comment.