|
| 1 | +from urllib.parse import quote_plus |
| 2 | + |
| 3 | +from bs4 import BeautifulSoup |
| 4 | + |
| 5 | +import cloudscraper |
| 6 | + |
# Root of the site being scraped; all relative hrefs are joined onto this.
BASE_URL = "https://www.apkmirror.com"
# Search endpoint restricted to APK release posts; the query string is appended.
BASE_SEARCH = f"{BASE_URL}/?post_type=app_release&searchtype=apk&s="
# Mobile Chrome UA — APKMirror serves different (blocked) markup to unknown agents.
USER_AGENT_STRING = "Mozilla/5.0 (Linux; Android 10; K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Mobile Safari/537.36"
HEADERS = {
    "User-Agent": USER_AGENT_STRING
}

# Shared session; cloudscraper transparently solves Cloudflare's JS challenge.
scraper = cloudscraper.create_scraper()
def search(query):
    """Search APKMirror for *query* and return up to 5 matching apps.

    Args:
        query: Free-text search term; URL-encoded before the request.

    Returns:
        A list of at most 5 dicts with keys "name" (app title),
        "link" (absolute release-page URL) and "image" (absolute icon URL
        upscaled to 96x96). Rows missing any expected element are skipped.
    """
    search_url = BASE_SEARCH + quote_plus(query)
    resp = scraper.get(search_url, headers=HEADERS)
    print(resp.status_code)  # debug: surfaces Cloudflare / HTTP failures

    soup = BeautifulSoup(resp.text, "html.parser")
    apps = []
    for row in soup.find_all("div", {"class": "appRow"}):
        try:
            title = row.find("h5", {"class": "appRowTitle"})
            link = row.find("a", {"class": "downloadLink"})
            icon = row.find("img", {"class": "ellipsisText"})
            apps.append({
                "name": title.text.strip(),
                "link": BASE_URL + link["href"],
                # Ask the CDN for a 96x96 icon instead of the 32x32 default.
                "image": BASE_URL + icon["src"].replace("h=32", "h=96").replace("w=32", "w=96"),
            })
        except (AttributeError, TypeError):
            # A missing element makes find() return None: .text then raises
            # AttributeError, but subscripting (None["href"]) raises TypeError.
            # The original caught only AttributeError, so a row with a missing
            # link/img crashed the whole search instead of being skipped.
            continue

    return apps[:5]
| 40 | + |
def get_app_details(app_url=None):
    """Fetch a release page and extract details of its first variant row.

    Args:
        app_url: Absolute URL of an APKMirror release page. Defaults to the
            top search result for "discord" (the original hard-coded behavior).

    Returns:
        Tuple of (architecture, android_version, dpi, download_link), all
        strings; download_link is an absolute URL to the variant's page.
    """
    if app_url is None:
        # Preserve the original behavior when called with no argument.
        app_url = search("discord")[0]["link"]

    resp = scraper.get(app_url, headers=HEADERS)
    print(resp.status_code)  # debug: surfaces Cloudflare / HTTP failures

    soup = BeautifulSoup(resp.text, "html.parser")

    # [1] skips the header row of the variants table.
    data = soup.find_all("div", {"class": ["table-row", "headerFont"]})[1]

    # Hoist the cell query — the original ran this identical find_all 3 times.
    cells = data.find_all(
        "div",
        {"class": ["table-cell", "rowheight", "addseparator", "expand", "pad", "dowrap"]},
    )
    architecture = cells[1].text.strip()
    android_version = cells[2].text.strip()
    dpi = cells[3].text.strip()
    download_link = BASE_URL + data.find_all("a", {"class": "accent_color"})[0]["href"]

    return architecture, android_version, dpi, download_link
| 56 | + |
def get_download_link(page_url=None):
    """Return the href of the first "downloadButton" anchor on a variant page.

    Args:
        page_url: Absolute URL of a variant download page. Defaults to the
            link from get_app_details() (the original hard-coded behavior).

    Returns:
        The button's href as found in the page (site-relative path).
    """
    if page_url is None:
        # Preserve the original behavior when called with no argument.
        page_url = get_app_details()[3]

    resp = scraper.get(page_url, headers=HEADERS)
    print(resp.status_code)  # debug: surfaces Cloudflare / HTTP failures

    soup = BeautifulSoup(resp.text, "html.parser")
    return soup.find_all("a", {"class": "downloadButton"})[0]["href"]
| 64 | + |
def get_direct_download_link(button_url=None):
    """Resolve the final direct-download URL behind the download button.

    Args:
        button_url: URL of the page the download button points at. Defaults
            to get_download_link() (the original hard-coded behavior).

    Returns:
        The href of the anchor pointing at APKMirror's download.php handler.

    Raises:
        TypeError: if no matching anchor is found (find() returns None,
            which is then subscripted).
    """
    if button_url is None:
        # Preserve the original behavior when called with no argument.
        button_url = get_download_link()

    resp = scraper.get(button_url, headers=HEADERS)
    print(resp.status_code)  # debug: surfaces Cloudflare / HTTP failures

    soup = BeautifulSoup(resp.text, "html.parser")

    # The direct link is the nofollow anchor whose href goes through the
    # theme's download.php handler; data-google-vignette="false" excludes
    # ad anchors that share the same rel attribute.
    anchor = soup.find(
        'a',
        {
            'rel': 'nofollow',
            'data-google-vignette': 'false',
            'href': lambda href: href and '/wp-content/themes/APKMirror/download.php' in href,
        },
    )
    return anchor["href"]
| 75 | + |
def main():
    """Demo entry point: print the download-page link for the top "discord" hit."""
    link = get_download_link()
    print(link)


if __name__ == "__main__":
    main()
0 commit comments