Skip to content
This repository has been archived by the owner on Nov 16, 2024. It is now read-only.

Commit

Permalink
v.1.3.4 - Improved file_name scraping
Browse files Browse the repository at this point in the history
  • Loading branch information
rix committed May 26, 2024
1 parent 555dbd8 commit e236f10
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 11 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@ A simple proxy for slow AltStore servers

# Features

- Meant for use as custom Repo with AltStore Beta
- Useful for manual sideloading / automatic update notifications with AltStore
- Meant for use as custom Repo with AltStore Beta to facilitate faster downloading of apps
- Useful for manual app sideloading with AltStore (non-Beta) when paired with a Discord webhook for update notifications

# Setup

Expand Down
2 changes: 1 addition & 1 deletion altstore_proxy/providers/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@


def get_version():
    """Return the current application version string."""
    # The stale "1.3.3" return made the updated value unreachable;
    # this release is 1.3.4.
    return "1.3.4"


def create_version_file():
Expand Down
33 changes: 25 additions & 8 deletions altstore_proxy/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,17 +38,34 @@ def serve_forever(self):
self.server.serve_forever()


def download_and_cache_ipa(url):
def determine_file_name_from_stream(response):
    """Extract a file name from a response's Content-Disposition header.

    Parameters:
        response: an HTTP response object exposing a ``headers`` mapping
            (e.g. a ``requests.Response``).

    Returns:
        The file name announced by the server, with surrounding quotes,
        whitespace, and trailing header parameters stripped, or ``None``
        when the header is absent or carries no ``filename=`` parameter.
    """
    content_disposition = response.headers.get('content-disposition')
    # Guard against a missing header AND a header without a filename
    # parameter — the previous split()[1] raised IndexError on the latter.
    if not content_disposition or "filename=" not in content_disposition:
        return None
    filename = content_disposition.split("filename=", 1)[1]
    # Per RFC 6266 the value may be quoted and may be followed by further
    # parameters (e.g. `; filename*=UTF-8''...`) — cut at the first ';'
    # and strip quotes/whitespace.
    filename = filename.split(";", 1)[0].strip().strip('"').strip("'")
    return filename or None


def download_and_cache_ipa(app):
url = app['downloadURL']

response = requests.get(url, stream=True, allow_redirects=True)
total_size_in_bytes = int(response.headers.get('content-length', 0))

# Resolve the actual URL if the provided URL is a shortened URL
if "tinyurl.com" in url:
if response.url != url:
url = response.url
response = requests.get(url, stream=True, allow_redirects=False)

file_path = os.path.join(shared_state.values["cache"], os.path.basename(url))
file_name = os.path.basename(file_path)
file_name = determine_file_name_from_stream(response)
if not file_name:
file_name = os.path.basename(url)
if not file_name:
file_name = f"{app['name']}_{app['version']}".translate(str.maketrans(" :/", "___"))
if not file_name.endswith(".ipa"):
file_name += ".ipa"

file_path = os.path.join(shared_state.values["cache"], file_name)

os.makedirs(os.path.dirname(file_path), exist_ok=True)

Expand All @@ -72,7 +89,7 @@ def download_and_cache_ipa(url):
return file_name, False


def update_json_proxy(shared_state_dict, shared_state_lock):
def cache_repositories(shared_state_dict, shared_state_lock):
shared_state.set_state(shared_state_dict, shared_state_lock)

try:
Expand All @@ -93,7 +110,7 @@ def update_json_proxy(shared_state_dict, shared_state_lock):
data = response.json()
for app in data['apps']:
print("Found " + app['name'] + ", v." + app['version'])
app['filename'], skipped = download_and_cache_ipa(app['downloadURL'])
app['filename'], skipped = download_and_cache_ipa(app)
app['downloadURL'] = shared_state.values["baseurl"] + '/cache/' + app['filename']

if not skipped:
Expand Down Expand Up @@ -252,7 +269,7 @@ def status():
print("[AntiGateHandler] status - Error: " + str(e))
return abort(503, "Cache not initialized. Please try again later.")

hourly_update = multiprocessing.Process(target=update_json_proxy, args=(shared_state_dict, shared_state_lock,))
hourly_update = multiprocessing.Process(target=cache_repositories, args=(shared_state_dict, shared_state_lock,))
hourly_update.start()

print(
Expand Down

0 comments on commit e236f10

Please sign in to comment.