forked from devmanorg/async-download-service
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathserver.py
123 lines (97 loc) · 3.36 KB
/
server.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
import os.path
import datetime
import argparse
import logging
import asyncio
from functools import partial
from aiohttp import web
import aiofiles
LOGGING_LEVELS = [logging.CRITICAL, logging.INFO]
def get_args():
    """Parse command-line options for the async downloader.

    Returns an argparse.Namespace with:
        logs (int): 0 or 1, index into LOGGING_LEVELS (0 by default).
        delay (float): pause between streamed chunks, seconds.
        dir_img (str | None): path to the folder with photos.
    """
    parser = argparse.ArgumentParser(description='Async downloader')
    parser.add_argument(
        '--logs',
        type=int,
        default=0,
        choices=[0, 1],
        help='Logging the process (0 or 1, 0 by default)',
    )
    parser.add_argument(
        '--delay',
        type=float,
        default=0.001,
        help='Delay response, sec (0.001 by default)',
    )
    parser.add_argument(
        '--dir_img',
        type=str,
        help='Folder with photos',
    )
    return parser.parse_args()
async def archivate(delay, dir_img, request):
    '''Asynchronously archive a directory on the fly and stream it to the client.

    Args:
        delay: pause in seconds between sent chunks (throttles the stream).
        dir_img: root folder that contains one subfolder per archive hash.
        request: aiohttp request; the ``archive_hash`` match-info segment
            names the subfolder to archive.

    Returns:
        web.StreamResponse streaming the zip, or web.HTTPNotFound when the
        requested subfolder does not exist.
    '''
    archive_hash = request.match_info['archive_hash']
    dir_img_path = os.path.join(dir_img, archive_hash)
    if not os.path.exists(dir_img_path):
        return web.HTTPNotFound(text='Error 404: archive does not exists.')
    # Create stream object and send headers before the body.
    resp = web.StreamResponse()
    resp.headers['Content-Type'] = 'application/zip'
    resp.headers['Content-Disposition'] = 'attachment; filename="archive.zip"'
    await resp.prepare(request)
    # Use an argument list (exec) instead of a shell string: archive_hash
    # comes straight from the URL, so interpolating it into a shell command
    # would allow shell injection.
    process = await asyncio.create_subprocess_exec(
        'zip', '-r', '-', dir_img_path,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    try:
        while True:
            # Read chunks from the zip process stdout until EOF, forwarding
            # each one to the client with an optional throttling delay.
            archive_chunk = await process.stdout.readline()
            if not archive_chunk:
                break
            await resp.write(archive_chunk)
            # Lazy %-style args: the message is only formatted if INFO is enabled.
            logging.info(
                'Sending archive %s, chunk %d bytes',
                archive_hash,
                len(archive_chunk),
            )
            await asyncio.sleep(delay)
        await resp.write_eof()
    except asyncio.CancelledError:
        # Client disconnected mid-download. Re-raise so the cancellation
        # propagates — swallowing CancelledError breaks task cancellation.
        raise
    finally:
        # On the normal path the zip process has already exited; kill() on an
        # exited process raises ProcessLookupError, so only kill while alive.
        if process.returncode is None:
            process.kill()
        await process.wait()
        resp.force_close()
    return resp
async def handle_index_page(request):
    """Serve the site's front page: read index.html and return it as HTML."""
    async with aiofiles.open('index.html', mode='r') as page_file:
        markup = await page_file.read()
    response = web.Response(text=markup, content_type='text/html')
    return response
if __name__ == '__main__':
    # Parse CLI options and validate the photos directory before serving.
    cli = get_args()
    if not cli.dir_img:
        exit('Script argument required (path to folder with photos)')
    if not os.path.isdir(cli.dir_img):
        exit('Folder does not exists')
    logging.basicConfig(
        level=LOGGING_LEVELS[cli.logs],
        format='%(asctime)s,%(msecs)d %(levelname)s: %(message)s',
        datefmt='%H:%M:%S',
    )
    # Pre-bind CLI settings so aiohttp can invoke the handler with just the request.
    archive_handler = partial(archivate, cli.delay, cli.dir_img)
    app = web.Application()
    app.add_routes([
        web.get('/', handle_index_page),
        web.get('/archive/{archive_hash}/', archive_handler),
    ])
    web.run_app(app)