Commit f0fd64d

Clean up the file upload interface with FileVar class (#549)
1 parent 9a84f0b commit f0fd64d
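
In short, this commit replaces passing opened file objects through `variable_values` with a `FileVar` wrapper holding the file path (or a stream) plus optional `filename` and `content_type`; the transport now opens and closes the files itself. A minimal before/after sketch assembled from the documentation diff below (client, transport and query setup omitted):

    # Before: the caller opens the file and passes the file object
    with open("YOUR_FILE_PATH", "rb") as f:
        params = {"file": f}
        result = client.execute(
            query, variable_values=params, upload_files=True
        )

    # After: wrap the path in a FileVar; the transport opens and closes it
    from gql import FileVar

    params = {"file": FileVar("YOUR_FILE_PATH")}
    result = client.execute(
        query, variable_values=params, upload_files=True
    )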

File tree: 13 files changed (+1289, -992 lines)


docs/usage/file_upload.rst

Lines changed: 58 additions & 40 deletions
@@ -14,11 +14,14 @@ Single File
 In order to upload a single file, you need to:

 * set the file as a variable value in the mutation
-* provide the opened file to the `variable_values` argument of `execute`
+* create a :class:`FileVar <gql.FileVar>` object with your file path
+* provide the `FileVar` instance to the `variable_values` argument of `execute`
 * set the `upload_files` argument to True

 .. code-block:: python

+    from gql import client, gql, FileVar
+
     transport = AIOHTTPTransport(url='YOUR_URL')
     # Or transport = RequestsHTTPTransport(url='YOUR_URL')
     # Or transport = HTTPXTransport(url='YOUR_URL')
@@ -34,32 +37,38 @@ In order to upload a single file, you need to:
       }
     ''')

-    with open("YOUR_FILE_PATH", "rb") as f:
-
-        params = {"file": f}
+    params = {"file": FileVar("YOUR_FILE_PATH")}

-        result = client.execute(
-            query, variable_values=params, upload_files=True
-        )
+    result = client.execute(
+        query, variable_values=params, upload_files=True
+    )

 Setting the content-type
 ^^^^^^^^^^^^^^^^^^^^^^^^

 If you need to set a specific Content-Type attribute to a file,
-you can set the :code:`content_type` attribute of the file like this:
+you can set the :code:`content_type` attribute of :class:`FileVar <gql.FileVar>`:

 .. code-block:: python

-    with open("YOUR_FILE_PATH", "rb") as f:
+    # Setting the content-type to a pdf file for example
+    filevar = FileVar(
+        "YOUR_FILE_PATH",
+        content_type="application/pdf",
+    )

-        # Setting the content-type to a pdf file for example
-        f.content_type = "application/pdf"
+Setting the uploaded file name
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-        params = {"file": f}
+To modify the uploaded filename, use the :code:`filename` attribute of :class:`FileVar <gql.FileVar>`:

-        result = client.execute(
-            query, variable_values=params, upload_files=True
-        )
+.. code-block:: python
+
+    # Setting the content-type to a pdf file for example
+    filevar = FileVar(
+        "YOUR_FILE_PATH",
+        filename="filename1.txt",
+    )

 File list
 ---------
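
A `FileVar` can carry both attributes at once, since `filename` and `content_type` are independent keyword arguments in the snippets above. A small illustrative sketch, reusing the client and query from the single-file example (path and values are placeholders):

    from gql import FileVar

    # One FileVar with both an explicit uploaded filename and a content-type
    filevar = FileVar(
        "YOUR_FILE_PATH",
        filename="report.pdf",
        content_type="application/pdf",
    )

    params = {"file": filevar}
    result = client.execute(
        query, variable_values=params, upload_files=True
    )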
@@ -68,6 +77,8 @@ It is also possible to upload multiple files using a list.

 .. code-block:: python

+    from gql import client, gql, FileVar
+
     transport = AIOHTTPTransport(url='YOUR_URL')
     # Or transport = RequestsHTTPTransport(url='YOUR_URL')
     # Or transport = HTTPXTransport(url='YOUR_URL')
@@ -83,18 +94,15 @@ It is also possible to upload multiple files using a list.
       }
     ''')

-    f1 = open("YOUR_FILE_PATH_1", "rb")
-    f2 = open("YOUR_FILE_PATH_2", "rb")
+    f1 = FileVar("YOUR_FILE_PATH_1")
+    f2 = FileVar("YOUR_FILE_PATH_2")

     params = {"files": [f1, f2]}

     result = client.execute(
         query, variable_values=params, upload_files=True
     )

-    f1.close()
-    f2.close()
-

 Streaming
 ---------
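
For reference, a self-contained version of the file-list example above with the imports and client construction filled in; the mutation text and URL are placeholders, not the ones from the original documentation page:

    from gql import Client, gql, FileVar
    from gql.transport.aiohttp import AIOHTTPTransport

    transport = AIOHTTPTransport(url="YOUR_URL")
    client = Client(transport=transport)

    # Placeholder mutation using the common Upload scalar convention
    query = gql('''
      mutation($files: [Upload!]!) {
        uploadFiles(files: $files) {
          success
        }
      }
    ''')

    params = {"files": [FileVar("YOUR_FILE_PATH_1"), FileVar("YOUR_FILE_PATH_2")]}

    result = client.execute(
        query, variable_values=params, upload_files=True
    )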
@@ -120,18 +128,8 @@ Streaming local files
 aiohttp allows to upload files using an asynchronous generator.
 See `Streaming uploads on aiohttp docs`_.

-
-In order to stream local files, instead of providing opened files to the
-`variable_values` argument of `execute`, you need to provide an async generator
-which will provide parts of the files.
-
-You can use `aiofiles`_
-to read the files in chunks and create this asynchronous generator.
-
-.. _Streaming uploads on aiohttp docs: https://docs.aiohttp.org/en/stable/client_quickstart.html#streaming-uploads
-.. _aiofiles: https://github.com/Tinche/aiofiles
-
-Example:
+From gql version 4.0, it is possible to activate file streaming simply by
+setting the `streaming` argument of :class:`FileVar <gql.FileVar>` to `True`

 .. code-block:: python

@@ -147,18 +145,38 @@ Example:
       }
     ''')

+    f1 = FileVar(
+        file_name='YOUR_FILE_PATH',
+        streaming=True,
+    )
+
+    params = {"file": f1}
+
+    result = client.execute(
+        query, variable_values=params, upload_files=True
+    )
+
+Another option is to use an async generator to provide parts of the file.
+
+You can use `aiofiles`_
+to read the files in chunks and create this asynchronous generator.
+
+.. _Streaming uploads on aiohttp docs: https://docs.aiohttp.org/en/stable/client_quickstart.html#streaming-uploads
+.. _aiofiles: https://github.com/Tinche/aiofiles
+
+.. code-block:: python
+
     async def file_sender(file_name):
         async with aiofiles.open(file_name, 'rb') as f:
-            chunk = await f.read(64*1024)
-            while chunk:
-                yield chunk
-                chunk = await f.read(64*1024)
+            while chunk := await f.read(64*1024):
+                yield chunk

-    params = {"file": file_sender(file_name='YOUR_FILE_PATH')}
+    f1 = FileVar(file_sender(file_name='YOUR_FILE_PATH'))
+    params = {"file": f1}

     result = client.execute(
-            query, variable_values=params, upload_files=True
-        )
+        query, variable_values=params, upload_files=True
+    )

 Streaming downloaded files
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
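
The async-generator variant above relies on `aiofiles` and an async session, neither of which is shown in the hunk; a self-contained sketch under those assumptions (URL and mutation are placeholders):

    import asyncio

    import aiofiles
    from gql import Client, gql, FileVar
    from gql.transport.aiohttp import AIOHTTPTransport

    async def file_sender(file_name):
        # Yield the file in 64 KiB chunks so aiohttp can stream the upload
        async with aiofiles.open(file_name, "rb") as f:
            while chunk := await f.read(64 * 1024):
                yield chunk

    async def main():
        transport = AIOHTTPTransport(url="YOUR_URL")
        async with Client(transport=transport) as session:
            # Placeholder mutation using the Upload scalar convention
            query = gql('''
              mutation($file: Upload!) {
                singleUpload(file: $file) {
                  id
                }
              }
            ''')
            params = {"file": FileVar(file_sender("YOUR_FILE_PATH"))}
            result = await session.execute(
                query, variable_values=params, upload_files=True
            )

    asyncio.run(main())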
@@ -200,7 +218,7 @@ Example:
       }
     ''')

-    params = {"file": resp.content}
+    params = {"file": FileVar(resp.content)}

     result = client.execute(
         query, variable_values=params, upload_files=True
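
In the last hunk, `resp.content` is the body stream of a file being downloaded with aiohttp and forwarded directly into the upload. A rough sketch of the surrounding pattern; `session` and `query` are assumed to come from a gql async session as in the previous sketch, and the download URL is a placeholder:

    import aiohttp
    from gql import FileVar

    async def proxy_file(session, query):
        # "session" is a gql async session, "query" a gql() mutation,
        # as in the previous sketch; both are assumptions here
        async with aiohttp.ClientSession() as http:
            async with http.get("YOUR_DOWNLOAD_URL") as resp:
                # resp.content is an aiohttp StreamReader; FileVar forwards
                # it to the multipart upload
                params = {"file": FileVar(resp.content)}
                return await session.execute(
                    query, variable_values=params, upload_files=True
                )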

gql/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -11,10 +11,12 @@
 from .client import Client
 from .gql import gql
 from .graphql_request import GraphQLRequest
+from .transport.file_upload import FileVar

 __all__ = [
     "__version__",
     "gql",
     "Client",
     "GraphQLRequest",
+    "FileVar",
 ]
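
With this export, `FileVar` can be imported directly from the package root alongside the other public names (note that the documentation snippets above write `from gql import client, ...`, while the name listed in `__all__` is `Client`):

    from gql import Client, gql, FileVar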

gql/transport/aiohttp.py

Lines changed: 59 additions & 44 deletions
@@ -23,7 +23,6 @@
 from graphql import DocumentNode, ExecutionResult, print_ast
 from multidict import CIMultiDictProxy

-from ..utils import extract_files
 from .appsync_auth import AppSyncAuthentication
 from .async_transport import AsyncTransport
 from .common.aiohttp_closed_event import create_aiohttp_closed_event
@@ -33,6 +32,7 @@
     TransportProtocolError,
     TransportServerError,
 )
+from .file_upload import FileVar, close_files, extract_files, open_files

 log = logging.getLogger(__name__)

@@ -207,6 +207,10 @@ async def execute(
                 file_classes=self.file_classes,
             )

+            # Opening the files using the FileVar parameters
+            open_files(list(files.values()), transport_supports_streaming=True)
+            self.files = files
+
             # Save the nulled variable values in the payload
             payload["variables"] = nulled_variable_values

@@ -220,8 +224,8 @@
             file_map = {str(i): [path] for i, path in enumerate(files)}

             # Enumerate the file streams
-            # Will generate something like {'0': <_io.BufferedReader ...>}
-            file_streams = {str(i): files[path] for i, path in enumerate(files)}
+            # Will generate something like {'0': FileVar object}
+            file_vars = {str(i): files[path] for i, path in enumerate(files)}

             # Add the payload to the operations field
             operations_str = self.json_serialize(payload)
@@ -235,12 +239,15 @@
             log.debug("file_map %s", file_map_str)
             data.add_field("map", file_map_str, content_type="application/json")

-            # Add the extracted files as remaining fields
-            for k, f in file_streams.items():
-                name = getattr(f, "name", k)
-                content_type = getattr(f, "content_type", None)
+            for k, file_var in file_vars.items():
+                assert isinstance(file_var, FileVar)

-                data.add_field(k, f, filename=name, content_type=content_type)
+                data.add_field(
+                    k,
+                    file_var.f,
+                    filename=file_var.filename,
+                    content_type=file_var.content_type,
+                )

             post_args: Dict[str, Any] = {"data": data}

@@ -267,51 +274,59 @@ async def execute(
         if self.session is None:
             raise TransportClosed("Transport is not connected")

-        async with self.session.post(self.url, ssl=self.ssl, **post_args) as resp:
-
-            # Saving latest response headers in the transport
-            self.response_headers = resp.headers
+        try:
+            async with self.session.post(self.url, ssl=self.ssl, **post_args) as resp:

-            async def raise_response_error(
-                resp: aiohttp.ClientResponse, reason: str
-            ) -> NoReturn:
-                # We raise a TransportServerError if the status code is 400 or higher
-                # We raise a TransportProtocolError in the other cases
+                # Saving latest response headers in the transport
+                self.response_headers = resp.headers

-                try:
-                    # Raise a ClientResponseError if response status is 400 or higher
-                    resp.raise_for_status()
-                except ClientResponseError as e:
-                    raise TransportServerError(str(e), e.status) from e
-
-                result_text = await resp.text()
-                raise TransportProtocolError(
-                    f"Server did not return a GraphQL result: "
-                    f"{reason}: "
-                    f"{result_text}"
-                )
+                async def raise_response_error(
+                    resp: aiohttp.ClientResponse, reason: str
+                ) -> NoReturn:
+                    # We raise a TransportServerError if status code is 400 or higher
+                    # We raise a TransportProtocolError in the other cases

-            try:
-                result = await resp.json(loads=self.json_deserialize, content_type=None)
+                    try:
+                        # Raise ClientResponseError if response status is 400 or higher
+                        resp.raise_for_status()
+                    except ClientResponseError as e:
+                        raise TransportServerError(str(e), e.status) from e

-                if log.isEnabledFor(logging.INFO):
                     result_text = await resp.text()
-                    log.info("<<< %s", result_text)
+                    raise TransportProtocolError(
+                        f"Server did not return a GraphQL result: "
+                        f"{reason}: "
+                        f"{result_text}"
+                    )

-            except Exception:
-                await raise_response_error(resp, "Not a JSON answer")
+                try:
+                    result = await resp.json(
+                        loads=self.json_deserialize, content_type=None
+                    )

-            if result is None:
-                await raise_response_error(resp, "Not a JSON answer")
+                    if log.isEnabledFor(logging.INFO):
+                        result_text = await resp.text()
+                        log.info("<<< %s", result_text)

-            if "errors" not in result and "data" not in result:
-                await raise_response_error(resp, 'No "data" or "errors" keys in answer')
+                except Exception:
+                    await raise_response_error(resp, "Not a JSON answer")

-            return ExecutionResult(
-                errors=result.get("errors"),
-                data=result.get("data"),
-                extensions=result.get("extensions"),
-            )
+                if result is None:
+                    await raise_response_error(resp, "Not a JSON answer")
+
+                if "errors" not in result and "data" not in result:
+                    await raise_response_error(
+                        resp, 'No "data" or "errors" keys in answer'
+                    )
+
+                return ExecutionResult(
+                    errors=result.get("errors"),
+                    data=result.get("data"),
+                    extensions=result.get("extensions"),
+                )
+        finally:
+            if upload_files:
+                close_files(list(self.files.values()))

     def subscribe(
         self,
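
Pieced together from the hunks above, the transport-side lifecycle is: extract the `FileVar` objects from the variables, open them, attach each one to the multipart form, then close them in the `finally` block. A condensed, illustrative sketch using only the helpers imported in this diff (`gql.transport.file_upload`); it is not the transport's exact code path:

    import aiohttp
    from gql.transport.file_upload import FileVar, close_files, open_files

    file_vars = {"0": FileVar("YOUR_FILE_PATH", content_type="text/plain")}

    # The transport opens the files itself (aiohttp supports streaming parts)
    open_files(list(file_vars.values()), transport_supports_streaming=True)
    try:
        data = aiohttp.FormData()
        for k, file_var in file_vars.items():
            # .f is the opened stream; filename/content_type set the part headers
            data.add_field(
                k,
                file_var.f,
                filename=file_var.filename,
                content_type=file_var.content_type,
            )
        # ... the FormData is then POSTed as a multipart GraphQL upload request
    finally:
        # Files opened by the transport are closed after the request
        close_files(list(file_vars.values()))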

0 commit comments
