
mypy specific ignores
derlih committed Jan 18, 2021
1 parent 60447aa commit d559a39
Showing 30 changed files with 183 additions and 137 deletions.
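The change replaces bare `# type: ignore` comments with error-code-qualified ones (e.g. `# type: ignore[assignment]`), so each comment silences only the diagnostic it was added for; where possible, ignores are dropped entirely in favour of `cast()`, `assert`, `isinstance()` narrowing, or `@overload`. A minimal sketch of the difference, assuming mypy is run with error codes displayed (`show_error_codes = True`); the module below is hypothetical:

```python
# hypothetical_module.py -- bare vs. error-code-specific ignores.
try:
    from ssl import SSLContext
except ImportError:  # pragma: no cover
    # A bare "# type: ignore" would hide every error mypy ever reports on
    # this line.  Naming only the codes mypy reports here (per the hunks
    # below, "assignment" and "misc") keeps new, unrelated errors visible.
    SSLContext = object  # type: ignore[assignment,misc]
```

With `warn_unused_ignores = True`, mypy also reports ignore comments that no longer suppress anything, which is much easier to audit when every ignore names its error code.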
17 changes: 9 additions & 8 deletions aiohttp/client.py
@@ -27,6 +27,7 @@
Type,
TypeVar,
Union,
cast,
)

from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
@@ -133,7 +134,7 @@
try:
from ssl import SSLContext
except ImportError: # pragma: no cover
SSLContext = object # type: ignore
SSLContext = object # type: ignore[assignment,misc]


@dataclasses.dataclass(frozen=True)
@@ -245,10 +246,10 @@ def __init__(
self._default_auth = auth
self._version = version
self._json_serialize = json_serialize
if timeout is sentinel:
self._timeout = DEFAULT_TIMEOUT
if isinstance(timeout, ClientTimeout):
self._timeout = timeout
else:
self._timeout = timeout # type: ignore
self._timeout = DEFAULT_TIMEOUT
self._raise_for_status = raise_for_status
self._auto_decompress = auto_decompress
self._trust_env = trust_env
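The `__init__` hunk above drops its ignore by letting `isinstance()` do the narrowing: inside the branch mypy knows `timeout` is a `ClientTimeout`, and everything else (including the sentinel) falls back to the default. A self-contained sketch of that pattern, using a simplified stand-in for `ClientTimeout`:

```python
import dataclasses
from typing import Any


@dataclasses.dataclass(frozen=True)
class ClientTimeout:
    """Simplified stand-in for aiohttp.ClientTimeout."""
    total: float = 5 * 60


DEFAULT_TIMEOUT = ClientTimeout()
sentinel: Any = object()


def resolve_timeout(timeout: Any = sentinel) -> ClientTimeout:
    # isinstance() narrows `timeout` to ClientTimeout in this branch, so the
    # return type-checks without a "# type: ignore"; the sentinel and any
    # other value fall through to the default.
    if isinstance(timeout, ClientTimeout):
        return timeout
    return DEFAULT_TIMEOUT


print(resolve_timeout())                         # ClientTimeout(total=300)
print(resolve_timeout(ClientTimeout(total=10)))  # ClientTimeout(total=10)
```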
@@ -378,10 +379,10 @@ async def _request(
raise InvalidURL(proxy) from e

if timeout is sentinel:
real_timeout = self._timeout # type: ClientTimeout
real_timeout: ClientTimeout = self._timeout
else:
if not isinstance(timeout, ClientTimeout):
real_timeout = ClientTimeout(total=timeout) # type: ignore
real_timeout = ClientTimeout(total=cast(float, timeout))
else:
real_timeout = timeout
# timeout is cumulative for all request operations
@@ -690,7 +691,7 @@ async def _ws_connect(
DeprecationWarning,
stacklevel=2,
)
ws_timeout = ClientWSTimeout(ws_close=timeout) # type: ignore
ws_timeout = ClientWSTimeout(ws_close=cast(float, timeout))
else:
ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT
if receive_timeout is not None:
@@ -1053,7 +1054,7 @@ def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
def send(self, arg: None) -> "asyncio.Future[Any]":
return self._coro.send(arg)

def throw(self, arg: BaseException) -> None: # type: ignore
def throw(self, arg: BaseException) -> None: # type: ignore[override]
self._coro.throw(arg)

def close(self) -> None:
6 changes: 3 additions & 3 deletions aiohttp/client_exceptions.py
@@ -11,7 +11,7 @@

SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = SSLContext = None # type: ignore
ssl = SSLContext = None # type: ignore[assignment]


if TYPE_CHECKING: # pragma: no cover
@@ -270,11 +270,11 @@ class ClientSSLError(ClientConnectorError):
ssl_error_bases = (ClientSSLError,)


class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
"""Response ssl error."""


class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
"""Response certificate error."""

def __init__(
35 changes: 20 additions & 15 deletions aiohttp/client_reqrep.py
@@ -64,13 +64,13 @@
import ssl
from ssl import SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore
SSLContext = object # type: ignore
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[assignment,misc]

try:
import cchardet as chardet
except ImportError: # pragma: no cover
import chardet # type: ignore
import chardet # type: ignore[no-redef]


__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
@@ -333,9 +333,9 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None:

if headers:
if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
headers = headers.items() # type: ignore
headers = headers.items() # type: ignore[assignment]

for key, value in headers: # type: ignore
for key, value in headers: # type: ignore[misc]
# A special case for Host header
if key.lower() == "host":
self.headers[key] = value
@@ -347,7 +347,7 @@ def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
(hdr, None) for hdr in sorted(skip_auto_headers)
)
used_headers = self.headers.copy()
used_headers.extend(self.skip_auto_headers) # type: ignore
used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type]

for hdr, val in self.DEFAULT_HEADERS.items():
if hdr not in used_headers:
@@ -369,15 +369,15 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
if isinstance(cookies, Mapping):
iter_cookies = cookies.items()
else:
iter_cookies = cookies # type: ignore
iter_cookies = cookies # type: ignore[assignment]
for name, value in iter_cookies:
if isinstance(value, Morsel):
# Preserve coded_value
mrsl_val = value.get(value.key, Morsel())
mrsl_val.set(value.key, value.value, value.coded_value)
c[name] = mrsl_val
else:
c[name] = value # type: ignore
c[name] = value # type: ignore[assignment]

self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

@@ -519,10 +519,12 @@ async def write_bytes(
await self.body.write(writer)
else:
if isinstance(self.body, (bytes, bytearray)):
self.body = (self.body,) # type: ignore
body = (self.body,)
else:
body = self.body

for chunk in self.body:
await writer.write(chunk) # type: ignore
for chunk in body:
await writer.write(chunk)

await writer.write_eof()
except OSError as exc:
@@ -806,7 +808,7 @@ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":

link.add(key, value)

key = link.get("rel", url) # type: ignore
key = link.get("rel", url) # type: ignore[assignment]

link.add("url", self.url.join(URL(url)))

@@ -818,13 +820,14 @@ async def start(self, connection: "Connection") -> "ClientResponse":
"""Start response processing."""
self._closed = False
self._protocol = connection.protocol
assert self._protocol is not None
self._connection = connection

with self._timer:
while True:
# read response
try:
message, payload = await self._protocol.read() # type: ignore
message, payload = await self._protocol.read()
except http.HttpProcessingError as exc:
raise ClientResponseError(
self.request_info,
@@ -1007,11 +1010,12 @@ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
"""Read response payload and decode."""
if self._body is None:
await self.read()
assert self._body is not None

if encoding is None:
encoding = self.get_encoding()

return self._body.decode(encoding, errors=errors) # type: ignore
return self._body.decode(encoding, errors=errors)

async def json(
self,
@@ -1023,6 +1027,7 @@
"""Read and decodes JSON response."""
if self._body is None:
await self.read()
assert self._body is not None

if content_type:
ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
@@ -1039,7 +1044,7 @@
if encoding is None:
encoding = self.get_encoding()

return loads(self._body.decode(encoding)) # type: ignore
return loads(self._body.decode(encoding))

async def __aenter__(self) -> "ClientResponse":
return self
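Several hunks above (`start`, `text`, `json`) swap an ignore for `assert ... is not None`: after the assert, mypy narrows the `Optional` attribute to its non-`None` type, so the following `.decode()` calls type-check without a comment. A minimal sketch of that pattern with a hypothetical response class:

```python
import asyncio
from typing import Optional


class FakeResponse:
    """Hypothetical stand-in for ClientResponse, payload handling only."""

    def __init__(self) -> None:
        self._body: Optional[bytes] = None

    async def read(self) -> bytes:
        self._body = b'{"ok": true}'  # pretend the payload was read here
        return self._body

    async def text(self, encoding: str = "utf-8") -> str:
        if self._body is None:
            await self.read()
        # mypy still sees Optional[bytes] at this point; the assert narrows
        # the attribute to bytes, replacing the old "# type: ignore".
        assert self._body is not None
        return self._body.decode(encoding)


print(asyncio.run(FakeResponse().text()))  # {"ok": true}
```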
21 changes: 14 additions & 7 deletions aiohttp/connector.py
@@ -55,8 +55,8 @@

SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore
SSLContext = object # type: ignore
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[assignment,misc]


__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
@@ -218,7 +218,7 @@ def __init__(
self._force_close = force_close

# {host_key: FIFO list of waiters}
self._waiters = defaultdict(deque) # type: ignore
self._waiters = defaultdict(deque) # type: ignore[var-annotated]

self._loop = loop
self._factory = functools.partial(ResponseHandler, loop=loop)
@@ -943,7 +943,12 @@ async def _wrap_create_connection(
) -> Tuple[asyncio.Transport, ResponseHandler]:
try:
async with ceil_timeout(timeout.sock_connect):
return await self._loop.create_connection(*args, **kwargs) # type: ignore # noqa
transport, handler = await self._loop.create_connection(*args, **kwargs)
return (
cast(asyncio.Transport, transport),
cast(ResponseHandler, handler),
)

except cert_errors as exc:
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
except ssl_errors as exc:
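In `_wrap_create_connection` the whole-line ignore is replaced by `typing.cast`, which is a no-op at runtime and only tells the checker which concrete types to assume for the values the loop returned. A rough sketch of the idea, with a hypothetical loosely typed factory standing in for `loop.create_connection`:

```python
import asyncio
from typing import Any, Tuple, cast


class ResponseHandler(asyncio.Protocol):
    """Hypothetical stand-in for aiohttp's ResponseHandler protocol class."""


def make_pair() -> Tuple[Any, Any]:
    # Hypothetical factory whose annotation is looser than callers need.
    return asyncio.Transport(), ResponseHandler()


def wrap_create_connection() -> Tuple[asyncio.Transport, ResponseHandler]:
    transport, handler = make_pair()
    # cast() performs no conversion or check at runtime; it only narrows the
    # static types, replacing the old whole-line "# type: ignore".
    return cast(asyncio.Transport, transport), cast(ResponseHandler, handler)
```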
@@ -1031,7 +1036,7 @@ async def _create_proxy_connection(
) -> Tuple[asyncio.Transport, ResponseHandler]:
headers = {} # type: Dict[str, str]
if req.proxy_headers is not None:
headers = req.proxy_headers # type: ignore
headers = req.proxy_headers # type: ignore[assignment]
headers[hdrs.HOST] = req.headers[hdrs.HOST]

url = req.proxy
@@ -1202,7 +1207,9 @@ def __init__(
limit=limit,
limit_per_host=limit_per_host,
)
if not isinstance(self._loop, asyncio.ProactorEventLoop): # type: ignore
if not isinstance(
self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
):
raise RuntimeError(
"Named Pipes only available in proactor " "loop under windows"
)
@@ -1218,7 +1225,7 @@ async def _create_connection(
) -> ResponseHandler:
try:
async with ceil_timeout(timeout.sock_connect):
_, proto = await self._loop.create_pipe_connection( # type: ignore
_, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] # noqa: E501
self._factory, self._path
)
# the drain is required so that the connection_made is called
2 changes: 1 addition & 1 deletion aiohttp/cookiejar.py
@@ -147,7 +147,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
for name, cookie in cookies:
if not isinstance(cookie, Morsel):
tmp = SimpleCookie() # type: SimpleCookie[str]
tmp[name] = cookie # type: ignore
tmp[name] = cookie # type: ignore[assignment]
cookie = tmp[name]

domain = cookie["domain"]
4 changes: 2 additions & 2 deletions aiohttp/formdata.py
@@ -92,14 +92,14 @@ def add_fields(self, *fields: Any) -> None:

if isinstance(rec, io.IOBase):
k = guess_filename(rec, "unknown")
self.add_field(k, rec) # type: ignore
self.add_field(k, rec)

elif isinstance(rec, (MultiDictProxy, MultiDict)):
to_add.extend(rec.items())

elif isinstance(rec, (list, tuple)) and len(rec) == 2:
k, fp = rec
self.add_field(k, fp) # type: ignore
self.add_field(k, fp)

else:
raise TypeError(
29 changes: 22 additions & 7 deletions aiohttp/helpers.py
@@ -38,6 +38,7 @@
TypeVar,
Union,
cast,
overload,
)
from urllib.parse import quote
from urllib.request import getproxies
@@ -325,6 +326,16 @@ def parse_mimetype(mimetype: str) -> MimeType:
)


@overload
def guess_filename(obj: Any, default: str) -> str:
...


@overload
def guess_filename(obj: Any) -> Optional[str]:
...


def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
name = getattr(obj, "name", None)
if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
@@ -451,7 +462,7 @@ def __set__(self, inst: _TSelf[_T], value: _T) -> None:
from ._helpers import reify as reify_c

if not NO_EXTENSIONS:
reify = reify_c # type: ignore
reify = reify_c # type: ignore[assignment,misc]
except ImportError:
pass

@@ -719,23 +730,27 @@ def _parse_content_type(self, raw: str) -> None:
@property
def content_type(self) -> str:
"""The value of content part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_type # type: ignore
return cast(str, self._content_type)

@property
def charset(self) -> Optional[str]:
"""The value of charset part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_dict.get("charset") # type: ignore

assert self._content_dict is not None
return self._content_dict.get("charset")

@property
def content_length(self) -> Optional[int]:
"""The value of Content-Length HTTP header."""
content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore
content_length = self._headers.get( # type: ignore[attr-defined]
hdrs.CONTENT_LENGTH
)

if content_length is not None:
return int(content_length)
Expand Down Expand Up @@ -779,7 +794,7 @@ def get(self, key: str, default: Any = None) -> Any:

def __len__(self) -> int:
# reuses stored hash values if possible
return len(set().union(*self._maps)) # type: ignore
return len(set().union(*self._maps)) # type: ignore[arg-type]

def __iter__(self) -> Iterator[str]:
d = {} # type: Dict[str, Any]
2 changes: 1 addition & 1 deletion aiohttp/http_parser.py
@@ -916,7 +916,7 @@ def end_http_chunk_receiving(self) -> None:

try:
if not NO_EXTENSIONS:
from ._http_parser import ( # type: ignore
from ._http_parser import ( # type: ignore[import,no-redef]
HttpRequestParser,
HttpResponseParser,
RawRequestMessage,
2 changes: 1 addition & 1 deletion aiohttp/http_websocket.py
@@ -144,7 +144,7 @@ def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
_websocket_mask = _websocket_mask_python
else:
try:
from ._websocket import _websocket_mask_cython # type: ignore
from ._websocket import _websocket_mask_cython # type: ignore[import]

_websocket_mask = _websocket_mask_cython
except ImportError: # pragma: no cover
Expand Down