Specify mypy errors for ignore only
derlih committed Jan 20, 2021
1 parent e465642 commit eaee5aa
Showing 30 changed files with 141 additions and 119 deletions.
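
The pattern throughout the diff is mechanical: every bare "# type: ignore" in the touched files is narrowed to "# type: ignore[code]", so mypy suppresses only the named error code on that line and still reports anything new that later shows up there. (Running mypy with show_error_codes = True prints the code attached to each diagnostic, and warn_unused_ignores flags any ignore that no longer suppresses anything.) A minimal sketch of the difference, reusing the SSLContext fallback from client.py; the helper function below is illustrative and not part of the commit:

    try:
        from ssl import SSLContext
    except ImportError:  # pragma: no cover
        # A bare "# type: ignore" here would hide every mypy error on this line.
        # The scoped form silences only the two codes mypy actually reports for
        # rebinding an imported class to a plain object.
        SSLContext = object  # type: ignore[misc,assignment]


    def describe(ctx: SSLContext) -> str:
        # This line carries no ignore, so mypy still checks it normally.
        return type(ctx).__name__
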
10 changes: 5 additions & 5 deletions aiohttp/client.py
@@ -133,7 +133,7 @@
try:
from ssl import SSLContext
except ImportError: # pragma: no cover
SSLContext = object # type: ignore
SSLContext = object # type: ignore[misc,assignment]


@dataclasses.dataclass(frozen=True)
@@ -248,7 +248,7 @@ def __init__(
if timeout is sentinel:
self._timeout = DEFAULT_TIMEOUT
else:
self._timeout = timeout # type: ignore
self._timeout = timeout # type: ignore[assignment]
self._raise_for_status = raise_for_status
self._auto_decompress = auto_decompress
self._trust_env = trust_env
@@ -381,7 +381,7 @@ async def _request(
real_timeout = self._timeout # type: ClientTimeout
else:
if not isinstance(timeout, ClientTimeout):
real_timeout = ClientTimeout(total=timeout) # type: ignore
real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type]
else:
real_timeout = timeout
# timeout is cumulative for all request operations
@@ -690,7 +690,7 @@ async def _ws_connect(
DeprecationWarning,
stacklevel=2,
)
ws_timeout = ClientWSTimeout(ws_close=timeout) # type: ignore
ws_timeout = ClientWSTimeout(ws_close=timeout) # type: ignore[arg-type]
else:
ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT
if receive_timeout is not None:
@@ -1053,7 +1053,7 @@ def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
def send(self, arg: None) -> "asyncio.Future[Any]":
return self._coro.send(arg)

def throw(self, arg: BaseException) -> None: # type: ignore
def throw(self, arg: BaseException) -> None: # type: ignore[arg-type,override]
self._coro.throw(arg)

def close(self) -> None:
6 changes: 3 additions & 3 deletions aiohttp/client_exceptions.py
@@ -11,7 +11,7 @@

SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = SSLContext = None # type: ignore
ssl = SSLContext = None # type: ignore[assignment]


if TYPE_CHECKING: # pragma: no cover
@@ -270,11 +270,11 @@ class ClientSSLError(ClientConnectorError):
ssl_error_bases = (ClientSSLError,)


class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
"""Response ssl error."""


class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
"""Response certificate error."""

def __init__(
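
The [misc] code in the client_exceptions.py hunk above corresponds to mypy's "Unsupported dynamic base class" diagnostic: the base classes are supplied by star-unpacking a tuple whose contents depend on whether ssl imported, which mypy cannot resolve statically. A hedged sketch of the same construct with simplified, illustrative classes (not aiohttp's actual hierarchy):

    import ssl


    class ClientSSLError(ConnectionError):
        """Illustrative stand-in for aiohttp's ClientSSLError base."""


    # In aiohttp the tuple's contents depend on whether ssl imported;
    # it is fixed here only to keep the sketch short.
    ssl_error_bases = (ClientSSLError, ssl.SSLError)


    # mypy reports "Unsupported dynamic base class" (code "misc") for
    # star-unpacked bases; the scoped comment suppresses only that diagnostic.
    class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
        """Response ssl error."""
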
29 changes: 15 additions & 14 deletions aiohttp/client_reqrep.py
@@ -64,13 +64,13 @@
import ssl
from ssl import SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore
SSLContext = object # type: ignore
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]

try:
import cchardet as chardet
except ImportError: # pragma: no cover
import chardet # type: ignore
import chardet # type: ignore[no-redef]


__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
@@ -333,9 +333,9 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None:

if headers:
if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
headers = headers.items() # type: ignore
headers = headers.items() # type: ignore[assignment]

for key, value in headers: # type: ignore
for key, value in headers: # type: ignore[misc]
# A special case for Host header
if key.lower() == "host":
self.headers[key] = value
@@ -347,7 +347,7 @@ def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
(hdr, None) for hdr in sorted(skip_auto_headers)
)
used_headers = self.headers.copy()
used_headers.extend(self.skip_auto_headers) # type: ignore
used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type]

for hdr, val in self.DEFAULT_HEADERS.items():
if hdr not in used_headers:
@@ -369,15 +369,15 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
if isinstance(cookies, Mapping):
iter_cookies = cookies.items()
else:
iter_cookies = cookies # type: ignore
iter_cookies = cookies # type: ignore[assignment]
for name, value in iter_cookies:
if isinstance(value, Morsel):
# Preserve coded_value
mrsl_val = value.get(value.key, Morsel())
mrsl_val.set(value.key, value.value, value.coded_value)
c[name] = mrsl_val
else:
c[name] = value # type: ignore
c[name] = value # type: ignore[assignment]

self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

@@ -519,10 +519,10 @@ async def write_bytes(
await self.body.write(writer)
else:
if isinstance(self.body, (bytes, bytearray)):
self.body = (self.body,) # type: ignore
self.body = (self.body,) # type: ignore[assignment]

for chunk in self.body:
await writer.write(chunk) # type: ignore
await writer.write(chunk) # type: ignore[arg-type]

await writer.write_eof()
except OSError as exc:
@@ -806,7 +806,7 @@ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":

link.add(key, value)

key = link.get("rel", url) # type: ignore
key = link.get("rel", url) # type: ignore[assignment]

link.add("url", self.url.join(URL(url)))

@@ -824,7 +824,8 @@ async def start(self, connection: "Connection") -> "ClientResponse":
while True:
# read response
try:
message, payload = await self._protocol.read() # type: ignore
protocol = self._protocol
message, payload = await protocol.read() # type: ignore[union-attr]
except http.HttpProcessingError as exc:
raise ClientResponseError(
self.request_info,
@@ -1011,7 +1012,7 @@ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
if encoding is None:
encoding = self.get_encoding()

return self._body.decode(encoding, errors=errors) # type: ignore
return self._body.decode(encoding, errors=errors) # type: ignore[union-attr]

async def json(
self,
@@ -1039,7 +1040,7 @@ async def json(
if encoding is None:
encoding = self.get_encoding()

return loads(self._body.decode(encoding)) # type: ignore
return loads(self._body.decode(encoding)) # type: ignore[union-attr]

async def __aenter__(self) -> "ClientResponse":
return self
16 changes: 9 additions & 7 deletions aiohttp/connector.py
@@ -55,8 +55,8 @@

SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore
SSLContext = object # type: ignore
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]


__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
@@ -218,7 +218,7 @@ def __init__(
self._force_close = force_close

# {host_key: FIFO list of waiters}
self._waiters = defaultdict(deque) # type: ignore
self._waiters = defaultdict(deque) # type: ignore[var-annotated]

self._loop = loop
self._factory = functools.partial(ResponseHandler, loop=loop)
@@ -943,7 +943,7 @@ async def _wrap_create_connection(
) -> Tuple[asyncio.Transport, ResponseHandler]:
try:
async with ceil_timeout(timeout.sock_connect):
return await self._loop.create_connection(*args, **kwargs) # type: ignore # noqa
return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa
except cert_errors as exc:
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
except ssl_errors as exc:
@@ -1031,7 +1031,7 @@ async def _create_proxy_connection(
) -> Tuple[asyncio.Transport, ResponseHandler]:
headers = {} # type: Dict[str, str]
if req.proxy_headers is not None:
headers = req.proxy_headers # type: ignore
headers = req.proxy_headers # type: ignore[assignment]
headers[hdrs.HOST] = req.headers[hdrs.HOST]

url = req.proxy
@@ -1202,7 +1202,9 @@ def __init__(
limit=limit,
limit_per_host=limit_per_host,
)
if not isinstance(self._loop, asyncio.ProactorEventLoop): # type: ignore
if not isinstance(
self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
):
raise RuntimeError(
"Named Pipes only available in proactor " "loop under windows"
)
@@ -1218,7 +1220,7 @@ async def _create_connection(
) -> ResponseHandler:
try:
async with ceil_timeout(timeout.sock_connect):
_, proto = await self._loop.create_pipe_connection( # type: ignore
_, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] # noqa: E501
self._factory, self._path
)
# the drain is required so that the connection_made is called
2 changes: 1 addition & 1 deletion aiohttp/cookiejar.py
@@ -147,7 +147,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
for name, cookie in cookies:
if not isinstance(cookie, Morsel):
tmp = SimpleCookie() # type: SimpleCookie[str]
tmp[name] = cookie # type: ignore
tmp[name] = cookie # type: ignore[assignment]
cookie = tmp[name]

domain = cookie["domain"]
4 changes: 2 additions & 2 deletions aiohttp/formdata.py
@@ -92,14 +92,14 @@ def add_fields(self, *fields: Any) -> None:

if isinstance(rec, io.IOBase):
k = guess_filename(rec, "unknown")
self.add_field(k, rec) # type: ignore
self.add_field(k, rec) # type: ignore[arg-type]

elif isinstance(rec, (MultiDictProxy, MultiDict)):
to_add.extend(rec.items())

elif isinstance(rec, (list, tuple)) and len(rec) == 2:
k, fp = rec
self.add_field(k, fp) # type: ignore
self.add_field(k, fp) # type: ignore[arg-type]

else:
raise TypeError(
16 changes: 9 additions & 7 deletions aiohttp/helpers.py
@@ -451,7 +451,7 @@ def __set__(self, inst: _TSelf[_T], value: _T) -> None:
from ._helpers import reify as reify_c

if not NO_EXTENSIONS:
reify = reify_c # type: ignore
reify = reify_c # type: ignore[misc,assignment]
except ImportError:
pass

@@ -719,23 +719,25 @@ def _parse_content_type(self, raw: str) -> None:
@property
def content_type(self) -> str:
"""The value of content part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_type # type: ignore
return self._content_type # type: ignore[return-value]

@property
def charset(self) -> Optional[str]:
"""The value of charset part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_dict.get("charset") # type: ignore
return self._content_dict.get("charset") # type: ignore[union-attr]

@property
def content_length(self) -> Optional[int]:
"""The value of Content-Length HTTP header."""
content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore
content_length = self._headers.get( # type: ignore[attr-defined]
hdrs.CONTENT_LENGTH
)

if content_length is not None:
return int(content_length)
Expand Down Expand Up @@ -779,7 +781,7 @@ def get(self, key: str, default: Any = None) -> Any:

def __len__(self) -> int:
# reuses stored hash values if possible
return len(set().union(*self._maps)) # type: ignore
return len(set().union(*self._maps)) # type: ignore[arg-type]

def __iter__(self) -> Iterator[str]:
d = {} # type: Dict[str, Any]
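
Some hunks, like content_length in helpers.py above or the ProactorEventLoop check in connector.py, also rewrap the statement because the longer ignore comment would push the line past the length limit. The comment must stay on the physical line mypy reports, which for a wrapped call is the line carrying the offending expression rather than the closing parenthesis. A small sketch of that placement; the class below is illustrative and uses a union-attr error rather than the attr-defined one in aiohttp:

    from typing import Dict, Optional


    class HeadersMixin:
        _headers: Optional[Dict[str, str]] = None

        @property
        def content_length(self) -> Optional[int]:
            # mypy reports the union-attr error on the line containing
            # "._headers.get(", so the narrowed ignore lives there.
            content_length = self._headers.get(  # type: ignore[union-attr]
                "Content-Length"
            )
            if content_length is None:
                return None
            return int(content_length)
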
2 changes: 1 addition & 1 deletion aiohttp/http_parser.py
@@ -916,7 +916,7 @@ def end_http_chunk_receiving(self) -> None:

try:
if not NO_EXTENSIONS:
from ._http_parser import ( # type: ignore
from ._http_parser import ( # type: ignore[import,no-redef]
HttpRequestParser,
HttpResponseParser,
RawRequestMessage,
2 changes: 1 addition & 1 deletion aiohttp/http_websocket.py
@@ -144,7 +144,7 @@ def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
_websocket_mask = _websocket_mask_python
else:
try:
from ._websocket import _websocket_mask_cython # type: ignore
from ._websocket import _websocket_mask_cython # type: ignore[import]

_websocket_mask = _websocket_mask_cython
except ImportError: # pragma: no cover
2 changes: 1 addition & 1 deletion aiohttp/http_writer.py
@@ -183,7 +183,7 @@ def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
_serialize_headers = _py_serialize_headers

try:
import aiohttp._http_writer as _http_writer # type: ignore
import aiohttp._http_writer as _http_writer # type: ignore[import]

_c_serialize_headers = _http_writer._serialize_headers
if not NO_EXTENSIONS:
8 changes: 4 additions & 4 deletions aiohttp/multipart.py
@@ -275,7 +275,7 @@ def __init__(
self._cache = {} # type: Dict[str, Any]

def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore
return self # type: ignore[return-value]

async def __anext__(self) -> bytes:
part = await self.next()
@@ -587,7 +587,7 @@ def __init__(
def __aiter__(
self,
) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore
return self # type: ignore[return-value]

async def __anext__(
self,
@@ -888,7 +888,7 @@ def append_payload(self, payload: Payload) -> Payload:
if size is not None and not (encoding or te_encoding):
payload.headers[CONTENT_LENGTH] = str(size)

self._parts.append((payload, encoding, te_encoding)) # type: ignore
self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type]
return payload

def append_json(
@@ -953,7 +953,7 @@ async def write(self, writer: Any, close_boundary: bool = True) -> None:
w.enable_compression(encoding)
if te_encoding:
w.enable_encoding(te_encoding)
await part.write(w) # type: ignore
await part.write(w) # type: ignore[arg-type]
await w.write_eof()
else:
await part.write(writer)
(The remaining 19 changed files were not loaded in this view.)
