Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/master' into fix1698
Browse files Browse the repository at this point in the history
  • Loading branch information
dmoisset committed Aug 16, 2016
2 parents e53c355 + d4e15f9 commit 6026e66
Show file tree
Hide file tree
Showing 104 changed files with 1,190 additions and 1,272 deletions.
17 changes: 11 additions & 6 deletions docs/source/cheat_sheet.rst
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,7 @@ Built-in types

.. code-block:: python
from typing import List, Set, Dict, Tuple, Optional
import six
from typing import List, Set, Dict, Tuple, Text, Optional
# For simple built-in types, just use the name of the type.
x = 1 # type: int
Expand All @@ -39,9 +38,9 @@ Built-in types
# For tuples, we specify the types of all the elements.
x = (3, "yes", 7.5) # type: Tuple[int, str, float]
# For textual data, we generally use six.text_type.
# For textual data, use Text.
# This is `unicode` in Python 2 and `str` in Python 3.
x = ["string", u"unicode"] # type: List[six.text_type]
x = ["string", u"unicode"] # type: List[Text]
# Use Optional for values that could be None.
input_str = f() # type: Optional[str]
Expand Down Expand Up @@ -177,11 +176,17 @@ Other stuff
.. code-block:: python
# typing.Match describes regex matches from the re module.
from typing import Match
from typing import Match, AnyStr
x = re.match(r'[0-9]+', "15") # type: Match[str]
# Use AnyStr for functions that should accept any kind of string
# without allowing different kinds of strings to mix.
def concat(a: AnyStr, b: AnyStr) -> AnyStr:
return a + b
concat(u"foo", u"bar") # type: unicode
concat(b"foo", b"bar") # type: bytes
# TODO: add typing.IO: e.g., sys.stdout has type IO[str]
# TODO: add TypeVar and a simple generic function
# TODO: add AnyStr (and mention up next to strings)
3 changes: 3 additions & 0 deletions docs/source/duck_type_compatibility.rst
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,6 @@ and also behaves as expected:
silently pass type checking. In Python 3 ``str`` and ``bytes`` are
separate, unrelated types and this kind of error is easy to
detect. This is a good reason for preferring Python 3 over Python 2!

See :ref:`text-and-anystr` for details on how to enforce that a
value must be a unicode string in a cross-compatible way.
44 changes: 44 additions & 0 deletions docs/source/kinds_of_types.rst
Original file line number Diff line number Diff line change
Expand Up @@ -657,3 +657,47 @@ Now mypy will infer the correct type of the result when we call

For more details about ``Type[]`` see `PEP 484
<https://www.python.org/dev/peps/pep-0484/#the-type-of-class-objects>`_.

.. _text-and-anystr:

Text and AnyStr
***************

Sometimes you may want to write a function which will accept only unicode
strings. This can be challenging to do in a codebase intended to run in
both Python 2 and Python 3, since ``str`` means something different in each
version and the ``unicode`` type does not exist in Python 3.

To help solve this issue, use ``typing.Text`` which is aliased to
``unicode`` in Python 2 and to ``str`` in Python 3. This allows you to
indicate that a function should accept only unicode strings in a
cross-compatible way:

.. code-block:: python
from typing import Text
def unicode_only(s: Text) -> Text:
return s + u'\u2713'
In other cases, you may want to write a function that will work with any
kind of string but will not let you mix two different string types. To do
so, use ``typing.AnyStr``:

.. code-block:: python
from typing import AnyStr
def concat(x: AnyStr, y: AnyStr) -> AnyStr:
return x + y
concat('a', 'b') # Okay
concat(b'a', b'b') # Okay
concat('a', b'b') # Error: cannot mix bytes and unicode
For more details, see :ref:`type-variable-value-restriction`.

.. note::

How ``bytes``, ``str``, and ``unicode`` are handled between Python 2 and
Python 3 may change in future versions of mypy.
17 changes: 14 additions & 3 deletions mypy/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -1181,7 +1181,7 @@ def wrap_context(self) -> Iterator[None]:
except CompileError:
raise
except Exception as err:
report_internal_error(err, self.path, 0)
report_internal_error(err, self.path, 0, self.manager.errors)
self.manager.errors.set_import_context(save_import_context)
self.check_blockers()

Expand Down Expand Up @@ -1344,6 +1344,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
# TODO: Consider whether to go depth-first instead. This may
# affect the order in which we process files within import cycles.
new = collections.deque() # type: collections.deque[State]
entry_points = set() # type: Set[str]
# Seed the graph with the initial root sources.
for bs in sources:
try:
Expand All @@ -1356,11 +1357,16 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
manager.errors.raise_error()
graph[st.id] = st
new.append(st)
entry_points.add(bs.module)
# Collect dependencies. We go breadth-first.
while new:
st = new.popleft()
for dep in st.ancestors + st.dependencies:
if dep not in graph:
for dep in st.ancestors + st.dependencies + st.suppressed:
# We don't want to recheck imports marked with '# type: ignore'
# so we ignore any suppressed module not explicitly re-included
# from the command line.
ignored = dep in st.suppressed and dep not in entry_points
if dep not in graph and not ignored:
try:
if dep in st.ancestors:
# TODO: Why not 'if dep not in st.dependencies' ?
Expand All @@ -1380,6 +1386,11 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
new.append(newst)
if dep in st.ancestors and dep in graph:
graph[dep].child_modules.add(st.id)
if dep in graph and dep in st.suppressed:
# Previously suppressed file is now visible
if dep in st.suppressed:
st.suppressed.remove(dep)
st.dependencies.append(dep)
for id, g in graph.items():
if g.has_new_submodules():
g.parse_file()
Expand Down
16 changes: 10 additions & 6 deletions mypy/checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ def accept(self, node: Node, type_context: Type = None) -> Type:
try:
typ = node.accept(self)
except Exception as err:
report_internal_error(err, self.errors.file, node.line)
report_internal_error(err, self.errors.file, node.line, self.errors)
self.type_context.pop()
self.store_type(node, typ)
if self.typing_mode_none():
Expand Down Expand Up @@ -809,9 +809,13 @@ def check_method_override_for_base_with_name(
# Map the overridden method type to subtype context so that
# it can be checked for compatibility.
original_type = base_attr.type
if original_type is None and isinstance(base_attr.node,
FuncDef):
original_type = self.function_type(base_attr.node)
if original_type is None:
if isinstance(base_attr.node, FuncDef):
original_type = self.function_type(base_attr.node)
elif isinstance(base_attr.node, Decorator):
original_type = self.function_type(base_attr.node.func)
else:
assert False, str(base_attr.node)
if isinstance(original_type, FunctionLike):
original = map_type_from_supertype(
method_type(original_type),
Expand All @@ -825,7 +829,6 @@ def check_method_override_for_base_with_name(
base.name(),
defn)
else:
assert original_type is not None
self.msg.signature_incompatible_with_supertype(
defn.name(), name, base.name(), defn)

Expand Down Expand Up @@ -2248,7 +2251,8 @@ def leave_partial_types(self) -> None:
partial_types = self.partial_types.pop()
if not self.current_node_deferred:
for var, context in partial_types.items():
if experiments.STRICT_OPTIONAL and cast(PartialType, var.type).type is None:
if (experiments.STRICT_OPTIONAL and
isinstance(var.type, PartialType) and var.type.type is None):
# None partial type: assume variable is intended to have type None
var.type = NoneTyp()
else:
Expand Down
99 changes: 57 additions & 42 deletions mypy/checkexpr.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,8 +158,10 @@ def try_infer_partial_type(self, e: CallExpr) -> None:
var = cast(Var, e.callee.expr.node)
partial_types = self.chk.find_partial_types(var)
if partial_types is not None and not self.chk.current_node_deferred:
partial_type = cast(PartialType, var.type)
if partial_type is None or partial_type.type is None:
partial_type = var.type
if (partial_type is None or
not isinstance(partial_type, PartialType) or
partial_type.type is None):
# A partial None type -> can't infer anything.
return
typename = partial_type.type.fullname()
Expand Down Expand Up @@ -1538,55 +1540,68 @@ def check_generator_or_comprehension(self, gen: GeneratorExpr,
type_name: str,
id_for_messages: str) -> Type:
"""Type check a generator expression or a list comprehension."""
self.check_for_comp(gen)
with self.chk.binder.frame_context():
self.check_for_comp(gen)

# Infer the type of the list comprehension by using a synthetic generic
# callable type.
tvdef = TypeVarDef('T', -1, [], self.chk.object_type())
tv = TypeVarType(tvdef)
constructor = CallableType(
[tv],
[nodes.ARG_POS],
[None],
self.chk.named_generic_type(type_name, [tv]),
self.chk.named_type('builtins.function'),
name=id_for_messages,
variables=[tvdef])
return self.check_call(constructor,
[gen.left_expr], [nodes.ARG_POS], gen)[0]
# Infer the type of the list comprehension by using a synthetic generic
# callable type.
tvdef = TypeVarDef('T', -1, [], self.chk.object_type())
tv = TypeVarType(tvdef)
constructor = CallableType(
[tv],
[nodes.ARG_POS],
[None],
self.chk.named_generic_type(type_name, [tv]),
self.chk.named_type('builtins.function'),
name=id_for_messages,
variables=[tvdef])
return self.check_call(constructor,
[gen.left_expr], [nodes.ARG_POS], gen)[0]

def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type:
"""Type check a dictionary comprehension."""
self.check_for_comp(e)

# Infer the type of the list comprehension by using a synthetic generic
# callable type.
ktdef = TypeVarDef('KT', -1, [], self.chk.object_type())
vtdef = TypeVarDef('VT', -2, [], self.chk.object_type())
kt = TypeVarType(ktdef)
vt = TypeVarType(vtdef)
constructor = CallableType(
[kt, vt],
[nodes.ARG_POS, nodes.ARG_POS],
[None, None],
self.chk.named_generic_type('builtins.dict', [kt, vt]),
self.chk.named_type('builtins.function'),
name='<dictionary-comprehension>',
variables=[ktdef, vtdef])
return self.check_call(constructor,
[e.key, e.value], [nodes.ARG_POS, nodes.ARG_POS], e)[0]
with self.chk.binder.frame_context():
self.check_for_comp(e)

# Infer the type of the list comprehension by using a synthetic generic
# callable type.
ktdef = TypeVarDef('KT', -1, [], self.chk.object_type())
vtdef = TypeVarDef('VT', -2, [], self.chk.object_type())
kt = TypeVarType(ktdef)
vt = TypeVarType(vtdef)
constructor = CallableType(
[kt, vt],
[nodes.ARG_POS, nodes.ARG_POS],
[None, None],
self.chk.named_generic_type('builtins.dict', [kt, vt]),
self.chk.named_type('builtins.function'),
name='<dictionary-comprehension>',
variables=[ktdef, vtdef])
return self.check_call(constructor,
[e.key, e.value], [nodes.ARG_POS, nodes.ARG_POS], e)[0]

def check_for_comp(self, e: Union[GeneratorExpr, DictionaryComprehension]) -> None:
"""Check the for_comp part of comprehensions. That is the part from 'for':
... for x in y if z
Note: This adds the type information derived from the condlists to the current binder.
"""
with self.chk.binder.frame_context():
for index, sequence, conditions in zip(e.indices, e.sequences,
e.condlists):
sequence_type = self.chk.analyze_iterable_item_type(sequence)
self.chk.analyze_index_variables(index, sequence_type, e)
for condition in conditions:
self.accept(condition)
for index, sequence, conditions in zip(e.indices, e.sequences,
e.condlists):
sequence_type = self.chk.analyze_iterable_item_type(sequence)
self.chk.analyze_index_variables(index, sequence_type, e)
for condition in conditions:
self.accept(condition)

# values are only part of the comprehension when all conditions are true
true_map, _ = mypy.checker.find_isinstance_check(
condition, self.chk.type_map,
self.chk.typing_mode_weak()
)

if true_map:
for var, type in true_map.items():
self.chk.binder.push(var, type)

def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
cond_type = self.accept(e.cond)
Expand Down
Loading

0 comments on commit 6026e66

Please sign in to comment.