Skip to content

gh-117865: Defer several imports in inspect.py #119526

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 1 commit into
base: main
Choose a base branch
from
Draft
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 10 additions & 6 deletions Lib/inspect.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,25 +142,22 @@


import abc
import ast
import dis
import collections.abc
import enum
import importlib.machinery
import itertools
import linecache
import os
import re
import sys
import tokenize
import token
import types
import functools
import builtins
from keyword import iskeyword
from operator import attrgetter
from collections import namedtuple, OrderedDict
from weakref import ref as make_weakref
from _weakref import ref as make_weakref

# Create constants for the compiler flags in Include/code.h
# We try to get them from dis to avoid duplication
Expand Down Expand Up @@ -1149,6 +1146,8 @@ def __init__(self):
self.body_col0 = None

def tokeneater(self, type, token, srowcol, erowcol, line):
import tokenize

if not self.started and not self.indecorator:
# skip any decorators
if token == "@":
Expand Down Expand Up @@ -1194,6 +1193,8 @@ def tokeneater(self, type, token, srowcol, erowcol, line):
def getblock(lines):
"""Extract the block of code at the top of the given list of lines."""
blockfinder = BlockFinder()
import tokenize

try:
tokens = tokenize.generate_tokens(iter(lines).__next__)
for _token in tokens:
Expand Down Expand Up @@ -1420,6 +1421,7 @@ def formatannotation(annotation, base_module=None):
def repl(match):
    # Regex substitution callback: drop a leading "typing." qualifier
    # from the matched dotted name, leaving other names untouched.
    return match.group().removeprefix('typing.')
import re
return re.sub(r'[\w\.]+', repl, repr(annotation))
if isinstance(annotation, types.GenericAlias):
return str(annotation)
Expand Down Expand Up @@ -2155,6 +2157,8 @@ def _signature_strip_non_python_syntax(signature):

lines = [l.encode('ascii') for l in signature.split('\n') if l]
generator = iter(lines).__next__

import tokenize
token_stream = tokenize.tokenize(generator)

text = []
Expand Down Expand Up @@ -2191,10 +2195,10 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True):
"""Private helper to parse content of '__text_signature__'
and return a Signature based on it.
"""
Parameter = cls._parameter_cls
import ast

Parameter = cls._parameter_cls
clean_signature, self_parameter = _signature_strip_non_python_syntax(s)

program = "def foo" + clean_signature + ": pass"

try:
Expand Down
Loading