Skip to content

Commit c7481a5

Browse files
committed
Add tests
1 parent ed1237a commit c7481a5

File tree

3 files changed

+15
-1
lines changed

3 files changed

+15
-1
lines changed

Lib/test/inspect_fodder.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -113,3 +113,8 @@ async def asyncf(self):
113113
# after asyncf - line 113
114114
# end of WhichComments - line 114
115115
# after WhichComments - line 115
116+
117+
# Test that getsource works on a line that includes
118+
# a closing parenthesis with the opening paren on another line
119+
(
120+
); after_closing = lambda: 1

Lib/test/test_inspect.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -557,7 +557,8 @@ def test_getclasses(self):
557557

558558
def test_getfunctions(self):
559559
functions = inspect.getmembers(mod, inspect.isfunction)
560-
self.assertEqual(functions, [('eggs', mod.eggs),
560+
self.assertEqual(functions, [('after_closing', mod.after_closing),
561+
('eggs', mod.eggs),
561562
('lobbest', mod.lobbest),
562563
('spam', mod.spam)])
563564

@@ -641,6 +642,7 @@ def test_getsource(self):
641642
self.assertSourceEqual(git.abuse, 29, 39)
642643
self.assertSourceEqual(mod.StupidGit, 21, 51)
643644
self.assertSourceEqual(mod.lobbest, 75, 76)
645+
self.assertSourceEqual(mod.after_closing, 120, 120)
644646

645647
def test_getsourcefile(self):
646648
self.assertEqual(normcase(inspect.getsourcefile(mod.spam)), modfile)

Lib/test/test_tokenize.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1100,6 +1100,13 @@ def test_newline_after_parenthesized_block_with_comment(self):
11001100
NEWLINE '\\n' (4, 1) (4, 2)
11011101
""")
11021102

1103+
def test_closing_parenthesis_from_different_line(self):
1104+
self.check_tokenize("); x", """\
1105+
OP ')' (1, 0) (1, 1)
1106+
OP ';' (1, 1) (1, 2)
1107+
NAME 'x' (1, 3) (1, 4)
1108+
""")
1109+
11031110
class GenerateTokensTest(TokenizeTest):
11041111
def check_tokenize(self, s, expected):
11051112
# Format the tokens in s in a table format.

0 commit comments

Comments (0)