Skip to content

Commit 2c2c980

Browse files
committed
gh-110259: Fix f-strings with multiline expressions and format specs
Signed-off-by: Pablo Galindo <pablogsal@gmail.com>
1 parent 8c07137 commit 2c2c980

File tree

5 files changed

+90
-9
lines changed

5 files changed

+90
-9
lines changed

Lib/ast.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1270,13 +1270,15 @@ def visit_JoinedStr(self, node):
12701270
quote_type = quote_types[0]
12711271
self.write(f"{quote_type}{value}{quote_type}")
12721272

1273-
def _write_fstring_inner(self, node):
1273+
def _write_fstring_inner(self, node, scape_newlines=False):
12741274
if isinstance(node, JoinedStr):
12751275
# for both the f-string itself, and format_spec
12761276
for value in node.values:
1277-
self._write_fstring_inner(value)
1277+
self._write_fstring_inner(value, scape_newlines=scape_newlines)
12781278
elif isinstance(node, Constant) and isinstance(node.value, str):
12791279
value = node.value.replace("{", "{{").replace("}", "}}")
1280+
if scape_newlines:
1281+
value = value.replace("\n", "\\n")
12801282
self.write(value)
12811283
elif isinstance(node, FormattedValue):
12821284
self.visit_FormattedValue(node)
@@ -1299,7 +1301,10 @@ def unparse_inner(inner):
12991301
self.write(f"!{chr(node.conversion)}")
13001302
if node.format_spec:
13011303
self.write(":")
1302-
self._write_fstring_inner(node.format_spec)
1304+
self._write_fstring_inner(
1305+
node.format_spec,
1306+
scape_newlines=True
1307+
)
13031308

13041309
def visit_Name(self, node):
13051310
self.write(node.id)

Lib/test/test_tokenize.py

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -566,6 +566,38 @@ def test_string(self):
566566
OP '=' (3, 0) (3, 1)
567567
OP '}' (3, 1) (3, 2)
568568
FSTRING_END "'''" (3, 2) (3, 5)
569+
""")
570+
self.check_tokenize("""\
571+
f'''__{
572+
x:a
573+
}__'''""", """\
574+
FSTRING_START "f'''" (1, 0) (1, 4)
575+
FSTRING_MIDDLE '__' (1, 4) (1, 6)
576+
OP '{' (1, 6) (1, 7)
577+
NL '\\n' (1, 7) (1, 8)
578+
NAME 'x' (2, 4) (2, 5)
579+
OP ':' (2, 5) (2, 6)
580+
FSTRING_MIDDLE 'a' (2, 6) (2, 7)
581+
NL '\\n' (2, 7) (2, 8)
582+
OP '}' (3, 0) (3, 1)
583+
FSTRING_MIDDLE '__' (3, 1) (3, 3)
584+
FSTRING_END "'''" (3, 3) (3, 6)
585+
""")
586+
self.check_tokenize("""\
587+
f'__{
588+
x:d
589+
}__'""", """\
590+
FSTRING_START "f'" (1, 0) (1, 2)
591+
FSTRING_MIDDLE '__' (1, 2) (1, 4)
592+
OP '{' (1, 4) (1, 5)
593+
NL '\\n' (1, 5) (1, 6)
594+
NAME 'x' (2, 4) (2, 5)
595+
OP ':' (2, 5) (2, 6)
596+
FSTRING_MIDDLE 'd' (2, 6) (2, 7)
597+
NL '\\n' (2, 7) (2, 8)
598+
OP '}' (3, 0) (3, 1)
599+
FSTRING_MIDDLE '__' (3, 1) (3, 3)
600+
FSTRING_END "'" (3, 3) (3, 4)
569601
""")
570602

571603
def test_function(self):
@@ -2277,6 +2309,35 @@ def test_string(self):
22772309
FSTRING_START \'f"\' (1, 0) (1, 2)
22782310
FSTRING_MIDDLE 'hola\\\\\\\\\\\\r\\\\ndfgf' (1, 2) (1, 16)
22792311
FSTRING_END \'"\' (1, 16) (1, 17)
2312+
""")
2313+
2314+
self.check_tokenize("""\
2315+
f'''__{
2316+
x:a
2317+
}__'''""", """\
2318+
FSTRING_START "f'''" (1, 0) (1, 4)
2319+
FSTRING_MIDDLE '__' (1, 4) (1, 6)
2320+
LBRACE '{' (1, 6) (1, 7)
2321+
NAME 'x' (2, 4) (2, 5)
2322+
COLON ':' (2, 5) (2, 6)
2323+
FSTRING_MIDDLE 'a' (2, 6) (2, 7)
2324+
RBRACE '}' (3, 0) (3, 1)
2325+
FSTRING_MIDDLE '__' (3, 1) (3, 3)
2326+
FSTRING_END "'''" (3, 3) (3, 6)
2327+
""")
2328+
self.check_tokenize("""\
2329+
f'__{
2330+
x:d
2331+
}__'""", """\
2332+
FSTRING_START "f'" (1, 0) (1, 2)
2333+
FSTRING_MIDDLE '__' (1, 2) (1, 4)
2334+
LBRACE '{' (1, 4) (1, 5)
2335+
NAME 'x' (2, 4) (2, 5)
2336+
COLON ':' (2, 5) (2, 6)
2337+
FSTRING_MIDDLE 'd' (2, 6) (2, 7)
2338+
RBRACE '}' (3, 0) (3, 1)
2339+
FSTRING_MIDDLE '__' (3, 1) (3, 3)
2340+
FSTRING_END "'" (3, 3) (3, 4)
22802341
""")
22812342

22822343
def test_function(self):

Lib/test/test_unparse.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -730,7 +730,8 @@ class DirectoryTestCase(ASTTestCase):
730730
test_directories = (lib_dir, lib_dir / "test")
731731
run_always_files = {"test_grammar.py", "test_syntax.py", "test_compile.py",
732732
"test_ast.py", "test_asdl_parser.py", "test_fstring.py",
733-
"test_patma.py", "test_type_alias.py", "test_type_params.py"}
733+
"test_patma.py", "test_type_alias.py", "test_type_params.py",
734+
"test_tokenize.py"}
734735

735736
_files_to_test = None
736737

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
Correctly identify the format spec in f-strings (with single or triple
2+
quotes) that have multiple lines in the expression part and include a
3+
formatting spec. Patch by Pablo Galindo.

Parser/tokenizer.c

Lines changed: 16 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2690,6 +2690,22 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct
26902690
if (tok->done == E_ERROR) {
26912691
return MAKE_TOKEN(ERRORTOKEN);
26922692
}
2693+
int in_format_spec = (
2694+
current_tok->last_expr_end != -1
2695+
&&
2696+
INSIDE_FSTRING_EXPR(current_tok)
2697+
);
2698+
2699+
// If we are in a format spec and we found a newline,
2700+
// it means that the format spec ends here and we should
2701+
// return to the regular mode.
2702+
if (in_format_spec && c == '\n') {
2703+
tok_backup(tok, c);
2704+
TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
2705+
p_start = tok->start;
2706+
p_end = tok->cur;
2707+
return MAKE_TOKEN(FSTRING_MIDDLE);
2708+
}
26932709
if (c == EOF || (current_tok->f_string_quote_size == 1 && c == '\n')) {
26942710
if (tok->decoding_erred) {
26952711
return MAKE_TOKEN(ERRORTOKEN);
@@ -2726,11 +2742,6 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct
27262742
end_quote_size = 0;
27272743
}
27282744

2729-
int in_format_spec = (
2730-
current_tok->last_expr_end != -1
2731-
&&
2732-
INSIDE_FSTRING_EXPR(current_tok)
2733-
);
27342745
if (c == '{') {
27352746
int peek = tok_nextc(tok);
27362747
if (peek != '{' || in_format_spec) {

0 commit comments

Comments
 (0)