158
158
else : # pragma: <3.12 cover
159
159
FSTRING_START = FSTRING_MIDDLE = FSTRING_END = - 1
160
160
161
+ if sys .version_info >= (3 , 14 ): # pragma: >=3.14 cover
162
+ TSTRING_START = tokenize .TSTRING_START
163
+ TSTRING_MIDDLE = tokenize .TSTRING_MIDDLE
164
+ TSTRING_END = tokenize .TSTRING_END
165
+ else : # pragma: <3.14 cover
166
+ TSTRING_START = TSTRING_MIDDLE = TSTRING_END = - 1
167
+
161
168
_checks = {'physical_line' : {}, 'logical_line' : {}, 'tree' : {}}
162
169
163
170
@@ -697,7 +704,12 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing,
697
704
if verbose >= 4 :
698
705
print (f"bracket depth { depth } indent to { start [1 ]} " )
699
706
# deal with implicit string concatenation
700
- elif token_type in (tokenize .STRING , tokenize .COMMENT , FSTRING_START ):
707
+ elif token_type in {
708
+ tokenize .STRING ,
709
+ tokenize .COMMENT ,
710
+ FSTRING_START ,
711
+ TSTRING_START
712
+ }:
701
713
indent_chances [start [1 ]] = str
702
714
# visual indent after assert/raise/with
703
715
elif not row and not depth and text in ["assert" , "raise" , "with" ]:
@@ -873,13 +885,17 @@ def missing_whitespace(logical_line, tokens):
873
885
brace_stack .append (text )
874
886
elif token_type == FSTRING_START : # pragma: >=3.12 cover
875
887
brace_stack .append ('f' )
888
+ elif token_type == TSTRING_START : # pragma: >=3.14 cover
889
+ brace_stack .append ('t' )
876
890
elif token_type == tokenize .NAME and text == 'lambda' :
877
891
brace_stack .append ('l' )
878
892
elif brace_stack :
879
893
if token_type == tokenize .OP and text in {']' , ')' , '}' }:
880
894
brace_stack .pop ()
881
895
elif token_type == FSTRING_END : # pragma: >=3.12 cover
882
896
brace_stack .pop ()
897
+ elif token_type == TSTRING_END : # pragma: >=3.14 cover
898
+ brace_stack .pop ()
883
899
elif (
884
900
brace_stack [- 1 ] == 'l' and
885
901
token_type == tokenize .OP and
@@ -899,6 +915,9 @@ def missing_whitespace(logical_line, tokens):
899
915
# 3.12+ fstring format specifier
900
916
elif text == ':' and brace_stack [- 2 :] == ['f' , '{' ]: # pragma: >=3.12 cover # noqa: E501
901
917
pass
918
+ # 3.14+ tstring format specifier
919
+ elif text == ':' and brace_stack [- 2 :] == ['t' , '{' ]: # pragma: >=3.14 cover # noqa: E501
920
+ pass
902
921
# tuple (and list for some reason?)
903
922
elif text == ',' and next_char in ')]' :
904
923
pass
@@ -948,7 +967,9 @@ def missing_whitespace(logical_line, tokens):
948
967
# allow keyword args or defaults: foo(bar=None).
949
968
brace_stack [- 1 :] == ['(' ] or
950
969
# allow python 3.8 fstring repr specifier
951
- brace_stack [- 2 :] == ['f' , '{' ]
970
+ brace_stack [- 2 :] == ['f' , '{' ] or
971
+ # allow python 3.14 tstring repr specifier
972
+ brace_stack [- 2 :] == ['t' , '{' ]
952
973
)
953
974
):
954
975
pass
@@ -1639,11 +1660,11 @@ def python_3000_invalid_escape_sequence(logical_line, tokens, noqa):
1639
1660
1640
1661
prefixes = []
1641
1662
for token_type , text , start , _ , _ in tokens :
1642
- if token_type in {tokenize .STRING , FSTRING_START }:
1663
+ if token_type in {tokenize .STRING , FSTRING_START , TSTRING_START }:
1643
1664
# Extract string modifiers (e.g. u or r)
1644
1665
prefixes .append (text [:text .index (text [- 1 ])].lower ())
1645
1666
1646
- if token_type in {tokenize .STRING , FSTRING_MIDDLE }:
1667
+ if token_type in {tokenize .STRING , FSTRING_MIDDLE , TSTRING_MIDDLE }:
1647
1668
if 'r' not in prefixes [- 1 ]:
1648
1669
start_line , start_col = start
1649
1670
pos = text .find ('\\ ' )
@@ -1661,7 +1682,7 @@ def python_3000_invalid_escape_sequence(logical_line, tokens, noqa):
1661
1682
)
1662
1683
pos = text .find ('\\ ' , pos + 1 )
1663
1684
1664
- if token_type in {tokenize .STRING , FSTRING_END }:
1685
+ if token_type in {tokenize .STRING , FSTRING_END , TSTRING_END }:
1665
1686
prefixes .pop ()
1666
1687
1667
1688
@@ -1859,7 +1880,7 @@ def __init__(self, filename=None, lines=None,
1859
1880
self .max_line_length = options .max_line_length
1860
1881
self .max_doc_length = options .max_doc_length
1861
1882
self .indent_size = options .indent_size
1862
- self .fstring_start = 0
1883
+ self .fstring_start = self . tstring_start = 0
1863
1884
self .multiline = False # in a multiline string?
1864
1885
self .hang_closing = options .hang_closing
1865
1886
self .indent_size = options .indent_size
@@ -1954,7 +1975,7 @@ def build_tokens_line(self):
1954
1975
continue
1955
1976
if token_type == tokenize .STRING :
1956
1977
text = mute_string (text )
1957
- elif token_type == FSTRING_MIDDLE : # pragma: >=3.12 cover
1978
+ elif token_type in { FSTRING_MIDDLE , TSTRING_MIDDLE } : # pragma: >=3.12 cover # noqa: E501
1958
1979
# fstring tokens are "unescaped" braces -- re-escape!
1959
1980
brace_count = text .count ('{' ) + text .count ('}' )
1960
1981
text = 'x' * (len (text ) + brace_count )
@@ -2046,6 +2067,8 @@ def maybe_check_physical(self, token, prev_physical):
2046
2067
2047
2068
if token .type == FSTRING_START : # pragma: >=3.12 cover
2048
2069
self .fstring_start = token .start [0 ]
2070
+ elif token .type == TSTRING_START : # pragma: >=3.14 cover
2071
+ self .tstring_start = token .start [0 ]
2049
2072
# a newline token ends a single physical line.
2050
2073
elif _is_eol_token (token ):
2051
2074
# if the file does not end with a newline, the NEWLINE
@@ -2057,7 +2080,8 @@ def maybe_check_physical(self, token, prev_physical):
2057
2080
self .check_physical (token .line )
2058
2081
elif (
2059
2082
token .type == tokenize .STRING and '\n ' in token .string or
2060
- token .type == FSTRING_END
2083
+ token .type == FSTRING_END or
2084
+ token .type == TSTRING_END
2061
2085
):
2062
2086
# Less obviously, a string that contains newlines is a
2063
2087
# multiline string, either triple-quoted or with internal
@@ -2078,6 +2102,8 @@ def maybe_check_physical(self, token, prev_physical):
2078
2102
return
2079
2103
if token .type == FSTRING_END : # pragma: >=3.12 cover
2080
2104
start = self .fstring_start
2105
+ elif token .type == TSTRING_END : # pragma: >=3.14 cover
2106
+ start = self .tstring_start
2081
2107
else :
2082
2108
start = token .start [0 ]
2083
2109
end = token .end [0 ]
0 commit comments