@@ -147,7 +147,7 @@ def _parse_requirement_marker(
     else:
         tokenizer.read()
 
-    marker = _parse_marker_expr(tokenizer)
+    marker = _parse_marker(tokenizer)
     tokenizer.consume("WS")
 
     return marker
@@ -234,12 +234,12 @@ def _parse_version_many(tokenizer: Tokenizer) -> str:
 # Recursive descent parser for marker expression
 # --------------------------------------------------------------------------------------
 def parse_marker(source: str) -> MarkerList:
-    return _parse_marker_expr(Tokenizer(source, rules=DEFAULT_RULES))
+    return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
 
 
-def _parse_marker_expr(tokenizer: Tokenizer) -> MarkerList:
+def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
     """
-    marker_expr = marker_atom (BOOLOP marker_atom)+
+    marker = marker_atom (BOOLOP marker_atom)+
     """
     expression = [_parse_marker_atom(tokenizer)]
     while tokenizer.check("BOOLOP"):
@@ -251,15 +251,15 @@ def _parse_marker_expr(tokenizer: Tokenizer) -> MarkerList:
 
 def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
     """
-    marker_atom = WS? LEFT_PARENTHESIS WS? marker_expr WS? RIGHT_PARENTHESIS WS?
+    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                   | WS? marker_item WS?
     """
 
     tokenizer.consume("WS")
     if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"):
            tokenizer.consume("WS")
-            marker: MarkerAtom = _parse_marker_expr(tokenizer)
+            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
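
The public entry point is untouched by the rename apart from the helper it delegates to. A minimal usage sketch of the parser after this change, assuming the code lives in packaging/_parser.py and that MarkerList is the nested list/tuple structure built by the recursive descent parser (neither detail is shown in this diff):

    # Sketch only: module path and result shape are assumptions, not confirmed by the diff.
    from packaging._parser import parse_marker

    # Parse a PEP 508 environment marker string into the parser's MarkerList structure.
    tree = parse_marker('python_version >= "3.8" and os_name == "posix"')
    print(tree)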