@@ -46,7 +46,7 @@ def __init__(self, fromdir, todir, replacements=None):
         for key, val in (replacements or {}).items():
             self.token_replacements[key] = val
 
-    def match(self, filepath):
+    def _match(self, filepath):
         """Determines if a Rule matches a given filepath and if so
         returns a higher comparable value if the match is more specific.
         """
@@ -70,15 +70,15 @@ def unasync_file(self, filepath):
                 encoding, _ = std_tokenize.detect_encoding(f.readline)
                 write_kwargs["encoding"] = encoding
                 f.seek(0)
-            tokens = tokenize(f)
-            tokens = self.unasync_tokens(tokens)
-            result = untokenize(tokens)
+            tokens = _tokenize(f)
+            tokens = self._unasync_tokens(tokens)
+            result = _untokenize(tokens)
             outfilepath = filepath.replace(self.fromdir, self.todir)
-            makedirs_existok(os.path.dirname(outfilepath))
+            _makedirs_existok(os.path.dirname(outfilepath))
             with open(outfilepath, "w", **write_kwargs) as f:
                 print(result, file=f, end="")
 
-    def unasync_tokens(self, tokens):
+    def _unasync_tokens(self, tokens):
         # TODO __await__, ...?
         used_space = None
         for space, toknum, tokval in tokens:
@@ -90,16 +90,16 @@ def unasync_tokens(self, tokens):
                 used_space = space
             else:
                 if toknum == std_tokenize.NAME:
-                    tokval = self.unasync_name(tokval)
+                    tokval = self._unasync_name(tokval)
                 elif toknum == std_tokenize.STRING:
                     left_quote, name, right_quote = tokval[0], tokval[1:-1], tokval[-1]
-                    tokval = left_quote + self.unasync_name(name) + right_quote
+                    tokval = left_quote + self._unasync_name(name) + right_quote
                 if used_space is None:
                     used_space = space
                 yield (used_space, tokval)
                 used_space = None
 
-    def unasync_name(self, name):
+    def _unasync_name(self, name):
         if name in self.token_replacements:
             return self.token_replacements[name]
         # Convert classes prefixed with 'Async' into 'Sync'
@@ -111,7 +111,7 @@ def unasync_name(self, name):
 Token = collections.namedtuple("Token", ["type", "string", "start", "end", "line"])
 
 
-def get_tokens(f):
+def _get_tokens(f):
     if sys.version_info[0] == 2:
         for tok in std_tokenize.generate_tokens(f.readline):
             type_, string, start, end, line = tok
@@ -123,9 +123,9 @@ def get_tokens(f):
             yield tok
 
 
-def tokenize(f):
+def _tokenize(f):
     last_end = (1, 0)
-    for tok in get_tokens(f):
+    for tok in _get_tokens(f):
         if last_end[0] < tok.start[0]:
             yield ("", std_tokenize.STRING, " \\\n")
             last_end = (tok.start[0], 0)
@@ -141,11 +141,11 @@ def tokenize(f):
             last_end = (tok.end[0] + 1, 0)
 
 
-def untokenize(tokens):
+def _untokenize(tokens):
     return "".join(space + tokval for space, tokval in tokens)
 
 
-def makedirs_existok(dir):
+def _makedirs_existok(dir):
     try:
         os.makedirs(dir)
     except OSError as e:
@@ -184,7 +184,7 @@ def run(self):
             found_weight = None
 
             for rule in rules:
-                weight = rule.match(f)
+                weight = rule._match(f)
                 if weight and (found_weight is None or weight > found_weight):
                     found_rule = rule
                     found_weight = weight
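Taken together, these hunks prefix an underscore onto the module-level helpers (_get_tokens, _tokenize, _untokenize, _makedirs_existok) and the Rule internals (_match, _unasync_tokens, _unasync_name), leaving Rule construction and unasync_file as the public surface. Below is a minimal sketch of driving that surface after the rename; the import name, directory layout, and replacement mapping are illustrative assumptions, not part of this diff.

import os

import unasync  # import name assumed for the module patched above

# Hypothetical source/target trees and token mapping, for illustration only.
rule = unasync.Rule(
    fromdir="/ahip/",
    todir="/hip/",
    replacements={"AsyncClient": "Client"},
)

for dirpath, _, filenames in os.walk("src/ahip"):
    for filename in filenames:
        if filename.endswith(".py"):
            # unasync_file() stays public in this diff: it tokenizes the
            # source, rewrites async-flavored names, and writes the result
            # under todir. The private _match() weight check is now reserved
            # for the build command's own rule selection.
            rule.unasync_file(os.path.join(dirpath, filename))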