@@ -31,19 +31,23 @@ def parse_args():
 
     # Should be enough that any connection leak will exhaust available file
     # descriptors.
-    parser.add_option("-n", "--nrequests", type="int",
+    parser.add_option(
+        "-n", "--nrequests", type="int",
         dest="nrequests", default=50 * 1000,
         help="Number of times to GET the URL, in total")
 
-    parser.add_option("-t", "--nthreads", type="int",
+    parser.add_option(
+        "-t", "--nthreads", type="int",
         dest="nthreads", default=100,
         help="Number of threads with mode 'parallel'")
 
-    parser.add_option("-q", "--quiet",
+    parser.add_option(
+        "-q", "--quiet",
         action="store_false", dest="verbose", default=True,
         help="Don't print status messages to stdout")
 
-    parser.add_option("-c", "--continue",
+    parser.add_option(
+        "-c", "--continue",
         action="store_true", dest="continue_", default=False,
         help="Continue after HTTP errors")
 
@@ -65,7 +69,7 @@ def get(url):
 
 
 class URLGetterThread(threading.Thread):
-    # class variables
+    # Class variables.
     counter_lock = threading.Lock()
     counter = 0
 
@@ -107,12 +111,11 @@ def main(options, mode, url):
     nrequests_per_thread = options.nrequests / options.nthreads
 
     if options.verbose:
-        print('Getting %s %s times total in %s threads, '
+        print(
+            'Getting %s %s times total in %s threads, '
               '%s times per thread' % (
-            url, nrequests_per_thread * options.nthreads, options.nthreads,
-            nrequests_per_thread
-        ))
-
+                url, nrequests_per_thread * options.nthreads,
+                options.nthreads, nrequests_per_thread))
     threads = [
         URLGetterThread(options, url, nrequests_per_thread)
         for _ in range(options.nthreads)