Skip to content

Commit bfb9f9f

Browse files
Committed as:
[SPARK-2627] keep up with the PEP 8 fixes
1 parent 9da347f commit bfb9f9f

File tree

3 files changed

+13
-10
lines changed

3 files changed

+13
-10
lines changed

python/pyspark/daemon.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -138,8 +138,7 @@ def handle_sigchld(*args):
138138
try:
139139
os.kill(worker_pid, signal.SIGKILL)
140140
except OSError:
141-
pass # process already died
142-
141+
pass # process already died
143142

144143
if listen_sock in ready_fds:
145144
sock, addr = listen_sock.accept()

python/pyspark/serializers.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -293,6 +293,7 @@ def _hack_namedtuple(cls):
293293
""" Make class generated by namedtuple picklable """
294294
name = cls.__name__
295295
fields = cls._fields
296+
296297
def __reduce__(self):
297298
return (_restore, (name, fields, tuple(self)))
298299
cls.__reduce__ = __reduce__
@@ -301,11 +302,11 @@ def __reduce__(self):
301302

302303
def _hijack_namedtuple():
303304
""" Hack namedtuple() to make it picklable """
304-
global _old_namedtuple # or it will put in closure
305+
global _old_namedtuple # or it will put in closure
305306

306307
def _copy_func(f):
307308
return types.FunctionType(f.func_code, f.func_globals, f.func_name,
308-
f.func_defaults, f.func_closure)
309+
f.func_defaults, f.func_closure)
309310

310311
_old_namedtuple = _copy_func(collections.namedtuple)
311312

@@ -323,9 +324,9 @@ def namedtuple(name, fields, verbose=False, rename=False):
323324
# so only hack those in __main__ module
324325
for n, o in sys.modules["__main__"].__dict__.iteritems():
325326
if (type(o) is type and o.__base__ is tuple
326-
and hasattr(o, "_fields")
327-
and "__reduce__" not in o.__dict__):
328-
_hack_namedtuple(o) # hack inplace
327+
and hasattr(o, "_fields")
328+
and "__reduce__" not in o.__dict__):
329+
_hack_namedtuple(o) # hack inplace
329330

330331

331332
_hijack_namedtuple()

python/pyspark/tests.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -840,12 +840,15 @@ def test_termination_sigterm(self):
840840

841841

842842
class TestWorker(PySparkTestCase):
843+
843844
def test_cancel_task(self):
844845
temp = tempfile.NamedTemporaryFile(delete=True)
845846
temp.close()
846847
path = temp.name
848+
847849
def sleep(x):
848-
import os, time
850+
import os
851+
import time
849852
with open(path, 'w') as f:
850853
f.write("%d %d" % (os.getppid(), os.getpid()))
851854
time.sleep(100)
@@ -875,7 +878,7 @@ def run():
875878
os.kill(worker_pid, 0)
876879
time.sleep(0.1)
877880
except OSError:
878-
break # worker was killed
881+
break # worker was killed
879882
else:
880883
self.fail("worker has not been killed after 5 seconds")
881884

@@ -885,7 +888,7 @@ def run():
885888
self.fail("daemon had been killed")
886889

887890
def test_fd_leak(self):
888-
N = 1100 # fd limit is 1024 by default
891+
N = 1100 # fd limit is 1024 by default
889892
rdd = self.sc.parallelize(range(N), N)
890893
self.assertEquals(N, rdd.count())
891894

0 commit comments

Comments (0)