
Commit bb7ccf3

Ken Takagiwa authored and committed
remove unused import in python
1 parent 224fc5e commit bb7ccf3

File tree: 6 files changed (+63, -38 lines)


python/pyspark/streaming/context.py

Lines changed: 0 additions & 9 deletions
@@ -15,15 +15,6 @@
 # limitations under the License.
 #
 
-import os
-import shutil
-import sys
-from threading import Lock
-from tempfile import NamedTemporaryFile
-
-from pyspark import accumulators
-from pyspark.accumulators import Accumulator
-from pyspark.broadcast import Broadcast
 from pyspark.conf import SparkConf
 from pyspark.files import SparkFiles
 from pyspark.java_gateway import launch_gateway
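
Aside (not part of this commit): the deletions above are the modules context.py no longer references. In practice a checker such as pyflakes flags unused imports directly; the hand-rolled helper below, whose name and the file path are only illustrative, just lists the names a file imports, which is the starting point for that kind of check.

import ast

def imported_names(path):
    # Collect every name bound by `import ...` / `from ... import ...` statements.
    with open(path) as f:
        tree = ast.parse(f.read())
    names = []
    for node in ast.walk(tree):
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            names.extend(alias.asname or alias.name for alias in node.names)
    return names

print(imported_names("python/pyspark/streaming/context.py"))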

python/pyspark/streaming/dstream.py

Lines changed: 4 additions & 26 deletions
@@ -1,28 +1,8 @@
-from base64 import standard_b64encode as b64enc
-import copy
 from collections import defaultdict
-from collections import namedtuple
 from itertools import chain, ifilter, imap
-import operator
-import os
-import sys
-import shlex
-import traceback
-from subprocess import Popen, PIPE
-from tempfile import NamedTemporaryFile
-from threading import Thread
-import warnings
-import heapq
-from random import Random
-
-from pyspark.serializers import NoOpSerializer, CartesianDeserializer, \
-    BatchedSerializer, CloudPickleSerializer, PairDeserializer, pack_long
-from pyspark.join import python_join, python_left_outer_join, \
-    python_right_outer_join, python_cogroup
-from pyspark.statcounter import StatCounter
-from pyspark.rddsampler import RDDSampler
-from pyspark.storagelevel import StorageLevel
-#from pyspark.resultiterable import ResultIterable
+
+from pyspark.serializers import NoOpSerializer,\
+    BatchedSerializer, CloudPickleSerializer, pack_long
 from pyspark.rdd import _JavaStackTrace
 
 from py4j.java_collections import ListConverter, MapConverter

@@ -47,15 +27,14 @@ def generatedRDDs(self):
     def print_(self):
         """
         """
-        # print is a resrved name of Python. We cannot give print to function name
+        # print is a reserved name of Python. We cannot give print to function name
         getattr(self._jdstream, "print")()
 
     def pyprint(self):
         """
         """
         self._jdstream.pyprint()
 
-
     def filter(self, f):
         """
         """

@@ -140,7 +119,6 @@ def add_shuffle_key(split, iterator):
         keyed._bypass_serializer = True
         with _JavaStackTrace(self.ctx) as st:
             #JavaDStream
-            #pairRDD = self.ctx._jvm.PairwiseDStream(keyed._jdstream.dstream()).asJavaPairRDD()
             pairDStream = self.ctx._jvm.PairwiseDStream(keyed._jdstream.dstream()).asJavaPairDStream()
             partitioner = self.ctx._jvm.PythonPartitioner(numPartitions,
                                                           id(partitionFunc))
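
Aside (not part of this commit): the comment kept in the second hunk is the reason print_() calls the JVM-side method through getattr — print is a keyword in Python 2, so self._jdstream.print() would not even parse. A minimal illustration with a stand-in object; the stub class is invented for this example and is not the real Py4J proxy.

class JavaDStreamStub(object):
    # Hypothetical stand-in for the Py4J proxy returned by the JVM gateway.
    def __getattr__(self, name):
        if name == "print":
            return lambda: "printed"   # pretend to invoke the JVM-side print()
        raise AttributeError(name)

stub = JavaDStreamStub()
print(getattr(stub, "print")())        # works on Python 2 and 3, unlike stub.print() on Python 2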

python/pyspark/streaming/duration.py

Lines changed: 16 additions & 1 deletion
@@ -1,4 +1,19 @@
-__author__ = 'ktakagiw'
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 from pyspark.streaming import utils
 
python/pyspark/streaming/jtime.py

Lines changed: 23 additions & 1 deletion
@@ -1,8 +1,30 @@
-__author__ = 'ktakagiw'
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 from pyspark.streaming import utils
 from pyspark.streaming.duration import Duration
 
+"""
+The name of this file, time is not good naming for python
+because if we do import time when we want to use native python time package, it does
+not import python time package.
+"""
+
+
 class Time(object):
     """
     Time for Spark Streaming application. Used to set Time
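
Aside (not part of this commit): the new module docstring is about module shadowing — if this file were named time.py, a bare `import time` inside the package could pick up the local module instead of the standard library under Python 2's implicit relative imports. The usual Python 2 mitigation is sketched below as an assumption, not as something this commit does.

from __future__ import absolute_import  # make `import time` ignore same-package modules

import time
print(time.time())  # reliably the standard-library time module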

python/pyspark/streaming/pyprint.py

Lines changed: 19 additions & 0 deletions
@@ -1,5 +1,24 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
 import sys
 from itertools import chain
+
 from pyspark.serializers import PickleSerializer
 
 def collect(binary_file_path):
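
Aside (not part of this commit): pyprint.py imports PickleSerializer for the collect() helper whose signature closes the hunk above. A rough sketch of what such a helper typically does — streaming pickled records back out of a temporary binary file — is given below; the body, file format, and call sites are assumptions, not the committed implementation.

from pyspark.serializers import PickleSerializer

def collect(binary_file_path):
    # Deserialize a stream of pickled records written to a temp file by the JVM side.
    dse = PickleSerializer()
    with open(binary_file_path, 'rb') as tempFile:
        for item in dse.load_stream(tempFile):
            yield item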

streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ class PythonDStream[T: ClassTag](
       case None => None
     }
   }
-
+
   val asJavaDStream = JavaDStream.fromDStream(this)
 
   /**

0 commit comments
