@@ -34,7 +34,7 @@ class StreamingContext(object):

    def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
        environment=None, batchSize=1024, serializer=PickleSerializer(), conf=None,
-        gateway=None, duration=None):
+        gateway=None, sparkContext=None, duration=None):
        """
        Create a new StreamingContext. At least the master and app name and duration
        should be set, either through the named parameters here or through C{conf}.
@@ -55,14 +55,18 @@ def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
        @param conf: A L{SparkConf} object setting Spark properties.
        @param gateway: Use an existing gateway and JVM, otherwise a new JVM
               will be instatiated.
-        @param duration: A L{Duration} Duration for SparkStreaming
+        @param sparkContext: L{SparkContext} object.
+        @param duration: A L{Duration} object for SparkStreaming.

        """

-        # Create the Python Sparkcontext
-        self._sc = SparkContext(master=master, appName=appName, sparkHome=sparkHome,
-            pyFiles=pyFiles, environment=environment, batchSize=batchSize,
-            serializer=serializer, conf=conf, gateway=gateway)
+        if sparkContext is None:
+            # Create the Python Sparkcontext
+            self._sc = SparkContext(master=master, appName=appName, sparkHome=sparkHome,
+                pyFiles=pyFiles, environment=environment, batchSize=batchSize,
+                serializer=serializer, conf=conf, gateway=gateway)
+        else:
+            self._sc = sparkContext

        # Start py4j callback server.
        # Callback sever is need only by SparkStreming; therefore the callback sever
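For context, here is a minimal usage sketch of what the new sparkContext parameter enables: constructing a StreamingContext on top of an already-running SparkContext instead of letting the constructor create its own. The import paths and the Seconds helper below are assumptions about this branch's module layout, not something the diff itself confirms.

# Hypothetical usage sketch; module paths and the Seconds helper are assumptions.
from pyspark import SparkContext
from pyspark.streaming.context import StreamingContext
from pyspark.streaming.duration import Seconds

# An application that already has a SparkContext for its batch jobs...
sc = SparkContext(master="local[2]", appName="ReuseContextDemo")

# ...can now hand it to StreamingContext via the new sparkContext parameter,
# so the constructor reuses this context rather than building a second one.
ssc = StreamingContext(sparkContext=sc, duration=Seconds(1))

Reusing the caller's context matters because only one SparkContext can be active at a time, so batch and streaming code sharing an application must share a single context.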