I'm using PySpark inside a Python program, as follows:
from pyspark import SparkConf, SparkContext

conf = (SparkConf()
        .setMaster("spark://192.168.1.168:7077")
        .setAppName("My app")
        .set("spark.executor.memory", "1g"))
sc = SparkContext(conf=conf)
It fails with the following error:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-8-82cc76f32223> in <module>()
----> 1 sc = SparkContext(conf = conf)
/data/ENV/flowadmin/lib/python3.5/site-packages/pyspark/context.py in __init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)
116 try:
117 self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
--> 118 conf, jsc, profiler_cls)
119 except:
120 # If an error occurs, clean up in order to allow future SparkContext creation:
/data/ENV/flowadmin/lib/python3.5/site-packages/pyspark/context.py in _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, jsc, profiler_cls)
186 self._accumulatorServer = accumulators._start_update_server()
187 (host, port) = self._accumulatorServer.server_address
--> 188 self._javaAccumulator = self._jvm.PythonAccumulatorV2(host, port)
189 self._jsc.sc().register(self._javaAccumulator)
190
TypeError: 'JavaPackage' object is not callable
How can I fix this?
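For context on what I've tried: from what I've read, this error at self._jvm.PythonAccumulatorV2(host, port) usually means the JVM gateway cannot resolve that class, so the name stays a plain 'JavaPackage'. A commonly reported cause is a version mismatch between the pip-installed pyspark and the Spark installation the driver or master is actually running (PythonAccumulatorV2 only exists in newer Spark releases). A minimal diagnostic sketch, assuming the standard pyspark package layout:

# Hypothetical check: print the pip-installed pyspark version and the
# SPARK_HOME the driver picks up. If the version here differs from the
# version the standalone master at 192.168.1.168 reports (visible on its
# web UI, usually port 8080), the two need to be aligned.
import os
import pyspark

print("pyspark (pip) version:", pyspark.__version__)
print("SPARK_HOME:", os.environ.get("SPARK_HOME", "<not set>"))

Is the version mismatch the right diagnosis here, and if so, is upgrading/downgrading the pip package the correct fix?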