org.apache.zeppelin.interpreter.InterpreterException: org.apache.zeppelin.interpreter.InterpreterException: java.io.IOException: Fail to run additional Python init file: python/zeppelin_ipyspark.py
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-11-8d96a11dffde> in <module>
48 jconf = jsc.getConf()
49 conf = SparkConf(_jvm=gateway.jvm, _jconf=jconf)
---> 50 sc = _zsc_ = SparkContext(jsc=jsc, gateway=gateway, conf=conf)
51
52 if not intp.isSpark1():
~/.cache/spark/spark-2.3.2-bin-hadoop2.7/python/lib/pyspark.zip/pyspark/context.py in __init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)
113 """
114 self._callsite = first_spark_call() or CallSite(None, None, None)
--> 115 SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
116 try:
117 self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
~/.cache/spark/spark-2.3.2-bin-hadoop2.7/python/lib/pyspark.zip/pyspark/context.py in _ensure_initialized(cls, instance, gateway, conf)
314 " created by %s at %s:%s "
315 % (currentAppName, currentMaster,
--> 316 callsite.function, callsite.file, callsite.linenum))
317 else:
318 SparkContext._active_spark_context = instance
ValueError: Cannot run multiple SparkContexts at once; existing SparkContext(app=Zeppelin, master=local) created by __init__ at <ipython-input-4-6bfd003b2498>:50
at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:76)
at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:668)
at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:577)
at org.apache.zeppelin.scheduler.Job.run(Job.java:172)
at org.apache.zeppelin.scheduler.AbstractScheduler.runJob(AbstractScheduler.java:130)
at org.apache.zeppelin.scheduler.FIFOScheduler.lambda$runJobInScheduler$0(FIFOScheduler.java:39)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.zeppelin.interpreter.InterpreterException: java.io.IOException: Fail to run additional Python init file: python/zeppelin_ipyspark.py
[identical Python traceback as above, ending in: ValueError: Cannot run multiple SparkContexts at once; existing SparkContext(app=Zeppelin, master=local) created by __init__ at <ipython-input-4-6bfd003b2498>:50]
at org.apache.zeppelin.python.IPythonInterpreter.open(IPythonInterpreter.java:117)
at org.apache.zeppelin.spark.IPySparkInterpreter.open(IPySparkInterpreter.java:66)
at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:70)
... 8 more
Caused by: java.io.IOException: Fail to run additional Python init file: python/zeppelin_ipyspark.py
[identical Python traceback as above, ending in: ValueError: Cannot run multiple SparkContexts at once; existing SparkContext(app=Zeppelin, master=local) created by __init__ at <ipython-input-4-6bfd003b2498>:50]
at org.apache.zeppelin.python.IPythonInterpreter.initPythonInterpreter(IPythonInterpreter.java:165)
at org.apache.zeppelin.python.IPythonInterpreter.open(IPythonInterpreter.java:114)
... 10 more
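
What the trace is saying: Zeppelin's IPython-backed PySpark interpreter (IPySparkInterpreter) runs the init script python/zeppelin_ipyspark.py inside an IPython kernel, and line 50 of that script constructs a SparkContext over the JVM gateway. PySpark allows only one active SparkContext per Python process, so _ensure_initialized raises the ValueError. Note the existing context was created at <ipython-input-4-...>:50, i.e. by an earlier run of the same init script in the same kernel; a leftover context from a previous (likely failed or restarted) interpreter start is still registered, and restarting the Spark interpreter or the stale IPython kernel process typically clears it.

A minimal sketch of the failure mode and the usual guard, in a plain local PySpark session rather than the Zeppelin-managed gateway (the variable names below are illustrative, not from the log):

    from pyspark import SparkConf, SparkContext

    conf = SparkConf().setAppName("Zeppelin").setMaster("local")

    sc = SparkContext(conf=conf)        # first context in this process: fine
    # SparkContext(conf=conf)           # a second constructor call raises:
    #   ValueError: Cannot run multiple SparkContexts at once; ...

    # getOrCreate() returns the already-active context instead of constructing
    # a new one, the standard way to make re-runs of init code idempotent:
    same_sc = SparkContext.getOrCreate(conf)
    assert same_sc is sc

    sc.stop()                           # stopping releases the single-context slot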