Import EvalML fails
I tried reinstalling and still get the same issue. Is there a way to import evalml without importing the demos?
Exception                                 Traceback (most recent call last)
<ipython-input-1-c86bfebb5257> in <module>
      4 import pandas as pd
      5 import numpy as np
----> 6 import evalml

~\Anaconda3\lib\site-packages\evalml\__init__.py in <module>
      4 # must import sklearn first
      5 import sklearn
----> 6 import evalml.demos
      7 import evalml.model_family
      8 import evalml.objectives

~\Anaconda3\lib\site-packages\evalml\demos\__init__.py in <module>
----> 1 from .breast_cancer import load_breast_cancer
      2 from .diabetes import load_diabetes
      3 from .fraud import load_fraud
      4 from .wine import load_wine
      5 from .churn import load_churn

~\Anaconda3\lib\site-packages\evalml\demos\breast_cancer.py in <module>
      1 import pandas as pd
----> 2 import woodwork as ww
      3 from sklearn.datasets import load_breast_cancer as load_breast_cancer_sk

~\Anaconda3\lib\site-packages\woodwork\__init__.py in <module>
      1 # flake8: noqa
      2 from .config import config
----> 3 from .datatable import DataColumn, DataTable
      4 from .type_sys import type_system
      5 from .type_sys.utils import list_logical_types, list_semantic_tags

~\Anaconda3\lib\site-packages\woodwork\datatable.py in <module>
     27 ks = import_or_none('databricks.koalas')
     28 if ks:
---> 29     ks.set_option('compute.ops_on_diff_frames', True)

~\Anaconda3\lib\site-packages\databricks\koalas\config.py in set_option(key, value)
    320     _options_dict[key].validate(value)
    321
--> 322     default_session().conf.set(_key_format(key), json.dumps(value))

~\Anaconda3\lib\site-packages\databricks\koalas\utils.py in default_session(conf)
    413     builder.config("spark.executor.allowSparkContext", False)
    414
--> 415     session = builder.getOrCreate()
    416
    417     if not should_use_legacy_ipc:

~\Anaconda3\lib\site-packages\pyspark\sql\session.py in getOrCreate(self)
    184                 sparkConf.set(key, value)
    185             # This SparkContext may be an existing one.
--> 186             sc = SparkContext.getOrCreate(sparkConf)
    187             # Do not update `SparkConf` for existing `SparkContext`, as it's shared
    188             # by all sessions.

~\Anaconda3\lib\site-packages\pyspark\context.py in getOrCreate(cls, conf)
    374     with SparkContext._lock:
    375         if SparkContext._active_spark_context is None:
--> 376             SparkContext(conf=conf or SparkConf())
    377         return SparkContext._active_spark_context

~\Anaconda3\lib\site-packages\pyspark\context.py in __init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)
    131                 " is not allowed as it is a security risk.")
    132
--> 133         SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
    134         try:
    135             self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,

~\Anaconda3\lib\site-packages\pyspark\context.py in _ensure_initialized(cls, instance, gateway, conf)
    323     with SparkContext._lock:
    324         if not SparkContext._gateway:
--> 325             SparkContext._gateway = gateway or launch_gateway(conf)
    326             SparkContext._jvm = SparkContext._gateway.jvm

~\Anaconda3\lib\site-packages\pyspark\java_gateway.py in launch_gateway(conf, popen_kwargs)
    103
    104     if not os.path.isfile(conn_info_file):
--> 105         raise Exception("Java gateway process exited before sending its port number")
    106
    107     with open(conn_info_file, "rb") as info:
Exception: Java gateway process exited before sending its port number
pd.set_option('display.max_columns', None)
pd.set_option('display.max_rows', None)
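For context on why the import fails: the traceback shows that woodwork/datatable.py imports databricks.koalas through an import_or_none helper and, when Koalas is installed, immediately calls ks.set_option('compute.ops_on_diff_frames', True). That call creates a Spark session (default_session() -> SparkContext -> launch_gateway), which needs a working Java runtime, so importing evalml crashes in environments where Koalas is installed but the Java gateway cannot start. The sketch below illustrates that chain; the body of import_or_none is an assumption about how such a helper typically behaves, not Woodwork's exact code.

import importlib

def import_or_none(name):
    # Assumed behaviour: return the module if it is importable, otherwise None.
    try:
        return importlib.import_module(name)
    except ImportError:
        return None

ks = import_or_none('databricks.koalas')
if ks:
    # With Koalas installed, this call starts a Spark session under the hood.
    # Without a working Java runtime, launching the Spark gateway fails with
    # "Java gateway process exited before sending its port number", which is
    # the exception surfaced in the traceback above.
    ks.set_option('compute.ops_on_diff_frames', True)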
Top GitHub Comments
@dsherry @SD-HussainAbbas I have filed an issue on Woodwork, and we are working to address this.
@dsherry we can go ahead and close this issue. We would need more information on how to reproduce it, as we were unable to do so on the Woodwork side.
Additionally, Woodwork treats Koalas as an optional dependency, and we test for that.
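One quick way to check whether this optional code path is even active in a given environment is to see if databricks.koalas resolves at all; if it does not, the Koalas configuration shown in the traceback never runs. The snippet below is an illustrative diagnostic, not something from the original issue.

import importlib.util

try:
    spec = importlib.util.find_spec("databricks.koalas")
except ModuleNotFoundError:
    # The parent "databricks" package is not installed at all.
    spec = None

# If spec is None, databricks.koalas is unavailable and the Koalas branch in
# woodwork/datatable.py (the one shown in the traceback) is skipped.
print("databricks.koalas installed:", spec is not None)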