@@ -36,19 +36,24 @@ def is_spark_home(path):
                 (os.path.isdir(os.path.join(path, "jars")) or
                  os.path.isdir(os.path.join(path, "assembly"))))
 
-    paths = ["../", os.path.dirname(os.path.realpath(__file__))]
+    # Spark distribution can be downloaded when HADOOP_VERSION environment variable is set.
+    # We should look up this directory first, see also SPARK-32017.
+    spark_dist_dir = "spark-distribution"
+    paths = [
+        "../",  # When we're in spark/python.
+        # The two cases below are valid when the current script is called as a library.
+        os.path.join(os.path.dirname(os.path.realpath(__file__)), spark_dist_dir),
+        os.path.dirname(os.path.realpath(__file__))]
 
     # Add the path of the PySpark module if it exists
     import_error_raised = False
     from importlib.util import find_spec
     try:
-        # Spark distribution can be downloaded when HADOOP_VERSION environment variable is set.
-        # We should look up this directory first, see also SPARK-32017.
-        spark_dist_dir = "spark-distribution"
 
         module_home = os.path.dirname(find_spec("pyspark").origin)
         paths.append(os.path.join(module_home, spark_dist_dir))
         paths.append(module_home)
         # If we are installed in edit mode also look two dirs up
+        # Downloading different versions is not supported in edit mode.
         paths.append(os.path.join(module_home, "../../"))
     except ImportError:
         # Not pip installed no worries
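Note: for context on why list order matters in this change, below is a minimal, hypothetical sketch of how a candidate list like `paths` is typically consumed. The helper name `resolve_spark_home` and the `__main__` block are illustrative assumptions, not part of this diff; the idea is simply that the first candidate passing `is_spark_home` wins, so appending the `spark-distribution` directory ahead of the plain module directory makes a downloaded distribution take precedence.

import os
import sys


def is_spark_home(path):
    """Takes a path and returns true if the provided path could be a reasonable SPARK_HOME"""
    return (os.path.isfile(os.path.join(path, "bin/spark-submit")) and
            (os.path.isdir(os.path.join(path, "jars")) or
             os.path.isdir(os.path.join(path, "assembly"))))


def resolve_spark_home(paths):
    # Hypothetical helper mirroring the lookup: normalize each candidate
    # and return the first one that looks like a SPARK_HOME. List order
    # matters, which is why the diff places "spark-distribution" ahead of
    # the bare module directory.
    for candidate in (os.path.abspath(p) for p in paths):
        if is_spark_home(candidate):
            return candidate
    return None


if __name__ == "__main__":
    home = resolve_spark_home(["../", os.path.dirname(os.path.realpath(__file__))])
    print(home if home else "Could not find valid SPARK_HOME", file=sys.stderr)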