Commit 33594e1

Address comment
1 parent 058e61a commit 33594e1

1 file changed

python/pyspark/find_spark_home.py

Lines changed: 9 additions & 4 deletions
@@ -36,19 +36,24 @@ def is_spark_home(path):
                 (os.path.isdir(os.path.join(path, "jars")) or
                  os.path.isdir(os.path.join(path, "assembly"))))
 
-    paths = ["../", os.path.dirname(os.path.realpath(__file__))]
+    # Spark distribution can be downloaded when HADOOP_VERSION environment variable is set.
+    # We should look up this directory first, see also SPARK-32017.
+    spark_dist_dir = "spark-distribution"
+    paths = [
+        "../",  # When we're in spark/python.
+        # Two case belows are valid when the current script is called as a library.
+        os.path.join(os.path.dirname(os.path.realpath(__file__)), spark_dist_dir),
+        os.path.dirname(os.path.realpath(__file__))]
 
     # Add the path of the PySpark module if it exists
     import_error_raised = False
     from importlib.util import find_spec
     try:
-        # Spark distribution can be downloaded when HADOOP_VERSION environment variable is set.
-        # We should look up this directory first, see also SPARK-32017.
-        spark_dist_dir = "spark-distribution"
         module_home = os.path.dirname(find_spec("pyspark").origin)
         paths.append(os.path.join(module_home, spark_dist_dir))
         paths.append(module_home)
         # If we are installed in edit mode also look two dirs up
+        # Downloading different versions are not supported in edit mode.
         paths.append(os.path.join(module_home, "../../"))
     except ImportError:
         # Not pip installed no worries
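For context on what the reordered lookup does: the surrounding helper in find_spark_home.py walks these candidate paths and returns the first one that looks like a Spark installation, so placing the "spark-distribution" directory before the bare module directory makes the downloaded distribution (SPARK-32017) win. Below is a minimal, self-contained sketch of that search; it assumes the is_spark_home predicate checks for bin/spark-submit plus a jars/ or assembly/ directory (matching the context lines in the hunk above) and is only an illustration, not the exact upstream implementation.

```python
import os


def is_spark_home(path):
    # Heuristic from the hunk above: a plausible SPARK_HOME has bin/spark-submit
    # and either a jars/ or an assembly/ directory.
    return (os.path.isfile(os.path.join(path, "bin/spark-submit")) and
            (os.path.isdir(os.path.join(path, "jars")) or
             os.path.isdir(os.path.join(path, "assembly"))))


def find_spark_home_sketch():
    # Candidate order mirrors the change: the pip-installed "spark-distribution"
    # directory (populated when HADOOP_VERSION is set at install time, see
    # SPARK-32017) is tried before the plain module directory.
    module_dir = os.path.dirname(os.path.realpath(__file__))
    spark_dist_dir = "spark-distribution"
    paths = ["../",                                     # running from spark/python
             os.path.join(module_dir, spark_dist_dir),  # downloaded distribution
             module_dir]                                # plain pip install
    return next((p for p in paths if is_spark_home(p)), None)


if __name__ == "__main__":
    print(find_spark_home_sketch())
```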
