From 0f5f49f7518a463d65d7105156789aa96ce87d2e Mon Sep 17 00:00:00 2001
From: Matt Belhorn <matt.belhorn@gmail.com>
Date: Thu, 18 Oct 2018 15:45:11 -0400
Subject: [PATCH] spark: set JAVA_HOME for hadoop CLASSPATH fetching (#9439)

Sets `JAVA_HOME` in the environment of the `hadoop` subprocess
invoked during `spark.setup_environment`, so that the `hadoop
classpath` call can locate a JVM.
---
 var/spack/repos/builtin/packages/spark/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/spark/package.py b/var/spack/repos/builtin/packages/spark/package.py
index 2c685ead63..4ccfbbf4cb 100644
--- a/var/spack/repos/builtin/packages/spark/package.py
+++ b/var/spack/repos/builtin/packages/spark/package.py
@@ -49,6 +49,7 @@ def install_dir(dirname):
     @when('+hadoop')
     def setup_environment(self, spack_env, run_env):
         hadoop = self.spec['hadoop'].command
+        hadoop.add_default_env('JAVA_HOME', self.spec['java'].home)
         hadoop_classpath = hadoop('classpath', output=str)
 
         # Remove whitespaces, as they can compromise syntax in
-- 
GitLab