@@ -21,37 +21,34 @@ import org.apache.hadoop.conf.Configuration
2121import org .apache .hadoop .fs .{FileSystem , Path }
2222import org .apache .spark .SparkConf
2323import org .apache .spark .sql .SparkSession
24- import za .co .absa .spark .commons .test .YarnSparkConfiguration ._
2524
26- import scala .collection .JavaConverters .iterableAsScalaIterableConverter
2725
/**
 * Spark test configuration that runs tests against a YARN cluster in client mode.
 *
 * @param confDir     directory containing the Hadoop/YARN client configuration
 *                    (e.g. core-site.xml, yarn-site.xml) — presumably; TODO confirm
 * @param distJarsDir HDFS (or other FileSystem) path holding the distributed Spark jars
 *                    to ship via `spark.yarn.jars`
 */
class YarnSparkConfiguration(confDir: String, distJarsDir: String) extends SparkTestConfig {

  override def master: String = "yarn"

  override def appName: String = super.appName + " - Yarn"

  /**
   * Extends the base builder with YARN-specific settings: the Hadoop configuration
   * translated into `spark.hadoop.*` entries, the jar list, and client deploy mode.
   */
  override protected def builder: SparkSession.Builder = {
    super.builder
      // Companion calls are fully qualified because the companion's members are no
      // longer wildcard-imported into this file.
      .config(new SparkConf().setAll(YarnSparkConfiguration.getHadoopConfigurationForSpark(confDir)))
      .config("spark.yarn.jars", dependencies)
      .config("spark.deploy.mode", "client")
  }

  /**
   * Builds the comma-separated jar list for `spark.yarn.jars`: all jars found under
   * [[distJarsDir]], plus classpath jars matching "absa", plus the current project's
   * own built jars.
   */
  protected def dependencies: String = {
    // get a list of all dist jars
    val distJars = FileSystem
      .get(YarnSparkConfiguration.getHadoopConfiguration(confDir))
      .listStatus(new Path(distJarsDir))
      .map(_.getPath)
    val localJars = YarnSparkConfiguration.getDepsFromClassPath("absa")
    val currentJars = YarnSparkConfiguration.getCurrentProjectJars
    (distJars ++ localJars ++ currentJars).mkString(",")
  }

}
5350
54- object YarnSparkConfiguration {
51+ object YarnSparkConfiguration extends JavaConvertersWrapper {
5552
5653 /**
5754 * Gets a Hadoop configuration object from the specified hadoopConfDir parameter
@@ -73,7 +70,7 @@ object YarnSparkConfiguration {
7370 * @param hadoopConf Hadoop Configuration object to be converted into Spark configs
7471 */
7572 def hadoopConfToSparkMap (hadoopConf : Configuration ): Map [String , String ] = {
76- hadoopConf. asScala.map(entry => (s " spark.hadoop. ${entry.getKey}" , entry.getValue)).toMap
73+ asScala(hadoopConf) .map(entry => (s " spark.hadoop. ${entry.getKey}" , entry.getValue)).toMap
7774 }
7875
7976 /**
@@ -88,7 +85,7 @@ object YarnSparkConfiguration {
8885 */
8986 def getDepsFromClassPath (inclPattern : String ): Seq [String ] = {
9087 val cl = this .getClass.getClassLoader
91- cl.asInstanceOf [java.net.URLClassLoader ].getURLs.filter(c => c.toString.contains(inclPattern)).map(_.toString())
88+ cl.asInstanceOf [java.net.URLClassLoader ].getURLs.toSeq. filter(c => c.toString.contains(inclPattern)).map(_.toString())
9289 }
9390
9491 /**
@@ -98,6 +95,7 @@ object YarnSparkConfiguration {
9895 val targetDir = new File (s " ${System .getProperty(" user.dir" )}/target " )
9996 targetDir
10097 .listFiles()
98+ .toSeq
10199 .filter(f => f.getName.split(" \\ ." ).last.toLowerCase() == " jar" && f.getName.contains(" original" ))
102100 .map(_.getAbsolutePath)
103101 }
0 commit comments