From 7d920d12065c68759b1856949e096a2ca8e0486c Mon Sep 17 00:00:00 2001
From: skaarthik
Date: Mon, 10 Oct 2016 19:11:55 -0700
Subject: [PATCH] fixing the issue in YARN clusters introduced during Spark 2.0
 upgrade

---
 .../Configuration/ConfigurationService.cs                  | 4 +++-
 scala/src/main/org/apache/spark/api/csharp/CSharpRDD.scala | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/csharp/Adapter/Microsoft.Spark.CSharp/Configuration/ConfigurationService.cs b/csharp/Adapter/Microsoft.Spark.CSharp/Configuration/ConfigurationService.cs
index 1e8abba..cf63039 100644
--- a/csharp/Adapter/Microsoft.Spark.CSharp/Configuration/ConfigurationService.cs
+++ b/csharp/Adapter/Microsoft.Spark.CSharp/Configuration/ConfigurationService.cs
@@ -65,7 +65,9 @@ namespace Microsoft.Spark.CSharp.Configuration
                 configuration = new SparkCLRConfiguration(appConfig);
                 runMode = RunMode.CLUSTER;
             }
-            else if (sparkMaster.Equals("yarn-client", StringComparison.OrdinalIgnoreCase) || sparkMaster.Equals("yarn-cluster", StringComparison.OrdinalIgnoreCase))
+            else if (sparkMaster.Equals("yarn-cluster", StringComparison.OrdinalIgnoreCase) ||
+                sparkMaster.Equals("yarn-client", StringComparison.OrdinalIgnoreCase) ||
+                sparkMaster.Equals("yarn", StringComparison.OrdinalIgnoreCase)) //supported in Spark 2.0
             {
                 configuration = new SparkCLRConfiguration(appConfig);
                 runMode = RunMode.YARN;
diff --git a/scala/src/main/org/apache/spark/api/csharp/CSharpRDD.scala b/scala/src/main/org/apache/spark/api/csharp/CSharpRDD.scala
index 1faf776..d48e9f3 100644
--- a/scala/src/main/org/apache/spark/api/csharp/CSharpRDD.scala
+++ b/scala/src/main/org/apache/spark/api/csharp/CSharpRDD.scala
@@ -68,7 +68,7 @@ class CSharpRDD(
     val func = SQLUtils.createCSharpFunction(command,
       envVars,
       cSharpIncludes,
-      cSharpWorkerExecutable,
+      cSharpWorker.getAbsolutePath,
       unUsedVersionIdentifier,
       broadcastVars,
       accumulator)
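
Note (not part of the patch): Spark 2.0 deprecates the "yarn-client" and "yarn-cluster" master values in favor of a plain "yarn" master with a separate deploy mode, so spark.master can now reach ConfigurationService.cs as just "yarn", which is why the third comparison is added. The following is a minimal, self-contained C# sketch of that check in isolation; the MasterResolver class, the Resolve method, and the reduced RunMode enum are illustrative names only, not part of the Mobius code base.

    using System;

    // Illustrative sketch only: mirrors the master-string check added in
    // ConfigurationService.cs. Spark 2.0 deprecates "yarn-client"/"yarn-cluster"
    // in favor of plain "yarn" plus a separate deploy mode, so all three values
    // must map to the YARN run mode.
    enum RunMode { UNKNOWN, YARN }

    static class MasterResolver
    {
        public static RunMode Resolve(string sparkMaster)
        {
            if (sparkMaster.Equals("yarn-cluster", StringComparison.OrdinalIgnoreCase) ||
                sparkMaster.Equals("yarn-client", StringComparison.OrdinalIgnoreCase) ||
                sparkMaster.Equals("yarn", StringComparison.OrdinalIgnoreCase)) // Spark 2.0 style
            {
                return RunMode.YARN;
            }
            return RunMode.UNKNOWN;
        }

        static void Main()
        {
            Console.WriteLine(Resolve("yarn"));         // YARN (Spark 2.0)
            Console.WriteLine(Resolve("yarn-cluster")); // YARN (pre-2.0 value)
            Console.WriteLine(Resolve("local[*]"));     // UNKNOWN
        }
    }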