Fixing the issue in YARN clusters introduced during the Spark 2.0 upgrade

skaarthik 2016-10-10 19:11:55 -07:00
Parent edd7b7fd8b
Commit 7d920d1206
2 changed files with 4 additions and 2 deletions

View file

@@ -65,7 +65,9 @@ namespace Microsoft.Spark.CSharp.Configuration
                 configuration = new SparkCLRConfiguration(appConfig);
                 runMode = RunMode.CLUSTER;
             }
-            else if (sparkMaster.Equals("yarn-client", StringComparison.OrdinalIgnoreCase) || sparkMaster.Equals("yarn-cluster", StringComparison.OrdinalIgnoreCase))
+            else if (sparkMaster.Equals("yarn-cluster", StringComparison.OrdinalIgnoreCase) ||
+                sparkMaster.Equals("yarn-client", StringComparison.OrdinalIgnoreCase) ||
+                sparkMaster.Equals("yarn", StringComparison.OrdinalIgnoreCase)) //supported in Spark 2.0
             {
                 configuration = new SparkCLRConfiguration(appConfig);
                 runMode = RunMode.YARN;
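
Context for this hunk: starting with Spark 2.0, jobs are submitted with --master yarn plus --deploy-mode client/cluster instead of the older yarn-client/yarn-cluster master values, so a check that only matches the old strings never selects the YARN run mode on a 2.0 cluster. Below is a minimal standalone sketch of that check in isolation; ParseRunMode, SparkMasterSketch, the DEBUG value, and the spark:// branch are illustrative assumptions, not the actual Mobius ConfigurationService code, and only the YARN comparison mirrors the diff above.

using System;

// Simplified run modes for this sketch; the real Mobius enum may have more values.
enum RunMode { DEBUG, CLUSTER, YARN }

static class SparkMasterSketch
{
    // Hypothetical helper mirroring the patched master-string check.
    static RunMode ParseRunMode(string sparkMaster)
    {
        if (string.IsNullOrEmpty(sparkMaster))
        {
            return RunMode.DEBUG;
        }
        if (sparkMaster.StartsWith("spark://", StringComparison.OrdinalIgnoreCase))
        {
            return RunMode.CLUSTER; // standalone cluster manager
        }
        if (sparkMaster.Equals("yarn-cluster", StringComparison.OrdinalIgnoreCase) ||
            sparkMaster.Equals("yarn-client", StringComparison.OrdinalIgnoreCase) ||
            sparkMaster.Equals("yarn", StringComparison.OrdinalIgnoreCase)) // Spark 2.0 value
        {
            return RunMode.YARN;
        }
        return RunMode.DEBUG;
    }

    static void Main()
    {
        // Without the "yarn" case, a Spark 2.0 submission (--master yarn) would
        // fall through and never select the YARN run mode.
        Console.WriteLine(ParseRunMode("yarn"));               // YARN
        Console.WriteLine(ParseRunMode("yarn-client"));        // YARN (pre-2.0 value)
        Console.WriteLine(ParseRunMode("spark://host:7077"));  // CLUSTER
    }
}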

View file

@@ -68,7 +68,7 @@ class CSharpRDD(
     val func = SQLUtils.createCSharpFunction(command,
       envVars,
       cSharpIncludes,
-      cSharpWorkerExecutable,
+      cSharpWorker.getAbsolutePath,
       unUsedVersionIdentifier,
       broadcastVars,
       accumulator)