fixing the issue in YARN clusters introduced during Spark 2.0 upgrade

This commit is contained in:
Kaarthik Sivashanmugam 2016-10-10 23:42:07 -07:00 committed by GitHub
Parent edd7b7fd8b 7d920d1206
Commit fbf0f8d841
2 changed files: 4 additions and 2 deletions

View file

@ -65,7 +65,9 @@ namespace Microsoft.Spark.CSharp.Configuration
configuration = new SparkCLRConfiguration(appConfig); configuration = new SparkCLRConfiguration(appConfig);
runMode = RunMode.CLUSTER; runMode = RunMode.CLUSTER;
} }
else if (sparkMaster.Equals("yarn-client", StringComparison.OrdinalIgnoreCase) || sparkMaster.Equals("yarn-cluster", StringComparison.OrdinalIgnoreCase)) else if (sparkMaster.Equals("yarn-cluster", StringComparison.OrdinalIgnoreCase) ||
sparkMaster.Equals("yarn-client", StringComparison.OrdinalIgnoreCase) ||
sparkMaster.Equals("yarn", StringComparison.OrdinalIgnoreCase)) //supported in Spark 2.0
{ {
configuration = new SparkCLRConfiguration(appConfig); configuration = new SparkCLRConfiguration(appConfig);
runMode = RunMode.YARN; runMode = RunMode.YARN;

View file

@ -68,7 +68,7 @@ class CSharpRDD(
val func = SQLUtils.createCSharpFunction(command, val func = SQLUtils.createCSharpFunction(command,
envVars, envVars,
cSharpIncludes, cSharpIncludes,
cSharpWorkerExecutable, cSharpWorker.getAbsolutePath,
unUsedVersionIdentifier, unUsedVersionIdentifier,
broadcastVars, broadcastVars,
accumulator) accumulator)