upgrade to Spark 1.6.1
Parent: 9095a1ef1d
Commit: 3a4c938619
@@ -1,4 +1,4 @@
-version: 1.6.0-SNAPSHOT.{build}
+version: 1.6.1-SNAPSHOT.{build}
 
 environment:
   securefile:

@@ -41,7 +41,7 @@ if "%precheck%" == "bad" (goto :EOF)
 @rem
 @rem setup Hadoop and Spark versions
 @rem
-set SPARK_VERSION=1.6.0
+set SPARK_VERSION=1.6.1
 set HADOOP_VERSION=2.6
 @echo [RunSamples.cmd] SPARK_VERSION=%SPARK_VERSION%, HADOOP_VERSION=%HADOOP_VERSION%
 

@@ -12,7 +12,7 @@ if ($stage.ToLower() -eq "run")
     $hadoopVersion = if ($envValue -eq $null) { "2.6" } else { $envValue }
 
     $envValue = [Environment]::GetEnvironmentVariable("SPARK_VERSION")
-    $sparkVersion = if ($envValue -eq $null) { "1.6.0" } else { $envValue }
+    $sparkVersion = if ($envValue -eq $null) { "1.6.1" } else { $envValue }
 
     Write-Output "[downloadtools] hadoopVersion=$hadoopVersion, sparkVersion=$sparkVersion"
 }

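Note: the PowerShell helper above only defaults to 1.6.1 when SPARK_VERSION is absent from the environment; an explicitly set value still wins. A minimal shell-side sketch of the same default-if-unset pattern, for illustration only (not part of this change):

  # Take SPARK_VERSION/HADOOP_VERSION from the environment when set, otherwise default
  SPARK_VERSION="${SPARK_VERSION:-1.6.1}"
  HADOOP_VERSION="${HADOOP_VERSION:-2.6}"
  echo "[downloadtools] hadoopVersion=$HADOOP_VERSION, sparkVersion=$SPARK_VERSION"
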
@@ -11,7 +11,7 @@ do
 done
 
 # setup Hadoop and Spark versions
-export SPARK_VERSION=1.6.0
+export SPARK_VERSION=1.6.1
 export HADOOP_VERSION=2.6
 echo "[run-samples.sh] SPARK_VERSION=$SPARK_VERSION, HADOOP_VERSION=$HADOOP_VERSION"
 

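These two variables conventionally name a prebuilt Spark binary package (spark-<version>-bin-hadoop<hadoop-version>.tgz). Assuming the download step follows that standard Apache naming scheme, the bump effectively changes which package gets fetched, roughly as sketched here (the mirror URL is illustrative):

  # Compose the standard Apache package name from the exported versions
  SPARK_PACKAGE="spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
  # Fetch from the Apache archive (illustrative location)
  curl -fLO "https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_PACKAGE}"
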
@@ -30,5 +30,5 @@ using System.Runtime.InteropServices;
 //      Build Number
 //      Revision
 //
-[assembly: AssemblyVersion("1.6.0.0")]
-[assembly: AssemblyFileVersion("1.6.0.0")]
+[assembly: AssemblyVersion("1.6.1.0")]
+[assembly: AssemblyFileVersion("1.6.1.0")]

@@ -30,5 +30,5 @@ using System.Runtime.InteropServices;
 //      Build Number
 //      Revision
 //
-[assembly: AssemblyVersion("1.6.0.0")]
-[assembly: AssemblyFileVersion("1.6.0.0")]
+[assembly: AssemblyVersion("1.6.1.0")]
+[assembly: AssemblyFileVersion("1.6.1.0")]

@@ -34,5 +34,5 @@ using System.Runtime.InteropServices;
 // You can specify all the values or you can default the Build and Revision Numbers
 // by using the '*' as shown below:
 // [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.6.0.0")]
-[assembly: AssemblyFileVersion("1.6.0.0")]
+[assembly: AssemblyVersion("1.6.1.0")]
+[assembly: AssemblyFileVersion("1.6.1.0")]

@@ -30,5 +30,5 @@ using System.Runtime.InteropServices;
 //      Build Number
 //      Revision
 //
-[assembly: AssemblyVersion("1.6.0.0")]
-[assembly: AssemblyFileVersion("1.6.0.0")]
+[assembly: AssemblyVersion("1.6.1.0")]
+[assembly: AssemblyFileVersion("1.6.1.0")]

@@ -2,7 +2,7 @@
 <package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
   <metadata>
     <id>Microsoft.SparkCLR</id>
-    <version>1.6.0-SNAPSHOT</version>
+    <version>1.6.1-SNAPSHOT</version>
     <authors>Microsoft Corporation</authors>
     <owners>Microsoft Corporation</owners>
     <licenseUrl>https://github.com/Microsoft/SparkCLR/blob/master/LICENSE</licenseUrl>

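The NuGet package version moves in lockstep with the jar and assembly versions. Assuming the package is published to a feed that carries prerelease builds (not something this change states), it would be restored with a prerelease-aware install, for example:

  # -Prerelease is needed because 1.6.1-SNAPSHOT is a prerelease version string
  nuget install Microsoft.SparkCLR -Prerelease
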
@@ -30,5 +30,5 @@ using System.Runtime.InteropServices;
 //      Build Number
 //      Revision
 //
-[assembly: AssemblyVersion("1.6.0.0")]
-[assembly: AssemblyFileVersion("1.6.0.0")]
+[assembly: AssemblyVersion("1.6.1.0")]
+[assembly: AssemblyFileVersion("1.6.1.0")]

@@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
 // You can specify all the values or you can default the Build and Revision Numbers
 // by using the '*' as shown below:
 // [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.6.0.0")]
-[assembly: AssemblyFileVersion("1.6.0.0")]
+[assembly: AssemblyVersion("1.6.1.0")]
+[assembly: AssemblyFileVersion("1.6.1.0")]

@@ -19,7 +19,7 @@
     <maven.compiler.target>1.5</maven.compiler.target>
     <encoding>UTF-8</encoding>
     <scala.version>2.10.4</scala.version>
-    <spark.version>1.6.0</spark.version>
+    <spark.version>1.6.1</spark.version>
     <scala.binary.version>2.10</scala.binary.version>
   </properties>
 

@@ -2,7 +2,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>com.microsoft.sparkclr</groupId>
   <artifactId>spark-clr_2.10</artifactId>
-  <version>1.6.0-SNAPSHOT</version>
+  <version>1.6.1-SNAPSHOT</version>
   <name>SparkCLR Project</name>
   <description>C# language binding and extensions to Apache Spark</description>
   <url>https://github.com/Microsoft/sparkclr</url>

@@ -35,7 +35,7 @@
     <maven.compiler.target>1.5</maven.compiler.target>
     <encoding>UTF-8</encoding>
     <scala.version>2.10.4</scala.version>
-    <spark.version>1.6.0</spark.version>
+    <spark.version>1.6.1</spark.version>
     <scala.binary.version>2.10</scala.binary.version>
   </properties>
 

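With both the project version and the spark.version property bumped, a standard Maven build now produces the 1.6.1-SNAPSHOT artifact that the submit scripts below expect. A typical invocation (ordinary Maven behaviour, not specific to this commit):

  # Build the JVM side; Maven names the jar <artifactId>-<version>.jar under target/
  mvn clean package
  ls target/spark-clr_2.10-1.6.1-SNAPSHOT.jar
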
@@ -36,7 +36,7 @@ if "%SPARK_ASSEMBLY_JAR%"=="0" (
   exit /b 1
 )
 
-if not defined SPARKCLR_JAR (set SPARKCLR_JAR=spark-clr_2.10-1.6.0-SNAPSHOT.jar)
+if not defined SPARKCLR_JAR (set SPARKCLR_JAR=spark-clr_2.10-1.6.1-SNAPSHOT.jar)
 echo SPARKCLR_JAR=%SPARKCLR_JAR%
 set SPARKCLR_CLASSPATH=%SPARKCLR_HOME%\lib\%SPARKCLR_JAR%
 REM SPARKCLR_DEBUGMODE_EXT_JARS environment variable is used to specify external dependencies to use in debug mode

@@ -98,4 +98,4 @@ goto :eof
 @echo Example 2:
 @echo sparkclr-submit.cmd [--verbose] [--master local] [--deploy-mode client] [--name testapp] --exe csdriver.exe c:\sparkclrapp\driver.zip arg1 arg2 arg3
 @echo Example 3:
-@echo sparkclr-submit.cmd [--verbose] --master spark://host:port --deploy-mode cluster [--name testapp] --exe csdriver.exe --remote-sparkclr-jar hdfs://path/to/spark-clr-1.6.0-SNAPSHOT.jar hdfs://path/to/driver.zip arg1 arg2 arg3
+@echo sparkclr-submit.cmd [--verbose] --master spark://host:port --deploy-mode cluster [--name testapp] --exe csdriver.exe --remote-sparkclr-jar hdfs://path/to/spark-clr-1.6.1-SNAPSHOT.jar hdfs://path/to/driver.zip arg1 arg2 arg3

@@ -27,7 +27,7 @@ function usage() {
   echo "Example 2:"
   echo "sparkclr-submit.sh [--verbose] [--master local] [--deploy-mode client] [--name testapp] --exe csdriver.exe sparkclrapp/driver.zip arg1 arg2 arg3"
   echo "Example 3:"
-  echo "sparkclr-submit.sh [--verbose] --master spark://host:port --deploy-mode cluster [--name testapp] --exe csdriver.exe --remote-sparkclr-jar --remote-sparkclr-jar hdfs://path/to/spark-clr_2.10-1.6.0-SNAPSHOT.jar hdfs://path/to/driver.zip arg1 arg2 arg3"
+  echo "sparkclr-submit.sh [--verbose] --master spark://host:port --deploy-mode cluster [--name testapp] --exe csdriver.exe --remote-sparkclr-jar --remote-sparkclr-jar hdfs://path/to/spark-clr_2.10-1.6.1-SNAPSHOT.jar hdfs://path/to/driver.zip arg1 arg2 arg3"
 }
 
 [ "$SPARK_HOME" = "" ] && spark_home_error

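For reference, a concrete cluster-mode submission matching the updated Example 3; host, port, and HDFS paths are placeholders, and since the doubled --remote-sparkclr-jar in the usage string above looks like a typo in the script, it is passed once here:

  # Submit a C# driver in cluster mode against the 1.6.1-SNAPSHOT jar
  ./sparkclr-submit.sh --verbose \
    --master spark://sparkmaster:7077 \
    --deploy-mode cluster \
    --name testapp \
    --exe csdriver.exe \
    --remote-sparkclr-jar hdfs://namenode:9000/path/to/spark-clr_2.10-1.6.1-SNAPSHOT.jar \
    hdfs://namenode:9000/path/to/driver.zip arg1 arg2 arg3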