@echo off

rem Launcher script for running Spark classes on Windows.
rem Resolves the Spark installation directory, exports SPARK_HOME, and
rem loads optional user configuration from conf\spark-env.cmd.

set SCALA_VERSION=2.9.2

rem Figure out where the Spark framework is installed.
rem %~dp0 is the drive+path of this script, with a trailing backslash.
set FWDIR=%~dp0

rem Export this as SPARK_HOME
set SPARK_HOME=%FWDIR%

rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
rem Test that an argument (the class to run) was given; print usage otherwise.
if not "x%1"=="x" goto arg_given
  echo Usage: run ^<spark-class^> [^<args^>]
  goto exit
:arg_given

rem Detect whether we are launching a long-running daemon (standalone
rem master or worker); daemons get their own memory and JVM option
rem settings via SPARK_DAEMON_MEMORY / SPARK_DAEMON_JAVA_OPTS.
set RUNNING_DAEMON=0
if "%1"=="spark.deploy.master.Master" set RUNNING_DAEMON=1
if "%1"=="spark.deploy.worker.Worker" set RUNNING_DAEMON=1
if "x%SPARK_DAEMON_MEMORY%" == "x" set SPARK_DAEMON_MEMORY=512m
if "%RUNNING_DAEMON%"=="1" set SPARK_MEM=%SPARK_DAEMON_MEMORY%
if "%RUNNING_DAEMON%"=="1" set SPARK_JAVA_OPTS=%SPARK_DAEMON_JAVA_OPTS%
rem Check that SCALA_HOME has been specified; we need it to locate either
rem the scala launcher or the Scala runtime jars.
if not "x%SCALA_HOME%"=="x" goto scala_exists
  echo SCALA_HOME is not set
  goto exit
:scala_exists

rem If the user specifies a Mesos JAR, put it before our included one on the classpath
set MESOS_CLASSPATH=
if not "x%MESOS_JAR%"=="x" set MESOS_CLASSPATH=%MESOS_JAR%
rem Figure out how much memory to use per executor and set it as an environment
rem variable so that our process sees it and can report it to Mesos
if "x%SPARK_MEM%"=="x" set SPARK_MEM=512m

rem Set JAVA_OPTS to be able to load native libraries and to set heap size
set JAVA_OPTS=%SPARK_JAVA_OPTS% -Djava.library.path=%SPARK_LIBRARY_PATH% -Xms%SPARK_MEM% -Xmx%SPARK_MEM%

rem Load extra JAVA_OPTS from conf\java-opts.cmd, if it exists
if exist "%FWDIR%conf\java-opts.cmd" call "%FWDIR%conf\java-opts.cmd"

rem Per-module build output directories, used below to assemble the classpath.
set CORE_DIR=%FWDIR%core
set REPL_DIR=%FWDIR%repl
set EXAMPLES_DIR=%FWDIR%examples
set BAGEL_DIR=%FWDIR%bagel
set STREAMING_DIR=%FWDIR%streaming
set PYSPARK_DIR=%FWDIR%python
rem Build up classpath: user-supplied entries first, then conf, then the
rem compiled classes and managed jars of each module.
set CLASSPATH=%SPARK_CLASSPATH%;%MESOS_CLASSPATH%;%FWDIR%conf;%CORE_DIR%\target\scala-%SCALA_VERSION%\classes
set CLASSPATH=%CLASSPATH%;%CORE_DIR%\target\scala-%SCALA_VERSION%\test-classes;%CORE_DIR%\src\main\resources
set CLASSPATH=%CLASSPATH%;%STREAMING_DIR%\target\scala-%SCALA_VERSION%\classes;%STREAMING_DIR%\target\scala-%SCALA_VERSION%\test-classes
set CLASSPATH=%CLASSPATH%;%STREAMING_DIR%\lib\org\apache\kafka\kafka\0.7.2-spark\*
set CLASSPATH=%CLASSPATH%;%REPL_DIR%\target\scala-%SCALA_VERSION%\classes;%EXAMPLES_DIR%\target\scala-%SCALA_VERSION%\classes
set CLASSPATH=%CLASSPATH%;%FWDIR%lib_managed\jars\*
set CLASSPATH=%CLASSPATH%;%FWDIR%lib_managed\bundles\*
set CLASSPATH=%CLASSPATH%;%FWDIR%repl\lib\*
set CLASSPATH=%CLASSPATH%;%FWDIR%python\lib\*
set CLASSPATH=%CLASSPATH%;%BAGEL_DIR%\target\scala-%SCALA_VERSION%\classes
rem Figure out the JAR file that our examples were packaged into.
rem NOTE: the FOR set must be unquoted for wildcard expansion to happen
rem (cmd treats a quoted element as a literal string), and %%d already
rem includes the path prefix given in the set element, so it must not be
rem prefixed again. Backslashes are used because forward slashes are not
rem reliable in FOR globs.
rem First search in the build path from SBT:
for %%d in (examples\target\scala-%SCALA_VERSION%\spark-examples*.jar) do (
  set SPARK_EXAMPLES_JAR=%%d
)
rem Then search in the build path from Maven:
for %%d in (examples\target\spark-examples*hadoop*.jar) do (
  set SPARK_EXAMPLES_JAR=%%d
)
rem Figure out whether to run our class with java or with the scala launcher.
rem In most cases, we'd prefer to execute our process with java because scala
rem creates a shell script as the parent of its Java process, which makes it
rem hard to kill the child with stuff like Process.destroy(). However, for
rem the Spark shell, the wrapper is necessary to properly reset the terminal
rem when we exit, so we allow it to set a variable to launch with scala.
rem NOTE: a string comparison is used here; the old `"%VAR%" NEQ 1` form
rem compared the quoted string against an unquoted 1 and never matched,
rem making the scala branch unreachable.
if not "%SPARK_LAUNCH_WITH_SCALA%"=="1" goto java_runner
  set RUNNER=%SCALA_HOME%\bin\scala
  rem Java options will be passed to scala as JAVA_OPTS
  set EXTRA_ARGS=
  goto run_spark

:java_runner
rem Running under plain java: add the Scala runtime jars to the classpath
rem ourselves, and pass JAVA_OPTS explicitly since the JVM does not read it.
set CLASSPATH=%CLASSPATH%;%SCALA_HOME%\lib\scala-library.jar;%SCALA_HOME%\lib\scala-compiler.jar;%SCALA_HOME%\lib\jline.jar
set RUNNER=java
if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
rem The JVM doesn't read JAVA_OPTS by default so we need to pass it in
set EXTRA_ARGS=%JAVA_OPTS%

:run_spark
"%RUNNER%" -cp "%CLASSPATH%" %EXTRA_ARGS% %*

:exit