Mirror of https://github.com/microsoft/spark.git

Merge branch 'master' into mos-bt
Commit 495b38658e

.gitignore
@@ -1,10 +1,26 @@
*~
*.swp
build
work
*.iml
.idea/
/build/
work/
out/
.DS_Store
third_party/libmesos.so
third_party/libmesos.dylib
conf/java-opts
conf/spark-env.sh
conf/log4j.properties
target/
reports/
.project
.classpath
.scala_dependencies
lib_managed/
src_managed/
project/boot/
project/plugins/project/build.properties
project/build/target/
project/plugins/target/
project/plugins/lib_managed/
project/plugins/src_managed/
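One substantive change above: the unanchored build and work entries become /build/ and work/. A brief sketch of the gitignore semantics involved, using git check-ignore (available in modern Git); the paths passed to it are illustrative:

# gitignore pattern semantics behind the change above:
#   build    matched a file or directory named "build" at any depth
#   /build/  matches only the "build" directory at the repository root
#   work/    matches directories named "work" at any depth, but not files
# git check-ignore -v reports which pattern, if any, matches each path:
git check-ignore -v build/classes work/tmp out/foo.txt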

Makefile
@@ -1,73 +0,0 @@
EMPTY =
SPACE = $(EMPTY) $(EMPTY)

# Build up classpath by concatenating some strings
JARS = third_party/mesos.jar
JARS += third_party/asm-3.2/lib/all/asm-all-3.2.jar
JARS += third_party/colt.jar
JARS += third_party/guava-r07/guava-r07.jar
JARS += third_party/hadoop-0.20.0/hadoop-0.20.0-core.jar
JARS += third_party/hadoop-0.20.0/lib/commons-logging-1.0.4.jar
JARS += third_party/scalatest-1.2/scalatest-1.2.jar
JARS += third_party/scalacheck_2.8.0-1.7.jar
JARS += third_party/jetty-7.1.6.v20100715/jetty-server-7.1.6.v20100715.jar
JARS += third_party/jetty-7.1.6.v20100715/servlet-api-2.5.jar
JARS += third_party/apache-log4j-1.2.16/log4j-1.2.16.jar
JARS += third_party/slf4j-1.6.1/slf4j-api-1.6.1.jar
JARS += third_party/slf4j-1.6.1/slf4j-log4j12-1.6.1.jar
JARS += third_party/compress-lzf-0.6.0/compress-lzf-0.6.0.jar

CLASSPATH = $(subst $(SPACE),:,$(JARS))

SCALA_SOURCES = src/examples/*.scala src/scala/spark/*.scala src/scala/spark/repl/*.scala
SCALA_SOURCES += src/test/spark/*.scala src/test/spark/repl/*.scala

ifeq ($(USE_FSC),1)
COMPILER_NAME = fsc
else
COMPILER_NAME = scalac
endif

ifeq ($(SCALA_HOME),)
COMPILER = $(COMPILER_NAME)
else
COMPILER = $(SCALA_HOME)/bin/$(COMPILER_NAME)
endif

CONF_FILES = conf/spark-env.sh conf/log4j.properties conf/java-opts

all: scala conf-files

build/classes:
	mkdir -p build/classes

scala: build/classes
	$(COMPILER) -d build/classes -classpath build/classes:$(CLASSPATH) $(SCALA_SOURCES)

jar: build/spark.jar build/spark-dep.jar

dep-jar: build/spark-dep.jar

build/spark.jar: scala
	jar cf build/spark.jar -C build/classes spark

build/spark-dep.jar:
	mkdir -p build/dep
	cd build/dep && for i in $(JARS); do jar xf ../../$$i; done
	jar cf build/spark-dep.jar -C build/dep .

conf-files: $(CONF_FILES)

$(CONF_FILES): %: | %.template
	cp $@.template $@

test: all
	./alltests

default: all

clean:
	rm -rf build

.phony: default all clean scala jar dep-jar conf-files
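Two details of the deleted Makefile are worth a note. CLASSPATH is built by substituting spaces with colons, $(subst $(SPACE),:,$(JARS)), where $(SPACE) is a single literal space produced by the two $(EMPTY) placeholders; and the $(CONF_FILES): %: | %.template rule uses an order-only prerequisite (after the |), so each config file is copied from its .template only when it does not exist yet, not whenever the template changes. (GNU Make also only honors the uppercase .PHONY special target, so the lowercase .phony line was inert.) A minimal shell sketch of the same space-to-colon join, with the jar list shortened for illustration:

#!/bin/sh
# Join a space-separated jar list with ':' to form a Java classpath,
# mirroring the Makefile's $(subst $(SPACE),:,$(JARS)) trick.
JARS="third_party/mesos.jar third_party/colt.jar third_party/guava-r07/guava-r07.jar"
CLASSPATH=$(printf '%s' "$JARS" | tr ' ' ':')
echo "$CLASSPATH"
# -> third_party/mesos.jar:third_party/colt.jar:third_party/guava-r07/guava-r07.jar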

README
@@ -1,24 +1,32 @@
 ONLINE DOCUMENTATION
 
 You can find the latest Spark documentation, including a programming guide,
 on the project wiki at http://github.com/mesos/spark/wiki. This file only
 contains basic setup instructions.
 
 
 BUILDING
 
-Spark requires Scala 2.8. This version has been tested with 2.8.0.final.
+Spark requires Scala 2.8. This version has been tested with 2.8.1.final.
 
-To build and run Spark, you will need to have Scala's bin in your $PATH,
-or you will need to set the SCALA_HOME environment variable to point
-to where you've installed Scala. Scala must be accessible through one
-of these methods on Mesos slave nodes as well as on the master.
+The project is built using Simple Build Tool (SBT), which is packaged with it.
+To build Spark and its example programs, run sbt/sbt compile.
 
-To build Spark and the example programs, run make.
+To run Spark, you will need to have Scala's bin in your $PATH, or you
+will need to set the SCALA_HOME environment variable to point to where
+you've installed Scala. Scala must be accessible through one of these
+methods on Mesos slave nodes as well as on the master.
 
 To run one of the examples, use ./run <class> <params>. For example,
-./run SparkLR will run the Logistic Regression example. Each of the
-example programs prints usage help if no params are given.
+./run spark.examples.SparkLR will run the Logistic Regression example.
+Each of the example programs prints usage help if no params are given.
 
 All of the Spark samples take a <host> parameter that is the Mesos master
 to connect to. This can be a Mesos URL, or "local" to run locally with one
 thread, or "local[N]" to run locally with N threads.
 
-Tip: If you are building Spark and examples repeatedly, export USE_FSC=1
-to have the Makefile use the fsc compiler daemon instead of scalac.
 
 CONFIGURATION
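Putting the new README instructions together, the build-and-run flow looks like this; a minimal sketch, assuming a POSIX shell at the repository root, with local[2] as an illustrative choice of master:

#!/bin/sh
# Build Spark and its example programs with the bundled SBT launcher.
sbt/sbt compile

# Run the Logistic Regression example. The first argument is the master:
# "local" for one thread, "local[N]" for N threads, or a Mesos master URL.
./run spark.examples.SparkLR "local[2]"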

alltests
@@ -1,11 +0,0 @@
#!/bin/bash
FWDIR="`dirname $0`"
if [ "x$SPARK_MEM" == "x" ]; then
  export SPARK_MEM=500m
fi
RESULTS_DIR="$FWDIR/build/test_results"
if [ -d $RESULTS_DIR ]; then
  rm -r $RESULTS_DIR
fi
mkdir -p $RESULTS_DIR
$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/build/classes -u $RESULTS_DIR -o $@
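For reference, a usage sketch for the deleted script; everything here follows from the script body: SPARK_MEM defaults to 500m, results are written to build/test_results, and any extra arguments are passed through ($@) to the ScalaTest Runner:

# Run the full test suite with the default 500m heap:
./alltests

# Give the JVM more memory (the value uses the JVM's format, e.g. 1g):
SPARK_MEM=1g ./alltests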
Some files were not shown because too many files changed in this diff.