Add a testing harness, borrowing heavily from snakebite's

Colin Marc 2014-10-08 22:51:45 +02:00
Parent e55d0dcd32
Commit 0fd1c6a03c
5 changed files with 70 additions and 10 deletions

1
.gitignore vendored Normal file

@@ -0,0 +1 @@
build

11
.travis.yml Normal file

@@ -0,0 +1,11 @@
language: go
go:
  - 1.2
  - 1.3
install: make get-deps
env:
  - HADOOP_DISTRO=cdh
  - HADOOP_DISTRO=hdp
before_script:
  - NN_PORT=9000 ./setup_test_env.sh
  - export HADOOP_NAMENODE="localhost:9000"
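For context, a minimal, hypothetical sketch (not part of this commit) of how a Go test in this repository could pick up the HADOOP_NAMENODE address that this Travis configuration exports, and skip when the harness is not running; the package name and helper are assumptions:

package hdfs

import (
	"net"
	"os"
	"testing"
	"time"
)

// getNamenode returns the namenode address exported by the test harness,
// or skips the calling test when the harness is not running.
// (Hypothetical helper; the package name is an assumption.)
func getNamenode(t *testing.T) string {
	addr := os.Getenv("HADOOP_NAMENODE")
	if addr == "" {
		t.Skip("HADOOP_NAMENODE not set; skipping integration test")
	}
	return addr
}

// TestNamenodeReachable just checks that the minicluster's namenode is
// accepting TCP connections on the advertised port.
func TestNamenodeReachable(t *testing.T) {
	addr := getNamenode(t)
	conn, err := net.DialTimeout("tcp", addr, 5*time.Second)
	if err != nil {
		t.Fatalf("could not reach namenode at %s: %v", addr, err)
	}
	conn.Close()
}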

15
Makefile Normal file

@@ -0,0 +1,15 @@
GOCMD ?= $(shell which go)

all: test

install: get-deps
	$(GOCMD) install

test: get-deps
	$(GOCMD) test

get-deps:
	$(GOCMD) get github.com/stretchr/testify/assert
	$(GOCMD) get code.google.com/p/goprotobuf/proto

.PHONY: install test get-deps
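The get-deps target pulls in stretchr/testify, which the tests can use for assertions. Purely as illustration, a hypothetical assertion-style test (not from this commit) would look like:

package hdfs

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestAssertStyle is a hypothetical example of the testify assertion style
// that get-deps makes available; it is not part of this commit.
func TestAssertStyle(t *testing.T) {
	contents := "bar"
	assert.Equal(t, "bar", contents, "expected the /foo fixture to contain 'bar'")
}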


@@ -1,10 +0,0 @@
#!/bin/sh
HADOOP_HOME=${HADOOP_HOME-"/usr/local/hadoop"}
NN_PORT=${NN_PORT-"9000"}
MINICLUSTER_JAR=$(find $HADOOP_HOME -name "hadoop-mapreduce-client-jobclient*.jar" | grep -v tests | head -1)
echo $MINICLUSTER_JAR
export HADOOP_NAMENODE="localhost:$NN_PORT"
exec $HADOOP_HOME/bin/hadoop jar $MINICLUSTER_JAR minicluster -nnport $NN_PORT -nomr -format $@

43
setup_test_env.sh Executable file

@@ -0,0 +1,43 @@
#!/bin/sh
HADOOP_DISTRO=${HADOOP_DISTRO-"cdh"}
HADOOP_HOME=${HADOOP_HOME-"/tmp/hadoop-$HADOOP_DISTRO"}
NN_PORT=${NN_PORT-"9000"}
HADOOP_NAMENODE="localhost:$NN_PORT"
if [ ! -d "$HADOOP_HOME" ]; then
  mkdir -p $HADOOP_HOME
  if [ $HADOOP_DISTRO = "cdh" ]; then
    HADOOP_URL="http://archive.cloudera.com/cdh5/cdh/5/hadoop-latest.tar.gz"
  elif [ $HADOOP_DISTRO = "hdp" ]; then
    HADOOP_URL="http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0/tars/hadoop-2.2.0.2.0.6.0-76.tar.gz"
  else
    echo "No/bad HADOOP_DISTRO='${HADOOP_DISTRO}' specified"
    exit 1
  fi
  echo "Downloading Hadoop from $HADOOP_URL to ${HADOOP_HOME}/hadoop.tar.gz"
  curl -o ${HADOOP_HOME}/hadoop.tar.gz -L $HADOOP_URL
  echo "Extracting ${HADOOP_HOME}/hadoop.tar.gz into $HADOOP_HOME"
  tar zxf ${HADOOP_HOME}/hadoop.tar.gz --strip-components 1 -C $HADOOP_HOME
fi
MINICLUSTER_JAR=$(find $HADOOP_HOME -name "hadoop-mapreduce-client-jobclient*.jar" | grep -v tests | grep -v sources | head -1)
if [ ! -f "$MINICLUSTER_JAR" ]; then
  echo "Couldn't find minicluster jar"
  exit 1
fi
echo "minicluster jar found at $MINICLUSTER_JAR"
# start the namenode in the background
$HADOOP_HOME/bin/hadoop jar $MINICLUSTER_JAR minicluster -nnport $NN_PORT -nomr -format $@ &
sleep 10
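# seed the cluster with fixtures for the tests: a small file and a multi-block file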
echo "bar" > foo
$HADOOP_HOME/bin/hadoop fs -Ddfs.block.size=1048576 -put foo "hdfs://$HADOOP_NAMENODE/"
dd if=/dev/urandom of=longfile bs=1048576 count=10
$HADOOP_HOME/bin/hadoop fs -Ddfs.block.size=1048576 -put longfile "hdfs://$HADOOP_NAMENODE/"
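
The script seeds two fixtures: /foo, containing the string "bar", and /longfile, 10 MB of random data written with a 1 MB block size so it spans multiple blocks. As a hypothetical sketch (not part of this commit), a Go test could verify the small fixture by shelling out to the same hadoop CLI the script uses:

package hdfs

import (
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"testing"
)

// TestFooFixture reads back the /foo file seeded by setup_test_env.sh.
// Hypothetical sketch; it assumes HADOOP_HOME and HADOOP_NAMENODE are set
// as they are in the harness, and skips otherwise.
func TestFooFixture(t *testing.T) {
	namenode := os.Getenv("HADOOP_NAMENODE")
	hadoopHome := os.Getenv("HADOOP_HOME")
	if namenode == "" || hadoopHome == "" {
		t.Skip("HADOOP_NAMENODE or HADOOP_HOME not set; skipping")
	}

	hadoop := filepath.Join(hadoopHome, "bin", "hadoop")
	out, err := exec.Command(hadoop, "fs", "-cat", "hdfs://"+namenode+"/foo").Output()
	if err != nil {
		t.Fatalf("hadoop fs -cat failed: %v", err)
	}
	if strings.TrimSpace(string(out)) != "bar" {
		t.Errorf("unexpected /foo contents: %q", out)
	}
}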