#!/bin/bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Builds the common, hdfs and mapreduce subprojects and stitches the
# resulting tarballs into a single hadoop-$HADOOP_VERSION release tree.

set -ex

HADOOP_VERSION=0.22.0
STITCH_FILES="CHANGES.txt LICENSE.txt NOTICE.txt README.txt"

WORKDIR=hadoop-$HADOOP_VERSION
rm -rf $WORKDIR
mkdir $WORKDIR
rm -f $STITCH_FILES

# pick the jsvc binary matching the build machine's architecture
if uname -m | grep -q 64 ; then
  JSVC_URL=http://archive.apache.org/dist/commons/daemon/binaries/1.0.2/linux/commons-daemon-1.0.2-bin-linux-x86_64.tar.gz
else
  JSVC_URL=http://archive.apache.org/dist/commons/daemon/binaries/1.0.2/linux/commons-daemon-1.0.2-bin-linux-i386.tar.gz
fi

# -Dlibrecordio=true
# Note: BUILD_CONTRIB is deliberately unset on the first pass (common has no
# contrib build); it is set at the end of each iteration so that hdfs and
# mapreduce also run the compile-contrib target.
for project in common hdfs mapreduce ; do
  cd $project
  LDFLAGS="-lcrypto -lm" ant \
    -Dxercescroot=$XERCESCROOT \
    -Djava5.home=$JAVA5_HOME \
    -Dforrest.home=$FORREST_HOME \
    -Dversion=${HADOOP_VERSION} \
    -Dhadoop-common.version=${HADOOP_VERSION} \
    -Dhadoop-hdfs.version=${HADOOP_VERSION} \
    -Djsvc.location=${JSVC_URL} \
    -Dcompile.native=true \
    -Dhadoop.conf.dir=/etc/hadoop/conf \
    -Dlibhdfs=1 -Dfusedfs=true -Dcompile.c++=true -Djdiff.build.dir=build/docs/jdiff \
    api-report bin-package $BUILD_CONTRIB tar \
    "$@"
  # unpack this subproject's tarball into the stitched tree and append its
  # text docs to the combined copies at the top level
  cat build/hadoop-*.tar.gz | (cd ../$WORKDIR ; tar xzf -)
  for file in $STITCH_FILES ; do
    [ -f $file ] && cat $file >> ../$file
  done
  BUILD_CONTRIB="compile-contrib"
  cd ..
done

cd "$WORKDIR"

# get rid of the version names
mv hadoop-common-* common
mv hadoop-hdfs-* hdfs
mv hadoop-mapred-* mapreduce

# make sure configure scripts are executable
chmod 755 common/src/test/system/c++/runAs/configure hdfs/src/contrib/fuse-dfs/configure

# munge text docs
cp common/{LICENSE,NOTICE,README}.txt .
#rm hdfs/{LICENSE,NOTICE}.txt    # same as common
#rm mapred/{LICENSE,NOTICE}.txt  # same as common

# We can NOT simply munge the docs directories, since index.html is
# per-component (even though everything else doesn't collide).
# We have to come up with a better way of solving this, but let's simply
# disable it for now.
# for dir in docs; do
#   mkdir $dir
#   for proj in common hdfs mapred; do
#     cp -Rp $proj/$dir/* $dir
#   done
# done

# munge the contrib directories
for dir in contrib; do
  mkdir $dir
  for proj in hdfs mapreduce; do
    cp -Rp $proj/$dir/* $dir
  done
done

# munge the bin and conf directories
for dir in bin conf; do
  mkdir $dir
  for proj in common hdfs mapreduce; do
    cp -Rp $proj/$dir/* $dir
  done
done

# munge the lib directory
mkdir lib
for proj in common hdfs mapreduce; do
  cp -Rp $proj/lib/* lib
  rm -r $proj/lib
  # create empty lib directories so that build.xml logic doesn't break
  mkdir $proj/lib
done
rm lib/hadoop-*.jar # hdfs and mapreduce pull these in as dependencies on the common and hdfs jars

# munge the webapps directory
mkdir webapps
for proj in hdfs mapreduce; do
  cp -Rp $proj/webapps/* webapps
  rm -r $proj/webapps
done

# get the mapred c++ (if compiled)
mkdir c++
for proj in hdfs mapreduce; do
  if [ -d $proj/c++ ]; then
    cp -Rp $proj/c++/* c++
    rm -r $proj/c++
  fi
done

# get our jar files
mv {common,hdfs,mapreduce}/*.jar .

# remove native build files
rm `find . -type f | egrep "\.(P|)o$"`

# tar up the result
cd ..
tar czf hadoop-$HADOOP_VERSION.tar.gz hadoop-$HADOOP_VERSION
mv hadoop-$HADOOP_VERSION build

# FIXME: this is a hack for shv
if [ -d ../../RPMS ] ; then
  mv hadoop-$HADOOP_VERSION.tar.gz ../../RPMS
elif [ -d ../../../output ] ; then
  mv hadoop-$HADOOP_VERSION.tar.gz ../../../output
fi
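
# Usage sketch (an assumption, not part of the script above): the ant
# invocation reads XERCESCROOT, JAVA5_HOME and FORREST_HOME from the
# environment, and any extra arguments to this script are forwarded to ant
# via "$@". A typical run from a checkout containing the common/, hdfs/ and
# mapreduce/ subprojects might look like this; the install paths and the
# script name are hypothetical:
#
#   export XERCESCROOT=/opt/xerces-c
#   export JAVA5_HOME=/opt/jdk1.5
#   export FORREST_HOME=/opt/apache-forrest
#   ./stitch-release.sh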