hadoop ant build script
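
This is the top-level Ant build file for Apache Hadoop (version 0.20.2-dev). The default target is compile; other top-level targets include jar, examples, test-core, package, tar and clean. Dependencies are resolved through Apache Ivy, and the native, C++ pipes and libhdfs targets additionally expect sh, make and autoreconf to be available on the path.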


<?xml version="1.0"?>

<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-->

<project name="Hadoop" default="compile" 
   xmlns:ivy="antlib:org.apache.ivy.ant"

  <!-- Load all the default properties, and any the user wants     -->
  <!-- to contribute (without having to type -D or edit this file) -->
  <property file="${user.home}/build.properties" />
  <property file="${basedir}/build.properties" />
 
  <property name="Name" value="Hadoop"/>
  <property name="name" value="hadoop"/>
  <property name="version" value="0.20.2-dev"/>
  <property name="final.name" value="${name}-${version}"/>
  <property name="year" value="2009"/>

  <property name="src.dir" value="${basedir}/src"/>    
  <property name="core.src.dir" value="${src.dir}/core"/>
  <property name="mapred.src.dir" value="${src.dir}/mapred"/> 
  <property name="hdfs.src.dir" value="${src.dir}/hdfs"/>
  <property name="native.src.dir" value="${basedir}/src/native"/>
  <property name="examples.dir" value="${basedir}/src/examples"/>
  <property name="anttasks.dir" value="${basedir}/src/ant"/>
  <property name="lib.dir" value="${basedir}/lib"/>
  <property name="conf.dir" value="${basedir}/conf"/>
  <property name="contrib.dir" value="${basedir}/src/contrib"/>
  <property name="docs.src" value="${basedir}/src/docs"/>
  <property name="src.docs.cn" value="${basedir}/src/docs/cn"/>
  <property name="changes.src" value="${docs.src}/changes"/>
  <property name="c++.src" value="${basedir}/src/c++"/>
  <property name="c++.utils.src" value="${c++.src}/utils"/>
  <property name="c++.pipes.src" value="${c++.src}/pipes"/>
  <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
  <property name="c++.libhdfs.src" value="${c++.src}/libhdfs"/>
  <property name="librecordio.src" value="${c++.src}/librecordio"/>
  <property name="tools.src" value="${basedir}/src/tools"/>

  <property name="xercescroot" value=""/> 
  <property name="build.dir" value="${basedir}/build"/>
  <property name="build.classes" value="${build.dir}/classes"/>
  <property name="build.src" value="${build.dir}/src"/>
  <property name="build.tools" value="${build.dir}/tools"/>
  <property name="build.webapps" value="${build.dir}/webapps"/>
  <property name="build.examples" value="${build.dir}/examples"/>
  <property name="build.anttasks" value="${build.dir}/ant"/>
  <property name="build.librecordio" value="${build.dir}/librecordio"/>
  <!-- convert spaces to _ so that mac os doesn't break things -->
  <exec executable="sed" inputstring="${os.name}" 
        outputproperty="nonspace.os">
     <arg value="s/ /_/g"/>
  </exec>
  <property name="build.platform" 
            value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
  <property name="jvm.arch" 
            value="${sun.arch.data.model}"/>
  <property name="build.native" value="${build.dir}/native/${build.platform}"/>
  <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
  <property name="build.c++.utils" value="${build.c++}/utils"/>
  <property name="build.c++.pipes" value="${build.c++}/pipes"/>
  <property name="build.c++.libhdfs" value="${build.c++}/libhdfs"/>
  <property name="build.c++.examples.pipes" 
            value="${build.c++}/examples/pipes"/>
  <property name="build.docs" value="${build.dir}/docs"/>
  <property name="build.docs.cn" value="${build.dir}/docs/cn"/>
  <property name="build.javadoc" value="${build.docs}/api"/>
  <property name="build.javadoc.dev" value="${build.docs}/dev-api"/>
  <property name="build.encoding" value="ISO-8859-1"/>
  <property name="install.c++" value="${build.dir}/c++/${build.platform}"/>
  <property name="install.c++.examples" 
            value="${build.dir}/c++-examples/${build.platform}"/>

  <property name="test.src.dir" value="${basedir}/src/test"/>
  <property name="test.lib.dir" value="${basedir}/src/test/lib"/>
  <property name="test.build.dir" value="${build.dir}/test"/>
  <property name="test.generated.dir" value="${test.build.dir}/src"/>
  <property name="test.build.data" value="${test.build.dir}/data"/>
  <property name="test.cache.data" value="${test.build.dir}/cache"/>
  <property name="test.debug.data" value="${test.build.dir}/debug"/>
  <property name="test.log.dir" value="${test.build.dir}/logs"/>
  <property name="test.build.classes" value="${test.build.dir}/classes"/>
  <property name="test.build.testjar" value="${test.build.dir}/testjar"/>
  <property name="test.build.testshell" value="${test.build.dir}/testshell"/>
  <property name="test.build.extraconf" value="${test.build.dir}/extraconf"/>
  <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
  <property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/>
  <property name="test.include" value="Test*"/>
  <property name="test.classpath.id" value="test.classpath"/>
  <property name="test.output" value="no"/>
  <property name="test.timeout" value="900000"/>
  <property name="test.junit.output.format" value="plain"/>
  <property name="test.junit.fork.mode" value="perTest" />
  <property name="test.junit.printsummary" value="yes" />
  <property name="test.junit.haltonfailure" value="no" />
  <property name="test.junit.maxmemory" value="512m" />

  <property name="test.libhdfs.conf.dir" value="${c++.libhdfs.src}/tests/conf"/>
  <property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/>

  <property name="librecordio.test.dir" value="${test.build.dir}/librecordio"/>
  <property name="web.src.dir" value="${basedir}/src/web"/>
  <property name="src.webapps" value="${basedir}/src/webapps"/>

  <property name="javadoc.link.java"
      value="http://java.sun.com/javase/6/docs/api/"/>
  <property name="javadoc.packages" value="org.apache.hadoop.*"/>

  <property name="dist.dir" value="${build.dir}/${final.name}"/>

  <property name="javac.debug" value="on"/>
  <property name="javac.optimize" value="on"/>
  <property name="javac.deprecation" value="off"/>
  <property name="javac.version" value="1.6"/>
  <property name="javac.args" value=""/>
  <property name="javac.args.warnings" value="-Xlint:unchecked"/>

  <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
  <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>

  <property name="rat.reporting.classname" value="rat.Report"/>

  <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
  <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
  <property name="jdiff.stable" value="0.19.2"/>
  <property name="jdiff.stable.javadoc" 
            value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>

  <property name="scratch.dir" value="${user.home}/tmp"/>
  <property name="svn.cmd" value="svn"/>
  <property name="grep.cmd" value="grep"/>
  <property name="patch.cmd" value="patch"/>
  <property name="make.cmd" value="make"/>

  <!-- IVY properties set here -->
  <property name="ivy.dir" location="ivy" />
  <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
  <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
  <property name="ivy_repo_url" value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
  <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
  <property name="ivy.org" value="org.apache.hadoop"/>
  <property name="build.dir" location="build" />
  <property name="dist.dir" value="${build.dir}/${final.name}"/>
  <property name="build.ivy.dir" location="${build.dir}/ivy" />
  <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
  <property name="common.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
  <property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
  <property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven" />
  <property name="build.ivy.maven.pom" location="${build.ivy.maven.dir}/hadoop-core-${hadoop.version}.pom" />
  <property name="build.ivy.maven.jar" location="${build.ivy.maven.dir}/hadoop-core-${hadoop.version}.jar" />

  <!--this is the naming policy for artifacts we want pulled down-->
  <property name="ivy.artifact.retrieve.pattern" value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>

  <!--this is how artifacts that get built are named-->
  <property name="ivy.publish.pattern" value="hadoop-[revision]-core.[ext]"/>
  <property name="hadoop.jar" location="${build.dir}/hadoop-${hadoop.version}-core.jar" />

  <!-- jdiff.home property set -->
  <property name="jdiff.home" value="${build.ivy.lib.dir}/${ant.project.name}/jdiff"/>
  <property name="jdiff.jar" value="${jdiff.home}/jdiff-${jdiff.version}.jar"/>
  <property name="xerces.jar" value="${jdiff.home}/xerces-${xerces.version}.jar"/>

  <property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
  <available property="clover.present" file="${clover.jar}" />

  <!-- check if clover reports should be generated -->
  <condition property="clover.enabled">
    <and>
        <isset property="run.clover"/>
        <isset property="clover.present"/>
    </and>
  </condition>

  <!-- the normal classpath -->
  <path id="classpath">
    <pathelement location="${build.classes}"/>
    <fileset dir="${lib.dir}">
      <include name="**/*.jar" />
      <exclude name="**/excluded/" />
    </fileset>
    <pathelement location="${conf.dir}"/>
    <path refid="ivy-common.classpath"/>
  </path>

  <!-- the unit test classpath: uses test.src.dir for configuration -->
  <path id="test.classpath">
    <pathelement location="${test.build.extraconf}"/>
    <pathelement location="${test.build.classes}" />
    <pathelement location="${test.src.dir}"/>
    <pathelement location="${build.dir}"/>
    <pathelement location="${build.examples}"/>
    <pathelement location="${build.tools}"/>
    <pathelement path="${clover.jar}"/>
    <fileset dir="${test.lib.dir}">
      <include name="**/*.jar" />
      <exclude name="**/excluded/" />
    </fileset>
    <path refid="classpath"/>
  </path>

  <!-- the cluster test classpath: uses conf.dir for configuration -->
  <path id="test.cluster.classpath">
    <path refid="classpath"/>
    <pathelement location="${test.build.classes}" />
    <pathelement location="${test.src.dir}"/>
    <pathelement location="${build.dir}"/>
  </path>

  <!-- properties dependent on the items defined above. -->
  <!--<available classname="${rat.reporting.classname}" classpathref="classpath" property="rat.present" value="true"/> -->

  <!-- ====================================================== -->
  <!-- Macro definitions                                      -->
  <!-- ====================================================== -->
  <macrodef name="macro_tar" description="Worker Macro for tar">
    <attribute name="param.destfile"/>
    <element name="param.listofitems"/>
    <sequential>
      <tar compression="gzip" longfile="gnu"
      destfile="@{param.destfile}">
      <param.listofitems/>
      </tar>
    </sequential>
  </macrodef>

  <!-- ====================================================== -->
  <!-- Stuff needed by all targets                            -->
  <!-- ====================================================== -->
  <target name="init" depends="ivy-retrieve-common">
    <mkdir dir="${build.dir}"/>
    <mkdir dir="${build.classes}"/>
    <mkdir dir="${build.tools}"/>
    <mkdir dir="${build.src}"/>
    <mkdir dir="${build.webapps}/task/WEB-INF"/>
    <mkdir dir="${build.webapps}/job/WEB-INF"/>
    <mkdir dir="${build.webapps}/hdfs/WEB-INF"/>
    <mkdir dir="${build.webapps}/datanode/WEB-INF"/>
    <mkdir dir="${build.webapps}/secondary/WEB-INF"/>
    <mkdir dir="${build.examples}"/>
    <mkdir dir="${build.anttasks}"/>
    <mkdir dir="${build.dir}/c++"/>
 
    <mkdir dir="${test.build.dir}"/>
    <mkdir dir="${test.build.classes}"/>
    <mkdir dir="${test.build.testjar}"/>
    <mkdir dir="${test.build.testshell}"/>
    <mkdir dir="${test.build.extraconf}"/>
    <tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
    <touch millis="0" file="${touch.temp.file}">
      <fileset dir="${conf.dir}" includes="**/*.template"/>
      <fileset dir="${contrib.dir}" includes="**/*.template"/>
    </touch>
    <delete file="${touch.temp.file}"/>
    <!-- copy all of the jsp and static files -->
    <copy todir="${build.webapps}">
      <fileset dir="${src.webapps}">
        <exclude name="**/*.jsp" />
      </fileset>
    </copy>

    <copy todir="${conf.dir}" verbose="true">
      <fileset dir="${conf.dir}" includes="**/*.template"/>
      <mapper type="glob" from="*.template" to="*"/>
    </copy>

    <copy todir="${contrib.dir}" verbose="true">
      <fileset dir="${contrib.dir}" includes="**/*.template"/>
      <mapper type="glob" from="*.template" to="*"/>
    </copy>

    <exec executable="sh">
       <arg line="src/saveVersion.sh ${version}"/>
    </exec>
  
   <exec executable="sh">
       <arg line="src/fixFontsPath.sh ${src.docs.cn}"/>
   </exec>
  </target>

  <!-- ====================================================== -->
  <!-- Compile the Java files                                 -->
  <!-- ====================================================== -->
  <target name="record-parser" depends="init" if="javacc.home">
      <javacc
          target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
          outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
          javacchome="${javacc.home}" />
  </target>
  
  <target name="compile-rcc-compiler" depends="init, record-parser">
    <javac 
        encoding="${build.encoding}" 
        srcdir="${core.src.dir}"
        includes="org/apache/hadoop/record/compiler/**/*.java"
        destdir="${build.classes}"
        debug="${javac.debug}"
        optimize="${javac.optimize}"
        target="${javac.version}"
        source="${javac.version}"
        deprecation="${javac.deprecation}">
        <compilerarg line="${javac.args}"/>
        <classpath refid="classpath"/>
    </javac>
    
    <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
      <classpath refid="classpath" />
    </taskdef>
  </target>
  
  <target name="compile-core-classes" depends="init, compile-rcc-compiler">
     <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
        <classpath refid="test.classpath"/>
     </taskdef>
    <!-- Compile Java files (excluding JSPs) checking warnings -->
    <javac 
     encoding="${build.encoding}" 
     srcdir="${core.src.dir}"  
     includes="org/apache/hadoop/**/*.java"
     destdir="${build.classes}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="classpath"/>
    </javac>

    <copy todir="${build.classes}">
      <fileset dir="${core.src.dir}" includes="**/*.properties"/>
      <fileset dir="${core.src.dir}" includes="core-default.xml"/>
    </copy>
     
  </target>

  <target name="compile-mapred-classes" depends="compile-core-classes">
    <jsp-compile
     uriroot="${src.webapps}/task"
     outputdir="${build.src}"
     package="org.apache.hadoop.mapred"
     webxml="${build.webapps}/task/WEB-INF/web.xml">
    </jsp-compile>

    <jsp-compile
     uriroot="${src.webapps}/job"
     outputdir="${build.src}"
     package="org.apache.hadoop.mapred"
     webxml="${build.webapps}/job/WEB-INF/web.xml">
    </jsp-compile>

    <!-- Compile Java files (excluding JSPs) checking warnings -->
    <javac 
     encoding="${build.encoding}" 
     srcdir="${mapred.src.dir};${build.src}" 
     includes="org/apache/hadoop/**/*.java"
     destdir="${build.classes}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="classpath"/>
    </javac>   
    
    <copy todir="${build.classes}">
      <fileset dir="${mapred.src.dir}" includes="**/*.properties"/>
      <fileset dir="${mapred.src.dir}" includes="mapred-default.xml"/>
    </copy>
  </target>

  <target name="compile-hdfs-classes" depends="compile-core-classes">
    <jsp-compile
     uriroot="${src.webapps}/hdfs"
     outputdir="${build.src}"
     package="org.apache.hadoop.hdfs.server.namenode"
     webxml="${build.webapps}/hdfs/WEB-INF/web.xml">
    </jsp-compile>

    <jsp-compile
     uriroot="${src.webapps}/datanode"
     outputdir="${build.src}"
     package="org.apache.hadoop.hdfs.server.datanode"
     webxml="${build.webapps}/datanode/WEB-INF/web.xml">
    </jsp-compile>

    <!-- Compile Java files (excluding JSPs) checking warnings -->
    <javac 
     encoding="${build.encoding}" 
     srcdir="${hdfs.src.dir};${build.src}" 
     includes="org/apache/hadoop/**/*.java"
     destdir="${build.classes}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="classpath"/>
    </javac>   

    <copy todir="${build.classes}">
     <fileset dir="${hdfs.src.dir}" includes="**/*.properties"/>
     <fileset dir="${hdfs.src.dir}" includes="hdfs-default.xml"/>
    </copy>
  </target>

  <target name="compile-tools" depends="init">
    <javac 
     encoding="${build.encoding}" 
     srcdir="${tools.src}"
     includes="org/apache/hadoop/**/*.java"
     destdir="${build.tools}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="classpath"/>
    </javac>   
    
    <copy todir="${build.tools}">
      <fileset 
        dir="${tools.src}" 
        includes="**/*.properties"
      />
    </copy>
  </target>

  <target name="compile-native">
    <antcall target="compile-core-native">
      <param name="compile.native" value="true"/>
    </antcall> 
  </target>

  <target name="compile-core-native" depends="compile-core-classes"
          if="compile.native">
    
    <mkdir dir="${build.native}/lib"/>
    <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>

    <javah
      classpath="${build.classes}"
      destdir="${build.native}/src/org/apache/hadoop/io/compress/zlib"
      force="yes"
      verbose="yes"
      >
      <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" />
      <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
    </javah>

  <exec dir="${build.native}" executable="sh" failonerror="true">
    <env key="OS_NAME" value="${os.name}"/>
    <env key="OS_ARCH" value="${os.arch}"/>
    <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
    <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
    <arg line="${native.src.dir}/configure"/>
    </exec>

    <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
      <env key="OS_NAME" value="${os.name}"/>
      <env key="OS_ARCH" value="${os.arch}"/>
      <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
      <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
    </exec>

  <exec dir="${build.native}" executable="sh" failonerror="true">
    <arg line="${build.native}/libtool --mode=install cp ${build.native}/lib/libhadoop.la ${build.native}/lib"/>
    </exec>

  </target>

  <target name="compile-core"
          depends="clover,compile-core-classes,compile-mapred-classes,
    compile-hdfs-classes,compile-core-native,compile-c++" 
    description="Compile core only">
  </target>

  <target name="compile-contrib" depends="compile-core,compile-c++-libhdfs">
     <subant target="compile">
        <property name="version" value="${version}"/>
        <fileset file="${contrib.dir}/build.xml"/>
     </subant>    
  </target>
  
  <target name="compile" depends="compile-core, compile-contrib, compile-ant-tasks, compile-tools" description="Compile core, contrib">
  </target>

  <target name="compile-examples" 
          depends="compile-core,compile-tools,compile-c++-examples">
    <javac 
     encoding="${build.encoding}" 
     srcdir="${examples.dir}"
     includes="org/apache/hadoop/**/*.java"
     destdir="${build.examples}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath>
        <path refid="classpath"/>
        <pathelement location="${build.tools}"/>
      </classpath>
    </javac>    
  </target>

  <!-- ================================================================== -->
  <!-- Make hadoop.jar                                                    -->
  <!-- ================================================================== -->
  <!--                                                                    -->
  <!-- ================================================================== -->
  <target name="jar" depends="compile-core" description="Make hadoop.jar">
    <tar compression="gzip" destfile="${build.classes}/bin.tgz">
      <tarfileset dir="bin" mode="755"/>
    </tar>
    <jar jarfile="${build.dir}/${final.name}-core.jar"
         basedir="${build.classes}">
      <manifest>
        <section name="org/apache/hadoop">
          <attribute name="Implementation-Title" value="Hadoop"/>
          <attribute name="Implementation-Version" value="${version}"/>
          <attribute name="Implementation-Vendor" value="Apache"/>
        </section>
      </manifest>
      <fileset file="${conf.dir}/commons-logging.properties"/>
      <fileset file="${conf.dir}/log4j.properties"/>
      <fileset file="${conf.dir}/hadoop-metrics.properties"/>
      <zipfileset dir="${build.webapps}" prefix="webapps"/>
    </jar>
  </target>

  <!-- ================================================================== -->
  <!-- Make the Hadoop examples jar.                                      -->
  <!-- ================================================================== -->
  <!--                                                                    -->
  <!-- ================================================================== -->
  <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar.">
    <jar jarfile="${build.dir}/${final.name}-examples.jar"
         basedir="${build.examples}">
      <manifest>
        <attribute name="Main-Class" 
                   value="org/apache/hadoop/examples/ExampleDriver"/>
      </manifest>
    </jar>
  </target>

  <target name="tools-jar" depends="jar, compile-tools" 
          description="Make the Hadoop tools jar.">
    <jar jarfile="${build.dir}/${final.name}-tools.jar"
         basedir="${build.tools}">
      <manifest>
        <attribute name="Main-Class" 
                   value="org/apache/hadoop/examples/ExampleDriver"/>
      </manifest>
    </jar>
  </target>

  <!-- ================================================================== -->
  <!-- Make the Hadoop metrics jar. (for use outside Hadoop)              -->
  <!-- ================================================================== -->
  <!--                                                                    -->
  <!-- ================================================================== -->
  <target name="metrics.jar" depends="compile-core" description="Make the Hadoop metrics jar. (for use outside Hadoop)">
    <jar jarfile="${build.dir}/hadoop-metrics-${version}.jar"
         basedir="${build.classes}">
      <include name="**/metrics/**" />
      <exclude name="**/package.html" />
    </jar>
  </target>

  <target name="generate-test-records" depends="compile-rcc-compiler">
    <recordcc destdir="${test.generated.dir}">
      <fileset dir="${test.src.dir}"
           includes="**/*.jr" />
    </recordcc>
  </target>
  
  <!-- ================================================================== -->
  <!-- Compile test code                                                  -->
  <!-- ================================================================== -->
  <target name="compile-core-test" depends="compile-examples, compile-tools, generate-test-records">
    <javac 
     encoding="${build.encoding}" 
     srcdir="${test.generated.dir}"
     includes="org/apache/hadoop/**/*.java"
     destdir="${test.build.classes}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args}" />
      <classpath refid="test.classpath"/>
    </javac>
    <javac 
     encoding="${build.encoding}" 
     srcdir="${test.src.dir}"
     includes="org/apache/hadoop/**/*.java"
     destdir="${test.build.classes}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="test.classpath"/>
    </javac> 
    <javac
     encoding="${build.encoding}"
     srcdir="${test.src.dir}/testjar"
     includes="*.java"
     destdir="${test.build.testjar}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}" />
      <classpath refid="test.classpath"/>
    </javac>                                 
    <delete file="${test.build.testjar}/testjob.jar"/> 
    <jar jarfile="${test.build.testjar}/testjob.jar"
     basedir="${test.build.testjar}">
    </jar>
    <javac 
     encoding="${build.encoding}"
     srcdir="${test.src.dir}/testshell"
     includes="*.java"
     destdir="${test.build.testshell}"
     debug="${javac.debug}"
     optimize="${javac.optimize}"
     target="${javac.version}"
     source="${javac.version}"
     deprecation="${javac.deprecation}">
      <compilerarg line="${javac.args} ${javac.args.warnings}"/>
      <classpath refid="test.classpath"/>
     </javac>
     <delete file="${test.build.testshell}/testshell.jar"/>
     <jar jarfile="${test.build.testshell}/testshell.jar"
      basedir="${test.build.testshell}">
     </jar>
                                                              
    <delete dir="${test.cache.data}"/>
    <mkdir dir="${test.cache.data}"/>
    <delete dir="${test.debug.data}"/>
    <mkdir dir="${test.debug.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data15bytes" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data30bytes" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
    <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data120bytes" todir="${test.cache.data}"/>
  </target>

  <!-- ================================================================== -->
  <!-- Make hadoop-test.jar                                               -->
  <!-- ================================================================== -->
  <!--                                                                    -->
  <!-- ================================================================== -->
  <target name="jar-test" depends="compile-core-test" description="Make hadoop-test.jar">
    <jar jarfile="${build.dir}/${final.name}-test.jar"
         basedir="${test.build.classes}">
         <manifest>
           <attribute name="Main-Class"
                      value="org/apache/hadoop/test/AllTestDriver"/>
          <section name="org/apache/hadoop">
            <attribute name="Implementation-Title" value="Hadoop"/>
            <attribute name="Implementation-Version" value="${version}"/>
            <attribute name="Implementation-Vendor" value="Apache"/>
          </section>
         </manifest>
    </jar>
  </target>

  <!-- ================================================================== -->
  <!-- Run unit tests                                                     -->
  <!-- ================================================================== -->
  <target name="test-core" depends="jar-test" description="Run core unit tests">

    <delete dir="${test.build.data}"/>
    <mkdir dir="${test.build.data}"/>
    <delete dir="${test.log.dir}"/>
    <mkdir dir="${test.log.dir}"/>
    <copy file="${test.src.dir}/hadoop-policy.xml" 
      todir="${test.build.extraconf}" />
    <junit showoutput="${test.output}"
      printsummary="${test.junit.printsummary}"
      haltonfailure="${test.junit.haltonfailure}"
      fork="yes"
      forkmode="${test.junit.fork.mode}"
      maxmemory="${test.junit.maxmemory}"
      dir="${basedir}" timeout="${test.timeout}"
      errorProperty="tests.failed" failureProperty="tests.failed">
      <sysproperty key="test.build.data" value="${test.build.data}"/>
      <sysproperty key="test.cache.data" value="${test.cache.data}"/>      
      <sysproperty key="test.debug.data" value="${test.debug.data}"/>
      <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
      <sysproperty key="test.src.dir" value="${test.src.dir}"/>
      <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
      <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
      <sysproperty key="java.library.path"
       value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
      <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
      <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
    <syspropertyset dynamic="no">
      <propertyref name="io.compression.codec.lzo.class"/>
    </syspropertyset>
      <!-- set compile.c++ in the child jvm only if it is set -->
      <syspropertyset dynamic="no">
         <propertyref name="compile.c++"/>
      </syspropertyset>
      <classpath refid="${test.classpath.id}"/>
      <formatter type="${test.junit.output.format}" />
      <batchtest todir="${test.build.dir}" unless="testcase">
        <fileset dir="${test.src.dir}"
           includes="**/${test.include}.java"
     excludes="**/${test.exclude}.java" />
      </batchtest>
      <batchtest todir="${test.build.dir}" if="testcase">
        <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
      </batchtest>
    </junit>
    <fail if="tests.failed">Tests failed!</fail>
  </target>   

  <target name="test-contrib" depends="compile, compile-core-test" description="Run contrib unit tests">
    <subant target="test">
       <property name="version" value="${version}"/>
       <fileset file="${contrib.dir}/build.xml"/>
    </subant> 
  </target>
    
  <target name="test" depends="test-core, test-contrib" description="Run core, contrib unit tests">
  </target>

  <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
  <target name="test-cluster" description="Run all unit tests, not just Test*, and use non-test configuration.">
    <antcall target="test">
      <param name="test.include" value="*"/>
      <param name="test.classpath.id" value="test.cluster.classpath"/>
    </antcall>
  </target>

  <target name="nightly" depends="test, tar">
  </target>
  
  <!-- ================================================================== -->
  <!-- Run optional third-party tool targets                              -->
  <!-- ================================================================== -->
  <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" description="Run optional third-party tool targets">
       <taskdef resource="checkstyletask.properties">
         <classpath refid="checkstyle-classpath"/>
       </taskdef>
  
  <mkdir dir="${test.build.dir}"/>
    
    <checkstyle config="${test.src.dir}/checkstyle.xml"
      failOnViolation="false">
      <fileset dir="${core.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
      <fileset dir="${mapred.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
      <fileset dir="${hdfs.src.dir}" includes="**/*.java" excludes="**/generated/**"/>      
      <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
    </checkstyle>
    
    <xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl"
        in="${test.build.dir}/checkstyle-errors.xml"
        out="${test.build.dir}/checkstyle-errors.html"/>
  </target>
  
  <target name="check-for-checkstyle">
    <available property="checkstyle.present" resource="checkstyletask.properties">
       <classpath refid="checkstyle-classpath"/>
    </available>    
  </target>

 <property name="findbugs.home" value=""/>
  <target name="findbugs" depends="check-for-findbugs, tar" if="findbugs.present" description="Run findbugs if present">
    <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
    <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
    <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/>
    <property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.xml"/>
    <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
        classpath="${findbugs.home}/lib/findbugs-ant.jar" />

        <mkdir dir="${findbugs.out.dir}"/>

    <findbugs home="${findbugs.home}" output="xml:withMessages"
        outputFile="${findbugs.report.xmlfile}" effort="max"
        excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M">
      <auxClasspath>
        <fileset dir="${lib.dir}">
          <include name="**/*.jar"/>
        </fileset>
        <fileset dir="${build.ivy.lib.dir}/${ant.project.name}/common">
          <include name="**/*.jar"/>
        </fileset>
      </auxClasspath>
      <sourcePath path="${core.src.dir}"/>
      <sourcePath path="${mapred.src.dir}"/>
      <sourcePath path="${hdfs.src.dir}"/>
      <sourcePath path="${examples.dir}" />
      <sourcePath path="${tools.src}" />
      <sourcePath path="${basedir}/src/contrib/streaming/src/java" />
      <class location="${basedir}/build/${final.name}-core.jar" />
      <class location="${basedir}/build/${final.name}-examples.jar" />
      <class location="${basedir}/build/${final.name}-tools.jar" />
      <class location="${basedir}/build/contrib/streaming/${final.name}-streaming.jar" />
    </findbugs>

        <xslt style="${findbugs.home}/src/xsl/default.xsl"
        in="${findbugs.report.xmlfile}"
        out="${findbugs.report.htmlfile}"/>
  </target>
  
  <target name="check-for-findbugs">
    <available property="findbugs.present"
        file="${findbugs.home}/lib/findbugs.jar" />
  </target>


  <!-- ================================================================== -->
  <!-- Documentation                                                      -->
  <!-- ================================================================== -->
  
  <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." if="forrest.home">
    <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest"
    failonerror="true">
      <env key="JAVA_HOME" value="${java5.home}"/>
    </exec>
    <copy todir="${build.docs}">
      <fileset dir="${docs.src}/build/site/" />
    </copy>
    <copy file="${docs.src}/releasenotes.html" todir="${build.docs}"/>
    <style basedir="${core.src.dir}" destdir="${build.docs}"
           includes="core-default.xml" style="conf/configuration.xsl"/>
    <style basedir="${hdfs.src.dir}" destdir="${build.docs}"
           includes="hdfs-default.xml" style="conf/configuration.xsl"/>
    <style basedir="${mapred.src.dir}" destdir="${build.docs}"
           includes="mapred-default.xml" style="conf/configuration.xsl"/>
    <antcall target="changes-to-html"/>
    <antcall target="cn-docs"/>
  </target>

  <target name="cn-docs" depends="forrest.check, init" 
       description="Generate forrest-based Chinese documentation. To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." 
        if="forrest.home">
    <exec dir="${src.docs.cn}" executable="${forrest.home}/bin/forrest" failonerror="true">
      <env key="LANG" value="en_US.utf8"/>
      <env key="JAVA_HOME" value="${java5.home}"/>
    </exec>
    <copy todir="${build.docs.cn}">
      <fileset dir="${src.docs.cn}/build/site/" />
    </copy>
    <style basedir="${core.src.dir}" destdir="${build.docs.cn}"
           includes="core-default.xml" style="conf/configuration.xsl"/>
    <style basedir="${hdfs.src.dir}" destdir="${build.docs.cn}"
           includes="hdfs-default.xml" style="conf/configuration.xsl"/>
    <style basedir="${mapred.src.dir}" destdir="${build.docs.cn}"
           includes="mapred-default.xml" style="conf/configuration.xsl"/>
    <antcall target="changes-to-html"/>
  </target>

  <target name="forrest.check" unless="forrest.home" depends="java5.check">
    <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=&lt;base of Apache Forrest installation&gt; to Ant on the command-line." />
  </target>

  <target name="java5.check" unless="java5.home">
    <fail message="'java5.home' is not defined.  Forrest requires Java 5.  Please pass -Djava5.home=&lt;base of Java 5 distribution&gt; to Ant on the command-line." />
  </target>
  
  <target name="javadoc-dev" description="Generate javadoc for hadoop developers">
    <mkdir dir="${build.javadoc.dev}"/>
    <javadoc
      overview="${core.src.dir}/overview.html"
      packagenames="org.apache.hadoop.*"
      destdir="${build.javadoc.dev}"
      author="true"
      version="true"
      use="true"
      windowtitle="${Name} ${version} API"
      doctitle="${Name} ${version} Developer API"
      bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
      >
        <packageset dir="${core.src.dir}"/>
        <packageset dir="${mapred.src.dir}"/>
        <packageset dir="${hdfs.src.dir}"/>          
      <packageset dir="${examples.dir}"/>

      <packageset dir="src/contrib/streaming/src/java"/>
      <packageset dir="src/contrib/data_join/src/java"/>
      <packageset dir="src/contrib/index/src/java"/>

        <link href="${javadoc.link.java}"/>

        <classpath >
          <path refid="classpath" />
          <fileset dir="src/contrib/">
            <include name="*/lib/*.jar" />
          </fileset>
          <pathelement path="${java.class.path}"/>
        </classpath>

      <group title="Core" packages="org.apache.*"/>
      <group title="Examples" packages="org.apache.hadoop.examples*"/>

       <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/>
       <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
       <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>

    </javadoc>
  </target>  

  <target name="javadoc" depends="compile, ivy-retrieve-javadoc" description="Generate javadoc">
    <mkdir dir="${build.javadoc}"/>
    <javadoc
      overview="${core.src.dir}/overview.html"
      packagenames="org.apache.hadoop.*"
      destdir="${build.javadoc}"
      author="true"
      version="true"
      use="true"
      windowtitle="${Name} ${version} API"
      doctitle="${Name} ${version} API"
      bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
      >
        <packageset dir="${core.src.dir}"/>
        <packageset dir="${mapred.src.dir}"/>
      <packageset dir="${examples.dir}"/>

      <packageset dir="src/contrib/streaming/src/java"/>
      <packageset dir="src/contrib/data_join/src/java"/>
      <packageset dir="src/contrib/index/src/java"/>
  <packageset dir="src/contrib/failmon/src/java/"/> 
  
        <link href="${javadoc.link.java}"/>

        <classpath >
          <path refid="classpath" />
          <fileset dir="src/contrib/">
            <include name="*/lib/*.jar" />
          </fileset>
          <path refid="javadoc-classpath"/>
          <pathelement path="${java.class.path}"/>
          <pathelement location="${build.tools}"/>
        </classpath>

      <group title="Core" packages="org.apache.*"/>
      <group title="Examples" packages="org.apache.hadoop.examples*"/>

       <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/>
       <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
       <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>
       <group title="contrib: FailMon" packages="org.apache.hadoop.contrib.failmon*"/>
    </javadoc>
  </target>  

  <target name="api-xml" depends="ivy-retrieve-jdiff,javadoc,write-null">
    <javadoc>
       <doclet name="jdiff.JDiff"
               path="${jdiff.jar}:${xerces.jar}">
         <param name="-apidir" value="${jdiff.xml.dir}"/>
         <param name="-apiname" value="hadoop ${version}"/>
       </doclet>
       <packageset dir="src/core"/>
       <packageset dir="src/mapred"/>
       <packageset dir="src/tools"/>
       <classpath >
         <path refid="classpath" />
         <path refid="jdiff-classpath" />
         <pathelement path="${java.class.path}"/>
       </classpath>
    </javadoc>
  </target>
  
  <target name="write-null">
  <exec executable="touch">
     <arg value="${jdiff.home}/Null.java"/>
        </exec>
  </target> 

  <target name="api-report" depends="ivy-retrieve-jdiff,api-xml">
    <mkdir dir="${jdiff.build.dir}"/>
    <javadoc sourcepath="src/core,src/hdfs,src/mapred,src/tools"
             destdir="${jdiff.build.dir}"
             sourceFiles="${jdiff.home}/Null.java">
       <doclet name="jdiff.JDiff"
               path="${jdiff.jar}:${xerces.jar}">
         <param name="-oldapi" value="hadoop ${jdiff.stable}"/>
         <param name="-newapi" value="hadoop ${version}"/>
         <param name="-oldapidir" value="${jdiff.xml.dir}"/>
         <param name="-newapidir" value="${jdiff.xml.dir}"/>
         <param name="-javadocold" value="${jdiff.stable.javadoc}"/>
         <param name="-javadocnew" value="../../api/"/>
         <param name="-stats"/>
       </doclet>
       <classpath >
         <path refid="classpath" />
         <path refid="jdiff-classpath"/>
         <pathelement path="${java.class.path}"/>
       </classpath>
    </javadoc>
  </target>
  
  <target name="changes-to-html" description="Convert CHANGES.txt into an html file">
    <mkdir dir="${build.docs}"/>
    <exec executable="perl" input="CHANGES.txt" output="${build.docs}/changes.html" failonerror="true">
      <arg value="${changes.src}/changes2html.pl"/>
    </exec>
    <copy todir="${build.docs}">
      <fileset dir="${changes.src}" includes="*.css"/>
    </copy>
  </target>

  <!-- ================================================================== -->
  <!-- D I S T R I B U T I O N                                            -->
  <!-- ================================================================== -->
  <!--                                                                    -->
  <!-- ================================================================== -->
  <target name="package" depends="compile, jar, javadoc, docs, cn-docs, api-report, examples, tools-jar, jar-test, ant-tasks, package-librecordio"
    description="Build distribution">
    <mkdir dir="${dist.dir}"/>
    <mkdir dir="${dist.dir}/lib"/>
    <mkdir dir="${dist.dir}/contrib"/>
    <mkdir dir="${dist.dir}/bin"/>
    <mkdir dir="${dist.dir}/docs"/>
    <mkdir dir="${dist.dir}/docs/api"/>
    <mkdir dir="${dist.dir}/docs/jdiff"/>

    <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
      <fileset dir="${common.ivy.lib.dir}"/>
    </copy>

    <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
      <fileset dir="lib">
        <exclude name="**/native/**"/>
      </fileset>
    </copy>

    <exec dir="${dist.dir}" executable="sh" failonerror="true">
    <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
    <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
    <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
    <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
    </exec>

    <subant target="package">
      <!--Pass down the version in case it's needed again and the target
      distribution directory so contribs know where to install to.-->
      <property name="version" value="${version}"/>
      <property name="dist.dir" value="${dist.dir}"/>
      <fileset file="${contrib.dir}/build.xml"/>
    </subant>    

    <copy todir="${dist.dir}/webapps">
      <fileset dir="${build.webapps}"/>
    </copy>

    <copy todir="${dist.dir}"
      <fileset file="${build.dir}/${final.name}-*.jar"/>
    </copy>
    
    <copy todir="${dist.dir}/bin">
      <fileset dir="bin"/>
    </copy>

    <copy todir="${dist.dir}/conf">
      <fileset dir="${conf.dir}" excludes="**/*.template"/>
    </copy>

    <copy todir="${dist.dir}/docs">
      <fileset dir="${build.docs}"/>
    </copy>

    <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>

    <copy todir="${dist.dir}/ivy">
      <fileset dir="ivy"/>
    </copy>

    <copy todir="${dist.dir}">
      <fileset dir=".">
        <include name="*.txt" />
      </fileset>
    </copy>

    <copy todir="${dist.dir}/src" includeEmptyDirs="true">
      <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/>
    </copy>
    
    <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
      <fileset dir="${build.dir}/c++"/>
    </copy>

    <copy todir="${dist.dir}/" file="build.xml"/>

    <chmod perm="ugo+x" type="file" parallel="false">
        <fileset dir="${dist.dir}/bin"/>
        <fileset dir="${dist.dir}/src/contrib/">
          <include name="*/bin/*" />
        </fileset>
        <fileset dir="${dist.dir}/src/contrib/ec2/bin/image"/>
    </chmod>
    <chmod perm="ugo+x" type="file">
        <fileset dir="${dist.dir}/src/c++/pipes/debug"/>
    </chmod>

  </target>

  <!-- ================================================================== -->
  <!-- Make release tarball                                               -->
  <!-- ================================================================== -->
  <target name="tar" depends="package" description="Make release tarball">
    <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
      <param.listofitems>
        <tarfileset dir="${build.dir}" mode="664">
          <exclude name="${final.name}/bin/*" />
          <exclude name="${final.name}/contrib/*/bin/*" />
          <exclude name="${final.name}/src/contrib/ec2/bin/*" />
          <exclude name="${final.name}/src/contrib/ec2/bin/image/*" />
          <include name="${final.name}/**" />
        </tarfileset>
        <tarfileset dir="${build.dir}" mode="755">
          <include name="${final.name}/bin/*" />
          <include name="${final.name}/contrib/*/bin/*" />
          <include name="${final.name}/src/contrib/ec2/bin/*" />
          <include name="${final.name}/src/contrib/ec2/bin/image/*" />
        </tarfileset>
      </param.listofitems>
    </macro_tar>
  </target>

  <target name="bin-package" depends="compile, jar, examples, tools-jar, jar-test, ant-tasks, package-librecordio" 
    description="assembles artifacts for binary target">
    <mkdir dir="${dist.dir}"/>
    <mkdir dir="${dist.dir}/lib"/>
    <mkdir dir="${dist.dir}/contrib"/>
    <mkdir dir="${dist.dir}/bin"/>

    <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
      <fileset dir="${common.ivy.lib.dir}"/>
    </copy>

    <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
      <fileset dir="lib">
        <exclude name="**/native/**"/>
      </fileset>
    </copy>

    <exec dir="${dist.dir}" executable="sh" failonerror="true">
    <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
    <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
    <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
    <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
    </exec>

    <subant target="package">
      <!--Pass down the version in case it's needed again and the target
      distribution directory so contribs know where to install to.-->
      <property name="version" value="${version}"/>
      <property name="dist.dir" value="${dist.dir}"/>
      <fileset file="${contrib.dir}/build.xml"/>
    </subant>    

    <copy todir="${dist.dir}/webapps">
      <fileset dir="${build.webapps}"/>
    </copy>

    <copy todir="${dist.dir}"
      <fileset file="${build.dir}/${final.name}-*.jar"/>
    </copy>
    
    <copy todir="${dist.dir}/bin">
      <fileset dir="bin"/>
    </copy>

    <copy todir="${dist.dir}/conf">
      <fileset dir="${conf.dir}" excludes="**/*.template"/>
    </copy>

    <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>

    <copy todir="${dist.dir}/ivy">
      <fileset dir="ivy"/>
    </copy>

    <copy todir="${dist.dir}">
      <fileset dir=".">
        <include name="*.txt" />
      </fileset>
    </copy>
    
    <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
      <fileset dir="${build.dir}/c++"/>
    </copy>

    <copy todir="${dist.dir}/" file="build.xml"/>

    <chmod perm="ugo+x" type="file" parallel="false">
        <fileset dir="${dist.dir}/bin"/>
    </chmod>
  </target>

  <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
    <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
      <param.listofitems>
        <tarfileset dir="${build.dir}" mode="664">
          <exclude name="${final.name}/bin/*" />
          <exclude name="${final.name}/src/**" />
          <exclude name="${final.name}/docs/**" />
          <include name="${final.name}/**" />
        </tarfileset>
        <tarfileset dir="${build.dir}" mode="755">
          <include name="${final.name}/bin/*" />
        </tarfileset>
      </param.listofitems>
    </macro_tar>
  </target>

  <!-- ================================================================== -->
  <!-- Perform audit activities for the release                           -->
  <!-- ================================================================== -->
  <target name="releaseaudit" depends="package,ivy-retrieve-releaseaudit" description="Release Audit activities">
    <fail unless="rat.present" message="Failed to load class [${rat.reporting.classname}]."/>
    <java classname="${rat.reporting.classname}" fork="true">
      <classpath refid="releaseaudit-classpath"/>
      <arg value="${build.dir}/${final.name}"/>
    </java>
  </target>

  <!-- ================================================================== -->
  <!-- Clean.  Delete the build files, and their directories              -->
  <!-- ================================================================== -->
  <target name="clean" depends="clean-contrib" description="Clean.  Delete the build files, and their directories">
    <delete dir="${build.dir}"/>
    <delete dir="${docs.src}/build"/>
    <delete dir="${src.docs.cn}/build"/>
  </target>

  <!-- ================================================================== -->
  <!-- Clean contrib target. For now, must be called explicitly           -->
  <!-- Using subant instead of ant as a workaround for 30569              -->
  <!-- ================================================================== -->
  <target name="clean-contrib">
     <subant target="clean">        
        <fileset file="src/contrib/build.xml"/>
     </subant>    
  </target>
  
 <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core" if="islibhdfs">
    <delete dir="${test.libhdfs.dir}"/>
    <mkdir dir="${test.libhdfs.dir}"/>
    <mkdir dir="${test.libhdfs.dir}/logs"/>
    <mkdir dir="${test.libhdfs.dir}/hdfs/name"/>

    <exec dir="${build.c++.libhdfs}" executable="${make.cmd}" failonerror="true">
        <env key="OS_NAME" value="${os.name}"/>
        <env key="OS_ARCH" value="${os.arch}"/>
        <env key="JVM_ARCH" value="${jvm.arch}"/>
        <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
        <env key="HADOOP_HOME" value="${basedir}"/>
        <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/>
        <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
        <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/>
        <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>  
        <env key="LIB_DIR" value="${common.ivy.lib.dir}"/>
    <arg value="test"/>
    </exec>
  </target>

<!-- ================================================================== -->
<!-- librecordio targets.                                               -->
<!-- ================================================================== -->

  <target name="compile-librecordio" depends="init" if="librecordio" >
     <mkdir dir="${build.librecordio}"/>
     <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true">
        <env key="XERCESCROOT" value="${xercescroot}"/>
        <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
     </exec>
  </target>
    
  <target name="test-librecordio" depends="compile-librecordio, compile-core" if="librecordio">
    <delete dir="${librecordio.test.dir}"/>
    <mkdir dir="${librecordio.test.dir}"/>
    <exec dir="${librecordio.src}/test" executable="${make.cmd}" failonerror="true">
        <env key="HADOOP_HOME" value="${basedir}"/>
  <env key="XERCESCROOT" value="${xercescroot}"/>
        <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>   
        <env key="LIBRECORDIO_TEST_DIR" value="${librecordio.test.dir}"/>
          <arg value="all"/>    
    </exec>
  </target>

  <target name="package-librecordio" depends="compile-librecordio" if="librecordio">
    <mkdir dir="${dist.dir}/librecordio"/> 
    <copy todir="${dist.dir}/librecordio">
       <fileset dir="${build.librecordio}" casesensitive="yes" followsymlinks="false">
          <exclude name="**/tests/**"/>
          <exclude name="*.so"/> 
          <exclude name="*.o"/>
       </fileset>
    </copy>
    <chmod perm="ugo+x" type="file">
       <fileset dir="${dist.dir}/librecordio"/>
    </chmod>
  </target>
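  <!-- Usage sketch (assumption): the librecordio targets are skipped unless the
       "librecordio" property is set; xercescroot should point at a Xerces-C install, e.g.:
         ant compile-librecordio -Dlibrecordio=true -Dxercescroot=/usr/local/xerces-c -->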
 
  <target name="create-c++-configure" depends="init" if="compile.c++">
    <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes" 
          failonerror="yes">
       <arg value="-if"/>
    </exec>
    <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes" 
          failonerror="yes">
       <arg value="-if"/>
    </exec>
    <exec executable="autoreconf" dir="${c++.examples.pipes.src}" 
          searchpath="yes" failonerror="yes">
       <arg value="-if"/>
    </exec>
    <antcall target="create-c++-configure-libhdfs"/>
  </target>
   
  <target name="create-c++-configure-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
    <exec executable="autoreconf" dir="${c++.libhdfs.src}" 
          searchpath="yes" failonerror="yes">
       <arg value="-if"/>
    </exec>
  </target>

  <target name="check-c++-makefiles" depends="init" if="compile.c++">
    <condition property="need.c++.utils.makefile">
       <not> <available file="${build.c++.utils}/Makefile"/> </not>
    </condition>
    <condition property="need.c++.pipes.makefile">
       <not> <available file="${build.c++.pipes}/Makefile"/> </not>
    </condition>
    <condition property="need.c++.examples.pipes.makefile">
       <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not>
    </condition>
  </target>

  <target name="check-c++-libhdfs">
    <condition property="islibhdfs">
      <and>
        <isset property="compile.c++"/>
        <isset property="libhdfs"/>
      </and>
    </condition>
  </target>

  <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
    <condition property="need.c++.libhdfs.makefile">
       <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
    </condition>
  </target>

  <target name="create-c++-libhdfs-makefile" depends="check-c++-makefile-libhdfs" 
                                           if="need.c++.libhdfs.makefile">
    <mkdir dir="${build.c++.libhdfs}"/>
    <chmod file="${c++.libhdfs.src}/configure" perm="ugo+x"/>
    <exec executable="${c++.libhdfs.src}/configure" dir="${build.c++.libhdfs}"
          failonerror="yes">
      <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
      <env key="JVM_ARCH" value="${jvm.arch}"/>
      <arg value="--prefix=${install.c++}"/>
    </exec>
  </target>

  <target name="create-c++-utils-makefile" depends="check-c++-makefiles" 
                                           if="need.c++.utils.makefile">
    <mkdir dir="${build.c++.utils}"/>
    <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}"
          failonerror="yes">
      <arg value="--prefix=${install.c++}"/>
    </exec>
  </target>

  <target name="compile-c++-utils" depends="create-c++-utils-makefile"
                                   if="compile.c++">
    <exec executable="${make.cmd}" dir="${build.c++.utils}" searchpath="yes" 
          failonerror="yes">
      <arg  value="install"/>
    </exec>
  </target>

  <target name="create-c++-pipes-makefile" depends="check-c++-makefiles" 
                                           if="need.c++.pipes.makefile">
    <mkdir dir="${build.c++.pipes}"/>
    <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
          failonerror="yes">
      <arg value="--prefix=${install.c++}"/>
    </exec>
  </target>

  <target name="compile-c++-pipes" 
          depends="create-c++-pipes-makefile,compile-c++-utils"
          if="compile.c++">
    <exec executable="${make.cmd}" dir="${build.c++.pipes}" searchpath="yes" 
          failonerror="yes">
      <arg value="install"/>
    </exec>
  </target>

  <target name="compile-c++" 
          depends="compile-c++-pipes"/>

  <target name="create-c++-examples-pipes-makefile" 
          depends="check-c++-makefiles" 
          if="need.c++.examples.pipes.makefile">
    <mkdir dir="${build.c++.examples.pipes}"/>
    <exec executable="${c++.examples.pipes.src}/configure" 
          dir="${build.c++.examples.pipes}"
          failonerror="yes">
      <arg value="--prefix=${install.c++.examples}"/>
      <arg value="--with-hadoop-utils=${install.c++}"/>
      <arg value="--with-hadoop-pipes=${install.c++}"/>
    </exec>
  </target>

  <target name="compile-c++-examples-pipes" 
          depends="create-c++-examples-pipes-makefile,compile-c++-pipes"
          if="compile.c++">
    <exec executable="${make.cmd}" dir="${build.c++.examples.pipes}" searchpath="yes" 
          failonerror="yes">
      <arg  value="install"/>
    </exec>
  </target>

  <target name="compile-c++-examples" 
          depends="compile-c++-examples-pipes"/>

  <target name="compile-c++-libhdfs" depends="create-c++-libhdfs-makefile" if="islibhdfs">
    <exec executable="${make.cmd}" dir="${build.c++.libhdfs}" searchpath="yes"
          failonerror="yes">
      <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
      <env key="JVM_ARCH" value="${jvm.arch}"/>
      <arg value="install"/>
    </exec>
  </target>
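  <!-- Usage sketch (assumption): the C++ targets above are guarded by compile.c++ (and,
       for libhdfs, the libhdfs property) and shell out to autoreconf, configure and make, e.g.:
         ant create-c++-configure compile-c++ compile-c++-examples -Dcompile.c++=true
         ant compile-c++-libhdfs -Dcompile.c++=true -Dlibhdfs=true -->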



  <target name="compile-ant-tasks" depends="compile-core">
    <javac
        encoding="${build.encoding}"
        srcdir="${anttasks.dir}"
        includes="org/apache/hadoop/ant/**/*.java"
        destdir="${build.anttasks}"
        debug="${javac.debug}"
        optimize="${javac.optimize}"
        target="${javac.version}"
        source="${javac.version}"
        deprecation="${javac.deprecation}">
        <compilerarg line="${javac.args}"/>
        <classpath refid="classpath"/>
    </javac>
  </target>

  <target name="ant-tasks" depends="jar, compile-ant-tasks">
    <copy file="${anttasks.dir}/org/apache/hadoop/ant/antlib.xml"
          todir="${build.anttasks}/org/apache/hadoop/ant"/>
    <jar destfile="${build.dir}/${final.name}-ant.jar">
      <fileset dir="${build.anttasks}"/>
    </jar>
  </target>
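  <!-- Consumption sketch (assumption, not part of the original script): another Ant build
       can load the generated Hadoop tasks through the bundled antlib descriptor, e.g.:
         <taskdef resource="org/apache/hadoop/ant/antlib.xml"
                  classpath="${build.dir}/${final.name}-ant.jar"/>
       where the classpath points at the jar produced by the ant-tasks target above. -->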



 <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover.  To use, specify -Dclover.home=&lt;base of clover installation&gt; -Drun.clover=true on the command line."/>

<target name="clover.setup" if="clover.enabled">
   <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
   <mkdir dir="${clover.db.dir}"/>
   <clover-setup initString="${clover.db.dir}/hadoop_coverage.db">
     <fileset dir="src" includes="core/**/* tools/**/* hdfs/**/* mapred/**/*"/>
   </clover-setup>
</target>

<target name="clover.info" unless="clover.present">
  <echo>
     Clover not found. Code coverage reports disabled.
  </echo>
</target>

<target name="clover.check">
  <fail unless="clover.present">
  ##################################################################
   Clover not found.
   Please specify -Dclover.home=&lt;base of clover installation&gt;
   on the command line.
  ##################################################################
  </fail>
</target>

<target name="generate-clover-reports" depends="clover.check, clover">
  <mkdir dir="${clover.report.dir}"/>
  <clover-report>
     <current outfile="${clover.report.dir}" title="${final.name}">
     <format type="html"/>
     </current>
  </clover-report>
  <clover-report>
     <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
     <format type="xml"/>
     </current>
  </clover-report>
</target>
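<!-- Usage sketch (assumption): instrument, run the unit tests (the "test" target is assumed
     to be defined earlier in this build file), then render the HTML and XML reports, e.g.:
       ant test generate-clover-reports -Dclover.home=/opt/clover -Drun.clover=true -->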

<target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present">
  <fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=&lt;base of Findbugs installation&gt; to Ant on the command-line." />
</target>

<target name="patch.check" unless="patch.file">
  <fail message="'patch.file' is not defined. Please pass -Dpatch.file=&lt;location of patch file&gt; to Ant on the command-line." />
</target>

<target name="test-patch" depends="patch.check,findbugs.check,forrest.check">
  <exec executable="bash" failonerror="true">
    <arg value="${basedir}/src/test/bin/test-patch.sh"/>
    <arg value="DEVELOPER"/>
    <arg value="${patch.file}"/>
    <arg value="${scratch.dir}"/>
    <arg value="${svn.cmd}"/>
    <arg value="${grep.cmd}"/>
    <arg value="${patch.cmd}"/>
    <arg value="${findbugs.home}"/>
    <arg value="${forrest.home}"/>
    <arg value="${basedir}"/>
    <arg value="${java5.home}"/>
  </exec>
</target>
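<!-- Usage sketch (assumption): the DEVELOPER flavour of test-patch.sh takes the tool locations
     as properties; svn.cmd, grep.cmd and patch.cmd are assumed to default to the binaries on
     the PATH. A typical run looks like:
       ant test-patch -Dpatch.file=/tmp/example.patch -Dscratch.dir=/tmp/scratch -Dfindbugs.home=/opt/findbugs -Dforrest.home=/opt/forrest -Djava5.home=/opt/jdk1.5 -->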

<target name="hudson-test-patch" depends="findbugs.check,forrest.check">
  <exec executable="bash" failonerror="true">
    <arg value="${basedir}/src/test/bin/test-patch.sh"/>
    <arg value="HUDSON"/>
    <arg value="${scratch.dir}"/>
    <arg value="${support.dir}"/>
    <arg value="${ps.cmd}"/>
    <arg value="${wget.cmd}"/>
    <arg value="${jiracli.cmd}"/>
    <arg value="${svn.cmd}"/>
    <arg value="${grep.cmd}"/>
    <arg value="${patch.cmd}"/>
    <arg value="${findbugs.home}"/>
    <arg value="${forrest.home}"/>
    <arg value="${eclipse.home}"/>
    <arg value="${python.home}"/>
    <arg value="${basedir}"/>
    <arg value="${trigger.url}"/>
    <arg value="${jira.passwd}"/>
    <arg value="${java5.home}"/>
  </exec>
</target>
  
  <target name="eclipse-files" depends="init"
          description="Generate files for Eclipse">
    <pathconvert property="eclipse.project">
      <path path="${basedir}"/>
      <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
    </pathconvert>
    <copy todir="." overwrite="true">
      <fileset dir=".eclipse.templates">
        <exclude name="**/README.txt"/>
      </fileset>
      <filterset>
        <filter token="PROJECT" value="${eclipse.project}"/>
      </filterset>
    </copy>
  </target>

  <target name="ivy-init-dirs">
    <mkdir dir="${build.ivy.dir}" />
    <mkdir dir="${build.ivy.lib.dir}" />
    <mkdir dir="${build.ivy.report.dir}" />
    <mkdir dir="${build.ivy.maven.dir}" />
  </target>

  <target name="ivy-probe-antlib" >
    <condition property="ivy.found">
      <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
    </condition>
  </target>

  <target name="ivy-download" description="To download ivy" unless="offline">
    <get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
  </target>
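  <!-- Usage sketch (assumption): pass -Doffline=true to skip the download and reuse a
       previously fetched ${ivy.jar}; ivy_repo_url can be overridden to point at a mirror, e.g.:
         ant ivy-download -Divy_repo_url=http://mirror.example.org/ivy.jar -->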

  <!--
  To avoid Ivy leaking across big multi-project builds, always load Ivy in the same classloader.
  Also note that loading Ivy is skipped if it is already present, just to make sure all is well.
  -->
  <target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
    <typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
      loaderRef="ivyLoader">
      <classpath>
        <pathelement location="${ivy.jar}"/>
      </classpath>
    </typedef>
    <fail >
      <condition >
        <not>
          <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
        </not>
      </condition>
      You need Apache Ivy 2.0 or later from http://ant.apache.org/
      It could not be loaded from ${ivy_repo_url}
    </fail>
  </target>


  <target name="ivy-init" depends="ivy-init-antlib" >

    <!-- Configure Ivy by reading in the settings file.
         If a settings file has already been loaded into this settings ID, it takes priority.
    -->
    <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'/>
  </target>
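  <!-- Sketch (assumption): because override is false, an enclosing build can pre-load its own
       settings under the same ID and they will take priority, e.g.:
         <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="my-ivysettings.xml"/> -->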

  <target name="ivy-resolve" depends="ivy-init">
    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"/>
  </target>

  <target name="ivy-resolve-javadoc" depends="ivy-init">
    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"/>
  </target>

  <target name="ivy-resolve-releaseaudit" depends="ivy-init">
    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"/>
  </target>

  <target name="ivy-resolve-test" depends="ivy-init">
    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test" />
  </target>

  <target name="ivy-resolve-common" depends="ivy-init">
    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" />
  </target>

  <target name="ivy-resolve-jdiff" depends="ivy-init">
    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff" />
  </target>

  <target name="ivy-resolve-checkstyle" depends="ivy-init">
    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"/>
  </target>

  <target name="ivy-retrieve" depends="ivy-resolve"
    description="Retrieve Ivy-managed artifacts">
    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
  </target>

  <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle"
    description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
    <ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/>
  </target>

  <target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff"
    description="Retrieve Ivy-managed artifacts for the javadoc configurations">
    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
    <ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/>
  </target>

  <target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc"
    description="Retrieve Ivy-managed artifacts for the javadoc configurations">
    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
    <ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/>
  </target>

  <target name="ivy-retrieve-test" depends="ivy-resolve-test"
    description="Retrieve Ivy-managed artifacts for the test configurations">
    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
    <ivy:cachepath pathid="test.classpath" conf="test"/>
  </target>

  <target name="ivy-retrieve-common" depends="ivy-resolve-common"
    description="Retrieve Ivy-managed artifacts for the compile configurations">
    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
    <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
  </target>

  <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
    description="Retrieve Ivy-managed artifacts for the compile configurations">
    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" />
    <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
    <available classname="${rat.reporting.classname}" 
      classpathref="releaseaudit-classpath" property="rat.present" value="true"/>
  </target>

  <target name="ivy-report" depends="ivy-resolve-releaseaudit"
    description="Generate">
    <ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
    <echo>
      Reports generated:${build.ivy.report.dir}
    </echo>
  </target>

  <target name="assert-hadoop-jar-exists" depends="ivy-init">
    <fail>
      <condition >
        <not>
          <available file="${hadoop.jar}" />
        </not>
      </condition>
      Not found: ${hadoop.jar}
      Please run the target "jar" in the main build file
    </fail>

  </target>

  <target name="ready-to-publish" depends="jar,assert-hadoop-jar-exists,ivy-resolve"/>

  <target name="ivy-publish-local" depends="ready-to-publish,ivy-resolve">
    <ivy:publish
      settingsRef="${ant.project.name}.ivy.settings"
      resolver="local"
      pubrevision="${hadoop.version}"
      overwrite="true"
      artifactspattern="${build.dir}/${ivy.publish.pattern}" />
  </target>
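  <!-- Usage sketch (assumption): publishes the built artifacts to the resolver named "local"
       in the Ivy settings (conventionally the per-user local Ivy repository); pubrevision is
       taken from ${hadoop.version}, assumed to be set elsewhere in this build file, e.g.:
         ant ivy-publish-local -->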


  <!-- This is here out of curiosity, to see how well the makepom task works.
       Answer: it depends on whether you want transitive dependencies excluded or not.
  -->
  <target name="makepom" depends="ivy-resolve">
    <ivy:makepom settingsRef="${ant.project.name}.ivy.settings"
      ivyfile="ivy.xml"
      pomfile="${build.ivy.maven.dir}/generated.pom">
      <ivy:mapping conf="default" scope="default"/>
      <ivy:mapping conf="master" scope="master"/>
      <ivy:mapping conf="runtime" scope="runtime"/>
    </ivy:makepom>
  </target>


  <target name="copy-jar-to-maven" depends="ready-to-publish">
    <copy file="${hadoop.jar}"
      tofile="${build.ivy.maven.jar}"/>
    <checksum file="${build.ivy.maven.jar}" algorithm="md5"/>
  </target>

  <target name="copypom" depends="ivy-init-dirs">

   <presetdef name="expandingcopy" >
    <copy overwrite="true">
      <filterchain>
        <expandproperties/>
      </filterchain>
    </copy>
   </presetdef>

   <expandingcopy file="ivy/hadoop-core.pom"
      tofile="${build.ivy.maven.pom}"/>
   <checksum file="${build.ivy.maven.pom}" algorithm="md5"/>
  </target>

  <target name="maven-artifacts" depends="copy-jar-to-maven,copypom" />

  <target name="published" depends="ivy-publish-local,maven-artifacts">

  </target>

</project>



File: ivy.xml

<?xml version="1.0" ?>

<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-->

<ivy-module version="1.0">
  <info organisation="org.apache.hadoop" module="${ant.project.name}">
    <license name="Apache 2.0"/>
    <ivyauthor name="Apache Hadoop Team" url="http://hadoop.apache.org"/>
    <description>
        Hadoop Core
    </description>
  </info>
  <configurations defaultconfmapping="default">
    <!--these match the Maven configurations-->
    <conf name="default" extends="master,runtime"/>
    <conf name="master" description="contains the artifact but no dependencies"/>
    <conf name="runtime" description="runtime but not the artifact"
      extends="client,server,s3-server,kfs"/>

    <conf name="mandatory" description="contains the critical  dependencies"
      extends="commons-logging,log4j"/>

    <!--
    These public configurations contain the core dependencies for running hadoop client or server.
    The server is effectively a superset of the client.
    -->
    <conf name="client" description="client-side dependencies"
      extends="mandatory,httpclient"/>
    <conf name="server" description="server-side dependencies"
      extends="client"/>
    <conf name="s3-client" description="dependencies for working with S3/EC2 infrastructure"
      extends="client"/>
    <conf name="s3-server" description="dependencies for running on S3/EC2 infrastructure"
      extends="s3-client,server"/>
    <conf name="kfs" description="dependencies for KFS file system support"/>
    <conf name="ftp" description="dependencies for workign with FTP filesytems"
              extends="mandatory"/>
   <conf name="jetty" description="Jetty provides the in-VM HTTP daemon" extends="commons-logging"/>

    <!--Private configurations. -->

    <conf name="common" visibility="private" extends="runtime,mandatory,httpclient,ftp,jetty"
          description="common artifacts"/>
    <conf name="javadoc" visibility="private" description="artiracts required while performing doc generation"
      extends="common,mandatory,jetty,lucene"/>
    <!--Testing pulls in everything-->
    <conf name="test" extends="common,default,s3-server,kfs" visibility="private"
      description="the classpath needed to run tests"/>
    <conf name="releaseaudit" visibility="private"
  description="Artifacts required for releaseaudit target"/>
     
    <conf name="commons-logging" visibility="private"/>
    <conf name="httpclient" visibility="private" extends="commons-logging"/>
    <conf name="log4j" visibility="private"/>
    <conf name="lucene" visibility="private"/>
    <conf name="jdiff" visibility="private" extends="log4j,s3-client,jetty,server"/>
    <conf name="checkstyle" visibility="private"/>

  </configurations>

  <publications>
    <!--get the artifact from our module name-->
    <artifact conf="master"/>
  </publications>
  <dependencies>

 <!--used client side-->
    <dependency org="commons-cli"
      name="commons-cli"
      rev="${commons-cli.version}"
      conf="client->default"/>

    <dependency org="checkstyle"
      name="checkstyle"
      rev="${checkstyle.version}"
      conf="checkstyle->default"/>
    <dependency org="jdiff"
      name="jdiff"
      rev="${jdiff.version}"
      conf="jdiff->default"/>
    <dependency org="xerces"
      name="xerces"
      rev="${xerces.version}"
      conf="jdiff->default">
    </dependency>

    <dependency org="xmlenc"
      name="xmlenc"
      rev="${xmlenc.version}"
      conf="server->default"/>

    <!--Configuration: httpclient-->

    <!--
    commons-httpclient asks for too many files.
    All it needs is commons-codec and commons-logging JARs
    -->
    <dependency org="commons-httpclient"
      name="commons-httpclient"
      rev="${commons-httpclient.version}"
      conf="httpclient->master">
    </dependency>
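    <!-- Alternative sketch (assumption, not used here): a similar effect can be approximated by
         mapping to the default conf and excluding the unwanted transitive modules, e.g.:
           <dependency org="commons-httpclient" name="commons-httpclient"
                       rev="${commons-httpclient.version}" conf="httpclient->default">
             <exclude org="junit" module="junit"/>
           </dependency> -->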

    <dependency org="commons-codec"
      name="commons-codec"
      rev="${commons-codec.version}"
      conf="httpclient->default"/>

    <dependency org="commons-net"
      name="commons-net"
      rev="${commons-net.version}"
      conf="ftp->default"/>

    <!--Configuration: Jetty -->

<!-- <dependency org="javax.servlet"
      name="servlet-api"
      rev="${servlet-api.version}"
      conf="jetty->master"/>   -->
    <dependency org="org.mortbay.jetty"
      name="jetty"
      rev="${jetty.version}"
      conf="jetty->master"/>
    <dependency org="org.mortbay.jetty"
      name="jetty-util"
      rev="${jetty-util.version}"
      conf="jetty->master"/>

    <dependency org="tomcat"
      name="jasper-runtime"
      rev="${jasper.version}"
      conf="jetty->master"/>
    <dependency org="tomcat"
      name="jasper-compiler"
      rev="${jasper.version}"
      conf="jetty->master"/>
<!-- this is resolved locally from the lib folder 
   <dependency org="tomcat"
      name="jsp-api"
      rev="${jsp-api.version}"
      conf="jetty->master"/> -->
    <dependency org="commons-el"
      name="commons-el"
      rev="${commons-el.version}"
      conf="jetty->master"/>


    <!--Configuration: commons-logging -->

    <!--it is essential that only the master JAR of commons logging
    is pulled in, as its dependencies are usually a mess, including things
    like out of date servlet APIs, bits of Avalon, etc.
    -->
    <dependency org="commons-logging"
      name="commons-logging"
      rev="${commons-logging.version}"
      conf="commons-logging->master"/>


    <!--Configuration: log4j -->

    <!--Log4J is not optional until commons-logging.properties is stripped out of the JAR -->
    <dependency org="log4j"
      name="log4j"
      rev="${log4j.version}"
      conf="log4j->master"/>

    <!--Configuration: s3-client -->
    <!--there are two jets3t projects in the repository; this one goes up to 0.6 and
    is assumed to be the live one-->
    <dependency org="net.java.dev.jets3t"
      name="jets3t"
      rev="${jets3t.version}"
      conf="s3-client->master"/>
    <dependency org="commons-net"
      name="commons-net"
      rev="${commons-net.version}"
      conf="s3-client->master"/> 
    <dependency org="org.mortbay.jetty"
      name="servlet-api-2.5"
      rev="${servlet-api-2.5.version}"
      conf="s3-client->master"/>

    <!--Configuration: kfs -->

    <!-- This is not in the repository
  <dependency org="org.kosmix"
    name="kfs"
    rev="${kfs.version}"
    conf="kfs->default"/>-->

    <!--Configuration: test -->

    <!--artifacts needed for testing -->
    <dependency org="junit"
      name="junit"
      rev="${junit.version}"
      conf="common->default"/>
    <dependency org="com.google.code.p.arat"
      name="rat-lib"
      rev="${rats-lib.version}"
      conf="releaseaudit->default"/>
    <dependency org="commons-lang"
      name="commons-lang"
      rev="${commons-lang.version}"
      conf="releaseaudit->default"/>
    <dependency org="commons-collections"
      name="commons-collections"
      rev="${commons-collections.version}"
      conf="releaseaudit->default"/>
<!--<dependency org="hsqldb"
      name="hsqldb"
      rev="${hsqldb.version}"
      conf="common->default"/>
    <dependency org="lucene"
      name="lucene"
      rev="${lucene.version}"
      conf="javadoc->default"/> --> 
    <dependency org="org.apache.lucene"
      name="lucene-core"
      rev="${lucene-core.version}"
      conf="javadoc->default"/> 
    <dependency org="commons-logging"
      name="commons-logging-api"
      rev="${commons-logging-api.version}"
      conf="common->default"/>
    <dependency org="org.slf4j"
      name="slf4j-api"
      rev="${slf4j-api.version}"
      conf="common->master"/>
    <dependency org="org.eclipse.jdt"
      name="core"
      rev="${core.version}"
      conf="common->master"/>
    <dependency org="oro"
      name="oro"
      rev="${oro.version}"
      conf="common->default"/>
    <dependency org="org.slf4j"
      name="slf4j-log4j12"
      rev="${slf4j-log4j12.version}"
      conf="common->master">
    </dependency>
  </dependencies>
  
</ivy-module>
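
A note on the Ivy settings (an assumption, not part of the listings above): the ivy:configure call in build.xml reads its resolvers from the file named by ${ivysettings.xml}, and the ivy-publish-local target expects a resolver named "local". Hadoop's real settings file is not reproduced on this page; the fragment below is only a minimal illustrative sketch of such a file, with resolver names and patterns chosen for the example.

<ivysettings>
  <settings defaultResolver="default"/>
  <resolvers>
    <!-- remote Maven 2 repository used for normal dependency resolution -->
    <ibiblio name="maven2" m2compatible="true"/>
    <!-- per-user repository that ivy-publish-local publishes into -->
    <filesystem name="local">
      <ivy pattern="${ivy.default.ivy.user.dir}/local/[organisation]/[module]/[revision]/ivys/ivy.xml"/>
      <artifact pattern="${ivy.default.ivy.user.dir}/local/[organisation]/[module]/[revision]/[type]s/[artifact].[ext]"/>
    </filesystem>
    <!-- try the local repository first, then fall back to the remote one -->
    <chain name="default">
      <resolver ref="local"/>
      <resolver ref="maven2"/>
    </chain>
  </resolvers>
</ivysettings>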

 