author     Clement Laforet <clement@FreeBSD.org>  2012-03-27 14:51:01 +0000
committer  Clement Laforet <clement@FreeBSD.org>  2012-03-27 14:51:01 +0000
commit     95b1c2e57b6bf90b810fbca7e8c809552a26ffcf (patch)
tree       aec89bab56fd6040b1cc70a2172ecdd14aedae64
parent     228a70531207ba9af72e70812f0a79451444bafe (diff)
- Add hadoop 1.0.0
- Add hadoop user to GIDs/UIDs (955)

The Apache Hadoop software library is a framework that allows for the
distributed processing of large data sets across clusters of computers
using a simple programming model.

WWW: http://hadoop.apache.org/

Approved by:	culot@, jadawin@ (mentors)
Feature safe:	yes
Notes:
    svn path=/head/; revision=293885
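For reference, the five rc.d scripts added below (datanode, namenode, secondarynamenode, jobtracker, tasktracker) are enabled the usual way from /etc/rc.conf. A minimal sketch for a single-node setup follows; which daemons to enable on a given host is, of course, deployment-specific:

    # /etc/rc.conf -- enable the Hadoop daemons installed by this port
    namenode_enable="YES"
    datanode_enable="YES"
    jobtracker_enable="YES"
    tasktracker_enable="YES"
    secondarynamenode_enable="YES"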
-rw-r--r--  GIDs  1
-rw-r--r--  UIDs  1
-rw-r--r--  devel/Makefile  1
-rw-r--r--  devel/hadoop/Makefile  144
-rw-r--r--  devel/hadoop/distinfo  2
-rw-r--r--  devel/hadoop/files/datanode.in  72
-rw-r--r--  devel/hadoop/files/hadoop.in  7
-rw-r--r--  devel/hadoop/files/jobtracker.in  72
-rw-r--r--  devel/hadoop/files/namenode.in  72
-rw-r--r--  devel/hadoop/files/patch-build.xml  80
-rw-r--r--  devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c  11
-rw-r--r--  devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c  11
-rw-r--r--  devel/hadoop/files/patch-src__native__Makefile.in  105
-rw-r--r--  devel/hadoop/files/patch-src__native__configure  11
-rw-r--r--  devel/hadoop/files/patch-src__native__configure.ac  13
-rw-r--r--  devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c  11
-rw-r--r--  devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c  12
-rw-r--r--  devel/hadoop/files/pkg-deinstall.in  14
-rw-r--r--  devel/hadoop/files/pkg-install.in  55
-rw-r--r--  devel/hadoop/files/secondarynamenode.in  72
-rw-r--r--  devel/hadoop/files/tasktracker.in  72
-rw-r--r--  devel/hadoop/pkg-descr  5
-rw-r--r--  devel/hadoop/pkg-plist  346
23 files changed, 1190 insertions(+), 0 deletions(-)
diff --git a/GIDs b/GIDs
index 7e18eea2f43a..c3c7ce7d5e71 100644
--- a/GIDs
+++ b/GIDs
@@ -227,4 +227,5 @@ conquest:*:950:
openerpd:*:951:
bitten-slave:*:952:
_neubot:*:953:
+hadoop:*:955:
nobody:*:65534:
diff --git a/UIDs b/UIDs
index 31f5788fb845..d2d323a3e101 100644
--- a/UIDs
+++ b/UIDs
@@ -230,4 +230,5 @@ openerpd:*:951:951::0:0:Openerpd user:/nonexistent:/usr/sbin/nologin
bitten-slave:*:952:952:daemon:0:0:Bitten slave user:/var/lib/bitten-slave:/usr/sbin/nologin
_neubot:*:953:953::0:0:neubot daemon:/nonexistent:/usr/sbin/nologin
oops:*:954:65534::0:0:oops user:/nonexistent:/usr/sbin/nologin
+hadoop:*:955:955::0:0:hadoop user:/nonexistent:/usr/sbin/nologin
nobody:*:65534:65534::0:0:Unprivileged user:/nonexistent:/usr/sbin/nologin
diff --git a/devel/Makefile b/devel/Makefile
index 3badc079f9fa..49279fb8c2f3 100644
--- a/devel/Makefile
+++ b/devel/Makefile
@@ -554,6 +554,7 @@
SUBDIR += hachoir-core
SUBDIR += hachoir-parser
SUBDIR += hachoir-regex
+ SUBDIR += hadoop
SUBDIR += happydoc
SUBDIR += hapy
SUBDIR += hcs12mem
diff --git a/devel/hadoop/Makefile b/devel/hadoop/Makefile
new file mode 100644
index 000000000000..6f7cd66d7aff
--- /dev/null
+++ b/devel/hadoop/Makefile
@@ -0,0 +1,144 @@
+# New ports collection makefile for: hadoop
+# Date created: 2012-02-22
+# Whom: Clement Laforet <clement@FreeBSD.org>
+#
+# $FreeBSD$
+
+PORTNAME= hadoop
+PORTVERSION= 1.0.0
+CATEGORIES= devel java
+MASTER_SITES= ${MASTER_SITE_APACHE}
+MASTER_SITE_SUBDIR= ${PORTNAME}/core/${PORTNAME}-${PORTVERSION}
+DIST_SUBDIR= hadoop
+
+MAINTAINER= clement@FreeBSD.org
+COMMENT= Apache Map/Reduce framework
+
+LICENSE= AL2
+LICENSE_FILE= ${WRKSRC}/LICENSE.txt
+
+USE_JAVA= yes
+JAVA_VERSION= 1.6
+USE_ANT= yes
+ONLY_FOR_ARCHS= amd64 i386
+
+WRKSRC= ${WRKDIR}/${PORTNAME}-${PORTVERSION}
+
+BUILD_DEPENDS+= ${LOCALBASE}/bin/gmake:${PORTSDIR}/devel/gmake \
+ ${LOCALBASE}/bin/gsed:${PORTSDIR}/textproc/gsed
+RUN_DEPENDS+= ${LOCALBASE}/bin/bash:${PORTSDIR}/shells/bash
+
+_HADOOP_DIR= share/${PORTNAME}
+_HADOOP_ETC= etc/${PORTNAME}
+HADOOP_HOME= ${PREFIX}/${_HADOOP_DIR}
+HADOOP_ETC= ${PREFIX}/${_HADOOP_ETC}
+
+HADOOP_LOGDIR= /var/log/${PORTNAME}
+HADOOP_RUNDIR= /var/run/${PORTNAME}
+HADOOP_BIN= ${PREFIX}/bin/${PORTNAME}
+
+USERS= hadoop
+GROUPS= hadoop
+HADOOP_USER= ${USERS}
+HADOOP_GROUP= ${GROUPS}
+
+SUB_FILES+= pkg-install pkg-deinstall hadoop 000.java_home.env
+USE_RC_SUBR+= tasktracker jobtracker datanode namenode secondarynamenode
+
+PLIST_SUB+= PORTVERSION="${PORTVERSION}"\
+ HADOOP_HOME="${_HADOOP_DIR}" \
+ HADOOP_ETC="${_HADOOP_ETC}"
+SUB_LIST= HADOOP_LOGDIR="${HADOOP_LOGDIR}" \
+ HADOOP_RUNDIR="${HADOOP_RUNDIR}" \
+ HADOOP_USER="${HADOOP_USER}" \
+ HADOOP_GROUP="${HADOOP_GROUP}" \
+ HADOOP_UID="${HADOOP_UID}" \
+ HADOOP_GID="${HADOOP_GID}" \
+ HADOOP_HOME="${HADOOP_HOME}" \
+ HADOOP_ETC="${HADOOP_ETC}" \
+ JAVA_HOME="${JAVA_HOME}"
+
+PORTDOCS= *
+FIX_PERMS= src/c++/pipes/install-sh src/c++/utils/install-sh src/c++/libhdfs/install-sh \
+ src/c++/libhdfs/tests/test-libhdfs.sh
+
+FIX_DOCS= docs/cn/skin/css docs/cn/skin/scripts docs/cn/skin/translations \
+ docs/jdiff docs/skin/css docs/skin/scripts docs/skin/translations
+
+DOC= CHANGES.txt LICENSE.txt NOTICE.txt README.txt
+
+DEFAULTS= src/core/core-default.xml src/hdfs/hdfs-default.xml src/mapred/mapred-default.xml hadoop-examples-${PORTVERSION}.jar
+
+DIST= bin contrib hadoop-ant-${PORTVERSION}.jar hadoop-core-${PORTVERSION}.jar \
+ hadoop-test-${PORTVERSION}.jar hadoop-tools-${PORTVERSION}.jar lib webapps
+CONF= capacity-scheduler.xml configuration.xsl core-site.xml hadoop-env.sh hadoop-metrics2.properties \
+ hadoop-policy.xml hdfs-site.xml log4j.properties mapred-queue-acls.xml mapred-site.xml taskcontroller.cfg
+
+MAKE_ARGS= -Dbuild.classes=${WRKSRC}/hadoop-core-${PORTVERSION}.jar -Dversion=${PORTVERSION} -Dcompile.native=true \
+ -Dcompile.c++=true -Dmake.cmd=${GMAKE} -Dlibhdfs=1 -Dlibrecordio=true -Dskip.record-parser=true
+ALL_TARGET= compile-core-native compile-c++ compile-c++-libhdfs compile-c++-pipes compile-c++-utils
+
+.include <bsd.port.pre.mk>
+
+.if ${ARCH} == "amd64"
+_HADOOP_ARCH= FreeBSD-amd64-64
+.else
+_HADOOP_ARCH= FreeBSD-i386-32
+.endif
+PLIST_SUB+= HADOOP_ARCH=${_HADOOP_ARCH}
+
+pre-build:
+.for f in ${FIX_PERMS}
+ @${CHMOD} +x ${WRKSRC}/${f}
+.endfor
+.for d in ${FIX_DOCS}
+ @${TOUCH} ${WRKSRC}/${d}/.empty
+.endfor
+
+#do-build:
+# @cd ${WRKSRC}; \
+# ${ANT} -Dbuild.classes=${WRKSRC}/hadoop-core-${PORTVERSION}.jar -Dversion=${PORTVERSION} -Dcompile.native=true \
+# -Dcompile.c++=true -Dmake.cmd=${GMAKE} compile-core-native compile-c++ compile-c++-libhdfs compile-c++-pipes \
+# compile-c++-utils -Dlibhdfs=1 -Dlibrecordio=true -Dskip.record-parser=true
+
+post-build:
+ @cd ${WRKSRC} ;${ANT} FreeBSD-dist
+ @${TAR} -cf - -C ${WRKSRC}/build/c++/ ${_HADOOP_ARCH}/lib ${_HADOOP_ARCH}/include | ${TAR} xf - -C ${WRKSRC}/c++/
+
+pre-install:
+ @${SETENV} PKG_PREFIX=${PREFIX} ${SH} ${PKGINSTALL} ${PKGNAME} PRE-INSTALL
+
+do-install:
+ @${MKDIR} ${HADOOP_HOME}
+ @${TAR} -cf - -C ${WRKSRC}/ ${DIST} | ${TAR} -xf - -C ${HADOOP_HOME}
+
+ @${MKDIR} ${HADOOP_ETC} ${HADOOP_ETC}/envvars.d
+ @${INSTALL_DATA} ${WRKDIR}/000.java_home.env ${HADOOP_ETC}/envvars.d/
+ @${ECHO} "export HADOOP_HOME=${HADOOP_HOME}" > ${HADOOP_ETC}/envvars.d/001.hadoop_home.env
+ @${ECHO} "export HADOOP_CONF_DIR=${HADOOP_ETC}" > ${HADOOP_ETC}/envvars.d/002.hadoop_conf.env
+ @${ECHO} "export HADOOP_LOG_DIR=${HADOOP_LOGDIR}" > ${HADOOP_ETC}/envvars.d/003.hadoop_log.env
+ @${ECHO} "export HADOOP_PID_DIR=${HADOOP_RUNDIR}" > ${HADOOP_ETC}/envvars.d/004.hadoop_run.env
+ @${MKDIR} ${EXAMPLESDIR}
+ @${TAR} -cf - -C ${WRKSRC}/c++/${_HADOOP_ARCH}/ include lib| ${TAR} xf - -C ${PREFIX}
+
+.for f in ${DEFAULTS}
+ @${INSTALL_DATA} ${WRKSRC}/${f} ${EXAMPLESDIR}
+.endfor
+
+.if !defined(NOPORTDOCS)
+ @${MKDIR} ${DOCSDIR}
+ @${TAR} -cf - -C ${WRKSRC}/ docs | ${TAR} -xf - -C ${DOCSDIR}
+.for f in ${DOC}
+ @${INSTALL_DATA} ${WRKSRC}/${f} ${DOCSDIR}
+.endfor
+.endif
+ @${INSTALL_SCRIPT} ${WRKDIR}/hadoop ${PREFIX}/bin/
+ @cd ${WRKSRC}; ${COPYTREE_SHARE} conf ${EXAMPLESDIR}
+.for f in ${CONF}
+ @[ -f ${HADOOP_ETC}/${f} ] || ${INSTALL_DATA} ${EXAMPLESDIR}/conf/${f} ${HADOOP_ETC}
+.endfor
+
+post-install:
+ @${SETENV} PKG_PREFIX=${PREFIX} ${SH} ${PKGINSTALL} ${PKGNAME} POST-INSTALL
+
+.include <bsd.port.post.mk>
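The do-install target above generates a set of small files under etc/hadoop/envvars.d that the bin/hadoop wrapper and the rc.d scripts source at startup. A sketch of what they end up containing, assuming the default PREFIX of /usr/local (000.java_home.env comes from SUB_FILES templating, so its JAVA_HOME value depends on the JDK selected at build time):

    export HADOOP_HOME=/usr/local/share/hadoop     # 001.hadoop_home.env
    export HADOOP_CONF_DIR=/usr/local/etc/hadoop   # 002.hadoop_conf.env
    export HADOOP_LOG_DIR=/var/log/hadoop          # 003.hadoop_log.env
    export HADOOP_PID_DIR=/var/run/hadoop          # 004.hadoop_run.env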
diff --git a/devel/hadoop/distinfo b/devel/hadoop/distinfo
new file mode 100644
index 000000000000..92325bd9cd80
--- /dev/null
+++ b/devel/hadoop/distinfo
@@ -0,0 +1,2 @@
+SHA256 (hadoop/hadoop-1.0.0.tar.gz) = 587bc9389d062f4e8042f2604b2d9a574080d4178614cccc07c5e5d743836f71
+SIZE (hadoop/hadoop-1.0.0.tar.gz) = 59468784
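The checksum and size above are what the ports framework verifies at fetch time; after a version bump they are regenerated from the port directory with the standard target:

    cd /usr/ports/devel/hadoop
    make makesum    # rewrites distinfo with fresh SHA256 and SIZE entries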
diff --git a/devel/hadoop/files/datanode.in b/devel/hadoop/files/datanode.in
new file mode 100644
index 000000000000..402ecc228531
--- /dev/null
+++ b/devel/hadoop/files/datanode.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: datanode
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# datanode_enable (bool): Set to NO by default.
+# Set it to YES to enable datanode.
+# datanode_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# datanode user.
+# datanode_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# datanode_log_dir (str): Unset by default.
+# datanode_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=datanode
+rcvar=datanode_enable
+
+load_rc_config "${name}"
+
+: ${datanode_enable:=NO}
+: ${datanode_user:=%%HADOOP_USER%%}
+: ${datanode_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start datanode'
+
+start_precmd=datanode_init
+stop_precmd=datanode_init
+stop_cmd=datanode_stop
+
+
+datanode_init()
+{
+ if [ -n "${datanode_java_home}" ]
+ then
+ export JAVA_HOME="${datanode_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${datanode_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${datanode_log_dir}
+ fi
+
+ if [ -n "${datanode_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${datanode_pid_dir}
+ fi
+
+ install -d -m 755 -o ${datanode_user} -g ${datanode_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${datanode_user} -g ${datanode_group} ${HADOOP_LOG_DIR}
+}
+
+datanode_stop ()
+{
+ su -m ${datanode_user} -c "${command} --config ${HADOOP_CONF_DIR} stop datanode"
+}
+
+run_rc_command "$1"
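As the header comments document, each daemon's user, JVM, log and pid locations can be overridden per service from /etc/rc.conf. A sketch for the datanode (the JVM path is illustrative, not something this port sets):

    datanode_enable="YES"
    datanode_user="hadoop"
    datanode_java_home="/usr/local/openjdk6"   # illustrative JVM path
    datanode_log_dir="/var/log/hadoop"
    datanode_pid_dir="/var/run/hadoop"

The jobtracker, namenode, secondarynamenode and tasktracker scripts below expose the same knobs under their own variable prefixes.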
diff --git a/devel/hadoop/files/hadoop.in b/devel/hadoop/files/hadoop.in
new file mode 100644
index 000000000000..269b9821332b
--- /dev/null
+++ b/devel/hadoop/files/hadoop.in
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+for i in %%HADOOP_ETC%%/envvars.d/*.env
+do
+ . ${i}
+done
+
+exec %%HADOOP_HOME%%/bin/hadoop "$@"
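The wrapper just loads the envvars.d settings and execs the bundled hadoop script, so the stock command line works unchanged once the port is installed; for example:

    hadoop version     # print the Hadoop build version
    hadoop fs -ls /    # list the HDFS root (assumes a running namenode)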
diff --git a/devel/hadoop/files/jobtracker.in b/devel/hadoop/files/jobtracker.in
new file mode 100644
index 000000000000..20313e8dd661
--- /dev/null
+++ b/devel/hadoop/files/jobtracker.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: jobtracker
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# jobtracker_enable (bool): Set to NO by default.
+# Set it to YES to enable jobtracker.
+# jobtracker_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# jobtracker user.
+# jobtracker_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# jobtracker_log_dir (str): Unset by default.
+# jobtracker_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=jobtracker
+rcvar=jobtracker_enable
+
+load_rc_config "${name}"
+
+: ${jobtracker_enable:=NO}
+: ${jobtracker_user:=%%HADOOP_USER%%}
+: ${jobtracker_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start jobtracker'
+
+start_precmd=jobtracker_init
+stop_precmd=jobtracker_init
+stop_cmd=jobtracker_stop
+
+
+jobtracker_init()
+{
+ if [ -n "${jobtracker_java_home}" ]
+ then
+ export JAVA_HOME="${jobtracker_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${jobtracker_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${jobtracker_log_dir}
+ fi
+
+ if [ -n "${jobtracker_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${jobtracker_pid_dir}
+ fi
+
+ install -d -m 755 -o ${jobtracker_user} -g ${jobtracker_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${jobtracker_user} -g ${jobtracker_group} ${HADOOP_LOG_DIR}
+}
+
+jobtracker_stop ()
+{
+ su -m ${jobtracker_user} -c "${command} --config ${HADOOP_CONF_DIR} stop jobtracker"
+}
+
+run_rc_command "$1"
diff --git a/devel/hadoop/files/namenode.in b/devel/hadoop/files/namenode.in
new file mode 100644
index 000000000000..dc48bccc50b2
--- /dev/null
+++ b/devel/hadoop/files/namenode.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: namenode
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# namenode_enable (bool): Set to NO by default.
+# Set it to YES to enable namenode.
+# namenode_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# namenode user.
+# namenode_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# namenode_log_dir (str): Unset by default.
+# namenode_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=namenode
+rcvar=namenode_enable
+
+load_rc_config "${name}"
+
+: ${namenode_enable:=NO}
+: ${namenode_user:=%%HADOOP_USER%%}
+: ${namenode_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start namenode'
+
+start_precmd=namenode_init
+stop_precmd=namenode_init
+stop_cmd=namenode_stop
+
+
+namenode_init()
+{
+ if [ -n "${namenode_java_home}" ]
+ then
+ export JAVA_HOME="${namenode_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${namenode_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${namenode_log_dir}
+ fi
+
+ if [ -n "${namenode_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${namenode_pid_dir}
+ fi
+
+ install -d -m 755 -o ${namenode_user} -g ${namenode_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${namenode_user} -g ${namenode_group} ${HADOOP_LOG_DIR}
+}
+
+namenode_stop ()
+{
+ su -m ${namenode_user} -c "${command} --config ${HADOOP_CONF_DIR} stop namenode"
+}
+
+run_rc_command "$1"
diff --git a/devel/hadoop/files/patch-build.xml b/devel/hadoop/files/patch-build.xml
new file mode 100644
index 000000000000..ef61c7ce5845
--- /dev/null
+++ b/devel/hadoop/files/patch-build.xml
@@ -0,0 +1,80 @@
+--- build.xml.orig 2011-05-04 08:30:16.000000000 +0200
++++ build.xml 2011-08-08 00:08:22.000000000 +0200
+@@ -372,7 +372,7 @@
+ <!-- ====================================================== -->
+ <!-- Compile the Java files -->
+ <!-- ====================================================== -->
+- <target name="record-parser" depends="init" if="javacc.home">
++ <target name="record-parser" if="javacc.home">
+ <javacc
+ target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
+ outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
+@@ -539,7 +539,7 @@
+ </antcall>
+ </target>
+
+- <target name="compile-core-native" depends="compile-core-classes"
++ <target name="compile-core-native"
+ if="compile.native">
+
+ <mkdir dir="${build.native}/lib"/>
+@@ -1669,7 +1669,7 @@
+ <!-- librecordio targets. -->
+ <!-- ================================================================== -->
+
+- <target name="compile-librecordio" depends="init" if="librecordio" >
++ <target name="compile-librecordio" if="librecordio" >
+ <mkdir dir="${build.librecordio}"/>
+ <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true">
+ <env key="XERCESCROOT" value="${xercescroot}"/>
+@@ -1703,7 +1703,7 @@
+ </chmod>
+ </target>
+
+- <target name="create-c++-configure" depends="init" if="compile.c++">
++ <target name="create-c++-configure" if="compile.c++">
+ <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
+ failonerror="yes">
+ <arg value="-if"/>
+@@ -1726,7 +1726,7 @@
+ </exec>
+ </target>
+
+- <target name="check-c++-makefiles" depends="init" if="compile.c++">
++ <target name="check-c++-makefiles" if="compile.c++">
+ <condition property="need.c++.utils.makefile">
+ <not> <available file="${build.c++.utils}/Makefile"/> </not>
+ </condition>
+@@ -1747,7 +1747,7 @@
+ </condition>
+ </target>
+
+- <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
++ <target name="check-c++-makefile-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
+ <condition property="need.c++.libhdfs.makefile">
+ <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
+ </condition>
+@@ -2326,5 +2326,23 @@
+ <fileset file="${jsvc.install.dir}/jsvc"/>
+ </chmod>
+ </target>
++ <target name="FreeBSD-dist" >
++ <mkdir dir="${dist.dir}"/>
++ <mkdir dir="${dist.dir}/lib"/>
++ <mkdir dir="${dist.dir}/contrib"/>
++ <mkdir dir="${dist.dir}/bin"/>
++
++ <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
++ <fileset dir="lib">
++ <exclude name="**/native/**"/>
++ </fileset>
++ </copy>
+
++ <exec dir="${basedir}" executable="sh" failonerror="true">
++ <env key="BASE_NATIVE_LIB_DIR" value="${basedir}/lib/native"/>
++ <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
++ <env key="DIST_LIB_DIR" value="${basedir}/lib/native"/>
++ <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
++ </exec>
++</target>
+ </project>
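The FreeBSD-dist target added at the end of this patch is what the port's post-build step invokes; during porting work it can also be run by hand from the unpacked source tree (the path assumes the default WRKDIR layout):

    cd work/hadoop-1.0.0    # WRKSRC
    ant FreeBSD-dist        # stage lib/ and repack the native libraries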
diff --git a/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c b/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c
new file mode 100644
index 000000000000..7ea39e3471af
--- /dev/null
+++ b/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c
@@ -0,0 +1,11 @@
+--- src/c++/libhdfs/hdfs.c.orig 2011-08-07 16:38:59.000000000 +0200
++++ src/c++/libhdfs/hdfs.c 2011-08-07 16:39:18.000000000 +0200
+@@ -252,7 +252,7 @@
+ cURI = malloc(strlen(host)+16);
+ sprintf(cURI, "hdfs://%s:%d", host, (int)(port));
+ if (cURI == NULL) {
+- fprintf (stderr, "Couldn't allocate an object of size %d",
++ fprintf (stderr, "Couldn't allocate an object of size %llu",
+ strlen(host) + 16);
+ errno = EINTERNAL;
+ goto done;
diff --git a/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c b/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c
new file mode 100644
index 000000000000..ea2ff1a40923
--- /dev/null
+++ b/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c
@@ -0,0 +1,11 @@
+--- src/c++/libhdfs/hdfsJniHelper.c.orig 2011-05-04 08:30:16.000000000 +0200
++++ src/c++/libhdfs/hdfsJniHelper.c 2011-08-07 16:40:54.000000000 +0200
+@@ -15,7 +15,7 @@
+ */
+
+ #include <string.h>
+-#include <error.h>
++//#include <error.h>
+ #include "hdfsJniHelper.h"
+
+ static pthread_mutex_t hdfsHashMutex = PTHREAD_MUTEX_INITIALIZER;
diff --git a/devel/hadoop/files/patch-src__native__Makefile.in b/devel/hadoop/files/patch-src__native__Makefile.in
new file mode 100644
index 000000000000..0ebf9fb4223f
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__Makefile.in
@@ -0,0 +1,105 @@
+--- src/native/Makefile.in.orig 2011-05-04 08:30:16.000000000 +0200
++++ src/native/Makefile.in 2011-12-09 10:38:40.000000000 +0100
+@@ -92,10 +92,7 @@
+ libLTLIBRARIES_INSTALL = $(INSTALL)
+ LTLIBRARIES = $(lib_LTLIBRARIES)
+ libhadoop_la_DEPENDENCIES =
+-am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo \
+- getGroup.lo JniBasedUnixGroupsMapping.lo \
+- JniBasedUnixGroupsNetgroupMapping.lo file_descriptor.lo \
+- errno_enum.lo NativeIO.lo
++am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo
+ libhadoop_la_OBJECTS = $(am_libhadoop_la_OBJECTS)
+ DEFAULT_INCLUDES = -I. -I$(srcdir) -I.
+ depcomp = $(SHELL) $(top_srcdir)/config/depcomp
+@@ -223,24 +220,15 @@
+ sysconfdir = @sysconfdir@
+ target_alias = @target_alias@
+ AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
+- -Isrc/org/apache/hadoop/io/compress/zlib \
+- -Isrc/org/apache/hadoop/io/nativeio \
+- -Isrc/org/apache/hadoop/security
++ -Isrc/org/apache/hadoop/io/compress/zlib
+
+ AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
+ AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
+ lib_LTLIBRARIES = libhadoop.la
+ libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
+- src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
+- src/org/apache/hadoop/security/getGroup.c \
+- src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
+- src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \
+- src/org/apache/hadoop/io/nativeio/file_descriptor.c \
+- src/org/apache/hadoop/io/nativeio/errno_enum.c \
+- src/org/apache/hadoop/io/nativeio/NativeIO.c
+-
++ src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
+ libhadoop_la_LDFLAGS = -version-info 1:0:0
+-libhadoop_la_LIBADD = -ldl -ljvm
++libhadoop_la_LIBADD = -ljvm
+ all: config.h
+ $(MAKE) $(AM_MAKEFLAGS) all-am
+
+@@ -332,14 +320,8 @@
+ distclean-compile:
+ -rm -f *.tab.c
+
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsMapping.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/NativeIO.Plo@am__quote@
+ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibCompressor.Plo@am__quote@
+ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibDecompressor.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/errno_enum.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/file_descriptor.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/getGroup.Plo@am__quote@
+
+ .c.o:
+ @am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
+@@ -376,47 +358,6 @@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+ @am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o ZlibDecompressor.lo `test -f 'src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
+
+-getGroup.lo: src/org/apache/hadoop/security/getGroup.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT getGroup.lo -MD -MP -MF "$(DEPDIR)/getGroup.Tpo" -c -o getGroup.lo `test -f 'src/org/apache/hadoop/security/getGroup.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/getGroup.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/getGroup.Tpo" "$(DEPDIR)/getGroup.Plo"; else rm -f "$(DEPDIR)/getGroup.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/getGroup.c' object='getGroup.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o getGroup.lo `test -f 'src/org/apache/hadoop/security/getGroup.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/getGroup.c
+-
+-JniBasedUnixGroupsMapping.lo: src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT JniBasedUnixGroupsMapping.lo -MD -MP -MF "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo" -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo" "$(DEPDIR)/JniBasedUnixGroupsMapping.Plo"; else rm -f "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' object='JniBasedUnixGroupsMapping.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
+-
+-JniBasedUnixGroupsNetgroupMapping.lo: src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT JniBasedUnixGroupsNetgroupMapping.lo -MD -MP -MF "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo" -c -o JniBasedUnixGroupsNetgroupMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo" "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Plo"; else rm -f "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' object='JniBasedUnixGroupsNetgroupMapping.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsNetgroupMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
+-
+-file_descriptor.lo: src/org/apache/hadoop/io/nativeio/file_descriptor.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT file_descriptor.lo -MD -MP -MF "$(DEPDIR)/file_descriptor.Tpo" -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/file_descriptor.Tpo" "$(DEPDIR)/file_descriptor.Plo"; else rm -f "$(DEPDIR)/file_descriptor.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/file_descriptor.c' object='file_descriptor.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c
+-
+-errno_enum.lo: src/org/apache/hadoop/io/nativeio/errno_enum.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT errno_enum.lo -MD -MP -MF "$(DEPDIR)/errno_enum.Tpo" -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/errno_enum.Tpo" "$(DEPDIR)/errno_enum.Plo"; else rm -f "$(DEPDIR)/errno_enum.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/errno_enum.c' object='errno_enum.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c
+-
+-NativeIO.lo: src/org/apache/hadoop/io/nativeio/NativeIO.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT NativeIO.lo -MD -MP -MF "$(DEPDIR)/NativeIO.Tpo" -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/NativeIO.Tpo" "$(DEPDIR)/NativeIO.Plo"; else rm -f "$(DEPDIR)/NativeIO.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/NativeIO.c' object='NativeIO.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c
+
+ mostlyclean-libtool:
+ -rm -f *.lo
diff --git a/devel/hadoop/files/patch-src__native__configure b/devel/hadoop/files/patch-src__native__configure
new file mode 100644
index 000000000000..d9b70ca443a3
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__configure
@@ -0,0 +1,11 @@
+--- src/native/configure.orig 2011-12-08 23:11:17.000000000 +0100
++++ src/native/configure 2011-12-08 23:11:50.000000000 +0100
+@@ -20504,7 +20504,7 @@
+ echo 'int main(int argc, char **argv){return 0;}' > conftest.c
+ if test -z "`${CC} ${LDFLAGS} -o conftest conftest.c -lz 2>&1`"; then
+ if test ! -z "`which objdump | grep -v 'no objdump'`"; then
+- ac_cv_libname_z="`objdump -p conftest | grep NEEDED | grep z | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
++ ac_cv_libname_z="`objdump -p conftest | grep NEEDED | grep z | gsed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
+ elif test ! -z "`which ldd | grep -v 'no ldd'`"; then
+ ac_cv_libname_z="`ldd conftest | grep z | sed 's/^[^A-Za-z0-9]*\([A-Za-z0-9\.]*\)[^A-Za-z0-9]*=>.*$/\"\1\"/'`"
+ else
diff --git a/devel/hadoop/files/patch-src__native__configure.ac b/devel/hadoop/files/patch-src__native__configure.ac
new file mode 100644
index 000000000000..91561671f959
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__configure.ac
@@ -0,0 +1,13 @@
+--- src/native/configure.ac.orig 2011-05-04 08:30:16.000000000 +0200
++++ src/native/configure.ac 2011-08-07 16:17:58.000000000 +0200
+@@ -46,10 +46,6 @@
+ AC_PROG_CC
+ AC_PROG_LIBTOOL
+
+-# Checks for libraries.
+-dnl Check for '-ldl'
+-AC_CHECK_LIB([dl], [dlopen])
+-
+ dnl Check for '-ljvm'
+ JNI_LDFLAGS=""
+ if test $JAVA_HOME != ""
diff --git a/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c b/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c
new file mode 100644
index 000000000000..20ae853ee768
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c
@@ -0,0 +1,11 @@
+--- src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.orig 2011-08-10 13:43:50.000000000 +0200
++++ src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c 2011-08-10 13:44:32.000000000 +0200
+@@ -188,7 +188,7 @@
+ if (flags & O_CREAT) {
+ fd = open(path, flags, mode);
+ } else {
+- fd = open(path, flags);
++ fd = open(path, flags | O_CREAT);
+ }
+
+ if (fd == -1) {
diff --git a/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c b/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c
new file mode 100644
index 000000000000..d8881f76bb33
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c
@@ -0,0 +1,12 @@
+--- src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.orig 2011-08-07 16:43:00.000000000 +0200
++++ src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c 2011-08-07 16:43:30.000000000 +0200
+@@ -73,7 +73,8 @@
+ // was successfull or not (as long as it was called we need to call
+ // endnetgrent)
+ setnetgrentCalledFlag = 1;
+- if(setnetgrent(cgroup) == 1) {
++ setnetgrent(cgroup);
++ if(1 == 1) {
+ UserList *current = NULL;
+ // three pointers are for host, user, domain, we only care
+ // about user now
diff --git a/devel/hadoop/files/pkg-deinstall.in b/devel/hadoop/files/pkg-deinstall.in
new file mode 100644
index 000000000000..e42856245c9f
--- /dev/null
+++ b/devel/hadoop/files/pkg-deinstall.in
@@ -0,0 +1,14 @@
+#!/bin/sh
+# $FreeBSD$
+
+PATH="/bin:/sbin:/usr/bin:/usr/sbin"
+
+RUNDIR=%%HADOOP_RUNDIR%%
+LOGDIR=%%HADOOP_LOGDIR%%
+
+if [ "$2" = "POST-DEINSTALL" ]; then
+ echo "=> Deleting ${RUNDIR} if empty..."
+ rm -d ${RUNDIR} 2>/dev/null || true
+ echo "=> Deleting ${LOGDIR} if empty..."
+ rm -d ${LOGDIR} 2>/dev/null || true
+fi
diff --git a/devel/hadoop/files/pkg-install.in b/devel/hadoop/files/pkg-install.in
new file mode 100644
index 000000000000..7d0b09b046c3
--- /dev/null
+++ b/devel/hadoop/files/pkg-install.in
@@ -0,0 +1,55 @@
+#!/bin/sh
+# $FreeBSD$
+PATH="/bin:/sbin:/usr/bin:/usr/sbin"
+
+HADOOP_USER=%%HADOOP_USER%%
+HADOOP_GROUP=%%HADOOP_GROUP%%
+UID=%%HADOOP_UID%%
+GID=%%HADOOP_GID%%
+
+RUNDIR=%%HADOOP_RUNDIR%%
+LOGDIR=%%HADOOP_LOGDIR%%
+
+PW="pw"
+CHOWN="chown"
+INSTALL_DIR="install -d -o ${HADOOP_USER} -g ${HADOOP_GROUP} -m 0755"
+
+if [ "$2" = "PRE-INSTALL" ]; then
+
+# if ! ${PW} groupshow ${HADOOP_GROUP} 2>/dev/null 1>&2; then
+# if ${PW} groupadd ${HADOOP_GROUP} -g $GID; then
+# echo "=> Added group \"${HADOOP_GROUP}\"."
+# else
+# echo "=> Adding group \"${HADOOP_GROUP}\" failed..."
+# exit 1
+# fi
+# fi
+
+# if ! ${PW} usershow ${HADOOP_USER} 2>/dev/null 1>&2; then
+# if ${PW} useradd ${HADOOP_USER} -u $UID -g ${HADOOP_GROUP} -h - \
+# -s "/sbin/nologin" -d "/nonexistent" \
+# -c "Hadoop Daemons"; then
+# echo "=> Added user \"${HADOOP_USER}\"."
+# else
+# echo "=> Adding user \"${HADOOP_USER}\" failed..."
+# exit 1
+# fi
+# fi
+ sleep 1
+
+elif [ "$2" = "POST-INSTALL" ]; then
+ if [ -d ${RUNDIR} ]; then
+ echo "=> ${RUNDIR} already exists."
+ else
+ echo -n "=> Creating RUNDIR ${RUNDIR}... "
+ ${INSTALL_DIR} ${RUNDIR} || echo "failed"
+ fi
+ if [ -d ${LOGDIR} ]; then
+ echo "=> ${LOGDIR} already exists."
+ else
+ echo -n "=> Creating LOGDIR ${LOGDIR}... "
+ ${INSTALL_DIR} ${LOGDIR} || echo "failed"
+ fi
+fi
+
+exit 0
diff --git a/devel/hadoop/files/secondarynamenode.in b/devel/hadoop/files/secondarynamenode.in
new file mode 100644
index 000000000000..fa58dd346017
--- /dev/null
+++ b/devel/hadoop/files/secondarynamenode.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: secondarynamenode
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# secondarynamenode_enable (bool): Set to NO by default.
+# Set it to YES to enable secondarynamenode.
+# secondarynamenode_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# secondarynamenode user.
+# secondarynamenode_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# secondarynamenode_log_dir (str): Unset by default.
+# secondarynamenode_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=secondarynamenode
+rcvar=secondarynamenode_enable
+
+load_rc_config "${name}"
+
+: ${secondarynamenode_enable:=NO}
+: ${secondarynamenode_user:=%%HADOOP_USER%%}
+: ${secondarynamenode_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start secondarynamenode'
+
+start_precmd=secondarynamenode_init
+stop_precmd=secondarynamenode_init
+stop_cmd=secondarynamenode_stop
+
+
+secondarynamenode_init()
+{
+ if [ -n "${secondarynamenode_java_home}" ]
+ then
+ export JAVA_HOME="${secondarynamenode_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${secondarynamenode_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${secondarynamenode_log_dir}
+ fi
+
+ if [ -n "${secondarynamenode_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${secondarynamenode_pid_dir}
+ fi
+
+ install -d -m 755 -o ${secondarynamenode_user} -g ${secondarynamenode_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${secondarynamenode_user} -g ${secondarynamenode_group} ${HADOOP_LOG_DIR}
+}
+
+secondarynamenode_stop ()
+{
+ su -m ${secondarynamenode_user} -c "${command} --config ${HADOOP_CONF_DIR} stop secondarynamenode"
+}
+
+run_rc_command "$1"
diff --git a/devel/hadoop/files/tasktracker.in b/devel/hadoop/files/tasktracker.in
new file mode 100644
index 000000000000..6d97a049f361
--- /dev/null
+++ b/devel/hadoop/files/tasktracker.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: tasktracker
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# tasktracker_enable (bool): Set to NO by default.
+# Set it to YES to enable tasktracker.
+# tasktracker_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# tasktracker user.
+# tasktracker_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# tasktracker_log_dir (str): Unset by default.
+# tasktracker_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=tasktracker
+rcvar=tasktracker_enable
+
+load_rc_config "${name}"
+
+: ${tasktracker_enable:=NO}
+: ${tasktracker_user:=%%HADOOP_USER%%}
+: ${tasktracker_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start tasktracker'
+
+start_precmd=tasktracker_init
+stop_precmd=tasktracker_init
+stop_cmd=tasktracker_stop
+
+
+tasktracker_init()
+{
+ if [ -n "${tasktracker_java_home}" ]
+ then
+ export JAVA_HOME="${tasktracker_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${tasktracker_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${tasktracker_log_dir}
+ fi
+
+ if [ -n "${tasktracker_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${tasktracker_pid_dir}
+ fi
+
+ install -d -m 755 -o ${tasktracker_user} -g ${tasktracker_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${tasktracker_user} -g ${tasktracker_group} ${HADOOP_LOG_DIR}
+}
+
+tasktracker_stop ()
+{
+ su -m ${tasktracker_user} -c "${command} --config ${HADOOP_CONF_DIR} stop tasktracker"
+}
+
+run_rc_command "$1"
diff --git a/devel/hadoop/pkg-descr b/devel/hadoop/pkg-descr
new file mode 100644
index 000000000000..a0b85fd4037b
--- /dev/null
+++ b/devel/hadoop/pkg-descr
@@ -0,0 +1,5 @@
+The Apache Hadoop software library is a framework that allows for the
+distributed processing of large data sets across clusters of computers
+using a simple programming model.
+
+WWW: http://hadoop.apache.org/
diff --git a/devel/hadoop/pkg-plist b/devel/hadoop/pkg-plist
new file mode 100644
index 000000000000..bf4079f1a338
--- /dev/null
+++ b/devel/hadoop/pkg-plist
@@ -0,0 +1,346 @@
+bin/hadoop
+%%HADOOP_ETC%%/envvars.d/000.java_home.env
+%%HADOOP_ETC%%/envvars.d/001.hadoop_home.env
+%%HADOOP_ETC%%/envvars.d/002.hadoop_conf.env
+%%HADOOP_ETC%%/envvars.d/003.hadoop_log.env
+%%HADOOP_ETC%%/envvars.d/004.hadoop_run.env
+%%HADOOP_HOME%%/bin/hadoop
+%%HADOOP_HOME%%/bin/hadoop-config.sh
+%%HADOOP_HOME%%/bin/hadoop-daemon.sh
+%%HADOOP_HOME%%/bin/hadoop-daemons.sh
+%%HADOOP_HOME%%/bin/rcc
+%%HADOOP_HOME%%/bin/slaves.sh
+%%HADOOP_HOME%%/bin/start-all.sh
+%%HADOOP_HOME%%/bin/start-balancer.sh
+%%HADOOP_HOME%%/bin/start-dfs.sh
+%%HADOOP_HOME%%/bin/start-jobhistoryserver.sh
+%%HADOOP_HOME%%/bin/start-mapred.sh
+%%HADOOP_HOME%%/bin/stop-all.sh
+%%HADOOP_HOME%%/bin/stop-balancer.sh
+%%HADOOP_HOME%%/bin/stop-dfs.sh
+%%HADOOP_HOME%%/bin/stop-jobhistoryserver.sh
+%%HADOOP_HOME%%/bin/stop-mapred.sh
+%%HADOOP_HOME%%/bin/task-controller
+%%HADOOP_HOME%%/lib/hadoop-capacity-scheduler-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/datajoin/hadoop-datajoin-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/failmon/hadoop-failmon-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/lib/hadoop-fairscheduler-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/gridmix/hadoop-gridmix-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-config.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/start-hdfsproxy.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/stop-hdfsproxy.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/configuration.xsl
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-default.xml
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-env.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-hosts
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/log4j.properties
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/ssl-server.xml
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/tomcat-forward-web.xml
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/tomcat-web.xml
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/user-certs.xml
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/user-permissions.xml
+%%HADOOP_HOME%%/contrib/hdfsproxy/README
+%%HADOOP_HOME%%/contrib/hdfsproxy/build.xml
+%%HADOOP_HOME%%/contrib/hdfsproxy/hdfsproxy-2.0.jar
+%%HADOOP_HOME%%/contrib/hod/bin/VERSION
+%%HADOOP_HOME%%/contrib/hod/bin/checknodes
+%%HADOOP_HOME%%/contrib/hod/bin/hod
+%%HADOOP_HOME%%/contrib/hod/bin/hodcleanup
+%%HADOOP_HOME%%/contrib/hod/bin/hodring
+%%HADOOP_HOME%%/contrib/hod/bin/ringmaster
+%%HADOOP_HOME%%/contrib/hod/bin/verify-account
+%%HADOOP_HOME%%/contrib/hod/conf/hodrc
+%%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers/goldAllocationManager.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/allocationManagerUtil.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/desc.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/descGenerator.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/hodsvc.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/logger.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/miniHTMLParser.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/nodepoolutil.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/setup.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/socketServers.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/tcp.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/threads.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/types.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/util.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Common/xmlrpc.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/hdfs.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/mapred.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/service.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/hadoop.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/hod.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/nodePool.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/HodRing/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/HodRing/hodRing.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/NodePools/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/NodePools/torque.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/idleJobTracker.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/ringMaster.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers/torque.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy/serviceProxy.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry/__init__.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry/serviceRegistry.py
+%%HADOOP_HOME%%/contrib/hod/hodlib/__init__.py
+%%HADOOP_HOME%%/contrib/hod/ivy/libraries.properties
+%%HADOOP_HOME%%/contrib/hod/support/checklimits.sh
+%%HADOOP_HOME%%/contrib/hod/support/logcondense.py
+%%HADOOP_HOME%%/contrib/hod/testing/__init__.py
+%%HADOOP_HOME%%/contrib/hod/testing/helper.py
+%%HADOOP_HOME%%/contrib/hod/testing/lib.py
+%%HADOOP_HOME%%/contrib/hod/testing/main.py
+%%HADOOP_HOME%%/contrib/hod/testing/testHadoop.py
+%%HADOOP_HOME%%/contrib/hod/testing/testHod.py
+%%HADOOP_HOME%%/contrib/hod/testing/testHodCleanup.py
+%%HADOOP_HOME%%/contrib/hod/testing/testHodRing.py
+%%HADOOP_HOME%%/contrib/hod/testing/testModule.py
+%%HADOOP_HOME%%/contrib/hod/testing/testRingmasterRPCs.py
+%%HADOOP_HOME%%/contrib/hod/testing/testThreads.py
+%%HADOOP_HOME%%/contrib/hod/testing/testTypes.py
+%%HADOOP_HOME%%/contrib/hod/testing/testUtil.py
+%%HADOOP_HOME%%/contrib/hod/testing/testXmlrpc.py
+%%HADOOP_HOME%%/contrib/hod/CHANGES.txt
+%%HADOOP_HOME%%/contrib/hod/README
+%%HADOOP_HOME%%/contrib/hod/build.xml
+%%HADOOP_HOME%%/contrib/hod/config.txt
+%%HADOOP_HOME%%/contrib/hod/getting_started.txt
+%%HADOOP_HOME%%/contrib/hod/ivy.xml
+%%HADOOP_HOME%%/contrib/index/hadoop-index-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/streaming/hadoop-streaming-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/lib/hadoop-thriftfs-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/vaidya/bin/vaidya.sh
+%%HADOOP_HOME%%/contrib/vaidya/conf/postex_diagnosis_tests.xml
+%%HADOOP_HOME%%/contrib/vaidya/hadoop-vaidya-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/hadoop-ant-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/hadoop-core-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/hadoop-test-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/hadoop-tools-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.17.0.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.1.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.2.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.3.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.0.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.1.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.2.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.20.1.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_0.20.205.0.xml
+%%HADOOP_HOME%%/lib/jdiff/hadoop_%%PORTVERSION%%.xml
+%%HADOOP_HOME%%/lib/jsp-2.1/jsp-2.1.jar
+%%HADOOP_HOME%%/lib/jsp-2.1/jsp-api-2.1.jar
+%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.a
+%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.la
+%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so
+%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so.1
+%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so.1.0.0
+%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.a
+%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.la
+%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so
+%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so.1
+%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so.1.0.0
+%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.a
+%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.la
+%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.so
+%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.so.1
+%%HADOOP_HOME%%/lib/asm-3.2.jar
+%%HADOOP_HOME%%/lib/aspectjrt-1.6.5.jar
+%%HADOOP_HOME%%/lib/aspectjtools-1.6.5.jar
+%%HADOOP_HOME%%/lib/commons-beanutils-1.7.0.jar
+%%HADOOP_HOME%%/lib/commons-beanutils-core-1.8.0.jar
+%%HADOOP_HOME%%/lib/commons-cli-1.2.jar
+%%HADOOP_HOME%%/lib/commons-codec-1.4.jar
+%%HADOOP_HOME%%/lib/commons-collections-3.2.1.jar
+%%HADOOP_HOME%%/lib/commons-configuration-1.6.jar
+%%HADOOP_HOME%%/lib/commons-daemon-1.0.1.jar
+%%HADOOP_HOME%%/lib/commons-digester-1.8.jar
+%%HADOOP_HOME%%/lib/commons-el-1.0.jar
+%%HADOOP_HOME%%/lib/commons-httpclient-3.0.1.jar
+%%HADOOP_HOME%%/lib/commons-lang-2.4.jar
+%%HADOOP_HOME%%/lib/commons-logging-1.1.1.jar
+%%HADOOP_HOME%%/lib/commons-logging-api-1.0.4.jar
+%%HADOOP_HOME%%/lib/commons-math-2.1.jar
+%%HADOOP_HOME%%/lib/commons-net-1.4.1.jar
+%%HADOOP_HOME%%/lib/core-3.1.1.jar
+%%HADOOP_HOME%%/lib/hsqldb-1.8.0.10.LICENSE.txt
+%%HADOOP_HOME%%/lib/hsqldb-1.8.0.10.jar
+%%HADOOP_HOME%%/lib/jackson-core-asl-1.0.1.jar
+%%HADOOP_HOME%%/lib/jackson-mapper-asl-1.0.1.jar
+%%HADOOP_HOME%%/lib/jasper-compiler-5.5.12.jar
+%%HADOOP_HOME%%/lib/jasper-runtime-5.5.12.jar
+%%HADOOP_HOME%%/lib/jdeb-0.8.jar
+%%HADOOP_HOME%%/lib/jersey-core-1.8.jar
+%%HADOOP_HOME%%/lib/jersey-json-1.8.jar
+%%HADOOP_HOME%%/lib/jersey-server-1.8.jar
+%%HADOOP_HOME%%/lib/jets3t-0.6.1.jar
+%%HADOOP_HOME%%/lib/jetty-6.1.26.jar
+%%HADOOP_HOME%%/lib/jetty-util-6.1.26.jar
+%%HADOOP_HOME%%/lib/jsch-0.1.42.jar
+%%HADOOP_HOME%%/lib/junit-4.5.jar
+%%HADOOP_HOME%%/lib/kfs-0.2.2.jar
+%%HADOOP_HOME%%/lib/kfs-0.2.LICENSE.txt
+%%HADOOP_HOME%%/lib/log4j-1.2.15.jar
+%%HADOOP_HOME%%/lib/mockito-all-1.8.5.jar
+%%HADOOP_HOME%%/lib/oro-2.0.8.jar
+%%HADOOP_HOME%%/lib/servlet-api-2.5-20081211.jar
+%%HADOOP_HOME%%/lib/slf4j-api-1.4.3.jar
+%%HADOOP_HOME%%/lib/slf4j-log4j12-1.4.3.jar
+%%HADOOP_HOME%%/lib/xmlenc-0.52.jar
+%%HADOOP_HOME%%/webapps/task/index.html
+%%HADOOP_HOME%%/webapps/task/WEB-INF/web.xml
+%%HADOOP_HOME%%/webapps/static/sorttable.js
+%%HADOOP_HOME%%/webapps/static/jobtracker.js
+%%HADOOP_HOME%%/webapps/static/jobconf.xsl
+%%HADOOP_HOME%%/webapps/static/hadoop.css
+%%HADOOP_HOME%%/webapps/static/hadoop-logo.jpg
+%%HADOOP_HOME%%/webapps/job/taskstatshistory.jsp
+%%HADOOP_HOME%%/webapps/job/taskstats.jsp
+%%HADOOP_HOME%%/webapps/job/taskdetailshistory.jsp
+%%HADOOP_HOME%%/webapps/job/taskdetails.jsp
+%%HADOOP_HOME%%/webapps/job/machines.jsp
+%%HADOOP_HOME%%/webapps/job/loadhistory.jsp
+%%HADOOP_HOME%%/webapps/job/legacyjobhistory.jsp
+%%HADOOP_HOME%%/webapps/job/jobtracker.jsp
+%%HADOOP_HOME%%/webapps/job/jobtaskshistory.jsp
+%%HADOOP_HOME%%/webapps/job/jobtasks.jsp
+%%HADOOP_HOME%%/webapps/job/jobqueue_details.jsp
+%%HADOOP_HOME%%/webapps/job/jobhistoryhome.jsp
+%%HADOOP_HOME%%/webapps/job/jobhistory.jsp
+%%HADOOP_HOME%%/webapps/job/jobfailures.jsp
+%%HADOOP_HOME%%/webapps/job/jobdetailshistory.jsp
+%%HADOOP_HOME%%/webapps/job/jobdetails.jsp
+%%HADOOP_HOME%%/webapps/job/jobconf_history.jsp
+%%HADOOP_HOME%%/webapps/job/jobconf.jsp
+%%HADOOP_HOME%%/webapps/job/jobblacklistedtrackers.jsp
+%%HADOOP_HOME%%/webapps/job/job_authorization_error.jsp
+%%HADOOP_HOME%%/webapps/job/index.html
+%%HADOOP_HOME%%/webapps/job/gethistory.jsp
+%%HADOOP_HOME%%/webapps/job/analysejobhistory.jsp
+%%HADOOP_HOME%%/webapps/job/WEB-INF/web.xml
+%%HADOOP_HOME%%/webapps/history/WEB-INF/web.xml
+%%HADOOP_HOME%%/webapps/hdfs/index.html
+%%HADOOP_HOME%%/webapps/hdfs/WEB-INF/web.xml
+%%HADOOP_HOME%%/webapps/datanode/WEB-INF/web.xml
+%%EXAMPLESDIR%%/hadoop-examples-%%PORTVERSION%%.jar
+%%EXAMPLESDIR%%/core-default.xml
+%%EXAMPLESDIR%%/hdfs-default.xml
+%%EXAMPLESDIR%%/mapred-default.xml
+%%EXAMPLESDIR%%/conf/masters
+%%EXAMPLESDIR%%/conf/slaves
+%%EXAMPLESDIR%%/conf/ssl-client.xml.example
+%%EXAMPLESDIR%%/conf/ssl-server.xml.example
+@unexec if cmp -s %D/%%HADOOP_ETC%%/capacity-scheduler.xml %D/%%EXAMPLESDIR%%/conf/capacity-scheduler.xml; then rm -f %D/%%HADOOP_ETC%%/capacity-scheduler.xml; fi
+%%EXAMPLESDIR%%/conf/capacity-scheduler.xml
+@exec [ -f %D/%%HADOOP_ETC%%/capacity-scheduler.xml ] || cp %D/%%EXAMPLESDIR%%/conf/capacity-scheduler.xml %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/configuration.xsl %D/%%EXAMPLESDIR%%/conf/configuration.xsl; then rm -f %D/%%HADOOP_ETC%%/configuration.xsl; fi
+%%EXAMPLESDIR%%/conf/configuration.xsl
+@exec [ -f %D/%%HADOOP_ETC%%/configuration.xsl ] || cp %D/%%EXAMPLESDIR%%/conf/configuration.xsl %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/core-site.xml %D/%%EXAMPLESDIR%%/conf/core-site.xml; then rm -f %D/%%HADOOP_ETC%%/core-site.xml; fi
+%%EXAMPLESDIR%%/conf/core-site.xml
+@exec [ -f %D/%%HADOOP_ETC%%/core-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/core-site.xml %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-env.sh %D/%%EXAMPLESDIR%%/conf/hadoop-env.sh; then rm -f %D/%%HADOOP_ETC%%/hadoop-env.sh; fi
+%%EXAMPLESDIR%%/conf/hadoop-env.sh
+@exec [ -f %D/%%HADOOP_ETC%%/hadoop-env.sh ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-env.sh %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-metrics2.properties %D/%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties; then rm -f %D/%%HADOOP_ETC%%/hadoop-metrics2.properties; fi
+%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties
+@exec [ -f %D/%%HADOOP_ETC%%/hadoop-metrics2.properties ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-policy.xml %D/%%EXAMPLESDIR%%/conf/hadoop-policy.xml; then rm -f %D/%%HADOOP_ETC%%/hadoop-policy.xml; fi
+%%EXAMPLESDIR%%/conf/hadoop-policy.xml
+@exec [ -f %D/%%HADOOP_ETC%%/hadoop-policy.xml ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-policy.xml %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/hdfs-site.xml %D/%%EXAMPLESDIR%%/conf/hdfs-site.xml; then rm -f %D/%%HADOOP_ETC%%/hdfs-site.xml; fi
+%%EXAMPLESDIR%%/conf/hdfs-site.xml
+@exec [ -f %D/%%HADOOP_ETC%%/hdfs-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/hdfs-site.xml %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/log4j.properties %D/%%EXAMPLESDIR%%/conf/log4j.properties; then rm -f %D/%%HADOOP_ETC%%/log4j.properties; fi
+%%EXAMPLESDIR%%/conf/log4j.properties
+@exec [ -f %D/%%HADOOP_ETC%%/log4j.properties ] || cp %D/%%EXAMPLESDIR%%/conf/log4j.properties %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/mapred-queue-acls.xml %D/%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml; then rm -f %D/%%HADOOP_ETC%%/mapred-queue-acls.xml; fi
+%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml
+@exec [ -f %D/%%HADOOP_ETC%%/mapred-queue-acls.xml ] || cp %D/%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/mapred-site.xml %D/%%EXAMPLESDIR%%/conf/mapred-site.xml; then rm -f %D/%%HADOOP_ETC%%/mapred-site.xml; fi
+%%EXAMPLESDIR%%/conf/mapred-site.xml
+@exec [ -f %D/%%HADOOP_ETC%%/mapred-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/mapred-site.xml %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/taskcontroller.cfg %D/%%EXAMPLESDIR%%/conf/taskcontroller.cfg; then rm -f %D/%%HADOOP_ETC%%/taskcontroller.cfg; fi
+%%EXAMPLESDIR%%/conf/taskcontroller.cfg
+@exec [ -f %D/%%HADOOP_ETC%%/taskcontroller.cfg ] || cp %D/%%EXAMPLESDIR%%/conf/taskcontroller.cfg %D/%%HADOOP_ETC%%
+@unexec if cmp -s %D/%%HADOOP_ETC%%/fair-scheduler.xml %D/%%EXAMPLESDIR%%/conf/fair-scheduler.xml; then rm -f %D/%%HADOOP_ETC%%/fair-scheduler.xml; fi
+%%EXAMPLESDIR%%/conf/fair-scheduler.xml
+@exec [ -f %D/%%HADOOP_ETC%%/fair-scheduler.xml ] || cp %D/%%EXAMPLESDIR%%/conf/fair-scheduler.xml %D/%%HADOOP_ETC%%
+include/hadoop/StringUtils.hh
+include/hadoop/SerialUtils.hh
+include/hadoop/Pipes.hh
+include/hadoop/TemplateFactory.hh
+lib/libhadooputils.a
+lib/libhadooppipes.a
+lib/libhdfs.so.0
+lib/libhdfs.so
+lib/libhdfs.la
+@exec mkdir -p %D/%%HADOOP_HOME%%/webapps/secondary/WEB-INF
+@exec mkdir -p %D/%%HADOOP_HOME%%/contrib/hdfsproxy/logs
+@dirrm %%EXAMPLESDIR%%/conf
+@dirrm %%EXAMPLESDIR%%
+@dirrm %%HADOOP_HOME%%/lib/native/Linux-i386-32
+@dirrm %%HADOOP_HOME%%/lib/native/Linux-amd64-64
+@dirrm %%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%
+@dirrm %%HADOOP_HOME%%/lib/native
+@dirrm %%HADOOP_HOME%%/lib/jsp-2.1
+@dirrm %%HADOOP_HOME%%/lib/jdiff
+@dirrm %%HADOOP_HOME%%/lib
+@dirrm %%HADOOP_HOME%%/bin
+@dirrm %%HADOOP_HOME%%/webapps/task/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/task
+@dirrm %%HADOOP_HOME%%/webapps/static
+@dirrm %%HADOOP_HOME%%/webapps/secondary/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/secondary
+@dirrm %%HADOOP_HOME%%/webapps/job/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/job
+@dirrm %%HADOOP_HOME%%/webapps/history/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/history
+@dirrm %%HADOOP_HOME%%/webapps/hdfs/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/hdfs
+@dirrm %%HADOOP_HOME%%/webapps/datanode/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/datanode
+@dirrm %%HADOOP_HOME%%/webapps/
+@dirrm %%HADOOP_HOME%%/contrib/vaidya/conf
+@dirrm %%HADOOP_HOME%%/contrib/vaidya/bin
+@dirrm %%HADOOP_HOME%%/contrib/vaidya
+@dirrm %%HADOOP_HOME%%/contrib/streaming
+@dirrm %%HADOOP_HOME%%/contrib/index
+@dirrm %%HADOOP_HOME%%/contrib/hod/testing
+@dirrm %%HADOOP_HOME%%/contrib/hod/support
+@dirrm %%HADOOP_HOME%%/contrib/hod/ivy
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/NodePools
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/HodRing
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Hod
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/GridServices
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Common
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib
+@dirrm %%HADOOP_HOME%%/contrib/hod/conf
+@dirrm %%HADOOP_HOME%%/contrib/hod/bin
+@dirrm %%HADOOP_HOME%%/contrib/hod
+@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/logs
+@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/conf
+@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/bin
+@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy
+@dirrm %%HADOOP_HOME%%/contrib/gridmix
+@dirrm %%HADOOP_HOME%%/contrib/failmon
+@dirrm %%HADOOP_HOME%%/contrib/datajoin
+@dirrm %%HADOOP_HOME%%/contrib/
+@dirrm %%HADOOP_HOME%%
+@dirrm include/hadoop
+@dirrmtry %%HADOOP_ETC%%/envvars.d
+@dirrmtry %%HADOOP_ETC%%
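Each configuration file in the plist above is wrapped in an @unexec/@exec pair implementing the usual ports idiom: on deinstall, delete the etc copy only if it is still byte-identical to the shipped example; on install, seed a default only if no file exists yet. Spelled out as plain shell for one file (%D is the install prefix; core-site.xml chosen as the example):

    PREFIX=/usr/local                                      # %D, assuming the default prefix
    etcf="$PREFIX/etc/hadoop/core-site.xml"
    exf="$PREFIX/share/examples/hadoop/conf/core-site.xml"
    # @unexec: remove only an unmodified config at deinstall time
    if cmp -s "$etcf" "$exf"; then rm -f "$etcf"; fi
    # @exec: restore the default only when no config is present
    [ -f "$etcf" ] || cp "$exf" "$etcf"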