author    Ultrabug <ultrabug@gentoo.org>  2013-03-28 13:20:49 +0100
committer Ultrabug <ultrabug@gentoo.org>  2013-03-28 13:20:49 +0100
commit    8b1e71f53dde99046a15e3c9f981fb71182032de (patch)
tree      cb4399ad8348927e2d6bfd4e5d001c23279a9e63 /sys-cluster
parent    apache-hadoop bump (diff)
new sys-cluster/cloudera-hadoop
Diffstat (limited to 'sys-cluster')
-rw-r--r--  sys-cluster/cloudera-hadoop/Manifest                        |  11
-rw-r--r--  sys-cluster/cloudera-hadoop/cloudera-hadoop-4.2.0.ebuild    | 238
-rw-r--r--  sys-cluster/cloudera-hadoop/files/hadoop                    |  15
-rw-r--r--  sys-cluster/cloudera-hadoop/files/hadoop-layout.sh          |  30
-rw-r--r--  sys-cluster/cloudera-hadoop/files/hdfs/hadoop-hdfs.initd    |  90
-rwxr-xr-x  sys-cluster/cloudera-hadoop/files/hdfs/hdfs                 |   7
-rw-r--r--  sys-cluster/cloudera-hadoop/files/hdfs/hdfs-site.xml        |  25
-rw-r--r--  sys-cluster/cloudera-hadoop/files/hdfs/hdfs.limitsd         |  17
-rwxr-xr-x  sys-cluster/cloudera-hadoop/files/mapred/mapred             |   7
-rw-r--r--  sys-cluster/cloudera-hadoop/files/mapred/mapred-site.xml    |  21
-rw-r--r--  sys-cluster/cloudera-hadoop/files/mapred/mapreduce.limitsd  |  17
11 files changed, 478 insertions(+), 0 deletions(-)
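
As a rough orientation for what this commit adds, a hypothetical install sketch on a Gentoo host (package name, USE flags and service names are taken from the ebuild and init script below; everything else is an assumption):

    echo "sys-cluster/cloudera-hadoop hdfs mapreduce" >> /etc/portage/package.use
    emerge --ask sys-cluster/cloudera-hadoop
    rc-update add hadoop-hdfs-namenode default
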
diff --git a/sys-cluster/cloudera-hadoop/Manifest b/sys-cluster/cloudera-hadoop/Manifest
new file mode 100644
index 0000000..ded06c8
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/Manifest
@@ -0,0 +1,11 @@
+AUX hadoop 480 SHA256 714cd4c4bda48a7c34c438a8e7731fd677448938febe1a584d70d247ad4b50b8 SHA512 5dd56d4a53be67323a9ab3fb3c1b3d69697da53dcef3194348c6198cd6cac41f75d7f844126ba94ffd62b696f80cbfb265123208e7ce65c9baf668f46c794283 WHIRLPOOL 0930e2b17147b70dcc0a4b28d93436aec9d97e968e28418e6c57350f5334970470ed730e4521cef10575859629c5a9a746bc7e0c5686ba9a8ff80906b7975a8b
+AUX hadoop-layout.sh 1364 SHA256 94114de8dedae91777c594f6463cfc83f0dcee29203d5e47a8a78f75a7365afc SHA512 36b4c3e82ff434494518b942b3e2caea7b1b118a834d7cbf316b7506609ce53b2518237c15bca95ee22c888589cf08e65d04649a1600be49a3186862dcc65693 WHIRLPOOL b2fbe77350e09d0cb8c2ebb51d9f9b0653f1abf199382940f38604f8fb6f210b8d89372dcdba128c85e472e7fe926e8d759a76d9e54d2524aae4b888251b4a87
+AUX hdfs/hadoop-hdfs.initd 2251 SHA256 cee29634ea2faaa17debcf0572da9cf2cd794b36a55c64f4f9e1a8e93cb1d2a2 SHA512 9884499fc4a890b44f535cb2f2e2f6e616d0cb7bb0842dce84664c50a27b9587d52988a77346b83788af59a17d3100f351d09ebc9680b4dc42b6d700a6547ae7 WHIRLPOOL 92d3a9885273e860d03077da3cdda4c5c4f6ca541c1e8815d250340f34660d1f4d37933a2e1a47f3fa64bd53febbdcc48ecbdde529ea9276065408b9e8f1d173
+AUX hdfs/hdfs 149 SHA256 64f66030aa9f1474af33e52840c57a2626d282ffd4ebd263f6e792dd98f14c4c SHA512 95bfeef8f5e0583cd5f0f3b97cc636c4f462fadd171d3e2d76ccbe837e5d2f319740094117f402377efae6c39dd69a64f90ffdaf7dcd832d24f52e48e27e86d6 WHIRLPOOL e308b399a8d37abb9e646840c4f8ef2540e8a4a125960b7ec11a4a60b823999a194a8c3fb60a03affd3e9302da7c53e3a2e88e7dd16ab5bb3793b84bd498be63
+AUX hdfs/hdfs-site.xml 1023 SHA256 a126891a37e24faa1fa265fc0e8f094956770d41432bd3afe1257bfe0ba5befe SHA512 47d36eed69932239e8ea15fdd9cafe97447cfad130f3b715ebca22b5e45f99e8a41020215cf857194d33f187ea11e6116aede91d37465aa044ab112a2e63e92d WHIRLPOOL 2c2d816d6deb60249a7fdf68cf7f11ebda1366b0938317b38f438ff9e4de7ae190e8af4dfa3d76d2795363bc1555719136104c4b17540b077c2a6635c0925bfb
+AUX hdfs/hdfs.limitsd 822 SHA256 051ccb6663e66ec861d5436dcf9478825b89d098e5e6cda77c71689fdd0d937d SHA512 29078d4fe566f14531f9347ccd13421aa44e240e042d7f65a82e94d1bb4629e5c3e80d34b01fe99ba38e413ec934e53f50d63d41714cd5127ead08d6f501170d WHIRLPOOL 0676e2b1e0af74311494828c53c07e1f6de336204ebdad797c951e9b9909b2944b54c6875f4664e80bf83dc3d98439eb75532e45f420c7b2a301195f5ca552cc
+AUX mapred/mapred 156 SHA256 691f311d7ee1109d8bb05c120f5d5bf432e06541fad7a3eceb74e4b443bbb526 SHA512 b3ac324d43bd4dacbf8a4ac1cea5cc8a24bf57ab40b37e5f3c2ec73cd2481523554372df9bb8ca71e20df8420b9aa34480b122ccdf9ac715b21e1bcaa46ca99f WHIRLPOOL 2c738ee652fe563b440dd716a2462e180071a75f37e9a5a48b616f88175b2e134b774cd228208cac1177337f2f59d2eee885632f0ac375a8c736a8011a072c56
+AUX mapred/mapred-site.xml 904 SHA256 73afa1c41ea13cc7f61e384bb2e75087931ae22e4b2906e55fc5220130322da1 SHA512 09f3e325414969a0553cff968544030a88ed53f482bc7c89b4173378e3713fae03504e8d49b4296175af24bbc498c7edfdcb5fcea6515b061f29f26bcbe1a4c2 WHIRLPOOL 1a7344e70100b463153da555f1f3e5b47172af89b4e5afb2d8e7873914451ef3b565c5b95a77e985b30f633b969b5e7e452d395e517e6e502e3dcbc91498e094
+AUX mapred/mapreduce.limitsd 832 SHA256 dacbbc9bf0d35e50c97648fd4555f9da562de325d1771a9e29796d761b9e78f6 SHA512 97c11f5afb5ef960194196c7750bcecdb4d79e3cbb0bed369a68e5f828b430eb79873f08af6eaa7455cd363dd0cd8eb7a99894d5ab3cc0f99eb96a58403b3a4d WHIRLPOOL 3b382d5c899d945532f6706f88a0103281a38a888757355bec0bb2970d423f1e7e5c49f644f7599d32bcd8315ee80444fedf7409587f11a6758c3ed4b5dfb726
+DIST hadoop-2.0.0-cdh4.2.0.tar.gz 135568139 SHA256 0725b7a2987363dde1ef36ae446093d1bde11412f89986b5e9c1f78568a711d3 SHA512 72846d38f0bcd6ffb600694e879d0d89aef4d53fc100cd2c97f87bde9f52e3f68c1a9adda8bde25038093e01bd1f2ea4388c7ac36f38bb51edeca73adb0982e4 WHIRLPOOL d5e0658386bceb527275b2be3f467ebdcdb7da09a79e1d8ab08e5b8a95ba19984d059330c1895f1d213d2de2524745da613506de1e4619de9c675db589af0c8b
+EBUILD cloudera-hadoop-4.2.0.ebuild 7410 SHA256 07cd085612b2ec66ed9c69c1152237ab5f31ebe76e9aed54fe934f4c6b38db25 SHA512 f52fc683290d87970c08399c07f7ae444b33bbb1af9db291e3faf4dc3b1846313805d795a05a658a061e10e59c6a26ea6b089fd8e3be016069df712f4417b703 WHIRLPOOL bbf43560648d620be3a9f1175d5cfc9b1093093954d24e1c52297516171ff74009c36133c9f2928470f5ab7c115d3186e47ba1ff38d2161fbc11b0d57b081ee8
diff --git a/sys-cluster/cloudera-hadoop/cloudera-hadoop-4.2.0.ebuild b/sys-cluster/cloudera-hadoop/cloudera-hadoop-4.2.0.ebuild
new file mode 100644
index 0000000..7377be3
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/cloudera-hadoop-4.2.0.ebuild
@@ -0,0 +1,238 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Header: $
+
+EAPI="5"
+
+inherit eutils java-utils-2
+
+MY_PV="2.0.0"
+MY_PN="hadoop"
+MY_P="${MY_PN}-${PV}"
+
+DESCRIPTION="Cloudera’s Distribution for Apache Hadoop"
+HOMEPAGE="http://hadoop.apache.org"
+SRC_URI="http://archive.cloudera.com/cdh4/cdh/4/${MY_PN}-${MY_PV}-cdh${PV}.tar.gz"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64 ~x86"
+RESTRICT="mirror" # binchecks
+IUSE="hdfs httpfs mapreduce"
+
+# TODO: the mapreduce USE flag is missing a dependency on hadoop-yarn
+DEPEND=""
+RDEPEND=">=virtual/jre-1.6
+ dev-java/java-config-wrapper
+ =dev-libs/protobuf-2.4.0a"
+
+CONFIG_DIR=/etc/"${MY_PN}"/conf
+export CONFIG_PROTECT="${CONFIG_PROTECT} ${CONFIG_DIR}"
+
+S=${WORKDIR}/hadoop-"${MY_PV}"-cdh"${PV}"
+
+pkg_setup(){
+ enewgroup hadoop
+ if use hdfs; then
+ enewgroup hdfs
+ enewuser hdfs -1 /bin/bash /var/lib/hdfs "hdfs,hadoop"
+ fi
+ if use mapreduce; then
+ enewgroup mapred
+ enewuser mapred -1 /bin/bash /var/lib/hadoop-mapreduce "mapred,hadoop"
+ fi
+}
+
+src_compile() {
+ export JAVA_HOME=$(java-config -g JAVA_HOME)
+
+ pushd src
+ mvn package -DskipTests -Pnative || die
+ popd
+}
+
+install_hdfs() {
+ diropts -m755 -o root -g root
+ pushd src/hadoop-hdfs-project/hadoop-hdfs/target
+ insinto /usr/$(get_libdir)
+ dolib.so native/target/usr/local/lib/libhdfs.so.0.0.0
+ #
+ insinto /usr/lib/hadoop-hdfs
+ doins hadoop-hdfs-"${MY_PV}"-cdh"${PV}".jar
+ doins hadoop-hdfs-"${MY_PV}"-cdh"${PV}"-tests.jar
+ dosym hadoop-hdfs-"${MY_PV}"-cdh"${PV}".jar /usr/lib/hadoop-hdfs/hadoop-hdfs.jar
+ #
+ doins -r webapps
+ popd
+ doins -r share/hadoop/hdfs/lib
+
+ insinto /usr/lib/hadoop-hdfs/bin
+ doins bin/hdfs
+ fperms 755 /usr/lib/hadoop-hdfs/bin/hdfs
+
+ insinto /usr/lib/hadoop-hdfs/sbin
+ doins sbin/distribute-exclude.sh
+ doins sbin/refresh-namenodes.sh
+ fperms 0755 /usr/lib/hadoop-hdfs/sbin/{distribute-exclude.sh,refresh-namenodes.sh}
+
+ insinto /usr/lib/hadoop/libexec
+ doins libexec/hdfs-config.sh
+ fperms 0755 /usr/lib/hadoop/libexec/hdfs-config.sh
+
+ insinto /etc/security/limits.d
+ newins "${FILESDIR}"/hdfs/hdfs.limitsd hdfs.conf
+
+ insinto /etc/hadoop/conf
+ doins "${FILESDIR}"/hdfs/hdfs-site.xml
+
+ dobin "${FILESDIR}"/hdfs/hdfs
+
+ diropts -m775 -o root -g hadoop
+ dodir /var/log/hadoop-hdfs
+
+ diropts -m775 -o hdfs -g hadoop
+ dodir /var/lib/hadoop-hdfs/ /var/lib/hadoop-hdfs/cache
+ fperms 1777 /var/lib/hadoop-hdfs/cache
+
+ newinitd "${FILESDIR}"/hdfs/hadoop-hdfs.initd hadoop-hdfs
+ for daemon in "datanode" "namenode" "secondarynamenode"; do
+ dosym hadoop-hdfs /etc/init.d/hadoop-hdfs-"${daemon}"
+ done
+}
+
+install_mapreduce() {
+ diropts -m755 -o root -g root
+ pushd src/hadoop-mapreduce-project
+ insinto /usr/lib/hadoop-mapreduce
+ for jar in $(find hadoop-mapreduce-client/ -type f -name "*.jar"); do
+ doins "${jar}"
+ done
+ # rename mapreduce-client-app
+ mv "${D}"/usr/lib/hadoop-mapreduce/mr-app.jar "${D}"/usr/lib/hadoop-mapreduce/hadoop-mapreduce-client-app-"${MY_PV}"-cdh"${PV}".jar
+ mv "${D}"/usr/lib/hadoop-mapreduce/mr-app-tests.jar "${D}"/usr/lib/hadoop-mapreduce/hadoop-mapreduce-client-app-"${MY_PV}"-cdh"${PV}"-tests.jar
+ # symlinks
+ for categ in "app" "common" "core" "hs" "jobclient" "shuffle"; do
+ dosym hadoop-mapreduce-client-"${categ}"-"${MY_PV}"-cdh"${PV}".jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-client-"${categ}".jar
+ done
+ # examples
+ doins hadoop-mapreduce-examples/target/hadoop-mapreduce-examples-"${MY_PV}"-cdh"${PV}".jar
+ dosym hadoop-mapreduce-examples-"${MY_PV}"-cdh"${PV}".jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar
+ popd
+ pushd src/hadoop-tools
+ for categ in "archives" "datajoin" "distcp" "extras" "gridmix" "rumen" "streaming"; do
+ doins hadoop-"${categ}"/target/hadoop-"${categ}"-"${MY_PV}"-cdh"${PV}".jar
+ dosym hadoop-"${categ}"-"${MY_PV}"-cdh"${PV}".jar /usr/lib/hadoop-mapreduce/hadoop-"${categ}".jar
+ done
+ popd
+ doins -r share/hadoop/mapreduce/lib
+
+ insinto /usr/lib/hadoop-mapreduce/bin
+ doins bin/mapred
+ doins src/hadoop-tools/hadoop-pipes/target/native/examples/*
+ fperms 755 /usr/lib/hadoop-mapreduce/bin/{mapred,pipes-sort,wordcount-nopipe,wordcount-part,wordcount-simple}
+
+ insinto /usr/lib/hadoop-mapreduce/sbin
+ doins sbin/mr-jobhistory-daemon.sh
+ fperms 0755 /usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh
+
+ insinto /usr/lib/hadoop/libexec
+ doins libexec/mapred-config.sh
+ fperms 0755 /usr/lib/hadoop/libexec/mapred-config.sh
+
+ insinto /etc/security/limits.d
+ newins "${FILESDIR}"/mapred/mapreduce.limitsd mapreduce.conf
+
+ insinto /etc/hadoop/conf
+ doins "${FILESDIR}"/mapred/mapred-site.xml
+
+ dobin "${FILESDIR}"/mapred/mapred
+
+ diropts -m775 -o root -g hadoop
+ dodir /var/log/hadoop-mapreduce
+
+ diropts -m775 -o mapred -g hadoop
+ dodir /var/lib/hadoop-mapreduce/ /var/lib/hadoop-mapreduce/cache
+ fperms 1777 /var/lib/hadoop-mapreduce/cache
+}
+
+src_install() {
+ # config dir
+ insinto ${CONFIG_DIR}
+ for config_file in "core-site.xml" "hadoop-metrics.properties" \
+ "hadoop-metrics2.properties" "log4j.properties" "slaves" \
+ "ssl-client.xml.example" "ssl-server.xml.example"; do
+ doins etc/hadoop/"${config_file}"
+ done
+ echo "JAVA_HOME='$(java-config -g JAVA_HOME)'" > "${T}"/hadoop-env.sh
+ doins "${T}"/hadoop-env.sh
+
+ # /usr/lib dirs
+ diropts -m755 -o root -g root
+ insinto /usr/lib/"${MY_PN}"
+
+ # common
+ pushd src/hadoop-common-project/hadoop-common/target
+ doins hadoop-common-"${MY_PV}"-cdh"${PV}".jar
+ doins hadoop-common-"${MY_PV}"-cdh"${PV}"-tests.jar
+ popd
+ dosym hadoop-common-"${MY_PV}"-cdh"${PV}".jar /usr/lib/"${MY_PN}"/hadoop-common.jar
+
+ # annotations
+ pushd src/hadoop-common-project/hadoop-annotations/target
+ doins hadoop-annotations-"${MY_PV}"-cdh"${PV}".jar
+ popd
+ dosym hadoop-annotations-"${MY_PV}"-cdh"${PV}".jar /usr/lib/"${MY_PN}"/hadoop-annotations.jar
+
+ # auth
+ pushd src/hadoop-common-project/hadoop-auth/target
+ doins hadoop-auth-"${MY_PV}"-cdh"${PV}".jar
+ popd
+ dosym hadoop-auth-"${MY_PV}"-cdh"${PV}".jar /usr/lib/"${MY_PN}"/hadoop-auth.jar
+
+ ## bin
+ insinto /usr/lib/"${MY_PN}"/bin
+ doins bin/hadoop bin/rcc
+
+ ## lib
+ insinto /usr/lib/"${MY_PN}"/lib
+ pushd src/hadoop-tools
+ for jar in $(find . -type f -name "*.jar"); do
+ doins "${jar}"
+ done
+ popd
+ find "${D}"/usr/lib/"${MY_PN}"/lib -type f -name "hadoop-*.jar" -delete
+
+ ## lib/native
+ insinto /usr/lib/"${MY_PN}"/lib/native
+ doins src/hadoop-hdfs-project/hadoop-hdfs/target/native/target/usr/local/lib/libhdfs.a
+ doins src/hadoop-tools/hadoop-pipes/target/native/libhadooputils.a
+ doins src/hadoop-tools/hadoop-pipes/target/native/libhadooppipes.a
+ doins src/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/libhadoop.a
+ #
+ doins src/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/libhadoop.so
+ doins src/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/libhadoop.so.1.0.0
+
+ ## libexec
+ insinto /usr/lib/"${MY_PN}"/libexec
+ doins libexec/hadoop-config.sh
+ doins "${FILESDIR}"/hadoop-layout.sh
+ fperms 0755 /usr/lib/"${MY_PN}"/libexec/{hadoop-config.sh,hadoop-layout.sh}
+
+ ## sbin
+ insinto /usr/lib/"${MY_PN}"/sbin
+ doins sbin/hadoop-daemon.sh sbin/hadoop-daemons.sh sbin/slaves.sh
+ fperms 0755 /usr/lib/"${MY_PN}"/sbin/{hadoop-daemon.sh,hadoop-daemons.sh,slaves.sh}
+
+ ## conf
+ dosym ${CONFIG_DIR} /usr/lib/"${MY_PN}"/etc/hadoop
+
+ # bin
+ dobin "${FILESDIR}"/hadoop
+ fperms 0755 /usr/lib/hadoop/bin/hadoop
+
+ # HDFS ?
+ use hdfs && install_hdfs
+
+ # MAPREDUCE ?
+ use mapreduce && install_mapreduce
+}
diff --git a/sys-cluster/cloudera-hadoop/files/hadoop b/sys-cluster/cloudera-hadoop/files/hadoop
new file mode 100644
index 0000000..af66e02
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/hadoop
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+export JAVA_HOME=$(java-config -g JAVA_HOME)
+
+export HADOOP_HOME_WARN_SUPPRESS=true
+export HADOOP_PREFIX=/usr/lib/hadoop
+export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
+export HADOOP_CONF_DIR=/etc/hadoop/conf
+export HADOOP_COMMON_HOME=/usr/lib/hadoop
+export HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
+export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
+# export YARN_HOME=/usr/lib/hadoop-yarn
+# export JSVC_HOME=/usr/lib/bigtop-utils
+
+exec /usr/lib/hadoop/bin/hadoop "$@"
diff --git a/sys-cluster/cloudera-hadoop/files/hadoop-layout.sh b/sys-cluster/cloudera-hadoop/files/hadoop-layout.sh
new file mode 100644
index 0000000..b65bf0b
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/hadoop-layout.sh
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+HADOOP_COMMON_DIR="./"
+HADOOP_COMMON_LIB_JARS_DIR="lib"
+HADOOP_COMMON_LIB_NATIVE_DIR="lib/native"
+HDFS_DIR="./"
+HDFS_LIB_JARS_DIR="lib"
+YARN_DIR="./"
+YARN_LIB_JARS_DIR="lib"
+MAPRED_DIR="./"
+MAPRED_LIB_JARS_DIR="lib"
+
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-"/usr/lib/hadoop/libexec"}
+HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop/conf"}
+HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME:-"/usr/lib/hadoop"}
+HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME:-"/usr/lib/hadoop-hdfs"}
+HADOOP_MAPRED_HOME=${HADOOP_MAPRED_HOME:-"/usr/lib/hadoop-mapreduce"}
+YARN_HOME=${YARN_HOME:-"/usr/lib/hadoop-yarn"}
diff --git a/sys-cluster/cloudera-hadoop/files/hdfs/hadoop-hdfs.initd b/sys-cluster/cloudera-hadoop/files/hdfs/hadoop-hdfs.initd
new file mode 100644
index 0000000..a192147
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/hdfs/hadoop-hdfs.initd
@@ -0,0 +1,90 @@
+#!/sbin/runscript
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Header: $
+
+extra_commands="format"
+
+DAEMON=${SVCNAME/hadoop-hdfs-}
+
+depend() {
+ use dns net
+}
+
+chk_initd() {
+ if [ "${DAEMON}" == "${SVCNAME}" ]; then
+ eerror "You should not run this init script, use the provided aliases"
+ eend 1
+ return 1
+ fi
+}
+
+init_env_vars() {
+ if [ -n "${HADOOP_CONF_DIR}" ]; then
+ export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}
+ else
+ export HADOOP_CONF_DIR=/etc/hadoop/conf
+ fi
+
+ # source hadoop-env.sh for configuration
+ if test -f ${HADOOP_CONF_DIR}/hadoop-env.sh; then
+ . ${HADOOP_CONF_DIR}/hadoop-env.sh
+ fi
+ export JAVA_HOME=${JAVA_HOME}
+
+ # fixed
+ export HADOOP_PREFIX=/usr/lib/hadoop
+ export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec/
+ export HADOOP_PID_DIR=/var/run/hadoop-hdfs
+ export HADOOP_LOG_DIR=/var/log/hadoop-hdfs
+
+ # user defined, in hadoop-env.sh
+ if [ "${HADOOP_IDENT_STRING}" == "" ]; then
+ export HADOOP_IDENT_STRING=$HADOOP_IDENT_STRING
+ else
+ export HADOOP_IDENT_STRING=hdfs
+ fi
+
+ # Determine if we're starting a secure datanode, and if so, redefine appropriate variables
+ if [ -n "$HADOOP_SECURE_DN_USER" ] && [ "${DAEMON}" == "datanode" ]; then
+ DN_USER=root
+ IDENT_USER=${HADOOP_SECURE_DN_USER}
+ else
+ DN_USER=hdfs
+ IDENT_USER=${DN_USER}
+ fi
+
+ # check for the required paths
+ checkpath -d -m 0755 -o hdfs:hdfs "${HADOOP_PID_DIR}"
+}
+
+start() {
+ chk_initd || exit 1
+ ebegin "Starting Cloudera Hadoop ${DAEMON}"
+ init_env_vars
+ start-stop-daemon --start --quiet --wait 3000 \
+ --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-${DAEMON}.pid \
+ -u ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- \
+ --config ${HADOOP_CONF_DIR} start ${DAEMON}
+ eend $?
+}
+
+stop() {
+ chk_initd || exit 1
+ ebegin "Stopping Cloudera Hadoop ${DAEMON}"
+ init_env_vars
+ start-stop-daemon --stop --quiet \
+ --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-${DAEMON}.pid
+ eend $?
+}
+
+format() {
+ if [ "${DAEMON}" == "namenode" ]; then
+ ebegin "Formatting Cloudera Hadoop ${DAEMON}"
+ init_env_vars
+ su ${DN_USER} --preserve-environment -- hdfs --config ${HADOOP_CONF_DIR} namenode -format
+ else
+ eerror "The format command is only available for the namenode daemon"
+ eend 1
+ fi
+}
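
A hedged usage sketch for the init script above: it is meant to be invoked through the per-daemon symlinks created by the ebuild (DAEMON is derived from SVCNAME, so running the bare hadoop-hdfs script is refused), and the namenode exposes the extra 'format' command:

    /etc/init.d/hadoop-hdfs-namenode format    # one-time namenode format, runs as the hdfs user
    rc-service hadoop-hdfs-namenode start
    rc-service hadoop-hdfs-datanode start      # on datanode hosts; set HADOOP_SECURE_DN_USER in hadoop-env.sh for a secure datanode
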
diff --git a/sys-cluster/cloudera-hadoop/files/hdfs/hdfs b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs
new file mode 100755
index 0000000..ca47f9d
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+export JAVA_HOME=$(java-config -g JAVA_HOME)
+
+export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec/
+
+exec /usr/lib/hadoop-hdfs/bin/hdfs "$@"
diff --git a/sys-cluster/cloudera-hadoop/files/hdfs/hdfs-site.xml b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs-site.xml
new file mode 100644
index 0000000..4948b47
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+ <property>
+ <name>dfs.name.dir</name>
+ <value>/var/lib/hadoop-hdfs/cache/hdfs/dfs/name</value>
+ </property>
+</configuration>
diff --git a/sys-cluster/cloudera-hadoop/files/hdfs/hdfs.limitsd b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs.limitsd
new file mode 100644
index 0000000..95bd535
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs.limitsd
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+hdfs - nofile 32768
+hdfs - nproc 65536
diff --git a/sys-cluster/cloudera-hadoop/files/mapred/mapred b/sys-cluster/cloudera-hadoop/files/mapred/mapred
new file mode 100755
index 0000000..a34bfc4
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/mapred/mapred
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+export JAVA_HOME=$(java-config -g JAVA_HOME)
+
+export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec/
+
+exec /usr/lib/hadoop-mapreduce/bin/mapred "$@"
diff --git a/sys-cluster/cloudera-hadoop/files/mapred/mapred-site.xml b/sys-cluster/cloudera-hadoop/files/mapred/mapred-site.xml
new file mode 100644
index 0000000..3fc8f34
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/mapred/mapred-site.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+</configuration>
diff --git a/sys-cluster/cloudera-hadoop/files/mapred/mapreduce.limitsd b/sys-cluster/cloudera-hadoop/files/mapred/mapreduce.limitsd
new file mode 100644
index 0000000..eb51cb9
--- /dev/null
+++ b/sys-cluster/cloudera-hadoop/files/mapred/mapreduce.limitsd
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+mapred - nofile 32768
+mapred - nproc 65536