From: Alexys Jacob <ultrabug@g.o>
To: gentoo-commits@l.g.o
Subject: [gentoo-commits] dev/ultrabug:master commit in: sys-cluster/cloudera-hadoop/files/hdfs/, sys-cluster/cloudera-hadoop/files/, ...
Date: Thu, 28 Mar 2013 12:21:00
Message-Id: 1364473249.8b1e71f53dde99046a15e3c9f981fb71182032de.ultrabug@gentoo
1 commit: 8b1e71f53dde99046a15e3c9f981fb71182032de
2 Author: Ultrabug <ultrabug <AT> gentoo <DOT> org>
3 AuthorDate: Thu Mar 28 12:20:49 2013 +0000
4 Commit: Alexys Jacob <ultrabug <AT> gentoo <DOT> org>
5 CommitDate: Thu Mar 28 12:20:49 2013 +0000
6 URL: http://git.overlays.gentoo.org/gitweb/?p=dev/ultrabug.git;a=commit;h=8b1e71f5
7
8 new sys-cluster/cloudera-hadoop
9
10 ---
11 sys-cluster/cloudera-hadoop/Manifest | 11 +
12 .../cloudera-hadoop/cloudera-hadoop-4.2.0.ebuild | 238 ++++++++++++++++++++
13 sys-cluster/cloudera-hadoop/files/hadoop | 15 ++
14 sys-cluster/cloudera-hadoop/files/hadoop-layout.sh | 30 +++
15 .../cloudera-hadoop/files/hdfs/hadoop-hdfs.initd | 90 ++++++++
16 sys-cluster/cloudera-hadoop/files/hdfs/hdfs | 7 +
17 .../cloudera-hadoop/files/hdfs/hdfs-site.xml | 25 ++
18 .../cloudera-hadoop/files/hdfs/hdfs.limitsd | 17 ++
19 sys-cluster/cloudera-hadoop/files/mapred/mapred | 7 +
20 .../cloudera-hadoop/files/mapred/mapred-site.xml | 21 ++
21 .../cloudera-hadoop/files/mapred/mapreduce.limitsd | 17 ++
22 11 files changed, 478 insertions(+), 0 deletions(-)
23
24 diff --git a/sys-cluster/cloudera-hadoop/Manifest b/sys-cluster/cloudera-hadoop/Manifest
25 new file mode 100644
26 index 0000000..ded06c8
27 --- /dev/null
28 +++ b/sys-cluster/cloudera-hadoop/Manifest
29 @@ -0,0 +1,11 @@
30 +AUX hadoop 480 SHA256 714cd4c4bda48a7c34c438a8e7731fd677448938febe1a584d70d247ad4b50b8 SHA512 5dd56d4a53be67323a9ab3fb3c1b3d69697da53dcef3194348c6198cd6cac41f75d7f844126ba94ffd62b696f80cbfb265123208e7ce65c9baf668f46c794283 WHIRLPOOL 0930e2b17147b70dcc0a4b28d93436aec9d97e968e28418e6c57350f5334970470ed730e4521cef10575859629c5a9a746bc7e0c5686ba9a8ff80906b7975a8b
31 +AUX hadoop-layout.sh 1364 SHA256 94114de8dedae91777c594f6463cfc83f0dcee29203d5e47a8a78f75a7365afc SHA512 36b4c3e82ff434494518b942b3e2caea7b1b118a834d7cbf316b7506609ce53b2518237c15bca95ee22c888589cf08e65d04649a1600be49a3186862dcc65693 WHIRLPOOL b2fbe77350e09d0cb8c2ebb51d9f9b0653f1abf199382940f38604f8fb6f210b8d89372dcdba128c85e472e7fe926e8d759a76d9e54d2524aae4b888251b4a87
32 +AUX hdfs/hadoop-hdfs.initd 2251 SHA256 cee29634ea2faaa17debcf0572da9cf2cd794b36a55c64f4f9e1a8e93cb1d2a2 SHA512 9884499fc4a890b44f535cb2f2e2f6e616d0cb7bb0842dce84664c50a27b9587d52988a77346b83788af59a17d3100f351d09ebc9680b4dc42b6d700a6547ae7 WHIRLPOOL 92d3a9885273e860d03077da3cdda4c5c4f6ca541c1e8815d250340f34660d1f4d37933a2e1a47f3fa64bd53febbdcc48ecbdde529ea9276065408b9e8f1d173
33 +AUX hdfs/hdfs 149 SHA256 64f66030aa9f1474af33e52840c57a2626d282ffd4ebd263f6e792dd98f14c4c SHA512 95bfeef8f5e0583cd5f0f3b97cc636c4f462fadd171d3e2d76ccbe837e5d2f319740094117f402377efae6c39dd69a64f90ffdaf7dcd832d24f52e48e27e86d6 WHIRLPOOL e308b399a8d37abb9e646840c4f8ef2540e8a4a125960b7ec11a4a60b823999a194a8c3fb60a03affd3e9302da7c53e3a2e88e7dd16ab5bb3793b84bd498be63
34 +AUX hdfs/hdfs-site.xml 1023 SHA256 a126891a37e24faa1fa265fc0e8f094956770d41432bd3afe1257bfe0ba5befe SHA512 47d36eed69932239e8ea15fdd9cafe97447cfad130f3b715ebca22b5e45f99e8a41020215cf857194d33f187ea11e6116aede91d37465aa044ab112a2e63e92d WHIRLPOOL 2c2d816d6deb60249a7fdf68cf7f11ebda1366b0938317b38f438ff9e4de7ae190e8af4dfa3d76d2795363bc1555719136104c4b17540b077c2a6635c0925bfb
35 +AUX hdfs/hdfs.limitsd 822 SHA256 051ccb6663e66ec861d5436dcf9478825b89d098e5e6cda77c71689fdd0d937d SHA512 29078d4fe566f14531f9347ccd13421aa44e240e042d7f65a82e94d1bb4629e5c3e80d34b01fe99ba38e413ec934e53f50d63d41714cd5127ead08d6f501170d WHIRLPOOL 0676e2b1e0af74311494828c53c07e1f6de336204ebdad797c951e9b9909b2944b54c6875f4664e80bf83dc3d98439eb75532e45f420c7b2a301195f5ca552cc
36 +AUX mapred/mapred 156 SHA256 691f311d7ee1109d8bb05c120f5d5bf432e06541fad7a3eceb74e4b443bbb526 SHA512 b3ac324d43bd4dacbf8a4ac1cea5cc8a24bf57ab40b37e5f3c2ec73cd2481523554372df9bb8ca71e20df8420b9aa34480b122ccdf9ac715b21e1bcaa46ca99f WHIRLPOOL 2c738ee652fe563b440dd716a2462e180071a75f37e9a5a48b616f88175b2e134b774cd228208cac1177337f2f59d2eee885632f0ac375a8c736a8011a072c56
37 +AUX mapred/mapred-site.xml 904 SHA256 73afa1c41ea13cc7f61e384bb2e75087931ae22e4b2906e55fc5220130322da1 SHA512 09f3e325414969a0553cff968544030a88ed53f482bc7c89b4173378e3713fae03504e8d49b4296175af24bbc498c7edfdcb5fcea6515b061f29f26bcbe1a4c2 WHIRLPOOL 1a7344e70100b463153da555f1f3e5b47172af89b4e5afb2d8e7873914451ef3b565c5b95a77e985b30f633b969b5e7e452d395e517e6e502e3dcbc91498e094
38 +AUX mapred/mapreduce.limitsd 832 SHA256 dacbbc9bf0d35e50c97648fd4555f9da562de325d1771a9e29796d761b9e78f6 SHA512 97c11f5afb5ef960194196c7750bcecdb4d79e3cbb0bed369a68e5f828b430eb79873f08af6eaa7455cd363dd0cd8eb7a99894d5ab3cc0f99eb96a58403b3a4d WHIRLPOOL 3b382d5c899d945532f6706f88a0103281a38a888757355bec0bb2970d423f1e7e5c49f644f7599d32bcd8315ee80444fedf7409587f11a6758c3ed4b5dfb726
39 +DIST hadoop-2.0.0-cdh4.2.0.tar.gz 135568139 SHA256 0725b7a2987363dde1ef36ae446093d1bde11412f89986b5e9c1f78568a711d3 SHA512 72846d38f0bcd6ffb600694e879d0d89aef4d53fc100cd2c97f87bde9f52e3f68c1a9adda8bde25038093e01bd1f2ea4388c7ac36f38bb51edeca73adb0982e4 WHIRLPOOL d5e0658386bceb527275b2be3f467ebdcdb7da09a79e1d8ab08e5b8a95ba19984d059330c1895f1d213d2de2524745da613506de1e4619de9c675db589af0c8b
40 +EBUILD cloudera-hadoop-4.2.0.ebuild 7410 SHA256 07cd085612b2ec66ed9c69c1152237ab5f31ebe76e9aed54fe934f4c6b38db25 SHA512 f52fc683290d87970c08399c07f7ae444b33bbb1af9db291e3faf4dc3b1846313805d795a05a658a061e10e59c6a26ea6b089fd8e3be016069df712f4417b703 WHIRLPOOL bbf43560648d620be3a9f1175d5cfc9b1093093954d24e1c52297516171ff74009c36133c9f2928470f5ab7c115d3186e47ba1ff38d2161fbc11b0d57b081ee8
41
42 diff --git a/sys-cluster/cloudera-hadoop/cloudera-hadoop-4.2.0.ebuild b/sys-cluster/cloudera-hadoop/cloudera-hadoop-4.2.0.ebuild
43 new file mode 100644
44 index 0000000..7377be3
45 --- /dev/null
46 +++ b/sys-cluster/cloudera-hadoop/cloudera-hadoop-4.2.0.ebuild
47 @@ -0,0 +1,238 @@
48 +# Copyright 1999-2013 Gentoo Foundation
49 +# Distributed under the terms of the GNU General Public License v2
50 +# $Header: $
51 +
52 +EAPI="5"
53 +
54 +inherit eutils java-utils-2
55 +
56 +MY_PV="2.0.0"
57 +MY_PN="hadoop"
58 +MY_P="${MY_PN}-${PV}"
59 +
60 +DESCRIPTION="Cloudera's Distribution for Apache Hadoop"
61 +HOMEPAGE="http://hadoop.apache.org"
62 +SRC_URI="http://archive.cloudera.com/cdh4/cdh/4/${MY_PN}-${MY_PV}-cdh${PV}.tar.gz"
63 +
64 +LICENSE="Apache-2.0"
65 +SLOT="0"
66 +KEYWORDS="~amd64 ~x86"
67 +RESTRICT="mirror" # binchecks
68 +IUSE="hdfs httpfs mapreduce"
69 +
70 +#TODO: mapreduce use is missing hadoop-yarn dep
71 +DEPEND=""
72 +RDEPEND=">=virtual/jre-1.6
73 + dev-java/java-config-wrapper
74 + =dev-libs/protobuf-2.4.0a"
75 +
76 +CONFIG_DIR=/etc/"${MY_PN}"/conf
77 +export CONFIG_PROTECT="${CONFIG_PROTECT} ${CONFIG_DIR}"
78 +
79 +S=${WORKDIR}/hadoop-"${MY_PV}"-cdh"${PV}"
80 +
81 +pkg_setup(){
82 + enewgroup hadoop
83 + if use hdfs; then
84 + enewgroup hdfs
85 + enewuser hdfs -1 /bin/bash /var/lib/hdfs "hdfs,hadoop"
86 + fi
87 + if use mapreduce; then
88 + enewgroup mapred
89 + enewuser mapred -1 /bin/bash /var/lib/hadoop-mapreduce "mapred,hadoop"
90 + fi
91 +}
92 +
93 +src_compile() {
94 + export JAVA_HOME=$(java-config -g JAVA_HOME)
95 +
96 + pushd src
97 + mvn package -DskipTests -Pnative || die
98 + popd
99 +}
100 +
101 +install_hdfs() {
102 + diropts -m755 -o root -g root
103 + pushd src/hadoop-hdfs-project/hadoop-hdfs/target
104 + insinto /usr/$(get_libdir)
105 + dolib.so native/target/usr/local/lib/libhdfs.so.0.0.0
106 + #
107 + insinto /usr/lib/hadoop-hdfs
108 + doins hadoop-hdfs-"${MY_PV}"-cdh"${PV}".jar
109 + doins hadoop-hdfs-"${MY_PV}"-cdh"${PV}"-tests.jar
110 + dosym hadoop-hdfs-"${MY_PV}"-cdh"${PV}".jar /usr/lib/hadoop-hdfs/hadoop-hdfs.jar
111 + #
112 + doins -r webapps
113 + popd
114 + doins -r share/hadoop/hdfs/lib
115 +
116 + insinto /usr/lib/hadoop-hdfs/bin
117 + doins bin/hdfs
118 + fperms 755 /usr/lib/hadoop-hdfs/bin/hdfs
119 +
120 + insinto /usr/lib/hadoop-hdfs/sbin
121 + doins sbin/distribute-exclude.sh
122 + doins sbin/refresh-namenodes.sh
123 + fperms 0755 /usr/lib/hadoop-hdfs/sbin/{distribute-exclude.sh,refresh-namenodes.sh}
124 +
125 + insinto /usr/lib/hadoop/libexec
126 + doins libexec/hdfs-config.sh
127 + fperms 0755 /usr/lib/hadoop/libexec/hdfs-config.sh
128 +
129 + insinto /etc/security/limits.d
130 + newins "${FILESDIR}"/hdfs/hdfs.limitsd hdfs.conf
131 +
132 + insinto /etc/hadoop/conf
133 + doins "${FILESDIR}"/hdfs/hdfs-site.xml
134 +
135 + dobin "${FILESDIR}"/hdfs/hdfs
136 +
137 + diropts -m775 -o root -g hadoop
138 + dodir /var/log/hadoop-hdfs
139 +
140 + diropts -m775 -o hdfs -g hadoop
141 + dodir /var/lib/hadoop-hdfs/ /var/lib/hadoop-hdfs/cache
142 + fperms 1777 /var/lib/hadoop-hdfs/cache
143 +
144 + newinitd "${FILESDIR}"/hdfs/hadoop-hdfs.initd hadoop-hdfs
145 + for daemon in "datanode" "namenode" "secondarynamenode"; do
146 + dosym hadoop-hdfs /etc/init.d/hadoop-hdfs-"${daemon}"
147 + done
148 +}
149 +
150 +install_mapreduce() {
151 + diropts -m755 -o root -g root
152 + pushd src/hadoop-mapreduce-project
153 + insinto /usr/lib/hadoop-mapreduce
154 + for jar in $(find hadoop-mapreduce-client/ -type f -name "*.jar"); do
155 + doins "${jar}"
156 + done
157 + # rename mapreduce-client-app
158 + mv "${D}"/usr/lib/hadoop-mapreduce/mr-app.jar "${D}"/usr/lib/hadoop-mapreduce/hadoop-mapreduce-client-app-"${MY_PV}"-cdh"${PV}".jar
159 + mv "${D}"/usr/lib/hadoop-mapreduce/mr-app-tests.jar "${D}"/usr/lib/hadoop-mapreduce/hadoop-mapreduce-client-app-"${MY_PV}"-cdh"${PV}"-tests.jar
160 + # symlinks
161 + for categ in "app" "common" "core" "hs" "jobclient" "shuffle"; do
162 + dosym hadoop-mapreduce-client-"${categ}"-"${MY_PV}"-cdh"${PV}".jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-client-"${categ}".jar
163 + done
164 + # examples
165 + doins hadoop-mapreduce-examples/target/hadoop-mapreduce-examples-"${MY_PV}"-cdh"${PV}".jar
166 + dosym hadoop-mapreduce-examples-"${MY_PV}"-cdh"${PV}".jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar
167 + popd
168 + pushd src/hadoop-tools
169 + for categ in "archives" "datajoin" "distcp" "extras" "gridmix" "rumen" "streaming"; do
170 + doins hadoop-"${categ}"/target/hadoop-"${categ}"-"${MY_PV}"-cdh"${PV}".jar
171 + dosym hadoop-"${categ}"-"${MY_PV}"-cdh"${PV}".jar /usr/lib/hadoop-mapreduce/hadoop-"${categ}".jar
172 + done
173 + popd
174 + doins -r share/hadoop/mapreduce/lib
175 +
176 + insinto /usr/lib/hadoop-mapreduce/bin
177 + doins bin/mapred
178 + doins src/hadoop-tools/hadoop-pipes/target/native/examples/*
179 + fperms 755 /usr/lib/hadoop-mapreduce/bin/{mapred,pipes-sort,wordcount-nopipe,wordcount-part,wordcount-simple}
180 +
181 + insinto /usr/lib/hadoop-mapreduce/sbin
182 + doins sbin/mr-jobhistory-daemon.sh
183 + fperms 0755 /usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh
184 +
185 + insinto /usr/lib/hadoop/libexec
186 + doins libexec/mapred-config.sh
187 + fperms 0755 /usr/lib/hadoop/libexec/mapred-config.sh
188 +
189 + insinto /etc/security/limits.d
190 + newins "${FILESDIR}"/mapred/mapreduce.limitsd mapreduce.conf
191 +
192 + insinto /etc/hadoop/conf
193 + doins "${FILESDIR}"/mapred/mapred-site.xml
194 +
195 + dobin "${FILESDIR}"/mapred/mapred
196 +
197 + diropts -m775 -o root -g hadoop
198 + dodir /var/log/hadoop-mapreduce
199 +
200 + diropts -m775 -o mapred -g hadoop
201 + dodir /var/lib/hadoop-mapreduce/ /var/lib/hadoop-mapreduce/cache
202 + fperms 1777 /var/lib/hadoop-mapreduce/cache
203 +}
204 +
205 +src_install() {
206 + # config dir
207 + insinto ${CONFIG_DIR}
208 + for config_file in "core-site.xml" "hadoop-metrics.properties" \
209 + "hadoop-metrics2.properties" "log4j.properties" "slaves" \
210 + "ssl-client.xml.example" "ssl-server.xml.example"; do
211 + doins etc/hadoop/"${config_file}"
212 + done
213 + echo "JAVA_HOME='$(java-config -g JAVA_HOME)'" > "${T}"/hadoop-env.sh
214 + doins "${T}"/hadoop-env.sh
215 +
216 + # /usr/lib dirs
217 + diropts -m755 -o root -g root
218 + insinto /usr/lib/"${MY_PN}"
219 +
220 + # common
221 + pushd src/hadoop-common-project/hadoop-common/target
222 + doins hadoop-common-"${MY_PV}"-cdh"${PV}".jar
223 + doins hadoop-common-"${MY_PV}"-cdh"${PV}"-tests.jar
224 + popd
225 + dosym hadoop-common-2.0.0-cdh4.2.0.jar /usr/lib/"${MY_PN}"/hadoop-common.jar
226 +
227 + # annotations
228 + pushd src/hadoop-common-project/hadoop-annotations/target
229 + doins hadoop-annotations-"${MY_PV}"-cdh"${PV}".jar
230 + popd
231 + dosym hadoop-annotations-2.0.0-cdh4.2.0.jar /usr/lib/"${MY_PN}"/hadoop-annotations.jar
232 +
233 + # auth
234 + pushd src/hadoop-common-project/hadoop-auth/target
235 + doins hadoop-auth-"${MY_PV}"-cdh"${PV}".jar
236 + popd
237 + dosym hadoop-auth-2.0.0-cdh4.2.0.jar /usr/lib/"${MY_PN}"/hadoop-auth.jar
238 +
239 + ## bin
240 + insinto /usr/lib/"${MY_PN}"/bin
241 + doins bin/hadoop bin/rcc
242 +
243 + ## lib
244 + insinto /usr/lib/"${MY_PN}"/lib
245 + pushd src/hadoop-tools
246 + for jar in $(find . -type f -name "*.jar"); do
247 + doins "${jar}"
248 + done
249 + popd
250 + find "${D}"/usr/lib/"${MY_PN}"/lib -type f -name "hadoop-*.jar" -delete
251 +
252 + ## lib/native
253 + insinto /usr/lib/"${MY_PN}"/lib/native
254 + doins src/hadoop-hdfs-project/hadoop-hdfs/target/native/target/usr/local/lib/libhdfs.a
255 + doins src/hadoop-tools/hadoop-pipes/target/native/libhadooputils.a
256 + doins src/hadoop-tools/hadoop-pipes/target/native/libhadooppipes.a
257 + doins src/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/libhadoop.a
258 + #
259 + doins src/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/libhadoop.so
260 + doins src/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/libhadoop.so.1.0.0
261 +
262 + ## libexec
263 + insinto /usr/lib/"${MY_PN}"/libexec
264 + doins libexec/hadoop-config.sh
265 + doins "${FILESDIR}"/hadoop-layout.sh
266 + fperms 0755 /usr/lib/"${MY_PN}"/libexec/{hadoop-config.sh,hadoop-layout.sh}
267 +
268 + ## sbin
269 + insinto /usr/lib/"${MY_PN}"/sbin
270 + doins sbin/hadoop-daemon.sh sbin/hadoop-daemons.sh sbin/slaves.sh
271 + fperms 0755 /usr/lib/"${MY_PN}"/sbin/{hadoop-daemon.sh,hadoop-daemons.sh,slaves.sh}
272 +
273 + ## conf
274 + dosym ${CONFIG_DIR} /usr/lib/"${MY_PN}"/etc/hadoop
275 +
276 + # bin
277 + dobin "${FILESDIR}"/hadoop
278 + fperms 0755 /usr/lib/hadoop/bin/hadoop
279 +
280 + # HDFS ?
281 + use hdfs && install_hdfs
282 +
283 + # MAPREDUCE ?
284 + use mapreduce && install_mapreduce
285 +}
286
287 diff --git a/sys-cluster/cloudera-hadoop/files/hadoop b/sys-cluster/cloudera-hadoop/files/hadoop
288 new file mode 100644
289 index 0000000..af66e02
290 --- /dev/null
291 +++ b/sys-cluster/cloudera-hadoop/files/hadoop
292 @@ -0,0 +1,15 @@
293 +#!/bin/sh
294 +
295 +export JAVA_HOME=$(java-config -g JAVA_HOME)
296 +
297 +export HADOOP_HOME_WARN_SUPPRESS=true
298 +export HADOOP_PREFIX=/usr/lib/hadoop
299 +export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
300 +export HADOOP_CONF_DIR=/etc/hadoop/conf
301 +export HADOOP_COMMON_HOME=/usr/lib/hadoop
302 +export HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
303 +export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
304 +# export YARN_HOME=/usr/lib/hadoop-yarn
305 +# export JSVC_HOME=/usr/lib/bigtop-utils
306 +
307 +exec /usr/lib/hadoop/bin/hadoop "$@"
308
309 diff --git a/sys-cluster/cloudera-hadoop/files/hadoop-layout.sh b/sys-cluster/cloudera-hadoop/files/hadoop-layout.sh
310 new file mode 100644
311 index 0000000..b65bf0b
312 --- /dev/null
313 +++ b/sys-cluster/cloudera-hadoop/files/hadoop-layout.sh
314 @@ -0,0 +1,30 @@
315 +# Licensed to the Apache Software Foundation (ASF) under one or more
316 +# contributor license agreements. See the NOTICE file distributed with
317 +# this work for additional information regarding copyright ownership.
318 +# The ASF licenses this file to You under the Apache License, Version 2.0
319 +# (the "License"); you may not use this file except in compliance with
320 +# the License. You may obtain a copy of the License at
321 +#
322 +# http://www.apache.org/licenses/LICENSE-2.0
323 +#
324 +# Unless required by applicable law or agreed to in writing, software
325 +# distributed under the License is distributed on an "AS IS" BASIS,
326 +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
327 +# See the License for the specific language governing permissions and
328 +# limitations under the License.
329 +HADOOP_COMMON_DIR="./"
330 +HADOOP_COMMON_LIB_JARS_DIR="lib"
331 +HADOOP_COMMON_LIB_NATIVE_DIR="lib/native"
332 +HDFS_DIR="./"
333 +HDFS_LIB_JARS_DIR="lib"
334 +YARN_DIR="./"
335 +YARN_LIB_JARS_DIR="lib"
336 +MAPRED_DIR="./"
337 +MAPRED_LIB_JARS_DIR="lib"
338 +
339 +HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-"/usr/lib/hadoop/libexec"}
340 +HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop/conf"}
341 +HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME:-"/usr/lib/hadoop"}
342 +HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME:-"/usr/lib/hadoop-hdfs"}
343 +HADOOP_MAPRED_HOME=${HADOOP_MAPRED_HOME:-"/usr/lib/hadoop-mapreduce"}
344 +YARN_HOME=${YARN_HOME:-"/usr/lib/hadoop-yarn"}
345
346 diff --git a/sys-cluster/cloudera-hadoop/files/hdfs/hadoop-hdfs.initd b/sys-cluster/cloudera-hadoop/files/hdfs/hadoop-hdfs.initd
347 new file mode 100644
348 index 0000000..a192147
349 --- /dev/null
350 +++ b/sys-cluster/cloudera-hadoop/files/hdfs/hadoop-hdfs.initd
351 @@ -0,0 +1,90 @@
352 +#!/sbin/runscript
353 +# Copyright 1999-2011 Gentoo Foundation
354 +# Distributed under the terms of the GNU General Public License v2
355 +# $Header: $
356 +
357 +extra_commands="format"
358 +
359 +DAEMON=${SVCNAME/hadoop-hdfs-}
360 +
361 +depend() {
362 + use dns net
363 +}
364 +
365 +chk_initd() {
366 + if [ "${DAEMON}" == "${SVCNAME}" ]; then
367 + eerror "You should not run this init script, use the provided aliases"
368 + eend 1
369 + return 1
370 + fi
371 +}
372 +
373 +init_env_vars() {
374 + if [ -n "${HADOOP_CONF_DIR}" ]; then
375 + export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}
376 + else
377 + export HADOOP_CONF_DIR=/etc/hadoop/conf
378 + fi
379 +
380 + # source hadoop-env.sh for configuration
381 + if test -f ${HADOOP_CONF_DIR}/hadoop-env.sh; then
382 + . ${HADOOP_CONF_DIR}/hadoop-env.sh
383 + fi
384 + export JAVA_HOME=${JAVA_HOME}
385 +
386 + # fixed
387 + export HADOOP_PREFIX=/usr/lib/hadoop
388 + export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec/
389 + export HADOOP_PID_DIR=/var/run/hadoop-hdfs
390 + export HADOOP_LOG_DIR=/var/log/hadoop-hdfs
391 +
392 + # user defined, in hadoop-env.sh
393 + if [ -n "${HADOOP_IDENT_STRING}" ]; then
394 + export HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING}
395 + else
396 + export HADOOP_IDENT_STRING=hdfs
397 + fi
398 +
399 + # Determine if we're starting a secure datanode, and if so, redefine appropriate variables
400 + if [ -n "$HADOOP_SECURE_DN_USER" ] && [ "${DAEMON}" == "datanode" ]; then
401 + DN_USER=root
402 + IDENT_USER=${HADOOP_SECURE_DN_USER}
403 + else
404 + DN_USER=hdfs
405 + IDENT_USER=${DN_USER}
406 + fi
407 +
408 + # check for the required paths
409 + checkpath -d -m 0755 -o hdfs:hdfs "${HADOOP_PID_DIR}"
410 +}
411 +
412 +start() {
413 + chk_initd || exit 1
414 + ebegin "Starting Cloudera Hadoop ${DAEMON}"
415 + init_env_vars
416 + start-stop-daemon --start --quiet --wait 3000 \
417 + --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-${DAEMON}.pid \
418 + -u ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- \
419 + --config ${HADOOP_CONF_DIR} start ${DAEMON}
420 + eend $?
421 +}
422 +
423 +stop() {
424 + chk_initd || exit 1
425 + ebegin "Stopping Cloudera Hadoop ${DAEMON}"
426 + init_env_vars
427 + start-stop-daemon --stop --quiet \
428 + --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-${DAEMON}.pid
429 + eend $?
430 +}
431 +
432 +format() {
433 + if [ "${DAEMON}" == "namenode" ]; then
434 + ebegin "Formatting Cloudera Hadoop ${DAEMON}"
435 + init_env_vars
436 + su ${DN_USER} --preserve-environment -- hdfs --config ${HADOOP_CONF_DIR} namenode -format
437 + else
438 + eerror "The format command is only available for the namenode daemon"
439 + eend 1
440 + fi
441 +}
442
443 diff --git a/sys-cluster/cloudera-hadoop/files/hdfs/hdfs b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs
444 new file mode 100755
445 index 0000000..ca47f9d
446 --- /dev/null
447 +++ b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs
448 @@ -0,0 +1,7 @@
449 +#!/bin/sh
450 +
451 +export JAVA_HOME=$(java-config -g JAVA_HOME)
452 +
453 +export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec/
454 +
455 +exec /usr/lib/hadoop-hdfs/bin/hdfs "$@"
456
457 diff --git a/sys-cluster/cloudera-hadoop/files/hdfs/hdfs-site.xml b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs-site.xml
458 new file mode 100644
459 index 0000000..4948b47
460 --- /dev/null
461 +++ b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs-site.xml
462 @@ -0,0 +1,25 @@
463 +<?xml version="1.0"?>
464 +<!--
465 + Licensed to the Apache Software Foundation (ASF) under one or more
466 + contributor license agreements. See the NOTICE file distributed with
467 + this work for additional information regarding copyright ownership.
468 + The ASF licenses this file to You under the Apache License, Version 2.0
469 + (the "License"); you may not use this file except in compliance with
470 + the License. You may obtain a copy of the License at
471 +
472 + http://www.apache.org/licenses/LICENSE-2.0
473 +
474 + Unless required by applicable law or agreed to in writing, software
475 + distributed under the License is distributed on an "AS IS" BASIS,
476 + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
477 + See the License for the specific language governing permissions and
478 + limitations under the License.
479 +-->
480 +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
481 +
482 +<configuration>
483 + <property>
484 + <name>dfs.name.dir</name>
485 + <value>/var/lib/hadoop-hdfs/cache/hdfs/dfs/name</value>
486 + </property>
487 +</configuration>
488
489 diff --git a/sys-cluster/cloudera-hadoop/files/hdfs/hdfs.limitsd b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs.limitsd
490 new file mode 100644
491 index 0000000..95bd535
492 --- /dev/null
493 +++ b/sys-cluster/cloudera-hadoop/files/hdfs/hdfs.limitsd
494 @@ -0,0 +1,17 @@
495 +# Licensed to the Apache Software Foundation (ASF) under one or more
496 +# contributor license agreements. See the NOTICE file distributed with
497 +# this work for additional information regarding copyright ownership.
498 +# The ASF licenses this file to You under the Apache License, Version 2.0
499 +# (the "License"); you may not use this file except in compliance with
500 +# the License. You may obtain a copy of the License at
501 +#
502 +# http://www.apache.org/licenses/LICENSE-2.0
503 +#
504 +# Unless required by applicable law or agreed to in writing, software
505 +# distributed under the License is distributed on an "AS IS" BASIS,
506 +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
507 +# See the License for the specific language governing permissions and
508 +# limitations under the License.
509 +
510 +hdfs - nofile 32768
511 +hdfs - nproc 65536
512
513 diff --git a/sys-cluster/cloudera-hadoop/files/mapred/mapred b/sys-cluster/cloudera-hadoop/files/mapred/mapred
514 new file mode 100755
515 index 0000000..a34bfc4
516 --- /dev/null
517 +++ b/sys-cluster/cloudera-hadoop/files/mapred/mapred
518 @@ -0,0 +1,7 @@
519 +#!/bin/sh
520 +
521 +export JAVA_HOME=$(java-config -g JAVA_HOME)
522 +
523 +export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec/
524 +
525 +exec /usr/lib/hadoop-mapreduce/bin/mapred "$@"
526
527 diff --git a/sys-cluster/cloudera-hadoop/files/mapred/mapred-site.xml b/sys-cluster/cloudera-hadoop/files/mapred/mapred-site.xml
528 new file mode 100644
529 index 0000000..3fc8f34
530 --- /dev/null
531 +++ b/sys-cluster/cloudera-hadoop/files/mapred/mapred-site.xml
532 @@ -0,0 +1,21 @@
533 +<?xml version="1.0"?>
534 +<!--
535 + Licensed to the Apache Software Foundation (ASF) under one or more
536 + contributor license agreements. See the NOTICE file distributed with
537 + this work for additional information regarding copyright ownership.
538 + The ASF licenses this file to You under the Apache License, Version 2.0
539 + (the "License"); you may not use this file except in compliance with
540 + the License. You may obtain a copy of the License at
541 +
542 + http://www.apache.org/licenses/LICENSE-2.0
543 +
544 + Unless required by applicable law or agreed to in writing, software
545 + distributed under the License is distributed on an "AS IS" BASIS,
546 + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
547 + See the License for the specific language governing permissions and
548 + limitations under the License.
549 +-->
550 +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
551 +
552 +<configuration>
553 +</configuration>
554
555 diff --git a/sys-cluster/cloudera-hadoop/files/mapred/mapreduce.limitsd b/sys-cluster/cloudera-hadoop/files/mapred/mapreduce.limitsd
556 new file mode 100644
557 index 0000000..eb51cb9
558 --- /dev/null
559 +++ b/sys-cluster/cloudera-hadoop/files/mapred/mapreduce.limitsd
560 @@ -0,0 +1,17 @@
561 +# Licensed to the Apache Software Foundation (ASF) under one or more
562 +# contributor license agreements. See the NOTICE file distributed with
563 +# this work for additional information regarding copyright ownership.
564 +# The ASF licenses this file to You under the Apache License, Version 2.0
565 +# (the "License"); you may not use this file except in compliance with
566 +# the License. You may obtain a copy of the License at
567 +#
568 +# http://www.apache.org/licenses/LICENSE-2.0
569 +#
570 +# Unless required by applicable law or agreed to in writing, software
571 +# distributed under the License is distributed on an "AS IS" BASIS,
572 +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
573 +# See the License for the specific language governing permissions and
574 +# limitations under the License.
575 +
576 +mapred - nofile 32768
577 +mapred - nproc 65536
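
For reference, a minimal usage sketch of the new package. This is illustrative only: it assumes the dev/ultrabug overlay is already configured as a repository, and the USE flag selection and node role below are example choices, not part of the commit.

# build with the wanted daemons enabled (USE="hdfs mapreduce" from IUSE above)
echo "sys-cluster/cloudera-hadoop hdfs mapreduce" >> /etc/portage/package.use
emerge -av sys-cluster/cloudera-hadoop

# hadoop-hdfs.initd is installed once and symlinked per daemon
rc-update add hadoop-hdfs-namenode default
rc-update add hadoop-hdfs-datanode default

# extra_commands="format" is only honoured through the namenode alias
rc-service hadoop-hdfs-namenode format
rc-service hadoop-hdfs-namenode start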