commit: 1311e5c324c842c715e9b4a98e77d80538ffe436
Author: Ultrabug <ultrabug <AT> gentoo <DOT> org>
AuthorDate: Thu Jul 5 16:24:27 2012 +0000
Commit: Alexys Jacob <ultrabug <AT> gentoo <DOT> org>
CommitDate: Thu Jul 5 16:24:27 2012 +0000
URL: http://git.overlays.gentoo.org/gitweb/?p=dev/ultrabug.git;a=commit;h=1311e5c3

new apache-hadoop-bin ebuild

---
 sys-cluster/apache-hadoop-bin/Manifest           |    3 +
 .../apache-hadoop-bin-1.0.3.ebuild               |  131 ++++++++++++++++++++
 sys-cluster/apache-hadoop-bin/files/hadoop.initd |   75 +++++++++++
 3 files changed, 209 insertions(+), 0 deletions(-)

diff --git a/sys-cluster/apache-hadoop-bin/Manifest b/sys-cluster/apache-hadoop-bin/Manifest
new file mode 100644
index 0000000..054b030
--- /dev/null
+++ b/sys-cluster/apache-hadoop-bin/Manifest
@@ -0,0 +1,3 @@
+AUX hadoop.initd 1833 RMD160 4550bb50f34508cbdccf20c23f855a4c213ef77c SHA1 bf4c2cadae9e4c24217626fbbe8004b2348c9637 SHA256 3a4567d6af2d011c357fc41a0159a3bf9ad47ef76e06a76f79ccaeb598d2cb38
+DIST hadoop-1.0.3.tar.gz 62428860 RMD160 e41421483156fd0fa65d608b206a17cd2a73a989 SHA1 5ca6b77e0a600475fae6770c52b47a751f646f9c SHA256 716ab51f75ffb70343c3cca02f7ba4722f42376edb67eecbd42a426a054e6423
+EBUILD apache-hadoop-bin-1.0.3.ebuild 3357 RMD160 1ade6bdb5313059919bdf8e0e60e15ecbebe7cf1 SHA1 c1b6eca5ffdb03ec8944f6cac19aaa52c9b2f4d7 SHA256 b32e1a2a3deefe6066740ff3b7036dddf2ef9da552be6c6652eb0cda1afa25eb

diff --git a/sys-cluster/apache-hadoop-bin/apache-hadoop-bin-1.0.3.ebuild b/sys-cluster/apache-hadoop-bin/apache-hadoop-bin-1.0.3.ebuild
new file mode 100644
index 0000000..2bb7bc4
--- /dev/null
+++ b/sys-cluster/apache-hadoop-bin/apache-hadoop-bin-1.0.3.ebuild
@@ -0,0 +1,131 @@
+# Copyright 1999-2010 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Header: $
+
+EAPI="4"
+
+inherit eutils
+
+MY_PN="hadoop"
+MY_P="${MY_PN}-${PV}"
+
+DESCRIPTION="Software framework for data intensive distributed applications"
+HOMEPAGE="http://hadoop.apache.org/"
+SRC_URI="mirror://apache/hadoop/core/${MY_P}/${MY_P}.tar.gz"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64 ~x86"
+RESTRICT="mirror"
+IUSE=""
+
+DEPEND=""
+RDEPEND=">=virtual/jre-1.6
+	net-misc/openssh
+	net-misc/rsync"
+
+S=${WORKDIR}/${MY_P}
+
+pkg_setup(){
+	enewgroup hadoop
+	enewuser hdfs -1 /bin/bash /var/lib/hadoop/hdfs hadoop
+	enewuser mapred -1 /bin/bash /var/lib/hadoop/mapred hadoop
+}
+
+src_install() {
+	# Get the arch for libs
+	if [ $(get_libdir) == "lib64" ]; then
+		local MY_ARCH="Linux-amd64-64"
+	else
+		local MY_ARCH="Linux-i386-32"
+	fi
+
+	# The hadoop-env.sh file needs JAVA_HOME set explicitly
+	JAVA_HOME=$(java-config -g JAVA_HOME)
+	sed -e "2iexport JAVA_HOME=${JAVA_HOME}" -i conf/hadoop-env.sh || die "sed failed"
+	cat >> conf/hadoop-env.sh <<-EOF
+
+# Added by Gentoo Portage
+export HADOOP_CONF_DIR=/etc/hadoop
+export HADOOP_LOG_DIR=/var/log/hadoop
+export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop
+export HADOOP_PID_DIR=/var/run/hadoop
+export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop
+EOF
+
+	# make useful dirs
+	diropts -m750 -o root -g hadoop
+	dodir /etc/"${MY_PN}"
+	dodir /usr/share/"${MY_PN}"
+	diropts -m770 -o root -g hadoop
+	dodir /var/log/"${MY_PN}"
+	dodir /var/run/"${MY_PN}"
+
+	# conf
+	mv "${S}"/conf/* "${D}${HADOOP_CONF}" || die "install failed"
+	rm -rf "${S}"/conf || die "install failed"
+
+	# /usr/bin stuff
+	for bin in "hadoop" "task-controller"; do
+		dobin bin/"${bin}"
+		rm bin/"${bin}" || die "install failed"
+		fowners root:hadoop /usr/bin/"${bin}"
+	done
+
+	# /usr/sbin stuff
+	sed -i -e "s@JAVA_HOME=/usr/java/default@JAVA_HOME=${JAVA_HOME}@g" \
+		sbin/update-hadoop-env.sh || die "sed failed"
+	for sbin in bin/* sbin/*; do
+		dosbin "${sbin}"
+		rm "${sbin}" || die "install failed"
+	done
+
+	# /usr/include stuff
+	insinto /usr/include/"${MY_PN}"
+	doins "${S}"/c++/"${MY_ARCH}"/include/"${MY_PN}"/*
+
+	# libs
+	pushd "${S}"/lib/native/"${MY_ARCH}"
+#	dolib *.la
+#	dolib.a *.a
+	for soname in *.so.1.0.0; do
+		dolib.so "${soname}"
+		dosym ./"${soname}" /usr/$(get_libdir)/"${soname/.0.0}"
+		dosym ./"${soname}" /usr/$(get_libdir)/"${soname/.1.0.0}"
+	done
+	popd
+	#
+	pushd "${S}"/c++/"${MY_ARCH}"/lib
+#	dolib *.la
+#	dolib.a *.a
+	for soname in *.so.0.0.0; do
+		dolib.so "${soname}"
+		dosym ./"${soname}" /usr/$(get_libdir)/"${soname/.0.0}"
+		dosym ./"${soname}" /usr/$(get_libdir)/"${soname/.0.0.0}"
+	done
+	popd
+
+	# libexec
+	insinto /usr/libexec
+	insopts -m755
+	doins "${S}"/libexec/*
+
+	# /usr/share stuff
+	rm -rf "${S}"/lib/native/
+	for d in "contrib" "lib" "webapps"; do
+		mv "${S}"/"${d}" "${D}"/usr/share/"${MY_PN}" || die "install failed"
+	done
+	mv "${S}"/share/"${MY_PN}"/templates "${D}"/usr/share/"${MY_PN}" || die "install failed"
+	mv "${S}"/*.jar "${D}"/usr/share/"${MY_PN}" || die "install failed"
+
+	# init scripts
+	newinitd "${FILESDIR}"/"${MY_PN}".initd "${MY_PN}"
+	for i in "namenode" "datanode" "historyserver" "jobtracker" "secondarynamenode" "tasktracker"
+	do
+		dosym /etc/init.d/"${MY_PN}" /etc/init.d/"${MY_PN}"-"${i}"
+	done
+}
+
+pkg_postinst() {
+	elog "For info on configuration see http://hadoop.apache.org/core/docs/r${PV}"
+}

diff --git a/sys-cluster/apache-hadoop-bin/files/hadoop.initd b/sys-cluster/apache-hadoop-bin/files/hadoop.initd
new file mode 100644
index 0000000..53f276d
--- /dev/null
+++ b/sys-cluster/apache-hadoop-bin/files/hadoop.initd
@@ -0,0 +1,75 @@
+#!/sbin/runscript
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Header: $
+
+extra_commands="format"
+
+DAEMON=${SVCNAME/hadoop-}
+
+depend() {
+	use dns net
+}
+
+chk_initd() {
+	if [ "${DAEMON}" == "${SVCNAME}" ]; then
+		eerror "You should not run this init script, use the provided aliases"
+		eend 1
+		return 1
+	fi
+}
+
+init_env_vars() {
+	export HADOOP_PREFIX="/usr"
+
+	# source hadoop-env.sh
+	if test -f /etc/hadoop/hadoop-env.sh; then
+		. /etc/hadoop/hadoop-env.sh
+	fi
+
+	# setup secure dn user for datanodes
+	if [ -n "$HADOOP_SECURE_DN_USER" ] && [ "${DAEMON}" == "datanode" ]; then
+		DN_USER="root"
+		IDENT_USER=${HADOOP_SECURE_DN_USER}
+	elif [ "${DAEMON}" == "jobtracker" ] || [ "${DAEMON}" == "tasktracker" ] || [ "${DAEMON}" == "historyserver" ]; then
+		DN_USER="mapred"
+		IDENT_USER=${DN_USER}
+	else
+		DN_USER="hdfs"
+		IDENT_USER=${DN_USER}
+	fi
+
+	# check for the required paths
+	checkpath -d -m 0770 -o root:hadoop "${HADOOP_PID_DIR}"
+}
+
+start() {
+	chk_initd || exit 1
+	ebegin "Starting Apache Hadoop ${DAEMON}"
+	init_env_vars
+	start-stop-daemon --start --quiet \
+		--pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-${DAEMON}.pid \
+		-u ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- \
+		--config ${HADOOP_CONF_DIR} start ${DAEMON}
+	eend $?
+}
+
+stop() {
+	chk_initd || exit 1
+	ebegin "Stopping Apache Hadoop ${DAEMON}"
+	init_env_vars
+	start-stop-daemon --stop --quiet \
+		--pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-${DAEMON}.pid
+	eend $?
+}
+
+format() {
+	if [ "${DAEMON}" == "namenode" ]; then
+		ebegin "Formatting Apache Hadoop ${DAEMON}"
+		init_env_vars
+		su ${DN_USER} --preserve-environment -- ${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} namenode -format
+	else
+		eerror "The format command is only available for the namenode daemon"
+		eend 1
+	fi
+}
\ No newline at end of file