source: trunk/dports/java/hadoop/Portfile @ 100232

Last change on this file since 100232 was 100232, checked in by hum@…, 6 years ago

hadoop: update to 1.1.1.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 10.8 KB
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
# $Id: Portfile 100232 2012-12-04 15:12:45Z hum@macports.org $

PortSystem          1.0

name                hadoop
version             1.1.1
categories          java devel science
maintainers         hum openmaintainer

description         Open-source software for reliable, scalable, distributed computing
long_description    Hadoop is a distributed computing platform written in Java. \
                    It incorporates features similar to those of the Google File System \
                    and of MapReduce.

homepage            http://hadoop.apache.org/
platforms           darwin
license             Apache-2

master_sites        apache:hadoop/common/${distname}
checksums           rmd160  7eebc962eeb014c1b7ccf3242c5439ba8e1f3a2e \
                    sha256  52d650a84a2ff483ef9f8e2fac6d1dc79955646bc2aff720c3db717937aa4d84

patchfiles          patch-conf-hadoop-env.sh.diff \
                    patch-src-native.diff \
                    patch-src-c++.diff

depends_build       bin:ant:apache-ant \
                    port:autoconf port:automake port:libtool
depends_lib         port:zlib \
                    port:snappy

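# Apple's Java 1.6 home; the pre-fetch check below verifies it is installed.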
set java_home       /System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home

pre-fetch {
    # This port requires Mac OS X 10.6 or later, because the sudo -E option
    # is not available on Mac OS X 10.5 or earlier. See #34665.
    if {${os.platform} == "darwin" && ${os.major} <= 9} {
        ui_error "This port works on Mac OS X 10.6 (Snow Leopard) or later"
        return -code error "Mac OS X 10.6 (Snow Leopard) or later is required"
    }
    if {![file exists ${java_home}]} {
        ui_error "Java 1.6 is required, but not located at ${java_home}"
        return -code error "Java 1.6 missing"
    }
}

# Fix a file permission to build c++-libhdfs.
post-extract {
    file attributes ${worksrcpath}/src/c++/libhdfs/install-sh -permissions 0755
}

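# Declare an empty universal variant; universal archflags are added to the
# compiler flags explicitly via get_canonical_archflags below.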
variant universal {}

use_configure       no

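# Compiler and linker flags; post-patch below substitutes them into
# placeholders in build.xml.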
set java_include    -I/System/Library/Frameworks/JavaVM.framework/Headers
set cflags          "${configure.cflags} [get_canonical_archflags]"
set cxxflags        "${configure.cxxflags} [get_canonical_archflags cxx]"
set ldflags         "${configure.ldflags} [get_canonical_archflags] -framework JavaVM"
set cppflags        "${configure.cppflags} ${java_include}"

# Set configure args to build native and c++-libhdfs.
post-patch {
    set libs  "-ldl -lz -lsnappy"
    reinplace "s|@cc@|${configure.cc}|g"      ${worksrcpath}/build.xml
    reinplace "s|@cflags@|${cflags}|g"        ${worksrcpath}/build.xml
    reinplace "s|@cxx@|${configure.cxx}|g"    ${worksrcpath}/build.xml
    reinplace "s|@cxxflags@|${cxxflags}|g"    ${worksrcpath}/build.xml
    reinplace "s|@ldflags@|${ldflags}|g"      ${worksrcpath}/build.xml
    reinplace "s|@libs@|${libs}|g"            ${worksrcpath}/build.xml
    reinplace "s|@cppflags@|${cppflags}|g"    ${worksrcpath}/build.xml
    reinplace "s|@args@|--prefix=${prefix}|g" ${worksrcpath}/build.xml
}

# Build native and c++-libhdfs.
build.cmd           ant
build.args          -Dcompile.native=true \
                    -Dsnappy.prefix=${prefix} \
                    -Dcompile.c++=true \
                    -Dlibhdfs=true
build.target        compile-native compile-c++-libhdfs

# Fix install_name of dylib.
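# The dylibs are installed into ${prefix}/lib by destroot, so each library's
# install_name must point at its final location there.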
pre-destroot {
    foreach file [glob ${worksrcpath}/build/native/**/lib/*.dylib \
                       ${worksrcpath}/build/c++/**/lib/*.dylib] {
        if {[file isfile ${file}]} {
            set libname [file tail ${file}]
            system "install_name_tool -id ${prefix}/lib/${libname} ${file}"
        }
    }
}

# Hadoop home and conf directories.
set hadoop_basedir  ${prefix}/share/java
set hadoop_home     ${hadoop_basedir}/${distname}
set hadoop_conf_dir ${hadoop_home}/conf

# Documentation directory.
set hadoop_doc_dir  ${prefix}/share/doc/${distname}

# Working directories.
set hadoop_var_dir  ${prefix}/var/${name}
set hadoop_log_dir  ${hadoop_var_dir}/log
set hadoop_pid_dir  ${hadoop_var_dir}/run

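# Dedicated user and group that the Hadoop daemons run as.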
set hadoopuser      hadoop

add_users ${hadoopuser} \
    group=${hadoopuser} \
    realname=Hadoop\ Server \
    home=${hadoop_var_dir} \
    shell=/bin/bash

variant fusedfs description {Add Fuse-DFS} {
    depends_lib-append port:fuse4x

    patchfiles-append  patch-src-contrib-fusedfs.diff

    # libhdfs.dylib must be built before configuring fuse-dfs.
    post-build {
        set libs  "-lfuse -lhdfs"
        # "$@" is replaced with "${hadoop.root}/build/c++/${build.platform}/lib".
        # See files/patch-src-contrib-fusedfs.diff.
        set args  "--prefix=${prefix} \
                   CC=${configure.cc} \
                   CFLAGS=\"${cflags}\" \
                   LDFLAGS=\"${ldflags} -L$@\" \
                   LIBS=\"${libs}\" \
                   CPPFLAGS=\"${cppflags}\""
        set sh ${worksrcpath}/src/contrib/fuse-dfs/bootstrap.sh
        reinplace "s|\./configure|\./configure ${args}|" ${sh}
        # Build fusedfs.
        system -W ${worksrcpath} "ant compile-contrib -Dlibhdfs=1 -Dfusedfs=1"
    }

    post-destroot {
        xinstall -m 755 \
            ${worksrcpath}/build/contrib/fuse-dfs/fuse_dfs \
            ${destroot}${prefix}/bin
        # Fix install_name in fuse_dfs.
        set bin ${destroot}${prefix}/bin/fuse_dfs
        regexp {(\S+\/libhdfs\S+dylib)} [exec otool -L ${bin}] path
        system "install_name_tool -change ${path} ${prefix}/lib/libhdfs.dylib ${bin}"
        # Install fuse_dfs_wrapper.sh.
        xinstall -m 755 ${filespath}/fuse_dfs_wrapper.sh ${destroot}${hadoop_home}/bin
        set sh ${destroot}${hadoop_home}/bin/fuse_dfs_wrapper.sh
        reinplace "s|@hadoop_home@|${hadoop_home}|" ${sh}
        reinplace "s|@java_home@|${java_home}|"     ${sh}
        reinplace "s|@prefix@|${prefix}|"           ${sh}
    }
}

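# Install the Hadoop distribution under ${hadoop_home}, the native libraries
# under ${prefix}/lib, and extra documentation under ${hadoop_doc_dir}.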
destroot {
    # Copy the distribution to the Hadoop home directory.
    xinstall -m 755 -d ${destroot}${hadoop_home}
    foreach dir {bin conf contrib lib libexec sbin share webapps} {
        copy ${worksrcpath}/${dir} ${destroot}${hadoop_home}
    }
    foreach file [glob ${worksrcpath}/hadoop-*] {
        xinstall -m 644 ${file} ${destroot}${hadoop_home}
    }
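
    # Drop prebuilt native artifacts shipped in the tarball (built for other
    # platforms); the libraries built above are installed into ${prefix}/lib below.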
    delete ${destroot}${hadoop_home}/lib/native
    delete ${destroot}${hadoop_home}/libexec/jsvc.amd64

    # Patch for Mahout 0.4 to suppress warnings.
    set webinf ${destroot}${hadoop_home}/webapps/secondary/WEB-INF
    xinstall -m 755 -d ${webinf}
    destroot.keepdirs-append ${webinf}

    # Install native and c++ libraries.
    foreach file [glob ${worksrcpath}/build/native/**/lib/* \
                       ${worksrcpath}/build/c++/**/lib/*] {
        xinstall -m 644 ${file} ${destroot}${prefix}/lib
    }

    # Copy extra documents.
    xinstall -m 755 -d ${destroot}${hadoop_doc_dir}
    copy ${worksrcpath}/docs ${destroot}${hadoop_doc_dir}
    foreach file [glob ${worksrcpath}/*.txt] {
        xinstall -m 644 ${file} ${destroot}${hadoop_doc_dir}
    }

    # Install an extra script for this port.
    set hadoop_bin ${destroot}${prefix}/bin/hadoop-bin
    xinstall -m 755 ${filespath}/hadoop-bin      ${hadoop_bin}
    reinplace "s|@hadoop_home@|${hadoop_home}|g" ${hadoop_bin}
    reinplace "s|@java_home@|${java_home}|g"     ${hadoop_bin}
    reinplace "s|@hadoopuser@|${hadoopuser}|g"   ${hadoop_bin}

    # Set up 'hadoop-env.sh' in conf.
    set env_sh ${destroot}${hadoop_conf_dir}/hadoop-env.sh
    reinplace "s|@java_home@|${java_home}|g"           ${env_sh}
    reinplace "s|@hadoop_log_dir@|${hadoop_log_dir}|g" ${env_sh}
    reinplace "s|@hadoop_pid_dir@|${hadoop_pid_dir}|g" ${env_sh}

    # Create working directories.
    xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
    destroot.keepdirs-append \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
}

post-deactivate {
    ui_msg "********************************************************"
    ui_msg "* To revert the system after uninstalling the port:"
    ui_msg "* 1) Delete the Hadoop working directory:"
    ui_msg "*  $ sudo rm -rf ${hadoop_var_dir}"
    ui_msg "* 2) Delete the Hadoop user and group:"
    ui_msg "*  $ sudo dscl . -delete /Users/${hadoopuser}"
    ui_msg "*  $ sudo dscl . -delete /Groups/${hadoopuser}"
    ui_msg "********************************************************"
}

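# Run in pseudo-distributed mode by default.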
default_variants    +pseudo

set hadoop_tmp_dir  ${hadoop_var_dir}/cache

variant pseudo description {Run on a single node in pseudo-distributed mode} {
    patchfiles-append  patch-pseudo.diff

    post-destroot {
        # Set up the conf directory for pseudo-distributed mode.
        copy ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.pseudo
        move ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.local
        ln -s conf.pseudo ${destroot}${hadoop_conf_dir}

        # Set the maximum number of tasks based on the number of CPU cores.
        regexp {\d+} [exec sysctl hw.ncpu] ncpu
        set tasks_max [expr {$ncpu + 2}]

        # Set up configuration files.
        reinplace "s|@tasks_max@|${tasks_max}|g" \
            ${destroot}${hadoop_conf_dir}.pseudo/mapred-site.xml
        reinplace "s|@hadoop_tmp_dir@|${hadoop_tmp_dir}|" \
            ${destroot}${hadoop_conf_dir}.pseudo/core-site.xml

        # Create a temporary directory.
        xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
            ${destroot}${hadoop_tmp_dir}
        destroot.keepdirs-append \
            ${destroot}${hadoop_tmp_dir}
    }

    post-activate {
        # Set up passphraseless ssh.
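        # The start-all.sh and stop-all.sh scripts use ssh to launch the
        # daemons, so the hadoop user needs key-based login to localhost.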
        set ssh_dir ${hadoop_var_dir}/.ssh
        if {![file exists ${ssh_dir}]} {
            xinstall -m 700 -o ${hadoopuser} -g ${hadoopuser} -d ${ssh_dir}
            system "sudo -u ${hadoopuser} ssh-keygen -t rsa -P '' -f ${ssh_dir}/id_rsa"
            xinstall -m 644 -o ${hadoopuser} -g ${hadoopuser} \
                ${ssh_dir}/id_rsa.pub \
                ${ssh_dir}/authorized_keys
        }

        ui_msg "********************************************************"
        ui_msg "* To run on a single node in pseudo-distributed mode:"
        ui_msg "* 1) Turn on Remote Login (sshd):"
        ui_msg "*  check 'System Preferences > Sharing > Remote Login'"
        ui_msg "* 2) Format a new distributed filesystem:"
        ui_msg "*  $ hadoop-bin hadoop namenode -format"
        ui_msg "* 3) Start the hadoop daemons:"
        ui_msg "*  $ hadoop-bin start-all.sh"
        ui_msg "* 4) Perform any operations you like. To see examples:"
        ui_msg "*  $ open file://${hadoop_home}/docs/single_node_setup.html"
        ui_msg "* 5) When you're done, stop the daemons with:"
        ui_msg "*  $ hadoop-bin stop-all.sh"
        ui_msg "********************************************************"
    }
}

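# Track new releases via the Apache 'stable' distribution directory.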
livecheck.type      regex
livecheck.url       http://www.apache.org/dist/hadoop/common/stable/
livecheck.regex     ${name}-(\[0-9.\]+).tar