source: trunk/dports/java/hadoop/Portfile

Last change on this file was 117032, checked in by hum@…, 3 years ago:
hadoop: set JAVA_HOME in the build phase; see #41332.

# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
# $Id: Portfile 117032 2014-02-13 15:48:18Z ryandesign@macports.org $

PortSystem          1.0

name                hadoop
version             1.2.1
categories          java devel science
maintainers         hum openmaintainer

description         Open-source software for reliable, scalable, distributed computing
long_description    Hadoop is a distributed computing platform written in Java. \
                    It incorporates features similar to those of the Google File System \
                    and of MapReduce.

homepage            http://hadoop.apache.org/
platforms           darwin
license             Apache-2

master_sites        apache:hadoop/common/${distname}
checksums           rmd160  6330ded6043a1c8dd8597910e77f3dedf249a807 \
                    sha256  94a1181771f173bdb55c8f901722825866396091f0516bdd12b34dc3de1706a1

patchfiles          patch-conf-hadoop-env.sh.diff \
                    patch-src-native.diff \
                    patch-src-c++.diff

depends_build       bin:ant:apache-ant \
                    port:autoconf port:automake port:libtool
depends_lib         port:zlib \
                    port:snappy

set java_home       /System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home

pre-fetch {
    # This port requires Mac OS X 10.6 or later, because sudo's -E option
    # is not available on Mac OS X 10.5 or earlier. See #34665.
    if {${os.platform} eq "darwin" && ${os.major} <= 9} {
        ui_error "This port works on Mac OS X 10.6 (Snow Leopard) or later"
        return -code error "Mac OS X 10.6 (Snow Leopard) or later is required"
    }
    if {![file exists ${java_home}]} {
        ui_error "Java 1.6 is required, but not located at ${java_home}"
        return -code error "Java 1.6 missing"
    }
}
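
# For reference, Apple's java_home utility can locate this JDK from the
# command line; it prints the matching JAVA_HOME path, or fails if no
# such JDK is installed:
#   /usr/libexec/java_home -v 1.6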

# Fix a file permission to build c++-libhdfs.
post-extract {
    file attributes ${worksrcpath}/src/c++/libhdfs/install-sh -permissions 0755
}

# Declared empty: universal arch flags are added via get_canonical_archflags below.
variant universal {}

use_configure       no

set java_include    -I/System/Library/Frameworks/JavaVM.framework/Headers
set cflags          "${configure.cflags} [get_canonical_archflags]"
set cxxflags        "${configure.cxxflags} [get_canonical_archflags cxx]"
set ldflags         "${configure.ldflags} [get_canonical_archflags] -framework JavaVM"
set cppflags        "${configure.cppflags} ${java_include}"
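
# For illustration only: with MacPorts defaults on a 64-bit Intel machine,
# cflags typically expands to something like "-Os -arch x86_64"; the exact
# values depend on the configure.* settings and any selected variants.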

# Substitute toolchain placeholders in build.xml to build the native
# libraries and c++-libhdfs.
post-patch {
    set libs  "-ldl -lz -lsnappy"
    reinplace "s|@cc@|${configure.cc}|g"      ${worksrcpath}/build.xml
    reinplace "s|@cflags@|${cflags}|g"        ${worksrcpath}/build.xml
    reinplace "s|@cxx@|${configure.cxx}|g"    ${worksrcpath}/build.xml
    reinplace "s|@cxxflags@|${cxxflags}|g"    ${worksrcpath}/build.xml
    reinplace "s|@ldflags@|${ldflags}|g"      ${worksrcpath}/build.xml
    reinplace "s|@libs@|${libs}|g"            ${worksrcpath}/build.xml
    reinplace "s|@cppflags@|${cppflags}|g"    ${worksrcpath}/build.xml
    reinplace "s|@args@|--prefix=${prefix}|g" ${worksrcpath}/build.xml
}
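
# As an illustrative example (the actual value comes from ${configure.cc}):
# with a stock Xcode toolchain, the @cc@ placeholder in build.xml would be
# rewritten to a compiler path such as /usr/bin/clang or /usr/bin/gcc.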

# Build native and c++-libhdfs.
build.cmd           ant
build.args          -Dcompile.native=true \
                    -Dsnappy.prefix=${prefix} \
                    -Dcompile.c++=true \
                    -Dlibhdfs=true
build.target        compile-native compile-c++-libhdfs
build.env           JAVA_HOME=${java_home}
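
# The above is roughly equivalent to running the following in ${worksrcpath}:
#   JAVA_HOME=${java_home} ant -Dcompile.native=true -Dsnappy.prefix=${prefix} \
#       -Dcompile.c++=true -Dlibhdfs=true compile-native compile-c++-libhdfs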

# Fix the install_name of the built dylibs.
pre-destroot {
    foreach file [glob ${worksrcpath}/build/native/**/lib/*.dylib \
                       ${worksrcpath}/build/c++/**/lib/*.dylib] {
        if {[file isfile ${file}]} {
            set libname [file tail ${file}]
            system "install_name_tool -id ${prefix}/lib/${libname} ${file}"
        }
    }
}
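
# The rewritten install_name can be verified with otool; e.g. for libhdfs
# (library name illustrative, path abbreviated):
#   otool -D /path/to/libhdfs.dylib
# which should report ${prefix}/lib/libhdfs.dylib after this phase.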

# Hadoop home and conf directories.
set hadoop_basedir  ${prefix}/share/java
set hadoop_home     ${hadoop_basedir}/${distname}
set hadoop_conf_dir ${hadoop_home}/conf

# Documentation directory.
set hadoop_doc_dir  ${prefix}/share/doc/${distname}

# Working directories.
set hadoop_var_dir  ${prefix}/var/${name}
set hadoop_log_dir  ${hadoop_var_dir}/log
set hadoop_pid_dir  ${hadoop_var_dir}/run

set hadoopuser      hadoop

add_users ${hadoopuser} \
    group=${hadoopuser} \
    realname=Hadoop\ Server \
    home=${hadoop_var_dir} \
    shell=/bin/bash
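
# The created account can be inspected with Directory Services, e.g.:
#   dscl . -read /Users/hadoop NFSHomeDirectory UserShell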
116
variant fusedfs description {Add Fuse-DFS} {
    depends_lib-append port:fuse4x

    patchfiles-append  patch-src-contrib-fusedfs.diff

    # libhdfs.dylib must be built before configuring fuse-dfs.
    post-build {
        set libs  "-lfuse -lhdfs"
        # "$@" is replaced with "${hadoop.root}/build/c++/${build.platform}/lib".
        # See files/patch-src-contrib-fusedfs.diff.
        set args  "--prefix=${prefix} \
                   CC=${configure.cc} \
                   CFLAGS=\"${cflags}\" \
                   LDFLAGS=\"${ldflags} -L$@\" \
                   LIBS=\"${libs}\" \
                   CPPFLAGS=\"${cppflags}\""
        set sh ${worksrcpath}/src/contrib/fuse-dfs/bootstrap.sh
        reinplace "s|\./configure|\./configure ${args}|" ${sh}
        # Build fusedfs.
        system -W ${worksrcpath} "ant compile-contrib -Dlibhdfs=1 -Dfusedfs=1"
    }

    post-destroot {
        xinstall -m 755 \
            ${worksrcpath}/build/contrib/fuse-dfs/fuse_dfs \
            ${destroot}${prefix}/bin
        # Fix the install_name of libhdfs referenced by fuse_dfs.
        set bin ${destroot}${prefix}/bin/fuse_dfs
        regexp {(\S+\/libhdfs\S+dylib)} [exec otool -L ${bin}] path
        system "install_name_tool -change ${path} ${prefix}/lib/libhdfs.dylib ${bin}"
        # Install fuse_dfs_wrapper.sh.
        xinstall -m 755 ${filespath}/fuse_dfs_wrapper.sh ${destroot}${hadoop_home}/bin
        set sh ${destroot}${hadoop_home}/bin/fuse_dfs_wrapper.sh
        reinplace "s|@hadoop_home@|${hadoop_home}|" ${sh}
        reinplace "s|@java_home@|${java_home}|"     ${sh}
        reinplace "s|@prefix@|${prefix}|"           ${sh}
    }
}
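
# Hypothetical usage sketch for the wrapper installed above (the dfs URI and
# mount point are placeholders; consult the fuse-dfs documentation for the
# exact invocation):
#   fuse_dfs_wrapper.sh dfs://localhost:9000 /path/to/mountpoint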

destroot {
    # Copy the distribution to the Hadoop home directory.
    xinstall -m 755 -d ${destroot}${hadoop_home}
    foreach dir {bin conf contrib lib libexec sbin share webapps} {
        copy ${worksrcpath}/${dir} ${destroot}${hadoop_home}
    }
    foreach file [glob ${worksrcpath}/hadoop-*] {
        xinstall -m 644 ${file} ${destroot}${hadoop_home}
    }
    delete ${destroot}${hadoop_home}/lib/native
    delete ${destroot}${hadoop_home}/libexec/jsvc.amd64

    # Patch for Mahout 0.4 to suppress warnings.
    set webinf ${destroot}${hadoop_home}/webapps/secondary/WEB-INF
    xinstall -m 755 -d ${webinf}
    destroot.keepdirs-append ${webinf}

    # Install the native and c++ libraries.
    foreach file [glob ${worksrcpath}/build/native/**/lib/* \
                       ${worksrcpath}/build/c++/**/lib/*] {
        xinstall -m 644 ${file} ${destroot}${prefix}/lib
    }

    # Copy extra documents.
    xinstall -m 755 -d ${destroot}${hadoop_doc_dir}
    copy ${worksrcpath}/docs ${destroot}${hadoop_doc_dir}
    foreach file [glob ${worksrcpath}/*.txt] {
        xinstall -m 644 ${file} ${destroot}${hadoop_doc_dir}
    }

    # Install an extra script for this port.
    set hadoop_bin ${destroot}${prefix}/bin/hadoop-bin
    xinstall -m 755 ${filespath}/hadoop-bin      ${hadoop_bin}
    reinplace "s|@hadoop_home@|${hadoop_home}|g" ${hadoop_bin}
    reinplace "s|@java_home@|${java_home}|g"     ${hadoop_bin}
    reinplace "s|@hadoopuser@|${hadoopuser}|g"   ${hadoop_bin}
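
    # hadoop-bin runs Hadoop commands as the hadoop user; for example
    # (sketch, after the port is activated):
    #   hadoop-bin hadoop version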

    # Set up 'hadoop-env.sh' in conf.
    set env_sh ${destroot}${hadoop_conf_dir}/hadoop-env.sh
    reinplace "s|@java_home@|${java_home}|g"           ${env_sh}
    reinplace "s|@hadoop_log_dir@|${hadoop_log_dir}|g" ${env_sh}
    reinplace "s|@hadoop_pid_dir@|${hadoop_pid_dir}|g" ${env_sh}

    # Create working directories.
    xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
    destroot.keepdirs-append \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
}

post-deactivate {
    ui_msg "********************************************************"
    ui_msg "* To revert the system after uninstalling the port:"
    ui_msg "* 1) Delete the Hadoop working directory:"
    ui_msg "*  $ sudo rm -rf ${hadoop_var_dir}"
    ui_msg "* 2) Delete the Hadoop user and group:"
    ui_msg "*  $ sudo dscl . -delete /Users/${hadoopuser}"
    ui_msg "*  $ sudo dscl . -delete /Groups/${hadoopuser}"
    ui_msg "********************************************************"
}

default_variants    +pseudo

set hadoop_tmp_dir  ${hadoop_var_dir}/cache

variant pseudo description {Run on a single node in pseudo-distributed mode} {
    patchfiles-append  patch-pseudo.diff

    post-destroot {
        # Set up the conf directory for pseudo-distributed mode.
        copy ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.pseudo
        move ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.local
        ln -s conf.pseudo ${destroot}${hadoop_conf_dir}

        # Set the maximum number of tasks based on the number of CPU cores.
        regexp {\d+} [exec sysctl hw.ncpu] ncpu
        set tasks_max [expr {$ncpu + 2}]
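        # For example, on a machine with 4 cores this yields tasks_max = 6.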

        # Set up the configuration files.
        reinplace "s|@tasks_max@|${tasks_max}|g" \
            ${destroot}${hadoop_conf_dir}.pseudo/mapred-site.xml
        reinplace "s|@hadoop_tmp_dir@|${hadoop_tmp_dir}|" \
            ${destroot}${hadoop_conf_dir}.pseudo/core-site.xml

        # Create a temporary directory.
        xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
            ${destroot}${hadoop_tmp_dir}
        destroot.keepdirs-append \
            ${destroot}${hadoop_tmp_dir}
    }

    post-activate {
        # Set up passphraseless ssh for the hadoop user.
        set ssh_dir ${hadoop_var_dir}/.ssh
        if {![file exists ${ssh_dir}]} {
            xinstall -m 700 -o ${hadoopuser} -g ${hadoopuser} -d ${ssh_dir}
            system "sudo -u ${hadoopuser} ssh-keygen -t rsa -P '' -f ${ssh_dir}/id_rsa"
            xinstall -m 644 -o ${hadoopuser} -g ${hadoopuser} \
                ${ssh_dir}/id_rsa.pub \
                ${ssh_dir}/authorized_keys
        }
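
        # Passphraseless login can then be verified with something like:
        #   sudo -u ${hadoopuser} ssh localhost true
        # (requires Remote Login to be enabled; see the message below).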

        ui_msg "********************************************************"
        ui_msg "* To run on a single node in pseudo-distributed mode:"
        ui_msg "* 1) Turn on Remote Login (sshd):"
        ui_msg "*  check 'System Preferences > Sharing > Remote Login'"
        ui_msg "* 2) Format a new distributed filesystem:"
        ui_msg "*  $ hadoop-bin hadoop namenode -format"
        ui_msg "* 3) Start the hadoop daemons:"
        ui_msg "*  $ hadoop-bin start-all.sh"
        ui_msg "* 4) Perform the operations you like. To see examples:"
        ui_msg "*  $ open file://${hadoop_home}/docs/single_node_setup.html"
        ui_msg "* 5) When you're done, stop the daemons with:"
        ui_msg "*  $ hadoop-bin stop-all.sh"
        ui_msg "********************************************************"
    }
}

livecheck.type      regex
livecheck.url       http://www.apache.org/dist/hadoop/common/stable1/
livecheck.regex     ${name}-(\[0-9.\]+).tar
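
# The regex matches release tarballs such as hadoop-1.2.1.tar.gz, capturing
# the version string ("1.2.1") for comparison against ${version}.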