source: trunk/dports/java/hadoop/Portfile @ 94070

Last change on this file since 94070 was 94067, checked in by hum@…, 7 years ago

hadoop: prohibit installation on Mac OS X 10.5 or earlier; see #34665.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 6.7 KB
Line 
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
# $Id: Portfile 94067 2012-06-08 13:36:10Z hum@macports.org $

PortSystem          1.0

name                hadoop
version             1.0.3
revision            1
categories          java devel science
maintainers         hum openmaintainer

description         Open-source software for reliable, scalable, distributed computing
long_description    Hadoop is a distributed computing platform written in Java. \
                    It incorporates features similar to those of the Google File System \
                    and of MapReduce.

homepage            http://hadoop.apache.org/
platforms           darwin
supported_archs     noarch
license             Apache-2.0

# Fetch the prebuilt binary tarball from the Apache mirrors.
master_sites        apache:hadoop/common/${distname}
distfiles           ${distname}-bin${extract.suffix}
checksums           rmd160  a98ca552a8a6dfcc6455b53cfd7988d562c265d1 \
                    sha256  a33d07ece0e9b7e1ecf32670cac28444ba8f56b3360548d96b56964facc2d1ef

patchfiles          patch-hadoop-env.sh.diff

# The distribution is already built; skip configure and build entirely.
use_configure       no

build {}
32
# Dedicated runtime user/group for the Hadoop daemons.
set hadoopuser      hadoop

# Path of the Apple-supplied Java SE 6 installation.
set java_home       /System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home

pre-configure {
    # Fail early if the required Java 1.6 home directory is absent.
    if {![file exists ${java_home}]} {
        ui_error "Java 1.6 is required, but not located at ${java_home}"
        return -code error "Java 1.6 missing"
    }

    # This port works on Mac OS X 10.6 or later, because 'sudo option -E'
    # is not available on Mac OS X 10.5 or earlier. See #34665.
    # Darwin 9 corresponds to Mac OS X 10.5 (Leopard).
    set os_major_version [lindex [split ${os.version} .] 0]
    if {${os_major_version} <= 9} {
        ui_error "This port works on Mac OS X 10.6 (Snow Leopard) or later"
        return -code error "Mac OS X 10.6 (Snow Leopard) or later is required"
    }
}
51
# Hadoop home and conf directories (under the shared Java tree).
set hadoop_basedir  ${prefix}/share/java
set hadoop_home     ${hadoop_basedir}/${distname}
set hadoop_conf_dir ${hadoop_home}/conf

# Working directories; created in destroot and owned by ${hadoopuser}.
set hadoop_var_dir  ${prefix}/var/${name}
set hadoop_log_dir  ${hadoop_var_dir}/log
set hadoop_pid_dir  ${hadoop_var_dir}/run
61
destroot {
    # Copy the distribution to the Hadoop home directory.
    # Fix: the '-d' flag was missing, so xinstall was given no directory
    # to create; '-d' makes it create ${destroot}${hadoop_basedir}.
    xinstall -m 755 -d ${destroot}${hadoop_basedir}
    copy ${worksrcpath} ${destroot}${hadoop_basedir}

    # Patch for Mahout 0.4 to suppress warnings.
    xinstall -m 755 -d \
        ${destroot}${hadoop_home}/webapps/secondary/WEB-INF

    # Install an extra launcher script for this port, filling in the
    # @placeholders@ it contains.
    set hadoop_bin ${destroot}${prefix}/bin/hadoop-bin
    xinstall -m 755 ${filespath}/hadoop-bin      ${hadoop_bin}
    reinplace "s|@hadoop_home@|${hadoop_home}|g" ${hadoop_bin}
    reinplace "s|@java_home@|${java_home}|g"     ${hadoop_bin}
    reinplace "s|@hadoopuser@|${hadoopuser}|g"   ${hadoop_bin}

    # Setup 'hadoop-env.sh' in conf (placeholders come from the
    # patch-hadoop-env.sh.diff patchfile).
    set env_sh ${destroot}${hadoop_conf_dir}/hadoop-env.sh
    reinplace "s|@java_home@|${java_home}|g"           ${env_sh}
    reinplace "s|@hadoop_log_dir@|${hadoop_log_dir}|g" ${env_sh}
    reinplace "s|@hadoop_pid_dir@|${hadoop_pid_dir}|g" ${env_sh}

    # The hadoop user/group must exist before directories are chowned.
    add_hadoop_user_and_group

    # Create working directories owned by the hadoop user and keep them
    # even though they are empty at install time.
    xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
    destroot.keepdirs-append \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
}
96
pre-activate {
    # Ensure the hadoop user/group exist before files owned by them
    # are activated (destroot may not have run on this machine).
    add_hadoop_user_and_group
}
100
# Create the hadoop group and user if the group does not exist yet;
# a no-op otherwise. existsgroup returns a non-zero gid for an existing
# group, so it doubles as the gid lookup for adduser.
proc add_hadoop_user_and_group {} {
    global hadoopuser hadoop_var_dir
    if {[existsgroup ${hadoopuser}]} {
        return
    }
    addgroup ${hadoopuser}
    adduser ${hadoopuser} \
        gid=[existsgroup ${hadoopuser}] \
        realname=Hadoop\ Server \
        home=${hadoop_var_dir} \
        shell=/bin/bash
}
112
post-deactivate {
    # Tell the user how to remove what the port created outside of the
    # normal file set: the working directory and the hadoop user/group.
    foreach line [list \
        "********************************************************" \
        "* To revert the system after uninstalling the port:" \
        "* 1) Delete Hadoop working directory:" \
        "*  \$ sudo rm -rf ${hadoop_var_dir}" \
        "* 2) Delete Hadoop user and group:" \
        "*  \$ sudo dscl . -delete /Users/${hadoopuser}" \
        "*  \$ sudo dscl . -delete /Groups/${hadoopuser}" \
        "********************************************************"] {
        ui_msg ${line}
    }
}
123
# Pseudo-distributed single-node setup is the default.
default_variants    +pseudo

# Scratch directory substituted into core-site.xml by the pseudo variant.
set hadoop_tmp_dir  ${hadoop_var_dir}/cache
127
variant pseudo description {Run on a single-node in a pseudo-distributed mode} {
    patchfiles-append  patch-conf.diff

    post-destroot {
        # Keep both a local and a pseudo-distributed conf directory and
        # point the default 'conf' symlink at the pseudo one.
        copy ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.pseudo
        move ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.local
        ln -s conf.pseudo ${destroot}${hadoop_conf_dir}

        # Set the maximum number of tasks based on the number of the
        # CPUs (cores). The expr is braced: idiomatic Tcl, and avoids a
        # second round of substitution on the value of $ncpu.
        regexp {\d+} [exec sysctl hw.ncpu] ncpu
        set tasks_max [expr {$ncpu + 2}]

        # Fill in the placeholders of the patched configuration files.
        # ('g' flag added to the second reinplace for consistency with
        # every other substitution in this Portfile.)
        reinplace "s|@tasks_max@|${tasks_max}|g" \
            ${destroot}${hadoop_conf_dir}.pseudo/mapred-site.xml
        reinplace "s|@hadoop_tmp_dir@|${hadoop_tmp_dir}|g" \
            ${destroot}${hadoop_conf_dir}.pseudo/core-site.xml

        # Create the scratch directory, owned by the hadoop user.
        xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
            ${destroot}${hadoop_tmp_dir}
        destroot.keepdirs-append \
            ${destroot}${hadoop_tmp_dir}
    }

    post-activate {
        # Setup passphraseless ssh (generated only once) so the daemons
        # can reach localhost without prompting.
        set ssh_dir ${hadoop_var_dir}/.ssh
        if {![file exists ${ssh_dir}]} {
            xinstall -m 700 -o ${hadoopuser} -g ${hadoopuser} -d ${ssh_dir}
            system "sudo -u ${hadoopuser} ssh-keygen -t rsa -P '' -f ${ssh_dir}/id_rsa"
            xinstall -m 644 -o ${hadoopuser} -g ${hadoopuser} \
                ${ssh_dir}/id_rsa.pub \
                ${ssh_dir}/authorized_keys
        }

        ui_msg "********************************************************"
        ui_msg "* To run on a single-node in a pseudo-distributed mode:"
        ui_msg "* 1) Turn on Remote Login (sshd):"
        ui_msg "*  check 'System Preferences > Sharing > Remote Login'"
        ui_msg "* 2) Format a new distributed-filesystem:"
        ui_msg "*  $ hadoop-bin hadoop namenode -format"
        ui_msg "* 3) Start the hadoop daemons:"
        ui_msg "*  $ hadoop-bin start-all.sh"
        ui_msg "* 4) Perform operations you like. To see examples:"
        ui_msg "*  $ open file://${hadoop_home}/docs/single_node_setup.html"
        ui_msg "* 5) When you're done, stop the daemons with:"
        ui_msg "*  $ hadoop-bin stop-all.sh"
        ui_msg "********************************************************"
    }
}
180
# Watch the Apache distribution directory for new 1.x releases.
# The dot after '1' is escaped (\\. passes a literal backslash through
# Tcl to the regex engine) so it no longer matches any character.
livecheck.type      regex
livecheck.url       http://www.apache.org/dist/hadoop/common/
livecheck.regex     ${name}-(1\\.\[0-9.\]+)
Note: See TracBrowser for help on using the repository browser.