Ticket #29542: Portfile

File Portfile, 6.1 KB (added by humem (humem), 13 years ago)
Line 
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
# $Id$

PortSystem          1.0

name                hadoop
version             0.20.203.0

categories          java science
platforms           darwin
maintainers         gmail.com:hiroshi.umemoto openmaintainer
license             Apache-2.0

description         Open-source software for reliable, scalable, \
                    distributed computing

long_description    Hadoop is a distributed computing platform written in \
                    Java. It incorporates features similar to those of the \
                    Google File System and of MapReduce.

homepage            http://hadoop.apache.org/
master_sites        apache:hadoop/common/stable

# Upstream names the stable tarball with a trailing "rc1".
distfiles           ${name}-${version}rc1${extract.suffix}

checksums           sha1    18684bf8b8f72ef9a00a0034ad7167d6b16ce7c8 \
                    rmd160  79bbcc8ad71f01d8179c2e6ce2ae60b371c4ed5d

patchfiles          patch-hadoop-env.sh.diff

# Prebuilt Java distribution: nothing to configure or compile.
use_configure       no
supported_archs     noarch

build {}
36
# Unprivileged account that owns the Hadoop working directories.
set hadoopuser      hadoop

# Apple JDK location; Hadoop 0.20.x requires Java 1.6.
set java_home       /System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home

# Abort the port early when the required JDK is absent.
if {![file exists ${java_home}]} {
    ui_error "Java 1.6 is required, but not located at ${java_home}"
    return -code error
}

# Hadoop home and conf directories (under the ports prefix).
set hadoop_basedir  ${prefix}/share/java
set hadoop_home     ${hadoop_basedir}/${distname}
set hadoop_conf_dir ${hadoop_home}/conf

# Runtime working directories: state, logs, and pid files.
set hadoop_var_dir  ${prefix}/var/${name}
set hadoop_log_dir  ${hadoop_var_dir}/log
set hadoop_pid_dir  ${hadoop_var_dir}/run
56
destroot {
    # Copy the unpacked distribution into the Hadoop home directory.
    # Fix: the original read "xinstall -m 755 -${destroot}${hadoop_basedir}",
    # dropping the -d flag, so the base directory was never created and
    # xinstall was handed a bogus "-/..." option.
    xinstall -m 755 -d ${destroot}${hadoop_basedir}
    copy ${worksrcpath} ${destroot}${hadoop_basedir}

    # Patch for Mahout 0.4 to suppress warnings (missing WEB-INF in the
    # secondary webapp).
    xinstall -m 755 -d \
        ${destroot}${hadoop_home}/webapps/secondary/WEB-INF

    # Install the wrapper script and substitute its placeholders.
    set hadoop_bin ${destroot}${prefix}/bin/hadoop-bin
    xinstall -m 755 ${filespath}/hadoop-bin      ${hadoop_bin}
    reinplace "s|@hadoop_home@|${hadoop_home}|g" ${hadoop_bin}
    reinplace "s|@java_home@|${java_home}|g"     ${hadoop_bin}
    reinplace "s|@hadoopuser@|${hadoopuser}|g"   ${hadoop_bin}

    # Fill in the placeholders patched into conf/hadoop-env.sh.
    set env_sh ${destroot}${hadoop_conf_dir}/hadoop-env.sh
    reinplace "s|@java_home@|${java_home}|g"           ${env_sh}
    reinplace "s|@hadoop_log_dir@|${hadoop_log_dir}|g" ${env_sh}
    reinplace "s|@hadoop_pid_dir@|${hadoop_pid_dir}|g" ${env_sh}

    # Create the Hadoop group first, then the user with that group's
    # gid so the account's primary group matches.
    addgroup ${hadoopuser}
    set gid [existsgroup ${hadoopuser}]
    adduser ${hadoopuser} \
        gid=${gid} \
        realname=Hadoop\ Server \
        home=${hadoop_var_dir} \
        shell=/bin/bash

    # Create the working directories owned by the Hadoop user, and keep
    # them even though they are empty at activation time.
    xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
    destroot.keepdirs-append \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}

    ui_msg "********************************************************"
    ui_msg "* To revert the system after uninstalling the port:"
    ui_msg "* 1) Delete Hadoop working directory:"
    ui_msg "*  $ sudo rm -rf ${hadoop_var_dir}"
    ui_msg "* 2) Delete Hadoop user and group:"
    ui_msg "*  $ sudo dscl . -delete /Users/${hadoopuser}"
    ui_msg "*  $ sudo dscl . -delete /Groups/${hadoopuser}"
    ui_msg "********************************************************"
}
107
# Run in pseudo-distributed mode unless the user opts out.
default_variants    +pseudo

# Scratch directory substituted as hadoop.tmp.dir by the pseudo variant.
set hadoop_tmp_dir  ${hadoop_var_dir}/cache
111
variant pseudo description {Run on a single-node in a pseudo-distributed mode} {
    patchfiles-append  patch-conf.diff

    post-destroot {
        # Keep the stock conf as conf.local and point conf at the
        # pseudo-distributed configuration via a symlink.
        copy ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.pseudo
        move ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.local
        system "cd ${destroot}${hadoop_home} && ln -s conf.pseudo conf"

        # Size the task slots from the number of CPUs (cores).
        regexp {\d+} [exec sysctl hw.ncpu] ncpu
        # Braced expr: evaluated once, no second substitution pass.
        set tasks_max [expr {$ncpu + 2}]

        # Fill in placeholders in the pseudo-distributed config files.
        # (Added the g flag to match every other reinplace in this port.)
        reinplace "s|@tasks_max@|${tasks_max}|g" \
            ${destroot}${hadoop_conf_dir}.pseudo/mapred-site.xml
        reinplace "s|@hadoop_tmp_dir@|${hadoop_tmp_dir}|g" \
            ${destroot}${hadoop_conf_dir}.pseudo/core-site.xml

        # Create hadoop.tmp.dir owned by the Hadoop user and keep it.
        xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
            ${destroot}${hadoop_tmp_dir}
        destroot.keepdirs-append \
            ${destroot}${hadoop_tmp_dir}
    }

    post-activate {
        # Generate a passphraseless key pair once, so the daemons can
        # ssh to localhost as the Hadoop user without prompting.
        set ssh_dir ${hadoop_var_dir}/.ssh
        if {![file exists ${ssh_dir}]} {
            xinstall -m 700 -o ${hadoopuser} -g ${hadoopuser} -d ${ssh_dir}
            system "sudo -u ${hadoopuser} ssh-keygen -t rsa -P '' -f ${ssh_dir}/id_rsa"
            xinstall -m 644 -o ${hadoopuser} -g ${hadoopuser} \
                ${ssh_dir}/id_rsa.pub \
                ${ssh_dir}/authorized_keys
        }

        ui_msg "********************************************************"
        ui_msg "* To run on a single-node in a pseudo-distributed mode:"
        ui_msg "* 1) Turn on Remote Login (sshd):"
        ui_msg "*  check 'System Preferences > Sharing > Remote Login'"
        ui_msg "* 2) Format a new distributed-filesystem:"
        ui_msg "*  $ hadoop-bin hadoop namenode -format"
        ui_msg "* 3) Start the hadoop daemons:"
        ui_msg "*  $ hadoop-bin start-all.sh"
        ui_msg "* 4) Perform operations you like. To see examples:"
        ui_msg "*  $ open file://${hadoop_home}/docs/single_node_setup.html"
        ui_msg "* 5) When you're done, stop the daemons with:"
        ui_msg "*  $ hadoop-bin stop-all.sh"
        ui_msg "********************************************************"
    }
}
164
# Watch the Apache stable directory for newer hadoop tarballs.
livecheck.type      regex
livecheck.url       http://www.apache.org/dist/hadoop/common/stable/
livecheck.regex     "${name}-(\[0-9.\]+)"