source: trunk/dports/java/hadoop/Portfile @ 87606

Last change on this file since 87606 was 87606, checked in by hum@…, 8 years ago

hadoop: add_hadoop_user_and_group in pre-active phase to fix binary installation.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 6.3 KB
Line 
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
# $Id: Portfile 87606 2011-11-28 14:42:42Z hum@macports.org $

PortSystem          1.0

name                hadoop
version             0.20.203.0
categories          java science
platforms           darwin
maintainers         hum openmaintainer
license             Apache-2.0

description         Open-source software for reliable, scalable, distributed computing

long_description    Hadoop is a distributed computing platform written in Java. \
                    It incorporates features similar to those of the Google File System \
                    and of MapReduce.

homepage            http://hadoop.apache.org/
master_sites        apache:hadoop/common/stable

# The upstream tarball carries an "rc1" suffix even for this release version.
distfiles           ${name}-${version}rc1${extract.suffix}
23
checksums           sha1    18684bf8b8f72ef9a00a0034ad7167d6b16ce7c8 \
                    rmd160  79bbcc8ad71f01d8179c2e6ce2ae60b371c4ed5d

# Presumably introduces the @java_home@/@hadoop_log_dir@/@hadoop_pid_dir@
# placeholders that destroot substitutes below — TODO confirm against the diff.
patchfiles          patch-hadoop-env.sh.diff

# Upstream ships a pre-built Java distribution: nothing to configure or build.
use_configure       no
supported_archs     noarch

build {}

# Dedicated user and group the Hadoop daemons run as.
set hadoopuser      hadoop

# Apple JDK 1.6 location required by this port (checked in pre-configure).
set java_home       /System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home

# Fail early, with a clear message, when the required Java runtime is absent.
pre-configure {
    if {![file exists ${java_home}]} {
        ui_error "Java 1.6 is required, but not located at ${java_home}"
        return -code error "Java 1.6 missing"
    }
}
44
# Hadoop home and conf directories (installed under the shared Java tree).
set hadoop_basedir  ${prefix}/share/java
set hadoop_home     ${hadoop_basedir}/${distname}
set hadoop_conf_dir ${hadoop_home}/conf

# Working directories for logs and pid files, owned by ${hadoopuser}.
set hadoop_var_dir  ${prefix}/var/${name}
set hadoop_log_dir  ${hadoop_var_dir}/log
set hadoop_pid_dir  ${hadoop_var_dir}/run
54
destroot {
    # Copy the distribution into the Hadoop home directory.
    # Bug fix: the '-d' flag was missing ("-${destroot}..."), so xinstall
    # parsed the target path as an option instead of creating the directory.
    xinstall -m 755 -d ${destroot}${hadoop_basedir}
    copy ${worksrcpath} ${destroot}${hadoop_basedir}

    # Patch for Mahout 0.4 to suppress warnings.
    xinstall -m 755 -d \
        ${destroot}${hadoop_home}/webapps/secondary/WEB-INF

    # Install the wrapper script for this port and fill in its placeholders.
    set hadoop_bin ${destroot}${prefix}/bin/hadoop-bin
    xinstall -m 755 ${filespath}/hadoop-bin      ${hadoop_bin}
    reinplace "s|@hadoop_home@|${hadoop_home}|g" ${hadoop_bin}
    reinplace "s|@java_home@|${java_home}|g"     ${hadoop_bin}
    reinplace "s|@hadoopuser@|${hadoopuser}|g"   ${hadoop_bin}

    # Substitute placeholders in conf/hadoop-env.sh (prepared by patchfiles).
    set env_sh ${destroot}${hadoop_conf_dir}/hadoop-env.sh
    reinplace "s|@java_home@|${java_home}|g"           ${env_sh}
    reinplace "s|@hadoop_log_dir@|${hadoop_log_dir}|g" ${env_sh}
    reinplace "s|@hadoop_pid_dir@|${hadoop_pid_dir}|g" ${env_sh}

    # The directories below are chowned to ${hadoopuser}, so the user and
    # group must already exist when destroot runs.
    add_hadoop_user_and_group

    # Create working directories and keep them registered even though
    # they are empty at install time.
    xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
    destroot.keepdirs-append \
        ${destroot}${hadoop_var_dir} \
        ${destroot}${hadoop_log_dir} \
        ${destroot}${hadoop_pid_dir}
}
89
# Ensure the hadoop user/group exist before activation as well: when
# installing from a binary archive, destroot never runs on this machine.
pre-activate {
    add_hadoop_user_and_group
}
93
# Create the hadoop group and user if the group does not exist yet.
# Idempotent: a second call is a no-op, so both destroot and pre-activate
# may invoke it safely.
proc add_hadoop_user_and_group {} {
    global hadoopuser hadoop_var_dir
    # Nothing to do when the group is already present.
    if {[existsgroup ${hadoopuser}]} {
        return
    }
    addgroup ${hadoopuser}
    # existsgroup returns the gid of the group just created.
    set gid [existsgroup ${hadoopuser}]
    adduser ${hadoopuser} gid=${gid} realname=Hadoop\ Server \
        home=${hadoop_var_dir} shell=/bin/bash
}
105
# Tell the user how to remove the artifacts this port creates outside the
# destroot (working directory, user, group) — MacPorts will not remove them.
post-deactivate {
    ui_msg "********************************************************"
    ui_msg "* To revert the system after uninstalling the port:"
    ui_msg "* 1) Delete Hadoop working directory:"
    ui_msg "*  $ sudo rm -rf ${hadoop_var_dir}"
    ui_msg "* 2) Delete Hadoop user and group:"
    ui_msg "*  $ sudo dscl . -delete /Users/${hadoopuser}"
    ui_msg "*  $ sudo dscl . -delete /Groups/${hadoopuser}"
    ui_msg "********************************************************"
}
116
# Configure a single-node pseudo-distributed setup by default.
default_variants    +pseudo

# Scratch directory; substituted into core-site.xml by the pseudo variant.
set hadoop_tmp_dir  ${hadoop_var_dir}/cache
120
variant pseudo description {Run on a single-node in a pseudo-distributed mode} {
    patchfiles-append  patch-conf.diff

    post-destroot {
        # Keep the stock configuration as conf.local and point conf at the
        # pseudo-distributed copy via a symlink.
        copy ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.pseudo
        move ${destroot}${hadoop_conf_dir} ${destroot}${hadoop_conf_dir}.local
        system "cd ${destroot}${hadoop_home} && ln -s conf.pseudo conf"

        # Set the maximum number of tasks based on the number of the CPUs (cores).
        regexp {\d+} [exec sysctl hw.ncpu] ncpu
        # Braced expr: avoids double substitution and lets Tcl byte-compile it.
        set tasks_max [expr {$ncpu + 2}]

        # Fill in the placeholders in the pseudo configuration files
        # ('g' flag added for consistency with the other reinplace calls).
        reinplace "s|@tasks_max@|${tasks_max}|g" \
            ${destroot}${hadoop_conf_dir}.pseudo/mapred-site.xml
        reinplace "s|@hadoop_tmp_dir@|${hadoop_tmp_dir}|g" \
            ${destroot}${hadoop_conf_dir}.pseudo/core-site.xml

        # Create the scratch directory and keep it although it is empty.
        xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
            ${destroot}${hadoop_tmp_dir}
        destroot.keepdirs-append \
            ${destroot}${hadoop_tmp_dir}
    }

    post-activate {
        # Set up passphraseless ssh so the daemons can start as ${hadoopuser}.
        set ssh_dir ${hadoop_var_dir}/.ssh
        if {![file exists ${ssh_dir}]} {
            xinstall -m 700 -o ${hadoopuser} -g ${hadoopuser} -d ${ssh_dir}
            system "sudo -u ${hadoopuser} ssh-keygen -t rsa -P '' -f ${ssh_dir}/id_rsa"
            xinstall -m 644 -o ${hadoopuser} -g ${hadoopuser} \
                ${ssh_dir}/id_rsa.pub \
                ${ssh_dir}/authorized_keys
        }

        ui_msg "********************************************************"
        ui_msg "* To run on a single-node in a pseudo-distributed mode:"
        ui_msg "* 1) Turn on Remote Login (sshd):"
        ui_msg "*  check 'System Preferences > Sharing > Remote Login'"
        ui_msg "* 2) Format a new distributed-filesystem:"
        ui_msg "*  $ hadoop-bin hadoop namenode -format"
        ui_msg "* 3) Start the hadoop daemons:"
        ui_msg "*  $ hadoop-bin start-all.sh"
        ui_msg "* 4) Perform operations you like. To see examples:"
        ui_msg "*  $ open file://${hadoop_home}/docs/single_node_setup.html"
        ui_msg "* 5) When you're done, stop the daemons with:"
        ui_msg "*  $ hadoop-bin stop-all.sh"
        ui_msg "********************************************************"
    }
}
173
# Watch the Apache mirror's "stable" directory for new version numbers.
livecheck.type      regex
livecheck.url       http://www.eu.apache.org/dist/hadoop/common/stable/
livecheck.regex     ${name}-(\[0-9.\]+)
Note: See TracBrowser for help on using the repository browser.