Ticket #61395: Portfile.2

File Portfile.2, 5.6 KB (added by josh-seidel-db, 4 years ago)

java/apache-spark3.0/Portfile

Line 
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
PortSystem          1.0
PortGroup           java 1.0
PortGroup           select 1.0

name                apache-spark3.0
version             3.0.1
revision            0
categories          java parallel devel lang databases
maintainers         nomaintainer
description         Engine for large-scale data processing
long_description \
    Apache Spark is a lightning-fast unified analytics engine for big data and machine \
    learning. It was originally developed at UC Berkeley in 2009. Apache Spark is a fast \
    and general-purpose cluster computing system. It provides high-level APIs in Java, \
    Scala and Python, and an optimized engine that supports general execution graphs. \
    It also supports a rich set of higher-level tools including Spark SQL for SQL and \
    structured data processing, MLlib for machine learning, GraphX for graph processing, and Spark Streaming.
homepage            https://spark.apache.org/
platforms           darwin
# Upstream ships a prebuilt, architecture-independent binary distribution.
supported_archs     noarch
license             Apache-2
use_xcode           no
24
# Mirror list for the binary distribution; archive.apache.org is kept last
# as the permanent fallback once a release rotates off the mirrors.
master_sites        http://apache.mirrors.hoobly.com/spark/spark-${version}/ \
                    http://apache.mirrors.pair.com/spark/spark-${version}/ \
                    http://apache.spinellicreations.com/spark/spark-${version}/ \
                    http://mirror.cc.columbia.edu/pub/software/apache/spark/spark-${version}/ \
                    http://mirror.cogentco.com/pub/apache/spark/spark-${version}/ \
                    http://mirror.metrocast.net/apache/spark/spark-${version}/ \
                    http://mirrors.advancedhosters.com/apache/spark/spark-${version}/ \
                    http://mirrors.ibiblio.org/apache/spark/spark-${version}/ \
                    http://www.gtlib.gatech.edu/pub/apache/spark/spark-${version}/ \
                    http://www.trieuvan.com/apache/spark/spark-${version}/ \
                    https://apache.claz.org/spark/spark-${version}/ \
                    https://apache.cs.utah.edu/spark/spark-${version}/ \
                    https://apache.mirrors.lucidnetworks.net/spark/spark-${version}/ \
                    https://apache.osuosl.org/spark/spark-${version}/ \
                    https://ftp.wayne.edu/apache/spark/spark-${version}/ \
                    https://mirror.olnevhost.net/pub/apache/spark/spark-${version}/ \
                    https://mirrors.gigenet.com/apache/spark/spark-${version}/ \
                    https://mirrors.koehn.com/apache/spark/spark-${version}/ \
                    https://mirrors.ocf.berkeley.edu/apache/spark/spark-${version}/ \
                    https://mirrors.sonic.net/apache/spark/spark-${version}/ \
                    https://us.mirrors.quenda.co/apache/spark/spark-${version}/ \
                    https://archive.apache.org/dist/spark/spark-${version}/
# The distfile is the prebuilt Hadoop 3.2 bundle, not a source tarball.
distname            spark-${version}-bin-hadoop3.2
distfiles           ${distname}.tgz
extract.suffix      .tgz
checksums           md5    31e019e35e75a4c55c7efa4464641bf1 \
                    sha1   6ed7e4e1ba71e4d1ea2d113c4df85acdb8ec124c \
                    rmd160 3ca0876dbafd66b6948333f3621b70df58bf4f80 \
                    sha256 e2d05efa1c657dd5180628a83ea36c97c00f972b4aee935b7affa2e1058b0279 \
                    size   224062525
55
# Binary distribution: nothing to configure.
use_configure       no

# Require java version
java.version        11+
# JDK port to install if required java not found
java.fallback       openjdk11

depends_run         port:sbt \
                    port:scala2.13 \
                    port:python38

# extract.suffix is already declared alongside distfiles above;
# the duplicate declaration has been removed.
extract.mkdir       yes
67
if {$subport == $name} {
    depends_lib         port:apache-spark_select
    select.group        apache-spark
    select.file         ${filespath}/${name}

    # Prebuilt distribution: no build step.
    build {}

    destroot {
        set sparkdir ${prefix}/share/${name}

        # Stage everything under ${destroot}; writing to the live ${prefix}
        # during destroot is a violation (the original created ${sparkdir}
        # directly in the prefix).  Ensure the parent exists, then let
        # `file copy` create ${destroot}${sparkdir} as a copy of the
        # extracted tree, so bin/, jars/, etc. land directly inside it.
        xinstall -m 755 -d ${destroot}${prefix}/share
        file copy ${worksrcpath}/${distname} ${destroot}${sparkdir}

        # Re-install the launcher scripts to guarantee executable modes.
        foreach cmd {
            beeline
            docker-image-tool.sh
            find-spark-home
            load-spark-env.sh
            pyspark
            spark-class
            spark-shell
            spark-sql
            spark-submit
            sparkR
        } {
            xinstall -m 755 ${worksrcpath}/${distname}/bin/${cmd} \
                ${destroot}${sparkdir}/bin
        }
    }
}
105
notes "
To use ${name}, export SPARK_HOME and choose the PySpark Python interpreter\
in your shell environment. For example, add the following lines to your\
.profile (or export them in your current shell session):

    export SPARK_HOME=${prefix}/share/${name}
    export PYSPARK_PYTHON=python3
"