# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
PortSystem          1.0
PortGroup           java 1.0
PortGroup           select 1.0

name                apache-spark2.4
version             2.4.7
revision            0
categories          java parallel devel lang databases
maintainers         nomaintainer
description         Engine for large-scale data processing
long_description    \
    Apache Spark is a fast, general-purpose cluster computing system and unified \
    analytics engine for big data and machine learning, originally developed at \
    UC Berkeley in 2009. It provides high-level APIs in Java, Scala and Python, \
    and an optimized engine that supports general execution graphs. It also \
    supports a rich set of higher-level tools including Spark SQL for SQL and \
    structured data processing, MLlib for machine learning, GraphX for graph \
    processing, and Spark Streaming.
homepage            https://spark.apache.org/
platforms           darwin
supported_archs     noarch
license             Apache-2
use_xcode           no

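# Download mirrors for Spark releases; archive.apache.org is the permanent Apache
# archive and still serves releases that have rotated off the other mirrors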
master_sites        http://apache.mirrors.hoobly.com/spark/spark-${version}/ \
                    http://apache.mirrors.pair.com/spark/spark-${version}/ \
                    http://apache.spinellicreations.com/spark/spark-${version}/ \
                    http://mirror.cc.columbia.edu/pub/software/apache/spark/spark-${version}/ \
                    http://mirror.cogentco.com/pub/apache/spark/spark-${version}/ \
                    http://mirror.metrocast.net/apache/spark/spark-${version}/ \
                    http://mirrors.advancedhosters.com/apache/spark/spark-${version}/ \
                    http://mirrors.ibiblio.org/apache/spark/spark-${version}/ \
                    http://www.gtlib.gatech.edu/pub/apache/spark/spark-${version}/ \
                    http://www.trieuvan.com/apache/spark/spark-${version}/ \
                    https://apache.claz.org/spark/spark-${version}/ \
                    https://apache.cs.utah.edu/spark/spark-${version}/ \
                    https://apache.mirrors.lucidnetworks.net/spark/spark-${version}/ \
                    https://apache.osuosl.org/spark/spark-${version}/ \
                    https://ftp.wayne.edu/apache/spark/spark-${version}/ \
                    https://mirror.olnevhost.net/pub/apache/spark/spark-${version}/ \
                    https://mirrors.gigenet.com/apache/spark/spark-${version}/ \
                    https://mirrors.koehn.com/apache/spark/spark-${version}/ \
                    https://mirrors.ocf.berkeley.edu/apache/spark/spark-${version}/ \
                    https://mirrors.sonic.net/apache/spark/spark-${version}/ \
                    https://us.mirrors.quenda.co/apache/spark/spark-${version}/ \
                    https://archive.apache.org/dist/spark/spark-${version}/
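# Pre-built binary distribution of Spark, packaged against Hadoop 2.7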
distname            spark-${version}-bin-hadoop2.7
distfiles           ${distname}.tgz
extract.suffix      .tgz
checksums           md5     76afb611aaac5721c9fa91fdc9defa99 \
                    sha1    75b755b8fe55404593dbd1f5069c605ee89b6ab6 \
                    rmd160  a611dafda70c099f8fc57c1df7db73ec9555f419 \
                    sha256  13098490936c9931beda3acc4c30cdc5ca707acd1415eebde1030b11903934fe \
                    size    233333392

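# Nothing is compiled; the upstream binary distribution is repackaged as-is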
use_configure       no

# Required Java version
java.version        8+
# JDK port to install if the required Java is not found
java.fallback       openjdk8

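# Run-time toolchain: sbt and Scala 2.12 for Scala applications, Python 3.7 for PySpark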
depends_run         port:sbt port:scala2.12 port:python37

extract.mkdir       yes

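# For the main port, register with the 'apache-spark' select group so the active
# Spark version can be switched with: sudo port select --set apache-spark apache-spark2.4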
if {${subport} eq ${name}} {
    depends_lib     port:apache-spark_select
    select.group    apache-spark
    select.file     ${filespath}/${name}

    build {}

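    # Install the complete pre-built Spark tree under ${prefix}/share/${name}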
    destroot {
        set sparkdir ${prefix}/share/${name}

        # create the parent directory inside the destroot (not the live prefix)
        xinstall -m 755 -d ${destroot}${prefix}/share

        # copying to a not-yet-existing target places the distribution directly at ${sparkdir}
        file copy ${worksrcpath}/${distname} ${destroot}${sparkdir}

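        # explicitly install the launcher scripts with mode 755 so they are executable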
        foreach script {beeline docker-image-tool.sh find-spark-home load-spark-env.sh \
                        pyspark spark-class spark-shell spark-sql spark-submit sparkR} {
            xinstall -m 755 ${worksrcpath}/${distname}/bin/${script} ${destroot}${sparkdir}/bin
        }
    }
}

notes "
To use ${name}, the SPARK_HOME environment variable and the Python interpreter used by
PySpark must be set. Please add the following lines to your .profile, or export them in
your shell session:

export SPARK_HOME=${prefix}/share/${name}
export PYSPARK_PYTHON=python3
"