Skip to content
This repository has been archived by the owner on Sep 20, 2022. It is now read-only.

Commit

Permalink
Close #137: [HIVEMALL-179][SPARK] Support spark-v2.3
Browse files Browse the repository at this point in the history
  • Loading branch information
maropu committed Mar 28, 2018
1 parent fc881c3 commit bd14314
Show file tree
Hide file tree
Showing 42 changed files with 7,932 additions and 5 deletions.
6 changes: 2 additions & 4 deletions bin/run_travis_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -35,14 +35,12 @@ cd $HIVEMALL_HOME/spark

export MAVEN_OPTS="-XX:MaxPermSize=256m"

mvn -q scalastyle:check -Pspark-2.0 -pl spark-2.0 -am test -Dtest=none

mvn -q scalastyle:check clean -Pspark-2.1 -pl spark-2.1 -am test -Dtest=none
mvn -q scalastyle:check -pl spark-2.0,spark-2.1 -am test

# spark-2.2 runs on Java 8+
if [[ ! -z "$(java -version 2>&1 | grep 1.8)" ]]; then
mvn -q scalastyle:check clean -Djava.source.version=1.8 -Djava.target.version=1.8 \
-Pspark-2.2 -pl spark-2.2 -am test -Dtest=none
-pl spark-2.2,spark-2.3 -am test
fi

exit 0
Expand Down
6 changes: 5 additions & 1 deletion spark/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
<module>spark-2.0</module>
<module>spark-2.1</module>
<module>spark-2.2</module>
<module>spark-2.3</module>
</modules>

<properties>
Expand Down Expand Up @@ -157,7 +158,10 @@
<include>org.apache.hivemall:hivemall-spark-common</include>
<!-- hivemall-core -->
<include>org.apache.hivemall:hivemall-core</include>
<include>io.netty:netty-all</include>
<!--
Since `netty-all` is bundled in Spark, we don't need to include it here
<include>io.netty:netty-all</include>
-->
<include>com.github.haifengl:smile-core</include>
<include>com.github.haifengl:smile-math</include>
<include>com.github.haifengl:smile-data</include>
Expand Down
99 changes: 99 additions & 0 deletions spark/spark-2.3/bin/mvn-zinc
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Copied from commit 48682f6bf663e54cb63b7e95a4520d34b6fa890b in Apache Spark

# Determine the current working directory (resolves the script's own dir,
# even when invoked through a relative path)
_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Preserve the calling directory
_CALLING_DIR="$(pwd)"
# Options used during compilation
# NOTE(review): -XX:MaxPermSize is reportedly ignored (with a warning) on
# JDK 8+ — confirm it is still wanted here
_COMPILE_JVM_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m"

# Installs any application tarball given a URL, the expected tarball name,
# and, optionally, a checkable binary path to determine if the binary has
# already been installed
## Arg1 - URL
## Arg2 - Tarball Name
## Arg3 - Checkable Binary
install_app() {
  local remote_tarball="$1/$2"
  local local_tarball="${_DIR}/$2"
  local binary="${_DIR}/$3"
  # Intentionally unquoted when expanded below so multiple options word-split
  local curl_opts="--progress-bar -L"
  # Preserve any wget options already supplied via the environment
  local wget_opts="--progress=bar:force ${wget_opts-}"

  # Only download when no checkable binary was given, or it is missing.
  # ('-o' inside '[ ]' is deprecated/ambiguous per POSIX — use two tests.)
  if [ -z "$3" ] || [ ! -f "$binary" ]; then
    # check if we already have the tarball; try curl first if installed
    [ ! -f "${local_tarball}" ] && command -v curl >/dev/null 2>&1 && \
      echo "exec: curl ${curl_opts} ${remote_tarball}" 1>&2 && \
      curl ${curl_opts} "${remote_tarball}" > "${local_tarball}"
    # if the file still doesn't exist, lets try `wget` and cross our fingers
    [ ! -f "${local_tarball}" ] && command -v wget >/dev/null 2>&1 && \
      echo "exec: wget ${wget_opts} ${remote_tarball}" 1>&2 && \
      wget ${wget_opts} -O "${local_tarball}" "${remote_tarball}"
    # if both were unsuccessful, report on stderr and exit
    if [ ! -f "${local_tarball}" ]; then
      echo "ERROR: Cannot download $2 with cURL or wget; please install manually and try again." 1>&2
      exit 2
    fi
    cd "${_DIR}" && tar -xzf "$2"
    # plain file — '-f' suffices; '--' guards against option-like names
    rm -f -- "${local_tarball}"
  fi
}

# Install zinc under the bin/ folder
# Ensure Zinc 0.3.9 is available under bin/ and record its launcher path
# in ZINC_BIN. When the launcher is not yet present, ZINC_INSTALL_FLAG is
# set so the caller knows a fresh install happened and (re)starts the server.
install_zinc() {
  local zinc_launcher="zinc-0.3.9/bin/zinc"
  if [ ! -f "${_DIR}/${zinc_launcher}" ]; then
    ZINC_INSTALL_FLAG=1
  fi
  install_app \
    "http://downloads.typesafe.com/zinc/0.3.9" \
    "zinc-0.3.9.tgz" \
    "${zinc_launcher}"
  ZINC_BIN="${_DIR}/${zinc_launcher}"
}

# Setup healthy defaults for the Zinc port if none were provided from
# the environment
ZINC_PORT=${ZINC_PORT:-"3030"}

# Install Zinc for the bin/
install_zinc

# Reset the current working directory
cd "${_CALLING_DIR}"

# Bail out if the Zinc launcher could not be installed.
# ('exit -1' is not portable: POSIX 'exit' takes a non-negative status;
# callers only test for non-zero, so 1 is equivalent and portable.)
if [ ! -f "${ZINC_BIN}" ]; then
  exit 1
fi

# Now that zinc is ensured to be installed, (re)start the server when it
# was freshly installed or is not responding on the chosen port
if [ -n "${ZINC_INSTALL_FLAG}" ] || [ -z "$("${ZINC_BIN}" -status -port "${ZINC_PORT}")" ]; then
  export ZINC_OPTS=${ZINC_OPTS:-"$_COMPILE_JVM_OPTS"}
  "${ZINC_BIN}" -shutdown -port "${ZINC_PORT}"
  "${ZINC_BIN}" -start -port "${ZINC_PORT}" &>/dev/null
fi

# Set any `mvn` options if not already present
export MAVEN_OPTS=${MAVEN_OPTS:-"$_COMPILE_JVM_OPTS"}

# Last, call the `mvn` command as usual, pointing it at the Zinc port
mvn -DzincPort="${ZINC_PORT}" "$@"
20 changes: 20 additions & 0 deletions spark/spark-2.3/extra-src/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

Copied from the Spark v2.3.0 release.
Loading

0 comments on commit bd14314

Please sign in to comment.