generated from barrelsofdata/spark-boilerplate
Initial commit
commit f60539d7c4
.gitignore (vendored, Normal file, 11 lines)
@@ -0,0 +1,11 @@
# Compiled classes
*.class
# Gradle files
.gradle
# IntelliJ IDEA files
.idea
# Build files
build
# Log files
*.log
logs
README.md (Normal file, 26 lines)
@@ -0,0 +1,26 @@
# Spark Boilerplate
This is a boilerplate project for Apache Spark. The related blog post can be found at [https://www.barrelsofdata.com/spark-boilerplate-using-scala](https://www.barrelsofdata.com/spark-boilerplate-using-scala).

## Build instructions
From the root of the project, execute the commands below.
- To remove all compiled classes and the build and log directories
```shell script
./gradlew clean
```
- To run tests
```shell script
./gradlew test
```
- To build the jar
```shell script
./gradlew shadowJar
```
- All combined
```shell script
./gradlew clean test shadowJar
```

## Run
```shell script
spark-submit --master yarn --deploy-mode cluster build/libs/spark-boilerplate-1.0.jar
```
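The same submit command accepts the usual resource-sizing options; a sketch with a few common flags (the values shown are illustrative only and should be sized for your own cluster):
```shell script
# Illustrative resource settings, not part of this project
spark-submit \
  --master yarn \
  --deploy-mode cluster \
  --num-executors 2 \
  --executor-cores 2 \
  --executor-memory 2g \
  build/libs/spark-boilerplate-1.0.jar
```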
build.gradle (Normal file, 45 lines)
@@ -0,0 +1,45 @@
plugins {
    id "scala"
    id "com.github.johnrengelman.shadow" version "${shadowJarPluginVersion}"
}

group "${projectGroup}"
version "${projectVersion}"

repositories {
    mavenCentral()
}

dependencies {
    compileOnly group: "org.scala-lang", name: "scala-library", version: "${scalaMajorVersion}.${scalaMinorVersion}"

    compileOnly group: "org.apache.spark", name: "spark-core_${scalaMajorVersion}", version: "${apacheSparkVersion}"
    compileOnly group: "org.apache.spark", name: "spark-sql_${scalaMajorVersion}", version: "${apacheSparkVersion}"

    testImplementation group: "org.scalatest", name: "scalatest_${scalaMajorVersion}", version: "${scalaTestVersion}"
}

configurations {
    testImplementation.extendsFrom compileOnly
}

task scalaTest(dependsOn: ['testClasses'], type: JavaExec) {
    main = 'org.scalatest.tools.Runner'
    args = ['-R', 'build/classes/scala/test', '-o']
    classpath = sourceSets.test.runtimeClasspath
}

test.dependsOn scalaTest

shadowJar {
    mergeServiceFiles()
    zip64 true
    manifest {
        attributes "Main-Class": "${mainClass}"
    }
    archiveFileName.set("${getArchiveBaseName().get()}-${projectVersion}.${getArchiveExtension().get()}")
}

clean.doFirst {
    delete "logs/"
}
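Gradle's stock test task only discovers JUnit/TestNG style tests, which is why the build wires test to depend on the custom scalaTest JavaExec task above. Either invocation below therefore runs the ScalaTest suites (task names as defined in this build file):
```shell script
./gradlew scalaTest   # invoke the ScalaTest Runner task directly
./gradlew test        # also runs the suites via test.dependsOn scalaTest
```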
gradle.properties (Normal file, 14 lines)
@@ -0,0 +1,14 @@
scalaMajorVersion=2.12
scalaMinorVersion=10

apacheSparkVersion=3.0.0

scalaTestVersion=3.1.2

shadowJarPluginVersion=6.0.0

mainClass=com.barrelsofdata.sparkexamples.Driver
projectGroup=com.barrelsofdata.sparkexamples
projectVersion=1.0

org.gradle.daemon=false
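Each of these entries is a standard Gradle project property, so any of them can be overridden for a single invocation with the -P flag; for example (the version value here is only an example):
```shell script
./gradlew -PapacheSparkVersion=3.0.1 clean test shadowJar
```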
gradle/wrapper/gradle-wrapper.jar (vendored, Normal file, binary)
Binary file not shown.
gradle/wrapper/gradle-wrapper.properties (vendored, Normal file, 5 lines)
@@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
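The Gradle version is pinned by the distributionUrl above; to move the project to a different version, Gradle's wrapper task can regenerate these files (the version shown is only an example):
```shell script
./gradlew wrapper --gradle-version 6.5 --distribution-type bin
```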
gradlew (vendored, Executable file, 185 lines)
@@ -0,0 +1,185 @@
#!/usr/bin/env sh

#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn () {
    echo "$*"
}

die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar


# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`

    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"`   ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then   ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=`expr $i + 1`
    done
    case $i in
        0) set -- ;;
        1) set -- "$args0" ;;
        2) set -- "$args0" "$args1" ;;
        3) set -- "$args0" "$args1" "$args2" ;;
        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=`save "$@"`

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

exec "$JAVACMD" "$@"
gradlew.bat (vendored, Normal file, 104 lines)
@@ -0,0 +1,104 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem      https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem

@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar


@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
settings.gradle (Normal file, 1 line)
@@ -0,0 +1 @@
rootProject.name = 'spark-boilerplate'
src/main/scala/com/barrelsofdata/sparkexamples/Driver.scala (Normal file, 21 lines)
@@ -0,0 +1,21 @@
package com.barrelsofdata.sparkexamples

import org.apache.log4j.Logger
import org.apache.spark.sql.SparkSession

object Driver {

  val JOB_NAME: String = "Boilerplate"
  val LOG: Logger = Logger.getLogger(this.getClass.getCanonicalName)

  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder().appName(JOB_NAME).getOrCreate()
    spark.sql("SELECT 'hello' AS col1").show()

    LOG.info("Dummy info message")
    LOG.warn("Dummy warn message")
    LOG.error("Dummy error message")
  }

}
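For a quick smoke test of this entry point without a YARN cluster, the built jar can also be submitted with a local master; a sketch (the main class matches mainClass in gradle.properties and the jar path matches the shadowJar output):
```shell script
# Run the same Driver locally, using all available cores
spark-submit --master "local[*]" \
  --class com.barrelsofdata.sparkexamples.Driver \
  build/libs/spark-boilerplate-1.0.jar
```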
src/test/resources/log4j-test.properties (Normal file, 16 lines)
@@ -0,0 +1,16 @@
log4j.rootLogger=INFO, console
log4j.logger.com.barrelsofdata.sparkexamples=INFO, console
log4j.additivity.com.barrelsofdata.sparkexamples=false

log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.out
log4j.appender.console.immediateFlush=true
log4j.appender.console.encoding=UTF-8

log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.conversionPattern=%d{yyyy/MM/dd HH:mm:ss} %p %c: %m%n

log4j.logger.org.apache=ERROR
log4j.logger.org.spark_project=ERROR
log4j.logger.org.sparkproject=ERROR
log4j.logger.parquet=ERROR
src/test/scala/com/barrelsofdata/sparkexamples/DriverTest.scala (Normal file, 41 lines)
@@ -0,0 +1,41 @@
package com.barrelsofdata.sparkexamples

import java.util.Properties

import org.apache.log4j.{LogManager, Logger, PropertyConfigurator}
import org.apache.spark.sql.SparkSession
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite

class DriverTest extends AnyFunSuite with BeforeAndAfterAll {

  val JOB_NAME: String = "Driver Test Job"
  val LOGGER_PROPERTIES: String = "log4j-test.properties"
  val LOG: Logger = Logger.getLogger(this.getClass.getCanonicalName)

  var spark: SparkSession = _

  def setupLogger(): Unit = {
    val properties = new Properties
    properties.load(getClass.getClassLoader.getResource(LOGGER_PROPERTIES).openStream())
    LogManager.resetConfiguration()
    PropertyConfigurator.configure(properties)
  }

  override def beforeAll: Unit = {
    setupLogger()
    LOG.info("Setting up spark session")
    spark = SparkSession.builder().appName(JOB_NAME).master("local[*]").getOrCreate()
  }

  test("Check if spark session is working") {
    LOG.info("Testing spark job")
    assertResult("hello")(spark.sql("SELECT 'hello'").collect().head.get(0))
  }

  override def afterAll: Unit = {
    LOG.info("Closing spark session")
    spark.close()
  }

}