Initial commit

This commit is contained in:
2020-07-09 13:26:50 +00:00
commit 62c65fa963
12 changed files with 469 additions and 0 deletions

View File

@ -0,0 +1,21 @@
package com.barrelsofdata.sparkexamples
import org.apache.log4j.Logger
import org.apache.spark.sql.SparkSession

/** Minimal Spark boilerplate job: runs a trivial SQL query and logs at each level. */
object Driver {

  val JOB_NAME: String = "Boilerplate"
  val LOG: Logger = Logger.getLogger(this.getClass.getCanonicalName)

  /** Entry point: obtains a SparkSession, shows a one-row demo query,
    * emits sample log lines, and stops the session.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder().appName(JOB_NAME).getOrCreate()
    spark.sql("SELECT 'hello' AS col1").show()
    LOG.info("Dummy info message")
    LOG.warn("Dummy warn message")
    LOG.error("Dummy error message")
    // Fix: stop the session so the driver JVM shuts down cleanly instead of
    // lingering on Spark's non-daemon threads after main returns.
    spark.stop()
  }
}

View File

@ -0,0 +1,16 @@
# Root logger: INFO level, routed to the "console" appender defined below.
log4j.rootLogger=INFO, console
# Application namespace logs at INFO to the console appender;
# additivity=false stops these events from also reaching the root logger
# (which would duplicate every line on the same appender).
log4j.logger.com.barrelsofdata.sparkexamples=INFO, console
log4j.additivity.com.barrelsofdata.sparkexamples=false
# Console appender: stdout, flushed per event, UTF-8.
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.out
log4j.appender.console.immediateFlush=true
log4j.appender.console.encoding=UTF-8
# Layout: "2020/07/09 13:26:50 INFO logger.name: message" + newline.
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.conversionPattern=%d{yyyy/MM/dd HH:mm:ss} %p %c: %m%n
# Silence chatty framework internals: only ERRORs from Spark/Hadoop
# (org.apache), Spark's shaded packages (both historical spellings), and parquet.
log4j.logger.org.apache=ERROR
log4j.logger.org.spark_project=ERROR
log4j.logger.org.sparkproject=ERROR
log4j.logger.parquet=ERROR

View File

@ -0,0 +1,41 @@
package com.barrelsofdata.sparkexamples
import java.util.Properties
import org.apache.log4j.{LogManager, Logger, PropertyConfigurator}
import org.apache.spark.sql.SparkSession
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite

/** Smoke test: boots a local SparkSession (with test-specific log4j config)
  * and verifies it can execute SQL.
  */
class DriverTest extends AnyFunSuite with BeforeAndAfterAll {

  val JOB_NAME: String = "Driver Test Job"
  val LOGGER_PROPERTIES: String = "log4j-test.properties"
  val LOG: Logger = Logger.getLogger(this.getClass.getCanonicalName)

  // Assigned in beforeAll(); a var is required because ScalaTest constructs
  // the suite before the lifecycle hooks run.
  var spark: SparkSession = _

  /** Replaces the active log4j configuration with the test properties file
    * loaded from the classpath.
    */
  def setupLogger(): Unit = {
    val properties = new Properties
    properties.load(getClass.getClassLoader.getResource(LOGGER_PROPERTIES).openStream())
    LogManager.resetConfiguration()
    PropertyConfigurator.configure(properties)
  }

  // Fix: BeforeAndAfterAll declares the side-effecting hooks as beforeAll()
  // and afterAll() (with parentheses); overriding them parameterless is
  // deprecated in Scala 2.13 and an error under Scala 3 / -Xsource:3.
  override def beforeAll(): Unit = {
    setupLogger()
    LOG.info("Setting up spark session")
    spark = SparkSession.builder().appName(JOB_NAME).master("local[*]").getOrCreate()
  }

  test("Check if spark session is working") {
    LOG.info("Testing spark job")
    assertResult("hello")(spark.sql("SELECT 'hello'").collect().head.get(0))
  }

  override def afterAll(): Unit = {
    LOG.info("Closing spark session")
    // stop() is the canonical shutdown; close() is just a Closeable alias for it.
    spark.stop()
  }
}