spark-state-store-data-dedu.../build.gradle.kts
karthik a0cef20f21
All checks were successful
Tests / reset-status (push) Successful in 2s
Tests / tests (push) Successful in 4m6s
Tests / build (push) Successful in 4m20s
Use gradle version catalogs
2023-10-07 14:01:18 +02:00

55 lines
1.4 KiB
Kotlin (Gradle Kotlin DSL)

// Apply the built-in Scala plugin: adds compileScala/compileTestScala tasks
// and the scala source sets used by the blocks below.
plugins {
scala
}
// Artifact coordinates; `group`/`version` at script top level delegate to the Project.
group = "com.barrelsofdata"
version = "1.0.0"
dependencies {
// Scala stdlib and Spark are compileOnly: they are provided by the Spark
// runtime on the cluster and must NOT be bundled into the fat jar below.
compileOnly(libs.scala.library)
compileOnly(libs.bundles.spark)
// Kafka connector is NOT provided by Spark, so it ships in runtimeClasspath
// (and therefore ends up inside the fat jar).
implementation(libs.spark.sql.kafka)
testImplementation(libs.scala.test)
}
// Run test forks on half the available cores, but never fewer than one fork.
tasks.withType<Test>().configureEach {
    val halfTheCores = Runtime.getRuntime().availableProcessors() / 2
    maxParallelForks = maxOf(1, halfTheCores)
}
configurations {
implementation {
// NOTE(review): `implementation` is not resolvable and resolutionStrategy is
// not inherited by the configurations that extend it, so this likely has no
// effect — confirm; `configurations.all { ... }` would apply it to resolution.
resolutionStrategy.failOnVersionConflict()
}
// Make the compileOnly (provided) deps visible on the test classpath, so
// tests can run against Spark/Scala even though they are not bundled.
testImplementation {
extendsFrom(configurations.compileOnly.get())
}
}
// Run ScalaTest suites via its command-line Runner, since the Gradle `test`
// task only discovers JUnit-style tests.
tasks.register<JavaExec>("scalaTest") {
dependsOn("testClasses")
mainClass = "org.scalatest.tools.Runner"
// -R: runpath with compiled test classes, -o: report to standard output.
args = listOf("-R", "build/classes/scala/test", "-o")
// --add-exports works around JDK 17 module encapsulation that Spark needs opened.
jvmArgs = listOf("-Xms128m", "-Xmx512m", "-XX:MetaspaceSize=300m", "-ea", "--add-exports=java.base/sun.nio.ch=ALL-UNNAMED") // https://lists.apache.org/thread/p1yrwo126vjx5tht82cktgjbmm2xtpw9
classpath = sourceSets.test.get().runtimeClasspath
}
// Hook the ScalaTest runner into the normal lifecycle: `gradle test` (or
// `check`/`build`) also executes the `scalaTest` task registered above.
tasks.withType<Test> {
dependsOn(":scalaTest")
}
// Build a runnable fat jar: unpack every runtimeClasspath entry into the jar
// (compileOnly Spark/Scala deps are deliberately excluded — the cluster provides them).
tasks.withType<Jar> {
    manifest {
        // Entry point so the jar can be launched with `java -jar` / spark-submit.
        attributes["Main-Class"] = "com.barrelsofdata.sparkexamples.Driver"
    }
    // Dependencies may ship overlapping resources (e.g. META-INF); keep the first copy.
    duplicatesStrategy = DuplicatesStrategy.EXCLUDE
    // Use a lazy Provider instead of `.get()` so the configuration is resolved at
    // execution time, not during every configuration phase.
    from(configurations.runtimeClasspath.map { classpath ->
        classpath.map { if (it.isDirectory) it else zipTree(it) }
    })
    // Fix the output name to baseName-version.ext (drops any appendix/classifier).
    archiveFileName.set("${archiveBaseName.get()}-${project.version}.${archiveExtension.get()}")
}
// Also wipe the runtime log directory when cleaning; doFirst runs the delete
// before the standard clean action removes the build directory.
tasks.clean {
doFirst {
delete("logs/")
}
}