// Gradle (Kotlin DSL) build script for a Scala Spark application.
// FIX: the reified type parameters (<Test>, <JavaExec>, <Jar>) were missing from
// tasks.withType / tasks.register, which is invalid in the Kotlin DSL — restored below.
plugins {
    scala
}

project.group = "com.barrelsofdata"
project.version = "1.0.0"

dependencies {
    // Scala and Spark are provided by the cluster at runtime, so compileOnly.
    compileOnly(libs.scala.library)
    compileOnly(libs.bundles.spark)
    implementation(libs.spark.sql.kafka)
    testImplementation(libs.scala.test)
}

tasks.withType<Test>().configureEach {
    // Fork tests across half the available cores, but always at least one.
    maxParallelForks = (Runtime.getRuntime().availableProcessors() / 2).coerceAtLeast(1)
}

configurations {
    implementation {
        // Fail the build on version conflicts instead of silently picking a winner.
        resolutionStrategy.failOnVersionConflict()
    }
    testImplementation {
        // Make the compileOnly (provided) dependencies visible on the test classpath.
        extendsFrom(configurations.compileOnly.get())
    }
}

// Runs ScalaTest suites through its command-line Runner, since the standard
// JVM `test` task does not discover ScalaTest suites on its own.
tasks.register<JavaExec>("scalaTest") {
    dependsOn("testClasses")
    mainClass = "org.scalatest.tools.Runner"
    args = listOf("-R", "build/classes/scala/test", "-o")
    jvmArgs = listOf(
        "-Xms128m",
        "-Xmx512m",
        "-XX:MetaspaceSize=300m",
        "-ea",
        // Required on newer JDKs for Spark's direct-buffer access; see:
        // https://lists.apache.org/thread/p1yrwo126vjx5tht82cktgjbmm2xtpw9
        "--add-exports=java.base/sun.nio.ch=ALL-UNNAMED",
    )
    classpath = sourceSets.test.get().runtimeClasspath
}

// Hook the ScalaTest runner into the standard `test` lifecycle.
tasks.withType<Test> {
    dependsOn(":scalaTest")
}

// Build an executable fat jar containing the full runtime classpath.
tasks.withType<Jar> {
    manifest {
        attributes["Main-Class"] = "com.barrelsofdata.sparkexamples.Driver"
    }
    // Duplicate resources across dependency jars are expected; keep the first seen.
    duplicatesStrategy = DuplicatesStrategy.EXCLUDE
    from(configurations.runtimeClasspath.get().map { if (it.isDirectory()) it else zipTree(it) })
    archiveFileName.set("${archiveBaseName.get()}-${project.version}.${archiveExtension.get()}")
}

tasks.clean {
    doFirst {
        // Also remove logs produced by local runs.
        delete("logs/")
    }
}