import org.opensearch.hadoop.gradle.scala.SparkVariantPlugin

description = "OpenSearch Spark Core"

apply plugin: 'java-library'
apply plugin: 'scala'
apply plugin: 'opensearch.hadoop.build'
apply plugin: 'spark.variants'

sparkVariants {
    capabilityGroup 'org.opensearch.spark.variant'
    setCoreDefaultVariant "spark20scala212", spark24Version, scala212Version
    addCoreFeatureVariant "spark30scala213", spark30Version, scala213Version
    addCoreFeatureVariant "spark30scala212", spark30Version, scala212Version
    addCoreFeatureVariant "spark20scala211", spark24Version, scala211Version
    addCoreFeatureVariant "spark20scala210", spark22Version, scala210Version

    all { SparkVariantPlugin.SparkVariant variant ->
        String scalaCompileTaskName = project.sourceSets
                .getByName(variant.getSourceSetName("main"))
                .getCompileTaskName("scala")

        // Configure main compile task
        project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
            configure(compileScala.scalaCompileOptions.forkOptions) {
                memoryMaximumSize = '1g'
            }
            compileScala.scalaCompileOptions.additionalParameters = [
                    "-feature",
                    "-unchecked",
                    "-deprecation",
                    "-Xfuture",
                    "-Yno-adapted-args",
                    "-Ywarn-dead-code",
                    "-Ywarn-numeric-widen",
                    "-Xfatal-warnings"
            ]
        }

        // Per-variant dependencies, added to the variant's dedicated configurations
        dependencies {
            add(variant.configuration('api'), "org.scala-lang:scala-library:${variant.scalaVersion}")
            add(variant.configuration('api'), "org.scala-lang:scala-reflect:${variant.scalaVersion}")
            add(variant.configuration('api'), "org.apache.spark:spark-core_${variant.scalaMajorVersion}:${variant.sparkVersion}") {
                exclude group: 'javax.servlet'
                exclude group: 'org.apache.hadoop'
            }

            add(variant.configuration('implementation'), project(":opensearch-hadoop-mr"))
            add(variant.configuration('implementation'), "commons-logging:commons-logging:1.2")

            add(variant.configuration('compileOnly'), "com.fasterxml.jackson.module:jackson-module-scala_${variant.scalaMajorVersion}:2.9.10")
            add(variant.configuration('compileOnly'), "com.fasterxml.jackson.core:jackson-annotations:2.15.2")
            add(variant.configuration('compileOnly'), "com.google.guava:guava:23.0")
            add(variant.configuration('compileOnly'), "com.google.protobuf:protobuf-java:3.23.3")
            add(variant.configuration('compileOnly'), "org.slf4j:slf4j-api:2.0.7")

            add(variant.configuration('test', 'implementation'), project(":test:shared"))
            add(variant.configuration('test', 'implementation'), "com.esotericsoftware.kryo:kryo:2.24.0")
            add(variant.configuration('test', 'implementation'), "org.apache.spark:spark-core_${variant.scalaMajorVersion}:${variant.sparkVersion}") {
                exclude group: 'javax.servlet'
                exclude group: 'org.apache.hadoop'
            }
            add(variant.configuration('itest', 'implementation'), project(":test:shared"))
            add(variant.configuration('test', 'implementation'), "org.elasticsearch:securemock:1.2")

            // Additional dependencies needed only by the Scala 2.10 (Spark 2.2) variant
            if (variant.scalaMajorVersion == '2.10') {
                add(variant.configuration('implementation'), "org.apache.spark:spark-unsafe_${variant.scalaMajorVersion}:${variant.sparkVersion}")
                add(variant.configuration('implementation'), "org.apache.avro:avro:1.8.2")
                add(variant.configuration('implementation'), "log4j:log4j:1.2.17")
                add(variant.configuration('implementation'), "com.google.code.findbugs:jsr305:3.0.2")
                add(variant.configuration('implementation'), "org.json4s:json4s-ast_2.10:3.6.12")
                add(variant.configuration('implementation'), "com.esotericsoftware.kryo:kryo:2.24.0")
                add(variant.configuration('compileOnly'), "org.apache.hadoop:hadoop-annotations:${project.ext.hadoopVersion}")
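
// For reference, a downstream build would pick one of the variants declared above through its
// Gradle capability. A minimal consumer-side sketch (hypothetical: the module coordinates and
// the exact capability name, assumed here to be capabilityGroup plus the variant name, should
// be checked against what this build actually publishes):
//
//   dependencies {
//       implementation("org.opensearch.client:opensearch-spark") {
//           capabilities {
//               requireCapability("org.opensearch.spark.variant:spark30scala212")
//           }
//       }
//   }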
"org.codehaus.jackson:jackson-core-asl:${project.ext.jacksonVersion}") add(variant.configuration('compileOnly'), "org.codehaus.jackson:jackson-mapper-asl:${project.ext.jacksonVersion}") add(variant.configuration('compileOnly'), "org.codehaus.woodstox:stax2-api:4.2.1") if (variant.sparkVersion == spark22Version) { add(variant.configuration('compileOnly'), "org.apache.spark:spark-tags_${variant.scalaMajorVersion}:${variant.sparkVersion}") } } add(variant.configuration('additionalSources'), project(":opensearch-hadoop-mr")) add(variant.configuration('javadocSources'), project(":opensearch-hadoop-mr")) } def javaFilesOnly = { FileTreeElement spec -> spec.file.name.endsWith('.java') || spec.isDirectory() } // Add java files from scala source set to javadocSourceElements. project.fileTree("src/main/scala").include(javaFilesOnly).each { project.artifacts.add(variant.configuration('javadocSourceElements'), it) } // Configure java source generation for javadoc purposes if (variant.scalaMajorVersion != '2.10') { String generatedJavaDirectory = "$buildDir/generated/java/${variant.name}" Configuration scalaCompilerPlugin = project.configurations.maybeCreate(variant.configuration('scalaCompilerPlugin')) scalaCompilerPlugin.defaultDependencies { dependencies -> dependencies.add(project.dependencies.create("com.typesafe.genjavadoc:genjavadoc-plugin_${variant.scalaVersion}:0.18")) } ScalaCompile compileScala = tasks.getByName(scalaCompileTaskName) as ScalaCompile compileScala.scalaCompileOptions.with { additionalParameters = [ "-Xplugin:" + configurations.getByName(variant.configuration('scalaCompilerPlugin')).asPath, "-P:genjavadoc:out=$generatedJavaDirectory".toString() ] } // Export generated Java code from the genjavadoc compiler plugin artifacts { add(variant.configuration('javadocSourceElements'), project.file(generatedJavaDirectory)) { builtBy compileScala } } tasks.getByName(variant.taskName('javadoc')) { dependsOn compileScala source(generatedJavaDirectory) } } scaladoc { title = "${rootProject.description} ${version} API" } } } // deal with the messy conflicts out there // Ignore the scalaCompilerPlugin configurations since it is immediately resolved to configure the scala compiler tasks configurations.matching{ it.name.contains('CompilerPlugin') == false && (it.name.contains("spark30") || it.name.contains("Spark30")) == false}.all { Configuration conf -> conf.resolutionStrategy { eachDependency { details -> // change all javax.servlet artifacts to the one used by Spark otherwise these will lead to // SecurityException (signer information wrong) if (details.requested.name.contains("servlet") && !details.requested.name.contains("guice")) { details.useTarget group: "org.eclipse.jetty.orbit", name: "javax.servlet", version: "3.0.0.v201112011016" } } } conf.exclude group: "org.mortbay.jetty" } if (JavaVersion.current() >= JavaVersion.VERSION_17) { tasks.withType(Test) { Test task -> if (task.getName().startsWith("test")) task.configure { jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection jvmArgs "--add-opens=java.base/java.nio=ALL-UNNAMED" // Needed for org.apache.spark.SparkConf, which indirectly uses java.nio.DirectByteBuffer jvmArgs "--add-opens=java.base/java.lang=ALL-UNNAMED" // Needed for secure mock } } } // Set minimum compatibility and java home for compiler task tasks.withType(ScalaCompile) { ScalaCompile task -> task.scalaCompileOptions.additionalParameters = ["-javabootclasspath", new File(project.ext.runtimeJavaHome, 

// Set minimum compatibility and java home for compiler task
tasks.withType(ScalaCompile) { ScalaCompile task ->
    task.scalaCompileOptions.additionalParameters = ["-javabootclasspath",
            new File(project.ext.runtimeJavaHome, 'jre/lib/rt.jar').absolutePath]
    task.options.bootstrapClasspath = layout.files(new File(project.ext.runtimeJavaHome, 'jre/lib/rt.jar'))
    task.sourceCompatibility = project.ext.minimumRuntimeVersion
    task.targetCompatibility = project.ext.minimumRuntimeVersion
    task.options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').absolutePath
}
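
// To verify the variants and capabilities this project exposes, Gradle's built-in report task
// can be used (substitute this project's actual path for the placeholder):
//
//   ./gradlew :<project-path>:outgoingVariants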