/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

buildscript {
    ext {
        opensearch_version = System.getProperty("opensearch.version", "3.0.0-SNAPSHOT")
        isSnapshot = "true" == System.getProperty("build.snapshot", "true")
        buildVersionQualifier = System.getProperty("build.version_qualifier", "")
        buildDockerJdkVersion = System.getProperty("build.docker_jdk_ver", "11")

        // 3.0.0-SNAPSHOT -> 3.0.0.0-SNAPSHOT
        version_tokens = opensearch_version.tokenize('-')
        opensearch_build = version_tokens[0] + '.0'
        if (buildVersionQualifier) {
            opensearch_build += "-${buildVersionQualifier}"
        }
        if (isSnapshot) {
            opensearch_build += "-SNAPSHOT"
        }

        gitPaBranch = 'main'
        gitPaRepo = "https://github.com/opensearch-project/performance-analyzer.git"
        runGauntletTests = "true" == System.getProperty("run.gauntlet.tests", "false")
    }

    repositories {
        mavenLocal()
        maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" }
        mavenCentral()
        maven { url "https://plugins.gradle.org/m2/" }
    }

    dependencies {
        classpath "org.opensearch.gradle:build-tools:${opensearch_version}"
        classpath group: 'com.google.guava', name: 'guava', version: '31.1-jre'
    }
}

plugins {
    id 'java'
    id 'application'
    id 'maven-publish'
    id 'com.google.protobuf' version '0.8.18'
    id 'jacoco'
    id 'idea'
    id 'com.github.spotbugs' version '5.0.13'
    id "de.undercouch.download" version "4.0.4"
    id 'com.adarshr.test-logger' version '2.1.0'
    id 'org.gradle.test-retry' version '1.5.2'
    id 'com.diffplug.spotless' version '5.11.0'
}

application {
    mainClassName = 'org.opensearch.performanceanalyzer.PerformanceAnalyzerApp'
    applicationDefaultJvmArgs = ['-Xms64M', '-Xmx64M', '-XX:+UseSerialGC', '-XX:CICompilerCount=1',
                                 '-XX:-TieredCompilation', '-XX:InitialCodeCacheSize=4096', '-XX:MaxRAM=400m']
}

// Include PA related folder in the distribution.
applicationDistribution.from(".") {
    include 'config/*'
    include 'bin/*'
}

distributions {
    main {
        contents {
            eachFile {
                it.path = it.path.replace("-$version/", '/')
            }
        }
    }
}

spotless {
    java {
        licenseHeaderFile(file('license-header'))
        googleJavaFormat('1.12.0').aosp()
        importOrder()
        removeUnusedImports()
        trimTrailingWhitespace()
        endWithNewline()
        // add support for spotless:off and spotless:on tags to exclude sections of code
        toggleOffOn()
    }
}

tasks.withType(Checkstyle) {
    reports {
        xml.enabled false
        html.enabled true
        html.stylesheet resources.text.fromFile('configs/xsl/severity-sorted.xsl')
    }
}

testlogger {
    theme 'standard'
    showExceptions true
    showStackTraces false
    showFullStackTraces false
    showCauses true
    slowThreshold 4000
    showSummary true
    showSimpleNames false
    showPassed true
    showSkipped true
    showFailed true
    showStandardStreams false
    showPassedStandardStreams true
    showSkippedStandardStreams true
    showFailedStandardStreams true
    logLevel 'lifecycle'
}
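// Illustrative helper (not part of the original build; the task name is an assumption): prints
// the plugin version computed in the buildscript block above so the qualifier/snapshot handling
// can be verified, e.g.
//   ./gradlew printOpensearchBuild -Dbuild.version_qualifier=alpha1
task printOpensearchBuild {
    doLast {
        println "opensearch_version=${opensearch_version} -> opensearch_build=${opensearch_build}"
    }
}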
spotbugsMain {
    excludeFilter = file("checkstyle/findbugs-exclude.xml")
    effort = 'max'
    ignoreFailures = true // TODO: Set this to false later; there are too many warnings to fix right now.
    reports {
        xml.enabled = false
        html.enabled = true
    }
}

spotbugsTest {
    ignoreFailures = true
}

check {
    dependsOn spotbugsMain
    //dependsOn spotbugsTest
}

jacoco {
    toolVersion = "0.8.7"
}

jacocoTestReport {
    reports {
        xml.enabled true
        html.enabled true
        csv.enabled false
    }
    afterEvaluate {
        classDirectories.from = files(classDirectories.files.collect {
            fileTree(dir: it,
                    include: [
                            '**/org/opensearch/performanceanalyzer/**'
                    ],
                    exclude: [
                            '**/org/opensearch/performanceanalyzer/grpc/**',
                            '**/org/opensearch/performanceanalyzer/rca/framework/api/metrics/**',
                            '**/org/opensearch/performanceanalyzer/rca/net/requests**'
                    ])
        })
    }
}

jacocoTestCoverageVerification {
    afterEvaluate {
        classDirectories.from = files(classDirectories.files.collect {
            fileTree(dir: it,
                    include: [
                            '**/org/opensearch/performanceanalyzer/rca/**',
                            '**/org/opensearch/performanceanalyzer/reader/**'
                    ],
                    exclude: [
                            '**/org/opensearch/performanceanalyzer/grpc/**'
                    ])
        })
    }
    if (runGauntletTests) {
        violationRules {
            rule {
                limit {
                    minimum = 0.4
                }
            }
        }
    } else {
        violationRules {
            rule {
                limit {
                    minimum = 0.6
                }
            }
        }
    }
}

// to run coverage verification during the build (and fail when appropriate)
check.dependsOn jacocoTestCoverageVerification

version = opensearch_build

distZip {
    archiveName "performance-analyzer-rca-${version}.zip"
}

publishing {
    publications {
        maven(MavenPublication) {
            groupId = 'org.opensearch'
            artifactId = 'performanceanalyzer-rca'
            from components.java
        }
    }
}

test {
    if (!runGauntletTests) {
        filter {
            excludeTestsMatching 'org.opensearch.performanceanalyzer.rca.integTests.*'
            // TODO: Fix this test; it causes an OutOfMemoryError (Java heap space) and runs forever
            excludeTestsMatching 'org.opensearch.performanceanalyzer.reader.OSMetricsSnapshotTests'
        }
    } else {
        filter {
            includeTestsMatching 'org.opensearch.performanceanalyzer.rca.integTests.*'
            // Ignore failing tests (known issues with the tests)
            excludeTestsMatching 'org.opensearch.performanceanalyzer.rca.integTests.tests.jvmsizing.HeapSizeIncreaseMissingMetricsTest'
            excludeTestsMatching 'org.opensearch.performanceanalyzer.rca.integTests.tests.consolidate_tuning.JvmFlipFlopITest'
        }
    }
    enabled = true
    retry {
        maxRetries = 2
        maxFailures = 20
        filter {
            // filter by qualified class name (* matches zero or more of any character)
            // Ignore retries on all the integ tests, as retrying them fails: the retry plugin
            // cannot retry these tests because the reported method name differs
            // (e.g. java.lang.Class#testDataNodeMissingGcMetrics).
            excludeClasses.add("*integTests*")
        }
    }
}
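// Illustrative helper (not part of the original build; the task name is an assumption): the
// coverage floor enforced by jacocoTestCoverageVerification above depends on -Drun.gauntlet.tests,
// and this task simply echoes which threshold applies to the current invocation, e.g.
//   ./gradlew printCoverageFloor -Drun.gauntlet.tests=true
task printCoverageFloor {
    doLast {
        println "run.gauntlet.tests=${runGauntletTests}, minimum coverage ratio=${runGauntletTests ? 0.4 : 0.6}"
    }
}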
tasks.withType(Test) {
    jvmArgs('--add-opens=java.base/java.io=ALL-UNNAMED')
    jvmArgs('--add-opens=java.base/java.util.concurrent=ALL-UNNAMED')
    jvmArgs('--add-opens=java.base/java.time=ALL-UNNAMED')
    jvmArgs('--add-opens=java.base/java.util.stream=ALL-UNNAMED')
    jvmArgs('--add-opens=java.base/sun.nio.fs=ALL-UNNAMED')
    jvmArgs('--add-opens=java.base/java.nio.file=ALL-UNNAMED')
    jvmArgs('--add-opens=java.base/java.lang=ALL-UNNAMED')
    jvmArgs('--add-opens=java.base/java.util=ALL-UNNAMED')
}

task rcaTest(type: Test) {
    useJUnit {
        includeCategories 'org.opensearch.performanceanalyzer.rca.GradleTaskForRca'
    }
    testLogging.showStandardStreams = true
}

task rcaIt(type: Test) {
    dependsOn spotbugsMain
    useJUnit {
        includeCategories 'org.opensearch.performanceanalyzer.rca.integTests.framework.RcaItMarker'
    }
    //testLogging.showStandardStreams = true
}

sourceCompatibility = JavaVersion.VERSION_11
targetCompatibility = JavaVersion.VERSION_11

compileJava {
    dependsOn spotlessApply
    JavaVersion targetVersion = JavaVersion.toVersion(targetCompatibility)
    if (targetVersion.isJava9Compatible()) {
        options.compilerArgs += ["--add-exports", "jdk.attach/sun.tools.attach=ALL-UNNAMED"]
        options.compilerArgs += ["--add-exports", "jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED"]
    }
}

javadoc {
    JavaVersion targetVersion = JavaVersion.toVersion(targetCompatibility)
    if (targetVersion.isJava9Compatible()) {
        options.addStringOption("-add-exports", "jdk.attach/sun.tools.attach=ALL-UNNAMED")
        options.addStringOption("-add-exports", "jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED")
    }
}
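// Example invocations for the category-filtered test tasks above (assumption: run from the
// repository root):
//   ./gradlew rcaTest   # unit tests in the GradleTaskForRca JUnit category
//   ./gradlew rcaIt     # integration tests in the RcaItMarker JUnit category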
project.afterEvaluate {
    JavaVersion targetVersion = JavaVersion.toVersion(targetCompatibility)
    // The version passed to --release must not contain the leading "1." of the full version
    // string; it should be '8' or '11' etc.
    String version = targetVersion.toString()
    if (version.length() > 2) {
        version = targetVersion.toString().substring(2)
    }
    compileJava.options.compilerArgs.removeAll(['--release', version])
}

repositories {
    mavenLocal()
    mavenCentral()
    maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" }
}

configurations {
    includeJars
    dockerBuild
}

tasks.withType(JavaCompile) {
    options.warnings = false
}

import org.opensearch.gradle.VersionProperties

dependencies {
    if (JavaVersion.current() <= JavaVersion.VERSION_1_8) {
        implementation files("${System.properties['java.home']}/../lib/tools.jar")
    }
    def versions = VersionProperties.versions
    def jacksonVersion = "${versions.jackson}"
    def jacksonDataBindVersion = "${versions.jackson_databind}"
    def nettyVersion = "${versions.netty}"
    def junitVersion = "${versions.junit}"
    def log4jVersion = "${versions.log4j}"
    def protobufVersion = "${versions.protobuf}"
    def guavaVersion = "${versions.guava}"
    def jakartaVersion = "${versions.jakarta_annotation}"

    implementation 'org.jooq:jooq:3.10.8'
    implementation 'org.bouncycastle:bcprov-jdk15to18:1.74'
    implementation 'org.bouncycastle:bcpkix-jdk15to18:1.74'
    implementation 'org.xerial:sqlite-jdbc:3.41.2.2'
    implementation "com.google.guava:guava:${guavaVersion}"
    implementation 'com.google.code.gson:gson:2.9.0'
    implementation 'org.checkerframework:checker-qual:3.29.0'
    implementation "com.fasterxml.jackson.core:jackson-annotations:${jacksonVersion}"
    implementation "com.fasterxml.jackson.core:jackson-databind:${jacksonDataBindVersion}"
    implementation "org.opensearch:performance-analyzer-commons:1.0.0-SNAPSHOT"
    implementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: "${log4jVersion}"
    implementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: "${log4jVersion}"
    implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.9'
    implementation group: 'commons-io', name: 'commons-io', version: '2.7'
    implementation group: 'com.google.errorprone', name: 'error_prone_annotations', version: '2.9.0'
    implementation group: 'com.google.protobuf', name: 'protobuf-java', version: "${protobufVersion}"
    implementation 'io.grpc:grpc-netty:1.52.1'
    implementation 'io.grpc:grpc-protobuf:1.52.1'
    implementation("io.netty:netty-codec-http2:${nettyVersion}") {
        force = 'true'
    }
    implementation("io.netty:netty-handler-proxy:${nettyVersion}") {
        force = 'true'
    }
    implementation 'io.grpc:grpc-stub:1.52.1'
    implementation "jakarta.annotation:jakarta.annotation-api:${jakartaVersion}"
    // JDK 9+ requires PowerMock 2+; see https://github.com/powermock/powermock/issues/888
    testImplementation group: 'org.powermock', name: 'powermock-api-mockito2', version: '2.0.0'
    testImplementation group: 'org.powermock', name: 'powermock-module-junit4', version: '2.0.0'
    implementation("org.mockito:mockito-core") {
        version {
            strictly "2.23.0"
        }
    }
    testImplementation group: 'org.powermock', name: 'powermock-core', version: '2.0.0'
    testImplementation group: 'org.powermock', name: 'powermock-api-support', version: '2.0.0'
    testImplementation group: 'org.powermock', name: 'powermock-module-junit4-common', version: '2.0.0'
    testImplementation group: 'org.javassist', name: 'javassist', version: '3.24.0-GA'
    testImplementation group: 'org.powermock', name: 'powermock-reflect', version: '2.0.0'
    testImplementation group: 'net.bytebuddy', name: 'byte-buddy', version: '1.9.3'
    testImplementation group: 'org.objenesis', name: 'objenesis', version: '3.0.1'
    testImplementation group: 'org.hamcrest', name: 'hamcrest-library', version: '2.1'
    testImplementation group: 'org.hamcrest', name: 'hamcrest', version: '2.1'
    testImplementation group: 'junit', name: 'junit', version: "${junitVersion}"

    // Required for Docker build
    dockerBuild group: 'org.opensearch.plugin', name: 'performance-analyzer', version: "${opensearch_build}"
}

configurations.each { c ->
    c.resolutionStrategy.dependencySubstitution {
        all { DependencySubstitution dependency ->
            if (dependency.requested.group == 'org.apache.bcel') {
                dependency.useTarget 'org.apache.bcel:bcel:6.6.1'
            }
        }
    }
}

protobuf {
    protoc {
        artifact = "com.google.protobuf:protoc:3.21.12"
    }
    plugins {
        grpc {
            artifact = 'io.grpc:protoc-gen-grpc-java:1.52.1'
        }
    }
    generateProtoTasks {
        all()*.plugins {
            grpc {}
        }
    }
}

idea {
    module {
        sourceDirs += file("${projectDir}/build/generated/source/proto/main/java")
        sourceDirs += file("${projectDir}/build/generated/source/proto/main/grpc")
    }
}

import groovy.json.JsonSlurper
import java.nio.file.Paths

String paDir
String dockerBuildDir
String dockerArtifactsDir
String file_ext = "zip"

task pullPAPlugin(type: Copy) {
    dockerBuildDir = Paths.get(getProject().getBuildDir().toString(), "docker-build").toString()
    mkdir dockerBuildDir
    paDir = Paths.get(dockerBuildDir, "pa")
    mkdir paDir
    println String.format('opensearch_version: (%s), plugin_version: (%s), snapshot: (%s), qualifier: (%s).',
            opensearch_version, version, isSnapshot, buildVersionQualifier)
    from(configurations.dockerBuild)
    into(paDir)
    rename "performance-analyzer-${version}.${file_ext}", "opensearch-performance-analyzer-${version}.${file_ext}"
    println String.format('Pulled PA Plugin at: (%s), will be used for Docker Build',
            Paths.get(paDir, "opensearch-performance-analyzer-${version}.${file_ext}"))
}

task copyAllArtifacts(type: Copy) {
    dependsOn(assemble, publishToMavenLocal, pullPAPlugin)
    def projectPathStr = getProjectDir().toPath().toString()
    def dockerArtifacts = Paths.get(projectPathStr, 'docker').toString()
    def rcaArtifacts = Paths.get(projectPathStr, 'build', 'distributions', "performance-analyzer-rca-${version}.${file_ext}")
    def paArtifacts = Paths.get(dockerBuildDir, 'pa', "opensearch-performance-analyzer-${version}.${file_ext}")
    dockerArtifactsDir = Paths.get(dockerBuildDir, 'rca-docker')
    mkdir dockerArtifactsDir
    from(dockerArtifacts)
    from(rcaArtifacts)
    from(paArtifacts)
    into(dockerArtifactsDir)
}

task copyReaderMetricsFiles(type: Copy) {
    dependsOn(pullPAPlugin)
    copy {
        from('src/test/resources/reader') {
        }
        into 'build/private/test_resources'
    }
}
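// Example (assumptions: Docker and docker-compose are installed, and a matching
// performance-analyzer plugin zip is resolvable through the dockerBuild configuration):
// the copy tasks above stage the RCA distribution, the PA plugin zip and the docker/ files
// into build/docker-build/rca-docker, which the buildDocker and runDocker tasks below use:
//   ./gradlew runDocker   # stages artifacts, builds the opensearch/pa-rca image, starts the compose cluster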
task buildDocker(type: Exec) {
    dependsOn(copyAllArtifacts)
    workingDir(dockerArtifactsDir)
    commandLine 'docker', 'build', '-t', 'opensearch/pa-rca:3.0', '.', '--build-arg', "JDK_VER=${buildDockerJdkVersion}"
}

task runDocker(type: Exec) {
    dependsOn(buildDocker)
    workingDir(dockerArtifactsDir)
    // This block is included to make the runDocker task work with GitHub Actions.
    // It reads the path to the docker-compose program from an environment variable.
    // The DOCKER_COMPOSE_LOCATION environment variable is set in the gradle.yml file inside the
    // performance-analyzer repository.
    def docker_compose_location = "docker-compose"
    if (System.getenv("DOCKER_COMPOSE_LOCATION") != null) {
        docker_compose_location = System.getenv("DOCKER_COMPOSE_LOCATION")
    }
    environment 'DATA_VOLUME1', 'opensearchdata1'
    environment 'DATA_VOLUME2', 'opensearchdata2'
    commandLine(docker_compose_location, '-f', 'docker-compose.yml', '-f', 'docker-compose.hostports.yml',
            '-f', 'docker-compose.cluster.yml', 'up', 'opensearch1', 'opensearch2')
}

def printCommandOutput(proc) {
    Thread.start {
        BufferedReader stdInput = new BufferedReader(new InputStreamReader(proc.getInputStream()))
        BufferedReader stdError = new BufferedReader(new InputStreamReader(proc.getErrorStream()))

        System.out.println("Here is the standard output of the command:\n")
        String s = null
        while ((s = stdInput.readLine()) != null) {
            System.out.println(s)
        }

        System.out.println("Here is the standard error of the command (if any):\n")
        while ((s = stdError.readLine()) != null) {
            System.out.println(s)
        }
    }
    int exitVal = proc.waitFor()
    System.out.println("Process exitValue: " + exitVal)
}

def runCommand(commandArr, envArr, workingDir, printOutput) {
    try {
        Process proc = Runtime.getRuntime().exec(commandArr, envArr, file(workingDir))
        if (printOutput) {
            printCommandOutput(proc)
        }
    } catch (Throwable th) {
        th.printStackTrace()
    }
}

def runInProcess(commandArr) {
    Process proc = new ProcessBuilder(commandArr).start()
    printCommandOutput(proc)
}

def runDockerCompose = { executionPath ->
    // Same GitHub Actions workaround as in runDocker: the path to the docker-compose program is
    // read from the DOCKER_COMPOSE_LOCATION environment variable, which is set in the gradle.yml
    // file inside the performance-analyzer repository.
    String dockerCompose = System.getenv("DOCKER_COMPOSE_LOCATION") == null ?
            "docker-compose" : System.getenv("DOCKER_COMPOSE_LOCATION")
    String[] commandArray = [dockerCompose, '-f', 'docker-compose.yml', '-f', 'docker-compose.hostports.yml',
                             '-f', 'docker-compose.cluster.yml', 'up', 'opensearch1', 'opensearch2']
    String[] env = ['DATA_VOLUME1=opensearchdata1', 'DATA_VOLUME2=opensearchdata2']
    runCommand(commandArray, env, file(executionPath), true)
}

def getHttpResponse(server, timeoutSeconds) {
    URL url = new URL(server)
    long timeout = timeoutSeconds * 1000
    long increments = 1000
    long passed = 0
    println 'waiting for ' + server
    while (passed < timeout) {
        try {
            HttpURLConnection con = (HttpURLConnection) url.openConnection()
            con.setRequestMethod("GET")
            InputStreamReader isr = new InputStreamReader(con.getInputStream())
            BufferedReader bufR = new BufferedReader(isr)
            String inputLine
            StringBuffer content = new StringBuffer()
            while ((inputLine = bufR.readLine()) != null) {
                content.append(inputLine)
            }
            println(content.toString())
            bufR.close()
            break
        } catch (IOException ignored) {
            Thread.sleep(increments)
            passed += increments
        }
    }
    if (passed == timeout) {
        throw new GradleException(String.format("ERROR: %s not ready.", server))
    }
}

/**
 * enableComponent is used to enable PA or RCA based on the given parameters.
 * @param endpoint the endpoint to make the request against, e.g. localhost:9200/pa/rca/example
 * @param desiredState the cluster state to wait for. The set of enabled components is
 *        represented as an n-bit binary number: the LSB represents the state of PA and the
 *        LSB+1 bit represents RCA. 0x03 (00000011 in binary) is therefore a state where both
 *        PA and RCA are enabled.
 * @param timeoutSeconds we will attempt to enable the component for this many seconds before
 *        giving up and throwing a GradleException
 */
def enableComponent(endpoint, desiredState, timeoutSeconds) {
    long timeout = timeoutSeconds * 1000
    long increments = 1000
    long passed = 0
    println 'waiting for ' + endpoint
    while (passed < timeout) {
        try {
            def p = ['curl', endpoint, '-H', 'Content-Type: application/json', '-d', '{"enabled": true}'].execute()
            def json = new JsonSlurper().parseText(p.text)
            if (json.get("currentPerformanceAnalyzerClusterState").equals(desiredState) ||
                    // desiredState+2 means that the "next" significant component is enabled
                    json.get("currentPerformanceAnalyzerClusterState").equals(desiredState + 2)) {
                break
            }
        } catch (Exception ignored) {
        }
        Thread.sleep(increments)
        passed += increments
    }
    if (passed == timeout) {
        throw new GradleException(String.format("ERROR: %s not ready.", endpoint))
    }
}

Boolean openSearchIsUp

def openSearchUpChecker = {
    String server = "http://localhost:9200"
    int timeoutSeconds = 2 * 60
    getHttpResponse(server, timeoutSeconds)
    openSearchIsUp = true
}

// Attempts to enable PA for up to 2 minutes. Returns when PA is successfully enabled or
// throws an Exception if the timeout is exceeded
def paUpChecker = {
    String server = "localhost:9200/_plugins/_performanceanalyzer/cluster/config"
    int timeoutSeconds = 2 * 60
    enableComponent(server, 1, timeoutSeconds)
}
// Attempts to enable RCA for up to 2 minutes. Returns when RCA is successfully enabled or
// throws an Exception if the timeout is exceeded
def rcaUpChecker = {
    String server = "localhost:9200/_plugins/_performanceanalyzer/rca/cluster/config"
    int timeoutSeconds = 2 * 60
    enableComponent(server, 3, timeoutSeconds)
}
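// Sketch only (not part of the original build; the closure name is an assumption): decodes the
// cluster-state bitmask that enableComponent, paUpChecker and rcaUpChecker above compare against,
// assuming bit 0 = PA and bit 1 = RCA as described in the enableComponent docs.
// e.g. describeClusterState(1) -> "PA=enabled, RCA=disabled"; describeClusterState(3) -> "PA=enabled, RCA=enabled"
def describeClusterState = { int state ->
    "PA=${(state & 1) != 0 ? 'enabled' : 'disabled'}, RCA=${(state & 2) != 0 ? 'enabled' : 'disabled'}"
}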
Thread openSearchRunnerThread

def runInThread(cl, input) {
    return Thread.start {
        cl(input)
    }
}

task waitForOpenSearch {
    dependsOn(buildDocker)
    doLast {
        openSearchRunnerThread = runInThread(runDockerCompose, dockerArtifactsDir)
        openSearchUpChecker()
    }
}

String perfTopDir
String perfTopZip
String perfTopBin

task downloadPerfTop(type: Download) {
    String zip = 'perf-top-1.9.0.0-MACOS.zip'
    perfTopDir = Paths.get(dockerBuildDir, 'perftop').toString()
    mkdir perfTopDir
    src 'https://d3g5vo6xdbdb9a.cloudfront.net/downloads/perftop/' + zip
    dest perfTopDir
    overwrite false
    perfTopZip = Paths.get(perfTopDir, zip).toString()
    File f = file(perfTopZip)
    enabled = !f.exists() // download only once
}

task unzipPerfTop(type: Copy) {
    dependsOn(downloadPerfTop)
    perfTopBin = Paths.get(perfTopDir, 'perf-top-macos').toString()
    def zipFile = file(perfTopZip)
    def outputDir = file(perfTopDir)
    from zipTree(zipFile)
    into outputDir
    enabled = !file(perfTopBin).exists()
}

// Attempts to enable PA for up to 2 minutes. Returns when PA is successfully enabled or
// throws an Exception if the timeout is exceeded
task enablePa() {
    dependsOn(waitForOpenSearch)
    doLast {
        paUpChecker()
    }
}

// Attempts to enable RCA for up to 2 minutes. Returns when RCA is successfully enabled or
// throws an Exception if the timeout is exceeded
task enableRca() {
    dependsOn(enablePa)
    doLast {
        rcaUpChecker()
    }
}

task runPerftop {
    dependsOn(enableRca, unzipPerfTop)
    doLast {
        println '============================================================================'
        println 'invoke perftop as: '
        println String.format("%s --dashboard %s", Paths.get(perfTopBin).toString(), 'ClusterOverview')
        println '============================================================================'
    }
}

task generateLoad {
    dependsOn(enableRca, runPerftop)
    doLast {
        String[] deleteIndex = ['curl', '-X', 'DELETE', 'localhost:9200/accounts?pretty']
        String[] bulk = ['curl', '-s', '-H', "Content-Type: application/x-ndjson", '-XPOST',
                         'localhost:9200/_bulk', '--data-binary', '@accounts.json']
        String[] search = ['curl', '-X', 'GET', 'localhost:9200/_search?pretty', '-H', 'Content-Type:application/json',
                           '-d', '{"query": {"range": {"balance": {"gte": 100, "lte": 100000}}}}']
        String wdir = Paths.get(getProjectDir().getAbsolutePath(), 'src', 'test', 'resources')
                .toString()
        while (true) {
            // runCommand(deleteIndex, null, wdir, false)
            //println("starting indexing: " + System.currentTimeMillis())
            runCommand(bulk, null, wdir, false)
            //println("completed indexing: " + System.currentTimeMillis())
            Thread.sleep(3000)
            //println("starting search" + System.currentTimeMillis())
            runCommand(search, null, wdir, false)
            Thread.sleep(1000)
            //println("completed search" + System.currentTimeMillis())
            runCommand(deleteIndex, null, wdir, false)
            Thread.sleep(1000)
        }
    }
}

task runRally {
    dependsOn(enableRca, runPerftop)
    doLast {
        String rallyTrack = 'pmc'
        String[] opensearchrally = ['docker', 'run', '--network=host', 'elastic/rally', '--track=' + rallyTrack,
                                    '--test-mode', '--pipeline=benchmark-only', '--target-hosts=localhost:9200']
        String wdir = Paths.get(getProjectDir().getAbsolutePath(), 'src', 'test', 'resources')
                .toString()
        while (true) {
            runCommand(opensearchrally, null, wdir, true)
            println('Get all RCAs using:')
            println('curl localhost:9600/_plugins/_performanceanalyzer/rca?all | python -m json.tool')
            Thread.sleep(1000)
        }
    }
}

// updateVersion: task to auto-increment to the next development iteration
task updateVersion {
    onlyIf { System.getProperty('newVersion') }
    doLast {
        ext.newVersion = System.getProperty('newVersion')
        println "Setting version to ${newVersion}."
        // String tokenization to support -SNAPSHOT
        ant.replaceregexp(match: opensearch_version.tokenize('-')[0], replace: newVersion.tokenize('-')[0], flags: 'g', byline: true) {
            fileset(dir: projectDir) {
                // Include the files that need to be updated with the new version
                include(name: "INSTALL.md")
                include(name: "docker/Dockerfile")
                include(name: "src/main/resources/plugin-descriptor.properties")
            }
        }
        ant.replaceregexp(file: 'build.gradle',
                match: '"opensearch.version", "\\d.*"',
                replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"',
                flags: 'g', byline: true)
    }
}
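// Example invocation of the updateVersion task above (the target version shown is illustrative):
//   ./gradlew updateVersion -DnewVersion=3.1.0-SNAPSHOT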