lib = library(identifier: 'jenkins@1.0.4', retriever: modernSCM([
    $class: 'GitSCMSource',
    remote: 'https://github.com/opensearch-project/opensearch-build-libraries.git',
]))

pipeline {
    agent {
        docker {
            label 'Jenkins-Agent-AL2-X64-C54xlarge-Docker-Host'
            image 'opensearchstaging/ci-runner:ci-runner-rockylinux8-opensearch-build-v3'
            registryUrl 'https://public.ecr.aws/'
            alwaysPull true
        }
    }
    parameters {
        string(
            name: 'URLs',
            trim: true,
            description: 'Comma-separated list of URLs of the artifacts to be signed.'
        )
        string(
            name: 'S3_FILE_UPLOAD_PATH',
            trim: true,
            description: 'Path on S3 to upload the artifacts and signatures to. E.g.: dummy_project/1.0'
        )
        choice(
            choices: ['linux', 'windows'],
            name: 'DISTRIBUTION_PLATFORM',
            description: 'What platform is this distribution build for?'
        )
        choice(
            choices: ['.sig', '.rpm'],
            name: 'SIGNATURE_TYPE',
            description: 'What is the signature file type? Required only for linux signing.'
        )
    }
    environment {
        ARTIFACT_BUCKET_NAME = credentials('jenkins-artifact-bucket-name')
    }
    stages {
        stage('sign') {
            steps {
                script {
                    // abort early if the required parameters are not set
                    if (URLs == '' || S3_FILE_UPLOAD_PATH == '') {
                        currentBuild.result = 'ABORTED'
                        error('URLs or S3_FILE_UPLOAD_PATH params are not set')
                    }

                    // strip trailing and leading slashes from the upload path
                    S3_FILE_UPLOAD_PATH = S3_FILE_UPLOAD_PATH.replaceAll('/$', "")
                    S3_FILE_UPLOAD_PATH = S3_FILE_UPLOAD_PATH.replaceAll('^/+', "")

                    downloadedFiles = downloadArtifactsFromUrls()

                    // sign everything downloaded into the workspace artifacts directory
                    signArtifacts(
                        artifactPath: "$WORKSPACE/artifacts",
                        sigtype: SIGNATURE_TYPE,
                        platform: DISTRIBUTION_PLATFORM
                    )

                    // collect the artifact and signature file names used to print the staging download urls
                    filenamesForUrls = []
                    println("Note: only supported file types will be signed")
                    for (filename in downloadedFiles) {
                        if (DISTRIBUTION_PLATFORM == 'windows') {
                            filenamesForUrls.add(filename)
                            filenamesForUrls.add('signed/' + filename)
                        } else if (SIGNATURE_TYPE.equals('.sig')) {
                            filenamesForUrls.add(filename)
                            filenamesForUrls.add(filename + SIGNATURE_TYPE)
                        } else {
                            filenamesForUrls.add(filename)
                        }
                    }

                    finalUploadPath = ([
                        "${JOB_NAME}",
                        "${S3_FILE_UPLOAD_PATH}",
                        "${BUILD_NUMBER}",
                        "dist",
                        "signed"
                    ].join('/'))

                    // uploading artifacts with signatures on s3
                    uploadToS3(
                        sourcePath: "$WORKSPACE/artifacts",
                        bucket: "${ARTIFACT_BUCKET_NAME}",
                        path: finalUploadPath
                    )

                    printArtifactDownloadUrlsForStaging(
                        artifactFileNames: filenamesForUrls,
                        uploadPath: finalUploadPath
                    )
                }
            }
            post() {
                always {
                    script {
                        postCleanup()
                    }
                }
            }
        }
    }
}

// downloads each URL into $WORKSPACE/artifacts, failing on 4xx/5xx responses,
// and returns the list of downloaded file names
List downloadArtifactsFromUrls() {
    listOfUrls = URLs.split(",")
    def downloadedFiles = []
    sh "mkdir ${WORKSPACE}/artifacts"
    for (url in listOfUrls) {
        trimmedUrl = url.trim()
        filename = trimmedUrl.substring(trimmedUrl.lastIndexOf('/') + 1)
        downloadedFiles.add(filename)
        sh """
            set +x
            set -e
            STATUS_CODES=`curl -IL ${trimmedUrl} 2>&1 | grep -Eo "HTTP/[12] [0-9]+" | grep -Eo '[0-9][0-9][0-9]' | sort | uniq`
            for resp_code in \$STATUS_CODES; do
                if echo \$resp_code | grep -qEo '[45][0-9][0-9]'; then
                    echo "Error url with error response code \$resp_code, exit 1"
                    exit 1
                fi
            done
            echo "No error detected, valid response \$resp_code, downloading ${trimmedUrl}"
            curl -SL ${trimmedUrl} -o ${WORKSPACE}/artifacts/${filename}
        """
    }
    return downloadedFiles
}