Dockerfile (6 changes: 4 additions & 2 deletions)
@@ -32,8 +32,10 @@ FROM eclipse-temurin:11-jre-focal

 ARG REPO_DIR
 
-ARG JAR_FILE=target/data-export-exec.jar
+ARG EXEC_JAR_FILE=target/data-export-exec.jar
+ARG VERIFIER_JAR_FILE=target/data-export-verifier.jar
 
 WORKDIR ${REPO_DIR}
 
-COPY --from=build-jar ${REPO_DIR}/${JAR_FILE} ./target/
+COPY --from=build-jar ${REPO_DIR}/${EXEC_JAR_FILE} ./target/
+COPY --from=build-jar ${REPO_DIR}/${VERIFIER_JAR_FILE} ./target/
Jenkinsfile (119 changes: 83 additions & 36 deletions)
@@ -8,25 +8,25 @@ def utils = new Utilities()

 pipeline{
     agent any
 
     // Set output folder that will contain the files generated by this step.
-    environment {
-        OUTPUT_FOLDER = "export"
-    }
+    environment {
+        OUTPUT_FOLDER = "export"
+        ECR_URL = 'public.ecr.aws/reactome/data-export'
+        CONT_NAME = 'data_export_container'
+        CONT_ROOT = '/opt/data-export'
+    }
 
     stages{
         // This stage checks that upstream project 'DiagramConverter' was run successfully.
         stage('Check DiagramConverter build succeeded'){
             steps{
                 script{
-                    utils.checkUpstreamBuildsSucceeded("File-Generation/job/DiagramConverter/")
+                    utils.checkUpstreamBuildsSucceeded("File-Generation/job/DiagramConverter/")
                 }
             }
         }
 
         stage('Setup: Pull and clean docker environment'){
             steps{
                 sh "docker pull ${ECR_URL}:latest"
@@ -37,12 +37,13 @@ pipeline{
"""
}
}

// Execute the jar file, producing data-export files.
stage('Main: Run Data-Export'){
steps{
script{
def releaseVersion = utils.getReleaseVersion()

sh "mkdir -p ${env.OUTPUT_FOLDER}"
sh "rm -rf ${env.OUTPUT_FOLDER}/*"
withCredentials([usernamePassword(credentialsId: 'neo4jUsernamePassword', passwordVariable: 'pass', usernameVariable: 'user')]){
@@ -52,9 +53,15 @@ pipeline{
sh "sudo service tomcat9 stop"

sh """\
docker run -v \$(pwd)/${env.OUTPUT_FOLDER}:${CONT_ROOT}/${env.OUTPUT_FOLDER} --net=host --name ${CONT_NAME} ${ECR_URL}:latest /bin/bash -c 'java -Xmx${env.JAVA_MEM_MAX}m -jar target/data-export-exec.jar --user $user --password $pass --output ./${env.OUTPUT_FOLDER} --verbose'
"""
sh "sudo chown jenkins:jenkins ${env.OUTPUT_FOLDER}"
docker run \\
-v \$(pwd)/${env.OUTPUT_FOLDER}:${CONT_ROOT}/${env.OUTPUT_FOLDER} \\
--net=host \\
--name ${CONT_NAME} \\
${ECR_URL}:latest \\
/bin/bash -c 'java -Xmx${env.JAVA_MEM_MAX}m -jar target/data-export-exec.jar --user $user --password $pass --output ./${env.OUTPUT_FOLDER} --verbose'
"""
sh "sudo chown jenkins:jenkins ${env.OUTPUT_FOLDER}"

sh "sudo service mysql start"
sh "sudo service tomcat9 start"
// Archive the files produced by the step for S3.
@@ -63,46 +70,86 @@
                 }
             }
         }

+        // Execute the verifier jar file, checking for the existence and proper file sizes of the data-export output
+        stage('Post: Verify DataExport ran correctly') {
+            steps {
+                script {
+                    def releaseVersion = utils.getReleaseVersion()
+
+                    sh """
+                        docker run \\
+                            --rm \\
+                            -v ${pwd()}/${env.OUTPUT_FOLDER}:${CONT_ROOT}/${env.OUTPUT_FOLDER}/ \\
+                            -v \$HOME/.aws:/root/.aws:ro \\
+                            -e AWS_REGION=us-east-1 \\
+                            --net=host \\
+                            --name ${CONT_NAME}_verifier \\
+                            ${ECR_URL}:latest \\
+                            /bin/bash -c "java -jar target/data-export-verifier.jar --releaseNumber ${releaseVersion} --output ${CONT_ROOT}/${env.OUTPUT_FOLDER}"
+                    """
+                }
+            }
+        }
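The verifier jar itself is not part of this diff; the stage above only shows how it is invoked. Together with the baseline list uploaded in the next stage, a check of roughly the following shape is implied. A minimal standalone Groovy sketch, assuming a tab-separated size/path baseline file and an illustrative 10% size tolerance; the jar's actual rules, flags, and baseline handling may differ:

// Hypothetical sketch of the existence-and-size check the verifier jar appears
// to perform; the tolerance and baseline location here are assumptions.
def exportDir = new File('export')
def tolerance = 0.10  // assumed: allow 10% size drift between releases

def failures = []
new File('files_and_sizes.txt').eachLine { line ->
    if (!line.trim()) return  // skip blank lines
    def (expectedSize, relativePath) = line.split('\t', 2)
    def f = new File(exportDir, relativePath)
    if (!f.exists()) {
        failures << "missing: ${relativePath}"
    } else if (Math.abs(f.length() - expectedSize.toLong()) > expectedSize.toLong() * tolerance) {
        failures << "size drift: ${relativePath} (${f.length()} vs ${expectedSize} bytes)"
    }
}

if (failures) {
    failures.each { println it }
    System.exit(1)  // fail the build the way a non-zero container exit code would
}
println 'All baseline files present and within size tolerance.'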

+        // Creates a list of files and their sizes to use for comparison baseline during next release
+        stage('Post: Create files and sizes list to upload for next release\'s verifier') {
+            steps {
+                script {
+                    def fileSizeList = "files_and_sizes.txt"
+                    def releaseVersion = utils.getReleaseVersion()
+
+                    sh "find ${env.OUTPUT_FOLDER} -type f -printf \"%s\t%P\n\" > ${fileSizeList}"
+                    sh "aws s3 --no-progress cp ${fileSizeList} s3://reactome/private/releases/${releaseVersion}/data_export/data/"
+                    sh "rm ${fileSizeList}"
+                }
+            }
+        }
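For context on the baseline format: in the find command above, %s prints each file's size in bytes and %P prints its path relative to the export folder, giving one tab-separated line per file. The uploaded files_and_sizes.txt therefore looks like this (file names and sizes are illustrative only):

1048576	UniProt2Reactome.txt
524288	ChEBI2Reactome.txt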

         // This stage outputs the difference in line counts for data-export files between releases.
         stage('Post: Compare Data-Export file line counts between releases') {
-            steps{
-                script{
-                    def releaseVersion = utils.getReleaseVersion()
-                    def previousReleaseVersion = utils.getPreviousReleaseVersion()
-                    def previousExportsArchive = "export-v${previousReleaseVersion}.tgz"
-                    def currentDir = pwd()
-
-                    sh "mkdir -p ${previousReleaseVersion}"
-
-                    // Download data-export files archive from previous release from S3.
-                    sh "aws s3 --no-progress cp s3://reactome/private/releases/${previousReleaseVersion}/data_export/data/${previousExportsArchive} ${previousReleaseVersion}/"
-                    dir("${previousReleaseVersion}"){
-                        sh "tar -xf ${previousExportsArchive}"
-                    }
-                    // Output line counts between files.
-                    utils.outputLineCountsOfFilesBetweenFolders("${env.OUTPUT_FOLDER}", "${previousReleaseVersion}/${env.OUTPUT_FOLDER}", "$currentDir")
-                    sh "rm -r ${previousReleaseVersion}*"
-                }
-            }
+            steps{
+                script{
+                    def releaseVersion = utils.getReleaseVersion()
+                    def previousReleaseVersion = utils.getPreviousReleaseVersion()
+                    def previousExportsArchive = "export-v${previousReleaseVersion}.tgz"
+                    def currentDir = pwd()
+
+                    sh "mkdir -p ${previousReleaseVersion}"
+
+                    // Download data-export files archive from previous release from S3.
+                    sh "aws s3 --no-progress cp s3://reactome/private/releases/${previousReleaseVersion}/data_export/data/${previousExportsArchive} ${previousReleaseVersion}/"
+                    dir("${previousReleaseVersion}"){
+                        sh "tar -xf ${previousExportsArchive}"
+                    }
+                    // Output line counts between files.
+                    utils.outputLineCountsOfFilesBetweenFolders("${env.OUTPUT_FOLDER}", "${previousReleaseVersion}/${env.OUTPUT_FOLDER}", "$currentDir")
+                    sh "rm -r ${previousReleaseVersion}*"
+                }
+            }
         }
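outputLineCountsOfFilesBetweenFolders lives in the shared Utilities library imported at the top of this Jenkinsfile, so its implementation is not visible in this diff. A standalone Groovy sketch of a comparison of that general shape, with hypothetical folder names; the helper's real output format may differ:

// Standalone sketch of a per-file line-count diff like the one the shared
// Utilities helper prints; folder names below are hypothetical examples.
def currentDir = new File('export')        // this release's output folder
def previousDir = new File('89/export')    // unpacked previous-release archive

currentDir.eachFile { f ->
    if (!f.isFile()) return
    def previous = new File(previousDir, f.name)
    def newCount = f.readLines().size()
    def oldCount = previous.exists() ? previous.readLines().size() : 0
    // e.g. "UniProt2Reactome.txt    901234 ->  912345 (+11111)"
    printf('%-45s %9d -> %9d (%+d)%n', f.name, oldCount, newCount, newCount - oldCount)
}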

         // Move all data-export files to the downloads folder. At time of writing, these files aren't gzipped.
         stage('Post: Move export files to download folder') {
-            steps{
-                script{
-                    def releaseVersion = utils.getReleaseVersion()
-                    def downloadPath = "${env.ABS_DOWNLOAD_PATH}/${releaseVersion}"
-                    sh "mv ${env.OUTPUT_FOLDER}/* ${downloadPath}/ "
-                }
-            }
+            steps{
+                script{
+                    def releaseVersion = utils.getReleaseVersion()
+                    def downloadPath = "${env.ABS_DOWNLOAD_PATH}/${releaseVersion}"
+
+                    sh "mv ${env.OUTPUT_FOLDER}/* ${downloadPath}/ "
+                }
+            }
         }

         // Archive everything on S3, and move the 'diagram' folder to the download/vXX folder.
         stage('Post: Archive Outputs'){
             steps{
                 script{
                     def releaseVersion = utils.getReleaseVersion()
-                    def dataFiles = ["export-v${releaseVersion}.tgz"]
+                    def dataFiles = ["export-v${releaseVersion}.tgz"]
                     def logFiles = []
                     def foldersToDelete = []
 
                     utils.cleanUpAndArchiveBuildFiles("data_export", dataFiles, logFiles, foldersToDelete)
                 }
             }
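cleanUpAndArchiveBuildFiles is likewise a shared Utilities helper not shown here. Given the dataFiles value passed in, it is presumably expected to produce export-v<releaseVersion>.tgz and upload it to the same S3 release area used by the earlier stages. A hedged Groovy sketch of that flow, with a hypothetical release number:

// Hypothetical sketch of the tar-and-upload flow cleanUpAndArchiveBuildFiles is
// presumably responsible for; the real helper also handles logs and folder
// deletion, and its actual steps are not visible in this diff.
def releaseVersion = '90'  // hypothetical release number
def archive = "export-v${releaseVersion}.tgz"

// Compress the export folder into the release archive.
['tar', '-czf', archive, 'export'].execute().waitFor()

// Upload to the release's data area on S3, then drop the local copy.
['aws', 's3', '--no-progress', 'cp', archive,
 "s3://reactome/private/releases/${releaseVersion}/data_export/data/"].execute().waitFor()
['rm', '-f', archive].execute().waitFor()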