Members of the KDE Community are recommended to subscribe to the kde-community mailing list at https://mail.kde.org/mailman/listinfo/kde-community to allow them to participate in important discussions and receive other important announcements

Commit a01371e7 authored by Ben Cooksley's avatar Ben Cooksley

Reorder the elements to keep Jenkins happy

parent e7fa054b
......@@ -144,42 +144,10 @@ knownPrebuiltJobs.each {
}
}
// We also want to ensure a cleanup job for the nodes is created
// Read in the necessary Pipeline template that the cleanup job will execute
def pipelineScript = readFileFromWorkspace("craft-cache/cleanup-nodes.pipeline")
// Actually create the job now
pipelineJob( "Craft_Builder_Cleanup" ) {
properties {
// We don't want to keep build results forever
// We'll set it to keep the last 5 builds and discard everything else
// (the empty-string settings leave the day-based and artifact-based limits unset)
buildDiscarder {
strategy {
logRotator {
numToKeepStr("5")
daysToKeepStr('')
artifactDaysToKeepStr('')
artifactNumToKeepStr('')
}
}
}
// We don't want to be building the same project more than once
// This is to prevent one project hogging resources
// And also has a practical component as otherwise an older build could finish afterwards and upload old build results
disableConcurrentBuilds()
}
// This is where the Pipeline script actually happens :)
// The template read above is executed as a sandboxed CPS Pipeline script
definition {
cps {
script( pipelineScript )
sandbox()
}
}
}
// gather src files
// To help speed up builds and make them more reliable (as source archives have a habit of being hosted on unreliable systems, or just disappearing completely) we cache them on our infrastructure
// Setup those jobs too
// Read the generated job definitions (JSON text) from the workspace
def srcJobsToParse = readFileFromWorkspace('craft-cache/experimental-pipelines/gathered-jobs.json')
// NOTE(review): the same JSON text is parsed twice into two independent object graphs;
// presumably so the two lists can be consumed/mutated separately — confirm before deduplicating
def srcExperimentalJobs = new groovy.json.JsonSlurper().parseText( srcJobsToParse )
def knownSrcJobs = new groovy.json.JsonSlurper().parseText( srcJobsToParse )
knownSrcJobs.each {
// Create our job name
......@@ -225,3 +193,36 @@ knownSrcJobs.each {
}
}
// We also want to ensure a cleanup job for the nodes is created
// Read in the necessary Pipeline template that the cleanup job will execute
def pipelineScript = readFileFromWorkspace("craft-cache/cleanup-nodes.pipeline")
// Actually create the job now
pipelineJob( "Craft_Builder_Cleanup" ) {
properties {
// We don't want to keep build results forever
// We'll set it to keep the last 5 builds and discard everything else
// (the empty-string settings leave the day-based and artifact-based limits unset)
buildDiscarder {
strategy {
logRotator {
numToKeepStr("5")
daysToKeepStr('')
artifactDaysToKeepStr('')
artifactNumToKeepStr('')
}
}
}
// We don't want to be building the same project more than once
// This is to prevent one project hogging resources
// And also has a practical component as otherwise an older build could finish afterwards and upload old build results
disableConcurrentBuilds()
}
// This is where the Pipeline script actually happens :)
// The template read above is executed as a sandboxed CPS Pipeline script
definition {
cps {
script( pipelineScript )
sandbox()
}
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment