Commit 50dccfae authored by Ben Cooksley

Allow for custom build jobs to be provisioned within the CI system.

This should only really be used for specialised jobs which fall outside the scope of the usual fetch sources/fetch dependencies/configure/build/install/run tests logic.
Examples of this include scripted software which is used straight from its repository and therefore has no configure, build or install steps to perform (such as Craft).
parent 45275621
// Request a node to be allocated to us
node( "SUSEQt5.9" ) {
    // We want Timestamps on everything
    timestamps {
        // We want to catch any errors that occur to allow us to send out notifications (ie. emails) if needed
        catchError {
            // First Thing: Checkout Sources
            stage('Checkout Sources') {
                // Craft itself
                checkout changelog: true, poll: true, scm: [
                    $class: 'GitSCM',
                    branches: [[name: 'master']],
                    extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'craft/']],
                    userRemoteConfigs: [[url: "https://anongit.kde.org/craft"]]
                ]
                // Craftmaster
                checkout changelog: true, poll: true, scm: [
                    $class: 'GitSCM',
                    branches: [[name: 'master']],
                    extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'craftmaster/']],
                    userRemoteConfigs: [[url: "https://anongit.kde.org/craftmaster"]]
                ]
                // Craft Blueprints for KDE
                checkout changelog: true, poll: true, scm: [
                    $class: 'GitSCM',
                    branches: [[name: 'master']],
                    extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'craft-blueprints-kde/']],
                    userRemoteConfigs: [[url: "https://anongit.kde.org/craft-blueprints-kde"]]
                ]
            }
            // Use Craftmaster to get Craft ready to go
            stage('Installing Craft') {
                // Install Craft and its core dependencies...
                sh """
                    python3 craftmaster/CraftMaster.py --config config/CraftBinaryCache.ini -c -i craft
                """
            }
            // Now run the Craft tests
            stage('Running Tests') {
                // Ask Craftmaster to do this for us too
                sh """
                    python3 craftmaster/CraftMaster.py --config config/CraftBinaryCache.ini -c --test craft-core
                """
            }
        }
    }
}
[
    {"name": "Extragear_craft_master"}
]
// Read the contents of the known-jobs.json file, which lists the custom jobs we need to provision
def jobsToParse = readFileFromWorkspace('custom-jobs/known-jobs.json')
def knownJobs = new groovy.json.JsonSlurper().parseText( jobsToParse )
// Iterate over all of the known jobs and create them!
knownJobs.each {
    // Save our job name for later
    def jobName = "${it.name}"
    // Read in the necessary Pipeline script
    def pipelineScript = readFileFromWorkspace("custom-jobs/${it.name}.pipeline")
    // Actually create the job now
    pipelineJob( jobName ) {
        properties {
            // We don't want to keep build results forever
            // We'll keep the last 25 builds and discard everything else
            buildDiscarder {
                strategy {
                    logRotator {
                        numToKeepStr('25')
                        daysToKeepStr('')
                        artifactDaysToKeepStr('')
                        artifactNumToKeepStr('')
                    }
                }
            }
            // We don't want to be building the same project more than once
            // This is to prevent one project hogging resources
            // It also has a practical benefit: otherwise an older build could finish later and upload outdated build results
            disableConcurrentBuilds()
        }
        triggers {
            // We want to enable SCM Polling so that git.kde.org can tell Jenkins to look for changes
            // At the same time, we don't want Jenkins scanning for changes, so set the Polling specification to be empty so nothing gets scheduled
            pollSCM {
                scmpoll_spec('')
                ignorePostCommitHooks(false)
            }
        }
        // This is where the Pipeline script actually happens :)
        definition {
            cps {
                script( pipelineScript )
                sandbox()
            }
        }
    }
}
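
For illustration, registering an additional custom job under this scheme would mean adding an entry such as {"name": "Extragear_example_master"} to custom-jobs/known-jobs.json and committing a matching custom-jobs/Extragear_example_master.pipeline. The job name, repository URL and script below are hypothetical placeholders, not part of this commit; a minimal sketch following the same node/timestamps/catchError pattern as the Craft job above might look like:

// Hypothetical custom job - the repository URL and run-checks.sh script are placeholders for illustration only
// Request a node to be allocated to us (reusing the same label as the Craft job above)
node( "SUSEQt5.9" ) {
    // We want Timestamps on everything
    timestamps {
        // Catch any errors so notifications can be sent out if needed
        catchError {
            stage('Checkout Sources') {
                // Fetch the repository this job is built around
                checkout changelog: true, poll: true, scm: [
                    $class: 'GitSCM',
                    branches: [[name: 'master']],
                    userRemoteConfigs: [[url: "https://anongit.kde.org/some-scripted-project"]]
                ]
            }
            stage('Running Checks') {
                // Scripted software is used straight from its repository, so there is nothing to
                // configure, build or install - just run whatever checks the project provides
                sh """
                    ./run-checks.sh
                """
            }
        }
    }
}

Because the Job DSL above derives the Jenkins job and the Pipeline script path purely from the name field, no other changes should be needed for the seed job to pick the new job up on its next run.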