Commit 2e8b4f50 authored by Ben Cooksley

Implement new DSL which will create the Build Dependency Jobs.

Also, fix a bug which meant package names weren't being namespaced by branch group
parent 69b4f21f
kde-build-metadata
repo-metadata
gathered-jobs.json
gathered-structure.json
helpers/helperslib/__pycache__/
@@ -22,7 +22,7 @@ knownJobs.each {
|${pipelineTemplate}""".stripMargin()
// Actually create the job now
// Actually create the job now
pipelineJob( jobName ) {
definition {
cps {
......
// Read the contents of the gathered-structure.json file a step created for us previously
def structureToParse = readFileFromWorkspace('gathered-structure.json')
def knownStructure = new groovy.json.JsonSlurper().parseText( structureToParse )
// Iterate over all of the known combinations and create the necessary Dependency Build jobs
knownStructure.combinations.each {
    // Create our job name
    def jobName = "${it.product} Dependency Build ${it.branchGroup} ${it.platform}"
    // Read in the necessary Pipeline template
    def pipelineTemplate = readFileFromWorkspace("pipeline-templates/dependency-build/${it.platform}.template")
    // Now we can construct our Pipeline script
    def pipelineScript = """
        |def productName = "${it.product}"
        |def branchGroup = "${it.branchGroup}"
        |def currentPlatform = "${it.platform}"
        |def ciEnvironment = "${knownStructure.environment}"
        |${pipelineTemplate}""".stripMargin()
    // Actually create the job now
    pipelineJob( jobName ) {
        definition {
            cps {
                script( pipelineScript )
                sandbox()
            }
        }
    }
}
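For illustration only, a short Python sketch of the job name the loop above produces for a single combination; the product, branch group and platform values here are hypothetical stand-ins for entries read from gathered-structure.json:

# Hypothetical combination entry; real values are read from gathered-structure.json
combination = { 'product': 'Frameworks', 'branchGroup': 'kf5-qt5', 'platform': 'SUSEQt5.9' }
# Mirrors the jobName template used by the DSL above
jobName = "{product} Dependency Build {branchGroup} {platform}".format( **combination )
print( jobName )  # Frameworks Dependency Build kf5-qt5 SUSEQt5.9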
@@ -37,6 +37,15 @@ branchResolver.loadProjectsToBranchesData( lmsLocation )
# 6) The description for the resulting job
jobsGathered = []
# We also output a second set of data, which contains the structure (Products and the Platforms they are built on)
# This is used to produce some of the supporting / infrastructure jobs (for building all the dependencies of a Product for instance)
# As well as to set up the views in Jenkins
jobsStructure = {
    'products': list( productHandler.knownProducts() ),
    'environment': arguments.environment,
    'combinations': []
}
# With everything now ready, we determine what products we have and start going from there
for product in productHandler.knownProducts():
    # Determine which projects are built on this platform
@@ -46,6 +55,19 @@ for product in productHandler.knownProducts():
    # As well as the platforms we are interested in
    platformsToBuild = productHandler.platformsFor( product )
    # Add each platform and branch group combo as a combination for this product
    for platform in platformsToBuild:
        # Make sure we go over branch groups as well
        for branchGroup in relevantBranchGroups:
            # Produce the entry
            combinationEntry = {
                'product': product,
                'platform': platform,
                'branchGroup': branchGroup
            }
            # Store it
            jobsStructure['combinations'].append( combinationEntry )
    # Determine what jobs this project needs
    for project in builtProjects:
        # Is it a possible item to build?
@@ -84,5 +106,11 @@ filePath = os.path.join( CommonUtils.scriptsBaseDirectory(), 'gathered-jobs.json
with open(filePath, 'w') as jobsFile:
    json.dump( jobsGathered, jobsFile )
# Also output the structure data we've gathered in JSON to disk
# This will subsequently be read in by a Jenkins DSL script
filePath = os.path.join( CommonUtils.scriptsBaseDirectory(), 'gathered-structure.json' )
with open(filePath, 'w') as structureFile:
    json.dump( jobsStructure, structureFile )
# All done!
sys.exit(0)
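For reference, a minimal sketch of what the gathered-structure.json written above might contain; the product, platform, branch group and environment names below are purely illustrative:

# Hypothetical contents of gathered-structure.json (all names are illustrative)
exampleStructure = {
    'products': [ 'Frameworks' ],
    'environment': 'production',
    'combinations': [
        { 'product': 'Frameworks', 'platform': 'SUSEQt5.9', 'branchGroup': 'kf5-qt5' }
    ]
}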
@@ -9,7 +9,7 @@ import urllib, urllib.request
from helperslib import CommonUtils
def nameForProject( product, project, branchGroup ):
-    return "{0}-{1}".format( product, project, branchGroup )
+    return "{0}-{1}-{2}".format( product, project, branchGroup )
class Archive(object):
# Sets up an archive for use, to allow for retrieving and uploading new resources to the archive
......
// Set some properties of the job up
// We use this to ensure we don't do more than one build at once and don't keep logs forever
properties([
    buildDiscarder( logRotator(numToKeepStr: '25') ),
    disableConcurrentBuilds(),
    pipelineTriggers([])
])
// Request a node to be allocated to us
node( currentPlatform ) {
    // We want Timestamps on everything
    timestamps {
        // First Thing: Checkout Sources
        stage('Checkout Sources') {
            // Our CI scripts
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/scratch/bcooksley/ci-tooling']]
            ]
            // Projects metadata and next generation dependency metadata
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']]
            ]
            // Dependency Metadata
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']]
            ]
        }
        // Now we run the Product Dependency Build Process
        stage('Build Product Dependencies') {
            // This script will do the following:
            // 1) Determine what is in this Product
            // 2) Determine what those repositories depend on
            // 3) Determine what dependencies are outside of this Product
            // 4) Sort those dependencies into an appropriate order to build them
            // 5) Checkout, Configure, Compile, Install and Capture the Installation of each of those dependencies in turn
            // We can't do this as Pipeline steps unfortunately (at least not easily)
            // Tests and other quality checks won't be run during this process
            // The results of this process are only intended to be used as part of the base of this Product, so don't need testing
            sh "python3 -u ci-tooling/helpers/build-product-dependencies.py --product ${productName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$WORKSPACE/install-prefix/'"
        }
    }
}
// Set some properties of the job up
// We use this to ensure we don't do more than one build at once and don't keep logs forever
properties([
    buildDiscarder( logRotator(numToKeepStr: '25') ),
    disableConcurrentBuilds(),
    pipelineTriggers([])
])
// Request a node to be allocated to us
node( currentPlatform ) {
    // We want Timestamps on everything
    timestamps {
        // First Thing: Checkout Sources
        stage('Checkout Sources') {
            // Make sure we have a clean slate to begin with
            deleteDir()
            // Our CI scripts
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/scratch/bcooksley/ci-tooling']]
            ]
            // Projects metadata and next generation dependency metadata
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']]
            ]
            // Dependency Metadata
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']]
            ]
        }
        // Now we can build the dependencies of this product
        stage('Build Product Dependencies') {
            // This script will do the following:
            // 1) Determine what is in this Product
            // 2) Determine what those repositories depend on
            // 3) Determine what dependencies are outside of this Product
            // 4) Sort those dependencies into an appropriate order to build them
            // 5) Checkout, Configure, Compile, Install and Capture the Installation of each of those dependencies in turn
            // We can't do this as Pipeline steps unfortunately (at least not easily)
            // Tests and other quality checks won't be run during this process
            // The results of this process are only intended to be used as part of the base of this Product, so don't need testing
            bat """
                call "C:/Program Files (x86)/Microsoft Visual Studio/2017/Enterprise/VC/Auxiliary/Build/vcvars64.bat"
                set PATH=C:/Qt/5.7/msvc2015_64/bin;%PATH%
                python -u ci-tooling/helpers/build-product-dependencies.py --product ${productName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo "%WORKSPACE%/install-prefix/"
            """
        }
    }
}
// Set some properties of the job up
// We use this to ensure we don't do more than one build at once and don't keep logs forever
properties([
    buildDiscarder( logRotator(numToKeepStr: '25') ),
    disableConcurrentBuilds(),
    pipelineTriggers([])
])
// Request a node to be allocated to us
node( currentPlatform ) {
    // We want Timestamps on everything
    timestamps {
        // First Thing: Checkout Sources
        stage('Checkout Sources') {
            // Our CI scripts
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/scratch/bcooksley/ci-tooling']]
            ]
            // Projects metadata and next generation dependency metadata
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']]
            ]
            // Dependency Metadata
            checkout changelog: false, poll: false, scm: [
                $class: 'GitSCM',
                branches: [[name: 'master']],
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']],
                userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']]
            ]
        }
        // Now we run the Product Dependency Build Process
        stage('Build Product Dependencies') {
            // This script will do the following:
            // 1) Determine what is in this Product
            // 2) Determine what those repositories depend on
            // 3) Determine what dependencies are outside of this Product
            // 4) Sort those dependencies into an appropriate order to build them
            // 5) Checkout, Configure, Compile, Install and Capture the Installation of each of those dependencies in turn
            // We can't do this as Pipeline steps unfortunately (at least not easily)
            // Tests and other quality checks won't be run during this process
            // The results of this process are only intended to be used as part of the base of this Product, so don't need testing
            sh "python3 -u ci-tooling/helpers/build-product-dependencies.py --product ${productName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
        }
    }
}