Commit ecf7da98 authored by Ben Cooksley


Mostly built out now. Still needs prepare-dependencies.py to be finished, then we need to add the Docker template(s) and the DSL
client:
- downloadBaseUrl: "https://build-artifacts.kde.org/production/"
- uploadHostname: "build-artifacts.kde.org"
- uploadUsername: "productionclient"
- uploadDirectory: "/home/productionclient/uploads/"
- uploadTools: "/home/productionclient/ci-tooling/"
server:
- archiveDirectory: "/srv/production/"
cacheLocation:
- Windows: "D:\\Archives\\Production\\"
- XenialQt5.7: "/srv/archives/production/"
# The keys in this file match the Track Name within the Product
# Product and Repository are taken care of by task-specs/$PRODUCT/$REPOSITORY.yaml which just leaves Track...
# The track name for this exemplar is "Devel"
Devel:
# in-source-build: Controls whether builds will be performed in the root of the source tree instead of a sub-directory named build/ (an out-of-source build)
# Defaults to False (do an out-of-source build)
in-source-build: True
# detect-build-system: Controls whether the system will attempt to auto-detect the build system and configure it appropriately
# Defaults to True. Only CMake and Autotools are supported
detect-build-system: True
# cmake-options: Specify some additional CMake options to pass to CMake when configuring a CMake build
# Options specified here cannot override those set for all builds (either globally or for the platform on which the build is being performed)
# Has no default value
cmake-options: "-DEXAMPLE=Change"
# autotools-options: Specify additional options to be passed to ./configure when configuring an Autotools build system
# Options specified here cannot override those set for all builds (either globally or for the platform on which the build is being performed)
# Has no default value
autotools-options: '--enable-maintainer-mode'
# configure-commands: Specify configure commands which should be run to configure the project for compilation and installation
# The commands will be executed relative to the build directory (as controlled by in-source-build)
# The values specified here will be run after any auto-detected build system (CMake, Autotools) has been configured unless that has been disabled by setting detect-build-system to False
# A list of commands must be supplied, not a single value
# The following keys will be substituted into the commands when running them:
# {sources} : Directory which contains the sources for this project
# {installationPrefix}: The prefix where the project should be installed on running "make install"
# {maximumLoad} : The maximum system load value which should be reached during the build
# {cpuCount} : The number of CPU cores available for performing this build
configure-commands:
- "make prepare-to-build"
# do-appstream-check: Controls whether the Appstream validation tool is run over the installed files after the installation phase is completed
# Appstream validation will only be done on Linux, regardless of the value specified here
# Defaults to True
do-appstream-check: True
# run-tests: Controls whether the CI system will attempt to run tests for this build
# Tests will only be executed for CMake based projects, regardless of the value specified here
# Defaults to True
run-tests: True
# per-test-timeout: Controls the amount of time CTest is told to allow each test to take when running. Specified in seconds
# Defaults to 600 (10 minutes)
per-test-timeout: 1200
# setup-x-environment: Governs whether an instance of Xvfb will be launched to provide a graphical environment for X-dependent tests.
# X will never be set up on Windows or OSX systems, regardless of the value specified here
# Defaults to True
setup-x-environment: True
# launch-dbus-session: Governs whether a D-Bus Session Bus will be launched to provide a session bus for D-Bus dependent tests.
# D-Bus will never be launched on Windows or OSX systems, regardless of the value specified here
# Defaults to True
launch-dbus-session: True
# run-cppcheck: Governs whether the code quality tool Cppcheck is run over the source code
# Cppcheck validation will only be done on Linux, regardless of the value specified here
# Defaults to True
run-cppcheck: False
# cppcheck-arguments: Specifies additional arguments which should be passed to cppcheck when executing it
# Cannot be used to override any arguments specified globally or for the platform on which this build is being performed
# Has no default value and is not required
cppcheck-arguments: "-DSOMETHING_DEFINED"
# extract-lcov-results: Determines whether gcovr is run to extract code coverage metadata
# Defaults to True
extract-lcov-results: True
# lcov-extractor-arguments: Specifies additional arguments which should be passed to the lcov results extractor (gcovr) when running it
# Has no default value and is not required
lcov-extractor-arguments: '-e "autotests/.*"'
#!/usr/bin/python3
import os
import sys
import tarfile
import tempfile
import argparse
from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler, Packages
# Parse the command line arguments we've been given
parser = argparse.ArgumentParser(description='Utility to install a project, diverting the installation for later capture if requested.')
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--branchGroup', type=str, required=True)
parser.add_argument('--platform', type=str, required=True)
parser.add_argument('--environment', type=str, required=True)
parser.add_argument('--installedTo', type=str, required=True)
parser.add_argument('--divertedTo', type=str, required=True)
arguments = parser.parse_args()
# Create a temporary file, then open the file as a tar archive for writing, using xz compression to save space
# We don't want it to be deleted as storePackage will move the archive into its cache
archiveFile = tempfile.NamedTemporaryFile(delete=False)
archive = tarfile.open( fileobj=archiveFile, mode='w:xz' )
# Now determine the path we should be archiving
# Because we could potentially be running on Windows we have to ensure our second path has been converted to a suitable form
# This conversion is necessary as os.path.join can't handle the presence of drive letters in paths other than the first argument
pathToArchive = os.path.join( arguments.divertedTo, CommonUtils.makePathRelative(arguments.installedTo) )
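# Illustration (hypothetical values): on Windows, os.path.join('C:\\divert', 'D:\\install')
# returns 'D:\\install', silently discarding the diversion root. makePathRelative is
# assumed to strip the drive letter and leading separator so that the join lands
# underneath divertedTo as intended.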
# Add all the files which need to be in the archive into the archive
# We want to capture the tree as it is inside the install directory and don't want any trailing slashes in the archive as this isn't standards compliant
# Therefore we list everything in the install directory and add each of those to the archive, rather than adding the whole install directory
filesToInclude = os.listdir( pathToArchive )
for filename in filesToInclude:
fullPath = os.path.join(pathToArchive, filename)
archive.add( fullPath, arcname=filename, recursive=True )
# Close the archive, which will write it out to disk, finishing what we need to do here
# This is also necessary on Windows to allow for storePackage to move it to its final home
archive.close()
archiveFile.close()
# Initialize the archive manager
ourArchive = Packages.Archive( arguments.environment, arguments.platform )
# Determine which SCM revision we are storing
# This will be embedded into the package metadata, and later used for helpful output by prepare-dependencies.py
# GIT_COMMIT is set by Jenkins Git plugin, so we can rely on that for most of our builds
scmRevision = os.getenv('GIT_COMMIT') or ''
# Determine the package name...
package = Packages.nameForProject( arguments.project, arguments.branchGroup )
# Add the package to the archive
ourArchive.storePackage( package, archiveFile.name, scmRevision )
# Now open the archive - so we can extract its contents over the install prefix
# This is so later tests can rely on the project having been installed
# Because storePackage will have moved our temporary file to the cache we have to ask the package archive where the file now lives
filename, metadata = ourArchive.retrievePackage( package )
archive = tarfile.open( name=filename, mode='r:xz' )
archive.extractall( path=arguments.installedTo )
# All done!
sys.exit(0)
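# Example invocation (script name, project and branch group hypothetical; the platform value matches the cacheLocation config above):
#   python3 capture-install.py --project kcoreaddons --branchGroup kf5-qt5 \
#       --platform XenialQt5.7 --environment production \
#       --installedTo /install/prefix --divertedTo /tmp/diverted-install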
#!/usr/bin/python3
import sys
import subprocess
import argparse
from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler
# Parse the command line arguments we've been given
parser = argparse.ArgumentParser(description='Utility to determine whether an application is appstream compliant.')
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--branchGroup', type=str, required=True)
parser.add_argument('--platform', type=str, required=True)
parser.add_argument('--usingInstall', type=str, required=True)
arguments = parser.parse_args()
# Load our build specification, which governs how we handle this build
buildSpecification = BuildSpecs.Loader( project=arguments.project, branchGroup=arguments.branchGroup )
# Determine the environment we need to provide for the validation process
buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.usingInstall )
# If we aren't running on Linux, or if Appstream checks have been disabled for this build, bail
# Appstream isn't relevant outside Linux, so even if the tooling were available (probably not) there would be little point running the checks there
if sys.platform != "linux" or not buildSpecification['do-appstream-check']:
# Bail!
sys.exit(0)
# Determine the command we need to run
commandToRun = "appstreamcli validate-tree --pedantic {0}"
commandToRun = commandToRun.format( arguments.usingInstall )
# Now run it!
try:
subprocess.check_call( commandToRun, stdout=sys.stdout, stderr=sys.stderr, shell=True, env=buildEnvironment )
except Exception:
sys.exit(1)
# The project passed appstream validation successfully
sys.exit(0)
#!/usr/bin/python3
import os
import sys
import subprocess
import argparse
from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler
# Parse the command line arguments we've been given
parser = argparse.ArgumentParser(description='Utility to compile a project.')
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--branchGroup', type=str, required=True)
parser.add_argument('--platform', type=str, required=True)
parser.add_argument('--usingInstall', type=str, required=True)
arguments = parser.parse_args()
# Load our build specification, which governs how we handle this build
buildSpecification = BuildSpecs.Loader( project=arguments.project, branchGroup=arguments.branchGroup )
# Determine the environment we need to provide for the compilation process
buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.usingInstall )
# Determine where our source code is checked out to and where we will be building it
# We'll assume that the directory we're running from is where the sources are located
sourcesLocation = os.getcwd()
buildLocation = CommonUtils.buildDirectoryForSources( sources=sourcesLocation, inSourceBuild=buildSpecification['in-source-build'] )
# Compile the project
try:
commandToRun = BuildSystem.substituteCommandTokens( "make -j {cpuCount} -l {maximumLoad}" )
subprocess.check_call( commandToRun, stdout=sys.stdout, stderr=sys.stderr, shell=True, cwd=buildLocation, env=buildEnvironment )
except Exception:
sys.exit(1)
# The project was compiled successfully
sys.exit(0)
#!/usr/bin/python3
import os
import sys
import subprocess
import argparse
from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler
# Parse the command line arguments we've been given
parser = argparse.ArgumentParser(description='Utility to configure a project to be built.')
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--branchGroup', type=str, required=True)
parser.add_argument('--platform', type=str, required=True)
parser.add_argument('--installTo', type=str, required=True)
arguments = parser.parse_args()
# Load our build specification, which governs how we handle this build
buildSpecification = BuildSpecs.Loader( project=arguments.project, branchGroup=arguments.branchGroup )
# Determine the environment we need to provide to any configure system (like CMake or Autotools)
buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.installTo )
# Determine where our source code is checked out to and where we will be building it
# We'll assume that the directory we're running from is where the sources are located
sourcesLocation = os.getcwd()
buildLocation = CommonUtils.buildDirectoryForSources( sources=sourcesLocation, inSourceBuild=buildSpecification['in-source-build'] )
# Make sure the build location exists
if not os.path.isdir(buildLocation):
os.mkdir(buildLocation)
# Detect the appropriate build system to use
# We always do this even if the build specification says not to in order to keep the code cleaner
buildSystemHandler = BuildSystem.detect( sourcesLocation )
# Are we allowed to configure the build system using the handler?
if buildSpecification['detect-build-system'] and buildSystemHandler is not None:
# Ask the build system handler to configure the project
if not buildSystemHandler.configure( buildLocation, sourcesLocation, arguments.installTo, buildSpecification, buildEnvironment ):
# Looks like the handler failed to configure the project - let's bail!
sys.exit(1)
# Now we run any additional configure commands specified in the build specification
for configureCommand in buildSpecification['configure-commands']:
# Now run it
try:
commandToRun = BuildSystem.substituteCommandTokens( configureCommand, sources=sourcesLocation, installPrefix=arguments.installTo )
subprocess.check_call( commandToRun, stdout=sys.stdout, stderr=sys.stderr, shell=True, cwd=buildLocation, env=buildEnvironment )
except Exception:
sys.exit(1)
# All done!
sys.exit(0)
#!/usr/bin/python3
import os
import sys
import subprocess
import argparse
import paramiko
from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler, Packages
# Parse the command line arguments we've been given
parser = argparse.ArgumentParser(description='Utility to extract CMake metadata from a build system.')
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--branchGroup', type=str, required=True)
parser.add_argument('--usingInstall', type=str, required=True)
arguments = parser.parse_args()
# Load our build specification, which governs how we handle this build
buildSpecification = BuildSpecs.Loader( project=arguments.project, branchGroup=arguments.branchGroup )
# Determine the environment we need to provide for the compilation process
buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.usingInstall )
# Determine where our source code is checked out to and where we will be building it
# We'll assume that the directory we're running from is where the sources are located
sourcesLocation = os.getcwd()
buildLocation = CommonUtils.buildDirectoryForSources( sources=sourcesLocation, inSourceBuild=buildSpecification['in-source-build'] )
# Determine the name we'll use to store the results, as well as the local and remote paths it will be stored at
dependencyFilename = Packages.nameForProject(arguments.project, arguments.branchGroup) + '.json'
localDependencyFilename = os.path.join( sourcesLocation, dependencyFilename )
remoteDependencyFilename = os.path.join( '/srv/dependency-metadata/', dependencyFilename )
# Build the command to run
commandToRun = "python3 {0}/kde-dev-scripts/cmake-dependencies.py"
commandToRun = commandToRun.format( CommonUtils.scriptsBaseDirectory() )
# Time to run the command - open our dependency metadata file...
with open(localDependencyFilename, 'w') as localDependencyFile:
# Now run the command
# We redirect stdout and stderr into the file because this program prints its results to the console (stdout/stderr)
process = subprocess.Popen( commandToRun, stdout=localDependencyFile, stderr=localDependencyFile, shell=True, env=buildEnvironment, cwd=buildLocation )
process.wait()
# Now we transfer it to its final home - establish the SSH connection
privateKeyFile = os.path.join( os.path.expanduser('~'), 'cmake-dependencies.key')
transport = CommonUtils.establishSSHConnection( 'nellie.kde.org', 'dependencymetadata', privateKeyFile )
# Bring up a SFTP session
sftp = paramiko.SFTPClient.from_transport(transport)
# Transfer it there
sftp.put( localDependencyFilename, remoteDependencyFilename )
# All done, cleanup
sftp.close()
transport.close()
#!/usr/bin/python3
import os
import sys
import subprocess
import argparse
import paramiko
from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler, Packages
# Parse the command line arguments we've been given
parser = argparse.ArgumentParser(description='Utility to extract CMake Dependency Metadata from a build system. Provided to assist packagers with their packages.')
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--branchGroup', type=str, required=True)
parser.add_argument('--usingInstall', type=str, required=True)
arguments = parser.parse_args()
# Load our build specification, which governs how we handle this build
buildSpecification = BuildSpecs.Loader( project=arguments.project, branchGroup=arguments.branchGroup )
# Determine the environment we need to provide for the compilation process
buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.usingInstall )
# Determine where our source code is checked out to and where we will be building it
# We'll assume that the directory we're running from is where the sources are located
sourcesLocation = os.getcwd()
buildLocation = CommonUtils.buildDirectoryForSources( sources=sourcesLocation, inSourceBuild=buildSpecification['in-source-build'] )
# Determine the name we'll use to store the results, as well as the local and remote paths it will be stored at
# We use the package name as it's pretty much guaranteed to be unique among all the builds we will be running
# As the CMake dependency metadata is only used by Linux packagers, we don't need to take platform into account here
dependencyFilename = Packages.nameForProject(arguments.project, arguments.branchGroup) + '.json'
localDependencyFilename = os.path.join( sourcesLocation, dependencyFilename )
remoteDependencyFilename = os.path.join( '/srv/dependency-metadata/', dependencyFilename )
# Build the command to run
commandToRun = "python3 {0}/kde-dev-scripts/cmake-dependencies.py"
commandToRun = commandToRun.format( CommonUtils.scriptsBaseDirectory() )
# Time to run the command - open our dependency metadata file...
with open(localDependencyFilename, 'w') as localDependencyFile:
# Now run the command
# We redirect stdout and stderr into the file because this program prints its results to the console (stdout/stderr)
process = subprocess.Popen( commandToRun, stdout=localDependencyFile, stderr=localDependencyFile, shell=True, env=buildEnvironment, cwd=buildLocation )
process.wait()
# Now we transfer it to its final home - establish the SSH connection
privateKeyFile = os.path.join( os.path.expanduser('~'), 'cmake-dependencies.key')
transport = CommonUtils.establishSSHConnection( 'nellie.kde.org', 'dependencymetadata', privateKeyFile )
# Bring up a SFTP session
sftp = paramiko.SFTPClient.from_transport(transport)
# Transfer it there
sftp.put( localDependencyFilename, remoteDependencyFilename )
# All done, cleanup
sftp.close()
transport.close()
#!/usr/bin/python3
import os
import sys
import subprocess
import argparse
import paramiko
from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler, Packages
# Parse the command line arguments we've been given
parser = argparse.ArgumentParser(description='Utility to extract CMake metadata from a build system.')
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--branchGroup', type=str, required=True)
parser.add_argument('--usingInstall', type=str, required=True)
arguments = parser.parse_args()
# Load our build specification, which governs how we handle this build
buildSpecification = BuildSpecs.Loader( project=arguments.project, branchGroup=arguments.branchGroup )
# Determine the environment we need to provide for the compilation process
buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.usingInstall )
# Determine where our source code is checked out to and where we will be building it
# We'll assume that the directory we're running from is where the sources are located
sourcesLocation = os.getcwd()
buildLocation = CommonUtils.buildDirectoryForSources( sources=sourcesLocation, inSourceBuild=buildSpecification['in-source-build'] )
### Part 1: depdiagram-prepare for API documentation @ api.kde.org
# First determine where the data will be stored temporarily
outputDirectory = os.path.join( sourcesLocation, 'dotdata' )
# Build up the command to run
commandToRun = "python {0}/kapidox/src/depdiagram-prepare -s {1} {2}"
commandToRun = commandToRun.format( CommonUtils.scriptsBaseDirectory(), sourcesLocation, outputDirectory )
# Run the command, which will generate a pile of *.dot files for us
process = subprocess.Popen( commandToRun, stdout=sys.stdout, stderr=sys.stderr, shell=True, env=buildEnvironment )
# Wait for the generation to finish before we try to upload the results
process.wait()
# Connect to the server to upload the files
# Our base storage path, for future reference
remoteStoragePath = os.path.join('/home/api/depdiagram-output/', Packages.nameForProject(arguments.project, arguments.branchGroup))
# Connect to api.kde.org
privateKeyFile = os.path.join( os.path.expanduser('~'), 'api-access.key')
transport = CommonUtils.establishSSHConnection( 'zivo.kde.org', 'api', privateKeyFile )
# Bring up a SFTP session
sftp = paramiko.SFTPClient.from_transport(transport)
# Does our storage path exist?
if not CommonUtils.sftpFileExists(sftp, remoteStoragePath):
    # Create it then!
    sftp.mkdir(remoteStoragePath)
# Make sure it has been cleaned out of anything which is in there
fileListing = sftp.listdir(remoteStoragePath)
# Now remove them all
for fileToRemove in fileListing:
    pathToRemove = os.path.join(remoteStoragePath, fileToRemove)
    sftp.remove(pathToRemove)
# Upload the files we've just generated
for fileToUpload in os.listdir(outputDirectory):
# Determine the full local and remote paths
fullLocalPath = os.path.join(outputDirectory, fileToUpload)
fullRemotePath = os.path.join(remoteStoragePath, fileToUpload)
# Upload it!
sftp.put(fullLocalPath, fullRemotePath)
# All done now, close the remote server connection
sftp.close()
transport.close()
### Part 2: depdiagram-prepare for API documentation @ api.kde.org
#!/usr/bin/python3
import os
import sys
import subprocess
import argparse
import paramiko
from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler, Packages
# Parse the command line arguments we've been given
parser = argparse.ArgumentParser(description='Utility to generate dependency diagram information for use by api.kde.org.')
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--branchGroup', type=str, required=True)
parser.add_argument('--usingInstall', type=str, required=True)
arguments = parser.parse_args()
# Determine the environment we need to provide for the compilation process
buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.usingInstall )
# Determine where our source code is checked out to and where we will be building it
# We'll assume that the directory we're running from is where the sources are located
sourcesLocation = os.getcwd()
# First determine where the data will be stored, both temporarily and on the server
# As the API documentation can only be generated once, and we have the greatest capacity available on Linux, the Linux dependency diagrams are the ones used on api.kde.org.
outputDirectory = os.path.join( sourcesLocation, 'dotdata' )
remoteStoragePath = os.path.join('/home/api/depdiagram-output/', Packages.nameForProject(arguments.project, arguments.branchGroup))
# Build up the command to run
commandToRun = "python {0}/kapidox/src/depdiagram-prepare -s {1} {2}"
commandToRun = commandToRun.format( CommonUtils.scriptsBaseDirectory(), sourcesLocation, outputDirectory )
# Run the command, which will generate a pile of *.dot files for us
process = subprocess.Popen( commandToRun, stdout=sys.stdout, stderr=sys.stderr, shell=True, env=buildEnvironment )
# Wait for the generation to finish before we try to upload the results
process.wait()
# Connect to the server to upload the files
privateKeyFile = os.path.join( os.path.expanduser('~'), 'api-access.key')
transport = CommonUtils.establishSSHConnection( 'zivo.kde.org', 'api', privateKeyFile )
# Bring up a SFTP session
sftp = paramiko.SFTPClient.from_transport(transport)
# Does our storage path exist?
if not CommonUtils.sftpFileExists(sftp, remoteStoragePath):
    # Create it then!
    sftp.mkdir(remoteStoragePath)
# Make sure it has been cleaned out of anything which is in there
# This is necessary to ensure any dependency or component which has been dropped doesn't hang around unnecessarily
fileListing = sftp.listdir(remoteStoragePath)
for fileToRemove in fileListing:
    pathToRemove = os.path.join(remoteStoragePath, fileToRemove)
    sftp.remove(pathToRemove)
# Upload the files we've just generated
for fileToUpload in os.listdir(outputDirectory):
# Determine the full local and remote paths
fullLocalPath = os.path.join(outputDirectory, fileToUpload)
fullRemotePath = os.path.join(remoteStoragePath, fileToUpload)
# Upload it!
sftp.put(fullLocalPath, fullRemotePath)
# All done now, close the remote server connection
sftp.close()
transport.close()
# And bow out gracefully
sys.exit(0)
import os
import yaml
from helperslib import CommonUtils
# Class to make it convenient to read configs for working with build specs
class Loader(object):
# Loads the project's configuration, should it have one, and merges it with the default configuration
def __init__(self, project, branchGroup):
# Start by copying the default settings
self.mergedConfig = self.defaultOptions()
# Where should our configuration file be?
# It should be at build-specs/<project>.yaml
# We therefore join build-specs/ and <project>.yaml onto the scripts base directory
configFileLocation = os.path.join( CommonUtils.scriptsBaseDirectory(), 'build-specs', project + '.yaml' )
# Does the file exist?
# If it does not, then we don't need to do anything else
if not os.path.isfile(configFileLocation):
return
# Load the file now
with open(configFileLocation, 'r') as configFile:
# Parse the YAML file
projectConfig = yaml.safe_load(configFile)
# Does it specify something for this branch group?
if branchGroup in projectConfig:
# Then merge it in
self.mergedConfig.update( projectConfig[branchGroup] )
# Allow fetching our config
def __getitem__(self, name):
# Do we know about this attribute?
if name in self.mergedConfig:
# Then return it
return self.mergedConfig[name]
# We don't know about it
raise KeyError
# Specifies our defaults
def defaultOptions(self):
return {
'in-source-build': False,
'detect-build-system': True,
'cmake-options': '',
'autotools-options': '',
'configure-commands': [],
'do-appstream-check': True,
'run-tests': True,
'per-test-timeout': 600,
'setup-x-environment': True,
'launch-dbus-session': True,
'run-cppcheck': True,
'cppcheck-arguments': '',
'extract-lcov-results': True,
'lcov-extractor-arguments': ''
}
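# Example usage (sketch; project name hypothetical):
#   spec = Loader(project='kcoreaddons', branchGroup='Devel')
#   spec['per-test-timeout']   # 1200 if the Devel track overrides it, otherwise the default of 600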
import os
import sys
import subprocess
import multiprocessing
from lxml import etree
from helperslib import CommonUtils
# Detect which supported build system is able to build the project whose source code is at the given location
def detect( pathToSources ):
# These are our supported build systems
supportedSystems = [
CMake,
AutoTools
]
# Go over each one in turn
for bSystem in supportedSystems:
# Does this one support building it?
if bSystem.canConfigure(pathToSources):
# Return it for use
return bSystem
# Otherwise we don't support it
return None
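# Example: detect('/path/to/sources') returns the CMake handler when a top-level
# CMakeLists.txt exists, the AutoTools handler when its own check matches, and
# None when no supported build system is recognised.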
def substituteCommandTokens( command, sources = '', installPrefix = '' ):
    # Perform the substitution
    return command.format(
        sources = sources,
        installationPrefix = installPrefix,
        maximumLoad = multiprocessing.cpu_count() * 1.5,
        cpuCount = multiprocessing.cpu_count()
    )
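# Example: on an 8-core machine, substituteCommandTokens("make -j {cpuCount} -l {maximumLoad}")
# returns "make -j 8 -l 12.0" (the load limit is 1.5 times the CPU count).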
# Build system handler for CMake, providing logic to detect a CMake project and configure a build using it
class CMake(object):
# Detect whether this build system is able to build the project whose source code is at the given location
@staticmethod
def canConfigure( pathToSources ):
# Do we have a CMakeLists.txt file?
# If we do, it uses a CMake build system
if os.path.isfile( os.path.join(pathToSources, 'CMakeLists.txt') ):
return True
# Otherwise this isn't a CMake build system
return False
# Configure the build system in the specified directory, using sources at the given location, to be installed at the given location
# Ensure that any appropriate directives in the build specification are honoured
@staticmethod
def configure( buildDirectory, pathToSources, installPrefix, buildSpecification, buildEnvironment ):
# Begin building up our configure command
cmakeCommand = ['cmake']
# We want a Debug build to allow for good backtraces
cmakeCommand.append('-DCMAKE_BUILD_TYPE=Debug')
# Enable ASAN for our builds
cmakeCommand.append("-DECM_ENABLE_SANITIZERS='address'")
# We want tests to be built!
cmakeCommand.append('-DBUILD_TESTING=ON')
# And we want to be installed in a given directory
cmakeCommand.append('-DCMAKE_INSTALL_PREFIX=' + installPrefix)
# Are we on Windows?
if sys.platform == 'win32':
# We want a NMake based build, rather than the default MSBuild
cmakeCommand.append('-G "NMake Makefiles JOM"')
# Finally we drag in options specified by the build specification
cmakeCommand.append( buildSpecification['cmake-options'] )
# Lucky last, we add the path to our sources
cmakeCommand.append( pathToSources )
# Now glue it all together and substitute any tokens we need to swap out
commandToRun = ' '.join( cmakeCommand )
commandToRun = substituteCommandTokens( commandToRun, sources = pathToSources, installPrefix = installPrefix )
# Run the command
try:
subprocess.check_call( commandToRun, stdout=sys.stdout, stderr=sys.stderr, shell=True, cwd=buildDirectory, env=buildEnvironment )
except Exception:
return False
# Did we succeed?
return True
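# For illustration, with an install prefix of /install/prefix (hypothetical) and
# no extra cmake-options, the assembled command resembles:
#   cmake -DCMAKE_BUILD_TYPE=Debug -DECM_ENABLE_SANITIZERS='address' -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=/install/prefix <path-to-sources>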
@staticmethod
def convertCTestResultsToJUnit( buildDirectory ):
# Where is the base prefix for all test data for this project located?
testDataDirectory = os.path.join( buildDirectory, 'Testing' )
# Determine where we will find the test run data for the latest run
filename = os.path.join( testDataDirectory, 'TAG' )
with open(filename, 'r') as tagFile:
testDirectoryName = tagFile.readline().strip()
# Open the test result XML and load it
filename = os.path.join( testDataDirectory, testDirectoryName, 'Test.xml' )
with open(filename , 'r') as xmlFile:
xmlDocument = etree.parse( xmlFile )
# Load the XSLT file
filename = os.path.join( CommonUtils.scriptsBaseDirectory(), 'templates', 'ctesttojunit.xsl' )
with open(filename, 'r') as xslFile:
xslContent = xslFile.read()
xsltRoot = etree.XML(xslContent)
# Transform the CTest XML into JUnit XML
transform = etree.XSLT(xsltRoot)
return transform(xmlDocument)
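# Example usage (sketch; the output filename is hypothetical):
#   junitXml = CMake.convertCTestResultsToJUnit(buildLocation)
#   with open('JUnitTestResults.xml', 'wb') as junitFile:
#       junitFile.write(etree.tostring(junitXml, pretty_print=True))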
# Build system handler for Autotools, providing logic to detect an Autotools project and configure a build using it
class AutoTools(object):
# Detect whether this build system is able to build the project whose source code is at the given location
@staticmethod
def canConfigure( pathToSources ):