Source code for mojo.libdist.DistributionBuilder

"""
Distribution Builder
====================
Contains the class 'DistributionBuilder', which is used to create the distribution file.

With the setup method all output and temporary directories and the distribution name are created;
it also schedules _modifySources, which customizes the constants in the trace suite header files.
The createDistributionFile method uses the createBuildScript and createMakeCommandLine methods
to create a bash script which builds the trace suite in its libdist version and executes it.
The created tar files are unpacked and tested by testDistribution.
After a successful test the tested distribution is copied to the output directory by the
copyDistributionToOutputPath method.

"""


import argparse
import glob
import os
import re
import shutil
import subprocess
import tarfile
import tempfile

from lxml import etree

from mojo.bricabrac.fileIO import Printer

from . import setPrecompileFlag
from . import libdistData

from ..bricabrac import fileIO
from ..jobManagement import jobManagementData as jobConsts
from ..jobManagement.jobObserver import JobEvent
from ..jobManagement.jobs.job import Job
from ..jobManagement.jobs.jobList import JobList
from ..jobManagement.management import resources
from ..pavayo.statusOutput import FailedJobsJsonOutput


class DistributionBuilder:
    """ Build a libdist distribution file for one build configuration specification.

    :param pathToSourceCodeTarFile: path to the source code tarfile
    :param configFilePath: path to configuration file
    :param nProcsPerBuild: number of processors per build
    :param outputPath: directory in which the output file will be saved
    :param buildConfiguration: build configuration
    :param traceSuitePath: path to the trace suite
    :type pathToSourceCodeTarFile: string
    :type configFilePath: string
    :type nProcsPerBuild: int
    :type outputPath: string
    :type buildConfiguration: buildConfiguration object
    :type traceSuitePath: string
    """

    def __init__(self, pathToSourceCodeTarFile, configFilePath, nProcsPerBuild, outputPath, buildConfiguration, traceSuitePath):
        """ Constructor

        :param pathToSourceCodeTarFile: path to the source code tarfile
        :param configFilePath: path to configuration file
        :param nProcsPerBuild: number of processors per build
        :param outputPath: directory in which the output file will be saved
        :param buildConfiguration: build configuration
        :param traceSuitePath: path to the trace suite
        :type pathToSourceCodeTarFile: string
        :type configFilePath: string
        :type nProcsPerBuild: int
        :type outputPath: string
        :type buildConfiguration: buildConfiguration object
        :type traceSuitePath: string
        """
        # path to the sourceCodeTarFile
        self.pathToSourceCodeTarFile = pathToSourceCodeTarFile
        # configFilePath
        if configFilePath:
            self.configFilePath = configFilePath
        else:
            self.configFilePath = os.path.abspath(os.path.join(__file__, "TEST/data/test.xml"))
        # number of processors per build
        self.nProcsPerBuild = nProcsPerBuild
        # directory in which the output file will be saved
        self.outputPath = outputPath
        # build configuration stored in a BuildConfiguration object
        self.buildConfiguration = buildConfiguration
        # buildDirectory
        self.buildPath = os.path.join(self.outputPath, libdistData.BUILD_DIRECTORY)
        # temporary directory for the unpacked distribution
        # self.distributionPath = tempfile.mkdtemp()
        self.distributionPath = os.path.join(self.buildPath, self.buildConfiguration.name)
        # directory used by the test distribution
        self.testDistributionPath = os.path.join(self.distributionPath, libdistData.TEST_DISTRIBUTION_DIRECTORY)
        # logDirectory
        self.logPath = os.path.join(self.outputPath, libdistData.LOG_DIRECTORY)

    def cleanUp(self):
        """Remove all temporary files and directories.
        """
        fileIO.removeFile(self.distributionPath)

    def _getXML(self):
        """Create an XML file containing only the information of the set which is built at the moment.

        All information is read from the config XML; the created XML is later added to the distribution tarfile.
        """
        xmlName = os.path.basename(self.configFilePath)
        # save copy of old xml in an element tree
        with open(self.configFilePath, "r") as oldXmlFile:
            ElementTree = etree.parse(oldXmlFile)
        root = ElementTree.getroot()
        # find all sets in the element tree and delete all except the set which is built at the moment
        for child in root.findall(libdistData.BUILDCONFIGURATIONS_SET):
            eleName = child.find(libdistData.BUILDCONFIGURATIONS_SET_NAME)
            if eleName.text != self.buildConfiguration.name:
                root.remove(child)
        compilerModule = root.find(".//" + libdistData.BUILDCONFIGURATIONS_SET_MODULES_COMPILER)
        if compilerModule.text in libdistData.COMPILER:
            if compilerModule.text == libdistData.COMPILER_GNU and libdistData.COMPILER[libdistData.COMPILER_GNU] == '':
                compilerModule.text = self.getGccVersion()
            else:
                compilerModule.text = libdistData.COMPILER[compilerModule.text]
        # save the changed element tree as an xml file in a temporary file
        temp = tempfile.NamedTemporaryFile(delete=False)
        ElementTree.write(temp)
        return temp.name, xmlName

    def makeJob(self, make_command, nProcs, workingDirectory=None, outputDir=None, outputFile=None, jobName="makeJob", envVars=None):
        """ creates a job which executes a make command

        :param make_command: make command
        :param nProcs: number of processors
        :param workingDirectory: path to working directory
        :param outputDir: path to output directory
        :param outputFile: path to output file
        :param jobName: name of the created job
        :param envVars: environment variables
        :type make_command: string
        :type nProcs: int
        :type workingDirectory: string
        :type outputDir: string
        :type outputFile: string
        :type jobName: string
        :type envVars: dictionary
        """
        makeJob = Job(make_command, workingDirectory=workingDirectory, executeOnMaster=True, nProcs=nProcs, useMpirun=False,
                      outputDir=outputDir, outputFile=outputFile, jobName=jobName, envVars=envVars)
        return makeJob

    def createBuildScript(self, scriptName, makeCommand):
        """Write a bash script which executes the given make command and make it executable.
        """
        with open(scriptName, "w") as fp:
            fp.write("#!/bin/bash\n{0}".format(makeCommand))
        os.chmod(scriptName, 0o777)

    def createLibraries(self, pathToTarfile, pathToTraceSuite):
        """ Find all files which have to be added to the libraries and create a tarfile containing them.

        :param pathToTarfile: path to tarfile
        :param pathToTraceSuite: path to trace suite
        :type pathToTarfile: string
        :type pathToTraceSuite: string
        """
        necessaryIncludeFiles = self._determineNecessaryIncludeFiles()
        with tarfile.open(pathToTarfile, "w:gz") as tar:
            for path in libdistData.ADD_TO_TARFILE:
                libPath = os.path.join(pathToTraceSuite, path)
                libPath = glob.glob(libPath)
                for lib in libPath:
                    arcname = lib.replace(pathToTraceSuite, libdistData.TRACE_SUITE)
                    tar.add(lib, arcname=arcname, filter=self._excludeCreatedPostLibs)
            for path in necessaryIncludeFiles:
                arcname = path.replace(pathToTraceSuite, libdistData.TRACE_SUITE)
                tar.add(path, arcname=arcname)
            xmlPath, xmlName = self._getXML()
            tar.add(xmlPath, arcname=os.path.join(libdistData.TRACE_SUITE, xmlName))
            fileIO.removeFile(xmlPath)

    def _excludeCreatedPostLibs(self, tarinfo):
        """ Return None if the tarinfo object is in the list of post libraries which should be excluded,
        otherwise return the tarinfo object.

        This is needed to use the filter argument of TarFile.add().

        :param tarinfo: tarinfo
        :type tarinfo: tarinfo object
        """
        if tarinfo.name in libdistData.POST_LIBS_EXCLUDE:
            return None
        else:
            return tarinfo

    def createDistributionFile(self, envVars=None):
        """ Execute the make command created by createMakeCommandLine and create a tarfile
        containing the trace suite in its libdist version.

        :param envVars: environment variables
        :type envVars: dictionary
        :return: joblist for creating the distribution file
        :rtype: joblist
        """
        pathToTraceSuite = os.path.join(self.distributionPath, libdistData.TRACE_SUITE)
        make_command = self.createMakeCommandLine("libdist")
        pathToTarfile = os.path.join(self.distributionPath, libdistData.TRACE_SUITE, libdistData.TRACE_SUITE_LIBDIST_TAR)
        if not envVars:
            envVars = getEnvironmentVariablesFromModules(self.buildConfiguration.modules)
        logPath = os.path.join(self.logPath, self.buildConfiguration.name)
        if envVars:
            envVarFile = os.path.join(logPath, "makeJob_envVars.out")
            fileIO.ensurePath(envVarFile)
            with open(envVarFile, "w") as envFile:
                for key, value in envVars.items():
                    envFile.write(f"{key}:{value}\n")
        createDistributionFileJobList = JobList(name=libdistData.CREATE_DISTRIBUTION_FILE_JOBLIST.format(self.buildConfiguration.name))
        name = libdistData.MAKE_JOB.format(self.buildConfiguration.name)
        makeJob = self.makeJob(make_command, self.nProcsPerBuild, workingDirectory=pathToTraceSuite, outputDir=logPath,
                               outputFile=os.path.join(logPath, name + ".out"), jobName=name, envVars=envVars)
        createDistributionFileJobList.addJob(makeJob)
        name = libdistData.CREATE_BUILD_SCRIPT_JOB.format(self.buildConfiguration.name)
        finalize_libdist_command = self.createMakeCommandLine(nprocs=1)
        createBuildScriptJob = Job(self.createBuildScript,
                                   args=[os.path.join(pathToTraceSuite, libdistData.INSTALL_SCRIPT[0]), finalize_libdist_command],
                                   jobName=name, outputDir=logPath, outputFile=os.path.join(logPath, name + ".out"))
        createDistributionFileJobList.addJob(createBuildScriptJob)
        createLibrariesJob = Job(self.createLibraries, args=[pathToTarfile, pathToTraceSuite], jobName=name,
                                 outputDir=logPath, outputFile=os.path.join(logPath, name + ".out"))
        createDistributionFileJobList.addJob(createLibrariesJob, [makeJob.id, createBuildScriptJob.id])
        return createDistributionFileJobList

    def _createDistributionFileName(self):
        """ create distribution file name as follows
        <name>_<traceSuiteVersion>_<mpi>_<compiler>_<cgns>_<vtk>_<build configuration name>.tar.gz

        :return: distribution file name
        :rtype: string
        """
        traceSuiteVersion = self.getTraceSuiteVersion()
        # get compiler version
        # if compiler set get its version, else gcc version
        if self.buildConfiguration.modules[libdistData.BUILDCONFIGURATIONS_SET_MODULES_COMPILER]:
            compiler = self.buildConfiguration.modules[libdistData.BUILDCONFIGURATIONS_SET_MODULES_COMPILER]
            compiler = compiler[compiler.rfind("/") + 1:]
        else:
            compiler = self.getGccVersion()
        # get the used mpi module
        mpi = self.buildConfiguration.modules[libdistData.BUILDCONFIGURATIONS_SET_MODULES_MPI]
        mpi = mpi[mpi.rfind("/") + 1:]
        # get the used cgns module
        cgns = self.buildConfiguration.modules[libdistData.BUILDCONFIGURATIONS_SET_MODULES_CGNS]
        cgns = cgns[cgns.rfind("/") + 1:]
        # get the used vtk module
        vtk = self.buildConfiguration.modules[libdistData.BUILDCONFIGURATIONS_SET_MODULES_VTK]
        vtk = vtk[vtk.rfind("/") + 1:]
        distributionFileName = libdistData.CREATED_TARFILE_FORMAT.format(name=libdistData.TRACE_SUITE,
                                                                         traceVersion=traceSuiteVersion,
                                                                         compiler=compiler, mpi=mpi, cgns=cgns, vtk=vtk,
                                                                         buildConfigurationName=self.buildConfiguration.name)
        distributionFileName = distributionFileName.replace("/", "-")
        distributionFileName = distributionFileName.replace(" ", "_")
        distributionFileName = distributionFileName.replace("(", "")
        distributionFileName = distributionFileName.replace(")", "")
        return distributionFileName

    def getGccVersion(self):
        """ return current gcc version

        :return: gcc version
        :rtype: string
        """
        gccVersionCommand = libdistData.GCC_VERSION_COMMAND
        gccVersionOutput = subprocess.check_output(gccVersionCommand.split(), encoding='utf8')
        gccVersion = re.findall(libdistData.GET_VERSION_PATTERN, gccVersionOutput)[0]
        return libdistData.GCC_RETURN_STRING.format(gccVersion)

    def getTraceSuiteVersion(self):
        """ get trace version number

        :return: trace suite version
        :rtype: string
        """
        if os.path.isfile(os.path.join(self.testDistributionPath, libdistData.TRACE_SUITE, libdistData.TRACE_VERSION_PATH)):
            pathToTraceVersionHeader = os.path.join(self.testDistributionPath, libdistData.TRACE_SUITE, libdistData.TRACE_VERSION_PATH)
        else:
            return "0.0.0"
        with open(pathToTraceVersionHeader, "r") as version:
            versionString = version.read()
        try:
            release_number = re.findall(libdistData.VERSION_PATTERN, versionString)[0]
        except IndexError:
            print("No release number could be extracted from file '{fileName}'".format(fileName=os.path.split(pathToTraceVersionHeader)[1]))
            raise
        return release_number

    def _deleteAndCreateDistributionDirectories(self):
        """Delete and re-create the directories for the distribution.
        """
        self._createOutputDirectory()
        fileIO.removeFile(self.distributionPath)
        fileIO.md(self.distributionPath)
        fileIO.md(self.testDistributionPath)
        logPath = os.path.join(self.logPath, self.buildConfiguration.name)
        fileIO.removeFile(logPath)
        fileIO.md(logPath)

    def createMakeCommandLine(self, target=None, nprocs=None):
        """Create make command line string from make commands

        :return: make command line
        :rtype: string
        """
        # create basic command line
        makeCommandLine = libdistData.MAKE_COMMAND.format(nprocs or self.nProcsPerBuild)
        if target:
            makeCommandLine += " " + target
        # append make options defined in config xml to the make command
        for option in self.buildConfiguration.makeCommands:
            makeCommandLine += " " + option
        return makeCommandLine

    def _getIncludedHeaders(self, pathToHeader):
        """ Return the list of headers included in the given header file.

        :return: headers included in given header file
        :rtype: list
        """
        includedHeaders = []
        with open(pathToHeader, "r") as header:
            includes = re.findall(libdistData.INCLUDE_PATTERN, header.read())
        for include in includes:
            header = re.findall(libdistData.INCLUDE_HEADER_PATTERN, include)[0]
            includedHeaders.append(header)
        return includedHeaders

    def _determineNecessaryIncludeFiles(self):
        """ Determine which include files have to be added to the distribution.

        :return: include files to add to the distribution
        :rtype: list
        """
        necessaryIncludeFiles = []
        pathToTraceSuite = os.path.join(self.distributionPath, libdistData.TRACE_SUITE)
        trace_header_paths = []
        for header_path in libdistData.TRACE_HEADER_FOR_POST:
            header_full_path = os.path.join(pathToTraceSuite, header_path)
            trace_header_paths.extend(glob.glob(header_full_path))
        searchForIncludedHeaders = []
        for path in libdistData.POST_SEARCH_FOR_HEADERS:
            fullPath = os.path.join(pathToTraceSuite, path)
            searchForIncludedHeaders.extend(glob.glob(fullPath))
        while searchForIncludedHeaders:
            includedHeaders = self._getIncludedHeaders(searchForIncludedHeaders.pop())
            for header in includedHeaders:
                for trace_header in trace_header_paths:
                    if header == os.path.basename(trace_header):
                        if trace_header not in necessaryIncludeFiles:
                            searchForIncludedHeaders.append(trace_header)
                            necessaryIncludeFiles.append(trace_header)
        return necessaryIncludeFiles

    def _modifySources(self, verbose=1):
        """Modify the source code according to the build configuration before compiling it.

        Uses setPrecompileFlag to change the values of the macros to the values defined in the config xml.

        :param verbose: verbosity level
        :type verbose: integer
        """
        if verbose and self.buildConfiguration.macroChangeSets:
            print("Setting pre-compile flags ...")
        for macro in self.buildConfiguration.macroChangeSets:
            filename = os.path.join(self.distributionPath, libdistData.TRACE_SUITE, macro[0])
            setPrecompileFlag.setPrecompile(filename, macro[1], macro[2], verbose=verbose)

    def setup(self):
        """ Prepare the source code according to the build configuration.

        Creates jobs which delete and re-create the distribution directories, unpack the
        source code distribution, and modify the sources.

        :return: setup joblist
        :rtype: joblist
        """
        logPath = os.path.join(self.logPath, self.buildConfiguration.name)
        setupJobList = JobList(name=libdistData.SETUP_JOBLIST.format(self.buildConfiguration.name))
        createDistributionDirectories = Job(self._deleteAndCreateDistributionDirectories, outputDir=self.logPath,
                                            jobName=libdistData.CREATE_DISTRIBUTION_DIRECTORIES_JOB.format(self.buildConfiguration.name))
        unpackDistributionJob = Job(self._unpackDistribution, args=[self.pathToSourceCodeTarFile, self.distributionPath],
                                    jobName=libdistData.UNPACK_DISTRIBUTION_JOB.format(self.buildConfiguration.name), outputDir=logPath)
        modifySourcesJob = Job(self._modifySources, kwargs=dict(verbose=1), outputDir=logPath,
                               jobName=libdistData.MODIFY_SOURCES_JOB.format(self.buildConfiguration.name))
        setupJobList.addJob(createDistributionDirectories)
        setupJobList.addJob(unpackDistributionJob, parents=[createDistributionDirectories.id])
        setupJobList.addJob(modifySourcesJob, parents=[unpackDistributionJob.id])
        return setupJobList

    def _createOutputDirectory(self):
        """add current set name to the output path and create output directory
        """
        try:
            fileIO.md(self.logPath)
            fileIO.md(self.buildPath)
        except OSError:
            pass

    def _testOptionalExecutables(self, command, envVars, logPath, execName):
        """ Method to test optional executables.

        First test if the executable exists. Then test the specified command.

        :param command: command line string to test executable
        :param envVars: environment variables
        :param logPath: path for the log file
        :param execName: name of the executable
        :type command: str
        :type envVars: dict
        :type logPath: str
        :type execName: str
        """
        workingDir = os.path.join(self.testDistributionPath, libdistData.TRACE_SUITE)
        executable = command.split(' ')[0]
        try:
            fileIO.checkExecutables(os.path.join(workingDir, executable))
        except OSError:
            print("Optional executable {} not found and therefore not tested.".format(executable))
        else:
            jobName = libdistData.TEST_EXECUTABLE_JOB.format(execName)
            testExecutableJob = Job(command, envVars=envVars, workingDirectory=workingDir,
                                    outputFile=os.path.join(logPath, jobName + "_test.out"), outputDir=logPath,
                                    executeOnMaster=True, jobName=jobName)
            testExecutableJob.runJob()

    def testDistribution(self, envVars=None):
        """ unpack distribution file, compile it, and run help. Should raise an exception in case of error.

        :param envVars: environment variables
        :type envVars: dictionary
        :return: test distribution joblist
        :rtype: joblist
        """
        libdistOutputPath = os.path.join(self.distributionPath, libdistData.TRACE_SUITE, libdistData.TRACE_SUITE_LIBDIST_TAR)
        makeCommand = self.createMakeCommandLine()
        if not envVars:
            envVars = getEnvironmentVariablesFromModules(self.buildConfiguration.modules)
        logPath = os.path.join(self.logPath, self.buildConfiguration.name)
        testDistributionJobList = JobList(name=libdistData.TEST_DISTRIBUTION_JOBLIST.format(self.buildConfiguration.name))
        unpackTestDistributionJob = Job(self._unpackDistribution, args=[libdistOutputPath, self.testDistributionPath],
                                        jobName=libdistData.UNPACK_TEST_DISTRIBUTION_JOB.format(self.buildConfiguration.name),
                                        outputDir=logPath)
        testDistributionJobList.addJob(unpackTestDistributionJob)
        name = libdistData.MAKE_TEST_JOB.format(self.buildConfiguration.name)
        makeTestJob = Job(makeCommand, workingDirectory=os.path.join(self.testDistributionPath, libdistData.TRACE_SUITE),
                          envVars=envVars, outputFile=os.path.join(logPath, name + ".out"), outputDir=logPath,
                          executeOnMaster=True, useMpirun=False, nProcs=self.nProcsPerBuild,
                          jobName=libdistData.MAKE_TEST_JOB.format(self.buildConfiguration.name))
        testDistributionJobList.addJob(makeTestJob, [unpackTestDistributionJob.id])
        for execName, command in libdistData.TEST_EXECUTABLES_DICT.items():
            testExecutableJob = Job(command, envVars=envVars,
                                    workingDirectory=os.path.join(self.testDistributionPath, libdistData.TRACE_SUITE),
                                    outputDir=logPath, executeOnMaster=True,
                                    jobName=libdistData.TEST_EXECUTABLE_JOB.format(execName))
            testDistributionJobList.addJob(testExecutableJob, [makeTestJob.id])
        for execName, command in libdistData.TEST_OPTIONAL_EXECUTABLES_DICT.items():
            testExecutableJob = Job(self._testOptionalExecutables, args=[command, envVars, logPath, execName],
                                    jobName=libdistData.TEST_EXECUTABLE_JOB.format(execName), outputDir=logPath)
            testDistributionJobList.addJob(testExecutableJob, [makeTestJob.id])
        return testDistributionJobList

    def copyDistributionToOutputPath(self):
        """Copy the created libdist tarfile to the output directory under the distribution file name
        created by _createDistributionFileName.
        """
        distributionFileName = self._createDistributionFileName()
        distributionFileOutputPath = os.path.join(self.outputPath, distributionFileName)
        libdistOutputPath = os.path.join(self.distributionPath, libdistData.TRACE_SUITE, libdistData.TRACE_SUITE_LIBDIST_TAR)
        shutil.copy(libdistOutputPath, distributionFileOutputPath)

    def _unpackDistribution(self, srcPath, destPath, verbose=0):
        """Unpack a gzipped tar file to a directory.

        :param srcPath: source path
        :type srcPath: string
        :param destPath: destination path
        :type destPath: string
        """
        if verbose:
            print(f"Extracting tar file '{srcPath}' in destination '{destPath}'")
        with tarfile.open(srcPath, "r:gz") as tar:
            tar.extractall(destPath)


# TODO beck_ki, 2015-08-25: There must be a more elegant way to set the environment variables
def getEnvironmentVariablesFromModules(modules):
    """ Search the module files related to the modules which are described in the buildConfiguration.

    Analyses the module files and reads out how they would change the environment variables.
    The described changes are then applied by this method and the new environment is returned
    as a dictionary.

    :param modules: modules
    :type modules: dictionary
    :return: environment variables
    :rtype: dictionary
    """
    parser = argparse.ArgumentParser(description='Process module files and extract options.')
    parser.add_argument('-d', '--delim', dest='delimiter', default=':', type=str, help='delimiter for arguments')
    # default environment delimiter
    delimiter = ":"
    # searched keywords in module files
    setEnv = "setenv"
    appendEnv = "append-path"
    prependEnv = "prepend-path"
    searchPatternFormat = r"\s*({setEnv}|{appendEnv}|{prependEnv})(.*)"
    # replace environment settings in module files
    setEnvPatternFormat = r"\s*set\s+{0}\s+(.*)"
    setEnvPattern = setEnvPatternFormat.format(r"(\w+)")
    environmentVariables = {}
    # get all modules that have to be loaded
    modulesList = [value for value in modules.values() if value]
    moduleFilesToLoad = []
    modulePaths = os.environ.get("MODULEPATH").split(":")
    for module in modulesList:
        for modulePath in modulePaths:
            try:
                moduleFilesToLoad.append(fileIO.getAbsoluteFilePath(os.path.join(modulePath, module)))
            except RuntimeError:
                pass
            else:
                break
    if len(moduleFilesToLoad) != len(modulesList):
        raise RuntimeError("\nModules in list ({nMds}): {listMd}\nFound modules ({nfMds}): {listfMd}".format(
            nMds=len(modulesList), listMd=", ".join(modulesList),
            nfMds=len(moduleFilesToLoad), listfMd=", ".join(moduleFilesToLoad)))
    for moduleFilePath in moduleFilesToLoad:
        with open(moduleFilePath, "r") as fileHandler:
            # remove all '#' comments from file
            # TODO: What about '#' in strings or string variables
            moduleFile = re.sub(re.compile("#.*?\n"), "", fileHandler.read())
        for variable, _ in re.findall(setEnvPattern, moduleFile):
            setEnvPatternVar = setEnvPatternFormat.format(variable)
            for value in re.findall(setEnvPatternVar, moduleFile):
                moduleFile = re.sub(r"\$[\({{]?\b{0}\b[\)}}]?".format(variable), value, moduleFile)
        for match in re.findall(searchPatternFormat.format(setEnv=setEnv, appendEnv=appendEnv, prependEnv=prependEnv), moduleFile):
            envType = match[0]
            args, envVarSet = parser.parse_known_args(match[1].split())
            try:
                variable, values = envVarSet
            except ValueError:
                print(f"PLEASE CHECK YOUR MODULE FILE '{moduleFilePath}': syntax is not supported = '{match[1]}'.")
                raise
            if args.delimiter != delimiter:
                values = values.replace(args.delimiter, delimiter)
            if envType == setEnv or not (variable in environmentVariables or variable in os.environ.keys()):
                environmentVariables[variable] = values
            else:
                if variable not in environmentVariables:
                    environmentVariables[variable] = os.environ[variable]
                if envType == prependEnv:
                    environmentVariables[variable] = values + delimiter + environmentVariables[variable]
                else:
                    environmentVariables[variable] += delimiter + values
    return environmentVariables

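# Illustrative sketch (an assumption added for documentation purposes, not part of the original module):
# given a module file containing, for example,
#
#     setenv       MPI_HOME /opt/mpi/4.1
#     prepend-path PATH     /opt/mpi/4.1/bin
#
# getEnvironmentVariablesFromModules would return a dictionary along the lines of
#
#     {"MPI_HOME": "/opt/mpi/4.1",
#      "PATH": "/opt/mpi/4.1/bin" + ":" + os.environ["PATH"]}
#
# i.e. 'setenv' overwrites a variable, while 'append-path'/'prepend-path' extend the value already
# present in the environment (or collected so far), joined with the ':' delimiter.
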
def _buildJobList(buildConfiguration, pathToSourceCodeTarFile, configFilePath, nProcsPerBuild, outputPath, traceSuitePath):
    """ Create the joblist which builds, tests, and copies the distribution for one build configuration.

    :param buildConfiguration: build configuration
    :param pathToSourceCodeTarFile: path to the source code tarfile
    :param configFilePath: path to configuration file
    :param nProcsPerBuild: number of processors per build
    :param outputPath: directory in which the output file will be saved
    :param traceSuitePath: path to the trace suite
    :type buildConfiguration: buildConfiguration object
    :type pathToSourceCodeTarFile: string
    :type configFilePath: string
    :type nProcsPerBuild: int
    :type outputPath: string
    :type traceSuitePath: string
    """
    distBuilder = DistributionBuilder(pathToSourceCodeTarFile, configFilePath, nProcsPerBuild, outputPath, buildConfiguration, traceSuitePath)
    logPath = os.path.join(distBuilder.logPath, buildConfiguration.name)
    subJobList = JobList(name=buildConfiguration.name + libdistData.JOBLIST)
    setupJob = distBuilder.setup()
    subJobList.addJob(setupJob)
    createDistributionFileJob = distBuilder.createDistributionFile()
    subJobList.addJob(createDistributionFileJob, [setupJob.id])
    testDistributionJob = distBuilder.testDistribution()
    subJobList.addJob(testDistributionJob, [createDistributionFileJob.id])
    copyDistributionToOutputPathJob = Job(distBuilder.copyDistributionToOutputPath, outputDir=logPath,
                                          jobName=libdistData.COPY_DISTRIBUTION_TO_OUTPUT_PATH_JOB)
    subJobList.addJob(copyDistributionToOutputPathJob, [testDistributionJob.id])
    return subJobList, distBuilder, buildConfiguration.name


def _cleanupJobList(cleanJobList, buildConfigurationSettings, keepOutput):
    """Add clean-up jobs for every successfully built configuration and collect the names of failed sets.

    :param cleanJobList: joblist to which the clean-up jobs are added
    :param buildConfigurationSettings: list of (subJobList, distBuilder, buildName) tuples
    :param keepOutput: controls if the jobmanagement output files are erased
    :return: names of failed sets
    :rtype: list
    """
    successfulBuildList = list()
    failedSetNameList = list()
    for i in range(len(buildConfigurationSettings)):
        subJobList = buildConfigurationSettings[i][0]
        if subJobList.status.code == jobConsts.DONE:
            successfulBuildList.append(i)
        else:
            failedSetNameList.append(buildConfigurationSettings[i][2])
    if not keepOutput:
        for i in successfulBuildList:
            _, distBuilder, buildName = buildConfigurationSettings[i]
            logPath = os.path.join(distBuilder.logPath, buildName)
            cleanUpJob = Job(distBuilder.cleanUp, jobName=libdistData.CLEAN_UP_JOB, outputDir=logPath)
            cleanJobList.addJob(cleanUpJob)
    return failedSetNameList

def createDistributions(buildConfigurations, pathToSourceCodeTarFile, configFilePath, nBuilds, nProcsPerBuild, outputPath,
                        traceSuitePath, verbosity=fileIO.Printer.Verbosity.DEFAULT, keepOutput=False):
    """ Create a DistributionBuilder instance per build configuration and build the distributions
    according to the configurations stored in the buildConfigurations instances.

    The arguments pathToSourceCodeTarFile, configFilePath, nBuilds and outputPath are required.

    :param buildConfigurations: build configurations
    :param pathToSourceCodeTarFile: path to the source code tarfile
    :param configFilePath: path to configuration file
    :param nBuilds: number of parallel builds
    :param nProcsPerBuild: number of processors per build
    :param outputPath: directory in which the output file will be saved
    :param traceSuitePath: path to the trace suite
    :param verbosity: verbosity
    :param keepOutput: controls if the jobmanagement output files are erased
    :type buildConfigurations: list of build configuration objects
    :type pathToSourceCodeTarFile: str
    :type configFilePath: str
    :type nBuilds: int
    :type nProcsPerBuild: int
    :type outputPath: str
    :type traceSuitePath: str
    :type verbosity: Printer.Verbosity
    :type keepOutput: bool
    :return: names of failed sets and the failed jobs of the main joblist
    :rtype: tuple
    """
    # set number of available processors
    nProcsAvailable = nBuilds * nProcsPerBuild
    if nProcsAvailable > resources.DEFAULT_RESOURCES[resources.PROCESSORS]:
        Printer.verbosePrint(f"Number of available processors is reduced to {resources.DEFAULT_RESOURCES[resources.PROCESSORS]} (specified: {nProcsAvailable})",
                             printLevel=Printer.Verbosity.DEFAULT)
        nProcsAvailable = resources.DEFAULT_RESOURCES[resources.PROCESSORS]
    mainJobList = JobList(nNodesAvailable=1, nProcsAvailable=nProcsAvailable, name=libdistData.MAIN_JOBLIST, verbosity=verbosity)
    cleanJobList = JobList(nNodesAvailable=1, nProcsAvailable=nProcsAvailable, name="cleanJobList", verbosity=verbosity)
    # store pointers to jobList and distribution builder for each build configuration for cleanup.
    buildConfigurationSettings = list()
    failedSets = list()
    for buildConfiguration in buildConfigurations:
        subJobList, distBuilder, buildName = _buildJobList(buildConfiguration, pathToSourceCodeTarFile, configFilePath,
                                                           nProcsPerBuild, outputPath, traceSuitePath)
        mainJobList.addJob(subJobList)
        buildConfigurationSettings.append((subJobList, distBuilder, buildName))
    mainJobList.notifier.registerObserver(FailedJobsJsonOutput("failedJobs.json"), JobEvent.JOB_FAILED_EVENT)
    try:
        mainJobList.runJoblist()
    except KeyboardInterrupt:
        cleanJobList._resources = resources.Resources.fromParameters(nBuilds, 1, None)
    finally:
        failedSets = _cleanupJobList(cleanJobList, buildConfigurationSettings, keepOutput)
        if cleanJobList.jobs:
            cleanJobList.runJoblist(True)
        if keepOutput or mainJobList.status.code != jobConsts.DONE:
            pass
        elif not mainJobList.failedJobs and not cleanJobList.failedJobs:
            mainJobList.cleanup()
            cleanJobList.cleanup(force=True)
    return failedSets, mainJobList.failedJobs