- #
- # Copyright (c) 2006-2011 Nokia Corporation and/or its subsidiary(-ies).
- # All rights reserved.
- # This component and the accompanying materials are made available
- # under the terms of the License "Eclipse Public License v1.0"
- # which accompanies this distribution, and is available
- # at the URL "http://www.eclipse.org/legal/epl-v10.html".
- #
- # Initial Contributors:
- # Nokia Corporation - initial contribution.
- #
- # Contributors:
- #
- # Description:
- # raptor module
- # This module represents the running Raptor program. Raptor is started
- # either by calling the Main() function, which creates an instance of
- # the Raptor class and calls its methods to perform a build based
- # on command-line parameters, or by explicitly creating a Raptor
- # instance and calling its methods to set-up and perform a build.
- #
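- # A minimal usage sketch (illustrative only; the bld.inf path and the
- # "armv5" configuration name are example values, and the environment is
- # assumed to have been prepared by the sbs startup script):
- #
- #   build = Raptor()                     # settings from SBS_HOME / sbs_init.xml
- #   build.AddConfigName("armv5")         # a configuration to build
- #   build.AddBuildInfoFile("/src/mycomp/group/bld.inf")
- #   sys.exit(build.Build())              # returns 0 on success
- #
- # Command-line use goes through Main(argv) instead, which constructs the
- # same kind of Raptor object from the parsed arguments.
- #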
- name = "sbs" # the public name for the raptor build tool
- env = "SBS_HOME" # the environment variable that locates us
- xml = "sbs_init.xml" # the primary initialisation file
- env2 = "HOME" # the environment variable that locates the user
- xml2 = ".sbs_init.xml" # the override initialisation file
- import sys
- import os
- import stat
- import time
- import traceback
- import subprocess
- import types
- import json
- from xml.sax.saxutils import escape
- from raptor import generic_path
- import raptor.cache
- import raptor.cli
- import raptor.data
- import raptor.make
- import raptor.meta
- import raptor.utilities
- import raptor.version
- import raptor.timing as timing
- from raptor.makefile import MakefileSet
- from raptor.sysdef import SystemModel
- from raptor.filter_list import FilterList
- from raptor.pluginbox import PluginBox
- from raptor.buildrecord import BuildRecord
- if not "HOSTPLATFORM" in os.environ or not "HOSTPLATFORM_DIR" in os.environ or not "HOSTPLATFORM32_DIR" in os.environ:
- sys.stderr.write("Error: HOSTPLATFORM, HOSTPLATFORM_DIR and HOSTPLATFORM32_DIR must be set in the environment (this is usually done automatically by the startup script).\n")
- sys.exit(1)
- hostplatform = set(os.environ["HOSTPLATFORM"].split(" "))
- unixplatforms = set(['linux','freebsd','darwin','sunos'])
- isunix = not hostplatform.isdisjoint(unixplatforms)
- hostplatform_dir = os.environ["HOSTPLATFORM_DIR"]
- hostplatform32_dir = os.environ["HOSTPLATFORM32_DIR"]
- # defaults can use EPOCROOT
- if "EPOCROOT" in os.environ:
- incoming_epocroot = os.environ["EPOCROOT"]
- epocroot = incoming_epocroot.replace("\\","/")
- else:
- if 'win' in hostplatform:
- incoming_epocroot = os.sep
- epocroot = "/"
- os.environ["EPOCROOT"] = os.sep
- else:
- epocroot=os.environ['HOME'] + os.sep + "epocroot"
- os.environ["EPOCROOT"] = epocroot
- incoming_epocroot = epocroot
- if "SBS_BUILD_DIR" in os.environ:
- sbs_build_dir = os.environ["SBS_BUILD_DIR"]
- else:
- sbs_build_dir = (epocroot + "/epoc32/build").replace("//","/")
- # only use default XML from the epoc32 tree if it exists
- defaultSystemConfig = "lib/config"
- epoc32UserConfigDir = generic_path.Join(epocroot, "epoc32/sbs_config")
- if epoc32UserConfigDir.isDir():
- defaultSystemConfig = str(epoc32UserConfigDir) + os.pathsep + defaultSystemConfig
- # parameters that can be overridden by the sbs_init.xml file
- # or by the command-line.
- defaults = {
- "allowCommandLineOverrides" : True,
- "CLI" : "raptor_cli",
- "buildInformation" : generic_path.Path("bld.inf"),
- "defaultConfig" : "default",
- "jobs": 4,
- "keepGoing": False,
- "logFileName" : generic_path.Join(sbs_build_dir,"Makefile.%TIME.log"),
- "makeEngine" : "make",
- "preferBuildInfoToSystemDefinition" : False,
- "pruneDuplicateMakefiles": True,
- "quiet" : False,
- "systemConfig" : defaultSystemConfig,
- "systemDefinition" : generic_path.Path("System_Definition.xml"),
- "systemDefinitionBase" : generic_path.Path("."),
- "systemFLM" : generic_path.Path("lib/flm"),
- "systemPlugins" : generic_path.Path("raptor/plugins"),
- "topMakefile" : generic_path.Join(sbs_build_dir,"Makefile.%TIME"),
- "tries": 1,
- "writeSingleMakefile": True,
- "ignoreOsDetection": False,
- "toolcheck": "on",
- "incremental_parsing": False,
- "no_metadata_depend": False,
- "ignore_zero_flmcall_makefiles": False,
- "filterList": "filterterminal,filterlogfile"
- }
- class ModelNode(object):
- """ Represents any node in a a tree of build information
- e.g. a tree of bld.infs, mmps and finally things like resource blocks and string table blocks.
- This is before they are produced into "build" specs.
- """
- def __init__(self, id, parent = None):
- self.id = id
- self.type = type
- self.specs = []
- self.deps = [] # files that this node depends on
- self.depfiles = [] # files with dependencies listed in them in GNU make format
- self.children = set()
- self.unfurled = False
- self.parent = parent
- # Allow one to make a set
- def __hash__(self):
- return hash(self.id)
- def __eq__(self,other):
- return self.id == other.id
- def __iter__(self):
- return iter(self.children)
- def __getitem__(self,x):
- if isinstance(x, slice):
- return self.children[x.start:x.stop]
- return self.children[x]
- def __setitem__(self,k, v):
- self.children[k] = v
- def __len__(self):
- return len(self.children)
- def add(self, item):
- return self.children.add(item)
- def isunfurled(self, c):
- return self.unfurled == False
- def unique_filename(self):
- """ returns a string that is short enough to add to a filename that is believed to be unique
- within a single build. This really should be overridden by children """
- # not a great default but we're not supposed to know what self.id contains so we can't e.g. process it like a filename
- return raptor.utilities.sanitise(str(hash(self.id)))
- def alldeps(self):
- """ All the known simple dependencies of the object represented by this Node.
- This doesn't include dependencies that are generated by tools like CPP - alldepfiles() does that"""
- for d in self.deps:
- yield d
- for c in self.children:
- for d in c.alldeps():
- yield d
- def alldepfiles(self):
- """Dependencies are sometimes discovered by tools like CPP and then stored in files"""
- for d in self.depfiles:
- yield d
- for c in self.children:
- for d in c.alldepfiles():
- yield d
- def unfurl(self, build):
- """Find any children of this node by processing it, produces specs"""
- pass
- def unfurl_all(self, build):
- """Unfurl self and all children - preparatory e.g for realisation"""
- if not self.unfurled:
- self.unfurl(build)
- self.realise_exports(build) # permit communication of dependencies between children
- for c in self.children:
- c.unfurl_all(build)
- def realise_exports(self, build):
- """Do the things that are needed such that we can fully unfurl all
- sibling nodes. i.e. this step is here to "take care" of the dependencies
- between siblings.
- """
- pass
- def makefile_name(self, build):
- """use data from the build object to determine the makefilename for this node"""
- postfix = "_" + self.unique_filename()
- makefilename_base = build.topMakefile.Absolute()
- return str(makefilename_base) + postfix
-
- def realise_makefile(self, build, specs):
- makefile = self.makefile_name(build)
- metadepsfilename = makefile + ".metadeps"
- try:
- os.makedirs(str(generic_path.Path(makefile).Dir()))
- except OSError as e:
- pass # if the dir is already there
- build.Debug("Layer Deps: {0} with {1} children depfile {2}".format(self.id, len(self.children),metadepsfilename))
- build.InfoDiscovery(object_type = "layers", count = 1)
- build.InfoStartTime(object_type = "layer", task = "parse",
- key = makefile)
- makefileset = build.maker.Write(generic_path.Path(makefile), specs, build.buildUnitsToBuild)
- makefileset.write_metadeps(self.alldeps(), self.alldepfiles()) # ensure that this makefileset's dependency file has been written out
- build.InfoEndTime(object_type = "layer", task = "parse",
- key = makefile)
- # Tell the buildrecord about this set of makefiles
- build.build_record.record_makefileset(makefileset)
- return makefileset
- def realise(self, build):
- """Give the spec trees to the make engine and actually
- "build" the product represented by this model node"""
- # Must ensure that all children are unfurled at this point
- self.unfurl_all(build)
- sp = self.specs
- build.AssertBuildOK()
- makefileset = self.realise_makefile(build, sp)
- build.InfoStartTime(object_type = "layer", task = "build",
- key = (str(makefileset.directory) + "/" + str(makefileset.filenamebase)))
- result = build.Make(makefileset)
- build.InfoEndTime(object_type = "layer", task = "build",
- key = (str(makefileset.directory) + "/" + str(makefileset.filenamebase)))
- return result
- class Project(ModelNode):
- """A project or, in symbian-speak, an MMP
- """
- def __init__(self, filename, parent = None):
- super(Project,self).__init__(filename, parent = parent)
- # Assume that components are specified in mmp files for now
- # One day that tyranny might end.
- self.mmp_name = str(generic_path.Path.Absolute(filename))
- self.id = self.mmp_name
- self.unfurled = False
- self.deps.append(self.mmp_name)
- def makefile(self, makefilename_base, engine, named = False):
- """Makefiles for individual mmps not feasible at the moment"""
- pass
- # Cannot, currently, "unfurl an mmp" directly but do want
- # to be able to simulate the overall recursive unfurling of a build.
- class Component(ModelNode):
- """ An Abstract class for group of projects (where projects are
- usually things that represent one program or library) """
- def __init__(self, filename, layername="", componentname=""):
- super(Component,self).__init__(filename)
- self.filename = filename
- self.id = str(filename)
- self.exportspecs = []
- self.depfiles = []
- self.unfurled = False # We can parse this
- self.deps.append(str(self.filename))
-
- # Extra metadata optionally supplied with system definition file gathered components
- self.layername = layername
- self.name = componentname
- def render_bldinf(self, build):
- raise Exception("Can't render a bld.inf from component {0} - don't know how".format(self.filename))
- class BldinfComponent(Component):
- """A group of projects or, in symbian-speak, a bld.inf.
- """
- def __init__(self, filename, layername="", componentname=""):
- super(BldinfComponent,self).__init__(filename, layername = layername, componentname=componentname)
- # Assume that components are specified in bld.inf files for now
- # One day that tyranny might end.
- self.bldinf = None # Slot for a bldinf object if we spot one later
- self.bldinf_filename = generic_path.Path(str(filename)).Absolute()
- def AddMMP(self, filename):
- self.children.add(Project(filename))
- def render_bldinf(self, build):
- return self
- class QmakeErrorException(Exception):
- def __init__(self, text, output = "", errorcode=1,command=""):
- self.output = output
- self.errorcode = errorcode
- self.command=command
- def __str__(self):
- return "{0} - while running: {1}".format(self.output,self.command)
- class QtProComponent(BldinfComponent):
- """ represents a component that is specified in a .pro file. The intention is that it should
- be possible to translate it into a bld.inf as we don't intend to parse Qt files directly which
- would be an immense and pointless effort."""
- def __init__(self, filename, layername="", componentname=""):
- self.qtpro_filename = generic_path.Path(filename).Absolute()
- super(QtProComponent,self).__init__(filename,layername=layername, componentname=componentname)
- # automatically determine the related bld.inf name by putting it in the same dir as the qt file.
- self.bldinf_filename = generic_path.Join(self.qtpro_filename.Dir(), "bld.inf")
- self.bldinf_produced = False
- def render_bldinf(self, build):
- self.bldinf_produced = True
- qmake = build.metaeval.Get("QMAKE")
- moc = build.metaeval.Get("MOC")
- uic = build.metaeval.Get("UIC")
- rcc = build.metaeval.Get("RCC")
- # run qmake and produce the bld.inf immediately.
- shell = "/bin/sh" # only needed on linux.
- # should really get qmake(.exe)'s absolute location from somewhere
- global epocroot
- spec = build.metaeval.Get("QMAKESPEC")
- incdir = build.metaeval.Get("QMAKE_INCDIR_QT")
- headers = build.metaeval.Get("QT_HEADERS")
- makeenv = os.environ.copy()
- if isunix:
- command = [qmake, "-spec", spec, str(self.qtpro_filename), "-o",str(self.bldinf_filename),
- "QMAKE_INCDIR_QT=" + headers,
- "QMAKE_MOC=" + moc,
- "QMAKE_UIC=" + uic,
- "QMAKE_RCC=" + rcc]
- build.Debug("qmake command: {0}".format(command))
- p = subprocess.Popen(
- args = command,
- bufsize = 65535,
- stdout = subprocess.PIPE,
- stderr = subprocess.STDOUT,
- shell = False,
- universal_newlines = True,
- env = makeenv)
- else:
- command = "{0} -spec {1} {2} -o {3} QMAKE_INCDIR_QT={4} QMAKE_MOC={5} QMAKE_UIC={6} QMAKE_RCC={7}".format(qmake, spec, self.qtpro_filename, self.bldinf_filename, headers, moc,uic,rcc)
- build.Debug("qmake command: {0}".format(command))
- p = subprocess.Popen(
- args = command,
- bufsize = 65535,
- stdout = subprocess.PIPE,
- stderr = subprocess.STDOUT,
- shell = False,
- env = makeenv)
- stream = p.stdout
- self.qmake_output = []
- for l in stream:
- self.qmake_output.append(l)
-
- returncode = p.wait()
- if returncode != 0:
- e = QmakeErrorException("{0} failed for '{1}'".format(qmake, self.qtpro_filename), output = "\n".join(self.qmake_output), errorcode = returncode, command = command)
- raise e
- return self
- class Layer(ModelNode):
- """ Some components that should be built togther
- e.g. a Layer in the system definition.
-
- Components that come from system definition files can
- have extra surrounding metadata that we need to pass
- on for use in log output.
- """
- def __init__(self, name, componentlist=[]):
- """ componentlist may be a list of items of type Component xor type sysdef.SystemModelComponent
- @componentlist must be a list of objects that are derived from the Component class.
- """
- super(Layer,self).__init__(name)
- self.name = name
- for c in componentlist:
- # this is a component from the plain old command-line and we expect it to be of type "Component" already.
- self.children.add(c)
- def unique_filename(self):
- """ returns a string that is short enough to add to a filename that is believed to be unique
- within a single build. """
- # the layer name should be unique
- return raptor.utilities.sanitise(self.name)
- @classmethod
- def from_system_model(cls, name, sysmodel_componentlist):
- """ A factory method to build a layer from a sysdef.SystemModelComponent
- this eases the process of working with a "system_definition.xml" file. """
- l = cls(name) # Call our class' constructor
- for c in sysmodel_componentlist:
- l.children.add(BldinfComponent(c, c.GetLayerName(), c.GetContainerName("component")))
- return l
- def unfurl(self, build):
- """Discover the children of this layer. This involves parsing the component MetaData (bld.infs, mmps).
- Takes a raptor object as a parameter (build), together with a list of Configurations.
- We currently have parsers that work on collections of components/bld.infs and that cannot
- parse at a "finer" level. So one can't 'unfurl' an mmp at the moment.
- Returns True if the object was successfully unfurled.
- """
- # setup all our components
- for c in self.children:
- c.specs = []
- self.configs = build.buildUnitsToBuild
- # render the components down to bld.inf form (if possible)
- # since we don't understand any other component format
- components = []
- for c in self.children:
- try:
- components.append(c.render_bldinf(build))
- except QmakeErrorException as e:
- build.Error(str(e))
- if len(components) > 0:
- try:
- # create a MetaReader that is aware of the list of
- # configurations that we are trying to build.
- metaReader = raptor.meta.MetaReader(build, build.buildUnitsToBuild)
- # convert the list of bld.inf files into a specification
- # hierarchy suitable for all the configurations we are using.
- self.specs = list(build.generic_specs)
- self.specs.extend(metaReader.ReadBldInfFiles(components, doexport = build.doExport, dobuild = not build.doExportOnly))
- except raptor.meta.MetaDataError as e:
- build.Error(e.Text)
- self.unfurled = True
- def _split_into_blocks(self, build):
- """ Split layer's components into blocks for parallel parsing """
- nc = len(self.children)
- number_blocks = build.jobs
- block_size = int((nc / number_blocks) + 1)
- component_blocks = [] # list of mini-layers, split up for parallel parsing
- b = 0
- childlist = list(self.children)
- while b < nc:
- l = Layer(self.name, childlist[b:b+block_size])
- component_blocks.append(l)
- b += block_size
- while len(component_blocks[-1].children) <= 0:
- component_blocks.pop()
- number_blocks -= 1
- build.Info("Parallel Parsing: bld.infs split into {0} blocks\n".format(number_blocks))
- return component_blocks
- def meta_realise(self, build):
- """Generate specs that can be used to "take care of" finding out more
- about this metaunit - i.e. one doesn't want to parse it immediately
- but to create a makefile that will parse it.
- In this case it allows bld.infs to be parsed in parallel by make."""
- # insert the start time into the Makefile name?
- self.configs = build.buildUnitsToBuild
- # Pass certain CLI flags through to the makefile-generating sbs calls
- cli_options = ""
- if build.debugOutput == True:
- cli_options += " -d"
- if build.ignoreOsDetection == True:
- cli_options += " -i"
- if build.keepGoing == True:
- cli_options += " -k"
- if build.quiet == True:
- cli_options += " -q"
- if build.noDependInclude == True:
- cli_options += " --no-depend-include"
- if build.noDependGenerate == True:
- cli_options += " --no-depend-generate"
- tm = build.topMakefile.Absolute()
- # List of all configurations apart from "build" which
- # is used internally for generating makefiles for
- # parallel parsing
- configList = " ".join([c.name for c in self.configs if c.name != "build" ])
-
- # Cause the binding makefiles to have the toplevel makefile's
- # name. The bindees have _pp appended.
- binding_makefiles = MakefileSet(str(tm.Dir()), build.maker.selectors, makefiles=None, filenamebase=str(tm.File()))
- build.topMakefile = generic_path.Path(str(tm) + "_pp")
-
- component_blocks = self._split_into_blocks(build)
- spec_nodes = []
- loop_number = 0
- for block in component_blocks:
- loop_number += 1
- specNode = raptor.data.Specification("metadata_" + self.name)
-
- # root path for generated sysdef files and their partnering makefiles
- makefile_path = str(build.topMakefile) + "_" + str(loop_number)
- try:
- os.unlink(makefile_path)
- except Exception:
- pass
-
- pp_system_definition = makefile_path + ".sysdef.xml"
-
- try:
- sys_def_writer = SystemModel(build, aDoRead=False)
- sys_def_writer.AddLayer(block)
- sys_def_writer.Write(pp_system_definition)
- build.Debug("Wrote intermediate parallel-parsing system definition file " + pp_system_definition)
- except Exception as e:
- build.Error("Failed to write intermediate parallel-parsing system definition file: {0} {1}".format(pp_system_definition,str(e)))
- raise e
- configList = " ".join([c.name for c in self.configs if c.name != "build" ])
- # add some basic data in a component-wide variant
- var = raptor.data.Variant()
- var.AddOperation(raptor.data.Set("PP_SYSTEM_DEFINITION", pp_system_definition))
- var.AddOperation(raptor.data.Set("MAKEFILE_PATH", makefile_path))
- var.AddOperation(raptor.data.Set("CONFIGS", configList))
- var.AddOperation(raptor.data.Set("CLI_OPTIONS", cli_options))
- # Allow the flm to skip exports. Note: this parameter
- doexport_str = '1'
- if not build.doExport:
- doexport_str = ''
- var.AddOperation(raptor.data.Set("DOEXPORT", doexport_str ))
- # Pass on '-n' (if specified) to the makefile-generating sbs calls
- if build.noBuild:
- var.AddOperation(raptor.data.Set("NO_BUILD", "1"))
- specNode.AddVariant(var)
- try:
- interface = build.cache.FindNamedInterface("build.makefiles")
- specNode.SetInterface(interface)
- except KeyError:
- build.Error("Can't find flm interface 'build.makefiles' ")
- spec_nodes.append(specNode)
- binding_makefiles.addInclude(str(makefile_path)+"_all")
- build.InfoDiscovery(object_type = "layers", count = 1)
- build.InfoStartTime(object_type = "layer", task = "parse",
- key = str(build.topMakefile))
- # Generate the makefileset and build it
- mset = self.realise_makefile(build, spec_nodes)
- mset.close()
- gen_result = build.Make(mset)
- build.InfoEndTime(object_type = "layer", task = "parse",
- key = str(build.topMakefile))
- build.InfoStartTime(object_type = "layer", task = "build",
- key = str(build.topMakefile))
- build.Debug("Binding Makefile base name is {0} ".format(binding_makefiles.filenamebase))
- binding_makefiles.close()
- b = build.Make(binding_makefiles)
- build.InfoEndTime(object_type = "layer", task = "build",
- key = str(build.topMakefile))
- return b
- class BuildCannotProgressException(Exception):
- pass
- # raptor module classes
- class Raptor(object):
- """An instance of a running Raptor program.
- When operated from the command-line there is a single Raptor object
- created by the Main function. When operated by an IDE several Raptor
- objects may be created and operated at the same time.
-
- """
- # mission enumeration
- M_BUILD = 1
- M_QUERY = 2
- M_VERSION = 3
- def __init__(self, home = None, commandline = [], dotargets = True, logger = None, load_defaults = True):
- """
- Keyword Arguments:
- home - where to load xml settings from (default None means use the SBS_HOME environment variable, falling back to the current working directory)
- commandline - potentially a commandline full of options to set (default empty list)
- dotargets - when processing a commandline this ensures
- that correct make targets are added e.g. "WHAT" if doing --what. (Default True)
- logger - a class that provides Debug, Info, Warning and Error
- functions ( default is an internal logger )"""
- self.args = commandline
- self.dotargets = dotargets
- self.logger = logger
- if self.logger is not None:
- # Patching functions out here rather than putting
- # an if statement in each function seems
- # a little more efficient although it may
- # be over optimisation.
- self.Info = self.logger.Info
- self.Debug = self.logger.Debug
- self.Warn = self.logger.Warn
- self.load_defaults = load_defaults
- self._default_setup(home)
- # Load up the all the other XML configuration data:
- self.configPath = generic_path.NormalisePathList(self.systemConfig.split(os.pathsep))
- # If there are any commandline arguments then apply them
- if len(commandline) > 0:
- # remember the arguments for the log
- self.args = commandline
- # assuming self.CLI = "cli"
- if not raptor.cli.GetArgs(self, self.args):
- # raise exception quietly since the error
- # messages are already printed out from GetArgs
- raise BuildCannotProgressException("")
- # With incremental parsing which is itself an experimental feature
- # it is relatively safe to switch on a feature that doesn't execute
- # Makefiles if they have no flm calls in them
- if self.incremental_parsing:
- self.ignore_zero_flmcall_makefiles = True
- # Validate our current state
- if self.incremental_parsing and self.doParallelParsing:
- raise BuildCannotProgressException("The parallel parsing (--pp=on) and incremental parsing (--ip=on) options cannot be used together at the moment")
- if (self.dotargets):
- self._check_and_set_build_targets()
- def _default_setup(self, home = None):
- """the default set-up state"""
- self.errorCode = 0
- self.skipAll = False
- self.summary = True
- self.out = sys.stdout # Just until filters get started.
- # Create a bootstrap output system.
- self.out = FilterList()
- if home == None:
- try:
- home = os.environ[env]
- except KeyError:
- home = os.getcwd()
- # make sure the home directory exists
- self.home = generic_path.Path(home).Absolute()
- if not self.home.isDir():
- self.Error("{0} '{1}' is not a directory".format(env, self.home))
- raise BuildCannotProgressException("{0} '{1}' is not a directory".format(env, str(self.home)))
- # the set-up file location.
- # use the override "env2/xml2" if it exists
- # else use the primary "env/xml" if it exists
- # else keep the hard-coded defaults.
- self.raptorXML = self.home.Append(xml)
- if env2 in os.environ:
- sbs_init = generic_path.Join(os.environ[env2], xml2)
- if sbs_init.isFile():
- self.raptorXML = sbs_init
- # things that can be overridden by the set-up file
- for key, value in defaults.items():
- self.__dict__[key] = value
- # things to initialise
- self.args = []
- self.layers = []
- self.orderLayers = False
- self.commandline_layer = Layer('commandline')
- self.systemModel = None
- self.systemDefinitionFile = None
- self.systemDefinitionRequestedLayers = []
- self.systemDefinitionOrderLayers = False
- self.specGroups = {}
- self.configNames = []
- self.configsToBuild = set()
- self.makeOptions = []
- self.maker = None
- self.debugOutput = False
- self.doExportOnly = False
- self.doExport = True
- self.noBuild = False
- self.noDependInclude = False
- self.noDependGenerate = False
- self.projects = set()
- self.queries = []
-
- self.cache = raptor.cache.Cache(self)
- self.override = {env: str(self.home)}
- self.targets = []
- self.defaultTargets = []
- self.doCheck = False
- self.doWhat = False
- self.doParallelParsing = False
- self.doCaseFolding_rsg = False
- self.mission = Raptor.M_BUILD
- # what platform and filesystem are we running on?
- self.filesystem = raptor.utilities.getOSFileSystem()
- self.timing = True # Needed by filters such as copy_file to monitor progress
- self.toolset = None
- self.starttime = time.time()
- # Create a unique time id that combines millisecond part of start time
- # and id of the process which Raptor python is running on
- timeid = str(round(self.starttime, 3)).split('.')[1] + '-' + str(os.getpid())
- self.timestring = time.strftime("%Y-%m-%d-%H-%M-%S.") + timeid
- self.fatalErrorState = False
- if self.load_defaults:
- # Load up the raptor defaults from XML (formerly from the ConfigFile function)
- if self.raptorXML.isFile():
- self.cache.Load(self.raptorXML)
- # find the 'defaults.init' variant and extract the values
- try:
- var = self.cache.FindNamedVariant("defaults.init")
- evaluator = self.GetEvaluator( None, raptor.data.BuildUnit(var.name,[var]) )
- for key, value in defaults.items():
- newValue = evaluator.Resolve(key)
- if newValue != None:
- # got a string for the value
- if type(value) == bool:
- newValue = (newValue.lower() != "false")
- elif type(value) == int:
- newValue = int(newValue)
- elif isinstance(value, generic_path.Path):
- newValue = generic_path.Path(newValue)
- self.__dict__[key] = newValue
- except KeyError:
- # it is OK to not have this but useful to say it wasn't there
- self.Info("No 'defaults.init' configuration found in " + str(self.raptorXML))
- def _load_cache(self):
- """Before initiating any action like a build or query, we should load up all
- xml configuration. This function is not intended for use except by other
- members of the Raptor class.
- """
- self.InfoStartTime(object_type = "config_cache", task = "load", key="cache")
- def mkAbsolute(aGenericPath):
- """ internal function to make a generic_path.Path
- absolute if required"""
- if not aGenericPath.isAbsolute():
- return self.home.Append(aGenericPath)
- else:
- return aGenericPath
- # make generic paths absolute (if required)
- self.configPath = [mkAbsolute(p) for p in self.configPath]
- self.cache.Load(self.configPath)
- if not self.systemFLM.isAbsolute():
- self.systemFLM = self.home.Append(self.systemFLM)
- self.cache.Load(self.systemFLM)
- # Make it possible to ask this instance about default tools locations without
- # doing the evaluator creation repeatedly for no reason
- self.metavariant = self.cache.FindNamedVariant("meta")
- self.metaeval = self.GetEvaluator(None, raptor.data.BuildUnit(self.metavariant.name, [self.metavariant]) )
- self.InfoEndTime(object_type = "config_cache", task = "load", key="cache")
- def AddConfigList(self, configPathList):
- # this function converts cmd line option into a list
- # and prepends it to default config.
- self.configPath = generic_path.NormalisePathList(configPathList.split(os.pathsep)) + self.configPath
- return True
- def AddConfigName(self, name):
- if name == "build":
- traceback.print_stack((sys.stdout))
- sys.exit(1)
- self.configNames.append(name)
- return True
- def RunQuietly(self, TrueOrFalse):
- self.quiet = TrueOrFalse
- return True
- def SetCheck(self, TrueOrFalse):
- self.doCheck = TrueOrFalse
- return True
- def SetWhat(self, TrueOrFalse):
- self.doWhat = TrueOrFalse
- return True
- def SetEnv(self, name, value):
- self.override[name] = value
- def AddTarget(self, target):
- if self.doCheck or self.doWhat:
- self.Warn("ignoring target {0} because --what or --check is specified.\n".format(target))
- else:
- self.targets.append(target)
- def AddSourceTarget(self, filename):
- # source targets are sanitised and then added as if they were a "normal" makefile target
- # in addition they have a default, empty, top-level target assigned in order that they can
- # be presented to any generated makefile without error
- sourceTarget = generic_path.Path(filename).Absolute()
- sourceTarget = 'SOURCETARGET_' + raptor.utilities.sanitise(str(sourceTarget))
- self.AddTarget(sourceTarget)
- self.defaultTargets.append(sourceTarget)
- return True
- def SetSysDefFile(self, filename):
- self.systemDefinitionFile = generic_path.Path(filename)
- return True
- def SetSysDefBase(self, path):
- self.systemDefinitionBase = generic_path.Path(path)
- return True
- def AddSysDefLayer(self, layer):
- self.systemDefinitionRequestedLayers.append(layer)
- return True
- def SetSysDefOrderLayers(self, TrueOrFalse):
- self.systemDefinitionOrderLayers = TrueOrFalse
- return True
- def AddBuildInfoFile(self, filename):
- bldinf = str(generic_path.Path(filename).Absolute())
- self.commandline_layer.add(BldinfComponent(bldinf))
- return True
-
- def AddQtProFile(self, filename):
- qt_pro_file = str(generic_path.Path(filename).Absolute())
- self.commandline_layer.add(QtProComponent(qt_pro_file))
-
- return True
- def SetTopMakefile(self, filename):
- self.topMakefile = generic_path.Path(filename)
- return True
- def SetDebugOutput(self, TrueOrFalse):
- self.debugOutput = TrueOrFalse
- return True
- def SetExportOnly(self, TrueOrFalse):
- self.doExportOnly = TrueOrFalse
- if not self.doExport:
- self.Error("The --noexport and --export-only options are incompatible - won't to do anything useful")
- return False
- return True
- def SetNoExport(self, TrueOrFalse):
- self.doExport = not TrueOrFalse
- if self.doExportOnly:
- self.Error("The --noexport and --export-only options are incompatible - won't to do anything useful")
- return False
- return True
- def SetNoBuild(self, TrueOrFalse):
- self.noBuild = TrueOrFalse
- return True
- def SetNoDependInclude(self, TrueOrFalse):
- self.noDependInclude = TrueOrFalse
- return True
- def SetNoDependGenerate(self, TrueOrFalse):
- self.noDependGenerate = TrueOrFalse
- return True
- def SetKeepGoing(self, TrueOrFalse):
- self.keepGoing = TrueOrFalse
- return True
- def SetLogFileName(self, logfile):
- if logfile == "-":
- self.logFileName = None # stdout
- else:
- self.logFileName = generic_path.Path(logfile)
- return True
- def SetMakeEngine(self, makeEngine):
- self.makeEngine = makeEngine
- return True
- def AddMakeOption(self, makeOption):
- self.makeOptions.append(makeOption)
- return True
- def SetJobs(self, numberOfJobs):
- try:
- self.jobs = int(numberOfJobs)
- except ValueError:
- self.jobs = 0
- if self.jobs < 1:
- self.Warn("The number of jobs ({0}) must be a positive integer\n".format(numberOfJobs))
- self.jobs = 1
- return False
- return True
- def SetTries(self, numberOfTries):
- try:
- self.tries = int(numberOfTries)
- except ValueError:
- self.tries = 0
- if self.tries < 1:
- self.Warn("The number of tries ({0}) must be a positive integer\n".format(numberOfTries))
- self.tries = 1
- return False
- return True
- def SetToolCheck(self, type):
- type = type.lower()
- toolcheck_types= [ "forced", "on", "off" ]
- if type in toolcheck_types:
- self.toolcheck=type
- else:
- self.Warn("toolcheck option must be one of: {0}".format(toolcheck_types))
- return False
- return True
- def SetTiming(self, TrueOrFalse):
- self.Info("--timing switch no longer has any effect - build timing is now permanently on")
- return True
- def SetParallelParsing(self, type):
- type = type.lower()
- if type == "on":
- self.doParallelParsing = True
- elif type == "slave":
- self.isParallelParsingSlave = True
- elif type == "off":
- self.doParallelParsing = False
- else:
- self.Warn(" parallel parsing option must be either 'on' or 'off' (was {0})".format(type))
- return False
- return True
- def SetRsgCaseFolding(self, TrueOrFalse):
- self.doCaseFolding_rsg = TrueOrFalse
- return True
- def SetIncrementalParsing(self, type):
- if type == "on":
- self.incremental_parsing = True
- elif type == "off":
- self.incremental_parsing = False
- else:
- self.Warn(" incremental parsing option must be either 'on' or 'off' (was {0})".format(type))
- return False
- return True
-
- def SetNoMetadataDepend(self, TrueOrFalse):
- self.no_metadata_depend = TrueOrFalse
- return True
- def AddProject(self, projectName):
- self.projects.add(projectName.lower())
- return True
- def AddQuery(self, q):
- self.queries.append(q)
- self.mission = Raptor.M_QUERY
- return True
-
- def FilterList(self, value):
- self.filterList = value
- return True
- def IgnoreOsDetection(self, value):
- self.ignoreOsDetection = value
- return True
- def PrintVersion(self,dummy):
- global name
- print("{0} version {1}".format(name, raptor.version.fullversion()))
- self.mission = Raptor.M_VERSION
- return True
- # worker methods
- def Introduction(self):
- """Print a header of useful information about Raptor"""
- self.Info("{0}: version {1}".format(name, raptor.version.fullversion()))
- self.Info("{0} {1}".format(env, str(self.home)))
- self.Info("Set-up {0}".format(str(self.raptorXML)))
- self.Info("Command-line-arguments {0}".format(" ".join(self.args)))
- self.Info("Current working directory {0}".format(os.getcwd()))
- # the inherited environment
- for e, value in sorted( os.environ.items() ):
- self.Info("Environment {0}={1}".format(e, value.replace("]]>", "]]>")))
- # and some general debug stuff
- self.Debug("Platform {0}".format("-".join(hostplatform)))
- self.Debug("Filesystem {0}".format(self.filesystem))
- self.Debug("Python {0}.{1}.{2}".format(*sys.version_info[:3]))
- self.Debug("Command-line-parser {0}".format(self.CLI))
- self.Debug("Currently selected filters: {0}".format(self.filterList))
- for e,value in self.override.items():
- self.Debug("Override {0} = {1}".format(e, value))
- for t in self.targets:
- self.Debug("Target {0}".format(t))
- def _check_and_set_build_targets(self):
- # resolve inter-argument dependencies.
-
- targets = [x.lower() for x in self.targets]
-
- # --what or --check implies the WHAT target and FilterWhat Filter
- if self.doWhat or self.doCheck:
- self.targets = ["WHAT"]
- self.filterList = "filterwhat"
- else:
- # 1. CLEAN/CLEANEXPORT/REALLYCLEAN needs the FilterClean filter.
- # 2. Targets that clean should not be combined with other targets.
- CL = "clean"
- CE = "cleanexport"
- RC = "reallyclean"
- EX = "export"
- is_clean = 0
- is_suspicious_clean = 0
- if CL in targets and CE in targets:
- is_clean = 1
- if len(targets) > 2:
- is_suspicious_clean = 1
- elif RC in targets or CL in targets or CE in targets:
- is_clean = 1
- if len(targets) > 1:
- is_suspicious_clean = 1
- if is_clean:
- self.filterList += ",filterclean"
- if is_suspicious_clean:
- self.Warn('CLEAN, CLEANEXPORT and REALLYCLEAN should not be combined with other targets as the result is unpredictable.')
- else:
- # Copyfile implements the <copy> tag which is primarily useful with cluster builds.
- # It allows file copying to occur on the primary build host rather than on the cluster.
- # This is more efficient.
-
- if EX in targets: # sbs EXPORT implies sbs --export-only
- self.SetExportOnly(True)
- elif self.doExportOnly: # sbs --export-only implies sbs EXPORT
- self.targets.append(EX)
- else:
- self.filterList += ",filtercopyfile"
-
- def GetBuildUnitsToBuild(self, configNames):
- """Return a list of the configuration objects that correspond to the
- list of configuration names in the configNames parameter.
- GetBuildUnitsToBuild(["armv5", "winscw"])
- >>> [ config1, config2, ... , configN ]
- """
- if len(configNames) == 0:
- # use default config
- if len(self.defaultConfig) == 0:
- self.Warn("No default configuration name")
- else:
- configNames.append(self.defaultConfig)
- buildUnitsToBuild = raptor.data.GetBuildUnits(configNames, self.cache, self)
- for b in buildUnitsToBuild:
- self.Info("Buildable configuration '{0}'".format(b.name))
- if len(buildUnitsToBuild) == 0:
- self.Error("No build configurations given")
- return buildUnitsToBuild
- def CheckToolset(self, evaluator, configname):
- """Check the toolset for a particular config, allow other objects access
- to the toolset for this build (e.g. the make class)."""
- if self.toolset is None:
- if self.toolcheck == 'on':
- self.toolset = raptor.data.ToolSet(log=self)
- elif self.toolcheck == 'forced' :
- self.toolset = raptor.data.ToolSet(log=self, forced=True)
- else:
- return True
- return self.toolset.check(evaluator, configname)
- def CheckConfigs(self, configs):
- """ Tool checking for all the buildable configurations
- NB. We are allowed to use different tool versions for different
- configurations."""
- tools_ok = True
- tool_problems = []
- for b in configs:
- self.Debug("Tool check for {0}".format(b.name))
- config_ok = False #default
- try:
- evaluator = self.GetEvaluator(None, b, gathertools=True)
- config_ok = self.CheckToolset(evaluator, b.name)
- except raptor.data.UninitialisedVariableException as e:
- tool_problems.append(b.name)
- self.Error("{0} is a bad configuration: {1}".format(b.name,str(e)))
- tools_ok = tools_ok and config_ok
- if len(tool_problems) > 0:
- self.FatalError("Build stopped because the following requested configurations are incomplete or invalid: {0}".format(", ".join(tool_problems)))
- return tools_ok
- def GatherSysModelLayers(self, systemModel, systemDefinitionRequestedLayers):
- """Return a list of lists of components to be built.
- components = GatherSysModelLayers(self, configurations)
- >>> set("abc/group/bld.inf","def/group/bld.inf, ....")
- """
- layersToBuild = []
- if systemModel:
- # We either process all available layers in the system model, or a subset of
- # layers specified on the command line. In both cases, the processing is the same,
- # and can be subject to ordering if explicitly requested.
- systemModel.DumpInfo()
- if systemDefinitionRequestedLayers:
- layersToProcess = systemDefinitionRequestedLayers
- else:
- layersToProcess = systemModel.GetLayerNames()
- for layer in layersToProcess:
- systemModel.DumpLayerInfo(layer)
- if systemModel.IsLayerBuildable(layer):
- layersToBuild.append(Layer.from_system_model(layer,
- systemModel.GetLayerComponents(layer)))
- return layersToBuild
- # Add bld.inf or system definition xml to command line layers (depending on preference)
- def FindSysDefIn(self, aDir = None):
- # Find a system definition file
- if aDir is None:
- dir = generic_path.CurrentDir()
- else:
- dir = generic_path.Path(aDir)
- sysDef = dir.Append(self.systemDefinition)
- if not sysDef.isFile():
- return None
- return sysDef
- def FindComponentIn(self, aDir = None):
- # look for a bld.inf
- if aDir is None:
- dir = generic_path.CurrentDir()
- else:
- dir = generic_path.Path(aDir)
- bldInf = dir.Append(self.buildInformation)
- if bldInf.isFile():
- return BldinfComponent(bldInf)
- return None
- def GenerateGenericSpecs(self, configsToBuild):
- # if a Configuration has any config-wide interfaces
- # then add a Specification node to call each of them.
- configWide = {}
- genericSpecs = []
- for c in configsToBuild:
- evaluator = self.GetEvaluator(None, c)
- iface = evaluator.Get("INTERFACE.config")
- if iface:
- if iface in configWide:
- # seen it already, so reuse the node
- filter = configWide[iface]
- filter.AddConfigCondition(c.name)
- else:
- # create a new node
- filter = raptor.data.Filter(name = "config_wide")
- filter.AddConfigCondition(c.name)
- for i in iface.split():
- spec = raptor.data.Specification(i)
- spec.SetInterface(i)
- filter.AddChildSpecification(spec)
- # remember it, use it
- configWide[iface] = filter
- genericSpecs.append(filter)
- return genericSpecs
- def GetEvaluator(self, specification, configuration, gathertools=False):
- """ this will perform some caching later """
- return raptor.data.Evaluator(specification, configuration, gathertools=gathertools, cache = self.cache)
- def Make(self, makefileset):
- if not self.noBuild and makefileset is not None:
- if self.maker.Make(makefileset, not self.ignore_zero_flmcall_makefiles):
- self.Info("The make-engine exited successfully.")
- return True
- else:
- self.Error("The make-engine exited with errors.")
- return False
- else:
- self.Info("No build performed")
- def Report(self):
- if self.quiet:
- return
- # Allow filters access to the build duration or "run time"
- self.raptor_params.runtime = int(0.5 + self.build_record.timing['runtime'])
- self.Info("Run time {0} seconds".format(self.raptor_params.runtime))
- def AssertBuildOK(self):
- """Raise a BuildCannotProgressException if no further processing is required
- """
- if self.Skip():
- raise BuildCannotProgressException("")
- return True
- def Skip(self):
- """Indicate not to perform operation if:
- fatalErrorState is set
- an error code is set but we're not in keepgoing mode
- """
- return self.fatalErrorState or ((self.errorCode != 0) and (not self.keepGoing))
- def InitPlugins(self):
- """ Find all the raptor plugins and put them into a pluginbox. """
- if not self.systemPlugins.isAbsolute():
- self.systemPlugins = self.home.Append(self.systemPlugins)
- self.pbox = PluginBox(str(self.systemPlugins))
- # Try to load up plugins from the era before raptor-as-a-package
- oldplugins = self.home.Append("python/plugins")
- oldraptor_dir = str(self.home.Append("python"))
- sys.path.insert(0,oldraptor_dir)
- self.pbox.add_plugins(str(oldplugins))
- del sys.path[0] # remove the temporary entry added above
- # log file open/close
- def OpenLog(self):
- """Open a log file for the various I/O methods to write to."""
- try:
- self.InitPlugins()
-
- self.raptor_params = BuildStats(self)
- # Open the requested plugins using the pluginbox
- self.out.open(self.raptor_params, self.filterList, self.pbox)
- # log header
- self.out.write("<?xml version=\"1.0\" encoding=\"ISO-8859-1\" ?>\n")
- namespace = "http://symbian.com/xml/build/log"
- progress_namespace = "http://symbian.com/xml/build/log/progress"
- schema = "http://symbian.com/xml/build/log/1_0.xsd"
- self.out.write("<buildlog sbs_version=\"{0}\" xmlns=\"{1}\" xmlns:progress=\"{2}\""
- " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"{3} {4}\">\n".format(
- raptor.version.fullversion(), namespace, progress_namespace, namespace, schema))
- self.logOpen = True
- except Exception as e:
- self.out = sys.stdout # make sure that we can actually get errors out.
- self.logOpen = False
- self.FatalError("Unable to open the output logs: {0}".format(str(e)))
- def CloseLog(self):
- if self.logOpen:
- self.out.summary()
- self.out.write("</buildlog>\n")
- if not self.out.close():
- self.errorCode = 1
- def Cleanup(self):
- # ensure that the toolset cache is flushed.
- if self.toolset is not None:
- self.toolset.write()
- # I/O methods
- @staticmethod
- def attributeString(dictionary):
- """Turn a dictionary into a string of XML attributes"""
- return "".join([" {0}='{1}'".format(a,v) for a,v in dictionary.items()])
- def Info(self, msg, **attributes):
- """Send an information message to the configured channel
- (XML control characters will be escaped)
- """
- self.out.write("<info{0}>{1}</info>\n".format(self.attributeString(attributes), escape(msg)))
- def InfoDiscovery(self, object_type, count):
- if self.timing:
- try:
- self.out.write(timing.Timing.discovery_string(object_type = object_type,
- count = count))
- except Exception as exception:
- self.Error(str(exception), function = "InfoDiscoveryTime")
- def InfoStartTime(self, object_type, task, key):
- if self.timing:
- try:
- self.out.write(timing.Timing.start_string(object_type = object_type,
- task = task, key = key))
- except Exception as exception:
- self.Error(str(exception), function = "InfoStartTime")
- def InfoEndTime(self, object_type, task, key):
- if self.timing:
- try:
- self.out.write(timing.Timing.end_string(object_type = object_type,
- task = task, key = key))
- except Exception as exception:
- self.Error(str(exception), function = "InfoEndTime")
- def Debug(self, msg, **attributes):
- "Send a debugging message to the configured channel"
- # the debug text is out of our control so wrap it in a CDATA
- # in case it contains characters special to XML... like <>
- if self.debugOutput:
- self.out.write("<debug{0}><![CDATA[\n{1}\n]]></debug>\n".format(self.attributeString(attributes),escape(msg)))
- def Warn(self, msg, **attributes):
- """Send a warning message to the configured channel
- (XML control characters will be escaped)
- """
- self.out.write("<warning{0}>{1}</warning>\n".format(self.attributeString(attributes), escape(msg)))
- def FatalError(self, msg, **attributes):
- """Send an error message to the configured channel. This implies such a serious
- error that the entire build must be shut down asap whilst still finishing off
- correctly whatever housekeeping is possible e.g. producing error reports.
- Remains quiet if the raptor object is already in a fatal state since any
- further errors are probably triggered by the first.
- """
- if not self.fatalErrorState:
- self.out.write("<error{0}>{1}</error>\n".format(self.attributeString(attributes), escape(msg)))
- self.errorCode = 1
- self.fatalErrorState = True
- def Error(self, msg, **attributes):
- """Send an error message to the configured channel
- (XML control characters will be escaped)
- """
- self.out.write("<error{0}>{1}</error>\n".format(self.attributeString(attributes), escape(msg)))
- self.errorCode = 1
- def PrintXML(self, format, *extras):
- "Print to configured channel (no newline is added) (assumes valid xml)"
- if format:
- self.out.write(format % extras)
- def GetLayersFromCLI(self):
- """Returns the list of layers as specified by the
- commandline interface to Raptor e.g. parameters
- or the current directory"""
- layers=[]
- # Look for bld.infs or sysdefs in the current dir if none were specified
- if self.systemDefinitionFile == None and len(self.commandline_layer) == 0:
- cwd = os.getcwd()
- if not self.preferBuildInfoToSystemDefinition:
- self.systemDefinitionFile = self.FindSysDefIn(cwd)
- if self.systemDefinitionFile == None:
- aComponent = self.FindComponentIn(cwd)
- if aComponent is not None:
- layers.append(Layer('default',[aComponent]))
- else:
- aComponent = self.FindComponentIn(cwd)
- if aComponent is None:
- self.systemDefinitionFile = self.FindSysDefIn(cwd)
- else:
- layers.append(Layer('default',[aComponent]))
- if len(layers) <= 0 and self.systemDefinitionFile == None:
- self.Warn("No default bld.inf or system definition file found in current directory ({0})".format(cwd))
- # If we now have a System Definition to parse then get the layers of components
- if self.systemDefinitionFile != None:
- systemModel = SystemModel(self, self.systemDefinitionFile, self.systemDefinitionBase)
- layers = self.GatherSysModelLayers(systemModel, self.systemDefinitionRequestedLayers)
- # Now get components specified on a commandline - build them after any
- # layers in the system definition.
- if len(self.commandline_layer) > 0:
- layers.append(self.commandline_layer)
- # If we aren't building components in order then flatten down
- # the groups
- if not self.systemDefinitionOrderLayers:
- # Flatten the layers into one group of components if
- # we are not required to build them in order.
- newcg = Layer("all")
- for cg in layers:
- for c in cg:
- newcg.add(c)
- layers = [newcg]
- return layers
- def Query(self):
- "process command-line queries."
-
- if self.mission != Raptor.M_QUERY:
- return 0
-
- # establish an object cache based on the current settings
- self._load_cache()
-
- # our "self" is a valid object for initialising an API Context
- import raptor.api
- context = raptor.api.Context(self)
-
- # initialise the plugins in case they are needed for the query
- self.InitPlugins()
-
- print("<sbs version='{0}'>".format(raptor.version.numericversion()))
-
- for q in self.queries:
- try:
- print(context.stringquery(q))
-
- except Exception as e:
- self.Error("exception '{0}' with query '{1}'".format(str(e), q))
-
- print("</sbs>")
- return self.errorCode
-
- def Build(self):
- if self.mission != Raptor.M_BUILD: # help or version requested instead.
- return 0
- # open the log file
- self.OpenLog()
- try:
- self.AssertBuildOK()
- # show the command and platform info
- self.Introduction()
- # load the cache of configurations etc
- self._load_cache()
- # establish an object cache
- self.AssertBuildOK()
- # find out what configurations to build
- buildUnitsToBuild = self.GetBuildUnitsToBuild(self.configNames)
- if len(buildUnitsToBuild) == 0:
- raise BuildCannotProgressException("No configurations to build.")
-
- self.buildUnitsToBuild = buildUnitsToBuild
- self.AssertBuildOK()
- ###### insert the start time into the Makefile name
- makefile = self.topMakefile.Absolute()
- makefile.path = makefile.path.replace("%TIME", self.timestring)
- self.topMakefile = makefile
- ######
- # Create a build record as non-parallel parsed builds can take advantage of incremental
- # makefile generation with some level of safety:
- tm_dir = str(makefile.Dir())
- tm_file = str(makefile.File())
-
- environment = ";".join(["{0}={1}".format(k,os.environ[k]) for k in BuildRecord.sensed_environment_variables])
- must_create_makefiles = True
- if self.incremental_parsing:
- self.build_record = BuildRecord.from_old(adir = str(self.topMakefile.Dir()), commandline=" ".join(self.args), environment = environment, topmakefile = str(makefile), logfilename = str(self.logFileName))
- must_create_makefiles = not self.build_record.reused
- else:
- self.build_record = BuildRecord(commandline=" ".join(self.args), environment = environment, topmakefilename = str(makefile), logfilename=str(self.logFileName), makefilesets=[])
- if must_create_makefiles:
- if self.incremental_parsing:
- self.Info("incremental makefile generation: cannot reuse any pre-existing makefiles")
- if len(self.build_record.new_metadata) > 0:
- self.Info("incremental makefile generation: metadata changed: {0}".format(" ".join(self.build_record.new_metadata)))
- else:
- self.Info("incremental makefile generation: no metadata was newer than the makefiles")
- # find out what components to build, and in what way
- layers = []
- self.AssertBuildOK()
- if len(buildUnitsToBuild) >= 0:
- layers = self.GetLayersFromCLI()
- componentCount = sum([len(cg) for cg in layers])
- if not componentCount > 0:
- raise BuildCannotProgressException("No components to build.")
- # check the configurations (tools versions)
- self.AssertBuildOK()
- if self.toolcheck != 'off':
- self.CheckConfigs(buildUnitsToBuild)
- else:
- self.Info("Not Checking Tool Versions")
- self.AssertBuildOK()
- else:
- # Don't use the makefile name specified on the commandline - use the one from the build record.
- self.Info("incremental makefile generation: pre-existing makefiles will be reused: {0}".format(self.build_record.topmakefilename))
- # Setup a make engine.
- if not self.maker:
- try:
- self.maker = raptor.make.MakeEngine(self, self.makeEngine)
- except raptor.make.BadMakeEngineException as e:
- self.Error("Unable to use make engine: {0} ".format(str(e)))
-
- self.AssertBuildOK()
- # if self.doParallelParsing and not (len(layers) == 1 and len(layers[0]) == 1):
- # rebuilding and parallel parsing are not compatible at the moment. There is
- # a validation check for that earlier.
- if self.doParallelParsing:
- # Create a Makefile to parse components in parallel and build them
- self.build_record = BuildRecord(commandline=" ".join(self.args), environment=environment, topmakefilename=str(makefile), logfilename=str(self.logFileName), makefilesets=[])
- for l in layers:
- l.meta_realise(self)
- elif must_create_makefiles:
- # Parse components serially, creating one set of makefiles
- # create non-component specs
- self.generic_specs = self.GenerateGenericSpecs(buildUnitsToBuild)
- self.AssertBuildOK()
- self.build_record = BuildRecord(commandline=" ".join(self.args), environment=environment, topmakefilename=str(makefile), logfilename=str(self.logFileName), makefilesets=[])
- self.build_record.build_start(self.starttime)
- for l in layers:
- # create specs for a specific group of components
- try:
- l.realise(self)
- except raptor.make.CannotWriteMakefileException as e:
- pass # make will report these errors itself
- # Record the end of the build
- self.build_record.build_end()
- try:
- self.build_record.to_file()
- except Exception as e:
- self.Info("Couldn't write build record file: {0}".format(str(e)))
- else:
- self.build_record.build_start(self.starttime)
- # Reusing old makefiles
- for makefileset in self.build_record.makefilesets:
- self.InfoStartTime(object_type = "makefileset", task = "build",
- key = makefileset.metadepsfilename)
- result = self.Make(makefileset)
- self.InfoEndTime(object_type = "makefileset", task = "build",
- key = makefileset.metadepsfilename)
- self.build_record.build_end()
-
- except BuildCannotProgressException as b:
- if str(b) != "":
- self.Info(str(b))
- # final report
- if not self.fatalErrorState:
- self.Report()
- self.Cleanup()
- # close the log file
- self.CloseLog()
- return self.errorCode
- @classmethod
- def CreateCommandlineAnalysis(cls, argv):
- """ Perform an analysis run where a build is not performed.
- Don't parse command line targets - they don't make any sense if you're not doing a build"""
- build = Raptor(commandline=argv, dotargets = False)
- return build
- # Class for passing constricted parameters to filters
- class BuildStats(object):
- def __init__(self, raptor_instance):
- self.incoming_epocroot = incoming_epocroot
- self.epocroot = epocroot
- self.logFileName = raptor_instance.logFileName
- self.configPath = raptor_instance.configPath
- self.home = raptor_instance.home
- self.quiet = raptor_instance.quiet
- self.doCheck = raptor_instance.doCheck
- self.doWhat = raptor_instance.doWhat
- self.platform = hostplatform
- self.skipAll = raptor_instance.fatalErrorState
- self.timestring = raptor_instance.timestring
- self.targets = raptor_instance.targets
- self.runtime = 0
- self.name = raptor.version.name
- self.topMakefile = raptor_instance.topMakefile
- # raptor module functions
- def Main(argv):
- """The main entry point for Raptor.
- argv is a list of command-line parameters,
- NOT including the name of the calling script.
- The return value is zero for success and non-zero for failure."""
- DisplayBanner()
- try:
- # object which represents a build
- b = Raptor(commandline=argv)
- if b.mission == Raptor.M_QUERY:
- return b.Query()
-
- return b.Build()
- except BuildCannotProgressException as e:
- t = str(e)
- if t != "":
- sys.stderr.write("sbs error: {0:s}\n".format(t))
- return 1
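- # Typical invocation (illustrative; normally performed by the sbs startup
- # script, which also sets HOSTPLATFORM and friends): sys.exit(Main(sys.argv[1:]))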
- def DisplayBanner():
- """Stuff that needs printing out for every command."""
- pass
- # end of the raptor module