#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    # Enumeration of the cooker's lifecycle states.
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        """Return the symbolic name for a numeric state code, or raise ValueError."""
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    """Lightweight record of a recipe that was skipped during parsing."""
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    # The set of optional cooker features a UI may request.
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


class EventWriter:
    """Serializes bitbake events to an event log file as JSON-wrapped pickles."""
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        # Events are pickled then base64-encoded so arbitrary objects survive JSON.
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, idleCallBackRegister=None):
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        # Snapshot sys.path/sys.modules so initConfigurationData() can undo
        # anything metadata parsing added.
        self.orig_syspath = sys.path.copy()
        self.orig_sysmodules = [*sys.modules]

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = None
        self.confignotifier = None

        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO

        self.watcher = None
        self.notifier = None

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, halt):
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def init_configdata(self):
        """Lazily parse the base configuration and start the PR server."""
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            sys.stdout.flush()
            self.handlePRServ()

    def setupConfigWatcher(self):
        """(Re)create the inotify watcher for configuration files."""
        if self.configwatcher:
            self.configwatcher.close()
            self.confignotifier = None
            self.configwatcher = None
        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)

    def setupParserWatcher(self):
        """(Re)create the inotify watcher for recipe files."""
        if self.watcher:
            self.watcher.close()
            self.notifier = None
            self.watcher = None
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n and n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        """inotify callback for configuration files; invalidates the base config."""
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if "IN_ISDIR" in event.maskname:
            if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
                if event.pathname in self.configwatcher.bbseen:
                    self.configwatcher.bbseen.remove(event.pathname)
                # Could remove all entries starting with the directory but for now...
                bb.parse.clear_cache()
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        """inotify callback for recipe files; invalidates the parse cache."""
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if "IN_ISDIR" in event.maskname:
            if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
                if event.pathname in self.watcher.bbseen:
                    self.watcher.bbseen.remove(event.pathname)
                # Could remove all entries starting with the directory but for now...
                bb.parse.clear_cache()
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        """Watch the files (or, with dirs=True, directories) named in deps."""
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):
        """Parse the base configuration, resetting any state from a prior parse."""

        self.state = state.initial
        self.caches_array = []

        # Undo any sys.path/sys.modules changes made by previously parsed metadata.
        sys.path = self.orig_syspath.copy()
        for mod in [*sys.modules]:
            if mod not in self.orig_sysmodules:
                del sys.modules[mod]

        self.setupConfigWatcher()

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                if upstream:
                    import socket
                    try:
                        sock = socket.create_connection(upstream.split(":"), 5)
                        sock.close()
                    except socket.error as e:
                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                                 % (upstream, repr(e)))

                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=upstream,
                )
                self.hashserv.serve_as_process()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()
450*4882a593Smuzhiyun 451*4882a593Smuzhiyun def disableDataTracking(self): 452*4882a593Smuzhiyun self.configuration.tracking = False 453*4882a593Smuzhiyun if hasattr(self, "data"): 454*4882a593Smuzhiyun self.data.disableTracking() 455*4882a593Smuzhiyun 456*4882a593Smuzhiyun def parseConfiguration(self): 457*4882a593Smuzhiyun self.updateCacheSync() 458*4882a593Smuzhiyun 459*4882a593Smuzhiyun # Change nice level if we're asked to 460*4882a593Smuzhiyun nice = self.data.getVar("BB_NICE_LEVEL") 461*4882a593Smuzhiyun if nice: 462*4882a593Smuzhiyun curnice = os.nice(0) 463*4882a593Smuzhiyun nice = int(nice) - curnice 464*4882a593Smuzhiyun buildlog.verbose("Renice to %s " % os.nice(nice)) 465*4882a593Smuzhiyun 466*4882a593Smuzhiyun if self.recipecaches: 467*4882a593Smuzhiyun del self.recipecaches 468*4882a593Smuzhiyun self.multiconfigs = self.databuilder.mcdata.keys() 469*4882a593Smuzhiyun self.recipecaches = {} 470*4882a593Smuzhiyun for mc in self.multiconfigs: 471*4882a593Smuzhiyun self.recipecaches[mc] = bb.cache.CacheData(self.caches_array) 472*4882a593Smuzhiyun 473*4882a593Smuzhiyun self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS")) 474*4882a593Smuzhiyun 475*4882a593Smuzhiyun self.parsecache_valid = False 476*4882a593Smuzhiyun 477*4882a593Smuzhiyun def updateConfigOpts(self, options, environment, cmdline): 478*4882a593Smuzhiyun self.ui_cmdline = cmdline 479*4882a593Smuzhiyun clean = True 480*4882a593Smuzhiyun for o in options: 481*4882a593Smuzhiyun if o in ['prefile', 'postfile']: 482*4882a593Smuzhiyun # Only these options may require a reparse 483*4882a593Smuzhiyun try: 484*4882a593Smuzhiyun if getattr(self.configuration, o) == options[o]: 485*4882a593Smuzhiyun # Value is the same, no need to mark dirty 486*4882a593Smuzhiyun continue 487*4882a593Smuzhiyun except AttributeError: 488*4882a593Smuzhiyun pass 489*4882a593Smuzhiyun logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o])) 490*4882a593Smuzhiyun print("Marking as dirty 
due to '%s' option change to '%s'" % (o, options[o])) 491*4882a593Smuzhiyun clean = False 492*4882a593Smuzhiyun if hasattr(self.configuration, o): 493*4882a593Smuzhiyun setattr(self.configuration, o, options[o]) 494*4882a593Smuzhiyun 495*4882a593Smuzhiyun if self.configuration.writeeventlog: 496*4882a593Smuzhiyun if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog: 497*4882a593Smuzhiyun bb.event.unregister_UIHhandler(self.eventlog[1]) 498*4882a593Smuzhiyun if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog: 499*4882a593Smuzhiyun # we log all events to a file if so directed 500*4882a593Smuzhiyun # register the log file writer as UI Handler 501*4882a593Smuzhiyun writer = EventWriter(self, self.configuration.writeeventlog) 502*4882a593Smuzhiyun EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event']) 503*4882a593Smuzhiyun self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer))) 504*4882a593Smuzhiyun 505*4882a593Smuzhiyun bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel 506*4882a593Smuzhiyun bb.msg.loggerDefaultDomains = self.configuration.debug_domains 507*4882a593Smuzhiyun 508*4882a593Smuzhiyun if hasattr(self, "data"): 509*4882a593Smuzhiyun origenv = bb.data.init() 510*4882a593Smuzhiyun for k in environment: 511*4882a593Smuzhiyun origenv.setVar(k, environment[k]) 512*4882a593Smuzhiyun self.data.setVar("BB_ORIGENV", origenv) 513*4882a593Smuzhiyun 514*4882a593Smuzhiyun for k in bb.utils.approved_variables(): 515*4882a593Smuzhiyun if k in environment and k not in self.configuration.env: 516*4882a593Smuzhiyun logger.debug("Updating new environment variable %s to %s" % (k, environment[k])) 517*4882a593Smuzhiyun self.configuration.env[k] = environment[k] 518*4882a593Smuzhiyun clean = False 519*4882a593Smuzhiyun if k in self.configuration.env and k not in environment: 520*4882a593Smuzhiyun logger.debug("Updating environment variable %s 
(deleted)" % (k)) 521*4882a593Smuzhiyun del self.configuration.env[k] 522*4882a593Smuzhiyun clean = False 523*4882a593Smuzhiyun if k not in self.configuration.env and k not in environment: 524*4882a593Smuzhiyun continue 525*4882a593Smuzhiyun if environment[k] != self.configuration.env[k]: 526*4882a593Smuzhiyun logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k])) 527*4882a593Smuzhiyun self.configuration.env[k] = environment[k] 528*4882a593Smuzhiyun clean = False 529*4882a593Smuzhiyun 530*4882a593Smuzhiyun # Now update all the variables not in the datastore to match 531*4882a593Smuzhiyun self.configuration.env = environment 532*4882a593Smuzhiyun 533*4882a593Smuzhiyun if not clean: 534*4882a593Smuzhiyun logger.debug("Base environment change, triggering reparse") 535*4882a593Smuzhiyun self.reset() 536*4882a593Smuzhiyun 537*4882a593Smuzhiyun def runCommands(self, server, data, halt): 538*4882a593Smuzhiyun """ 539*4882a593Smuzhiyun Run any queued asynchronous command 540*4882a593Smuzhiyun This is done by the idle handler so it runs in true context rather than 541*4882a593Smuzhiyun tied to any UI. 
        """

        return self.command.runAsyncCommand()

    def showVersions(self):
        """
        Print a table of every known recipe in the default multiconfig with
        its latest, preferred and required versions (via logger.plain).
        Aborts via bb.fatal() if a REQUIRED_VERSION cannot be satisfied.
        """

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    # version tuples are (pe, pv, pr) -> rendered as "pe:pv-pr"
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            # Suppress the preferred column when it adds no information
            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        # Variable history is needed for the output; temporarily enable
        # tracking if the UI did not request it, and restore it at the end.
        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
            # reset() resets to the UI requested value so we have to redo this
            self.enableDataTracking()

        # Returns the multiconfig name for a bare "mc:<name>" target, else None
        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)

                # runlist entries are [mc, target, taskname, fn]
                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                # NOTE(review): message grammar — should read 'No multiconfig named "%s" found'
                bb.fatal('Not multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        # Restore the UI-requested tracking state
        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
            if not task.startswith("do_"):
                task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:") and k.count(':') >= 2:
                if wildcard:
                    # A second mc: target after a wildcard expansion is ambiguous
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        # One TaskData and one expanded datastore copy per multiconfig
        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            # Split off the "mc:<name>:" prefix and any ":do_<task>" suffix
            if k.startswith("mc:") and k.count(':') >= 2:
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to do check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for k in mcdeps:
                    if k in seen:
                        continue
                    # mcdepends are of the form "mc:<frommc>:<tomc>:<target>:<task>"
                    l = k.split(':')
                    depmc = l[2]
                    if depmc not in self.multiconfigs:
                        # NOTE(review): duplicated word in message — "configuration named configuration"
                        bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
                    else:
                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                        taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set halt to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        # Prefix a recipe name with its multiconfig ("mc:<mc>:<pn>"); the
        # default (empty) multiconfig is returned unprefixed.
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        """
        Build the dependency-tree dictionary consumed by DepTreeGenerated
        consumers, keyed by "depends", "tdepends", "pn", "rdepends-pn",
        "packages", "rdepends-pkg", "rrecs-pkg", "providermap" and
        "layer-priorities".
        """
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        # Map every provider name that differs from its recipe name (PN)
        # to the (pn, version) actually providing it
        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            # Record inter-task dependencies as "pn.taskname" graph edges
            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                if depmc:
                    depmc = "mc:" + depmc + ":"
                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                # Per-recipe build-time and runtime dependency lists
                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        # The runlist is not needed here; only the taskdata objects are used
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                # NOTE(review): rdepends/rrecs are only (re)assigned when a pn is
                # first seen; a later taskfn mapping to an already-seen pn would
                # reuse the previous recipe's values below — verify intentional.
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                # Resolve each build-time dependency to its providing recipe
                # name where a provider is known, else keep the raw name
                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                # Same resolution for runtime dependencies
                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)
        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            # NOTE(review): the loop variable shadows the 'task' parameter
            # (harmless here since the parameter is not used afterwards)
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        """
        Report bbappend files that were not applied to any recipe, either as a
        warning or a fatal error depending on BB_DANGLINGAPPENDS_WARNONLY.
        """
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                       '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            # NOTE(review): 'mc' here is the last multiconfig from the loop above —
            # presumably the setting is global; confirm it is identical across mcdata
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                    False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):
        """
        Record PREFERRED_PROVIDERS entries ("providee:provider") in each
        multiconfig's recipe cache, logging conflicts between settings.
        """

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                # NOTE(review): bare except — should be 'except ValueError:'
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            # each entry is a (filename, ...) tuple; f[0] is the path
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        # substring match, not a glob or regex
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def testCookerCommandEvent(self, filepattern):
        # Dummy command used by OEQA selftest to test tinfoil without IO
        matches = ["A", "B"]
        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        # Delegate to bb.providers for the given multiconfig's recipe cache
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        """
        Return (latest, latest_f, preferred_ver, preferred_file) for pn, or a
        tuple of Nones (with only the filename set in the explicit-provider
        case) when pn is unknown or a required version is unavailable.
        """
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            if eligible is not None:
                filename = eligible[0]
            else:
                filename = None
            # Explicit provider case: only the filename slot is meaningful
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
            if required and preferred_file is None:
                return None, None, None, None
            return (latest, latest_f, preferred_ver, preferred_file)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        # Only the default ('') multiconfig cache is consulted here
        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start( self )


    def handleCollections(self, collections):
        """Handle collections"""
1206*4882a593Smuzhiyun errors = False 1207*4882a593Smuzhiyun self.bbfile_config_priorities = [] 1208*4882a593Smuzhiyun if collections: 1209*4882a593Smuzhiyun collection_priorities = {} 1210*4882a593Smuzhiyun collection_depends = {} 1211*4882a593Smuzhiyun collection_list = collections.split() 1212*4882a593Smuzhiyun min_prio = 0 1213*4882a593Smuzhiyun for c in collection_list: 1214*4882a593Smuzhiyun bb.debug(1,'Processing %s in collection list' % (c)) 1215*4882a593Smuzhiyun 1216*4882a593Smuzhiyun # Get collection priority if defined explicitly 1217*4882a593Smuzhiyun priority = self.data.getVar("BBFILE_PRIORITY_%s" % c) 1218*4882a593Smuzhiyun if priority: 1219*4882a593Smuzhiyun try: 1220*4882a593Smuzhiyun prio = int(priority) 1221*4882a593Smuzhiyun except ValueError: 1222*4882a593Smuzhiyun parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority) 1223*4882a593Smuzhiyun errors = True 1224*4882a593Smuzhiyun if min_prio == 0 or prio < min_prio: 1225*4882a593Smuzhiyun min_prio = prio 1226*4882a593Smuzhiyun collection_priorities[c] = prio 1227*4882a593Smuzhiyun else: 1228*4882a593Smuzhiyun collection_priorities[c] = None 1229*4882a593Smuzhiyun 1230*4882a593Smuzhiyun # Check dependencies and store information for priority calculation 1231*4882a593Smuzhiyun deps = self.data.getVar("LAYERDEPENDS_%s" % c) 1232*4882a593Smuzhiyun if deps: 1233*4882a593Smuzhiyun try: 1234*4882a593Smuzhiyun depDict = bb.utils.explode_dep_versions2(deps) 1235*4882a593Smuzhiyun except bb.utils.VersionStringException as vse: 1236*4882a593Smuzhiyun bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse))) 1237*4882a593Smuzhiyun for dep, oplist in list(depDict.items()): 1238*4882a593Smuzhiyun if dep in collection_list: 1239*4882a593Smuzhiyun for opstr in oplist: 1240*4882a593Smuzhiyun layerver = self.data.getVar("LAYERVERSION_%s" % dep) 1241*4882a593Smuzhiyun (op, depver) = opstr.split() 1242*4882a593Smuzhiyun if layerver: 1243*4882a593Smuzhiyun try: 1244*4882a593Smuzhiyun res = 
bb.utils.vercmp_string_op(layerver, depver, op) 1245*4882a593Smuzhiyun except bb.utils.VersionStringException as vse: 1246*4882a593Smuzhiyun bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse))) 1247*4882a593Smuzhiyun if not res: 1248*4882a593Smuzhiyun parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver) 1249*4882a593Smuzhiyun errors = True 1250*4882a593Smuzhiyun else: 1251*4882a593Smuzhiyun parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep) 1252*4882a593Smuzhiyun errors = True 1253*4882a593Smuzhiyun else: 1254*4882a593Smuzhiyun parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep) 1255*4882a593Smuzhiyun errors = True 1256*4882a593Smuzhiyun collection_depends[c] = list(depDict.keys()) 1257*4882a593Smuzhiyun else: 1258*4882a593Smuzhiyun collection_depends[c] = [] 1259*4882a593Smuzhiyun 1260*4882a593Smuzhiyun # Check recommends and store information for priority calculation 1261*4882a593Smuzhiyun recs = self.data.getVar("LAYERRECOMMENDS_%s" % c) 1262*4882a593Smuzhiyun if recs: 1263*4882a593Smuzhiyun try: 1264*4882a593Smuzhiyun recDict = bb.utils.explode_dep_versions2(recs) 1265*4882a593Smuzhiyun except bb.utils.VersionStringException as vse: 1266*4882a593Smuzhiyun bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) 1267*4882a593Smuzhiyun for rec, oplist in list(recDict.items()): 1268*4882a593Smuzhiyun if rec in collection_list: 1269*4882a593Smuzhiyun if oplist: 1270*4882a593Smuzhiyun opstr = oplist[0] 1271*4882a593Smuzhiyun layerver = self.data.getVar("LAYERVERSION_%s" % rec) 1272*4882a593Smuzhiyun if layerver: 
1273*4882a593Smuzhiyun (op, recver) = opstr.split() 1274*4882a593Smuzhiyun try: 1275*4882a593Smuzhiyun res = bb.utils.vercmp_string_op(layerver, recver, op) 1276*4882a593Smuzhiyun except bb.utils.VersionStringException as vse: 1277*4882a593Smuzhiyun bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) 1278*4882a593Smuzhiyun if not res: 1279*4882a593Smuzhiyun parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver) 1280*4882a593Smuzhiyun continue 1281*4882a593Smuzhiyun else: 1282*4882a593Smuzhiyun parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec) 1283*4882a593Smuzhiyun continue 1284*4882a593Smuzhiyun parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec) 1285*4882a593Smuzhiyun collection_depends[c].append(rec) 1286*4882a593Smuzhiyun else: 1287*4882a593Smuzhiyun parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec) 1288*4882a593Smuzhiyun 1289*4882a593Smuzhiyun # Recursively work out collection priorities based on dependencies 1290*4882a593Smuzhiyun def calc_layer_priority(collection): 1291*4882a593Smuzhiyun if not collection_priorities[collection]: 1292*4882a593Smuzhiyun max_depprio = min_prio 1293*4882a593Smuzhiyun for dep in collection_depends[collection]: 1294*4882a593Smuzhiyun calc_layer_priority(dep) 1295*4882a593Smuzhiyun depprio = collection_priorities[dep] 1296*4882a593Smuzhiyun if depprio > max_depprio: 1297*4882a593Smuzhiyun max_depprio = depprio 1298*4882a593Smuzhiyun max_depprio += 1 1299*4882a593Smuzhiyun parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio) 
1300*4882a593Smuzhiyun collection_priorities[collection] = max_depprio 1301*4882a593Smuzhiyun 1302*4882a593Smuzhiyun # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities 1303*4882a593Smuzhiyun for c in collection_list: 1304*4882a593Smuzhiyun calc_layer_priority(c) 1305*4882a593Smuzhiyun regex = self.data.getVar("BBFILE_PATTERN_%s" % c) 1306*4882a593Smuzhiyun if regex is None: 1307*4882a593Smuzhiyun parselog.error("BBFILE_PATTERN_%s not defined" % c) 1308*4882a593Smuzhiyun errors = True 1309*4882a593Smuzhiyun continue 1310*4882a593Smuzhiyun elif regex == "": 1311*4882a593Smuzhiyun parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c) 1312*4882a593Smuzhiyun cre = re.compile('^NULL$') 1313*4882a593Smuzhiyun errors = False 1314*4882a593Smuzhiyun else: 1315*4882a593Smuzhiyun try: 1316*4882a593Smuzhiyun cre = re.compile(regex) 1317*4882a593Smuzhiyun except re.error: 1318*4882a593Smuzhiyun parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex) 1319*4882a593Smuzhiyun errors = True 1320*4882a593Smuzhiyun continue 1321*4882a593Smuzhiyun self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c])) 1322*4882a593Smuzhiyun if errors: 1323*4882a593Smuzhiyun # We've already printed the actual error(s) 1324*4882a593Smuzhiyun raise CollectionError("Errors during parsing layer configuration") 1325*4882a593Smuzhiyun 1326*4882a593Smuzhiyun def buildSetVars(self): 1327*4882a593Smuzhiyun """ 1328*4882a593Smuzhiyun Setup any variables needed before starting a build 1329*4882a593Smuzhiyun """ 1330*4882a593Smuzhiyun t = time.gmtime() 1331*4882a593Smuzhiyun for mc in self.databuilder.mcdata: 1332*4882a593Smuzhiyun ds = self.databuilder.mcdata[mc] 1333*4882a593Smuzhiyun if not ds.getVar("BUILDNAME", False): 1334*4882a593Smuzhiyun ds.setVar("BUILDNAME", "${DATE}${TIME}") 1335*4882a593Smuzhiyun ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t)) 1336*4882a593Smuzhiyun ds.setVar("DATE", 
time.strftime('%Y%m%d', t)) 1337*4882a593Smuzhiyun ds.setVar("TIME", time.strftime('%H%M%S', t)) 1338*4882a593Smuzhiyun 1339*4882a593Smuzhiyun def reset_mtime_caches(self): 1340*4882a593Smuzhiyun """ 1341*4882a593Smuzhiyun Reset mtime caches - this is particularly important when memory resident as something 1342*4882a593Smuzhiyun which is cached is not unlikely to have changed since the last invocation (e.g. a 1343*4882a593Smuzhiyun file associated with a recipe might have been modified by the user). 1344*4882a593Smuzhiyun """ 1345*4882a593Smuzhiyun build.reset_cache() 1346*4882a593Smuzhiyun bb.fetch._checksum_cache.mtime_cache.clear() 1347*4882a593Smuzhiyun siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None) 1348*4882a593Smuzhiyun if siggen_cache: 1349*4882a593Smuzhiyun bb.parse.siggen.checksum_cache.mtime_cache.clear() 1350*4882a593Smuzhiyun 1351*4882a593Smuzhiyun def matchFiles(self, bf, mc=''): 1352*4882a593Smuzhiyun """ 1353*4882a593Smuzhiyun Find the .bb files which match the expression in 'buildfile'. 
1354*4882a593Smuzhiyun """ 1355*4882a593Smuzhiyun if bf.startswith("/") or bf.startswith("../"): 1356*4882a593Smuzhiyun bf = os.path.abspath(bf) 1357*4882a593Smuzhiyun 1358*4882a593Smuzhiyun self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)} 1359*4882a593Smuzhiyun filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1360*4882a593Smuzhiyun try: 1361*4882a593Smuzhiyun os.stat(bf) 1362*4882a593Smuzhiyun bf = os.path.abspath(bf) 1363*4882a593Smuzhiyun return [bf] 1364*4882a593Smuzhiyun except OSError: 1365*4882a593Smuzhiyun regexp = re.compile(bf) 1366*4882a593Smuzhiyun matches = [] 1367*4882a593Smuzhiyun for f in filelist: 1368*4882a593Smuzhiyun if regexp.search(f) and os.path.isfile(f): 1369*4882a593Smuzhiyun matches.append(f) 1370*4882a593Smuzhiyun return matches 1371*4882a593Smuzhiyun 1372*4882a593Smuzhiyun def matchFile(self, buildfile, mc=''): 1373*4882a593Smuzhiyun """ 1374*4882a593Smuzhiyun Find the .bb file which matches the expression in 'buildfile'. 
1375*4882a593Smuzhiyun Raise an error if multiple files 1376*4882a593Smuzhiyun """ 1377*4882a593Smuzhiyun matches = self.matchFiles(buildfile, mc) 1378*4882a593Smuzhiyun if len(matches) != 1: 1379*4882a593Smuzhiyun if matches: 1380*4882a593Smuzhiyun msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches)) 1381*4882a593Smuzhiyun if matches: 1382*4882a593Smuzhiyun for f in matches: 1383*4882a593Smuzhiyun msg += "\n %s" % f 1384*4882a593Smuzhiyun parselog.error(msg) 1385*4882a593Smuzhiyun else: 1386*4882a593Smuzhiyun parselog.error("Unable to find any recipe file matching '%s'" % buildfile) 1387*4882a593Smuzhiyun raise NoSpecificMatch 1388*4882a593Smuzhiyun return matches[0] 1389*4882a593Smuzhiyun 1390*4882a593Smuzhiyun def buildFile(self, buildfile, task): 1391*4882a593Smuzhiyun """ 1392*4882a593Smuzhiyun Build the file matching regexp buildfile 1393*4882a593Smuzhiyun """ 1394*4882a593Smuzhiyun bb.event.fire(bb.event.BuildInit(), self.data) 1395*4882a593Smuzhiyun 1396*4882a593Smuzhiyun # Too many people use -b because they think it's how you normally 1397*4882a593Smuzhiyun # specify a target to be built, so show a warning 1398*4882a593Smuzhiyun bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.") 1399*4882a593Smuzhiyun 1400*4882a593Smuzhiyun self.buildFileInternal(buildfile, task) 1401*4882a593Smuzhiyun 1402*4882a593Smuzhiyun def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False): 1403*4882a593Smuzhiyun """ 1404*4882a593Smuzhiyun Build the file matching regexp buildfile 1405*4882a593Smuzhiyun """ 1406*4882a593Smuzhiyun 1407*4882a593Smuzhiyun # Parse the configuration here. 
        # We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn, mc)

        self.buildSetVars()
        self.reset_mtime_caches()

        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)

        # Parse just this one recipe (plus its bbappends).
        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
        infos = dict(infos)

        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1
        self.configuration.limited_deps = True

        # Remove external dependencies so only this recipe's own tasks run.
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
        self.recipecaches[mc].runrecs[fn] = defaultdict(list)

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)

        # Save the current runqueue log level so it can be restored after the build.
        if quietlog:
            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
            bb.runqueue.logger.setLevel(logging.WARNING)

        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
        if fireevents:
            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])

        # Execute the runqueue
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        def buildFileIdle(server, rq, halt):
            # Idle handler driving the runqueue: returns True/a retval to be
            # called again, or False when the build is finished/aborted.

            msg = None
            interrupted = 0
            if halt or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False

            if not retval:
                # Build finished: fire completion event and reset the state
                # this method perturbed.
                if fireevents:
                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
                self.command.finishAsyncCommand(msg)
                # We trashed self.recipecaches above
                self.parsecache_valid = False
                self.configuration.limited_deps = False
                bb.parse.siggen.reset(self.data)
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False
            if retval is True:
                return True
            return retval

        self.idleCallBackRegister(buildFileIdle, rq)

    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        def buildTargetsIdle(server, rq, halt):
            # Idle handler driving the runqueue; mirrors buildFileIdle but
            # fires BuildCompleted for every multiconfig.
            msg = None
            interrupted = 0
            if halt or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                try:
                    for mc in self.multiconfigs:
                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                finally:
                    self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.reset_mtime_caches()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        # Normalize targets without an explicit task to "<target>:<task>".
        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in runlist:
            if target[0]:
                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        for mc in self.multiconfigs:
            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.idleCallBackRegister(buildTargetsIdle, rq)


    def getAllKeysWithFlags(self, flaglist):
        """
        Dump all datastore variables (except __-internal ones) as a dict of
        {name: {'v': value, 'history': ..., <flag>: ...}} for the flags named
        in flaglist.
        """
        dump = {}
        for k in self.data.keys():
            try:
                expand = True
                flags = self.data.getVarFlags(k)
                # Python functions are not expanded when fetching their value.
                if flags and "func" in flags and "python" in flags:
                    expand = False
                v = self.data.getVar(k, expand)
                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                    dump[k] = {
                        'v' : str(v) ,
                        'history' : self.data.varhistory.variable(k),
                    }
                    for d in flaglist:
                        if flags and d in flags:
                            dump[k][d] = flags[d]
                        else:
                            dump[k][d] = None
            except Exception as e:
                # Best-effort dump: report and continue with the next variable.
                print(e)
        return dump


    def updateCacheSync(self):
        """
        Synchronous part of cache refreshing: reload files we were notified
        about via inotify and, if needed, the base configuration. No-op while
        a build is running.
        """
        if self.state == state.running:
            return

        # reload files for which we got notifications
        for p in self.inotify_modified_files:
            bb.parse.update_cache(p)
            if p in bb.parse.BBHandler.cached_statements:
                del bb.parse.BBHandler.cached_statements[p]
        self.inotify_modified_files = []

        if not self.baseconfig_valid:
            logger.debug("Reloading base configuration data")
            self.initConfigurationData()
            self.handlePRServ()

    # This is called for all async commands when self.state != running
    def updateCache(self):
        """
        Drive (re)parsing of the recipe cache.

        Returns True while parsing is still in progress (caller should call
        again), or None once parsing has completed and state is 'running'.
        Raises bb.BBHandledException on shutdown/error states or parse errors.
        """
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False)
                self.parser.final_cleanup()
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        # First call with an invalid parse cache: set up the parser.
        if self.state != state.parsing and not self.parsecache_valid:
            self.setupParserWatcher()

            bb.parse.siggen.reset(self.data)
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                for mc in self.multiconfigs:
                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])

            for mc in self.multiconfigs:
                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
                self.recipecaches[mc].ignored_dependencies = set(ignore.split())

                for dep in self.configuration.extra_assume_provided:
                    self.recipecaches[mc].ignored_dependencies.add(dep)

            self.collections = {}

            mcfilelist = {}
            total_masked = 0
            searchdirs = set()
            for mc in self.multiconfigs:
                self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
                (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])

                mcfilelist[mc] = filelist
                total_masked += masked
                searchdirs |= set(search)

            # Add inotify watches for directories searched for bb/bbappend files
            for dirent in searchdirs:
                self.add_filewatch([[dirent]], dirs=True)

            self.parser = CookerParser(self, mcfilelist, total_masked)
            self.parsecache_valid = True

        self.state = state.parsing

        if not self.parser.parse_next():
            # Parsing has finished (or failed).
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            for mc in self.multiconfigs:
                self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            for mc in self.multiconfigs:
                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
                bb.event.fire(event, self.databuilder.mcdata[mc])
            return None

        return True

    def checkPackages(self, pkgs_to_build, task=None):
        """
        Sanitise and expand the requested target list.

        Renames legacy "multiconfig:" prefixes to "mc:", and expands the
        'world' and 'universe' pseudo-targets to concrete (possibly
        mc-prefixed) target names.  Returns a new list; the input list is
        not modified.  Raises NothingToBuild on an empty request.
        """

        # Return a copy, don't modify the original
        pkgs_to_build = pkgs_to_build[:]

        if not pkgs_to_build:
            raise NothingToBuild

        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
        for pkg in pkgs_to_build.copy():
            # NOTE(review): ASSUME_PROVIDED targets are warned about but not
            # removed from the list here — confirm intended.
            if pkg in ignore:
                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
            if pkg.startswith("multiconfig:"):
                pkgs_to_build.remove(pkg)
                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))

        if 'world' in pkgs_to_build:
            pkgs_to_build.remove('world')
            for mc in self.multiconfigs:
                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
                for t in self.recipecaches[mc].world_target:
                    if mc:
                        t = "mc:" + mc + ":" + t
                    pkgs_to_build.append(t)

        if 'universe' in pkgs_to_build:
            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
            parselog.debug(1, "collating packages for \"universe\"")
            pkgs_to_build.remove('universe')
            for mc in self.multiconfigs:
                for t in self.recipecaches[mc].universe_target:
                    if task:
                        # Only include targets which actually provide the task.
                        foundtask = False
                        for provider_fn in self.recipecaches[mc].providers[t]:
                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
                                foundtask = True
                                break
                        if not foundtask:
                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
                            continue
                    if mc:
                        t = "mc:" + mc + ":" + t
                    pkgs_to_build.append(t)

        return pkgs_to_build

    def pre_serve(self):
        # Nothing to do before serving.
        return

    def post_serve(self):
        """Tear down after the server stops: shut down the cooker, PR service,
        signature generator and hash server, then notify clients."""
        self.shutdown(force=True)
        prserv.serv.auto_shutdown()
        if hasattr(bb.parse, "siggen"):
            bb.parse.siggen.exit()
        if self.hashserv:
            self.hashserv.process.terminate()
            self.hashserv.process.join()
        if hasattr(self, "data"):
            bb.event.fire(CookerExit(), self.data)

    def shutdown(self, force = False):
        """Request shutdown; force=True aborts running tasks immediately."""
        if force:
            self.state = state.forceshutdown
        else:
            self.state = state.shutdown

        if self.parser:
            self.parser.shutdown(clean=not force)
            self.parser.final_cleanup()

    def finishcommand(self):
        # Return to the idle state after a command completes.
        self.state = state.initial

    def reset(self):
        """Reinitialise configuration data and the PR service."""
        if hasattr(bb.parse, "siggen"):
            bb.parse.siggen.exit()
        self.initConfigurationData()
        self.handlePRServ()

    def clientComplete(self):
        """Called when the client is done using the server"""
        self.finishcommand()
        self.extraconfigdata = {}
        self.command.reset()
        if hasattr(self, "data"):
            self.databuilder.reset()
            self.data = self.databuilder.data
        # Force both caches to be rebuilt for the next client.
        self.parsecache_valid = False
        self.baseconfig_valid = False


class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        bb.event.Event.__init__(self)


class CookerCollectFiles(object):
    """Collects the .bb and .bbappend files for one multiconfig ('mc')."""
    def __init__(self, priorities, mc=''):
        self.mc = mc
        self.bbappends = []
Priorities is a list of tuples, with the second element as the pattern. 1803*4882a593Smuzhiyun # We need to sort the list with the longest pattern first, and so on to 1804*4882a593Smuzhiyun # the shortest. This allows nested layers to be properly evaluated. 1805*4882a593Smuzhiyun self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True) 1806*4882a593Smuzhiyun 1807*4882a593Smuzhiyun def calc_bbfile_priority(self, filename): 1808*4882a593Smuzhiyun for _, _, regex, pri in self.bbfile_config_priorities: 1809*4882a593Smuzhiyun if regex.match(filename): 1810*4882a593Smuzhiyun return pri, regex 1811*4882a593Smuzhiyun return 0, None 1812*4882a593Smuzhiyun 1813*4882a593Smuzhiyun def get_bbfiles(self): 1814*4882a593Smuzhiyun """Get list of default .bb files by reading out the current directory""" 1815*4882a593Smuzhiyun path = os.getcwd() 1816*4882a593Smuzhiyun contents = os.listdir(path) 1817*4882a593Smuzhiyun bbfiles = [] 1818*4882a593Smuzhiyun for f in contents: 1819*4882a593Smuzhiyun if f.endswith(".bb"): 1820*4882a593Smuzhiyun bbfiles.append(os.path.abspath(os.path.join(path, f))) 1821*4882a593Smuzhiyun return bbfiles 1822*4882a593Smuzhiyun 1823*4882a593Smuzhiyun def find_bbfiles(self, path): 1824*4882a593Smuzhiyun """Find all the .bb and .bbappend files in a directory""" 1825*4882a593Smuzhiyun found = [] 1826*4882a593Smuzhiyun for dir, dirs, files in os.walk(path): 1827*4882a593Smuzhiyun for ignored in ('SCCS', 'CVS', '.svn'): 1828*4882a593Smuzhiyun if ignored in dirs: 1829*4882a593Smuzhiyun dirs.remove(ignored) 1830*4882a593Smuzhiyun found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))] 1831*4882a593Smuzhiyun 1832*4882a593Smuzhiyun return found 1833*4882a593Smuzhiyun 1834*4882a593Smuzhiyun def collect_bbfiles(self, config, eventdata): 1835*4882a593Smuzhiyun """Collect all available .bb build files""" 1836*4882a593Smuzhiyun masked = 0 1837*4882a593Smuzhiyun 1838*4882a593Smuzhiyun collectlog.debug(1, 
"collecting .bb files") 1839*4882a593Smuzhiyun 1840*4882a593Smuzhiyun files = (config.getVar( "BBFILES") or "").split() 1841*4882a593Smuzhiyun 1842*4882a593Smuzhiyun # Sort files by priority 1843*4882a593Smuzhiyun files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] ) 1844*4882a593Smuzhiyun config.setVar("BBFILES_PRIORITIZED", " ".join(files)) 1845*4882a593Smuzhiyun 1846*4882a593Smuzhiyun if not files: 1847*4882a593Smuzhiyun files = self.get_bbfiles() 1848*4882a593Smuzhiyun 1849*4882a593Smuzhiyun if not files: 1850*4882a593Smuzhiyun collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") 1851*4882a593Smuzhiyun bb.event.fire(CookerExit(), eventdata) 1852*4882a593Smuzhiyun 1853*4882a593Smuzhiyun # We need to track where we look so that we can add inotify watches. There 1854*4882a593Smuzhiyun # is no nice way to do this, this is horrid. We intercept the os.listdir() 1855*4882a593Smuzhiyun # (or os.scandir() for python 3.6+) calls while we run glob(). 
1856*4882a593Smuzhiyun origlistdir = os.listdir 1857*4882a593Smuzhiyun if hasattr(os, 'scandir'): 1858*4882a593Smuzhiyun origscandir = os.scandir 1859*4882a593Smuzhiyun searchdirs = [] 1860*4882a593Smuzhiyun 1861*4882a593Smuzhiyun def ourlistdir(d): 1862*4882a593Smuzhiyun searchdirs.append(d) 1863*4882a593Smuzhiyun return origlistdir(d) 1864*4882a593Smuzhiyun 1865*4882a593Smuzhiyun def ourscandir(d): 1866*4882a593Smuzhiyun searchdirs.append(d) 1867*4882a593Smuzhiyun return origscandir(d) 1868*4882a593Smuzhiyun 1869*4882a593Smuzhiyun os.listdir = ourlistdir 1870*4882a593Smuzhiyun if hasattr(os, 'scandir'): 1871*4882a593Smuzhiyun os.scandir = ourscandir 1872*4882a593Smuzhiyun try: 1873*4882a593Smuzhiyun # Can't use set here as order is important 1874*4882a593Smuzhiyun newfiles = [] 1875*4882a593Smuzhiyun for f in files: 1876*4882a593Smuzhiyun if os.path.isdir(f): 1877*4882a593Smuzhiyun dirfiles = self.find_bbfiles(f) 1878*4882a593Smuzhiyun for g in dirfiles: 1879*4882a593Smuzhiyun if g not in newfiles: 1880*4882a593Smuzhiyun newfiles.append(g) 1881*4882a593Smuzhiyun else: 1882*4882a593Smuzhiyun globbed = glob.glob(f) 1883*4882a593Smuzhiyun if not globbed and os.path.exists(f): 1884*4882a593Smuzhiyun globbed = [f] 1885*4882a593Smuzhiyun # glob gives files in order on disk. Sort to be deterministic. 
1886*4882a593Smuzhiyun for g in sorted(globbed): 1887*4882a593Smuzhiyun if g not in newfiles: 1888*4882a593Smuzhiyun newfiles.append(g) 1889*4882a593Smuzhiyun finally: 1890*4882a593Smuzhiyun os.listdir = origlistdir 1891*4882a593Smuzhiyun if hasattr(os, 'scandir'): 1892*4882a593Smuzhiyun os.scandir = origscandir 1893*4882a593Smuzhiyun 1894*4882a593Smuzhiyun bbmask = config.getVar('BBMASK') 1895*4882a593Smuzhiyun 1896*4882a593Smuzhiyun if bbmask: 1897*4882a593Smuzhiyun # First validate the individual regular expressions and ignore any 1898*4882a593Smuzhiyun # that do not compile 1899*4882a593Smuzhiyun bbmasks = [] 1900*4882a593Smuzhiyun for mask in bbmask.split(): 1901*4882a593Smuzhiyun # When constructing an older style single regex, it's possible for BBMASK 1902*4882a593Smuzhiyun # to end up beginning with '|', which matches and masks _everything_. 1903*4882a593Smuzhiyun if mask.startswith("|"): 1904*4882a593Smuzhiyun collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask) 1905*4882a593Smuzhiyun mask = mask[1:] 1906*4882a593Smuzhiyun try: 1907*4882a593Smuzhiyun re.compile(mask) 1908*4882a593Smuzhiyun bbmasks.append(mask) 1909*4882a593Smuzhiyun except re.error: 1910*4882a593Smuzhiyun collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask) 1911*4882a593Smuzhiyun 1912*4882a593Smuzhiyun # Then validate the combined regular expressions. This should never 1913*4882a593Smuzhiyun # fail, but better safe than sorry... 
1914*4882a593Smuzhiyun bbmask = "|".join(bbmasks) 1915*4882a593Smuzhiyun try: 1916*4882a593Smuzhiyun bbmask_compiled = re.compile(bbmask) 1917*4882a593Smuzhiyun except re.error: 1918*4882a593Smuzhiyun collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask) 1919*4882a593Smuzhiyun bbmask = None 1920*4882a593Smuzhiyun 1921*4882a593Smuzhiyun bbfiles = [] 1922*4882a593Smuzhiyun bbappend = [] 1923*4882a593Smuzhiyun for f in newfiles: 1924*4882a593Smuzhiyun if bbmask and bbmask_compiled.search(f): 1925*4882a593Smuzhiyun collectlog.debug(1, "skipping masked file %s", f) 1926*4882a593Smuzhiyun masked += 1 1927*4882a593Smuzhiyun continue 1928*4882a593Smuzhiyun if f.endswith('.bb'): 1929*4882a593Smuzhiyun bbfiles.append(f) 1930*4882a593Smuzhiyun elif f.endswith('.bbappend'): 1931*4882a593Smuzhiyun bbappend.append(f) 1932*4882a593Smuzhiyun else: 1933*4882a593Smuzhiyun collectlog.debug(1, "skipping %s: unknown file extension", f) 1934*4882a593Smuzhiyun 1935*4882a593Smuzhiyun # Build a list of .bbappend files for each .bb file 1936*4882a593Smuzhiyun for f in bbappend: 1937*4882a593Smuzhiyun base = os.path.basename(f).replace('.bbappend', '.bb') 1938*4882a593Smuzhiyun self.bbappends.append((base, f)) 1939*4882a593Smuzhiyun 1940*4882a593Smuzhiyun # Find overlayed recipes 1941*4882a593Smuzhiyun # bbfiles will be in priority order which makes this easy 1942*4882a593Smuzhiyun bbfile_seen = dict() 1943*4882a593Smuzhiyun self.overlayed = defaultdict(list) 1944*4882a593Smuzhiyun for f in reversed(bbfiles): 1945*4882a593Smuzhiyun base = os.path.basename(f) 1946*4882a593Smuzhiyun if base not in bbfile_seen: 1947*4882a593Smuzhiyun bbfile_seen[base] = f 1948*4882a593Smuzhiyun else: 1949*4882a593Smuzhiyun topfile = bbfile_seen[base] 1950*4882a593Smuzhiyun self.overlayed[topfile].append(f) 1951*4882a593Smuzhiyun 1952*4882a593Smuzhiyun return (bbfiles, masked, searchdirs) 1953*4882a593Smuzhiyun 1954*4882a593Smuzhiyun def get_file_appends(self, fn): 
1955*4882a593Smuzhiyun """ 1956*4882a593Smuzhiyun Returns a list of .bbappend files to apply to fn 1957*4882a593Smuzhiyun """ 1958*4882a593Smuzhiyun filelist = [] 1959*4882a593Smuzhiyun f = os.path.basename(fn) 1960*4882a593Smuzhiyun for b in self.bbappends: 1961*4882a593Smuzhiyun (bbappend, filename) = b 1962*4882a593Smuzhiyun if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])): 1963*4882a593Smuzhiyun filelist.append(filename) 1964*4882a593Smuzhiyun return tuple(filelist) 1965*4882a593Smuzhiyun 1966*4882a593Smuzhiyun def collection_priorities(self, pkgfns, fns, d): 1967*4882a593Smuzhiyun # Return the priorities of the entries in pkgfns 1968*4882a593Smuzhiyun # Also check that all the regexes in self.bbfile_config_priorities are used 1969*4882a593Smuzhiyun # (but to do that we need to ensure skipped recipes aren't counted, nor 1970*4882a593Smuzhiyun # collections in BBFILE_PATTERN_IGNORE_EMPTY) 1971*4882a593Smuzhiyun 1972*4882a593Smuzhiyun priorities = {} 1973*4882a593Smuzhiyun seen = set() 1974*4882a593Smuzhiyun matched = set() 1975*4882a593Smuzhiyun 1976*4882a593Smuzhiyun matched_regex = set() 1977*4882a593Smuzhiyun unmatched_regex = set() 1978*4882a593Smuzhiyun for _, _, regex, _ in self.bbfile_config_priorities: 1979*4882a593Smuzhiyun unmatched_regex.add(regex) 1980*4882a593Smuzhiyun 1981*4882a593Smuzhiyun # Calculate priorities for each file 1982*4882a593Smuzhiyun for p in pkgfns: 1983*4882a593Smuzhiyun realfn, cls, mc = bb.cache.virtualfn2realfn(p) 1984*4882a593Smuzhiyun priorities[p], regex = self.calc_bbfile_priority(realfn) 1985*4882a593Smuzhiyun if regex in unmatched_regex: 1986*4882a593Smuzhiyun matched_regex.add(regex) 1987*4882a593Smuzhiyun unmatched_regex.remove(regex) 1988*4882a593Smuzhiyun seen.add(realfn) 1989*4882a593Smuzhiyun if regex: 1990*4882a593Smuzhiyun matched.add(realfn) 1991*4882a593Smuzhiyun 1992*4882a593Smuzhiyun if unmatched_regex: 1993*4882a593Smuzhiyun # Account for bbappend files 
1994*4882a593Smuzhiyun for b in self.bbappends: 1995*4882a593Smuzhiyun (bbfile, append) = b 1996*4882a593Smuzhiyun seen.add(append) 1997*4882a593Smuzhiyun 1998*4882a593Smuzhiyun # Account for skipped recipes 1999*4882a593Smuzhiyun seen.update(fns) 2000*4882a593Smuzhiyun 2001*4882a593Smuzhiyun seen.difference_update(matched) 2002*4882a593Smuzhiyun 2003*4882a593Smuzhiyun def already_matched(fn): 2004*4882a593Smuzhiyun for regex in matched_regex: 2005*4882a593Smuzhiyun if regex.match(fn): 2006*4882a593Smuzhiyun return True 2007*4882a593Smuzhiyun return False 2008*4882a593Smuzhiyun 2009*4882a593Smuzhiyun for unmatch in unmatched_regex.copy(): 2010*4882a593Smuzhiyun for fn in seen: 2011*4882a593Smuzhiyun if unmatch.match(fn): 2012*4882a593Smuzhiyun # If the bbappend or file was already matched by another regex, skip it 2013*4882a593Smuzhiyun # e.g. for a layer within a layer, the outer regex could match, the inner 2014*4882a593Smuzhiyun # regex may match nothing and we should warn about that 2015*4882a593Smuzhiyun if already_matched(fn): 2016*4882a593Smuzhiyun continue 2017*4882a593Smuzhiyun unmatched_regex.remove(unmatch) 2018*4882a593Smuzhiyun break 2019*4882a593Smuzhiyun 2020*4882a593Smuzhiyun for collection, pattern, regex, _ in self.bbfile_config_priorities: 2021*4882a593Smuzhiyun if regex in unmatched_regex: 2022*4882a593Smuzhiyun if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1': 2023*4882a593Smuzhiyun collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default', 2024*4882a593Smuzhiyun collection, pattern)) 2025*4882a593Smuzhiyun 2026*4882a593Smuzhiyun return priorities 2027*4882a593Smuzhiyun 2028*4882a593Smuzhiyunclass ParsingFailure(Exception): 2029*4882a593Smuzhiyun def __init__(self, realexception, recipe): 2030*4882a593Smuzhiyun self.realexception = realexception 2031*4882a593Smuzhiyun self.recipe = recipe 2032*4882a593Smuzhiyun Exception.__init__(self, realexception, recipe) 

class Parser(multiprocessing.Process):
    """
    Worker process that parses recipe files.

    Jobs are handed over as a pre-chunked list at construction time; results
    go onto a shared multiprocessing queue.  Any item appearing on the 'quit'
    queue asks the worker to stop.
    """
    def __init__(self, jobs, results, quit, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        multiprocessing.Process.__init__(self)
        # Snapshot the metadata context and event handlers so every parse can
        # be reset to a pristine state (see parse()).
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile
        # While queue_signals is True, signals are deferred into
        # signal_received instead of being handled immediately.
        self.queue_signals = False
        self.signal_received = []
        self.signal_threadlock = threading.Lock()

    def catch_sig(self, signum, frame):
        # Defer the signal if we are inside a critical section, otherwise
        # handle it right away.
        if self.queue_signals:
            self.signal_received.append(signum)
        else:
            self.handle_sig(signum, frame)

    def handle_sig(self, signum, frame):
        if signum == signal.SIGTERM:
            # Restore the default handler and re-deliver SIGTERM so the
            # process terminates with the conventional signal status.
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            os.kill(os.getpid(), signal.SIGTERM)
        elif signum == signal.SIGINT:
            signal.default_int_handler(signum, frame)

    def run(self):
        """Process entry point: run the parse loop, optionally under a profiler."""
        if not self.profile:
            self.realrun()
            return

        # Prefer the C implementation of the profiler; fall back to the pure
        # Python one.  Only ImportError is expected here -- a bare except
        # would also swallow SystemExit/KeyboardInterrupt.
        try:
            import cProfile as profile
        except ImportError:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        """Main worker loop: pop jobs, parse them, push results."""
        # Signal handling here is hard. We must not terminate any process or thread holding the write
        # lock for the event stream as it will not be released, ever, and things will hang.
        # Python handles signals in the main thread/process but they can be raised from any thread and
        # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
        # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
        # new thread should also do so) and we defer handling but we handle with the local thread lock
        # held (a threading lock, not a multiprocessing one) so that no other thread in the process
        # can be in the critical section.
        signal.signal(signal.SIGTERM, self.catch_sig)
        signal.signal(signal.SIGHUP, signal.SIG_DFL)
        signal.signal(signal.SIGINT, self.catch_sig)
        bb.utils.set_process_name(multiprocessing.current_process().name)
        # Persist the codeparser/fetcher caches when this process exits.
        multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
        multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

        pending = []
        try:
            while True:
                # A token on the quit queue means "stop now".
                try:
                    self.quit.get_nowait()
                except queue.Empty:
                    pass
                else:
                    break

                # Retry a result that previously failed to enqueue before
                # parsing anything new.
                if pending:
                    result = pending.pop()
                else:
                    try:
                        job = self.jobs.pop()
                    except IndexError:
                        # No jobs left; we are done.
                        break
                    result = self.parse(*job)
                    # Clear the siggen cache after parsing to control memory usage, its huge
                    bb.parse.siggen.postparsing_clean_cache()
                try:
                    self.results.put(result, timeout=0.25)
                except queue.Full:
                    # Result queue is congested; keep the result and retry on
                    # the next loop iteration.
                    pending.append(result)
        finally:
            self.results.close()
            self.results.join_thread()

    def parse(self, mc, cache, filename, appends):
        """
        Parse a single recipe.

        Returns (parsed, mc, infos) on success, or (True, None, exception)
        when parsing failed -- exceptions are returned, not raised, so they
        can be shipped back over the result queue.
        """
        try:
            origfilter = bb.event.LogHandler.filter
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, mc, cache.parse(filename, appends)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, None, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, None, ParsingFailure(exc, filename)
        finally:
            bb.event.LogHandler.filter = origfilter

class CookerParser(object):
    """
    Drives recipe parsing: valid cache entries are loaded directly, the
    remaining files are distributed across a pool of Parser worker processes.
    """
    def __init__(self, cooker, mcfilelist, masked):
        self.mcfilelist = mcfilelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash
        self.cfgbuilder = cooker.databuilder

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0

        self.current = 0
        self.process_names = []

        # Partition every (multiconfig, file) pair into those whose cache
        # entry is still valid and those that need a fresh parse.
        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
        self.fromcache = set()
        self.willparse = set()
        for mc in self.cooker.multiconfigs:
            for filename in self.mcfilelist[mc]:
                appends = self.cooker.collections[mc].get_file_appends(filename)
                if not self.bb_caches[mc].cacheValid(filename, appends):
                    self.willparse.add((mc, self.bb_caches[mc], filename, appends))
                else:
                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends))

        self.total = len(self.fromcache) + len(self.willparse)
        self.toparse = len(self.willparse)
        # Fire one ParseProgress event per ~1% of the files to parse (min 1).
        self.progress_chunk = int(max(self.toparse / 100, 1))

        # Worker count: BB_NUMBER_PARSE_THREADS if set, else one per CPU,
        # capped at the number of files actually needing a parse.
        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                 multiprocessing.cpu_count()), self.toparse)

        self.start()
        self.haveshutdown = False
        self.syncthread = None

    def start(self):
        """Begin parsing: yield cached entries first, then spawn the worker pool."""
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)

            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()

            # Split the job list into num_processes chunks, round-robin by index.
            def chunkify(lst,n):
                return [lst[i::n] for i in range(n)]
            self.jobs = chunkify(list(self.willparse), self.num_processes)

            for i in range(0, self.num_processes):
                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            # Consumers see cached results followed by freshly parsed ones.
            self.results = itertools.chain(self.results, self.parse_generator())

    def shutdown(self, clean=True):
        """
        Stop the worker processes and synchronise the caches.

        clean=True fires a ParseCompleted event with the final statistics;
        clean=False reports that parsing was halted due to errors.
        Idempotent: repeated calls (and calls when nothing was parsed) return
        immediately.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
        else:
            bb.error("Parsing halted due to errors, see error messages above")

        # One quit token per worker asks each of them to stop.
        for process in self.processes:
            self.parser_quit.put(None)

        # Cleanup the queue before call process.join(), otherwise there might be
        # deadlocks.
        while True:
            try:
                self.result_queue.get(timeout=0.25)
            except queue.Empty:
                break

        # Escalating shutdown: polite join, then SIGINT, join again, then
        # terminate(), and finally an unbounded join.
        for process in self.processes:
            process.join(0.5)

        for process in self.processes:
            if process.exitcode is None:
                os.kill(process.pid, signal.SIGINT)

        for process in self.processes:
            process.join(0.5)

        for process in self.processes:
            if process.exitcode is None:
                process.terminate()

        for process in self.processes:
            process.join()
            # Added in 3.7, cleans up zombies
            if hasattr(process, "close"):
                process.close()

        self.parser_quit.close()
        # Allow data left in the cancel queue to be discarded
        self.parser_quit.cancel_join_thread()

        # Write the recipe caches out on a background thread; final_cleanup()
        # joins it later.
        def sync_caches():
            for c in self.bb_caches.values():
                c.sync()

        sync = threading.Thread(target=sync_caches, name="SyncThread")
        self.syncthread = sync
        sync.start()
        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            # Merge the per-worker profile dumps written by Parser.run().
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))

    def final_cleanup(self):
        """Wait for the cache-sync thread started by shutdown() to finish."""
        if self.syncthread:
            self.syncthread.join()

    def load_cached(self):
        """Yield (parsed, mc, infos) for cache-valid files; parsed is False
        when the entry really came from the cache."""
        for mc, cache, filename, appends in self.fromcache:
            cached, infos = cache.load(filename, appends)
            yield not cached, mc, infos

    def parse_generator(self):
        """Yield worker results; yields (None, None, None) on each poll
        timeout so the caller's main loop keeps turning."""
        empty = False
        while self.processes or not empty:
            # Reap any worker that has already exited.
            for process in self.processes.copy():
                if not process.is_alive():
                    process.join()
                    self.processes.remove(process)

            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except queue.Empty:
                empty = True
                yield None, None, None
            else:
                empty = False
                yield result

        # All workers gone but results missing means a parser died abnormally.
        if not (self.parsed >= self.toparse):
            raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)


    def parse_next(self):
        """
        Consume one result from self.results and fold it into the caches.

        Returns True while parsing should continue (including on poll
        timeouts) and False when parsing is complete or has failed; failures
        also trigger shutdown(clean=False).
        """
        result = []
        parsed = None
        try:
            parsed, mc, result = next(self.results)
            if isinstance(result, BaseException):
                # Turn exceptions back into exceptions
                raise result
            if parsed is None:
                # Timeout, loop back through the main loop
                return True

        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.debug('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            # Trim bitbake-internal frames from the traceback before logging.
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        # Record each virtual recipe, tracking skipped ones separately.
        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                        parsed=parsed, watcher = self.cooker.add_filewatch)
        return True

    def reparse(self, filename):
        """Synchronously re-parse filename for every multiconfig and add the
        results to the recipe caches."""
        to_reparse = set()
        for mc in self.cooker.multiconfigs:
            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))

        for mc, filename, appends in to_reparse:
            infos = self.bb_caches[mc].parse(filename, appends)
            for vfn, info_array in infos:
                self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)