1*4882a593Smuzhiyun# 2*4882a593Smuzhiyun# Copyright BitBake Contributors 3*4882a593Smuzhiyun# 4*4882a593Smuzhiyun# SPDX-License-Identifier: GPL-2.0-only 5*4882a593Smuzhiyun# 6*4882a593Smuzhiyun 7*4882a593Smuzhiyunimport hashlib 8*4882a593Smuzhiyunimport logging 9*4882a593Smuzhiyunimport os 10*4882a593Smuzhiyunimport re 11*4882a593Smuzhiyunimport tempfile 12*4882a593Smuzhiyunimport pickle 13*4882a593Smuzhiyunimport bb.data 14*4882a593Smuzhiyunimport difflib 15*4882a593Smuzhiyunimport simplediff 16*4882a593Smuzhiyunimport json 17*4882a593Smuzhiyunimport bb.compress.zstd 18*4882a593Smuzhiyunfrom bb.checksum import FileChecksumCache 19*4882a593Smuzhiyunfrom bb import runqueue 20*4882a593Smuzhiyunimport hashserv 21*4882a593Smuzhiyunimport hashserv.client 22*4882a593Smuzhiyun 23*4882a593Smuzhiyunlogger = logging.getLogger('BitBake.SigGen') 24*4882a593Smuzhiyunhashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') 25*4882a593Smuzhiyun 26*4882a593Smuzhiyunclass SetEncoder(json.JSONEncoder): 27*4882a593Smuzhiyun def default(self, obj): 28*4882a593Smuzhiyun if isinstance(obj, set): 29*4882a593Smuzhiyun return dict(_set_object=list(sorted(obj))) 30*4882a593Smuzhiyun return json.JSONEncoder.default(self, obj) 31*4882a593Smuzhiyun 32*4882a593Smuzhiyundef SetDecoder(dct): 33*4882a593Smuzhiyun if '_set_object' in dct: 34*4882a593Smuzhiyun return set(dct['_set_object']) 35*4882a593Smuzhiyun return dct 36*4882a593Smuzhiyun 37*4882a593Smuzhiyundef init(d): 38*4882a593Smuzhiyun siggens = [obj for obj in globals().values() 39*4882a593Smuzhiyun if type(obj) is type and issubclass(obj, SignatureGenerator)] 40*4882a593Smuzhiyun 41*4882a593Smuzhiyun desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop" 42*4882a593Smuzhiyun for sg in siggens: 43*4882a593Smuzhiyun if desired == sg.name: 44*4882a593Smuzhiyun return sg(d) 45*4882a593Smuzhiyun else: 46*4882a593Smuzhiyun logger.error("Invalid signature generator '%s', using default 'noop'\n" 47*4882a593Smuzhiyun "Available generators: 
%s", desired, 48*4882a593Smuzhiyun ', '.join(obj.name for obj in siggens)) 49*4882a593Smuzhiyun return SignatureGenerator(d) 50*4882a593Smuzhiyun 51*4882a593Smuzhiyunclass SignatureGenerator(object): 52*4882a593Smuzhiyun """ 53*4882a593Smuzhiyun """ 54*4882a593Smuzhiyun name = "noop" 55*4882a593Smuzhiyun 56*4882a593Smuzhiyun # If the derived class supports multiconfig datacaches, set this to True 57*4882a593Smuzhiyun # The default is False for backward compatibility with derived signature 58*4882a593Smuzhiyun # generators that do not understand multiconfig caches 59*4882a593Smuzhiyun supports_multiconfig_datacaches = False 60*4882a593Smuzhiyun 61*4882a593Smuzhiyun def __init__(self, data): 62*4882a593Smuzhiyun self.basehash = {} 63*4882a593Smuzhiyun self.taskhash = {} 64*4882a593Smuzhiyun self.unihash = {} 65*4882a593Smuzhiyun self.runtaskdeps = {} 66*4882a593Smuzhiyun self.file_checksum_values = {} 67*4882a593Smuzhiyun self.taints = {} 68*4882a593Smuzhiyun self.unitaskhashes = {} 69*4882a593Smuzhiyun self.tidtopn = {} 70*4882a593Smuzhiyun self.setscenetasks = set() 71*4882a593Smuzhiyun 72*4882a593Smuzhiyun def finalise(self, fn, d, varient): 73*4882a593Smuzhiyun return 74*4882a593Smuzhiyun 75*4882a593Smuzhiyun def postparsing_clean_cache(self): 76*4882a593Smuzhiyun return 77*4882a593Smuzhiyun 78*4882a593Smuzhiyun def get_unihash(self, tid): 79*4882a593Smuzhiyun return self.taskhash[tid] 80*4882a593Smuzhiyun 81*4882a593Smuzhiyun def prep_taskhash(self, tid, deps, dataCaches): 82*4882a593Smuzhiyun return 83*4882a593Smuzhiyun 84*4882a593Smuzhiyun def get_taskhash(self, tid, deps, dataCaches): 85*4882a593Smuzhiyun self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest() 86*4882a593Smuzhiyun return self.taskhash[tid] 87*4882a593Smuzhiyun 88*4882a593Smuzhiyun def writeout_file_checksum_cache(self): 89*4882a593Smuzhiyun """Write/update the file checksum cache onto disk""" 90*4882a593Smuzhiyun return 91*4882a593Smuzhiyun 92*4882a593Smuzhiyun def 
stampfile(self, stampbase, file_name, taskname, extrainfo): 93*4882a593Smuzhiyun return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') 94*4882a593Smuzhiyun 95*4882a593Smuzhiyun def stampcleanmask(self, stampbase, file_name, taskname, extrainfo): 96*4882a593Smuzhiyun return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') 97*4882a593Smuzhiyun 98*4882a593Smuzhiyun def dump_sigtask(self, fn, task, stampbase, runtime): 99*4882a593Smuzhiyun return 100*4882a593Smuzhiyun 101*4882a593Smuzhiyun def invalidate_task(self, task, d, fn): 102*4882a593Smuzhiyun bb.build.del_stamp(task, d, fn) 103*4882a593Smuzhiyun 104*4882a593Smuzhiyun def dump_sigs(self, dataCache, options): 105*4882a593Smuzhiyun return 106*4882a593Smuzhiyun 107*4882a593Smuzhiyun def get_taskdata(self): 108*4882a593Smuzhiyun return (self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks) 109*4882a593Smuzhiyun 110*4882a593Smuzhiyun def set_taskdata(self, data): 111*4882a593Smuzhiyun self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks = data 112*4882a593Smuzhiyun 113*4882a593Smuzhiyun def reset(self, data): 114*4882a593Smuzhiyun self.__init__(data) 115*4882a593Smuzhiyun 116*4882a593Smuzhiyun def get_taskhashes(self): 117*4882a593Smuzhiyun return self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn 118*4882a593Smuzhiyun 119*4882a593Smuzhiyun def set_taskhashes(self, hashes): 120*4882a593Smuzhiyun self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn = hashes 121*4882a593Smuzhiyun 122*4882a593Smuzhiyun def save_unitaskhashes(self): 123*4882a593Smuzhiyun return 124*4882a593Smuzhiyun 125*4882a593Smuzhiyun def set_setscene_tasks(self, setscene_tasks): 126*4882a593Smuzhiyun return 127*4882a593Smuzhiyun 128*4882a593Smuzhiyun @classmethod 129*4882a593Smuzhiyun def 
get_data_caches(cls, dataCaches, mc): 130*4882a593Smuzhiyun """ 131*4882a593Smuzhiyun This function returns the datacaches that should be passed to signature 132*4882a593Smuzhiyun generator functions. If the signature generator supports multiconfig 133*4882a593Smuzhiyun caches, the entire dictionary of data caches is sent, otherwise a 134*4882a593Smuzhiyun special proxy is sent that support both index access to all 135*4882a593Smuzhiyun multiconfigs, and also direct access for the default multiconfig. 136*4882a593Smuzhiyun 137*4882a593Smuzhiyun The proxy class allows code in this class itself to always use 138*4882a593Smuzhiyun multiconfig aware code (to ease maintenance), but derived classes that 139*4882a593Smuzhiyun are unaware of multiconfig data caches can still access the default 140*4882a593Smuzhiyun multiconfig as expected. 141*4882a593Smuzhiyun 142*4882a593Smuzhiyun Do not override this function in derived classes; it will be removed in 143*4882a593Smuzhiyun the future when support for multiconfig data caches is mandatory 144*4882a593Smuzhiyun """ 145*4882a593Smuzhiyun class DataCacheProxy(object): 146*4882a593Smuzhiyun def __init__(self): 147*4882a593Smuzhiyun pass 148*4882a593Smuzhiyun 149*4882a593Smuzhiyun def __getitem__(self, key): 150*4882a593Smuzhiyun return dataCaches[key] 151*4882a593Smuzhiyun 152*4882a593Smuzhiyun def __getattr__(self, name): 153*4882a593Smuzhiyun return getattr(dataCaches[mc], name) 154*4882a593Smuzhiyun 155*4882a593Smuzhiyun if cls.supports_multiconfig_datacaches: 156*4882a593Smuzhiyun return dataCaches 157*4882a593Smuzhiyun 158*4882a593Smuzhiyun return DataCacheProxy() 159*4882a593Smuzhiyun 160*4882a593Smuzhiyun def exit(self): 161*4882a593Smuzhiyun return 162*4882a593Smuzhiyun 163*4882a593Smuzhiyunclass SignatureGeneratorBasic(SignatureGenerator): 164*4882a593Smuzhiyun """ 165*4882a593Smuzhiyun """ 166*4882a593Smuzhiyun name = "basic" 167*4882a593Smuzhiyun 168*4882a593Smuzhiyun def __init__(self, data): 169*4882a593Smuzhiyun 
self.basehash = {} 170*4882a593Smuzhiyun self.taskhash = {} 171*4882a593Smuzhiyun self.unihash = {} 172*4882a593Smuzhiyun self.taskdeps = {} 173*4882a593Smuzhiyun self.runtaskdeps = {} 174*4882a593Smuzhiyun self.file_checksum_values = {} 175*4882a593Smuzhiyun self.taints = {} 176*4882a593Smuzhiyun self.gendeps = {} 177*4882a593Smuzhiyun self.lookupcache = {} 178*4882a593Smuzhiyun self.setscenetasks = set() 179*4882a593Smuzhiyun self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split()) 180*4882a593Smuzhiyun self.taskhash_ignore_tasks = None 181*4882a593Smuzhiyun self.init_rundepcheck(data) 182*4882a593Smuzhiyun checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE") 183*4882a593Smuzhiyun if checksum_cache_file: 184*4882a593Smuzhiyun self.checksum_cache = FileChecksumCache() 185*4882a593Smuzhiyun self.checksum_cache.init_cache(data, checksum_cache_file) 186*4882a593Smuzhiyun else: 187*4882a593Smuzhiyun self.checksum_cache = None 188*4882a593Smuzhiyun 189*4882a593Smuzhiyun self.unihash_cache = bb.cache.SimpleCache("3") 190*4882a593Smuzhiyun self.unitaskhashes = self.unihash_cache.init_cache(data, "bb_unihashes.dat", {}) 191*4882a593Smuzhiyun self.localdirsexclude = (data.getVar("BB_SIGNATURE_LOCAL_DIRS_EXCLUDE") or "CVS .bzr .git .hg .osc .p4 .repo .svn").split() 192*4882a593Smuzhiyun self.tidtopn = {} 193*4882a593Smuzhiyun 194*4882a593Smuzhiyun def init_rundepcheck(self, data): 195*4882a593Smuzhiyun self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None 196*4882a593Smuzhiyun if self.taskhash_ignore_tasks: 197*4882a593Smuzhiyun self.twl = re.compile(self.taskhash_ignore_tasks) 198*4882a593Smuzhiyun else: 199*4882a593Smuzhiyun self.twl = None 200*4882a593Smuzhiyun 201*4882a593Smuzhiyun def _build_data(self, fn, d): 202*4882a593Smuzhiyun 203*4882a593Smuzhiyun ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1') 204*4882a593Smuzhiyun tasklist, gendeps, lookupcache = 
bb.data.generate_dependencies(d, self.basehash_ignore_vars) 205*4882a593Smuzhiyun 206*4882a593Smuzhiyun taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, fn) 207*4882a593Smuzhiyun 208*4882a593Smuzhiyun for task in tasklist: 209*4882a593Smuzhiyun tid = fn + ":" + task 210*4882a593Smuzhiyun if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]: 211*4882a593Smuzhiyun bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid])) 212*4882a593Smuzhiyun bb.error("The following commands may help:") 213*4882a593Smuzhiyun cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task) 214*4882a593Smuzhiyun # Make sure sigdata is dumped before run printdiff 215*4882a593Smuzhiyun bb.error("%s -Snone" % cmd) 216*4882a593Smuzhiyun bb.error("Then:") 217*4882a593Smuzhiyun bb.error("%s -Sprintdiff\n" % cmd) 218*4882a593Smuzhiyun self.basehash[tid] = basehash[tid] 219*4882a593Smuzhiyun 220*4882a593Smuzhiyun self.taskdeps[fn] = taskdeps 221*4882a593Smuzhiyun self.gendeps[fn] = gendeps 222*4882a593Smuzhiyun self.lookupcache[fn] = lookupcache 223*4882a593Smuzhiyun 224*4882a593Smuzhiyun return taskdeps 225*4882a593Smuzhiyun 226*4882a593Smuzhiyun def set_setscene_tasks(self, setscene_tasks): 227*4882a593Smuzhiyun self.setscenetasks = set(setscene_tasks) 228*4882a593Smuzhiyun 229*4882a593Smuzhiyun def finalise(self, fn, d, variant): 230*4882a593Smuzhiyun 231*4882a593Smuzhiyun mc = d.getVar("__BBMULTICONFIG", False) or "" 232*4882a593Smuzhiyun if variant or mc: 233*4882a593Smuzhiyun fn = bb.cache.realfn2virtual(fn, variant, mc) 234*4882a593Smuzhiyun 235*4882a593Smuzhiyun try: 236*4882a593Smuzhiyun taskdeps = self._build_data(fn, d) 237*4882a593Smuzhiyun except bb.parse.SkipRecipe: 238*4882a593Smuzhiyun raise 239*4882a593Smuzhiyun except: 240*4882a593Smuzhiyun bb.warn("Error during finalise of 
%s" % fn) 241*4882a593Smuzhiyun raise 242*4882a593Smuzhiyun 243*4882a593Smuzhiyun #Slow but can be useful for debugging mismatched basehashes 244*4882a593Smuzhiyun #for task in self.taskdeps[fn]: 245*4882a593Smuzhiyun # self.dump_sigtask(fn, task, d.getVar("STAMP"), False) 246*4882a593Smuzhiyun 247*4882a593Smuzhiyun for task in taskdeps: 248*4882a593Smuzhiyun d.setVar("BB_BASEHASH:task-%s" % task, self.basehash[fn + ":" + task]) 249*4882a593Smuzhiyun 250*4882a593Smuzhiyun def postparsing_clean_cache(self): 251*4882a593Smuzhiyun # 252*4882a593Smuzhiyun # After parsing we can remove some things from memory to reduce our memory footprint 253*4882a593Smuzhiyun # 254*4882a593Smuzhiyun self.gendeps = {} 255*4882a593Smuzhiyun self.lookupcache = {} 256*4882a593Smuzhiyun self.taskdeps = {} 257*4882a593Smuzhiyun 258*4882a593Smuzhiyun def rundep_check(self, fn, recipename, task, dep, depname, dataCaches): 259*4882a593Smuzhiyun # Return True if we should keep the dependency, False to drop it 260*4882a593Smuzhiyun # We only manipulate the dependencies for packages not in the ignore 261*4882a593Smuzhiyun # list 262*4882a593Smuzhiyun if self.twl and not self.twl.search(recipename): 263*4882a593Smuzhiyun # then process the actual dependencies 264*4882a593Smuzhiyun if self.twl.search(depname): 265*4882a593Smuzhiyun return False 266*4882a593Smuzhiyun return True 267*4882a593Smuzhiyun 268*4882a593Smuzhiyun def read_taint(self, fn, task, stampbase): 269*4882a593Smuzhiyun taint = None 270*4882a593Smuzhiyun try: 271*4882a593Smuzhiyun with open(stampbase + '.' 
+ task + '.taint', 'r') as taintf: 272*4882a593Smuzhiyun taint = taintf.read() 273*4882a593Smuzhiyun except IOError: 274*4882a593Smuzhiyun pass 275*4882a593Smuzhiyun return taint 276*4882a593Smuzhiyun 277*4882a593Smuzhiyun def prep_taskhash(self, tid, deps, dataCaches): 278*4882a593Smuzhiyun 279*4882a593Smuzhiyun (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) 280*4882a593Smuzhiyun 281*4882a593Smuzhiyun self.basehash[tid] = dataCaches[mc].basetaskhash[tid] 282*4882a593Smuzhiyun self.runtaskdeps[tid] = [] 283*4882a593Smuzhiyun self.file_checksum_values[tid] = [] 284*4882a593Smuzhiyun recipename = dataCaches[mc].pkg_fn[fn] 285*4882a593Smuzhiyun 286*4882a593Smuzhiyun self.tidtopn[tid] = recipename 287*4882a593Smuzhiyun 288*4882a593Smuzhiyun for dep in sorted(deps, key=clean_basepath): 289*4882a593Smuzhiyun (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep) 290*4882a593Smuzhiyun depname = dataCaches[depmc].pkg_fn[depmcfn] 291*4882a593Smuzhiyun if not self.supports_multiconfig_datacaches and mc != depmc: 292*4882a593Smuzhiyun # If the signature generator doesn't understand multiconfig 293*4882a593Smuzhiyun # data caches, any dependency not in the same multiconfig must 294*4882a593Smuzhiyun # be skipped for backward compatibility 295*4882a593Smuzhiyun continue 296*4882a593Smuzhiyun if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches): 297*4882a593Smuzhiyun continue 298*4882a593Smuzhiyun if dep not in self.taskhash: 299*4882a593Smuzhiyun bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" 
% dep) 300*4882a593Smuzhiyun self.runtaskdeps[tid].append(dep) 301*4882a593Smuzhiyun 302*4882a593Smuzhiyun if task in dataCaches[mc].file_checksums[fn]: 303*4882a593Smuzhiyun if self.checksum_cache: 304*4882a593Smuzhiyun checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude) 305*4882a593Smuzhiyun else: 306*4882a593Smuzhiyun checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude) 307*4882a593Smuzhiyun for (f,cs) in checksums: 308*4882a593Smuzhiyun self.file_checksum_values[tid].append((f,cs)) 309*4882a593Smuzhiyun 310*4882a593Smuzhiyun taskdep = dataCaches[mc].task_deps[fn] 311*4882a593Smuzhiyun if 'nostamp' in taskdep and task in taskdep['nostamp']: 312*4882a593Smuzhiyun # Nostamp tasks need an implicit taint so that they force any dependent tasks to run 313*4882a593Smuzhiyun if tid in self.taints and self.taints[tid].startswith("nostamp:"): 314*4882a593Smuzhiyun # Don't reset taint value upon every call 315*4882a593Smuzhiyun pass 316*4882a593Smuzhiyun else: 317*4882a593Smuzhiyun import uuid 318*4882a593Smuzhiyun taint = str(uuid.uuid4()) 319*4882a593Smuzhiyun self.taints[tid] = "nostamp:" + taint 320*4882a593Smuzhiyun 321*4882a593Smuzhiyun taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn]) 322*4882a593Smuzhiyun if taint: 323*4882a593Smuzhiyun self.taints[tid] = taint 324*4882a593Smuzhiyun logger.warning("%s is tainted from a forced run" % tid) 325*4882a593Smuzhiyun 326*4882a593Smuzhiyun return 327*4882a593Smuzhiyun 328*4882a593Smuzhiyun def get_taskhash(self, tid, deps, dataCaches): 329*4882a593Smuzhiyun 330*4882a593Smuzhiyun data = self.basehash[tid] 331*4882a593Smuzhiyun for dep in self.runtaskdeps[tid]: 332*4882a593Smuzhiyun data += self.get_unihash(dep) 333*4882a593Smuzhiyun 334*4882a593Smuzhiyun for (f, cs) in self.file_checksum_values[tid]: 335*4882a593Smuzhiyun if cs: 336*4882a593Smuzhiyun if "/./" in f: 
337*4882a593Smuzhiyun data += "./" + f.split("/./")[1] 338*4882a593Smuzhiyun data += cs 339*4882a593Smuzhiyun 340*4882a593Smuzhiyun if tid in self.taints: 341*4882a593Smuzhiyun if self.taints[tid].startswith("nostamp:"): 342*4882a593Smuzhiyun data += self.taints[tid][8:] 343*4882a593Smuzhiyun else: 344*4882a593Smuzhiyun data += self.taints[tid] 345*4882a593Smuzhiyun 346*4882a593Smuzhiyun h = hashlib.sha256(data.encode("utf-8")).hexdigest() 347*4882a593Smuzhiyun self.taskhash[tid] = h 348*4882a593Smuzhiyun #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task]) 349*4882a593Smuzhiyun return h 350*4882a593Smuzhiyun 351*4882a593Smuzhiyun def writeout_file_checksum_cache(self): 352*4882a593Smuzhiyun """Write/update the file checksum cache onto disk""" 353*4882a593Smuzhiyun if self.checksum_cache: 354*4882a593Smuzhiyun self.checksum_cache.save_extras() 355*4882a593Smuzhiyun self.checksum_cache.save_merge() 356*4882a593Smuzhiyun else: 357*4882a593Smuzhiyun bb.fetch2.fetcher_parse_save() 358*4882a593Smuzhiyun bb.fetch2.fetcher_parse_done() 359*4882a593Smuzhiyun 360*4882a593Smuzhiyun def save_unitaskhashes(self): 361*4882a593Smuzhiyun self.unihash_cache.save(self.unitaskhashes) 362*4882a593Smuzhiyun 363*4882a593Smuzhiyun def dump_sigtask(self, fn, task, stampbase, runtime): 364*4882a593Smuzhiyun 365*4882a593Smuzhiyun tid = fn + ":" + task 366*4882a593Smuzhiyun referencestamp = stampbase 367*4882a593Smuzhiyun if isinstance(runtime, str) and runtime.startswith("customfile"): 368*4882a593Smuzhiyun sigfile = stampbase 369*4882a593Smuzhiyun referencestamp = runtime[11:] 370*4882a593Smuzhiyun elif runtime and tid in self.taskhash: 371*4882a593Smuzhiyun sigfile = stampbase + "." + task + ".sigdata" + "." + self.get_unihash(tid) 372*4882a593Smuzhiyun else: 373*4882a593Smuzhiyun sigfile = stampbase + "." + task + ".sigbasedata" + "." 
+ self.basehash[tid] 374*4882a593Smuzhiyun 375*4882a593Smuzhiyun with bb.utils.umask(0o002): 376*4882a593Smuzhiyun bb.utils.mkdirhier(os.path.dirname(sigfile)) 377*4882a593Smuzhiyun 378*4882a593Smuzhiyun data = {} 379*4882a593Smuzhiyun data['task'] = task 380*4882a593Smuzhiyun data['basehash_ignore_vars'] = self.basehash_ignore_vars 381*4882a593Smuzhiyun data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks 382*4882a593Smuzhiyun data['taskdeps'] = self.taskdeps[fn][task] 383*4882a593Smuzhiyun data['basehash'] = self.basehash[tid] 384*4882a593Smuzhiyun data['gendeps'] = {} 385*4882a593Smuzhiyun data['varvals'] = {} 386*4882a593Smuzhiyun data['varvals'][task] = self.lookupcache[fn][task] 387*4882a593Smuzhiyun for dep in self.taskdeps[fn][task]: 388*4882a593Smuzhiyun if dep in self.basehash_ignore_vars: 389*4882a593Smuzhiyun continue 390*4882a593Smuzhiyun data['gendeps'][dep] = self.gendeps[fn][dep] 391*4882a593Smuzhiyun data['varvals'][dep] = self.lookupcache[fn][dep] 392*4882a593Smuzhiyun 393*4882a593Smuzhiyun if runtime and tid in self.taskhash: 394*4882a593Smuzhiyun data['runtaskdeps'] = self.runtaskdeps[tid] 395*4882a593Smuzhiyun data['file_checksum_values'] = [] 396*4882a593Smuzhiyun for f,cs in self.file_checksum_values[tid]: 397*4882a593Smuzhiyun if "/./" in f: 398*4882a593Smuzhiyun data['file_checksum_values'].append(("./" + f.split("/./")[1], cs)) 399*4882a593Smuzhiyun else: 400*4882a593Smuzhiyun data['file_checksum_values'].append((os.path.basename(f), cs)) 401*4882a593Smuzhiyun data['runtaskhashes'] = {} 402*4882a593Smuzhiyun for dep in data['runtaskdeps']: 403*4882a593Smuzhiyun data['runtaskhashes'][dep] = self.get_unihash(dep) 404*4882a593Smuzhiyun data['taskhash'] = self.taskhash[tid] 405*4882a593Smuzhiyun data['unihash'] = self.get_unihash(tid) 406*4882a593Smuzhiyun 407*4882a593Smuzhiyun taint = self.read_taint(fn, task, referencestamp) 408*4882a593Smuzhiyun if taint: 409*4882a593Smuzhiyun data['taint'] = taint 410*4882a593Smuzhiyun 
411*4882a593Smuzhiyun if runtime and tid in self.taints: 412*4882a593Smuzhiyun if 'nostamp:' in self.taints[tid]: 413*4882a593Smuzhiyun data['taint'] = self.taints[tid] 414*4882a593Smuzhiyun 415*4882a593Smuzhiyun computed_basehash = calc_basehash(data) 416*4882a593Smuzhiyun if computed_basehash != self.basehash[tid]: 417*4882a593Smuzhiyun bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[tid], tid)) 418*4882a593Smuzhiyun if runtime and tid in self.taskhash: 419*4882a593Smuzhiyun computed_taskhash = calc_taskhash(data) 420*4882a593Smuzhiyun if computed_taskhash != self.taskhash[tid]: 421*4882a593Smuzhiyun bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid)) 422*4882a593Smuzhiyun sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash) 423*4882a593Smuzhiyun 424*4882a593Smuzhiyun fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.") 425*4882a593Smuzhiyun try: 426*4882a593Smuzhiyun with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f: 427*4882a593Smuzhiyun json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder) 428*4882a593Smuzhiyun f.flush() 429*4882a593Smuzhiyun os.chmod(tmpfile, 0o664) 430*4882a593Smuzhiyun bb.utils.rename(tmpfile, sigfile) 431*4882a593Smuzhiyun except (OSError, IOError) as err: 432*4882a593Smuzhiyun try: 433*4882a593Smuzhiyun os.unlink(tmpfile) 434*4882a593Smuzhiyun except OSError: 435*4882a593Smuzhiyun pass 436*4882a593Smuzhiyun raise err 437*4882a593Smuzhiyun 438*4882a593Smuzhiyun def dump_sigfn(self, fn, dataCaches, options): 439*4882a593Smuzhiyun if fn in self.taskdeps: 440*4882a593Smuzhiyun for task in self.taskdeps[fn]: 441*4882a593Smuzhiyun tid = fn + ":" + task 442*4882a593Smuzhiyun mc = bb.runqueue.mc_from_tid(tid) 443*4882a593Smuzhiyun if tid not in self.taskhash: 444*4882a593Smuzhiyun continue 445*4882a593Smuzhiyun if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]: 
446*4882a593Smuzhiyun bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid) 447*4882a593Smuzhiyun bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid])) 448*4882a593Smuzhiyun self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True) 449*4882a593Smuzhiyun 450*4882a593Smuzhiyunclass SignatureGeneratorBasicHash(SignatureGeneratorBasic): 451*4882a593Smuzhiyun name = "basichash" 452*4882a593Smuzhiyun 453*4882a593Smuzhiyun def get_stampfile_hash(self, tid): 454*4882a593Smuzhiyun if tid in self.taskhash: 455*4882a593Smuzhiyun return self.taskhash[tid] 456*4882a593Smuzhiyun 457*4882a593Smuzhiyun # If task is not in basehash, then error 458*4882a593Smuzhiyun return self.basehash[tid] 459*4882a593Smuzhiyun 460*4882a593Smuzhiyun def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False): 461*4882a593Smuzhiyun if taskname != "do_setscene" and taskname.endswith("_setscene"): 462*4882a593Smuzhiyun tid = fn + ":" + taskname[:-9] 463*4882a593Smuzhiyun else: 464*4882a593Smuzhiyun tid = fn + ":" + taskname 465*4882a593Smuzhiyun if clean: 466*4882a593Smuzhiyun h = "*" 467*4882a593Smuzhiyun else: 468*4882a593Smuzhiyun h = self.get_stampfile_hash(tid) 469*4882a593Smuzhiyun 470*4882a593Smuzhiyun return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.') 471*4882a593Smuzhiyun 472*4882a593Smuzhiyun def stampcleanmask(self, stampbase, fn, taskname, extrainfo): 473*4882a593Smuzhiyun return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True) 474*4882a593Smuzhiyun 475*4882a593Smuzhiyun def invalidate_task(self, task, d, fn): 476*4882a593Smuzhiyun bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task)) 477*4882a593Smuzhiyun bb.build.write_taint(task, d, fn) 478*4882a593Smuzhiyun 479*4882a593Smuzhiyunclass SignatureGeneratorUniHashMixIn(object): 480*4882a593Smuzhiyun def __init__(self, data): 481*4882a593Smuzhiyun self.extramethod = {} 482*4882a593Smuzhiyun 
super().__init__(data) 483*4882a593Smuzhiyun 484*4882a593Smuzhiyun def get_taskdata(self): 485*4882a593Smuzhiyun return (self.server, self.method, self.extramethod) + super().get_taskdata() 486*4882a593Smuzhiyun 487*4882a593Smuzhiyun def set_taskdata(self, data): 488*4882a593Smuzhiyun self.server, self.method, self.extramethod = data[:3] 489*4882a593Smuzhiyun super().set_taskdata(data[3:]) 490*4882a593Smuzhiyun 491*4882a593Smuzhiyun def client(self): 492*4882a593Smuzhiyun if getattr(self, '_client', None) is None: 493*4882a593Smuzhiyun self._client = hashserv.create_client(self.server) 494*4882a593Smuzhiyun return self._client 495*4882a593Smuzhiyun 496*4882a593Smuzhiyun def reset(self, data): 497*4882a593Smuzhiyun if getattr(self, '_client', None) is not None: 498*4882a593Smuzhiyun self._client.close() 499*4882a593Smuzhiyun self._client = None 500*4882a593Smuzhiyun return super().reset(data) 501*4882a593Smuzhiyun 502*4882a593Smuzhiyun def exit(self): 503*4882a593Smuzhiyun if getattr(self, '_client', None) is not None: 504*4882a593Smuzhiyun self._client.close() 505*4882a593Smuzhiyun self._client = None 506*4882a593Smuzhiyun return super().exit() 507*4882a593Smuzhiyun 508*4882a593Smuzhiyun def get_stampfile_hash(self, tid): 509*4882a593Smuzhiyun if tid in self.taskhash: 510*4882a593Smuzhiyun # If a unique hash is reported, use it as the stampfile hash. 
This 511*4882a593Smuzhiyun # ensures that if a task won't be re-run if the taskhash changes, 512*4882a593Smuzhiyun # but it would result in the same output hash 513*4882a593Smuzhiyun unihash = self._get_unihash(tid) 514*4882a593Smuzhiyun if unihash is not None: 515*4882a593Smuzhiyun return unihash 516*4882a593Smuzhiyun 517*4882a593Smuzhiyun return super().get_stampfile_hash(tid) 518*4882a593Smuzhiyun 519*4882a593Smuzhiyun def set_unihash(self, tid, unihash): 520*4882a593Smuzhiyun (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid) 521*4882a593Smuzhiyun key = mc + ":" + self.tidtopn[tid] + ":" + taskname 522*4882a593Smuzhiyun self.unitaskhashes[key] = (self.taskhash[tid], unihash) 523*4882a593Smuzhiyun self.unihash[tid] = unihash 524*4882a593Smuzhiyun 525*4882a593Smuzhiyun def _get_unihash(self, tid, checkkey=None): 526*4882a593Smuzhiyun if tid not in self.tidtopn: 527*4882a593Smuzhiyun return None 528*4882a593Smuzhiyun (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid) 529*4882a593Smuzhiyun key = mc + ":" + self.tidtopn[tid] + ":" + taskname 530*4882a593Smuzhiyun if key not in self.unitaskhashes: 531*4882a593Smuzhiyun return None 532*4882a593Smuzhiyun if not checkkey: 533*4882a593Smuzhiyun checkkey = self.taskhash[tid] 534*4882a593Smuzhiyun (key, unihash) = self.unitaskhashes[key] 535*4882a593Smuzhiyun if key != checkkey: 536*4882a593Smuzhiyun return None 537*4882a593Smuzhiyun return unihash 538*4882a593Smuzhiyun 539*4882a593Smuzhiyun def get_unihash(self, tid): 540*4882a593Smuzhiyun taskhash = self.taskhash[tid] 541*4882a593Smuzhiyun 542*4882a593Smuzhiyun # If its not a setscene task we can return 543*4882a593Smuzhiyun if self.setscenetasks and tid not in self.setscenetasks: 544*4882a593Smuzhiyun self.unihash[tid] = None 545*4882a593Smuzhiyun return taskhash 546*4882a593Smuzhiyun 547*4882a593Smuzhiyun # TODO: This cache can grow unbounded. 
It probably only needs to keep 548*4882a593Smuzhiyun # for each task 549*4882a593Smuzhiyun unihash = self._get_unihash(tid) 550*4882a593Smuzhiyun if unihash is not None: 551*4882a593Smuzhiyun self.unihash[tid] = unihash 552*4882a593Smuzhiyun return unihash 553*4882a593Smuzhiyun 554*4882a593Smuzhiyun # In the absence of being able to discover a unique hash from the 555*4882a593Smuzhiyun # server, make it be equivalent to the taskhash. The unique "hash" only 556*4882a593Smuzhiyun # really needs to be a unique string (not even necessarily a hash), but 557*4882a593Smuzhiyun # making it match the taskhash has a few advantages: 558*4882a593Smuzhiyun # 559*4882a593Smuzhiyun # 1) All of the sstate code that assumes hashes can be the same 560*4882a593Smuzhiyun # 2) It provides maximal compatibility with builders that don't use 561*4882a593Smuzhiyun # an equivalency server 562*4882a593Smuzhiyun # 3) The value is easy for multiple independent builders to derive the 563*4882a593Smuzhiyun # same unique hash from the same input. This means that if the 564*4882a593Smuzhiyun # independent builders find the same taskhash, but it isn't reported 565*4882a593Smuzhiyun # to the server, there is a better chance that they will agree on 566*4882a593Smuzhiyun # the unique hash. 567*4882a593Smuzhiyun unihash = taskhash 568*4882a593Smuzhiyun 569*4882a593Smuzhiyun try: 570*4882a593Smuzhiyun method = self.method 571*4882a593Smuzhiyun if tid in self.extramethod: 572*4882a593Smuzhiyun method = method + self.extramethod[tid] 573*4882a593Smuzhiyun data = self.client().get_unihash(method, self.taskhash[tid]) 574*4882a593Smuzhiyun if data: 575*4882a593Smuzhiyun unihash = data 576*4882a593Smuzhiyun # A unique hash equal to the taskhash is not very interesting, 577*4882a593Smuzhiyun # so it is reported it at debug level 2. 
If they differ, that 578*4882a593Smuzhiyun # is much more interesting, so it is reported at debug level 1 579*4882a593Smuzhiyun hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server)) 580*4882a593Smuzhiyun else: 581*4882a593Smuzhiyun hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) 582*4882a593Smuzhiyun except ConnectionError as e: 583*4882a593Smuzhiyun bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 584*4882a593Smuzhiyun 585*4882a593Smuzhiyun self.set_unihash(tid, unihash) 586*4882a593Smuzhiyun self.unihash[tid] = unihash 587*4882a593Smuzhiyun return unihash 588*4882a593Smuzhiyun 589*4882a593Smuzhiyun def report_unihash(self, path, task, d): 590*4882a593Smuzhiyun import importlib 591*4882a593Smuzhiyun 592*4882a593Smuzhiyun taskhash = d.getVar('BB_TASKHASH') 593*4882a593Smuzhiyun unihash = d.getVar('BB_UNIHASH') 594*4882a593Smuzhiyun report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1' 595*4882a593Smuzhiyun tempdir = d.getVar('T') 596*4882a593Smuzhiyun fn = d.getVar('BB_FILENAME') 597*4882a593Smuzhiyun tid = fn + ':do_' + task 598*4882a593Smuzhiyun key = tid + ':' + taskhash 599*4882a593Smuzhiyun 600*4882a593Smuzhiyun if self.setscenetasks and tid not in self.setscenetasks: 601*4882a593Smuzhiyun return 602*4882a593Smuzhiyun 603*4882a593Smuzhiyun # This can happen if locked sigs are in action. Detect and just exit 604*4882a593Smuzhiyun if taskhash != self.taskhash[tid]: 605*4882a593Smuzhiyun return 606*4882a593Smuzhiyun 607*4882a593Smuzhiyun # Sanity checks 608*4882a593Smuzhiyun cache_unihash = self._get_unihash(tid, checkkey=taskhash) 609*4882a593Smuzhiyun if cache_unihash is None: 610*4882a593Smuzhiyun bb.fatal('%s not in unihash cache. 
Please report this error' % key) 611*4882a593Smuzhiyun 612*4882a593Smuzhiyun if cache_unihash != unihash: 613*4882a593Smuzhiyun bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash)) 614*4882a593Smuzhiyun 615*4882a593Smuzhiyun sigfile = None 616*4882a593Smuzhiyun sigfile_name = "depsig.do_%s.%d" % (task, os.getpid()) 617*4882a593Smuzhiyun sigfile_link = "depsig.do_%s" % task 618*4882a593Smuzhiyun 619*4882a593Smuzhiyun try: 620*4882a593Smuzhiyun sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b') 621*4882a593Smuzhiyun 622*4882a593Smuzhiyun locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d} 623*4882a593Smuzhiyun 624*4882a593Smuzhiyun if "." in self.method: 625*4882a593Smuzhiyun (module, method) = self.method.rsplit('.', 1) 626*4882a593Smuzhiyun locs['method'] = getattr(importlib.import_module(module), method) 627*4882a593Smuzhiyun outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs) 628*4882a593Smuzhiyun else: 629*4882a593Smuzhiyun outhash = bb.utils.better_eval(self.method + '(path, sigfile, task, d)', locs) 630*4882a593Smuzhiyun 631*4882a593Smuzhiyun try: 632*4882a593Smuzhiyun extra_data = {} 633*4882a593Smuzhiyun 634*4882a593Smuzhiyun owner = d.getVar('SSTATE_HASHEQUIV_OWNER') 635*4882a593Smuzhiyun if owner: 636*4882a593Smuzhiyun extra_data['owner'] = owner 637*4882a593Smuzhiyun 638*4882a593Smuzhiyun if report_taskdata: 639*4882a593Smuzhiyun sigfile.seek(0) 640*4882a593Smuzhiyun 641*4882a593Smuzhiyun extra_data['PN'] = d.getVar('PN') 642*4882a593Smuzhiyun extra_data['PV'] = d.getVar('PV') 643*4882a593Smuzhiyun extra_data['PR'] = d.getVar('PR') 644*4882a593Smuzhiyun extra_data['task'] = task 645*4882a593Smuzhiyun extra_data['outhash_siginfo'] = sigfile.read().decode('utf-8') 646*4882a593Smuzhiyun 647*4882a593Smuzhiyun method = self.method 648*4882a593Smuzhiyun if tid in self.extramethod: 649*4882a593Smuzhiyun method = method + self.extramethod[tid] 650*4882a593Smuzhiyun 651*4882a593Smuzhiyun data = 
self.client().report_unihash(taskhash, method, outhash, unihash, extra_data) 652*4882a593Smuzhiyun new_unihash = data['unihash'] 653*4882a593Smuzhiyun 654*4882a593Smuzhiyun if new_unihash != unihash: 655*4882a593Smuzhiyun hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server)) 656*4882a593Smuzhiyun bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d) 657*4882a593Smuzhiyun self.set_unihash(tid, new_unihash) 658*4882a593Smuzhiyun d.setVar('BB_UNIHASH', new_unihash) 659*4882a593Smuzhiyun else: 660*4882a593Smuzhiyun hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) 661*4882a593Smuzhiyun except ConnectionError as e: 662*4882a593Smuzhiyun bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 663*4882a593Smuzhiyun finally: 664*4882a593Smuzhiyun if sigfile: 665*4882a593Smuzhiyun sigfile.close() 666*4882a593Smuzhiyun 667*4882a593Smuzhiyun sigfile_link_path = os.path.join(tempdir, sigfile_link) 668*4882a593Smuzhiyun bb.utils.remove(sigfile_link_path) 669*4882a593Smuzhiyun 670*4882a593Smuzhiyun try: 671*4882a593Smuzhiyun os.symlink(sigfile_name, sigfile_link_path) 672*4882a593Smuzhiyun except OSError: 673*4882a593Smuzhiyun pass 674*4882a593Smuzhiyun 675*4882a593Smuzhiyun def report_unihash_equiv(self, tid, taskhash, wanted_unihash, current_unihash, datacaches): 676*4882a593Smuzhiyun try: 677*4882a593Smuzhiyun extra_data = {} 678*4882a593Smuzhiyun method = self.method 679*4882a593Smuzhiyun if tid in self.extramethod: 680*4882a593Smuzhiyun method = method + self.extramethod[tid] 681*4882a593Smuzhiyun 682*4882a593Smuzhiyun data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data) 683*4882a593Smuzhiyun hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) 684*4882a593Smuzhiyun 685*4882a593Smuzhiyun if data is None: 
686*4882a593Smuzhiyun bb.warn("Server unable to handle unihash report") 687*4882a593Smuzhiyun return False 688*4882a593Smuzhiyun 689*4882a593Smuzhiyun finalunihash = data['unihash'] 690*4882a593Smuzhiyun 691*4882a593Smuzhiyun if finalunihash == current_unihash: 692*4882a593Smuzhiyun hashequiv_logger.verbose('Task %s unihash %s unchanged by server' % (tid, finalunihash)) 693*4882a593Smuzhiyun elif finalunihash == wanted_unihash: 694*4882a593Smuzhiyun hashequiv_logger.verbose('Task %s unihash changed %s -> %s as wanted' % (tid, current_unihash, finalunihash)) 695*4882a593Smuzhiyun self.set_unihash(tid, finalunihash) 696*4882a593Smuzhiyun return True 697*4882a593Smuzhiyun else: 698*4882a593Smuzhiyun # TODO: What to do here? 699*4882a593Smuzhiyun hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) 700*4882a593Smuzhiyun 701*4882a593Smuzhiyun except ConnectionError as e: 702*4882a593Smuzhiyun bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 703*4882a593Smuzhiyun 704*4882a593Smuzhiyun return False 705*4882a593Smuzhiyun 706*4882a593Smuzhiyun# 707*4882a593Smuzhiyun# Dummy class used for bitbake-selftest 708*4882a593Smuzhiyun# 709*4882a593Smuzhiyunclass SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureGeneratorBasicHash): 710*4882a593Smuzhiyun name = "TestEquivHash" 711*4882a593Smuzhiyun def init_rundepcheck(self, data): 712*4882a593Smuzhiyun super().init_rundepcheck(data) 713*4882a593Smuzhiyun self.server = data.getVar('BB_HASHSERVE') 714*4882a593Smuzhiyun self.method = "sstate_output_hash" 715*4882a593Smuzhiyun 716*4882a593Smuzhiyun# 717*4882a593Smuzhiyun# Dummy class used for bitbake-selftest 718*4882a593Smuzhiyun# 719*4882a593Smuzhiyunclass SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash): 720*4882a593Smuzhiyun name = "TestMulticonfigDepends" 721*4882a593Smuzhiyun supports_multiconfig_datacaches = True 722*4882a593Smuzhiyun 723*4882a593Smuzhiyundef 
def dump_this_task(outfile, d):
    """Dump the signature information for the currently executing task
    (as identified by BB_FILENAME/BB_CURRENTTASK in *d*) to *outfile*."""
    import bb.parse
    fn = d.getVar("BB_FILENAME")
    task = "do_" + d.getVar("BB_CURRENTTASK")
    referencestamp = bb.build.stamp_internal(task, d, None, True)
    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)

def init_colors(enable_color):
    """Initialise colour dict for passing to compare_sigfiles()"""
    # First set up the colours
    colors = {'color_title':   '\033[1m',
              'color_default': '\033[0m',
              'color_add':     '\033[0;32m',
              'color_remove':  '\033[0;31m',
             }
    # Leave all keys present but clear the values
    if not enable_color:
        for k in colors.keys():
            colors[k] = ''
    return colors

def worddiff_str(oldstr, newstr, colors=None):
    """Return a word-level inline diff of two strings, optionally coloured.

    Added runs are rendered as {+...+}, removed runs as [-...-]. A note is
    appended when the only difference is whitespace.
    """
    if not colors:
        colors = init_colors(False)
    diff = simplediff.diff(oldstr.split(' '), newstr.split(' '))
    ret = []
    for change, value in diff:
        value = ' '.join(value)
        if change == '=':
            ret.append(value)
        elif change == '+':
            item = '{color_add}{{+{value}+}}{color_default}'.format(value=value, **colors)
            ret.append(item)
        elif change == '-':
            item = '{color_remove}[-{value}-]{color_default}'.format(value=value, **colors)
            ret.append(item)
    whitespace_note = ''
    if oldstr != newstr and ' '.join(oldstr.split()) == ' '.join(newstr.split()):
        whitespace_note = ' (whitespace changed)'
    return '"%s"%s' % (' '.join(ret), whitespace_note)

def list_inline_diff(oldlist, newlist, colors=None):
    """Return an inline diff of two lists rendered as a single '[...]' string,
    with +/- prefixes (optionally coloured) on added/removed items."""
    if not colors:
        colors = init_colors(False)
    diff = simplediff.diff(oldlist, newlist)
    ret = []
    for change, value in diff:
        value = ' '.join(value)
        if change == '=':
            ret.append("'%s'" % value)
        elif change == '+':
            item = '{color_add}+{value}{color_default}'.format(value=value, **colors)
            ret.append(item)
        elif change == '-':
            item = '{color_remove}-{value}{color_default}'.format(value=value, **colors)
            ret.append(item)
    return '[%s]' % (', '.join(ret))

def clean_basepath(basepath):
    """Reduce a full task path to '<recipe>/<task>' plus any virtual/mc
    provider suffixes, e.g. 'virtual:a:/path/foo.bb/do_x' -> 'foo.bb/do_x:virtual:a'."""
    basepath, dir, recipe_task = basepath.rsplit("/", 2)
    cleaned = dir + '/' + recipe_task

    if basepath[0] == '/':
        return cleaned

    if basepath.startswith("mc:") and basepath.count(':') >= 2:
        mc, mc_name, basepath = basepath.split(":", 2)
        mc_suffix = ':mc:' + mc_name
    else:
        mc_suffix = ''

    # mc stuff now removed from basepath. Whatever was next, if present will be the first
    # suffix. ':/', recipe path start, marks the end of this. Something like
    # 'virtual:a[:b[:c]]:/path...' (b and c being optional)
    if basepath[0] != '/':
        cleaned += ':' + basepath.split(':/', 1)[0]

    return cleaned + mc_suffix

def clean_basepaths(a):
    """Return a copy of dict *a* with every key passed through clean_basepath()."""
    return {clean_basepath(x): a[x] for x in a}

def clean_basepaths_list(a):
    """Return a list with every element of *a* passed through clean_basepath()."""
    return [clean_basepath(x) for x in a]

# Handled renamed fields
def handle_renames(data):
    """Rename legacy siginfo keys in-place to their current names
    (basewhitelist -> basehash_ignore_vars, taskwhitelist -> taskhash_ignore_tasks)."""
    if 'basewhitelist' in data:
        data['basehash_ignore_vars'] = data['basewhitelist']
        del data['basewhitelist']
    if 'taskwhitelist' in data:
        data['taskhash_ignore_tasks'] = data['taskwhitelist']
        del data['taskwhitelist']


def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
    """Compare two zstd-compressed JSON siginfo files *a* and *b*.

    Returns a list of human-readable difference strings. *recursecb*, if
    callable, is invoked as recursecb(dep, hash_a, hash_b) to recurse into
    changed dependency hashes; *color* enables ANSI colours; *collapsed*
    produces a condensed report.
    """
    output = []

    colors = init_colors(color)
    def color_format(formatstr, **values):
        """
        Return colour formatted string.
        NOTE: call with the format string, not an already formatted string
        containing values (otherwise you could have trouble with { and }
        characters)
        """
        if not formatstr.endswith('{color_default}'):
            formatstr += '{color_default}'
        # In newer python 3 versions you can pass both of these directly,
        # but we only require 3.4 at the moment
        formatparams = {}
        formatparams.update(colors)
        formatparams.update(values)
        return formatstr.format(**formatparams)

    with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
        a_data = json.load(f, object_hook=SetDecoder)
    with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
        b_data = json.load(f, object_hook=SetDecoder)

    for data in [a_data, b_data]:
        handle_renames(data)

    def dict_diff(a, b, ignored_vars=frozenset()):
        # frozenset() default: never mutated here, and avoids the shared
        # mutable-default-argument pitfall of the previous set() default.
        sa = set(a.keys())
        sb = set(b.keys())
        common = sa & sb
        changed = set()
        for i in common:
            if a[i] != b[i] and i not in ignored_vars:
                changed.add(i)
        added = sb - sa
        removed = sa - sb
        return changed, added, removed

    def file_checksums_diff(a, b):
        from collections import Counter

        # Convert lists back to tuples
        a = [(f[0], f[1]) for f in a]
        b = [(f[0], f[1]) for f in b]

        # Compare lists, ensuring we can handle duplicate filenames if they exist
        removedcount = Counter(a)
        removedcount.subtract(b)
        addedcount = Counter(b)
        addedcount.subtract(a)
        added = []
        for x in b:
            if addedcount[x] > 0:
                addedcount[x] -= 1
                added.append(x)
        removed = []
        changed = []
        for x in a:
            if removedcount[x] > 0:
                removedcount[x] -= 1
                for y in added:
                    if y[0] == x[0]:
                        changed.append((x[0], x[1], y[1]))
                        added.remove(y)
                        break
                else:
                    removed.append(x)
        added = [x[0] for x in added]
        removed = [x[0] for x in removed]
        return changed, added, removed

    if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
        output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
        if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
            output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))

    if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
        output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
        if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
            output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))

    if a_data['taskdeps'] != b_data['taskdeps']:
        output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))

    if a_data['basehash'] != b_data['basehash'] and not collapsed:
        output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))

    # Variables in either side's ignore set are excluded from the dependency diff
    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
    if changed:
        for dep in sorted(changed):
            output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
    if added:
        for dep in sorted(added):
            output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
    if removed:
        for dep in sorted(removed):
            output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))


    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
    if changed:
        for dep in sorted(changed):
            oldval = a_data['varvals'][dep]
            newval = b_data['varvals'][dep]
            if newval and oldval and ('\n' in oldval or '\n' in newval):
                diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
                # Cut off the first two lines, since we aren't interested in
                # the old/new filename (they are blank anyway in this case)
                difflines = list(diff)[2:]
                if color:
                    # Add colour to diff output
                    for i, line in enumerate(difflines):
                        if line.startswith('+'):
                            line = color_format('{color_add}{line}', line=line)
                            difflines[i] = line
                        elif line.startswith('-'):
                            line = color_format('{color_remove}{line}', line=line)
                            difflines[i] = line
                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff='\n'.join(difflines)))
            elif newval and oldval and (' ' in oldval or ' ' in newval):
                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff=worddiff_str(oldval, newval, colors)))
            else:
                output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))

    if 'file_checksum_values' not in a_data:
        a_data['file_checksum_values'] = []
    if 'file_checksum_values' not in b_data:
        b_data['file_checksum_values'] = []

    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
    if changed:
        for f, old, new in changed:
            output.append(color_format("{color_title}Checksum for file %s changed{color_default} from %s to %s") % (f, old, new))
    if added:
        for f in added:
            output.append(color_format("{color_title}Dependency on checksum of file %s was added") % (f))
    if removed:
        for f in removed:
            output.append(color_format("{color_title}Dependency on checksum of file %s was removed") % (f))

    if 'runtaskdeps' not in a_data:
        a_data['runtaskdeps'] = {}
    if 'runtaskdeps' not in b_data:
        b_data['runtaskdeps'] = {}

    if not collapsed:
        if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
            changed = ["Number of task dependencies changed"]
        else:
            changed = []
            # NOTE(review): runtaskdeps defaults to {} above but is indexed by
            # position here, which assumes a list-shaped value in the siginfo
            # file -- confirm against the siginfo format in use.
            for idx, task in enumerate(a_data['runtaskdeps']):
                a = a_data['runtaskdeps'][idx]
                b = b_data['runtaskdeps'][idx]
                if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
                    changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b]))

        if changed:
            clean_a = clean_basepaths_list(a_data['runtaskdeps'])
            clean_b = clean_basepaths_list(b_data['runtaskdeps'])
            if clean_a != clean_b:
                output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
            else:
                output.append(color_format("{color_title}runtaskdeps changed:"))
            output.append("\n".join(changed))


    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = clean_basepaths(a_data['runtaskhashes'])
        b = clean_basepaths(b_data['runtaskhashes'])
        changed, added, removed = dict_diff(a, b)
        if added:
            for dep in sorted(added):
                bdep_found = False
                if removed:
                    for bdep in removed:
                        if b[dep] == a[bdep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                            bdep_found = True
                if not bdep_found:
                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
        if removed:
            for dep in sorted(removed):
                adep_found = False
                if added:
                    for adep in added:
                        if b[adep] == a[dep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                            adep_found = True
                if not adep_found:
                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
        if changed:
            for dep in sorted(changed):
                if not collapsed:
                    output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
                if callable(recursecb):
                    recout = recursecb(dep, a[dep], b[dep])
                    if recout:
                        if collapsed:
                            output.extend(recout)
                        else:
                            # If a dependent hash changed, might as well print the line above and then defer to the changes in
                            # that hash since in all likelyhood, they're the same changes this task also saw.
                            output = [output[-1]] + recout
                            break

    a_taint = a_data.get('taint', None)
    b_taint = b_data.get('taint', None)
    if a_taint != b_taint:
        if a_taint and a_taint.startswith('nostamp:'):
            a_taint = a_taint.replace('nostamp:', 'nostamp(uuid4):')
        if b_taint and b_taint.startswith('nostamp:'):
            b_taint = b_taint.replace('nostamp:', 'nostamp(uuid4):')
        output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint))

    return output


def calc_basehash(sigdata):
    """Recompute the base hash (sha256) from the task's own variable value
    plus the values of all its variable dependencies, as stored in *sigdata*."""
    task = sigdata['task']
    basedata = sigdata['varvals'][task]

    if basedata is None:
        basedata = ''

    alldeps = sigdata['taskdeps']
    for dep in alldeps:
        basedata = basedata + dep
        val = sigdata['varvals'][dep]
        if val is not None:
            basedata = basedata + str(val)

    return hashlib.sha256(basedata.encode("utf-8")).hexdigest()

def calc_taskhash(sigdata):
    """Recompute the task hash (sha256) from the base hash, dependent task
    hashes, file checksums and any taint recorded in *sigdata*."""
    data = sigdata['basehash']

    for dep in sigdata['runtaskdeps']:
        data = data + sigdata['runtaskhashes'][dep]

    for c in sigdata['file_checksum_values']:
        if c[1]:
            if "./" in c[0]:
                data = data + c[0]
            data = data + c[1]

    if 'taint' in sigdata:
        if 'nostamp:' in sigdata['taint']:
            # Strip the 'nostamp:' prefix; only the uuid part taints the hash
            data = data + sigdata['taint'][8:]
        else:
            data = data + sigdata['taint']

    return hashlib.sha256(data.encode("utf-8")).hexdigest()


def dump_sigfile(a):
    """Load the zstd-compressed JSON siginfo file *a* and return a list of
    human-readable lines describing its contents, including recomputed
    base and task hashes for cross-checking."""
    output = []

    with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
        a_data = json.load(f, object_hook=SetDecoder)

    handle_renames(a_data)

    output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))

    output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))

    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))

    output.append("basehash: %s" % (a_data['basehash']))

    for dep in sorted(a_data['gendeps']):
        output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))

    for dep in sorted(a_data['varvals']):
        output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))

    if 'runtaskdeps' in a_data:
        output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))

    if 'file_checksum_values' in a_data:
        output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))

    if 'runtaskhashes' in a_data:
        for dep in sorted(a_data['runtaskhashes']):
            output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))

    if 'taint' in a_data:
        if a_data['taint'].startswith('nostamp:'):
            msg = a_data['taint'].replace('nostamp:', 'nostamp(uuid4):')
        else:
            msg = a_data['taint']
        output.append("Tainted (by forced/invalidated task): %s" % msg)

    if 'task' in a_data:
        computed_basehash = calc_basehash(a_data)
        output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
    else:
        output.append("Unable to compute base hash")

    computed_taskhash = calc_taskhash(a_data)
    output.append("Computed task hash is %s" % computed_taskhash)

    return output