#
# Sanity check the user's setup for common misconfigurations
#

SANITY_REQUIRED_UTILITIES ?= "patch diffstat git bzip2 tar \
    gzip gawk chrpath wget cpio perl file which"

def bblayers_conf_file(d):
    return os.path.join(d.getVar('TOPDIR'), 'conf/bblayers.conf')

def sanity_conf_read(fn):
    with open(fn, 'r') as f:
        lines = f.readlines()
    return lines

def sanity_conf_find_line(pattern, lines):
    import re
    return next(((index, line)
        for index, line in enumerate(lines)
        if re.search(pattern, line)), (None, None))

def sanity_conf_update(fn, lines, version_var_name, new_version):
    index, line = sanity_conf_find_line(r"^%s" % version_var_name, lines)
    lines[index] = '%s = "%d"\n' % (version_var_name, new_version)
    with open(fn, "w") as f:
        f.write(''.join(lines))

# Functions added to this variable MUST throw a NotImplementedError exception unless
# they successfully changed the config version in the config file. Exceptions
# are used since exec_func doesn't handle return values.
BBLAYERS_CONF_UPDATE_FUNCS += " \
    conf/bblayers.conf:LCONF_VERSION:LAYER_CONF_VERSION:oecore_update_bblayers \
    conf/local.conf:CONF_VERSION:LOCALCONF_VERSION:oecore_update_localconf \
    conf/site.conf:SCONF_VERSION:SITE_CONF_VERSION:oecore_update_siteconf \
"
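# Each entry above is colon-separated:
#   <config file>:<variable holding the file's current version>:<variable holding the required version>:<update function>
# sanity_check_conffiles() below splits entries on ':' and runs the update function
# whenever the two version variables differ; the function must either update the
# file (and its version number) or raise NotImplementedError with advice for the user.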

SANITY_DIFF_TOOL ?= "meld"

SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample"
python oecore_update_localconf() {
    # Check we are using a valid local.conf
    current_conf = d.getVar('CONF_VERSION')
    conf_version = d.getVar('LOCALCONF_VERSION')

    failmsg = """Your version of local.conf was generated from an older/newer version of
local.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/local.conf ${SANITY_LOCALCONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}

SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample"
python oecore_update_siteconf() {
    # If we have a site.conf, check it's valid
    current_sconf = d.getVar('SCONF_VERSION')
    sconf_version = d.getVar('SITE_CONF_VERSION')

    failmsg = """Your version of site.conf was generated from an older version of
site.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/site.conf ${SANITY_SITECONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}

SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample"
python oecore_update_bblayers() {
    # bblayers.conf is out of date, so see if we can resolve that

    current_lconf = int(d.getVar('LCONF_VERSION'))
    lconf_version = int(d.getVar('LAYER_CONF_VERSION'))

    failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
Please compare your file against bblayers.conf.sample and merge any changes before continuing.

"${SANITY_DIFF_TOOL} conf/bblayers.conf ${SANITY_BBLAYERCONF_SAMPLE}"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    if not current_lconf:
        raise NotImplementedError(failmsg)

    lines = []

    if current_lconf < 4:
        raise NotImplementedError(failmsg)

    bblayers_fn = bblayers_conf_file(d)
    lines = sanity_conf_read(bblayers_fn)

    if current_lconf == 4 and lconf_version > 4:
        topdir_var = '$' + '{TOPDIR}'
        index, bbpath_line = sanity_conf_find_line('BBPATH', lines)
        if bbpath_line:
            start = bbpath_line.find('"')
            if start != -1 and (len(bbpath_line) != (start + 1)):
                if bbpath_line[start + 1] == '"':
                    lines[index] = (bbpath_line[:start + 1] +
                                    topdir_var + bbpath_line[start + 1:])
                else:
                    if not topdir_var in bbpath_line:
                        lines[index] = (bbpath_line[:start + 1] +
                                        topdir_var + ':' + bbpath_line[start + 1:])
            else:
                raise NotImplementedError(failmsg)
        else:
            index, bbfiles_line = sanity_conf_find_line('BBFILES', lines)
            if bbfiles_line:
                lines.insert(index, 'BBPATH = "' + topdir_var + '"\n')
            else:
                raise NotImplementedError(failmsg)

        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    elif current_lconf == 5 and lconf_version > 5:
        # Null update, to avoid issues with people switching between poky and other distros
        current_lconf = 6
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    elif current_lconf == 6 and lconf_version > 6:
        # Handle rename of meta-yocto -> meta-poky
        # This marks the start of separate version numbers but code is needed in OE-Core
        # for the migration, one last time.
        layers = d.getVar('BBLAYERS').split()
        layers = [ os.path.basename(path) for path in layers ]
        if 'meta-yocto' in layers:
            found = False
            while True:
                index, meta_yocto_line = sanity_conf_find_line(r'.*meta-yocto[\'"\s\n]', lines)
                if meta_yocto_line:
                    lines[index] = meta_yocto_line.replace('meta-yocto', 'meta-poky')
                    found = True
                else:
                    break
            if not found:
                raise NotImplementedError(failmsg)
            index, meta_yocto_line = sanity_conf_find_line('LCONF_VERSION.*\n', lines)
            if meta_yocto_line:
                lines[index] = 'POKY_BBLAYERS_CONF_VERSION = "1"\n'
            else:
                raise NotImplementedError(failmsg)
            with open(bblayers_fn, "w") as f:
                f.write(''.join(lines))
            bb.note("Your conf/bblayers.conf has been automatically updated.")
            return
        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    raise NotImplementedError(failmsg)
}

def raise_sanity_error(msg, d, network_error=False):
    if d.getVar("SANITY_USE_EVENTS") == "1":
        try:
            bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
        except TypeError:
            bb.event.fire(bb.event.SanityCheckFailed(msg), d)
        return

    bb.fatal(""" OE-core's config sanity checker detected a potential misconfiguration.
    Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
    Following is the list of potential problems / advisories:

    %s""" % msg)

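# check_toolchain_tune() below validates TUNE_FEATURES against the TUNEVALID and
# TUNECONFLICTS varflags set by the tune/arch include files. As an illustrative
# (not exhaustive) example, a tune file typically declares something like:
#   TUNEVALID[bigendian] = "Enable big-endian mode"
#   TUNECONFLICTS[n32] = "o32 n64"
# i.e. the flag name is the feature and the value is its description (TUNEVALID)
# or a space-separated list of conflicting features (TUNECONFLICTS).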
# Check a single tune for validity.
def check_toolchain_tune(data, tune, multilib):
    tune_errors = []
    if not tune:
        return "No tuning found for %s multilib." % multilib
    localdata = bb.data.createCopy(data)
    if multilib != "default":
        # Apply the overrides so we can look at the details.
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib
        localdata.setVar("OVERRIDES", overrides)
    bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
    features = (localdata.getVar("TUNE_FEATURES:tune-%s" % tune) or "").split()
    if not features:
        return "Tuning '%s' has no defined features, and cannot be used." % tune
    valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
    conflicts = localdata.getVarFlags('TUNECONFLICTS') or {}
    # [doc] is the documentation for the variable, not a real feature
    if 'doc' in valid_tunes:
        del valid_tunes['doc']
    if 'doc' in conflicts:
        del conflicts['doc']
    for feature in features:
        if feature in conflicts:
            for conflict in conflicts[feature].split():
                if conflict in features:
                    tune_errors.append("Feature '%s' conflicts with '%s'." %
                        (feature, conflict))
        if feature in valid_tunes:
            bb.debug(2, "  %s: %s" % (feature, valid_tunes[feature]))
        else:
            tune_errors.append("Feature '%s' is not defined." % feature)
    if tune_errors:
        return "Tuning '%s' has the following errors:\n" % tune + '\n'.join(tune_errors)

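# check_toolchain() below verifies DEFAULTTUNE and every multilib variant's tune.
# For reference (illustrative values, not requirements): with a configuration such
# as MULTILIBS = "multilib:lib32", MULTILIB_VARIANTS would contain "lib32" and the
# variant's tune is taken from DEFAULTTUNE:virtclass-multilib-lib32, which must
# differ from the default tune and from every other variant's tune.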
def check_toolchain(data):
    tune_error_set = []
    deftune = data.getVar("DEFAULTTUNE")
    tune_errors = check_toolchain_tune(data, deftune, 'default')
    if tune_errors:
        tune_error_set.append(tune_errors)

    multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()

    if multilibs:
        seen_libs = []
        seen_tunes = []
        for lib in multilibs:
            if lib in seen_libs:
                tune_error_set.append("The multilib '%s' appears more than once." % lib)
            else:
                seen_libs.append(lib)
            if not lib in global_multilibs:
                tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
            tune = data.getVar("DEFAULTTUNE:virtclass-multilib-%s" % lib)
            if tune in seen_tunes:
                tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
            else:
                seen_tunes.append(tune)
            if tune == deftune:
                tune_error_set.append("Multilib '%s' (%s) is also the default tuning." % (lib, deftune))
            else:
                tune_errors = check_toolchain_tune(data, tune, lib)
                if tune_errors:
                    tune_error_set.append(tune_errors)
    if tune_error_set:
        return "Toolchain tunings invalid:\n" + '\n'.join(tune_error_set) + "\n"

    return ""

def check_conf_exists(fn, data):
    bbpath = []
    fn = data.expand(fn)
    vbbpath = data.getVar("BBPATH", False)
    if vbbpath:
        bbpath += vbbpath.split(":")
    for p in bbpath:
        currname = os.path.join(data.expand(p), fn)
        if os.access(currname, os.R_OK):
            return True
    return False

def check_create_long_filename(filepath, pathname):
    import string, random
    testfile = os.path.join(filepath, ''.join(random.choice(string.ascii_letters) for x in range(200)))
    try:
        if not os.path.exists(filepath):
            bb.utils.mkdirhier(filepath)
        f = open(testfile, "w")
        f.close()
        os.remove(testfile)
    except IOError as e:
        import errno
        err, strerror = e.args
        if err == errno.ENAMETOOLONG:
            return "Failed to create a file with a long name in %s. Please use a filesystem that does not unreasonably limit filename length.\n" % pathname
        else:
            return "Failed to create a file in %s: %s.\n" % (pathname, strerror)
    except OSError as e:
        errno, strerror = e.args
        return "Failed to create %s directory in which to run long name sanity check: %s.\n" % (pathname, strerror)
    return ""

def check_path_length(filepath, pathname, limit):
    if len(filepath) > limit:
        return "The length of %s is longer than %s, this would cause unexpected errors, please use a shorter path.\n" % (pathname, limit)
    return ""

def get_filesystem_id(path):
    import subprocess
    try:
        return subprocess.check_output(["stat", "-f", "-c", "%t", path]).decode('utf-8').strip()
    except subprocess.CalledProcessError:
        bb.warn("Can't get filesystem id of: %s" % path)
        return None

# Check that the path isn't located on nfs.
def check_not_nfs(path, name):
    # The nfs' filesystem id is 6969
    if get_filesystem_id(path) == "6969":
        return "The %s: %s can't be located on nfs.\n" % (name, path)
    return ""

# Check that the path is on a case-sensitive file system
def check_case_sensitive(path, name):
    import tempfile
    with tempfile.NamedTemporaryFile(prefix='TmP', dir=path) as tmp_file:
        if os.path.exists(tmp_file.name.lower()):
            return "The %s (%s) can't be on a case-insensitive file system.\n" % (name, path)
        return ""

# Check that path isn't a broken symlink
def check_symlink(lnk, data):
    if os.path.islink(lnk) and not os.path.exists(lnk):
        raise_sanity_error("%s is a broken symlink." % lnk, data)

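# check_connectivity() below is driven entirely by configuration; for example a
# distro or local.conf may set (illustrative value taken from the message below):
#   CONNECTIVITY_CHECK_URIS = "https://www.example.com/"
# and may override CONNECTIVITY_CHECK_MSG to point users at a support channel.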
def check_connectivity(d):
    # URIs to check can be set in the CONNECTIVITY_CHECK_URIS variable
    # using the same syntax as for SRC_URI. If the variable is not set
    # the check is skipped
    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
    retval = ""

    bbn = d.getVar('BB_NO_NETWORK')
    if bbn not in (None, '0', '1'):
        return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn

    # Only check connectivity if network enabled and the
    # CONNECTIVITY_CHECK_URIS are set
    network_enabled = not (bbn == '1')
    check_enabled = len(test_uris)
    if check_enabled and network_enabled:
        # Take a copy of the data store and unset MIRRORS and PREMIRRORS
        data = bb.data.createCopy(d)
        data.delVar('PREMIRRORS')
        data.delVar('MIRRORS')
        try:
            fetcher = bb.fetch2.Fetch(test_uris, data)
            fetcher.checkstatus()
        except Exception as err:
            # Allow the message to be configured so that users can be
            # pointed to a support mechanism.
            msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
            if len(msg) == 0:
                msg = "%s.\n" % err
                msg += "    Please ensure your host's network is configured correctly.\n"
                msg += "    Please ensure CONNECTIVITY_CHECK_URIS is correct and specified URIs are available.\n"
                msg += "    If your ISP or network is blocking the above URL,\n"
                msg += "    try with another domain name, for example by setting:\n"
                msg += "    CONNECTIVITY_CHECK_URIS = \"https://www.example.com/\"\n"
                msg += "    You could also set BB_NO_NETWORK = \"1\" to disable network\n"
                msg += "    access if all required sources are on local disk.\n"
            retval = msg

    return retval

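# check_supported_distro() below compares the host distro against the
# SANITY_TESTED_DISTROS list, whose entries are fnmatch patterns separated by
# literal "\n" sequences, e.g. (illustrative, normally set by the distro config):
#   SANITY_TESTED_DISTROS ?= "ubuntu-22.04 \n fedora-38 \n"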
def check_supported_distro(sanity_data):
    from fnmatch import fnmatch

    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
    if not tested_distros:
        return

    try:
        distro = oe.lsb.distro_identifier()
    except Exception:
        distro = None

    if not distro:
        bb.warn('Host distribution could not be determined; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.')

    for supported in [x.strip() for x in tested_distros.split('\\n')]:
        if fnmatch(distro, supported):
            return

    bb.warn('Host distribution "%s" has not been validated with this version of the build system; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.' % distro)

# Checks we should only make if MACHINE is set correctly
def check_sanity_validmachine(sanity_data):
    messages = ""

    # Check TUNE_ARCH is set
    if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
        messages = messages + 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n'

    # Check TARGET_OS is set
    if sanity_data.getVar('TARGET_OS') == 'INVALID':
        messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'

    # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS
    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
    tunepkg = sanity_data.getVar('TUNE_PKGARCH')
    defaulttune = sanity_data.getVar('DEFAULTTUNE')
    tunefound = False
    seen = {}
    dups = []

    for pa in pkgarchs.split():
        if seen.get(pa, 0) == 1:
            dups.append(pa)
        else:
            seen[pa] = 1
        if pa == tunepkg:
            tunefound = True

    if len(dups):
        messages = messages + "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups)

    if tunefound == False:
        messages = messages + "Error, the PACKAGE_ARCHS variable (%s) for DEFAULTTUNE (%s) does not contain TUNE_PKGARCH (%s)." % (pkgarchs, defaulttune, tunepkg)

    return messages

# Patch before 2.7 can't handle all the features in git-style diffs. Some
# patches may incorrectly apply, and others won't apply at all.
def check_patch_version(sanity_data):
    import re, subprocess

    try:
        result = subprocess.check_output(["patch", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
        version = re.search(r"[0-9.]+", result.splitlines()[0]).group()
        if bb.utils.vercmp_string_op(version, "2.7", "<"):
            return "Your version of patch is older than 2.7 and has bugs which will break builds. Please install a newer version of patch.\n"
        else:
            return None
    except subprocess.CalledProcessError as e:
        return "Unable to execute patch --version, exit code %d:\n%s\n" % (e.returncode, e.output)

# Glibc needs make 4.0 or later, we may as well match at this point
def check_make_version(sanity_data):
    import subprocess

    try:
        result = subprocess.check_output(['make', '--version'], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[2]
    if bb.utils.vercmp_string_op(version, "4.0", "<"):
        return "Please install a make version of 4.0 or later.\n"

    if bb.utils.vercmp_string_op(version, "4.2.1", "=="):
        distro = oe.lsb.distro_identifier()
        if "ubuntu" in distro or "debian" in distro or "linuxmint" in distro:
            return None
        return "make version 4.2.1 is known to have issues on CentOS/OpenSUSE and other non-Ubuntu systems. Please use a buildtools-make-tarball or a newer version of make.\n"
    return None


# Check if we're running on WSL (Windows Subsystem for Linux).
# WSLv1 is known not to work but WSLv2 should work properly as
# long as the VHDX file is optimized often, let the user know
# upfront.
# More information on installing WSLv2 at:
# https://docs.microsoft.com/en-us/windows/wsl/wsl2-install
def check_wsl(d):
    with open("/proc/version", "r") as f:
        verdata = f.readlines()
    for l in verdata:
        if "Microsoft" in l:
            return "OpenEmbedded doesn't work under WSLv1, please upgrade to WSLv2 if you want to run builds on Windows"
        elif "microsoft" in l:
            bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
    return None

# Require at least gcc version 7.5.
#
# This can be fixed on CentOS-7 with devtoolset-6+
# https://www.softwarecollections.org/en/scls/rhscl/devtoolset-6/
#
# A less invasive fix is with scripts/install-buildtools (or with user
# built buildtools-extended-tarball)
#
def check_gcc_version(sanity_data):
    import subprocess

    build_cc, version = oe.utils.get_host_compiler_version(sanity_data)
    if build_cc.strip() == "gcc":
        if bb.utils.vercmp_string_op(version, "7.5", "<"):
            return "Your version of gcc is older than 7.5 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
    return None

# Tar version 1.24 and onwards handle overwriting symlinks correctly
# but earlier versions do not; this needs to work properly for sstate
# Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled
def check_tar_version(sanity_data):
    import subprocess
    try:
        result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[3]
    if bb.utils.vercmp_string_op(version, "1.28", "<"):
        return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"

    try:
        result = subprocess.check_output(["tar", "--help"], stderr=subprocess.STDOUT).decode('utf-8')
        if "--xattrs" not in result:
            return "Your tar doesn't support --xattrs, please use GNU tar.\n"
    except subprocess.CalledProcessError as e:
        return "Unable to execute tar --help, exit code %d\n%s\n" % (e.returncode, e.output)

    return None

# We use git parameters and functionality only found in 1.7.8 or later
# The kernel tools assume git >= 1.8.3.1 (verified needed > 1.7.9.5) see #6162
# The git fetcher also had workarounds for git < 1.7.9.2 which we've dropped
def check_git_version(sanity_data):
    import subprocess
    try:
        result = subprocess.check_output(["git", "--version"], stderr=subprocess.DEVNULL).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute git --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[2]
    if bb.utils.vercmp_string_op(version, "1.8.3.1", "<"):
        return "Your version of git is older than 1.8.3.1 and has bugs which will break builds. Please install a newer version of git.\n"
    return None

# Check the required perl modules which may not be installed by default
def check_perl_modules(sanity_data):
    import subprocess
    ret = ""
    modules = ( "Text::ParseWords", "Thread::Queue", "Data::Dumper" )
    errresult = ''
    for m in modules:
        try:
            subprocess.check_output(["perl", "-e", "use %s" % m])
        except subprocess.CalledProcessError as e:
            errresult += bytes.decode(e.output)
            ret += "%s " % m
    if ret:
        return "Required perl module(s) not found: %s\n\n%s\n" % (ret, errresult)
    return None

def sanity_check_conffiles(d):
    funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split()
    for func in funcs:
        conffile, current_version, required_version, func = func.split(":")
        if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \
                d.getVar(current_version) != d.getVar(required_version):
            try:
                bb.build.exec_func(func, d)
            except NotImplementedError as e:
                bb.fatal(str(e))
            d.setVar("BB_INVALIDCONF", True)

def drop_v14_cross_builds(d):
    import glob
    indexes = glob.glob(d.expand("${SSTATE_MANIFESTS}/index-${BUILD_ARCH}_*"))
    for i in indexes:
        with open(i, "r") as f:
            lines = f.readlines()
            for l in reversed(lines):
                try:
                    (stamp, manifest, workdir) = l.split()
                except ValueError:
                    bb.fatal("Invalid line '%s' in sstate manifest '%s'" % (l, i))
                for m in glob.glob(manifest + ".*"):
                    if m.endswith(".postrm"):
                        continue
                    sstate_clean_manifest(m, d)
                bb.utils.remove(stamp + "*")
                bb.utils.remove(workdir, recurse = True)

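# OELAYOUT_ABI is the TMPDIR layout version the current metadata expects, while
# SANITY_ABIFILE records the version the existing TMPDIR was created with.
# sanity_handle_abichanges() either migrates between the two (as with the v14 ->
# v15 cross manifest cleanup above) or asks the user to wipe TMPDIR when no
# automatic conversion is possible.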
def sanity_handle_abichanges(status, d):
    #
    # Check the 'ABI' of TMPDIR
    #
    import subprocess

    current_abi = d.getVar('OELAYOUT_ABI')
    abifile = d.getVar('SANITY_ABIFILE')
    if os.path.exists(abifile):
        with open(abifile, "r") as f:
            abi = f.read().strip()
        if not abi.isdigit():
            with open(abifile, "w") as f:
                f.write(current_abi)
        elif int(abi) <= 11 and current_abi == "12":
            status.addresult("The layout of TMPDIR changed for Recipe Specific Sysroots.\nConversion doesn't make sense and this change will rebuild everything so please delete TMPDIR (%s).\n" % d.getVar("TMPDIR"))
        elif int(abi) <= 13 and current_abi == "14":
            status.addresult("TMPDIR changed to include path filtering from the pseudo database.\nIt is recommended to use a clean TMPDIR with the new pseudo path filtering so TMPDIR (%s) would need to be removed to continue.\n" % d.getVar("TMPDIR"))
        elif int(abi) == 14 and current_abi == "15":
            drop_v14_cross_builds(d)
            with open(abifile, "w") as f:
                f.write(current_abi)
        elif (abi != current_abi):
            # Code to convert from one ABI to another could go here if possible.
            status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi))
    else:
        with open(abifile, "w") as f:
            f.write(current_abi)

def check_sanity_sstate_dir_change(sstate_dir, data):
    # Sanity checks to be done when the value of SSTATE_DIR changes

    # Check that SSTATE_DIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    testmsg = ""
    if sstate_dir != "":
        testmsg = check_create_long_filename(sstate_dir, "SSTATE_DIR")
        # If we don't have permissions to SSTATE_DIR, suggest the user set it as an SSTATE_MIRRORS
        try:
            err = testmsg.split(': ')[1].strip()
            if err == "Permission denied.":
                testmsg = testmsg + "You could try using %s in SSTATE_MIRRORS rather than as an SSTATE_CACHE.\n" % (sstate_dir)
        except IndexError:
            pass
    return testmsg

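# The checks below run external tools, so check_sanity() only re-runs this
# function when SANITY_VERSION or NATIVELSBSTRING differs from the values cached
# in ${TOPDIR}/cache/sanity_info from the previous run.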
def check_sanity_version_change(status, d):
    # Sanity checks to be done when SANITY_VERSION or NATIVELSBSTRING changes
    # In other words, these tests run once in a given build directory and then
    # never again until the sanity version or host distribution id/version changes.

    # Check the python install is complete. Examples that are often removed in
    # minimal installations: glib-2.0-native requires xml.parsers.expat and icu
    # requires distutils.sysconfig.
    try:
        import xml.parsers.expat
        import distutils.sysconfig
    except ImportError as e:
        status.addresult('Your Python 3 is not a full install. Please install the module %s (see the Getting Started guide for further information).\n' % e.name)

    status.addresult(check_gcc_version(d))
    status.addresult(check_make_version(d))
    status.addresult(check_patch_version(d))
    status.addresult(check_tar_version(d))
    status.addresult(check_git_version(d))
    status.addresult(check_perl_modules(d))
    status.addresult(check_wsl(d))

    missing = ""

    if not check_app_exists("${MAKE}", d):
        missing = missing + "GNU make,"

    if not check_app_exists('${BUILD_CC}', d):
        missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC")

    if not check_app_exists('${BUILD_CXX}', d):
        missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX")

    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')

    for util in required_utilities.split():
        if not check_app_exists(util, d):
            missing = missing + "%s," % util

    if missing:
        missing = missing.rstrip(',')
        status.addresult("Please install the following missing utilities: %s\n" % missing)

    assume_provided = d.getVar('ASSUME_PROVIDED').split()
    # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
    if "diffstat-native" not in assume_provided:
        status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')

    # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    import stat
    tmpdir = d.getVar('TMPDIR')
    status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
    tmpdirmode = os.stat(tmpdir).st_mode
    if (tmpdirmode & stat.S_ISGID):
        status.addresult("TMPDIR is setgid, please don't build in a setgid directory")
    if (tmpdirmode & stat.S_ISUID):
        status.addresult("TMPDIR is setuid, please don't build in a setuid directory")

    # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    workdir = d.getVar('WORKDIR', expand=True)
    for i in pseudoignorepaths:
        if i and workdir.startswith(i):
            status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n")

    # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}"
    pseudocontroldir = d.expand(pseudo_control_dir).split(",")
    for i in pseudoignorepaths:
        for j in pseudocontroldir:
            if i and j:
                if j.startswith(i):
                    status.addresult("A path included in PSEUDO_IGNORE_PATHS " + str(i) + " and the path " + str(j) + " overlap and this will break pseudo permission and ownership tracking. Please set the path " + str(j) + " to a different directory which does not overlap with pseudo controlled directories. \n")

    # Some third-party software apparently relies on chmod etc. being suid root (!!)
    import stat
    suid_check_bins = "chown chmod mknod".split()
    for bin_cmd in suid_check_bins:
        bin_path = bb.utils.which(os.environ["PATH"], bin_cmd)
        if bin_path:
            bin_stat = os.stat(bin_path)
            if bin_stat.st_uid == 0 and bin_stat.st_mode & stat.S_ISUID:
                status.addresult('%s has the setuid bit set. This interferes with pseudo and may cause other issues that break the build process.\n' % bin_path)

    # Check that we can fetch from various network transports
    netcheck = check_connectivity(d)
    status.addresult(netcheck)
    if netcheck:
        status.network_error = True

    nolibs = d.getVar('NO32LIBS')
    if not nolibs:
        lib32path = '/lib'
        if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
            lib32path = '/lib32'

        if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
            status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")

    bbpaths = d.getVar('BBPATH').split(":")
    if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
        status.addresult("BBPATH references the current directory, either through " \
                         "an empty entry, a './' or a '.'.\n\t This is unsafe and means your " \
                         "layer configuration is adding empty elements to BBPATH.\n\t " \
                         "Please check your layer.conf files and other BBPATH " \
                         "settings to remove the current working directory " \
                         "references.\n" \
                         "Parsed BBPATH is" + str(bbpaths))

    oes_bb_conf = d.getVar('OES_BITBAKE_CONF')
    if not oes_bb_conf:
        status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')

    # The length of TMPDIR can't be longer than 410
    status.addresult(check_path_length(tmpdir, "TMPDIR", 410))

    # Check that TMPDIR isn't located on nfs
    status.addresult(check_not_nfs(tmpdir, "TMPDIR"))

    # Check for case-insensitive file systems (such as Linux in Docker on
    # macOS with default HFS+ file system)
    status.addresult(check_case_sensitive(tmpdir, "TMPDIR"))

def sanity_check_locale(d):
    """
    Currently bitbake switches locale to en_US.UTF-8 so check that this locale actually exists.
    """
    import locale
    try:
        locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
    except locale.Error:
        raise_sanity_error("Your system needs to support the en_US.UTF-8 locale.", d)

def check_sanity_everybuild(status, d):
    import os, stat
    # Sanity tests which test the user's environment and so need to run at each build
    # (or are so cheap it makes sense to always run them).

    if 0 == os.getuid():
        raise_sanity_error("Do not use Bitbake as root.", d)

    # Check the Python version, we now have a minimum of Python 3.6
    import sys
    if sys.hexversion < 0x030600F0:
        status.addresult('The system requires at least Python 3.6 to run. Please update your Python interpreter.\n')

    # Check the bitbake version meets minimum requirements
    minversion = d.getVar('BB_MIN_VERSION')
    if bb.utils.vercmp_string_op(bb.__version__, minversion, "<"):
        status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))

    sanity_check_locale(d)

    paths = d.getVar('PATH').split(":")
    if "." in paths or "./" in paths or "" in paths:
        status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")

    # Check if bitbake is present in the PATH environment variable
    bb_check = bb.utils.which(d.getVar('PATH'), 'bitbake')
    if not bb_check:
        bb.warn("bitbake binary is not found in PATH, did you source the script?")

    # Check whether the 'inherit' directive is used in a conf file, where the
    # uppercase INHERIT is expected instead
    inherit = d.getVar('inherit')
    if inherit:
        status.addresult("Please don't use the inherit directive in your local.conf. The directive is only for use in classes and recipes to inherit bbclasses. In configuration files, use INHERIT instead.\n")

    # Check that the DISTRO is valid, if set
    # need to take into account DISTRO renaming DISTRO
    distro = d.getVar('DISTRO')
    if distro and distro != "nodistro":
        if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))

    # Check that these variables don't use tilde-expansion as we don't do that
    for v in ("TMPDIR", "DL_DIR", "SSTATE_DIR"):
        if d.getVar(v).startswith("~"):
            status.addresult("%s uses ~ but Bitbake will not expand this, use an absolute path or variables." % v)

    # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
    # set, since so much relies on it being set.
    dldir = d.getVar('DL_DIR')
    if not dldir:
        status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
    if os.path.exists(dldir) and not os.access(dldir, os.W_OK):
        status.addresult("DL_DIR: %s exists but you do not appear to have write access to it. \n" % dldir)
    check_symlink(dldir, d)

    # Check that the MACHINE is valid, if it is set
    machinevalid = True
    if d.getVar('MACHINE'):
        if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
            status.addresult('MACHINE=%s is invalid. Please set a valid MACHINE in your local.conf, environment or other configuration file.\n' % (d.getVar('MACHINE')))
            machinevalid = False
        else:
            status.addresult(check_sanity_validmachine(d))
    else:
        status.addresult('Please set a MACHINE in your local.conf or environment\n')
        machinevalid = False
    if machinevalid:
        status.addresult(check_toolchain(d))

    # Check that the SDKMACHINE is valid, if it is set
    if d.getVar('SDKMACHINE'):
        if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
            status.addresult('Specified SDKMACHINE value is not valid\n')
        elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
            status.addresult('SDKMACHINE is set, but SDK_ARCH has not been changed as a result - SDKMACHINE may have been set too late (e.g. in the distro configuration)\n')

    # If SDK_VENDOR looks like "-my-sdk" then the triples are badly formed so fail early
    sdkvendor = d.getVar("SDK_VENDOR")
    if not (sdkvendor.startswith("-") and sdkvendor.count("-") == 1):
        status.addresult("SDK_VENDOR should be of the form '-foosdk' with a single dash; found '%s'\n" % sdkvendor)

    check_supported_distro(d)

    omask = os.umask(0o022)
    if omask & 0o755:
        status.addresult("Please use a umask which allows a+rx and u+rwx\n")
    os.umask(omask)

    if d.getVar('TARGET_ARCH') == "arm":
        # This path is no longer user-readable in modern (very recent) Linux
        try:
            if os.path.exists("/proc/sys/vm/mmap_min_addr"):
                f = open("/proc/sys/vm/mmap_min_addr", "r")
                try:
                    if (int(f.read().strip()) > 65536):
                        status.addresult("/proc/sys/vm/mmap_min_addr is not <= 65536. This will cause problems with qemu so please fix the value (as root).\n\nTo fix this in later reboots, set vm.mmap_min_addr = 65536 in /etc/sysctl.conf.\n")
                finally:
                    f.close()
        except:
            pass

    for checkdir in ['COREBASE', 'TMPDIR']:
        val = d.getVar(checkdir)
        if val.find('..') != -1:
            status.addresult("Error, you have '..' in your %s directory path. Please ensure the variable contains an absolute path as this can break some recipe builds in obtuse ways." % checkdir)
        if val.find('+') != -1:
            status.addresult("Error, you have an invalid character (+) in your %s directory path. Please move the installation to a directory which doesn't include any + characters." % checkdir)
        if val.find('@') != -1:
            status.addresult("Error, you have an invalid character (@) in your %s directory path. Please move the installation to a directory which doesn't include any @ characters." % checkdir)
        if val.find(' ') != -1:
            status.addresult("Error, you have a space in your %s directory path. Please move the installation to a directory which doesn't include a space since autotools doesn't support this." % checkdir)
        if val.find('%') != -1:
            status.addresult("Error, you have an invalid character (%%) in your %s directory path which causes problems with python string formatting. Please move the installation to a directory which doesn't include any %% characters." % checkdir)

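    # MIRRORS/PREMIRRORS/SSTATE_MIRRORS entries are whitespace-separated
    # "<scheme regex> <replacement URI>" pairs; an illustrative (not prescriptive)
    # local.conf entry would be:
    #   PREMIRRORS:prepend = "git://.*/.* http://downloads.example.com/mirror/ "
    # The loop below checks the pairing, the scheme regex and the mirror protocol,
    # plus broken symlinks for file:// mirrors.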
    # Check the format of MIRRORS, PREMIRRORS and SSTATE_MIRRORS
    import re
    mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS']
    protocols = ['http', 'ftp', 'file', 'https', \
                 'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
                 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3', 'az', 'ftps', 'crate']
    for mirror_var in mirror_vars:
        mirrors = (d.getVar(mirror_var) or '').replace('\\n', ' ').split()

        # Split into pairs
        if len(mirrors) % 2 != 0:
            bb.warn('Invalid mirror variable value for %s: %s, should contain paired members.' % (mirror_var, str(mirrors)))
            continue
        mirrors = list(zip(*[iter(mirrors)]*2))

        for mirror_entry in mirrors:
            pattern, mirror = mirror_entry

            decoded = bb.fetch2.decodeurl(pattern)
            try:
                pattern_scheme = re.compile(decoded[0])
            except re.error as exc:
                bb.warn('Invalid scheme regex (%s) in %s; %s' % (pattern, mirror_var, mirror_entry))
                continue

            if not any(pattern_scheme.match(protocol) for protocol in protocols):
                bb.warn('Invalid protocol (%s) in %s: %s' % (decoded[0], mirror_var, mirror_entry))
                continue

            if not any(mirror.startswith(protocol + '://') for protocol in protocols):
                bb.warn('Invalid protocol in %s: %s' % (mirror_var, mirror_entry))
                continue

            if mirror.startswith('file://'):
                import urllib
                check_symlink(urllib.parse.urlparse(mirror).path, d)
                # SSTATE_MIRRORS entries can end with a /PATH string
                if mirror.endswith('/PATH'):
                    # remove /PATH$ from SSTATE_MIRROR to get a working
                    # base directory path
                    mirror_base = urllib.parse.urlparse(mirror[:-1*len('/PATH')]).path
                    check_symlink(mirror_base, d)

    # Check sstate mirrors aren't being used with a local hash server and no remote
    hashserv = d.getVar("BB_HASHSERVE")
    if d.getVar("SSTATE_MIRRORS") and hashserv and hashserv.startswith("unix://") and not d.getVar("BB_HASHSERVE_UPSTREAM"):
        bb.warn("You are using a local hash equivalence server but have configured an sstate mirror. This will likely mean no sstate will match from the mirror. You may wish to disable the hash equivalence use (BB_HASHSERVE), or use a hash equivalence server alongside the sstate mirror.")

    # Check that TMPDIR hasn't changed location since the last time we were run
    tmpdir = d.getVar('TMPDIR')
    checkfile = os.path.join(tmpdir, "saved_tmpdir")
    if os.path.exists(checkfile):
        with open(checkfile, "r") as f:
            saved_tmpdir = f.read().strip()
        if (saved_tmpdir != tmpdir):
            status.addresult("Error, TMPDIR has changed location. You need to either move it back to %s or delete it and rebuild\n" % saved_tmpdir)
    else:
        bb.utils.mkdirhier(tmpdir)
        # Remove setuid, setgid and sticky bits from TMPDIR
        try:
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISUID)
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISGID)
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISVTX)
        except OSError as exc:
            bb.warn("Unable to chmod TMPDIR: %s" % exc)
        with open(checkfile, "w") as f:
            f.write(tmpdir)

    # If /bin/sh is a symlink, check that it points to dash or bash
    if os.path.islink('/bin/sh'):
        real_sh = os.path.realpath('/bin/sh')
        # Due to update-alternatives, the shell name may take various
        # forms, such as /bin/dash, /bin/bash, /bin/bash.bash ...
        if '/dash' not in real_sh and '/bash' not in real_sh:
            status.addresult("Error, /bin/sh links to %s, must be dash or bash\n" % real_sh)

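# check_sanity() is the entry point: it is invoked from check_sanity_eventhandler
# (bottom of this file) when bitbake fires bb.event.SanityCheck, runs the
# per-build and cached checks above, and reports any failures through
# raise_sanity_error() either as events or as a fatal error.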
def check_sanity(sanity_data):
    class SanityStatus(object):
        def __init__(self):
            self.messages = ""
            self.network_error = False

        def addresult(self, message):
            if message:
                self.messages = self.messages + message

    status = SanityStatus()

    tmpdir = sanity_data.getVar('TMPDIR')
    sstate_dir = sanity_data.getVar('SSTATE_DIR')

    check_symlink(sstate_dir, sanity_data)

    # Check saved sanity info
    last_sanity_version = 0
    last_tmpdir = ""
    last_sstate_dir = ""
    last_nativelsbstr = ""
    sanityverfile = sanity_data.expand("${TOPDIR}/cache/sanity_info")
    if os.path.exists(sanityverfile):
        with open(sanityverfile, 'r') as f:
            for line in f:
                if line.startswith('SANITY_VERSION'):
                    last_sanity_version = int(line.split()[1])
                if line.startswith('TMPDIR'):
                    last_tmpdir = line.split()[1]
                if line.startswith('SSTATE_DIR'):
                    last_sstate_dir = line.split()[1]
                if line.startswith('NATIVELSBSTRING'):
                    last_nativelsbstr = line.split()[1]

    check_sanity_everybuild(status, sanity_data)

    sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
    network_error = False
    # NATIVELSBSTRING var may have been overridden with "universal", so
    # get actual host distribution id and version
    nativelsbstr = lsb_distro_identifier(sanity_data)
    if last_sanity_version < sanity_version or last_nativelsbstr != nativelsbstr:
        check_sanity_version_change(status, sanity_data)
        status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))
    else:
        if last_sstate_dir != sstate_dir:
            status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))

    if os.path.exists(os.path.dirname(sanityverfile)) and not status.messages:
        with open(sanityverfile, 'w') as f:
            f.write("SANITY_VERSION %s\n" % sanity_version)
            f.write("TMPDIR %s\n" % tmpdir)
            f.write("SSTATE_DIR %s\n" % sstate_dir)
            f.write("NATIVELSBSTRING %s\n" % nativelsbstr)

    sanity_handle_abichanges(status, sanity_data)

    if status.messages != "":
        raise_sanity_error(sanity_data.expand(status.messages), sanity_data, status.network_error)

addhandler config_reparse_eventhandler
config_reparse_eventhandler[eventmask] = "bb.event.ConfigParsed"
python config_reparse_eventhandler() {
    sanity_check_conffiles(e.data)
}

addhandler check_sanity_eventhandler
check_sanity_eventhandler[eventmask] = "bb.event.SanityCheck bb.event.NetworkTest"
python check_sanity_eventhandler() {
    if bb.event.getName(e) == "SanityCheck":
        sanity_data = bb.data.createCopy(e.data)
        check_sanity(sanity_data)
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
        bb.event.fire(bb.event.SanityCheckPassed(), e.data)
    elif bb.event.getName(e) == "NetworkTest":
        sanity_data = bb.data.createCopy(e.data)
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
        bb.event.fire(bb.event.NetworkTestFailed() if check_connectivity(sanity_data) else bb.event.NetworkTestPassed(), e.data)

    return
}