1BB_DEFAULT_TASK ?= "build" 2CLASSOVERRIDE ?= "class-target" 3 4inherit patch 5inherit staging 6 7inherit mirrors 8inherit utils 9inherit utility-tasks 10inherit metadata_scm 11inherit logging 12 13OE_EXTRA_IMPORTS ?= "" 14 15OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license oe.qa oe.reproducible oe.rust ${OE_EXTRA_IMPORTS}" 16OE_IMPORTS[type] = "list" 17 18PACKAGECONFIG_CONFARGS ??= "" 19 20def oe_import(d): 21 import sys 22 23 bbpath = [os.path.join(dir, "lib") for dir in d.getVar("BBPATH").split(":")] 24 sys.path[0:0] = [dir for dir in bbpath if dir not in sys.path] 25 26 import oe.data 27 for toimport in oe.data.typed_value("OE_IMPORTS", d): 28 try: 29 # Make a python object accessible from the metadata 30 bb.utils._context[toimport.split(".", 1)[0]] = __import__(toimport) 31 except AttributeError as e: 32 bb.error("Error importing OE modules: %s" % str(e)) 33 return "" 34 35# We need the oe module name space early (before INHERITs get added) 36OE_IMPORTED := "${@oe_import(d)}" 37 38def lsb_distro_identifier(d): 39 adjust = d.getVar('LSB_DISTRO_ADJUST') 40 adjust_func = None 41 if adjust: 42 try: 43 adjust_func = globals()[adjust] 44 except KeyError: 45 pass 46 return oe.lsb.distro_identifier(adjust_func) 47 48die() { 49 bbfatal_log "$*" 50} 51 52oe_runmake_call() { 53 bbnote ${MAKE} ${EXTRA_OEMAKE} "$@" 54 ${MAKE} ${EXTRA_OEMAKE} "$@" 55} 56 57oe_runmake() { 58 oe_runmake_call "$@" || die "oe_runmake failed" 59} 60 61 62def get_base_dep(d): 63 if d.getVar('INHIBIT_DEFAULT_DEPS', False): 64 return "" 65 return "${BASE_DEFAULT_DEPS}" 66 67BASE_DEFAULT_DEPS = "virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}compilerlibs virtual/libc" 68 69BASEDEPENDS = "" 70BASEDEPENDS:class-target = "${@get_base_dep(d)}" 71BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}" 72 73DEPENDS:prepend="${BASEDEPENDS} " 74 75FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}" 76# THISDIR only works properly with imediate expansion as it has to run 77# in the context of the location its used (:=) 78THISDIR = "${@os.path.dirname(d.getVar('FILE'))}" 79 80def extra_path_elements(d): 81 path = "" 82 elements = (d.getVar('EXTRANATIVEPATH') or "").split() 83 for e in elements: 84 path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":" 85 return path 86 87PATH:prepend = "${@extra_path_elements(d)}" 88 89def get_lic_checksum_file_list(d): 90 filelist = [] 91 lic_files = d.getVar("LIC_FILES_CHKSUM") or '' 92 tmpdir = d.getVar("TMPDIR") 93 s = d.getVar("S") 94 b = d.getVar("B") 95 workdir = d.getVar("WORKDIR") 96 97 urls = lic_files.split() 98 for url in urls: 99 # We only care about items that are absolute paths since 100 # any others should be covered by SRC_URI. 101 try: 102 (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url) 103 if method != "file" or not path: 104 raise bb.fetch.MalformedUrl(url) 105 106 if path[0] == '/': 107 if path.startswith((tmpdir, s, b, workdir)): 108 continue 109 filelist.append(path + ":" + str(os.path.exists(path))) 110 except bb.fetch.MalformedUrl: 111 bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url) 112 return " ".join(filelist) 113 114def setup_hosttools_dir(dest, toolsvar, d, fatal=True): 115 tools = d.getVar(toolsvar).split() 116 origbbenv = d.getVar("BB_ORIGENV", False) 117 path = origbbenv.getVar("PATH") 118 # Need to ignore our own scripts directories to avoid circular links 119 for p in path.split(":"): 120 if p.endswith("/scripts"): 121 path = path.replace(p, "/ignoreme") 122 bb.utils.mkdirhier(dest) 123 notfound = [] 124 for tool in tools: 125 desttool = os.path.join(dest, tool) 126 if not os.path.exists(desttool): 127 # clean up dead symlink 128 if os.path.islink(desttool): 129 os.unlink(desttool) 130 srctool = bb.utils.which(path, tool, executable=True) 131 # gcc/g++ may link to ccache on some hosts, e.g., 132 # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc) 133 # would return /usr/local/bin/ccache/gcc, but what we need is 134 # /usr/bin/gcc, this code can check and fix that. 135 if os.path.islink(srctool) and os.path.basename(os.readlink(srctool)) == 'ccache': 136 srctool = bb.utils.which(path, tool, executable=True, direction=1) 137 if srctool: 138 os.symlink(srctool, desttool) 139 else: 140 notfound.append(tool) 141 142 if notfound and fatal: 143 bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound)) 144 145addtask fetch 146do_fetch[dirs] = "${DL_DIR}" 147do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}" 148do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}" 149do_fetch[vardeps] += "SRCREV" 150do_fetch[network] = "1" 151python base_do_fetch() { 152 153 src_uri = (d.getVar('SRC_URI') or "").split() 154 if not src_uri: 155 return 156 157 try: 158 fetcher = bb.fetch2.Fetch(src_uri, d) 159 fetcher.download() 160 except bb.fetch2.BBFetchException as e: 161 bb.fatal("Bitbake Fetcher Error: " + repr(e)) 162} 163 164addtask unpack after do_fetch 165do_unpack[dirs] = "${WORKDIR}" 166 167do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}" 168 169python base_do_unpack() { 170 src_uri = (d.getVar('SRC_URI') or "").split() 171 if not src_uri: 172 return 173 174 try: 175 fetcher = bb.fetch2.Fetch(src_uri, d) 176 fetcher.unpack(d.getVar('WORKDIR')) 177 except bb.fetch2.BBFetchException as e: 178 bb.fatal("Bitbake Fetcher Error: " + repr(e)) 179} 180 181SSTATETASKS += "do_deploy_source_date_epoch" 182 183do_deploy_source_date_epoch () { 184 mkdir -p ${SDE_DEPLOYDIR} 185 if [ -e ${SDE_FILE} ]; then 186 echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}." 187 cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt 188 else 189 echo "${SDE_FILE} not found!" 190 fi 191} 192 193python do_deploy_source_date_epoch_setscene () { 194 sstate_setscene(d) 195 bb.utils.mkdirhier(d.getVar('SDE_DIR')) 196 sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt') 197 if os.path.exists(sde_file): 198 target = d.getVar('SDE_FILE') 199 bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target)) 200 bb.utils.rename(sde_file, target) 201 else: 202 bb.debug(1, "%s not found!" % sde_file) 203} 204 205do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}" 206do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}" 207addtask do_deploy_source_date_epoch_setscene 208addtask do_deploy_source_date_epoch before do_configure after do_patch 209 210python create_source_date_epoch_stamp() { 211 # Version: 1 212 source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S')) 213 oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d) 214} 215do_unpack[postfuncs] += "create_source_date_epoch_stamp" 216 217def get_source_date_epoch_value(d): 218 return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d) 219 220def get_layers_branch_rev(d): 221 layers = (d.getVar("BBLAYERS") or "").split() 222 layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \ 223 base_get_metadata_git_branch(i, None).strip(), \ 224 base_get_metadata_git_revision(i, None)) \ 225 for i in layers] 226 i = len(layers_branch_rev)-1 227 p1 = layers_branch_rev[i].find("=") 228 s1 = layers_branch_rev[i][p1:] 229 while i > 0: 230 p2 = layers_branch_rev[i-1].find("=") 231 s2= layers_branch_rev[i-1][p2:] 232 if s1 == s2: 233 layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2] 234 i -= 1 235 else: 236 i -= 1 237 p1 = layers_branch_rev[i].find("=") 238 s1= layers_branch_rev[i][p1:] 239 return layers_branch_rev 240 241 242BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars" 243BUILDCFG_FUNCS[type] = "list" 244 245def buildcfg_vars(d): 246 statusvars = oe.data.typed_value('BUILDCFG_VARS', d) 247 for var in statusvars: 248 value = d.getVar(var) 249 if value is not None: 250 yield '%-20s = "%s"' % (var, value) 251 252def buildcfg_neededvars(d): 253 needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d) 254 pesteruser = [] 255 for v in needed_vars: 256 val = d.getVar(v) 257 if not val or val == 'INVALID': 258 pesteruser.append(v) 259 260 if pesteruser: 261 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser)) 262 263addhandler base_eventhandler 264base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed" 265python base_eventhandler() { 266 import bb.runqueue 267 268 if isinstance(e, bb.event.ConfigParsed): 269 if not d.getVar("NATIVELSBSTRING", False): 270 d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d)) 271 d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False)) 272 d.setVar('BB_VERSION', bb.__version__) 273 274 # There might be no bb.event.ConfigParsed event if bitbake server is 275 # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR} 276 # exists. 277 if isinstance(e, bb.event.ConfigParsed) or \ 278 (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))): 279 # Works with the line in layer.conf which changes PATH to point here 280 setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d) 281 setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False) 282 283 if isinstance(e, bb.event.MultiConfigParsed): 284 # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores 285 # own contexts so the variables get expanded correctly for that arch, then inject back into 286 # the main data store. 287 deps = [] 288 for config in e.mcdata: 289 deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS")) 290 deps = " ".join(deps) 291 e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps) 292 293 if isinstance(e, bb.event.BuildStarted): 294 localdata = bb.data.createCopy(d) 295 statuslines = [] 296 for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata): 297 g = globals() 298 if func not in g: 299 bb.warn("Build configuration function '%s' does not exist" % func) 300 else: 301 flines = g[func](localdata) 302 if flines: 303 statuslines.extend(flines) 304 305 statusheader = d.getVar('BUILDCFG_HEADER') 306 if statusheader: 307 bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines))) 308 309 # This code is to silence warnings where the SDK variables overwrite the 310 # target ones and we'd see dulpicate key names overwriting each other 311 # for various PREFERRED_PROVIDERS 312 if isinstance(e, bb.event.RecipePreFinalise): 313 if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"): 314 d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils") 315 d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc") 316 d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++") 317 d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs") 318 319 if isinstance(e, bb.event.RecipeParsed): 320 # 321 # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set 322 # skip parsing for all the other providers which will mean they get uninstalled from the 323 # sysroot since they're now "unreachable". This makes switching virtual/kernel work in 324 # particular. 325 # 326 pn = d.getVar('PN') 327 source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) 328 if not source_mirror_fetch: 329 provs = (d.getVar("PROVIDES") or "").split() 330 multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split() 331 for p in provs: 332 if p.startswith("virtual/") and p not in multiprovidersallowed: 333 profprov = d.getVar("PREFERRED_PROVIDER_" + p) 334 if profprov and pn != profprov: 335 raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn)) 336} 337 338CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate" 339CLEANBROKEN = "0" 340 341addtask configure after do_patch 342do_configure[dirs] = "${B}" 343base_do_configure() { 344 if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then 345 if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then 346 cd ${B} 347 if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then 348 oe_runmake clean 349 fi 350 # -ignore_readdir_race does not work correctly with -delete; 351 # use xargs to avoid spurious build failures 352 find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f 353 fi 354 fi 355 if [ -n "${CONFIGURESTAMPFILE}" ]; then 356 mkdir -p `dirname ${CONFIGURESTAMPFILE}` 357 echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE} 358 fi 359} 360 361addtask compile after do_configure 362do_compile[dirs] = "${B}" 363base_do_compile() { 364 if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then 365 oe_runmake || die "make failed" 366 else 367 bbnote "nothing to compile" 368 fi 369} 370 371addtask install after do_compile 372do_install[dirs] = "${B}" 373# Remove and re-create ${D} so that is it guaranteed to be empty 374do_install[cleandirs] = "${D}" 375 376base_do_install() { 377 : 378} 379 380base_do_package() { 381 : 382} 383 384addtask build after do_populate_sysroot 385do_build[noexec] = "1" 386do_build[recrdeptask] += "do_deploy" 387do_build () { 388 : 389} 390 391def set_packagetriplet(d): 392 archs = [] 393 tos = [] 394 tvs = [] 395 396 archs.append(d.getVar("PACKAGE_ARCHS").split()) 397 tos.append(d.getVar("TARGET_OS")) 398 tvs.append(d.getVar("TARGET_VENDOR")) 399 400 def settriplet(d, varname, archs, tos, tvs): 401 triplets = [] 402 for i in range(len(archs)): 403 for arch in archs[i]: 404 triplets.append(arch + tvs[i] + "-" + tos[i]) 405 triplets.reverse() 406 d.setVar(varname, " ".join(triplets)) 407 408 settriplet(d, "PKGTRIPLETS", archs, tos, tvs) 409 410 variants = d.getVar("MULTILIB_VARIANTS") or "" 411 for item in variants.split(): 412 localdata = bb.data.createCopy(d) 413 overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item 414 localdata.setVar("OVERRIDES", overrides) 415 416 archs.append(localdata.getVar("PACKAGE_ARCHS").split()) 417 tos.append(localdata.getVar("TARGET_OS")) 418 tvs.append(localdata.getVar("TARGET_VENDOR")) 419 420 settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs) 421 422python () { 423 import string, re 424 425 # Handle backfilling 426 oe.utils.features_backfill("DISTRO_FEATURES", d) 427 oe.utils.features_backfill("MACHINE_FEATURES", d) 428 429 if d.getVar("S")[-1] == '/': 430 bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S"))) 431 if d.getVar("B")[-1] == '/': 432 bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B"))) 433 434 if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")): 435 d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}") 436 if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")): 437 d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}") 438 439 # To add a recipe to the skip list , set: 440 # SKIP_RECIPE[pn] = "message" 441 pn = d.getVar('PN') 442 skip_msg = d.getVarFlag('SKIP_RECIPE', pn) 443 if skip_msg: 444 bb.debug(1, "Skipping %s %s" % (pn, skip_msg)) 445 raise bb.parse.SkipRecipe("Recipe will be skipped because: %s" % (skip_msg)) 446 447 # Handle PACKAGECONFIG 448 # 449 # These take the form: 450 # 451 # PACKAGECONFIG ??= "<default options>" 452 # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig" 453 pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} 454 if pkgconfigflags: 455 pkgconfig = (d.getVar('PACKAGECONFIG') or "").split() 456 pn = d.getVar("PN") 457 458 mlprefix = d.getVar("MLPREFIX") 459 460 def expandFilter(appends, extension, prefix): 461 appends = bb.utils.explode_deps(d.expand(" ".join(appends))) 462 newappends = [] 463 for a in appends: 464 if a.endswith("-native") or ("-cross-" in a): 465 newappends.append(a) 466 elif a.startswith("virtual/"): 467 subs = a.split("/", 1)[1] 468 if subs.startswith(prefix): 469 newappends.append(a + extension) 470 else: 471 newappends.append("virtual/" + prefix + subs + extension) 472 else: 473 if a.startswith(prefix): 474 newappends.append(a + extension) 475 else: 476 newappends.append(prefix + a + extension) 477 return newappends 478 479 def appendVar(varname, appends): 480 if not appends: 481 return 482 if varname.find("DEPENDS") != -1: 483 if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) : 484 appends = expandFilter(appends, "", "nativesdk-") 485 elif bb.data.inherits_class('native', d): 486 appends = expandFilter(appends, "-native", "") 487 elif mlprefix: 488 appends = expandFilter(appends, "", mlprefix) 489 varname = d.expand(varname) 490 d.appendVar(varname, " " + " ".join(appends)) 491 492 extradeps = [] 493 extrardeps = [] 494 extrarrecs = [] 495 extraconf = [] 496 for flag, flagval in sorted(pkgconfigflags.items()): 497 items = flagval.split(",") 498 num = len(items) 499 if num > 6: 500 bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!" 501 % (d.getVar('PN'), flag)) 502 503 if flag in pkgconfig: 504 if num >= 3 and items[2]: 505 extradeps.append(items[2]) 506 if num >= 4 and items[3]: 507 extrardeps.append(items[3]) 508 if num >= 5 and items[4]: 509 extrarrecs.append(items[4]) 510 if num >= 1 and items[0]: 511 extraconf.append(items[0]) 512 elif num >= 2 and items[1]: 513 extraconf.append(items[1]) 514 515 if num >= 6 and items[5]: 516 conflicts = set(items[5].split()) 517 invalid = conflicts.difference(set(pkgconfigflags.keys())) 518 if invalid: 519 bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified." 520 % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid))) 521 522 if flag in pkgconfig: 523 intersec = conflicts.intersection(set(pkgconfig)) 524 if intersec: 525 bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG." 526 % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec))) 527 528 appendVar('DEPENDS', extradeps) 529 appendVar('RDEPENDS:${PN}', extrardeps) 530 appendVar('RRECOMMENDS:${PN}', extrarrecs) 531 appendVar('PACKAGECONFIG_CONFARGS', extraconf) 532 533 pn = d.getVar('PN') 534 license = d.getVar('LICENSE') 535 if license == "INVALID" and pn != "defaultpkgname": 536 bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) 537 538 if bb.data.inherits_class('license', d): 539 check_license_format(d) 540 unmatched_license_flags = check_license_flags(d) 541 if unmatched_license_flags: 542 if len(unmatched_license_flags) == 1: 543 message = "because it has a restricted license '{0}'. Which is not listed in LICENSE_FLAGS_ACCEPTED".format(unmatched_license_flags[0]) 544 else: 545 message = "because it has restricted licenses {0}. Which are not listed in LICENSE_FLAGS_ACCEPTED".format( 546 ", ".join("'{0}'".format(f) for f in unmatched_license_flags)) 547 bb.debug(1, "Skipping %s %s" % (pn, message)) 548 raise bb.parse.SkipRecipe(message) 549 550 # If we're building a target package we need to use fakeroot (pseudo) 551 # in order to capture permissions, owners, groups and special files 552 if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d): 553 d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 554 d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 555 d.setVarFlag('do_install', 'fakeroot', '1') 556 d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 557 d.setVarFlag('do_package', 'fakeroot', '1') 558 d.setVarFlag('do_package_setscene', 'fakeroot', '1') 559 d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 560 d.setVarFlag('do_devshell', 'fakeroot', '1') 561 d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 562 563 need_machine = d.getVar('COMPATIBLE_MACHINE') 564 if need_machine and not d.getVar('PARSE_ALL_RECIPES', False): 565 import re 566 compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":") 567 for m in compat_machines: 568 if re.match(need_machine, m): 569 break 570 else: 571 raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE')) 572 573 source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False) 574 if not source_mirror_fetch: 575 need_host = d.getVar('COMPATIBLE_HOST') 576 if need_host: 577 import re 578 this_host = d.getVar('HOST_SYS') 579 if not re.match(need_host, this_host): 580 raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host) 581 582 bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() 583 584 check_license = False if pn.startswith("nativesdk-") else True 585 for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}", 586 "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}", 587 "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]: 588 if pn.endswith(d.expand(t)): 589 check_license = False 590 if pn.startswith("gcc-source-"): 591 check_license = False 592 593 if check_license and bad_licenses: 594 bad_licenses = expand_wildcard_licenses(d, bad_licenses) 595 596 exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split() 597 598 for lic_exception in exceptions: 599 if ":" in lic_exception: 600 lic_exception = lic_exception.split(":")[1] 601 if lic_exception in oe.license.obsolete_license_list(): 602 bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception) 603 604 pkgs = d.getVar('PACKAGES').split() 605 skipped_pkgs = {} 606 unskipped_pkgs = [] 607 for pkg in pkgs: 608 remaining_bad_licenses = oe.license.apply_pkg_license_exception(pkg, bad_licenses, exceptions) 609 610 incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg) 611 if incompatible_lic: 612 skipped_pkgs[pkg] = incompatible_lic 613 else: 614 unskipped_pkgs.append(pkg) 615 616 if unskipped_pkgs: 617 for pkg in skipped_pkgs: 618 bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg]))) 619 d.setVar('_exclude_incompatible-' + pkg, ' '.join(skipped_pkgs[pkg])) 620 for pkg in unskipped_pkgs: 621 bb.debug(1, "Including the package %s" % pkg) 622 else: 623 incompatible_lic = incompatible_license(d, bad_licenses) 624 for pkg in skipped_pkgs: 625 incompatible_lic += skipped_pkgs[pkg] 626 incompatible_lic = sorted(list(set(incompatible_lic))) 627 628 if incompatible_lic: 629 bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic))) 630 raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic)) 631 632 needsrcrev = False 633 srcuri = d.getVar('SRC_URI') 634 for uri_string in srcuri.split(): 635 uri = bb.fetch.URI(uri_string) 636 # Also check downloadfilename as the URL path might not be useful for sniffing 637 path = uri.params.get("downloadfilename", uri.path) 638 639 # HTTP/FTP use the wget fetcher 640 if uri.scheme in ("http", "https", "ftp"): 641 d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot') 642 643 # Svn packages should DEPEND on subversion-native 644 if uri.scheme == "svn": 645 needsrcrev = True 646 d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot') 647 648 # Git packages should DEPEND on git-native 649 elif uri.scheme in ("git", "gitsm"): 650 needsrcrev = True 651 d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot') 652 653 # Mercurial packages should DEPEND on mercurial-native 654 elif uri.scheme == "hg": 655 needsrcrev = True 656 d.appendVar("EXTRANATIVEPATH", ' python3-native ') 657 d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot') 658 659 # Perforce packages support SRCREV = "${AUTOREV}" 660 elif uri.scheme == "p4": 661 needsrcrev = True 662 663 # OSC packages should DEPEND on osc-native 664 elif uri.scheme == "osc": 665 d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot') 666 667 elif uri.scheme == "npm": 668 d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot') 669 670 elif uri.scheme == "repo": 671 needsrcrev = True 672 d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot') 673 674 # *.lz4 should DEPEND on lz4-native for unpacking 675 if path.endswith('.lz4'): 676 d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot') 677 678 # *.zst should DEPEND on zstd-native for unpacking 679 elif path.endswith('.zst'): 680 d.appendVarFlag('do_unpack', 'depends', ' zstd-native:do_populate_sysroot') 681 682 # *.lz should DEPEND on lzip-native for unpacking 683 elif path.endswith('.lz'): 684 d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot') 685 686 # *.xz should DEPEND on xz-native for unpacking 687 elif path.endswith('.xz') or path.endswith('.txz'): 688 d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot') 689 690 # .zip should DEPEND on unzip-native for unpacking 691 elif path.endswith('.zip') or path.endswith('.jar'): 692 d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot') 693 694 # Some rpm files may be compressed internally using xz (for example, rpms from Fedora) 695 elif path.endswith('.rpm'): 696 d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot') 697 698 # *.deb should DEPEND on xz-native for unpacking 699 elif path.endswith('.deb'): 700 d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot') 701 702 if needsrcrev: 703 d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}") 704 705 # Gather all named SRCREVs to add to the sstate hash calculation 706 # This anonymous python snippet is called multiple times so we 707 # need to be careful to not double up the appends here and cause 708 # the base hash to mismatch the task hash 709 for uri in srcuri.split(): 710 parm = bb.fetch.decodeurl(uri)[5] 711 uri_names = parm.get("name", "").split(",") 712 for uri_name in filter(None, uri_names): 713 srcrev_name = "SRCREV_{}".format(uri_name) 714 if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split(): 715 d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name)) 716 717 set_packagetriplet(d) 718 719 # 'multimachine' handling 720 mach_arch = d.getVar('MACHINE_ARCH') 721 pkg_arch = d.getVar('PACKAGE_ARCH') 722 723 if (pkg_arch == mach_arch): 724 # Already machine specific - nothing further to do 725 return 726 727 # 728 # We always try to scan SRC_URI for urls with machine overrides 729 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0 730 # 731 override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH') 732 if override != '0': 733 paths = [] 734 fpaths = (d.getVar('FILESPATH') or '').split(':') 735 machine = d.getVar('MACHINE') 736 for p in fpaths: 737 if os.path.basename(p) == machine and os.path.isdir(p): 738 paths.append(p) 739 740 if paths: 741 for s in srcuri.split(): 742 if not s.startswith("file://"): 743 continue 744 fetcher = bb.fetch2.Fetch([s], d) 745 local = fetcher.localpath(s) 746 for mp in paths: 747 if local.startswith(mp): 748 #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn)) 749 d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") 750 return 751 752 packages = d.getVar('PACKAGES').split() 753 for pkg in packages: 754 pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg) 755 756 # We could look for != PACKAGE_ARCH here but how to choose 757 # if multiple differences are present? 758 # Look through PACKAGE_ARCHS for the priority order? 759 if pkgarch and pkgarch == mach_arch: 760 d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") 761 bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN")) 762} 763 764addtask cleansstate after do_clean 765python do_cleansstate() { 766 sstate_clean_cachefiles(d) 767} 768addtask cleanall after do_cleansstate 769do_cleansstate[nostamp] = "1" 770 771python do_cleanall() { 772 src_uri = (d.getVar('SRC_URI') or "").split() 773 if not src_uri: 774 return 775 776 try: 777 fetcher = bb.fetch2.Fetch(src_uri, d) 778 fetcher.clean() 779 except bb.fetch2.BBFetchException as e: 780 bb.fatal(str(e)) 781} 782do_cleanall[nostamp] = "1" 783 784 785EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package 786