# Core class logic shared by every recipe: bootstraps the OE python module
# namespace, defines the base fetch/unpack/configure/compile/install/build
# tasks, and installs the global event handler.

BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

# Extra python modules a distro/layer may want imported alongside OE_IMPORTS.
OE_EXTRA_IMPORTS ?= ""

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license oe.qa oe.reproducible oe.rust ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"

PACKAGECONFIG_CONFARGS ??= ""

def oe_import(d):
    # Prepend each BBPATH entry's lib/ directory to sys.path, then import
    # every module listed in OE_IMPORTS into bb.utils._context so that
    # inline python expressions in the metadata can use them directly.
    # Returns "" so it can be invoked from a variable expansion (see
    # OE_IMPORTED below).
    import sys

    bbpath = [os.path.join(dir, "lib") for dir in d.getVar("BBPATH").split(":")]
    sys.path[0:0] = [dir for dir in bbpath if dir not in sys.path]

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        try:
            # Make a python object accessible from the metadata
            bb.utils._context[toimport.split(".", 1)[0]] = __import__(toimport)
        except AttributeError as e:
            bb.error("Error importing OE modules: %s" % str(e))
    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    # Identify the host distro, optionally post-processed by a hook function
    # named in LSB_DISTRO_ADJUST (looked up by name in the global namespace;
    # silently ignored if no such function exists).
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

# Abort the current task with a fatal, logged error message.
die() {
	bbfatal_log "$*"
}

# Echo then run make with EXTRA_OEMAKE plus any caller-supplied arguments.
oe_runmake_call() {
	bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@"
}

# Like oe_runmake_call but a make failure is fatal to the task.
oe_runmake() {
	oe_runmake_call "$@" || die "oe_runmake failed"
}


def get_base_dep(d):
    # The default toolchain/libc dependencies, unless the recipe opted out
    # via INHIBIT_DEFAULT_DEPS.
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}compilerlibs virtual/libc"

# Only target and nativesdk recipes get the implicit toolchain DEPENDS.
BASEDEPENDS = ""
BASEDEPENDS:class-target = "${@get_base_dep(d)}"
BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}"

DEPENDS:prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

def extra_path_elements(d):
    # Build a ":"-terminated PATH fragment pointing at each EXTRANATIVEPATH
    # subdirectory of the native staging bindir.
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH:prepend = "${@extra_path_elements(d)}"

def get_lic_checksum_file_list(d):
    # Return a space-separated list of "path:exists" entries for every
    # absolute-path file: URL in LIC_FILES_CHKSUM, so do_fetch re-runs when
    # a license file outside the build tree changes.  Paths under
    # TMPDIR/S/B/WORKDIR are skipped (covered by SRC_URI checksums) and
    # relative paths are ignored for the same reason.
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    # Populate dest with symlinks to the host tools named in toolsvar
    # (HOSTTOOLS / HOSTTOOLS_NONFATAL), resolving each via the original
    # environment's PATH.  Missing tools are fatal only when fatal=True.
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    # Need to ignore our own scripts directories to avoid circular links
    for p in path.split(":"):
        if p.endswith("/scripts"):
            path = path.replace(p, "/ignoreme")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            # clean up dead symlink
            if os.path.islink(desttool):
                os.unlink(desttool)
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.,
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
            # would return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc, this code can check and fix that.
            if os.path.islink(srctool) and os.path.basename(os.readlink(srctool)) == 'ccache':
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
do_fetch[network] = "1"
# Download everything in SRC_URI via the bitbake fetcher.
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

# Clean ${S} before unpacking, unless S == WORKDIR in which case only the
# patches/ subdirectory is cleaned (wiping WORKDIR itself would destroy
# other task state).
do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"

# Unpack all fetched sources into WORKDIR.
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}

SSTATETASKS += "do_deploy_source_date_epoch"

# Publish the recipe's SOURCE_DATE_EPOCH file into the sstate-managed
# deploy directory so it can be restored from cache.
do_deploy_source_date_epoch () {
    mkdir -p ${SDE_DEPLOYDIR}
    if [ -e ${SDE_FILE} ]; then
        echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
        cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
    else
        echo "${SDE_FILE} not found!"
    fi
}

# Setscene variant: restore the deployed epoch file back to SDE_FILE.
python do_deploy_source_date_epoch_setscene () {
    sstate_setscene(d)
    bb.utils.mkdirhier(d.getVar('SDE_DIR'))
    sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
    if os.path.exists(sde_file):
        target = d.getVar('SDE_FILE')
        bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
        bb.utils.rename(sde_file, target)
    else:
        bb.debug(1, "%s not found!" % sde_file)
}

do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}"
addtask do_deploy_source_date_epoch_setscene
addtask do_deploy_source_date_epoch before do_configure after do_patch

# Compute SOURCE_DATE_EPOCH from the unpacked sources and record it in
# SDE_FILE; runs as a postfunc of do_unpack.
python create_source_date_epoch_stamp() {
    # Version: 1
    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
    oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
}
do_unpack[postfuncs] += "create_source_date_epoch_stamp"

def get_source_date_epoch_value(d):
    # Read back the epoch recorded by create_source_date_epoch_stamp.
    return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d)

def get_layers_branch_rev(d):
    # Produce one 'name = "branch:rev"' status line per BBLAYERS entry.
    # The loop below walks the list from the end, blanking the branch:rev
    # part of any entry identical to its successor so repeated values are
    # only printed once in the build banner.
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2= layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1= layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    # Yield 'NAME = "value"' lines for every set variable in BUILDCFG_VARS,
    # for the build configuration banner.
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    # Abort the build if any variable in BUILDCFG_NEEDEDVARS is unset or
    # still has the placeholder value 'INVALID'.
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
# Global event handler: records host distro/bitbake version at config parse,
# maintains the HOSTTOOLS symlink dir, merges multiconfig signature
# exclusions, prints the build configuration banner, and enforces
# PREFERRED_PROVIDER selection at recipe parse time.
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiprovidersallowed:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
# If the task hash changed since the last configure, run 'make clean'
# (unless CLEANBROKEN) and scrub stale .la files, then record the new hash.
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            # -ignore_readdir_race does not work correctly with -delete;
            # use xargs to avoid spurious build failures
            find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
# Run make if a makefile is present; otherwise nothing to do.
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that is it guaranteed to be empty
do_install[cleandirs] = "${D}"

# Default no-op install; recipes/classes override as needed.
base_do_install() {
	:
}

# Default no-op package step; packaging classes provide the real logic.
base_do_package() {
	:
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
	:
}

def set_packagetriplet(d):
    # Compute PKGTRIPLETS (and, below, PKGMLTRIPLETS per multilib variant)
    # as reversed "<arch><vendor>-<os>" triplets over PACKAGE_ARCHS.
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
for i in range(len(archs)): 403*4882a593Smuzhiyun for arch in archs[i]: 404*4882a593Smuzhiyun triplets.append(arch + tvs[i] + "-" + tos[i]) 405*4882a593Smuzhiyun triplets.reverse() 406*4882a593Smuzhiyun d.setVar(varname, " ".join(triplets)) 407*4882a593Smuzhiyun 408*4882a593Smuzhiyun settriplet(d, "PKGTRIPLETS", archs, tos, tvs) 409*4882a593Smuzhiyun 410*4882a593Smuzhiyun variants = d.getVar("MULTILIB_VARIANTS") or "" 411*4882a593Smuzhiyun for item in variants.split(): 412*4882a593Smuzhiyun localdata = bb.data.createCopy(d) 413*4882a593Smuzhiyun overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item 414*4882a593Smuzhiyun localdata.setVar("OVERRIDES", overrides) 415*4882a593Smuzhiyun 416*4882a593Smuzhiyun archs.append(localdata.getVar("PACKAGE_ARCHS").split()) 417*4882a593Smuzhiyun tos.append(localdata.getVar("TARGET_OS")) 418*4882a593Smuzhiyun tvs.append(localdata.getVar("TARGET_VENDOR")) 419*4882a593Smuzhiyun 420*4882a593Smuzhiyun settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs) 421*4882a593Smuzhiyun 422*4882a593Smuzhiyunpython () { 423*4882a593Smuzhiyun import string, re 424*4882a593Smuzhiyun 425*4882a593Smuzhiyun # Handle backfilling 426*4882a593Smuzhiyun oe.utils.features_backfill("DISTRO_FEATURES", d) 427*4882a593Smuzhiyun oe.utils.features_backfill("MACHINE_FEATURES", d) 428*4882a593Smuzhiyun 429*4882a593Smuzhiyun if d.getVar("S")[-1] == '/': 430*4882a593Smuzhiyun bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S"))) 431*4882a593Smuzhiyun if d.getVar("B")[-1] == '/': 432*4882a593Smuzhiyun bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B"))) 433*4882a593Smuzhiyun 434*4882a593Smuzhiyun if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")): 435*4882a593Smuzhiyun d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}") 436*4882a593Smuzhiyun if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")): 
437*4882a593Smuzhiyun d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}") 438*4882a593Smuzhiyun 439*4882a593Smuzhiyun # To add a recipe to the skip list , set: 440*4882a593Smuzhiyun # SKIP_RECIPE[pn] = "message" 441*4882a593Smuzhiyun pn = d.getVar('PN') 442*4882a593Smuzhiyun skip_msg = d.getVarFlag('SKIP_RECIPE', pn) 443*4882a593Smuzhiyun if skip_msg: 444*4882a593Smuzhiyun bb.debug(1, "Skipping %s %s" % (pn, skip_msg)) 445*4882a593Smuzhiyun raise bb.parse.SkipRecipe("Recipe will be skipped because: %s" % (skip_msg)) 446*4882a593Smuzhiyun 447*4882a593Smuzhiyun # Handle PACKAGECONFIG 448*4882a593Smuzhiyun # 449*4882a593Smuzhiyun # These take the form: 450*4882a593Smuzhiyun # 451*4882a593Smuzhiyun # PACKAGECONFIG ??= "<default options>" 452*4882a593Smuzhiyun # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig" 453*4882a593Smuzhiyun pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} 454*4882a593Smuzhiyun if pkgconfigflags: 455*4882a593Smuzhiyun pkgconfig = (d.getVar('PACKAGECONFIG') or "").split() 456*4882a593Smuzhiyun pn = d.getVar("PN") 457*4882a593Smuzhiyun 458*4882a593Smuzhiyun mlprefix = d.getVar("MLPREFIX") 459*4882a593Smuzhiyun 460*4882a593Smuzhiyun def expandFilter(appends, extension, prefix): 461*4882a593Smuzhiyun appends = bb.utils.explode_deps(d.expand(" ".join(appends))) 462*4882a593Smuzhiyun newappends = [] 463*4882a593Smuzhiyun for a in appends: 464*4882a593Smuzhiyun if a.endswith("-native") or ("-cross-" in a): 465*4882a593Smuzhiyun newappends.append(a) 466*4882a593Smuzhiyun elif a.startswith("virtual/"): 467*4882a593Smuzhiyun subs = a.split("/", 1)[1] 468*4882a593Smuzhiyun if subs.startswith(prefix): 469*4882a593Smuzhiyun newappends.append(a + extension) 470*4882a593Smuzhiyun else: 471*4882a593Smuzhiyun newappends.append("virtual/" + prefix + subs + extension) 472*4882a593Smuzhiyun else: 473*4882a593Smuzhiyun if a.startswith(prefix): 474*4882a593Smuzhiyun newappends.append(a + 
extension) 475*4882a593Smuzhiyun else: 476*4882a593Smuzhiyun newappends.append(prefix + a + extension) 477*4882a593Smuzhiyun return newappends 478*4882a593Smuzhiyun 479*4882a593Smuzhiyun def appendVar(varname, appends): 480*4882a593Smuzhiyun if not appends: 481*4882a593Smuzhiyun return 482*4882a593Smuzhiyun if varname.find("DEPENDS") != -1: 483*4882a593Smuzhiyun if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) : 484*4882a593Smuzhiyun appends = expandFilter(appends, "", "nativesdk-") 485*4882a593Smuzhiyun elif bb.data.inherits_class('native', d): 486*4882a593Smuzhiyun appends = expandFilter(appends, "-native", "") 487*4882a593Smuzhiyun elif mlprefix: 488*4882a593Smuzhiyun appends = expandFilter(appends, "", mlprefix) 489*4882a593Smuzhiyun varname = d.expand(varname) 490*4882a593Smuzhiyun d.appendVar(varname, " " + " ".join(appends)) 491*4882a593Smuzhiyun 492*4882a593Smuzhiyun extradeps = [] 493*4882a593Smuzhiyun extrardeps = [] 494*4882a593Smuzhiyun extrarrecs = [] 495*4882a593Smuzhiyun extraconf = [] 496*4882a593Smuzhiyun for flag, flagval in sorted(pkgconfigflags.items()): 497*4882a593Smuzhiyun items = flagval.split(",") 498*4882a593Smuzhiyun num = len(items) 499*4882a593Smuzhiyun if num > 6: 500*4882a593Smuzhiyun bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!" 
501*4882a593Smuzhiyun % (d.getVar('PN'), flag)) 502*4882a593Smuzhiyun 503*4882a593Smuzhiyun if flag in pkgconfig: 504*4882a593Smuzhiyun if num >= 3 and items[2]: 505*4882a593Smuzhiyun extradeps.append(items[2]) 506*4882a593Smuzhiyun if num >= 4 and items[3]: 507*4882a593Smuzhiyun extrardeps.append(items[3]) 508*4882a593Smuzhiyun if num >= 5 and items[4]: 509*4882a593Smuzhiyun extrarrecs.append(items[4]) 510*4882a593Smuzhiyun if num >= 1 and items[0]: 511*4882a593Smuzhiyun extraconf.append(items[0]) 512*4882a593Smuzhiyun elif num >= 2 and items[1]: 513*4882a593Smuzhiyun extraconf.append(items[1]) 514*4882a593Smuzhiyun 515*4882a593Smuzhiyun if num >= 6 and items[5]: 516*4882a593Smuzhiyun conflicts = set(items[5].split()) 517*4882a593Smuzhiyun invalid = conflicts.difference(set(pkgconfigflags.keys())) 518*4882a593Smuzhiyun if invalid: 519*4882a593Smuzhiyun bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified." 520*4882a593Smuzhiyun % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid))) 521*4882a593Smuzhiyun 522*4882a593Smuzhiyun if flag in pkgconfig: 523*4882a593Smuzhiyun intersec = conflicts.intersection(set(pkgconfig)) 524*4882a593Smuzhiyun if intersec: 525*4882a593Smuzhiyun bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG." 
526*4882a593Smuzhiyun % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec))) 527*4882a593Smuzhiyun 528*4882a593Smuzhiyun appendVar('DEPENDS', extradeps) 529*4882a593Smuzhiyun appendVar('RDEPENDS:${PN}', extrardeps) 530*4882a593Smuzhiyun appendVar('RRECOMMENDS:${PN}', extrarrecs) 531*4882a593Smuzhiyun appendVar('PACKAGECONFIG_CONFARGS', extraconf) 532*4882a593Smuzhiyun 533*4882a593Smuzhiyun pn = d.getVar('PN') 534*4882a593Smuzhiyun license = d.getVar('LICENSE') 535*4882a593Smuzhiyun if license == "INVALID" and pn != "defaultpkgname": 536*4882a593Smuzhiyun bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) 537*4882a593Smuzhiyun 538*4882a593Smuzhiyun if bb.data.inherits_class('license', d): 539*4882a593Smuzhiyun check_license_format(d) 540*4882a593Smuzhiyun unmatched_license_flags = check_license_flags(d) 541*4882a593Smuzhiyun if unmatched_license_flags: 542*4882a593Smuzhiyun if len(unmatched_license_flags) == 1: 543*4882a593Smuzhiyun message = "because it has a restricted license '{0}'. Which is not listed in LICENSE_FLAGS_ACCEPTED".format(unmatched_license_flags[0]) 544*4882a593Smuzhiyun else: 545*4882a593Smuzhiyun message = "because it has restricted licenses {0}. 
Which are not listed in LICENSE_FLAGS_ACCEPTED".format( 546*4882a593Smuzhiyun ", ".join("'{0}'".format(f) for f in unmatched_license_flags)) 547*4882a593Smuzhiyun bb.debug(1, "Skipping %s %s" % (pn, message)) 548*4882a593Smuzhiyun raise bb.parse.SkipRecipe(message) 549*4882a593Smuzhiyun 550*4882a593Smuzhiyun # If we're building a target package we need to use fakeroot (pseudo) 551*4882a593Smuzhiyun # in order to capture permissions, owners, groups and special files 552*4882a593Smuzhiyun if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d): 553*4882a593Smuzhiyun d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 554*4882a593Smuzhiyun d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 555*4882a593Smuzhiyun d.setVarFlag('do_install', 'fakeroot', '1') 556*4882a593Smuzhiyun d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 557*4882a593Smuzhiyun d.setVarFlag('do_package', 'fakeroot', '1') 558*4882a593Smuzhiyun d.setVarFlag('do_package_setscene', 'fakeroot', '1') 559*4882a593Smuzhiyun d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 560*4882a593Smuzhiyun d.setVarFlag('do_devshell', 'fakeroot', '1') 561*4882a593Smuzhiyun d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 562*4882a593Smuzhiyun 563*4882a593Smuzhiyun need_machine = d.getVar('COMPATIBLE_MACHINE') 564*4882a593Smuzhiyun if need_machine and not d.getVar('PARSE_ALL_RECIPES', False): 565*4882a593Smuzhiyun import re 566*4882a593Smuzhiyun compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":") 567*4882a593Smuzhiyun for m in compat_machines: 568*4882a593Smuzhiyun if re.match(need_machine, m): 569*4882a593Smuzhiyun break 570*4882a593Smuzhiyun else: 571*4882a593Smuzhiyun raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % 
d.getVar('MACHINE')) 572*4882a593Smuzhiyun 573*4882a593Smuzhiyun source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False) 574*4882a593Smuzhiyun if not source_mirror_fetch: 575*4882a593Smuzhiyun need_host = d.getVar('COMPATIBLE_HOST') 576*4882a593Smuzhiyun if need_host: 577*4882a593Smuzhiyun import re 578*4882a593Smuzhiyun this_host = d.getVar('HOST_SYS') 579*4882a593Smuzhiyun if not re.match(need_host, this_host): 580*4882a593Smuzhiyun raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host) 581*4882a593Smuzhiyun 582*4882a593Smuzhiyun bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() 583*4882a593Smuzhiyun 584*4882a593Smuzhiyun check_license = False if pn.startswith("nativesdk-") else True 585*4882a593Smuzhiyun for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}", 586*4882a593Smuzhiyun "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}", 587*4882a593Smuzhiyun "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]: 588*4882a593Smuzhiyun if pn.endswith(d.expand(t)): 589*4882a593Smuzhiyun check_license = False 590*4882a593Smuzhiyun if pn.startswith("gcc-source-"): 591*4882a593Smuzhiyun check_license = False 592*4882a593Smuzhiyun 593*4882a593Smuzhiyun if check_license and bad_licenses: 594*4882a593Smuzhiyun bad_licenses = expand_wildcard_licenses(d, bad_licenses) 595*4882a593Smuzhiyun 596*4882a593Smuzhiyun exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split() 597*4882a593Smuzhiyun 598*4882a593Smuzhiyun for lic_exception in exceptions: 599*4882a593Smuzhiyun if ":" in lic_exception: 600*4882a593Smuzhiyun lic_exception = lic_exception.split(":")[1] 601*4882a593Smuzhiyun if lic_exception in oe.license.obsolete_license_list(): 602*4882a593Smuzhiyun bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception) 603*4882a593Smuzhiyun 604*4882a593Smuzhiyun pkgs = d.getVar('PACKAGES').split() 605*4882a593Smuzhiyun skipped_pkgs 
= {} 606*4882a593Smuzhiyun unskipped_pkgs = [] 607*4882a593Smuzhiyun for pkg in pkgs: 608*4882a593Smuzhiyun remaining_bad_licenses = oe.license.apply_pkg_license_exception(pkg, bad_licenses, exceptions) 609*4882a593Smuzhiyun 610*4882a593Smuzhiyun incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg) 611*4882a593Smuzhiyun if incompatible_lic: 612*4882a593Smuzhiyun skipped_pkgs[pkg] = incompatible_lic 613*4882a593Smuzhiyun else: 614*4882a593Smuzhiyun unskipped_pkgs.append(pkg) 615*4882a593Smuzhiyun 616*4882a593Smuzhiyun if unskipped_pkgs: 617*4882a593Smuzhiyun for pkg in skipped_pkgs: 618*4882a593Smuzhiyun bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg]))) 619*4882a593Smuzhiyun d.setVar('_exclude_incompatible-' + pkg, ' '.join(skipped_pkgs[pkg])) 620*4882a593Smuzhiyun for pkg in unskipped_pkgs: 621*4882a593Smuzhiyun bb.debug(1, "Including the package %s" % pkg) 622*4882a593Smuzhiyun else: 623*4882a593Smuzhiyun incompatible_lic = incompatible_license(d, bad_licenses) 624*4882a593Smuzhiyun for pkg in skipped_pkgs: 625*4882a593Smuzhiyun incompatible_lic += skipped_pkgs[pkg] 626*4882a593Smuzhiyun incompatible_lic = sorted(list(set(incompatible_lic))) 627*4882a593Smuzhiyun 628*4882a593Smuzhiyun if incompatible_lic: 629*4882a593Smuzhiyun bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic))) 630*4882a593Smuzhiyun raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic)) 631*4882a593Smuzhiyun 632*4882a593Smuzhiyun needsrcrev = False 633*4882a593Smuzhiyun srcuri = d.getVar('SRC_URI') 634*4882a593Smuzhiyun for uri_string in srcuri.split(): 635*4882a593Smuzhiyun uri = bb.fetch.URI(uri_string) 636*4882a593Smuzhiyun # Also check downloadfilename as the URL path might not be useful for sniffing 637*4882a593Smuzhiyun path = uri.params.get("downloadfilename", uri.path) 638*4882a593Smuzhiyun 
639*4882a593Smuzhiyun # HTTP/FTP use the wget fetcher 640*4882a593Smuzhiyun if uri.scheme in ("http", "https", "ftp"): 641*4882a593Smuzhiyun d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot') 642*4882a593Smuzhiyun 643*4882a593Smuzhiyun # Svn packages should DEPEND on subversion-native 644*4882a593Smuzhiyun if uri.scheme == "svn": 645*4882a593Smuzhiyun needsrcrev = True 646*4882a593Smuzhiyun d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot') 647*4882a593Smuzhiyun 648*4882a593Smuzhiyun # Git packages should DEPEND on git-native 649*4882a593Smuzhiyun elif uri.scheme in ("git", "gitsm"): 650*4882a593Smuzhiyun needsrcrev = True 651*4882a593Smuzhiyun d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot') 652*4882a593Smuzhiyun 653*4882a593Smuzhiyun # Mercurial packages should DEPEND on mercurial-native 654*4882a593Smuzhiyun elif uri.scheme == "hg": 655*4882a593Smuzhiyun needsrcrev = True 656*4882a593Smuzhiyun d.appendVar("EXTRANATIVEPATH", ' python3-native ') 657*4882a593Smuzhiyun d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot') 658*4882a593Smuzhiyun 659*4882a593Smuzhiyun # Perforce packages support SRCREV = "${AUTOREV}" 660*4882a593Smuzhiyun elif uri.scheme == "p4": 661*4882a593Smuzhiyun needsrcrev = True 662*4882a593Smuzhiyun 663*4882a593Smuzhiyun # OSC packages should DEPEND on osc-native 664*4882a593Smuzhiyun elif uri.scheme == "osc": 665*4882a593Smuzhiyun d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot') 666*4882a593Smuzhiyun 667*4882a593Smuzhiyun elif uri.scheme == "npm": 668*4882a593Smuzhiyun d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot') 669*4882a593Smuzhiyun 670*4882a593Smuzhiyun elif uri.scheme == "repo": 671*4882a593Smuzhiyun needsrcrev = True 672*4882a593Smuzhiyun d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot') 673*4882a593Smuzhiyun 674*4882a593Smuzhiyun # *.lz4 should DEPEND on 
lz4-native for unpacking 675*4882a593Smuzhiyun if path.endswith('.lz4'): 676*4882a593Smuzhiyun d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot') 677*4882a593Smuzhiyun 678*4882a593Smuzhiyun # *.zst should DEPEND on zstd-native for unpacking 679*4882a593Smuzhiyun elif path.endswith('.zst'): 680*4882a593Smuzhiyun d.appendVarFlag('do_unpack', 'depends', ' zstd-native:do_populate_sysroot') 681*4882a593Smuzhiyun 682*4882a593Smuzhiyun # *.lz should DEPEND on lzip-native for unpacking 683*4882a593Smuzhiyun elif path.endswith('.lz'): 684*4882a593Smuzhiyun d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot') 685*4882a593Smuzhiyun 686*4882a593Smuzhiyun # *.xz should DEPEND on xz-native for unpacking 687*4882a593Smuzhiyun elif path.endswith('.xz') or path.endswith('.txz'): 688*4882a593Smuzhiyun d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot') 689*4882a593Smuzhiyun 690*4882a593Smuzhiyun # .zip should DEPEND on unzip-native for unpacking 691*4882a593Smuzhiyun elif path.endswith('.zip') or path.endswith('.jar'): 692*4882a593Smuzhiyun d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot') 693*4882a593Smuzhiyun 694*4882a593Smuzhiyun # Some rpm files may be compressed internally using xz (for example, rpms from Fedora) 695*4882a593Smuzhiyun elif path.endswith('.rpm'): 696*4882a593Smuzhiyun d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot') 697*4882a593Smuzhiyun 698*4882a593Smuzhiyun # *.deb should DEPEND on xz-native for unpacking 699*4882a593Smuzhiyun elif path.endswith('.deb'): 700*4882a593Smuzhiyun d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot') 701*4882a593Smuzhiyun 702*4882a593Smuzhiyun if needsrcrev: 703*4882a593Smuzhiyun d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}") 704*4882a593Smuzhiyun 705*4882a593Smuzhiyun # Gather all named SRCREVs to add to the sstate hash calculation 706*4882a593Smuzhiyun # This anonymous python snippet is 
called multiple times so we 707*4882a593Smuzhiyun # need to be careful to not double up the appends here and cause 708*4882a593Smuzhiyun # the base hash to mismatch the task hash 709*4882a593Smuzhiyun for uri in srcuri.split(): 710*4882a593Smuzhiyun parm = bb.fetch.decodeurl(uri)[5] 711*4882a593Smuzhiyun uri_names = parm.get("name", "").split(",") 712*4882a593Smuzhiyun for uri_name in filter(None, uri_names): 713*4882a593Smuzhiyun srcrev_name = "SRCREV_{}".format(uri_name) 714*4882a593Smuzhiyun if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split(): 715*4882a593Smuzhiyun d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name)) 716*4882a593Smuzhiyun 717*4882a593Smuzhiyun set_packagetriplet(d) 718*4882a593Smuzhiyun 719*4882a593Smuzhiyun # 'multimachine' handling 720*4882a593Smuzhiyun mach_arch = d.getVar('MACHINE_ARCH') 721*4882a593Smuzhiyun pkg_arch = d.getVar('PACKAGE_ARCH') 722*4882a593Smuzhiyun 723*4882a593Smuzhiyun if (pkg_arch == mach_arch): 724*4882a593Smuzhiyun # Already machine specific - nothing further to do 725*4882a593Smuzhiyun return 726*4882a593Smuzhiyun 727*4882a593Smuzhiyun # 728*4882a593Smuzhiyun # We always try to scan SRC_URI for urls with machine overrides 729*4882a593Smuzhiyun # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0 730*4882a593Smuzhiyun # 731*4882a593Smuzhiyun override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH') 732*4882a593Smuzhiyun if override != '0': 733*4882a593Smuzhiyun paths = [] 734*4882a593Smuzhiyun fpaths = (d.getVar('FILESPATH') or '').split(':') 735*4882a593Smuzhiyun machine = d.getVar('MACHINE') 736*4882a593Smuzhiyun for p in fpaths: 737*4882a593Smuzhiyun if os.path.basename(p) == machine and os.path.isdir(p): 738*4882a593Smuzhiyun paths.append(p) 739*4882a593Smuzhiyun 740*4882a593Smuzhiyun if paths: 741*4882a593Smuzhiyun for s in srcuri.split(): 742*4882a593Smuzhiyun if not s.startswith("file://"): 743*4882a593Smuzhiyun continue 744*4882a593Smuzhiyun fetcher = bb.fetch2.Fetch([s], d) 
745*4882a593Smuzhiyun local = fetcher.localpath(s) 746*4882a593Smuzhiyun for mp in paths: 747*4882a593Smuzhiyun if local.startswith(mp): 748*4882a593Smuzhiyun #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn)) 749*4882a593Smuzhiyun d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") 750*4882a593Smuzhiyun return 751*4882a593Smuzhiyun 752*4882a593Smuzhiyun packages = d.getVar('PACKAGES').split() 753*4882a593Smuzhiyun for pkg in packages: 754*4882a593Smuzhiyun pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg) 755*4882a593Smuzhiyun 756*4882a593Smuzhiyun # We could look for != PACKAGE_ARCH here but how to choose 757*4882a593Smuzhiyun # if multiple differences are present? 758*4882a593Smuzhiyun # Look through PACKAGE_ARCHS for the priority order? 759*4882a593Smuzhiyun if pkgarch and pkgarch == mach_arch: 760*4882a593Smuzhiyun d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") 761*4882a593Smuzhiyun bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." 
% d.getVar("PN")) 762*4882a593Smuzhiyun} 763*4882a593Smuzhiyun 764*4882a593Smuzhiyunaddtask cleansstate after do_clean 765*4882a593Smuzhiyunpython do_cleansstate() { 766*4882a593Smuzhiyun sstate_clean_cachefiles(d) 767*4882a593Smuzhiyun} 768*4882a593Smuzhiyunaddtask cleanall after do_cleansstate 769*4882a593Smuzhiyundo_cleansstate[nostamp] = "1" 770*4882a593Smuzhiyun 771*4882a593Smuzhiyunpython do_cleanall() { 772*4882a593Smuzhiyun src_uri = (d.getVar('SRC_URI') or "").split() 773*4882a593Smuzhiyun if not src_uri: 774*4882a593Smuzhiyun return 775*4882a593Smuzhiyun 776*4882a593Smuzhiyun try: 777*4882a593Smuzhiyun fetcher = bb.fetch2.Fetch(src_uri, d) 778*4882a593Smuzhiyun fetcher.clean() 779*4882a593Smuzhiyun except bb.fetch2.BBFetchException as e: 780*4882a593Smuzhiyun bb.fatal(str(e)) 781*4882a593Smuzhiyun} 782*4882a593Smuzhiyundo_cleanall[nostamp] = "1" 783*4882a593Smuzhiyun 784*4882a593Smuzhiyun 785*4882a593SmuzhiyunEXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package 786