xref: /OK3568_Linux_fs/yocto/poky/meta/classes/package.bbclass (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1#
2# Packaging process
3#
4# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
5# Taking D and splitting it up into the packages listed in PACKAGES, placing the
6# resulting output in PKGDEST.
7#
8# There are the following default steps but PACKAGEFUNCS can be extended:
9#
10# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
11#
12# b) perform_packagecopy - Copy D into PKGD
13#
14# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
15#
16# d) split_and_strip_files - split the files into runtime and debug and strip them.
17#    Debug files include debug info split, and associated sources that end up in -dbg packages
18#
19# e) fixup_perms - Fix up permissions in the package before we split it.
20#
21# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
22#    Also triggers the binary stripping code to put files in -dbg packages.
23#
24# g) package_do_filedeps - Collect perfile run-time dependency metadata
#    The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
26#    a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
27#
# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
29#    dependencies found. Also stores the package name so anyone else using this library
30#    knows which package to depend on.
31#
32# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
33#
34# j) read_shlibdeps - Reads the stored shlibs information into the metadata
35#
36# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
37#
38# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
39#    packaging steps
40
41inherit packagedata
42inherit chrpath
43inherit package_pkgdata
44inherit insane
45
# Staging area that do_package splits up: D is copied into PKGD first
PKGD    = "${WORKDIR}/package"
# Per-package split output: PKGDEST/<pkgname>/ holds each package's files
PKGDEST = "${WORKDIR}/packages-split"

# Section assigned to the generated locale packages; empty by default
LOCALE_SECTION ?= ''

# PACKAGE_ARCHS expanded across every multilib tune, so packaged output can
# be matched against any multilib variant
ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"

# rpm is used for the per-file dependency identification
# dwarfsrcfiles is used to determine the list of debug source files
PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"


# If your postinstall can execute at rootfs creation time rather than on
# target but depends on a native/cross tool in order to execute, you need to
# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
# in the package dependencies as normal, this is just for native/cross support
# tools at rootfs build time.
PACKAGE_WRITE_DEPS ??= ""
64
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings
    """
    import re

    def _decode_codepoint(match):
        # Turn the captured hex digits into the actual unicode character
        hexcp = match.group(1)
        if hexcp:
            return ('\\u%s' % hexcp).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', _decode_codepoint, s)

    # Remaining package name validity fixes: lowercase and map characters
    # that are not valid in package names to acceptable substitutes
    cleaned = s.lower()
    for bad, good in (('_', '-'), ('@', '+'), (',', '+'), ('/', '-')):
        cleaned = cleaned.replace(bad, good)
    return cleaned
81
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items. Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    Returns the list of package names that were created.
    """

    dvar = d.getVar('PKGD')
    root = d.expand(root)
    output_pattern = d.expand(output_pattern)
    extra_depends = d.expand(extra_depends)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # For multilib builds, force the multilib prefix onto the generated
    # package names and onto every entry in extra_depends
    ml = d.getVar("MLPREFIX")
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES').split()
    split_packages = set()

    # Scripts are wrapped into a shell header so they can run standalone
    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    # Collect candidate paths (relative to root) to match against file_regex
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    # None (the default) means "depend on the main package"; '' disables it
    if extra_depends == None:
        extra_depends = d.getVar("PN")

    if not summary:
        summary = description

    for o in sorted(objs):
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        # Only regular files match by default; symlinks/directories only
        # when explicitly allowed by the caller
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        # Derive the package name from the regex capture group
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.add(pkg)
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES:' + pkg)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            # First file for this package: seed FILES with the match plus
            # any auxiliary file patterns
            the_files = [newfile]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES:' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES:' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends)
        # Only set description/summary if the recipe hasn't provided one
        if not d.getVar('DESCRIPTION:' + pkg):
            d.setVar('DESCRIPTION:' + pkg, description % on)
        if not d.getVar('SUMMARY:' + pkg):
            d.setVar('SUMMARY:' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst:' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm:' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return list(split_packages)
238
# file(1) is needed by the per-file dependency scanning
PACKAGE_DEPENDS += "file-native"

python () {
    # Unless the recipe disables packaging entirely (PACKAGES = ''), make
    # do_package depend on the sysroot of every PACKAGE_DEPENDS tool
    if d.getVar('PACKAGES') != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
            deps += " %s:do_populate_sysroot" % dep
        # minidebuginfo injection compresses symbol data with xz
        if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
            deps += ' xz-native:do_populate_sysroot'
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
253
254# Get a list of files from file vars by searching files under current working directory
255# The list contains symlinks, directories and normal files.
# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """
    Expand a list of FILES-style entries (globs and absolute paths) into
    the matching paths under the current working directory.

    Returns a tuple (files, symlink_paths):
      files         -- './'-prefixed matches; matched directories are also
                       expanded to include their children
      symlink_paths -- original paths found to live below a directory
                       symlink (their entry in 'files' is truncated to the
                       symlink itself)
    """
    import os,glob
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        # Normalise every entry to a './'-relative path before globbing
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            # Keep the literal entry if the glob expanded to exactly itself
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    # NOTE: 'files' is extended while being enumerated, so children appended
    # by the directory expansion below are themselves processed by this loop
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths
296
297# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """
    Return the configuration files for package 'pkg' as target-absolute paths.

    Looks up CONFFILES:<pkg> (falling back to CONFFILES), expands the entries
    relative to PKGDEST/<pkg>, and keeps only plain files that actually exist
    (directories and symlinks are dropped).
    """
    pkgdest = d.getVar('PKGDEST')
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)
    # Restore the working directory even if the filesystem walk raises;
    # the original code would leave the process chdir'd into the package
    # root on any exception.
    try:
        conffiles = d.getVar('CONFFILES:%s' % pkg)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES')
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)[0]

        # Remove links and directories from conf_orig_list to get conf_list
        # which only contains normal files
        conf_list = []
        for f in conf_orig_list:
            if os.path.isdir(f):
                continue
            if os.path.islink(f):
                continue
            if not os.path.exists(f):
                continue
            # files_from_filevars() returns './'-prefixed paths; strip the
            # leading '.' so the path is absolute from the package root
            conf_list.append(f[1:])
    finally:
        os.chdir(cwd)
    return conf_list
329
def checkbuildpath(file, d):
    """Return True if the file's contents contain TMPDIR (the build path)."""
    tmpdir = d.getVar('TMPDIR')
    with open(file) as f:
        return tmpdir in f.read()
338
def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    """
    Extract the unique source file paths from dwarfsrcfiles output.

    dwarfsrcfiles prints one source path per tab-indented line; paths are
    normalised and de-duplicated while preserving first-seen order (a dict
    is used as an ordered set).
    """
    seen = {}
    for entry in dwarfsrcfiles_output.splitlines():
        if not entry.startswith("\t"):
            continue
        seen[os.path.normpath(entry.split()[0])] = ""

    return seen.keys()
347
def source_info(file, d, fatal=True):
    """
    Run dwarfsrcfiles on 'file' and return its list of debug source files.

    Exit code 255 means some compilation units could not be fully parsed,
    which is treated as non-fatal. Any other non-zero exit aborts the build
    when fatal=True, otherwise it is only logged as a note.
    """
    import subprocess

    cmd = ["dwarfsrcfiles", file]
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
        retval = 0
    except subprocess.CalledProcessError as exc:
        output, retval = exc.output, exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval not in (0, 255):
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    return list(parse_debugsources_from_dwarfsrcfiles_output(output))
369
def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # dv is the debug-split configuration dict; the keys read here are
    # "libdir", "dir", "append" and "srcdir".
    #
    # return a mapping of files:debugsources

    import stat
    import subprocess

    # Path relative to the package root, and the location the debug info
    # will be written to under PKGD
    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    # Signed kernel modules must not be stripped; stripping would invalidate
    # the signature appended to the module
    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    # Split the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Split %s -> %s" % (file, debugfile))
    # Only store off the hard link reference if we successfully split!

    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    # Ensure the file is readable and writable for objcopy; restored below.
    # NOTE(review): 'or os.access(file, os.R_OK)' looks inverted (any readable
    # file triggers the chmod) - presumably 'or not os.access(...)' was
    # intended. The extra chmod is harmless, so behavior is kept as-is.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Extract the debug info into debugfile...
    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # Set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
419
def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike splitdebuginfo(), there is no way to split a static library into
    # two components.  So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # dv is the debug-split configuration dict; the keys read here are
    # "staticlibdir", "staticdir", "staticappend" and "srcdir".
    #
    # return a mapping of files:debugsources

    import stat
    # shutil is used by the copy2() call below but was never imported in this
    # function (and no file-level 'import shutil' exists), which raised a
    # NameError at runtime
    import shutil

    # Path relative to the package root, and where the debug-enabled copy
    # of the static library will live under PKGD
    src = file[len(dvar):]
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    dvar = d.getVar('PKGD')

    # Ensure the file is readable and writable; restored below.
    # NOTE(review): 'or os.access(file, os.R_OK)' looks inverted (any readable
    # file triggers the chmod); the extra chmod is harmless, behavior kept.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the unmodified item to the debug directory
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
461
def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html

    import subprocess

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    # Locate the previously-split debuginfo file for this binary
    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # minidebuginfo does not make sense to apply to ELF objects other than
    # executables and shared libraries, skip applying the minidebuginfo
    # generation for objects like kernel modules.
    for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
        if not line.strip().startswith("Type:"):
            continue
        elftype = line.split(":")[1].strip()
        if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
            bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
            return
        break

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        # strip the leading "  [ 1]" section index to allow splitting on space
        if ']' not in line:
            continue
        fields = line[line.index(']') + 1:].split()
        if len(fields) < 7:
            continue
        name = fields[0]
        type = fields[1]
        flags = fields[6]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        # 'A' in the flags column marks an allocated section
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    # Remove any stale output from a previous run
    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    # Strip (-S) debug sections, drop the extra sections found above and keep
    # only the non-dynamic function symbols collected in keep_symbols_file
    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    # 'xz --keep' writes minidebugfile.xz while leaving minidebugfile in place
    subprocess.check_call(['xz', '--keep', minidebugfile])

    # Embed the compressed symbol data into the original binary
    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
553
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # NOTE(review): relies on a module-level 'cpath' (CachedPath instance)
    # defined elsewhere in this class - confirm before reusing standalone.

    import stat
    import subprocess

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
           sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        # "<parent-of-parent>" and "<parent>/<base>" forms of S and WORKDIR,
        # used to strip the build-tree prefix from source paths
        sparentdir = os.path.dirname(os.path.dirname(sdir))
        sbasedir = os.path.basename(os.path.dirname(sdir)) + "/" + os.path.basename(sdir)
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # If S isnt based on WORKDIR we can infer our sources are located elsewhere,
        # e.g. using externalsrc; use S as base for our dirs
        if workdir in sdir or 'work-shared' in sdir:
            basedir = workbasedir
            parentdir = workparentdir
        else:
            basedir = sbasedir
            parentdir = sparentdir

        # If build path exists in sourcefile, it means toolchain did not use
        # -fdebug-prefix-map to compile
        if checkbuildpath(sourcefile, d):
            localsrc_prefix = parentdir + "/"
        else:
            localsrc_prefix = "/usr/src/debug/"

        # Create debugsrcdir under PKGD, remembering which levels we had to
        # create so empty ones can be removed again at the end
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        # Ignore files from the recipe sysroots (target and native)
        processdebugsrc =  "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        # Remove prefix in the source paths
        processdebugsrc += "sed 's#%s##g' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, basedir, localsrc_prefix, parentdir, dvar, debugsrcdir)
        try:
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            # Can "fail" if internal headers/transient sources are attempted
            pass

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                (dvar, debugsrcdir, dvar, debugsrcdir, parentdir, dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)


        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if thats the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                # Special case for /build since we need to move into
                # /usr/src/debug/build so rename sdir to build.build
                if sdir.find("/build") == 0:
                    cmd = "mv %s%s%s %s%s%s" % (dvar, debugsrcdir, "/build", dvar, debugsrcdir, "/build.build")
                    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
                    sdir = sdir.replace("/build", "/build.build", 1)

                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories!  Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
657
658#
659# Package data handling routines
660#
661
def get_package_mapping (pkg, basepkg, d, depversions=None):
    """
    Map a package name to its renamed (PKG:...) counterpart from pkgdata.

    Returns the rewritten name, or 'pkg' unchanged when no rename applies,
    when the rename would undo allarch/global-variant handling, or when the
    renamed package still RPROVIDES the original name.
    """
    import oe.packagedata

    data = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG:%s" % pkg

    if key in data:
        # Dynamically renamed packages break allarch packagegroups, as the
        # rename differs per architecture
        if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
            bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
        # Have to avoid undoing the write_extra_pkgs(global_variants...)
        if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
            and data[key] == basepkg:
            return pkg
        if depversions == []:
            # Avoid returning a mapping if the renamed package rprovides its original name
            rprovkey = "RPROVIDES:%s" % pkg
            if rprovkey in data:
                if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
                    bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
                    return pkg
        # Do map to rewritten package name
        return data[key]

    return pkg
686
def get_package_additional_metadata (pkg_type, d):
    """
    Return extra control/spec metadata fields for the given backend.

    Checks PACKAGE_ADD_METADATA_<PKGTYPE> first (e.g. ..._RPM), then the
    generic PACKAGE_ADD_METADATA. Returns the fields joined with newlines,
    or None (implicitly) when neither variable is set.
    """
    base_key = "PACKAGE_ADD_METADATA"
    for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
        if d.getVar(key, False) is None:
            continue
        d.setVarFlag(key, "type", "list")
        # Default the list separator to a literal "\n" marker
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
        return "\n".join(metadata_fields).strip()
697
def runtime_mapping_rename (varname, pkg, d):
    """
    Rewrite each dependency listed in 'varname' (e.g. RDEPENDS:<pkg>) to
    the package name it was renamed to, preserving version constraints.
    """
    #bb.note("%s before: %s" % (varname, d.getVar(varname)))

    remapped = {}
    deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
    for original, depversions in deps.items():
        mapped = get_package_mapping(original, pkg, d, depversions)
        if original != mapped:
            bb.note("package name mapping done: %s -> %s" % (original, mapped))
        remapped[mapped] = depversions

    d.setVar(varname, bb.utils.join_deps(remapped, commasep=False))

    #bb.note("%s after: %s" % (varname, d.getVar(varname)))
712
#
# Used by do_packagedata (and possibly other routines post do_package)
#

# Whether a PR service is configured; referenced from task signatures
PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}"
PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}"
package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
package_get_auto_pr[vardeps] += "PRSERV_ACTIVE"
python package_get_auto_pr() {
    import oe.prservice

    # Look up the do_package task unihash for 'pn' in the task dependency
    # data (only possible when we are not running do_package ourselves)
    def get_do_package_hash(pn):
        if d.getVar("BB_RUNTASK") != "do_package":
            taskdepdata = d.getVar("BB_TASKDEPDATA", False)
            for dep in taskdepdata:
                if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
                    return taskdepdata[dep][6]
        return None

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN')
    host = d.getVar("PRSERV_HOST_" + pn)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV")

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST'):
        d.setVar("PRSERV_PV_AUTOINC", "0")
        return

    auto_pr = None
    pv = d.getVar("PV")
    version = d.getVar("PRAUTOINX")
    pkgarch = d.getVar("PACKAGE_ARCH")
    checksum = get_do_package_hash(pn)

    # If do_package isn't in the dependencies, we can't get the checksum...
    if not checksum:
        bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
        #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
        #for dep in taskdepdata:
        #    bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
        return

    # Lockdown mode: PR values come from a previously exported file rather
    # than a live PR service
    if d.getVar('PRSERV_LOCKDOWN'):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            # Resolve the AUTOINC placeholder via the PR service as well
            if "AUTOINC" in pkgv:
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PRSERV_PV_AUTOINC", str(value))

            auto_pr = conn.getPR(version, pkgarch, checksum)
            conn.close()
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" %  str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
783
784#
785# Package functions suitable for inclusion in PACKAGEFUNCS
786#
787
788python package_convert_pr_autoinc() {
789    pkgv = d.getVar("PKGV")
790
791    # Adjust pkgv as necessary...
792    if 'AUTOINC' in pkgv:
793        d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
794
795    # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
796    d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
797    d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
798}
799
800LOCALEBASEPN ??= "${PN}"
801
python package_do_split_locales() {
    # Split locale files out of the main package into per-locale
    # ${LOCALEBASEPN}-locale-<ln> packages: appends them to PACKAGES and
    # sets FILES/RRECOMMENDS/RPROVIDES/SUMMARY/DESCRIPTION (and optionally
    # SECTION) for each.  Opt out with PACKAGE_NO_LOCALE = "1".
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # The catch-all -locale package is superseded by the per-locale packages
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    # Sorted so that PACKAGES ordering is deterministic across builds
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION:' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
}
856
857python perform_packagecopy () {
858    import subprocess
859    import shutil
860
861    dest = d.getVar('D')
862    dvar = d.getVar('PKGD')
863
864    # Start by package population by taking a copy of the installed
865    # files to operate on
866    # Preserve sparse files and hard links
867    cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
868    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
869
870    # replace RPATHs for the nativesdk binaries, to make them relocatable
871    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
872        rpath_replace (dvar, d)
873}
874perform_packagecopy[cleandirs] = "${PKGD}"
875perform_packagecopy[dirs] = "${PKGD}"
876
877# We generate a master list of directories to process, we start by
878# seeding this list with reasonable defaults, then load from
879# the fs-perms.txt files
python fixup_perms () {
    # Apply the fs-perms.txt permission tables to the PKGD tree: fix the
    # mode/owner/group of configured directories (optionally walking their
    # contents) and convert configured directories into symlinks.
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                oe.qa.handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid  = self._procuid(uid)
            self.gid  = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        # "-" (or an empty field) means "leave the mode alone" -> None
        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        # Map -1/None back to the "-" placeholder for display
        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        # Symlinks keep their own mode; only chmod real files/dirs
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        # NOTE(review): 'str' shadows the builtin name within this helper
        str = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                str += " %s" % bb.utils.which(bbpath, conf_file)
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return str



    dvar = d.getVar('PKGD')

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [    'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    # Later entries override earlier ones; a path can only be in one of
    # the two tables (perms vs link) at a time.
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    oe.qa.handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            oe.qa.handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        bb.utils.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
1116
def package_debug_vars(d):
    """Return the debug-split layout fragments selected by
    PACKAGE_DEBUG_SPLIT_STYLE.

    The dict carries the filename suffixes ("append"/"staticappend"),
    the per-directory debug subdirectories ("dir"/"staticdir"), the
    global debug library roots ("libdir"/"staticlibdir") and the debug
    sources directory ("srcdir").  The default is the original OE-core
    ".debug" style.
    """
    style = d.getVar('PACKAGE_DEBUG_SPLIT_STYLE')

    if style == 'debug-file-directory':
        # Single debug-file-directory style: everything under /usr/lib/debug
        return {
            "append": ".debug",
            "staticappend": "",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
            "srcdir": "/usr/src/debug",
        }

    # All remaining styles share the ".debug" subdirectory layout.
    layout = {
        "append": "",
        "staticappend": "",
        "dir": "/.debug",
        "staticdir": "/.debug-static",
        "libdir": "",
        "staticlibdir": "",
        "srcdir": "/usr/src/debug",
    }
    if style == 'debug-without-src':
        # ".debug" style, but without sources in /usr/src/debug
        layout["srcdir"] = ""
    # 'debug-with-srcpkg' and the default keep srcdir at /usr/src/debug.
    return layout
1164
python split_and_strip_files () {
    # Split debug information out of the ELF binaries and static libraries
    # in PKGD (into the layout selected by package_debug_vars) and then
    # strip the binaries, honouring INHIBIT_PACKAGE_STRIP,
    # INHIBIT_PACKAGE_DEBUG_SPLIT and related variables.
    import stat, errno
    import subprocess

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    dv = package_debug_vars(d)

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if dv["append"] and file.endswith(dv["append"]):
                    continue
                if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
                    continue

                if file in skipfiles:
                    continue

                if oe.package.is_static_lib(file):
                    staticlibs.append(file)
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check its an executable
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
                        or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) \
                        and (".so" in f or ".node" in f)) \
                        or (f.startswith('vmlinux') or ".ko" in f):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        # Determine which symlink targets are ELF files, in parallel.
        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            # elf_file is a bitmask from oe.package.is_elf; the checks below
            # read bit 0 as "is ELF" and bit 1 as "already stripped".
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        oe.qa.handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                    # break hardlink
                    bb.utils.break_hardlinks(file)
                    elffiles[file] = elf_file
                # Modified the file so clear the cache
                cpath.updatecache(file)

    # Strip the PKGD prefix from a path so it can be used as a stable key.
    def strip_pkgd_prefix(f):
        nonlocal dvar

        if f.startswith(dvar):
            return f[len(dvar):]

        return f

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))

        if dv["srcdir"] and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
            else:
                for file in staticlibs:
                    results.append( (file,source_info(file, d)) )

        # Map of PKGD-relative file -> sorted list of referenced sources
        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})

        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                fpath = dvar + dest
                ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                bb.utils.mkdirhier(os.path.dirname(fpath))
                # Only one hardlink of separated debug info file in each directory
                if not os.access(fpath, os.R_OK):
                    #bb.note("Link %s -> %s" % (fpath, ftarget))
                    os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + dv["dir"] + "/"
            ftarget += lbase + dv["append"]
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the dv["srcdir"] if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(dv["srcdir"], sources, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            for f in staticlibs:
                # 16 flags the entry as a static library to runstrip —
                # confirm against oe.package.runstrip before changing.
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

    # Build "minidebuginfo" and reinject it back into the stripped binaries
    if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
        oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
                                     extraargs=(dvar, dv, d))

    #
    # End of strip
    #
    os.chdir(oldcwd)
}
1388
1389python populate_packages () {
1390    import glob, re
1391
1392    workdir = d.getVar('WORKDIR')
1393    outdir = d.getVar('DEPLOY_DIR')
1394    dvar = d.getVar('PKGD')
1395    packages = d.getVar('PACKAGES').split()
1396    pn = d.getVar('PN')
1397
1398    bb.utils.mkdirhier(outdir)
1399    os.chdir(dvar)
1400
1401    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
1402
1403    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
1404
1405    # If debug-with-srcpkg mode is enabled then add the source package if it
1406    # doesn't exist and add the source file contents to the source package.
1407    if split_source_package:
1408        src_package_name = ('%s-src' % d.getVar('PN'))
1409        if not src_package_name in packages:
1410            packages.append(src_package_name)
1411        d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
1412
1413    # Sanity check PACKAGES for duplicates
1414    # Sanity should be moved to sanity.bbclass once we have the infrastructure
1415    package_dict = {}
1416
1417    for i, pkg in enumerate(packages):
1418        if pkg in package_dict:
1419            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1420            oe.qa.handle_error("packages-list", msg, d)
1421        # Ensure the source package gets the chance to pick up the source files
1422        # before the debug package by ordering it first in PACKAGES. Whether it
1423        # actually picks up any source files is controlled by
1424        # PACKAGE_DEBUG_SPLIT_STYLE.
1425        elif pkg.endswith("-src"):
1426            package_dict[pkg] = (10, i)
1427        elif autodebug and pkg.endswith("-dbg"):
1428            package_dict[pkg] = (30, i)
1429        else:
1430            package_dict[pkg] = (50, i)
1431    packages = sorted(package_dict.keys(), key=package_dict.get)
1432    d.setVar('PACKAGES', ' '.join(packages))
1433    pkgdest = d.getVar('PKGDEST')
1434
1435    seen = []
1436
1437    # os.mkdir masks the permissions with umask so we have to unset it first
1438    oldumask = os.umask(0)
1439
1440    debug = []
1441    for root, dirs, files in cpath.walk(dvar):
1442        dir = root[len(dvar):]
1443        if not dir:
1444            dir = os.sep
1445        for f in (files + dirs):
1446            path = "." + os.path.join(dir, f)
1447            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
1448                debug.append(path)
1449
1450    for pkg in packages:
1451        root = os.path.join(pkgdest, pkg)
1452        bb.utils.mkdirhier(root)
1453
1454        filesvar = d.getVar('FILES:%s' % pkg) or ""
1455        if "//" in filesvar:
1456            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1457            oe.qa.handle_error("files-invalid", msg, d)
1458            filesvar.replace("//", "/")
1459
1460        origfiles = filesvar.split()
1461        files, symlink_paths = files_from_filevars(origfiles)
1462
1463        if autodebug and pkg.endswith("-dbg"):
1464            files.extend(debug)
1465
1466        for file in files:
1467            if (not cpath.islink(file)) and (not cpath.exists(file)):
1468                continue
1469            if file in seen:
1470                continue
1471            seen.append(file)
1472
1473            def mkdir(src, dest, p):
1474                src = os.path.join(src, p)
1475                dest = os.path.join(dest, p)
1476                fstat = cpath.stat(src)
1477                os.mkdir(dest)
1478                os.chmod(dest, fstat.st_mode)
1479                os.chown(dest, fstat.st_uid, fstat.st_gid)
1480                if p not in seen:
1481                    seen.append(p)
1482                cpath.updatecache(dest)
1483
1484            def mkdir_recurse(src, dest, paths):
1485                if cpath.exists(dest + '/' + paths):
1486                    return
1487                while paths.startswith("./"):
1488                    paths = paths[2:]
1489                p = "."
1490                for c in paths.split("/"):
1491                    p = os.path.join(p, c)
1492                    if not cpath.exists(os.path.join(dest, p)):
1493                        mkdir(src, dest, p)
1494
1495            if cpath.isdir(file) and not cpath.islink(file):
1496                mkdir_recurse(dvar, root, file)
1497                continue
1498
1499            mkdir_recurse(dvar, root, os.path.dirname(file))
1500            fpath = os.path.join(root,file)
1501            if not cpath.islink(file):
1502                os.link(file, fpath)
1503                continue
1504            ret = bb.utils.copyfile(file, fpath)
1505            if ret is False or ret == 0:
1506                bb.fatal("File population failed")
1507
1508        # Check if symlink paths exist
1509        for file in symlink_paths:
1510            if not os.path.exists(os.path.join(root,file)):
1511                bb.fatal("File '%s' cannot be packaged into '%s' because its "
1512                         "parent directory structure does not exist. One of "
1513                         "its parent directories is a symlink whose target "
1514                         "directory is not included in the package." %
1515                         (file, pkg))
1516
1517    os.umask(oldumask)
1518    os.chdir(workdir)
1519
1520    # Handle excluding packages with incompatible licenses
1521    package_list = []
1522    for pkg in packages:
1523        licenses = d.getVar('_exclude_incompatible-' + pkg)
1524        if licenses:
1525            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
1526            oe.qa.handle_error("incompatible-license", msg, d)
1527        else:
1528            package_list.append(pkg)
1529    d.setVar('PACKAGES', ' '.join(package_list))
1530
1531    unshipped = []
1532    for root, dirs, files in cpath.walk(dvar):
1533        dir = root[len(dvar):]
1534        if not dir:
1535            dir = os.sep
1536        for f in (files + dirs):
1537            path = os.path.join(dir, f)
1538            if ('.' + path) not in seen:
1539                unshipped.append(path)
1540
1541    if unshipped != []:
1542        msg = pn + ": Files/directories were installed but not shipped in any package:"
1543        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1544            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1545        else:
1546            for f in unshipped:
1547                msg = msg + "\n  " + f
1548            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1549            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
1550            oe.qa.handle_error("installed-vs-shipped", msg, d)
1551}
# Ensure ${D} exists before populate_packages runs
populate_packages[dirs] = "${D}"
1553
python package_fixsymlinks () {
    """
    Resolve symlinks whose targets live in a different package: add an
    RDEPENDS from the link's package onto the providing package, and note
    any links whose target is shipped by no package at all.
    """
    import errno
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    # First pass: per package, collect the relative paths of everything it
    # ships and the resolved targets of links that don't exist inside it.
    pkg_files = {}
    dangling_links = {}
    for pkg in packages:
        pkg_files[pkg] = []
        dangling_links[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            relpath = path[len(inst_root):]
            pkg_files[pkg].append(relpath)
            target = cpath.realpath(path, inst_root, True, assume_dir=True)
            if not cpath.lexists(target):
                dangling_links[pkg].append(os.path.normpath(target[len(inst_root):]))

    # Second pass: find which other package (if any) provides each target.
    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for provider in packages:
                if l not in pkg_files[provider]:
                    continue
                bb.debug(1, "target found in %s" % provider)
                if provider != pkg:
                    newrdepends.setdefault(pkg, []).append(provider)
                break
            else:
                # No package ships the target at all.
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS.
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
        for provider in newrdepends[pkg]:
            if provider not in rdepends:
                rdepends[provider] = []
        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1597
1598
python package_package_name_hook() {
    """
    Default no-op package_name_hook.

    Classes may provide their own package_name_hook to rewrite package
    names by changing PKG; debian.bbclass is an example.
    """
    pass
}
1606
# Allow inheriting classes to override package_name_hook (see above)
EXPORT_FUNCTIONS package_name_hook
1608
1609
# Working area where emit_pkgdata writes the per-package metadata
PKGDESTWORK = "${WORKDIR}/pkgdata"

# Variables emitted into each runtime/<pkg> pkgdata file by emit_pkgdata
PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm"
1613
python emit_pkgdata() {
    """
    Save the packaging data into PKGDATA_DIR (PKGDESTWORK) for use in
    later packaging steps: a top-level ${PN} file listing PACKAGES, a
    runtime/<pkg> file per package containing the PKGDATA_VARS values and
    PKGSIZE, a compressed extended/<pkg>.json.zstd with per-file details,
    and symlinks for reverse and rprovides lookups.
    """
    from glob import glob
    import json
    import bb.compress.zstd

    def process_postinst_on_target(pkg, mlprefix):
        # Fold pkg_postinst_ontarget into pkg_postinst, prefixed by a
        # fragment that defers execution to first boot when $D is set
        # (i.e. when the scriptlet runs during rootfs construction).
        pkgval = d.getVar('PKG:%s' % pkg)
        if pkgval is None:
            pkgval = pkg

        defer_fragment = """
if [ -n "$D" ]; then
    $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
    exit 0
fi
""" % (pkgval, mlprefix)

        postinst = d.getVar('pkg_postinst:%s' % pkg)
        postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)

        if postinst_ontarget:
            bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += defer_fragment
            postinst += postinst_ontarget
            d.setVar('pkg_postinst:%s' % pkg, postinst)

    def add_set_e_to_scriptlets(pkg):
        # Prepend "set -e" to each maintainer scriptlet so any failing
        # command aborts the scriptlet.
        for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
            scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
            if scriptlet:
                scriptlet_split = scriptlet.split('\n')
                if scriptlet_split[0].startswith("#!"):
                    # Keep the shebang as the first line
                    scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
                else:
                    scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
            # NOTE: setVar runs even when scriptlet is None (stores None back)
            d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)

    def write_if_exists(f, pkg, var):
        # Write "VAR:pkg: value" if a package-specific value is set, else
        # "VAR: value" if the plain variable is set. Returns the value used.
        def encode(str):
            # Escape newlines etc. so each variable stays on one line
            import codecs
            c = codecs.getencoder("unicode_escape")
            return c(str)[0].decode("latin1")

        val = d.getVar('%s:%s' % (var, pkg))
        if val:
            f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var))
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        # Emit a <variant>-<pn> pkgdata file per multilib variant listing
        # the variant-prefixed package names.
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                            map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        # Emit runtime/<variant>-<pkg> files mapping variant-prefixed
        # package names back to the base package name.
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG:%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES')
    pkgdest = d.getVar('PKGDEST')
    pkgdatadir = d.getVar('PKGDESTWORK')

    data_file = pkgdatadir + d.expand("/${PN}")
    with open(data_file, 'w') as fd:
        fd.write("PACKAGES: %s\n" % packages)

    # Presumably a dict of {filepath: debug source list} set by the
    # split/strip code; falls back to [] when unset so the membership
    # test below is simply always false — TODO confirm against producer.
    pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []

    pn = d.getVar('PN')
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR')

    for pkg in packages.split():
        pkgval = d.getVar('PKG:%s' % pkg)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG:%s' % pkg, pkg)

        extended_data = {
            "files_info": {}
        }

        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        files_extra = {}
        total_size = 0
        # Track inodes so hardlinked files are only counted once in PKGSIZE
        seen = set()
        for f in pkgfiles[pkg]:
            fpath = os.sep + os.path.relpath(f, pkgdestpkg)

            fstat = os.lstat(f)
            files[fpath] = fstat.st_size

            extended_data["files_info"].setdefault(fpath, {})
            extended_data["files_info"][fpath]['size'] = fstat.st_size

            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total_size += fstat.st_size

            if fpath in pkgdebugsource:
                extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
                del pkgdebugsource[fpath]

        d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))

        process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
        add_set_e_to_scriptlets(pkg)

        # Write the per-package runtime pkgdata file
        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        with open(subdata_file, 'w') as sf:
            for var in (d.getVar('PKGDATA_VARS') or "").split():
                val = write_if_exists(sf, pkg, var)

            write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
            for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
                write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)

            write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
            for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
                write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)

            sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))

        # Per-file details go into a zstd-compressed JSON sidecar
        subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
        num_threads = int(d.getVar("BB_NUMBER_THREADS"))
        with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
            json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))

        # Symlinks needed for rprovides lookup
        rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
        if rprov:
            for p in bb.utils.explode_deps(rprov):
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY')
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        # Only mark the package as "packaged" if it has content or is
        # explicitly allowed to be empty
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

}
emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
# BB_NUMBER_THREADS only tunes zstd compression threads above, so it must
# not influence the task signature
emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
1794
# Shell fragment appended to pkg_postinst by package_do_shlibs: refresh the
# ldconfig cache, but only on the target ($D unset), never during rootfs
# construction. Kept byte-identical since it is embedded into packages.
ldconfig_postinst_fragment() {
if [ x"$D" = "x" ]; then
	if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
fi
}
1800
# rpmdeps invocation used by package_do_filedeps to extract per-file deps
RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
1802
1803# Collect perfile run-time dependency metadata
1804# Output:
1805#  FILERPROVIDESFLIST:pkg - list of all files w/ deps
1806#  FILERPROVIDES:filepath:pkg - per file dep
1807#
1808#  FILERDEPENDSFLIST:pkg - list of all files w/ deps
1809#  FILERDEPENDS:filepath:pkg - per file dep
1810
python package_do_filedeps() {
    """
    Collect per-file run-time dependency metadata via rpmdeps.

    Results land in FILERPROVIDES:<file>:<pkg> / FILERDEPENDS:<file>:<pkg>,
    with the affected files listed in FILER{PROVIDES,DEPENDS}FLIST:<pkg>.
    """
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    def chunks(files, n):
        # Batch the file list into slices of at most n entries.
        return [files[i:i+n] for i in range(0, len(files), n)]

    # Build the work list, skipping packages that carry no useful deps.
    pkglist = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
            continue
        if pkg.endswith(('-dbg', '-doc', '-src')) or pkg.startswith('kernel-module-') or \
                any(part in pkg for part in ('-locale-', '-localedata-', '-gconv-', '-charmap-')):
            continue
        for batch in chunks(pkgfiles[pkg], 100):
            pkglist.append((pkg, batch, rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)

    provides_files = {}
    requires_files = {}

    for (pkg, provides, requires) in processed:
        provides_files.setdefault(pkg, [])
        requires_files.setdefault(pkg, [])

        for fname in sorted(provides):
            provides_files[pkg].append(fname)
            d.appendVar("FILERPROVIDES:" + fname + ":" + pkg, " " + " ".join(provides[fname]))

        for fname in sorted(requires):
            requires_files[pkg].append(fname)
            d.appendVar("FILERDEPENDS:" + fname + ":" + pkg, " " + " ".join(requires[fname]))

    for pkg in requires_files:
        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
    for pkg in provides_files:
        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
}
1859
# Where shlib provider data from other recipes is read from, and where this
# recipe's provider data is written
SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1862
python package_do_shlibs() {
    """
    Look at the shared libraries generated and automatically add any
    dependencies found.

    For each package: record the sonames it provides (written to
    SHLIBSWORKDIR/<pkg>.list so other recipes can map a NEEDED entry to a
    package name) and resolve the sonames it needs into package
    dependencies (written to PKGDEST/<pkg>.shlibdeps, later merged by
    read_shlibdeps).  Adds an ldconfig postinst call for packages that
    install libraries into the standard library directory.
    """
    import fnmatch
    import itertools
    import re, pipes
    import subprocess

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    # Filter out packages the recipe explicitly excluded from shlib handling
    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        oe.qa.handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    def linux_so(file, pkg, pkgver, d):
        # Parse "objdump -p" output for one ELF file and return
        # (needs_ldconfig, needed, sonames, renames).
        needs_ldconfig = False
        needed = set()
        sonames = set()
        renames = []
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        # First pass: pick up the RPATH (with $ORIGIN resolved) so NEEDED
        # entries can later be matched against it.
        rpath = tuple()
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                # needed is a set, so duplicate NEEDED entries collapse
                needed.add((m.group(1), file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return (needs_ldconfig, needed, sonames, renames)

    def darwin_so(file, needed, sonames, renames, pkgver):
        # Record provided/needed libraries for a Mach-O object (darwin hosts)
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # If returned successfully, process stdout for results.
            # communicate() returns bytes (no text mode), so decode before
            # splitting on a str separator.
            if p.returncode == 0:
                for l in out.decode("utf-8").split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.decode("utf-8").split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name:
                    # needed[pkg] is a set, so duplicates collapse
                    needed[pkg].add((name, file, tuple()))

    def mingw_dll(file, needed, sonames, renames, pkgver):
        # Record provided/needed DLLs for a PE object (mingw hosts)
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    needed = {}

    shlib_provider = oe.package.read_shlib_providers(d)

    # Pass 1: work out what each package provides and needs
    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV:' + pkg)
        if not pkgver:
            # Fixed from 'PV_' + pkg: a leftover from the pre-":" override
            # syntax which could never match
            pkgver = d.getVar('PV:' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        renames = []
        linuxlist = []
        for file in pkgfiles[pkg]:
            if cpath.islink(file):
                continue
            if hostos == "darwin" or hostos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif hostos.startswith("mingw"):
                mingw_dll(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                linuxlist.append(file)

        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                renames.extend(r[3])
                needs_ldconfig = needs_ldconfig or ldconfig

        # Rename libraries to their soname if PACKAGE_SNAP_LIB_SYMLINKS set
        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            bb.utils.rename(old, new)
            pkgfiles[pkg].remove(old)

        # Register this package as the provider of its sonames
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst:%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst:%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    # Fold in providers declared via ASSUME_SHLIBS ("lib:pkg[_version]")
    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    # Pass 2: resolve each package's needed sonames into dependencies
    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                # Prefer the binary's RPATH entries, then registered
                # provider dirs, then the standard search path
                matches = set()
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        # Write the resolved dependencies for read_shlibdeps to pick up
        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')
}
2150
python package_do_pkgconfig () {
    """
    Keep track of which packages need and provide which pkg-config (.pc)
    modules.  Provided module names are written to SHLIBSWORKDIR/<pkg>.pclist;
    resolved inter-package requirements go to PKGDEST/<pkg>.pcdeps.
    """
    import re

    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')

    # Scan each package's .pc files, collecting provided module names and
    # their "Requires" fields (expanded with the .pc file's own variables).
    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for pcfile in sorted(pkgfiles[pkg]):
            m = pc_re.match(pcfile)
            if not m:
                continue
            # Fresh datastore to expand variables defined in this .pc file
            pd = bb.data.init()
            pkgconfig_provided[pkg].append(os.path.basename(m.group(1)))
            if not os.access(pcfile, os.R_OK):
                continue
            with open(pcfile, 'r') as f:
                lines = f.readlines()
            for line in lines:
                m = var_re.match(line)
                if m:
                    pd.setVar(m.group(1), pd.expand(m.group(2)))
                    continue
                m = field_re.match(line)
                if m:
                    hdr = m.group(1)
                    exp = pd.expand(m.group(2))
                    if hdr == 'Requires':
                        pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Publish this recipe's provided modules
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg]:
            with open(pkgs_file, 'w') as f:
                for p in sorted(pkgconfig_provided[pkg]):
                    f.write('%s\n' % p)

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for fname in sorted(os.listdir(dir)):
            m = re.match(r'^(.*)\.pclist$', fname)
            if not m:
                continue
            with open(os.path.join(dir, fname)) as fd:
                pkgconfig_provided[m.group(1)] = [l.rstrip() for l in fd.readlines()]

    # Resolve needed modules to providing packages and record the deps
    for pkg in packages.split():
        deps = []
        for needed_module in pkgconfig_needed[pkg]:
            found = False
            for provider in pkgconfig_provided.keys():
                if needed_module in pkgconfig_provided[provider]:
                    found = True
                    if provider != pkg and provider not in deps:
                        deps.append(provider)
            if not found:
                bb.note("couldn't find pkgconfig module '%s' in any package" % needed_module)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')
}
2232
def read_libdep_files(d):
    """Read the per-package library dependency files produced by the
    shlibs/pkgconfig/clilibs handling.

    Returns a dict mapping package name -> {dependency: [version constraints]},
    keeping the first constraint list seen for each dependency.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Bug fix: rstrip() returns a new string; the original
                    # discarded the result and passed the raw line through.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
2250
2251python read_shlibdeps () {
2252    pkglibdeps = read_libdep_files(d)
2253
2254    packages = d.getVar('PACKAGES').split()
2255    for pkg in packages:
2256        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
2257        for dep in sorted(pkglibdeps[pkg]):
2258            # Add the dep if it's not already there, or if no comparison is set
2259            if dep not in rdepends:
2260                rdepends[dep] = []
2261            for v in pkglibdeps[pkg][dep]:
2262                if v not in rdepends[dep]:
2263                    rdepends[dep].append(v)
2264        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
2265}
2266
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example:  If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages  = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes  = (d.getVar('DEPCHAIN_PRE') or '').split()

    # Add RRECOMMENDS to 'pkg' for each build-time dependency in 'depends',
    # mapped through 'getname' to the matching modified package name
    # (e.g. B -> B-dev). Used for the -dev suffix.
    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(depends):
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Strip an existing -dev/-dbg so we recommend e.g. B-dev,
            # not B-dev-dev.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    # As above but driven by run-time dependencies ('rdepends'); skips the
    # virtual-locale-* placeholder packages.
    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(rdepends):
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            # Strip an existing -dev/-dbg so the modifier is not doubled up.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    # Order-preserving de-duplicating append.
    def add_dep(list, dep):
        if dep not in list:
            list.append(dep)

    # Build-time dependencies of the whole recipe.
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    # Union of the run-time dependencies of every split package.
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Map each modifier to the packages carrying it:
    # pkgs[modifier][pkg] = (base package name, name-mangling function)
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    # -dbg packages default to recommending the -dbg variants of the
    # library dependencies recorded by read_libdep_files(), unless
    # DEPCHAIN_DBGDEFAULTDEPS (or packagegroup inheritance) opts into the
    # generic RDEPENDS-based behaviour below.
    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-package opt-out via the 'nodeprrecs' varflag.
            if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            # Single package with this modifier: use the union of all
            # packages' RDEPENDS; otherwise only the parent's RDEPENDS.
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
2388
2389# Since bitbake can't determine which variables are accessed during package
2390# iteration, we need to list them here:
2391PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA"
2392
def gen_packagevar(d, pkgvars="PACKAGEVARS"):
    """
    Expand the variable list named by *pkgvars* into every per-package
    variant (VAR:pkg for each pkg in PACKAGES) so that bitbake tracks
    them as dependencies of do_package. Returns a space-separated string.
    """
    pkg_names = (d.getVar("PACKAGES") or "").split()
    var_names = (d.getVar(pkgvars) or "").split()
    expanded = list(var_names)
    for pkg_name in pkg_names:
        expanded.extend("%s:%s" % (var_name, pkg_name) for var_name in var_names)
        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes.
        expanded.append('_exclude_incompatible-%s' % pkg_name)
    return " ".join(expanded)
2407
2408PACKAGE_PREPROCESS_FUNCS ?= ""
2409# Functions for setting up PKGD
2410PACKAGEBUILDPKGD ?= " \
2411                package_prepare_pkgdata \
2412                perform_packagecopy \
2413                ${PACKAGE_PREPROCESS_FUNCS} \
2414                split_and_strip_files \
2415                fixup_perms \
2416                "
2417# Functions which split PKGD up into separate packages
2418PACKAGESPLITFUNCS ?= " \
2419                package_do_split_locales \
2420                populate_packages"
2421# Functions which process metadata based on split packages
2422PACKAGEFUNCS += " \
2423                package_fixsymlinks \
2424                package_name_hook \
2425                package_do_filedeps \
2426                package_do_shlibs \
2427                package_do_pkgconfig \
2428                read_shlibdeps \
2429                package_depchains \
2430                emit_pkgdata"
2431
python do_package () {
    # Main packaging task: sanity-check the environment, build PKGD from D,
    # split PKGD into per-package trees under PKGDEST, then run the
    # metadata-processing PACKAGEFUNCS over the result.

    # Change the following version to cause sstate to invalidate the package
    # cache.  This is useful if an item this class depends on changes in a
    # way that the output of this class changes.  rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "4"

    # Init cachedpath
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES') or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dest = d.getVar('D')
    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        oe.qa.handle_error("var-undefined", msg, d)
        return

    bb.build.exec_func("package_convert_pr_autoinc", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Reset the path cache: the PKGD setup functions above changed the tree.
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES').split()
    pkgdest = d.getVar('PKGDEST')
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS') or '').split():
        bb.build.exec_func(f, d)

    # Fail the task now if any of the packaging functions raised QA errors.
    oe.qa.exit_if_errors(d)
}
2515
2516do_package[dirs] = "${SHLIBSWORKDIR} ${D}"
2517do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
2518addtask package after do_install
2519
2520SSTATETASKS += "do_package"
2521do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
2522do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
2523do_package_setscene[dirs] = "${STAGING_DIR}"
2524
python do_package_setscene () {
    # Restore do_package output from shared state instead of re-running it.
    sstate_setscene(d)
}
2528addtask do_package_setscene
2529
2530# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
2531# do_package_setscene and do_packagedata_setscene leading to races
python do_packagedata () {
    # Resolve the PR service revision before the pkgdata is published.
    bb.build.exec_func("package_get_auto_pr", d)

    # Hardlink-copy PKGDESTWORK into a private input directory; PKGDESTWORK
    # itself can be cleaned by both do_package_setscene and
    # do_packagedata_setscene, which would race with reading it here.
    src = d.expand("${PKGDESTWORK}")
    dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
    oe.path.copyhardlinktree(src, dest)

    # Replace @PRSERV_PV_AUTOINC@/@EXTENDPRAUTO@ placeholders with final values.
    bb.build.exec_func("packagedata_translate_pr_autoinc", d)
}
2541do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
2542
# Translate the EXTENDPRAUTO and AUTOINC to the final values
# (in-place sed over every file in the pkgdata staging input; the
# placeholders were written earlier by package_convert_pr_autoinc)
packagedata_translate_pr_autoinc() {
    find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
        sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
            -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
}
2549
2550addtask packagedata before do_build after do_package
2551
2552SSTATETASKS += "do_packagedata"
2553do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input"
2554do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
2555do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
2556
python do_packagedata_setscene () {
    # Restore do_packagedata output from shared state instead of re-running it.
    sstate_setscene(d)
}
2560addtask do_packagedata_setscene
2561
2562#
2563# Helper functions for the package writing classes
2564#
2565
def mapping_rename_hook(d):
    """
    Rewrite variables to account for package renaming in things
    like debian.bbclass or manual PKG variable name changes
    """
    pkg = d.getVar("PKG")
    for varname in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS"):
        runtime_mapping_rename(varname, pkg, d)
2575