xref: /OK3568_Linux_fs/yocto/poky/bitbake/lib/bb/utils.py (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1"""
2BitBake Utility Functions
3"""
4
5# Copyright (C) 2004 Michael Lauer
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import re, fcntl, os, string, stat, shutil, time
11import sys
12import errno
13import logging
14import bb
15import bb.msg
16import multiprocessing
17import fcntl
18import importlib
19import importlib.machinery
20import importlib.util
21import itertools
22import subprocess
23import glob
24import fnmatch
25import traceback
26import errno
27import signal
28import collections
29import copy
30import ctypes
31import random
32import tempfile
33from subprocess import getstatusoutput
34from contextlib import contextmanager
35from ctypes import cdll
36
# Module-level logger; every helper in this file reports through it.
logger = logging.getLogger("BitBake.Util")
# All python module/extension suffixes valid on this host (e.g. ".py", ".so").
python_extensions = importlib.machinery.all_suffixes()
39
40
def clean_context():
    """Return a fresh copy of the default evaluation context.

    The context maps the names made available to python code executed via
    better_exec()/better_eval(): the os, bb and time modules.
    """
    ctx = {}
    ctx["os"] = os
    ctx["bb"] = bb
    ctx["time"] = time
    return ctx
47
def get_context():
    """Return the shared module-level context dict used as the globals for better_exec/better_eval."""
    return _context
50
51
def set_context(ctx):
    """Replace the shared context dict used by better_exec/better_eval.

    The ``global`` declaration is required: without it the assignment
    only creates a function-local binding and the module-level
    ``_context`` is silently left untouched, so callers could never
    actually install a new context.
    """
    global _context
    _context = ctx
54
# Context used in better_exec, eval
# Initialised once at import time; shared via get_context()/set_context().
_context = clean_context()
57
class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found,
    e.g. a version constraint with a missing or unknown comparison operator."""
60
def explode_version(s):
    """Break a version string into a list of comparable (weight, value) tuples.

    Digit runs become (0, int), letter runs become (1, str), a '~' becomes
    (-1, '~') so it sorts before everything else, and any other single
    character becomes (2, char).
    """
    digits_re = re.compile(r'^(\d+)(.*)$')
    letters_re = re.compile(r'^([a-zA-Z]+)(.*)$')
    parts = []
    while s:
        head = s[0]
        if head in string.digits:
            m = digits_re.match(s)
            parts.append((0, int(m.group(1))))
            s = m.group(2)
        elif head in string.ascii_letters:
            m = letters_re.match(s)
            parts.append((1, m.group(1)))
            s = m.group(2)
        else:
            parts.append((-1, head) if head == '~' else (2, head))
            s = s[1:]
    return parts
82
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR).

    Returns a tuple (epoch, version, revision); epoch defaults to 0 and
    revision to "" when not present.  Leading/trailing comparison
    characters (' <>=') are stripped first.
    """
    s = s.strip(" <>=")
    epoch = 0
    if ':' in s:
        fields = s.split(":")
        epoch = int(fields[0])
        s = fields[1]
    revision = ""
    if '-' in s:
        # Split on the LAST dash: only the final component is the revision.
        s, _, revision = s.rpartition("-")
    return (epoch, s, revision)
96
def vercmp_part(a, b):
    """Compare two version fragments using explode_version() tuples.

    Returns -1, 0 or 1 if a sorts before, equal to or after b.  The
    shorter list is padded with (0, None), and a None value always sorts
    before a real one.
    """
    pad = (0, None)
    for (oa, ca), (ob, cb) in itertools.zip_longest(
            explode_version(a), explode_version(b), fillvalue=pad):
        # Different categories (digit/alpha/other/~) decide immediately.
        if oa != ob:
            return -1 if oa < ob else 1
        # Same category: an exhausted (padded) side sorts first.
        if ca is None:
            return -1
        if cb is None:
            return 1
        if ca != cb:
            return -1 if ca < cb else 1
    return 0
123
def vercmp(ta, tb):
    """Compare two (epoch, version, revision) tuples from split_version().

    Returns a negative, zero or positive number if ta is older than,
    equal to or newer than tb.
    """
    (ea, va, ra) = ta
    (eb, vb, rb) = tb

    # Epoch dominates; fall through to version, then revision.
    diff = int(ea or 0) - int(eb or 0)
    if diff == 0:
        diff = vercmp_part(va, vb)
    if diff == 0:
        diff = vercmp_part(ra, rb)
    return diff
134
def vercmp_string(a, b):
    """Split two version strings with split_version() and compare them with vercmp()."""
    return vercmp(split_version(a), split_version(b))
140
def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    # Map each accepted operator spelling to a predicate over the cmp result.
    checks = {
        '=':  lambda r: r == 0,
        '==': lambda r: r == 0,
        '<=': lambda r: r <= 0,
        '>=': lambda r: r >= 0,
        '>':  lambda r: r > 0,
        '>>': lambda r: r > 0,
        '<':  lambda r: r < 0,
        '<<': lambda r: r < 0,
        '!=': lambda r: r != 0,
    }
    res = vercmp_string(a, b)
    if op not in checks:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
    return checks[op](res)
162
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        # A '(' opens a version constraint; everything up to the closing
        # ')' token is skipped.
        if token.startswith('('):
            in_version = True
        if not in_version:
            deps.append(token)
        if in_version and token.endswith(')'):
            in_version = False
    return deps
186
def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.

    Each value is a list of "<op> <version>" strings; the list is empty
    when no version constraint was given.  Raises VersionStringException
    when a version specification has a missing or invalid operator.
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
            # str.strip() returns a new string; the result must be assigned
            # (the previous bare "i.strip()" call was a no-op).
            i = i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        # Plain dependency name token; start collecting constraints for it.
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
253
def explode_dep_versions(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    skip null value and items appeared in dependency string multiple times
    and return a dictionary of dependencies and versions.
    """
    r = explode_dep_versions2(s)
    for dep, constraints in r.items():
        if not constraints:
            # No version constraint at all -> None, not an empty list.
            r[dep] = None
        else:
            if len(constraints) > 1:
                bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values.  explode_dep_versions cannot cope with this." % (dep, s))
            r[dep] = constraints[0]
    return r
270
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string.

    Values may be None/empty (no version constraint), a single
    "<op> <version>" string, or a list of such strings.
    """
    result = []
    for dep, constraints in deps.items():
        if not constraints:
            result.append(dep)
            continue
        # Normalise a single constraint string to a one-element list.
        if not isinstance(constraints, list):
            constraints = [constraints]
        for constraint in constraints:
            result.append("%s (%s)" % (dep, constraint))
    separator = ", " if commasep else " "
    return separator.join(result)
289
def _print_trace(body, line):
    """
    Return formatted source lines of *body* surrounding 1-based *line*,
    marking the offending line with '***'.
    """
    context = []
    # Show up to four lines either side of the failing line.
    lo = max(1, line - 4)
    hi = min(line + 4, len(body))
    for num in range(lo, hi + 1):
        marker = ' *** ' if num == line else '     '
        context.append('%s%.4d:%s' % (marker, num, body[num - 1].rstrip()))
    return context
304
def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.

    Results are cached in bb.methodpool keyed on the source text.  On
    failure the error (with surrounding source context when a line number
    is available) is logged and bb.BBHandledException is raised.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        # Only SyntaxError and friends carry a lineno; other exceptions
        # (e.g. ValueError from compile()) must not crash the reporter.
        if hasattr(e, "lineno"):
            error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in reaflile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("Error in compiling python function in %s:\n" % realfile)
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e
340
def _print_exception(t, value, tb, realfile, text, context):
    """Format and log a detailed report for an exception raised while
    executing python code compiled from *text* (used by better_exec)."""
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # NOTE(review): this reads tb.tb_next rather than nexttb.tb_next,
            # so nexttb never advances; the loop is bounded only by the
            # len(tbextract) condition — confirm whether that is intended.
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        # Always emit whatever was collected, even if formatting itself failed.
        logger.error("\n".join(error))
393
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    code may be a source string or an already-compiled code object;
    context is used as the exec() locals on top of the shared global
    context.  If pythonexception is True the original exception is
    re-raised instead of being wrapped in bb.BBHandledException.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e2:
            # Never let the error reporter mask the original failure.
            logger.error("Exception handler error: %s" % str(e2))

        e = bb.BBHandledException(e)
        raise e
421
def simple_exec(code, context):
    """Execute *code* with the shared global context as globals and *context* as locals."""
    exec(code, get_context(), context)
424
def better_eval(source, locals, extraglobals = None):
    """Evaluate *source* against the shared context, optionally augmented
    with *extraglobals*; the shared context itself is never mutated."""
    ctx = get_context()
    if extraglobals:
        # Work on a shallow copy so the shared context stays pristine.
        ctx = copy.copy(ctx)
        ctx.update(extraglobals)
    return eval(source, ctx, locals)
432
@contextmanager
def fileslocked(files, *args, **kwargs):
    """Context manager for locking and unlocking file locks.

    Acquires a lock for every path in *files* (extra arguments are passed
    to bb.utils.lockfile) and releases them all on exit.
    """
    held = []
    if files:
        for name in files:
            lock = bb.utils.lockfile(name, *args, **kwargs)
            if lock is not None:
                held.append(lock)

    try:
        yield
    finally:
        for lock in held:
            bb.utils.unlockfile(lock)
448
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    basename = os.path.basename(name)
    if len(basename) > 255:
        # Keep within the common 255-char filename limit, preserving the
        # extension so related locks stay distinguishable.
        root, ext = os.path.splitext(basename)
        basename = root[:255 - len(ext)] + ext

    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    name = os.path.join(dirname, basename)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
                # Unrecoverable; the message's placeholders are the lock
                # name followed by the OS error text.
                logger.error("Unable to acquire lock '%s', %s",
                             name, e.strerror)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
514
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()

    The lock file is unlinked from disk before the flock is released so
    that any waiting process re-creates it cleanly (see the inode check
    in lockfile()).
    """
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()
528
def _hasher(method, filename):
    """Feed the contents of *filename* into hash object *method* and
    return the resulting hex digest.  The file is mmap'd to avoid
    double-buffering the data."""
    import mmap

    with open(filename, "rb") as f:
        try:
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                while True:
                    chunk = mm.read(8192)
                    if not chunk:
                        break
                    method.update(chunk)
        except ValueError:
            # mmap() of a zero-length file raises ValueError; an empty
            # file contributes nothing to the digest anyway.
            pass
    return method.hexdigest()
541
542
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib
    try:
        # usedforsecurity=False lets MD5 be used on FIPS-restricted hosts.
        digest = hashlib.new('MD5', usedforsecurity=False)
    except TypeError:
        # Some configurations don't appear to support two arguments
        digest = hashlib.new('MD5')
    return _hasher(digest, filename)
554
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib
    digest = hashlib.sha256()
    return _hasher(digest, filename)
562
def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib
    digest = hashlib.sha1()
    return _hasher(digest, filename)
569
def sha384_file(filename):
    """
    Return the hex string representation of the SHA384 checksum of the filename
    """
    import hashlib
    digest = hashlib.sha384()
    return _hasher(digest, filename)
576
def sha512_file(filename):
    """
    Return the hex string representation of the SHA512 checksum of the filename
    """
    import hashlib
    digest = hashlib.sha512()
    return _hasher(digest, filename)
583
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    names = ('BB_TASKHASH HOME LOGNAME PATH PWD '
             'SHELL USER LC_ALL BBSERVER')
    return names.split()
598
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    # BitBake's own control variables, plus everything that is re-exported.
    return [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_PASSTHROUGH',
        'BB_ENV_PASSTHROUGH_ADDITIONS',
    ] + preserved_envvars_exported()
608
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns a dict of the removed variables and their old values.
    """

    removed_vars = {}
    # Iterate over a snapshot since we mutate os.environ as we go.
    for key in list(os.environ):
        if key not in good_vars:
            removed_vars[key] = os.environ.pop(key)

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        logger.debug("Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
633
def approved_variables():
    """
    Determine and return the list of variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        # Everything currently in the environment is kept.
        return os.environ.keys()
    if 'BB_ENV_PASSTHROUGH' in os.environ:
        approved = os.environ['BB_ENV_PASSTHROUGH'].split()
        approved.append('BB_ENV_PASSTHROUGH')
    else:
        approved = preserved_envvars()
    if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
        approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
        if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
            approved.append('BB_ENV_PASSTHROUGH_ADDITIONS')
    return approved
652
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.

    Returns a dict of the removed variables (empty when BB_PRESERVE_ENV
    is set, in which case nothing is touched).
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return {}
    return filter_environment(approved_variables())
663
def empty_environment():
    """
    Remove all variables from the environment.
    """
    # Snapshot the keys first since we mutate os.environ while iterating.
    for name in list(os.environ.keys()):
        # unsetenv() clears the C-level environment as well; deleting from
        # os.environ keeps the python-level mapping in sync.
        os.unsetenv(name)
        del os.environ[name]
671
def build_environment(d):
    """
    Build an environment from all exported variables.

    Every variable in the datastore flagged "export" is written into
    os.environ (None values become "").
    """
    import bb.data
    for var in bb.data.keys(d):
        if d.getVarFlag(var, "export", False):
            os.environ[var] = d.getVar(var) or ""
681
def _check_unsafe_delete_path(path):
    """
    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
    the caller should raise an exception with an appropriate message.
    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
    with potentially disastrous results.
    """
    # HOME might not be /home/something, so in case we can get it, check against it
    homedir = os.environ.get('HOME', '')
    extra = '|%s' % homedir if homedir else ''
    if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
        return True
    return False
697
def remove(path, recurse=False, ionice=False):
    """Equivalent to rm -f or rm -rf"""
    if not path:
        return
    if recurse:
        # Refuse to recursively delete anything obviously catastrophic.
        for name in glob.glob(path):
            if _check_unsafe_delete_path(name):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
        # shutil.rmtree(name) would be ideal but its too slow
        cmd = ['ionice', '-c', '3'] if ionice else []
        subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except FileNotFoundError:
            # Already gone - that's fine for rm -f semantics.
            pass
718
def prunedir(topdir, ionice=False):
    """ Delete everything reachable from the directory named in 'topdir'. """
    # CAUTION:  This is dangerous!
    unsafe = _check_unsafe_delete_path(topdir)
    if unsafe:
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True, ionice=ionice)
724    remove(topdir, recurse=True, ionice=ionice)
725
726#
727# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
728# but thats possibly insane and suffixes is probably going to be small
729#
def prune_suffix(var, suffixes, d):
    """
    See if var ends with any of the suffixes listed and
    remove it if found
    """
    # First matching non-empty suffix wins, as in the original loop.
    matched = next((sfx for sfx in suffixes if sfx and var.endswith(sfx)), None)
    if matched:
        return var[:-len(matched)]
    return var
739
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    # exist_ok=True implements exactly the old EEXIST-and-isdir dance:
    # no error when the directory already exists, but still raises if the
    # path exists as a non-directory or cannot be created.
    os.makedirs(directory, exist_ok=True)
750
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns true on success and false on failure. Move is
    atomic.

    Returns the resulting mtime on success, None on failure.  newmtime
    overrides the preserved timestamp; sstat may be passed in to avoid a
    redundant lstat of src.
    """
    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("movefile: Stating source file failed: %s", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # Fall back to the parent directory so the same-device check
        # below still works when dest does not exist yet.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink rather than writing through it.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at dest, drop the original.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            logger.warning("movefile: failed to properly create symlink: %s -> %s: %s", dest, target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            bb.utils.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            if e.errno != errno.EXDEV:
                # Some random error.
                logger.warning("movefile: Failed to move %s to %s: %s", src, dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                bb.utils.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                logger.warning("movefile: copy %s -> %s failed: %s", src, dest, e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                logger.warning("movefile: Failed to move special file: '%s' to '%s': %s", src, dest, a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            logger.warning("movefile: Failed to chown/chmod/unlink %s: %s", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
843
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns true on success and false on failure.

    On success the return value is the resulting mtime (a truthy int or
    an os.stat_result for symlinks); on failure it is False.  newmtime
    overrides the preserved timestamp; sstat may be passed in to avoid a
    redundant lstat of src.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # Fall back to the parent directory's stat so dstat is always set
        # even when dest doesn't exist yet.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Replace an existing symlink rather than writing through it.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link itself instead of copying
        # the data it points at.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            bb.utils.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            # Restore the source's original mode and timestamps if we
            # temporarily relaxed them above.
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
923
def break_hardlinks(src, sstat = None):
    """
    Ensure src is the only hardlink pointing at its inode. Any other
    hardlinks keep their data (only their st_nlink count drops).
    Returns a true value on success and False on failure.
    """
    try:
        sstat = sstat or os.lstat(src)
    except Exception as e:
        logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
        return False
    # Already the only link - nothing to do
    if sstat[stat.ST_NLINK] == 1:
        return True
    # Copying the file over itself detaches it from the other names
    return copyfile(src, src, sstat=sstat)
940
def which(path, item, direction = 0, history = False, executable=False):
    """
    Locate `item` in the list of paths `path` (colon separated string like $PATH).
    If `direction` is non-zero then the list is reversed.
    If `history` is True then the list of candidates also returned as result,history.
    If `executable` is True then the candidate has to be an executable file,
    otherwise the candidate simply has to exist.
    """

    # Predicate deciding whether a candidate path is acceptable
    if executable:
        def is_candidate(p):
            return os.path.isfile(p) and os.access(p, os.X_OK)
    else:
        is_candidate = os.path.exists

    candidates = []
    entries = (path or "").split(':')
    if direction != 0:
        entries.reverse()

    for entry in entries:
        candidate = os.path.join(entry, item)
        # Record every path we try, for the optional history return
        candidates.append(candidate)
        if is_candidate(candidate):
            if not os.path.isabs(candidate):
                candidate = os.path.abspath(candidate)
            return (candidate, candidates) if history else candidate

    return ("", candidates) if history else ""
973
@contextmanager
def umask(new_mask):
    """
    Temporarily switch the process umask to new_mask, restoring the
    previous mask when the context exits (even on error).
    """
    saved_mask = os.umask(new_mask)
    try:
        yield
    finally:
        os.umask(saved_mask)
984
def to_boolean(string, default=None):
    """
    Interpret the input as a boolean.

    Returns `default` for empty/falsy input, zero-ness for ints, and maps
    common yes/no words for strings (case-insensitive). Raises ValueError
    for anything unrecognised.
    """
    if not string:
        return default

    if isinstance(string, int):
        return string != 0

    lowered = string.lower()
    if lowered in ("y", "yes", "1", "true"):
        return True
    if lowered in ("n", "no", "0", "false"):
        return False
    raise ValueError("Invalid value for to_boolean: %s" % string)
1003
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    truevalue -- the value to return if checkvalues is a subset of variable.

    falsevalue -- the value to return if variable is empty or if checkvalues is
    not a subset of variable.

    d -- the data store.
    """

    value = d.getVar(variable)
    if not value:
        return falsevalue
    present = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    # All wanted values must be present
    return truevalue if wanted.issubset(present) else falsevalue
1034
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains any values specified.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    truevalue -- the value to return if any of the checkvalues is found
    in the variable.

    falsevalue -- the value to return if variable is empty or if none of the
    checkvalues is found in the variable.

    d -- the data store.
    """
    value = d.getVar(variable)
    if not value:
        return falsevalue
    present = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    # Any overlap at all is enough
    return truevalue if wanted & present else falsevalue
1064
def filter(variable, checkvalues, d):
    """Return all words in the variable that are present in the checkvalues.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    d -- the data store.
    """

    value = d.getVar(variable)
    if not value:
        return ''
    words = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    # Intersection, returned as a sorted space-separated string
    return ' '.join(sorted(wanted & words))
1088
1089
def get_referenced_vars(start_expr, d):
    """
    :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level
    are ordered arbitrarily)
    """

    discovered = []
    visited = set()

    # The queue starts with the raw expression; every later entry is a
    # variable name whose value still needs to be scanned.
    pending = collections.deque([start_expr])
    first_entry = True

    # Expand against an empty datastore so only the variables *directly*
    # referenced by each expression are reported. expandWithRefs returns a
    # set, which is why the order is only "quasi"-BFS within a level.
    blank_data = bb.data.init()
    while pending:
        item = pending.popleft()
        if first_entry:
            # The initial entry is the expression itself, not a variable name
            first_entry = False
            expression = item
        else:
            expression = d.getVar(item, False)
            discovered.append(item)

        new_refs = blank_data.expandWithRefs(expression, None).references - visited
        pending.extend(new_refs)
        visited.update(new_refs)
    return discovered
1124
1125
def cpu_count():
    """Return the number of CPUs reported for this host."""
    return multiprocessing.cpu_count()
1128
def nonblockingfd(fd):
    """Switch the given file descriptor into non-blocking mode."""
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
1131
def process_profilelog(fn, pout = None):
    """
    Post-process cProfile output into a readable text report.

    fn may be a single profile filename, or a list of filenames (in which
    case pout must be supplied). pout is the report path, defaulting to
    fn + '.processed' for a single file.
    """
    import pstats

    if not pout:
        pout = fn + '.processed'

    with open(pout, 'w') as stream:
        if isinstance(fn, list):
            stats = pstats.Stats(*fn, stream=stream)
        else:
            stats = pstats.Stats(fn, stream=stream)
        # Report hot spots by own time first, then again by cumulative time
        stats.sort_stats('time')
        stats.print_stats()
        stats.print_callers()
        stats.sort_stats('cumulative')
        stats.print_stats()
        stream.flush()
1150
1151#
1152# Was present to work around multiprocessing pool bugs in python < 2.7.3
1153#
def multiprocessingpool(*args, **kwargs):
    """
    Create a multiprocessing.Pool whose IMapIterator.next() always supplies
    a (huge) timeout so that signals such as SIGINT/SIGTERM are delivered
    promptly instead of being deferred until a work item completes.
    All arguments are passed straight through to multiprocessing.Pool().
    """

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            # A blocking (timeout=None) wait would defer signal handling, so
            # substitute an effectively-infinite timeout instead.
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    # Patch the iterator class globally; affects all pools in this process
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)
1168
def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Build a context that holds every argument value under a known name,
    # plus the matching argument expressions for the generated call.
    context = {}
    call_args = []
    # Positional arguments become arg_1, arg_2, ...
    for index, value in enumerate(args, 1):
        name = 'arg_%s' % index
        context[name] = value
        call_args.append(name)
    # Keyword arguments are passed through under their own names
    context.update(kwargs)
    call_args.extend('%s=%s' % (kw, kw) for kw in kwargs)
    code = 'retval = %s(%s)' % (func, ', '.join(call_args))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
1191
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of :append, :prepend, :remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    # Compile one regex per requested variable/function name. Function names
    # end with '()' and match 'name() {' style definitions; everything else
    # matches 'name <op>= "' / 'name <op>= '' style assignments.
    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            # NOTE(review): in these raw strings '[ \\t]' is a class of
            # space, backslash and literal 't' - '[ \t]' (space/tab) was
            # probably intended. Preserved as-is to avoid behavior change.
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''   # first line of current assignment up to the opening quote
    varlines = []       # original lines of the current assignment (for restoring)
    newlines = []       # output lines accumulated so far
    in_var = None       # name of the variable currently being accumulated, or None
    full_value = ''     # accumulated (unquoted) value of the current variable
    var_end = ''        # delimiter terminating the current value ('}' or quote char)

    def handle_var_end():
        # Called once a variable's value has been fully accumulated: invoke
        # the callback and emit the (possibly rewritten) assignment.
        # Returns True if newlines should be considered modified.
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        # The callback may have appended/prepended lines itself
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            # Work out the indent string for multi-line values
            if isinstance(indent, int):
                if indent == -1:
                    # Line up under the opening quote of the assignment
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    # Set after a value has been changed/dropped; used for blank-line squashing
    checkspc = False

    for line in meta_lines:
        if in_var:
            # Continuation of a multi-line value or function body
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                # Drop the trailing line-continuation backslash
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    # Only the brace that balances the braces ends the function
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        # Closing delimiter is the same quote char that opened the value
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        # Value fits on a single line: process immediately
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
1378
1379
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as f:
        updated, newlines = edit_metadata(f, variables, varfunc)
    if not updated:
        return False
    # Only rewrite the file when something actually changed
    with open(meta_file, 'w') as f:
        f.writelines(newlines)
    return True
1394
1395
def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        # Normalise away a trailing path separator so comparisons match
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        # Canonical form used for comparing layer paths; '~' is only
        # expanded when HOME is an approved variable
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        # Accept None, a single path or a list of paths; return a list
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        # First pass: record the operators used and the original layer list
        # without changing anything
        bblayercalls.append(op)
        if op == '=':
            # A plain assignment resets anything accumulated so far
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        # Second pass: apply the requested removals, additions and edits
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    # fnmatch allows wildcards in the removal specification
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            # Give the callback a chance to rewrite each remaining entry
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                # Nothing left in this += assignment - drop it entirely
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    # Work out which requested additions are already present (and not about
    # to be removed) so they can be reported back via notadded
    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
1521
def get_collection_res(d):
    """
    Return a mapping of layer collection name to its BBFILE_PATTERN regex,
    covering every collection listed in BBFILE_COLLECTIONS.
    """
    collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
    return {c: d.getVar('BBFILE_PATTERN_%s' % c) or '' for c in collections}
1529
1530
def get_file_layer(filename, d, collection_res=None):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file

    Arguments:
        filename: path of the file to look up
        d: the data store
        collection_res: optional pre-computed mapping of collection name to
            BBFILE_PATTERN regex (as returned by get_collection_res()); if
            not supplied it is computed from the data store.
    Returns the collection name, or None if no pattern matches.
    """
    # Default was previously a mutable {} literal; use None to avoid the
    # shared-mutable-default pitfall (behavior is unchanged: any falsy value
    # triggers computation from the datastore).
    if not collection_res:
        collection_res = get_collection_res(d)

    def path_to_layer(path):
        # Use longest regex so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES_PRIORITIZED') or '').split()
    bbfilesmatch = False
    # Prefer matching via the BBFILES entry that would have found this recipe
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatchcase(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)
            break

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result
1560
1561
# Constant taken from http://linux.die.net/include/linux/prctl.h
# prctl() option: ask the kernel to signal this process when its parent dies
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    """Raised when a prctl() libc call returns a non-zero (failure) code."""
    pass
1567
def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    # http://linux.die.net/man/2/prctl
    signum = getattr(signal, signame)
    rc = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if rc != 0:
        raise PrCtlError('prctl failed with error code %s' % rc)
1577
1578#
1579# Manually call the ioprio syscall. We could depend on other libs like psutil
1580# however this gets us enough of what we need to bitbake for now without the
1581# dependency
1582#
# Machine hardware name from uname(); used to pick syscall numbers below
_unamearch = os.uname()[4]
# Constants for the ioprio_set(2) interface (see linux/ioprio.h)
IOPRIO_WHO_PROCESS = 1
IOPRIO_CLASS_SHIFT = 13
1586
def ioprio_set(who, cls, value):
    """
    Set the IO priority of a process via the raw ioprio_set() syscall,
    avoiding a dependency on external libraries such as psutil. Warns
    (via bb.warn) on architectures whose syscall number we don't know.
    """
    # Per-architecture syscall numbers for ioprio_set
    nr_ioprio_set = None
    if _unamearch == "x86_64":
        nr_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        nr_ioprio_set = 289
    elif _unamearch == "aarch64":
        nr_ioprio_set = 30

    if not nr_ioprio_set:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
        return

    # Pack the priority class and value into the single ioprio argument
    ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
    rc = cdll['libc.so.6'].syscall(nr_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
    if rc != 0:
        raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
1603
def set_process_name(name):
    """Best-effort: set this process's name (PR_SET_NAME) to aid debugging."""
    from ctypes import cdll, byref, create_string_buffer
    # Failures are deliberately ignored - this is nice to have, not essential
    try:
        buf = create_string_buffer(bytes(name, 'utf-8'))
        # 15 is PR_SET_NAME from linux/prctl.h
        cdll.LoadLibrary('libc.so.6').prctl(15, byref(buf), 0, 0, 0)
    except:
        pass
1613
def disable_network(uid=None, gid=None):
    """
    Disable networking in the current process if the kernel supports it, else
    just return after logging to debug. To do this we need to create a new user
    namespace, then map back to the original uid/gid.
    """
    libc = ctypes.CDLL('libc.so.6')

    # From sched.h
    # New user namespace
    CLONE_NEWUSER = 0x10000000
    # New network namespace
    CLONE_NEWNET = 0x40000000

    if uid is None:
        uid = os.getuid()
    if gid is None:
        gid = os.getgid()

    ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
    if ret != 0:
        # Fixed typo in the log message ("suport" -> "support")
        logger.debug("System doesn't support disabling network without admin privs")
        return
    # Map our original uid/gid into the new user namespace. Note: setgroups
    # must be set to "deny" before gid_map can be written by an unprivileged
    # process (see user_namespaces(7)).
    with open("/proc/self/uid_map", "w") as f:
        f.write("%s %s 1" % (uid, uid))
    with open("/proc/self/setgroups", "w") as f:
        f.write("deny")
    with open("/proc/self/gid_map", "w") as f:
        f.write("%s %s 1" % (gid, gid))
1643
def export_proxies(d):
    """ export common proxies variables from datastore to environment """

    proxy_vars = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                    'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
                    'GIT_PROXY_COMMAND', 'SSL_CERT_FILE', 'SSL_CERT_DIR']

    origenv = d.getVar("BB_ORIGENV")

    for name in proxy_vars:
        # Prefer the datastore value, falling back to the original environment
        value = d.getVar(name) or (origenv.getVar(name) if origenv else None)
        if value:
            os.environ[name] = value
1659
1660
1661
def load_plugins(logger, plugins, pluginpath):
    """
    Import every Python module found in pluginpath and append it (or the
    object returned by its plugin_init() hook, if any) to the plugins list.
    """
    def import_plugin(name):
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    logger.debug('Loading plugins from %s...' % pluginpath)

    # Collect every importable file, for any Python module suffix
    candidates = itertools.chain.from_iterable(
        glob.glob(os.path.join(pluginpath, '*' + ext))
        for ext in importlib.machinery.all_suffixes())
    names = {os.path.splitext(os.path.basename(fn))[0] for fn in candidates}
    for name in names:
        if name == '__init__':
            continue
        plugin = import_plugin(name)
        if hasattr(plugin, 'plugin_init'):
            obj = plugin.plugin_init(plugins)
            plugins.append(obj or plugin)
        else:
            plugins.append(plugin)
1685
1686
class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""

    def __init__(self):
        # Collect formatted records; handler level is WARNING
        self.messages = []
        super().__init__(logging.WARNING)

    def emit(self, record):
        self.messages.append(bb.build.logformatter.format(record))

    def contains(self, message):
        return message in self.messages
1696
def is_semver(version):
    """
        Is the version string following the semver semantic?

        https://semver.org/spec/v2.0.0.html
    """
    # Official semver 2.0.0 grammar: MAJOR.MINOR.PATCH[-prerelease][+build]
    pattern = re.compile(
    r"""
    ^
    (0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)
    (?:-(
        (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
        (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
    ))?
    (?:\+(
        [0-9a-zA-Z-]+
        (?:\.[0-9a-zA-Z-]+)*
    ))?
    $
    """, re.VERBOSE)

    return pattern.match(version) is not None
1722
# Wrapper around os.rename which can handle cross device problems
# e.g. from container filesystems
def rename(src, dst):
    """
    Rename ``src`` to ``dst``, falling back to shutil.move() when the
    two paths are on different filesystems (e.g. container overlays),
    where os.rename() raises EXDEV.

    Arguments:
    -  ``src``: source path
    -  ``dst``: destination path
    """
    try:
        os.rename(src, dst)
    except OSError as err:
        if err.errno == errno.EXDEV:
            # "Invalid cross-device link": os.rename() cannot cross
            # filesystem boundaries, so do a copy-and-delete move.
            shutil.move(src, dst)
        else:
            # Bare raise preserves the original traceback.
            raise
1734
@contextmanager
def environment(**envvars):
    """
    Context manager to selectively update the environment with the specified mapping.

    On exit each touched variable is restored to its previous value, or
    removed entirely if it did not exist beforehand.
    """
    saved = dict(os.environ)
    try:
        os.environ.update(envvars)
        yield
    finally:
        for name in envvars:
            if name in saved:
                os.environ[name] = saved[name]
            else:
                # Variable was newly introduced; drop it (it may already
                # be gone if the caller deleted it inside the block).
                os.environ.pop(name, None)
1749                del os.environ[var]
1750
def is_local_uid(uid=''):
    """
    Check whether uid is a local one or not.
    Can't use pwd module since it gets all UIDs, not local ones only.
    """
    # Default to the current process UID when none was supplied.
    target = str(uid or os.getuid())
    with open('/etc/passwd', 'r') as passwd:
        for entry in passwd:
            fields = entry.split(':')
            # passwd format: name:password:UID:GID:... — need field 3.
            if len(fields) >= 3 and fields[2] == target:
                return True
    return False
1766
def mkstemp(suffix=None, prefix=None, dir=None, text=False):
    """
    Generates a unique filename, independent of time.

    mkstemp() in glibc (at least) generates unique file names based on the
    current system time. When combined with highly parallel builds, and
    operating over NFS (e.g. shared sstate/downloads) this can result in
    conflicts and race conditions.

    This function adds additional entropy to the file name so that a collision
    is independent of time and thus extremely unlikely.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
    # 20 random characters appended to the prefix provide the extra entropy.
    entropy = "".join(random.choices(alphabet, k=20))
    base = prefix if prefix else tempfile.gettempprefix()
    return tempfile.mkstemp(suffix=suffix, prefix=base + entropy, dir=dir, text=text)
1785