xref: /OK3568_Linux_fs/yocto/poky/meta/lib/oe/utils.py (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1#
2# SPDX-License-Identifier: GPL-2.0-only
3#
4
import multiprocessing
import os
import subprocess
import traceback
8
def read_file(filename):
    """
    Return the stripped contents of *filename*, or "" if it cannot be opened.

    Open errors are deliberately swallowed and reported as "": callers in the
    RDEPENDS handling cannot cope with an exception here.
    """
    try:
        f = open(filename, "r")
    except IOError:
        # WARNING: can't raise an error now because of the new RDEPENDS
        # handling. This is a bit ugly. :M:
        return ""
    # "with" guarantees the handle is closed even if read() raises
    with f:
        return f.read().strip()
19
def ifelse(condition, iftrue = True, iffalse = False):
    """Return *iftrue* when *condition* is truthy, otherwise *iffalse*."""
    return iftrue if condition else iffalse
25
def conditional(variable, checkvalue, truevalue, falsevalue, d):
    """Return *truevalue* if datastore variable equals *checkvalue*, else *falsevalue*."""
    matches = d.getVar(variable) == checkvalue
    return truevalue if matches else falsevalue
31
def vartrue(var, iftrue, iffalse, d):
    """Return *iftrue* if datastore variable *var* is boolean-true per oe.types, else *iffalse*."""
    import oe.types
    enabled = oe.types.boolean(d.getVar(var))
    return iftrue if enabled else iffalse
38
def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Numeric compare: *truevalue* if float(d[variable]) <= float(checkvalue), else *falsevalue*."""
    within = float(d.getVar(variable)) <= float(checkvalue)
    return truevalue if within else falsevalue
44
def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Version compare via bb.utils.vercmp_string: *truevalue* if d[variable] <= checkvalue."""
    order = bb.utils.vercmp_string(d.getVar(variable), checkvalue)
    return truevalue if order <= 0 else falsevalue
51
def both_contain(variable1, variable2, checkvalue, d):
    """
    Return the checkvalue items (space-joined) when every item is present in
    both expanded variables, otherwise "".
    """
    if isinstance(checkvalue, str):
        wanted = set(checkvalue.split())
    else:
        wanted = set(checkvalue)
    present1 = set(d.getVar(variable1).split())
    present2 = set(d.getVar(variable2).split())
    if wanted <= present1 and wanted <= present2:
        return " ".join(wanted)
    return ""
65
def set_intersect(variable1, variable2, d):
    """
    Expand both variables, interpret them as lists of strings, and return the
    intersection as a flattened string.

    For example:
    s1 = "a b c"
    s2 = "b c d"
    s3 = set_intersect(s1, s2)
    => s3 = "b c"
    """
    words1 = set(d.getVar(variable1).split())
    words2 = set(d.getVar(variable2).split())
    return " ".join(words1.intersection(words2))
80
def prune_suffix(var, suffixes, d):
    """
    Strip every listed suffix found at the end of *var*, then strip the
    multilib prefix (MLPREFIX) from the front if present.
    """
    for candidate in suffixes:
        if candidate and var.endswith(candidate):
            var = var[:-len(candidate)]

    mlprefix = d.getVar("MLPREFIX")
    if mlprefix and var.startswith(mlprefix):
        var = var[len(mlprefix):]

    return var
93
def str_filter(f, str, d):
    """Return the space-separated words of *str* whose start matches regex *f*."""
    import re
    kept = [word for word in str.split() if re.match(f, word)]
    return " ".join(kept)
97
def str_filter_out(f, str, d):
    """Return the space-separated words of *str* whose start does NOT match regex *f*."""
    import re
    kept = [word for word in str.split() if not re.match(f, word)]
    return " ".join(kept)
101
def build_depends_string(depends, task):
    """Append a taskname to a string of dependencies as used by the [depends] flag"""
    entries = ["%s:%s" % (dep, task) for dep in depends.split()]
    return " ".join(entries)
105
def inherits(d, *classes):
    """Return True if the metadata inherits any of the specified classes"""
    for cls in classes:
        if bb.data.inherits_class(cls, d):
            return True
    return False
109
def features_backfill(var, d):
    # Backfill newly-introduced features into the variable named by *var*
    # (e.g. "DISTRO_FEATURES") so that existing functionality is not silently
    # disabled for configurations that already set the variable. A feature
    # listed in <var>_BACKFILL is appended unless it is already present or the
    # distro opted out by listing it in <var>_BACKFILL_CONSIDERED.
    current = (d.getVar(var) or "").split()
    backfill = (d.getVar(var + "_BACKFILL") or "").split()
    considered = (d.getVar(var + "_BACKFILL_CONSIDERED") or "").split()

    missing = [feat for feat in backfill
               if feat not in current and feat not in considered]
    if missing:
        d.appendVar(var, " " + " ".join(missing))
130
def all_distro_features(d, features, truevalue="1", falsevalue=""):
    """
    Return *truevalue* if every one of the given features is set in
    DISTRO_FEATURES, otherwise *falsevalue*. The features can be given as a
    single string or anything that can be turned into a set.

    This is a shorter, more flexible version of
    bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d).

    Without explicit true/false values it can be used directly where
    Python expects a boolean:
       if oe.utils.all_distro_features(d, "foo bar"):
           bb.fatal("foo and bar are mutually exclusive DISTRO_FEATURES")

    With just a truevalue, it can be used to include files that are meant to be
    used only when requested via DISTRO_FEATURES:
       require ${@ oe.utils.all_distro_features(d, "foo bar", "foo-and-bar.inc")
    """
    result = bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d)
    return result
150
def any_distro_features(d, features, truevalue="1", falsevalue=""):
    """
    Return *truevalue* if at least one of the given features is set in
    DISTRO_FEATURES, otherwise *falsevalue*. The features can be given as a
    single string or anything that can be turned into a set.

    This is a shorter, more flexible version of
    bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d).

    Without explicit true/false values it can be used directly where
    Python expects a boolean:
       if not oe.utils.any_distro_features(d, "foo bar"):
           bb.fatal("foo, bar or both must be set in DISTRO_FEATURES")

    With just a truevalue, it can be used to include files that are meant to be
    used only when requested via DISTRO_FEATURES:
       require ${@ oe.utils.any_distro_features(d, "foo bar", "foo-or-bar.inc")
    """
    result = bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d)
    return result
171
def parallel_make(d, makeinst=False):
    """
    Return the integer value for the number of parallel threads to use when
    building, scraped out of PARALLEL_MAKE (or PARALLEL_MAKEINST when
    *makeinst* is set).

    If no parallelization option is found, returns the empty string '' (a
    false value) — note: NOT None, despite earlier documentation; callers
    such as parallel_make_argument() rely on truthiness.

    e.g. if PARALLEL_MAKE = "-j 10", this will return 10 as an integer.
    """
    varname = 'PARALLEL_MAKEINST' if makeinst else 'PARALLEL_MAKE'
    opts = (d.getVar(varname) or '').split()
    # look for '-j' and throw other options (e.g. '-l') away
    while opts:
        opt = opts.pop(0)
        if opt == '-j':
            # value is in the following word, e.g. "-j 10"
            return int(opts.pop(0))
        if opt.startswith('-j'):
            # value glued to the option, e.g. "-j10"
            return int(opt[2:].strip())

    return ''
197
def parallel_make_argument(d, fmt, limit=None, makeinst=False):
    """
    Helper utility to construct a parallel make argument from the number of
    parallel threads specified in PARALLEL_MAKE.

    Returns the input format string `fmt` where a single '%d' will be expanded
    with the number of parallel threads to use. If `limit` is specified, the
    number of parallel threads will be no larger than it. If no parallelization
    option is found in PARALLEL_MAKE, returns an empty string
    """
    threads = parallel_make(d, makeinst)
    if not threads:
        return ''
    if limit:
        threads = min(limit, threads)
    return fmt % threads
217
def packages_filter_out_system(d):
    """
    Return a list of packages from PACKAGES with the "system" packages such as
    PN-dbg PN-doc PN-locale-eb-gb removed.
    """
    pn = d.getVar('PN')
    system_pkgs = {pn + suffix for suffix in
                   ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')}
    locale_prefix = pn + "-locale-"
    # keep everything that is neither a system package nor a locale package
    return [pkg for pkg in d.getVar('PACKAGES').split()
            if pkg not in system_pkgs and locale_prefix not in pkg]
232
def getstatusoutput(cmd):
    """Run *cmd* via the shell; return the (exitcode, output) pair."""
    status, output = subprocess.getstatusoutput(cmd)
    return (status, output)
235
236
def trim_version(version, num_parts=2):
    """
    Return just the first <num_parts> of <version>, split by periods.  For
    example, trim_version("1.2.3", 2) will return "1.2".

    Raises TypeError if *version* is not a string and ValueError if
    *num_parts* is less than one.
    """
    # isinstance (not "type(...) is str") so str subclasses are accepted too
    if not isinstance(version, str):
        raise TypeError("Version should be a string")
    if num_parts < 1:
        raise ValueError("Cannot split to parts < 1")

    parts = version.split(".")
    return ".".join(parts[:num_parts])
250
def cpu_count(at_least=1, at_most=64):
    """Number of CPUs this process may run on, clamped to [at_least, at_most]."""
    affinity = os.sched_getaffinity(0)
    return max(min(len(affinity), at_most), at_least)
254
def execute_pre_post_process(d, cmds):
    """Execute each ';'-separated function name in *cmds* via bb.build.exec_func."""
    if cmds is None:
        return

    for name in cmds.strip().split(';'):
        name = name.strip()
        if not name:
            continue
        bb.note("Executing %s ..." % name)
        bb.build.exec_func(name, d)
264
# For each item in items, call the function 'target' with item as the first
# argument, extraargs as the other arguments and handle any exceptions in the
# parent thread
def multiprocess_launch(target, items, d, extraargs=None):
    """
    Run target(item, *extraargs) for each item in *items* in parallel child
    processes (at most BB_NUMBER_THREADS at once) and return the list of
    truthy results. If any child raises, remaining work is abandoned, all
    collected errors are reported via bb.fatal() in the parent.
    """

    class ProcessLaunch(multiprocessing.Process):
        # Process subclass that ships either (exception, traceback-string)
        # or (None, result) back to the parent over a Pipe.
        def __init__(self, *args, **kwargs):
            multiprocessing.Process.__init__(self, *args, **kwargs)
            self._pconn, self._cconn = multiprocessing.Pipe()
            self._exception = None
            self._result = None

        def run(self):
            # Executed in the child: send (None, result) on success or
            # (exception, formatted traceback) on failure.
            try:
                ret = self._target(*self._args, **self._kwargs)
                self._cconn.send((None, ret))
            except Exception as e:
                tb = traceback.format_exc()
                self._cconn.send((e, tb))

        def update(self):
            # Executed in the parent: drain the pipe if data is pending.
            # The second tuple element is the traceback on error, otherwise
            # the actual result.
            if self._pconn.poll():
                (e, tb) = self._pconn.recv()
                if e is not None:
                    self._exception = (e, tb)
                else:
                    self._result = tb

        @property
        def exception(self):
            self.update()
            return self._exception

        @property
        def result(self):
            self.update()
            return self._result

    # Cap concurrency at BB_NUMBER_THREADS, falling back to the host CPU count
    max_process = int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
    launched = []
    errors = []
    results = []
    items = list(items)
    # Keep going while there is work to start (and no errors yet) or while
    # started processes are still running.
    while (items and not errors) or launched:
        if not errors and items and len(launched) < max_process:
            args = (items.pop(),)
            if extraargs is not None:
                args = args + extraargs
            p = ProcessLaunch(target=target, args=args)
            p.start()
            launched.append(p)
        for q in launched:
            # Have to manually call update() to avoid deadlocks. The pipe can be full and
            # transfer stalled until we try and read the results object but the subprocess won't exit
            # as it still has data to write (https://bugs.python.org/issue8426)
            q.update()
            # The finished processes are joined when calling is_alive()
            if not q.is_alive():
                if q.exception:
                    errors.append(q.exception)
                if q.result:
                    results.append(q.result)
                launched.remove(q)
    # Paranoia doesn't hurt
    for p in launched:
        p.join()
    if errors:
        msg = ""
        for (e, tb) in errors:
            if isinstance(e, subprocess.CalledProcessError) and e.output:
                # CalledProcessError: show the captured subprocess output
                # instead of the (less useful) Python traceback
                msg = msg + str(e) + "\n"
                msg = msg + "Subprocess output:"
                msg = msg + e.output.decode("utf-8", errors="ignore")
            else:
                msg = msg + str(e) + ": " + str(tb) + "\n"
        bb.fatal("Fatal errors occurred in subprocesses:\n%s" % msg)
    return results
342
def squashspaces(string):
    """Collapse each run of whitespace in *string* to a single space and trim the ends."""
    import re
    collapsed = re.sub(r"\s+", " ", string)
    return collapsed.strip()
346
def rprovides_map(pkgdata_dir, pkg_dict):
    # Build a map of provided component -> list of providing packages by
    # scanning each package's runtime-reverse pkgdata file.
    rprov_map = {}

    for pkg in pkg_dict:
        pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
        if not os.path.isfile(pkgfile):
            continue
        with open(pkgfile) as f:
            for line in f:
                if not (line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES')):
                    continue
                # List all components provided by pkg.
                # Exclude version strings, i.e. those starting with (
                for prov in line.split()[1:]:
                    if prov.startswith('('):
                        continue
                    rprov_map.setdefault(prov, []).append(pkg)

    return rprov_map
368
def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
    """
    Render *pkg_dict* as newline-separated text. *ret_format* selects the
    fields per line: "arch", "file", "ver", "deps" (needs *pkgdata_dir* for
    RPROVIDES resolution), or None for bare package names.
    """
    pkgs = sorted(pkg_dict)
    lines = []

    if ret_format == "arch":
        lines = ["%s %s" % (p, pkg_dict[p]["arch"]) for p in pkgs]
    elif ret_format == "file":
        lines = ["%s %s %s" % (p, pkg_dict[p]["filename"], pkg_dict[p]["arch"]) for p in pkgs]
    elif ret_format == "ver":
        lines = ["%s %s %s" % (p, pkg_dict[p]["arch"], pkg_dict[p]["ver"]) for p in pkgs]
    elif ret_format == "deps":
        rprov_map = rprovides_map(pkgdata_dir, pkg_dict)
        for p in pkgs:
            for dep in pkg_dict[p]["deps"]:
                if dep in rprov_map:
                    # There could be multiple providers within the image
                    for provider in rprov_map[dep]:
                        lines.append("%s|%s * %s [RPROVIDES]" % (p, provider, dep))
                else:
                    lines.append("%s|%s" % (p, dep))
    else:
        lines = list(pkgs)

    text = '\n'.join(lines)
    if text:
        # make sure last line is newline terminated
        text += '\n'
    return text
402
403
# Helper function to get the host compiler version
# Do not assume the compiler is gcc
def get_host_compiler_version(d, taskcontextonly=False):
    """
    Return a (compiler, "major.minor") tuple for the host compiler (BUILD_CC).

    When *taskcontextonly* is set, return None unless running inside a
    bitbake worker task context (BB_WORKERCONTEXT == '1').
    Calls bb.fatal() if the compiler cannot be run or its version not parsed.
    """
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    # Get rid of ccache since it is not present when parsing.
    if compiler.startswith('ccache '):
        compiler = compiler[7:]
    try:
        env = os.environ.copy()
        # datastore PATH does not contain session PATH as set by environment-setup-...
        # this breaks the install-buildtools use-case
        # env["PATH"] = d.getVar("PATH")
        output = subprocess.check_output("%s --version" % compiler, \
                    shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    # First "--version" line is expected to contain "major.minor.patch";
    # capture just "major.minor".
    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return compiler, version
432
433
def host_gcc_version(d, taskcontextonly=False):
    """
    Return "-<major.minor>" when the host BUILD_CC reports version 4.8 or
    4.9, otherwise "". When *taskcontextonly* is set, return None unless in
    a bitbake worker task context (BB_WORKERCONTEXT == '1').
    """
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    # Get rid of ccache since it is not present when parsing.
    if compiler.startswith('ccache '):
        compiler = compiler[7:]

    env = os.environ.copy()
    env["PATH"] = d.getVar("PATH")
    try:
        output = subprocess.check_output("%s --version" % compiler,
                                         shell=True, env=env,
                                         stderr=subprocess.STDOUT).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    first_line = output.split('\n')[0]
    match = re.match(r".* (\d+\.\d+)\.\d+.*", first_line)
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return "-%s" % version if version in ("4.8", "4.9") else ""
458
459
def get_multilib_datastore(variant, d):
    """
    Return a copy of datastore *d* configured for multilib *variant*:
    OVERRIDES/MLPREFIX are set for the variant, or, for a falsy variant,
    any multilib configuration is stripped out.
    """
    localdata = bb.data.createCopy(d)
    if not variant:
        # restore the pre-multilib DEFAULTTUNE if one was recorded
        origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
        if origdefault:
            localdata.setVar("DEFAULTTUNE", origdefault)
        # drop any virtclass-multilib-* override
        kept = [o for o in localdata.getVar("OVERRIDES", False).split(":")
                if not o.startswith("virtclass-multilib-")]
        localdata.setVar("OVERRIDES", ":".join(kept))
        localdata.setVar("MLPREFIX", "")
    else:
        extended = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant
        localdata.setVar("OVERRIDES", extended)
        localdata.setVar("MLPREFIX", variant + "-")
    return localdata
475
476#
477# Python 2.7 doesn't have threaded pools (just multiprocessing)
478# so implement a version here
479#
480
481from queue import Queue
482from threading import Thread
483
class ThreadedWorker(Thread):
    """Thread executing tasks from a given tasks queue"""
    def __init__(self, tasks, worker_init, worker_end, name=None):
        # tasks: Queue of (func, args, kwargs) tuples to execute.
        # worker_init/worker_end: optional callables invoked with this worker
        # at the start of run() and when the queue is drained.
        Thread.__init__(self, name=name)
        self.tasks = tasks
        # daemon thread: does not keep the interpreter alive on exit
        self.daemon = True

        self.worker_init = worker_init
        self.worker_end = worker_end

    def run(self):
        """Consume tasks until the queue is empty, then run worker_end and exit."""
        from queue import Empty

        if self.worker_init is not None:
            self.worker_init(self)

        while True:
            try:
                # non-blocking get: an empty queue means this worker is done
                func, args, kargs = self.tasks.get(block=False)
            except Empty:
                if self.worker_end is not None:
                    self.worker_end(self)
                break

            try:
                # each task callable receives the worker itself as first argument
                func(self, *args, **kargs)
            except Exception as e:
                # Eat all exceptions
                bb.mainlogger.debug("Worker task raised %s" % e, exc_info=e)
            finally:
                # always mark the task done so Queue.join() can complete
                self.tasks.task_done()
515
class ThreadedPool:
    """Pool of threads consuming tasks from a queue"""
    def __init__(self, num_workers, num_tasks, worker_init=None, worker_end=None, name="ThreadedPool-"):
        # Bounded queue of pending (func, args, kwargs) tuples.
        self.tasks = Queue(num_tasks)
        # Workers are created here but only started by start().
        self.workers = [ThreadedWorker(self.tasks, worker_init, worker_end,
                                       name="%s%d" % (name, idx))
                        for idx in range(num_workers)]

    def start(self):
        """Start all worker threads."""
        for worker in self.workers:
            worker.start()

    def add_task(self, func, *args, **kargs):
        """Add a task to the queue"""
        self.tasks.put((func, args, kargs))

    def wait_completion(self):
        """Wait for completion of all the tasks in the queue"""
        self.tasks.join()
        for worker in self.workers:
            worker.join()
539
class ImageQAFailed(Exception):
    """Raised when an image QA check (function *name*) fails."""
    def __init__(self, description, name=None, logfile=None):
        self.description = description
        self.name = name
        self.logfile = logfile

    def __str__(self):
        msg = 'Function failed: %s' % self.name
        if self.description:
            msg += ' (%s)' % self.description
        return msg
552
def sh_quote(string):
    """Return *string* quoted so it is safe to embed in a shell command line."""
    import shlex
    quoted = shlex.quote(string)
    return quoted
556
def directory_size(root, blocksize=4096):
    """
    Calculate the size of the directory, taking into account hard links,
    rounding up every size to multiples of the blocksize.
    """
    import math

    seen_inodes = set()

    def block_rounded(size):
        # Round up to a whole number of blocks.
        return math.ceil(size / blocksize) * blocksize

    def unique_size(path):
        # Size of *path* (symlinks not followed); after the first occurrence
        # of an inode, further hard links to it count as zero.
        st = os.lstat(path)
        if st.st_ino in seen_inodes:
            return 0
        seen_inodes.add(st.st_ino)
        return st.st_size

    total = 0
    for dirpath, dirnames, filenames in os.walk(root):
        total += sum(block_rounded(unique_size(os.path.join(dirpath, fn)))
                     for fn in filenames)
        total += block_rounded(unique_size(dirpath))
    return total
587