xref: /OK3568_Linux_fs/yocto/poky/scripts/lib/scriptutils.py (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1# Script utility functions
2#
3# Copyright (C) 2014 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import argparse
9import glob
10import logging
11import os
12import random
13import shlex
14import shutil
15import string
16import subprocess
17import sys
18import tempfile
19import threading
20import importlib
21import importlib.machinery
22import importlib.util
23
class KeepAliveStreamHandler(logging.StreamHandler):
    """StreamHandler that periodically emits a "keepalive" log record.

    A daemon background thread watches a Condition; if no log record is
    emitted within the keepalive interval, it emits an INFO "Keepalive
    message" record itself.  Useful to stop CI systems / terminals from
    assuming a long-running quiet task has hung.
    """
    def __init__(self, keepalive=True, **kwargs):
        """Create the handler.

        keepalive: True selects the default interval of 5000; any other
                   truthy value is used directly as the timeout passed to
                   Condition.wait().
                   NOTE(review): Condition.wait() takes seconds, so the
                   5000 default looks like it was meant as milliseconds —
                   confirm intended units before relying on the default.
        kwargs:    passed through to logging.StreamHandler (e.g. stream=).
        """
        super().__init__(**kwargs)
        if keepalive is True:
            keepalive = 5000 # default timeout
        # Condition doubles as both the timer (wait with timeout) and the
        # reset/shutdown signal (notify from emit()/close())
        self._timeout = threading.Condition()
        self._stop = False

        # background thread waits on condition, if the condition does not
        # happen emit a keep alive message
        def thread():
            while not self._stop:
                with self._timeout:
                    # wait() returns False on timeout, True when notified
                    # (i.e. a real record was emitted, or we are closing)
                    if not self._timeout.wait(keepalive):
                        self.emit(logging.LogRecord("keepalive", logging.INFO,
                            None, None, "Keepalive message", None, None))

        # daemon=True so a forgotten close() cannot block interpreter exit
        self._thread = threading.Thread(target = thread, daemon = True)
        self._thread.start()

    def close(self):
        """Stop the keepalive thread, wait for it, then close the stream."""
        # mark the thread to stop and notify it
        self._stop = True
        with self._timeout:
            self._timeout.notify()
        # wait for it to join
        self._thread.join()
        super().close()

    def emit(self, record):
        """Emit a record and reset the keepalive timer."""
        super().emit(record)
        # trigger timer reset
        with self._timeout:
            self._timeout.notify()
58
def logger_create(name, stream=None, keepalive=None):
    """Create a logger with a plain "LEVEL: message" formatter attached.

    name:      logger name passed to logging.getLogger()
    stream:    stream for the handler (defaults to stderr via StreamHandler)
    keepalive: if not None, use a KeepAliveStreamHandler with this timeout
               instead of a plain StreamHandler
    Returns the configured logger (level set to INFO).
    """
    log = logging.getLogger(name)
    if keepalive is None:
        handler = logging.StreamHandler(stream=stream)
    else:
        handler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    return log
69
def logger_setup_color(logger, color='auto'):
    """Enable colour on the logger's BBLogFormatter-based stream handlers.

    color: 'always' forces colour on; 'auto' enables it only when the
           handler's stream is a tty; anything else leaves colour off.
    """
    from bb.msg import BBLogFormatter

    for handler in logger.handlers:
        if not isinstance(handler, logging.StreamHandler):
            continue
        if not isinstance(handler.formatter, BBLogFormatter):
            continue
        if color == 'always' or (color == 'auto' and handler.stream.isatty()):
            handler.formatter.enable_color()
78
79
def load_plugins(logger, plugins, pluginpath):
    """Import every not-yet-loaded *.py module in pluginpath into plugins.

    For each newly imported module, call its plugin_init(plugins) hook if
    present, then append the module to the plugins list (which is mutated
    in place).  Modules already present in plugins are skipped, as is
    __init__.py.
    """

    def _module_name(path):
        # strip directory and extension to get the importable module name
        return os.path.splitext(os.path.basename(path))[0]

    def _import_plugin(modname):
        logger.debug('Loading plugin %s' % modname)
        spec = importlib.machinery.PathFinder.find_spec(modname, path=[pluginpath])
        if not spec:
            # not found in pluginpath; caller appends None, matching the
            # original behaviour
            return None
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    already_loaded = [_module_name(p.__name__) for p in plugins]
    logger.debug('Loading plugins from %s...' % pluginpath)
    for path in glob.glob(os.path.join(pluginpath, '*.py')):
        modname = _module_name(path)
        if modname == '__init__' or modname in already_loaded:
            continue
        plugin = _import_plugin(modname)
        if hasattr(plugin, 'plugin_init'):
            plugin.plugin_init(plugins)
        plugins.append(plugin)
102
103
def git_convert_standalone_clone(repodir):
    """If specified directory is a git repository, ensure it's a standalone clone"""
    import bb.process
    gitdir = os.path.join(repodir, '.git')
    if not os.path.exists(gitdir):
        return
    alternatesfile = os.path.join(gitdir, 'objects', 'info', 'alternates')
    if os.path.exists(alternatesfile):
        # An alternates file means this was cloned with -s (shared object
        # store); repack so every object is local, then drop the reference
        # to the shared store
        bb.process.run('git repack -a', cwd=repodir)
        os.remove(alternatesfile)
114
def _get_temp_recipe_dir(d):
    """Find a writeable directory matched by BBFILES to drop a temp recipe in.

    This is a little bit hacky but we need to find a place where we can put
    the recipe so that bitbake can find it.  We're going to delete it at the
    end so it doesn't really matter where we put it.  A workspace/recipes
    location is preferred when one matches.  Returns None if nothing
    writeable was found.
    """
    result = None
    for entry in d.getVar('BBFILES').split():
        if not entry.endswith('.bb'):
            continue
        entrydir = os.path.dirname(entry)
        # BBFILES entries are typically <layer>/recipes-*/*/*.bb, so the
        # writability check is done two levels up from the glob directory
        grandparent = os.path.dirname(os.path.dirname(entrydir))
        if os.access(grandparent, os.W_OK):
            result = entrydir.replace('*', 'recipetool')
            if entrydir.endswith('workspace/recipes/*'):
                # Prefer the workspace
                break
    return result
130
class FetchUrlFailure(Exception):
    """Raised by fetch_url() when the requested URL could not be fetched."""

    def __init__(self, url):
        # URL that failed to fetch, kept for callers to inspect
        self.url = url

    def __str__(self):
        return "Failed to fetch URL %s" % self.url
136
def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
    """
    Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
    any dependencies that need to be satisfied in order to support the fetch
    operation will be taken care of

    tinfoil:      a prepared tinfoil instance (used for config data, recipe
                  parsing and building targets)
    srcuri:       SRC_URI value to fetch
    srcrev:       SRCREV value to use
    destdir:      directory to move the unpacked workdir contents into
    logger:       logger for progress/error output
    preserve_tmp: if True, keep the temporary directory and return its path
    mirrors:      if False (default), disable PREMIRRORS/MIRRORS in the
                  generated recipe so only the given URL is tried

    Returns (checksums, tmpdir) where checksums is a dict collected from any
    MissingChecksumEvent and tmpdir is the preserved temp dir path or None.
    Raises FetchUrlFailure if the build of the fetch/unpack tasks fails.
    Calls sys.exit(1) if no writeable temp recipe location can be found.
    """

    import bb

    checksums = {}
    fetchrecipepn = None

    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
    # we may have problems with the recipe-specific sysroot population
    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
    try:
        tmpworkdir = os.path.join(tmpdir, 'work')
        logger.debug('fetch_url: temp dir is %s' % tmpdir)

        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
        if not fetchrecipedir:
            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
            sys.exit(1)
        fetchrecipe = None
        bb.utils.mkdirhier(fetchrecipedir)
        try:
            # Generate a dummy recipe so we can follow more or less normal paths
            # for do_fetch and do_unpack
            # I'd use tempfile functions here but underscores can be produced by that and those
            # aren't allowed in recipe file names except to separate the version
            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
            # recipe name (PN) is the file name without the .bb extension
            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
            with open(fetchrecipe, 'w') as f:
                # We don't want to have to specify LIC_FILES_CHKSUM
                f.write('LICENSE = "CLOSED"\n')
                # We don't need the cross-compiler
                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
                # We don't have the checksums yet so we can't require them
                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
                f.write('SRC_URI = "%s"\n' % srcuri)
                f.write('SRCREV = "%s"\n' % srcrev)
                f.write('PV = "0.0+${SRCPV}"\n')
                f.write('WORKDIR = "%s"\n' % tmpworkdir)
                # Set S out of the way so it doesn't get created under the workdir
                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
                if not mirrors:
                    # We do not need PREMIRRORS since we are almost certainly
                    # fetching new source rather than something that has already
                    # been fetched. Hence, we disable them by default.
                    # However, we provide an option for users to enable it.
                    f.write('PREMIRRORS = ""\n')
                    f.write('MIRRORS = ""\n')

            logger.info('Fetching %s...' % srcuri)

            # FIXME this is too noisy at the moment

            # Parse recipes so our new recipe gets picked up
            tinfoil.parse_recipes()

            # Collect checksums reported by the fetcher so the caller can
            # plug them into a real recipe later
            def eventhandler(event):
                if isinstance(event, bb.fetch2.MissingChecksumEvent):
                    checksums.update(event.checksums)
                    return True
                return False

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(fetchrecipepn,
                                        'do_unpack',
                                        handle_events=True,
                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
                                        event_callback=eventhandler)
            if not res:
                raise FetchUrlFailure(srcuri)

            # Remove unneeded directories
            rd = tinfoil.parse_recipe(fetchrecipepn)
            if rd:
                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
                for pathvar in pathvars:
                    path = rd.getVar(pathvar)
                    if os.path.exists(path):
                        shutil.rmtree(path)
        finally:
            # Always clean up the temporary recipe, even on failure
            if fetchrecipe:
                try:
                    os.remove(fetchrecipe)
                except FileNotFoundError:
                    pass
            # Remove the recipe dir only if it is now empty - it may be a
            # shared location that already contained other recipes
            try:
                os.rmdir(fetchrecipedir)
            except OSError as e:
                import errno
                if e.errno != errno.ENOTEMPTY:
                    raise
            # NOTE(review): other OSErrors (e.g. EBUSY) would propagate here -
            # presumably intentional; confirm if broader tolerance is wanted

        bb.utils.mkdirhier(destdir)
        # Move everything that was unpacked into the workdir to the caller's
        # destination directory
        for fn in os.listdir(tmpworkdir):
            shutil.move(os.path.join(tmpworkdir, fn), destdir)

    finally:
        if not preserve_tmp:
            shutil.rmtree(tmpdir)
            # tmpdir is returned as None when it has been deleted
            tmpdir = None

    return checksums, tmpdir
247
248
def run_editor(fn, logger=None):
    """Open the user's preferred editor on one or more files.

    fn:     a single path (str) or an iterable of paths to edit
    logger: optional logger used to report a failed editor invocation;
            if None, the error is written to stderr instead (previously
            this raised AttributeError on logger.error when logger was
            left at its None default)

    The editor is taken from $VISUAL, falling back to $EDITOR, then 'vi'.
    Returns 0 on success, 1 if the editor exited with non-zero status.
    """
    if isinstance(fn, str):
        files = [fn]
    else:
        # copy so any iterable (not just a list) works with the + below
        files = list(fn)

    # VISUAL takes precedence over EDITOR by convention
    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
    try:
        return subprocess.check_call(shlex.split(editor) + files)
    except subprocess.CalledProcessError as exc:
        message = "Execution of '%s' failed: %s" % (editor, exc)
        if logger:
            logger.error(message)
        else:
            print(message, file=sys.stderr)
        return 1
262
def is_src_url(param):
    """
    Check if a parameter is a URL and return True if so
    NOTE: be careful about changing this as it will influence how devtool/recipetool command line handling works
    """
    if not param:
        return False
    if '://' in param:
        return True
    if param.startswith('git@'):
        return True
    # bare scp-style remote, e.g. user@host:path/repo.git
    return '@' in param and param.endswith('.git')
275
def filter_src_subdirs(pth):
    """
    Filter out subdirectories of initial unpacked source trees that we do not care about.
    Used by devtool and recipetool.
    """
    # fetcher/unpack bookkeeping entries that are not real source
    ignored = ('git.indirectionsymlink', 'source-date-epoch')
    return [entry for entry in os.listdir(pth) if entry not in ignored]
285