# Copyright (C) 2012 Linux Foundation
# Author: Richard Purdie
# Some code and influence taken from srctree.bbclass:
# Copyright (C) 2009 Chris Larson <clarson@kergoth.com>
# Released under the MIT license (see COPYING.MIT for the terms)
#
# externalsrc.bbclass enables the use of an existing source tree, usually external to
# the build system, to build a piece of software rather than using the usual
# fetch/unpack/patch process.
#
# To use it, add externalsrc to the global inherit list and set EXTERNALSRC to point
# at the directory containing the sources you want to use, e.g. from local.conf for a
# recipe called "myrecipe" you would do:
#
# INHERIT += "externalsrc"
# EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree"
#
# In order to make this class work for both target and native versions (or with
# multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate
# directory under the work directory (i.e. split source and build directories). This
# is the default, but if circumstances dictate, the build directory can be set to the
# source directory by setting EXTERNALSRC_BUILD to the same value, e.g.:
#
# EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree"
#

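# SRCTREECOVEREDTASKS lists the tasks that the external source tree makes redundant;
# they are removed (deltask'd) below when EXTERNALSRC is set. do_fetch/do_unpack are
# kept if SRC_URI still contains local entries (file://, npmsw, crate or kmeta).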
SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
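# Symlinks created inside the external source tree to make the workdir and task logs
# easy to find; each entry is of the form <link name>:<target>. Set the variable to ""
# (e.g. EXTERNALSRC_SYMLINKS:pn-myrecipe = "") if you do not want the links created.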
EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"

python () {
    externalsrc = d.getVar('EXTERNALSRC')
    externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')

    if externalsrc and not externalsrc.startswith("/"):
        bb.error("EXTERNALSRC must be an absolute path")
    if externalsrcbuild and not externalsrcbuild.startswith("/"):
        bb.error("EXTERNALSRC_BUILD must be an absolute path")

    # If this is the base recipe and EXTERNALSRC is set for it or any of its
    # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
    # re-parsed so that the file-checksums function for do_compile is run every
    # time.
    bpn = d.getVar('BPN')
    classextend = (d.getVar('BBCLASSEXTEND') or '').split()
    if bpn == d.getVar('PN') or not classextend:
        if (externalsrc or
                ('native' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or
                ('nativesdk' in classextend and
                 d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or
                ('cross' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))):
            d.setVar('BB_DONT_CACHE', '1')

    if externalsrc:
        import oe.recipeutils
        import oe.path

        d.setVar('S', externalsrc)
        if externalsrcbuild:
            d.setVar('B', externalsrcbuild)
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}')

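        # Filter SRC_URI down to the entries that are still needed alongside an
        # external tree: file:// URLs, npmsw and crate entries, and kmeta-type
        # entries. Remote fetch URLs are dropped since the sources already exist
        # in EXTERNALSRC.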
        local_srcuri = []
        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
        for url in fetch.urls:
            url_data = fetch.ud[url]
            parm = url_data.parm
            if (url_data.type == 'file' or
                    url_data.type == 'npmsw' or url_data.type == 'crate' or
                    'type' in parm and parm['type'] == 'kmeta'):
                local_srcuri.append(url)

        d.setVar('SRC_URI', ' '.join(local_srcuri))

        # Dummy value because the default function can't be called with blank SRC_URI
        d.setVar('SRCPV', '999')

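        # Re-enable automake dependency tracking (clear --disable-dependency-tracking),
        # since the external tree is typically rebuilt incrementally and dependency
        # tracking is needed for changed sources to be picked up on rebuild.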
        if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
            d.setVar('CONFIGUREOPT_DEPTRACK', '')

        tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())

        for task in tasks:
            if task.endswith("_setscene"):
                # sstate is never going to work for external source trees, disable it
                bb.build.deltask(task, d)
            elif os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
                # Since configure will likely touch ${S}, take a lock so that only
                # one task has access to it at a time
                d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")

        for v in d.keys():
            cleandirs = d.getVarFlag(v, "cleandirs", False)
            if cleandirs:
                # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
                cleandirs = oe.recipeutils.split_var_value(cleandirs)
                setvalue = False
                for cleandir in cleandirs[:]:
                    if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
                        cleandirs.remove(cleandir)
                        setvalue = True
                if setvalue:
                    d.setVarFlag(v, 'cleandirs', ' '.join(cleandirs))

        fetch_tasks = ['do_fetch', 'do_unpack']
        # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
        # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
        d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])

        for task in d.getVar("SRCTREECOVEREDTASKS").split():
            if local_srcuri and task in fetch_tasks:
                continue
            bb.build.deltask(task, d)
            if task == 'do_unpack':
                # The reproducible build create_source_date_epoch_stamp function must
                # be run after the source is available and before the
                # do_deploy_source_date_epoch task.  In the normal case, it's attached
                # to do_unpack as a postfunc, but since we removed do_unpack (above)
                # we need to move the function elsewhere.  The easiest thing to do is
                # move it into the prefuncs of the do_deploy_source_date_epoch task.
                # This is safe, as externalsrc runs with the source already unpacked.
                d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')

        d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
        d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")

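        # Make the do_compile and do_configure task signatures depend on the contents
        # of the external tree via the file-checksums varflag; srctree_hash_files()
        # and srctree_configure_hash_files() below compute the file lists.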
        d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
        d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

        # We don't want the workdir to go away
        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

        bb.build.addtask('do_buildclean',
                         'do_clean' if d.getVar('S') == d.getVar('B') else None,
                         None, d)

        # If B=S, the same builddir is used even for different architectures.
        # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
        # changes to the do_configure task hash are correctly detected and stamps
        # are invalidated if e.g. MACHINE changes.
        if d.getVar('S') == d.getVar('B'):
            configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
            d.setVar('CONFIGURESTAMPFILE', configstamp)
            d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
            d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*')
}

python externalsrc_configure_prefunc() {
    s_dir = d.getVar('S')
    # Create desired symlinks
    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
    newlinks = []
    for symlink in symlinks:
        symsplit = symlink.split(':', 1)
        lnkfile = os.path.join(s_dir, symsplit[0])
        if len(symsplit) > 1:
            target = d.expand(symsplit[1])
            if os.path.islink(lnkfile):
                # Link already exists; leave it alone if it already points to the right location
                if os.readlink(lnkfile) == target:
                    continue
                os.unlink(lnkfile)
            elif os.path.exists(lnkfile):
                # File/dir exists with same name as link, just leave it alone
                continue
            os.symlink(target, lnkfile)
            newlinks.append(symsplit[0])
    # Hide the symlinks from git
    try:
        git_exclude_file = os.path.join(s_dir, '.git/info/exclude')
        if os.path.exists(git_exclude_file):
            with open(git_exclude_file, 'r+') as efile:
                # Strip newlines so the "already excluded" check below actually matches
                elines = [line.rstrip('\n') for line in efile.readlines()]
                for link in newlinks:
                    if link in elines or '/' + link in elines:
                        continue
                    efile.write('/' + link + '\n')
    except IOError as ioe:
        bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git: %s' % ioe)
}

python externalsrc_compile_prefunc() {
    # Make it obvious that this is happening, since forgetting about it could lead to much confusion
    bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC')))
}

do_buildclean[dirs] = "${S} ${B}"
do_buildclean[nostamp] = "1"
do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
externalsrc_do_buildclean() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		rm -f ${@' '.join([x.split(':')[0] for x in (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()])}
		if [ "${CLEANBROKEN}" != "1" ]; then
			oe_runmake clean || die "make failed"
		fi
	else
		bbnote "nothing to do - no makefile found"
	fi
}

def srctree_hash_files(d, srcdir=None):
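    """
    Return a file-checksums specification for do_compile covering the external
    source tree. If the tree is a git repository (other than the one containing
    TOPDIR), a tree hash of the working copy is written to a file inside .git
    and that file is returned; otherwise a glob over the whole tree is used.
    """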
    import shutil
    import subprocess
    import tempfile
    import hashlib

    s_dir = srcdir or d.getVar('EXTERNALSRC')
    git_dir = None

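    # Find out whether the external tree is its own git repository; if it merely
    # shares the repository that contains TOPDIR, treat it as non-git and fall
    # back to globbing the whole tree below.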
    try:
        git_dir = os.path.join(s_dir,
            subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        top_git_dir = os.path.join(d.getVar("TOPDIR"),
            subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        if git_dir == top_git_dir:
            git_dir = None
    except subprocess.CalledProcessError:
        pass

    ret = " "
    if git_dir is not None:
        oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1-%s' % d.getVar('PN'))
        with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
            # Clone index
            shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)
            # Update our custom index
            env = os.environ.copy()
            env['GIT_INDEX_FILE'] = tmp_index.name
            subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
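            # Hash submodule working trees as well so that changes inside submodules
            # also change the resulting checksum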
            if os.path.exists(os.path.join(s_dir, ".gitmodules")) and os.path.getsize(os.path.join(s_dir, ".gitmodules")) > 0:
                submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
                for line in submodule_helper.splitlines():
                    module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
                    if os.path.isdir(module_dir):
                        proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                        proc.communicate()
                        proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
                        stdout, _ = proc.communicate()
                        git_sha1 += stdout.decode("utf-8")
            sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
        with open(oe_hash_file, 'w') as fobj:
            fobj.write(sha1)
        ret = oe_hash_file + ':True'
    else:
        ret = s_dir + '/*:True'
    return ret

def srctree_configure_hash_files(d):
    """
    Get the list of files that should trigger do_configure to re-execute,
    based on the value of CONFIGURE_FILES
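
    Returns a space-separated list of "<path>:<exists>" entries suitable for
    the file-checksums varflag.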
254    """
255    in_files = (d.getVar('CONFIGURE_FILES') or '').split()
256    out_items = []
257    search_files = []
258    for entry in in_files:
259        if entry.startswith('/'):
260            out_items.append('%s:%s' % (entry, os.path.exists(entry)))
261        else:
262            search_files.append(entry)
263    if search_files:
264        s_dir = d.getVar('EXTERNALSRC')
265        for root, _, files in os.walk(s_dir):
266            for f in files:
267                if f in search_files:
268                    out_items.append('%s:True' % os.path.join(root, f))
269    return ' '.join(out_items)
270
271EXPORT_FUNCTIONS do_buildclean
272