xref: /OK3568_Linux_fs/yocto/poky/bitbake/lib/bb/cache.py (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun#
2*4882a593Smuzhiyun# BitBake Cache implementation
3*4882a593Smuzhiyun#
4*4882a593Smuzhiyun# Caching of bitbake variables before task execution
5*4882a593Smuzhiyun
6*4882a593Smuzhiyun# Copyright (C) 2006        Richard Purdie
7*4882a593Smuzhiyun# Copyright (C) 2012        Intel Corporation
8*4882a593Smuzhiyun
9*4882a593Smuzhiyun# but small sections based on code from bin/bitbake:
10*4882a593Smuzhiyun# Copyright (C) 2003, 2004  Chris Larson
11*4882a593Smuzhiyun# Copyright (C) 2003, 2004  Phil Blundell
12*4882a593Smuzhiyun# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13*4882a593Smuzhiyun# Copyright (C) 2005        Holger Hans Peter Freyther
14*4882a593Smuzhiyun# Copyright (C) 2005        ROAD GmbH
15*4882a593Smuzhiyun#
16*4882a593Smuzhiyun# SPDX-License-Identifier: GPL-2.0-only
17*4882a593Smuzhiyun#
18*4882a593Smuzhiyun
19*4882a593Smuzhiyunimport os
20*4882a593Smuzhiyunimport logging
21*4882a593Smuzhiyunimport pickle
22*4882a593Smuzhiyunfrom collections import defaultdict
23*4882a593Smuzhiyunfrom collections.abc import Mapping
24*4882a593Smuzhiyunimport bb.utils
25*4882a593Smuzhiyunfrom bb import PrefixLoggerAdapter
26*4882a593Smuzhiyunimport re
27*4882a593Smuzhiyun
# Module-level logger; child of the top-level "BitBake" logger.
logger = logging.getLogger("BitBake.Cache")

# Serialized cache format version; bumped whenever the pickled layout of the
# recipe info classes changes so stale on-disk caches are discarded and rebuilt.
__cache_version__ = "154"
31*4882a593Smuzhiyun
def getCacheFile(path, filename, mc, data_hash):
    """Return the on-disk cache file path for *filename* under *path*.

    The multiconfig name (when non-empty) and the configuration data hash
    are appended as dot-separated suffixes.
    """
    suffix = ".%s" % mc if mc else ''
    return os.path.join(path, "%s%s.%s" % (filename, suffix, data_hash))
37*4882a593Smuzhiyun
38*4882a593Smuzhiyun# RecipeInfoCommon defines common data retrieving methods
39*4882a593Smuzhiyun# from meta data for caches. CoreRecipeInfo as well as other
40*4882a593Smuzhiyun# Extra RecipeInfo needs to inherit this class
class RecipeInfoCommon(object):
    """Shared metadata-extraction helpers for recipe info cache classes.

    CoreRecipeInfo and any extra RecipeInfo classes inherit these
    classmethods to pull values out of a parsed datastore.
    """

    @classmethod
    def listvar(cls, var, metadata):
        # Whitespace-split the (expanded) value; unset vars give [].
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        # Unset or empty values count as 0.
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        # Parse a dependency string, handling optional version constraints.
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        # Per-package dependency lists, e.g. RDEPENDS:<pkg>.
        return {pkg: cls.depvar("%s:%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        # Per-task values, e.g. BB_BASEHASH:task-<name>.
        return {task: cls.getvar("%s:task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        # Map each variable to the given flag's value; with squash=True,
        # entries whose flag value is falsy are dropped.
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if not squash:
            return flags
        return {var: value for var, value in flags.items() if value}

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        # Normalise unset variables to the empty string.
        return metadata.getVar(var, expand) or ''
77*4882a593Smuzhiyun
78*4882a593Smuzhiyun
class CoreRecipeInfo(RecipeInfoCommon):
    """Core per-recipe information pickled into the bb_cache.dat file.

    For skipped recipes only the fields captured before the early return
    in __init__ are present (guarded by self.skipped).
    """
    __slots__ = ()

    # On-disk cache file this info class is stored in.
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        # Parse inputs used later for cache validity checks.
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides  = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        # Fall back to deriving PN from the filename when unset.
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Skipped recipes carry no task/dependency data.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends          = self.depvar('DEPENDS', metadata)
        self.rdepends         = self.depvar('RDEPENDS', metadata)
        self.rrecommends      = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg     = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg  = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        # Unexpanded on purpose: the inherit cache stores raw class paths.
        self.inherits         = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv      = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs     = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs     = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv    = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc    = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Initialise the CacheData attributes this info class populates."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's information into *cachedata* under key *fn*."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Fix: copy before extending.  The previous code aliased
        # self.rprovides, so the += below mutated the cached list in
        # place, permanently appending the per-package rprovides on
        # every call.
        rprovides = list(self.rprovides)
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
239*4882a593Smuzhiyun
def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    fn = virtualfn
    # Strip an "mc:<name>:" prefix first, recording the multiconfig name.
    if fn.startswith('mc:') and fn.count(':') >= 2:
        _, mc, fn = fn.split(':', 2)

    cls = ""
    # Then strip a "virtual:<class>:" prefix; the class part may itself
    # contain colons, so everything between the first and last colon is it.
    if fn.startswith('virtual:'):
        parts = fn.split(':')
        cls = ":".join(parts[1:-1])
        fn = parts[-1]

    return (fn, cls, mc)
258*4882a593Smuzhiyun
def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    # Apply the class prefix first so "mc:" always ends up outermost.
    virtualfn = realfn
    if cls:
        virtualfn = "virtual:%s:%s" % (cls, virtualfn)
    if mc:
        virtualfn = "mc:%s:%s" % (mc, virtualfn)
    return virtualfn
268*4882a593Smuzhiyun
def variant2virtual(realfn, variant):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    # The empty variant is the "real" recipe itself.
    if variant == "":
        return realfn
    if variant.startswith("mc:") and variant.count(':') >= 2:
        parts = variant.split(":")
        mc = parts[1]
        if parts[2]:
            # Multiconfig plus a virtual class suffix.
            return "mc:%s:virtual:%s:%s" % (mc, ":".join(parts[2:]), realfn)
        # Multiconfig only.
        return "mc:%s:%s" % (mc, realfn)
    return "virtual:%s:%s" % (variant, realfn)
281*4882a593Smuzhiyun
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """
    # Record which multiconfig this datastore belongs to.
    bb_data.setVar("__BBMULTICONFIG", mc)

    # Prime the mtime cache for the recipe's directory.
    recipe_dir = os.path.abspath(os.path.dirname(bbfile))
    bb.parse.cached_mtime_noerror(recipe_dir)

    if appends:
        bb_data.setVar('__BBAPPEND', " ".join(appends))
    return bb.parse.handle(bbfile, bb_data)
296*4882a593Smuzhiyun
297*4882a593Smuzhiyun
class NoCache(object):
    """Recipe parsing front-end with no on-disk caching."""

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug("Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """
        if virtonly:
            # Finalise only the requested variant within its multiconfig.
            realfn, virtual, mcname = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mcname].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            return parse_recipe(bb_data, realfn, appends, mcname)

        if mc is not None:
            # Parse against one specific multiconfig datastore.
            bb_data = self.databuilder.mcdata[mc].createCopy()
            return parse_recipe(bb_data, bbfile, appends, mc)

        # Parse against the default configuration first...
        datastores = parse_recipe(self.data.createCopy(), bbfile, appends)

        # ...then against every named multiconfig, namespacing those results.
        for mcname in self.databuilder.mcdata:
            if not mcname:
                continue
            mccopy = self.databuilder.mcdata[mcname].createCopy()
            newstores = parse_recipe(mccopy, bbfile, appends, mcname)
            for key in newstores:
                datastores["mc:%s:%s" % (mcname, key)] = newstores[key]

        return datastores
343*4882a593Smuzhiyun
344*4882a593Smuzhiyunclass Cache(NoCache):
345*4882a593Smuzhiyun    """
346*4882a593Smuzhiyun    BitBake Cache implementation
347*4882a593Smuzhiyun    """
348*4882a593Smuzhiyun    def __init__(self, databuilder, mc, data_hash, caches_array):
349*4882a593Smuzhiyun        super().__init__(databuilder)
350*4882a593Smuzhiyun        data = databuilder.data
351*4882a593Smuzhiyun
352*4882a593Smuzhiyun        # Pass caches_array information into Cache Constructor
353*4882a593Smuzhiyun        # It will be used later for deciding whether we
354*4882a593Smuzhiyun        # need extra cache file dump/load support
355*4882a593Smuzhiyun        self.mc = mc
356*4882a593Smuzhiyun        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
357*4882a593Smuzhiyun        self.caches_array = caches_array
358*4882a593Smuzhiyun        self.cachedir = data.getVar("CACHE")
359*4882a593Smuzhiyun        self.clean = set()
360*4882a593Smuzhiyun        self.checked = set()
361*4882a593Smuzhiyun        self.depends_cache = {}
362*4882a593Smuzhiyun        self.data_fn = None
363*4882a593Smuzhiyun        self.cacheclean = True
364*4882a593Smuzhiyun        self.data_hash = data_hash
365*4882a593Smuzhiyun        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
366*4882a593Smuzhiyun
367*4882a593Smuzhiyun        if self.cachedir in [None, '']:
368*4882a593Smuzhiyun            self.has_cache = False
369*4882a593Smuzhiyun            self.logger.info("Not using a cache. "
370*4882a593Smuzhiyun                             "Set CACHE = <directory> to enable.")
371*4882a593Smuzhiyun            return
372*4882a593Smuzhiyun
373*4882a593Smuzhiyun        self.has_cache = True
374*4882a593Smuzhiyun
375*4882a593Smuzhiyun    def getCacheFile(self, cachefile):
376*4882a593Smuzhiyun        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
377*4882a593Smuzhiyun
378*4882a593Smuzhiyun    def prepare_cache(self, progress):
379*4882a593Smuzhiyun        if not self.has_cache:
380*4882a593Smuzhiyun            return 0
381*4882a593Smuzhiyun
382*4882a593Smuzhiyun        loaded = 0
383*4882a593Smuzhiyun
384*4882a593Smuzhiyun        self.cachefile = self.getCacheFile("bb_cache.dat")
385*4882a593Smuzhiyun
386*4882a593Smuzhiyun        self.logger.debug("Cache dir: %s", self.cachedir)
387*4882a593Smuzhiyun        bb.utils.mkdirhier(self.cachedir)
388*4882a593Smuzhiyun
389*4882a593Smuzhiyun        cache_ok = True
390*4882a593Smuzhiyun        if self.caches_array:
391*4882a593Smuzhiyun            for cache_class in self.caches_array:
392*4882a593Smuzhiyun                cachefile = self.getCacheFile(cache_class.cachefile)
393*4882a593Smuzhiyun                cache_exists = os.path.exists(cachefile)
394*4882a593Smuzhiyun                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
395*4882a593Smuzhiyun                cache_ok = cache_ok and cache_exists
396*4882a593Smuzhiyun                cache_class.init_cacheData(self)
397*4882a593Smuzhiyun        if cache_ok:
398*4882a593Smuzhiyun            loaded = self.load_cachefile(progress)
399*4882a593Smuzhiyun        elif os.path.isfile(self.cachefile):
400*4882a593Smuzhiyun            self.logger.info("Out of date cache found, rebuilding...")
401*4882a593Smuzhiyun        else:
402*4882a593Smuzhiyun            self.logger.debug("Cache file %s not found, building..." % self.cachefile)
403*4882a593Smuzhiyun
404*4882a593Smuzhiyun        # We don't use the symlink, its just for debugging convinience
405*4882a593Smuzhiyun        if self.mc:
406*4882a593Smuzhiyun            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
407*4882a593Smuzhiyun        else:
408*4882a593Smuzhiyun            symlink = os.path.join(self.cachedir, "bb_cache.dat")
409*4882a593Smuzhiyun
410*4882a593Smuzhiyun        if os.path.exists(symlink):
411*4882a593Smuzhiyun            bb.utils.remove(symlink)
412*4882a593Smuzhiyun        try:
413*4882a593Smuzhiyun            os.symlink(os.path.basename(self.cachefile), symlink)
414*4882a593Smuzhiyun        except OSError:
415*4882a593Smuzhiyun            pass
416*4882a593Smuzhiyun
417*4882a593Smuzhiyun        return loaded
418*4882a593Smuzhiyun
419*4882a593Smuzhiyun    def cachesize(self):
420*4882a593Smuzhiyun        if not self.has_cache:
421*4882a593Smuzhiyun            return 0
422*4882a593Smuzhiyun
423*4882a593Smuzhiyun        cachesize = 0
424*4882a593Smuzhiyun        for cache_class in self.caches_array:
425*4882a593Smuzhiyun            cachefile = self.getCacheFile(cache_class.cachefile)
426*4882a593Smuzhiyun            try:
427*4882a593Smuzhiyun                with open(cachefile, "rb") as cachefile:
428*4882a593Smuzhiyun                    cachesize += os.fstat(cachefile.fileno()).st_size
429*4882a593Smuzhiyun            except FileNotFoundError:
430*4882a593Smuzhiyun                pass
431*4882a593Smuzhiyun
432*4882a593Smuzhiyun        return cachesize
433*4882a593Smuzhiyun
    def load_cachefile(self, progress):
        """Load every per-class cache file into self.depends_cache.

        Calls *progress* with the running byte offset so the caller can
        report progress.  Returns the number of distinct keys loaded, or
        0 (forcing a rebuild) on an unreadable header or any cache/bitbake
        version mismatch.
        """
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s' % cachefile)
            # NOTE: 'cachefile' is rebound here from the path string to the
            # open file object for the remainder of the loop body.
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file
                current_progress = 0
                # Entries are stored as alternating key/value pickles; EOF
                # (or any unpickling error) ends this file's loop via break.
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

                previous_progress += current_progress

        return len(self.depends_cache)
483*4882a593Smuzhiyun
484*4882a593Smuzhiyun    def parse(self, filename, appends):
485*4882a593Smuzhiyun        """Parse the specified filename, returning the recipe information"""
486*4882a593Smuzhiyun        self.logger.debug("Parsing %s", filename)
487*4882a593Smuzhiyun        infos = []
488*4882a593Smuzhiyun        datastores = self.load_bbfile(filename, appends, mc=self.mc)
489*4882a593Smuzhiyun        depends = []
490*4882a593Smuzhiyun        variants = []
491*4882a593Smuzhiyun        # Process the "real" fn last so we can store variants list
492*4882a593Smuzhiyun        for variant, data in sorted(datastores.items(),
493*4882a593Smuzhiyun                                    key=lambda i: i[0],
494*4882a593Smuzhiyun                                    reverse=True):
495*4882a593Smuzhiyun            virtualfn = variant2virtual(filename, variant)
496*4882a593Smuzhiyun            variants.append(variant)
497*4882a593Smuzhiyun            depends = depends + (data.getVar("__depends", False) or [])
498*4882a593Smuzhiyun            if depends and not variant:
499*4882a593Smuzhiyun                data.setVar("__depends", depends)
500*4882a593Smuzhiyun            if virtualfn == filename:
501*4882a593Smuzhiyun                data.setVar("__VARIANTS", " ".join(variants))
502*4882a593Smuzhiyun            info_array = []
503*4882a593Smuzhiyun            for cache_class in self.caches_array:
504*4882a593Smuzhiyun                info = cache_class(filename, data)
505*4882a593Smuzhiyun                info_array.append(info)
506*4882a593Smuzhiyun            infos.append((virtualfn, info_array))
507*4882a593Smuzhiyun
508*4882a593Smuzhiyun        return infos
509*4882a593Smuzhiyun
510*4882a593Smuzhiyun    def load(self, filename, appends):
511*4882a593Smuzhiyun        """Obtain the recipe information for the specified filename,
512*4882a593Smuzhiyun        using cached values if available, otherwise parsing.
513*4882a593Smuzhiyun
514*4882a593Smuzhiyun        Note that if it does parse to obtain the info, it will not
515*4882a593Smuzhiyun        automatically add the information to the cache or to your
516*4882a593Smuzhiyun        CacheData.  Use the add or add_info method to do so after
517*4882a593Smuzhiyun        running this, or use loadData instead."""
518*4882a593Smuzhiyun        cached = self.cacheValid(filename, appends)
519*4882a593Smuzhiyun        if cached:
520*4882a593Smuzhiyun            infos = []
521*4882a593Smuzhiyun            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
522*4882a593Smuzhiyun            info_array = self.depends_cache[filename]
523*4882a593Smuzhiyun            for variant in info_array[0].variants:
524*4882a593Smuzhiyun                virtualfn = variant2virtual(filename, variant)
525*4882a593Smuzhiyun                infos.append((virtualfn, self.depends_cache[virtualfn]))
526*4882a593Smuzhiyun        else:
527*4882a593Smuzhiyun            return self.parse(filename, appends, configdata, self.caches_array)
528*4882a593Smuzhiyun
529*4882a593Smuzhiyun        return cached, infos
530*4882a593Smuzhiyun
531*4882a593Smuzhiyun    def loadData(self, fn, appends, cacheData):
532*4882a593Smuzhiyun        """Load the recipe info for the specified filename,
533*4882a593Smuzhiyun        parsing and adding to the cache if necessary, and adding
534*4882a593Smuzhiyun        the recipe information to the supplied CacheData instance."""
535*4882a593Smuzhiyun        skipped, virtuals = 0, 0
536*4882a593Smuzhiyun
537*4882a593Smuzhiyun        cached, infos = self.load(fn, appends)
538*4882a593Smuzhiyun        for virtualfn, info_array in infos:
539*4882a593Smuzhiyun            if info_array[0].skipped:
540*4882a593Smuzhiyun                self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
541*4882a593Smuzhiyun                skipped += 1
542*4882a593Smuzhiyun            else:
543*4882a593Smuzhiyun                self.add_info(virtualfn, info_array, cacheData, not cached)
544*4882a593Smuzhiyun                virtuals += 1
545*4882a593Smuzhiyun
546*4882a593Smuzhiyun        return cached, skipped, virtuals
547*4882a593Smuzhiyun
548*4882a593Smuzhiyun    def cacheValid(self, fn, appends):
549*4882a593Smuzhiyun        """
550*4882a593Smuzhiyun        Is the cache valid for fn?
551*4882a593Smuzhiyun        Fast version, no timestamps checked.
552*4882a593Smuzhiyun        """
553*4882a593Smuzhiyun        if fn not in self.checked:
554*4882a593Smuzhiyun            self.cacheValidUpdate(fn, appends)
555*4882a593Smuzhiyun
556*4882a593Smuzhiyun        # Is cache enabled?
557*4882a593Smuzhiyun        if not self.has_cache:
558*4882a593Smuzhiyun            return False
559*4882a593Smuzhiyun        if fn in self.clean:
560*4882a593Smuzhiyun            return True
561*4882a593Smuzhiyun        return False
562*4882a593Smuzhiyun
563*4882a593Smuzhiyun    def cacheValidUpdate(self, fn, appends):
564*4882a593Smuzhiyun        """
565*4882a593Smuzhiyun        Is the cache valid for fn?
566*4882a593Smuzhiyun        Make thorough (slower) checks including timestamps.
567*4882a593Smuzhiyun        """
568*4882a593Smuzhiyun        # Is cache enabled?
569*4882a593Smuzhiyun        if not self.has_cache:
570*4882a593Smuzhiyun            return False
571*4882a593Smuzhiyun
572*4882a593Smuzhiyun        self.checked.add(fn)
573*4882a593Smuzhiyun
574*4882a593Smuzhiyun        # File isn't in depends_cache
575*4882a593Smuzhiyun        if not fn in self.depends_cache:
576*4882a593Smuzhiyun            self.logger.debug2("%s is not cached", fn)
577*4882a593Smuzhiyun            return False
578*4882a593Smuzhiyun
579*4882a593Smuzhiyun        mtime = bb.parse.cached_mtime_noerror(fn)
580*4882a593Smuzhiyun
581*4882a593Smuzhiyun        # Check file still exists
582*4882a593Smuzhiyun        if mtime == 0:
583*4882a593Smuzhiyun            self.logger.debug2("%s no longer exists", fn)
584*4882a593Smuzhiyun            self.remove(fn)
585*4882a593Smuzhiyun            return False
586*4882a593Smuzhiyun
587*4882a593Smuzhiyun        info_array = self.depends_cache[fn]
588*4882a593Smuzhiyun        # Check the file's timestamp
589*4882a593Smuzhiyun        if mtime != info_array[0].timestamp:
590*4882a593Smuzhiyun            self.logger.debug2("%s changed", fn)
591*4882a593Smuzhiyun            self.remove(fn)
592*4882a593Smuzhiyun            return False
593*4882a593Smuzhiyun
594*4882a593Smuzhiyun        # Check dependencies are still valid
595*4882a593Smuzhiyun        depends = info_array[0].file_depends
596*4882a593Smuzhiyun        if depends:
597*4882a593Smuzhiyun            for f, old_mtime in depends:
598*4882a593Smuzhiyun                fmtime = bb.parse.cached_mtime_noerror(f)
599*4882a593Smuzhiyun                # Check if file still exists
600*4882a593Smuzhiyun                if old_mtime != 0 and fmtime == 0:
601*4882a593Smuzhiyun                    self.logger.debug2("%s's dependency %s was removed",
602*4882a593Smuzhiyun                                         fn, f)
603*4882a593Smuzhiyun                    self.remove(fn)
604*4882a593Smuzhiyun                    return False
605*4882a593Smuzhiyun
606*4882a593Smuzhiyun                if (fmtime != old_mtime):
607*4882a593Smuzhiyun                    self.logger.debug2("%s's dependency %s changed",
608*4882a593Smuzhiyun                                         fn, f)
609*4882a593Smuzhiyun                    self.remove(fn)
610*4882a593Smuzhiyun                    return False
611*4882a593Smuzhiyun
612*4882a593Smuzhiyun        if hasattr(info_array[0], 'file_checksums'):
613*4882a593Smuzhiyun            for _, fl in info_array[0].file_checksums.items():
614*4882a593Smuzhiyun                fl = fl.strip()
615*4882a593Smuzhiyun                if not fl:
616*4882a593Smuzhiyun                    continue
617*4882a593Smuzhiyun                # Have to be careful about spaces and colons in filenames
618*4882a593Smuzhiyun                flist = self.filelist_regex.split(fl)
619*4882a593Smuzhiyun                for f in flist:
620*4882a593Smuzhiyun                    if not f:
621*4882a593Smuzhiyun                        continue
622*4882a593Smuzhiyun                    f, exist = f.rsplit(":", 1)
623*4882a593Smuzhiyun                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
624*4882a593Smuzhiyun                        self.logger.debug2("%s's file checksum list file %s changed",
625*4882a593Smuzhiyun                                             fn, f)
626*4882a593Smuzhiyun                        self.remove(fn)
627*4882a593Smuzhiyun                        return False
628*4882a593Smuzhiyun
629*4882a593Smuzhiyun        if tuple(appends) != tuple(info_array[0].appends):
630*4882a593Smuzhiyun            self.logger.debug2("appends for %s changed", fn)
631*4882a593Smuzhiyun            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
632*4882a593Smuzhiyun            self.remove(fn)
633*4882a593Smuzhiyun            return False
634*4882a593Smuzhiyun
635*4882a593Smuzhiyun        invalid = False
636*4882a593Smuzhiyun        for cls in info_array[0].variants:
637*4882a593Smuzhiyun            virtualfn = variant2virtual(fn, cls)
638*4882a593Smuzhiyun            self.clean.add(virtualfn)
639*4882a593Smuzhiyun            if virtualfn not in self.depends_cache:
640*4882a593Smuzhiyun                self.logger.debug2("%s is not cached", virtualfn)
641*4882a593Smuzhiyun                invalid = True
642*4882a593Smuzhiyun            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
643*4882a593Smuzhiyun                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
644*4882a593Smuzhiyun                invalid = True
645*4882a593Smuzhiyun
646*4882a593Smuzhiyun        # If any one of the variants is not present, mark as invalid for all
647*4882a593Smuzhiyun        if invalid:
648*4882a593Smuzhiyun            for cls in info_array[0].variants:
649*4882a593Smuzhiyun                virtualfn = variant2virtual(fn, cls)
650*4882a593Smuzhiyun                if virtualfn in self.clean:
651*4882a593Smuzhiyun                    self.logger.debug2("Removing %s from cache", virtualfn)
652*4882a593Smuzhiyun                    self.clean.remove(virtualfn)
653*4882a593Smuzhiyun            if fn in self.clean:
654*4882a593Smuzhiyun                self.logger.debug2("Marking %s as not clean", fn)
655*4882a593Smuzhiyun                self.clean.remove(fn)
656*4882a593Smuzhiyun            return False
657*4882a593Smuzhiyun
658*4882a593Smuzhiyun        self.clean.add(fn)
659*4882a593Smuzhiyun        return True
660*4882a593Smuzhiyun
661*4882a593Smuzhiyun    def remove(self, fn):
662*4882a593Smuzhiyun        """
663*4882a593Smuzhiyun        Remove a fn from the cache
664*4882a593Smuzhiyun        Called from the parser in error cases
665*4882a593Smuzhiyun        """
666*4882a593Smuzhiyun        if fn in self.depends_cache:
667*4882a593Smuzhiyun            self.logger.debug("Removing %s from cache", fn)
668*4882a593Smuzhiyun            del self.depends_cache[fn]
669*4882a593Smuzhiyun        if fn in self.clean:
670*4882a593Smuzhiyun            self.logger.debug("Marking %s as unclean", fn)
671*4882a593Smuzhiyun            self.clean.remove(fn)
672*4882a593Smuzhiyun
673*4882a593Smuzhiyun    def sync(self):
674*4882a593Smuzhiyun        """
675*4882a593Smuzhiyun        Save the cache
676*4882a593Smuzhiyun        Called from the parser when complete (or exiting)
677*4882a593Smuzhiyun        """
678*4882a593Smuzhiyun
679*4882a593Smuzhiyun        if not self.has_cache:
680*4882a593Smuzhiyun            return
681*4882a593Smuzhiyun
682*4882a593Smuzhiyun        if self.cacheclean:
683*4882a593Smuzhiyun            self.logger.debug2("Cache is clean, not saving.")
684*4882a593Smuzhiyun            return
685*4882a593Smuzhiyun
686*4882a593Smuzhiyun        for cache_class in self.caches_array:
687*4882a593Smuzhiyun            cache_class_name = cache_class.__name__
688*4882a593Smuzhiyun            cachefile = self.getCacheFile(cache_class.cachefile)
689*4882a593Smuzhiyun            self.logger.debug2("Writing %s", cachefile)
690*4882a593Smuzhiyun            with open(cachefile, "wb") as f:
691*4882a593Smuzhiyun                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
692*4882a593Smuzhiyun                p.dump(__cache_version__)
693*4882a593Smuzhiyun                p.dump(bb.__version__)
694*4882a593Smuzhiyun
695*4882a593Smuzhiyun                for key, info_array in self.depends_cache.items():
696*4882a593Smuzhiyun                    for info in info_array:
697*4882a593Smuzhiyun                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
698*4882a593Smuzhiyun                            p.dump(key)
699*4882a593Smuzhiyun                            p.dump(info)
700*4882a593Smuzhiyun
701*4882a593Smuzhiyun        del self.depends_cache
702*4882a593Smuzhiyun
703*4882a593Smuzhiyun    @staticmethod
704*4882a593Smuzhiyun    def mtime(cachefile):
705*4882a593Smuzhiyun        return bb.parse.cached_mtime_noerror(cachefile)
706*4882a593Smuzhiyun
707*4882a593Smuzhiyun    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
708*4882a593Smuzhiyun        if self.mc is not None:
709*4882a593Smuzhiyun            (fn, cls, mc) = virtualfn2realfn(filename)
710*4882a593Smuzhiyun            if mc:
711*4882a593Smuzhiyun                self.logger.error("Unexpected multiconfig %s", filename)
712*4882a593Smuzhiyun                return
713*4882a593Smuzhiyun
714*4882a593Smuzhiyun            vfn = realfn2virtual(fn, cls, self.mc)
715*4882a593Smuzhiyun        else:
716*4882a593Smuzhiyun            vfn = filename
717*4882a593Smuzhiyun
718*4882a593Smuzhiyun        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
719*4882a593Smuzhiyun            cacheData.add_from_recipeinfo(vfn, info_array)
720*4882a593Smuzhiyun
721*4882a593Smuzhiyun            if watcher:
722*4882a593Smuzhiyun                watcher(info_array[0].file_depends)
723*4882a593Smuzhiyun
724*4882a593Smuzhiyun        if not self.has_cache:
725*4882a593Smuzhiyun            return
726*4882a593Smuzhiyun
727*4882a593Smuzhiyun        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
728*4882a593Smuzhiyun            if parsed:
729*4882a593Smuzhiyun                self.cacheclean = False
730*4882a593Smuzhiyun            self.depends_cache[filename] = info_array
731*4882a593Smuzhiyun
732*4882a593Smuzhiyun    def add(self, file_name, data, cacheData, parsed=None):
733*4882a593Smuzhiyun        """
734*4882a593Smuzhiyun        Save data we need into the cache
735*4882a593Smuzhiyun        """
736*4882a593Smuzhiyun
737*4882a593Smuzhiyun        realfn = virtualfn2realfn(file_name)[0]
738*4882a593Smuzhiyun
739*4882a593Smuzhiyun        info_array = []
740*4882a593Smuzhiyun        for cache_class in self.caches_array:
741*4882a593Smuzhiyun            info_array.append(cache_class(realfn, data))
742*4882a593Smuzhiyun        self.add_info(file_name, info_array, cacheData, parsed)
743*4882a593Smuzhiyun
744*4882a593Smuzhiyunclass MulticonfigCache(Mapping):
745*4882a593Smuzhiyun    def __init__(self, databuilder, data_hash, caches_array):
746*4882a593Smuzhiyun        def progress(p):
747*4882a593Smuzhiyun            nonlocal current_progress
748*4882a593Smuzhiyun            nonlocal previous_progress
749*4882a593Smuzhiyun            nonlocal previous_percent
750*4882a593Smuzhiyun            nonlocal cachesize
751*4882a593Smuzhiyun
752*4882a593Smuzhiyun            current_progress = previous_progress + p
753*4882a593Smuzhiyun
754*4882a593Smuzhiyun            if current_progress > cachesize:
755*4882a593Smuzhiyun                # we might have calculated incorrect total size because a file
756*4882a593Smuzhiyun                # might've been written out just after we checked its size
757*4882a593Smuzhiyun                cachesize = current_progress
758*4882a593Smuzhiyun            current_percent = 100 * current_progress / cachesize
759*4882a593Smuzhiyun            if current_percent > previous_percent:
760*4882a593Smuzhiyun                previous_percent = current_percent
761*4882a593Smuzhiyun                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
762*4882a593Smuzhiyun                                databuilder.data)
763*4882a593Smuzhiyun
764*4882a593Smuzhiyun
765*4882a593Smuzhiyun        cachesize = 0
766*4882a593Smuzhiyun        current_progress = 0
767*4882a593Smuzhiyun        previous_progress = 0
768*4882a593Smuzhiyun        previous_percent = 0
769*4882a593Smuzhiyun        self.__caches = {}
770*4882a593Smuzhiyun
771*4882a593Smuzhiyun        for mc, mcdata in databuilder.mcdata.items():
772*4882a593Smuzhiyun            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
773*4882a593Smuzhiyun
774*4882a593Smuzhiyun            cachesize += self.__caches[mc].cachesize()
775*4882a593Smuzhiyun
776*4882a593Smuzhiyun        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
777*4882a593Smuzhiyun        loaded = 0
778*4882a593Smuzhiyun
779*4882a593Smuzhiyun        for c in self.__caches.values():
780*4882a593Smuzhiyun            loaded += c.prepare_cache(progress)
781*4882a593Smuzhiyun            previous_progress = current_progress
782*4882a593Smuzhiyun
783*4882a593Smuzhiyun        # Note: depends cache number is corresponding to the parsing file numbers.
784*4882a593Smuzhiyun        # The same file has several caches, still regarded as one item in the cache
785*4882a593Smuzhiyun        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
786*4882a593Smuzhiyun
787*4882a593Smuzhiyun    def __len__(self):
788*4882a593Smuzhiyun        return len(self.__caches)
789*4882a593Smuzhiyun
790*4882a593Smuzhiyun    def __getitem__(self, key):
791*4882a593Smuzhiyun        return self.__caches[key]
792*4882a593Smuzhiyun
793*4882a593Smuzhiyun    def __contains__(self, key):
794*4882a593Smuzhiyun        return key in self.__caches
795*4882a593Smuzhiyun
796*4882a593Smuzhiyun    def __iter__(self):
797*4882a593Smuzhiyun        for k in self.__caches:
798*4882a593Smuzhiyun            yield k
799*4882a593Smuzhiyun
800*4882a593Smuzhiyundef init(cooker):
801*4882a593Smuzhiyun    """
802*4882a593Smuzhiyun    The Objective: Cache the minimum amount of data possible yet get to the
803*4882a593Smuzhiyun    stage of building packages (i.e. tryBuild) without reparsing any .bb files.
804*4882a593Smuzhiyun
805*4882a593Smuzhiyun    To do this, we intercept getVar calls and only cache the variables we see
806*4882a593Smuzhiyun    being accessed. We rely on the cache getVar calls being made for all
807*4882a593Smuzhiyun    variables bitbake might need to use to reach this stage. For each cached
808*4882a593Smuzhiyun    file we need to track:
809*4882a593Smuzhiyun
810*4882a593Smuzhiyun    * Its mtime
811*4882a593Smuzhiyun    * The mtimes of all its dependencies
812*4882a593Smuzhiyun    * Whether it caused a parse.SkipRecipe exception
813*4882a593Smuzhiyun
814*4882a593Smuzhiyun    Files causing parsing errors are evicted from the cache.
815*4882a593Smuzhiyun
816*4882a593Smuzhiyun    """
817*4882a593Smuzhiyun    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
818*4882a593Smuzhiyun
819*4882a593Smuzhiyun
820*4882a593Smuzhiyunclass CacheData(object):
821*4882a593Smuzhiyun    """
822*4882a593Smuzhiyun    The data structures we compile from the cached data
823*4882a593Smuzhiyun    """
824*4882a593Smuzhiyun
825*4882a593Smuzhiyun    def __init__(self, caches_array):
826*4882a593Smuzhiyun        self.caches_array = caches_array
827*4882a593Smuzhiyun        for cache_class in self.caches_array:
828*4882a593Smuzhiyun            if not issubclass(cache_class, RecipeInfoCommon):
829*4882a593Smuzhiyun                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
830*4882a593Smuzhiyun            cache_class.init_cacheData(self)
831*4882a593Smuzhiyun
832*4882a593Smuzhiyun        # Direct cache variables
833*4882a593Smuzhiyun        self.task_queues = {}
834*4882a593Smuzhiyun        self.preferred = {}
835*4882a593Smuzhiyun        self.tasks = {}
836*4882a593Smuzhiyun        # Indirect Cache variables (set elsewhere)
837*4882a593Smuzhiyun        self.ignored_dependencies = []
838*4882a593Smuzhiyun        self.world_target = set()
839*4882a593Smuzhiyun        self.bbfile_priority = {}
840*4882a593Smuzhiyun
841*4882a593Smuzhiyun    def add_from_recipeinfo(self, fn, info_array):
842*4882a593Smuzhiyun        for info in info_array:
843*4882a593Smuzhiyun            info.add_cacheData(self, fn)
844*4882a593Smuzhiyun
845*4882a593Smuzhiyunclass MultiProcessCache(object):
846*4882a593Smuzhiyun    """
847*4882a593Smuzhiyun    BitBake multi-process cache implementation
848*4882a593Smuzhiyun
849*4882a593Smuzhiyun    Used by the codeparser & file checksum caches
850*4882a593Smuzhiyun    """
851*4882a593Smuzhiyun
852*4882a593Smuzhiyun    def __init__(self):
853*4882a593Smuzhiyun        self.cachefile = None
854*4882a593Smuzhiyun        self.cachedata = self.create_cachedata()
855*4882a593Smuzhiyun        self.cachedata_extras = self.create_cachedata()
856*4882a593Smuzhiyun
857*4882a593Smuzhiyun    def init_cache(self, d, cache_file_name=None):
858*4882a593Smuzhiyun        cachedir = (d.getVar("PERSISTENT_DIR") or
859*4882a593Smuzhiyun                    d.getVar("CACHE"))
860*4882a593Smuzhiyun        if cachedir in [None, '']:
861*4882a593Smuzhiyun            return
862*4882a593Smuzhiyun        bb.utils.mkdirhier(cachedir)
863*4882a593Smuzhiyun        self.cachefile = os.path.join(cachedir,
864*4882a593Smuzhiyun                                      cache_file_name or self.__class__.cache_file_name)
865*4882a593Smuzhiyun        logger.debug("Using cache in '%s'", self.cachefile)
866*4882a593Smuzhiyun
867*4882a593Smuzhiyun        glf = bb.utils.lockfile(self.cachefile + ".lock")
868*4882a593Smuzhiyun
869*4882a593Smuzhiyun        try:
870*4882a593Smuzhiyun            with open(self.cachefile, "rb") as f:
871*4882a593Smuzhiyun                p = pickle.Unpickler(f)
872*4882a593Smuzhiyun                data, version = p.load()
873*4882a593Smuzhiyun        except:
874*4882a593Smuzhiyun            bb.utils.unlockfile(glf)
875*4882a593Smuzhiyun            return
876*4882a593Smuzhiyun
877*4882a593Smuzhiyun        bb.utils.unlockfile(glf)
878*4882a593Smuzhiyun
879*4882a593Smuzhiyun        if version != self.__class__.CACHE_VERSION:
880*4882a593Smuzhiyun            return
881*4882a593Smuzhiyun
882*4882a593Smuzhiyun        self.cachedata = data
883*4882a593Smuzhiyun
884*4882a593Smuzhiyun    def create_cachedata(self):
885*4882a593Smuzhiyun        data = [{}]
886*4882a593Smuzhiyun        return data
887*4882a593Smuzhiyun
888*4882a593Smuzhiyun    def save_extras(self):
889*4882a593Smuzhiyun        if not self.cachefile:
890*4882a593Smuzhiyun            return
891*4882a593Smuzhiyun
892*4882a593Smuzhiyun        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
893*4882a593Smuzhiyun
894*4882a593Smuzhiyun        i = os.getpid()
895*4882a593Smuzhiyun        lf = None
896*4882a593Smuzhiyun        while not lf:
897*4882a593Smuzhiyun            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
898*4882a593Smuzhiyun            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
899*4882a593Smuzhiyun                if lf:
900*4882a593Smuzhiyun                    bb.utils.unlockfile(lf)
901*4882a593Smuzhiyun                    lf = None
902*4882a593Smuzhiyun                i = i + 1
903*4882a593Smuzhiyun                continue
904*4882a593Smuzhiyun
905*4882a593Smuzhiyun            with open(self.cachefile + "-" + str(i), "wb") as f:
906*4882a593Smuzhiyun                p = pickle.Pickler(f, -1)
907*4882a593Smuzhiyun                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
908*4882a593Smuzhiyun
909*4882a593Smuzhiyun        bb.utils.unlockfile(lf)
910*4882a593Smuzhiyun        bb.utils.unlockfile(glf)
911*4882a593Smuzhiyun
912*4882a593Smuzhiyun    def merge_data(self, source, dest):
913*4882a593Smuzhiyun        for j in range(0,len(dest)):
914*4882a593Smuzhiyun            for h in source[j]:
915*4882a593Smuzhiyun                if h not in dest[j]:
916*4882a593Smuzhiyun                    dest[j][h] = source[j][h]
917*4882a593Smuzhiyun
918*4882a593Smuzhiyun    def save_merge(self):
919*4882a593Smuzhiyun        if not self.cachefile:
920*4882a593Smuzhiyun            return
921*4882a593Smuzhiyun
922*4882a593Smuzhiyun        glf = bb.utils.lockfile(self.cachefile + ".lock")
923*4882a593Smuzhiyun
924*4882a593Smuzhiyun        data = self.cachedata
925*4882a593Smuzhiyun
926*4882a593Smuzhiyun        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
927*4882a593Smuzhiyun            f = os.path.join(os.path.dirname(self.cachefile), f)
928*4882a593Smuzhiyun            try:
929*4882a593Smuzhiyun                with open(f, "rb") as fd:
930*4882a593Smuzhiyun                    p = pickle.Unpickler(fd)
931*4882a593Smuzhiyun                    extradata, version = p.load()
932*4882a593Smuzhiyun            except (IOError, EOFError):
933*4882a593Smuzhiyun                os.unlink(f)
934*4882a593Smuzhiyun                continue
935*4882a593Smuzhiyun
936*4882a593Smuzhiyun            if version != self.__class__.CACHE_VERSION:
937*4882a593Smuzhiyun                os.unlink(f)
938*4882a593Smuzhiyun                continue
939*4882a593Smuzhiyun
940*4882a593Smuzhiyun            self.merge_data(extradata, data)
941*4882a593Smuzhiyun            os.unlink(f)
942*4882a593Smuzhiyun
943*4882a593Smuzhiyun        with open(self.cachefile, "wb") as f:
944*4882a593Smuzhiyun            p = pickle.Pickler(f, -1)
945*4882a593Smuzhiyun            p.dump([data, self.__class__.CACHE_VERSION])
946*4882a593Smuzhiyun
947*4882a593Smuzhiyun        bb.utils.unlockfile(glf)
948*4882a593Smuzhiyun
949*4882a593Smuzhiyun
950*4882a593Smuzhiyunclass SimpleCache(object):
951*4882a593Smuzhiyun    """
952*4882a593Smuzhiyun    BitBake multi-process cache implementation
953*4882a593Smuzhiyun
954*4882a593Smuzhiyun    Used by the codeparser & file checksum caches
955*4882a593Smuzhiyun    """
956*4882a593Smuzhiyun
957*4882a593Smuzhiyun    def __init__(self, version):
958*4882a593Smuzhiyun        self.cachefile = None
959*4882a593Smuzhiyun        self.cachedata = None
960*4882a593Smuzhiyun        self.cacheversion = version
961*4882a593Smuzhiyun
962*4882a593Smuzhiyun    def init_cache(self, d, cache_file_name=None, defaultdata=None):
963*4882a593Smuzhiyun        cachedir = (d.getVar("PERSISTENT_DIR") or
964*4882a593Smuzhiyun                    d.getVar("CACHE"))
965*4882a593Smuzhiyun        if not cachedir:
966*4882a593Smuzhiyun            return defaultdata
967*4882a593Smuzhiyun
968*4882a593Smuzhiyun        bb.utils.mkdirhier(cachedir)
969*4882a593Smuzhiyun        self.cachefile = os.path.join(cachedir,
970*4882a593Smuzhiyun                                      cache_file_name or self.__class__.cache_file_name)
971*4882a593Smuzhiyun        logger.debug("Using cache in '%s'", self.cachefile)
972*4882a593Smuzhiyun
973*4882a593Smuzhiyun        glf = bb.utils.lockfile(self.cachefile + ".lock")
974*4882a593Smuzhiyun
975*4882a593Smuzhiyun        try:
976*4882a593Smuzhiyun            with open(self.cachefile, "rb") as f:
977*4882a593Smuzhiyun                p = pickle.Unpickler(f)
978*4882a593Smuzhiyun                data, version = p.load()
979*4882a593Smuzhiyun        except:
980*4882a593Smuzhiyun            bb.utils.unlockfile(glf)
981*4882a593Smuzhiyun            return defaultdata
982*4882a593Smuzhiyun
983*4882a593Smuzhiyun        bb.utils.unlockfile(glf)
984*4882a593Smuzhiyun
985*4882a593Smuzhiyun        if version != self.cacheversion:
986*4882a593Smuzhiyun            return defaultdata
987*4882a593Smuzhiyun
988*4882a593Smuzhiyun        return data
989*4882a593Smuzhiyun
990*4882a593Smuzhiyun    def save(self, data):
991*4882a593Smuzhiyun        if not self.cachefile:
992*4882a593Smuzhiyun            return
993*4882a593Smuzhiyun
994*4882a593Smuzhiyun        glf = bb.utils.lockfile(self.cachefile + ".lock")
995*4882a593Smuzhiyun
996*4882a593Smuzhiyun        with open(self.cachefile, "wb") as f:
997*4882a593Smuzhiyun            p = pickle.Pickler(f, -1)
998*4882a593Smuzhiyun            p.dump([data, self.cacheversion])
999*4882a593Smuzhiyun
1000*4882a593Smuzhiyun        bb.utils.unlockfile(glf)
1001