xref: /OK3568_Linux_fs/yocto/bitbake/lib/bb/fetch2/__init__.py (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun"""
2*4882a593SmuzhiyunBitBake 'Fetch' implementations
3*4882a593Smuzhiyun
4*4882a593SmuzhiyunClasses for obtaining upstream sources for the
5*4882a593SmuzhiyunBitBake build tools.
6*4882a593Smuzhiyun"""
7*4882a593Smuzhiyun
8*4882a593Smuzhiyun# Copyright (C) 2003, 2004  Chris Larson
9*4882a593Smuzhiyun# Copyright (C) 2012  Intel Corporation
10*4882a593Smuzhiyun#
11*4882a593Smuzhiyun# SPDX-License-Identifier: GPL-2.0-only
12*4882a593Smuzhiyun#
13*4882a593Smuzhiyun# Based on functions from the base bb module, Copyright 2003 Holger Schurig
14*4882a593Smuzhiyun
15*4882a593Smuzhiyunimport os, re
16*4882a593Smuzhiyunimport signal
17*4882a593Smuzhiyunimport logging
18*4882a593Smuzhiyunimport urllib.request, urllib.parse, urllib.error
19*4882a593Smuzhiyunif 'git' not in urllib.parse.uses_netloc:
20*4882a593Smuzhiyun    urllib.parse.uses_netloc.append('git')
21*4882a593Smuzhiyunimport operator
22*4882a593Smuzhiyunimport collections
23*4882a593Smuzhiyunimport subprocess
24*4882a593Smuzhiyunimport pickle
25*4882a593Smuzhiyunimport errno
26*4882a593Smuzhiyunimport bb.persist_data, bb.utils
27*4882a593Smuzhiyunimport bb.checksum
28*4882a593Smuzhiyunimport bb.process
29*4882a593Smuzhiyunimport bb.event
30*4882a593Smuzhiyun
31*4882a593Smuzhiyun__version__ = "2"
32*4882a593Smuzhiyun_checksum_cache = bb.checksum.FileChecksumCache()
33*4882a593Smuzhiyun
34*4882a593Smuzhiyunlogger = logging.getLogger("BitBake.Fetcher")
35*4882a593Smuzhiyun
36*4882a593SmuzhiyunCHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ]
37*4882a593SmuzhiyunSHOWN_CHECKSUM_LIST = ["sha256"]
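
# Editor's note (illustrative, not part of the upstream module): the checksum
# ids above correspond to SRC_URI varflags in a recipe, e.g.
#
#   SRC_URI[sha256sum] = "<sha256 hex digest of the downloaded file>"
#   SRC_URI[md5sum] = "<md5 hex digest of the downloaded file>"
#
# Only the ids in SHOWN_CHECKSUM_LIST are suggested back to the user when a
# checksum is missing (see verify_checksum() below).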

class BBFetchException(Exception):
    """Class all fetch exceptions inherit from"""
    def __init__(self, message):
        self.msg = message
        Exception.__init__(self, message)

    def __str__(self):
        return self.msg

class UntrustedUrl(BBFetchException):
    """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
    def __init__(self, url, message=''):
        if message:
            msg = message
        else:
            msg = "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url, message=''):
        if message:
            msg = message
        else:
            msg = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        if url:
            msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            msg = "Fetcher failure: %s" % message
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        self.checksum = checksum
        FetchError.__init__(self, message, url)

class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""

class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
        msg = "Unpack failure for URL: '%s'. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class NoMethodError(BBFetchException):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
    def __init__(self, url):
        msg = "Could not find a fetcher which supports the URL: '%s'" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
    def __init__(self, missing, url):
        msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
        self.url = url
        self.missing = missing
        BBFetchException.__init__(self, msg)
        self.args = (missing, url)

class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        msg = "URL: '%s' has invalid parameters. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
        self.url = url
        self.cmd = cmd
        BBFetchException.__init__(self, msg)
        self.args = (url, cmd)

class NonLocalMethod(Exception):
    def __init__(self):
        Exception.__init__(self)

class MissingChecksumEvent(bb.event.Event):
    def __init__(self, url, **checksums):
        self.url = url
        self.checksums = checksums
        bb.event.Event.__init__(self)


class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
      * path_quoted (read/write)
        A URI quoted version of path
      * params (dict) (read/write)
      * query (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not conform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way conforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    explicitly allowed schemes (currently only file://), which
    require their URIs to not have a network location.
    """

    _relative_schemes = ['file', 'git']
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them is not quite RFC compliant.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urllib.parse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc
        if not self.scheme in urllib.parse.uses_netloc:
            urllib.parse.uses_params.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urllib.parse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        self.path = urllib.parse.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.parse.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.parse.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        if not path or re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
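
    # Illustrative usage sketch (editor's addition, not part of the upstream
    # class), based only on the behaviour implemented above:
    #
    #   uri = URI("http://www.example.com/releases/foo-1.0.tar.gz;name=foo")
    #   uri.scheme    -> 'http'
    #   uri.hostname  -> 'www.example.com'
    #   uri.path      -> '/releases/foo-1.0.tar.gz'
    #   uri.params    -> OrderedDict([('name', 'foo')])
    #   uri.path = '/releases/foo-1.1.tar.gz'
    #   str(uri)      -> 'http://www.example.com/releases/foo-1.1.tar.gz;name=foo'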

def decodeurl(url):
    """Decodes a URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    elif type.lower() == 'file':
        host = ""
        path = location
    else:
        host = location
        path = "/"
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = collections.OrderedDict()
    if parm:
        for s in parm.split(';'):
            if s:
                if not '=' in s:
                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
                s1, s2 = s.split('=')
                p[s1] = s2

    return type, host, urllib.parse.unquote(path), user, pswd, p
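
# Example of the decoding above (editor's illustration, not part of the
# upstream module):
#
#   >>> decodeurl("git://git.example.com/repo.git;protocol=https;branch=main")
#   ('git', 'git.example.com', '/repo.git', '', '',
#    OrderedDict([('protocol', 'https'), ('branch', 'main')]))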

def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    type, host, path, user, pswd, p = decoded

    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
    url = ['%s://' % type]
    if user and type != "file":
        url.append("%s" % user)
        if pswd:
            url.append(":%s" % pswd)
        url.append("@")
    if host and type != "file":
        url.append("%s" % host)
    if path:
        # Standardise path to ensure comparisons work
        while '//' in path:
            path = path.replace("//", "/")
        url.append("%s" % urllib.parse.quote(path))
    if p:
        for parm in p:
            url.append(";%s=%s" % (parm, p[parm]))

    return "".join(url)
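
# Roundtrip example (editor's illustration, not part of the upstream module):
#
#   >>> encodeurl(('https', 'example.com', '/releases/foo-1.0.tar.gz', '', '',
#   ...            {'name': 'foo'}))
#   'https://example.com/releases/foo-1.0.tar.gz;name=foo'
#
# i.e. encodeurl(decodeurl(url)) reproduces a normalised form of the original
# SRC_URI entry.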

def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    # 0 - type, 1 - host, 2 - path, 3 - user, 4 - pswd, 5 - params
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_find_decoded[loc]:
                    if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
            # User/password in the replacement is just a straight replacement
            result_decoded[loc] = uri_replace_decoded[loc]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename:
                    uri_basename = os.path.basename(uri_decoded[loc])
                    # Prefix with a slash as a sentinel in case
                    # result_decoded[loc] does not contain one.
                    path = "/" + result_decoded[loc]
                    if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
                        result_decoded[loc] = path[1:-len(uri_basename)] + basename
                    elif not path.endswith("/" + basename):
                        result_decoded[loc] = os.path.join(path[1:], basename)
        else:
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug2("For url %s returning %s" % (ud.url, result))
    return result

methods = []
urldata_cache = {}
saved_headrevs = {}

def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """

    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
    try:
        # fetcher_init is called multiple times, so make sure we only save the
        # revs the first time it is called.
        if not bb.fetch2.saved_headrevs:
            bb.fetch2.saved_headrevs = dict(revs)
    except:
        pass

    # When to drop SCM head revisions is controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
    if srcrev_policy == "cache":
        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

def fetcher_parse_save():
    _checksum_cache.save_extras()

def fetcher_parse_done():
    _checksum_cache.save_merge()

def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with the saved values from
    when bitbake was started and return true if they have changed.
    """

    headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
    return headrevs != bb.fetch2.saved_headrevs

def mirror_from_string(data):
    mirrors = (data or "").replace('\\n',' ').split()
    # Split into pairs
    if len(mirrors) % 2 != 0:
        bb.warn('Invalid mirror data %s, should have paired members.' % data)
    return list(zip(*[iter(mirrors)]*2))
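
# Example (editor's illustration, not part of the upstream module): a
# PREMIRRORS/MIRRORS style value is a whitespace-separated list of
# (find regex, replacement) pairs:
#
#   >>> mirror_from_string("git://.*/.* http://mirror.example.com/sources/ \\n "
#   ...                    "https://.*/.* file:///srv/source-mirror/")
#   [('git://.*/.*', 'http://mirror.example.com/sources/'),
#    ('https://.*/.*', 'file:///srv/source-mirror/')]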

def verify_checksum(ud, d, precomputed={}):
    """
    Verify the checksums (md5, sha256, sha1, sha384, sha512) for the downloaded source.

    Raises a FetchError if any of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as precomputed parameter in a later call to avoid re-computing
    the checksums from the file. This allows verifying the checksums of the
    file against those in the recipe each time, rather than only after
    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
    """

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}

    def compute_checksum_info(checksum_id):
        checksum_name = getattr(ud, "%s_name" % checksum_id)

        if checksum_id in precomputed:
            checksum_data = precomputed[checksum_id]
        else:
            checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath)

        checksum_expected = getattr(ud, "%s_expected" % checksum_id)

        if checksum_expected == '':
            checksum_expected = None

        return {
            "id": checksum_id,
            "name": checksum_name,
            "data": checksum_data,
            "expected": checksum_expected
        }

    checksum_infos = []
    for checksum_id in CHECKSUM_LIST:
        checksum_infos.append(compute_checksum_info(checksum_id))

    checksum_dict = {ci["id"] : ci["data"] for ci in checksum_infos}
    checksum_event = {"%ssum" % ci["id"] : ci["data"] for ci in checksum_infos}

    # Collect the recipe lines suggested to the user; build the list up so it
    # is always defined and handles more than one shown checksum.
    checksum_lines = []
    for ci in checksum_infos:
        if ci["id"] in SHOWN_CHECKSUM_LIST:
            checksum_lines.append("SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"]))

    # If no checksum has been provided
    if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
        messages = []
        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"

        # If strict checking enabled and neither sum defined, raise error
        if strict == "1":
            messages.append("No checksum specified for '%s', please add at " \
                            "least one to the recipe:" % ud.localpath)
            messages.extend(checksum_lines)
            logger.error("\n".join(messages))
            raise NoChecksumError("Missing SRC_URI checksum", ud.url)

        bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)

        if strict == "ignore":
            return checksum_dict

        # Log missing sums so user can more easily add them
        messages.append("Missing checksum for '%s', consider adding at " \
                        "least one to the recipe:" % ud.localpath)
        messages.extend(checksum_lines)
        logger.warning("\n".join(messages))

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    messages = []
    messages.append("Checksum mismatch!")
    bad_checksum = None

    for ci in checksum_infos:
        if ci["expected"] and ci["expected"] != ci["data"]:
            messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
                            "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
            bad_checksum = ci["data"]

    if bad_checksum:
        messages.append("If this change is expected (e.g. you have upgraded " \
                        "to a new version without updating the checksums) " \
                        "then you can use these lines within the recipe:")
        messages.extend(checksum_lines)
        messages.append("Otherwise you should retry the download and/or " \
                        "check with upstream to determine if the file has " \
                        "become corrupted or otherwise unexpectedly modified.")
        raise ChecksumError("\n".join(messages), ud.url, bad_checksum)

    return checksum_dict
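
# Recipe-side example (editor's illustration, not part of the upstream
# module): the "expected" values checked above come from SRC_URI varflags
# such as
#
#   SRC_URI = "https://example.com/releases/foo-1.0.tar.gz"
#   SRC_URI[sha256sum] = "<sha256 hex digest of foo-1.0.tar.gz>"
#
# while BB_STRICT_CHECKSUM = "1" turns a missing checksum into a hard error.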

def verify_donestamp(ud, d, origud=None):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    if not ud.needdonestamp or (origud and not origud.needdonestamp):
        return True

    if not os.path.exists(ud.localpath):
        # local path does not exist
        if os.path.exists(ud.donestamp):
            # done stamp exists, but the downloaded file does not; the done stamp
            # must be incorrect, re-trigger the download
            bb.utils.remove(ud.donestamp)
        return False

    if (not ud.method.supports_checksum(ud) or
        (origud and not origud.method.supports_checksum(origud))):
        # if done stamp exists and checksums not supported; assume the local
        # file is current
        return os.path.exists(ud.donestamp)

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from empty done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warning("Couldn't load checksums from donestamp %s: %s "
                               "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                              str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warning("Checksum mismatch for local file %s\n"
                       "Cleaning and trying again." % ud.localpath)
        if os.path.exists(ud.localpath):
            rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
    return False


def update_stamp(ud, d):
    """
        The donestamp is a stamp file indicating that fetching has completed.
        This function updates the stamp after verifying the checksum.
    """
    if not ud.needdonestamp:
        return

    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        try:
            checksums = verify_checksum(ud, d)
            # Store the checksums for later re-verification against the recipe
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        except ChecksumError as e:
            # Checksums failed to verify, trigger re-download and remove the
            # incorrect stamp file.
            logger.warning("Checksum mismatch for local file %s\n"
                           "Cleaning and trying again." % ud.localpath)
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
            bb.utils.remove(ud.donestamp)
            raise

def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

def get_autorev(d):
    # Don't cache the srcrev in the autorev case unless the SRCREV policy is "cache"
    if d.getVar('BB_SRCREV_POLICY') != "cache":
        d.setVar('BB_DONT_CACHE', '1')
    return "AUTOINC"
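
# Editor's note (illustrative, not part of the upstream module): get_autorev()
# backs the ${AUTOREV} value, so a recipe tracking a branch head typically sets
#
#   SRCREV = "${AUTOREV}"
#
# and the "AUTOINC" placeholder is later resolved to a real revision by the
# SCM fetcher.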

def get_srcrev(d, method_name='sortable_revision'):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into the return value if the revisions are not
    incremental; other code is then responsible for turning that into an increasing value (if needed).

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.
    """

    d.setVar("__BBSEENSRCREV", "1")
    recursion = d.getVar("__BBINSRCREV")
    if recursion:
        raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
    d.setVar("__BBINSRCREV", True)

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if not scms:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        if len(rev) > 10:
            rev = rev[:10]
        d.delVar("__BBINSRCREV")
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT')
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
                         "The SCMs are:\n%s" % '\n'.join(scms))

    name_to_rev = {}
    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            name_to_rev[name] = rev
    # Replace names by revisions in the SRCREV_FORMAT string. The approach used
    # here can handle names being prefixes of other names and names appearing
    # as substrings in revisions (in which case the name should not be
    # expanded). The '|' regular expression operator tries matches from left to
    # right, so we need to sort the names with the longest ones first.
    names_descending_len = sorted(name_to_rev, key=len, reverse=True)
    name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
    format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)

    if seenautoinc:
        format = "AUTOINC+" + format

    d.delVar("__BBINSRCREV")
    return format
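
# Multi-SCM example (editor's illustration, not part of the upstream module):
# with two named SCM URIs in SRC_URI, e.g.
#
#   SRC_URI = "git://example.com/app.git;branch=main;name=app \
#              git://example.com/libfoo.git;branch=main;name=libfoo"
#   SRCREV_FORMAT = "app_libfoo"
#
# get_srcrev() substitutes each name in SRCREV_FORMAT with its (truncated)
# revision, producing e.g. "1234567890_abcdef0123", prefixed with "AUTOINC+"
# if any fetcher reported a non-incremental revision.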

def localpath(url, d):
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)

# Need to export PATH as the binary could be in metadata paths
# rather than host-provided ones.
# Also include some other variables.
FETCH_EXPORT_VARS = ['HOME', 'PATH',
                     'HTTP_PROXY', 'http_proxy',
                     'HTTPS_PROXY', 'https_proxy',
                     'FTP_PROXY', 'ftp_proxy',
                     'FTPS_PROXY', 'ftps_proxy',
                     'NO_PROXY', 'no_proxy',
                     'ALL_PROXY', 'all_proxy',
                     'GIT_PROXY_COMMAND',
                     'GIT_SSH',
                     'GIT_SSH_COMMAND',
                     'GIT_SSL_CAINFO',
                     'GIT_SMART_HTTP',
                     'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                     'SOCKS5_USER', 'SOCKS5_PASSWD',
                     'DBUS_SESSION_BUS_ADDRESS',
                     'P4CONFIG',
                     'SSL_CERT_FILE',
                     'AWS_PROFILE',
                     'AWS_ACCESS_KEY_ID',
                     'AWS_SECRET_ACCESS_KEY',
                     'AWS_DEFAULT_REGION']

def get_fetcher_environment(d):
    newenv = {}
    origenv = d.getVar("BB_ORIGENV")
    for name in bb.fetch2.FETCH_EXPORT_VARS:
        value = d.getVar(name)
        if not value and origenv:
            value = origenv.getVar(name)
        if value:
            newenv[name] = value
    return newenv
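
# Usage sketch (editor's illustration, not part of the upstream module): build
# an environment for an external helper so it sees the same proxy and
# credential settings as the fetcher itself, e.g.
#
#   env = bb.fetch2.get_fetcher_environment(d)
#   bb.process.run("git ls-remote https://example.com/repo.git", env=env)
#
# (passing env to bb.process.run this way is an assumption for illustration;
# the point is that only the variables in FETCH_EXPORT_VARS are propagated.)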

def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """

    exportvars = FETCH_EXPORT_VARS

    if not cleanup:
        cleanup = []

    # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
    # can end up in circular recursion here so give the option of breaking it
    # in a data store copy.
    try:
        d.getVar("PV")
        d.getVar("PR")
    except bb.data_smart.ExpansionError:
        d = bb.data.createCopy(d)
        d.setVar("PV", "fetcheravoidrecurse")
        d.setVar("PR", "fetcheravoidrecurse")

    origenv = d.getVar("BB_ORIGENV", False)
    for var in exportvars:
        val = d.getVar(var) or (origenv and origenv.getVar(var))
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    # Disable pseudo as it may affect ssh, potentially causing it to hang.
    cmd = 'export PSEUDO_DISABLED=1; ' + cmd

    if workdir:
        logger.debug("Running '%s' in %s" % (cmd, workdir))
    else:
        logger.debug("Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s not found" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
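
# Usage sketch (editor's illustration, not part of the upstream module): the
# per-SCM fetchers build a shell command and hand it to runfetchcmd, e.g.
#
#   output = runfetchcmd("git ls-remote %s" % ud.url, d, quiet=True, workdir=ud.clonedir)
#
# The proxy/credential variables above are exported into the command's
# environment, and a FetchError is raised if the command fails.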
932*4882a593Smuzhiyun
933*4882a593Smuzhiyundef check_network_access(d, info, url):
934*4882a593Smuzhiyun    """
935*4882a593Smuzhiyun    log remote network access, and error if BB_NO_NETWORK is set or the given
936*4882a593Smuzhiyun    URI is untrusted
937*4882a593Smuzhiyun    """
938*4882a593Smuzhiyun    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
939*4882a593Smuzhiyun        raise NetworkAccess(url, info)
940*4882a593Smuzhiyun    elif not trusted_network(d, url):
941*4882a593Smuzhiyun        raise UntrustedUrl(url, info)
942*4882a593Smuzhiyun    else:
943*4882a593Smuzhiyun        logger.debug("Fetcher accessed the network with the command %s" % info)
944*4882a593Smuzhiyun
945*4882a593Smuzhiyundef build_mirroruris(origud, mirrors, ld):
946*4882a593Smuzhiyun    uris = []
947*4882a593Smuzhiyun    uds = []
948*4882a593Smuzhiyun
949*4882a593Smuzhiyun    replacements = {}
950*4882a593Smuzhiyun    replacements["TYPE"] = origud.type
951*4882a593Smuzhiyun    replacements["HOST"] = origud.host
952*4882a593Smuzhiyun    replacements["PATH"] = origud.path
953*4882a593Smuzhiyun    replacements["BASENAME"] = origud.path.split("/")[-1]
954*4882a593Smuzhiyun    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
955*4882a593Smuzhiyun
956*4882a593Smuzhiyun    def adduri(ud, uris, uds, mirrors, tarballs):
957*4882a593Smuzhiyun        for line in mirrors:
958*4882a593Smuzhiyun            try:
959*4882a593Smuzhiyun                (find, replace) = line
960*4882a593Smuzhiyun            except ValueError:
961*4882a593Smuzhiyun                continue
962*4882a593Smuzhiyun
963*4882a593Smuzhiyun            for tarball in tarballs:
964*4882a593Smuzhiyun                newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
965*4882a593Smuzhiyun                if not newuri or newuri in uris or newuri == origud.url:
966*4882a593Smuzhiyun                    continue
967*4882a593Smuzhiyun
968*4882a593Smuzhiyun                if not trusted_network(ld, newuri):
969*4882a593Smuzhiyun                    logger.debug("Mirror %s not in the list of trusted networks, skipping" %  (newuri))
970*4882a593Smuzhiyun                    continue
971*4882a593Smuzhiyun
972*4882a593Smuzhiyun                # Create a local copy of the mirrors minus the current line
973*4882a593Smuzhiyun                # this will prevent us from recursively processing the same line
974*4882a593Smuzhiyun                # as well as indirect recursion A -> B -> C -> A
975*4882a593Smuzhiyun                localmirrors = list(mirrors)
976*4882a593Smuzhiyun                localmirrors.remove(line)
977*4882a593Smuzhiyun
978*4882a593Smuzhiyun                try:
979*4882a593Smuzhiyun                    newud = FetchData(newuri, ld)
980*4882a593Smuzhiyun                    newud.setup_localpath(ld)
981*4882a593Smuzhiyun                except bb.fetch2.BBFetchException as e:
982*4882a593Smuzhiyun                    logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
983*4882a593Smuzhiyun                    logger.debug(str(e))
984*4882a593Smuzhiyun                    try:
985*4882a593Smuzhiyun                        # setup_localpath of file:// urls may fail, we should still see
986*4882a593Smuzhiyun                        # if mirrors of the url exist
987*4882a593Smuzhiyun                        adduri(newud, uris, uds, localmirrors, tarballs)
988*4882a593Smuzhiyun                    except UnboundLocalError:
989*4882a593Smuzhiyun                        pass
990*4882a593Smuzhiyun                    continue
991*4882a593Smuzhiyun                uris.append(newuri)
992*4882a593Smuzhiyun                uds.append(newud)
993*4882a593Smuzhiyun
994*4882a593Smuzhiyun                adduri(newud, uris, uds, localmirrors, tarballs)
995*4882a593Smuzhiyun
996*4882a593Smuzhiyun    adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])
997*4882a593Smuzhiyun
998*4882a593Smuzhiyun    return uris, uds
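
# Illustrative sketch (editor's addition, not part of the original source):
# `mirrors` is a list of (find, replace) pairs, typically built by
# mirror_from_string() from a PREMIRRORS/MIRRORS style variable such as
#
#     PREMIRRORS = "https?://.*/.*  http://example.com/source-mirror/"
#
# For an original url like https://ftp.gnu.org/gnu/hello/hello-2.12.tar.gz the
# function would, roughly, yield one candidate URI per matching pair
# (deduplicated, filtered through trusted_network() and recursively expanded
# against the remaining mirror lines), e.g.
# http://example.com/source-mirror/hello-2.12.tar.gz, plus a FetchData object
# for each candidate in the parallel `uds` list.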
999*4882a593Smuzhiyun
1000*4882a593Smuzhiyundef rename_bad_checksum(ud, suffix):
1001*4882a593Smuzhiyun    """
1002*4882a593Smuzhiyun    Rename the local file so that it carries the given suffix (used to move aside files that failed checksum verification)
1003*4882a593Smuzhiyun    """
1004*4882a593Smuzhiyun
1005*4882a593Smuzhiyun    if ud.localpath is None:
1006*4882a593Smuzhiyun        return
1007*4882a593Smuzhiyun
1008*4882a593Smuzhiyun    new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
1009*4882a593Smuzhiyun    bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
1010*4882a593Smuzhiyun    if not bb.utils.movefile(ud.localpath, new_localpath):
1011*4882a593Smuzhiyun        bb.warn("Renaming %s to %s failed, grep movefile in log.do_fetch to see why" % (ud.localpath, new_localpath))
1012*4882a593Smuzhiyun
1013*4882a593Smuzhiyun
1014*4882a593Smuzhiyundef try_mirror_url(fetch, origud, ud, ld, check = False):
1015*4882a593Smuzhiyun    # Returning None or a result means we're finished
1016*4882a593Smuzhiyun    # Returning False means try another url
1017*4882a593Smuzhiyun
1018*4882a593Smuzhiyun    if ud.lockfile and ud.lockfile != origud.lockfile:
1019*4882a593Smuzhiyun        lf = bb.utils.lockfile(ud.lockfile)
1020*4882a593Smuzhiyun
1021*4882a593Smuzhiyun    try:
1022*4882a593Smuzhiyun        if check:
1023*4882a593Smuzhiyun            found = ud.method.checkstatus(fetch, ud, ld)
1024*4882a593Smuzhiyun            if found:
1025*4882a593Smuzhiyun                return found
1026*4882a593Smuzhiyun            return False
1027*4882a593Smuzhiyun
1028*4882a593Smuzhiyun        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
1029*4882a593Smuzhiyun            ud.method.download(ud, ld)
1030*4882a593Smuzhiyun            if hasattr(ud.method,"build_mirror_data"):
1031*4882a593Smuzhiyun                ud.method.build_mirror_data(ud, ld)
1032*4882a593Smuzhiyun
1033*4882a593Smuzhiyun        if not ud.localpath or not os.path.exists(ud.localpath):
1034*4882a593Smuzhiyun            return False
1035*4882a593Smuzhiyun
1036*4882a593Smuzhiyun        if ud.localpath == origud.localpath:
1037*4882a593Smuzhiyun            return ud.localpath
1038*4882a593Smuzhiyun
1039*4882a593Smuzhiyun        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
1040*4882a593Smuzhiyun        # If that tarball is a local file:// we need to provide a symlink to it
1041*4882a593Smuzhiyun        dldir = ld.getVar("DL_DIR")
1042*4882a593Smuzhiyun
1043*4882a593Smuzhiyun        if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
1044*4882a593Smuzhiyun            # Create donestamp in old format to avoid triggering a re-download
1045*4882a593Smuzhiyun            if ud.donestamp:
1046*4882a593Smuzhiyun                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
1047*4882a593Smuzhiyun                open(ud.donestamp, 'w').close()
1048*4882a593Smuzhiyun            dest = os.path.join(dldir, os.path.basename(ud.localpath))
1049*4882a593Smuzhiyun            if not os.path.exists(dest):
1050*4882a593Smuzhiyun                # In case this is executing without any file locks held (as is
1051*4882a593Smuzhiyun                # the case for file:// URLs), two tasks may end up here at the
1052*4882a593Smuzhiyun                # same time, in which case we do not want the second task to
1053*4882a593Smuzhiyun                # fail when the link has already been created by the first task.
1054*4882a593Smuzhiyun                try:
1055*4882a593Smuzhiyun                    os.symlink(ud.localpath, dest)
1056*4882a593Smuzhiyun                except FileExistsError:
1057*4882a593Smuzhiyun                    pass
1058*4882a593Smuzhiyun            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
1059*4882a593Smuzhiyun                origud.method.download(origud, ld)
1060*4882a593Smuzhiyun                if hasattr(origud.method, "build_mirror_data"):
1061*4882a593Smuzhiyun                    origud.method.build_mirror_data(origud, ld)
1062*4882a593Smuzhiyun            return origud.localpath
1063*4882a593Smuzhiyun        # Otherwise the result is a local file:// and we symlink to it
1064*4882a593Smuzhiyun        ensure_symlink(ud.localpath, origud.localpath)
1065*4882a593Smuzhiyun        update_stamp(origud, ld)
1066*4882a593Smuzhiyun        return ud.localpath
1067*4882a593Smuzhiyun
1068*4882a593Smuzhiyun    except bb.fetch2.NetworkAccess:
1069*4882a593Smuzhiyun        raise
1070*4882a593Smuzhiyun
1071*4882a593Smuzhiyun    except IOError as e:
1072*4882a593Smuzhiyun        if e.errno in [errno.ESTALE]:
1073*4882a593Smuzhiyun            logger.warning("Stale Error Observed %s." % ud.url)
1074*4882a593Smuzhiyun            return False
1075*4882a593Smuzhiyun        raise
1076*4882a593Smuzhiyun
1077*4882a593Smuzhiyun    except bb.fetch2.BBFetchException as e:
1078*4882a593Smuzhiyun        if isinstance(e, ChecksumError):
1079*4882a593Smuzhiyun            logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
1080*4882a593Smuzhiyun            logger.warning(str(e))
1081*4882a593Smuzhiyun            if os.path.exists(ud.localpath):
1082*4882a593Smuzhiyun                rename_bad_checksum(ud, e.checksum)
1083*4882a593Smuzhiyun        elif isinstance(e, NoChecksumError):
1084*4882a593Smuzhiyun            raise
1085*4882a593Smuzhiyun        else:
1086*4882a593Smuzhiyun            logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
1087*4882a593Smuzhiyun            logger.debug(str(e))
1088*4882a593Smuzhiyun        try:
1089*4882a593Smuzhiyun            ud.method.clean(ud, ld)
1090*4882a593Smuzhiyun        except UnboundLocalError:
1091*4882a593Smuzhiyun            pass
1092*4882a593Smuzhiyun        return False
1093*4882a593Smuzhiyun    finally:
1094*4882a593Smuzhiyun        if ud.lockfile and ud.lockfile != origud.lockfile:
1095*4882a593Smuzhiyun            bb.utils.unlockfile(lf)
1096*4882a593Smuzhiyun
1097*4882a593Smuzhiyun
1098*4882a593Smuzhiyundef ensure_symlink(target, link_name):
1099*4882a593Smuzhiyun    if not os.path.exists(link_name):
1100*4882a593Smuzhiyun        dirname = os.path.dirname(link_name)
1101*4882a593Smuzhiyun        bb.utils.mkdirhier(dirname)
1102*4882a593Smuzhiyun        if os.path.islink(link_name):
1103*4882a593Smuzhiyun            # Broken symbolic link
1104*4882a593Smuzhiyun            os.unlink(link_name)
1105*4882a593Smuzhiyun
1106*4882a593Smuzhiyun        # In case this is executing without any file locks held (as is
1107*4882a593Smuzhiyun        # the case for file:// URLs), two tasks may end up here at the
1108*4882a593Smuzhiyun        # same time, in which case we do not want the second task to
1109*4882a593Smuzhiyun        # fail when the link has already been created by the first task.
1110*4882a593Smuzhiyun        try:
1111*4882a593Smuzhiyun            os.symlink(target, link_name)
1112*4882a593Smuzhiyun        except FileExistsError:
1113*4882a593Smuzhiyun            pass
1114*4882a593Smuzhiyun
1115*4882a593Smuzhiyun
1116*4882a593Smuzhiyundef try_mirrors(fetch, d, origud, mirrors, check = False):
1117*4882a593Smuzhiyun    """
1118*4882a593Smuzhiyun    Try to use a mirrored version of the sources.
1119*4882a593Smuzhiyun    This method will be automatically called before the fetchers go.
1120*4882a593Smuzhiyun
1121*4882a593Smuzhiyun    d is a bb.data instance
1122*4882a593Smuzhiyun    origud is the FetchData for the original URI we're trying to download
1123*4882a593Smuzhiyun    mirrors is the list of mirrors we're going to try
1124*4882a593Smuzhiyun    """
1125*4882a593Smuzhiyun    ld = d.createCopy()
1126*4882a593Smuzhiyun
1127*4882a593Smuzhiyun    uris, uds = build_mirroruris(origud, mirrors, ld)
1128*4882a593Smuzhiyun
1129*4882a593Smuzhiyun    for index, uri in enumerate(uris):
1130*4882a593Smuzhiyun        ret = try_mirror_url(fetch, origud, uds[index], ld, check)
1131*4882a593Smuzhiyun        if ret:
1132*4882a593Smuzhiyun            return ret
1133*4882a593Smuzhiyun    return None
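
# Illustrative sketch (editor's addition): a typical caller builds the mirror
# list with mirror_from_string() and treats a None result as "fall back to the
# next source":
#
#     mirrors = mirror_from_string(d.getVar('MIRRORS'))
#     localpath = try_mirrors(fetch, d, ud, mirrors)
#     if not localpath:
#         ...  # give up, or raise FetchError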
1134*4882a593Smuzhiyun
1135*4882a593Smuzhiyundef trusted_network(d, url):
1136*4882a593Smuzhiyun    """
1137*4882a593Smuzhiyun    Return True if the host of the given url may be used for downloads, i.e.
1138*4882a593Smuzhiyun    networking is disabled, BB_ALLOWED_NETWORKS is unset (globally and for the
1139*4882a593Smuzhiyun    current recipe), or the url's host matches one of the allowed entries.
1140*4882a593Smuzhiyun    """
1141*4882a593Smuzhiyun    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
1142*4882a593Smuzhiyun        return True
1143*4882a593Smuzhiyun
1144*4882a593Smuzhiyun    pkgname = d.expand(d.getVar('PN', False))
1145*4882a593Smuzhiyun    trusted_hosts = None
1146*4882a593Smuzhiyun    if pkgname:
1147*4882a593Smuzhiyun        trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
1148*4882a593Smuzhiyun
1149*4882a593Smuzhiyun    if not trusted_hosts:
1150*4882a593Smuzhiyun        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
1151*4882a593Smuzhiyun
1152*4882a593Smuzhiyun    # Not enabled.
1153*4882a593Smuzhiyun    if not trusted_hosts:
1154*4882a593Smuzhiyun        return True
1155*4882a593Smuzhiyun
1156*4882a593Smuzhiyun    scheme, network, path, user, passwd, param = decodeurl(url)
1157*4882a593Smuzhiyun
1158*4882a593Smuzhiyun    if not network:
1159*4882a593Smuzhiyun        return True
1160*4882a593Smuzhiyun
1161*4882a593Smuzhiyun    network = network.split(':')[0]
1162*4882a593Smuzhiyun    network = network.lower()
1163*4882a593Smuzhiyun
1164*4882a593Smuzhiyun    for host in trusted_hosts.split(" "):
1165*4882a593Smuzhiyun        host = host.lower()
1166*4882a593Smuzhiyun        if host.startswith("*.") and ("." + network).endswith(host[1:]):
1167*4882a593Smuzhiyun            return True
1168*4882a593Smuzhiyun        if host == network:
1169*4882a593Smuzhiyun            return True
1170*4882a593Smuzhiyun
1171*4882a593Smuzhiyun    return False
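
# Illustrative sketch (editor's addition): matching is host-based only, done
# case-insensitively and ignoring any port.  Assuming a datastore `d` with
#
#     BB_ALLOWED_NETWORKS = "*.example.com git.kernel.org"
#
# the function would behave roughly as follows:
#
#     trusted_network(d, "https://git.example.com/repo.git")      # True  (wildcard)
#     trusted_network(d, "https://git.kernel.org/pub/x.tar.gz")   # True  (exact)
#     trusted_network(d, "https://downloads.other.org/x.tar.gz")  # False
#
# With BB_NO_NETWORK set everything is reported as trusted, since nothing will
# be fetched from the network anyway.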
1172*4882a593Smuzhiyun
1173*4882a593Smuzhiyundef srcrev_internal_helper(ud, d, name):
1174*4882a593Smuzhiyun    """
1175*4882a593Smuzhiyun    Return:
1176*4882a593Smuzhiyun        a) a source revision if specified
1177*4882a593Smuzhiyun        b) latest revision if SRCREV="AUTOINC"
1178*4882a593Smuzhiyun        c) a FetchError is raised if no valid revision is specified
1179*4882a593Smuzhiyun    """
1180*4882a593Smuzhiyun
1181*4882a593Smuzhiyun    srcrev = None
1182*4882a593Smuzhiyun    pn = d.getVar("PN")
1183*4882a593Smuzhiyun    attempts = []
1184*4882a593Smuzhiyun    if name != '' and pn:
1185*4882a593Smuzhiyun        attempts.append("SRCREV_%s:pn-%s" % (name, pn))
1186*4882a593Smuzhiyun    if name != '':
1187*4882a593Smuzhiyun        attempts.append("SRCREV_%s" % name)
1188*4882a593Smuzhiyun    if pn:
1189*4882a593Smuzhiyun        attempts.append("SRCREV:pn-%s" % pn)
1190*4882a593Smuzhiyun    attempts.append("SRCREV")
1191*4882a593Smuzhiyun
1192*4882a593Smuzhiyun    for a in attempts:
1193*4882a593Smuzhiyun        srcrev = d.getVar(a)
1194*4882a593Smuzhiyun        if srcrev and srcrev != "INVALID":
1195*4882a593Smuzhiyun            break
1196*4882a593Smuzhiyun
1197*4882a593Smuzhiyun    if 'rev' in ud.parm and 'tag' in ud.parm:
1198*4882a593Smuzhiyun        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
1199*4882a593Smuzhiyun
1200*4882a593Smuzhiyun    if 'rev' in ud.parm or 'tag' in ud.parm:
1201*4882a593Smuzhiyun        if 'rev' in ud.parm:
1202*4882a593Smuzhiyun            parmrev = ud.parm['rev']
1203*4882a593Smuzhiyun        else:
1204*4882a593Smuzhiyun            parmrev = ud.parm['tag']
1205*4882a593Smuzhiyun        if srcrev == "INVALID" or not srcrev:
1206*4882a593Smuzhiyun            return parmrev
1207*4882a593Smuzhiyun        if srcrev != parmrev:
1208*4882a593Smuzhiyun            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
1209*4882a593Smuzhiyun        return parmrev
1210*4882a593Smuzhiyun
1211*4882a593Smuzhiyun    if srcrev == "INVALID" or not srcrev:
1212*4882a593Smuzhiyun        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
1213*4882a593Smuzhiyun    if srcrev == "AUTOINC":
1214*4882a593Smuzhiyun        srcrev = ud.method.latest_revision(ud, d, name)
1215*4882a593Smuzhiyun
1216*4882a593Smuzhiyun    return srcrev
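
# Illustrative sketch (editor's addition): for a recipe "mylib" and a SRC_URI
# entry carrying ;name=rt, the variables are consulted in this order:
#
#     SRCREV_rt:pn-mylib, SRCREV_rt, SRCREV:pn-mylib, SRCREV
#
# SRCREV = "AUTOINC" is resolved through ud.method.latest_revision(), and a
# ;rev= or ;tag= URL parameter must agree with any SRCREV value that is set.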
1217*4882a593Smuzhiyun
1218*4882a593Smuzhiyundef get_checksum_file_list(d):
1219*4882a593Smuzhiyun    """ Get the list of files to checksum from SRC_URI
1220*4882a593Smuzhiyun
1221*4882a593Smuzhiyun    Returns the resolved local paths of all local file entries in SRC_URI as a
1222*4882a593Smuzhiyun    space-separated string, each path suffixed with ':True' or ':False' to record whether the file exists
1223*4882a593Smuzhiyun    """
1224*4882a593Smuzhiyun    fetch = Fetch([], d, cache = False, localonly = True)
1225*4882a593Smuzhiyun
1226*4882a593Smuzhiyun    dl_dir = d.getVar('DL_DIR')
1227*4882a593Smuzhiyun    filelist = []
1228*4882a593Smuzhiyun    for u in fetch.urls:
1229*4882a593Smuzhiyun        ud = fetch.ud[u]
1230*4882a593Smuzhiyun
1231*4882a593Smuzhiyun        if ud and isinstance(ud.method, local.Local):
1232*4882a593Smuzhiyun            paths = ud.method.localpaths(ud, d)
1233*4882a593Smuzhiyun            for f in paths:
1234*4882a593Smuzhiyun                pth = ud.decodedurl
1235*4882a593Smuzhiyun                if f.startswith(dl_dir):
1236*4882a593Smuzhiyun                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
1237*4882a593Smuzhiyun                    if os.path.exists(f):
1238*4882a593Smuzhiyun                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
1239*4882a593Smuzhiyun                    else:
1240*4882a593Smuzhiyun                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
1241*4882a593Smuzhiyun                filelist.append(f + ":" + str(os.path.exists(f)))
1242*4882a593Smuzhiyun
1243*4882a593Smuzhiyun    return " ".join(filelist)
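
# Illustrative sketch (editor's addition, hypothetical paths): for
# SRC_URI = "file://defconfig file://init.patch" the returned string looks
# roughly like
#
#     "/path/to/recipe/files/defconfig:True /path/to/recipe/files/init.patch:True"
#
# i.e. each resolved path is suffixed with its existence flag, ready to be fed
# to get_file_checksums() below.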
1244*4882a593Smuzhiyun
1245*4882a593Smuzhiyundef get_file_checksums(filelist, pn, localdirsexclude):
1246*4882a593Smuzhiyun    """Get a list of the checksums for a list of local files
1247*4882a593Smuzhiyun
1248*4882a593Smuzhiyun    Returns the checksums for a list of local files, caching the results as
1249*4882a593Smuzhiyun    it proceeds
1250*4882a593Smuzhiyun
1251*4882a593Smuzhiyun    """
1252*4882a593Smuzhiyun    return _checksum_cache.get_checksums(filelist, pn, localdirsexclude)
1253*4882a593Smuzhiyun
1254*4882a593Smuzhiyun
1255*4882a593Smuzhiyunclass FetchData(object):
1256*4882a593Smuzhiyun    """
1257*4882a593Smuzhiyun    A class which represents the fetcher state for a given URI.
1258*4882a593Smuzhiyun    """
1259*4882a593Smuzhiyun    def __init__(self, url, d, localonly = False):
1260*4882a593Smuzhiyun        # localpath is the location of a downloaded result. If not set, the file is local.
1261*4882a593Smuzhiyun        self.donestamp = None
1262*4882a593Smuzhiyun        self.needdonestamp = True
1263*4882a593Smuzhiyun        self.localfile = ""
1264*4882a593Smuzhiyun        self.localpath = None
1265*4882a593Smuzhiyun        self.lockfile = None
1266*4882a593Smuzhiyun        self.mirrortarballs = []
1267*4882a593Smuzhiyun        self.basename = None
1268*4882a593Smuzhiyun        self.basepath = None
1269*4882a593Smuzhiyun        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
1270*4882a593Smuzhiyun        self.date = self.getSRCDate(d)
1271*4882a593Smuzhiyun        self.url = url
1272*4882a593Smuzhiyun        if not self.user and "user" in self.parm:
1273*4882a593Smuzhiyun            self.user = self.parm["user"]
1274*4882a593Smuzhiyun        if not self.pswd and "pswd" in self.parm:
1275*4882a593Smuzhiyun            self.pswd = self.parm["pswd"]
1276*4882a593Smuzhiyun        self.setup = False
1277*4882a593Smuzhiyun
1278*4882a593Smuzhiyun        def configure_checksum(checksum_id):
1279*4882a593Smuzhiyun            if "name" in self.parm:
1280*4882a593Smuzhiyun                checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
1281*4882a593Smuzhiyun            else:
1282*4882a593Smuzhiyun                checksum_name = "%ssum" % checksum_id
1283*4882a593Smuzhiyun
1284*4882a593Smuzhiyun            setattr(self, "%s_name" % checksum_id, checksum_name)
1285*4882a593Smuzhiyun
1286*4882a593Smuzhiyun            if checksum_name in self.parm:
1287*4882a593Smuzhiyun                checksum_expected = self.parm[checksum_name]
1288*4882a593Smuzhiyun            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
1289*4882a593Smuzhiyun                checksum_expected = None
1290*4882a593Smuzhiyun            else:
1291*4882a593Smuzhiyun                checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
1292*4882a593Smuzhiyun
1293*4882a593Smuzhiyun            setattr(self, "%s_expected" % checksum_id, checksum_expected)
1294*4882a593Smuzhiyun
1295*4882a593Smuzhiyun        for checksum_id in CHECKSUM_LIST:
1296*4882a593Smuzhiyun            configure_checksum(checksum_id)
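
        # Illustrative sketch (editor's addition): checksum expectations come
        # either from URL parameters or from SRC_URI varflags.  For example, with
        #
        #     SRC_URI = "https://example.com/foo-1.0.tar.gz;name=foo"
        #     SRC_URI[foo.sha256sum] = "abc123..."
        #
        # the loop above sets self.sha256_name = "foo.sha256sum" and
        # self.sha256_expected = "abc123...".  Without a ;name= parameter the
        # varflag is simply "sha256sum".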
1297*4882a593Smuzhiyun
1298*4882a593Smuzhiyun        self.ignore_checksums = False
1299*4882a593Smuzhiyun
1300*4882a593Smuzhiyun        self.names = self.parm.get("name",'default').split(',')
1301*4882a593Smuzhiyun
1302*4882a593Smuzhiyun        self.method = None
1303*4882a593Smuzhiyun        for m in methods:
1304*4882a593Smuzhiyun            if m.supports(self, d):
1305*4882a593Smuzhiyun                self.method = m
1306*4882a593Smuzhiyun                break
1307*4882a593Smuzhiyun
1308*4882a593Smuzhiyun        if not self.method:
1309*4882a593Smuzhiyun            raise NoMethodError(url)
1310*4882a593Smuzhiyun
1311*4882a593Smuzhiyun        if localonly and not isinstance(self.method, local.Local):
1312*4882a593Smuzhiyun            raise NonLocalMethod()
1313*4882a593Smuzhiyun
1314*4882a593Smuzhiyun        if self.parm.get("proto", None) and "protocol" not in self.parm:
1315*4882a593Smuzhiyun            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
1316*4882a593Smuzhiyun            self.parm["protocol"] = self.parm.get("proto", None)
1317*4882a593Smuzhiyun
1318*4882a593Smuzhiyun        if hasattr(self.method, "urldata_init"):
1319*4882a593Smuzhiyun            self.method.urldata_init(self, d)
1320*4882a593Smuzhiyun
1321*4882a593Smuzhiyun        if "localpath" in self.parm:
1322*4882a593Smuzhiyun            # if user sets localpath for file, use it instead.
1323*4882a593Smuzhiyun            self.localpath = self.parm["localpath"]
1324*4882a593Smuzhiyun            self.basename = os.path.basename(self.localpath)
1325*4882a593Smuzhiyun        elif self.localfile:
1326*4882a593Smuzhiyun            self.localpath = self.method.localpath(self, d)
1327*4882a593Smuzhiyun
1328*4882a593Smuzhiyun        dldir = d.getVar("DL_DIR")
1329*4882a593Smuzhiyun
1330*4882a593Smuzhiyun        if not self.needdonestamp:
1331*4882a593Smuzhiyun            return
1332*4882a593Smuzhiyun
1333*4882a593Smuzhiyun        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
1334*4882a593Smuzhiyun        if self.localpath and self.localpath.startswith(dldir):
1335*4882a593Smuzhiyun            basepath = self.localpath
1336*4882a593Smuzhiyun        elif self.localpath:
1337*4882a593Smuzhiyun            basepath = dldir + os.sep + os.path.basename(self.localpath)
1338*4882a593Smuzhiyun        elif self.basepath or self.basename:
1339*4882a593Smuzhiyun            basepath = dldir + os.sep + (self.basepath or self.basename)
1340*4882a593Smuzhiyun        else:
1341*4882a593Smuzhiyun            bb.fatal("Can't determine lock path for url %s" % url)
1342*4882a593Smuzhiyun
1343*4882a593Smuzhiyun        self.donestamp = basepath + '.done'
1344*4882a593Smuzhiyun        self.lockfile = basepath + '.lock'
1345*4882a593Smuzhiyun
1346*4882a593Smuzhiyun    def setup_revisions(self, d):
1347*4882a593Smuzhiyun        self.revisions = {}
1348*4882a593Smuzhiyun        for name in self.names:
1349*4882a593Smuzhiyun            self.revisions[name] = srcrev_internal_helper(self, d, name)
1350*4882a593Smuzhiyun
1351*4882a593Smuzhiyun        # Add compatibility code for the case where no name is specified
1352*4882a593Smuzhiyun        if len(self.names) == 1:
1353*4882a593Smuzhiyun            self.revision = self.revisions[self.names[0]]
1354*4882a593Smuzhiyun
1355*4882a593Smuzhiyun    def setup_localpath(self, d):
1356*4882a593Smuzhiyun        if not self.localpath:
1357*4882a593Smuzhiyun            self.localpath = self.method.localpath(self, d)
1358*4882a593Smuzhiyun
1359*4882a593Smuzhiyun    def getSRCDate(self, d):
1360*4882a593Smuzhiyun        """
1361*4882a593Smuzhiyun        Return the SRC Date for the component
1362*4882a593Smuzhiyun
1363*4882a593Smuzhiyun        d is a bb.data instance
1364*4882a593Smuzhiyun        """
1365*4882a593Smuzhiyun        if "srcdate" in self.parm:
1366*4882a593Smuzhiyun            return self.parm['srcdate']
1367*4882a593Smuzhiyun
1368*4882a593Smuzhiyun        pn = d.getVar("PN")
1369*4882a593Smuzhiyun
1370*4882a593Smuzhiyun        if pn:
1371*4882a593Smuzhiyun            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
1372*4882a593Smuzhiyun
1373*4882a593Smuzhiyun        return d.getVar("SRCDATE") or d.getVar("DATE")
1374*4882a593Smuzhiyun
1375*4882a593Smuzhiyunclass FetchMethod(object):
1376*4882a593Smuzhiyun    """Base class for 'fetch'ing data"""
1377*4882a593Smuzhiyun
1378*4882a593Smuzhiyun    def __init__(self, urls=None):
1379*4882a593Smuzhiyun        self.urls = []
1380*4882a593Smuzhiyun
1381*4882a593Smuzhiyun    def supports(self, urldata, d):
1382*4882a593Smuzhiyun        """
1383*4882a593Smuzhiyun        Check to see if this fetch class supports a given url.
1384*4882a593Smuzhiyun        """
1385*4882a593Smuzhiyun        return 0
1386*4882a593Smuzhiyun
1387*4882a593Smuzhiyun    def localpath(self, urldata, d):
1388*4882a593Smuzhiyun        """
1389*4882a593Smuzhiyun        Return the local filename of a given url assuming a successful fetch.
1390*4882a593Smuzhiyun        Can also set up variables in urldata for use in download() (saving code
1391*4882a593Smuzhiyun        duplication and duplicate code execution)
1392*4882a593Smuzhiyun        """
1393*4882a593Smuzhiyun        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
1394*4882a593Smuzhiyun
1395*4882a593Smuzhiyun    def supports_checksum(self, urldata):
1396*4882a593Smuzhiyun        """
1397*4882a593Smuzhiyun        Is localpath something that can be represented by a checksum?
1398*4882a593Smuzhiyun        """
1399*4882a593Smuzhiyun
1400*4882a593Smuzhiyun        # We cannot compute checksums for directories
1401*4882a593Smuzhiyun        if os.path.isdir(urldata.localpath):
1402*4882a593Smuzhiyun            return False
1403*4882a593Smuzhiyun        return True
1404*4882a593Smuzhiyun
1405*4882a593Smuzhiyun    def recommends_checksum(self, urldata):
1406*4882a593Smuzhiyun        """
1407*4882a593Smuzhiyun        Is the backend one where checksumming is recommended (should warnings
1408*4882a593Smuzhiyun        be displayed if there is no checksum)?
1409*4882a593Smuzhiyun        """
1410*4882a593Smuzhiyun        return False
1411*4882a593Smuzhiyun
1412*4882a593Smuzhiyun    def verify_donestamp(self, ud, d):
1413*4882a593Smuzhiyun        """
1414*4882a593Smuzhiyun        Verify the donestamp file
1415*4882a593Smuzhiyun        """
1416*4882a593Smuzhiyun        return verify_donestamp(ud, d)
1417*4882a593Smuzhiyun
1418*4882a593Smuzhiyun    def update_donestamp(self, ud, d):
1419*4882a593Smuzhiyun        """
1420*4882a593Smuzhiyun        Update the donestamp file
1421*4882a593Smuzhiyun        """
1422*4882a593Smuzhiyun        update_stamp(ud, d)
1423*4882a593Smuzhiyun
1424*4882a593Smuzhiyun    def _strip_leading_slashes(self, relpath):
1425*4882a593Smuzhiyun        """
1426*4882a593Smuzhiyun        Remove leading slash as os.path.join can't cope
1427*4882a593Smuzhiyun        """
1428*4882a593Smuzhiyun        while os.path.isabs(relpath):
1429*4882a593Smuzhiyun            relpath = relpath[1:]
1430*4882a593Smuzhiyun        return relpath
1431*4882a593Smuzhiyun
1432*4882a593Smuzhiyun    def setUrls(self, urls):
1433*4882a593Smuzhiyun        self.__urls = urls
1434*4882a593Smuzhiyun
1435*4882a593Smuzhiyun    def getUrls(self):
1436*4882a593Smuzhiyun        return self.__urls
1437*4882a593Smuzhiyun
1438*4882a593Smuzhiyun    urls = property(getUrls, setUrls, None, "Urls property")
1439*4882a593Smuzhiyun
1440*4882a593Smuzhiyun    def need_update(self, ud, d):
1441*4882a593Smuzhiyun        """
1442*4882a593Smuzhiyun        Force a fetch, even if localpath exists?
1443*4882a593Smuzhiyun        """
1444*4882a593Smuzhiyun        if os.path.exists(ud.localpath):
1445*4882a593Smuzhiyun            return False
1446*4882a593Smuzhiyun        return True
1447*4882a593Smuzhiyun
1448*4882a593Smuzhiyun    def supports_srcrev(self):
1449*4882a593Smuzhiyun        """
1450*4882a593Smuzhiyun        The fetcher supports auto source revisions (SRCREV)
1451*4882a593Smuzhiyun        """
1452*4882a593Smuzhiyun        return False
1453*4882a593Smuzhiyun
1454*4882a593Smuzhiyun    def download(self, urldata, d):
1455*4882a593Smuzhiyun        """
1456*4882a593Smuzhiyun        Fetch urls
1457*4882a593Smuzhiyun        Assumes localpath was called first
1458*4882a593Smuzhiyun        """
1459*4882a593Smuzhiyun        raise NoMethodError(urldata.url)
1460*4882a593Smuzhiyun
1461*4882a593Smuzhiyun    def unpack(self, urldata, rootdir, data):
1462*4882a593Smuzhiyun        iterate = False
1463*4882a593Smuzhiyun        file = urldata.localpath
1464*4882a593Smuzhiyun
1465*4882a593Smuzhiyun        try:
1466*4882a593Smuzhiyun            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
1467*4882a593Smuzhiyun        except ValueError as exc:
1468*4882a593Smuzhiyun            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
1469*4882a593Smuzhiyun                     (file, urldata.parm.get('unpack')))
1470*4882a593Smuzhiyun
1471*4882a593Smuzhiyun        base, ext = os.path.splitext(file)
1472*4882a593Smuzhiyun        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
1473*4882a593Smuzhiyun            efile = os.path.join(rootdir, os.path.basename(base))
1474*4882a593Smuzhiyun        else:
1475*4882a593Smuzhiyun            efile = file
1476*4882a593Smuzhiyun        cmd = None
1477*4882a593Smuzhiyun
1478*4882a593Smuzhiyun        if unpack:
1479*4882a593Smuzhiyun            tar_cmd = 'tar --extract --no-same-owner'
1480*4882a593Smuzhiyun            if 'striplevel' in urldata.parm:
1481*4882a593Smuzhiyun                tar_cmd += ' --strip-components=%s' %  urldata.parm['striplevel']
1482*4882a593Smuzhiyun            if file.endswith('.tar'):
1483*4882a593Smuzhiyun                cmd = '%s -f %s' % (tar_cmd, file)
1484*4882a593Smuzhiyun            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
1485*4882a593Smuzhiyun                cmd = '%s -z -f %s' % (tar_cmd, file)
1486*4882a593Smuzhiyun            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
1487*4882a593Smuzhiyun                cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
1488*4882a593Smuzhiyun            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
1489*4882a593Smuzhiyun                cmd = 'gzip -dc %s > %s' % (file, efile)
1490*4882a593Smuzhiyun            elif file.endswith('.bz2'):
1491*4882a593Smuzhiyun                cmd = 'bzip2 -dc %s > %s' % (file, efile)
1492*4882a593Smuzhiyun            elif file.endswith('.txz') or file.endswith('.tar.xz'):
1493*4882a593Smuzhiyun                cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
1494*4882a593Smuzhiyun            elif file.endswith('.xz'):
1495*4882a593Smuzhiyun                cmd = 'xz -dc %s > %s' % (file, efile)
1496*4882a593Smuzhiyun            elif file.endswith('.tar.lz'):
1497*4882a593Smuzhiyun                cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
1498*4882a593Smuzhiyun            elif file.endswith('.lz'):
1499*4882a593Smuzhiyun                cmd = 'lzip -dc %s > %s' % (file, efile)
1500*4882a593Smuzhiyun            elif file.endswith('.tar.7z'):
1501*4882a593Smuzhiyun                cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
1502*4882a593Smuzhiyun            elif file.endswith('.7z'):
1503*4882a593Smuzhiyun                cmd = '7za x -y %s 1>/dev/null' % file
1504*4882a593Smuzhiyun            elif file.endswith('.tzst') or file.endswith('.tar.zst'):
1505*4882a593Smuzhiyun                cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
1506*4882a593Smuzhiyun            elif file.endswith('.zst'):
1507*4882a593Smuzhiyun                cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
1508*4882a593Smuzhiyun            elif file.endswith('.zip') or file.endswith('.jar'):
1509*4882a593Smuzhiyun                try:
1510*4882a593Smuzhiyun                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
1511*4882a593Smuzhiyun                except ValueError as exc:
1512*4882a593Smuzhiyun                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
1513*4882a593Smuzhiyun                             (file, urldata.parm.get('dos')))
1514*4882a593Smuzhiyun                cmd = 'unzip -q -o'
1515*4882a593Smuzhiyun                if dos:
1516*4882a593Smuzhiyun                    cmd = '%s -a' % cmd
1517*4882a593Smuzhiyun                cmd = "%s '%s'" % (cmd, file)
1518*4882a593Smuzhiyun            elif file.endswith('.rpm') or file.endswith('.srpm'):
1519*4882a593Smuzhiyun                if 'extract' in urldata.parm:
1520*4882a593Smuzhiyun                    unpack_file = urldata.parm.get('extract')
1521*4882a593Smuzhiyun                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
1522*4882a593Smuzhiyun                    iterate = True
1523*4882a593Smuzhiyun                    iterate_file = unpack_file
1524*4882a593Smuzhiyun                else:
1525*4882a593Smuzhiyun                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
1526*4882a593Smuzhiyun            elif file.endswith('.deb') or file.endswith('.ipk'):
1527*4882a593Smuzhiyun                output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
1528*4882a593Smuzhiyun                datafile = None
1529*4882a593Smuzhiyun                if output:
1530*4882a593Smuzhiyun                    for line in output.decode().splitlines():
1531*4882a593Smuzhiyun                        if line.startswith('data.tar.'):
1532*4882a593Smuzhiyun                            datafile = line
1533*4882a593Smuzhiyun                            break
1534*4882a593Smuzhiyun                    else:
1535*4882a593Smuzhiyun                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
1536*4882a593Smuzhiyun                else:
1537*4882a593Smuzhiyun                    raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
1538*4882a593Smuzhiyun                cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
1539*4882a593Smuzhiyun
1540*4882a593Smuzhiyun        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
1541*4882a593Smuzhiyun        if 'subdir' in urldata.parm:
1542*4882a593Smuzhiyun            subdir = urldata.parm.get('subdir')
1543*4882a593Smuzhiyun            if os.path.isabs(subdir):
1544*4882a593Smuzhiyun                if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
1545*4882a593Smuzhiyun                    raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
1546*4882a593Smuzhiyun                unpackdir = subdir
1547*4882a593Smuzhiyun            else:
1548*4882a593Smuzhiyun                unpackdir = os.path.join(rootdir, subdir)
1549*4882a593Smuzhiyun            bb.utils.mkdirhier(unpackdir)
1550*4882a593Smuzhiyun        else:
1551*4882a593Smuzhiyun            unpackdir = rootdir
1552*4882a593Smuzhiyun
1553*4882a593Smuzhiyun        if not unpack or not cmd:
1554*4882a593Smuzhiyun            # If file == dest, then avoid any copies, as we already put the file into dest!
1555*4882a593Smuzhiyun            dest = os.path.join(unpackdir, os.path.basename(file))
1556*4882a593Smuzhiyun            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
1557*4882a593Smuzhiyun                destdir = '.'
1558*4882a593Smuzhiyun                # For file:// entries all intermediate dirs in path must be created at destination
1559*4882a593Smuzhiyun                if urldata.type == "file":
1560*4882a593Smuzhiyun                    # A trailing '/' would make the copy go to the wrong place
1561*4882a593Smuzhiyun                    urlpath = urldata.path.rstrip('/')
1562*4882a593Smuzhiyun                    # We want files placed relative to cwd, so strip any leading '/'
1563*4882a593Smuzhiyun                    urlpath = urlpath.lstrip('/')
1564*4882a593Smuzhiyun                    if urlpath.find("/") != -1:
1565*4882a593Smuzhiyun                        destdir = urlpath.rsplit("/", 1)[0] + '/'
1566*4882a593Smuzhiyun                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
1567*4882a593Smuzhiyun                cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
1568*4882a593Smuzhiyun
1569*4882a593Smuzhiyun        if not cmd:
1570*4882a593Smuzhiyun            return
1571*4882a593Smuzhiyun
1572*4882a593Smuzhiyun        path = data.getVar('PATH')
1573*4882a593Smuzhiyun        if path:
1574*4882a593Smuzhiyun            cmd = "PATH=\"%s\" %s" % (path, cmd)
1575*4882a593Smuzhiyun        bb.note("Unpacking %s to %s/" % (file, unpackdir))
1576*4882a593Smuzhiyun        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)
1577*4882a593Smuzhiyun
1578*4882a593Smuzhiyun        if ret != 0:
1579*4882a593Smuzhiyun            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
1580*4882a593Smuzhiyun
1581*4882a593Smuzhiyun        if iterate is True:
1582*4882a593Smuzhiyun            iterate_urldata = urldata
1583*4882a593Smuzhiyun            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
1584*4882a593Smuzhiyun            self.unpack(urldata, rootdir, data)
1585*4882a593Smuzhiyun
1586*4882a593Smuzhiyun        return
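
        # Illustrative note (editor's addition): unpack behaviour is driven by the
        # URL parameters handled above, e.g.
        #
        #     SRC_URI = "https://example.com/foo-1.0.tar.xz;striplevel=1;subdir=foo"
        #     SRC_URI = "file://local.zip;unpack=0"                    # copy, don't extract
        #     SRC_URI = "https://example.com/tool.rpm;extract=usr/bin/tool"
        #
        # Archive types are recognised purely by file extension; anything
        # unrecognised simply falls through to the plain copy branch.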
1587*4882a593Smuzhiyun
1588*4882a593Smuzhiyun    def clean(self, urldata, d):
1589*4882a593Smuzhiyun        """
1590*4882a593Smuzhiyun        Clean any existing full or partial download
1591*4882a593Smuzhiyun        """
1592*4882a593Smuzhiyun        bb.utils.remove(urldata.localpath)
1593*4882a593Smuzhiyun
1594*4882a593Smuzhiyun    def try_premirror(self, urldata, d):
1595*4882a593Smuzhiyun        """
1596*4882a593Smuzhiyun        Should premirrors be used?
1597*4882a593Smuzhiyun        """
1598*4882a593Smuzhiyun        return True
1599*4882a593Smuzhiyun
1600*4882a593Smuzhiyun    def try_mirrors(self, fetch, urldata, d, mirrors, check=False):
1601*4882a593Smuzhiyun        """
1602*4882a593Smuzhiyun        Try to use a mirror
1603*4882a593Smuzhiyun        """
1604*4882a593Smuzhiyun        return bool(try_mirrors(fetch, d, urldata, mirrors, check))
1605*4882a593Smuzhiyun
1606*4882a593Smuzhiyun    def checkstatus(self, fetch, urldata, d):
1607*4882a593Smuzhiyun        """
1608*4882a593Smuzhiyun        Check the status of a URL
1609*4882a593Smuzhiyun        Assumes localpath was called first
1610*4882a593Smuzhiyun        """
1611*4882a593Smuzhiyun        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
1612*4882a593Smuzhiyun        return True
1613*4882a593Smuzhiyun
1614*4882a593Smuzhiyun    def latest_revision(self, ud, d, name):
1615*4882a593Smuzhiyun        """
1616*4882a593Smuzhiyun        Look in the cache for the latest revision, if not present ask the SCM.
1617*4882a593Smuzhiyun        """
1618*4882a593Smuzhiyun        if not hasattr(self, "_latest_revision"):
1619*4882a593Smuzhiyun            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
1620*4882a593Smuzhiyun
1621*4882a593Smuzhiyun        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1622*4882a593Smuzhiyun        key = self.generate_revision_key(ud, d, name)
1623*4882a593Smuzhiyun        try:
1624*4882a593Smuzhiyun            return revs[key]
1625*4882a593Smuzhiyun        except KeyError:
1626*4882a593Smuzhiyun            revs[key] = rev = self._latest_revision(ud, d, name)
1627*4882a593Smuzhiyun            return rev
1628*4882a593Smuzhiyun
1629*4882a593Smuzhiyun    def sortable_revision(self, ud, d, name):
1630*4882a593Smuzhiyun        latest_rev = self._build_revision(ud, d, name)
1631*4882a593Smuzhiyun        return True, str(latest_rev)
1632*4882a593Smuzhiyun
1633*4882a593Smuzhiyun    def generate_revision_key(self, ud, d, name):
1634*4882a593Smuzhiyun        return self._revision_key(ud, d, name)
1635*4882a593Smuzhiyun
1636*4882a593Smuzhiyun    def latest_versionstring(self, ud, d):
1637*4882a593Smuzhiyun        """
1638*4882a593Smuzhiyun        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
1639*4882a593Smuzhiyun        by searching through the tags output of ls-remote, comparing
1640*4882a593Smuzhiyun        versions and returning the highest match as a (version, revision) pair.
1641*4882a593Smuzhiyun        """
1642*4882a593Smuzhiyun        return ('', '')
1643*4882a593Smuzhiyun
1644*4882a593Smuzhiyun    def done(self, ud, d):
1645*4882a593Smuzhiyun        """
1646*4882a593Smuzhiyun        Is the download done?
1647*4882a593Smuzhiyun        """
1648*4882a593Smuzhiyun        if os.path.exists(ud.localpath):
1649*4882a593Smuzhiyun            return True
1650*4882a593Smuzhiyun        return False
1651*4882a593Smuzhiyun
1652*4882a593Smuzhiyun    def implicit_urldata(self, ud, d):
1653*4882a593Smuzhiyun        """
1654*4882a593Smuzhiyun        Get a list of FetchData objects for any implicit URLs that will also
1655*4882a593Smuzhiyun        be downloaded when we fetch the given URL.
1656*4882a593Smuzhiyun        """
1657*4882a593Smuzhiyun        return []
1658*4882a593Smuzhiyun
1659*4882a593Smuzhiyunclass Fetch(object):
1660*4882a593Smuzhiyun    def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
1661*4882a593Smuzhiyun        if localonly and cache:
1662*4882a593Smuzhiyun            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at the same time")
1663*4882a593Smuzhiyun
1664*4882a593Smuzhiyun        if not urls:
1665*4882a593Smuzhiyun            urls = d.getVar("SRC_URI").split()
1666*4882a593Smuzhiyun        self.urls = urls
1667*4882a593Smuzhiyun        self.d = d
1668*4882a593Smuzhiyun        self.ud = {}
1669*4882a593Smuzhiyun        self.connection_cache = connection_cache
1670*4882a593Smuzhiyun
1671*4882a593Smuzhiyun        fn = d.getVar('FILE')
1672*4882a593Smuzhiyun        mc = d.getVar('__BBMULTICONFIG') or ""
1673*4882a593Smuzhiyun        key = None
1674*4882a593Smuzhiyun        if cache and fn:
1675*4882a593Smuzhiyun            key = mc + fn + str(id(d))
1676*4882a593Smuzhiyun        if key in urldata_cache:
1677*4882a593Smuzhiyun            self.ud = urldata_cache[key]
1678*4882a593Smuzhiyun
1679*4882a593Smuzhiyun        for url in urls:
1680*4882a593Smuzhiyun            if url not in self.ud:
1681*4882a593Smuzhiyun                try:
1682*4882a593Smuzhiyun                    self.ud[url] = FetchData(url, d, localonly)
1683*4882a593Smuzhiyun                except NonLocalMethod:
1684*4882a593Smuzhiyun                    if localonly:
1685*4882a593Smuzhiyun                        self.ud[url] = None
1687*4882a593Smuzhiyun
1688*4882a593Smuzhiyun        if key:
1689*4882a593Smuzhiyun            urldata_cache[key] = self.ud
1690*4882a593Smuzhiyun
1691*4882a593Smuzhiyun    def localpath(self, url):
1692*4882a593Smuzhiyun        if url not in self.urls:
1693*4882a593Smuzhiyun            self.ud[url] = FetchData(url, self.d)
1694*4882a593Smuzhiyun
1695*4882a593Smuzhiyun        self.ud[url].setup_localpath(self.d)
1696*4882a593Smuzhiyun        return self.d.expand(self.ud[url].localpath)
1697*4882a593Smuzhiyun
1698*4882a593Smuzhiyun    def localpaths(self):
1699*4882a593Smuzhiyun        """
1700*4882a593Smuzhiyun        Return a list of the local filenames, assuming successful fetch
1701*4882a593Smuzhiyun        """
1702*4882a593Smuzhiyun        local = []
1703*4882a593Smuzhiyun
1704*4882a593Smuzhiyun        for u in self.urls:
1705*4882a593Smuzhiyun            ud = self.ud[u]
1706*4882a593Smuzhiyun            ud.setup_localpath(self.d)
1707*4882a593Smuzhiyun            local.append(ud.localpath)
1708*4882a593Smuzhiyun
1709*4882a593Smuzhiyun        return local
1710*4882a593Smuzhiyun
1711*4882a593Smuzhiyun    def download(self, urls=None):
1712*4882a593Smuzhiyun        """
1713*4882a593Smuzhiyun        Fetch all urls
1714*4882a593Smuzhiyun        """
1715*4882a593Smuzhiyun        if not urls:
1716*4882a593Smuzhiyun            urls = self.urls
1717*4882a593Smuzhiyun
1718*4882a593Smuzhiyun        network = self.d.getVar("BB_NO_NETWORK")
1719*4882a593Smuzhiyun        premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
1720*4882a593Smuzhiyun
1721*4882a593Smuzhiyun        for u in urls:
1722*4882a593Smuzhiyun            ud = self.ud[u]
1723*4882a593Smuzhiyun            ud.setup_localpath(self.d)
1724*4882a593Smuzhiyun            m = ud.method
1725*4882a593Smuzhiyun            done = False
1726*4882a593Smuzhiyun
1727*4882a593Smuzhiyun            if ud.lockfile:
1728*4882a593Smuzhiyun                lf = bb.utils.lockfile(ud.lockfile)
1729*4882a593Smuzhiyun
1730*4882a593Smuzhiyun            try:
1731*4882a593Smuzhiyun                self.d.setVar("BB_NO_NETWORK", network)
1732*4882a593Smuzhiyun
1733*4882a593Smuzhiyun                if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
1734*4882a593Smuzhiyun                    done = True
1735*4882a593Smuzhiyun                elif m.try_premirror(ud, self.d):
1736*4882a593Smuzhiyun                    logger.debug("Trying PREMIRRORS")
1737*4882a593Smuzhiyun                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
1738*4882a593Smuzhiyun                    done = m.try_mirrors(self, ud, self.d, mirrors)
1739*4882a593Smuzhiyun                    if done:
1740*4882a593Smuzhiyun                        try:
1741*4882a593Smuzhiyun                            # early checksum verification so that if the checksum of the premirror
1742*4882a593Smuzhiyun                            # contents mismatch the fetcher can still try upstream and mirrors
1743*4882a593Smuzhiyun                            m.update_donestamp(ud, self.d)
1744*4882a593Smuzhiyun                        except ChecksumError as e:
1745*4882a593Smuzhiyun                            logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
1746*4882a593Smuzhiyun                            logger.debug(str(e))
1747*4882a593Smuzhiyun                            done = False
1748*4882a593Smuzhiyun
1749*4882a593Smuzhiyun                if premirroronly:
1750*4882a593Smuzhiyun                    self.d.setVar("BB_NO_NETWORK", "1")
1751*4882a593Smuzhiyun
1752*4882a593Smuzhiyun                firsterr = None
1753*4882a593Smuzhiyun                verified_stamp = False
1754*4882a593Smuzhiyun                if done:
1755*4882a593Smuzhiyun                    verified_stamp = m.verify_donestamp(ud, self.d)
1756*4882a593Smuzhiyun                if not done and (not verified_stamp or m.need_update(ud, self.d)):
1757*4882a593Smuzhiyun                    try:
1758*4882a593Smuzhiyun                        if not trusted_network(self.d, ud.url):
1759*4882a593Smuzhiyun                            raise UntrustedUrl(ud.url)
1760*4882a593Smuzhiyun                        logger.debug("Trying Upstream")
1761*4882a593Smuzhiyun                        m.download(ud, self.d)
1762*4882a593Smuzhiyun                        if hasattr(m, "build_mirror_data"):
1763*4882a593Smuzhiyun                            m.build_mirror_data(ud, self.d)
1764*4882a593Smuzhiyun                        done = True
1765*4882a593Smuzhiyun                        # Early checksum verification, so that if the checksum mismatches,
1766*4882a593Smuzhiyun                        # the fetcher still has a chance to fetch from a mirror
1767*4882a593Smuzhiyun                        m.update_donestamp(ud, self.d)
1768*4882a593Smuzhiyun
1769*4882a593Smuzhiyun                    except bb.fetch2.NetworkAccess:
1770*4882a593Smuzhiyun                        raise
1771*4882a593Smuzhiyun
1772*4882a593Smuzhiyun                    except BBFetchException as e:
1773*4882a593Smuzhiyun                        if isinstance(e, ChecksumError):
1774*4882a593Smuzhiyun                            logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
1775*4882a593Smuzhiyun                            logger.debug(str(e))
1776*4882a593Smuzhiyun                            if os.path.exists(ud.localpath):
1777*4882a593Smuzhiyun                                rename_bad_checksum(ud, e.checksum)
1778*4882a593Smuzhiyun                        elif isinstance(e, NoChecksumError):
1779*4882a593Smuzhiyun                            raise
1780*4882a593Smuzhiyun                        else:
1781*4882a593Smuzhiyun                            logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
1782*4882a593Smuzhiyun                            logger.debug(str(e))
1783*4882a593Smuzhiyun                        firsterr = e
1784*4882a593Smuzhiyun                        # Remove any incomplete fetch
1785*4882a593Smuzhiyun                        if not verified_stamp:
1786*4882a593Smuzhiyun                            m.clean(ud, self.d)
1787*4882a593Smuzhiyun                        logger.debug("Trying MIRRORS")
1788*4882a593Smuzhiyun                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
1789*4882a593Smuzhiyun                        done = m.try_mirrors(self, ud, self.d, mirrors)
1790*4882a593Smuzhiyun
1791*4882a593Smuzhiyun                if not done or not m.done(ud, self.d):
1792*4882a593Smuzhiyun                    if firsterr:
1793*4882a593Smuzhiyun                        logger.error(str(firsterr))
1794*4882a593Smuzhiyun                    raise FetchError("Unable to fetch URL from any source.", u)
1795*4882a593Smuzhiyun
1796*4882a593Smuzhiyun                m.update_donestamp(ud, self.d)
1797*4882a593Smuzhiyun
1798*4882a593Smuzhiyun            except IOError as e:
1799*4882a593Smuzhiyun                if e.errno in [errno.ESTALE]:
1800*4882a593Smuzhiyun                    logger.error("Stale Error Observed %s." % u)
1801*4882a593Smuzhiyun                    raise ChecksumError("Stale Error Detected")
1802*4882a593Smuzhiyun
1803*4882a593Smuzhiyun            except BBFetchException as e:
1804*4882a593Smuzhiyun                if isinstance(e, ChecksumError):
1805*4882a593Smuzhiyun                    logger.error("Checksum failure fetching %s" % u)
1806*4882a593Smuzhiyun                raise
1807*4882a593Smuzhiyun
1808*4882a593Smuzhiyun            finally:
1809*4882a593Smuzhiyun                if ud.lockfile:
1810*4882a593Smuzhiyun                    bb.utils.unlockfile(lf)
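
    # Illustrative sketch (editor's addition): typical use of this class from a
    # fetch task, assuming `d` is the datastore of the current recipe:
    #
    #     fetcher = bb.fetch2.Fetch([], d)        # urls default to SRC_URI
    #     fetcher.download()                      # premirrors -> upstream -> mirrors
    #     fetcher.unpack(d.getVar('WORKDIR'))
    #
    # When BB_FETCH_PREMIRRORONLY is set, download() forces BB_NO_NETWORK after
    # the premirror pass, so the later stages cannot touch the network.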
1811*4882a593Smuzhiyun
1812*4882a593Smuzhiyun    def checkstatus(self, urls=None):
1813*4882a593Smuzhiyun        """
1814*4882a593Smuzhiyun        Check all URLs exist upstream.
1815*4882a593Smuzhiyun
1816*4882a593Smuzhiyun        Returns None if the URLs exist, raises FetchError if the check wasn't
1817*4882a593Smuzhiyun        successful but there wasn't an error (such as file not found), and
1818*4882a593Smuzhiyun        raises other exceptions in error cases.
1819*4882a593Smuzhiyun        """
1820*4882a593Smuzhiyun
1821*4882a593Smuzhiyun        if not urls:
1822*4882a593Smuzhiyun            urls = self.urls
1823*4882a593Smuzhiyun
1824*4882a593Smuzhiyun        for u in urls:
1825*4882a593Smuzhiyun            ud = self.ud[u]
1826*4882a593Smuzhiyun            ud.setup_localpath(self.d)
1827*4882a593Smuzhiyun            m = ud.method
1828*4882a593Smuzhiyun            logger.debug("Testing URL %s", u)
1829*4882a593Smuzhiyun            # First try checking uri, u, from PREMIRRORS
1830*4882a593Smuzhiyun            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
1831*4882a593Smuzhiyun            ret = m.try_mirrors(self, ud, self.d, mirrors, True)
1832*4882a593Smuzhiyun            if not ret:
1833*4882a593Smuzhiyun                # Next try checking from the original uri, u
1834*4882a593Smuzhiyun                ret = m.checkstatus(self, ud, self.d)
1835*4882a593Smuzhiyun                if not ret:
1836*4882a593Smuzhiyun                    # Finally, try checking uri, u, from MIRRORS
1837*4882a593Smuzhiyun                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
1838*4882a593Smuzhiyun                    ret = m.try_mirrors(self, ud, self.d, mirrors, True)
1839*4882a593Smuzhiyun
1840*4882a593Smuzhiyun            if not ret:
1841*4882a593Smuzhiyun                raise FetchError("URL %s doesn't work" % u, u)
1842*4882a593Smuzhiyun
1843*4882a593Smuzhiyun    def unpack(self, root, urls=None):
1844*4882a593Smuzhiyun        """
1845*4882a593Smuzhiyun        Unpack urls to root
1846*4882a593Smuzhiyun        """
1847*4882a593Smuzhiyun
1848*4882a593Smuzhiyun        if not urls:
1849*4882a593Smuzhiyun            urls = self.urls
1850*4882a593Smuzhiyun
1851*4882a593Smuzhiyun        for u in urls:
1852*4882a593Smuzhiyun            ud = self.ud[u]
1853*4882a593Smuzhiyun            ud.setup_localpath(self.d)
1854*4882a593Smuzhiyun
1855*4882a593Smuzhiyun            if ud.lockfile:
1856*4882a593Smuzhiyun                lf = bb.utils.lockfile(ud.lockfile)
1857*4882a593Smuzhiyun
1858*4882a593Smuzhiyun            ud.method.unpack(ud, root, self.d)
1859*4882a593Smuzhiyun
1860*4882a593Smuzhiyun            if ud.lockfile:
1861*4882a593Smuzhiyun                bb.utils.unlockfile(lf)
1862*4882a593Smuzhiyun
1863*4882a593Smuzhiyun    def clean(self, urls=None):
1864*4882a593Smuzhiyun        """
1865*4882a593Smuzhiyun        Clean files that the fetcher gets or places
1866*4882a593Smuzhiyun        """
1867*4882a593Smuzhiyun
1868*4882a593Smuzhiyun        if not urls:
1869*4882a593Smuzhiyun            urls = self.urls
1870*4882a593Smuzhiyun
1871*4882a593Smuzhiyun        for url in urls:
1872*4882a593Smuzhiyun            if url not in self.ud:
1873*4882a593Smuzhiyun                self.ud[url] = FetchData(url, self.d)
1874*4882a593Smuzhiyun            ud = self.ud[url]
1875*4882a593Smuzhiyun            ud.setup_localpath(self.d)
1876*4882a593Smuzhiyun
1877*4882a593Smuzhiyun            if not ud.localfile and ud.localpath is None:
1878*4882a593Smuzhiyun                continue
1879*4882a593Smuzhiyun
1880*4882a593Smuzhiyun            if ud.lockfile:
1881*4882a593Smuzhiyun                lf = bb.utils.lockfile(ud.lockfile)
1882*4882a593Smuzhiyun
1883*4882a593Smuzhiyun            ud.method.clean(ud, self.d)
1884*4882a593Smuzhiyun            if ud.donestamp:
1885*4882a593Smuzhiyun                bb.utils.remove(ud.donestamp)
1886*4882a593Smuzhiyun
1887*4882a593Smuzhiyun            if ud.lockfile:
1888*4882a593Smuzhiyun                bb.utils.unlockfile(lf)
1889*4882a593Smuzhiyun
1890*4882a593Smuzhiyun    def expanded_urldata(self, urls=None):
1891*4882a593Smuzhiyun        """
1892*4882a593Smuzhiyun        Get an expanded list of FetchData objects covering both the given
1893*4882a593Smuzhiyun        URLS and any additional implicit URLs that are added automatically by
1894*4882a593Smuzhiyun        the appropriate FetchMethod.
1895*4882a593Smuzhiyun        """
1896*4882a593Smuzhiyun
1897*4882a593Smuzhiyun        if not urls:
1898*4882a593Smuzhiyun            urls = self.urls
1899*4882a593Smuzhiyun
1900*4882a593Smuzhiyun        urldata = []
1901*4882a593Smuzhiyun        for url in urls:
1902*4882a593Smuzhiyun            ud = self.ud[url]
1903*4882a593Smuzhiyun            urldata.append(ud)
1904*4882a593Smuzhiyun            urldata += ud.method.implicit_urldata(ud, self.d)
1905*4882a593Smuzhiyun
1906*4882a593Smuzhiyun        return urldata
1907*4882a593Smuzhiyun
1908*4882a593Smuzhiyunclass FetchConnectionCache(object):
1909*4882a593Smuzhiyun    """
1910*4882a593Smuzhiyun        A class which represents a container for socket connections.
1911*4882a593Smuzhiyun    """
1912*4882a593Smuzhiyun    def __init__(self):
1913*4882a593Smuzhiyun        self.cache = {}
1914*4882a593Smuzhiyun
1915*4882a593Smuzhiyun    def get_connection_name(self, host, port):
1916*4882a593Smuzhiyun        return host + ':' + str(port)
1917*4882a593Smuzhiyun
1918*4882a593Smuzhiyun    def add_connection(self, host, port, connection):
1919*4882a593Smuzhiyun        cn = self.get_connection_name(host, port)
1920*4882a593Smuzhiyun
1921*4882a593Smuzhiyun        if cn not in self.cache:
1922*4882a593Smuzhiyun            self.cache[cn] = connection
1923*4882a593Smuzhiyun
1924*4882a593Smuzhiyun    def get_connection(self, host, port):
1925*4882a593Smuzhiyun        connection = None
1926*4882a593Smuzhiyun
1927*4882a593Smuzhiyun        cn = self.get_connection_name(host, port)
1928*4882a593Smuzhiyun        if cn in self.cache:
1929*4882a593Smuzhiyun            connection = self.cache[cn]
1930*4882a593Smuzhiyun
1931*4882a593Smuzhiyun        return connection
1932*4882a593Smuzhiyun
1933*4882a593Smuzhiyun    def remove_connection(self, host, port):
1934*4882a593Smuzhiyun        cn = self.get_connection_name(host, port)
1935*4882a593Smuzhiyun        if cn in self.cache:
1936*4882a593Smuzhiyun            self.cache[cn].close()
1937*4882a593Smuzhiyun            del self.cache[cn]
1938*4882a593Smuzhiyun
1939*4882a593Smuzhiyun    def close_connections(self):
1940*4882a593Smuzhiyun        for cn in list(self.cache.keys()):
1941*4882a593Smuzhiyun            self.cache[cn].close()
1942*4882a593Smuzhiyun            del self.cache[cn]
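
    # Illustrative sketch (editor's addition): backends that poll the same host
    # repeatedly can share sockets through this cache; make_connection() below is
    # a hypothetical, backend-specific helper:
    #
    #     cc = FetchConnectionCache()
    #     conn = cc.get_connection("example.com", 443)
    #     if conn is None:
    #         conn = make_connection("example.com", 443)
    #         cc.add_connection("example.com", 443, conn)
    #     ...
    #     cc.close_connections()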
1943*4882a593Smuzhiyun
1944*4882a593Smuzhiyunfrom . import cvs
1945*4882a593Smuzhiyunfrom . import git
1946*4882a593Smuzhiyunfrom . import gitsm
1947*4882a593Smuzhiyunfrom . import gitannex
1948*4882a593Smuzhiyunfrom . import local
1949*4882a593Smuzhiyunfrom . import svn
1950*4882a593Smuzhiyunfrom . import wget
1951*4882a593Smuzhiyunfrom . import ssh
1952*4882a593Smuzhiyunfrom . import sftp
1953*4882a593Smuzhiyunfrom . import s3
1954*4882a593Smuzhiyunfrom . import perforce
1955*4882a593Smuzhiyunfrom . import bzr
1956*4882a593Smuzhiyunfrom . import hg
1957*4882a593Smuzhiyunfrom . import osc
1958*4882a593Smuzhiyunfrom . import repo
1959*4882a593Smuzhiyunfrom . import clearcase
1960*4882a593Smuzhiyunfrom . import npm
1961*4882a593Smuzhiyunfrom . import npmsw
1962*4882a593Smuzhiyunfrom . import az
1963*4882a593Smuzhiyunfrom . import crate
1964*4882a593Smuzhiyun
1965*4882a593Smuzhiyunmethods.append(local.Local())
1966*4882a593Smuzhiyunmethods.append(wget.Wget())
1967*4882a593Smuzhiyunmethods.append(svn.Svn())
1968*4882a593Smuzhiyunmethods.append(git.Git())
1969*4882a593Smuzhiyunmethods.append(gitsm.GitSM())
1970*4882a593Smuzhiyunmethods.append(gitannex.GitANNEX())
1971*4882a593Smuzhiyunmethods.append(cvs.Cvs())
1972*4882a593Smuzhiyunmethods.append(ssh.SSH())
1973*4882a593Smuzhiyunmethods.append(sftp.SFTP())
1974*4882a593Smuzhiyunmethods.append(s3.S3())
1975*4882a593Smuzhiyunmethods.append(perforce.Perforce())
1976*4882a593Smuzhiyunmethods.append(bzr.Bzr())
1977*4882a593Smuzhiyunmethods.append(hg.Hg())
1978*4882a593Smuzhiyunmethods.append(osc.Osc())
1979*4882a593Smuzhiyunmethods.append(repo.Repo())
1980*4882a593Smuzhiyunmethods.append(clearcase.ClearCase())
1981*4882a593Smuzhiyunmethods.append(npm.Npm())
1982*4882a593Smuzhiyunmethods.append(npmsw.NpmShrinkWrap())
1983*4882a593Smuzhiyunmethods.append(az.Az())
1984*4882a593Smuzhiyunmethods.append(crate.Crate())
1985