# Copyright (C) 2020 Savoir-Faire Linux
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
BitBake 'Fetch' npm shrinkwrap implementation

The npmsw fetcher supports SRC_URI entries of the form:
SRC_URI = "npmsw://some.registry.url;OptionA=xxx;OptionB=xxx;..."

Supported SRC_URI options are:

- dev
   Set to 1 to also install devDependencies.

- destsuffix
    Specifies the directory to use to unpack the dependencies (default: ${S}).
"""

import json
import os
import re
import bb
from bb.fetch2 import Fetch
from bb.fetch2 import FetchMethod
from bb.fetch2 import ParameterError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import URI
from bb.fetch2.npm import npm_integrity
from bb.fetch2.npm import npm_localfile
from bb.fetch2.npm import npm_unpack
from bb.utils import is_semver
from bb.utils import lockfile
from bb.utils import unlockfile

def foreach_dependencies(shrinkwrap, callback=None, dev=False):
    """
    Run a callback for each dependency of a shrinkwrap file.

    The callback is invoked as:
        callback(name, params, deptree)
    with:
        name = the package name (string)
        params = the package parameters (dictionary)
        deptree = the package dependency tree (array of strings)

    Dependencies flagged "dev" are skipped unless dev is True; dependencies
    flagged "bundled" are always skipped (they ship inside their parent's
    tarball). Note that the tree is walked depth-first and children are
    visited before their parent is filtered.
    """
    def _walk_deps(deps, deptree):
        for name in deps:
            subtree = [*deptree, name]
            # Recurse first so nested dependencies are resolved as well.
            _walk_deps(deps[name].get("dependencies", {}), subtree)
            if callback is not None:
                if deps[name].get("dev", False) and not dev:
                    continue
                elif deps[name].get("bundled", False):
                    continue
                callback(name, deps[name], subtree)

    _walk_deps(shrinkwrap.get("dependencies", {}), [])

class NpmShrinkWrap(FetchMethod):
    """Class to fetch all package from a shrinkwrap file"""

    def supports(self, ud, d):
        """Check if a given url can be fetched with npmsw"""
        return ud.type in ["npmsw"]

    def urldata_init(self, ud, d):
        """Init npmsw specific variables within url data"""

        # Get the 'shrinkwrap' parameter: the url path is the path to the
        # shrinkwrap file ("npmsw://<path>[;options]").
        ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])

        # Get the 'dev' parameter
        ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)

        # Resolve the dependencies: each entry of ud.deps describes one
        # dependency with its proxy url, local path and unpack destination.
        ud.deps = []

        def _resolve_dependency(name, params, deptree):
            url = None
            localpath = None
            extrapaths = []
            destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
            destsuffix = os.path.join(*destsubdirs)
            unpack = True

            integrity = params.get("integrity", None)
            resolved = params.get("resolved", None)
            version = params.get("version", None)

            # Handle registry sources
            if is_semver(version) and integrity:
                # Handle duplicate dependencies without url
                if not resolved:
                    return

                localfile = npm_localfile(name, version)

                uri = URI(resolved)
                uri.params["downloadfilename"] = localfile

                # Forward the shrinkwrap integrity value as a fetcher
                # checksum so the proxy fetcher verifies the download.
                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

                # Create a resolve file to mimic the npm fetcher and allow
                # re-usability of the downloaded file.
                resolvefile = localpath + ".resolved"

                bb.utils.mkdirhier(os.path.dirname(resolvefile))
                with open(resolvefile, "w") as f:
                    f.write(url)

                extrapaths.append(resolvefile)

            # Handle http tarball sources
            elif version.startswith("http") and integrity:
                localfile = npm_localfile(os.path.basename(version))

                uri = URI(version)
                uri.params["downloadfilename"] = localfile

                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

            # Handle git sources
            elif version.startswith("git"):
                # Normalize the "github:owner/repo#rev" shorthand to a
                # full "git+https://" url before parsing.
                if version.startswith("github:"):
                    version = "git+https://github.com/" + version[len("github:"):]
                regex = re.compile(r"""
                    ^
                    git\+
                    (?P<protocol>[a-z]+)
                    ://
                    (?P<url>[^#]+)
                    \#
                    (?P<rev>[0-9a-f]+)
                    $
                    """, re.VERBOSE)

                match = regex.match(version)

                if not match:
                    raise ParameterError("Invalid git url: %s" % version, ud.url)

                groups = match.groupdict()

                uri = URI("git://" + str(groups["url"]))
                uri.params["protocol"] = str(groups["protocol"])
                uri.params["rev"] = str(groups["rev"])
                uri.params["destsuffix"] = destsuffix

                url = str(uri)

            # Handle local tarball and link sources
            elif version.startswith("file"):
                # Strip the "file:" prefix; anything that is not a .tgz
                # tarball is treated as a directory to copy verbatim.
                localpath = version[5:]
                if not version.endswith(".tgz"):
                    unpack = False

            else:
                raise ParameterError("Unsupported dependency: %s" % name, ud.url)

            ud.deps.append({
                "url": url,
                "localpath": localpath,
                "extrapaths": extrapaths,
                "destsuffix": destsuffix,
                "unpack": unpack,
            })

        try:
            with open(ud.shrinkwrap_file, "r") as f:
                shrinkwrap = json.load(f)
        except Exception as e:
            raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)

        foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)

        # Avoid conflicts between the environment data and:
        # - the proxy url revision
        # - the proxy url checksum
        data = bb.data.createCopy(d)
        data.delVar("SRCREV")
        data.delVarFlags("SRC_URI")

        # This fetcher resolves multiple URIs from a shrinkwrap file and then
        # forwards it to a proxy fetcher. The management of the donestamp file,
        # the lockfile and the checksums are forwarded to the proxy fetcher.
        ud.proxy = Fetch([dep["url"] for dep in ud.deps if dep["url"]], data)
        ud.needdonestamp = False

    @staticmethod
    def _foreach_proxy_method(ud, handle):
        """Run handle(method, urldata, data) for each proxied url.

        The per-url lockfile is released even if the handler raises, so a
        failing proxy url cannot leave a stale lock behind for the other
        fetcher entry points.
        """
        returns = []
        for proxy_url in ud.proxy.urls:
            proxy_ud = ud.proxy.ud[proxy_url]
            proxy_d = ud.proxy.d
            proxy_ud.setup_localpath(proxy_d)
            lf = lockfile(proxy_ud.lockfile)
            try:
                returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
            finally:
                unlockfile(lf)
        return returns

    def verify_donestamp(self, ud, d):
        """Verify the donestamp file"""
        def _handle(m, ud, d):
            return m.verify_donestamp(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def update_donestamp(self, ud, d):
        """Update the donestamp file"""
        def _handle(m, ud, d):
            m.update_donestamp(ud, d)
        self._foreach_proxy_method(ud, _handle)

    def need_update(self, ud, d):
        """Force a fetch, even if localpath exists ?"""
        def _handle(m, ud, d):
            return m.need_update(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def try_mirrors(self, fetch, ud, d, mirrors):
        """Try to use a mirror"""
        def _handle(m, ud, d):
            return m.try_mirrors(fetch, ud, d, mirrors)
        return all(self._foreach_proxy_method(ud, _handle))

    def download(self, ud, d):
        """Fetch url"""
        ud.proxy.download()

    def unpack(self, ud, rootdir, d):
        """Unpack the downloaded dependencies"""
        destdir = d.getVar("S")
        destsuffix = ud.parm.get("destsuffix")
        if destsuffix:
            destdir = os.path.join(rootdir, destsuffix)

        # Copy the shrinkwrap file next to the unpacked node_modules tree so
        # the npm build can reuse it.
        bb.utils.mkdirhier(destdir)
        bb.utils.copyfile(ud.shrinkwrap_file,
                          os.path.join(destdir, "npm-shrinkwrap.json"))

        # Dependencies with a proxy url and no local path are unpacked by the
        # proxy fetcher; the others are handled manually below.
        auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
        manual = [dep for dep in ud.deps if dep["localpath"]]

        if auto:
            ud.proxy.unpack(destdir, auto)

        for dep in manual:
            depdestdir = os.path.join(destdir, dep["destsuffix"])
            if dep["url"]:
                # Fetched tarball: unpack from the download directory.
                npm_unpack(dep["localpath"], depdestdir, d)
            else:
                # Local "file:" source, relative to the unpack directory.
                depsrcdir = os.path.join(destdir, dep["localpath"])
                if dep["unpack"]:
                    npm_unpack(depsrcdir, depdestdir, d)
                else:
                    # Link source: copy the directory contents in place.
                    bb.utils.mkdirhier(depdestdir)
                    cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
                    runfetchcmd(cmd, d, workdir=depdestdir)

    def clean(self, ud, d):
        """Clean any existing full or partial download"""
        ud.proxy.clean()

        # Clean extra files
        for dep in ud.deps:
            for path in dep["extrapaths"]:
                bb.utils.remove(path)

    def done(self, ud, d):
        """Is the download done ?"""
        def _handle(m, ud, d):
            return m.done(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))