# Copyright (C) 2020 Savoir-Faire Linux
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
BitBake 'Fetch' npm shrinkwrap implementation

The npm shrinkwrap fetcher supports SRC_URIs with the format:
SRC_URI = "npmsw://some/path/to/npm-shrinkwrap.json;OptionA=xxx;OptionB=xxx;..."

where the URL part points to the npm-shrinkwrap.json file describing the
dependencies to fetch.

Supported SRC_URI options are:

- dev
  Set to 1 to also install devDependencies.

- destsuffix
  Specifies the directory to use to unpack the dependencies (default: ${S}).
"""

import json
import os
import re
import bb
from bb.fetch2 import Fetch
from bb.fetch2 import FetchMethod
from bb.fetch2 import ParameterError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import URI
from bb.fetch2.npm import npm_integrity
from bb.fetch2.npm import npm_localfile
from bb.fetch2.npm import npm_unpack
from bb.utils import is_semver
from bb.utils import lockfile
from bb.utils import unlockfile

def foreach_dependencies(shrinkwrap, callback=None, dev=False):
    """
    Run a callback for each dependency of a shrinkwrap file.
    The callback uses the format:
        callback(name, params, deptree)
    with:
        name = the package name (string)
        params = the package parameters (dictionary)
        deptree = the package dependency tree (array of strings)
    """
    def _walk_deps(deps, deptree):
        for name in deps:
            subtree = [*deptree, name]
            _walk_deps(deps[name].get("dependencies", {}), subtree)
            if callback is not None:
                if deps[name].get("dev", False) and not dev:
                    continue
                elif deps[name].get("bundled", False):
                    continue
                callback(name, deps[name], subtree)

    _walk_deps(shrinkwrap.get("dependencies", {}), [])

class NpmShrinkWrap(FetchMethod):
    """Class to fetch all packages from a shrinkwrap file"""

    def supports(self, ud, d):
        """Check if a given url can be fetched with npmsw"""
        return ud.type in ["npmsw"]

    def urldata_init(self, ud, d):
        """Init npmsw specific variables within url data"""

        # Get the path of the shrinkwrap file from the URL
        ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])

        # Get the 'dev' parameter
        ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)

        # Resolve the dependencies
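        # A shrinkwrap "dependencies" entry for a registry package typically
        # looks like the following (illustrative example only, the name,
        # version and integrity value are made up):
        #
        #   "example-package": {
        #       "version": "1.0.0",
        #       "resolved": "https://registry.npmjs.org/example-package/-/example-package-1.0.0.tgz",
        #       "integrity": "sha512-<base64 digest>"
        #   }
        #
        # Git, http tarball and local "file:" dependencies are handled by the
        # other branches of _resolve_dependency() below.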
        ud.deps = []

        def _resolve_dependency(name, params, deptree):
            url = None
            localpath = None
            extrapaths = []
            destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
            destsuffix = os.path.join(*destsubdirs)
            unpack = True

            integrity = params.get("integrity", None)
            resolved = params.get("resolved", None)
            version = params.get("version", None)

            # Handle registry sources
            if is_semver(version) and integrity:
                # Handle duplicate dependencies without url
                if not resolved:
                    return

                localfile = npm_localfile(name, version)

                uri = URI(resolved)
                uri.params["downloadfilename"] = localfile

                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

                # Create a resolve file to mimic the npm fetcher and allow
                # re-usability of the downloaded file.
                resolvefile = localpath + ".resolved"

                bb.utils.mkdirhier(os.path.dirname(resolvefile))
                with open(resolvefile, "w") as f:
                    f.write(url)

                extrapaths.append(resolvefile)

            # Handle http tarball sources
            elif version.startswith("http") and integrity:
                localfile = npm_localfile(os.path.basename(version))

                uri = URI(version)
                uri.params["downloadfilename"] = localfile

                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

            # Handle git sources
            elif version.startswith("git"):
                if version.startswith("github:"):
                    version = "git+https://github.com/" + version[len("github:"):]
                regex = re.compile(r"""
                    ^
                    git\+
                    (?P<protocol>[a-z]+)
                    ://
                    (?P<url>[^#]+)
                    \#
                    (?P<rev>[0-9a-f]+)
                    $
                    """, re.VERBOSE)

                match = regex.match(version)

                if not match:
                    raise ParameterError("Invalid git url: %s" % version, ud.url)

                groups = match.groupdict()

                uri = URI("git://" + str(groups["url"]))
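                # Hypothetical example of the rewrite performed here, together
                # with the params set just below: a shrinkwrap version such as
                #   git+https://github.com/user/repo.git#97fe3deecb226d27b1d2dd33f4983457ef938ecb
                # becomes a BitBake URI of the form
                #   git://github.com/user/repo.git;protocol=https;rev=<rev>;destsuffix=<destsuffix>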
                uri.params["protocol"] = str(groups["protocol"])
                uri.params["rev"] = str(groups["rev"])
                uri.params["destsuffix"] = destsuffix

                url = str(uri)

            # Handle local tarball and link sources
            elif version.startswith("file"):
                localpath = version[5:]
                if not version.endswith(".tgz"):
                    unpack = False

            else:
                raise ParameterError("Unsupported dependency: %s" % name, ud.url)

            ud.deps.append({
                "url": url,
                "localpath": localpath,
                "extrapaths": extrapaths,
                "destsuffix": destsuffix,
                "unpack": unpack,
            })

        try:
            with open(ud.shrinkwrap_file, "r") as f:
                shrinkwrap = json.load(f)
        except Exception as e:
            raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)

        foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)

        # Avoid conflicts between the environment data and:
        # - the proxy url revision
        # - the proxy url checksum
        data = bb.data.createCopy(d)
        data.delVar("SRCREV")
        data.delVarFlags("SRC_URI")

        # This fetcher resolves multiple URIs from a shrinkwrap file and then
        # forwards them to a proxy fetcher. The management of the donestamp
        # file, the lockfile and the checksums is forwarded to the proxy
        # fetcher.
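        # Each registry or http dependency resolved above is a plain https URI
        # carrying a download file name and a checksum parameter, for example
        # (hypothetical values):
        #   https://registry.npmjs.org/example-package/-/example-package-1.0.0.tgz;downloadfilename=<localfile>;sha512sum=<hex digest>
        # These URIs are what the proxy fetcher below actually downloads.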
        ud.proxy = Fetch([dep["url"] for dep in ud.deps if dep["url"]], data)
        ud.needdonestamp = False

    @staticmethod
    def _foreach_proxy_method(ud, handle):
        returns = []
        for proxy_url in ud.proxy.urls:
            proxy_ud = ud.proxy.ud[proxy_url]
            proxy_d = ud.proxy.d
            proxy_ud.setup_localpath(proxy_d)
            lf = lockfile(proxy_ud.lockfile)
            returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
            unlockfile(lf)
        return returns

    def verify_donestamp(self, ud, d):
        """Verify the donestamp file"""
        def _handle(m, ud, d):
            return m.verify_donestamp(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def update_donestamp(self, ud, d):
        """Update the donestamp file"""
        def _handle(m, ud, d):
            m.update_donestamp(ud, d)
        self._foreach_proxy_method(ud, _handle)

    def need_update(self, ud, d):
        """Force a fetch, even if localpath exists?"""
        def _handle(m, ud, d):
            return m.need_update(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def try_mirrors(self, fetch, ud, d, mirrors):
        """Try to use a mirror"""
        def _handle(m, ud, d):
            return m.try_mirrors(fetch, ud, d, mirrors)
        return all(self._foreach_proxy_method(ud, _handle))

    def download(self, ud, d):
        """Fetch url"""
        ud.proxy.download()

    def unpack(self, ud, rootdir, d):
        """Unpack the downloaded dependencies"""
        destdir = d.getVar("S")
        destsuffix = ud.parm.get("destsuffix")
        if destsuffix:
            destdir = os.path.join(rootdir, destsuffix)

        bb.utils.mkdirhier(destdir)
        bb.utils.copyfile(ud.shrinkwrap_file,
                          os.path.join(destdir, "npm-shrinkwrap.json"))

        auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
        manual = [dep for dep in ud.deps if dep["localpath"]]

        if auto:
            ud.proxy.unpack(destdir, auto)

        for dep in manual:
            depdestdir = os.path.join(destdir, dep["destsuffix"])
            if dep["url"]:
                npm_unpack(dep["localpath"], depdestdir, d)
            else:
                depsrcdir = os.path.join(destdir, dep["localpath"])
                if dep["unpack"]:
                    npm_unpack(depsrcdir, depdestdir, d)
                else:
                    bb.utils.mkdirhier(depdestdir)
                    cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
                    runfetchcmd(cmd, d, workdir=depdestdir)

    def clean(self, ud, d):
        """Clean any existing full or partial download"""
        ud.proxy.clean()

        # Clean extra files
        for dep in ud.deps:
            for path in dep["extrapaths"]:
                bb.utils.remove(path)

    def done(self, ud, d):
        """Is the download done?"""
        def _handle(m, ud, d):
            return m.done(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))