# Recipe creation tool - create command plugin
#
# Copyright (C) 2014-2017 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys
import os
import argparse
import glob
import fnmatch
import re
import json
import logging
import scriptutils
from urllib.parse import urlparse, urldefrag, urlsplit
import hashlib
import bb.fetch2
logger = logging.getLogger('recipetool')

tinfoil = None
plugins = None

def log_error_cond(message, debugonly):
    if debugonly:
        logger.debug(message)
    else:
        logger.error(message)

def log_info_cond(message, debugonly):
    if debugonly:
        logger.debug(message)
    else:
        logger.info(message)

def plugin_init(pluginlist):
    # Take a reference to the list so we can use it later
    global plugins
    plugins = pluginlist

def tinfoil_init(instance):
    global tinfoil
    tinfoil = instance

class RecipeHandler(object):
    recipelibmap = {}
    recipeheadermap = {}
    recipecmakefilemap = {}
    recipebinmap = {}

    def __init__(self):
        self._devtool = False

    @staticmethod
    def load_libmap(d):
        '''Load library->recipe mapping'''
        import oe.package

        if RecipeHandler.recipelibmap:
            return
        # First build up library->package mapping
        d2 = bb.data.createCopy(d)
        d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}")
        shlib_providers = oe.package.read_shlib_providers(d2)
        libdir = d.getVar('libdir')
        base_libdir = d.getVar('base_libdir')
        libpaths = list(set([base_libdir, libdir]))
        libname_re = re.compile(r'^lib(.+)\.so.*$')
        pkglibmap = {}
        for lib, item in shlib_providers.items():
            for path, pkg in item.items():
                if path in libpaths:
                    res = libname_re.match(lib)
                    if res:
                        libname = res.group(1)
                        if not libname in pkglibmap:
                            pkglibmap[libname] = pkg[0]
                    else:
                        logger.debug('unable to extract library name from %s' % lib)

        # Now turn it into a library->recipe mapping
        pkgdata_dir = d.getVar('PKGDATA_DIR')
        for libname, pkg in pkglibmap.items():
            try:
                with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                    for line in f:
                        if line.startswith('PN:'):
                            RecipeHandler.recipelibmap[libname] = line.split(':', 1)[-1].strip()
                            break
            except IOError as ioe:
                if ioe.errno == 2:
                    logger.warning('unable to find a pkgdata file for package %s' % pkg)
                else:
                    raise

        # Some overrides - these should be mapped to the corresponding virtual providers
        RecipeHandler.recipelibmap['GL'] = 'virtual/libgl'
        RecipeHandler.recipelibmap['EGL'] = 'virtual/egl'
        RecipeHandler.recipelibmap['GLESv2'] = 'virtual/libgles2'

    @staticmethod
    def load_devel_filemap(d):
        '''Build up development file->recipe mapping'''
        if RecipeHandler.recipeheadermap:
            return
        pkgdata_dir = d.getVar('PKGDATA_DIR')
        includedir = d.getVar('includedir')
        cmakedir = os.path.join(d.getVar('libdir'), 'cmake')
        for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')):
            with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                pn = None
                headers = []
                cmakefiles = []
                for line in f:
                    if line.startswith('PN:'):
                        pn = line.split(':', 1)[-1].strip()
                    elif line.startswith('FILES_INFO:%s:' % pkg):
                        val = line.split(': ', 1)[1].strip()
                        dictval = json.loads(val)
                        for fullpth in sorted(dictval):
                            if fullpth.startswith(includedir) and fullpth.endswith('.h'):
                                headers.append(os.path.relpath(fullpth, includedir))
                            elif fullpth.startswith(cmakedir) and fullpth.endswith('.cmake'):
                                cmakefiles.append(os.path.relpath(fullpth, cmakedir))
                if pn and headers:
                    for header in headers:
                        RecipeHandler.recipeheadermap[header] = pn
                if pn and cmakefiles:
                    for fn in cmakefiles:
                        RecipeHandler.recipecmakefilemap[fn] = pn

    @staticmethod
    def load_binmap(d):
        '''Build up native binary->recipe mapping'''
        if RecipeHandler.recipebinmap:
            return
        sstate_manifests = d.getVar('SSTATE_MANIFESTS')
        staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE')
        build_arch = d.getVar('BUILD_ARCH')
        fileprefix = 'manifest-%s-' % build_arch
        for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)):
            with open(fn, 'r') as f:
                pn = os.path.basename(fn).rsplit('.', 1)[0][len(fileprefix):]
                for line in f:
                    if line.startswith(staging_bindir_native):
                        prog = os.path.basename(line.rstrip())
                        RecipeHandler.recipebinmap[prog] = pn

    @staticmethod
    def checkfiles(path, speclist, recursive=False, excludedirs=None):
        results = []
        if recursive:
            for root, dirs, files in os.walk(path, topdown=True):
                if excludedirs:
                    dirs[:] = [d for d in dirs if d not in excludedirs]
                for fn in files:
                    for spec in speclist:
                        if fnmatch.fnmatch(fn, spec):
                            results.append(os.path.join(root, fn))
        else:
            for spec in speclist:
                results.extend(glob.glob(os.path.join(path, spec)))
        return results

    @staticmethod
    def handle_depends(libdeps, pcdeps, deps, outlines, values, d):
        if pcdeps:
            recipemap = read_pkgconfig_provides(d)
        if libdeps:
            RecipeHandler.load_libmap(d)

        ignorelibs = ['socket']
        ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'musl', 'tar-native', 'binutils-native', 'coreutils-native']

        unmappedpc = []
        pcdeps = list(set(pcdeps))
        for pcdep in pcdeps:
            if isinstance(pcdep, str):
                recipe = recipemap.get(pcdep, None)
                if recipe:
                    deps.append(recipe)
                else:
                    if not pcdep.startswith('$'):
                        unmappedpc.append(pcdep)
            else:
                for item in pcdep:
                    recipe = recipemap.get(item, None)
                    if recipe:
                        deps.append(recipe)
                        break
                else:
                    unmappedpc.append('(%s)' % ' or '.join(pcdep))

        unmappedlibs = []
        for libdep in libdeps:
            if isinstance(libdep, tuple):
                lib, header = libdep
            else:
                lib = libdep
                header = None

            if lib in ignorelibs:
                logger.debug('Ignoring library dependency %s' % lib)
                continue

            recipe = RecipeHandler.recipelibmap.get(lib, None)
            if recipe:
                deps.append(recipe)
            elif recipe is None:
                if header:
                    RecipeHandler.load_devel_filemap(d)
                    recipe = RecipeHandler.recipeheadermap.get(header, None)
                    if recipe:
                        deps.append(recipe)
                    elif recipe is None:
                        unmappedlibs.append(lib)
                else:
                    unmappedlibs.append(lib)

        deps = set(deps).difference(set(ignoredeps))

        if unmappedpc:
            outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmappedpc))
            outlines.append('# (this is based on recipes that have previously been built and packaged)')

        if unmappedlibs:
            outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs))))
            outlines.append('# (this is based on recipes that have previously been built and packaged)')

        if deps:
            values['DEPENDS'] = ' '.join(deps)

    @staticmethod
    def genfunction(outlines, funcname, content, python=False, forcespace=False):
        if python:
            prefix = 'python '
        else:
            prefix = ''
        outlines.append('%s%s () {' % (prefix, funcname))
        if python or forcespace:
            indent = '    '
        else:
            indent = '\t'
        addnoop = not python
        for line in content:
            outlines.append('%s%s' % (indent, line))
            if addnoop:
                strippedline = line.lstrip()
                if strippedline and not strippedline.startswith('#'):
                    addnoop = False
        if addnoop:
            # Without this there'll be a syntax error
            outlines.append('%s:' % indent)
        outlines.append('}')
        outlines.append('')

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        return False


def validate_pv(pv):
    if not pv or '_version' in pv.lower() or pv[0] not in '0123456789':
        return False
    return True

def determine_from_filename(srcfile):
    """Determine name and version from a filename"""
    if is_package(srcfile):
        # Force getting the value from the package metadata
        return None, None

    if '.tar.' in srcfile:
        namepart = srcfile.split('.tar.')[0]
    else:
        namepart = os.path.splitext(srcfile)[0]
    namepart = namepart.lower().replace('_', '-')
    if namepart.endswith('.src'):
        namepart = namepart[:-4]
    if namepart.endswith('.orig'):
        namepart = namepart[:-5]
    splitval = namepart.split('-')
    logger.debug('determine_from_filename: split name %s into: %s' % (srcfile, splitval))

    ver_re = re.compile('^v?[0-9]')

    pv = None
    pn = None
    if len(splitval) == 1:
        # Try to split the version out if there is no separator (or a .)
        res = re.match('^([^0-9]+)([0-9.]+.*)$', namepart)
        if res:
            if len(res.group(1)) > 1 and len(res.group(2)) > 1:
                pn = res.group(1).rstrip('.')
                pv = res.group(2)
        else:
            pn = namepart
    else:
        if splitval[-1] in ['source', 'src']:
            splitval.pop()
        if len(splitval) > 2 and re.match('^(alpha|beta|stable|release|rc[0-9]|pre[0-9]|p[0-9]|[0-9]{8})', splitval[-1]) and ver_re.match(splitval[-2]):
            pv = '-'.join(splitval[-2:])
            if pv.endswith('-release'):
                pv = pv[:-8]
            splitval = splitval[:-2]
        elif ver_re.match(splitval[-1]):
            pv = splitval.pop()
        pn = '-'.join(splitval)
        if pv and pv.startswith('v'):
            pv = pv[1:]
    logger.debug('determine_from_filename: name = "%s" version = "%s"' % (pn, pv))
    return (pn, pv)

def determine_from_url(srcuri):
    """Determine name and version from a URL"""
    pn = None
    pv = None
    parseres = urlparse(srcuri.lower().split(';', 1)[0])
    if parseres.path:
        if 'github.com' in parseres.netloc:
            res = re.search(r'.*/(.*?)/archive/(.*)-final\.(tar|zip)', parseres.path)
            if res:
                pn = res.group(1).strip().replace('_', '-')
                pv = res.group(2).strip().replace('_', '.')
            else:
                res = re.search(r'.*/(.*?)/archive/v?(.*)\.(tar|zip)', parseres.path)
                if res:
                    pn = res.group(1).strip().replace('_', '-')
                    pv = res.group(2).strip().replace('_', '.')
        elif 'bitbucket.org' in parseres.netloc:
            res = re.search(r'.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9a-zA-Z_.]*)\.(tar|zip)', parseres.path)
            if res:
                pn = res.group(1).strip().replace('_', '-')
                pv = res.group(2).strip().replace('_', '.')

    if not pn and not pv:
        if parseres.scheme not in ['git', 'gitsm', 'svn', 'hg']:
            srcfile = os.path.basename(parseres.path.rstrip('/'))
            pn, pv = determine_from_filename(srcfile)
        elif parseres.scheme in ['git', 'gitsm']:
            pn = os.path.basename(parseres.path.rstrip('/')).lower().replace('_', '-')
            if pn.endswith('.git'):
                pn = pn[:-4]

    logger.debug('Determined from source URL: name = "%s", version = "%s"' % (pn, pv))
    return (pn, pv)

def supports_srcrev(uri):
    '''Check whether the given source URI supports SRCREV (i.e. it uses an SCM fetcher)'''
    localdata = bb.data.createCopy(tinfoil.config_data)
    # This is a bit sad, but if you don't have this set there can be some
    # odd interactions with the urldata cache which lead to errors
    localdata.setVar('SRCREV', '${AUTOREV}')
    try:
        fetcher = bb.fetch2.Fetch([uri], localdata)
        urldata = fetcher.ud
        for u in urldata:
            if urldata[u].method.supports_srcrev():
                return True
    except bb.fetch2.FetchError as e:
        logger.debug('FetchError in supports_srcrev: %s' % str(e))
        # Fall back to basic check
        if uri.startswith(('git://', 'gitsm://')):
            return True
    return False

def reformat_git_uri(uri):
    '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
    checkuri = uri.split(';', 1)[0]
    if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://git(hub|lab).com/[^/]+/[^/]+/?$', checkuri):
        # Append a scheme if one is missing
        if not '://' in uri:
            uri = 'git://' + uri
        scheme, host, path, user, pswd, parms = bb.fetch2.decodeurl(uri)
        # Some URLs are written with ":" rather than "/" between the host and the path,
        # which causes decodeurl to return the wrong host and path; detect and fix that here
        if len(host.split(':')) > 1:
            splitslash = host.split(':')
            # Port number should not be split from host
            if not re.match('^[0-9]+$', splitslash[1]):
                host = splitslash[0]
                path = '/' + splitslash[1] + path
        # Algorithm:
        # if user is defined, append protocol=ssh or if a protocol is defined, then honor the user-defined protocol
        # if no user & password is defined, check for scheme type and append the protocol with the scheme type
        # finally if protocols or if the url is well-formed, do nothing and rejoin everything back to normal
        # Need to repackage the arguments for encodeurl, the format is: (scheme, host, path, user, password, OrderedDict([('key', 'value')]))
        if user:
            if not 'protocol' in parms:
                parms.update({('protocol', 'ssh')})
        elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms):
            parms.update({('protocol', scheme)})
        # Always append 'git://'
        fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms))
        return fUrl
    else:
        return uri

def is_package(url):
    '''Check if a URL points to a package'''
    checkurl = url.split(';', 1)[0]
    if checkurl.endswith(('.deb', '.ipk', '.rpm', '.srpm')):
        return True
    return False

def create_recipe(args):
    '''Create a recipe from the specified source (implementation of the "create" subcommand)'''
    import bb.process
    import tempfile
    import shutil
    import oe.recipeutils

    pkgarch = ""
    if args.machine:
        pkgarch = "${MACHINE_ARCH}"

    extravalues = {}
    checksums = {}
    tempsrc = ''
    source = args.source
    srcsubdir = ''
    srcrev = '${AUTOREV}'
    srcbranch = ''
    scheme = ''
    storeTagName = ''
    pv_srcpv = False

    if os.path.isfile(source):
        source = 'file://%s' % os.path.abspath(source)

    if scriptutils.is_src_url(source):
        # Warn about github archive URLs
        if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
            logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
        # Fetch a URL
        fetchuri = reformat_git_uri(urldefrag(source)[0])
        if args.binary:
            # Assume the archive contains the directory structure verbatim
            # so we need to extract to a subdirectory
            fetchuri += ';subdir=${BPN}'
        srcuri = fetchuri
        rev_re = re.compile(';rev=([^;]+)')
        res = rev_re.search(srcuri)
        if res:
            if args.srcrev:
                logger.error('rev= parameter and -S/--srcrev option cannot both be specified - use one or the other')
                sys.exit(1)
            if args.autorev:
                logger.error('rev= parameter and -a/--autorev option cannot both be specified - use one or the other')
                sys.exit(1)
            srcrev = res.group(1)
            srcuri = rev_re.sub('', srcuri)
        elif args.srcrev:
            srcrev = args.srcrev

        # Check whether the user has provided any branch info in fetchuri.
        # If so, skip all branch checking below and honour the user's input.
        scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(fetchuri)
        srcbranch = params.get('branch')
        if args.srcbranch:
            if srcbranch:
                logger.error('branch= parameter and -B/--srcbranch option cannot both be specified - use one or the other')
                sys.exit(1)
            srcbranch = args.srcbranch
            params['branch'] = srcbranch
        nobranch = params.get('nobranch')
        if nobranch and srcbranch:
            logger.error('nobranch= cannot be used if you specify a branch')
            sys.exit(1)
        tag = params.get('tag')
        if not srcbranch and not nobranch and srcrev != '${AUTOREV}':
            # Append nobranch=1 in the following conditions:
            # 1. User did not set 'branch=' in srcuri, and
            # 2. User did not set 'nobranch=1' in srcuri, and
            # 3. Source revision is not '${AUTOREV}'
            params['nobranch'] = '1'
        if tag:
            # Keep a copy of the tag, append nobranch=1 and then remove the tag from the URL:
            # the bitbake fetcher cannot fetch when ${AUTOREV} and a tag are set at the same time.
            storeTagName = params['tag']
            params['nobranch'] = '1'
            del params['tag']
        # Assume 'master' branch if not set
        if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
            params['branch'] = 'master'
        fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))

        tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
        bb.utils.mkdirhier(tmpparent)
        tempsrc = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
        srctree = os.path.join(tempsrc, 'source')

        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, fetchuri, srcrev, srctree, logger, preserve_tmp=args.keep_temp)
        except scriptutils.FetchUrlFailure as e:
            logger.error(str(e))
            sys.exit(1)

        if ftmpdir and args.keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        dirlist = scriptutils.filter_src_subdirs(srctree)
        logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
        if len(dirlist) == 1:
            singleitem = os.path.join(srctree, dirlist[0])
            if os.path.isdir(singleitem):
                # We unpacked a single directory, so we should use that
                srcsubdir = dirlist[0]
                srctree = os.path.join(srctree, srcsubdir)
            else:
                check_single_file(dirlist[0], fetchuri)
        elif len(dirlist) == 0:
            if '/' in fetchuri:
                fn = os.path.join(tinfoil.config_data.getVar('DL_DIR'), fetchuri.split('/')[-1])
                if os.path.isfile(fn):
                    check_single_file(fn, fetchuri)
            # If we've got to here then there's no source so we might as well give up
            logger.error('URL %s resulted in an empty source tree' % fetchuri)
            sys.exit(1)

        # This check is needed so that the recipe created by recipetool/devtool can
        # actually be parsed and built by bitbake.
        # If no branch name was provided, try to determine one from the SRCREV that was given.
        if not srcbranch and not nobranch and srcrev and (srcrev != '${AUTOREV}') and scheme in ['git', 'gitsm']:
            try:
                cmd = 'git branch -r --contains'
                check_branch, check_branch_err = bb.process.run('%s %s' % (cmd, srcrev), cwd=srctree)
            except bb.process.ExecutionError as err:
                logger.error(str(err))
                sys.exit(1)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if 'master' in get_branch:
                # Even if get_branch contains multiple branches, default to 'master'
                # if it is one of them
                srcbranch = 'master'
            elif len(get_branch) == 1:
                # If 'master' isn't present and only one branch was found, use that branch
                srcbranch = get_branch[0]
            else:
                # If more than one branch contains the revision, report an error and exit
                mbrch = '\n ' + '\n '.join(get_branch)
                logger.error('Revision %s was found on multiple branches: %s\nPlease provide the correct branch with -B/--srcbranch' % (srcrev, mbrch))
                sys.exit(1)

        # Since we might have a value in srcbranch, we need to
        # reconstruct the srcuri to include 'branch' in params.
        scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
        if scheme in ['git', 'gitsm']:
            params['branch'] = srcbranch or 'master'

        if storeTagName and scheme in ['git', 'gitsm']:
            # Resolve srcrev from the tag and check that the tag is valid
            cmd = ('git rev-parse --verify %s' % (storeTagName))
            try:
                check_tag, check_tag_err = bb.process.run('%s' % cmd, cwd=srctree)
                srcrev = check_tag.split()[0]
            except bb.process.ExecutionError as err:
                logger.error(str(err))
                logger.error("Possibly an invalid tag name was provided")
                sys.exit(1)
            # Drop the tag from srcuri as it would conflict with SRCREV during recipe parsing
            del params['tag']
        srcuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))

        if os.path.exists(os.path.join(srctree, '.gitmodules')) and srcuri.startswith('git://'):
            srcuri = 'gitsm://' + srcuri[6:]
            logger.info('Fetching submodules...')
            bb.process.run('git submodule update --init --recursive', cwd=srctree)

        if is_package(fetchuri):
            localdata = bb.data.createCopy(tinfoil.config_data)
            pkgfile = bb.fetch2.localpath(fetchuri, localdata)
            if pkgfile:
                tmpfdir = tempfile.mkdtemp(prefix='recipetool-')
                try:
                    if pkgfile.endswith(('.deb', '.ipk')):
                        stdout, _ = bb.process.run('ar x %s' % pkgfile, cwd=tmpfdir)
                        stdout, _ = bb.process.run('tar xf control.tar.gz', cwd=tmpfdir)
                        values = convert_debian(tmpfdir)
                        extravalues.update(values)
                    elif pkgfile.endswith(('.rpm', '.srpm')):
                        stdout, _ = bb.process.run('rpm -qp --xml %s > pkginfo.xml' % pkgfile, cwd=tmpfdir)
                        values = convert_rpm_xml(os.path.join(tmpfdir, 'pkginfo.xml'))
                        extravalues.update(values)
                finally:
                    shutil.rmtree(tmpfdir)
    else:
        # Assume we're pointing to an existing source tree
        if args.extract_to:
            logger.error('--extract-to cannot be specified if source is a directory')
            sys.exit(1)
        if not os.path.isdir(source):
            logger.error('Invalid source directory %s' % source)
            sys.exit(1)
        srctree = source
        srcuri = ''
        if os.path.exists(os.path.join(srctree, '.git')):
            # Try to get upstream repo location from origin remote
            try:
                stdout, _ = bb.process.run('git remote -v', cwd=srctree, shell=True)
            except bb.process.ExecutionError as e:
                stdout = None
            if stdout:
                for line in stdout.splitlines():
                    splitline = line.split()
                    if len(splitline) > 1:
                        if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
                            srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
                            srcsubdir = 'git'
                            break

    if args.src_subdir:
        srcsubdir = os.path.join(srcsubdir, args.src_subdir)
        srctree_use = os.path.abspath(os.path.join(srctree, args.src_subdir))
    else:
        srctree_use = os.path.abspath(srctree)

    if args.outfile and os.path.isdir(args.outfile):
        outfile = None
        outdir = args.outfile
    else:
        outfile = args.outfile
        outdir = None
    if outfile and outfile != '-':
        if os.path.exists(outfile):
            logger.error('Output file %s already exists' % outfile)
            sys.exit(1)

    lines_before = []
    lines_after = []

    lines_before.append('# Recipe created by %s' % os.path.basename(sys.argv[0]))
    lines_before.append('# This is the basis of a recipe and may need further editing in order to be fully functional.')
    lines_before.append('# (Feel free to remove these comments when editing.)')
    # We need a blank line here so that patch_recipe_lines can rewind before the LICENSE comments
    lines_before.append('')

    # We'll come back and replace this later in handle_license_vars()
    lines_before.append('##LICENSE_PLACEHOLDER##')

    handled = []
    classes = []

    # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
    pn = None
    pv = None
    if outfile:
        recipefn = os.path.splitext(os.path.basename(outfile))[0]
        fnsplit = recipefn.split('_')
        if len(fnsplit) > 1:
            pn = fnsplit[0]
            pv = fnsplit[1]
        else:
            pn = recipefn

    if args.version:
        pv = args.version

    if args.name:
        pn = args.name
        if args.name.endswith('-native'):
            if args.also_native:
                logger.error('--also-native cannot be specified for a recipe named *-native (*-native denotes a recipe that is already only for native) - either remove the -native suffix from the name or drop --also-native')
                sys.exit(1)
            classes.append('native')
        elif args.name.startswith('nativesdk-'):
            if args.also_native:
                logger.error('--also-native cannot be specified for a recipe named nativesdk-* (nativesdk-* denotes a recipe that is already only for nativesdk)')
                sys.exit(1)
            classes.append('nativesdk')

    if pv and pv not in 'git svn hg'.split():
        realpv = pv
    else:
        realpv = None

    if not srcuri:
        lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
    lines_before.append('SRC_URI = "%s"' % srcuri)
    for key, value in sorted(checksums.items()):
        lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
    if srcuri and supports_srcrev(srcuri):
        lines_before.append('')
        lines_before.append('# Modify these as desired')
        # Note: we have code to replace realpv further down if it gets set to some other value
        scheme, _, _, _, _, _ = bb.fetch2.decodeurl(srcuri)
        if scheme in ['git', 'gitsm']:
            srcpvprefix = 'git'
        elif scheme == 'svn':
            srcpvprefix = 'svnr'
        else:
            srcpvprefix = scheme
        lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix))
        pv_srcpv = True
        if not args.autorev and srcrev == '${AUTOREV}':
            if os.path.exists(os.path.join(srctree, '.git')):
                (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
                srcrev = stdout.rstrip()
        lines_before.append('SRCREV = "%s"' % srcrev)
    if args.provides:
        lines_before.append('PROVIDES = "%s"' % args.provides)
    lines_before.append('')

    if srcsubdir and not args.binary:
        # (for binary packages we explicitly specify subdir= when fetching to
        # match the default value of S, so we don't need to set it in that case)
        lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir)
        lines_before.append('')

    if pkgarch:
        lines_after.append('PACKAGE_ARCH = "%s"' % pkgarch)
        lines_after.append('')

    if args.binary:
        lines_after.append('INSANE_SKIP:${PN} += "already-stripped"')
        lines_after.append('')

    if args.npm_dev:
        extravalues['NPM_INSTALL_DEV'] = 1

    # Find all plugins that want to register handlers
    logger.debug('Loading recipe handlers')
    raw_handlers = []
    for plugin in plugins:
        if hasattr(plugin, 'register_recipe_handlers'):
            plugin.register_recipe_handlers(raw_handlers)
    # Sort handlers by priority
    handlers = []
    for i, handler in enumerate(raw_handlers):
        if isinstance(handler, tuple):
            handlers.append((handler[0], handler[1], i))
        else:
            handlers.append((handler, 0, i))
    handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
    for handler, priority, _ in handlers:
        logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
        setattr(handler, '_devtool', args.devtool)
    handlers = [item[0] for item in handlers]

    # Apply the handlers
    if args.binary:
        classes.append('bin_package')
        handled.append('buildsystem')

    for handler in handlers:
        handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)

    extrafiles = extravalues.pop('extrafiles', {})
    extra_pn = extravalues.pop('PN', None)
    extra_pv = extravalues.pop('PV', None)

    if extra_pv and not realpv:
        realpv = extra_pv
        if not validate_pv(realpv):
            realpv = None
        else:
            realpv = realpv.lower().split()[0]
            if '_' in realpv:
                realpv = realpv.replace('_', '-')
    if extra_pn and not pn:
        pn = extra_pn
        if pn.startswith('GNU '):
            pn = pn[4:]
        if ' ' in pn:
            # Probably a descriptive identifier rather than a proper name
            pn = None
        else:
            pn = pn.lower()
            if '_' in pn:
                pn = pn.replace('_', '-')

    if srcuri and not realpv or not pn:
        name_pn, name_pv = determine_from_url(srcuri)
        if name_pn and not pn:
            pn = name_pn
        if name_pv and not realpv:
            realpv = name_pv

    licvalues = handle_license_vars(srctree_use, lines_before, handled, extravalues, tinfoil.config_data)

    if not outfile:
        if not pn:
            log_error_cond('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile', args.devtool)
            # devtool looks for this specific exit code, so don't change it
            sys.exit(15)
        else:
            if srcuri and srcuri.startswith(('gitsm://', 'git://', 'hg://', 'svn://')):
                suffix = srcuri.split(':', 1)[0]
                if suffix == 'gitsm':
                    suffix = 'git'
                outfile = '%s_%s.bb' % (pn, suffix)
            elif realpv:
                outfile = '%s_%s.bb' % (pn, realpv)
            else:
                outfile = '%s.bb' % pn
            if outdir:
                outfile = os.path.join(outdir, outfile)
            # We need to check this again
            if os.path.exists(outfile):
                logger.error('Output file %s already exists' % outfile)
                sys.exit(1)

    # Move any extra files the plugins created to a directory next to the recipe
    if extrafiles:
        if outfile == '-':
            extraoutdir = pn
        else:
            extraoutdir = os.path.join(os.path.dirname(outfile), pn)
        bb.utils.mkdirhier(extraoutdir)
        for destfn, extrafile in extrafiles.items():
            shutil.move(extrafile, os.path.join(extraoutdir, destfn))

    lines = lines_before
    lines_before = []
    skipblank = True
    for line in lines:
        if skipblank:
            skipblank = False
            if not line:
                continue
        if line.startswith('S = '):
            if realpv and pv not in 'git svn hg'.split():
                line = line.replace(realpv, '${PV}')
            if pn:
                line = line.replace(pn, '${BPN}')
            if line == 'S = "${WORKDIR}/${BPN}-${PV}"':
                skipblank = True
                continue
        elif line.startswith('SRC_URI = '):
            if realpv and not pv_srcpv:
                line = line.replace(realpv, '${PV}')
        elif line.startswith('PV = '):
            if realpv:
                # Replace the first part of the PV value
                line = re.sub(r'"[^+]*\+', '"%s+' % realpv, line)
        lines_before.append(line)

    if args.also_native:
        lines = lines_after
        lines_after = []
        bbclassextend = None
        for line in lines:
            if line.startswith('BBCLASSEXTEND ='):
                splitval = line.split('"')
                if len(splitval) > 1:
                    bbclassextend = splitval[1].split()
                    if not 'native' in bbclassextend:
                        bbclassextend.insert(0, 'native')
                        line = 'BBCLASSEXTEND = "%s"' % ' '.join(bbclassextend)
            lines_after.append(line)
        if not bbclassextend:
            lines_after.append('BBCLASSEXTEND = "native"')

    postinst = ("postinst", extravalues.pop('postinst', None))
    postrm = ("postrm", extravalues.pop('postrm', None))
    preinst = ("preinst", extravalues.pop('preinst', None))
    prerm = ("prerm", extravalues.pop('prerm', None))
    funcs = [postinst, postrm, preinst, prerm]
    for func in funcs:
        if func[1]:
            RecipeHandler.genfunction(lines_after, 'pkg_%s_${PN}' % func[0], func[1])

    outlines = []
    outlines.extend(lines_before)
    if classes:
        if outlines[-1] and not outlines[-1].startswith('#'):
            outlines.append('')
        outlines.append('inherit %s' % ' '.join(classes))
        outlines.append('')
    outlines.extend(lines_after)

    if extravalues:
        _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False)

    if args.extract_to:
        scriptutils.git_convert_standalone_clone(srctree)
        if os.path.isdir(args.extract_to):
            # If the directory exists we'll move the temp dir into it instead of
            # its contents - of course, we could try to always move its contents
            # but that is a pain if there are symlinks; the simplest solution is
            # to just remove it first
            os.rmdir(args.extract_to)
        shutil.move(srctree, args.extract_to)
        if tempsrc == srctree:
            tempsrc = None
        log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)

    if outfile == '-':
        sys.stdout.write('\n'.join(outlines) + '\n')
    else:
        with open(outfile, 'w') as f:
            lastline = None
            for line in outlines:
                if not lastline and not line:
                    # Skip extra blank lines
                    continue
                f.write('%s\n' % line)
                lastline = line
        log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)

    if tempsrc:
        if args.keep_temp:
            logger.info('Preserving temporary directory %s' % tempsrc)
        else:
            shutil.rmtree(tempsrc)

    return 0

def check_single_file(fn, fetchuri):
    """Determine if a single downloaded file is something we can't handle"""
    with open(fn, 'r', errors='surrogateescape') as f:
        if '<html' in f.read(100).lower():
            logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri)
            sys.exit(1)

def split_value(value):
    if isinstance(value, str):
        return value.split()
    else:
        return value

def fixup_license(value):
    # Ensure licenses containing an OR operator are enclosed in brackets
    if '|' in value:
        return '(' + value + ')'
    return value

def tidy_licenses(value):
    """Flatten, split and sort licenses"""
    from oe.license import flattened_licenses
    def _choose(a, b):
        str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
        return ["(%s | %s)" % (str_a, str_b)]
    if not isinstance(value, str):
        value = " & ".join(value)
    return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)

def handle_license_vars(srctree, lines_before, handled, extravalues, d):
    lichandled = [x for x in handled if x[0] == 'license']
    if lichandled:
        # Someone else has already handled the license vars, just return their value
        return lichandled[0][1]

    licvalues = guess_license(srctree, d)
    licenses = []
    lic_files_chksum = []
    lic_unknown = []
    lines = []
    if licvalues:
        for licvalue in licvalues:
            license = licvalue[0]
            lics = tidy_licenses(fixup_license(license))
            lics = [lic for lic in lics if lic not in licenses]
            if len(lics):
                licenses.extend(lics)
            lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
            if license == 'Unknown':
                lic_unknown.append(licvalue[1])
        if lic_unknown:
            lines.append('#')
            lines.append('# The following license files were not able to be identified and are')
            lines.append('# represented as "Unknown" below, you will need to check them yourself:')
            for licfile in lic_unknown:
                lines.append('# %s' % licfile)

    extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
    if extra_license:
        if licenses == ['Unknown']:
            licenses = extra_license
        else:
            for item in extra_license:
                if item not in licenses:
                    licenses.append(item)
    extra_lic_files_chksum = split_value(extravalues.pop('LIC_FILES_CHKSUM', []))
    for item in extra_lic_files_chksum:
        if item not in lic_files_chksum:
            lic_files_chksum.append(item)

    if lic_files_chksum:
        # We are going to set the vars, so prepend the standard disclaimer
        lines.insert(0, '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is')
        lines.insert(1, '# your responsibility to verify that the values are complete and correct.')
    else:
        # Without LIC_FILES_CHKSUM we set LICENSE = "CLOSED" to allow the
        # user to get started easily
        lines.append('# Unable to find any files that looked like license statements. Check the accompanying')
        lines.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.')
        lines.append('#')
        lines.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if')
        lines.append('# this is not accurate with respect to the licensing of the software being built (it')
        lines.append('# will not be in most cases) you must specify the correct value before using this')
        lines.append('# recipe for anything other than initial testing/development!')
        licenses = ['CLOSED']

    if extra_license and sorted(licenses) != sorted(extra_license):
        lines.append('# NOTE: Original package / source metadata indicates license is: %s' % ' & '.join(extra_license))

    if len(licenses) > 1:
        lines.append('#')
        lines.append('# NOTE: multiple licenses have been detected; they have been separated with &')
        lines.append('# in the LICENSE value for now since it is a reasonable assumption that all')
        lines.append('# of the licenses apply. If instead there is a choice between the multiple')
        lines.append('# licenses then you should change the value to separate the licenses with |')
        lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
        lines.append('# to determine which situation is applicable.')

    lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
    lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
    lines.append('')

    # Replace the placeholder so we get the values in the right place in the recipe file
    try:
        pos = lines_before.index('##LICENSE_PLACEHOLDER##')
    except ValueError:
        pos = -1
    if pos == -1:
        lines_before.extend(lines)
    else:
        lines_before[pos:pos+1] = lines

    handled.append(('license', licvalues))
    return licvalues

def get_license_md5sums(d, static_only=False, linenumbers=False):
    import bb.utils
    import csv
    md5sums = {}
    if not static_only and not linenumbers:
        # Gather md5sums of license files in common license dir
        commonlicdir = d.getVar('COMMON_LICENSE_DIR')
        for fn in os.listdir(commonlicdir):
            md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
            md5sums[md5value] = fn

    # The following were extracted from common values in various recipes
    # (double checking the license against the license file itself, not just
    # the LICENSE value in the recipe)

    # Read license md5sums from csv file
    scripts_path = os.path.dirname(os.path.realpath(__file__))
    for path in (d.getVar('BBPATH').split(':')
                 + [os.path.join(scripts_path, '..', '..')]):
        csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
        if os.path.isfile(csv_path):
            with open(csv_path, newline='') as csv_file:
                fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
                reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
                for row in reader:
                    if linenumbers:
                        md5sums[row['md5sum']] = (
                            row['license'], row['beginline'], row['endline'], row['md5'])
                    else:
                        md5sums[row['md5sum']] = row['license']

    return md5sums

def crunch_license(licfile):
    '''
    Remove non-material text from a license file and then check
    its md5sum against a known list. This works well for licenses
    which contain a copyright statement, but is also a useful way
    to handle people's insistence upon reformatting the license text
    slightly (with no material difference to the text of the
    license).
    '''

    import oe.utils

    # Note: these are carefully constructed!
    license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
    license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
    copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
    disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
    email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
    header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
    tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
    url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')

    crunched_md5sums = {}

    # common licenses
    crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0'
    crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = '0BSD'
    crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause'
    crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause'
    crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0'
    crunched_md5sums['975742a59ae1b8abdea63a97121f49f4'] = 'EDL-1.0'
    crunched_md5sums['5322cee4433d84fb3aafc9e253116447'] = 'EPL-1.0'
    crunched_md5sums['6922352e87de080f42419bed93063754'] = 'EPL-2.0'
    crunched_md5sums['793475baa22295cae1d3d4046a3a0ceb'] = 'GPL-2.0-only'
    crunched_md5sums['ff9047f969b02c20f0559470df5cb433'] = 'GPL-2.0-or-later'
    crunched_md5sums['ea6de5453fcadf534df246e6cdafadcd'] = 'GPL-3.0-only'
    crunched_md5sums['b419257d4d153a6fde92ddf96acf5b67'] = 'GPL-3.0-or-later'
    crunched_md5sums['228737f4c49d3ee75b8fb3706b090b84'] = 'ISC'
    crunched_md5sums['c6a782e826ca4e85bf7f8b89435a677d'] = 'LGPL-2.0-only'
    crunched_md5sums['32d8f758a066752f0db09bd7624b8090'] = 'LGPL-2.0-or-later'
    crunched_md5sums['4820937eb198b4f84c52217ed230be33'] = 'LGPL-2.1-only'
    crunched_md5sums['db13fe9f3a13af7adab2dc7a76f9e44a'] = 'LGPL-2.1-or-later'
    crunched_md5sums['d7a0f2e4e0950e837ac3eabf5bd1d246'] = 'LGPL-3.0-only'
    crunched_md5sums['abbf328e2b434f9153351f06b9f79d02'] = 'LGPL-3.0-or-later'
    crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT'
    crunched_md5sums['b218b0e94290b9b818c4be67c8e1cc82'] = 'MIT-0'
    crunched_md5sums['ddc18131d6748374f0f35a621c245b49'] = 'Unlicense'
    crunched_md5sums['51f9570ff32571fc0a443102285c5e33'] = 'WTFPL'

    # The following two were gleaned from the "forever" npm package
    crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
    # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
    crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
    # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
    crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only'
    # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
    crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only'
    # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
    crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only'
    # unixODBC-2.3.4 COPYING
    crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only'
    # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
    crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only'
    # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
    crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'

    # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
    crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
    crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
    crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
    crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
    crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
    crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
    # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
    crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
    # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
    crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
    # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
    crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
    # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
    crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
    # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
    crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
    # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
    crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
    # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
    crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
    # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
    crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
    # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
    crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
    # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
    crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
    # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
    crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
    # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
    crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'

    lictext = []
    with open(licfile, 'r', errors='surrogateescape') as f:
        for line in f:
            # Drop opening statements
            if copyright_re.match(line):
                continue
            elif disclaimer_re.match(line):
                continue
            elif email_re.match(line):
                continue
            elif header_re.match(line):
                continue
            elif tag_re.match(line):
                continue
            elif url_re.match(line):
                continue
            elif license_title_re.match(line):
                continue
            elif license_statement_re.match(line):
                continue
            # Strip comment symbols
            line = line.replace('*', '') \
                       .replace('#', '')
            # Unify spelling
            line = line.replace('sub-license', 'sublicense')
            # Squash spaces
            line = oe.utils.squashspaces(line.strip())
            # Replace smart quotes, double quotes and backticks with single quotes
            line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
            # Unify brackets
            line = line.replace("{", "[").replace("}", "]")
            if line:
                lictext.append(line)

    m = hashlib.md5()
    try:
        m.update(' '.join(lictext).encode('utf-8'))
        md5val = m.hexdigest()
    except UnicodeEncodeError:
        md5val = None
        lictext = ''
    license = crunched_md5sums.get(md5val, None)
    return license, md5val, lictext

def guess_license(srctree, d):
    import bb
    md5sums = get_license_md5sums(d)

    licenses = []
    licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
    skip_extensions = (".html", ".js", ".json", ".svg", ".ts")
    licfiles = []
    for root, dirs, files in os.walk(srctree):
        for fn in files:
            if fn.endswith(skip_extensions):
                continue
            for spec in licspecs:
                if fnmatch.fnmatch(fn, spec):
                    fullpath = os.path.join(root, fn)
                    if not fullpath in licfiles:
                        licfiles.append(fullpath)
    for licfile in sorted(licfiles):
        md5value = bb.utils.md5_file(licfile)
        license = md5sums.get(md5value, None)
        if not license:
            license, crunched_md5, lictext = crunch_license(licfile)
            if lictext and not license:
                license = 'Unknown'
                logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
                            "and replace `Unknown` with the license:\n" \
                            "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value))
        if license:
            licenses.append((license, os.path.relpath(licfile, srctree), md5value))

    # FIXME should we grab at least one source file with a license header and add that too?

    return licenses

def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='${PN}'):
    """
    Given a list of (license, path, md5sum) tuples as returned by guess_license()
    and a dict of package name to path mappings, write out a set of
    package-specific LICENSE values.
    """
    pkglicenses = {pn: []}
    for license, licpath, _ in licvalues:
        license = fixup_license(license)
        for pkgname, pkgpath in packages.items():
            if licpath.startswith(pkgpath + '/'):
                if pkgname in pkglicenses:
                    pkglicenses[pkgname].append(license)
                else:
                    pkglicenses[pkgname] = [license]
                break
        else:
            # Accumulate on the main package
            pkglicenses[pn].append(license)
    outlicenses = {}
    for pkgname in packages:
        # Assume AND operator between license files
        license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
        if license == 'Unknown' and pkgname in fallback_licenses:
            license = fallback_licenses[pkgname]
        licenses = tidy_licenses(license)
        license = ' & '.join(licenses)
        outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
        outlicenses[pkgname] = licenses
    return outlicenses

def read_pkgconfig_provides(d):
    '''Build a mapping of pkg-config file names to the recipes (PN) that provide them, using pkgdata'''
    pkgdatadir = d.getVar('PKGDATA_DIR')
    pkgmap = {}
    for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')):
        with open(fn, 'r') as f:
            for line in f:
                pkgmap[os.path.basename(line.rstrip())] = os.path.splitext(os.path.basename(fn))[0]
    recipemap = {}
    for pc, pkg in pkgmap.items():
        pkgdatafile = os.path.join(pkgdatadir, 'runtime', pkg)
        if os.path.exists(pkgdatafile):
            with open(pkgdatafile, 'r') as f:
                for line in f:
                    if line.startswith('PN: '):
                        recipemap[pc] = line.split(':', 1)[1].strip()
    return recipemap

def convert_debian(debpath):
    value_map = {'Package': 'PN',
                 'Version': 'PV',
                 'Section': 'SECTION',
                 'License': 'LICENSE',
                 'Homepage': 'HOMEPAGE'}

    # FIXME extend this mapping - perhaps use distro_alias.inc?
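    # depmap maps Debian build-dependency package names to OE recipe names; only
    # dependencies with an entry here are collected when scanning the
    # Build-Depends field below.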
    depmap = {'libz-dev': 'zlib'}

    values = {}
    depends = []
    with open(os.path.join(debpath, 'control'), 'r', errors='surrogateescape') as f:
        indesc = False
        for line in f:
            if indesc:
                if line.startswith(' '):
                    if line.startswith(' This package contains'):
                        indesc = False
                    else:
                        if 'DESCRIPTION' in values:
                            values['DESCRIPTION'] += ' ' + line.strip()
                        else:
                            values['DESCRIPTION'] = line.strip()
                else:
                    indesc = False
            if not indesc:
                splitline = line.split(':', 1)
                if len(splitline) < 2:
                    continue
                key = splitline[0]
                value = splitline[1].strip()
                if key == 'Build-Depends':
                    for dep in value.split(','):
                        dep = dep.split()[0]
                        mapped = depmap.get(dep, '')
                        if mapped:
                            depends.append(mapped)
                elif key == 'Description':
                    values['SUMMARY'] = value
                    indesc = True
                else:
                    varname = value_map.get(key, None)
                    if varname:
                        values[varname] = value
    postinst = os.path.join(debpath, 'postinst')
    postrm = os.path.join(debpath, 'postrm')
    preinst = os.path.join(debpath, 'preinst')
    prerm = os.path.join(debpath, 'prerm')
    sfiles = [postinst, postrm, preinst, prerm]
    for sfile in sfiles:
        if os.path.isfile(sfile):
            logger.info("Converting %s file to recipe function..." %
                        os.path.basename(sfile).upper())
            content = []
            with open(sfile) as f:
                for line in f:
                    if "#!/" in line:
                        continue
                    line = line.rstrip("\n")
                    if line.strip():
                        content.append(line)
                if content:
                    values[os.path.basename(f.name)] = content

    #if depends:
    #    values['DEPENDS'] = ' '.join(depends)

    return values

def convert_rpm_xml(xmlfile):
    '''Converts the output from rpm -qp --xml to a set of variable values'''
    import xml.etree.ElementTree as ElementTree
    rpmtag_map = {'Name': 'PN',
                  'Version': 'PV',
                  'Summary': 'SUMMARY',
                  'Description': 'DESCRIPTION',
                  'License': 'LICENSE',
                  'Url': 'HOMEPAGE'}

    values = {}
    tree = ElementTree.parse(xmlfile)
    root = tree.getroot()
    for child in root:
        if child.tag == 'rpmTag':
            name = child.attrib.get('name', None)
            if name:
                varname = rpmtag_map.get(name, None)
                if varname:
                    values[varname] = child[0].text
    return values


def register_commands(subparsers):
    parser_create = subparsers.add_parser('create',
                                          help='Create a new recipe',
                                          description='Creates a new recipe from a source tree')
    parser_create.add_argument('source', help='Path or URL to source')
    parser_create.add_argument('-o', '--outfile', help='Specify filename for recipe to create')
    parser_create.add_argument('-p', '--provides', help='Specify an alias for the item provided by the recipe')
    parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true')
    parser_create.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s')
    parser_create.add_argument('-N', '--name', help='Name to use within recipe (PN)')
    parser_create.add_argument('-V', '--version', help='Version to use within recipe (PV)')
    parser_create.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
    parser_create.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
    parser_create.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
    group = parser_create.add_mutually_exclusive_group()
    group.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
    group.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
    parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
    parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
    parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
    parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
    parser_create.set_defaults(func=create_recipe)
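
# Example invocations of the 'create' subcommand registered above (illustrative
# only - the URLs and paths are hypothetical):
#
#   recipetool create https://example.com/downloads/foo-1.2.3.tar.gz
#   recipetool create -N foo -V 1.2.3 /path/to/existing/source/tree
#   recipetool create -B main -S <revision> 'git://example.com/foo.git;protocol=https'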