Diffstat (limited to 'scripts/lib/recipetool/create_npm.py')
-rw-r--r--   scripts/lib/recipetool/create_npm.py   468
1 file changed, 178 insertions, 290 deletions
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 39429ebad3..7f0d8a04a3 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -1,321 +1,209 @@
-# Recipe creation tool - node.js NPM module support plugin
-#
 # Copyright (C) 2016 Intel Corporation
+# Copyright (C) 2020 Savoir-Faire Linux
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
+"""Recipe creation tool - npm module support plugin"""
 
+import json
 import os
+import re
 import sys
-import logging
-import subprocess
 import tempfile
-import shutil
-import json
-from recipetool.create import RecipeHandler, split_pkg_licenses, handle_license_vars
+import bb
+from bb.fetch2.npm import NpmEnvironment
+from recipetool.create import RecipeHandler
 
-logger = logging.getLogger('recipetool')
+TINFOIL = None
 
+def tinfoil_init(instance):
+    """Initialize tinfoil"""
+    global TINFOIL
+    TINFOIL = instance
 
-tinfoil = None
+class NpmRecipeHandler(RecipeHandler):
+    """Class to handle the npm recipe creation"""
+
+    @staticmethod
+    def _npm_name(name):
+        """Generate a Yocto friendly npm name"""
+        name = re.sub("/", "-", name)
+        name = name.lower()
+        name = re.sub(r"[^\-a-z0-9]", "", name)
+        name = name.strip("-")
+        return name
+
+    @staticmethod
+    def _get_registry(lines):
+        """Get the registry value from the 'npm://registry' url"""
+        registry = None
+
+        def _handle_registry(varname, origvalue, op, newlines):
+            nonlocal registry
+            if origvalue.startswith("npm://"):
+                registry = re.sub(r"^npm://", "http://", origvalue.split(";")[0])
+            return origvalue, None, 0, True
 
-def tinfoil_init(instance):
-    global tinfoil
-    tinfoil = instance
+        bb.utils.edit_metadata(lines, ["SRC_URI"], _handle_registry)
 
+        return registry
 
-class NpmRecipeHandler(RecipeHandler):
-    lockdownpath = None
+    @staticmethod
+    def _ensure_npm():
+        """Check if the 'npm' command is available in the recipes"""
+        if not TINFOIL.recipes_parsed:
+            TINFOIL.parse_recipes()
 
-    def _ensure_npm(self, fixed_setup=False):
-        if not tinfoil.recipes_parsed:
-            tinfoil.parse_recipes()
         try:
-            rd = tinfoil.parse_recipe('nodejs-native')
+            d = TINFOIL.parse_recipe("nodejs-native")
         except bb.providers.NoProvider:
-            if fixed_setup:
-                msg = 'nodejs-native is required for npm but is not available within this SDK'
-            else:
-                msg = 'nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs'
-            logger.error(msg)
-            return None
-        bindir = rd.getVar('STAGING_BINDIR_NATIVE')
-        npmpath = os.path.join(bindir, 'npm')
+            bb.error("Nothing provides 'nodejs-native' which is required for the build")
+            bb.note("You will likely need to add a layer that provides nodejs")
+            sys.exit(14)
+
+        bindir = d.getVar("STAGING_BINDIR_NATIVE")
+        npmpath = os.path.join(bindir, "npm")
+
         if not os.path.exists(npmpath):
-            tinfoil.build_targets('nodejs-native', 'addto_recipe_sysroot')
+            TINFOIL.build_targets("nodejs-native", "addto_recipe_sysroot")
+
             if not os.path.exists(npmpath):
-                logger.error('npm required to process specified source, but nodejs-native did not seem to populate it')
-                return None
+                bb.error("Failed to add 'npm' to sysroot")
+                sys.exit(14)
+
         return bindir
 
-    def _handle_license(self, data):
-        '''
-        Handle the license value from an npm package.json file
-        '''
-        license = None
-        if 'license' in data:
-            license = data['license']
-            if isinstance(license, dict):
-                license = license.get('type', None)
-            if license:
-                if 'OR' in license:
-                    license = license.replace('OR', '|')
-                    license = license.replace('AND', '&')
-                    license = license.replace(' ', '_')
-                    if not license[0] == '(':
-                        license = '(' + license + ')'
-                else:
-                    license = license.replace('AND', '&')
-                    if license[0] == '(':
-                        license = license[1:]
-                    if license[-1] == ')':
-                        license = license[:-1]
-                license = license.replace('MIT/X11', 'MIT')
-                license = license.replace('Public Domain', 'PD')
-                license = license.replace('SEE LICENSE IN EULA',
-                                           'SEE-LICENSE-IN-EULA')
-        return license
+    @staticmethod
+    def _npm_global_configs(dev):
+        """Get the npm global configuration"""
+        configs = []
+
+        if dev:
+            configs.append(("also", "development"))
+        else:
+            configs.append(("only", "production"))
+
+        configs.append(("save", "false"))
+        configs.append(("package-lock", "false"))
+        configs.append(("shrinkwrap", "false"))
+        return configs
+
+    def _run_npm_install(self, d, srctree, registry, dev):
+        """Run the 'npm install' command without building the addons"""
+        configs = self._npm_global_configs(dev)
+        configs.append(("ignore-scripts", "true"))
+
+        if registry:
+            configs.append(("registry", registry))
+
+        bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
+
+        env = NpmEnvironment(d, configs=configs)
+        env.run("npm install", workdir=srctree)
 
-    def _shrinkwrap(self, srctree, localfilesdir, extravalues, lines_before, d):
-        try:
-            runenv = dict(os.environ, PATH=d.getVar('PATH'))
-            bb.process.run('npm shrinkwrap', cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
-        except bb.process.ExecutionError as e:
-            logger.warning('npm shrinkwrap failed:\n%s' % e.stdout)
-            return
-
-        tmpfile = os.path.join(localfilesdir, 'npm-shrinkwrap.json')
-        shutil.move(os.path.join(srctree, 'npm-shrinkwrap.json'), tmpfile)
-        extravalues.setdefault('extrafiles', {})
-        extravalues['extrafiles']['npm-shrinkwrap.json'] = tmpfile
-        lines_before.append('NPM_SHRINKWRAP := "${THISDIR}/${PN}/npm-shrinkwrap.json"')
-
-    def _lockdown(self, srctree, localfilesdir, extravalues, lines_before, d):
-        runenv = dict(os.environ, PATH=d.getVar('PATH'))
-        if not NpmRecipeHandler.lockdownpath:
-            NpmRecipeHandler.lockdownpath = tempfile.mkdtemp('recipetool-npm-lockdown')
-            bb.process.run('npm install lockdown --prefix %s' % NpmRecipeHandler.lockdownpath,
-                           cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
-        relockbin = os.path.join(NpmRecipeHandler.lockdownpath, 'node_modules', 'lockdown', 'relock.js')
-        if not os.path.exists(relockbin):
-            logger.warning('Could not find relock.js within lockdown directory; skipping lockdown')
-            return
-        try:
-            bb.process.run('node %s' % relockbin, cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
-        except bb.process.ExecutionError as e:
-            logger.warning('lockdown-relock failed:\n%s' % e.stdout)
-            return
-
-        tmpfile = os.path.join(localfilesdir, 'lockdown.json')
-        shutil.move(os.path.join(srctree, 'lockdown.json'), tmpfile)
-        extravalues.setdefault('extrafiles', {})
-        extravalues['extrafiles']['lockdown.json'] = tmpfile
-        lines_before.append('NPM_LOCKDOWN := "${THISDIR}/${PN}/lockdown.json"')
-
-    def _handle_dependencies(self, d, deps, optdeps, devdeps, lines_before, srctree):
-        import scriptutils
-        # If this isn't a single module we need to get the dependencies
-        # and add them to SRC_URI
-        def varfunc(varname, origvalue, op, newlines):
-            if varname == 'SRC_URI':
-                if not origvalue.startswith('npm://'):
-                    src_uri = origvalue.split()
-                    deplist = {}
-                    for dep, depver in optdeps.items():
-                        depdata = self.get_npm_data(dep, depver, d)
-                        if self.check_npm_optional_dependency(depdata):
-                            deplist[dep] = depdata
-                    for dep, depver in devdeps.items():
-                        depdata = self.get_npm_data(dep, depver, d)
-                        if self.check_npm_optional_dependency(depdata):
-                            deplist[dep] = depdata
-                    for dep, depver in deps.items():
-                        depdata = self.get_npm_data(dep, depver, d)
-                        deplist[dep] = depdata
-
-                    extra_urls = []
-                    for dep, depdata in deplist.items():
-                        version = depdata.get('version', None)
-                        if version:
-                            url = 'npm://registry.npmjs.org;name=%s;version=%s;subdir=node_modules/%s' % (dep, version, dep)
-                            extra_urls.append(url)
-                    if extra_urls:
-                        scriptutils.fetch_url(tinfoil, ' '.join(extra_urls), None, srctree, logger)
-                        src_uri.extend(extra_urls)
-                    return src_uri, None, -1, True
-            return origvalue, None, 0, True
-        updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
-        if updated:
-            del lines_before[:]
-            for line in newlines:
-                # Hack to avoid newlines that edit_metadata inserts
-                if line.endswith('\n'):
-                    line = line[:-1]
-                lines_before.append(line)
-        return updated
+    def _generate_shrinkwrap(self, d, srctree, dev):
+        """Check and generate the 'npm-shrinkwrap.json' file if needed"""
+        configs = self._npm_global_configs(dev)
+
+        env = NpmEnvironment(d, configs=configs)
+        env.run("npm shrinkwrap", workdir=srctree)
+
+        return os.path.join(srctree, "npm-shrinkwrap.json")
 
     def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
-        import bb.utils
-        import oe.package
-        from collections import OrderedDict
+        """Handle the npm recipe creation"""
 
-        if 'buildsystem' in handled:
+        if "buildsystem" in handled:
             return False
 
-        def read_package_json(fn):
-            with open(fn, 'r', errors='surrogateescape') as f:
-                return json.loads(f.read())
+        files = RecipeHandler.checkfiles(srctree, ["package.json"])
 
-        files = RecipeHandler.checkfiles(srctree, ['package.json'])
-        if files:
-            d = bb.data.createCopy(tinfoil.config_data)
-            npm_bindir = self._ensure_npm()
-            if not npm_bindir:
-                sys.exit(14)
-            d.prependVar('PATH', '%s:' % npm_bindir)
-
-            data = read_package_json(files[0])
-            if 'name' in data and 'version' in data:
-                extravalues['PN'] = data['name']
-                extravalues['PV'] = data['version']
-                classes.append('npm')
-                handled.append('buildsystem')
-                if 'description' in data:
-                    extravalues['SUMMARY'] = data['description']
-                if 'homepage' in data:
-                    extravalues['HOMEPAGE'] = data['homepage']
-
-                fetchdev = extravalues['fetchdev'] or None
-                deps, optdeps, devdeps = self.get_npm_package_dependencies(data, fetchdev)
-                self._handle_dependencies(d, deps, optdeps, devdeps, lines_before, srctree)
-
-                # Shrinkwrap
-                localfilesdir = tempfile.mkdtemp(prefix='recipetool-npm')
-                self._shrinkwrap(srctree, localfilesdir, extravalues, lines_before, d)
-
-                # Lockdown
-                self._lockdown(srctree, localfilesdir, extravalues, lines_before, d)
-
-                # Split each npm module out to is own package
-                npmpackages = oe.package.npm_split_package_dirs(srctree)
-                licvalues = None
-                for item in handled:
-                    if isinstance(item, tuple):
-                        if item[0] == 'license':
-                            licvalues = item[1]
-                            break
-                if not licvalues:
-                    licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
-                if licvalues:
-                    # Augment the license list with information we have in the packages
-                    licenses = {}
-                    license = self._handle_license(data)
-                    if license:
-                        licenses['${PN}'] = license
-                    for pkgname, pkgitem in npmpackages.items():
-                        _, pdata = pkgitem
-                        license = self._handle_license(pdata)
-                        if license:
-                            licenses[pkgname] = license
-                    # Now write out the package-specific license values
-                    # We need to strip out the json data dicts for this since split_pkg_licenses
-                    # isn't expecting it
-                    packages = OrderedDict((x,y[0]) for x,y in npmpackages.items())
-                    packages['${PN}'] = ''
-                    pkglicenses = split_pkg_licenses(licvalues, packages, lines_after, licenses)
-                    all_licenses = list(set([item.replace('_', ' ') for pkglicense in pkglicenses.values() for item in pkglicense]))
-                    if '&' in all_licenses:
-                        all_licenses.remove('&')
-                    extravalues['LICENSE'] = ' & '.join(all_licenses)
-
-                # Need to move S setting after inherit npm
-                for i, line in enumerate(lines_before):
-                    if line.startswith('S ='):
-                        lines_before.pop(i)
-                        lines_after.insert(0, '# Must be set after inherit npm since that itself sets S')
-                        lines_after.insert(1, line)
-                        break
-
-                return True
-
-        return False
-
-    # FIXME this is duplicated from lib/bb/fetch2/npm.py
-    def _parse_view(self, output):
-        '''
-        Parse the output of npm view --json; the last JSON result
-        is assumed to be the one that we're interested in.
-        '''
-        pdata = None
-        outdeps = {}
-        datalines = []
-        bracelevel = 0
-        for line in output.splitlines():
-            if bracelevel:
-                datalines.append(line)
-            elif '{' in line:
-                datalines = []
-                datalines.append(line)
-            bracelevel = bracelevel + line.count('{') - line.count('}')
-        if datalines:
-            pdata = json.loads('\n'.join(datalines))
-        return pdata
-
-    # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
-    # (split out from _getdependencies())
-    def get_npm_data(self, pkg, version, d):
-        import bb.fetch2
-        pkgfullname = pkg
-        if version != '*' and not '/' in version:
-            pkgfullname += "@'%s'" % version
-        logger.debug(2, "Calling getdeps on %s" % pkg)
-        runenv = dict(os.environ, PATH=d.getVar('PATH'))
-        fetchcmd = "npm view %s --json" % pkgfullname
-        output, _ = bb.process.run(fetchcmd, stderr=subprocess.STDOUT, env=runenv, shell=True)
-        data = self._parse_view(output)
-        return data
-
-    # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
-    # (split out from _getdependencies())
-    def get_npm_package_dependencies(self, pdata, fetchdev):
-        dependencies = pdata.get('dependencies', {})
-        optionalDependencies = pdata.get('optionalDependencies', {})
-        dependencies.update(optionalDependencies)
-        if fetchdev:
-            devDependencies = pdata.get('devDependencies', {})
-            dependencies.update(devDependencies)
-        else:
-            devDependencies = {}
-        depsfound = {}
-        optdepsfound = {}
-        devdepsfound = {}
-        for dep in dependencies:
-            if dep in optionalDependencies:
-                optdepsfound[dep] = dependencies[dep]
-            elif dep in devDependencies:
-                devdepsfound[dep] = dependencies[dep]
-            else:
-                depsfound[dep] = dependencies[dep]
-        return depsfound, optdepsfound, devdepsfound
-
-    # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
-    # (split out from _getdependencies())
-    def check_npm_optional_dependency(self, pdata):
-        pkg_os = pdata.get('os', None)
-        if pkg_os:
-            if not isinstance(pkg_os, list):
-                pkg_os = [pkg_os]
-            blacklist = False
-            for item in pkg_os:
-                if item.startswith('!'):
-                    blacklist = True
-                    break
-            if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
-                pkg = pdata.get('name', 'Unnamed package')
-                logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
-                return False
-        return True
+        if not files:
+            return False
+
+        with open(files[0], "r") as f:
+            data = json.load(f)
 
| 127 | if "name" not in data or "version" not in data: | ||
| 128 | return False | ||
| 129 | |||
| 130 | extravalues["PN"] = self._npm_name(data["name"]) | ||
| 131 | extravalues["PV"] = data["version"] | ||
| 132 | |||
| 133 | if "description" in data: | ||
| 134 | extravalues["SUMMARY"] = data["description"] | ||
| 135 | |||
| 136 | if "homepage" in data: | ||
| 137 | extravalues["HOMEPAGE"] = data["homepage"] | ||
| 138 | |||
| 139 | dev = bb.utils.to_boolean(str(extravalues.get("NPM_INSTALL_DEV", "0")), False) | ||
| 140 | registry = self._get_registry(lines_before) | ||
| 141 | |||
| 142 | bb.note("Checking if npm is available ...") | ||
| 143 | # The native npm is used here (and not the host one) to ensure that the | ||
| 144 | # npm version is high enough to ensure an efficient dependency tree | ||
| 145 | # resolution and avoid issue with the shrinkwrap file format. | ||
| 146 | # Moreover the native npm is mandatory for the build. | ||
| 147 | bindir = self._ensure_npm() | ||
| 148 | |||
| 149 | d = bb.data.createCopy(TINFOIL.config_data) | ||
| 150 | d.prependVar("PATH", bindir + ":") | ||
| 151 | d.setVar("S", srctree) | ||
| 152 | |||
| 153 | bb.note("Generating shrinkwrap file ...") | ||
| 154 | # To generate the shrinkwrap file the dependencies have to be installed | ||
| 155 | # first. During the generation process some files may be updated / | ||
| 156 | # deleted. By default devtool tracks the diffs in the srctree and raises | ||
| 157 | # errors when finishing the recipe if some diffs are found. | ||
| 158 | git_exclude_file = os.path.join(srctree, ".git", "info", "exclude") | ||
| 159 | if os.path.exists(git_exclude_file): | ||
| 160 | with open(git_exclude_file, "r+") as f: | ||
| 161 | lines = f.readlines() | ||
| 162 | for line in ["/node_modules/", "/npm-shrinkwrap.json"]: | ||
| 163 | if line not in lines: | ||
| 164 | f.write(line + "\n") | ||
| 165 | |||
| 166 | lock_file = os.path.join(srctree, "package-lock.json") | ||
| 167 | lock_copy = lock_file + ".copy" | ||
| 168 | if os.path.exists(lock_file): | ||
| 169 | bb.utils.copyfile(lock_file, lock_copy) | ||
| 170 | |||
| 171 | self._run_npm_install(d, srctree, registry, dev) | ||
| 172 | shrinkwrap_file = self._generate_shrinkwrap(d, srctree, dev) | ||
| 173 | |||
| 174 | if os.path.exists(lock_copy): | ||
| 175 | bb.utils.movefile(lock_copy, lock_file) | ||
| 176 | |||
| 177 | # Add the shrinkwrap file as 'extrafiles' | ||
| 178 | shrinkwrap_copy = shrinkwrap_file + ".copy" | ||
| 179 | bb.utils.copyfile(shrinkwrap_file, shrinkwrap_copy) | ||
| 180 | extravalues.setdefault("extrafiles", {}) | ||
| 181 | extravalues["extrafiles"]["npm-shrinkwrap.json"] = shrinkwrap_copy | ||
| 182 | |||
| 183 | url_local = "npmsw://%s" % shrinkwrap_file | ||
| 184 | url_recipe= "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json" | ||
| 185 | |||
| 186 | if dev: | ||
| 187 | url_local += ";dev=1" | ||
| 188 | url_recipe += ";dev=1" | ||
| 189 | |||
| 190 | # Add the npmsw url in the SRC_URI of the generated recipe | ||
| 191 | def _handle_srcuri(varname, origvalue, op, newlines): | ||
| 192 | """Update the version value and add the 'npmsw://' url""" | ||
| 193 | value = origvalue.replace("version=" + data["version"], "version=${PV}") | ||
| 194 | value = value.replace("version=latest", "version=${PV}") | ||
| 195 | values = [line.strip() for line in value.strip('\n').splitlines()] | ||
| 196 | values.append(url_recipe) | ||
| 197 | return values, None, 4, False | ||
| 198 | |||
| 199 | (_, newlines) = bb.utils.edit_metadata(lines_before, ["SRC_URI"], _handle_srcuri) | ||
| 200 | lines_before[:] = [line.rstrip('\n') for line in newlines] | ||
| 201 | |||
| 202 | classes.append("npm") | ||
| 203 | handled.append("buildsystem") | ||
| 204 | |||
| 205 | return True | ||
| 319 | 206 | ||
| 320 | def register_recipe_handlers(handlers): | 207 | def register_recipe_handlers(handlers): |
| 208 | """Register the npm handler""" | ||
| 321 | handlers.append((NpmRecipeHandler(), 60)) | 209 | handlers.append((NpmRecipeHandler(), 60)) |
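For reference, a minimal standalone sketch of the name normalization the new _npm_name() helper performs when deriving PN from package.json (the package names below are hypothetical examples, not taken from the change):

    import re

    def npm_name(name):
        # Mirrors NpmRecipeHandler._npm_name(): make an npm package name Yocto friendly
        name = re.sub("/", "-", name)            # scope separator becomes a dash
        name = name.lower()                      # recipe names are lower case
        name = re.sub(r"[^\-a-z0-9]", "", name)  # drop anything outside [-a-z0-9]
        return name.strip("-")                   # no leading/trailing dashes

    # Hypothetical examples:
    print(npm_name("@angular/core"))      # -> "angular-core"
    print(npm_name("Some.Package_Name"))  # -> "somepackagename"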
