| field | value | date |
|---|---|---|
| author | Christian Lindeberg <christian.lindeberg@axis.com> | 2025-06-27 14:48:45 +0100 |
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2025-07-01 08:49:37 +0100 |
| commit | 90cc27f8ce15f2b51de95aa3b6628516030349ee (patch) | |
| tree | bce4a0782e4588f6e5806c132a92b7ace07ed940 | |
| parent | 45eb6f8188cc10ad89a3342e925006e9847dd047 (diff) | |
| download | poky-90cc27f8ce15f2b51de95aa3b6628516030349ee.tar.gz | |

recipetool: create_go: Use gomod fetcher instead of go mod vendor
Use the go-mod bbclass together with the gomod fetcher instead of the
go-vendor bbclass.
(From OE-Core rev: 42b46ab3b92a4f011592e8efcedead075731b8bd)
Signed-off-by: Christian Lindeberg <christian.lindeberg@axis.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
| -rw-r--r-- | scripts/lib/recipetool/create_go.py | 721 |
1 file changed, 99 insertions(+), 622 deletions(-)
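
This change drops recipetool's hand-rolled repository resolution and `go mod vendor` flow in favour of BitBake's gomod fetcher: dependencies are now emitted as `gomod://` SRC_URI entries derived from a populated Go module cache, as the new `__go_mod()` helper in the diff below does. For context, here is a minimal standalone sketch of that derivation, assuming the standard GOMODCACHE layout (`cache/download/<escaped module path>/@v/<version>.zip`) described in the Go module reference; the `unescape_path`/`gomod_src_uris` helpers and the command-line interface are invented for the example and are not part of the patch.

```python
#!/usr/bin/env python3
# Illustrative sketch only: derive gomod:// SRC_URI entries from a populated
# Go module cache (GOMODCACHE), mirroring the approach taken by create_go.py.
import hashlib
import os
import re
import sys


def unescape_path(path):
    # The module cache escapes capital letters as "!x"; undo that.
    return re.sub(r'!([a-z])', lambda m: m.group(1).upper(), path)


def sha256_file(filename):
    # Hash in chunks so large module zips don't need to fit in memory.
    h = hashlib.sha256()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            h.update(chunk)
    return h.hexdigest()


def gomod_src_uris(gomodcache):
    uris = []
    downloaddir = os.path.join(gomodcache, 'cache', 'download')
    for dirpath, _, filenames in os.walk(downloaddir):
        path, base = os.path.split(os.path.relpath(dirpath, downloaddir))
        if base != '@v':
            continue
        path = unescape_path(path)
        for name in filenames:
            ver, ext = os.path.splitext(name)
            if ext == '.zip':
                chksum = sha256_file(os.path.join(dirpath, name))
                uris.append(f'gomod://{path};version={ver};sha256sum={chksum}')
    return uris


if __name__ == '__main__':
    for uri in sorted(gomod_src_uris(sys.argv[1])):
        print(uri)
```

The patch itself additionally records `.mod`-only dependencies with `;mod=1` and sorts the entries so that shorter module paths come before longer ones.
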
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
index 5cc53931f0..3e9fc85784 100644
--- a/scripts/lib/recipetool/create_go.py
+++ b/scripts/lib/recipetool/create_go.py
@@ -10,48 +10,31 @@
| 10 | # | 10 | # |
| 11 | 11 | ||
| 12 | 12 | ||
| 13 | from collections import namedtuple | ||
| 14 | from enum import Enum | ||
| 15 | from html.parser import HTMLParser | ||
| 16 | from recipetool.create import RecipeHandler, handle_license_vars | 13 | from recipetool.create import RecipeHandler, handle_license_vars |
| 17 | from recipetool.create import find_licenses, tidy_licenses, fixup_license | 14 | from recipetool.create import find_licenses |
| 18 | from recipetool.create import determine_from_url | ||
| 19 | from urllib.error import URLError, HTTPError | ||
| 20 | 15 | ||
| 21 | import bb.utils | 16 | import bb.utils |
| 22 | import json | 17 | import json |
| 23 | import logging | 18 | import logging |
| 24 | import os | 19 | import os |
| 25 | import re | 20 | import re |
| 26 | import subprocess | ||
| 27 | import sys | 21 | import sys |
| 28 | import shutil | ||
| 29 | import tempfile | 22 | import tempfile |
| 30 | import urllib.parse | 23 | import urllib.parse |
| 31 | import urllib.request | 24 | import urllib.request |
| 32 | 25 | ||
| 33 | 26 | ||
| 34 | GoImport = namedtuple('GoImport', 'root vcs url suffix') | ||
| 35 | logger = logging.getLogger('recipetool') | 27 | logger = logging.getLogger('recipetool') |
| 36 | CodeRepo = namedtuple( | ||
| 37 | 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor') | ||
| 38 | 28 | ||
| 39 | tinfoil = None | 29 | tinfoil = None |
| 40 | 30 | ||
| 41 | # Regular expression to parse pseudo semantic version | ||
| 42 | # see https://go.dev/ref/mod#pseudo-versions | ||
| 43 | re_pseudo_semver = re.compile( | ||
| 44 | r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$") | ||
| 45 | # Regular expression to parse semantic version | ||
| 46 | re_semver = re.compile( | ||
| 47 | r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") | ||
| 48 | |||
| 49 | 31 | ||
| 50 | def tinfoil_init(instance): | 32 | def tinfoil_init(instance): |
| 51 | global tinfoil | 33 | global tinfoil |
| 52 | tinfoil = instance | 34 | tinfoil = instance |
| 53 | 35 | ||
| 54 | 36 | ||
| 37 | |||
| 55 | class GoRecipeHandler(RecipeHandler): | 38 | class GoRecipeHandler(RecipeHandler): |
| 56 | """Class to handle the go recipe creation""" | 39 | """Class to handle the go recipe creation""" |
| 57 | 40 | ||
@@ -83,577 +66,96 @@ class GoRecipeHandler(RecipeHandler):
| 83 | 66 | ||
| 84 | return bindir | 67 | return bindir |
| 85 | 68 | ||
| 86 | def __resolve_repository_static(self, modulepath): | 69 | @staticmethod |
| 87 | """Resolve the repository in a static manner | 70 | def __unescape_path(path): |
| 88 | 71 | """Unescape capital letters using exclamation points.""" | |
| 89 | The method is based on the go implementation of | 72 | return re.sub(r'!([a-z])', lambda m: m.group(1).upper(), path) |
| 90 | `repoRootFromVCSPaths` in | ||
| 91 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
| 92 | """ | ||
| 93 | |||
| 94 | url = urllib.parse.urlparse("https://" + modulepath) | ||
| 95 | req = urllib.request.Request(url.geturl()) | ||
| 96 | |||
| 97 | try: | ||
| 98 | resp = urllib.request.urlopen(req) | ||
| 99 | # Some modulepath are just redirects to github (or some other vcs | ||
| 100 | # hoster). Therefore, we check if this modulepath redirects to | ||
| 101 | # somewhere else | ||
| 102 | if resp.geturl() != url.geturl(): | ||
| 103 | bb.debug(1, "%s is redirectred to %s" % | ||
| 104 | (url.geturl(), resp.geturl())) | ||
| 105 | url = urllib.parse.urlparse(resp.geturl()) | ||
| 106 | modulepath = url.netloc + url.path | ||
| 107 | |||
| 108 | except URLError as url_err: | ||
| 109 | # This is probably because the module path | ||
| 110 | # contains the subdir and major path. Thus, | ||
| 111 | # we ignore this error for now | ||
| 112 | logger.debug( | ||
| 113 | 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err))) | ||
| 114 | |||
| 115 | host, _, _ = modulepath.partition('/') | ||
| 116 | |||
| 117 | class vcs(Enum): | ||
| 118 | pathprefix = "pathprefix" | ||
| 119 | regexp = "regexp" | ||
| 120 | type = "type" | ||
| 121 | repo = "repo" | ||
| 122 | check = "check" | ||
| 123 | schemelessRepo = "schemelessRepo" | ||
| 124 | |||
| 125 | # GitHub | ||
| 126 | vcsGitHub = {} | ||
| 127 | vcsGitHub[vcs.pathprefix] = "github.com" | ||
| 128 | vcsGitHub[vcs.regexp] = re.compile( | ||
| 129 | r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 130 | vcsGitHub[vcs.type] = "git" | ||
| 131 | vcsGitHub[vcs.repo] = "https://\\g<root>" | ||
| 132 | |||
| 133 | # Bitbucket | ||
| 134 | vcsBitbucket = {} | ||
| 135 | vcsBitbucket[vcs.pathprefix] = "bitbucket.org" | ||
| 136 | vcsBitbucket[vcs.regexp] = re.compile( | ||
| 137 | r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 138 | vcsBitbucket[vcs.type] = "git" | ||
| 139 | vcsBitbucket[vcs.repo] = "https://\\g<root>" | ||
| 140 | |||
| 141 | # IBM DevOps Services (JazzHub) | ||
| 142 | vcsIBMDevOps = {} | ||
| 143 | vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git" | ||
| 144 | vcsIBMDevOps[vcs.regexp] = re.compile( | ||
| 145 | r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 146 | vcsIBMDevOps[vcs.type] = "git" | ||
| 147 | vcsIBMDevOps[vcs.repo] = "https://\\g<root>" | ||
| 148 | |||
| 149 | # Git at Apache | ||
| 150 | vcsApacheGit = {} | ||
| 151 | vcsApacheGit[vcs.pathprefix] = "git.apache.org" | ||
| 152 | vcsApacheGit[vcs.regexp] = re.compile( | ||
| 153 | r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 154 | vcsApacheGit[vcs.type] = "git" | ||
| 155 | vcsApacheGit[vcs.repo] = "https://\\g<root>" | ||
| 156 | |||
| 157 | # Git at OpenStack | ||
| 158 | vcsOpenStackGit = {} | ||
| 159 | vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org" | ||
| 160 | vcsOpenStackGit[vcs.regexp] = re.compile( | ||
| 161 | r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 162 | vcsOpenStackGit[vcs.type] = "git" | ||
| 163 | vcsOpenStackGit[vcs.repo] = "https://\\g<root>" | ||
| 164 | |||
| 165 | # chiselapp.com for fossil | ||
| 166 | vcsChiselapp = {} | ||
| 167 | vcsChiselapp[vcs.pathprefix] = "chiselapp.com" | ||
| 168 | vcsChiselapp[vcs.regexp] = re.compile( | ||
| 169 | r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$') | ||
| 170 | vcsChiselapp[vcs.type] = "fossil" | ||
| 171 | vcsChiselapp[vcs.repo] = "https://\\g<root>" | ||
| 172 | |||
| 173 | # General syntax for any server. | ||
| 174 | # Must be last. | ||
| 175 | vcsGeneralServer = {} | ||
| 176 | vcsGeneralServer[vcs.regexp] = re.compile( | ||
| 177 | "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$") | ||
| 178 | vcsGeneralServer[vcs.schemelessRepo] = True | ||
| 179 | |||
| 180 | vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps, | ||
| 181 | vcsApacheGit, vcsOpenStackGit, vcsChiselapp, | ||
| 182 | vcsGeneralServer] | ||
| 183 | |||
| 184 | if modulepath.startswith("example.net") or modulepath == "rsc.io": | ||
| 185 | logger.warning("Suspicious module path %s" % modulepath) | ||
| 186 | return None | ||
| 187 | if modulepath.startswith("http:") or modulepath.startswith("https:"): | ||
| 188 | logger.warning("Import path should not start with %s %s" % | ||
| 189 | ("http", "https")) | ||
| 190 | return None | ||
| 191 | |||
| 192 | rootpath = None | ||
| 193 | vcstype = None | ||
| 194 | repourl = None | ||
| 195 | suffix = None | ||
| 196 | |||
| 197 | for srv in vcsPaths: | ||
| 198 | m = srv[vcs.regexp].match(modulepath) | ||
| 199 | if vcs.pathprefix in srv: | ||
| 200 | if host == srv[vcs.pathprefix]: | ||
| 201 | rootpath = m.group('root') | ||
| 202 | vcstype = srv[vcs.type] | ||
| 203 | repourl = m.expand(srv[vcs.repo]) | ||
| 204 | suffix = m.group('suffix') | ||
| 205 | break | ||
| 206 | elif m and srv[vcs.schemelessRepo]: | ||
| 207 | rootpath = m.group('root') | ||
| 208 | vcstype = m[vcs.type] | ||
| 209 | repourl = m[vcs.repo] | ||
| 210 | suffix = m.group('suffix') | ||
| 211 | break | ||
| 212 | |||
| 213 | return GoImport(rootpath, vcstype, repourl, suffix) | ||
| 214 | |||
| 215 | def __resolve_repository_dynamic(self, modulepath): | ||
| 216 | """Resolve the repository root in a dynamic manner. | ||
| 217 | |||
| 218 | The method is based on the go implementation of | ||
| 219 | `repoRootForImportDynamic` in | ||
| 220 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
| 221 | """ | ||
| 222 | url = urllib.parse.urlparse("https://" + modulepath) | ||
| 223 | |||
| 224 | class GoImportHTMLParser(HTMLParser): | ||
| 225 | |||
| 226 | def __init__(self): | ||
| 227 | super().__init__() | ||
| 228 | self.__srv = {} | ||
| 229 | |||
| 230 | def handle_starttag(self, tag, attrs): | ||
| 231 | if tag == 'meta' and list( | ||
| 232 | filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)): | ||
| 233 | content = list( | ||
| 234 | filter(lambda a: (a[0] == 'content'), attrs)) | ||
| 235 | if content: | ||
| 236 | srv = content[0][1].split() | ||
| 237 | self.__srv[srv[0]] = srv | ||
| 238 | |||
| 239 | def go_import(self, modulepath): | ||
| 240 | if modulepath in self.__srv: | ||
| 241 | srv = self.__srv[modulepath] | ||
| 242 | return GoImport(srv[0], srv[1], srv[2], None) | ||
| 243 | return None | ||
| 244 | 73 | ||
| 245 | url = url.geturl() + "?go-get=1" | 74 | @staticmethod |
| 246 | req = urllib.request.Request(url) | 75 | def __fold_uri(uri): |
| 76 | """Fold URI for sorting shorter module paths before longer.""" | ||
| 77 | return uri.replace(';', ' ').replace('/', '!') | ||
| 247 | 78 | ||
| 248 | try: | 79 | @staticmethod |
| 249 | body = urllib.request.urlopen(req).read() | 80 | def __go_run_cmd(cmd, cwd, d): |
| 250 | except HTTPError as http_err: | 81 | env = dict(os.environ, PATH=d.getVar('PATH'), GOMODCACHE=d.getVar('GOMODCACHE')) |
| 251 | logger.warning( | 82 | return bb.process.run(cmd, env=env, shell=True, cwd=cwd) |
| 252 | "Unclean status when fetching page from [%s]: %s", url, str(http_err)) | ||
| 253 | body = http_err.fp.read() | ||
| 254 | except URLError as url_err: | ||
| 255 | logger.warning( | ||
| 256 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
| 257 | return None | ||
| 258 | 83 | ||
| 259 | parser = GoImportHTMLParser() | 84 | def __go_mod(self, go_mod, srctree, localfilesdir, extravalues, d): |
| 260 | parser.feed(body.decode('utf-8')) | 85 | moddir = d.getVar('GOMODCACHE') |
| 261 | parser.close() | ||
| 262 | 86 | ||
| 263 | return parser.go_import(modulepath) | 87 | # List main packages and their dependencies with the go list command. |
| 88 | stdout, _ = self.__go_run_cmd(f"go list -json=Dir,Module -deps {go_mod['Module']['Path']}/...", srctree, d) | ||
| 89 | pkgs = json.loads('[' + stdout.replace('}\n{', '},\n{') + ']') | ||
| 264 | 90 | ||
| 265 | def __resolve_from_golang_proxy(self, modulepath, version): | 91 | # Collect licenses for the dependencies. |
| 266 | """ | 92 | licenses = set() |
| 267 | Resolves repository data from golang proxy | 93 | lic_files_chksum = [] |
| 268 | """ | 94 | lic_files = {} |
| 269 | url = urllib.parse.urlparse("https://proxy.golang.org/" | 95 | for pkg in pkgs: |
| 270 | + modulepath | 96 | # TODO: If the package is in a subdirectory with its own license |
| 271 | + "/@v/" | 97 | # files then report those istead of the license files found in the |
| 272 | + version | 98 | # module root directory. |
| 273 | + ".info") | 99 | mod = pkg.get('Module', None) |
| 100 | if not mod or mod.get('Main', False): | ||
| 101 | continue | ||
| 102 | path = os.path.relpath(mod['Dir'], moddir) | ||
| 103 | for lic in find_licenses(mod['Dir'], d): | ||
| 104 | lic_files[os.path.join(path, lic[1])] = (lic[0], lic[2]) | ||
| 105 | |||
| 106 | for lic_file in lic_files: | ||
| 107 | licenses.add(lic_files[lic_file][0]) | ||
| 108 | lic_files_chksum.append( | ||
| 109 | f'file://pkg/mod/{lic_file};md5={lic_files[lic_file][1]}') | ||
| 110 | |||
| 111 | # Collect the module cache files downloaded by the go list command as | ||
| 112 | # the go list command knows best what the go list command needs and it | ||
| 113 | # needs more files in the module cache than the go install command as | ||
| 114 | # it doesn't do the dependency pruning mentioned in the Go module | ||
| 115 | # reference, https://go.dev/ref/mod, for go 1.17 or higher. | ||
| 116 | src_uris = [] | ||
| 117 | downloaddir = os.path.join(moddir, 'cache', 'download') | ||
| 118 | for dirpath, _, filenames in os.walk(downloaddir): | ||
| 119 | path, base = os.path.split(os.path.relpath(dirpath, downloaddir)) | ||
| 120 | if base != '@v': | ||
| 121 | continue | ||
| 122 | path = self.__unescape_path(path) | ||
| 123 | zipver = None | ||
| 124 | for name in filenames: | ||
| 125 | ver, ext = os.path.splitext(name) | ||
| 126 | if ext == '.zip': | ||
| 127 | chksum = bb.utils.sha256_file(os.path.join(dirpath, name)) | ||
| 128 | src_uris.append(f'gomod://{path};version={ver};sha256sum={chksum}') | ||
| 129 | zipver = ver | ||
| 130 | break | ||
| 131 | for name in filenames: | ||
| 132 | ver, ext = os.path.splitext(name) | ||
| 133 | if ext == '.mod' and ver != zipver: | ||
| 134 | chksum = bb.utils.sha256_file(os.path.join(dirpath, name)) | ||
| 135 | src_uris.append(f'gomod://{path};version={ver};mod=1;sha256sum={chksum}') | ||
| 274 | 136 | ||
| 275 | # Transform url to lower case, golang proxy doesn't like mixed case | 137 | self.__go_run_cmd("go clean -modcache", srctree, d) |
| 276 | req = urllib.request.Request(url.geturl().lower()) | ||
| 277 | 138 | ||
| 278 | try: | 139 | licenses_basename = "{pn}-licenses.inc" |
| 279 | resp = urllib.request.urlopen(req) | 140 | licenses_filename = os.path.join(localfilesdir, licenses_basename) |
| 280 | except URLError as url_err: | 141 | with open(licenses_filename, "w") as f: |
| 281 | logger.warning( | 142 | f.write(f'GO_MOD_LICENSES = "{" & ".join(sorted(licenses))}"\n\n') |
| 282 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | 143 | f.write('LIC_FILES_CHKSUM += "\\\n') |
| 283 | return None | 144 | for lic in sorted(lic_files_chksum, key=self.__fold_uri): |
| 145 | f.write(' ' + lic + ' \\\n') | ||
| 146 | f.write('"\n') | ||
| 284 | 147 | ||
| 285 | golang_proxy_res = resp.read().decode('utf-8') | 148 | extravalues['extrafiles'][f"../{licenses_basename}"] = licenses_filename |
| 286 | modinfo = json.loads(golang_proxy_res) | ||
| 287 | |||
| 288 | if modinfo and 'Origin' in modinfo: | ||
| 289 | origin = modinfo['Origin'] | ||
| 290 | _root_url = urllib.parse.urlparse(origin['URL']) | ||
| 291 | |||
| 292 | # We normalize the repo URL since we don't want the scheme in it | ||
| 293 | _subdir = origin['Subdir'] if 'Subdir' in origin else None | ||
| 294 | _root, _, _ = self.__split_path_version(modulepath) | ||
| 295 | if _subdir: | ||
| 296 | _root = _root[:-len(_subdir)].strip('/') | ||
| 297 | |||
| 298 | _commit = origin['Hash'] | ||
| 299 | _vcs = origin['VCS'] | ||
| 300 | return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit) | ||
| 301 | |||
| 302 | return None | ||
| 303 | |||
| 304 | def __resolve_repository(self, modulepath): | ||
| 305 | """ | ||
| 306 | Resolves src uri from go module-path | ||
| 307 | """ | ||
| 308 | repodata = self.__resolve_repository_static(modulepath) | ||
| 309 | if not repodata or not repodata.url: | ||
| 310 | repodata = self.__resolve_repository_dynamic(modulepath) | ||
| 311 | if not repodata or not repodata.url: | ||
| 312 | logger.error( | ||
| 313 | "Could not resolve repository for module path '%s'" % modulepath) | ||
| 314 | # There is no way to recover from this | ||
| 315 | sys.exit(14) | ||
| 316 | if repodata: | ||
| 317 | logger.debug(1, "Resolved download path for import '%s' => %s" % ( | ||
| 318 | modulepath, repodata.url)) | ||
| 319 | return repodata | ||
| 320 | |||
| 321 | def __split_path_version(self, path): | ||
| 322 | i = len(path) | ||
| 323 | dot = False | ||
| 324 | for j in range(i, 0, -1): | ||
| 325 | if path[j - 1] < '0' or path[j - 1] > '9': | ||
| 326 | break | ||
| 327 | if path[j - 1] == '.': | ||
| 328 | dot = True | ||
| 329 | break | ||
| 330 | i = j - 1 | ||
| 331 | |||
| 332 | if i <= 1 or i == len( | ||
| 333 | path) or path[i - 1] != 'v' or path[i - 2] != '/': | ||
| 334 | return path, "", True | ||
| 335 | |||
| 336 | prefix, pathMajor = path[:i - 2], path[i - 2:] | ||
| 337 | if dot or len( | ||
| 338 | pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1": | ||
| 339 | return path, "", False | ||
| 340 | |||
| 341 | return prefix, pathMajor, True | ||
| 342 | |||
| 343 | def __get_path_major(self, pathMajor): | ||
| 344 | if not pathMajor: | ||
| 345 | return "" | ||
| 346 | |||
| 347 | if pathMajor[0] != '/' and pathMajor[0] != '.': | ||
| 348 | logger.error( | ||
| 349 | "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor) | ||
| 350 | |||
| 351 | if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"): | ||
| 352 | pathMajor = pathMajor[:len("-unstable") - 2] | ||
| 353 | |||
| 354 | return pathMajor[1:] | ||
| 355 | |||
| 356 | def __build_coderepo(self, repo, path): | ||
| 357 | codedir = "" | ||
| 358 | pathprefix, pathMajor, _ = self.__split_path_version(path) | ||
| 359 | if repo.root == path: | ||
| 360 | pathprefix = path | ||
| 361 | elif path.startswith(repo.root): | ||
| 362 | codedir = pathprefix[len(repo.root):].strip('/') | ||
| 363 | |||
| 364 | pseudoMajor = self.__get_path_major(pathMajor) | ||
| 365 | |||
| 366 | logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'", | ||
| 367 | repo.root, codedir, pathprefix, pathMajor, pseudoMajor) | ||
| 368 | |||
| 369 | return CodeRepo(path, repo.root, codedir, | ||
| 370 | pathMajor, pathprefix, pseudoMajor) | ||
| 371 | |||
| 372 | def __resolve_version(self, repo, path, version): | ||
| 373 | hash = None | ||
| 374 | coderoot = self.__build_coderepo(repo, path) | ||
| 375 | |||
| 376 | def vcs_fetch_all(): | ||
| 377 | tmpdir = tempfile.mkdtemp() | ||
| 378 | clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir) | ||
| 379 | bb.process.run(clone_cmd) | ||
| 380 | log_cmd = "git log --all --pretty='%H %d' --decorate=short" | ||
| 381 | output, _ = bb.process.run( | ||
| 382 | log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir) | ||
| 383 | bb.utils.prunedir(tmpdir) | ||
| 384 | return output.strip().split('\n') | ||
| 385 | |||
| 386 | def vcs_fetch_remote(tag): | ||
| 387 | # add * to grab ^{} | ||
| 388 | refs = {} | ||
| 389 | ls_remote_cmd = "git ls-remote -q --tags {} {}*".format( | ||
| 390 | repo.url, tag) | ||
| 391 | output, _ = bb.process.run(ls_remote_cmd) | ||
| 392 | output = output.strip().split('\n') | ||
| 393 | for line in output: | ||
| 394 | f = line.split(maxsplit=1) | ||
| 395 | if len(f) != 2: | ||
| 396 | continue | ||
| 397 | |||
| 398 | for prefix in ["HEAD", "refs/heads/", "refs/tags/"]: | ||
| 399 | if f[1].startswith(prefix): | ||
| 400 | refs[f[1][len(prefix):]] = f[0] | ||
| 401 | |||
| 402 | for key, hash in refs.items(): | ||
| 403 | if key.endswith(r"^{}"): | ||
| 404 | refs[key.strip(r"^{}")] = hash | ||
| 405 | |||
| 406 | return refs[tag] | ||
| 407 | |||
| 408 | m_pseudo_semver = re_pseudo_semver.match(version) | ||
| 409 | |||
| 410 | if m_pseudo_semver: | ||
| 411 | remote_refs = vcs_fetch_all() | ||
| 412 | short_commit = m_pseudo_semver.group('commithash') | ||
| 413 | for l in remote_refs: | ||
| 414 | r = l.split(maxsplit=1) | ||
| 415 | sha1 = r[0] if len(r) else None | ||
| 416 | if not sha1: | ||
| 417 | logger.error( | ||
| 418 | "Ups: could not resolve abbref commit for %s" % short_commit) | ||
| 419 | |||
| 420 | elif sha1.startswith(short_commit): | ||
| 421 | hash = sha1 | ||
| 422 | break | ||
| 423 | else: | ||
| 424 | m_semver = re_semver.match(version) | ||
| 425 | if m_semver: | ||
| 426 | |||
| 427 | def get_sha1_remote(re): | ||
| 428 | rsha1 = None | ||
| 429 | for line in remote_refs: | ||
| 430 | # Split lines of the following format: | ||
| 431 | # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag) | ||
| 432 | lineparts = line.split(maxsplit=1) | ||
| 433 | sha1 = lineparts[0] if len(lineparts) else None | ||
| 434 | refstring = lineparts[1] if len( | ||
| 435 | lineparts) == 2 else None | ||
| 436 | if refstring: | ||
| 437 | # Normalize tag string and split in case of multiple | ||
| 438 | # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...) | ||
| 439 | refs = refstring.strip('(), ').split(',') | ||
| 440 | for ref in refs: | ||
| 441 | if re.match(ref.strip()): | ||
| 442 | rsha1 = sha1 | ||
| 443 | return rsha1 | ||
| 444 | |||
| 445 | semver = "v" + m_semver.group('major') + "."\ | ||
| 446 | + m_semver.group('minor') + "."\ | ||
| 447 | + m_semver.group('patch') \ | ||
| 448 | + (("-" + m_semver.group('prerelease')) | ||
| 449 | if m_semver.group('prerelease') else "") | ||
| 450 | |||
| 451 | tag = os.path.join( | ||
| 452 | coderoot.codeDir, semver) if coderoot.codeDir else semver | ||
| 453 | |||
| 454 | # probe tag using 'ls-remote', which is faster than fetching | ||
| 455 | # complete history | ||
| 456 | hash = vcs_fetch_remote(tag) | ||
| 457 | if not hash: | ||
| 458 | # backup: fetch complete history | ||
| 459 | remote_refs = vcs_fetch_all() | ||
| 460 | hash = get_sha1_remote( | ||
| 461 | re.compile(fr"(tag:|HEAD ->) ({tag})")) | ||
| 462 | |||
| 463 | logger.debug( | ||
| 464 | "Resolving commit for tag '%s' -> '%s'", tag, hash) | ||
| 465 | return hash | ||
| 466 | |||
| 467 | def __generate_srcuri_inline_fcn(self, path, version, replaces=None): | ||
| 468 | """Generate SRC_URI functions for go imports""" | ||
| 469 | |||
| 470 | logger.info("Resolving repository for module %s", path) | ||
| 471 | # First try to resolve repo and commit from golang proxy | ||
| 472 | # Most info is already there and we don't have to go through the | ||
| 473 | # repository or even perform the version resolve magic | ||
| 474 | golang_proxy_info = self.__resolve_from_golang_proxy(path, version) | ||
| 475 | if golang_proxy_info: | ||
| 476 | repo = golang_proxy_info[0] | ||
| 477 | commit = golang_proxy_info[1] | ||
| 478 | else: | ||
| 479 | # Fallback | ||
| 480 | # Resolve repository by 'hand' | ||
| 481 | repo = self.__resolve_repository(path) | ||
| 482 | commit = self.__resolve_version(repo, path, version) | ||
| 483 | |||
| 484 | url = urllib.parse.urlparse(repo.url) | ||
| 485 | repo_url = url.netloc + url.path | ||
| 486 | |||
| 487 | coderoot = self.__build_coderepo(repo, path) | ||
| 488 | |||
| 489 | inline_fcn = "${@go_src_uri(" | ||
| 490 | inline_fcn += f"'{repo_url}','{version}'" | ||
| 491 | if repo_url != path: | ||
| 492 | inline_fcn += f",path='{path}'" | ||
| 493 | if coderoot.codeDir: | ||
| 494 | inline_fcn += f",subdir='{coderoot.codeDir}'" | ||
| 495 | if repo.vcs != 'git': | ||
| 496 | inline_fcn += f",vcs='{repo.vcs}'" | ||
| 497 | if replaces: | ||
| 498 | inline_fcn += f",replaces='{replaces}'" | ||
| 499 | if coderoot.pathMajor: | ||
| 500 | inline_fcn += f",pathmajor='{coderoot.pathMajor}'" | ||
| 501 | inline_fcn += ")}" | ||
| 502 | |||
| 503 | return inline_fcn, commit | ||
| 504 | |||
| 505 | def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
| 506 | |||
| 507 | import re | ||
| 508 | src_uris = [] | ||
| 509 | src_revs = [] | ||
| 510 | |||
| 511 | def generate_src_rev(path, version, commithash): | ||
| 512 | src_rev = f"# {path}@{version} => {commithash}\n" | ||
| 513 | # Ups...maybe someone manipulated the source repository and the | ||
| 514 | # version or commit could not be resolved. This is a sign of | ||
| 515 | # a) the supply chain was manipulated (bad) | ||
| 516 | # b) the implementation for the version resolving didn't work | ||
| 517 | # anymore (less bad) | ||
| 518 | if not commithash: | ||
| 519 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
| 520 | src_rev += f"#!!! Could not resolve version !!!\n" | ||
| 521 | src_rev += f"#!!! Possible supply chain attack !!!\n" | ||
| 522 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
| 523 | src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\"" | ||
| 524 | |||
| 525 | return src_rev | ||
| 526 | |||
| 527 | # we first go over replacement list, because we are essentialy | ||
| 528 | # interested only in the replaced path | ||
| 529 | if go_mod['Replace']: | ||
| 530 | for replacement in go_mod['Replace']: | ||
| 531 | oldpath = replacement['Old']['Path'] | ||
| 532 | path = replacement['New']['Path'] | ||
| 533 | version = '' | ||
| 534 | if 'Version' in replacement['New']: | ||
| 535 | version = replacement['New']['Version'] | ||
| 536 | |||
| 537 | if os.path.exists(os.path.join(srctree, path)): | ||
| 538 | # the module refers to the local path, remove it from requirement list | ||
| 539 | # because it's a local module | ||
| 540 | go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath] | ||
| 541 | else: | ||
| 542 | # Replace the path and the version, so we don't iterate replacement list anymore | ||
| 543 | for require in go_mod['Require']: | ||
| 544 | if require['Path'] == oldpath: | ||
| 545 | require.update({'Path': path, 'Version': version}) | ||
| 546 | break | ||
| 547 | |||
| 548 | for require in go_mod['Require']: | ||
| 549 | path = require['Path'] | ||
| 550 | version = require['Version'] | ||
| 551 | |||
| 552 | inline_fcn, commithash = self.__generate_srcuri_inline_fcn( | ||
| 553 | path, version) | ||
| 554 | src_uris.append(inline_fcn) | ||
| 555 | src_revs.append(generate_src_rev(path, version, commithash)) | ||
| 556 | |||
| 557 | # strip version part from module URL /vXX | ||
| 558 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
| 559 | pn, _ = determine_from_url(baseurl) | ||
| 560 | go_mods_basename = "%s-modules.inc" % pn | ||
| 561 | 149 | ||
| 150 | go_mods_basename = "{pn}-go-mods.inc" | ||
| 562 | go_mods_filename = os.path.join(localfilesdir, go_mods_basename) | 151 | go_mods_filename = os.path.join(localfilesdir, go_mods_basename) |
| 563 | with open(go_mods_filename, "w") as f: | 152 | with open(go_mods_filename, "w") as f: |
| 564 | # We introduce this indirection to make the tests a little easier | 153 | f.write('SRC_URI += "\\\n') |
| 565 | f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n") | 154 | for uri in sorted(src_uris, key=self.__fold_uri): |
| 566 | f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n") | 155 | f.write(' ' + uri + ' \\\n') |
| 567 | for uri in src_uris: | 156 | f.write('"\n') |
| 568 | f.write(" " + uri + " \\\n") | ||
| 569 | f.write("\"\n\n") | ||
| 570 | for rev in src_revs: | ||
| 571 | f.write(rev + "\n") | ||
| 572 | |||
| 573 | extravalues['extrafiles'][go_mods_basename] = go_mods_filename | ||
| 574 | |||
| 575 | def __go_run_cmd(self, cmd, cwd, d): | ||
| 576 | return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')), | ||
| 577 | shell=True, cwd=cwd) | ||
| 578 | |||
| 579 | def __go_native_version(self, d): | ||
| 580 | stdout, _ = self.__go_run_cmd("go version", None, d) | ||
| 581 | m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout) | ||
| 582 | major = int(m.group(2)) | ||
| 583 | minor = int(m.group(3)) | ||
| 584 | patch = int(m.group(4)) | ||
| 585 | |||
| 586 | return major, minor, patch | ||
| 587 | |||
| 588 | def __go_mod_patch(self, srctree, localfilesdir, extravalues, d): | ||
| 589 | |||
| 590 | patchfilename = "go.mod.patch" | ||
| 591 | go_native_version_major, go_native_version_minor, _ = self.__go_native_version( | ||
| 592 | d) | ||
| 593 | self.__go_run_cmd("go mod tidy -go=%d.%d" % | ||
| 594 | (go_native_version_major, go_native_version_minor), srctree, d) | ||
| 595 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | ||
| 596 | |||
| 597 | # Create patch in order to upgrade go version | ||
| 598 | self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d) | ||
| 599 | # Restore original state | ||
| 600 | self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d) | ||
| 601 | |||
| 602 | go_mod = json.loads(stdout) | ||
| 603 | tmpfile = os.path.join(localfilesdir, patchfilename) | ||
| 604 | shutil.move(os.path.join(srctree, patchfilename), tmpfile) | ||
| 605 | |||
| 606 | extravalues['extrafiles'][patchfilename] = tmpfile | ||
| 607 | |||
| 608 | return go_mod, patchfilename | ||
| 609 | 157 | ||
| 610 | def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d): | 158 | extravalues['extrafiles'][f"../{go_mods_basename}"] = go_mods_filename |
| 611 | # Perform vendoring to retrieve the correct modules.txt | ||
| 612 | tmp_vendor_dir = tempfile.mkdtemp() | ||
| 613 | |||
| 614 | # -v causes to go to print modules.txt to stderr | ||
| 615 | _, stderr = self.__go_run_cmd( | ||
| 616 | "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d) | ||
| 617 | |||
| 618 | modules_txt_basename = "modules.txt" | ||
| 619 | modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename) | ||
| 620 | with open(modules_txt_filename, "w") as f: | ||
| 621 | f.write(stderr) | ||
| 622 | |||
| 623 | extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename | ||
| 624 | |||
| 625 | licenses = [] | ||
| 626 | lic_files_chksum = [] | ||
| 627 | licvalues = find_licenses(tmp_vendor_dir, d) | ||
| 628 | shutil.rmtree(tmp_vendor_dir) | ||
| 629 | |||
| 630 | if licvalues: | ||
| 631 | for licvalue in licvalues: | ||
| 632 | license = licvalue[0] | ||
| 633 | lics = tidy_licenses(fixup_license(license)) | ||
| 634 | lics = [lic for lic in lics if lic not in licenses] | ||
| 635 | if len(lics): | ||
| 636 | licenses.extend(lics) | ||
| 637 | lic_files_chksum.append( | ||
| 638 | 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2])) | ||
| 639 | |||
| 640 | # strip version part from module URL /vXX | ||
| 641 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
| 642 | pn, _ = determine_from_url(baseurl) | ||
| 643 | licenses_basename = "%s-licenses.inc" % pn | ||
| 644 | |||
| 645 | licenses_filename = os.path.join(localfilesdir, licenses_basename) | ||
| 646 | with open(licenses_filename, "w") as f: | ||
| 647 | f.write("GO_MOD_LICENSES = \"%s\"\n\n" % | ||
| 648 | ' & '.join(sorted(licenses, key=str.casefold))) | ||
| 649 | # We introduce this indirection to make the tests a little easier | ||
| 650 | f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n") | ||
| 651 | f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n") | ||
| 652 | for lic in lic_files_chksum: | ||
| 653 | f.write(" " + lic + " \\\n") | ||
| 654 | f.write("\"\n") | ||
| 655 | |||
| 656 | extravalues['extrafiles'][licenses_basename] = licenses_filename | ||
| 657 | 159 | ||
| 658 | def process(self, srctree, classes, lines_before, | 160 | def process(self, srctree, classes, lines_before, |
| 659 | lines_after, handled, extravalues): | 161 | lines_after, handled, extravalues): |
@@ -672,56 +174,30 @@ class GoRecipeHandler(RecipeHandler):
| 672 | 174 | ||
| 673 | d.prependVar('PATH', '%s:' % go_bindir) | 175 | d.prependVar('PATH', '%s:' % go_bindir) |
| 674 | handled.append('buildsystem') | 176 | handled.append('buildsystem') |
| 675 | classes.append("go-vendor") | 177 | classes.append("go-mod") |
| 676 | 178 | ||
| 677 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | 179 | tmp_mod_dir = tempfile.mkdtemp(prefix='go-mod-') |
| 180 | d.setVar('GOMODCACHE', tmp_mod_dir) | ||
| 678 | 181 | ||
| 182 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | ||
| 679 | go_mod = json.loads(stdout) | 183 | go_mod = json.loads(stdout) |
| 680 | go_import = go_mod['Module']['Path'] | 184 | go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path']) |
| 681 | go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) | ||
| 682 | go_version_major = int(go_version_match.group(1)) | ||
| 683 | go_version_minor = int(go_version_match.group(2)) | ||
| 684 | src_uris = [] | ||
| 685 | 185 | ||
| 686 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') | 186 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') |
| 687 | extravalues.setdefault('extrafiles', {}) | 187 | extravalues.setdefault('extrafiles', {}) |
| 688 | 188 | ||
| 689 | # Use an explicit name determined from the module name because it | 189 | # Write the ${BPN}-licenses.inc and ${BPN}-go-mods.inc files |
| 690 | # might differ from the actual URL for replaced modules | 190 | self.__go_mod(go_mod, srctree, localfilesdir, extravalues, d) |
| 691 | # strip version part from module URL /vXX | ||
| 692 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
| 693 | pn, _ = determine_from_url(baseurl) | ||
| 694 | |||
| 695 | # go.mod files with version < 1.17 may not include all indirect | ||
| 696 | # dependencies. Thus, we have to upgrade the go version. | ||
| 697 | if go_version_major == 1 and go_version_minor < 17: | ||
| 698 | logger.warning( | ||
| 699 | "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") | ||
| 700 | go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, | ||
| 701 | extravalues, d) | ||
| 702 | src_uris.append( | ||
| 703 | "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename)) | ||
| 704 | |||
| 705 | # Check whether the module is vendored. If so, we have nothing to do. | ||
| 706 | # Otherwise we gather all dependencies and add them to the recipe | ||
| 707 | if not os.path.exists(os.path.join(srctree, "vendor")): | ||
| 708 | |||
| 709 | # Write additional $BPN-modules.inc file | ||
| 710 | self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) | ||
| 711 | lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") | ||
| 712 | lines_before.append("require %s-licenses.inc" % (pn)) | ||
| 713 | |||
| 714 | self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) | ||
| 715 | |||
| 716 | self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) | ||
| 717 | lines_before.append("require %s-modules.inc" % (pn)) | ||
| 718 | 191 | ||
| 719 | # Do generic license handling | 192 | # Do generic license handling |
| 720 | handle_license_vars(srctree, lines_before, handled, extravalues, d) | 193 | handle_license_vars(srctree, lines_before, handled, extravalues, d) |
| 721 | self.__rewrite_lic_uri(lines_before) | 194 | self.__rewrite_lic_vars(lines_before) |
| 195 | |||
| 196 | self.__rewrite_src_uri(lines_before) | ||
| 722 | 197 | ||
| 723 | lines_before.append("GO_IMPORT = \"{}\"".format(baseurl)) | 198 | lines_before.append('require ${BPN}-licenses.inc') |
| 724 | lines_before.append("SRCREV_FORMAT = \"${BPN}\"") | 199 | lines_before.append('require ${BPN}-go-mods.inc') |
| 200 | lines_before.append(f'GO_IMPORT = "{go_import}"') | ||
| 725 | 201 | ||
| 726 | def __update_lines_before(self, updated, newlines, lines_before): | 202 | def __update_lines_before(self, updated, newlines, lines_before): |
| 727 | if updated: | 203 | if updated: |
@@ -733,9 +209,11 @@ class GoRecipeHandler(RecipeHandler):
| 733 | lines_before.append(line) | 209 | lines_before.append(line) |
| 734 | return updated | 210 | return updated |
| 735 | 211 | ||
| 736 | def __rewrite_lic_uri(self, lines_before): | 212 | def __rewrite_lic_vars(self, lines_before): |
| 737 | 213 | ||
| 738 | def varfunc(varname, origvalue, op, newlines): | 214 | def varfunc(varname, origvalue, op, newlines): |
| 215 | if varname == 'LICENSE': | ||
| 216 | return ' & '.join((origvalue, '${GO_MOD_LICENSES}')), None, -1, True | ||
| 739 | if varname == 'LIC_FILES_CHKSUM': | 217 | if varname == 'LIC_FILES_CHKSUM': |
| 740 | new_licenses = [] | 218 | new_licenses = [] |
| 741 | licenses = origvalue.split('\\') | 219 | licenses = origvalue.split('\\') |
@@ -757,15 +235,14 @@
| 757 | return origvalue, None, 0, True | 235 | return origvalue, None, 0, True |
| 758 | 236 | ||
| 759 | updated, newlines = bb.utils.edit_metadata( | 237 | updated, newlines = bb.utils.edit_metadata( |
| 760 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) | 238 | lines_before, ['LICENSE', 'LIC_FILES_CHKSUM'], varfunc) |
| 761 | return self.__update_lines_before(updated, newlines, lines_before) | 239 | return self.__update_lines_before(updated, newlines, lines_before) |
| 762 | 240 | ||
| 763 | def __rewrite_src_uri(self, lines_before, additional_uris = []): | 241 | def __rewrite_src_uri(self, lines_before): |
| 764 | 242 | ||
| 765 | def varfunc(varname, origvalue, op, newlines): | 243 | def varfunc(varname, origvalue, op, newlines): |
| 766 | if varname == 'SRC_URI': | 244 | if varname == 'SRC_URI': |
| 767 | src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] | 245 | src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'] |
| 768 | src_uri.extend(additional_uris) | ||
| 769 | return src_uri, None, -1, True | 246 | return src_uri, None, -1, True |
| 770 | return origvalue, None, 0, True | 247 | return origvalue, None, 0, True |
| 771 | 248 | ||
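
One detail worth noting in the new `__go_mod()` helper above: `go list -json` prints a stream of concatenated JSON objects rather than a JSON array, which the patch turns into an array with a string replacement (`'}\n{'` to `'},\n{'`) before calling `json.loads()`. Below is a minimal sketch of an alternative, incremental way to consume such a stream; the script itself is illustrative and not part of the patch, and only the `go list -json=Dir,Module -deps` invocation and the `Module`/`Main`/`Dir` fields are taken from the code above.

```python
#!/usr/bin/env python3
# Illustrative sketch only (not part of the patch): parse the concatenated
# JSON objects printed by `go list -json ...` with an incremental decoder
# instead of joining them into an array via string replacement.
import json
import subprocess


def parse_json_stream(text):
    """Yield each JSON object from a stream of concatenated objects."""
    decoder = json.JSONDecoder()
    idx = 0
    while idx < len(text):
        # Skip whitespace between objects.
        while idx < len(text) and text[idx].isspace():
            idx += 1
        if idx >= len(text):
            break
        obj, idx = decoder.raw_decode(text, idx)
        yield obj


if __name__ == '__main__':
    # Run from within a Go module; the field selection mirrors the patch.
    out = subprocess.run(
        ['go', 'list', '-json=Dir,Module', '-deps', './...'],
        check=True, capture_output=True, text=True).stdout
    for pkg in parse_json_stream(out):
        mod = pkg.get('Module')
        if mod and not mod.get('Main', False):
            # Print each dependency's module path and its module cache directory.
            print(mod.get('Path'), mod.get('Dir'))
```

Using `raw_decode()` avoids relying on the exact `}\n{` separator that the replace-based join in the patch assumes.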
