diff options
| -rw-r--r-- | bitbake/lib/bb/__init__.py | 5 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/__init__.py | 832 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/bzr.py | 148 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/cvs.py | 172 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/git.py | 339 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/hg.py | 180 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/local.py | 73 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/osc.py | 143 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/perforce.py | 206 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/repo.py | 98 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/ssh.py | 118 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/svk.py | 104 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/svn.py | 204 | ||||
| -rw-r--r-- | bitbake/lib/bb/fetch/wget.py | 93 |
14 files changed, 2 insertions, 2713 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py index dce561006d..8d579c7940 100644 --- a/bitbake/lib/bb/__init__.py +++ b/bitbake/lib/bb/__init__.py | |||
| @@ -79,9 +79,8 @@ if "BBDEBUG" in os.environ: | |||
| 79 | if level: | 79 | if level: |
| 80 | bb.msg.set_debug_level(level) | 80 | bb.msg.set_debug_level(level) |
| 81 | 81 | ||
| 82 | if os.environ.get("BBFETCH2"): | 82 | from bb import fetch2 as fetch |
| 83 | from bb import fetch2 as fetch | 83 | sys.modules['bb.fetch'] = sys.modules['bb.fetch2'] |
| 84 | sys.modules['bb.fetch'] = sys.modules['bb.fetch2'] | ||
| 85 | 84 | ||
| 86 | # Messaging convenience functions | 85 | # Messaging convenience functions |
| 87 | def plain(*args): | 86 | def plain(*args): |
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py deleted file mode 100644 index 18988646b9..0000000000 --- a/bitbake/lib/bb/fetch/__init__.py +++ /dev/null | |||
| @@ -1,832 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | """ | ||
| 9 | |||
| 10 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 11 | # | ||
| 12 | # This program is free software; you can redistribute it and/or modify | ||
| 13 | # it under the terms of the GNU General Public License version 2 as | ||
| 14 | # published by the Free Software Foundation. | ||
| 15 | # | ||
| 16 | # This program is distributed in the hope that it will be useful, | ||
| 17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 19 | # GNU General Public License for more details. | ||
| 20 | # | ||
| 21 | # You should have received a copy of the GNU General Public License along | ||
| 22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 24 | # | ||
| 25 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 26 | |||
| 27 | from __future__ import absolute_import | ||
| 28 | from __future__ import print_function | ||
| 29 | import os, re | ||
| 30 | import logging | ||
| 31 | import bb | ||
| 32 | from bb import data | ||
| 33 | from bb import persist_data | ||
| 34 | from bb import utils | ||
| 35 | |||
| 36 | __version__ = "1" | ||
| 37 | |||
| 38 | logger = logging.getLogger("BitBake.Fetch") | ||
| 39 | |||
| 40 | class MalformedUrl(Exception): | ||
| 41 | """Exception raised when encountering an invalid url""" | ||
| 42 | |||
| 43 | class FetchError(Exception): | ||
| 44 | """Exception raised when a download fails""" | ||
| 45 | |||
| 46 | class NoMethodError(Exception): | ||
| 47 | """Exception raised when there is no method to obtain a supplied url or set of urls""" | ||
| 48 | |||
| 49 | class MissingParameterError(Exception): | ||
| 50 | """Exception raised when a fetch method is missing a critical parameter in the url""" | ||
| 51 | |||
| 52 | class ParameterError(Exception): | ||
| 53 | """Exception raised when a url cannot be proccessed due to invalid parameters.""" | ||
| 54 | |||
| 55 | class MD5SumError(Exception): | ||
| 56 | """Exception raised when a MD5SUM of a file does not match the expected one""" | ||
| 57 | |||
| 58 | class InvalidSRCREV(Exception): | ||
| 59 | """Exception raised when an invalid SRCREV is encountered""" | ||
| 60 | |||
| 61 | def decodeurl(url): | ||
| 62 | """Decodes an URL into the tokens (scheme, network location, path, | ||
| 63 | user, password, parameters). | ||
| 64 | """ | ||
| 65 | |||
| 66 | m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) | ||
| 67 | if not m: | ||
| 68 | raise MalformedUrl(url) | ||
| 69 | |||
| 70 | type = m.group('type') | ||
| 71 | location = m.group('location') | ||
| 72 | if not location: | ||
| 73 | raise MalformedUrl(url) | ||
| 74 | user = m.group('user') | ||
| 75 | parm = m.group('parm') | ||
| 76 | |||
| 77 | locidx = location.find('/') | ||
| 78 | if locidx != -1 and type.lower() != 'file': | ||
| 79 | host = location[:locidx] | ||
| 80 | path = location[locidx:] | ||
| 81 | else: | ||
| 82 | host = "" | ||
| 83 | path = location | ||
| 84 | if user: | ||
| 85 | m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user) | ||
| 86 | if m: | ||
| 87 | user = m.group('user') | ||
| 88 | pswd = m.group('pswd') | ||
| 89 | else: | ||
| 90 | user = '' | ||
| 91 | pswd = '' | ||
| 92 | |||
| 93 | p = {} | ||
| 94 | if parm: | ||
| 95 | for s in parm.split(';'): | ||
| 96 | s1, s2 = s.split('=') | ||
| 97 | p[s1] = s2 | ||
| 98 | |||
| 99 | return (type, host, path, user, pswd, p) | ||
| 100 | |||
| 101 | def encodeurl(decoded): | ||
| 102 | """Encodes a URL from tokens (scheme, network location, path, | ||
| 103 | user, password, parameters). | ||
| 104 | """ | ||
| 105 | |||
| 106 | (type, host, path, user, pswd, p) = decoded | ||
| 107 | |||
| 108 | if not type or not path: | ||
| 109 | raise MissingParameterError("Type or path url components missing when encoding %s" % decoded) | ||
| 110 | url = '%s://' % type | ||
| 111 | if user: | ||
| 112 | url += "%s" % user | ||
| 113 | if pswd: | ||
| 114 | url += ":%s" % pswd | ||
| 115 | url += "@" | ||
| 116 | if host: | ||
| 117 | url += "%s" % host | ||
| 118 | url += "%s" % path | ||
| 119 | if p: | ||
| 120 | for parm in p: | ||
| 121 | url += ";%s=%s" % (parm, p[parm]) | ||
| 122 | |||
| 123 | return url | ||
| 124 | |||
| 125 | def uri_replace(uri, uri_find, uri_replace, d): | ||
| 126 | if not uri or not uri_find or not uri_replace: | ||
| 127 | logger.debug(1, "uri_replace: passed an undefined value, not replacing") | ||
| 128 | uri_decoded = list(decodeurl(uri)) | ||
| 129 | uri_find_decoded = list(decodeurl(uri_find)) | ||
| 130 | uri_replace_decoded = list(decodeurl(uri_replace)) | ||
| 131 | result_decoded = ['', '', '', '', '', {}] | ||
| 132 | for i in uri_find_decoded: | ||
| 133 | loc = uri_find_decoded.index(i) | ||
| 134 | result_decoded[loc] = uri_decoded[loc] | ||
| 135 | if isinstance(i, basestring): | ||
| 136 | if (re.match(i, uri_decoded[loc])): | ||
| 137 | result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) | ||
| 138 | if uri_find_decoded.index(i) == 2: | ||
| 139 | if d: | ||
| 140 | localfn = bb.fetch.localpath(uri, d) | ||
| 141 | if localfn: | ||
| 142 | result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d))) | ||
| 143 | else: | ||
| 144 | return uri | ||
| 145 | return encodeurl(result_decoded) | ||
| 146 | |||
| 147 | methods = [] | ||
| 148 | urldata_cache = {} | ||
| 149 | saved_headrevs = {} | ||
| 150 | |||
| 151 | def fetcher_init(d): | ||
| 152 | """ | ||
| 153 | Called to initialize the fetchers once the configuration data is known. | ||
| 154 | Calls before this must not hit the cache. | ||
| 155 | """ | ||
| 156 | # When to drop SCM head revisions controlled by user policy | ||
| 157 | srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear" | ||
| 158 | if srcrev_policy == "cache": | ||
| 159 | logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) | ||
| 160 | elif srcrev_policy == "clear": | ||
| 161 | logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) | ||
| 162 | revs = persist_data.persist('BB_URI_HEADREVS', d) | ||
| 163 | try: | ||
| 164 | bb.fetch.saved_headrevs = revs.items() | ||
| 165 | except: | ||
| 166 | pass | ||
| 167 | revs.clear() | ||
| 168 | else: | ||
| 169 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) | ||
| 170 | |||
| 171 | for m in methods: | ||
| 172 | if hasattr(m, "init"): | ||
| 173 | m.init(d) | ||
| 174 | |||
| 175 | def fetcher_compare_revisions(d): | ||
| 176 | """ | ||
| 177 | Compare the revisions in the persistant cache with current values and | ||
| 178 | return true/false on whether they've changed. | ||
| 179 | """ | ||
| 180 | |||
| 181 | data = persist_data.persist('BB_URI_HEADREVS', d).items() | ||
| 182 | data2 = bb.fetch.saved_headrevs | ||
| 183 | |||
| 184 | changed = False | ||
| 185 | for key in data: | ||
| 186 | if key not in data2 or data2[key] != data[key]: | ||
| 187 | logger.debug(1, "%s changed", key) | ||
| 188 | changed = True | ||
| 189 | return True | ||
| 190 | else: | ||
| 191 | logger.debug(2, "%s did not change", key) | ||
| 192 | return False | ||
| 193 | |||
| 194 | # Function call order is usually: | ||
| 195 | # 1. init | ||
| 196 | # 2. go | ||
| 197 | # 3. localpaths | ||
| 198 | # localpath can be called at any time | ||
| 199 | |||
| 200 | def init(urls, d, setup = True): | ||
| 201 | urldata = {} | ||
| 202 | |||
| 203 | fn = d.getVar('FILE', True) | ||
| 204 | if fn in urldata_cache: | ||
| 205 | urldata = urldata_cache[fn] | ||
| 206 | |||
| 207 | for url in urls: | ||
| 208 | if url not in urldata: | ||
| 209 | urldata[url] = FetchData(url, d) | ||
| 210 | |||
| 211 | if setup: | ||
| 212 | for url in urldata: | ||
| 213 | if not urldata[url].setup: | ||
| 214 | urldata[url].setup_localpath(d) | ||
| 215 | |||
| 216 | urldata_cache[fn] = urldata | ||
| 217 | return urldata | ||
| 218 | |||
| 219 | def mirror_from_string(data): | ||
| 220 | return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] | ||
| 221 | |||
| 222 | def verify_checksum(u, ud, d): | ||
| 223 | """ | ||
| 224 | verify the MD5 and SHA256 checksum for downloaded src | ||
| 225 | |||
| 226 | return value: | ||
| 227 | - True: checksum matched | ||
| 228 | - False: checksum unmatched | ||
| 229 | |||
| 230 | if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value. | ||
| 231 | if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as | ||
| 232 | matched | ||
| 233 | """ | ||
| 234 | |||
| 235 | if not ud.type in ["http", "https", "ftp", "ftps"]: | ||
| 236 | return | ||
| 237 | |||
| 238 | md5data = bb.utils.md5_file(ud.localpath) | ||
| 239 | sha256data = bb.utils.sha256_file(ud.localpath) | ||
| 240 | |||
| 241 | if (ud.md5_expected == None or ud.sha256_expected == None): | ||
| 242 | logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n' | ||
| 243 | 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', | ||
| 244 | ud.localpath, ud.md5_name, md5data, | ||
| 245 | ud.sha256_name, sha256data) | ||
| 246 | if d.getVar("BB_STRICT_CHECKSUM", True) == "1": | ||
| 247 | raise FetchError("No checksum specified for %s." % u) | ||
| 248 | return | ||
| 249 | |||
| 250 | if (ud.md5_expected != md5data or ud.sha256_expected != sha256data): | ||
| 251 | logger.error('The checksums for "%s" did not match.\n' | ||
| 252 | ' MD5: expected "%s", got "%s"\n' | ||
| 253 | ' SHA256: expected "%s", got "%s"\n', | ||
| 254 | ud.localpath, ud.md5_expected, md5data, | ||
| 255 | ud.sha256_expected, sha256data) | ||
| 256 | raise FetchError("%s checksum mismatch." % u) | ||
| 257 | |||
| 258 | def go(d, urls = None): | ||
| 259 | """ | ||
| 260 | Fetch all urls | ||
| 261 | init must have previously been called | ||
| 262 | """ | ||
| 263 | if not urls: | ||
| 264 | urls = d.getVar("SRC_URI", True).split() | ||
| 265 | urldata = init(urls, d, True) | ||
| 266 | |||
| 267 | for u in urls: | ||
| 268 | ud = urldata[u] | ||
| 269 | m = ud.method | ||
| 270 | localpath = "" | ||
| 271 | |||
| 272 | if not ud.localfile: | ||
| 273 | continue | ||
| 274 | |||
| 275 | lf = bb.utils.lockfile(ud.lockfile) | ||
| 276 | |||
| 277 | if m.try_premirror(u, ud, d): | ||
| 278 | # First try fetching uri, u, from PREMIRRORS | ||
| 279 | mirrors = mirror_from_string(d.getVar('PREMIRRORS', True)) | ||
| 280 | localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d)) | ||
| 281 | elif os.path.exists(ud.localfile): | ||
| 282 | localpath = ud.localfile | ||
| 283 | |||
| 284 | # Need to re-test forcefetch() which will return true if our copy is too old | ||
| 285 | if m.forcefetch(u, ud, d) or not localpath: | ||
| 286 | # Next try fetching from the original uri, u | ||
| 287 | try: | ||
| 288 | m.go(u, ud, d) | ||
| 289 | localpath = ud.localpath | ||
| 290 | except FetchError: | ||
| 291 | # Remove any incomplete file | ||
| 292 | bb.utils.remove(ud.localpath) | ||
| 293 | # Finally, try fetching uri, u, from MIRRORS | ||
| 294 | mirrors = mirror_from_string(d.getVar('MIRRORS', True)) | ||
| 295 | localpath = try_mirrors (d, u, mirrors) | ||
| 296 | if not localpath or not os.path.exists(localpath): | ||
| 297 | raise FetchError("Unable to fetch URL %s from any source." % u) | ||
| 298 | |||
| 299 | ud.localpath = localpath | ||
| 300 | |||
| 301 | if os.path.exists(ud.md5): | ||
| 302 | # Touch the md5 file to show active use of the download | ||
| 303 | try: | ||
| 304 | os.utime(ud.md5, None) | ||
| 305 | except: | ||
| 306 | # Errors aren't fatal here | ||
| 307 | pass | ||
| 308 | else: | ||
| 309 | # Only check the checksums if we've not seen this item before | ||
| 310 | verify_checksum(u, ud, d) | ||
| 311 | Fetch.write_md5sum(u, ud, d) | ||
| 312 | |||
| 313 | bb.utils.unlockfile(lf) | ||
| 314 | |||
| 315 | def checkstatus(d, urls = None): | ||
| 316 | """ | ||
| 317 | Check all urls exist upstream | ||
| 318 | init must have previously been called | ||
| 319 | """ | ||
| 320 | urldata = init([], d, True) | ||
| 321 | |||
| 322 | if not urls: | ||
| 323 | urls = urldata | ||
| 324 | |||
| 325 | for u in urls: | ||
| 326 | ud = urldata[u] | ||
| 327 | m = ud.method | ||
| 328 | logger.debug(1, "Testing URL %s", u) | ||
| 329 | # First try checking uri, u, from PREMIRRORS | ||
| 330 | mirrors = mirror_from_string(d.getVar('PREMIRRORS', True)) | ||
| 331 | ret = try_mirrors(d, u, mirrors, True) | ||
| 332 | if not ret: | ||
| 333 | # Next try checking from the original uri, u | ||
| 334 | try: | ||
| 335 | ret = m.checkstatus(u, ud, d) | ||
| 336 | except: | ||
| 337 | # Finally, try checking uri, u, from MIRRORS | ||
| 338 | mirrors = mirror_from_string(d.getVar('MIRRORS', True)) | ||
| 339 | ret = try_mirrors (d, u, mirrors, True) | ||
| 340 | |||
| 341 | if not ret: | ||
| 342 | raise FetchError("URL %s doesn't work" % u) | ||
| 343 | |||
| 344 | def localpaths(d): | ||
| 345 | """ | ||
| 346 | Return a list of the local filenames, assuming successful fetch | ||
| 347 | """ | ||
| 348 | local = [] | ||
| 349 | urldata = init([], d, True) | ||
| 350 | |||
| 351 | for u in urldata: | ||
| 352 | ud = urldata[u] | ||
| 353 | local.append(ud.localpath) | ||
| 354 | |||
| 355 | return local | ||
| 356 | |||
| 357 | srcrev_internal_call = False | ||
| 358 | |||
| 359 | def get_autorev(d): | ||
| 360 | return get_srcrev(d) | ||
| 361 | |||
| 362 | def get_srcrev(d): | ||
| 363 | """ | ||
| 364 | Return the version string for the current package | ||
| 365 | (usually to be used as PV) | ||
| 366 | Most packages usually only have one SCM so we just pass on the call. | ||
| 367 | In the multi SCM case, we build a value based on SRCREV_FORMAT which must | ||
| 368 | have been set. | ||
| 369 | """ | ||
| 370 | |||
| 371 | # | ||
| 372 | # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which | ||
| 373 | # could translate into a call to here. If it does, we need to catch this | ||
| 374 | # and provide some way so it knows get_srcrev is active instead of being | ||
| 375 | # some number etc. hence the srcrev_internal_call tracking and the magic | ||
| 376 | # "SRCREVINACTION" return value. | ||
| 377 | # | ||
| 378 | # Neater solutions welcome! | ||
| 379 | # | ||
| 380 | if bb.fetch.srcrev_internal_call: | ||
| 381 | return "SRCREVINACTION" | ||
| 382 | |||
| 383 | scms = [] | ||
| 384 | |||
| 385 | # Only call setup_localpath on URIs which supports_srcrev() | ||
| 386 | urldata = init(d.getVar('SRC_URI', True).split(), d, False) | ||
| 387 | for u in urldata: | ||
| 388 | ud = urldata[u] | ||
| 389 | if ud.method.supports_srcrev(): | ||
| 390 | if not ud.setup: | ||
| 391 | ud.setup_localpath(d) | ||
| 392 | scms.append(u) | ||
| 393 | |||
| 394 | if len(scms) == 0: | ||
| 395 | logger.error("SRCREV was used yet no valid SCM was found in SRC_URI") | ||
| 396 | raise ParameterError | ||
| 397 | |||
| 398 | if d.getVar('BB_SRCREV_POLICY', True) != "cache": | ||
| 399 | d.setVar('__BB_DONT_CACHE', '1') | ||
| 400 | |||
| 401 | if len(scms) == 1: | ||
| 402 | return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d) | ||
| 403 | |||
| 404 | # | ||
| 405 | # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT | ||
| 406 | # | ||
| 407 | format = d.getVar('SRCREV_FORMAT', True) | ||
| 408 | if not format: | ||
| 409 | logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") | ||
| 410 | raise ParameterError | ||
| 411 | |||
| 412 | for scm in scms: | ||
| 413 | if 'name' in urldata[scm].parm: | ||
| 414 | name = urldata[scm].parm["name"] | ||
| 415 | rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d) | ||
| 416 | format = format.replace(name, rev) | ||
| 417 | |||
| 418 | return format | ||
| 419 | |||
| 420 | def localpath(url, d, cache = True): | ||
| 421 | """ | ||
| 422 | Called from the parser with cache=False since the cache isn't ready | ||
| 423 | at this point. Also called from classed in OE e.g. patch.bbclass | ||
| 424 | """ | ||
| 425 | ud = init([url], d) | ||
| 426 | if ud[url].method: | ||
| 427 | return ud[url].localpath | ||
| 428 | return url | ||
| 429 | |||
| 430 | def runfetchcmd(cmd, d, quiet = False): | ||
| 431 | """ | ||
| 432 | Run cmd returning the command output | ||
| 433 | Raise an error if interrupted or cmd fails | ||
| 434 | Optionally echo command output to stdout | ||
| 435 | """ | ||
| 436 | |||
| 437 | # Need to export PATH as binary could be in metadata paths | ||
| 438 | # rather than host provided | ||
| 439 | # Also include some other variables. | ||
| 440 | # FIXME: Should really include all export varaiables? | ||
| 441 | exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST', | ||
| 442 | 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy', | ||
| 443 | 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy', | ||
| 444 | 'KRB5CCNAME', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME'] | ||
| 445 | |||
| 446 | for var in exportvars: | ||
| 447 | val = data.getVar(var, d, True) | ||
| 448 | if val: | ||
| 449 | cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) | ||
| 450 | |||
| 451 | logger.debug(1, "Running %s", cmd) | ||
| 452 | |||
| 453 | # redirect stderr to stdout | ||
| 454 | stdout_handle = os.popen(cmd + " 2>&1", "r") | ||
| 455 | output = "" | ||
| 456 | |||
| 457 | while True: | ||
| 458 | line = stdout_handle.readline() | ||
| 459 | if not line: | ||
| 460 | break | ||
| 461 | if not quiet: | ||
| 462 | print(line, end=' ') | ||
| 463 | output += line | ||
| 464 | |||
| 465 | status = stdout_handle.close() or 0 | ||
| 466 | signal = status >> 8 | ||
| 467 | exitstatus = status & 0xff | ||
| 468 | |||
| 469 | if signal: | ||
| 470 | raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output)) | ||
| 471 | elif status != 0: | ||
| 472 | raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output)) | ||
| 473 | |||
| 474 | return output | ||
| 475 | |||
| 476 | def try_mirrors(d, uri, mirrors, check = False, force = False): | ||
| 477 | """ | ||
| 478 | Try to use a mirrored version of the sources. | ||
| 479 | This method will be automatically called before the fetchers go. | ||
| 480 | |||
| 481 | d Is a bb.data instance | ||
| 482 | uri is the original uri we're trying to download | ||
| 483 | mirrors is the list of mirrors we're going to try | ||
| 484 | """ | ||
| 485 | fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri)) | ||
| 486 | if not check and os.access(fpath, os.R_OK) and not force: | ||
| 487 | logger.debug(1, "%s already exists, skipping checkout.", fpath) | ||
| 488 | return fpath | ||
| 489 | |||
| 490 | ld = d.createCopy() | ||
| 491 | for (find, replace) in mirrors: | ||
| 492 | newuri = uri_replace(uri, find, replace, ld) | ||
| 493 | if newuri != uri: | ||
| 494 | try: | ||
| 495 | ud = FetchData(newuri, ld) | ||
| 496 | except bb.fetch.NoMethodError: | ||
| 497 | logger.debug(1, "No method for %s", uri) | ||
| 498 | continue | ||
| 499 | |||
| 500 | ud.setup_localpath(ld) | ||
| 501 | |||
| 502 | try: | ||
| 503 | if check: | ||
| 504 | found = ud.method.checkstatus(newuri, ud, ld) | ||
| 505 | if found: | ||
| 506 | return found | ||
| 507 | else: | ||
| 508 | ud.method.go(newuri, ud, ld) | ||
| 509 | return ud.localpath | ||
| 510 | except (bb.fetch.MissingParameterError, | ||
| 511 | bb.fetch.FetchError, | ||
| 512 | bb.fetch.MD5SumError): | ||
| 513 | import sys | ||
| 514 | (type, value, traceback) = sys.exc_info() | ||
| 515 | logger.debug(2, "Mirror fetch failure: %s", value) | ||
| 516 | bb.utils.remove(ud.localpath) | ||
| 517 | continue | ||
| 518 | return None | ||
| 519 | |||
| 520 | |||
| 521 | class FetchData(object): | ||
| 522 | """ | ||
| 523 | A class which represents the fetcher state for a given URI. | ||
| 524 | """ | ||
| 525 | def __init__(self, url, d): | ||
| 526 | self.localfile = "" | ||
| 527 | (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d)) | ||
| 528 | self.date = Fetch.getSRCDate(self, d) | ||
| 529 | self.url = url | ||
| 530 | if not self.user and "user" in self.parm: | ||
| 531 | self.user = self.parm["user"] | ||
| 532 | if not self.pswd and "pswd" in self.parm: | ||
| 533 | self.pswd = self.parm["pswd"] | ||
| 534 | self.setup = False | ||
| 535 | |||
| 536 | if "name" in self.parm: | ||
| 537 | self.md5_name = "%s.md5sum" % self.parm["name"] | ||
| 538 | self.sha256_name = "%s.sha256sum" % self.parm["name"] | ||
| 539 | else: | ||
| 540 | self.md5_name = "md5sum" | ||
| 541 | self.sha256_name = "sha256sum" | ||
| 542 | self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name) | ||
| 543 | self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name) | ||
| 544 | |||
| 545 | for m in methods: | ||
| 546 | if m.supports(url, self, d): | ||
| 547 | self.method = m | ||
| 548 | return | ||
| 549 | raise NoMethodError("Missing implementation for url %s" % url) | ||
| 550 | |||
| 551 | def setup_localpath(self, d): | ||
| 552 | self.setup = True | ||
| 553 | if "localpath" in self.parm: | ||
| 554 | # if user sets localpath for file, use it instead. | ||
| 555 | self.localpath = self.parm["localpath"] | ||
| 556 | self.basename = os.path.basename(self.localpath) | ||
| 557 | else: | ||
| 558 | premirrors = d.getVar('PREMIRRORS', True) | ||
| 559 | local = "" | ||
| 560 | if premirrors and self.url: | ||
| 561 | aurl = self.url.split(";")[0] | ||
| 562 | mirrors = mirror_from_string(premirrors) | ||
| 563 | for (find, replace) in mirrors: | ||
| 564 | if replace.startswith("file://"): | ||
| 565 | path = aurl.split("://")[1] | ||
| 566 | path = path.split(";")[0] | ||
| 567 | local = replace.split("://")[1] + os.path.basename(path) | ||
| 568 | if local == aurl or not os.path.exists(local) or os.path.isdir(local): | ||
| 569 | local = "" | ||
| 570 | self.localpath = local | ||
| 571 | if not local: | ||
| 572 | try: | ||
| 573 | bb.fetch.srcrev_internal_call = True | ||
| 574 | self.localpath = self.method.localpath(self.url, self, d) | ||
| 575 | finally: | ||
| 576 | bb.fetch.srcrev_internal_call = False | ||
| 577 | # We have to clear data's internal caches since the cached value of SRCREV is now wrong. | ||
| 578 | # Horrible... | ||
| 579 | bb.data.delVar("ISHOULDNEVEREXIST", d) | ||
| 580 | |||
| 581 | if self.localpath is not None: | ||
| 582 | # Note: These files should always be in DL_DIR whereas localpath may not be. | ||
| 583 | basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d) | ||
| 584 | self.md5 = basepath + '.md5' | ||
| 585 | self.lockfile = basepath + '.lock' | ||
| 586 | |||
| 587 | |||
| 588 | class Fetch(object): | ||
| 589 | """Base class for 'fetch'ing data""" | ||
| 590 | |||
| 591 | def __init__(self, urls = []): | ||
| 592 | self.urls = [] | ||
| 593 | |||
| 594 | def supports(self, url, urldata, d): | ||
| 595 | """ | ||
| 596 | Check to see if this fetch class supports a given url. | ||
| 597 | """ | ||
| 598 | return 0 | ||
| 599 | |||
| 600 | def localpath(self, url, urldata, d): | ||
| 601 | """ | ||
| 602 | Return the local filename of a given url assuming a successful fetch. | ||
| 603 | Can also setup variables in urldata for use in go (saving code duplication | ||
| 604 | and duplicate code execution) | ||
| 605 | """ | ||
| 606 | return url | ||
| 607 | def _strip_leading_slashes(self, relpath): | ||
| 608 | """ | ||
| 609 | Remove leading slash as os.path.join can't cope | ||
| 610 | """ | ||
| 611 | while os.path.isabs(relpath): | ||
| 612 | relpath = relpath[1:] | ||
| 613 | return relpath | ||
| 614 | |||
| 615 | def setUrls(self, urls): | ||
| 616 | self.__urls = urls | ||
| 617 | |||
| 618 | def getUrls(self): | ||
| 619 | return self.__urls | ||
| 620 | |||
| 621 | urls = property(getUrls, setUrls, None, "Urls property") | ||
| 622 | |||
| 623 | def forcefetch(self, url, urldata, d): | ||
| 624 | """ | ||
| 625 | Force a fetch, even if localpath exists? | ||
| 626 | """ | ||
| 627 | return False | ||
| 628 | |||
| 629 | def supports_srcrev(self): | ||
| 630 | """ | ||
| 631 | The fetcher supports auto source revisions (SRCREV) | ||
| 632 | """ | ||
| 633 | return False | ||
| 634 | |||
| 635 | def go(self, url, urldata, d): | ||
| 636 | """ | ||
| 637 | Fetch urls | ||
| 638 | Assumes localpath was called first | ||
| 639 | """ | ||
| 640 | raise NoMethodError("Missing implementation for url") | ||
| 641 | |||
| 642 | def try_premirror(self, url, urldata, d): | ||
| 643 | """ | ||
| 644 | Should premirrors be used? | ||
| 645 | """ | ||
| 646 | if urldata.method.forcefetch(url, urldata, d): | ||
| 647 | return True | ||
| 648 | elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile): | ||
| 649 | return False | ||
| 650 | else: | ||
| 651 | return True | ||
| 652 | |||
| 653 | def checkstatus(self, url, urldata, d): | ||
| 654 | """ | ||
| 655 | Check the status of a URL | ||
| 656 | Assumes localpath was called first | ||
| 657 | """ | ||
| 658 | logger.info("URL %s could not be checked for status since no method exists.", url) | ||
| 659 | return True | ||
| 660 | |||
| 661 | def getSRCDate(urldata, d): | ||
| 662 | """ | ||
| 663 | Return the SRC Date for the component | ||
| 664 | |||
| 665 | d the bb.data module | ||
| 666 | """ | ||
| 667 | if "srcdate" in urldata.parm: | ||
| 668 | return urldata.parm['srcdate'] | ||
| 669 | |||
| 670 | pn = data.getVar("PN", d, 1) | ||
| 671 | |||
| 672 | if pn: | ||
| 673 | return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
| 674 | |||
| 675 | return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
| 676 | getSRCDate = staticmethod(getSRCDate) | ||
| 677 | |||
| 678 | def srcrev_internal_helper(ud, d): | ||
| 679 | """ | ||
| 680 | Return: | ||
| 681 | a) a source revision if specified | ||
| 682 | b) True if auto srcrev is in action | ||
| 683 | c) False otherwise | ||
| 684 | """ | ||
| 685 | |||
| 686 | if 'rev' in ud.parm: | ||
| 687 | return ud.parm['rev'] | ||
| 688 | |||
| 689 | if 'tag' in ud.parm: | ||
| 690 | return ud.parm['tag'] | ||
| 691 | |||
| 692 | rev = None | ||
| 693 | if 'name' in ud.parm: | ||
| 694 | pn = data.getVar("PN", d, 1) | ||
| 695 | rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1) | ||
| 696 | if not rev: | ||
| 697 | rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1) | ||
| 698 | if not rev: | ||
| 699 | rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1) | ||
| 700 | if not rev: | ||
| 701 | rev = data.getVar("SRCREV", d, 1) | ||
| 702 | if rev == "INVALID": | ||
| 703 | raise InvalidSRCREV("Please set SRCREV to a valid value") | ||
| 704 | if not rev: | ||
| 705 | return False | ||
| 706 | if rev == "SRCREVINACTION": | ||
| 707 | return True | ||
| 708 | return rev | ||
| 709 | |||
| 710 | srcrev_internal_helper = staticmethod(srcrev_internal_helper) | ||
| 711 | |||
| 712 | def localcount_internal_helper(ud, d): | ||
| 713 | """ | ||
| 714 | Return: | ||
| 715 | a) a locked localcount if specified | ||
| 716 | b) None otherwise | ||
| 717 | """ | ||
| 718 | |||
| 719 | localcount = None | ||
| 720 | if 'name' in ud.parm: | ||
| 721 | pn = data.getVar("PN", d, 1) | ||
| 722 | localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1) | ||
| 723 | if not localcount: | ||
| 724 | localcount = data.getVar("LOCALCOUNT", d, 1) | ||
| 725 | return localcount | ||
| 726 | |||
| 727 | localcount_internal_helper = staticmethod(localcount_internal_helper) | ||
| 728 | |||
| 729 | def verify_md5sum(ud, got_sum): | ||
| 730 | """ | ||
| 731 | Verify the md5sum we wanted with the one we got | ||
| 732 | """ | ||
| 733 | wanted_sum = ud.parm.get('md5sum') | ||
| 734 | if not wanted_sum: | ||
| 735 | return True | ||
| 736 | |||
| 737 | return wanted_sum == got_sum | ||
| 738 | verify_md5sum = staticmethod(verify_md5sum) | ||
| 739 | |||
| 740 | def write_md5sum(url, ud, d): | ||
| 741 | md5data = bb.utils.md5_file(ud.localpath) | ||
| 742 | # verify the md5sum | ||
| 743 | if not Fetch.verify_md5sum(ud, md5data): | ||
| 744 | raise MD5SumError(url) | ||
| 745 | |||
| 746 | md5out = file(ud.md5, 'w') | ||
| 747 | md5out.write(md5data) | ||
| 748 | md5out.close() | ||
| 749 | write_md5sum = staticmethod(write_md5sum) | ||
| 750 | |||
| 751 | def latest_revision(self, url, ud, d): | ||
| 752 | """ | ||
| 753 | Look in the cache for the latest revision, if not present ask the SCM. | ||
| 754 | """ | ||
| 755 | if not hasattr(self, "_latest_revision"): | ||
| 756 | raise ParameterError | ||
| 757 | |||
| 758 | revs = persist_data.persist('BB_URI_HEADREVS', d) | ||
| 759 | key = self.generate_revision_key(url, ud, d) | ||
| 760 | try: | ||
| 761 | return revs[key] | ||
| 762 | except KeyError: | ||
| 763 | revs[key] = rev = self._latest_revision(url, ud, d) | ||
| 764 | return rev | ||
| 765 | |||
| 766 | def sortable_revision(self, url, ud, d): | ||
| 767 | """ | ||
| 768 | |||
| 769 | """ | ||
| 770 | if hasattr(self, "_sortable_revision"): | ||
| 771 | return self._sortable_revision(url, ud, d) | ||
| 772 | |||
| 773 | localcounts = persist_data.persist('BB_URI_LOCALCOUNT', d) | ||
| 774 | key = self.generate_revision_key(url, ud, d) | ||
| 775 | |||
| 776 | latest_rev = self._build_revision(url, ud, d) | ||
| 777 | last_rev = localcounts.get(key + '_rev') | ||
| 778 | uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False | ||
| 779 | count = None | ||
| 780 | if uselocalcount: | ||
| 781 | count = Fetch.localcount_internal_helper(ud, d) | ||
| 782 | if count is None: | ||
| 783 | count = localcounts.get(key + '_count') | ||
| 784 | |||
| 785 | if last_rev == latest_rev: | ||
| 786 | return str(count + "+" + latest_rev) | ||
| 787 | |||
| 788 | buildindex_provided = hasattr(self, "_sortable_buildindex") | ||
| 789 | if buildindex_provided: | ||
| 790 | count = self._sortable_buildindex(url, ud, d, latest_rev) | ||
| 791 | |||
| 792 | if count is None: | ||
| 793 | count = "0" | ||
| 794 | elif uselocalcount or buildindex_provided: | ||
| 795 | count = str(count) | ||
| 796 | else: | ||
| 797 | count = str(int(count) + 1) | ||
| 798 | |||
| 799 | localcounts[key + '_rev'] = latest_rev | ||
| 800 | localcounts[key + '_count'] = count | ||
| 801 | |||
| 802 | return str(count + "+" + latest_rev) | ||
| 803 | |||
| 804 | def generate_revision_key(self, url, ud, d): | ||
| 805 | key = self._revision_key(url, ud, d) | ||
| 806 | return "%s-%s" % (key, d.getVar("PN", True) or "") | ||
| 807 | |||
| 808 | from . import cvs | ||
| 809 | from . import git | ||
| 810 | from . import local | ||
| 811 | from . import svn | ||
| 812 | from . import wget | ||
| 813 | from . import svk | ||
| 814 | from . import ssh | ||
| 815 | from . import perforce | ||
| 816 | from . import bzr | ||
| 817 | from . import hg | ||
| 818 | from . import osc | ||
| 819 | from . import repo | ||
| 820 | |||
| 821 | methods.append(local.Local()) | ||
| 822 | methods.append(wget.Wget()) | ||
| 823 | methods.append(svn.Svn()) | ||
| 824 | methods.append(git.Git()) | ||
| 825 | methods.append(cvs.Cvs()) | ||
| 826 | methods.append(svk.Svk()) | ||
| 827 | methods.append(ssh.SSH()) | ||
| 828 | methods.append(perforce.Perforce()) | ||
| 829 | methods.append(bzr.Bzr()) | ||
| 830 | methods.append(hg.Hg()) | ||
| 831 | methods.append(osc.Osc()) | ||
| 832 | methods.append(repo.Repo()) | ||
diff --git a/bitbake/lib/bb/fetch/bzr.py b/bitbake/lib/bb/fetch/bzr.py deleted file mode 100644 index 85a92940e6..0000000000 --- a/bitbake/lib/bb/fetch/bzr.py +++ /dev/null | |||
| @@ -1,148 +0,0 @@ | |||
| 1 | """ | ||
| 2 | BitBake 'Fetch' implementation for bzr. | ||
| 3 | |||
| 4 | """ | ||
| 5 | |||
| 6 | # Copyright (C) 2007 Ross Burton | ||
| 7 | # Copyright (C) 2007 Richard Purdie | ||
| 8 | # | ||
| 9 | # Classes for obtaining upstream sources for the | ||
| 10 | # BitBake build tools. | ||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | |||
| 26 | import os | ||
| 27 | import sys | ||
| 28 | import logging | ||
| 29 | import bb | ||
| 30 | from bb import data | ||
| 31 | from bb.fetch import Fetch, FetchError, runfetchcmd, logger | ||
| 32 | |||
| 33 | class Bzr(Fetch): | ||
| 34 | def supports(self, url, ud, d): | ||
| 35 | return ud.type in ['bzr'] | ||
| 36 | |||
| 37 | def localpath (self, url, ud, d): | ||
| 38 | |||
| 39 | # Create paths to bzr checkouts | ||
| 40 | relpath = self._strip_leading_slashes(ud.path) | ||
| 41 | ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) | ||
| 42 | |||
| 43 | revision = Fetch.srcrev_internal_helper(ud, d) | ||
| 44 | if revision is True: | ||
| 45 | ud.revision = self.latest_revision(url, ud, d) | ||
| 46 | elif revision: | ||
| 47 | ud.revision = revision | ||
| 48 | |||
| 49 | if not ud.revision: | ||
| 50 | ud.revision = self.latest_revision(url, ud, d) | ||
| 51 | |||
| 52 | ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) | ||
| 53 | |||
| 54 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 55 | |||
| 56 | def _buildbzrcommand(self, ud, d, command): | ||
| 57 | """ | ||
| 58 | Build up an bzr commandline based on ud | ||
| 59 | command is "fetch", "update", "revno" | ||
| 60 | """ | ||
| 61 | |||
| 62 | basecmd = data.expand('${FETCHCMD_bzr}', d) | ||
| 63 | |||
| 64 | proto = ud.parm.get('proto', 'http') | ||
| 65 | |||
| 66 | bzrroot = ud.host + ud.path | ||
| 67 | |||
| 68 | options = [] | ||
| 69 | |||
| 70 | if command == "revno": | ||
| 71 | bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) | ||
| 72 | else: | ||
| 73 | if ud.revision: | ||
| 74 | options.append("-r %s" % ud.revision) | ||
| 75 | |||
| 76 | if command == "fetch": | ||
| 77 | bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) | ||
| 78 | elif command == "update": | ||
| 79 | bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options)) | ||
| 80 | else: | ||
| 81 | raise FetchError("Invalid bzr command %s" % command) | ||
| 82 | |||
| 83 | return bzrcmd | ||
| 84 | |||
| 85 | def go(self, loc, ud, d): | ||
| 86 | """Fetch url""" | ||
| 87 | |||
| 88 | if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): | ||
| 89 | bzrcmd = self._buildbzrcommand(ud, d, "update") | ||
| 90 | logger.debug(1, "BZR Update %s", loc) | ||
| 91 | os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) | ||
| 92 | runfetchcmd(bzrcmd, d) | ||
| 93 | else: | ||
| 94 | bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) | ||
| 95 | bzrcmd = self._buildbzrcommand(ud, d, "fetch") | ||
| 96 | logger.debug(1, "BZR Checkout %s", loc) | ||
| 97 | bb.utils.mkdirhier(ud.pkgdir) | ||
| 98 | os.chdir(ud.pkgdir) | ||
| 99 | logger.debug(1, "Running %s", bzrcmd) | ||
| 100 | runfetchcmd(bzrcmd, d) | ||
| 101 | |||
| 102 | os.chdir(ud.pkgdir) | ||
| 103 | |||
| 104 | scmdata = ud.parm.get("scmdata", "") | ||
| 105 | if scmdata == "keep": | ||
| 106 | tar_flags = "" | ||
| 107 | else: | ||
| 108 | tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" | ||
| 109 | |||
| 110 | # tar them up to a defined filename | ||
| 111 | try: | ||
| 112 | runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d) | ||
| 113 | except: | ||
| 114 | t, v, tb = sys.exc_info() | ||
| 115 | try: | ||
| 116 | os.unlink(ud.localpath) | ||
| 117 | except OSError: | ||
| 118 | pass | ||
| 119 | raise t, v, tb | ||
| 120 | |||
| 121 | def supports_srcrev(self): | ||
| 122 | return True | ||
| 123 | |||
| 124 | def _revision_key(self, url, ud, d): | ||
| 125 | """ | ||
| 126 | Return a unique key for the url | ||
| 127 | """ | ||
| 128 | return "bzr:" + ud.pkgdir | ||
| 129 | |||
| 130 | def _latest_revision(self, url, ud, d): | ||
| 131 | """ | ||
| 132 | Return the latest upstream revision number | ||
| 133 | """ | ||
| 134 | logger.debug(2, "BZR fetcher hitting network for %s", url) | ||
| 135 | |||
| 136 | output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) | ||
| 137 | |||
| 138 | return output.strip() | ||
| 139 | |||
| 140 | def _sortable_revision(self, url, ud, d): | ||
| 141 | """ | ||
| 142 | Return a sortable revision number which in our case is the revision number | ||
| 143 | """ | ||
| 144 | |||
| 145 | return self._build_revision(url, ud, d) | ||
| 146 | |||
| 147 | def _build_revision(self, url, ud, d): | ||
| 148 | return ud.revision | ||
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py deleted file mode 100644 index 64450afc29..0000000000 --- a/bitbake/lib/bb/fetch/cvs.py +++ /dev/null | |||
| @@ -1,172 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | #Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | # | ||
| 28 | |||
| 29 | import os | ||
| 30 | import logging | ||
| 31 | import bb | ||
| 32 | from bb import data | ||
| 33 | from bb.fetch import Fetch, FetchError, MissingParameterError, logger | ||
| 34 | |||
| 35 | class Cvs(Fetch): | ||
| 36 | """ | ||
| 37 | Class to fetch a module or modules from cvs repositories | ||
| 38 | """ | ||
| 39 | def supports(self, url, ud, d): | ||
| 40 | """ | ||
| 41 | Check to see if a given url can be fetched with cvs. | ||
| 42 | """ | ||
| 43 | return ud.type in ['cvs'] | ||
| 44 | |||
| 45 | def localpath(self, url, ud, d): | ||
| 46 | if not "module" in ud.parm: | ||
| 47 | raise MissingParameterError("cvs method needs a 'module' parameter") | ||
| 48 | ud.module = ud.parm["module"] | ||
| 49 | |||
| 50 | ud.tag = ud.parm.get('tag', "") | ||
| 51 | |||
| 52 | # Override the default date in certain cases | ||
| 53 | if 'date' in ud.parm: | ||
| 54 | ud.date = ud.parm['date'] | ||
| 55 | elif ud.tag: | ||
| 56 | ud.date = "" | ||
| 57 | |||
| 58 | norecurse = '' | ||
| 59 | if 'norecurse' in ud.parm: | ||
| 60 | norecurse = '_norecurse' | ||
| 61 | |||
| 62 | fullpath = '' | ||
| 63 | if 'fullpath' in ud.parm: | ||
| 64 | fullpath = '_fullpath' | ||
| 65 | |||
| 66 | ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) | ||
| 67 | |||
| 68 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 69 | |||
| 70 | def forcefetch(self, url, ud, d): | ||
| 71 | if (ud.date == "now"): | ||
| 72 | return True | ||
| 73 | return False | ||
| 74 | |||
| 75 | def go(self, loc, ud, d): | ||
| 76 | |||
| 77 | method = ud.parm.get('method', 'pserver') | ||
| 78 | localdir = ud.parm.get('localdir', ud.module) | ||
| 79 | cvs_port = ud.parm.get('port', '') | ||
| 80 | |||
| 81 | cvs_rsh = None | ||
| 82 | if method == "ext": | ||
| 83 | if "rsh" in ud.parm: | ||
| 84 | cvs_rsh = ud.parm["rsh"] | ||
| 85 | |||
| 86 | if method == "dir": | ||
| 87 | cvsroot = ud.path | ||
| 88 | else: | ||
| 89 | cvsroot = ":" + method | ||
| 90 | cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True) | ||
| 91 | if cvsproxyhost: | ||
| 92 | cvsroot += ";proxy=" + cvsproxyhost | ||
| 93 | cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True) | ||
| 94 | if cvsproxyport: | ||
| 95 | cvsroot += ";proxyport=" + cvsproxyport | ||
| 96 | cvsroot += ":" + ud.user | ||
| 97 | if ud.pswd: | ||
| 98 | cvsroot += ":" + ud.pswd | ||
| 99 | cvsroot += "@" + ud.host + ":" + cvs_port + ud.path | ||
| 100 | |||
| 101 | options = [] | ||
| 102 | if 'norecurse' in ud.parm: | ||
| 103 | options.append("-l") | ||
| 104 | if ud.date: | ||
| 105 | # treat YYYYMMDDHHMM specially for CVS | ||
| 106 | if len(ud.date) == 12: | ||
| 107 | options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12])) | ||
| 108 | else: | ||
| 109 | options.append("-D \"%s UTC\"" % ud.date) | ||
| 110 | if ud.tag: | ||
| 111 | options.append("-r %s" % ud.tag) | ||
| 112 | |||
| 113 | localdata = data.createCopy(d) | ||
| 114 | data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) | ||
| 115 | data.update_data(localdata) | ||
| 116 | |||
| 117 | data.setVar('CVSROOT', cvsroot, localdata) | ||
| 118 | data.setVar('CVSCOOPTS', " ".join(options), localdata) | ||
| 119 | data.setVar('CVSMODULE', ud.module, localdata) | ||
| 120 | cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
| 121 | cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) | ||
| 122 | |||
| 123 | if cvs_rsh: | ||
| 124 | cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) | ||
| 125 | cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) | ||
| 126 | |||
| 127 | # create module directory | ||
| 128 | logger.debug(2, "Fetch: checking for module directory") | ||
| 129 | pkg = data.expand('${PN}', d) | ||
| 130 | pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) | ||
| 131 | moddir = os.path.join(pkgdir, localdir) | ||
| 132 | if os.access(os.path.join(moddir, 'CVS'), os.R_OK): | ||
| 133 | logger.info("Update " + loc) | ||
| 134 | # update sources there | ||
| 135 | os.chdir(moddir) | ||
| 136 | myret = os.system(cvsupdatecmd) | ||
| 137 | else: | ||
| 138 | logger.info("Fetch " + loc) | ||
| 139 | # check out sources there | ||
| 140 | bb.utils.mkdirhier(pkgdir) | ||
| 141 | os.chdir(pkgdir) | ||
| 142 | logger.debug(1, "Running %s", cvscmd) | ||
| 143 | myret = os.system(cvscmd) | ||
| 144 | |||
| 145 | if myret != 0 or not os.access(moddir, os.R_OK): | ||
| 146 | try: | ||
| 147 | os.rmdir(moddir) | ||
| 148 | except OSError: | ||
| 149 | pass | ||
| 150 | raise FetchError(ud.module) | ||
| 151 | |||
| 152 | scmdata = ud.parm.get("scmdata", "") | ||
| 153 | if scmdata == "keep": | ||
| 154 | tar_flags = "" | ||
| 155 | else: | ||
| 156 | tar_flags = "--exclude 'CVS'" | ||
| 157 | |||
| 158 | # tar them up to a defined filename | ||
| 159 | if 'fullpath' in ud.parm: | ||
| 160 | os.chdir(pkgdir) | ||
| 161 | myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)) | ||
| 162 | else: | ||
| 163 | os.chdir(moddir) | ||
| 164 | os.chdir('..') | ||
| 165 | myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))) | ||
| 166 | |||
| 167 | if myret != 0: | ||
| 168 | try: | ||
| 169 | os.unlink(ud.localpath) | ||
| 170 | except OSError: | ||
| 171 | pass | ||
| 172 | raise FetchError(ud.module) | ||
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py deleted file mode 100644 index 7160919d5a..0000000000 --- a/bitbake/lib/bb/fetch/git.py +++ /dev/null | |||
| @@ -1,339 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' git implementation | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | #Copyright (C) 2005 Richard Purdie | ||
| 9 | # | ||
| 10 | # This program is free software; you can redistribute it and/or modify | ||
| 11 | # it under the terms of the GNU General Public License version 2 as | ||
| 12 | # published by the Free Software Foundation. | ||
| 13 | # | ||
| 14 | # This program is distributed in the hope that it will be useful, | ||
| 15 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 16 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 17 | # GNU General Public License for more details. | ||
| 18 | # | ||
| 19 | # You should have received a copy of the GNU General Public License along | ||
| 20 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 21 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 22 | |||
| 23 | import os | ||
| 24 | import bb | ||
| 25 | import bb.persist_data | ||
| 26 | from bb import data | ||
| 27 | from bb.fetch import Fetch | ||
| 28 | from bb.fetch import runfetchcmd | ||
| 29 | from bb.fetch import logger | ||
| 30 | |||
| 31 | class Git(Fetch): | ||
| 32 | """Class to fetch a module or modules from git repositories""" | ||
| 33 | def init(self, d): | ||
| 34 | # | ||
| 35 | # Only enable _sortable revision if the key is set | ||
| 36 | # | ||
| 37 | if d.getVar("BB_GIT_CLONE_FOR_SRCREV", True): | ||
| 38 | self._sortable_buildindex = self._sortable_buildindex_disabled | ||
| 39 | def supports(self, url, ud, d): | ||
| 40 | """ | ||
| 41 | Check to see if a given url can be fetched with git. | ||
| 42 | """ | ||
| 43 | return ud.type in ['git'] | ||
| 44 | |||
| 45 | def localpath(self, url, ud, d): | ||
| 46 | |||
| 47 | if 'protocol' in ud.parm: | ||
| 48 | ud.proto = ud.parm['protocol'] | ||
| 49 | elif not ud.host: | ||
| 50 | ud.proto = 'file' | ||
| 51 | else: | ||
| 52 | ud.proto = "rsync" | ||
| 53 | |||
| 54 | ud.branch = ud.parm.get("branch", "master") | ||
| 55 | |||
| 56 | gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.')) | ||
| 57 | ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname) | ||
| 58 | ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) | ||
| 59 | |||
| 60 | tag = Fetch.srcrev_internal_helper(ud, d) | ||
| 61 | if tag is True: | ||
| 62 | ud.tag = self.latest_revision(url, ud, d) | ||
| 63 | elif tag: | ||
| 64 | ud.tag = tag | ||
| 65 | |||
| 66 | if not ud.tag or ud.tag == "master": | ||
| 67 | ud.tag = self.latest_revision(url, ud, d) | ||
| 68 | |||
| 69 | subdir = ud.parm.get("subpath", "") | ||
| 70 | if subdir != "": | ||
| 71 | if subdir.endswith("/"): | ||
| 72 | subdir = subdir[:-1] | ||
| 73 | subdirpath = os.path.join(ud.path, subdir); | ||
| 74 | else: | ||
| 75 | subdirpath = ud.path; | ||
| 76 | |||
| 77 | if 'fullclone' in ud.parm: | ||
| 78 | ud.localfile = ud.mirrortarball | ||
| 79 | else: | ||
| 80 | ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d) | ||
| 81 | |||
| 82 | ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git" | ||
| 83 | |||
| 84 | if 'noclone' in ud.parm: | ||
| 85 | ud.localfile = None | ||
| 86 | return None | ||
| 87 | |||
| 88 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 89 | |||
| 90 | def forcefetch(self, url, ud, d): | ||
| 91 | if 'fullclone' in ud.parm: | ||
| 92 | return True | ||
| 93 | if 'noclone' in ud.parm: | ||
| 94 | return False | ||
| 95 | if os.path.exists(ud.localpath): | ||
| 96 | return False | ||
| 97 | if not self._contains_ref(ud.tag, d): | ||
| 98 | return True | ||
| 99 | return False | ||
| 100 | |||
| 101 | def try_premirror(self, u, ud, d): | ||
| 102 | if 'noclone' in ud.parm: | ||
| 103 | return False | ||
| 104 | if os.path.exists(ud.clonedir): | ||
| 105 | return False | ||
| 106 | if os.path.exists(ud.localpath): | ||
| 107 | return False | ||
| 108 | |||
| 109 | return True | ||
| 110 | |||
| 111 | def go(self, loc, ud, d): | ||
| 112 | """Fetch url""" | ||
| 113 | |||
| 114 | if ud.user: | ||
| 115 | username = ud.user + '@' | ||
| 116 | else: | ||
| 117 | username = "" | ||
| 118 | |||
| 119 | repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball) | ||
| 120 | |||
| 121 | |||
| 122 | coname = '%s' % (ud.tag) | ||
| 123 | codir = os.path.join(ud.clonedir, coname) | ||
| 124 | |||
| 125 | # If we have no existing clone and no mirror tarball, try and obtain one | ||
| 126 | if not os.path.exists(ud.clonedir) and not os.path.exists(repofile): | ||
| 127 | try: | ||
| 128 | Fetch.try_mirrors(ud.mirrortarball) | ||
| 129 | except: | ||
| 130 | pass | ||
| 131 | |||
| 132 | # If the checkout doesn't exist and the mirror tarball does, extract it | ||
| 133 | if not os.path.exists(ud.clonedir) and os.path.exists(repofile): | ||
| 134 | bb.utils.mkdirhier(ud.clonedir) | ||
| 135 | os.chdir(ud.clonedir) | ||
| 136 | runfetchcmd("tar -xzf %s" % (repofile), d) | ||
| 137 | |||
| 138 | # If the repo still doesn't exist, fallback to cloning it | ||
| 139 | if not os.path.exists(ud.clonedir): | ||
| 140 | runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d) | ||
| 141 | |||
| 142 | os.chdir(ud.clonedir) | ||
| 143 | # Update the checkout if needed | ||
| 144 | if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm: | ||
| 145 | # Remove all but the .git directory | ||
| 146 | runfetchcmd("rm * -Rf", d) | ||
| 147 | if 'fullclone' in ud.parm: | ||
| 148 | runfetchcmd("%s fetch --all" % (ud.basecmd), d) | ||
| 149 | else: | ||
| 150 | runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d) | ||
| 151 | runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d) | ||
| 152 | runfetchcmd("%s prune-packed" % ud.basecmd, d) | ||
| 153 | runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d) | ||
| 154 | |||
| 155 | # Generate a mirror tarball if needed | ||
| 156 | os.chdir(ud.clonedir) | ||
| 157 | mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) | ||
| 158 | if mirror_tarballs != "0" or 'fullclone' in ud.parm: | ||
| 159 | logger.info("Creating tarball of git repository") | ||
| 160 | runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d) | ||
| 161 | |||
| 162 | if 'fullclone' in ud.parm: | ||
| 163 | return | ||
| 164 | |||
| 165 | if os.path.exists(codir): | ||
| 166 | bb.utils.prunedir(codir) | ||
| 167 | |||
| 168 | subdir = ud.parm.get("subpath", "") | ||
| 169 | if subdir != "": | ||
| 170 | if subdir.endswith("/"): | ||
| 171 | subdirbase = os.path.basename(subdir[:-1]) | ||
| 172 | else: | ||
| 173 | subdirbase = os.path.basename(subdir) | ||
| 174 | else: | ||
| 175 | subdirbase = "" | ||
| 176 | |||
| 177 | if subdir != "": | ||
| 178 | readpathspec = ":%s" % (subdir) | ||
| 179 | codir = os.path.join(codir, "git") | ||
| 180 | coprefix = os.path.join(codir, subdirbase, "") | ||
| 181 | else: | ||
| 182 | readpathspec = "" | ||
| 183 | coprefix = os.path.join(codir, "git", "") | ||
| 184 | |||
| 185 | scmdata = ud.parm.get("scmdata", "") | ||
| 186 | if scmdata == "keep": | ||
| 187 | runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d) | ||
| 188 | os.chdir(coprefix) | ||
| 189 | runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d) | ||
| 190 | else: | ||
| 191 | bb.utils.mkdirhier(codir) | ||
| 192 | os.chdir(ud.clonedir) | ||
| 193 | runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d) | ||
| 194 | runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d) | ||
| 195 | |||
| 196 | os.chdir(codir) | ||
| 197 | logger.info("Creating tarball of git checkout") | ||
| 198 | runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d) | ||
| 199 | |||
| 200 | os.chdir(ud.clonedir) | ||
| 201 | bb.utils.prunedir(codir) | ||
| 202 | |||
| 203 | def supports_srcrev(self): | ||
| 204 | return True | ||
| 205 | |||
| 206 | def _contains_ref(self, tag, d): | ||
| 207 | basecmd = data.getVar("FETCHCMD_git", d, True) or "git" | ||
| 208 | output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True) | ||
| 209 | return output.split()[0] != "0" | ||
| 210 | |||
| 211 | def _revision_key(self, url, ud, d, branch=False): | ||
| 212 | """ | ||
| 213 | Return a unique key for the url | ||
| 214 | """ | ||
| 215 | key = 'git:' + ud.host + ud.path.replace('/', '.') | ||
| 216 | if branch: | ||
| 217 | return key + ud.branch | ||
| 218 | else: | ||
| 219 | return key | ||
| 220 | |||
| 221 | def generate_revision_key(self, url, ud, d, branch=False): | ||
| 222 | key = self._revision_key(url, ud, d, branch) | ||
| 223 | return "%s-%s" % (key, d.getVar("PN", True) or "") | ||
| 224 | |||
| 225 | def _latest_revision(self, url, ud, d): | ||
| 226 | """ | ||
| 227 | Compute the HEAD revision for the url | ||
| 228 | """ | ||
| 229 | if ud.user: | ||
| 230 | username = ud.user + '@' | ||
| 231 | else: | ||
| 232 | username = "" | ||
| 233 | |||
| 234 | basecmd = data.getVar("FETCHCMD_git", d, True) or "git" | ||
| 235 | cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch) | ||
| 236 | output = runfetchcmd(cmd, d, True) | ||
| 237 | if not output: | ||
| 238 | raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd)) | ||
| 239 | return output.split()[0] | ||
| 240 | |||
| 241 | def latest_revision(self, url, ud, d): | ||
| 242 | """ | ||
| 243 | Look in the cache for the latest revision, if not present ask the SCM. | ||
| 244 | """ | ||
| 245 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) | ||
| 246 | |||
| 247 | key = self.generate_revision_key(url, ud, d, branch=True) | ||
| 248 | |||
| 249 | try: | ||
| 250 | return revs[key] | ||
| 251 | except KeyError: | ||
| 252 | # Compatibility with old key format, no branch included | ||
| 253 | oldkey = self.generate_revision_key(url, ud, d, branch=False) | ||
| 254 | try: | ||
| 255 | rev = revs[oldkey] | ||
| 256 | except KeyError: | ||
| 257 | rev = self._latest_revision(url, ud, d) | ||
| 258 | else: | ||
| 259 | del revs[oldkey] | ||
| 260 | revs[key] = rev | ||
| 261 | return rev | ||
| 262 | |||
| 263 | def sortable_revision(self, url, ud, d): | ||
| 264 | """ | ||
| 265 | |||
| 266 | """ | ||
| 267 | localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', d) | ||
| 268 | key = self.generate_revision_key(url, ud, d, branch=True) | ||
| 269 | oldkey = self.generate_revision_key(url, ud, d, branch=False) | ||
| 270 | |||
| 271 | latest_rev = self._build_revision(url, ud, d) | ||
| 272 | last_rev = localcounts.get(key + '_rev') | ||
| 273 | if last_rev is None: | ||
| 274 | last_rev = localcounts.get(oldkey + '_rev') | ||
| 275 | if last_rev is not None: | ||
| 276 | del localcounts[oldkey + '_rev'] | ||
| 277 | localcounts[key + '_rev'] = last_rev | ||
| 278 | |||
| 279 | uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False | ||
| 280 | count = None | ||
| 281 | if uselocalcount: | ||
| 282 | count = Fetch.localcount_internal_helper(ud, d) | ||
| 283 | if count is None: | ||
| 284 | count = localcounts.get(key + '_count') | ||
| 285 | if count is None: | ||
| 286 | count = localcounts.get(oldkey + '_count') | ||
| 287 | if count is not None: | ||
| 288 | del localcounts[oldkey + '_count'] | ||
| 289 | localcounts[key + '_count'] = count | ||
| 290 | |||
| 291 | if last_rev == latest_rev: | ||
| 292 | return str(count + "+" + latest_rev) | ||
| 293 | |||
| 294 | buildindex_provided = hasattr(self, "_sortable_buildindex") | ||
| 295 | if buildindex_provided: | ||
| 296 | count = self._sortable_buildindex(url, ud, d, latest_rev) | ||
| 297 | if count is None: | ||
| 298 | count = "0" | ||
| 299 | elif uselocalcount or buildindex_provided: | ||
| 300 | count = str(count) | ||
| 301 | else: | ||
| 302 | count = str(int(count) + 1) | ||
| 303 | |||
| 304 | localcounts[key + '_rev'] = latest_rev | ||
| 305 | localcounts[key + '_count'] = count | ||
| 306 | |||
| 307 | return str(count + "+" + latest_rev) | ||
| 308 | |||
| 309 | def _build_revision(self, url, ud, d): | ||
| 310 | return ud.tag | ||
| 311 | |||
| 312 | def _sortable_buildindex_disabled(self, url, ud, d, rev): | ||
| 313 | """ | ||
| 314 | Return a suitable buildindex for the revision specified. This is done by counting revisions | ||
| 315 | using "git rev-list" which may or may not work in different circumstances. | ||
| 316 | """ | ||
| 317 | |||
| 318 | cwd = os.getcwd() | ||
| 319 | |||
| 320 | # Check if we have the rev already | ||
| 321 | |||
| 322 | if not os.path.exists(ud.clonedir): | ||
| 323 | print("no repo") | ||
| 324 | self.go(None, ud, d) | ||
| 325 | if not os.path.exists(ud.clonedir): | ||
| 326 | logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir) | ||
| 327 | return None | ||
| 328 | |||
| 329 | |||
| 330 | os.chdir(ud.clonedir) | ||
| 331 | if not self._contains_ref(rev, d): | ||
| 332 | self.go(None, ud, d) | ||
| 333 | |||
| 334 | output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True) | ||
| 335 | os.chdir(cwd) | ||
| 336 | |||
| 337 | buildindex = "%s" % output.split()[0] | ||
| 338 | logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev) | ||
| 339 | return buildindex | ||
diff --git a/bitbake/lib/bb/fetch/hg.py b/bitbake/lib/bb/fetch/hg.py deleted file mode 100644 index 2b3aec543b..0000000000 --- a/bitbake/lib/bb/fetch/hg.py +++ /dev/null | |||
| @@ -1,180 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementation for mercurial DRCS (hg). | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | # Copyright (C) 2004 Marcin Juszkiewicz | ||
| 10 | # Copyright (C) 2007 Robert Schuster | ||
| 11 | # | ||
| 12 | # This program is free software; you can redistribute it and/or modify | ||
| 13 | # it under the terms of the GNU General Public License version 2 as | ||
| 14 | # published by the Free Software Foundation. | ||
| 15 | # | ||
| 16 | # This program is distributed in the hope that it will be useful, | ||
| 17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 19 | # GNU General Public License for more details. | ||
| 20 | # | ||
| 21 | # You should have received a copy of the GNU General Public License along | ||
| 22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 24 | # | ||
| 25 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 26 | |||
| 27 | import os | ||
| 28 | import sys | ||
| 29 | import logging | ||
| 30 | import bb | ||
| 31 | from bb import data | ||
| 32 | from bb.fetch import Fetch | ||
| 33 | from bb.fetch import FetchError | ||
| 34 | from bb.fetch import MissingParameterError | ||
| 35 | from bb.fetch import runfetchcmd | ||
| 36 | from bb.fetch import logger | ||
| 37 | |||
| 38 | class Hg(Fetch): | ||
| 39 | """Class to fetch from mercurial repositories""" | ||
| 40 | def supports(self, url, ud, d): | ||
| 41 | """ | ||
| 42 | Check to see if a given url can be fetched with mercurial. | ||
| 43 | """ | ||
| 44 | return ud.type in ['hg'] | ||
| 45 | |||
| 46 | def forcefetch(self, url, ud, d): | ||
| 47 | revTag = ud.parm.get('rev', 'tip') | ||
| 48 | return revTag == "tip" | ||
| 49 | |||
| 50 | def localpath(self, url, ud, d): | ||
| 51 | if not "module" in ud.parm: | ||
| 52 | raise MissingParameterError("hg method needs a 'module' parameter") | ||
| 53 | |||
| 54 | ud.module = ud.parm["module"] | ||
| 55 | |||
| 56 | # Create paths to mercurial checkouts | ||
| 57 | relpath = self._strip_leading_slashes(ud.path) | ||
| 58 | ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath) | ||
| 59 | ud.moddir = os.path.join(ud.pkgdir, ud.module) | ||
| 60 | |||
| 61 | if 'rev' in ud.parm: | ||
| 62 | ud.revision = ud.parm['rev'] | ||
| 63 | else: | ||
| 64 | tag = Fetch.srcrev_internal_helper(ud, d) | ||
| 65 | if tag is True: | ||
| 66 | ud.revision = self.latest_revision(url, ud, d) | ||
| 67 | elif tag: | ||
| 68 | ud.revision = tag | ||
| 69 | else: | ||
| 70 | ud.revision = self.latest_revision(url, ud, d) | ||
| 71 | |||
| 72 | ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) | ||
| 73 | |||
| 74 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 75 | |||
| 76 | def _buildhgcommand(self, ud, d, command): | ||
| 77 | """ | ||
| 78 | Build up an hg commandline based on ud | ||
| 79 | command is "fetch", "update", "info" | ||
| 80 | """ | ||
| 81 | |||
| 82 | basecmd = data.expand('${FETCHCMD_hg}', d) | ||
| 83 | |||
| 84 | proto = ud.parm.get('proto', 'http') | ||
| 85 | |||
| 86 | host = ud.host | ||
| 87 | if proto == "file": | ||
| 88 | host = "/" | ||
| 89 | ud.host = "localhost" | ||
| 90 | |||
| 91 | if not ud.user: | ||
| 92 | hgroot = host + ud.path | ||
| 93 | else: | ||
| 94 | hgroot = ud.user + "@" + host + ud.path | ||
| 95 | |||
| 96 | if command is "info": | ||
| 97 | return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module) | ||
| 98 | |||
| 99 | options = []; | ||
| 100 | if ud.revision: | ||
| 101 | options.append("-r %s" % ud.revision) | ||
| 102 | |||
| 103 | if command is "fetch": | ||
| 104 | cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module) | ||
| 105 | elif command is "pull": | ||
| 106 | # do not pass options list; limiting pull to rev causes the local | ||
| 107 | # repo not to contain it and immediately following "update" command | ||
| 108 | # will crash | ||
| 109 | cmd = "%s pull" % (basecmd) | ||
| 110 | elif command is "update": | ||
| 111 | cmd = "%s update -C %s" % (basecmd, " ".join(options)) | ||
| 112 | else: | ||
| 113 | raise FetchError("Invalid hg command %s" % command) | ||
| 114 | |||
| 115 | return cmd | ||
| 116 | |||
| 117 | def go(self, loc, ud, d): | ||
| 118 | """Fetch url""" | ||
| 119 | |||
| 120 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | ||
| 121 | |||
| 122 | if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): | ||
| 123 | updatecmd = self._buildhgcommand(ud, d, "pull") | ||
| 124 | logger.info("Update " + loc) | ||
| 125 | # update sources there | ||
| 126 | os.chdir(ud.moddir) | ||
| 127 | logger.debug(1, "Running %s", updatecmd) | ||
| 128 | runfetchcmd(updatecmd, d) | ||
| 129 | |||
| 130 | else: | ||
| 131 | fetchcmd = self._buildhgcommand(ud, d, "fetch") | ||
| 132 | logger.info("Fetch " + loc) | ||
| 133 | # check out sources there | ||
| 134 | bb.utils.mkdirhier(ud.pkgdir) | ||
| 135 | os.chdir(ud.pkgdir) | ||
| 136 | logger.debug(1, "Running %s", fetchcmd) | ||
| 137 | runfetchcmd(fetchcmd, d) | ||
| 138 | |||
| 139 | # Even when we clone (fetch), we still need to update as hg's clone | ||
| 140 | # won't checkout the specified revision if its on a branch | ||
| 141 | updatecmd = self._buildhgcommand(ud, d, "update") | ||
| 142 | os.chdir(ud.moddir) | ||
| 143 | logger.debug(1, "Running %s", updatecmd) | ||
| 144 | runfetchcmd(updatecmd, d) | ||
| 145 | |||
| 146 | scmdata = ud.parm.get("scmdata", "") | ||
| 147 | if scmdata == "keep": | ||
| 148 | tar_flags = "" | ||
| 149 | else: | ||
| 150 | tar_flags = "--exclude '.hg' --exclude '.hgrags'" | ||
| 151 | |||
| 152 | os.chdir(ud.pkgdir) | ||
| 153 | try: | ||
| 154 | runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) | ||
| 155 | except: | ||
| 156 | t, v, tb = sys.exc_info() | ||
| 157 | try: | ||
| 158 | os.unlink(ud.localpath) | ||
| 159 | except OSError: | ||
| 160 | pass | ||
| 161 | raise t, v, tb | ||
| 162 | |||
| 163 | def supports_srcrev(self): | ||
| 164 | return True | ||
| 165 | |||
| 166 | def _latest_revision(self, url, ud, d): | ||
| 167 | """ | ||
| 168 | Compute tip revision for the url | ||
| 169 | """ | ||
| 170 | output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) | ||
| 171 | return output.strip() | ||
| 172 | |||
| 173 | def _build_revision(self, url, ud, d): | ||
| 174 | return ud.revision | ||
| 175 | |||
| 176 | def _revision_key(self, url, ud, d): | ||
| 177 | """ | ||
| 178 | Return a unique key for the url | ||
| 179 | """ | ||
| 180 | return "hg:" + ud.moddir | ||
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py deleted file mode 100644 index 6aa9e45768..0000000000 --- a/bitbake/lib/bb/fetch/local.py +++ /dev/null | |||
| @@ -1,73 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | |||
| 28 | import os | ||
| 29 | import bb | ||
| 30 | import bb.utils | ||
| 31 | from bb import data | ||
| 32 | from bb.fetch import Fetch | ||
| 33 | |||
| 34 | class Local(Fetch): | ||
| 35 | def supports(self, url, urldata, d): | ||
| 36 | """ | ||
| 37 | Check to see if a given url represents a local fetch. | ||
| 38 | """ | ||
| 39 | return urldata.type in ['file'] | ||
| 40 | |||
| 41 | def localpath(self, url, urldata, d): | ||
| 42 | """ | ||
| 43 | Return the local filename of a given url assuming a successful fetch. | ||
| 44 | """ | ||
| 45 | path = url.split("://")[1] | ||
| 46 | path = path.split(";")[0] | ||
| 47 | newpath = path | ||
| 48 | if path[0] != "/": | ||
| 49 | filespath = data.getVar('FILESPATH', d, 1) | ||
| 50 | if filespath: | ||
| 51 | newpath = bb.utils.which(filespath, path) | ||
| 52 | if not newpath: | ||
| 53 | filesdir = data.getVar('FILESDIR', d, 1) | ||
| 54 | if filesdir: | ||
| 55 | newpath = os.path.join(filesdir, path) | ||
| 56 | # We don't set localfile as for this fetcher the file is already local! | ||
| 57 | return newpath | ||
| 58 | |||
| 59 | def go(self, url, urldata, d): | ||
| 60 | """Fetch urls (no-op for Local method)""" | ||
| 61 | # no need to fetch local files, we'll deal with them in place. | ||
| 62 | return 1 | ||
| 63 | |||
| 64 | def checkstatus(self, url, urldata, d): | ||
| 65 | """ | ||
| 66 | Check the status of the url | ||
| 67 | """ | ||
| 68 | if urldata.localpath.find("*") != -1: | ||
| 69 | logger.info("URL %s looks like a glob and was therefore not checked.", url) | ||
| 70 | return True | ||
| 71 | if os.path.exists(urldata.localpath): | ||
| 72 | return True | ||
| 73 | return False | ||
diff --git a/bitbake/lib/bb/fetch/osc.py b/bitbake/lib/bb/fetch/osc.py deleted file mode 100644 index 32237b9740..0000000000 --- a/bitbake/lib/bb/fetch/osc.py +++ /dev/null | |||
| @@ -1,143 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | Bitbake "Fetch" implementation for osc (Opensuse build service client). | ||
| 5 | Based on the svn "Fetch" implementation. | ||
| 6 | |||
| 7 | """ | ||
| 8 | |||
| 9 | import os | ||
| 10 | import sys | ||
| 11 | import logging | ||
| 12 | import bb | ||
| 13 | from bb import data | ||
| 14 | from bb import utils | ||
| 15 | from bb.fetch import Fetch | ||
| 16 | from bb.fetch import FetchError | ||
| 17 | from bb.fetch import MissingParameterError | ||
| 18 | from bb.fetch import runfetchcmd | ||
| 19 | |||
| 20 | class Osc(Fetch): | ||
| 21 | """Class to fetch a module or modules from Opensuse build server | ||
| 22 | repositories.""" | ||
| 23 | |||
| 24 | def supports(self, url, ud, d): | ||
| 25 | """ | ||
| 26 | Check to see if a given url can be fetched with osc. | ||
| 27 | """ | ||
| 28 | return ud.type in ['osc'] | ||
| 29 | |||
| 30 | def localpath(self, url, ud, d): | ||
| 31 | if not "module" in ud.parm: | ||
| 32 | raise MissingParameterError("osc method needs a 'module' parameter.") | ||
| 33 | |||
| 34 | ud.module = ud.parm["module"] | ||
| 35 | |||
| 36 | # Create paths to osc checkouts | ||
| 37 | relpath = self._strip_leading_slashes(ud.path) | ||
| 38 | ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host) | ||
| 39 | ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) | ||
| 40 | |||
| 41 | if 'rev' in ud.parm: | ||
| 42 | ud.revision = ud.parm['rev'] | ||
| 43 | else: | ||
| 44 | pv = data.getVar("PV", d, 0) | ||
| 45 | rev = Fetch.srcrev_internal_helper(ud, d) | ||
| 46 | if rev and rev != True: | ||
| 47 | ud.revision = rev | ||
| 48 | else: | ||
| 49 | ud.revision = "" | ||
| 50 | |||
| 51 | ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d) | ||
| 52 | |||
| 53 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 54 | |||
| 55 | def _buildosccommand(self, ud, d, command): | ||
| 56 | """ | ||
| 57 | Build up an ocs commandline based on ud | ||
| 58 | command is "fetch", "update", "info" | ||
| 59 | """ | ||
| 60 | |||
| 61 | basecmd = data.expand('${FETCHCMD_osc}', d) | ||
| 62 | |||
| 63 | proto = ud.parm.get('proto', 'ocs') | ||
| 64 | |||
| 65 | options = [] | ||
| 66 | |||
| 67 | config = "-c %s" % self.generate_config(ud, d) | ||
| 68 | |||
| 69 | if ud.revision: | ||
| 70 | options.append("-r %s" % ud.revision) | ||
| 71 | |||
| 72 | coroot = self._strip_leading_slashes(ud.path) | ||
| 73 | |||
| 74 | if command is "fetch": | ||
| 75 | osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) | ||
| 76 | elif command is "update": | ||
| 77 | osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) | ||
| 78 | else: | ||
| 79 | raise FetchError("Invalid osc command %s" % command) | ||
| 80 | |||
| 81 | return osccmd | ||
| 82 | |||
| 83 | def go(self, loc, ud, d): | ||
| 84 | """ | ||
| 85 | Fetch url | ||
| 86 | """ | ||
| 87 | |||
| 88 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | ||
| 89 | |||
| 90 | if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): | ||
| 91 | oscupdatecmd = self._buildosccommand(ud, d, "update") | ||
| 92 | logger.info("Update "+ loc) | ||
| 93 | # update sources there | ||
| 94 | os.chdir(ud.moddir) | ||
| 95 | logger.debug(1, "Running %s", oscupdatecmd) | ||
| 96 | runfetchcmd(oscupdatecmd, d) | ||
| 97 | else: | ||
| 98 | oscfetchcmd = self._buildosccommand(ud, d, "fetch") | ||
| 99 | logger.info("Fetch " + loc) | ||
| 100 | # check out sources there | ||
| 101 | bb.utils.mkdirhier(ud.pkgdir) | ||
| 102 | os.chdir(ud.pkgdir) | ||
| 103 | logger.debug(1, "Running %s", oscfetchcmd) | ||
| 104 | runfetchcmd(oscfetchcmd, d) | ||
| 105 | |||
| 106 | os.chdir(os.path.join(ud.pkgdir + ud.path)) | ||
| 107 | # tar them up to a defined filename | ||
| 108 | try: | ||
| 109 | runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) | ||
| 110 | except: | ||
| 111 | t, v, tb = sys.exc_info() | ||
| 112 | try: | ||
| 113 | os.unlink(ud.localpath) | ||
| 114 | except OSError: | ||
| 115 | pass | ||
| 116 | raise t, v, tb | ||
| 117 | |||
| 118 | def supports_srcrev(self): | ||
| 119 | return False | ||
| 120 | |||
| 121 | def generate_config(self, ud, d): | ||
| 122 | """ | ||
| 123 | Generate a .oscrc to be used for this run. | ||
| 124 | """ | ||
| 125 | |||
| 126 | config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc") | ||
| 127 | bb.utils.remove(config_path) | ||
| 128 | |||
| 129 | f = open(config_path, 'w') | ||
| 130 | f.write("[general]\n") | ||
| 131 | f.write("apisrv = %s\n" % ud.host) | ||
| 132 | f.write("scheme = http\n") | ||
| 133 | f.write("su-wrapper = su -c\n") | ||
| 134 | f.write("build-root = %s\n" % data.expand('${WORKDIR}', d)) | ||
| 135 | f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n") | ||
| 136 | f.write("extra-pkgs = gzip\n") | ||
| 137 | f.write("\n") | ||
| 138 | f.write("[%s]\n" % ud.host) | ||
| 139 | f.write("user = %s\n" % ud.parm["user"]) | ||
| 140 | f.write("pass = %s\n" % ud.parm["pswd"]) | ||
| 141 | f.close() | ||
| 142 | |||
| 143 | return config_path | ||
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py deleted file mode 100644 index e933d27603..0000000000 --- a/bitbake/lib/bb/fetch/perforce.py +++ /dev/null | |||
| @@ -1,206 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | |||
| 28 | from future_builtins import zip | ||
| 29 | import os | ||
| 30 | import logging | ||
| 31 | import bb | ||
| 32 | from bb import data | ||
| 33 | from bb.fetch import Fetch | ||
| 34 | from bb.fetch import FetchError | ||
| 35 | from bb.fetch import logger | ||
| 36 | |||
| 37 | class Perforce(Fetch): | ||
| 38 | def supports(self, url, ud, d): | ||
| 39 | return ud.type in ['p4'] | ||
| 40 | |||
| 41 | def doparse(url, d): | ||
| 42 | parm = {} | ||
| 43 | path = url.split("://")[1] | ||
| 44 | delim = path.find("@"); | ||
| 45 | if delim != -1: | ||
| 46 | (user, pswd, host, port) = path.split('@')[0].split(":") | ||
| 47 | path = path.split('@')[1] | ||
| 48 | else: | ||
| 49 | (host, port) = data.getVar('P4PORT', d).split(':') | ||
| 50 | user = "" | ||
| 51 | pswd = "" | ||
| 52 | |||
| 53 | if path.find(";") != -1: | ||
| 54 | keys=[] | ||
| 55 | values=[] | ||
| 56 | plist = path.split(';') | ||
| 57 | for item in plist: | ||
| 58 | if item.count('='): | ||
| 59 | (key, value) = item.split('=') | ||
| 60 | keys.append(key) | ||
| 61 | values.append(value) | ||
| 62 | |||
| 63 | parm = dict(zip(keys, values)) | ||
| 64 | path = "//" + path.split(';')[0] | ||
| 65 | host += ":%s" % (port) | ||
| 66 | parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) | ||
| 67 | |||
| 68 | return host, path, user, pswd, parm | ||
| 69 | doparse = staticmethod(doparse) | ||
| 70 | |||
| 71 | def getcset(d, depot, host, user, pswd, parm): | ||
| 72 | p4opt = "" | ||
| 73 | if "cset" in parm: | ||
| 74 | return parm["cset"]; | ||
| 75 | if user: | ||
| 76 | p4opt += " -u %s" % (user) | ||
| 77 | if pswd: | ||
| 78 | p4opt += " -P %s" % (pswd) | ||
| 79 | if host: | ||
| 80 | p4opt += " -p %s" % (host) | ||
| 81 | |||
| 82 | p4date = data.getVar("P4DATE", d, 1) | ||
| 83 | if "revision" in parm: | ||
| 84 | depot += "#%s" % (parm["revision"]) | ||
| 85 | elif "label" in parm: | ||
| 86 | depot += "@%s" % (parm["label"]) | ||
| 87 | elif p4date: | ||
| 88 | depot += "@%s" % (p4date) | ||
| 89 | |||
| 90 | p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1) | ||
| 91 | logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot) | ||
| 92 | p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot)) | ||
| 93 | cset = p4file.readline().strip() | ||
| 94 | logger.debug(1, "READ %s", cset) | ||
| 95 | if not cset: | ||
| 96 | return -1 | ||
| 97 | |||
| 98 | return cset.split(' ')[1] | ||
| 99 | getcset = staticmethod(getcset) | ||
| 100 | |||
| 101 | def localpath(self, url, ud, d): | ||
| 102 | |||
| 103 | (host, path, user, pswd, parm) = Perforce.doparse(url, d) | ||
| 104 | |||
| 105 | # If a label is specified, we use that as our filename | ||
| 106 | |||
| 107 | if "label" in parm: | ||
| 108 | ud.localfile = "%s.tar.gz" % (parm["label"]) | ||
| 109 | return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) | ||
| 110 | |||
| 111 | base = path | ||
| 112 | which = path.find('/...') | ||
| 113 | if which != -1: | ||
| 114 | base = path[:which] | ||
| 115 | |||
| 116 | base = self._strip_leading_slashes(base) | ||
| 117 | |||
| 118 | cset = Perforce.getcset(d, path, host, user, pswd, parm) | ||
| 119 | |||
| 120 | ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d) | ||
| 121 | |||
| 122 | return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) | ||
| 123 | |||
| 124 | def go(self, loc, ud, d): | ||
| 125 | """ | ||
| 126 | Fetch urls | ||
| 127 | """ | ||
| 128 | |||
| 129 | (host, depot, user, pswd, parm) = Perforce.doparse(loc, d) | ||
| 130 | |||
| 131 | if depot.find('/...') != -1: | ||
| 132 | path = depot[:depot.find('/...')] | ||
| 133 | else: | ||
| 134 | path = depot | ||
| 135 | |||
| 136 | module = parm.get('module', os.path.basename(path)) | ||
| 137 | |||
| 138 | localdata = data.createCopy(d) | ||
| 139 | data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata) | ||
| 140 | data.update_data(localdata) | ||
| 141 | |||
| 142 | # Get the p4 command | ||
| 143 | p4opt = "" | ||
| 144 | if user: | ||
| 145 | p4opt += " -u %s" % (user) | ||
| 146 | |||
| 147 | if pswd: | ||
| 148 | p4opt += " -P %s" % (pswd) | ||
| 149 | |||
| 150 | if host: | ||
| 151 | p4opt += " -p %s" % (host) | ||
| 152 | |||
| 153 | p4cmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
| 154 | |||
| 155 | # create temp directory | ||
| 156 | logger.debug(2, "Fetch: creating temporary directory") | ||
| 157 | bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata)) | ||
| 158 | data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) | ||
| 159 | tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") | ||
| 160 | tmpfile = tmppipe.readline().strip() | ||
| 161 | if not tmpfile: | ||
| 162 | logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") | ||
| 163 | raise FetchError(module) | ||
| 164 | |||
| 165 | if "label" in parm: | ||
| 166 | depot = "%s@%s" % (depot, parm["label"]) | ||
| 167 | else: | ||
| 168 | cset = Perforce.getcset(d, depot, host, user, pswd, parm) | ||
| 169 | depot = "%s@%s" % (depot, cset) | ||
| 170 | |||
| 171 | os.chdir(tmpfile) | ||
| 172 | logger.info("Fetch " + loc) | ||
| 173 | logger.info("%s%s files %s", p4cmd, p4opt, depot) | ||
| 174 | p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot)) | ||
| 175 | |||
| 176 | if not p4file: | ||
| 177 | logger.error("Fetch: unable to get the P4 files from %s", depot) | ||
| 178 | raise FetchError(module) | ||
| 179 | |||
| 180 | count = 0 | ||
| 181 | |||
| 182 | for file in p4file: | ||
| 183 | list = file.split() | ||
| 184 | |||
| 185 | if list[2] == "delete": | ||
| 186 | continue | ||
| 187 | |||
| 188 | dest = list[0][len(path)+1:] | ||
| 189 | where = dest.find("#") | ||
| 190 | |||
| 191 | os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0])) | ||
| 192 | count = count + 1 | ||
| 193 | |||
| 194 | if count == 0: | ||
| 195 | logger.error("Fetch: No files gathered from the P4 fetch") | ||
| 196 | raise FetchError(module) | ||
| 197 | |||
| 198 | myret = os.system("tar -czf %s %s" % (ud.localpath, module)) | ||
| 199 | if myret != 0: | ||
| 200 | try: | ||
| 201 | os.unlink(ud.localpath) | ||
| 202 | except OSError: | ||
| 203 | pass | ||
| 204 | raise FetchError(module) | ||
| 205 | # cleanup | ||
| 206 | bb.utils.prunedir(tmpfile) | ||
diff --git a/bitbake/lib/bb/fetch/repo.py b/bitbake/lib/bb/fetch/repo.py deleted file mode 100644 index 512fffb4ce..0000000000 --- a/bitbake/lib/bb/fetch/repo.py +++ /dev/null | |||
| @@ -1,98 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake "Fetch" repo (git) implementation | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | # Copyright (C) 2009 Tom Rini <trini@embeddedalley.com> | ||
| 9 | # | ||
| 10 | # Based on git.py which is: | ||
| 11 | #Copyright (C) 2005 Richard Purdie | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | |||
| 26 | import os | ||
| 27 | import bb | ||
| 28 | from bb import data | ||
| 29 | from bb.fetch import Fetch | ||
| 30 | from bb.fetch import runfetchcmd | ||
| 31 | |||
| 32 | class Repo(Fetch): | ||
| 33 | """Class to fetch a module or modules from repo (git) repositories""" | ||
| 34 | def supports(self, url, ud, d): | ||
| 35 | """ | ||
| 36 | Check to see if a given url can be fetched with repo. | ||
| 37 | """ | ||
| 38 | return ud.type in ["repo"] | ||
| 39 | |||
| 40 | def localpath(self, url, ud, d): | ||
| 41 | """ | ||
| 42 | We don"t care about the git rev of the manifests repository, but | ||
| 43 | we do care about the manifest to use. The default is "default". | ||
| 44 | We also care about the branch or tag to be used. The default is | ||
| 45 | "master". | ||
| 46 | """ | ||
| 47 | |||
| 48 | ud.proto = ud.parm.get('protocol', 'git') | ||
| 49 | ud.branch = ud.parm.get('branch', 'master') | ||
| 50 | ud.manifest = ud.parm.get('manifest', 'default.xml') | ||
| 51 | if not ud.manifest.endswith('.xml'): | ||
| 52 | ud.manifest += '.xml' | ||
| 53 | |||
| 54 | ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d) | ||
| 55 | |||
| 56 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 57 | |||
| 58 | def go(self, loc, ud, d): | ||
| 59 | """Fetch url""" | ||
| 60 | |||
| 61 | if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK): | ||
| 62 | logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath) | ||
| 63 | return | ||
| 64 | |||
| 65 | gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) | ||
| 66 | repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo") | ||
| 67 | codir = os.path.join(repodir, gitsrcname, ud.manifest) | ||
| 68 | |||
| 69 | if ud.user: | ||
| 70 | username = ud.user + "@" | ||
| 71 | else: | ||
| 72 | username = "" | ||
| 73 | |||
| 74 | bb.utils.mkdirhier(os.path.join(codir, "repo")) | ||
| 75 | os.chdir(os.path.join(codir, "repo")) | ||
| 76 | if not os.path.exists(os.path.join(codir, "repo", ".repo")): | ||
| 77 | runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d) | ||
| 78 | |||
| 79 | runfetchcmd("repo sync", d) | ||
| 80 | os.chdir(codir) | ||
| 81 | |||
| 82 | scmdata = ud.parm.get("scmdata", "") | ||
| 83 | if scmdata == "keep": | ||
| 84 | tar_flags = "" | ||
| 85 | else: | ||
| 86 | tar_flags = "--exclude '.repo' --exclude '.git'" | ||
| 87 | |||
| 88 | # Create a cache | ||
| 89 | runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d) | ||
| 90 | |||
| 91 | def supports_srcrev(self): | ||
| 92 | return False | ||
| 93 | |||
| 94 | def _build_revision(self, url, ud, d): | ||
| 95 | return ud.manifest | ||
| 96 | |||
| 97 | def _want_sortable_revision(self, url, ud, d): | ||
| 98 | return False | ||
diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py deleted file mode 100644 index 86c76f4e44..0000000000 --- a/bitbake/lib/bb/fetch/ssh.py +++ /dev/null | |||
| @@ -1,118 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | ''' | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | This implementation is for Secure Shell (SSH), and attempts to comply with the | ||
| 7 | IETF secsh internet draft: | ||
| 8 | http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/ | ||
| 9 | |||
| 10 | Currently does not support the sftp parameters, as this uses scp | ||
| 11 | Also does not support the 'fingerprint' connection parameter. | ||
| 12 | |||
| 13 | ''' | ||
| 14 | |||
| 15 | # Copyright (C) 2006 OpenedHand Ltd. | ||
| 16 | # | ||
| 17 | # | ||
| 18 | # Based in part on svk.py: | ||
| 19 | # Copyright (C) 2006 Holger Hans Peter Freyther | ||
| 20 | # Based on svn.py: | ||
| 21 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 22 | # Based on functions from the base bb module: | ||
| 23 | # Copyright 2003 Holger Schurig | ||
| 24 | # | ||
| 25 | # | ||
| 26 | # This program is free software; you can redistribute it and/or modify | ||
| 27 | # it under the terms of the GNU General Public License version 2 as | ||
| 28 | # published by the Free Software Foundation. | ||
| 29 | # | ||
| 30 | # This program is distributed in the hope that it will be useful, | ||
| 31 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 32 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 33 | # GNU General Public License for more details. | ||
| 34 | # | ||
| 35 | # You should have received a copy of the GNU General Public License along | ||
| 36 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 37 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 38 | |||
| 39 | import re, os | ||
| 40 | from bb import data | ||
| 41 | from bb.fetch import Fetch | ||
| 42 | from bb.fetch import FetchError | ||
| 43 | |||
| 44 | |||
| 45 | __pattern__ = re.compile(r''' | ||
| 46 | \s* # Skip leading whitespace | ||
| 47 | ssh:// # scheme | ||
| 48 | ( # Optional username/password block | ||
| 49 | (?P<user>\S+) # username | ||
| 50 | (:(?P<pass>\S+))? # colon followed by the password (optional) | ||
| 51 | )? | ||
| 52 | (?P<cparam>(;[^;]+)*)? # connection parameters block (optional) | ||
| 53 | @ | ||
| 54 | (?P<host>\S+?) # non-greedy match of the host | ||
| 55 | (:(?P<port>[0-9]+))? # colon followed by the port (optional) | ||
| 56 | / | ||
| 57 | (?P<path>[^;]+) # path on the remote system, may be absolute or relative, | ||
| 58 | # and may include the use of '~' to reference the remote home | ||
| 59 | # directory | ||
| 60 | (?P<sparam>(;[^;]+)*)? # parameters block (optional) | ||
| 61 | $ | ||
| 62 | ''', re.VERBOSE) | ||
| 63 | |||
| 64 | class SSH(Fetch): | ||
| 65 | '''Class to fetch a module or modules via Secure Shell''' | ||
| 66 | |||
| 67 | def supports(self, url, urldata, d): | ||
| 68 | return __pattern__.match(url) != None | ||
| 69 | |||
| 70 | def localpath(self, url, urldata, d): | ||
| 71 | m = __pattern__.match(url) | ||
| 72 | path = m.group('path') | ||
| 73 | host = m.group('host') | ||
| 74 | lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path)) | ||
| 75 | return lpath | ||
| 76 | |||
| 77 | def go(self, url, urldata, d): | ||
| 78 | dldir = data.getVar('DL_DIR', d, 1) | ||
| 79 | |||
| 80 | m = __pattern__.match(url) | ||
| 81 | path = m.group('path') | ||
| 82 | host = m.group('host') | ||
| 83 | port = m.group('port') | ||
| 84 | user = m.group('user') | ||
| 85 | password = m.group('pass') | ||
| 86 | |||
| 87 | ldir = os.path.join(dldir, host) | ||
| 88 | lpath = os.path.join(ldir, os.path.basename(path)) | ||
| 89 | |||
| 90 | if not os.path.exists(ldir): | ||
| 91 | os.makedirs(ldir) | ||
| 92 | |||
| 93 | if port: | ||
| 94 | port = '-P %s' % port | ||
| 95 | else: | ||
| 96 | port = '' | ||
| 97 | |||
| 98 | if user: | ||
| 99 | fr = user | ||
| 100 | if password: | ||
| 101 | fr += ':%s' % password | ||
| 102 | fr += '@%s' % host | ||
| 103 | else: | ||
| 104 | fr = host | ||
| 105 | fr += ':%s' % path | ||
| 106 | |||
| 107 | |||
| 108 | import commands | ||
| 109 | cmd = 'scp -B -r %s %s %s/' % ( | ||
| 110 | port, | ||
| 111 | commands.mkarg(fr), | ||
| 112 | commands.mkarg(ldir) | ||
| 113 | ) | ||
| 114 | |||
| 115 | (exitstatus, output) = commands.getstatusoutput(cmd) | ||
| 116 | if exitstatus != 0: | ||
| 117 | print(output) | ||
| 118 | raise FetchError('Unable to fetch %s' % url) | ||
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py deleted file mode 100644 index dc818d2d67..0000000000 --- a/bitbake/lib/bb/fetch/svk.py +++ /dev/null | |||
| @@ -1,104 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | This implementation is for svk. It is based on the svn implementation | ||
| 7 | |||
| 8 | """ | ||
| 9 | |||
| 10 | # Copyright (C) 2006 Holger Hans Peter Freyther | ||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | |||
| 28 | import os | ||
| 29 | import logging | ||
| 30 | import bb | ||
| 31 | from bb import data | ||
| 32 | from bb.fetch import Fetch | ||
| 33 | from bb.fetch import FetchError | ||
| 34 | from bb.fetch import MissingParameterError | ||
| 35 | from bb.fetch import logger | ||
| 36 | |||
class Svk(Fetch):
    """Class to fetch a module or modules from svk repositories"""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with svk.
        """
        return ud.type in ['svk']

    def localpath(self, url, ud, d):
        """
        Compute (and record on ud) the local tarball path for this checkout.

        Requires a 'module' URL parameter; an optional 'rev' parameter pins
        a revision, otherwise ud.date (from SRCDATE) selects the snapshot.
        Raises MissingParameterError when 'module' is absent.
        """
        if "module" not in ud.parm:
            raise MissingParameterError("svk method needs a 'module' parameter")
        ud.module = ud.parm["module"]

        ud.revision = ud.parm.get('rev', "")

        # Tarball name encodes module, host, path, revision and date so
        # distinct checkouts never collide in DL_DIR.
        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        # A date of "now" can never be considered up to date.
        return ud.date == "now"

    def go(self, loc, ud, d):
        """Check the module out into a temp dir and tar it into ud.localpath."""

        svkroot = ud.host + ud.path

        # Check out by date ({...} syntax) unless an explicit revision was given.
        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        # Close the pipe explicitly; previously it was leaked.
        tmppipe.close()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(ud.module)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(ud.module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
        # cleanup
        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py deleted file mode 100644 index 6c2a1187d2..0000000000 --- a/bitbake/lib/bb/fetch/svn.py +++ /dev/null | |||
| @@ -1,204 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementation for svn. | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | # Copyright (C) 2004 Marcin Juszkiewicz | ||
| 10 | # | ||
| 11 | # This program is free software; you can redistribute it and/or modify | ||
| 12 | # it under the terms of the GNU General Public License version 2 as | ||
| 13 | # published by the Free Software Foundation. | ||
| 14 | # | ||
| 15 | # This program is distributed in the hope that it will be useful, | ||
| 16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 18 | # GNU General Public License for more details. | ||
| 19 | # | ||
| 20 | # You should have received a copy of the GNU General Public License along | ||
| 21 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 22 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 23 | # | ||
| 24 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 25 | |||
| 26 | import os | ||
| 27 | import sys | ||
| 28 | import logging | ||
| 29 | import bb | ||
| 30 | from bb import data | ||
| 31 | from bb.fetch import Fetch | ||
| 32 | from bb.fetch import FetchError | ||
| 33 | from bb.fetch import MissingParameterError | ||
| 34 | from bb.fetch import runfetchcmd | ||
| 35 | from bb.fetch import logger | ||
| 36 | |||
class Svn(Fetch):
    """Class to fetch a module or modules from svn repositories"""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with svn.
        """
        return ud.type in ['svn']

    def localpath(self, url, ud, d):
        """
        Resolve the revision/date to fetch and compute the local tarball path.

        Requires a 'module' URL parameter.  Revision selection precedence:
        explicit 'rev' parameter, then 'date' parameter, then SRCREV (via
        srcrev_internal_helper), falling back to SRCDATE for DATE-based PVs.
        Raises MissingParameterError when 'module' is absent.
        """
        if "module" not in ud.parm:
            raise MissingParameterError("svn method needs a 'module' parameter")

        ud.module = ud.parm["module"]

        # Create paths to svn checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        if 'rev' in ud.parm:
            ud.date = ""
            ud.revision = ud.parm['rev']
        elif 'date' in ud.parm:
            # BUGFIX: previously tested "'date' in ud.date", i.e. membership
            # in the date *string*, so the 'date' URL parameter was ignored.
            ud.date = ud.parm['date']
            ud.revision = ""
        else:
            #
            # ***Nasty hack***
            # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
            # Should warn people to switch to SRCREV here
            #
            pv = data.getVar("PV", d, 0)
            if "DATE" in pv:
                ud.revision = ""
            else:
                rev = Fetch.srcrev_internal_helper(ud, d)
                if rev is True:
                    ud.revision = self.latest_revision(url, ud, d)
                    ud.date = ""
                elif rev:
                    ud.revision = rev
                    ud.date = ""
                else:
                    ud.revision = ""

        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _buildsvncommand(self, ud, d, command):
        """
        Build up an svn commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_svn}', d)

        proto = ud.parm.get('proto', 'svn')

        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in ud.parm:
            svn_rsh = ud.parm["rsh"]

        svnroot = ud.host + ud.path

        # either use the revision, or SRCDATE in braces,
        options = []

        if ud.user:
            options.append("--username %s" % ud.user)

        if ud.pswd:
            options.append("--password %s" % ud.pswd)

        # Use == for string comparison; "is" only worked by interning accident.
        if command == "info":
            svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
        else:
            suffix = ""
            if ud.revision:
                options.append("-r %s" % ud.revision)
                # Peg revision suffix keeps the URL itself pinned too.
                suffix = "@%s" % (ud.revision)
            elif ud.date:
                options.append("-r {%s}" % ud.date)

            if command == "fetch":
                svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
            elif command == "update":
                svncmd = "%s update %s" % (basecmd, " ".join(options))
            else:
                raise FetchError("Invalid svn command %s" % command)

        if svn_rsh:
            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)

        return svncmd

    def go(self, loc, ud, d):
        """Fetch url: update an existing checkout or check out fresh, then tar it up."""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + loc)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", svnupdatecmd)
            runfetchcmd(svnupdatecmd, d)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            runfetchcmd(svnfetchcmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.svn'"

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename; remove a partial tarball on failure
        try:
            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
        except:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            # Bare raise re-raises the original exception with its traceback
            # (equivalent to the old "raise t, v, tb", valid on py2 and py3).
            raise

    def supports_srcrev(self):
        # svn revisions are usable as SRCREV values.
        return True

    def _revision_key(self, url, ud, d):
        """
        Return a unique key for the url
        """
        return "svn:" + ud.moddir

    def _latest_revision(self, url, ud, d):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "SVN fetcher hitting network for %s", url)

        # Force C locale so the "Last Changed Rev" label is not localized.
        output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)

        revision = None
        for line in output.splitlines():
            if "Last Changed Rev" in line:
                revision = line.split(":")[1].strip()

        return revision

    def _sortable_revision(self, url, ud, d):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return self._build_revision(url, ud, d)

    def _build_revision(self, url, ud, d):
        return ud.revision
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py deleted file mode 100644 index 4d4bdfd493..0000000000 --- a/bitbake/lib/bb/fetch/wget.py +++ /dev/null | |||
| @@ -1,93 +0,0 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | |||
| 28 | import os | ||
| 29 | import logging | ||
| 30 | import bb | ||
| 31 | import urllib | ||
| 32 | from bb import data | ||
| 33 | from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd | ||
| 34 | |||
class Wget(Fetch):
    """Class to fetch urls via 'wget'"""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp']

    def localpath(self, url, ud, d):
        """
        Compute the download path: DL_DIR plus the (unquoted) URL basename.
        """
        url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
        ud.basename = os.path.basename(ud.path)
        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, uri, ud, d, checkonly = False):
        """
        Fetch (or, with checkonly, just probe) the given uri.

        Chooses CHECKCOMMAND / RESUMECOMMAND / FETCHCOMMAND depending on the
        mode and whether a partial download already exists, then verifies the
        file actually appeared.  Raises FetchError on failure.
        """

        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didnt complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            # Strip any ";param" suffix before handing the URL to the command.
            uri = uri.split(";")[0]

            # (Removed dead locals: uri_decoded/uri_type/uri_host were computed
            # here but never used, and uri was redundantly re-split below.)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
            runfetchcmd(fetchcmd, d)

            # Sanity check since wget can pretend it succeed when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
                return False

            return True

        # Run with a "wget" override active so wget-specific settings apply.
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        if fetch_uri(uri, ud, localdata):
            return True

        raise FetchError(uri)

    def checkstatus(self, uri, ud, d):
        # Probe availability only; delegates to go() in checkonly mode.
        return self.go(uri, ud, d, True)
