diff options
83 files changed, 3021 insertions, 278 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py index f8577c6794..e1ddbd53ff 100644 --- a/bitbake/lib/bb/__init__.py +++ b/bitbake/lib/bb/__init__.py | |||
| @@ -35,6 +35,11 @@ class NullHandler(logging.Handler): | |||
| 35 | def emit(self, record): | 35 | def emit(self, record): |
| 36 | pass | 36 | pass |
| 37 | 37 | ||
| 38 | class BBLogRecord(logging.LogRecord): | ||
| 39 | def __init__(self, name, level, fn, lno, msg, args, exc_info, func, extra): | ||
| 40 | self.taskpid = bb.event.worker_pid | ||
| 41 | logging.LogRecord.__init__(self, name, level, fn, lno, msg, args, exc_info, func) | ||
| 42 | |||
| 38 | Logger = logging.getLoggerClass() | 43 | Logger = logging.getLoggerClass() |
| 39 | class BBLogger(Logger): | 44 | class BBLogger(Logger): |
| 40 | def __init__(self, name): | 45 | def __init__(self, name): |
| @@ -42,6 +47,9 @@ class BBLogger(Logger): | |||
| 42 | self.debug = self.bbdebug | 47 | self.debug = self.bbdebug |
| 43 | Logger.__init__(self, name) | 48 | Logger.__init__(self, name) |
| 44 | 49 | ||
| 50 | def makeRecord(self, name, lvl, fn, lno, msg, args, exc_info, func=None, extra=None): | ||
| 51 | return BBLogRecord(name, lvl, fn, lno, msg, args, exc_info, func, extra) | ||
| 52 | |||
| 45 | def bbdebug(self, level, msg, *args, **kwargs): | 53 | def bbdebug(self, level, msg, *args, **kwargs): |
| 46 | return self.log(logging.DEBUG - level - 1, msg, *args, **kwargs) | 54 | return self.log(logging.DEBUG - level - 1, msg, *args, **kwargs) |
| 47 | 55 | ||
| @@ -70,6 +78,9 @@ if "BBDEBUG" in os.environ: | |||
| 70 | if level: | 78 | if level: |
| 71 | bb.msg.set_debug_level(level) | 79 | bb.msg.set_debug_level(level) |
| 72 | 80 | ||
| 81 | if True or os.environ.get("BBFETCH2"): | ||
| 82 | from bb import fetch2 as fetch | ||
| 83 | sys.modules['bb.fetch'] = sys.modules['bb.fetch2'] | ||
| 73 | 84 | ||
| 74 | # Messaging convenience functions | 85 | # Messaging convenience functions |
| 75 | def plain(*args): | 86 | def plain(*args): |
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py index 968e2ea562..f127796c07 100644 --- a/bitbake/lib/bb/build.py +++ b/bitbake/lib/bb/build.py | |||
| @@ -416,7 +416,7 @@ def del_stamp(task, d, file_name = None): | |||
| 416 | Removes a stamp for a given task | 416 | Removes a stamp for a given task |
| 417 | (d can be a data dict or dataCache) | 417 | (d can be a data dict or dataCache) |
| 418 | """ | 418 | """ |
| 419 | stamp_internal(task, d, file_name) | 419 | stamp = stamp_internal(task, d, file_name) |
| 420 | if os.access(stamp, os.F_OK): | 420 | if os.access(stamp, os.F_OK): |
| 421 | os.remove(stamp) | 421 | os.remove(stamp) |
| 422 | 422 | ||
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index c477501d68..9a2e2d5298 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py | |||
| @@ -106,7 +106,19 @@ class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)): | |||
| 106 | return metadata.getVar(var, True) or '' | 106 | return metadata.getVar(var, True) or '' |
| 107 | 107 | ||
| 108 | @classmethod | 108 | @classmethod |
| 109 | def make_optional(cls, default=None, **kwargs): | ||
| 110 | """Construct the namedtuple from the specified keyword arguments, | ||
| 111 | with every value considered optional, using the default value if | ||
| 112 | it was not specified.""" | ||
| 113 | for field in cls._fields: | ||
| 114 | kwargs[field] = kwargs.get(field, default) | ||
| 115 | return cls(**kwargs) | ||
| 116 | |||
| 117 | @classmethod | ||
| 109 | def from_metadata(cls, filename, metadata): | 118 | def from_metadata(cls, filename, metadata): |
| 119 | if cls.getvar('__SKIPPED', metadata): | ||
| 120 | return cls.make_optional(skipped=True) | ||
| 121 | |||
| 110 | tasks = metadata.getVar('__BBTASKS', False) | 122 | tasks = metadata.getVar('__BBTASKS', False) |
| 111 | 123 | ||
| 112 | pn = cls.getvar('PN', metadata) | 124 | pn = cls.getvar('PN', metadata) |
| @@ -124,7 +136,7 @@ class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)): | |||
| 124 | {'tasks': [], 'parents': {}}, | 136 | {'tasks': [], 'parents': {}}, |
| 125 | variants = cls.listvar('__VARIANTS', metadata) + [''], | 137 | variants = cls.listvar('__VARIANTS', metadata) + [''], |
| 126 | 138 | ||
| 127 | skipped = cls.getvar('__SKIPPED', metadata), | 139 | skipped = False, |
| 128 | timestamp = bb.parse.cached_mtime(filename), | 140 | timestamp = bb.parse.cached_mtime(filename), |
| 129 | packages = cls.listvar('PACKAGES', metadata), | 141 | packages = cls.listvar('PACKAGES', metadata), |
| 130 | pn = pn, | 142 | pn = pn, |
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py index 1d3557cd6d..06409319e1 100644 --- a/bitbake/lib/bb/codeparser.py +++ b/bitbake/lib/bb/codeparser.py | |||
| @@ -4,7 +4,8 @@ import logging | |||
| 4 | import os.path | 4 | import os.path |
| 5 | import bb.utils, bb.data | 5 | import bb.utils, bb.data |
| 6 | from itertools import chain | 6 | from itertools import chain |
| 7 | from bb.pysh import pyshyacc, pyshlex | 7 | from pysh import pyshyacc, pyshlex |
| 8 | |||
| 8 | 9 | ||
| 9 | logger = logging.getLogger('BitBake.CodeParser') | 10 | logger = logging.getLogger('BitBake.CodeParser') |
| 10 | PARSERCACHE_VERSION = 2 | 11 | PARSERCACHE_VERSION = 2 |
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py index 23fd72f432..e524db7498 100644 --- a/bitbake/lib/bb/cooker.py +++ b/bitbake/lib/bb/cooker.py | |||
| @@ -893,6 +893,11 @@ class BBCooker: | |||
| 893 | def post_serve(self): | 893 | def post_serve(self): |
| 894 | bb.event.fire(CookerExit(), self.configuration.event_data) | 894 | bb.event.fire(CookerExit(), self.configuration.event_data) |
| 895 | 895 | ||
| 896 | def shutdown(self): | ||
| 897 | self.state = state.shutdown | ||
| 898 | |||
| 899 | def stop(self): | ||
| 900 | self.state = state.stop | ||
| 896 | 901 | ||
| 897 | def server_main(cooker, func, *args): | 902 | def server_main(cooker, func, *args): |
| 898 | cooker.pre_serve() | 903 | cooker.pre_serve() |
| @@ -935,12 +940,6 @@ def server_main(cooker, func, *args): | |||
| 935 | 940 | ||
| 936 | return ret | 941 | return ret |
| 937 | 942 | ||
| 938 | def shutdown(self): | ||
| 939 | self.state = state.shutdown | ||
| 940 | |||
| 941 | def stop(self): | ||
| 942 | self.state = state.stop | ||
| 943 | |||
| 944 | class CookerExit(bb.event.Event): | 943 | class CookerExit(bb.event.Event): |
| 945 | """ | 944 | """ |
| 946 | Notify clients of the Cooker shutdown | 945 | Notify clients of the Cooker shutdown |
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py index 6ec522aa48..198b628fad 100644 --- a/bitbake/lib/bb/data.py +++ b/bitbake/lib/bb/data.py | |||
| @@ -161,7 +161,7 @@ def expandKeys(alterdata, readdata = None): | |||
| 161 | 161 | ||
| 162 | def inheritFromOS(d): | 162 | def inheritFromOS(d): |
| 163 | """Inherit variables from the environment.""" | 163 | """Inherit variables from the environment.""" |
| 164 | exportlist = bb.utils.preserved_envvars_export_list() | 164 | exportlist = bb.utils.preserved_envvars_exported() |
| 165 | for s in os.environ.keys(): | 165 | for s in os.environ.keys(): |
| 166 | try: | 166 | try: |
| 167 | setVar(s, os.environ[s], d) | 167 | setVar(s, os.environ[s], d) |
| @@ -192,7 +192,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False): | |||
| 192 | return 0 | 192 | return 0 |
| 193 | 193 | ||
| 194 | if all: | 194 | if all: |
| 195 | o.write('# %s=%s\n' % (var, oval)) | 195 | commentVal = re.sub('\n', '\n#', str(oval)) |
| 196 | o.write('# %s=%s\n' % (var, commentVal)) | ||
| 196 | 197 | ||
| 197 | if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: | 198 | if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: |
| 198 | return 0 | 199 | return 0 |
| @@ -219,6 +220,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False): | |||
| 219 | # if we're going to output this within doublequotes, | 220 | # if we're going to output this within doublequotes, |
| 220 | # to a shell, we need to escape the quotes in the var | 221 | # to a shell, we need to escape the quotes in the var |
| 221 | alter = re.sub('"', '\\"', val.strip()) | 222 | alter = re.sub('"', '\\"', val.strip()) |
| 223 | alter = re.sub('\n', ' \\\n', alter) | ||
| 222 | o.write('%s="%s"\n' % (varExpanded, alter)) | 224 | o.write('%s="%s"\n' % (varExpanded, alter)) |
| 223 | return 0 | 225 | return 0 |
| 224 | 226 | ||
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py index 8b45501c00..450d913633 100644 --- a/bitbake/lib/bb/event.py +++ b/bitbake/lib/bb/event.py | |||
| @@ -129,13 +129,11 @@ def fire(event, d): | |||
| 129 | def worker_fire(event, d): | 129 | def worker_fire(event, d): |
| 130 | data = "<event>" + pickle.dumps(event) + "</event>" | 130 | data = "<event>" + pickle.dumps(event) + "</event>" |
| 131 | worker_pipe.write(data) | 131 | worker_pipe.write(data) |
| 132 | worker_pipe.flush() | ||
| 133 | 132 | ||
| 134 | def fire_from_worker(event, d): | 133 | def fire_from_worker(event, d): |
| 135 | if not event.startswith("<event>") or not event.endswith("</event>"): | 134 | if not event.startswith("<event>") or not event.endswith("</event>"): |
| 136 | print("Error, not an event %s" % event) | 135 | print("Error, not an event %s" % event) |
| 137 | return | 136 | return |
| 138 | #print "Got event %s" % event | ||
| 139 | event = pickle.loads(event[7:-8]) | 137 | event = pickle.loads(event[7:-8]) |
| 140 | fire_ui_handlers(event, d) | 138 | fire_ui_handlers(event, d) |
| 141 | 139 | ||
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py index 67e5addfe0..07eb77dbfc 100644 --- a/bitbake/lib/bb/fetch/__init__.py +++ b/bitbake/lib/bb/fetch/__init__.py | |||
| @@ -243,17 +243,20 @@ def verify_checksum(u, ud, d): | |||
| 243 | sha256data = bb.utils.sha256_file(ud.localpath) | 243 | sha256data = bb.utils.sha256_file(ud.localpath) |
| 244 | 244 | ||
| 245 | if (ud.md5_expected == None or ud.sha256_expected == None): | 245 | if (ud.md5_expected == None or ud.sha256_expected == None): |
| 246 | bb.warn("Missing SRC_URI checksum for %s, consider to add\n" \ | 246 | logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n' |
| 247 | "SRC_URI[%s] = \"%s\"\nSRC_URI[%s] = \"%s\"" \ | 247 | 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', |
| 248 | % (ud.localpath, ud.md5_name, md5data, ud.sha256_name, sha256data)) | 248 | ud.localpath, ud.md5_name, md5data, |
| 249 | ud.sha256_name, sha256data) | ||
| 249 | if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1": | 250 | if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1": |
| 250 | raise FetchError("No checksum specified for %s." % u) | 251 | raise FetchError("No checksum specified for %s." % u) |
| 251 | return | 252 | return |
| 252 | 253 | ||
| 253 | if (ud.md5_expected != md5data or ud.sha256_expected != sha256data): | 254 | if (ud.md5_expected != md5data or ud.sha256_expected != sha256data): |
| 254 | bb.error("The checksums for '%s' did not match." % ud.localpath) | 255 | logger.error('The checksums for "%s" did not match.\n' |
| 255 | bb.error("Expected MD5: '%s' and Got: '%s'" % (ud.md5_expected, md5data)) | 256 | ' MD5: expected "%s", got "%s"\n' |
| 256 | bb.error("Expected SHA256: '%s' and Got: '%s'" % (ud.sha256_expected, sha256data)) | 257 | ' SHA256: expected "%s", got "%s"\n', |
| 258 | ud.localpath, ud.md5_expected, md5data, | ||
| 259 | ud.sha256_expected, sha256data) | ||
| 257 | raise FetchError("%s checksum mismatch." % u) | 260 | raise FetchError("%s checksum mismatch." % u) |
| 258 | 261 | ||
| 259 | def go(d, urls = None): | 262 | def go(d, urls = None): |
| @@ -326,7 +329,7 @@ def checkstatus(d, urls = None): | |||
| 326 | for u in urls: | 329 | for u in urls: |
| 327 | ud = urldata[u] | 330 | ud = urldata[u] |
| 328 | m = ud.method | 331 | m = ud.method |
| 329 | logger.debug(1, "Testing URL %s" % u) | 332 | logger.debug(1, "Testing URL %s", u) |
| 330 | # First try checking uri, u, from PREMIRRORS | 333 | # First try checking uri, u, from PREMIRRORS |
| 331 | mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True)) | 334 | mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True)) |
| 332 | ret = try_mirrors(d, u, mirrors, True) | 335 | ret = try_mirrors(d, u, mirrors, True) |
| @@ -357,6 +360,9 @@ def localpaths(d): | |||
| 357 | 360 | ||
| 358 | srcrev_internal_call = False | 361 | srcrev_internal_call = False |
| 359 | 362 | ||
| 363 | def get_autorev(d): | ||
| 364 | return get_srcrev(d) | ||
| 365 | |||
| 360 | def get_srcrev(d): | 366 | def get_srcrev(d): |
| 361 | """ | 367 | """ |
| 362 | Return the version string for the current package | 368 | Return the version string for the current package |
| @@ -482,7 +488,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False): | |||
| 482 | """ | 488 | """ |
| 483 | fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri)) | 489 | fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri)) |
| 484 | if not check and os.access(fpath, os.R_OK) and not force: | 490 | if not check and os.access(fpath, os.R_OK) and not force: |
| 485 | logger.debug(1, "%s already exists, skipping checkout." % fpath) | 491 | logger.debug(1, "%s already exists, skipping checkout.", fpath) |
| 486 | return fpath | 492 | return fpath |
| 487 | 493 | ||
| 488 | ld = d.createCopy() | 494 | ld = d.createCopy() |
| @@ -510,7 +516,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False): | |||
| 510 | bb.fetch.MD5SumError): | 516 | bb.fetch.MD5SumError): |
| 511 | import sys | 517 | import sys |
| 512 | (type, value, traceback) = sys.exc_info() | 518 | (type, value, traceback) = sys.exc_info() |
| 513 | logger.debug(2, "Mirror fetch failure: %s" % value) | 519 | logger.debug(2, "Mirror fetch failure: %s", value) |
| 514 | removefile(ud.localpath) | 520 | removefile(ud.localpath) |
| 515 | continue | 521 | continue |
| 516 | return None | 522 | return None |
| @@ -694,7 +700,7 @@ class Fetch(object): | |||
| 694 | if not rev: | 700 | if not rev: |
| 695 | rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1) | 701 | rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1) |
| 696 | if not rev: | 702 | if not rev: |
| 697 | rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1) | 703 | rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1) |
| 698 | if not rev: | 704 | if not rev: |
| 699 | rev = data.getVar("SRCREV", d, 1) | 705 | rev = data.getVar("SRCREV", d, 1) |
| 700 | if rev == "INVALID": | 706 | if rev == "INVALID": |
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py index de415ec309..b37a09743e 100644 --- a/bitbake/lib/bb/fetch/git.py +++ b/bitbake/lib/bb/fetch/git.py | |||
| @@ -22,6 +22,7 @@ BitBake 'Fetch' git implementation | |||
| 22 | 22 | ||
| 23 | import os | 23 | import os |
| 24 | import bb | 24 | import bb |
| 25 | import bb.persist_data | ||
| 25 | from bb import data | 26 | from bb import data |
| 26 | from bb.fetch import Fetch | 27 | from bb.fetch import Fetch |
| 27 | from bb.fetch import runfetchcmd | 28 | from bb.fetch import runfetchcmd |
| @@ -117,6 +118,7 @@ class Git(Fetch): | |||
| 117 | 118 | ||
| 118 | repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball) | 119 | repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball) |
| 119 | 120 | ||
| 121 | |||
| 120 | coname = '%s' % (ud.tag) | 122 | coname = '%s' % (ud.tag) |
| 121 | codir = os.path.join(ud.clonedir, coname) | 123 | codir = os.path.join(ud.clonedir, coname) |
| 122 | 124 | ||
| @@ -206,11 +208,19 @@ class Git(Fetch): | |||
| 206 | output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True) | 208 | output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True) |
| 207 | return output.split()[0] != "0" | 209 | return output.split()[0] != "0" |
| 208 | 210 | ||
| 209 | def _revision_key(self, url, ud, d): | 211 | def _revision_key(self, url, ud, d, branch=False): |
| 210 | """ | 212 | """ |
| 211 | Return a unique key for the url | 213 | Return a unique key for the url |
| 212 | """ | 214 | """ |
| 213 | return "git:" + ud.host + ud.path.replace('/', '.') + ud.branch | 215 | key = 'git:' + ud.host + ud.path.replace('/', '.') |
| 216 | if branch: | ||
| 217 | return key + ud.branch | ||
| 218 | else: | ||
| 219 | return key | ||
| 220 | |||
| 221 | def generate_revision_key(self, url, ud, d, branch=False): | ||
| 222 | key = self._revision_key(url, ud, d, branch) | ||
| 223 | return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "") | ||
| 214 | 224 | ||
| 215 | def _latest_revision(self, url, ud, d): | 225 | def _latest_revision(self, url, ud, d): |
| 216 | """ | 226 | """ |
| @@ -228,6 +238,74 @@ class Git(Fetch): | |||
| 228 | raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd)) | 238 | raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd)) |
| 229 | return output.split()[0] | 239 | return output.split()[0] |
| 230 | 240 | ||
| 241 | def latest_revision(self, url, ud, d): | ||
| 242 | """ | ||
| 243 | Look in the cache for the latest revision, if not present ask the SCM. | ||
| 244 | """ | ||
| 245 | persisted = bb.persist_data.persist(d) | ||
| 246 | revs = persisted['BB_URI_HEADREVS'] | ||
| 247 | |||
| 248 | key = self.generate_revision_key(url, ud, d, branch=True) | ||
| 249 | rev = revs[key] | ||
| 250 | if rev is None: | ||
| 251 | # Compatibility with old key format, no branch included | ||
| 252 | oldkey = self.generate_revision_key(url, ud, d, branch=False) | ||
| 253 | rev = revs[oldkey] | ||
| 254 | if rev is not None: | ||
| 255 | del revs[oldkey] | ||
| 256 | else: | ||
| 257 | rev = self._latest_revision(url, ud, d) | ||
| 258 | revs[key] = rev | ||
| 259 | |||
| 260 | return str(rev) | ||
| 261 | |||
| 262 | def sortable_revision(self, url, ud, d): | ||
| 263 | """ | ||
| 264 | |||
| 265 | """ | ||
| 266 | pd = bb.persist_data.persist(d) | ||
| 267 | localcounts = pd['BB_URI_LOCALCOUNT'] | ||
| 268 | key = self.generate_revision_key(url, ud, d, branch=True) | ||
| 269 | oldkey = self.generate_revision_key(url, ud, d, branch=False) | ||
| 270 | |||
| 271 | latest_rev = self._build_revision(url, ud, d) | ||
| 272 | last_rev = localcounts[key + '_rev'] | ||
| 273 | if last_rev is None: | ||
| 274 | last_rev = localcounts[oldkey + '_rev'] | ||
| 275 | if last_rev is not None: | ||
| 276 | del localcounts[oldkey + '_rev'] | ||
| 277 | localcounts[key + '_rev'] = last_rev | ||
| 278 | |||
| 279 | uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False | ||
| 280 | count = None | ||
| 281 | if uselocalcount: | ||
| 282 | count = Fetch.localcount_internal_helper(ud, d) | ||
| 283 | if count is None: | ||
| 284 | count = localcounts[key + '_count'] | ||
| 285 | if count is None: | ||
| 286 | count = localcounts[oldkey + '_count'] | ||
| 287 | if count is not None: | ||
| 288 | del localcounts[oldkey + '_count'] | ||
| 289 | localcounts[key + '_count'] = count | ||
| 290 | |||
| 291 | if last_rev == latest_rev: | ||
| 292 | return str(count + "+" + latest_rev) | ||
| 293 | |||
| 294 | buildindex_provided = hasattr(self, "_sortable_buildindex") | ||
| 295 | if buildindex_provided: | ||
| 296 | count = self._sortable_buildindex(url, ud, d, latest_rev) | ||
| 297 | if count is None: | ||
| 298 | count = "0" | ||
| 299 | elif uselocalcount or buildindex_provided: | ||
| 300 | count = str(count) | ||
| 301 | else: | ||
| 302 | count = str(int(count) + 1) | ||
| 303 | |||
| 304 | localcounts[key + '_rev'] = latest_rev | ||
| 305 | localcounts[key + '_count'] = count | ||
| 306 | |||
| 307 | return str(count + "+" + latest_rev) | ||
| 308 | |||
| 231 | def _build_revision(self, url, ud, d): | 309 | def _build_revision(self, url, ud, d): |
| 232 | return ud.tag | 310 | return ud.tag |
| 233 | 311 | ||
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py new file mode 100644 index 0000000000..751e514121 --- /dev/null +++ b/bitbake/lib/bb/fetch2/__init__.py | |||
| @@ -0,0 +1,824 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | """ | ||
| 9 | |||
| 10 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 11 | # | ||
| 12 | # This program is free software; you can redistribute it and/or modify | ||
| 13 | # it under the terms of the GNU General Public License version 2 as | ||
| 14 | # published by the Free Software Foundation. | ||
| 15 | # | ||
| 16 | # This program is distributed in the hope that it will be useful, | ||
| 17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 19 | # GNU General Public License for more details. | ||
| 20 | # | ||
| 21 | # You should have received a copy of the GNU General Public License along | ||
| 22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 24 | # | ||
| 25 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 26 | |||
| 27 | from __future__ import absolute_import | ||
| 28 | from __future__ import print_function | ||
| 29 | import os, re | ||
| 30 | import logging | ||
| 31 | import bb | ||
| 32 | from bb import data | ||
| 33 | from bb import persist_data | ||
| 34 | |||
| 35 | logger = logging.getLogger("BitBake.Fetch") | ||
| 36 | |||
| 37 | class MalformedUrl(Exception): | ||
| 38 | """Exception raised when encountering an invalid url""" | ||
| 39 | |||
| 40 | class FetchError(Exception): | ||
| 41 | """Exception raised when a download fails""" | ||
| 42 | |||
| 43 | class NoMethodError(Exception): | ||
| 44 | """Exception raised when there is no method to obtain a supplied url or set of urls""" | ||
| 45 | |||
| 46 | class MissingParameterError(Exception): | ||
| 47 | """Exception raised when a fetch method is missing a critical parameter in the url""" | ||
| 48 | |||
| 49 | class ParameterError(Exception): | ||
| 50 | """Exception raised when a url cannot be proccessed due to invalid parameters.""" | ||
| 51 | |||
| 52 | class MD5SumError(Exception): | ||
| 53 | """Exception raised when a MD5SUM of a file does not match the expected one""" | ||
| 54 | |||
| 55 | class InvalidSRCREV(Exception): | ||
| 56 | """Exception raised when an invalid SRCREV is encountered""" | ||
| 57 | |||
| 58 | def decodeurl(url): | ||
| 59 | """Decodes an URL into the tokens (scheme, network location, path, | ||
| 60 | user, password, parameters). | ||
| 61 | """ | ||
| 62 | |||
| 63 | m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) | ||
| 64 | if not m: | ||
| 65 | raise MalformedUrl(url) | ||
| 66 | |||
| 67 | type = m.group('type') | ||
| 68 | location = m.group('location') | ||
| 69 | if not location: | ||
| 70 | raise MalformedUrl(url) | ||
| 71 | user = m.group('user') | ||
| 72 | parm = m.group('parm') | ||
| 73 | |||
| 74 | locidx = location.find('/') | ||
| 75 | if locidx != -1 and type.lower() != 'file': | ||
| 76 | host = location[:locidx] | ||
| 77 | path = location[locidx:] | ||
| 78 | else: | ||
| 79 | host = "" | ||
| 80 | path = location | ||
| 81 | if user: | ||
| 82 | m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user) | ||
| 83 | if m: | ||
| 84 | user = m.group('user') | ||
| 85 | pswd = m.group('pswd') | ||
| 86 | else: | ||
| 87 | user = '' | ||
| 88 | pswd = '' | ||
| 89 | |||
| 90 | p = {} | ||
| 91 | if parm: | ||
| 92 | for s in parm.split(';'): | ||
| 93 | s1, s2 = s.split('=') | ||
| 94 | p[s1] = s2 | ||
| 95 | |||
| 96 | return (type, host, path, user, pswd, p) | ||
| 97 | |||
| 98 | def encodeurl(decoded): | ||
| 99 | """Encodes a URL from tokens (scheme, network location, path, | ||
| 100 | user, password, parameters). | ||
| 101 | """ | ||
| 102 | |||
| 103 | (type, host, path, user, pswd, p) = decoded | ||
| 104 | |||
| 105 | if not type or not path: | ||
| 106 | raise MissingParameterError("Type or path url components missing when encoding %s" % decoded) | ||
| 107 | url = '%s://' % type | ||
| 108 | if user: | ||
| 109 | url += "%s" % user | ||
| 110 | if pswd: | ||
| 111 | url += ":%s" % pswd | ||
| 112 | url += "@" | ||
| 113 | if host: | ||
| 114 | url += "%s" % host | ||
| 115 | url += "%s" % path | ||
| 116 | if p: | ||
| 117 | for parm in p: | ||
| 118 | url += ";%s=%s" % (parm, p[parm]) | ||
| 119 | |||
| 120 | return url | ||
| 121 | |||
| 122 | def uri_replace(uri, uri_find, uri_replace, d): | ||
| 123 | if not uri or not uri_find or not uri_replace: | ||
| 124 | logger.debug(1, "uri_replace: passed an undefined value, not replacing") | ||
| 125 | uri_decoded = list(decodeurl(uri)) | ||
| 126 | uri_find_decoded = list(decodeurl(uri_find)) | ||
| 127 | uri_replace_decoded = list(decodeurl(uri_replace)) | ||
| 128 | result_decoded = ['', '', '', '', '', {}] | ||
| 129 | for i in uri_find_decoded: | ||
| 130 | loc = uri_find_decoded.index(i) | ||
| 131 | result_decoded[loc] = uri_decoded[loc] | ||
| 132 | if isinstance(i, basestring): | ||
| 133 | if (re.match(i, uri_decoded[loc])): | ||
| 134 | result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) | ||
| 135 | if uri_find_decoded.index(i) == 2: | ||
| 136 | if d: | ||
| 137 | localfn = bb.fetch2.localpath(uri, d) | ||
| 138 | if localfn: | ||
| 139 | result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch2.localpath(uri, d))) | ||
| 140 | else: | ||
| 141 | return uri | ||
| 142 | return encodeurl(result_decoded) | ||
| 143 | |||
| 144 | methods = [] | ||
| 145 | urldata_cache = {} | ||
| 146 | saved_headrevs = {} | ||
| 147 | |||
| 148 | def fetcher_init(d): | ||
| 149 | """ | ||
| 150 | Called to initialize the fetchers once the configuration data is known. | ||
| 151 | Calls before this must not hit the cache. | ||
| 152 | """ | ||
| 153 | pd = persist_data.persist(d) | ||
| 154 | # When to drop SCM head revisions controlled by user policy | ||
| 155 | srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear" | ||
| 156 | if srcrev_policy == "cache": | ||
| 157 | logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) | ||
| 158 | elif srcrev_policy == "clear": | ||
| 159 | logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) | ||
| 160 | try: | ||
| 161 | bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items() | ||
| 162 | except: | ||
| 163 | pass | ||
| 164 | del pd['BB_URI_HEADREVS'] | ||
| 165 | else: | ||
| 166 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) | ||
| 167 | |||
| 168 | for m in methods: | ||
| 169 | if hasattr(m, "init"): | ||
| 170 | m.init(d) | ||
| 171 | |||
| 172 | def fetcher_compare_revisions(d): | ||
| 173 | """ | ||
| 174 | Compare the revisions in the persistant cache with current values and | ||
| 175 | return true/false on whether they've changed. | ||
| 176 | """ | ||
| 177 | |||
| 178 | pd = persist_data.persist(d) | ||
| 179 | data = pd['BB_URI_HEADREVS'].items() | ||
| 180 | data2 = bb.fetch2.saved_headrevs | ||
| 181 | |||
| 182 | changed = False | ||
| 183 | for key in data: | ||
| 184 | if key not in data2 or data2[key] != data[key]: | ||
| 185 | logger.debug(1, "%s changed", key) | ||
| 186 | changed = True | ||
| 187 | return True | ||
| 188 | else: | ||
| 189 | logger.debug(2, "%s did not change", key) | ||
| 190 | return False | ||
| 191 | |||
| 192 | # Function call order is usually: | ||
| 193 | # 1. init | ||
| 194 | # 2. go | ||
| 195 | # 3. localpaths | ||
| 196 | # localpath can be called at any time | ||
| 197 | |||
| 198 | def init(urls, d, setup = True): | ||
| 199 | urldata = {} | ||
| 200 | |||
| 201 | fn = bb.data.getVar('FILE', d, 1) | ||
| 202 | if fn in urldata_cache: | ||
| 203 | urldata = urldata_cache[fn] | ||
| 204 | |||
| 205 | for url in urls: | ||
| 206 | if url not in urldata: | ||
| 207 | urldata[url] = FetchData(url, d) | ||
| 208 | |||
| 209 | if setup: | ||
| 210 | for url in urldata: | ||
| 211 | if not urldata[url].setup: | ||
| 212 | urldata[url].setup_localpath(d) | ||
| 213 | |||
| 214 | urldata_cache[fn] = urldata | ||
| 215 | return urldata | ||
| 216 | |||
| 217 | def mirror_from_string(data): | ||
| 218 | return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] | ||
| 219 | |||
| 220 | def removefile(f): | ||
| 221 | try: | ||
| 222 | os.remove(f) | ||
| 223 | except: | ||
| 224 | pass | ||
| 225 | |||
| 226 | def verify_checksum(u, ud, d): | ||
| 227 | """ | ||
| 228 | verify the MD5 and SHA256 checksum for downloaded src | ||
| 229 | |||
| 230 | return value: | ||
| 231 | - True: checksum matched | ||
| 232 | - False: checksum unmatched | ||
| 233 | |||
| 234 | if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value. | ||
| 235 | if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as | ||
| 236 | matched | ||
| 237 | """ | ||
| 238 | |||
| 239 | if not ud.type in ["http", "https", "ftp", "ftps"]: | ||
| 240 | return | ||
| 241 | |||
| 242 | md5data = bb.utils.md5_file(ud.localpath) | ||
| 243 | sha256data = bb.utils.sha256_file(ud.localpath) | ||
| 244 | |||
| 245 | if (ud.md5_expected == None or ud.sha256_expected == None): | ||
| 246 | logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n' | ||
| 247 | 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', | ||
| 248 | ud.localpath, ud.md5_name, md5data, | ||
| 249 | ud.sha256_name, sha256data) | ||
| 250 | if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1": | ||
| 251 | raise FetchError("No checksum specified for %s." % u) | ||
| 252 | return | ||
| 253 | |||
| 254 | if (ud.md5_expected != md5data or ud.sha256_expected != sha256data): | ||
| 255 | logger.error('The checksums for "%s" did not match.\n' | ||
| 256 | ' MD5: expected "%s", got "%s"\n' | ||
| 257 | ' SHA256: expected "%s", got "%s"\n', | ||
| 258 | ud.localpath, ud.md5_expected, md5data, | ||
| 259 | ud.sha256_expected, sha256data) | ||
| 260 | raise FetchError("%s checksum mismatch." % u) | ||
| 261 | |||
def go(d, urls = None):
    """
    Fetch all urls.
    init must have previously been called.

    For each URL: try PREMIRRORS, then the upstream URI, then MIRRORS,
    raising FetchError if nothing produced a usable local file.  A
    per-download lockfile serialises concurrent fetches of the same file.
    """
    if not urls:
        urls = d.getVar("SRC_URI", 1).split()
    urldata = init(urls, d, True)

    for u in urls:
        ud = urldata[u]
        m = ud.method
        localpath = ""

        # URLs with no local file (e.g. pure metadata) need no fetching.
        if not ud.localfile:
            continue

        lf = bb.utils.lockfile(ud.lockfile)
        # Bug fix: the lock was previously leaked whenever a FetchError
        # propagated out of this loop body; release it unconditionally.
        try:
            if m.try_premirror(u, ud, d):
                # First try fetching uri, u, from PREMIRRORS
                mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
                localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
            elif os.path.exists(ud.localfile):
                localpath = ud.localfile

            # Need to re-test forcefetch() which will return true if our copy is too old
            if m.forcefetch(u, ud, d) or not localpath:
                # Next try fetching from the original uri, u
                try:
                    m.go(u, ud, d)
                    localpath = ud.localpath
                except FetchError:
                    # Remove any incomplete file
                    removefile(ud.localpath)
                    # Finally, try fetching uri, u, from MIRRORS
                    mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                    localpath = try_mirrors(d, u, mirrors)
                    if not localpath or not os.path.exists(localpath):
                        raise FetchError("Unable to fetch URL %s from any source." % u)

            ud.localpath = localpath

            if os.path.exists(ud.md5):
                # Touch the md5 file to show active use of the download
                try:
                    os.utime(ud.md5, None)
                except OSError:
                    # Errors aren't fatal here (e.g. read-only download dir)
                    pass
            else:
                # Only check the checksums if we've not seen this item before
                verify_checksum(u, ud, d)
                Fetch.write_md5sum(u, ud, d)
        finally:
            bb.utils.unlockfile(lf)
| 318 | |||
def checkstatus(d, urls = None):
    """
    Check all urls exist upstream.
    init must have previously been called.

    Each URL is checked first against PREMIRRORS, then upstream, then
    MIRRORS; FetchError is raised if no source responds.
    """
    urldata = init([], d, True)

    if not urls:
        urls = urldata

    for u in urls:
        ud = urldata[u]
        m = ud.method
        logger.debug(1, "Testing URL %s", u)
        # First try checking uri, u, from PREMIRRORS
        mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
        ret = try_mirrors(d, u, mirrors, True)
        if not ret:
            # Next try checking from the original uri, u
            try:
                ret = m.checkstatus(u, ud, d)
            # Bug fix: this was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit; only fetch failures should
            # fall back to the mirror check.
            except Exception:
                # Finally, try checking uri, u, from MIRRORS
                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                ret = try_mirrors(d, u, mirrors, True)

        if not ret:
            raise FetchError("URL %s doesn't work" % u)
| 347 | |||
def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch
    """
    urldata = init([], d, True)
    return [urldata[u].localpath for u in urldata]
| 360 | |||
def get_autorev(d):
    """
    Return the placeholder revision "AUTOINC" used when SRCREV should
    track the latest upstream revision.  Unless the revision policy is
    "cache", mark this metadata as uncacheable since the resolved
    revision can change between parses.
    """
    policy = bb.data.getVar('BB_SRCREV_POLICY', d, True)
    if policy != "cache":
        bb.data.setVar('__BB_DONT_CACHE', '1', d)
    return "AUTOINC"
| 366 | |||
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """
    # Only call setup_localpath on URIs which supports_srcrev()
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)

    scms = []
    for url in urldata:
        ud = urldata[url]
        if not ud.method.supports_srcrev():
            continue
        if not ud.setup:
            ud.setup_localpath(d)
        scms.append(url)

    if not scms:
        logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    # Single SCM: its sortable revision is the answer.
    if len(scms) == 1:
        only = urldata[scms[0]]
        return only.method.sortable_revision(scms[0], only, d)

    # Multiple SCMs are in SRC_URI, so we combine the revisions using
    # SRCREV_FORMAT, substituting each named URI's revision for its name.
    format_str = bb.data.getVar('SRCREV_FORMAT', d, 1)
    if not format_str:
        logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    for scm in scms:
        scm_ud = urldata[scm]
        if 'name' in scm_ud.parm:
            name = scm_ud.parm["name"]
            rev = scm_ud.method.sortable_revision(scm, scm_ud, d)
            format_str = format_str.replace(name, rev)

    return format_str
| 409 | |||
def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classed in OE e.g. patch.bbclass

    Note: the 'cache' argument is accepted for API compatibility but is
    not referenced in this implementation.
    """
    urldata = init([url], d)
    ud = urldata[url]
    return ud.localpath if ud.method else url
| 419 | |||
def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export varaiables?
    exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
                  'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
                  'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    while True:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print(line, end=' ')
        output += line

    # close() returns None on success, or the wait()-encoded status.
    status = stdout_handle.close() or 0

    # Bug fix: the previous decoding was backwards (signal = status >> 8,
    # exitstatus = status & 0xff).  In POSIX wait() encoding the exit code
    # lives in the high byte and the terminating signal in the low bits,
    # so a command exiting with code 1 was misreported as "signal 1".
    if os.WIFSIGNALED(status):
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, os.WTERMSIG(status), output))
    elif status != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, os.WEXITSTATUS(status), output))

    return output
| 465 | |||
def try_mirrors(d, uri, mirrors, check = False, force = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
    if not check and os.access(fpath, os.R_OK) and not force:
        logger.debug(1, "%s already exists, skipping checkout.", fpath)
        return fpath

    ld = d.createCopy()
    for find, replace in mirrors:
        newuri = uri_replace(uri, find, replace, ld)
        # A substitution that changed nothing is not a usable mirror.
        if newuri == uri:
            continue

        try:
            ud = FetchData(newuri, ld)
        except bb.fetch2.NoMethodError:
            logger.debug(1, "No method for %s", uri)
            continue

        ud.setup_localpath(ld)

        try:
            if check:
                found = ud.method.checkstatus(newuri, ud, ld)
                if found:
                    return found
            else:
                ud.method.go(newuri, ud, ld)
                return ud.localpath
        except (bb.fetch2.MissingParameterError,
                bb.fetch2.FetchError,
                bb.fetch2.MD5SumError):
            import sys
            # Avoid shadowing the builtins type/traceback here.
            exc_value = sys.exc_info()[1]
            logger.debug(2, "Mirror fetch failure: %s", exc_value)
            # Discard any partial download before trying the next mirror.
            removefile(ud.localpath)
            continue
    return None
| 509 | |||
| 510 | |||
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.

    Decodes the URL into its components, records expected checksums from
    SRC_URI flags, and binds the first registered fetch method that
    claims the URL.  setup_localpath() must be called before localpath,
    md5 or lockfile are used.
    """
    def __init__(self, url, d):
        self.localfile = ""
        # Split the (expanded) URL into scheme/host/path/credentials/params.
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        # Note: self.parm must be populated before this call reads it.
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        # URL-embedded credentials win; fall back to user=/pswd= parameters.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        # setup flips to True once setup_localpath() has run.
        self.setup = False

        # Named URIs keep per-name checksum flags (SRC_URI[name.md5sum]).
        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
        self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)

        # Bind the first fetcher that supports this URL; order of the
        # module-level 'methods' list therefore determines precedence.
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                if hasattr(m,"urldata_init"):
                    m.urldata_init(self, d)
                if m.supports_srcrev():
                    self.revision = Fetch.srcrev_internal_helper(self, d);
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        """
        Work out and record the local download path for this URL, plus
        the derived .md5 stamp and .lock file paths under DL_DIR.
        """
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        else:
            premirrors = bb.data.getVar('PREMIRRORS', d, True)
            local = ""
            # Prefer an already-present file:// premirror copy if one matches.
            if premirrors and self.url:
                aurl = self.url.split(";")[0]
                mirrors = mirror_from_string(premirrors)
                for (find, replace) in mirrors:
                    if replace.startswith("file://"):
                        path = aurl.split("://")[1]
                        path = path.split(";")[0]
                        local = replace.split("://")[1] + os.path.basename(path)
                        if local == aurl or not os.path.exists(local) or os.path.isdir(local):
                            local = ""
                self.localpath = local
            if not local:
                self.localpath = self.method.localpath(self.url, self, d)
                # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
                # Horrible...
                bb.data.delVar("ISHOULDNEVEREXIST", d)

        if self.localpath is not None:
            # Note: These files should always be in DL_DIR whereas localpath may not be.
            basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
            self.md5 = basepath + '.md5'
            self.lockfile = basepath + '.lock'
| 576 | |||
| 577 | |||
class Fetch(object):
    """Base class for 'fetch'ing data.

    Concrete fetchers (git, svn, wget, ...) subclass this and override
    supports(), localpath(), go() and friends.  Static helpers here
    implement SRCDATE/SRCREV/LOCALCOUNT lookups shared by all fetchers.
    """

    def __init__(self, urls=None):
        # Bug fix: the default was a shared mutable list ([]), a classic
        # Python pitfall.  The argument is not otherwise used (the list
        # always starts empty), so None is a safe, compatible default.
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        Returns 0 (unsupported) here; subclasses claim their schemes.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def try_premirror(self, url, urldata, d):
        """
        Should premirrors be used?
        Yes, unless we already hold both the download and its md5 stamp.
        """
        if urldata.method.forcefetch(url, urldata, d):
            return True
        elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
            return False
        else:
            return True

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        # Per-recipe variables take precedence over the global settings.
        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def srcrev_internal_helper(ud, d):
        """
        Return:
            a) a source revision if specified
            b) latest revision if SRCREV="AUTOINC"
            c) None if not specified
        """

        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            # Named URIs allow per-SCM revisions in multi-SCM recipes.
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if rev == "AUTOINC":
            rev = ud.method.latest_revision(ud.url, ud, d)

        return rev

    srcrev_internal_helper = staticmethod(srcrev_internal_helper)

    def localcount_internal_helper(ud, d):
        """
        Return:
            a) a locked localcount if specified
            b) None otherwise
        """

        localcount = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
        if not localcount:
            localcount = data.getVar("LOCALCOUNT", d, 1)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted with the one we got.
        Returns True when no md5sum= parameter was given (nothing to check).
        """
        wanted_sum = ud.parm.get('md5sum')
        if not wanted_sum:
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        """Verify (if requested) and record the md5 of the fetched file."""
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        # Bug fix: use the builtin open() instead of the Python-2-only
        # file() constructor, and guarantee the handle is closed even if
        # the write fails.
        md5out = open(ud.md5, 'w')
        try:
            md5out.write(md5data)
        finally:
            md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        pd = persist_data.persist(d)
        revs = pd['BB_URI_HEADREVS']
        key = self.generate_revision_key(url, ud, d)
        rev = revs[key]
        # Idiom fix: identity comparison for None ('!= None' before).
        if rev is not None:
            return str(rev)

        revs[key] = rev = self._latest_revision(url, ud, d)
        return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a monotonically sortable revision string of the form
        "<count>+<rev>", maintaining a per-key local build count so the
        result increases even when the SCM revision is not ordered.
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.persist(d)
        localcounts = pd['BB_URI_LOCALCOUNT']
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = localcounts[key + '_rev']
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = localcounts[key + '_count']

        # Unchanged revision: reuse the stored count.
        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            # New revision seen: bump the local counter.
            count = str(int(count) + 1)

        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count

        return str(count + "+" + latest_rev)

    def generate_revision_key(self, url, ud, d):
        """Key persisted revision data on the fetcher-specific key plus PN."""
        key = self._revision_key(url, ud, d)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
| 799 | |||
| 800 | from . import cvs | ||
| 801 | from . import git | ||
| 802 | from . import local | ||
| 803 | from . import svn | ||
| 804 | from . import wget | ||
| 805 | from . import svk | ||
| 806 | from . import ssh | ||
| 807 | from . import perforce | ||
| 808 | from . import bzr | ||
| 809 | from . import hg | ||
| 810 | from . import osc | ||
| 811 | from . import repo | ||
| 812 | |||
# Instantiate and register every fetcher implementation.  Order matters:
# FetchData binds the first method whose supports() claims a URL, so
# earlier entries take precedence.
for _fetcher_cls in (local.Local, wget.Wget, svn.Svn, git.Git,
                     cvs.Cvs, svk.Svk, ssh.SSH, perforce.Perforce,
                     bzr.Bzr, hg.Hg, osc.Osc, repo.Repo):
    methods.append(_fetcher_cls())
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py new file mode 100644 index 0000000000..97b042b2a5 --- /dev/null +++ b/bitbake/lib/bb/fetch2/bzr.py | |||
| @@ -0,0 +1,145 @@ | |||
| 1 | """ | ||
| 2 | BitBake 'Fetch' implementation for bzr. | ||
| 3 | |||
| 4 | """ | ||
| 5 | |||
| 6 | # Copyright (C) 2007 Ross Burton | ||
| 7 | # Copyright (C) 2007 Richard Purdie | ||
| 8 | # | ||
| 9 | # Classes for obtaining upstream sources for the | ||
| 10 | # BitBake build tools. | ||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | |||
| 26 | import os | ||
| 27 | import sys | ||
| 28 | import logging | ||
| 29 | import bb | ||
| 30 | from bb import data | ||
| 31 | from bb.fetch2 import Fetch, FetchError, runfetchcmd, logger | ||
| 32 | |||
| 33 | class Bzr(Fetch): | ||
| 34 | def supports(self, url, ud, d): | ||
| 35 | return ud.type in ['bzr'] | ||
| 36 | |||
| 37 | def urldata_init(self, ud, d): | ||
| 38 | """ | ||
| 39 | init bzr specific variable within url data | ||
| 40 | """ | ||
| 41 | # Create paths to bzr checkouts | ||
| 42 | relpath = self._strip_leading_slashes(ud.path) | ||
| 43 | ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) | ||
| 44 | |||
| 45 | def localpath (self, url, ud, d): | ||
| 46 | if not ud.revision: | ||
| 47 | ud.revision = self.latest_revision(url, ud, d) | ||
| 48 | |||
| 49 | ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) | ||
| 50 | |||
| 51 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 52 | |||
| 53 | def _buildbzrcommand(self, ud, d, command): | ||
| 54 | """ | ||
| 55 | Build up an bzr commandline based on ud | ||
| 56 | command is "fetch", "update", "revno" | ||
| 57 | """ | ||
| 58 | |||
| 59 | basecmd = data.expand('${FETCHCMD_bzr}', d) | ||
| 60 | |||
| 61 | proto = ud.parm.get('proto', 'http') | ||
| 62 | |||
| 63 | bzrroot = ud.host + ud.path | ||
| 64 | |||
| 65 | options = [] | ||
| 66 | |||
| 67 | if command is "revno": | ||
| 68 | bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) | ||
| 69 | else: | ||
| 70 | if ud.revision: | ||
| 71 | options.append("-r %s" % ud.revision) | ||
| 72 | |||
| 73 | if command is "fetch": | ||
| 74 | bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) | ||
| 75 | elif command is "update": | ||
| 76 | bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options)) | ||
| 77 | else: | ||
| 78 | raise FetchError("Invalid bzr command %s" % command) | ||
| 79 | |||
| 80 | return bzrcmd | ||
| 81 | |||
| 82 | def go(self, loc, ud, d): | ||
| 83 | """Fetch url""" | ||
| 84 | |||
| 85 | if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): | ||
| 86 | bzrcmd = self._buildbzrcommand(ud, d, "update") | ||
| 87 | logger.debug(1, "BZR Update %s", loc) | ||
| 88 | os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) | ||
| 89 | runfetchcmd(bzrcmd, d) | ||
| 90 | else: | ||
| 91 | bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) | ||
| 92 | bzrcmd = self._buildbzrcommand(ud, d, "fetch") | ||
| 93 | logger.debug(1, "BZR Checkout %s", loc) | ||
| 94 | bb.mkdirhier(ud.pkgdir) | ||
| 95 | os.chdir(ud.pkgdir) | ||
| 96 | logger.debug(1, "Running %s", bzrcmd) | ||
| 97 | runfetchcmd(bzrcmd, d) | ||
| 98 | |||
| 99 | os.chdir(ud.pkgdir) | ||
| 100 | |||
| 101 | scmdata = ud.parm.get("scmdata", "") | ||
| 102 | if scmdata == "keep": | ||
| 103 | tar_flags = "" | ||
| 104 | else: | ||
| 105 | tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" | ||
| 106 | |||
| 107 | # tar them up to a defined filename | ||
| 108 | try: | ||
| 109 | runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d) | ||
| 110 | except: | ||
| 111 | t, v, tb = sys.exc_info() | ||
| 112 | try: | ||
| 113 | os.unlink(ud.localpath) | ||
| 114 | except OSError: | ||
| 115 | pass | ||
| 116 | raise t, v, tb | ||
| 117 | |||
| 118 | def supports_srcrev(self): | ||
| 119 | return True | ||
| 120 | |||
| 121 | def _revision_key(self, url, ud, d): | ||
| 122 | """ | ||
| 123 | Return a unique key for the url | ||
| 124 | """ | ||
| 125 | return "bzr:" + ud.pkgdir | ||
| 126 | |||
| 127 | def _latest_revision(self, url, ud, d): | ||
| 128 | """ | ||
| 129 | Return the latest upstream revision number | ||
| 130 | """ | ||
| 131 | logger.debug(2, "BZR fetcher hitting network for %s", url) | ||
| 132 | |||
| 133 | output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) | ||
| 134 | |||
| 135 | return output.strip() | ||
| 136 | |||
| 137 | def _sortable_revision(self, url, ud, d): | ||
| 138 | """ | ||
| 139 | Return a sortable revision number which in our case is the revision number | ||
| 140 | """ | ||
| 141 | |||
| 142 | return self._build_revision(url, ud, d) | ||
| 143 | |||
| 144 | def _build_revision(self, url, ud, d): | ||
| 145 | return ud.revision | ||
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py new file mode 100644 index 0000000000..1570cab9c3 --- /dev/null +++ b/bitbake/lib/bb/fetch2/cvs.py | |||
| @@ -0,0 +1,172 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | #Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | # | ||
| 28 | |||
| 29 | import os | ||
| 30 | import logging | ||
| 31 | import bb | ||
| 32 | from bb import data | ||
| 33 | from bb.fetch2 import Fetch, FetchError, MissingParameterError, logger | ||
| 34 | |||
class Cvs(Fetch):
    """
    Class to fetch a module or modules from cvs repositories
    """
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with cvs.
        """
        return ud.type in ['cvs']

    def localpath(self, url, ud, d):
        """
        Derive the local tarball name from module, host, tag and date;
        requires a 'module' parameter on the URL.
        """
        if not "module" in ud.parm:
            raise MissingParameterError("cvs method needs a 'module' parameter")
        ud.module = ud.parm["module"]

        ud.tag = ud.parm.get('tag', "")

        # Override the default date in certain cases
        if 'date' in ud.parm:
            ud.date = ud.parm['date']
        elif ud.tag:
            # A tag pins the checkout, so the date becomes irrelevant.
            ud.date = ""

        norecurse = ''
        if 'norecurse' in ud.parm:
            norecurse = '_norecurse'

        fullpath = ''
        if 'fullpath' in ud.parm:
            fullpath = '_fullpath'

        ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        # date=now means "track head", so the cached copy is never current.
        if (ud.date == "now"):
            return True
        return False

    def go(self, loc, ud, d):
        """
        Check out (or update) the module, then tar the result into
        ud.localpath.  Builds a CVSROOT from the URL's method, optional
        proxy settings, credentials, port and path.
        """

        method = ud.parm.get('method', 'pserver')
        localdir = ud.parm.get('localdir', ud.module)
        cvs_port = ud.parm.get('port', '')

        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        if method == "dir":
            # Local repository: CVSROOT is just the path.
            cvsroot = ud.path
        else:
            # Assemble ":method[;proxy=..][;proxyport=..]:user[:pswd]@host:port/path".
            cvsroot = ":" + method
            cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        # Apply cvs-specific overrides so FETCHCOMMAND/UPDATECOMMAND expand
        # with the CVSROOT/CVSCOOPTS/CVSMODULE values set below.
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        data.setVar('CVSROOT', cvsroot, localdata)
        data.setVar('CVSCOOPTS', " ".join(options), localdata)
        data.setVar('CVSMODULE', ud.module, localdata)
        cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        logger.debug(2, "Fetch: checking for module directory")
        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            logger.info("Update " + loc)
            # update sources there
            os.chdir(moddir)
            myret = os.system(cvsupdatecmd)
        else:
            logger.info("Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            logger.debug(1, "Running %s", cvscmd)
            myret = os.system(cvscmd)

        if myret != 0 or not os.access(moddir, os.R_OK):
            # Checkout failed: remove the (empty) module dir if possible.
            try:
                os.rmdir(moddir)
            except OSError:
                pass
            raise FetchError(ud.module)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude 'CVS'"

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            # Archive the full localdir path relative to pkgdir.
            os.chdir(pkgdir)
            myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
        else:
            # Archive just the module directory itself.
            os.chdir(moddir)
            os.chdir('..')
            myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))

        if myret != 0:
            # Don't leave a truncated tarball behind on failure.
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py new file mode 100644 index 0000000000..c62145770f --- /dev/null +++ b/bitbake/lib/bb/fetch2/git.py | |||
| @@ -0,0 +1,260 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' git implementation | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | #Copyright (C) 2005 Richard Purdie | ||
| 9 | # | ||
| 10 | # This program is free software; you can redistribute it and/or modify | ||
| 11 | # it under the terms of the GNU General Public License version 2 as | ||
| 12 | # published by the Free Software Foundation. | ||
| 13 | # | ||
| 14 | # This program is distributed in the hope that it will be useful, | ||
| 15 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 16 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 17 | # GNU General Public License for more details. | ||
| 18 | # | ||
| 19 | # You should have received a copy of the GNU General Public License along | ||
| 20 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 21 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 22 | |||
| 23 | import os | ||
| 24 | import bb | ||
| 25 | from bb import data | ||
| 26 | from bb.fetch2 import Fetch | ||
| 27 | from bb.fetch2 import runfetchcmd | ||
| 28 | from bb.fetch2 import logger | ||
| 29 | |||
class Git(Fetch):
    """Class to fetch a module or modules from git repositories

    Maintains a shared clone under ${GITDIR} and packages either a
    checkout of a single revision or the whole repository (fullclone)
    into a tarball in ${DL_DIR}.
    """
    def init(self, d):
        #
        # Only enable _sortable revision if the key is set
        #
        # When BB_GIT_CLONE_FOR_SRCREV is set, swap in the rev-list-counting
        # implementation defined below (despite its "_disabled" name).
        if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
            self._sortable_buildindex = self._sortable_buildindex_disabled
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def urldata_init(self, ud, d):
        """
        init git specific variable within url data
        so that the git method like latest_revision() can work
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            # No host part: a local repository on disk.
            ud.proto = 'file'
        else:
            ud.proto = "rsync"

        ud.branch = ud.parm.get("branch", "master")

        # Per-repository identifier: host plus path with '/' flattened to '.'.
        # Used to name both the clone directory and the mirror tarball.
        gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
        ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
        ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

    def localpath(self, url, ud, d):
        """Return the download-cache path for this url; None for noclone."""
        # Resolve an unset/"master" revision to a concrete commit so the
        # local file name is stable and unambiguous.
        ud.tag = ud.revision
        if not ud.tag or ud.tag == "master":
            ud.tag = self.latest_revision(url, ud, d)

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            if subdir.endswith("/"):
                subdir = subdir[:-1]
            subdirpath = os.path.join(ud.path, subdir);
        else:
            subdirpath = ud.path;

        if 'fullclone' in ud.parm:
            # fullclone reuses the mirror tarball of the whole repository.
            ud.localfile = ud.mirrortarball
        else:
            ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)

        if 'noclone' in ud.parm:
            ud.localfile = None
            return None

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        """Decide whether go() must run even though a localpath exists."""
        if 'fullclone' in ud.parm:
            return True
        if 'noclone' in ud.parm:
            return False
        if os.path.exists(ud.localpath):
            return False
        # No tarball yet: fetch unless the clone already has the revision.
        if not self._contains_ref(ud.tag, d):
            return True
        return False

    def try_premirror(self, u, ud, d):
        """Premirrors are only worth trying when we have neither a clone
        nor a previously downloaded tarball."""
        if 'noclone' in ud.parm:
            return False
        if os.path.exists(ud.clonedir):
            return False
        if os.path.exists(ud.localpath):
            return False

        return True

    def go(self, loc, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)

        # Scratch checkout directory, named after the revision.
        coname = '%s' % (ud.tag)
        codir = os.path.join(ud.clonedir, coname)

        # If we have no existing clone and no mirror tarball, try and obtain one
        if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
            try:
                # NOTE(review): called without the usual datastore/urldata
                # arguments -- verify against Fetch.try_mirrors' signature;
                # any failure here is deliberately ignored (best effort).
                Fetch.try_mirrors(ud.mirrortarball)
            except:
                pass

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
            bb.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (repofile), d)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
            # Remove all but the .git directory ('*' does not match dotfiles)
            runfetchcmd("rm * -Rf", d)
            if 'fullclone' in ud.parm:
                runfetchcmd("%s fetch --all" % (ud.basecmd), d)
            else:
                runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
            runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
            # Compact the object store after fetching.
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

        # Generate a mirror tarball if needed
        os.chdir(ud.clonedir)
        mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
        if mirror_tarballs != "0" or 'fullclone' in ud.parm:
            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

        if 'fullclone' in ud.parm:
            # The mirror tarball IS the localfile; nothing more to do.
            return

        if os.path.exists(codir):
            bb.utils.prunedir(codir)

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            if subdir.endswith("/"):
                subdirbase = os.path.basename(subdir[:-1])
            else:
                subdirbase = os.path.basename(subdir)
        else:
            subdirbase = ""

        if subdir != "":
            # Restrict read-tree/checkout to the requested subpath.
            readpathspec = ":%s" % (subdir)
            codir = os.path.join(codir, "git")
            coprefix = os.path.join(codir, subdirbase, "")
        else:
            readpathspec = ""
            coprefix = os.path.join(codir, "git", "")

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            # Keep .git metadata: make a real (local) clone, then checkout.
            runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
            os.chdir(coprefix)
            runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
        else:
            # Metadata-free export via the index.
            bb.mkdirhier(codir)
            os.chdir(ud.clonedir)
            runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
            runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)

        os.chdir(codir)
        logger.info("Creating tarball of git checkout")
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

        os.chdir(ud.clonedir)
        bb.utils.prunedir(codir)

    def supports_srcrev(self):
        return True

    def _contains_ref(self, tag, d):
        # True if the clone already contains `tag`. Relies on the current
        # working directory being inside the clone (callers chdir first).
        basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
        output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
        return output.split()[0] != "0"

    def _revision_key(self, url, ud, d):
        """
        Return a unique key for the url
        """
        return "git:" + ud.host + ud.path.replace('/', '.') + ud.branch

    def _latest_revision(self, url, ud, d):
        """
        Compute the HEAD revision for the url

        Raises FetchError when ls-remote produces no output.
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
        cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
        output = runfetchcmd(cmd, d, True)
        if not output:
            raise bb.fetch2.FetchError("Fetch command %s gave empty output\n" % (cmd))
        # First whitespace-separated field of the first line is the sha1.
        return output.split()[0]

    def _build_revision(self, url, ud, d):
        return ud.tag

    def _sortable_buildindex_disabled(self, url, ud, d, rev):
        """
        Return a suitable buildindex for the revision specified. This is done by counting revisions
        using "git rev-list" which may or may not work in different circumstances.
        """

        cwd = os.getcwd()

        # Check if we have the rev already

        if not os.path.exists(ud.clonedir):
            print("no repo")  # NOTE(review): looks like leftover debug output
            self.go(None, ud, d)
            if not os.path.exists(ud.clonedir):
                logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
                return None


        os.chdir(ud.clonedir)
        if not self._contains_ref(rev, d):
            self.go(None, ud, d)

        # Buildindex = number of commits reachable from rev.
        output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
        os.chdir(cwd)

        buildindex = "%s" % output.split()[0]
        logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
        return buildindex
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py new file mode 100644 index 0000000000..0ba84330a5 --- /dev/null +++ b/bitbake/lib/bb/fetch2/hg.py | |||
| @@ -0,0 +1,178 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementation for mercurial DRCS (hg). | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | # Copyright (C) 2004 Marcin Juszkiewicz | ||
| 10 | # Copyright (C) 2007 Robert Schuster | ||
| 11 | # | ||
| 12 | # This program is free software; you can redistribute it and/or modify | ||
| 13 | # it under the terms of the GNU General Public License version 2 as | ||
| 14 | # published by the Free Software Foundation. | ||
| 15 | # | ||
| 16 | # This program is distributed in the hope that it will be useful, | ||
| 17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 19 | # GNU General Public License for more details. | ||
| 20 | # | ||
| 21 | # You should have received a copy of the GNU General Public License along | ||
| 22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 24 | # | ||
| 25 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 26 | |||
| 27 | import os | ||
| 28 | import sys | ||
| 29 | import logging | ||
| 30 | import bb | ||
| 31 | from bb import data | ||
| 32 | from bb.fetch2 import Fetch | ||
| 33 | from bb.fetch2 import FetchError | ||
| 34 | from bb.fetch2 import MissingParameterError | ||
| 35 | from bb.fetch2 import runfetchcmd | ||
| 36 | from bb.fetch2 import logger | ||
| 37 | |||
| 38 | class Hg(Fetch): | ||
| 39 | """Class to fetch from mercurial repositories""" | ||
| 40 | def supports(self, url, ud, d): | ||
| 41 | """ | ||
| 42 | Check to see if a given url can be fetched with mercurial. | ||
| 43 | """ | ||
| 44 | return ud.type in ['hg'] | ||
| 45 | |||
| 46 | def urldata_init(self, ud, d): | ||
| 47 | """ | ||
| 48 | init hg specific variable within url data | ||
| 49 | """ | ||
| 50 | if not "module" in ud.parm: | ||
| 51 | raise MissingParameterError("hg method needs a 'module' parameter") | ||
| 52 | |||
| 53 | ud.module = ud.parm["module"] | ||
| 54 | |||
| 55 | # Create paths to mercurial checkouts | ||
| 56 | relpath = self._strip_leading_slashes(ud.path) | ||
| 57 | ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath) | ||
| 58 | ud.moddir = os.path.join(ud.pkgdir, ud.module) | ||
| 59 | |||
| 60 | def forcefetch(self, url, ud, d): | ||
| 61 | revTag = ud.parm.get('rev', 'tip') | ||
| 62 | return revTag == "tip" | ||
| 63 | |||
| 64 | def localpath(self, url, ud, d): | ||
| 65 | if 'rev' in ud.parm: | ||
| 66 | ud.revision = ud.parm['rev'] | ||
| 67 | elif not ud.revision: | ||
| 68 | ud.revision = self.latest_revision(url, ud, d) | ||
| 69 | |||
| 70 | ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) | ||
| 71 | |||
| 72 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 73 | |||
| 74 | def _buildhgcommand(self, ud, d, command): | ||
| 75 | """ | ||
| 76 | Build up an hg commandline based on ud | ||
| 77 | command is "fetch", "update", "info" | ||
| 78 | """ | ||
| 79 | |||
| 80 | basecmd = data.expand('${FETCHCMD_hg}', d) | ||
| 81 | |||
| 82 | proto = ud.parm.get('proto', 'http') | ||
| 83 | |||
| 84 | host = ud.host | ||
| 85 | if proto == "file": | ||
| 86 | host = "/" | ||
| 87 | ud.host = "localhost" | ||
| 88 | |||
| 89 | if not ud.user: | ||
| 90 | hgroot = host + ud.path | ||
| 91 | else: | ||
| 92 | hgroot = ud.user + "@" + host + ud.path | ||
| 93 | |||
| 94 | if command is "info": | ||
| 95 | return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module) | ||
| 96 | |||
| 97 | options = []; | ||
| 98 | if ud.revision: | ||
| 99 | options.append("-r %s" % ud.revision) | ||
| 100 | |||
| 101 | if command is "fetch": | ||
| 102 | cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module) | ||
| 103 | elif command is "pull": | ||
| 104 | # do not pass options list; limiting pull to rev causes the local | ||
| 105 | # repo not to contain it and immediately following "update" command | ||
| 106 | # will crash | ||
| 107 | cmd = "%s pull" % (basecmd) | ||
| 108 | elif command is "update": | ||
| 109 | cmd = "%s update -C %s" % (basecmd, " ".join(options)) | ||
| 110 | else: | ||
| 111 | raise FetchError("Invalid hg command %s" % command) | ||
| 112 | |||
| 113 | return cmd | ||
| 114 | |||
| 115 | def go(self, loc, ud, d): | ||
| 116 | """Fetch url""" | ||
| 117 | |||
| 118 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | ||
| 119 | |||
| 120 | if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): | ||
| 121 | updatecmd = self._buildhgcommand(ud, d, "pull") | ||
| 122 | logger.info("Update " + loc) | ||
| 123 | # update sources there | ||
| 124 | os.chdir(ud.moddir) | ||
| 125 | logger.debug(1, "Running %s", updatecmd) | ||
| 126 | runfetchcmd(updatecmd, d) | ||
| 127 | |||
| 128 | else: | ||
| 129 | fetchcmd = self._buildhgcommand(ud, d, "fetch") | ||
| 130 | logger.info("Fetch " + loc) | ||
| 131 | # check out sources there | ||
| 132 | bb.mkdirhier(ud.pkgdir) | ||
| 133 | os.chdir(ud.pkgdir) | ||
| 134 | logger.debug(1, "Running %s", fetchcmd) | ||
| 135 | runfetchcmd(fetchcmd, d) | ||
| 136 | |||
| 137 | # Even when we clone (fetch), we still need to update as hg's clone | ||
| 138 | # won't checkout the specified revision if its on a branch | ||
| 139 | updatecmd = self._buildhgcommand(ud, d, "update") | ||
| 140 | os.chdir(ud.moddir) | ||
| 141 | logger.debug(1, "Running %s", updatecmd) | ||
| 142 | runfetchcmd(updatecmd, d) | ||
| 143 | |||
| 144 | scmdata = ud.parm.get("scmdata", "") | ||
| 145 | if scmdata == "keep": | ||
| 146 | tar_flags = "" | ||
| 147 | else: | ||
| 148 | tar_flags = "--exclude '.hg' --exclude '.hgrags'" | ||
| 149 | |||
| 150 | os.chdir(ud.pkgdir) | ||
| 151 | try: | ||
| 152 | runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) | ||
| 153 | except: | ||
| 154 | t, v, tb = sys.exc_info() | ||
| 155 | try: | ||
| 156 | os.unlink(ud.localpath) | ||
| 157 | except OSError: | ||
| 158 | pass | ||
| 159 | raise t, v, tb | ||
| 160 | |||
| 161 | def supports_srcrev(self): | ||
| 162 | return True | ||
| 163 | |||
| 164 | def _latest_revision(self, url, ud, d): | ||
| 165 | """ | ||
| 166 | Compute tip revision for the url | ||
| 167 | """ | ||
| 168 | output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) | ||
| 169 | return output.strip() | ||
| 170 | |||
| 171 | def _build_revision(self, url, ud, d): | ||
| 172 | return ud.revision | ||
| 173 | |||
| 174 | def _revision_key(self, url, ud, d): | ||
| 175 | """ | ||
| 176 | Return a unique key for the url | ||
| 177 | """ | ||
| 178 | return "hg:" + ud.moddir | ||
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py new file mode 100644 index 0000000000..bcb30dfc95 --- /dev/null +++ b/bitbake/lib/bb/fetch2/local.py | |||
| @@ -0,0 +1,73 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | |||
| 28 | import os | ||
| 29 | import bb | ||
| 30 | import bb.utils | ||
| 31 | from bb import data | ||
| 32 | from bb.fetch2 import Fetch | ||
| 33 | |||
class Local(Fetch):
    """Fetcher for file:// URLs: the file already exists locally, so
    localpath() resolves it against FILESPATH/FILESDIR and go() is a no-op."""

    def supports(self, url, urldata, d):
        """
        Check to see if a given url represents a local fetch.
        """
        return urldata.type in ['file']

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        path = url.split("://")[1]
        path = path.split(";")[0]
        newpath = path
        if path[0] != "/":
            # Relative path: search FILESPATH first, then fall back to
            # FILESDIR.
            filespath = data.getVar('FILESPATH', d, 1)
            if filespath:
                newpath = bb.utils.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, 1)
                if filesdir:
                    newpath = os.path.join(filesdir, path)
        # We don't set localfile as for this fetcher the file is already local!
        return newpath

    def go(self, url, urldata, d):
        """Fetch urls (no-op for Local method)"""
        # no need to fetch local files, we'll deal with them in place.
        return 1

    def checkstatus(self, url, urldata, d):
        """
        Check the status of the url
        """
        # Fix: 'logger' is not imported at module level in this file, so the
        # glob branch below raised NameError; import it locally instead.
        from bb.fetch2 import logger
        if urldata.localpath.find("*") != -1:
            logger.info("URL %s looks like a glob and was therefore not checked.", url)
            return True
        if os.path.exists(urldata.localpath):
            return True
        return False
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py new file mode 100644 index 0000000000..06ac5a9ce3 --- /dev/null +++ b/bitbake/lib/bb/fetch2/osc.py | |||
| @@ -0,0 +1,143 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | Bitbake "Fetch" implementation for osc (Opensuse build service client). | ||
| 5 | Based on the svn "Fetch" implementation. | ||
| 6 | |||
| 7 | """ | ||
| 8 | |||
| 9 | import os | ||
| 10 | import sys | ||
| 11 | import logging | ||
| 12 | import bb | ||
| 13 | from bb import data | ||
| 14 | from bb.fetch2 import Fetch | ||
| 15 | from bb.fetch2 import FetchError | ||
| 16 | from bb.fetch2 import MissingParameterError | ||
| 17 | from bb.fetch2 import runfetchcmd | ||
| 18 | |||
| 19 | class Osc(Fetch): | ||
| 20 | """Class to fetch a module or modules from Opensuse build server | ||
| 21 | repositories.""" | ||
| 22 | |||
| 23 | def supports(self, url, ud, d): | ||
| 24 | """ | ||
| 25 | Check to see if a given url can be fetched with osc. | ||
| 26 | """ | ||
| 27 | return ud.type in ['osc'] | ||
| 28 | |||
| 29 | def localpath(self, url, ud, d): | ||
| 30 | if not "module" in ud.parm: | ||
| 31 | raise MissingParameterError("osc method needs a 'module' parameter.") | ||
| 32 | |||
| 33 | ud.module = ud.parm["module"] | ||
| 34 | |||
| 35 | # Create paths to osc checkouts | ||
| 36 | relpath = self._strip_leading_slashes(ud.path) | ||
| 37 | ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host) | ||
| 38 | ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) | ||
| 39 | |||
| 40 | if 'rev' in ud.parm: | ||
| 41 | ud.revision = ud.parm['rev'] | ||
| 42 | else: | ||
| 43 | pv = data.getVar("PV", d, 0) | ||
| 44 | rev = Fetch.srcrev_internal_helper(ud, d) | ||
| 45 | if rev and rev != True: | ||
| 46 | ud.revision = rev | ||
| 47 | else: | ||
| 48 | ud.revision = "" | ||
| 49 | |||
| 50 | ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d) | ||
| 51 | |||
| 52 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 53 | |||
| 54 | def _buildosccommand(self, ud, d, command): | ||
| 55 | """ | ||
| 56 | Build up an ocs commandline based on ud | ||
| 57 | command is "fetch", "update", "info" | ||
| 58 | """ | ||
| 59 | |||
| 60 | basecmd = data.expand('${FETCHCMD_osc}', d) | ||
| 61 | |||
| 62 | proto = ud.parm.get('proto', 'ocs') | ||
| 63 | |||
| 64 | options = [] | ||
| 65 | |||
| 66 | config = "-c %s" % self.generate_config(ud, d) | ||
| 67 | |||
| 68 | if ud.revision: | ||
| 69 | options.append("-r %s" % ud.revision) | ||
| 70 | |||
| 71 | coroot = self._strip_leading_slashes(ud.path) | ||
| 72 | |||
| 73 | if command is "fetch": | ||
| 74 | osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) | ||
| 75 | elif command is "update": | ||
| 76 | osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) | ||
| 77 | else: | ||
| 78 | raise FetchError("Invalid osc command %s" % command) | ||
| 79 | |||
| 80 | return osccmd | ||
| 81 | |||
| 82 | def go(self, loc, ud, d): | ||
| 83 | """ | ||
| 84 | Fetch url | ||
| 85 | """ | ||
| 86 | |||
| 87 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | ||
| 88 | |||
| 89 | if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): | ||
| 90 | oscupdatecmd = self._buildosccommand(ud, d, "update") | ||
| 91 | logger.info("Update "+ loc) | ||
| 92 | # update sources there | ||
| 93 | os.chdir(ud.moddir) | ||
| 94 | logger.debug(1, "Running %s", oscupdatecmd) | ||
| 95 | runfetchcmd(oscupdatecmd, d) | ||
| 96 | else: | ||
| 97 | oscfetchcmd = self._buildosccommand(ud, d, "fetch") | ||
| 98 | logger.info("Fetch " + loc) | ||
| 99 | # check out sources there | ||
| 100 | bb.mkdirhier(ud.pkgdir) | ||
| 101 | os.chdir(ud.pkgdir) | ||
| 102 | logger.debug(1, "Running %s", oscfetchcmd) | ||
| 103 | runfetchcmd(oscfetchcmd, d) | ||
| 104 | |||
| 105 | os.chdir(os.path.join(ud.pkgdir + ud.path)) | ||
| 106 | # tar them up to a defined filename | ||
| 107 | try: | ||
| 108 | runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) | ||
| 109 | except: | ||
| 110 | t, v, tb = sys.exc_info() | ||
| 111 | try: | ||
| 112 | os.unlink(ud.localpath) | ||
| 113 | except OSError: | ||
| 114 | pass | ||
| 115 | raise t, v, tb | ||
| 116 | |||
| 117 | def supports_srcrev(self): | ||
| 118 | return False | ||
| 119 | |||
| 120 | def generate_config(self, ud, d): | ||
| 121 | """ | ||
| 122 | Generate a .oscrc to be used for this run. | ||
| 123 | """ | ||
| 124 | |||
| 125 | config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc") | ||
| 126 | if (os.path.exists(config_path)): | ||
| 127 | os.remove(config_path) | ||
| 128 | |||
| 129 | f = open(config_path, 'w') | ||
| 130 | f.write("[general]\n") | ||
| 131 | f.write("apisrv = %s\n" % ud.host) | ||
| 132 | f.write("scheme = http\n") | ||
| 133 | f.write("su-wrapper = su -c\n") | ||
| 134 | f.write("build-root = %s\n" % data.expand('${WORKDIR}', d)) | ||
| 135 | f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n") | ||
| 136 | f.write("extra-pkgs = gzip\n") | ||
| 137 | f.write("\n") | ||
| 138 | f.write("[%s]\n" % ud.host) | ||
| 139 | f.write("user = %s\n" % ud.parm["user"]) | ||
| 140 | f.write("pass = %s\n" % ud.parm["pswd"]) | ||
| 141 | f.close() | ||
| 142 | |||
| 143 | return config_path | ||
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py new file mode 100644 index 0000000000..18b27812e0 --- /dev/null +++ b/bitbake/lib/bb/fetch2/perforce.py | |||
| @@ -0,0 +1,206 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | |||
| 28 | from future_builtins import zip | ||
| 29 | import os | ||
| 30 | import logging | ||
| 31 | import bb | ||
| 32 | from bb import data | ||
| 33 | from bb.fetch2 import Fetch | ||
| 34 | from bb.fetch2 import FetchError | ||
| 35 | from bb.fetch2 import logger | ||
| 36 | |||
| 37 | class Perforce(Fetch): | ||
| 38 | def supports(self, url, ud, d): | ||
| 39 | return ud.type in ['p4'] | ||
| 40 | |||
    def doparse(url, d):
        # Parse a p4:// url into (host, path, user, pswd, parm).
        #
        # Supports an optional "user:pswd:host:port@" prefix; otherwise the
        # server location is taken from the P4PORT datastore variable.
        # ';key=value' suffixes on the path become entries of parm, and
        # parm["cset"] is always filled in via getcset().
        parm = {}
        path = url.split("://")[1]
        delim = path.find("@");
        if delim != -1:
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            (host, port) = data.getVar('P4PORT', d).split(':')
            user = ""
            pswd = ""

        if path.find(";") != -1:
            keys=[]
            values=[]
            plist = path.split(';')
            for item in plist:
                if item.count('='):
                    (key, value) = item.split('=')
                    keys.append(key)
                    values.append(value)

            parm = dict(zip(keys, values))
        # Depot paths are rooted with '//' in Perforce syntax.
        path = "//" + path.split(';')[0]
        host += ":%s" % (port)
        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)

        return host, path, user, pswd, parm
    doparse = staticmethod(doparse)
| 70 | |||
    def getcset(d, depot, host, user, pswd, parm):
        # Return the newest changeset number for `depot` as a string, or -1
        # when the p4 query produces no output. A pre-parsed "cset" parameter
        # short-circuits the server round trip.
        p4opt = ""
        if "cset" in parm:
            return parm["cset"];
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        # Pin the depot revision: explicit revision, then label, then P4DATE.
        p4date = data.getVar("P4DATE", d, 1)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.readline().strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        # "p4 changes" output looks like "Change <num> on ..."; field 1 is
        # the changeset number.
        return cset.split(' ')[1]
    getcset = staticmethod(getcset)
| 100 | |||
    def localpath(self, url, ud, d):
        # Compute the DL_DIR tarball name for this p4 url and record it in
        # ud.localfile.

        (host, path, user, pswd, parm) = Perforce.doparse(url, d)

        # If a label is specified, we use that as our filename

        if "label" in parm:
            ud.localfile = "%s.tar.gz" % (parm["label"])
            return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)

        # Strip a trailing '/...' wildcard from the depot path.
        base = path
        which = path.find('/...')
        if which != -1:
            base = path[:which]

        base = self._strip_leading_slashes(base)

        cset = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)

        return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
| 123 | |||
    def go(self, loc, ud, d):
        """
        Fetch urls: check out each depot file into a temp directory with
        'p4 print', tar the result up as ud.localpath, then remove the
        temp directory.  Raises FetchError on any failure.
        """

        (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

        # Strip a trailing '/...' wildcard to get the bare depot path.
        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        # The module name defaults to the last path component.
        module = parm.get('module', os.path.basename(path))

        # Work on a copy of the datastore with a 'p4' override active.
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        # Pin the depot to a label or (via getcset) a changeset number.
        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

        if not p4file:
            logger.error("Fetch: unable to get the P4 files from %s", depot)
            raise FetchError(module)

        count = 0

        # Each 'p4 files' line looks like "<depotfile>#<rev> - <action> ...";
        # print every non-deleted file into <module>/<relative path>.
        for file in p4file:
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path)+1:]
            where = dest.find("#")

            os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
            count = count + 1

        if count == 0:
            logger.error("Fetch: No files gathered from the P4 fetch")
            raise FetchError(module)

        # Tar the checkout into the cache; drop a partial tarball on failure.
        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(module)
        # cleanup
        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py new file mode 100644 index 0000000000..3330957ce3 --- /dev/null +++ b/bitbake/lib/bb/fetch2/repo.py | |||
| @@ -0,0 +1,98 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake "Fetch" repo (git) implementation | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | # Copyright (C) 2009 Tom Rini <trini@embeddedalley.com> | ||
| 9 | # | ||
| 10 | # Based on git.py which is: | ||
| 11 | #Copyright (C) 2005 Richard Purdie | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | |||
import os

import bb
from bb import data
from bb.fetch2 import Fetch
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
| 31 | |||
class Repo(Fetch):
    """Class to fetch a module or modules from repo (git) repositories"""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with repo.
        """
        return ud.type in ["repo"]

    def localpath(self, url, ud, d):
        """
        We don't care about the git rev of the manifests repository, but
        we do care about the manifest to use.  The default is "default".
        We also care about the branch or tag to be used.  The default is
        "master".

        Returns the DL_DIR path of the tarball the fetched tree is cached as.
        """
        ud.proto = ud.parm.get('protocol', 'git')
        ud.branch = ud.parm.get('branch', 'master')
        ud.manifest = ud.parm.get('manifest', 'default.xml')
        # Allow "manifest=foo" as shorthand for "foo.xml".
        if not ud.manifest.endswith('.xml'):
            ud.manifest += '.xml'

        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, loc, ud, d):
        """Fetch url: repo init + repo sync, then tar the tree into DL_DIR."""

        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            # BUGFIX: 'logger' was used here without being imported, so this
            # cache-hit path raised NameError; it is now imported from
            # bb.fetch2 at module level.
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        # Checkouts are kept per host/path/manifest under REPODIR
        # (defaulting to DL_DIR/repo).
        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        # Only initialise the checkout once; later fetches just resync.
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)

        runfetchcmd("repo sync", d)
        os.chdir(codir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.repo' --exclude '.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)

    def supports_srcrev(self):
        return False

    def _build_revision(self, url, ud, d):
        # The manifest name stands in for a revision.
        return ud.manifest

    def _want_sortable_revision(self, url, ud, d):
        return False
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py new file mode 100644 index 0000000000..8b283222bf --- /dev/null +++ b/bitbake/lib/bb/fetch2/ssh.py | |||
| @@ -0,0 +1,118 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | ''' | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | This implementation is for Secure Shell (SSH), and attempts to comply with the | ||
| 7 | IETF secsh internet draft: | ||
| 8 | http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/ | ||
| 9 | |||
| 10 | Currently does not support the sftp parameters, as this uses scp | ||
| 11 | Also does not support the 'fingerprint' connection parameter. | ||
| 12 | |||
| 13 | ''' | ||
| 14 | |||
| 15 | # Copyright (C) 2006 OpenedHand Ltd. | ||
| 16 | # | ||
| 17 | # | ||
| 18 | # Based in part on svk.py: | ||
| 19 | # Copyright (C) 2006 Holger Hans Peter Freyther | ||
| 20 | # Based on svn.py: | ||
| 21 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 22 | # Based on functions from the base bb module: | ||
| 23 | # Copyright 2003 Holger Schurig | ||
| 24 | # | ||
| 25 | # | ||
| 26 | # This program is free software; you can redistribute it and/or modify | ||
| 27 | # it under the terms of the GNU General Public License version 2 as | ||
| 28 | # published by the Free Software Foundation. | ||
| 29 | # | ||
| 30 | # This program is distributed in the hope that it will be useful, | ||
| 31 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 32 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 33 | # GNU General Public License for more details. | ||
| 34 | # | ||
| 35 | # You should have received a copy of the GNU General Public License along | ||
| 36 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 37 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 38 | |||
| 39 | import re, os | ||
| 40 | from bb import data | ||
| 41 | from bb.fetch2 import Fetch | ||
| 42 | from bb.fetch2 import FetchError | ||
| 43 | |||
| 44 | |||
# Compiled matcher for ssh:// URIs of the general shape
#   ssh://[user[:pass]][;cparam...]@host[:port]/path[;sparam...]
# re.VERBOSE lets the pattern carry its own per-line commentary.
__pattern__ = re.compile(r'''
 \s*                    # Skip leading whitespace
 ssh://                 # scheme
 (                      # Optional username/password block
  (?P<user>\S+)         # username
  (:(?P<pass>\S+))?     # colon followed by the password (optional)
 )?
 (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
 @
 (?P<host>\S+?)         # non-greedy match of the host
 (:(?P<port>[0-9]+))?   # colon followed by the port (optional)
 /
 (?P<path>[^;]+)        # path on the remote system, may be absolute or relative,
                        # and may include the use of '~' to reference the remote home
                        # directory
 (?P<sparam>(;[^;]+)*)? # parameters block (optional)
 $
''', re.VERBOSE)
| 63 | |||
class SSH(Fetch):
    '''Class to fetch a module or modules via Secure Shell'''

    def supports(self, url, urldata, d):
        # Anything matching the module-level ssh:// URI pattern is ours.
        return __pattern__.match(url) != None

    def localpath(self, url, urldata, d):
        # Local copies live under DL_DIR/<host>/<basename-of-remote-path>.
        m = __pattern__.match(url)
        path = m.group('path')
        host = m.group('host')
        lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
        return lpath

    def go(self, url, urldata, d):
        # Copy the remote path into DL_DIR/<host>/ with scp -r.
        dldir = data.getVar('DL_DIR', d, 1)

        m = __pattern__.match(url)
        path = m.group('path')
        host = m.group('host')
        port = m.group('port')
        user = m.group('user')
        password = m.group('pass')

        ldir = os.path.join(dldir, host)
        lpath = os.path.join(ldir, os.path.basename(path))

        if not os.path.exists(ldir):
            os.makedirs(ldir)

        # Turn the optional port into an scp '-P <port>' flag.
        if port:
            port = '-P %s' % port
        else:
            port = ''

        # Build the scp source spec: [user[:password]@]host:path
        if user:
            fr = user
            if password:
                fr += ':%s' % password
            fr += '@%s' % host
        else:
            fr = host
        fr += ':%s' % path


        # commands is Python-2-only; mkarg shell-quotes each argument.
        import commands
        cmd = 'scp -B -r %s %s %s/' % (
            port,
            commands.mkarg(fr),
            commands.mkarg(ldir)
        )

        (exitstatus, output) = commands.getstatusoutput(cmd)
        if exitstatus != 0:
            print(output)
            raise FetchError('Unable to fetch %s' % url)
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py new file mode 100644 index 0000000000..7990ff21fa --- /dev/null +++ b/bitbake/lib/bb/fetch2/svk.py | |||
| @@ -0,0 +1,104 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | This implementation is for svk. It is based on the svn implementation | ||
| 7 | |||
| 8 | """ | ||
| 9 | |||
| 10 | # Copyright (C) 2006 Holger Hans Peter Freyther | ||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | |||
| 28 | import os | ||
| 29 | import logging | ||
| 30 | import bb | ||
| 31 | from bb import data | ||
| 32 | from bb.fetch2 import Fetch | ||
| 33 | from bb.fetch2 import FetchError | ||
| 34 | from bb.fetch2 import MissingParameterError | ||
| 35 | from bb.fetch2 import logger | ||
| 36 | |||
class Svk(Fetch):
    """Class to fetch a module or modules from svk repositories"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with svk.
        """
        return ud.type in ['svk']

    def localpath(self, url, ud, d):
        """Require a 'module' parameter and return the DL_DIR path of the
        tarball this checkout is cached as (also set in ud.localfile)."""
        if not "module" in ud.parm:
            raise MissingParameterError("svk method needs a 'module' parameter")
        else:
            ud.module = ud.parm["module"]

        ud.revision = ud.parm.get('rev', "")

        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        # SRCDATE of "now" means there is no stable snapshot to cache.
        return ud.date == "now"

    def go(self, loc, ud, d):
        """Fetch urls: 'svk co' into a temp directory, tar the module up
        as ud.localpath, then remove the temp directory."""

        svkroot = ud.host + ud.path

        # Check out by date by default; an explicit revision overrides it.
        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(ud.module)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(ud.module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename; drop a partial tarball on failure
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
        # cleanup
        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py new file mode 100644 index 0000000000..1116795e87 --- /dev/null +++ b/bitbake/lib/bb/fetch2/svn.py | |||
| @@ -0,0 +1,202 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementation for svn. | ||
| 5 | |||
| 6 | """ | ||
| 7 | |||
| 8 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | # Copyright (C) 2004 Marcin Juszkiewicz | ||
| 10 | # | ||
| 11 | # This program is free software; you can redistribute it and/or modify | ||
| 12 | # it under the terms of the GNU General Public License version 2 as | ||
| 13 | # published by the Free Software Foundation. | ||
| 14 | # | ||
| 15 | # This program is distributed in the hope that it will be useful, | ||
| 16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 18 | # GNU General Public License for more details. | ||
| 19 | # | ||
| 20 | # You should have received a copy of the GNU General Public License along | ||
| 21 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 22 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 23 | # | ||
| 24 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 25 | |||
| 26 | import os | ||
| 27 | import sys | ||
| 28 | import logging | ||
| 29 | import bb | ||
| 30 | from bb import data | ||
| 31 | from bb.fetch2 import Fetch | ||
| 32 | from bb.fetch2 import FetchError | ||
| 33 | from bb.fetch2 import MissingParameterError | ||
| 34 | from bb.fetch2 import runfetchcmd | ||
| 35 | from bb.fetch2 import logger | ||
| 36 | |||
| 37 | class Svn(Fetch): | ||
| 38 | """Class to fetch a module or modules from svn repositories""" | ||
| 39 | def supports(self, url, ud, d): | ||
| 40 | """ | ||
| 41 | Check to see if a given url can be fetched with svn. | ||
| 42 | """ | ||
| 43 | return ud.type in ['svn'] | ||
| 44 | |||
| 45 | def urldata_init(self, ud, d): | ||
| 46 | """ | ||
| 47 | init svn specific variable within url data | ||
| 48 | """ | ||
| 49 | if not "module" in ud.parm: | ||
| 50 | raise MissingParameterError("svn method needs a 'module' parameter") | ||
| 51 | |||
| 52 | ud.module = ud.parm["module"] | ||
| 53 | |||
| 54 | # Create paths to svn checkouts | ||
| 55 | relpath = self._strip_leading_slashes(ud.path) | ||
| 56 | ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath) | ||
| 57 | ud.moddir = os.path.join(ud.pkgdir, ud.module) | ||
| 58 | |||
| 59 | def localpath(self, url, ud, d): | ||
| 60 | if 'rev' in ud.parm: | ||
| 61 | ud.date = "" | ||
| 62 | ud.revision = ud.parm['rev'] | ||
| 63 | elif 'date' in ud.date: | ||
| 64 | ud.date = ud.parm['date'] | ||
| 65 | ud.revision = "" | ||
| 66 | else: | ||
| 67 | # | ||
| 68 | # ***Nasty hack*** | ||
| 69 | # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE) | ||
| 70 | # Should warn people to switch to SRCREV here | ||
| 71 | # | ||
| 72 | pv = data.getVar("PV", d, 0) | ||
| 73 | if "DATE" in pv: | ||
| 74 | ud.revision = "" | ||
| 75 | else: | ||
| 76 | # use the initizlied revision | ||
| 77 | if ud.revision: | ||
| 78 | ud.date = "" | ||
| 79 | |||
| 80 | ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) | ||
| 81 | |||
| 82 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
| 83 | |||
| 84 | def _buildsvncommand(self, ud, d, command): | ||
| 85 | """ | ||
| 86 | Build up an svn commandline based on ud | ||
| 87 | command is "fetch", "update", "info" | ||
| 88 | """ | ||
| 89 | |||
| 90 | basecmd = data.expand('${FETCHCMD_svn}', d) | ||
| 91 | |||
| 92 | proto = ud.parm.get('proto', 'svn') | ||
| 93 | |||
| 94 | svn_rsh = None | ||
| 95 | if proto == "svn+ssh" and "rsh" in ud.parm: | ||
| 96 | svn_rsh = ud.parm["rsh"] | ||
| 97 | |||
| 98 | svnroot = ud.host + ud.path | ||
| 99 | |||
| 100 | # either use the revision, or SRCDATE in braces, | ||
| 101 | options = [] | ||
| 102 | |||
| 103 | if ud.user: | ||
| 104 | options.append("--username %s" % ud.user) | ||
| 105 | |||
| 106 | if ud.pswd: | ||
| 107 | options.append("--password %s" % ud.pswd) | ||
| 108 | |||
| 109 | if command is "info": | ||
| 110 | svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module) | ||
| 111 | else: | ||
| 112 | suffix = "" | ||
| 113 | if ud.revision: | ||
| 114 | options.append("-r %s" % ud.revision) | ||
| 115 | suffix = "@%s" % (ud.revision) | ||
| 116 | elif ud.date: | ||
| 117 | options.append("-r {%s}" % ud.date) | ||
| 118 | |||
| 119 | if command is "fetch": | ||
| 120 | svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module) | ||
| 121 | elif command is "update": | ||
| 122 | svncmd = "%s update %s" % (basecmd, " ".join(options)) | ||
| 123 | else: | ||
| 124 | raise FetchError("Invalid svn command %s" % command) | ||
| 125 | |||
| 126 | if svn_rsh: | ||
| 127 | svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) | ||
| 128 | |||
| 129 | return svncmd | ||
| 130 | |||
| 131 | def go(self, loc, ud, d): | ||
| 132 | """Fetch url""" | ||
| 133 | |||
| 134 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | ||
| 135 | |||
| 136 | if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): | ||
| 137 | svnupdatecmd = self._buildsvncommand(ud, d, "update") | ||
| 138 | logger.info("Update " + loc) | ||
| 139 | # update sources there | ||
| 140 | os.chdir(ud.moddir) | ||
| 141 | logger.debug(1, "Running %s", svnupdatecmd) | ||
| 142 | runfetchcmd(svnupdatecmd, d) | ||
| 143 | else: | ||
| 144 | svnfetchcmd = self._buildsvncommand(ud, d, "fetch") | ||
| 145 | logger.info("Fetch " + loc) | ||
| 146 | # check out sources there | ||
| 147 | bb.mkdirhier(ud.pkgdir) | ||
| 148 | os.chdir(ud.pkgdir) | ||
| 149 | logger.debug(1, "Running %s", svnfetchcmd) | ||
| 150 | runfetchcmd(svnfetchcmd, d) | ||
| 151 | |||
| 152 | scmdata = ud.parm.get("scmdata", "") | ||
| 153 | if scmdata == "keep": | ||
| 154 | tar_flags = "" | ||
| 155 | else: | ||
| 156 | tar_flags = "--exclude '.svn'" | ||
| 157 | |||
| 158 | os.chdir(ud.pkgdir) | ||
| 159 | # tar them up to a defined filename | ||
| 160 | try: | ||
| 161 | runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) | ||
| 162 | except: | ||
| 163 | t, v, tb = sys.exc_info() | ||
| 164 | try: | ||
| 165 | os.unlink(ud.localpath) | ||
| 166 | except OSError: | ||
| 167 | pass | ||
| 168 | raise t, v, tb | ||
| 169 | |||
| 170 | def supports_srcrev(self): | ||
| 171 | return True | ||
| 172 | |||
| 173 | def _revision_key(self, url, ud, d): | ||
| 174 | """ | ||
| 175 | Return a unique key for the url | ||
| 176 | """ | ||
| 177 | return "svn:" + ud.moddir | ||
| 178 | |||
| 179 | def _latest_revision(self, url, ud, d): | ||
| 180 | """ | ||
| 181 | Return the latest upstream revision number | ||
| 182 | """ | ||
| 183 | logger.debug(2, "SVN fetcher hitting network for %s", url) | ||
| 184 | |||
| 185 | output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True) | ||
| 186 | |||
| 187 | revision = None | ||
| 188 | for line in output.splitlines(): | ||
| 189 | if "Last Changed Rev" in line: | ||
| 190 | revision = line.split(":")[1].strip() | ||
| 191 | |||
| 192 | return revision | ||
| 193 | |||
| 194 | def _sortable_revision(self, url, ud, d): | ||
| 195 | """ | ||
| 196 | Return a sortable revision number which in our case is the revision number | ||
| 197 | """ | ||
| 198 | |||
| 199 | return self._build_revision(url, ud, d) | ||
| 200 | |||
| 201 | def _build_revision(self, url, ud, d): | ||
| 202 | return ud.revision | ||
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py new file mode 100644 index 0000000000..cf36ccad0a --- /dev/null +++ b/bitbake/lib/bb/fetch2/wget.py | |||
| @@ -0,0 +1,93 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' implementations | ||
| 5 | |||
| 6 | Classes for obtaining upstream sources for the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 12 | # | ||
| 13 | # This program is free software; you can redistribute it and/or modify | ||
| 14 | # it under the terms of the GNU General Public License version 2 as | ||
| 15 | # published by the Free Software Foundation. | ||
| 16 | # | ||
| 17 | # This program is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 20 | # GNU General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU General Public License along | ||
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 25 | # | ||
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | |||
| 28 | import os | ||
| 29 | import logging | ||
| 30 | import bb | ||
| 31 | import urllib | ||
| 32 | from bb import data | ||
| 33 | from bb.fetch2 import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd | ||
| 34 | |||
class Wget(Fetch):
    """Class to fetch urls via 'wget'"""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp']

    def localpath(self, url, ud, d):
        """Name the download after the (unquoted) basename of the URL path
        and return its full DL_DIR path."""
        # Re-encode without parameters for a canonical form of the url.
        url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
        ud.basename = os.path.basename(ud.path)
        # URL-unquote so "%20" and friends don't end up in the filename.
        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, uri, ud, d, checkonly = False):
        """Fetch urls (or, with checkonly, just probe them via CHECKCOMMAND)."""

        def fetch_uri(uri, ud, d):
            # Pick the command: probe only, resume a partial download, or
            # fetch from scratch.
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didnt complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            # Drop any ";param=value" suffix before substitution.
            # (The original also ran decodeurl() here and bound
            # uri_type/uri_host locals that were never read; that dead
            # code has been removed.)
            uri = uri.split(";")[0]

            fetchcmd = fetchcmd.replace("${URI}", uri)
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
            runfetchcmd(fetchcmd, d)

            # Sanity check since wget can pretend it succeed when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
                return False

            return True

        # Run with a 'wget' override active so FETCHCOMMAND etc. can be
        # specialised per fetcher.
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        if fetch_uri(uri, ud, localdata):
            return True

        raise FetchError(uri)


    def checkstatus(self, uri, ud, d):
        """Probe the URI with CHECKCOMMAND instead of downloading it."""
        return self.go(uri, ud, d, True)
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py index 13ee569e9a..00dc139c88 100644 --- a/bitbake/lib/bb/msg.py +++ b/bitbake/lib/bb/msg.py | |||
| @@ -33,17 +33,22 @@ import bb.event | |||
| 33 | class BBLogFormatter(logging.Formatter): | 33 | class BBLogFormatter(logging.Formatter): |
| 34 | """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is""" | 34 | """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is""" |
| 35 | 35 | ||
| 36 | DEBUG = logging.DEBUG | ||
| 36 | VERBOSE = 19 | 37 | VERBOSE = 19 |
| 38 | NOTE = logging.INFO | ||
| 37 | PLAIN = 21 | 39 | PLAIN = 21 |
| 40 | ERROR = logging.ERROR | ||
| 41 | WARNING = logging.WARNING | ||
| 42 | CRITICAL = logging.CRITICAL | ||
| 43 | |||
| 38 | levelnames = { | 44 | levelnames = { |
| 45 | DEBUG : 'DEBUG', | ||
| 39 | PLAIN : '', | 46 | PLAIN : '', |
| 47 | NOTE : 'NOTE', | ||
| 40 | VERBOSE: 'NOTE', | 48 | VERBOSE: 'NOTE', |
| 41 | 49 | WARNING : 'WARNING', | |
| 42 | logging.DEBUG : 'DEBUG', | 50 | ERROR : 'ERROR', |
| 43 | logging.INFO : 'NOTE', | 51 | CRITICAL: 'ERROR', |
| 44 | logging.WARNING : 'WARNING', | ||
| 45 | logging.ERROR : 'ERROR', | ||
| 46 | logging.CRITICAL: 'ERROR', | ||
| 47 | } | 52 | } |
| 48 | 53 | ||
| 49 | def getLevelName(self, levelno): | 54 | def getLevelName(self, levelno): |
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py index e34f1fe894..8fffe1e8f0 100644 --- a/bitbake/lib/bb/parse/ast.py +++ b/bitbake/lib/bb/parse/ast.py | |||
| @@ -40,13 +40,14 @@ class StatementGroup(list): | |||
| 40 | statement.eval(data) | 40 | statement.eval(data) |
| 41 | 41 | ||
| 42 | class AstNode(object): | 42 | class AstNode(object): |
| 43 | pass | 43 | def __init__(self, filename, lineno): |
| 44 | self.filename = filename | ||
| 45 | self.lineno = lineno | ||
| 44 | 46 | ||
| 45 | class IncludeNode(AstNode): | 47 | class IncludeNode(AstNode): |
| 46 | def __init__(self, what_file, fn, lineno, force): | 48 | def __init__(self, filename, lineno, what_file, force): |
| 49 | AstNode.__init__(self, filename, lineno) | ||
| 47 | self.what_file = what_file | 50 | self.what_file = what_file |
| 48 | self.from_fn = fn | ||
| 49 | self.from_lineno = lineno | ||
| 50 | self.force = force | 51 | self.force = force |
| 51 | 52 | ||
| 52 | def eval(self, data): | 53 | def eval(self, data): |
| @@ -54,16 +55,17 @@ class IncludeNode(AstNode): | |||
| 54 | Include the file and evaluate the statements | 55 | Include the file and evaluate the statements |
| 55 | """ | 56 | """ |
| 56 | s = bb.data.expand(self.what_file, data) | 57 | s = bb.data.expand(self.what_file, data) |
| 57 | logger.debug(2, "CONF %s:%s: including %s", self.from_fn, self.from_lineno, s) | 58 | logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s) |
| 58 | 59 | ||
| 59 | # TODO: Cache those includes... maybe not here though | 60 | # TODO: Cache those includes... maybe not here though |
| 60 | if self.force: | 61 | if self.force: |
| 61 | bb.parse.ConfHandler.include(self.from_fn, s, data, "include required") | 62 | bb.parse.ConfHandler.include(self.filename, s, data, "include required") |
| 62 | else: | 63 | else: |
| 63 | bb.parse.ConfHandler.include(self.from_fn, s, data, False) | 64 | bb.parse.ConfHandler.include(self.filename, s, data, False) |
| 64 | 65 | ||
| 65 | class ExportNode(AstNode): | 66 | class ExportNode(AstNode): |
| 66 | def __init__(self, var): | 67 | def __init__(self, filename, lineno, var): |
| 68 | AstNode.__init__(self, filename, lineno) | ||
| 67 | self.var = var | 69 | self.var = var |
| 68 | 70 | ||
| 69 | def eval(self, data): | 71 | def eval(self, data): |
| @@ -76,7 +78,8 @@ class DataNode(AstNode): | |||
| 76 | this need to be re-evaluated... we might be able to do | 78 | this need to be re-evaluated... we might be able to do |
| 77 | that faster with multiple classes. | 79 | that faster with multiple classes. |
| 78 | """ | 80 | """ |
| 79 | def __init__(self, groupd): | 81 | def __init__(self, filename, lineno, groupd): |
| 82 | AstNode.__init__(self, filename, lineno) | ||
| 80 | self.groupd = groupd | 83 | self.groupd = groupd |
| 81 | 84 | ||
| 82 | def getFunc(self, key, data): | 85 | def getFunc(self, key, data): |
| @@ -119,19 +122,18 @@ class DataNode(AstNode): | |||
| 119 | else: | 122 | else: |
| 120 | bb.data.setVar(key, val, data) | 123 | bb.data.setVar(key, val, data) |
| 121 | 124 | ||
| 122 | class MethodNode: | 125 | class MethodNode(AstNode): |
| 123 | def __init__(self, func_name, body, lineno, fn): | 126 | def __init__(self, filename, lineno, func_name, body): |
| 127 | AstNode.__init__(self, filename, lineno) | ||
| 124 | self.func_name = func_name | 128 | self.func_name = func_name |
| 125 | self.body = body | 129 | self.body = body |
| 126 | self.fn = fn | ||
| 127 | self.lineno = lineno | ||
| 128 | 130 | ||
| 129 | def eval(self, data): | 131 | def eval(self, data): |
| 130 | if self.func_name == "__anonymous": | 132 | if self.func_name == "__anonymous": |
| 131 | funcname = ("__anon_%s_%s" % (self.lineno, self.fn.translate(string.maketrans('/.+-', '____')))) | 133 | funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(string.maketrans('/.+-', '____')))) |
| 132 | if not funcname in bb.methodpool._parsed_fns: | 134 | if not funcname in bb.methodpool._parsed_fns: |
| 133 | text = "def %s(d):\n" % (funcname) + '\n'.join(self.body) | 135 | text = "def %s(d):\n" % (funcname) + '\n'.join(self.body) |
| 134 | bb.methodpool.insert_method(funcname, text, self.fn) | 136 | bb.methodpool.insert_method(funcname, text, self.filename) |
| 135 | anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or [] | 137 | anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or [] |
| 136 | anonfuncs.append(funcname) | 138 | anonfuncs.append(funcname) |
| 137 | bb.data.setVar('__BBANONFUNCS', anonfuncs, data) | 139 | bb.data.setVar('__BBANONFUNCS', anonfuncs, data) |
| @@ -140,25 +142,26 @@ class MethodNode: | |||
| 140 | bb.data.setVar(self.func_name, '\n'.join(self.body), data) | 142 | bb.data.setVar(self.func_name, '\n'.join(self.body), data) |
| 141 | 143 | ||
| 142 | class PythonMethodNode(AstNode): | 144 | class PythonMethodNode(AstNode): |
| 143 | def __init__(self, funcname, root, body, fn): | 145 | def __init__(self, filename, lineno, function, define, body): |
| 144 | self.func_name = funcname | 146 | AstNode.__init__(self, filename, lineno) |
| 145 | self.root = root | 147 | self.function = function |
| 148 | self.define = define | ||
| 146 | self.body = body | 149 | self.body = body |
| 147 | self.fn = fn | ||
| 148 | 150 | ||
| 149 | def eval(self, data): | 151 | def eval(self, data): |
| 150 | # Note we will add root to parsedmethods after having parse | 152 | # Note we will add root to parsedmethods after having parse |
| 151 | # 'this' file. This means we will not parse methods from | 153 | # 'this' file. This means we will not parse methods from |
| 152 | # bb classes twice | 154 | # bb classes twice |
| 153 | text = '\n'.join(self.body) | 155 | text = '\n'.join(self.body) |
| 154 | if not bb.methodpool.parsed_module(self.root): | 156 | if not bb.methodpool.parsed_module(self.define): |
| 155 | bb.methodpool.insert_method(self.root, text, self.fn) | 157 | bb.methodpool.insert_method(self.define, text, self.filename) |
| 156 | bb.data.setVarFlag(self.func_name, "func", 1, data) | 158 | bb.data.setVarFlag(self.function, "func", 1, data) |
| 157 | bb.data.setVarFlag(self.func_name, "python", 1, data) | 159 | bb.data.setVarFlag(self.function, "python", 1, data) |
| 158 | bb.data.setVar(self.func_name, text, data) | 160 | bb.data.setVar(self.function, text, data) |
| 159 | 161 | ||
| 160 | class MethodFlagsNode(AstNode): | 162 | class MethodFlagsNode(AstNode): |
| 161 | def __init__(self, key, m): | 163 | def __init__(self, filename, lineno, key, m): |
| 164 | AstNode.__init__(self, filename, lineno) | ||
| 162 | self.key = key | 165 | self.key = key |
| 163 | self.m = m | 166 | self.m = m |
| 164 | 167 | ||
| @@ -178,7 +181,8 @@ class MethodFlagsNode(AstNode): | |||
| 178 | bb.data.delVarFlag(self.key, "fakeroot", data) | 181 | bb.data.delVarFlag(self.key, "fakeroot", data) |
| 179 | 182 | ||
| 180 | class ExportFuncsNode(AstNode): | 183 | class ExportFuncsNode(AstNode): |
| 181 | def __init__(self, fns, classes): | 184 | def __init__(self, filename, lineno, fns, classes): |
| 185 | AstNode.__init__(self, filename, lineno) | ||
| 182 | self.n = fns.split() | 186 | self.n = fns.split() |
| 183 | self.classes = classes | 187 | self.classes = classes |
| 184 | 188 | ||
| @@ -217,7 +221,8 @@ class ExportFuncsNode(AstNode): | |||
| 217 | bb.data.setVarFlag(var, 'export_func', '1', data) | 221 | bb.data.setVarFlag(var, 'export_func', '1', data) |
| 218 | 222 | ||
| 219 | class AddTaskNode(AstNode): | 223 | class AddTaskNode(AstNode): |
| 220 | def __init__(self, func, before, after): | 224 | def __init__(self, filename, lineno, func, before, after): |
| 225 | AstNode.__init__(self, filename, lineno) | ||
| 221 | self.func = func | 226 | self.func = func |
| 222 | self.before = before | 227 | self.before = before |
| 223 | self.after = after | 228 | self.after = after |
| @@ -248,7 +253,8 @@ class AddTaskNode(AstNode): | |||
| 248 | bb.data.setVarFlag(entry, "deps", [var] + existing, data) | 253 | bb.data.setVarFlag(entry, "deps", [var] + existing, data) |
| 249 | 254 | ||
| 250 | class BBHandlerNode(AstNode): | 255 | class BBHandlerNode(AstNode): |
| 251 | def __init__(self, fns): | 256 | def __init__(self, filename, lineno, fns): |
| 257 | AstNode.__init__(self, filename, lineno) | ||
| 252 | self.hs = fns.split() | 258 | self.hs = fns.split() |
| 253 | 259 | ||
| 254 | def eval(self, data): | 260 | def eval(self, data): |
| @@ -259,48 +265,49 @@ class BBHandlerNode(AstNode): | |||
| 259 | bb.data.setVar('__BBHANDLERS', bbhands, data) | 265 | bb.data.setVar('__BBHANDLERS', bbhands, data) |
| 260 | 266 | ||
| 261 | class InheritNode(AstNode): | 267 | class InheritNode(AstNode): |
| 262 | def __init__(self, classes): | 268 | def __init__(self, filename, lineno, classes): |
| 269 | AstNode.__init__(self, filename, lineno) | ||
| 263 | self.classes = classes | 270 | self.classes = classes |
| 264 | 271 | ||
| 265 | def eval(self, data): | 272 | def eval(self, data): |
| 266 | bb.parse.BBHandler.inherit(self.classes, data) | 273 | bb.parse.BBHandler.inherit(self.classes, data) |
| 267 | 274 | ||
| 268 | def handleInclude(statements, m, fn, lineno, force): | 275 | def handleInclude(statements, filename, lineno, m, force): |
| 269 | statements.append(IncludeNode(m.group(1), fn, lineno, force)) | 276 | statements.append(IncludeNode(filename, lineno, m.group(1), force)) |
| 270 | 277 | ||
| 271 | def handleExport(statements, m): | 278 | def handleExport(statements, filename, lineno, m): |
| 272 | statements.append(ExportNode(m.group(1))) | 279 | statements.append(ExportNode(filename, lineno, m.group(1))) |
| 273 | 280 | ||
| 274 | def handleData(statements, groupd): | 281 | def handleData(statements, filename, lineno, groupd): |
| 275 | statements.append(DataNode(groupd)) | 282 | statements.append(DataNode(filename, lineno, groupd)) |
| 276 | 283 | ||
| 277 | def handleMethod(statements, func_name, lineno, fn, body): | 284 | def handleMethod(statements, filename, lineno, func_name, body): |
| 278 | statements.append(MethodNode(func_name, body, lineno, fn)) | 285 | statements.append(MethodNode(filename, lineno, func_name, body)) |
| 279 | 286 | ||
| 280 | def handlePythonMethod(statements, funcname, root, body, fn): | 287 | def handlePythonMethod(statements, filename, lineno, funcname, root, body): |
| 281 | statements.append(PythonMethodNode(funcname, root, body, fn)) | 288 | statements.append(PythonMethodNode(filename, lineno, funcname, root, body)) |
| 282 | 289 | ||
| 283 | def handleMethodFlags(statements, key, m): | 290 | def handleMethodFlags(statements, filename, lineno, key, m): |
| 284 | statements.append(MethodFlagsNode(key, m)) | 291 | statements.append(MethodFlagsNode(filename, lineno, key, m)) |
| 285 | 292 | ||
| 286 | def handleExportFuncs(statements, m, classes): | 293 | def handleExportFuncs(statements, filename, lineno, m, classes): |
| 287 | statements.append(ExportFuncsNode(m.group(1), classes)) | 294 | statements.append(ExportFuncsNode(filename, lineno, m.group(1), classes)) |
| 288 | 295 | ||
| 289 | def handleAddTask(statements, m): | 296 | def handleAddTask(statements, filename, lineno, m): |
| 290 | func = m.group("func") | 297 | func = m.group("func") |
| 291 | before = m.group("before") | 298 | before = m.group("before") |
| 292 | after = m.group("after") | 299 | after = m.group("after") |
| 293 | if func is None: | 300 | if func is None: |
| 294 | return | 301 | return |
| 295 | 302 | ||
| 296 | statements.append(AddTaskNode(func, before, after)) | 303 | statements.append(AddTaskNode(filename, lineno, func, before, after)) |
| 297 | 304 | ||
| 298 | def handleBBHandlers(statements, m): | 305 | def handleBBHandlers(statements, filename, lineno, m): |
| 299 | statements.append(BBHandlerNode(m.group(1))) | 306 | statements.append(BBHandlerNode(filename, lineno, m.group(1))) |
| 300 | 307 | ||
| 301 | def handleInherit(statements, m): | 308 | def handleInherit(statements, filename, lineno, m): |
| 302 | classes = m.group(1) | 309 | classes = m.group(1) |
| 303 | statements.append(InheritNode(classes.split())) | 310 | statements.append(InheritNode(filename, lineno, classes.split())) |
| 304 | 311 | ||
| 305 | def finalize(fn, d, variant = None): | 312 | def finalize(fn, d, variant = None): |
| 306 | for lazykey in bb.data.getVar("__lazy_assigned", d) or (): | 313 | for lazykey in bb.data.getVar("__lazy_assigned", d) or (): |
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py index 81554b9435..31d1e21c67 100644 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
| @@ -28,7 +28,7 @@ | |||
| 28 | from __future__ import absolute_import | 28 | from __future__ import absolute_import |
| 29 | import re, bb, os | 29 | import re, bb, os |
| 30 | import logging | 30 | import logging |
| 31 | import bb.fetch, bb.build, bb.utils | 31 | import bb.build, bb.utils |
| 32 | from bb import data | 32 | from bb import data |
| 33 | 33 | ||
| 34 | from . import ConfHandler | 34 | from . import ConfHandler |
| @@ -172,7 +172,7 @@ def feeder(lineno, s, fn, root, statements): | |||
| 172 | if __infunc__: | 172 | if __infunc__: |
| 173 | if s == '}': | 173 | if s == '}': |
| 174 | __body__.append('') | 174 | __body__.append('') |
| 175 | ast.handleMethod(statements, __infunc__, lineno, fn, __body__) | 175 | ast.handleMethod(statements, fn, lineno, __infunc__, __body__) |
| 176 | __infunc__ = "" | 176 | __infunc__ = "" |
| 177 | __body__ = [] | 177 | __body__ = [] |
| 178 | else: | 178 | else: |
| @@ -185,7 +185,8 @@ def feeder(lineno, s, fn, root, statements): | |||
| 185 | __body__.append(s) | 185 | __body__.append(s) |
| 186 | return | 186 | return |
| 187 | else: | 187 | else: |
| 188 | ast.handlePythonMethod(statements, __inpython__, root, __body__, fn) | 188 | ast.handlePythonMethod(statements, fn, lineno, __inpython__, |
| 189 | root, __body__) | ||
| 189 | __body__ = [] | 190 | __body__ = [] |
| 190 | __inpython__ = False | 191 | __inpython__ = False |
| 191 | 192 | ||
| @@ -206,7 +207,7 @@ def feeder(lineno, s, fn, root, statements): | |||
| 206 | m = __func_start_regexp__.match(s) | 207 | m = __func_start_regexp__.match(s) |
| 207 | if m: | 208 | if m: |
| 208 | __infunc__ = m.group("func") or "__anonymous" | 209 | __infunc__ = m.group("func") or "__anonymous" |
| 209 | ast.handleMethodFlags(statements, __infunc__, m) | 210 | ast.handleMethodFlags(statements, fn, lineno, __infunc__, m) |
| 210 | return | 211 | return |
| 211 | 212 | ||
| 212 | m = __def_regexp__.match(s) | 213 | m = __def_regexp__.match(s) |
| @@ -218,22 +219,22 @@ def feeder(lineno, s, fn, root, statements): | |||
| 218 | 219 | ||
| 219 | m = __export_func_regexp__.match(s) | 220 | m = __export_func_regexp__.match(s) |
| 220 | if m: | 221 | if m: |
| 221 | ast.handleExportFuncs(statements, m, classes) | 222 | ast.handleExportFuncs(statements, fn, lineno, m, classes) |
| 222 | return | 223 | return |
| 223 | 224 | ||
| 224 | m = __addtask_regexp__.match(s) | 225 | m = __addtask_regexp__.match(s) |
| 225 | if m: | 226 | if m: |
| 226 | ast.handleAddTask(statements, m) | 227 | ast.handleAddTask(statements, fn, lineno, m) |
| 227 | return | 228 | return |
| 228 | 229 | ||
| 229 | m = __addhandler_regexp__.match(s) | 230 | m = __addhandler_regexp__.match(s) |
| 230 | if m: | 231 | if m: |
| 231 | ast.handleBBHandlers(statements, m) | 232 | ast.handleBBHandlers(statements, fn, lineno, m) |
| 232 | return | 233 | return |
| 233 | 234 | ||
| 234 | m = __inherit_regexp__.match(s) | 235 | m = __inherit_regexp__.match(s) |
| 235 | if m: | 236 | if m: |
| 236 | ast.handleInherit(statements, m) | 237 | ast.handleInherit(statements, fn, lineno, m) |
| 237 | return | 238 | return |
| 238 | 239 | ||
| 239 | return ConfHandler.feeder(lineno, s, fn, statements) | 240 | return ConfHandler.feeder(lineno, s, fn, statements) |
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py index d90f5d868e..fc239a3540 100644 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
| @@ -113,22 +113,22 @@ def feeder(lineno, s, fn, statements): | |||
| 113 | m = __config_regexp__.match(s) | 113 | m = __config_regexp__.match(s) |
| 114 | if m: | 114 | if m: |
| 115 | groupd = m.groupdict() | 115 | groupd = m.groupdict() |
| 116 | ast.handleData(statements, groupd) | 116 | ast.handleData(statements, fn, lineno, groupd) |
| 117 | return | 117 | return |
| 118 | 118 | ||
| 119 | m = __include_regexp__.match(s) | 119 | m = __include_regexp__.match(s) |
| 120 | if m: | 120 | if m: |
| 121 | ast.handleInclude(statements, m, fn, lineno, False) | 121 | ast.handleInclude(statements, fn, lineno, m, False) |
| 122 | return | 122 | return |
| 123 | 123 | ||
| 124 | m = __require_regexp__.match(s) | 124 | m = __require_regexp__.match(s) |
| 125 | if m: | 125 | if m: |
| 126 | ast.handleInclude(statements, m, fn, lineno, True) | 126 | ast.handleInclude(statements, fn, lineno, m, True) |
| 127 | return | 127 | return |
| 128 | 128 | ||
| 129 | m = __export_regexp__.match(s) | 129 | m = __export_regexp__.match(s) |
| 130 | if m: | 130 | if m: |
| 131 | ast.handleExport(statements, m) | 131 | ast.handleExport(statements, fn, lineno, m) |
| 132 | return | 132 | return |
| 133 | 133 | ||
| 134 | raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s)); | 134 | raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s)); |
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index 187720fc46..2b37619ae3 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py | |||
| @@ -22,13 +22,12 @@ Handles preparation and execution of a queue of tasks | |||
| 22 | # with this program; if not, write to the Free Software Foundation, Inc., | 22 | # with this program; if not, write to the Free Software Foundation, Inc., |
| 23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | 23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
| 24 | 24 | ||
| 25 | import copy | ||
| 25 | import os | 26 | import os |
| 26 | import sys | 27 | import sys |
| 27 | import subprocess | ||
| 28 | import signal | 28 | import signal |
| 29 | import stat | 29 | import stat |
| 30 | import fcntl | 30 | import fcntl |
| 31 | import copy | ||
| 32 | import logging | 31 | import logging |
| 33 | import bb | 32 | import bb |
| 34 | from bb import msg, data, event | 33 | from bb import msg, data, event |
| @@ -36,12 +35,6 @@ from bb import msg, data, event | |||
| 36 | bblogger = logging.getLogger("BitBake") | 35 | bblogger = logging.getLogger("BitBake") |
| 37 | logger = logging.getLogger("BitBake.RunQueue") | 36 | logger = logging.getLogger("BitBake.RunQueue") |
| 38 | 37 | ||
| 39 | try: | ||
| 40 | import cPickle as pickle | ||
| 41 | except ImportError: | ||
| 42 | import pickle | ||
| 43 | logger.info("Importing cPickle failed. Falling back to a very slow implementation.") | ||
| 44 | |||
| 45 | class RunQueueStats: | 38 | class RunQueueStats: |
| 46 | """ | 39 | """ |
| 47 | Holds statistics on the tasks handled by the associated runQueue | 40 | Holds statistics on the tasks handled by the associated runQueue |
| @@ -93,28 +86,28 @@ class RunQueueScheduler(object): | |||
| 93 | """ | 86 | """ |
| 94 | self.rq = runqueue | 87 | self.rq = runqueue |
| 95 | self.rqdata = rqdata | 88 | self.rqdata = rqdata |
| 96 | numTasks = len(self.rq.runq_fnid) | 89 | numTasks = len(self.rqdata.runq_fnid) |
| 97 | 90 | ||
| 98 | self.prio_map = [] | 91 | self.prio_map = [] |
| 99 | self.prio_map.extend(range(numTasks)) | 92 | self.prio_map.extend(range(numTasks)) |
| 100 | 93 | ||
| 101 | def next_buildable_tasks(self): | 94 | def next_buildable_task(self): |
| 102 | """ | 95 | """ |
| 103 | Return the id of the first task we find that is buildable | 96 | Return the id of the first task we find that is buildable |
| 104 | """ | 97 | """ |
| 105 | for tasknum in range(len(self.rqdata.runq_fnid)): | 98 | for tasknum in xrange(len(self.rqdata.runq_fnid)): |
| 106 | taskid = self.prio_map[tasknum] | 99 | taskid = self.prio_map[tasknum] |
| 107 | if self.rq.runq_running[taskid] == 1: | 100 | if self.rq.runq_running[taskid] == 1: |
| 108 | continue | 101 | continue |
| 109 | if self.rq.runq_buildable[taskid] == 1: | 102 | if self.rq.runq_buildable[taskid] == 1: |
| 110 | yield taskid | 103 | return taskid |
| 111 | 104 | ||
| 112 | def next(self): | 105 | def next(self): |
| 113 | """ | 106 | """ |
| 114 | Return the id of the task we should build next | 107 | Return the id of the task we should build next |
| 115 | """ | 108 | """ |
| 116 | if self.rq.stats.active < self.rq.number_tasks: | 109 | if self.rq.stats.active < self.rq.number_tasks: |
| 117 | return next(self.next_buildable_tasks(), None) | 110 | return self.next_buildable_task() |
| 118 | 111 | ||
| 119 | class RunQueueSchedulerSpeed(RunQueueScheduler): | 112 | class RunQueueSchedulerSpeed(RunQueueScheduler): |
| 120 | """ | 113 | """ |
| @@ -127,13 +120,12 @@ class RunQueueSchedulerSpeed(RunQueueScheduler): | |||
| 127 | """ | 120 | """ |
| 128 | The priority map is sorted by task weight. | 121 | The priority map is sorted by task weight. |
| 129 | """ | 122 | """ |
| 130 | from copy import deepcopy | ||
| 131 | 123 | ||
| 132 | self.rq = runqueue | 124 | self.rq = runqueue |
| 133 | self.rqdata = rqdata | 125 | self.rqdata = rqdata |
| 134 | 126 | ||
| 135 | sortweight = sorted(deepcopy(self.rqdata.runq_weight)) | 127 | sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight)) |
| 136 | copyweight = deepcopy(self.rqdata.runq_weight) | 128 | copyweight = copy.deepcopy(self.rqdata.runq_weight) |
| 137 | self.prio_map = [] | 129 | self.prio_map = [] |
| 138 | 130 | ||
| 139 | for weight in sortweight: | 131 | for weight in sortweight: |
| @@ -155,12 +147,11 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed): | |||
| 155 | 147 | ||
| 156 | def __init__(self, runqueue, rqdata): | 148 | def __init__(self, runqueue, rqdata): |
| 157 | RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata) | 149 | RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata) |
| 158 | from copy import deepcopy | ||
| 159 | 150 | ||
| 160 | #FIXME - whilst this groups all fnids together it does not reorder the | 151 | #FIXME - whilst this groups all fnids together it does not reorder the |
| 161 | #fnid groups optimally. | 152 | #fnid groups optimally. |
| 162 | 153 | ||
| 163 | basemap = deepcopy(self.prio_map) | 154 | basemap = copy.deepcopy(self.prio_map) |
| 164 | self.prio_map = [] | 155 | self.prio_map = [] |
| 165 | while (len(basemap) > 0): | 156 | while (len(basemap) > 0): |
| 166 | entry = basemap.pop(0) | 157 | entry = basemap.pop(0) |
| @@ -190,25 +181,6 @@ class RunQueueData: | |||
| 190 | self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or "" | 181 | self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or "" |
| 191 | self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split() | 182 | self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split() |
| 192 | 183 | ||
| 193 | self.schedulers = set(obj for obj in globals().itervalues() | ||
| 194 | if type(obj) is type and issubclass(obj, RunQueueScheduler)) | ||
| 195 | |||
| 196 | user_schedulers = bb.data.getVar("BB_SCHEDULERS", cfgData, True) | ||
| 197 | if user_schedulers: | ||
| 198 | for sched in user_schedulers.split(): | ||
| 199 | if not "." in sched: | ||
| 200 | bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched) | ||
| 201 | continue | ||
| 202 | |||
| 203 | modname, name = sched.rsplit(".", 1) | ||
| 204 | try: | ||
| 205 | module = __import__(modname, fromlist=(name,)) | ||
| 206 | except ImportError, exc: | ||
| 207 | logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc)) | ||
| 208 | raise SystemExit(1) | ||
| 209 | else: | ||
| 210 | self.schedulers.add(getattr(module, name)) | ||
| 211 | |||
| 212 | self.reset() | 184 | self.reset() |
| 213 | 185 | ||
| 214 | def reset(self): | 186 | def reset(self): |
| @@ -313,7 +285,7 @@ class RunQueueData: | |||
| 313 | if dep in explored_deps[revdep]: | 285 | if dep in explored_deps[revdep]: |
| 314 | scan = True | 286 | scan = True |
| 315 | if scan: | 287 | if scan: |
| 316 | find_chains(revdep, deepcopy(prev_chain)) | 288 | find_chains(revdep, copy.deepcopy(prev_chain)) |
| 317 | for dep in explored_deps[revdep]: | 289 | for dep in explored_deps[revdep]: |
| 318 | if dep not in total_deps: | 290 | if dep not in total_deps: |
| 319 | total_deps.append(dep) | 291 | total_deps.append(dep) |
| @@ -715,20 +687,15 @@ class RunQueueData: | |||
| 715 | stampfnwhitelist.append(fn) | 687 | stampfnwhitelist.append(fn) |
| 716 | self.stampfnwhitelist = stampfnwhitelist | 688 | self.stampfnwhitelist = stampfnwhitelist |
| 717 | 689 | ||
| 718 | #self.dump_data(taskData) | ||
| 719 | |||
| 720 | # Interate over the task list looking for tasks with a 'setscene' function | 690 | # Interate over the task list looking for tasks with a 'setscene' function |
| 721 | |||
| 722 | self.runq_setscene = [] | 691 | self.runq_setscene = [] |
| 723 | for task in range(len(self.runq_fnid)): | 692 | for task in range(len(self.runq_fnid)): |
| 724 | setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False) | 693 | setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False) |
| 725 | if not setscene: | 694 | if not setscene: |
| 726 | continue | 695 | continue |
| 727 | #bb.note("Found setscene for %s %s" % (self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task])) | ||
| 728 | self.runq_setscene.append(task) | 696 | self.runq_setscene.append(task) |
| 729 | 697 | ||
| 730 | # Interate over the task list and call into the siggen code | 698 | # Interate over the task list and call into the siggen code |
| 731 | |||
| 732 | dealtwith = set() | 699 | dealtwith = set() |
| 733 | todeal = set(range(len(self.runq_fnid))) | 700 | todeal = set(range(len(self.runq_fnid))) |
| 734 | while len(todeal) > 0: | 701 | while len(todeal) > 0: |
| @@ -744,7 +711,7 @@ class RunQueueData: | |||
| 744 | hashdata = {} | 711 | hashdata = {} |
| 745 | hashdata["hashes"] = {} | 712 | hashdata["hashes"] = {} |
| 746 | hashdata["deps"] = {} | 713 | hashdata["deps"] = {} |
| 747 | for task in range(len(self.runq_fnid)): | 714 | for task in xrange(len(self.runq_fnid)): |
| 748 | hashdata["hashes"][self.taskData.fn_index[self.runq_fnid[task]] + "." + self.runq_task[task]] = self.runq_hash[task] | 715 | hashdata["hashes"][self.taskData.fn_index[self.runq_fnid[task]] + "." + self.runq_task[task]] = self.runq_hash[task] |
| 749 | deps = [] | 716 | deps = [] |
| 750 | for dep in self.runq_depends[task]: | 717 | for dep in self.runq_depends[task]: |
| @@ -764,24 +731,24 @@ class RunQueueData: | |||
| 764 | Dump some debug information on the internal data structures | 731 | Dump some debug information on the internal data structures |
| 765 | """ | 732 | """ |
| 766 | logger.debug(3, "run_tasks:") | 733 | logger.debug(3, "run_tasks:") |
| 767 | for task in range(len(self.rqdata.runq_task)): | 734 | for task in xrange(len(self.rqdata.runq_task)): |
| 768 | logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, | 735 | logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task, |
| 769 | taskQueue.fn_index[self.rqdata.runq_fnid[task]], | 736 | taskQueue.fn_index[self.rqdata.runq_fnid[task]], |
| 770 | self.rqdata.runq_task[task], | 737 | self.rqdata.runq_task[task], |
| 771 | self.rqdata.runq_weight[task], | 738 | self.rqdata.runq_weight[task], |
| 772 | self.rqdata.runq_depends[task], | 739 | self.rqdata.runq_depends[task], |
| 773 | self.rqdata.runq_revdeps[task])) | 740 | self.rqdata.runq_revdeps[task]) |
| 774 | 741 | ||
| 775 | logger.debug(3, "sorted_tasks:") | 742 | logger.debug(3, "sorted_tasks:") |
| 776 | for task1 in range(len(self.rqdata.runq_task)): | 743 | for task1 in xrange(len(self.rqdata.runq_task)): |
| 777 | if task1 in self.prio_map: | 744 | if task1 in self.prio_map: |
| 778 | task = self.prio_map[task1] | 745 | task = self.prio_map[task1] |
| 779 | logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, | 746 | logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task, |
| 780 | taskQueue.fn_index[self.rqdata.runq_fnid[task]], | 747 | taskQueue.fn_index[self.rqdata.runq_fnid[task]], |
| 781 | self.rqdata.runq_task[task], | 748 | self.rqdata.runq_task[task], |
| 782 | self.rqdata.runq_weight[task], | 749 | self.rqdata.runq_weight[task], |
| 783 | self.rqdata.runq_depends[task], | 750 | self.rqdata.runq_depends[task], |
| 784 | self.rqdata.runq_revdeps[task])) | 751 | self.rqdata.runq_revdeps[task]) |
| 785 | 752 | ||
| 786 | class RunQueue: | 753 | class RunQueue: |
| 787 | def __init__(self, cooker, cfgData, dataCache, taskData, targets): | 754 | def __init__(self, cooker, cfgData, dataCache, taskData, targets): |
| @@ -809,7 +776,7 @@ class RunQueue: | |||
| 809 | if self.stamppolicy == "whitelist": | 776 | if self.stamppolicy == "whitelist": |
| 810 | stampwhitelist = self.rqdata.stampfnwhitelist | 777 | stampwhitelist = self.rqdata.stampfnwhitelist |
| 811 | 778 | ||
| 812 | for task in range(len(self.rqdata.runq_fnid)): | 779 | for task in xrange(len(self.rqdata.runq_fnid)): |
| 813 | unchecked[task] = "" | 780 | unchecked[task] = "" |
| 814 | if len(self.rqdata.runq_depends[task]) == 0: | 781 | if len(self.rqdata.runq_depends[task]) == 0: |
| 815 | buildable.append(task) | 782 | buildable.append(task) |
| @@ -824,7 +791,7 @@ class RunQueue: | |||
| 824 | if revdep in unchecked: | 791 | if revdep in unchecked: |
| 825 | buildable.append(revdep) | 792 | buildable.append(revdep) |
| 826 | 793 | ||
| 827 | for task in range(len(self.rqdata.runq_fnid)): | 794 | for task in xrange(len(self.rqdata.runq_fnid)): |
| 828 | if task not in unchecked: | 795 | if task not in unchecked: |
| 829 | continue | 796 | continue |
| 830 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] | 797 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] |
| @@ -909,7 +876,7 @@ class RunQueue: | |||
| 909 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] | 876 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] |
| 910 | if taskname is None: | 877 | if taskname is None: |
| 911 | taskname = self.rqdata.runq_task[task] | 878 | taskname = self.rqdata.runq_task[task] |
| 912 | 879 | ||
| 913 | stampfile = bb.parse.siggen.stampfile(self.rqdata.dataCache.stamp[fn], fn, taskname) | 880 | stampfile = bb.parse.siggen.stampfile(self.rqdata.dataCache.stamp[fn], fn, taskname) |
| 914 | 881 | ||
| 915 | # If the stamp is missing its not current | 882 | # If the stamp is missing its not current |
| @@ -919,7 +886,7 @@ class RunQueue: | |||
| 919 | # If its a 'nostamp' task, it's not current | 886 | # If its a 'nostamp' task, it's not current |
| 920 | taskdep = self.rqdata.dataCache.task_deps[fn] | 887 | taskdep = self.rqdata.dataCache.task_deps[fn] |
| 921 | if 'nostamp' in taskdep and taskname in taskdep['nostamp']: | 888 | if 'nostamp' in taskdep and taskname in taskdep['nostamp']: |
| 922 | logger.debug(2, "%s.%s is nostamp\n" % (fn, taskname)) | 889 | logger.debug(2, "%s.%s is nostamp\n", fn, taskname) |
| 923 | return False | 890 | return False |
| 924 | 891 | ||
| 925 | if taskname != "do_setscene" and taskname.endswith("_setscene"): | 892 | if taskname != "do_setscene" and taskname.endswith("_setscene"): |
| @@ -939,10 +906,10 @@ class RunQueue: | |||
| 939 | continue | 906 | continue |
| 940 | if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist): | 907 | if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist): |
| 941 | if not t2: | 908 | if not t2: |
| 942 | logger.debug(2, "Stampfile %s does not exist" % (stampfile2)) | 909 | logger.debug(2, 'Stampfile %s does not exist', stampfile2) |
| 943 | iscurrent = False | 910 | iscurrent = False |
| 944 | if t1 < t2: | 911 | if t1 < t2: |
| 945 | logger.debug(2, "Stampfile %s < %s" % (stampfile, stampfile2)) | 912 | logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2) |
| 946 | iscurrent = False | 913 | iscurrent = False |
| 947 | 914 | ||
| 948 | return iscurrent | 915 | return iscurrent |
| @@ -1014,7 +981,7 @@ class RunQueue: | |||
| 1014 | bb.note("Reparsing files to collect dependency data") | 981 | bb.note("Reparsing files to collect dependency data") |
| 1015 | for task in range(len(self.rqdata.runq_fnid)): | 982 | for task in range(len(self.rqdata.runq_fnid)): |
| 1016 | if self.rqdata.runq_fnid[task] not in done: | 983 | if self.rqdata.runq_fnid[task] not in done: |
| 1017 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] | 984 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] |
| 1018 | the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data) | 985 | the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data) |
| 1019 | done.add(self.rqdata.runq_fnid[task]) | 986 | done.add(self.rqdata.runq_fnid[task]) |
| 1020 | 987 | ||
| @@ -1087,7 +1054,7 @@ class RunQueueExecute: | |||
| 1087 | self.rq.state = runQueueComplete | 1054 | self.rq.state = runQueueComplete |
| 1088 | return | 1055 | return |
| 1089 | 1056 | ||
| 1090 | def fork_off_task(self, fn, task, taskname): | 1057 | def fork_off_task(self, fn, task, taskname, quieterrors=False): |
| 1091 | the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data) | 1058 | the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data) |
| 1092 | 1059 | ||
| 1093 | env = bb.data.export_vars(the_data) | 1060 | env = bb.data.export_vars(the_data) |
| @@ -1115,10 +1082,9 @@ class RunQueueExecute: | |||
| 1115 | sys.stdout.flush() | 1082 | sys.stdout.flush() |
| 1116 | sys.stderr.flush() | 1083 | sys.stderr.flush() |
| 1117 | try: | 1084 | try: |
| 1118 | pipeinfd, pipeoutfd = os.pipe() | 1085 | pipein, pipeout = os.pipe() |
| 1119 | pipein = os.fdopen(pipeinfd, 'rb', 4096) | 1086 | pipein = os.fdopen(pipein, 'rb', 4096) |
| 1120 | pipeout = os.fdopen(pipeoutfd, 'wb', 4096) | 1087 | pipeout = os.fdopen(pipeout, 'wb', 0) |
| 1121 | |||
| 1122 | pid = os.fork() | 1088 | pid = os.fork() |
| 1123 | except OSError as e: | 1089 | except OSError as e: |
| 1124 | bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror)) | 1090 | bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror)) |
| @@ -1148,7 +1114,7 @@ class RunQueueExecute: | |||
| 1148 | #newso = open(logout, 'w') | 1114 | #newso = open(logout, 'w') |
| 1149 | #os.dup2(newso.fileno(), sys.stdout.fileno()) | 1115 | #os.dup2(newso.fileno(), sys.stdout.fileno()) |
| 1150 | #os.dup2(newso.fileno(), sys.stderr.fileno()) | 1116 | #os.dup2(newso.fileno(), sys.stderr.fileno()) |
| 1151 | if taskname.endswith("_setscene"): | 1117 | if quieterrors: |
| 1152 | the_data.setVarFlag(taskname, "quieterrors", "1") | 1118 | the_data.setVarFlag(taskname, "quieterrors", "1") |
| 1153 | 1119 | ||
| 1154 | bb.data.setVar("BB_WORKERCONTEXT", "1", the_data) | 1120 | bb.data.setVar("BB_WORKERCONTEXT", "1", the_data) |
| @@ -1219,14 +1185,38 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
| 1219 | 1185 | ||
| 1220 | event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData) | 1186 | event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData) |
| 1221 | 1187 | ||
| 1222 | for scheduler in self.rqdata.schedulers: | 1188 | schedulers = self.get_schedulers() |
| 1189 | for scheduler in schedulers: | ||
| 1223 | if self.scheduler == scheduler.name: | 1190 | if self.scheduler == scheduler.name: |
| 1224 | self.sched = scheduler(self, self.rqdata) | 1191 | self.sched = scheduler(self, self.rqdata) |
| 1225 | logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name) | 1192 | logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name) |
| 1226 | break | 1193 | break |
| 1227 | else: | 1194 | else: |
| 1228 | bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % | 1195 | bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % |
| 1229 | (self.scheduler, ", ".join(obj.name for obj in self.rqdata.schedulers))) | 1196 | (self.scheduler, ", ".join(obj.name for obj in schedulers))) |
| 1197 | |||
| 1198 | |||
| 1199 | def get_schedulers(self): | ||
| 1200 | schedulers = set(obj for obj in globals().values() | ||
| 1201 | if type(obj) is type and | ||
| 1202 | issubclass(obj, RunQueueScheduler)) | ||
| 1203 | |||
| 1204 | user_schedulers = bb.data.getVar("BB_SCHEDULERS", self.cfgData, True) | ||
| 1205 | if user_schedulers: | ||
| 1206 | for sched in user_schedulers.split(): | ||
| 1207 | if not "." in sched: | ||
| 1208 | bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched) | ||
| 1209 | continue | ||
| 1210 | |||
| 1211 | modname, name = sched.rsplit(".", 1) | ||
| 1212 | try: | ||
| 1213 | module = __import__(modname, fromlist=(name,)) | ||
| 1214 | except ImportError, exc: | ||
| 1215 | logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc)) | ||
| 1216 | raise SystemExit(1) | ||
| 1217 | else: | ||
| 1218 | schedulers.add(getattr(module, name)) | ||
| 1219 | return schedulers | ||
| 1230 | 1220 | ||
| 1231 | def task_completeoutright(self, task): | 1221 | def task_completeoutright(self, task): |
| 1232 | """ | 1222 | """ |
| @@ -1283,17 +1273,17 @@ class RunQueueExecuteTasks(RunQueueExecute): | |||
| 1283 | # nothing to do | 1273 | # nothing to do |
| 1284 | self.rq.state = runQueueCleanUp | 1274 | self.rq.state = runQueueCleanUp |
| 1285 | 1275 | ||
| 1286 | for task in iter(self.sched.next, None): | 1276 | task = self.sched.next() |
| 1277 | if task is not None: | ||
| 1287 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] | 1278 | fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] |
| 1288 | 1279 | ||
| 1289 | taskname = self.rqdata.runq_task[task] | 1280 | taskname = self.rqdata.runq_task[task] |
| 1290 | if self.rq.check_stamp_task(task, taskname): | 1281 | if self.rq.check_stamp_task(task, taskname): |
| 1291 | logger.debug(2, "Stamp current task %s (%s)" % (task, self.rqdata.get_user_idstring(task))) | 1282 | logger.debug(2, "Stamp current task %s (%s)", task, |
| 1283 | self.rqdata.get_user_idstring(task)) | ||
| 1292 | self.task_skip(task) | 1284 | self.task_skip(task) |
| 1293 | return True | 1285 | return True |
| 1294 | 1286 | ||
| 1295 | bb.event.fire(runQueueTaskStarted(task, self.stats, self.rq), self.cfgData) | ||
| 1296 | |||
| 1297 | taskdep = self.rqdata.dataCache.task_deps[fn] | 1287 | taskdep = self.rqdata.dataCache.task_deps[fn] |
| 1298 | if 'noexec' in taskdep and taskname in taskdep['noexec']: | 1288 | if 'noexec' in taskdep and taskname in taskdep['noexec']: |
| 1299 | startevent = runQueueTaskStarted(task, self.stats, self.rq, | 1289 | startevent = runQueueTaskStarted(task, self.stats, self.rq, |
| @@ -1457,12 +1447,11 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
| 1457 | 1447 | ||
| 1458 | for task in xrange(len(self.sq_revdeps)): | 1448 | for task in xrange(len(self.sq_revdeps)): |
| 1459 | if task not in valid_new and task not in noexec: | 1449 | if task not in valid_new and task not in noexec: |
| 1460 | logger.debug(2, "No package found so skipping setscene task %s" % (self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task]))) | 1450 | logger.debug(2, 'No package found, so skipping setscene task %s', |
| 1451 | self.rqdata.get_user_idstring(task)) | ||
| 1461 | self.task_failoutright(task) | 1452 | self.task_failoutright(task) |
| 1462 | 1453 | ||
| 1463 | #print(str(valid)) | 1454 | logger.info('Executing SetScene Tasks') |
| 1464 | |||
| 1465 | logger.info("Executing SetScene Tasks") | ||
| 1466 | 1455 | ||
| 1467 | self.rq.state = runQueueSceneRun | 1456 | self.rq.state = runQueueSceneRun |
| 1468 | 1457 | ||
| @@ -1523,11 +1512,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
| 1523 | # Find the next setscene to run | 1512 | # Find the next setscene to run |
| 1524 | for nexttask in xrange(self.stats.total): | 1513 | for nexttask in xrange(self.stats.total): |
| 1525 | if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1: | 1514 | if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1: |
| 1526 | #bb.note("Comparing %s to %s" % (self.sq_revdeps[nexttask], self.scenequeue_covered)) | ||
| 1527 | #if len(self.sq_revdeps[nexttask]) > 0 and self.sq_revdeps[nexttask].issubset(self.scenequeue_covered): | ||
| 1528 | # bb.note("Skipping task %s" % nexttask) | ||
| 1529 | # self.scenequeue_skip(nexttask) | ||
| 1530 | # return True | ||
| 1531 | task = nexttask | 1515 | task = nexttask |
| 1532 | break | 1516 | break |
| 1533 | if task is not None: | 1517 | if task is not None: |
| @@ -1536,7 +1520,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
| 1536 | 1520 | ||
| 1537 | taskname = self.rqdata.runq_task[realtask] + "_setscene" | 1521 | taskname = self.rqdata.runq_task[realtask] + "_setscene" |
| 1538 | if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]): | 1522 | if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]): |
| 1539 | logger.debug(2, "Stamp for underlying task %s (%s) is current so skipping setscene varient" % (task, self.rqdata.get_user_idstring(task))) | 1523 | logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant', |
| 1524 | task, self.rqdata.get_user_idstring(task)) | ||
| 1540 | self.task_failoutright(task) | 1525 | self.task_failoutright(task) |
| 1541 | return True | 1526 | return True |
| 1542 | 1527 | ||
| @@ -1547,7 +1532,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
| 1547 | return True | 1532 | return True |
| 1548 | 1533 | ||
| 1549 | if self.rq.check_stamp_task(realtask, taskname): | 1534 | if self.rq.check_stamp_task(realtask, taskname): |
| 1550 | logger.debug(2, "Setscene stamp current task %s (%s) so skip it and its dependencies" % (task, self.rqdata.get_user_idstring(realtask))) | 1535 | logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies', |
| 1536 | task, self.rqdata.get_user_idstring(realtask)) | ||
| 1551 | self.task_skip(task) | 1537 | self.task_skip(task) |
| 1552 | return True | 1538 | return True |
| 1553 | 1539 | ||
| @@ -1577,11 +1563,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute): | |||
| 1577 | for task in oldcovered: | 1563 | for task in oldcovered: |
| 1578 | self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task]) | 1564 | self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task]) |
| 1579 | 1565 | ||
| 1580 | bb.debug(1, "We can skip tasks %s" % self.rq.scenequeue_covered) | 1566 | logger.debug(1, 'We can skip tasks %s', self.rq.scenequeue_covered) |
| 1581 | 1567 | ||
| 1582 | self.rq.state = runQueueRunInit | 1568 | self.rq.state = runQueueRunInit |
| 1583 | return True | 1569 | return True |
| 1584 | 1570 | ||
| 1571 | def fork_off_task(self, fn, task, taskname): | ||
| 1572 | return RunQueueExecute.fork_off_task(self, fn, task, taskname, quieterrors=True) | ||
| 1573 | |||
| 1585 | class TaskFailure(Exception): | 1574 | class TaskFailure(Exception): |
| 1586 | """ | 1575 | """ |
| 1587 | Exception raised when a task in a runqueue fails | 1576 | Exception raised when a task in a runqueue fails |
| @@ -1632,12 +1621,12 @@ class runQueueTaskCompleted(runQueueEvent): | |||
| 1632 | """ | 1621 | """ |
| 1633 | 1622 | ||
| 1634 | #def check_stamp_fn(fn, taskname, d): | 1623 | #def check_stamp_fn(fn, taskname, d): |
| 1635 | # rq = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d) | 1624 | # rqexe = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d) |
| 1636 | # fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d) | 1625 | # fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d) |
| 1637 | # fnid = rq.rqdata.taskData.getfn_id(fn) | 1626 | # fnid = rqexe.rqdata.taskData.getfn_id(fn) |
| 1638 | # taskid = rq.get_task_id(fnid, taskname) | 1627 | # taskid = rqexe.rqdata.get_task_id(fnid, taskname) |
| 1639 | # if taskid is not None: | 1628 | # if taskid is not None: |
| 1640 | # return rq.check_stamp_task(taskid) | 1629 | # return rqexe.rq.check_stamp_task(taskid) |
| 1641 | # return None | 1630 | # return None |
| 1642 | 1631 | ||
| 1643 | class runQueuePipe(): | 1632 | class runQueuePipe(): |
| @@ -1645,17 +1634,17 @@ class runQueuePipe(): | |||
| 1645 | Abstraction for a pipe between a worker thread and the server | 1634 | Abstraction for a pipe between a worker thread and the server |
| 1646 | """ | 1635 | """ |
| 1647 | def __init__(self, pipein, pipeout, d): | 1636 | def __init__(self, pipein, pipeout, d): |
| 1648 | self.fd = pipein | 1637 | self.input = pipein |
| 1649 | pipeout.close() | 1638 | pipeout.close() |
| 1650 | fcntl.fcntl(self.fd, fcntl.F_SETFL, fcntl.fcntl(self.fd, fcntl.F_GETFL) | os.O_NONBLOCK) | 1639 | fcntl.fcntl(self.input, fcntl.F_SETFL, fcntl.fcntl(self.input, fcntl.F_GETFL) | os.O_NONBLOCK) |
| 1651 | self.queue = "" | 1640 | self.queue = "" |
| 1652 | self.d = d | 1641 | self.d = d |
| 1653 | 1642 | ||
| 1654 | def read(self): | 1643 | def read(self): |
| 1655 | start = len(self.queue) | 1644 | start = len(self.queue) |
| 1656 | try: | 1645 | try: |
| 1657 | self.queue = self.queue + self.fd.read(1024) | 1646 | self.queue = self.queue + self.input.read(1024) |
| 1658 | except IOError: | 1647 | except (OSError, IOError): |
| 1659 | pass | 1648 | pass |
| 1660 | end = len(self.queue) | 1649 | end = len(self.queue) |
| 1661 | index = self.queue.find("</event>") | 1650 | index = self.queue.find("</event>") |
| @@ -1670,4 +1659,4 @@ class runQueuePipe(): | |||
| 1670 | continue | 1659 | continue |
| 1671 | if len(self.queue) > 0: | 1660 | if len(self.queue) > 0: |
| 1672 | print("Warning, worker left partial message: %s" % self.queue) | 1661 | print("Warning, worker left partial message: %s" % self.queue) |
| 1673 | self.fd.close() | 1662 | self.input.close() |
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index 7d7a203b83..4dc09b3f9e 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
| @@ -178,6 +178,17 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
| 178 | bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k])) | 178 | bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k])) |
| 179 | self.dump_sigtask(fn, task, dataCache.stamp[fn], True) | 179 | self.dump_sigtask(fn, task, dataCache.stamp[fn], True) |
| 180 | 180 | ||
| 181 | class SignatureGeneratorBasicHash(SignatureGeneratorBasic): | ||
| 182 | name = "basichash" | ||
| 183 | |||
| 184 | def stampfile(self, stampbase, fn, taskname): | ||
| 185 | if taskname != "do_setscene" and taskname.endswith("_setscene"): | ||
| 186 | k = fn + "." + taskname[:-9] | ||
| 187 | else: | ||
| 188 | k = fn + "." + taskname | ||
| 189 | h = self.taskhash[k] | ||
| 190 | return "%s.%s.%s" % (stampbase, taskname, h) | ||
| 191 | |||
| 181 | def dump_this_task(outfile, d): | 192 | def dump_this_task(outfile, d): |
| 182 | fn = d.getVar("BB_FILENAME", True) | 193 | fn = d.getVar("BB_FILENAME", True) |
| 183 | task = "do_" + d.getVar("BB_CURRENTTASK", True) | 194 | task = "do_" + d.getVar("BB_CURRENTTASK", True) |
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py index 7fb7f84e5b..34180fb93e 100644 --- a/bitbake/lib/bb/ui/knotty.py +++ b/bitbake/lib/bb/ui/knotty.py | |||
| @@ -111,10 +111,10 @@ def main(server, eventHandler): | |||
| 111 | print("%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task)) | 111 | print("%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task)) |
| 112 | 112 | ||
| 113 | if isinstance(event, logging.LogRecord): | 113 | if isinstance(event, logging.LogRecord): |
| 114 | if event.levelno >= logging.CRITICAL: | 114 | if event.levelno >= format.ERROR: |
| 115 | return_value = 1 | ||
| 116 | if event.levelno is logging.ERROR: | ||
| 117 | return_value = 1 | 115 | return_value = 1 |
| 116 | if event.taskpid != 0 and event.levelno <= format.NOTE: | ||
| 117 | continue | ||
| 118 | logger.handle(event) | 118 | logger.handle(event) |
| 119 | continue | 119 | continue |
| 120 | 120 | ||
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index 48ed0d72e5..5b3710f84f 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py | |||
| @@ -291,17 +291,6 @@ def join_deps(deps): | |||
| 291 | result.append(dep) | 291 | result.append(dep) |
| 292 | return ", ".join(result) | 292 | return ", ".join(result) |
| 293 | 293 | ||
| 294 | def extend_deps(dest, src): | ||
| 295 | """ | ||
| 296 | Extend the results from explode_dep_versions by appending all of the items | ||
| 297 | in the second list, avoiding duplicates. | ||
| 298 | """ | ||
| 299 | for dep in src: | ||
| 300 | if dep not in dest: | ||
| 301 | dest[dep] = src[dep] | ||
| 302 | elif dest[dep] != src[dep]: | ||
| 303 | dest[dep] = src[dep] | ||
| 304 | |||
| 305 | def _print_trace(body, line): | 294 | def _print_trace(body, line): |
| 306 | """ | 295 | """ |
| 307 | Print the Environment of a Text Body | 296 | Print the Environment of a Text Body |
| @@ -355,16 +344,14 @@ def better_exec(code, context, text, realfile = "<code>"): | |||
| 355 | if t in [bb.parse.SkipPackage, bb.build.FuncFailed]: | 344 | if t in [bb.parse.SkipPackage, bb.build.FuncFailed]: |
| 356 | raise | 345 | raise |
| 357 | 346 | ||
| 358 | logger.exception("Error executing python function in '%s'", code.co_filename) | 347 | import traceback |
| 359 | 348 | exception = traceback.format_exception_only(t, value) | |
| 360 | # print the Header of the Error Message | 349 | logger.error('Error executing a python function in %s:\n%s', |
| 361 | logger.error("There was an error when executing a python function in: %s" % code.co_filename) | 350 | realfile, ''.join(exception)) |
| 362 | logger.error("Exception:%s Message:%s" % (t, value)) | ||
| 363 | 351 | ||
| 364 | # Strip 'us' from the stack (better_exec call) | 352 | # Strip 'us' from the stack (better_exec call) |
| 365 | tb = tb.tb_next | 353 | tb = tb.tb_next |
| 366 | 354 | ||
| 367 | import traceback | ||
| 368 | textarray = text.split('\n') | 355 | textarray = text.split('\n') |
| 369 | linefailed = traceback.tb_lineno(tb) | 356 | linefailed = traceback.tb_lineno(tb) |
| 370 | 357 | ||
| @@ -490,9 +477,9 @@ def sha256_file(filename): | |||
| 490 | s.update(line) | 477 | s.update(line) |
| 491 | return s.hexdigest() | 478 | return s.hexdigest() |
| 492 | 479 | ||
| 493 | # Variables which are preserved from the original environment *and* exported | 480 | def preserved_envvars_exported(): |
| 494 | # into our worker context | 481 | """Variables which are taken from the environment and placed in and exported |
| 495 | def preserved_envvars_export_list(): | 482 | from the metadata""" |
| 496 | return [ | 483 | return [ |
| 497 | 'BB_TASKHASH', | 484 | 'BB_TASKHASH', |
| 498 | 'HOME', | 485 | 'HOME', |
| @@ -505,9 +492,9 @@ def preserved_envvars_export_list(): | |||
| 505 | 'USERNAME', | 492 | 'USERNAME', |
| 506 | ] | 493 | ] |
| 507 | 494 | ||
| 508 | # Variables which are preserved from the original environment *and* exported | 495 | def preserved_envvars_exported_interactive(): |
| 509 | # into our worker context for interactive tasks (e.g. requiring X) | 496 | """Variables which are taken from the environment and placed in and exported |
| 510 | def preserved_envvars_export_interactive_list(): | 497 | from the metadata, for interactive tasks""" |
| 511 | return [ | 498 | return [ |
| 512 | 'COLORTERM', | 499 | 'COLORTERM', |
| 513 | 'DBUS_SESSION_BUS_ADDRESS', | 500 | 'DBUS_SESSION_BUS_ADDRESS', |
| @@ -525,8 +512,8 @@ def preserved_envvars_export_interactive_list(): | |||
| 525 | 'XDG_SESSION_COOKIE', | 512 | 'XDG_SESSION_COOKIE', |
| 526 | ] | 513 | ] |
| 527 | 514 | ||
| 528 | # Variables which are preserved from the original environment into the datastore | 515 | def preserved_envvars(): |
| 529 | def preserved_envvars_list(): | 516 | """Variables which are taken from the environment and placed in the metadata""" |
| 530 | v = [ | 517 | v = [ |
| 531 | 'BBPATH', | 518 | 'BBPATH', |
| 532 | 'BB_PRESERVE_ENV', | 519 | 'BB_PRESERVE_ENV', |
| @@ -535,7 +522,7 @@ def preserved_envvars_list(): | |||
| 535 | 'LANG', | 522 | 'LANG', |
| 536 | '_', | 523 | '_', |
| 537 | ] | 524 | ] |
| 538 | return v + preserved_envvars_export_list() + preserved_envvars_export_interactive_list() | 525 | return v + preserved_envvars_exported() + preserved_envvars_exported_interactive() |
| 539 | 526 | ||
| 540 | def filter_environment(good_vars): | 527 | def filter_environment(good_vars): |
| 541 | """ | 528 | """ |
| @@ -557,8 +544,8 @@ def filter_environment(good_vars): | |||
| 557 | 544 | ||
| 558 | return removed_vars | 545 | return removed_vars |
| 559 | 546 | ||
| 560 | def create_intereactive_env(d): | 547 | def create_interactive_env(d): |
| 561 | for k in preserved_envvars_export_interactive_list(): | 548 | for k in preserved_envvars_exported_interactive(): |
| 562 | os.setenv(k, bb.data.getVar(k, d, True)) | 549 | os.setenv(k, bb.data.getVar(k, d, True)) |
| 563 | 550 | ||
| 564 | def clean_environment(): | 551 | def clean_environment(): |
| @@ -570,7 +557,7 @@ def clean_environment(): | |||
| 570 | if 'BB_ENV_WHITELIST' in os.environ: | 557 | if 'BB_ENV_WHITELIST' in os.environ: |
| 571 | good_vars = os.environ['BB_ENV_WHITELIST'].split() | 558 | good_vars = os.environ['BB_ENV_WHITELIST'].split() |
| 572 | else: | 559 | else: |
| 573 | good_vars = preserved_envvars_list() | 560 | good_vars = preserved_envvars() |
| 574 | if 'BB_ENV_EXTRAWHITE' in os.environ: | 561 | if 'BB_ENV_EXTRAWHITE' in os.environ: |
| 575 | good_vars.extend(os.environ['BB_ENV_EXTRAWHITE'].split()) | 562 | good_vars.extend(os.environ['BB_ENV_EXTRAWHITE'].split()) |
| 576 | filter_environment(good_vars) | 563 | filter_environment(good_vars) |
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass index 71ed5b6433..031583442d 100644 --- a/meta/classes/insane.bbclass +++ b/meta/classes/insane.bbclass | |||
| @@ -534,9 +534,9 @@ python do_package_qa () { | |||
| 534 | 534 | ||
| 535 | checks = [package_qa_check_rpath, package_qa_check_dev, | 535 | checks = [package_qa_check_rpath, package_qa_check_dev, |
| 536 | package_qa_check_perm, package_qa_check_arch, | 536 | package_qa_check_perm, package_qa_check_arch, |
| 537 | package_qa_check_desktop, | 537 | package_qa_check_desktop, package_qa_hash_style, |
| 538 | package_qa_check_dbg] | 538 | package_qa_check_dbg] |
| 539 | # package_qa_check_buildpaths, package_qa_hash_style | 539 | # package_qa_check_buildpaths, |
| 540 | walk_sane = True | 540 | walk_sane = True |
| 541 | rdepends_sane = True | 541 | rdepends_sane = True |
| 542 | for package in packages.split(): | 542 | for package in packages.split(): |
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass index 7ebe5d69aa..df2a8a43bc 100644 --- a/meta/classes/kernel-yocto.bbclass +++ b/meta/classes/kernel-yocto.bbclass | |||
| @@ -121,7 +121,7 @@ do_kernel_configme() { | |||
| 121 | echo "[INFO] doing kernel configme" | 121 | echo "[INFO] doing kernel configme" |
| 122 | 122 | ||
| 123 | cd ${S} | 123 | cd ${S} |
| 124 | configme --reconfig | 124 | configme --reconfig --output ${B} |
| 125 | if [ $? -ne 0 ]; then | 125 | if [ $? -ne 0 ]; then |
| 126 | echo "ERROR. Could not configure ${KMACHINE}-${LINUX_KERNEL_TYPE}" | 126 | echo "ERROR. Could not configure ${KMACHINE}-${LINUX_KERNEL_TYPE}" |
| 127 | exit 1 | 127 | exit 1 |
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf index 30fcc7947b..376e3cf384 100644 --- a/meta/conf/bitbake.conf +++ b/meta/conf/bitbake.conf | |||
| @@ -449,8 +449,9 @@ BUILDSDK_LDFLAGS = "-L${STAGING_LIBDIR} \ | |||
| 449 | -Wl,-rpath-link,${STAGING_DIR_HOST}${base_libdir} \ | 449 | -Wl,-rpath-link,${STAGING_DIR_HOST}${base_libdir} \ |
| 450 | -Wl,-rpath,${base_libdir} -Wl,-O1" | 450 | -Wl,-rpath,${base_libdir} -Wl,-O1" |
| 451 | 451 | ||
| 452 | TARGET_LINK_HASH_STYLE ??= "" | ||
| 452 | export LDFLAGS = "${TARGET_LDFLAGS}" | 453 | export LDFLAGS = "${TARGET_LDFLAGS}" |
| 453 | export TARGET_LDFLAGS = "-Wl,-O1" | 454 | export TARGET_LDFLAGS = "-Wl,-O1 ${TARGET_LINK_HASH_STYLE}" |
| 454 | #export TARGET_LDFLAGS = "-L${STAGING_DIR_TARGET}${libdir} \ | 455 | #export TARGET_LDFLAGS = "-L${STAGING_DIR_TARGET}${libdir} \ |
| 455 | # -Wl,-rpath-link,${STAGING_DIR_TARGET}${libdir} \ | 456 | # -Wl,-rpath-link,${STAGING_DIR_TARGET}${libdir} \ |
| 456 | # -Wl,-O1" | 457 | # -Wl,-O1" |
| @@ -536,7 +537,7 @@ UPDATECOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} update -d -P ${CVSCOOPTS}" | |||
| 536 | UPDATECOMMAND_svn = "/usr/bin/env svn update ${SVNCOOPTS}" | 537 | UPDATECOMMAND_svn = "/usr/bin/env svn update ${SVNCOOPTS}" |
| 537 | SRCDATE = "${DATE}" | 538 | SRCDATE = "${DATE}" |
| 538 | SRCREV = "INVALID" | 539 | SRCREV = "INVALID" |
| 539 | AUTOREV = "${SRCPV}" | 540 | AUTOREV = "${@bb.fetch.get_autorev(d)}" |
| 540 | SRCPV = "${@bb.fetch.get_srcrev(d)}" | 541 | SRCPV = "${@bb.fetch.get_srcrev(d)}" |
| 541 | 542 | ||
| 542 | SRC_URI = "file://${FILE}" | 543 | SRC_URI = "file://${FILE}" |
diff --git a/meta/conf/distro/include/poky-default-revisions.inc b/meta/conf/distro/include/poky-default-revisions.inc index c11faf9316..e094109756 100644 --- a/meta/conf/distro/include/poky-default-revisions.inc +++ b/meta/conf/distro/include/poky-default-revisions.inc | |||
| @@ -57,7 +57,7 @@ SRCREV_pn-gypsy ??= "147" | |||
| 57 | SRCREV_pn-inputproto ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" | 57 | SRCREV_pn-inputproto ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" |
| 58 | SRCREV_pn-inputproto-native ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" | 58 | SRCREV_pn-inputproto-native ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" |
| 59 | SRCREV_pn-inputproto-nativesdk ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" | 59 | SRCREV_pn-inputproto-nativesdk ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" |
| 60 | SRCREV_pn-kern-tools-native ??= "72683bf61fdb83a1c0b4110763f803ff3e39f8ca" | 60 | SRCREV_pn-kern-tools-native ??= "f553af044615ba75c2b4b7ef1f382e6f75087213" |
| 61 | SRCREV_pn-libdrm ??= "3f3c5be6f908272199ccf53f108b1124bfe0a00e" | 61 | SRCREV_pn-libdrm ??= "3f3c5be6f908272199ccf53f108b1124bfe0a00e" |
| 62 | SRCREV_pn-libfakekey ??= "2031" | 62 | SRCREV_pn-libfakekey ??= "2031" |
| 63 | SRCREV_pn-libgdbus ??= "aeab6e3c0185b271ca343b439470491b99cc587f" | 63 | SRCREV_pn-libgdbus ??= "aeab6e3c0185b271ca343b439470491b99cc587f" |
| @@ -93,20 +93,20 @@ SRCREV_machine_pn-linux-yocto-stable_atom-pc ?= "35521a5a70316785a67aca1de1d39a7 | |||
| 93 | SRCREV_machine_pn-linux-yocto-stable_routerstationpro ?= "b323ab98f055df012277f09d444951619fda24e3" | 93 | SRCREV_machine_pn-linux-yocto-stable_routerstationpro ?= "b323ab98f055df012277f09d444951619fda24e3" |
| 94 | SRCREV_machine_pn-linux-yocto-stable_mpc8315e-rdb ?= "0d19c4ab6185b2e093beb5f366ccff4746197166" | 94 | SRCREV_machine_pn-linux-yocto-stable_mpc8315e-rdb ?= "0d19c4ab6185b2e093beb5f366ccff4746197166" |
| 95 | SRCREV_machine_pn-linux-yocto-stable_beagleboard ?= "35521a5a70316785a67aca1de1d39a7b84c49ccf" | 95 | SRCREV_machine_pn-linux-yocto-stable_beagleboard ?= "35521a5a70316785a67aca1de1d39a7b84c49ccf" |
| 96 | SRCREV_meta_pn-linux-yocto-stable ?= "582a28e4bc966ea367cbc2dc1f0de89dd4e7c3d8" | 96 | SRCREV_meta_pn-linux-yocto-stable ?= "ed446ecd29e8c0f81ab8630a2db652121eeb0b75" |
| 97 | # development SRCREVs | 97 | # development SRCREVs |
| 98 | SRCREV_machine_pn-linux-yocto_qemuarm = "605b1b1e0c68e9f78a7fb33aa219327153a4a890" | 98 | SRCREV_machine_pn-linux-yocto_qemuarm ?= "4f86b5a0cb23faa4134cc004730d673f6640614b" |
| 99 | SRCREV_machine_pn-linux-yocto_qemumips = "0f05ab306ac79684a3352f28db91dfb903c67410" | 99 | SRCREV_machine_pn-linux-yocto_qemumips ?= "2b43d84250a582b61397ad5f59a6445ce1258615" |
| 100 | SRCREV_machine_pn-linux-yocto_qemuppc = "d7f2a74cf32e9d6e26e66699d1422096437d3d0e" | 100 | SRCREV_machine_pn-linux-yocto_qemuppc ?= "20f182db27f3e6fc6bd55bfc9e24c8c023fbff7f" |
| 101 | SRCREV_machine_pn-linux-yocto_qemux86 = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" | 101 | SRCREV_machine_pn-linux-yocto_qemux86 ?= "a9d833fda90e2f1257888a97e092135610b5f259" |
| 102 | SRCREV_machine_pn-linux-yocto_qemux86-64 = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" | 102 | SRCREV_machine_pn-linux-yocto_qemux86-64 ?= "a9d833fda90e2f1257888a97e092135610b5f259" |
| 103 | SRCREV_machine_pn-linux-yocto_emenlow = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" | 103 | SRCREV_machine_pn-linux-yocto_emenlow ?= "a9d833fda90e2f1257888a97e092135610b5f259" |
| 104 | SRCREV_machine_pn-linux-yocto_atom-pc = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" | 104 | SRCREV_machine_pn-linux-yocto_atom-pc ?= "a9d833fda90e2f1257888a97e092135610b5f259" |
| 105 | SRCREV_machine_pn-linux-yocto_routerstationpro = "cc1d802228803e0ebf27f3d3772de3620159d195" | 105 | SRCREV_machine_pn-linux-yocto_routerstationpro ?= "9d24b148fc23a2c8fab0a6add18500453b375d54" |
| 106 | SRCREV_machine_pn-linux-yocto_mpc8315e-rdb = "be26676bf67b0c4b5f36b57fe9e46f8bfd2a67c9" | 106 | SRCREV_machine_pn-linux-yocto_mpc8315e-rdb ?= "3f01b98238c2ff7913ce9f68f9db5ae9538717bc" |
| 107 | SRCREV_machine_pn-linux-yocto_beagleboard = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" | 107 | SRCREV_machine_pn-linux-yocto_beagleboard ?= "a9d833fda90e2f1257888a97e092135610b5f259" |
| 108 | SRCREV_meta_pn-linux-yocto ?= "5955ebea1f0d2fbd67a66ed138ce2b3363adf72a" | 108 | SRCREV_meta_pn-linux-yocto ?= "7102097a25c7658e0f4d4dc71844e0ff6c446b25" |
| 109 | SRCREV_pn-linux-libc-headers-yocto ??= "6635864790ac23db856147fcbc4e8ebbfd30a0ba" | 109 | SRCREV_pn-linux-libc-headers-yocto ??= "a9d833fda90e2f1257888a97e092135610b5f259" |
| 110 | SRCREV_pn-matchbox-config-gtk ??= "2081" | 110 | SRCREV_pn-matchbox-config-gtk ??= "2081" |
| 111 | SRCREV_pn-matchbox-desktop-sato ??= "76" | 111 | SRCREV_pn-matchbox-desktop-sato ??= "76" |
| 112 | SRCREV_pn-matchbox-desktop ??= "2096" | 112 | SRCREV_pn-matchbox-desktop ??= "2096" |
diff --git a/meta/conf/distro/poky.conf b/meta/conf/distro/poky.conf index e6ef465052..1c5c72104b 100644 --- a/meta/conf/distro/poky.conf +++ b/meta/conf/distro/poky.conf | |||
| @@ -22,6 +22,8 @@ TARGET_FPU_arm ?= "soft" | |||
| 22 | TARGET_FPU_armeb ?= "soft" | 22 | TARGET_FPU_armeb ?= "soft" |
| 23 | TARGET_FPU_nokia800 = "hard" | 23 | TARGET_FPU_nokia800 = "hard" |
| 24 | 24 | ||
| 25 | TARGET_LINK_HASH_STYLE ?= "${@['-Wl,--hash-style=gnu',''][bb.data.getVar('TARGET_ARCH', d, True) in ['mips', 'mipsel', 'mips64', 'mips64el']]}" | ||
| 26 | |||
| 25 | QA_LOGFILE = "${TMPDIR}/qa.log" | 27 | QA_LOGFILE = "${TMPDIR}/qa.log" |
| 26 | 28 | ||
| 27 | IMAGE_ROOTFS_SIZE_ext2 ?= "131072" | 29 | IMAGE_ROOTFS_SIZE_ext2 ?= "131072" |
diff --git a/meta/conf/machine/akita.conf b/meta/conf/machine/akita.conf index 0f5eaca9cc..cdd465200c 100644 --- a/meta/conf/machine/akita.conf +++ b/meta/conf/machine/akita.conf | |||
| @@ -5,7 +5,6 @@ | |||
| 5 | require conf/machine/include/zaurus-2.6.inc | 5 | require conf/machine/include/zaurus-2.6.inc |
| 6 | require conf/machine/include/zaurus-clamshell.inc | 6 | require conf/machine/include/zaurus-clamshell.inc |
| 7 | 7 | ||
| 8 | PACKAGE_EXTRA_ARCHS += "iwmmxt" | ||
| 9 | MACHINE_FEATURES += "iwmmxt" | 8 | MACHINE_FEATURES += "iwmmxt" |
| 10 | 9 | ||
| 11 | IMAGE_FSTYPES ?= "jffs2" | 10 | IMAGE_FSTYPES ?= "jffs2" |
diff --git a/meta/conf/machine/atom-pc.conf b/meta/conf/machine/atom-pc.conf index 8cf09b811e..58c9b0da65 100644 --- a/meta/conf/machine/atom-pc.conf +++ b/meta/conf/machine/atom-pc.conf | |||
| @@ -4,7 +4,6 @@ | |||
| 4 | #@DESCRIPTION: Machine configuration for Intel Atom based PCs. Currently supported machines are the Asus eee901, Acer Aspire One, Toshiba NB305, and Intel BlackSand development board. | 4 | #@DESCRIPTION: Machine configuration for Intel Atom based PCs. Currently supported machines are the Asus eee901, Acer Aspire One, Toshiba NB305, and Intel BlackSand development board. |
| 5 | 5 | ||
| 6 | TARGET_ARCH = "i586" | 6 | TARGET_ARCH = "i586" |
| 7 | PACKAGE_EXTRA_ARCHS = "x86 core2" | ||
| 8 | 7 | ||
| 9 | include conf/machine/include/tune-atom.inc | 8 | include conf/machine/include/tune-atom.inc |
| 10 | 9 | ||
diff --git a/meta/conf/machine/beagleboard.conf b/meta/conf/machine/beagleboard.conf index 1b73250c24..cb6417b7fb 100644 --- a/meta/conf/machine/beagleboard.conf +++ b/meta/conf/machine/beagleboard.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@NAME: Beagleboard machine | 2 | #@NAME: Beagleboard machine |
| 3 | #@DESCRIPTION: Machine configuration for the http://beagleboard.org/ board | 3 | #@DESCRIPTION: Machine configuration for the http://beagleboard.org/ board |
| 4 | TARGET_ARCH = "arm" | 4 | TARGET_ARCH = "arm" |
| 5 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7 armv7a" | ||
| 6 | 5 | ||
| 7 | PREFERRED_PROVIDER_virtual/xserver = "xserver-kdrive" | 6 | PREFERRED_PROVIDER_virtual/xserver = "xserver-kdrive" |
| 8 | XSERVER = "xserver-kdrive-fbdev" | 7 | XSERVER = "xserver-kdrive-fbdev" |
diff --git a/meta/conf/machine/cm-x270.conf b/meta/conf/machine/cm-x270.conf index 226942de73..9bf7d8f6ae 100644 --- a/meta/conf/machine/cm-x270.conf +++ b/meta/conf/machine/cm-x270.conf | |||
| @@ -8,9 +8,9 @@ GUI_MACHINE_CLASS ?= "bigscreen" | |||
| 8 | PREFERRED_PROVIDER_virtual/kernel = "linux" | 8 | PREFERRED_PROVIDER_virtual/kernel = "linux" |
| 9 | PREFERRED_VERSION_linux ?= "2.6.23" | 9 | PREFERRED_VERSION_linux ?= "2.6.23" |
| 10 | 10 | ||
| 11 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te iwmmxt" | ||
| 12 | 11 | ||
| 13 | require conf/machine/include/tune-xscale.inc | 12 | require conf/machine/include/tune-xscale.inc |
| 13 | PACKAGE_EXTRA_ARCHS += "iwmmxt" | ||
| 14 | 14 | ||
| 15 | # Console serial port. | 15 | # Console serial port. |
| 16 | # If you have a W module, add the following line to your local.conf: | 16 | # If you have a W module, add the following line to your local.conf: |
diff --git a/meta/conf/machine/depicture.conf b/meta/conf/machine/depicture.conf index 333af4f2f2..f72ab2939e 100644 --- a/meta/conf/machine/depicture.conf +++ b/meta/conf/machine/depicture.conf | |||
| @@ -3,7 +3,6 @@ | |||
| 3 | #@DESCRIPTION: Machine configuration for running an ARM 920t in the BBD20EUROU board. | 3 | #@DESCRIPTION: Machine configuration for running an ARM 920t in the BBD20EUROU board. |
| 4 | 4 | ||
| 5 | TARGET_ARCH = "arm" | 5 | TARGET_ARCH = "arm" |
| 6 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t" | ||
| 7 | 6 | ||
| 8 | require conf/machine/include/qemu.inc | 7 | require conf/machine/include/qemu.inc |
| 9 | require conf/machine/include/tune-arm920t.inc | 8 | require conf/machine/include/tune-arm920t.inc |
diff --git a/meta/conf/machine/em-x270.conf b/meta/conf/machine/em-x270.conf index 47fb8ca533..9bcec5b742 100644 --- a/meta/conf/machine/em-x270.conf +++ b/meta/conf/machine/em-x270.conf | |||
| @@ -8,7 +8,6 @@ | |||
| 8 | TARGET_ARCH = "arm" | 8 | TARGET_ARCH = "arm" |
| 9 | GUI_MACHINE_CLASS ?= "bigscreen" | 9 | GUI_MACHINE_CLASS ?= "bigscreen" |
| 10 | 10 | ||
| 11 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te iwmmxt " | ||
| 12 | PREFERRED_PROVIDER_virtual/kernel = "linux" | 11 | PREFERRED_PROVIDER_virtual/kernel = "linux" |
| 13 | 12 | ||
| 14 | require conf/machine/include/tune-xscale.inc | 13 | require conf/machine/include/tune-xscale.inc |
diff --git a/meta/conf/machine/htcuniversal.conf b/meta/conf/machine/htcuniversal.conf index 2f5b798bff..72f3bc4aad 100644 --- a/meta/conf/machine/htcuniversal.conf +++ b/meta/conf/machine/htcuniversal.conf | |||
| @@ -4,9 +4,9 @@ | |||
| 4 | 4 | ||
| 5 | #include conf/machine/include/tune-iwmmxt.inc | 5 | #include conf/machine/include/tune-iwmmxt.inc |
| 6 | include conf/machine/include/tune-xscale.inc | 6 | include conf/machine/include/tune-xscale.inc |
| 7 | PACKAGE_EXTRA_ARCHS += "iwmmxt" | ||
| 7 | 8 | ||
| 8 | TARGET_ARCH = "arm" | 9 | TARGET_ARCH = "arm" |
| 9 | PACKAGE_EXTRA_ARCHS = "armv4t armv5te iwmmxt" | ||
| 10 | 10 | ||
| 11 | PREFERRED_PROVIDER_virtual/kernel = "linux-rp" | 11 | PREFERRED_PROVIDER_virtual/kernel = "linux-rp" |
| 12 | 12 | ||
diff --git a/meta/conf/machine/hx2000.conf b/meta/conf/machine/hx2000.conf index 8b4c39f805..a6775f2c3a 100644 --- a/meta/conf/machine/hx2000.conf +++ b/meta/conf/machine/hx2000.conf | |||
| @@ -3,7 +3,7 @@ | |||
| 3 | #@DESCRIPTION: Machine configuration for the hx2000 iPAQs with a pxa27x CPU | 3 | #@DESCRIPTION: Machine configuration for the hx2000 iPAQs with a pxa27x CPU |
| 4 | 4 | ||
| 5 | TARGET_ARCH = "arm" | 5 | TARGET_ARCH = "arm" |
| 6 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te iwmmxt " | 6 | PACKAGE_EXTRA_ARCHS += "iwmmxt" |
| 7 | 7 | ||
| 8 | PREFERRED_PROVIDER_virtual/kernel = "linux-rp" | 8 | PREFERRED_PROVIDER_virtual/kernel = "linux-rp" |
| 9 | 9 | ||
diff --git a/meta/conf/machine/igep0020.conf b/meta/conf/machine/igep0020.conf index 9ddbad65b3..86adfcf1a0 100644 --- a/meta/conf/machine/igep0020.conf +++ b/meta/conf/machine/igep0020.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@NAME: IGEP v2 machine | 2 | #@NAME: IGEP v2 machine |
| 3 | #@DESCRIPTION: Machine configuration for the IGEP v2 board (http://www.myigep.com) | 3 | #@DESCRIPTION: Machine configuration for the IGEP v2 board (http://www.myigep.com) |
| 4 | TARGET_ARCH = "arm" | 4 | TARGET_ARCH = "arm" |
| 5 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7 armv7a" | ||
| 6 | 5 | ||
| 7 | # Floating point option | 6 | # Floating point option |
| 8 | TARGET_FPU_arm = "hard" | 7 | TARGET_FPU_arm = "hard" |
diff --git a/meta/conf/machine/igep0030.conf b/meta/conf/machine/igep0030.conf index 7ebfaa0ca5..af36982a6a 100644 --- a/meta/conf/machine/igep0030.conf +++ b/meta/conf/machine/igep0030.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@NAME: OMAP3 IGEP module | 2 | #@NAME: OMAP3 IGEP module |
| 3 | #@DESCRIPTION: Machine configuration for the OMAP3 IGEP module (http://www.myigep.com) | 3 | #@DESCRIPTION: Machine configuration for the OMAP3 IGEP module (http://www.myigep.com) |
| 4 | TARGET_ARCH = "arm" | 4 | TARGET_ARCH = "arm" |
| 5 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7 armv7a" | ||
| 6 | 5 | ||
| 7 | # Floating point option | 6 | # Floating point option |
| 8 | TARGET_FPU_arm = "hard" | 7 | TARGET_FPU_arm = "hard" |
diff --git a/meta/conf/machine/include/tune-arm1136jf-s.inc b/meta/conf/machine/include/tune-arm1136jf-s.inc index be5aedc509..a7717510e6 100644 --- a/meta/conf/machine/include/tune-arm1136jf-s.inc +++ b/meta/conf/machine/include/tune-arm1136jf-s.inc | |||
| @@ -4,5 +4,6 @@ FEED_ARCH = "armv6" | |||
| 4 | BASE_PACKAGE_ARCH = "armv6" | 4 | BASE_PACKAGE_ARCH = "armv6" |
| 5 | 5 | ||
| 6 | ARM_INSTRUCTION_SET = "${@['thumb','arm'][bb.data.getVar('CPU_FEATURES', d, 1).find('thumb') == -1]}" | 6 | ARM_INSTRUCTION_SET = "${@['thumb','arm'][bb.data.getVar('CPU_FEATURES', d, 1).find('thumb') == -1]}" |
| 7 | PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te armv6" | ||
| 7 | 8 | ||
| 8 | require conf/machine/include/tune-thumb.inc | 9 | require conf/machine/include/tune-thumb.inc |
diff --git a/meta/conf/machine/include/tune-arm920t.inc b/meta/conf/machine/include/tune-arm920t.inc index 1f7a04bb3b..fee5c586c9 100644 --- a/meta/conf/machine/include/tune-arm920t.inc +++ b/meta/conf/machine/include/tune-arm920t.inc | |||
| @@ -1,4 +1,4 @@ | |||
| 1 | FEED_ARCH = "armv4t" | 1 | FEED_ARCH = "armv4t" |
| 2 | BASE_PACKAGE_ARCH = "armv4t" | 2 | BASE_PACKAGE_ARCH = "armv4t" |
| 3 | TARGET_CC_ARCH = "-march=armv4t -mtune=arm920t" | 3 | TARGET_CC_ARCH = "-march=armv4t -mtune=arm920t" |
| 4 | 4 | PACKAGE_EXTRA_ARCHS += "armv4 armv4t" | |
diff --git a/meta/conf/machine/include/tune-arm926ejs.inc b/meta/conf/machine/include/tune-arm926ejs.inc index e80ef10490..f41e460999 100644 --- a/meta/conf/machine/include/tune-arm926ejs.inc +++ b/meta/conf/machine/include/tune-arm926ejs.inc | |||
| @@ -1,5 +1,5 @@ | |||
| 1 | FEED_ARCH = "armv5te" | 1 | FEED_ARCH = "armv5te" |
| 2 | 2 | PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te" | |
| 3 | # For gcc 3.x you need: | 3 | # For gcc 3.x you need: |
| 4 | #TARGET_CC_ARCH = "-march=armv5te -mtune=arm926ejs" | 4 | #TARGET_CC_ARCH = "-march=armv5te -mtune=arm926ejs" |
| 5 | # For gcc 4.x you need: | 5 | # For gcc 4.x you need: |
diff --git a/meta/conf/machine/include/tune-arm9tdmi.inc b/meta/conf/machine/include/tune-arm9tdmi.inc index 39e4493014..d788b5304f 100644 --- a/meta/conf/machine/include/tune-arm9tdmi.inc +++ b/meta/conf/machine/include/tune-arm9tdmi.inc | |||
| @@ -1,2 +1,4 @@ | |||
| 1 | FEED_ARCH = "armv4t" | ||
| 1 | BASE_PACKAGE_ARCH = "armv4t" | 2 | BASE_PACKAGE_ARCH = "armv4t" |
| 3 | PACKAGE_EXTRA_ARCHS += "armv4 armv4t" | ||
| 2 | TARGET_CC_ARCH = "-mcpu=arm9tdmi -mtune=arm9tdmi" | 4 | TARGET_CC_ARCH = "-mcpu=arm9tdmi -mtune=arm9tdmi" |
diff --git a/meta/conf/machine/include/tune-armv7.inc b/meta/conf/machine/include/tune-armv7.inc index 96c8c64904..379a3eb011 100644 --- a/meta/conf/machine/include/tune-armv7.inc +++ b/meta/conf/machine/include/tune-armv7.inc | |||
| @@ -3,4 +3,5 @@ | |||
| 3 | # This will NOT compile programs in 'ARM' mode, which is what you really want | 3 | # This will NOT compile programs in 'ARM' mode, which is what you really want |
| 4 | TARGET_CC_ARCH = "-march=armv7 -mfpu=vfp -mfloat-abi=softfp" | 4 | TARGET_CC_ARCH = "-march=armv7 -mfpu=vfp -mfloat-abi=softfp" |
| 5 | FEED_ARCH = "armv7" | 5 | FEED_ARCH = "armv7" |
| 6 | PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te armv6 armv7" | ||
| 6 | BASE_PACKAGE_ARCH = "armv7" | 7 | BASE_PACKAGE_ARCH = "armv7" |
diff --git a/meta/conf/machine/include/tune-atom.inc b/meta/conf/machine/include/tune-atom.inc index 0f09e8d56f..a401856c8b 100644 --- a/meta/conf/machine/include/tune-atom.inc +++ b/meta/conf/machine/include/tune-atom.inc | |||
| @@ -2,3 +2,4 @@ BASE_PACKAGE_ARCH = "core2" | |||
| 2 | TARGET_CC_ARCH = "-m32 -march=core2 -msse3 -mtune=generic -mfpmath=sse" | 2 | TARGET_CC_ARCH = "-m32 -march=core2 -msse3 -mtune=generic -mfpmath=sse" |
| 3 | #MOBLIN_CFLAGS = "-Os -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector --param=ssp-buffer-size=4 -fasynchronous-unwind-tables" | 3 | #MOBLIN_CFLAGS = "-Os -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector --param=ssp-buffer-size=4 -fasynchronous-unwind-tables" |
| 4 | 4 | ||
| 5 | PACKAGE_EXTRA_ARCHS += "x86 i386 i486 i586 core2" | ||
diff --git a/meta/conf/machine/include/tune-c3.inc b/meta/conf/machine/include/tune-c3.inc index 107341eb62..1d636e89c6 100644 --- a/meta/conf/machine/include/tune-c3.inc +++ b/meta/conf/machine/include/tune-c3.inc | |||
| @@ -1,3 +1,4 @@ | |||
| 1 | TARGET_CC_ARCH = "-march=c3 -mtune=c3" | 1 | TARGET_CC_ARCH = "-march=c3 -mtune=c3" |
| 2 | PACKAGE_EXTRA_ARCHS += "i386 i486 i586" | ||
| 2 | BASE_PACKAGE_ARCH = "i586" | 3 | BASE_PACKAGE_ARCH = "i586" |
| 3 | 4 | FEED_ARCH = "i586" | |
diff --git a/meta/conf/machine/include/tune-cortexa8.inc b/meta/conf/machine/include/tune-cortexa8.inc index 2c40b70560..3ec1c03500 100644 --- a/meta/conf/machine/include/tune-cortexa8.inc +++ b/meta/conf/machine/include/tune-cortexa8.inc | |||
| @@ -10,3 +10,4 @@ TARGET_CC_ARCH = "-march=armv7-a -mtune=cortex-a8 -mfpu=neon -mfloat-abi=softfp | |||
| 10 | 10 | ||
| 11 | FEED_ARCH = "armv7a" | 11 | FEED_ARCH = "armv7a" |
| 12 | BASE_PACKAGE_ARCH = "armv7a" | 12 | BASE_PACKAGE_ARCH = "armv7a" |
| 13 | PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te armv6 armv7 armv7a" | ||
diff --git a/meta/conf/machine/include/tune-ep9312.inc b/meta/conf/machine/include/tune-ep9312.inc index 2cfdf5270f..d0c5bb2e98 100644 --- a/meta/conf/machine/include/tune-ep9312.inc +++ b/meta/conf/machine/include/tune-ep9312.inc | |||
| @@ -2,5 +2,7 @@ TARGET_CC_ARCH = "-march=ep9312 -mtune=ep9312 -mcpu=ep9312" | |||
| 2 | # add "-mfp=maverick" for newer gcc versions > 4.0 | 2 | # add "-mfp=maverick" for newer gcc versions > 4.0 |
| 3 | 3 | ||
| 4 | #set arch to ep9312 for all generated packages | 4 | #set arch to ep9312 for all generated packages |
| 5 | PACKAGE_EXTRA_ARCHS += "armv4t ep9312" | ||
| 5 | BASE_PACKAGE_ARCH = "ep9312" | 6 | BASE_PACKAGE_ARCH = "ep9312" |
| 7 | FEED_ARCH = "ep9312" | ||
| 6 | 8 | ||
diff --git a/meta/conf/machine/include/tune-iwmmxt.inc b/meta/conf/machine/include/tune-iwmmxt.inc index 49e560ea1a..f7291be805 100644 --- a/meta/conf/machine/include/tune-iwmmxt.inc +++ b/meta/conf/machine/include/tune-iwmmxt.inc | |||
| @@ -2,5 +2,6 @@ | |||
| 2 | # Please use tune-xscale for PXA255/PXA26x based processors. | 2 | # Please use tune-xscale for PXA255/PXA26x based processors. |
| 3 | TARGET_CC_ARCH = "-march=iwmmxt -mcpu=iwmmxt -mtune=iwmmxt" | 3 | TARGET_CC_ARCH = "-march=iwmmxt -mcpu=iwmmxt -mtune=iwmmxt" |
| 4 | BASE_PACKAGE_ARCH = "iwmmxt" | 4 | BASE_PACKAGE_ARCH = "iwmmxt" |
| 5 | PACKAGE_EXTRA_ARCHS += "iwmmxt" | 5 | PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te iwmmxt" |
| 6 | FEED_ARCH = "iwmmxt" | ||
| 6 | 7 | ||
diff --git a/meta/conf/machine/include/tune-mips32.inc b/meta/conf/machine/include/tune-mips32.inc index 93da66b158..67edca2530 100644 --- a/meta/conf/machine/include/tune-mips32.inc +++ b/meta/conf/machine/include/tune-mips32.inc | |||
| @@ -1 +1,3 @@ | |||
| 1 | TARGET_CC_ARCH = "-march=mips32" | 1 | TARGET_CC_ARCH = "-march=mips32" |
| 2 | FEED_ARCH = "${TARGET_ARCH}" | ||
| 3 | BASE_PACKAGE_ARCH = "${TARGET_ARCH}" | ||
diff --git a/meta/conf/machine/include/tune-ppc603e.inc b/meta/conf/machine/include/tune-ppc603e.inc index a4a68d60e3..15a72d10a1 100644 --- a/meta/conf/machine/include/tune-ppc603e.inc +++ b/meta/conf/machine/include/tune-ppc603e.inc | |||
| @@ -1,2 +1,4 @@ | |||
| 1 | TARGET_CC_ARCH = "-mcpu=603e -mhard-float" | 1 | TARGET_CC_ARCH = "-mcpu=603e -mhard-float" |
| 2 | BASE_PACKAGE_ARCH = "ppc603e" | 2 | BASE_PACKAGE_ARCH = "ppc603e" |
| 3 | FEED_ARCH = "ppc603e" | ||
| 4 | PACKAGE_EXTRA_ARCHS += "ppc603e" | ||
diff --git a/meta/conf/machine/include/tune-ppce300c2.inc b/meta/conf/machine/include/tune-ppce300c2.inc index 067a7b100c..2956875186 100644 --- a/meta/conf/machine/include/tune-ppce300c2.inc +++ b/meta/conf/machine/include/tune-ppce300c2.inc | |||
| @@ -1,2 +1,3 @@ | |||
| 1 | TARGET_CC_ARCH = "-mcpu=e300c2 -msoft-float" | 1 | TARGET_CC_ARCH = "-mcpu=e300c2 -msoft-float" |
| 2 | BASE_PACKAGE_ARCH = "ppce300" | 2 | BASE_PACKAGE_ARCH = "ppce300" |
| 3 | PACKAGE_EXTRA_ARCHS += "ppce300" | ||
diff --git a/meta/conf/machine/include/tune-ppce500.inc b/meta/conf/machine/include/tune-ppce500.inc index 2482196424..44f8742f9f 100644 --- a/meta/conf/machine/include/tune-ppce500.inc +++ b/meta/conf/machine/include/tune-ppce500.inc | |||
| @@ -1,2 +1,4 @@ | |||
| 1 | TARGET_CC_ARCH = "-mcpu=8540" | 1 | TARGET_CC_ARCH = "-mcpu=8540" |
| 2 | BASE_PACKAGE_ARCH = "ppce500" | 2 | BASE_PACKAGE_ARCH = "ppce500" |
| 3 | FEED_ARCH = "ppce500" | ||
| 4 | PACKAGE_EXTRA_ARCHS += "ppce500" | ||
diff --git a/meta/conf/machine/include/tune-sh3.inc b/meta/conf/machine/include/tune-sh3.inc index 192dd8fd22..32801a61c3 100644 --- a/meta/conf/machine/include/tune-sh3.inc +++ b/meta/conf/machine/include/tune-sh3.inc | |||
| @@ -1 +1,3 @@ | |||
| 1 | TARGET_CC_ARCH = "-ml -m3" | 1 | TARGET_CC_ARCH = "-ml -m3" |
| 2 | FEED_ARCH = "sh3" | ||
| 3 | BASE_PACKAGE_ARCH = "sh3" | ||
diff --git a/meta/conf/machine/include/tune-sh4.inc b/meta/conf/machine/include/tune-sh4.inc index 866f7f89d5..51b40691bc 100644 --- a/meta/conf/machine/include/tune-sh4.inc +++ b/meta/conf/machine/include/tune-sh4.inc | |||
| @@ -1 +1,7 @@ | |||
| 1 | # NOTE: If you want to optimize to sh4a, conf/machine/include/tune-sh4a.inc. | ||
| 2 | # But it is not compatible for sh4. | ||
| 3 | # The binary optimized by m4a doesn't operate on sh4. It works on sh4a only. | ||
| 4 | |||
| 1 | TARGET_CC_ARCH = "-ml -m4" | 5 | TARGET_CC_ARCH = "-ml -m4" |
| 6 | FEED_ARCH = "sh4" | ||
| 7 | BASE_PACKAGE_ARCH = "sh4" | ||
diff --git a/meta/conf/machine/include/tune-supersparc.inc b/meta/conf/machine/include/tune-supersparc.inc index 10133c6c21..a402e7cd2b 100644 --- a/meta/conf/machine/include/tune-supersparc.inc +++ b/meta/conf/machine/include/tune-supersparc.inc | |||
| @@ -1,2 +1,3 @@ | |||
| 1 | TARGET_CC_ARCH = "-mcpu=supersparc" | 1 | TARGET_CC_ARCH = "-mcpu=supersparc" |
| 2 | BASE_PACKAGE_ARCH = "supersparc" | 2 | BASE_PACKAGE_ARCH = "supersparc" |
| 3 | FEED_ARCH = "supersparc" | ||
diff --git a/meta/conf/machine/include/tune-xscale.inc b/meta/conf/machine/include/tune-xscale.inc index 4f96814a72..f1ef3d46af 100644 --- a/meta/conf/machine/include/tune-xscale.inc +++ b/meta/conf/machine/include/tune-xscale.inc | |||
| @@ -5,6 +5,7 @@ INHERIT += "siteinfo" | |||
| 5 | TARGET_CC_ARCH = "-march=armv5te -mtune=xscale" | 5 | TARGET_CC_ARCH = "-march=armv5te -mtune=xscale" |
| 6 | TARGET_CC_KERNEL_ARCH = "-march=armv5te -mtune=xscale" | 6 | TARGET_CC_KERNEL_ARCH = "-march=armv5te -mtune=xscale" |
| 7 | BASE_PACKAGE_ARCH = "${@['armv5teb', 'armv5te'][bb.data.getVar('SITEINFO_ENDIANESS', d, 1) == 'le']}" | 7 | BASE_PACKAGE_ARCH = "${@['armv5teb', 'armv5te'][bb.data.getVar('SITEINFO_ENDIANESS', d, 1) == 'le']}" |
| 8 | PACKAGE_EXTRA_ARCHS += "${@['armv4b armv4tb armv5teb', 'armv4 armv4t armv5te'][bb.data.getVar('SITEINFO_ENDIANESS', d, 1) == 'le']}" | ||
| 8 | 9 | ||
| 9 | # webkit-gtk has alignment issues with double instructions on armv5 so | 10 | # webkit-gtk has alignment issues with double instructions on armv5 so |
| 10 | # disable them here | 11 | # disable them here |
diff --git a/meta/conf/machine/mpc8315e-rdb.conf b/meta/conf/machine/mpc8315e-rdb.conf index 1b7982a5dd..919a95d9ef 100644 --- a/meta/conf/machine/mpc8315e-rdb.conf +++ b/meta/conf/machine/mpc8315e-rdb.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@DESCRIPTION: Machine configuration for running | 2 | #@DESCRIPTION: Machine configuration for running |
| 3 | 3 | ||
| 4 | TARGET_ARCH = "powerpc" | 4 | TARGET_ARCH = "powerpc" |
| 5 | PACKAGE_EXTRA_ARCHS = "ppc603e" | ||
| 6 | TARGET_FPU = "spe" | 5 | TARGET_FPU = "spe" |
| 7 | 6 | ||
| 8 | require conf/machine/include/tune-ppc603e.inc | 7 | require conf/machine/include/tune-ppc603e.inc |
diff --git a/meta/conf/machine/mx31ads.conf b/meta/conf/machine/mx31ads.conf index 04a238d600..8b3e3dbc1a 100644 --- a/meta/conf/machine/mx31ads.conf +++ b/meta/conf/machine/mx31ads.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@NAME: Freescale imx31 ADS Board | 2 | #@NAME: Freescale imx31 ADS Board |
| 3 | #@DESCRIPTION: Machine configuration for the imx31 based Freescale ADS Board | 3 | #@DESCRIPTION: Machine configuration for the imx31 based Freescale ADS Board |
| 4 | TARGET_ARCH = "arm" | 4 | TARGET_ARCH = "arm" |
| 5 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6" | ||
| 6 | 5 | ||
| 7 | PREFERRED_PROVIDER_virtual/kernel = "linux-mx31" | 6 | PREFERRED_PROVIDER_virtual/kernel = "linux-mx31" |
| 8 | 7 | ||
diff --git a/meta/conf/machine/mx31phy.conf b/meta/conf/machine/mx31phy.conf index 28b255ccec..eea673ab16 100644 --- a/meta/conf/machine/mx31phy.conf +++ b/meta/conf/machine/mx31phy.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@NAME: Phytec phyCORE-iMX31 Board | 2 | #@NAME: Phytec phyCORE-iMX31 Board |
| 3 | #@DESCRIPTION: Machine configuration for the imx31 based Phytec phyCORE-iMX31 | 3 | #@DESCRIPTION: Machine configuration for the imx31 based Phytec phyCORE-iMX31 |
| 4 | TARGET_ARCH = "arm" | 4 | TARGET_ARCH = "arm" |
| 5 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6" | ||
| 6 | 5 | ||
| 7 | PREFERRED_PROVIDER_virtual/kernel = "linux-mx31" | 6 | PREFERRED_PROVIDER_virtual/kernel = "linux-mx31" |
| 8 | 7 | ||
diff --git a/meta/conf/machine/omap-3430ldp.conf b/meta/conf/machine/omap-3430ldp.conf index 0be0546b80..759d58f015 100644 --- a/meta/conf/machine/omap-3430ldp.conf +++ b/meta/conf/machine/omap-3430ldp.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@NAME: TI OMAP 3430 LDP | 2 | #@NAME: TI OMAP 3430 LDP |
| 3 | #@DESCRIPTION: Machine configuration for the TI OMAP 3430 LDP (aka OMAP Zoom) | 3 | #@DESCRIPTION: Machine configuration for the TI OMAP 3430 LDP (aka OMAP Zoom) |
| 4 | TARGET_ARCH = "arm" | 4 | TARGET_ARCH = "arm" |
| 5 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7a" | ||
| 6 | 5 | ||
| 7 | PREFERRED_PROVIDER_virtual/kernel = "linux-omap" | 6 | PREFERRED_PROVIDER_virtual/kernel = "linux-omap" |
| 8 | 7 | ||
diff --git a/meta/conf/machine/omap-3430sdp.conf b/meta/conf/machine/omap-3430sdp.conf index 5d87bd1483..651d6a9968 100644 --- a/meta/conf/machine/omap-3430sdp.conf +++ b/meta/conf/machine/omap-3430sdp.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@NAME: TI OMAP 3430 SDP | 2 | #@NAME: TI OMAP 3430 SDP |
| 3 | #@DESCRIPTION: Machine configuration for the TI OMAP 3430 SDP | 3 | #@DESCRIPTION: Machine configuration for the TI OMAP 3430 SDP |
| 4 | TARGET_ARCH = "arm" | 4 | TARGET_ARCH = "arm" |
| 5 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7a" | ||
| 6 | 5 | ||
| 7 | PREFERRED_PROVIDER_virtual/kernel = "linux-omap" | 6 | PREFERRED_PROVIDER_virtual/kernel = "linux-omap" |
| 8 | 7 | ||
diff --git a/meta/conf/machine/overo.conf b/meta/conf/machine/overo.conf index dde4bff476..9fefd73b48 100644 --- a/meta/conf/machine/overo.conf +++ b/meta/conf/machine/overo.conf | |||
| @@ -5,7 +5,6 @@ | |||
| 5 | include conf/machine/include/tune-cortexa8.inc | 5 | include conf/machine/include/tune-cortexa8.inc |
| 6 | 6 | ||
| 7 | TARGET_ARCH = "arm" | 7 | TARGET_ARCH = "arm" |
| 8 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7 armv7a" | ||
| 9 | MACHINE_FEATURES = "kernel26 apm usbgadget usbhost vfat alsa" | 8 | MACHINE_FEATURES = "kernel26 apm usbgadget usbhost vfat alsa" |
| 10 | 9 | ||
| 11 | PREFERRED_VERSION_u-boot = "git" | 10 | PREFERRED_VERSION_u-boot = "git" |
diff --git a/meta/conf/machine/qemuarm.conf b/meta/conf/machine/qemuarm.conf index 400ecc8245..9fb18932ca 100644 --- a/meta/conf/machine/qemuarm.conf +++ b/meta/conf/machine/qemuarm.conf | |||
| @@ -3,7 +3,6 @@ | |||
| 3 | #@DESCRIPTION: arm_versaile_926ejs | 3 | #@DESCRIPTION: arm_versaile_926ejs |
| 4 | 4 | ||
| 5 | TARGET_ARCH = "arm" | 5 | TARGET_ARCH = "arm" |
| 6 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te" | ||
| 7 | 6 | ||
| 8 | require conf/machine/include/qemu.inc | 7 | require conf/machine/include/qemu.inc |
| 9 | require conf/machine/include/tune-arm926ejs.inc | 8 | require conf/machine/include/tune-arm926ejs.inc |
diff --git a/meta/conf/machine/qemuppc.conf b/meta/conf/machine/qemuppc.conf index 0bbeac7da6..83d0841159 100644 --- a/meta/conf/machine/qemuppc.conf +++ b/meta/conf/machine/qemuppc.conf | |||
| @@ -3,7 +3,6 @@ | |||
| 3 | #@DESCRIPTION: Machine configuration for running an PPC system under qemu emulation | 3 | #@DESCRIPTION: Machine configuration for running an PPC system under qemu emulation |
| 4 | 4 | ||
| 5 | TARGET_ARCH = "powerpc" | 5 | TARGET_ARCH = "powerpc" |
| 6 | PACKAGE_EXTRA_ARCHS = "ppc603e" | ||
| 7 | 6 | ||
| 8 | require conf/machine/include/qemu.inc | 7 | require conf/machine/include/qemu.inc |
| 9 | require conf/machine/include/tune-ppc603e.inc | 8 | require conf/machine/include/tune-ppc603e.inc |
diff --git a/meta/conf/machine/qemux86-64.conf b/meta/conf/machine/qemux86-64.conf index fd34709b78..182759adfe 100644 --- a/meta/conf/machine/qemux86-64.conf +++ b/meta/conf/machine/qemux86-64.conf | |||
| @@ -4,7 +4,6 @@ | |||
| 4 | 4 | ||
| 5 | TARGET_ARCH = "x86_64" | 5 | TARGET_ARCH = "x86_64" |
| 6 | TRANSLATED_TARGET_ARCH = "x86-64" | 6 | TRANSLATED_TARGET_ARCH = "x86-64" |
| 7 | PACKAGE_EXTRA_ARCHS = "x86" | ||
| 8 | 7 | ||
| 9 | PREFERRED_PROVIDER_virtual/libgl = "mesa-dri" | 8 | PREFERRED_PROVIDER_virtual/libgl = "mesa-dri" |
| 10 | PREFERRED_PROVIDER_virtual/libx11 ?= "libx11-trim" | 9 | PREFERRED_PROVIDER_virtual/libx11 ?= "libx11-trim" |
diff --git a/meta/conf/machine/qemux86.conf b/meta/conf/machine/qemux86.conf index cb2a32d7bd..f1a0939ebc 100644 --- a/meta/conf/machine/qemux86.conf +++ b/meta/conf/machine/qemux86.conf | |||
| @@ -3,7 +3,6 @@ | |||
| 3 | #@DESCRIPTION: Machine configuration for running a common x86 | 3 | #@DESCRIPTION: Machine configuration for running a common x86 |
| 4 | 4 | ||
| 5 | TARGET_ARCH = "i586" | 5 | TARGET_ARCH = "i586" |
| 6 | PACKAGE_EXTRA_ARCHS = "x86" | ||
| 7 | 6 | ||
| 8 | PREFERRED_PROVIDER_virtual/libgl = "mesa-dri" | 7 | PREFERRED_PROVIDER_virtual/libgl = "mesa-dri" |
| 9 | PREFERRED_PROVIDER_virtual/libx11 ?= "libx11-trim" | 8 | PREFERRED_PROVIDER_virtual/libx11 ?= "libx11-trim" |
diff --git a/meta/conf/machine/spitz.conf b/meta/conf/machine/spitz.conf index 4a794331e8..faa00e991f 100644 --- a/meta/conf/machine/spitz.conf +++ b/meta/conf/machine/spitz.conf | |||
| @@ -9,7 +9,6 @@ MACHINE_FEATURES += "iwmmxt" | |||
| 9 | 9 | ||
| 10 | PIVOTBOOT_EXTRA_RDEPENDS += "pivotinit ${PCMCIA_MANAGER}" | 10 | PIVOTBOOT_EXTRA_RDEPENDS += "pivotinit ${PCMCIA_MANAGER}" |
| 11 | 11 | ||
| 12 | PACKAGE_EXTRA_ARCHS += "iwmmxt" | ||
| 13 | IMAGE_FSTYPES ?= "tar.gz ext3" | 12 | IMAGE_FSTYPES ?= "tar.gz ext3" |
| 14 | IMAGE_ROOTFS_SIZE_ext3 = "250000" | 13 | IMAGE_ROOTFS_SIZE_ext3 = "250000" |
| 15 | 14 | ||
diff --git a/meta/conf/machine/zoom2.conf b/meta/conf/machine/zoom2.conf index 142a40c5a1..dec90c2c7b 100644 --- a/meta/conf/machine/zoom2.conf +++ b/meta/conf/machine/zoom2.conf | |||
| @@ -2,7 +2,6 @@ | |||
| 2 | #@NAME: Logic PD Zoom2 | 2 | #@NAME: Logic PD Zoom2 |
| 3 | #@DESCRIPTION: Machine configuration for the OMAP3-powered Logic PD Zoom2 | 3 | #@DESCRIPTION: Machine configuration for the OMAP3-powered Logic PD Zoom2 |
| 4 | TARGET_ARCH = "arm" | 4 | TARGET_ARCH = "arm" |
| 5 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7a" | ||
| 6 | 5 | ||
| 7 | PREFERRED_PROVIDER_virtual/kernel = "linux-omap-zoomsync" | 6 | PREFERRED_PROVIDER_virtual/kernel = "linux-omap-zoomsync" |
| 8 | 7 | ||
diff --git a/meta/conf/machine/zylonite.conf b/meta/conf/machine/zylonite.conf index 644675e849..e82f041ff2 100644 --- a/meta/conf/machine/zylonite.conf +++ b/meta/conf/machine/zylonite.conf | |||
| @@ -4,8 +4,9 @@ | |||
| 4 | 4 | ||
| 5 | require conf/machine/include/tune-xscale.inc | 5 | require conf/machine/include/tune-xscale.inc |
| 6 | 6 | ||
| 7 | PACKAGE_EXTRA_ARCHS += "iwmmxt" | ||
| 8 | |||
| 7 | TARGET_ARCH = "arm" | 9 | TARGET_ARCH = "arm" |
| 8 | PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5e armv5te iwmmxt" | ||
| 9 | 10 | ||
| 10 | EXTRA_IMAGECMD_jffs2 = "--little-endian --eraseblock=0x20000 -n -x lzo -p" | 11 | EXTRA_IMAGECMD_jffs2 = "--little-endian --eraseblock=0x20000 -n -x lzo -p" |
| 11 | 12 | ||
diff --git a/meta/recipes-devtools/bison/bison_2.4.3.bb b/meta/recipes-devtools/bison/bison_2.4.3.bb index 03fd463fd8..fa2f15400a 100644 --- a/meta/recipes-devtools/bison/bison_2.4.3.bb +++ b/meta/recipes-devtools/bison/bison_2.4.3.bb | |||
| @@ -10,7 +10,7 @@ SECTION = "devel" | |||
| 10 | PRIORITY = "optional" | 10 | PRIORITY = "optional" |
| 11 | DEPENDS = "gettext bison-native" | 11 | DEPENDS = "gettext bison-native" |
| 12 | 12 | ||
| 13 | PR = "r0" | 13 | PR = "r1" |
| 14 | 14 | ||
| 15 | BASE_SRC_URI = "${GNU_MIRROR}/bison/bison-${PV}.tar.gz \ | 15 | BASE_SRC_URI = "${GNU_MIRROR}/bison/bison-${PV}.tar.gz \ |
| 16 | file://m4.patch;patch=1" | 16 | file://m4.patch;patch=1" |
| @@ -27,4 +27,10 @@ SRC_URI_virtclass-native = "${BASE_SRC_URI}" | |||
| 27 | inherit autotools | 27 | inherit autotools |
| 28 | acpaths = "-I ${S}/m4" | 28 | acpaths = "-I ${S}/m4" |
| 29 | 29 | ||
| 30 | do_install_append_virtclass-native() { | ||
| 31 | create_wrapper ${D}/${bindir}/bison \ | ||
| 32 | M4=${STAGING_BINDIR_NATIVE}/m4 \ | ||
| 33 | BISON_PKGDATADIR=${STAGING_DATADIR_NATIVE}/bison | ||
| 34 | } | ||
| 35 | #bison-native encodes the staging M4 path | ||
| 30 | BBCLASSEXTEND = "native" | 36 | BBCLASSEXTEND = "native" |
diff --git a/meta/recipes-devtools/flex/flex.inc b/meta/recipes-devtools/flex/flex.inc index 93fa7e81a4..54c400c044 100644 --- a/meta/recipes-devtools/flex/flex.inc +++ b/meta/recipes-devtools/flex/flex.inc | |||
| @@ -11,3 +11,6 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/flex/flex-${PV}.tar.bz2 " | |||
| 11 | 11 | ||
| 12 | inherit autotools | 12 | inherit autotools |
| 13 | 13 | ||
| 14 | do_install_append_virtclass-native() { | ||
| 15 | create_wrapper ${D}/${bindir}/flex M4=${STAGING_BINDIR_NATIVE}/m4 | ||
| 16 | } | ||
diff --git a/meta/recipes-devtools/flex/flex_2.5.35.bb b/meta/recipes-devtools/flex/flex_2.5.35.bb index 4fdce33ccf..28a4f1f06b 100644 --- a/meta/recipes-devtools/flex/flex_2.5.35.bb +++ b/meta/recipes-devtools/flex/flex_2.5.35.bb | |||
| @@ -1,5 +1,5 @@ | |||
| 1 | require flex.inc | 1 | require flex.inc |
| 2 | PR = "r1" | 2 | PR = "r2" |
| 3 | LICENSE="BSD" | 3 | LICENSE="BSD" |
| 4 | LIC_FILES_CHKSUM = "file://COPYING;md5=e4742cf92e89040b39486a6219b68067" | 4 | LIC_FILES_CHKSUM = "file://COPYING;md5=e4742cf92e89040b39486a6219b68067" |
| 5 | BBCLASSEXTEND = "native" | 5 | BBCLASSEXTEND = "native" |
diff --git a/meta/recipes-devtools/perl/perl_5.8.8.bb b/meta/recipes-devtools/perl/perl_5.8.8.bb index 8e2f0c0af7..5cd4d4fe2f 100644 --- a/meta/recipes-devtools/perl/perl_5.8.8.bb +++ b/meta/recipes-devtools/perl/perl_5.8.8.bb | |||
| @@ -11,7 +11,7 @@ PRIORITY = "optional" | |||
| 11 | # We need gnugrep (for -I) | 11 | # We need gnugrep (for -I) |
| 12 | DEPENDS = "virtual/db perl-native grep-native" | 12 | DEPENDS = "virtual/db perl-native grep-native" |
| 13 | DEPENDS += "gdbm" | 13 | DEPENDS += "gdbm" |
| 14 | PR = "r21" | 14 | PR = "r22" |
| 15 | 15 | ||
| 16 | # Major part of version | 16 | # Major part of version |
| 17 | PVM = "5.8" | 17 | PVM = "5.8" |
| @@ -114,7 +114,7 @@ do_compile() { | |||
| 114 | sed -i -e 's|/usr/include|${STAGING_INCDIR}|g' ext/Errno/Errno_pm.PL | 114 | sed -i -e 's|/usr/include|${STAGING_INCDIR}|g' ext/Errno/Errno_pm.PL |
| 115 | fi | 115 | fi |
| 116 | cd Cross | 116 | cd Cross |
| 117 | oe_runmake perl LD="${TARGET_SYS}-gcc" | 117 | oe_runmake perl LD="${CCLD}" |
| 118 | } | 118 | } |
| 119 | 119 | ||
| 120 | do_install() { | 120 | do_install() { |
diff --git a/meta/recipes-graphics/xorg-app/xvinfo_1.1.1.bb b/meta/recipes-graphics/xorg-app/xvinfo_1.1.1.bb index 0bebbfb86e..0547bd62ca 100644 --- a/meta/recipes-graphics/xorg-app/xvinfo_1.1.1.bb +++ b/meta/recipes-graphics/xorg-app/xvinfo_1.1.1.bb | |||
| @@ -1,7 +1,7 @@ | |||
| 1 | require xorg-app-common.inc | 1 | require xorg-app-common.inc |
| 2 | 2 | ||
| 3 | DESCRIPTION = "Print out X-Video extension adaptor information" | 3 | DESCRIPTION = "Print out X-Video extension adaptor information" |
| 4 | LIC_FILES_CHKSUM = "file://COPYING;md5=4641deddaa80fe7ca88e944e1fd94a94" | 4 | LIC_FILES_CHKSUM = "file://COPYING;md5=b664101ad7a1dc758a4c4109bf978e68" |
| 5 | DEPENDS += " libxv" | 5 | DEPENDS += " libxv" |
| 6 | PE = "1" | 6 | PE = "1" |
| 7 | 7 | ||
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-common.inc b/meta/recipes-graphics/xorg-xserver/xserver-xf86-common.inc index 5cbfd11d18..f9ea0e4124 100644 --- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-common.inc +++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-common.inc | |||
| @@ -34,4 +34,3 @@ FILES_${PN}-dbg += "${libdir}/xorg/modules/.debug \ | |||
| 34 | 34 | ||
| 35 | SRC_URI += "file://macro_tweak.patch" | 35 | SRC_URI += "file://macro_tweak.patch" |
| 36 | 36 | ||
| 37 | COMPATIBLE_MACHINE = "(qemux86*|igep*)" | ||
diff --git a/meta/recipes-multimedia/pulseaudio/libcanberra_0.26.bb b/meta/recipes-multimedia/pulseaudio/libcanberra_0.26.bb index 7f91bdc978..0daeb0e6c5 100644 --- a/meta/recipes-multimedia/pulseaudio/libcanberra_0.26.bb +++ b/meta/recipes-multimedia/pulseaudio/libcanberra_0.26.bb | |||
| @@ -2,7 +2,7 @@ SUMMARY = "Implementation of XDG Sound Theme and Name Specifications" | |||
| 2 | DESCRIPTION = "Libcanberra is an implementation of the XDG Sound Theme and Name Specifications, for generating event sounds on free desktops." | 2 | DESCRIPTION = "Libcanberra is an implementation of the XDG Sound Theme and Name Specifications, for generating event sounds on free desktops." |
| 3 | LICENSE = "LGPLv2.1" | 3 | LICENSE = "LGPLv2.1" |
| 4 | LIC_FILES_CHKSUM = "file://LGPL;md5=2d5025d4aa3495befef8f17206a5b0a1 \ | 4 | LIC_FILES_CHKSUM = "file://LGPL;md5=2d5025d4aa3495befef8f17206a5b0a1 \ |
| 5 | file://src/canberra.h;endline=24;md5=8dd99ba690687f5816f711d9313c8967" | 5 | file://src/canberra.h;beginline=7;endline=24;md5=c616c687cf8da540a14f917e0d23ab03" |
| 6 | 6 | ||
| 7 | DEPENDS = "gtk+ pulseaudio alsa-lib libtool" | 7 | DEPENDS = "gtk+ pulseaudio alsa-lib libtool" |
| 8 | PR = "r0" | 8 | PR = "r0" |
