| author | Richard Purdie <rpurdie@linux.intel.com> | 2010-01-20 18:46:02 +0000 |
|---|---|---|
| committer | Richard Purdie <rpurdie@linux.intel.com> | 2010-01-20 18:46:02 +0000 |
| commit | 22c29d8651668195f72e2f6a8e059d625eb511c3 (patch) | |
| tree | dd1dd43f0ec47a9964c8a766eb8b3ad75cf51a64 /bitbake-dev/lib/bb/cooker.py | |
| parent | 1bfd6edef9db9c9175058ae801d1b601e4f15263 (diff) | |
| download | poky-22c29d8651668195f72e2f6a8e059d625eb511c3.tar.gz | |
bitbake: Switch to bitbake-dev version (bitbake master upstream)
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
Diffstat (limited to 'bitbake-dev/lib/bb/cooker.py')
| -rw-r--r-- | bitbake-dev/lib/bb/cooker.py | 978 |
1 file changed, 0 insertions, 978 deletions
diff --git a/bitbake-dev/lib/bb/cooker.py b/bitbake-dev/lib/bb/cooker.py
deleted file mode 100644
index 8036d7e9d5..0000000000
--- a/bitbake-dev/lib/bb/cooker.py
+++ /dev/null
@@ -1,978 +0,0 @@
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | # | ||
| 5 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 6 | # Copyright (C) 2003, 2004 Phil Blundell | ||
| 7 | # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer | ||
| 8 | # Copyright (C) 2005 Holger Hans Peter Freyther | ||
| 9 | # Copyright (C) 2005 ROAD GmbH | ||
| 10 | # Copyright (C) 2006 - 2007 Richard Purdie | ||
| 11 | # | ||
| 12 | # This program is free software; you can redistribute it and/or modify | ||
| 13 | # it under the terms of the GNU General Public License version 2 as | ||
| 14 | # published by the Free Software Foundation. | ||
| 15 | # | ||
| 16 | # This program is distributed in the hope that it will be useful, | ||
| 17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 19 | # GNU General Public License for more details. | ||
| 20 | # | ||
| 21 | # You should have received a copy of the GNU General Public License along | ||
| 22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 24 | |||
| 25 | import sys, os, getopt, glob, copy, os.path, re, time | ||
| 26 | import bb | ||
| 27 | from bb import utils, data, parse, event, cache, providers, taskdata, runqueue | ||
| 28 | from bb import command | ||
| 29 | import bb.server.xmlrpc | ||
| 30 | import itertools, sre_constants | ||
| 31 | |||
| 32 | class MultipleMatches(Exception): | ||
| 33 | """ | ||
| 34 | Exception raised when multiple file matches are found | ||
| 35 | """ | ||
| 36 | |||
| 37 | class ParsingErrorsFound(Exception): | ||
| 38 | """ | ||
| 39 | Exception raised when parsing errors are found | ||
| 40 | """ | ||
| 41 | |||
| 42 | class NothingToBuild(Exception): | ||
| 43 | """ | ||
| 44 | Exception raised when there is nothing to build | ||
| 45 | """ | ||
| 46 | |||
| 47 | |||
| 48 | # Different states cooker can be in | ||
| 49 | cookerClean = 1 | ||
| 50 | cookerParsing = 2 | ||
| 51 | cookerParsed = 3 | ||
| 52 | |||
| 53 | # Different action states the cooker can be in | ||
| 54 | cookerRun = 1 # Cooker is running normally | ||
| 55 | cookerShutdown = 2 # Active tasks should be brought to a controlled stop | ||
| 56 | cookerStop = 3 # Stop, now! | ||
| 57 | |||
| 58 | #============================================================================# | ||
| 59 | # BBCooker | ||
| 60 | #============================================================================# | ||
| 61 | class BBCooker: | ||
| 62 | """ | ||
| 63 | Manages one bitbake build run | ||
| 64 | """ | ||
| 65 | |||
| 66 | def __init__(self, configuration, server): | ||
| 67 | self.status = None | ||
| 68 | |||
| 69 | self.cache = None | ||
| 70 | self.bb_cache = None | ||
| 71 | |||
| 72 | self.server = server.BitBakeServer(self) | ||
| 73 | |||
| 74 | self.configuration = configuration | ||
| 75 | |||
| 76 | if self.configuration.verbose: | ||
| 77 | bb.msg.set_verbose(True) | ||
| 78 | |||
| 79 | if self.configuration.debug: | ||
| 80 | bb.msg.set_debug_level(self.configuration.debug) | ||
| 81 | else: | ||
| 82 | bb.msg.set_debug_level(0) | ||
| 83 | |||
| 84 | if self.configuration.debug_domains: | ||
| 85 | bb.msg.set_debug_domains(self.configuration.debug_domains) | ||
| 86 | |||
| 87 | self.configuration.data = bb.data.init() | ||
| 88 | |||
| 89 | bb.data.inheritFromOS(self.configuration.data) | ||
| 90 | |||
| 91 | for f in self.configuration.file: | ||
| 92 | self.parseConfigurationFile( f ) | ||
| 93 | |||
| 94 | self.parseConfigurationFile( os.path.join( "conf", "bitbake.conf" ) ) | ||
| 95 | |||
| 96 | if not self.configuration.cmd: | ||
| 97 | self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build" | ||
| 98 | |||
| 99 | bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True) | ||
| 100 | if bbpkgs and len(self.configuration.pkgs_to_build) == 0: | ||
| 101 | self.configuration.pkgs_to_build.extend(bbpkgs.split()) | ||
| 102 | |||
| 103 | # | ||
| 104 | # Special updated configuration we use for firing events | ||
| 105 | # | ||
| 106 | self.configuration.event_data = bb.data.createCopy(self.configuration.data) | ||
| 107 | bb.data.update_data(self.configuration.event_data) | ||
| 108 | |||
| 109 | # TOSTOP must not be set or our children will hang when they output | ||
| 110 | fd = sys.stdout.fileno() | ||
| 111 | if os.isatty(fd): | ||
| 112 | import termios | ||
| 113 | tcattr = termios.tcgetattr(fd) | ||
| 114 | if tcattr[3] & termios.TOSTOP: | ||
| 115 | bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...") | ||
| 116 | tcattr[3] = tcattr[3] & ~termios.TOSTOP | ||
| 117 | termios.tcsetattr(fd, termios.TCSANOW, tcattr) | ||
| 118 | |||
| 119 | self.command = bb.command.Command(self) | ||
| 120 | self.cookerState = cookerClean | ||
| 121 | self.cookerAction = cookerRun | ||
| 122 | |||
| 123 | def parseConfiguration(self): | ||
| 124 | |||
| 125 | |||
| 126 | # Change nice level if we're asked to | ||
| 127 | nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True) | ||
| 128 | if nice: | ||
| 129 | curnice = os.nice(0) | ||
| 130 | nice = int(nice) - curnice | ||
| 131 | bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice)) | ||
| 132 | |||
| 133 | def parseCommandLine(self): | ||
| 134 | # Parse any commandline into actions | ||
| 135 | if self.configuration.show_environment: | ||
| 136 | self.commandlineAction = None | ||
| 137 | |||
| 138 | if 'world' in self.configuration.pkgs_to_build: | ||
| 139 | bb.error("'world' is not a valid target for --environment.") | ||
| 140 | elif len(self.configuration.pkgs_to_build) > 1: | ||
| 141 | bb.error("Only one target can be used with the --environment option.") | ||
| 142 | elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0: | ||
| 143 | bb.error("No target should be used with the --environment and --buildfile options.") | ||
| 144 | elif len(self.configuration.pkgs_to_build) > 0: | ||
| 145 | self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build] | ||
| 146 | else: | ||
| 147 | self.commandlineAction = ["showEnvironment", self.configuration.buildfile] | ||
| 148 | elif self.configuration.buildfile is not None: | ||
| 149 | self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd] | ||
| 150 | elif self.configuration.revisions_changed: | ||
| 151 | self.commandlineAction = ["compareRevisions"] | ||
| 152 | elif self.configuration.show_versions: | ||
| 153 | self.commandlineAction = ["showVersions"] | ||
| 154 | elif self.configuration.parse_only: | ||
| 155 | self.commandlineAction = ["parseFiles"] | ||
| 156 | # FIXME - implement | ||
| 157 | #elif self.configuration.interactive: | ||
| 158 | # self.interactiveMode() | ||
| 159 | elif self.configuration.dot_graph: | ||
| 160 | if self.configuration.pkgs_to_build: | ||
| 161 | self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd] | ||
| 162 | else: | ||
| 163 | self.commandlineAction = None | ||
| 164 | bb.error("Please specify a package name for dependency graph generation.") | ||
| 165 | else: | ||
| 166 | if self.configuration.pkgs_to_build: | ||
| 167 | self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd] | ||
| 168 | else: | ||
| 169 | self.commandlineAction = None | ||
| 170 | bb.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") | ||
| 171 | |||
| 172 | def runCommands(self, server, data, abort): | ||
| 173 | """ | ||
| 174 | Run any queued asynchronous command | ||
| 175 | This is done by the idle handler so it runs in true context rather than | ||
| 176 | tied to any UI. | ||
| 177 | """ | ||
| 178 | |||
| 179 | return self.command.runAsyncCommand() | ||
| 180 | |||
| 181 | def tryBuildPackage(self, fn, item, task, the_data): | ||
| 182 | """ | ||
| 183 | Build one task of a package, optionally build following task depends | ||
| 184 | """ | ||
| 185 | try: | ||
| 186 | if not self.configuration.dry_run: | ||
| 187 | bb.build.exec_task('do_%s' % task, the_data) | ||
| 188 | return True | ||
| 189 | except bb.build.FuncFailed: | ||
| 190 | bb.msg.error(bb.msg.domain.Build, "task stack execution failed") | ||
| 191 | raise | ||
| 192 | except bb.build.EventException, e: | ||
| 193 | event = e.args[1] | ||
| 194 | bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event)) | ||
| 195 | raise | ||
| 196 | |||
| 197 | def tryBuild(self, fn, task): | ||
| 198 | """ | ||
| 199 | Build a provider and its dependencies. | ||
| 200 | build_depends is a list of previous build dependencies (not runtime) | ||
| 201 | If build_depends is empty, we're dealing with a runtime depends | ||
| 202 | """ | ||
| 203 | |||
| 204 | the_data = self.bb_cache.loadDataFull(fn, self.configuration.data) | ||
| 205 | |||
| 206 | item = self.status.pkg_fn[fn] | ||
| 207 | |||
| 208 | #if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data): | ||
| 209 | # return True | ||
| 210 | |||
| 211 | return self.tryBuildPackage(fn, item, task, the_data) | ||
| 212 | |||
| 213 | def showVersions(self): | ||
| 214 | |||
| 215 | # Need files parsed | ||
| 216 | self.updateCache() | ||
| 217 | |||
| 218 | pkg_pn = self.status.pkg_pn | ||
| 219 | preferred_versions = {} | ||
| 220 | latest_versions = {} | ||
| 221 | |||
| 222 | # Sort by priority | ||
| 223 | for pn in pkg_pn.keys(): | ||
| 224 | (last_ver,last_file,pref_ver,pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status) | ||
| 225 | preferred_versions[pn] = (pref_ver, pref_file) | ||
| 226 | latest_versions[pn] = (last_ver, last_file) | ||
| 227 | |||
| 228 | pkg_list = pkg_pn.keys() | ||
| 229 | pkg_list.sort() | ||
| 230 | |||
| 231 | bb.msg.plain("%-35s %25s %25s" % ("Package Name", "Latest Version", "Preferred Version")) | ||
| 232 | bb.msg.plain("%-35s %25s %25s\n" % ("============", "==============", "=================")) | ||
| 233 | |||
| 234 | for p in pkg_list: | ||
| 235 | pref = preferred_versions[p] | ||
| 236 | latest = latest_versions[p] | ||
| 237 | |||
| 238 | prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2] | ||
| 239 | lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] | ||
| 240 | |||
| 241 | if pref == latest: | ||
| 242 | prefstr = "" | ||
| 243 | |||
| 244 | bb.msg.plain("%-35s %25s %25s" % (p, lateststr, prefstr)) | ||
| 245 | |||
| 246 | def compareRevisions(self): | ||
| 247 | ret = bb.fetch.fetcher_compare_revisons(self.configuration.data) | ||
| 248 | bb.event.fire(bb.command.CookerCommandSetExitCode(ret), self.configuration.event_data) | ||
| 249 | |||
| 250 | def showEnvironment(self, buildfile = None, pkgs_to_build = []): | ||
| 251 | """ | ||
| 252 | Show the outer or per-package environment | ||
| 253 | """ | ||
| 254 | fn = None | ||
| 255 | envdata = None | ||
| 256 | |||
| 257 | if buildfile: | ||
| 258 | self.cb = None | ||
| 259 | self.bb_cache = bb.cache.init(self) | ||
| 260 | fn = self.matchFile(buildfile) | ||
| 261 | elif len(pkgs_to_build) == 1: | ||
| 262 | self.updateCache() | ||
| 263 | |||
| 264 | localdata = data.createCopy(self.configuration.data) | ||
| 265 | bb.data.update_data(localdata) | ||
| 266 | bb.data.expandKeys(localdata) | ||
| 267 | |||
| 268 | taskdata = bb.taskdata.TaskData(self.configuration.abort) | ||
| 269 | taskdata.add_provider(localdata, self.status, pkgs_to_build[0]) | ||
| 270 | taskdata.add_unresolved(localdata, self.status) | ||
| 271 | |||
| 272 | targetid = taskdata.getbuild_id(pkgs_to_build[0]) | ||
| 273 | fnid = taskdata.build_targets[targetid][0] | ||
| 274 | fn = taskdata.fn_index[fnid] | ||
| 275 | else: | ||
| 276 | envdata = self.configuration.data | ||
| 277 | |||
| 278 | if fn: | ||
| 279 | try: | ||
| 280 | envdata = self.bb_cache.loadDataFull(fn, self.configuration.data) | ||
| 281 | except IOError, e: | ||
| 282 | bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e)) | ||
| 283 | raise | ||
| 284 | except Exception, e: | ||
| 285 | bb.msg.error(bb.msg.domain.Parsing, "%s" % e) | ||
| 286 | raise | ||
| 287 | |||
| 288 | class dummywrite: | ||
| 289 | def __init__(self): | ||
| 290 | self.writebuf = "" | ||
| 291 | def write(self, output): | ||
| 292 | self.writebuf = self.writebuf + output | ||
| 293 | |||
| 294 | # emit variables and shell functions | ||
| 295 | try: | ||
| 296 | data.update_data(envdata) | ||
| 297 | wb = dummywrite() | ||
| 298 | data.emit_env(wb, envdata, True) | ||
| 299 | bb.msg.plain(wb.writebuf) | ||
| 300 | except Exception, e: | ||
| 301 | bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e) | ||
| 302 | # emit the metadata which isn't valid shell | ||
| 303 | data.expandKeys(envdata) | ||
| 304 | for e in envdata.keys(): | ||
| 305 | if data.getVarFlag( e, 'python', envdata ): | ||
| 306 | bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1))) | ||
| 307 | |||
| 308 | def generateDepTreeData(self, pkgs_to_build, task): | ||
| 309 | """ | ||
| 310 | Create a dependency tree of pkgs_to_build, returning the data. | ||
| 311 | """ | ||
| 312 | |||
| 313 | # Need files parsed | ||
| 314 | self.updateCache() | ||
| 315 | |||
| 316 | # If we are told to do the None task then query the default task | ||
| 317 | if (task == None): | ||
| 318 | task = self.configuration.cmd | ||
| 319 | |||
| 320 | pkgs_to_build = self.checkPackages(pkgs_to_build) | ||
| 321 | |||
| 322 | localdata = data.createCopy(self.configuration.data) | ||
| 323 | bb.data.update_data(localdata) | ||
| 324 | bb.data.expandKeys(localdata) | ||
| 325 | taskdata = bb.taskdata.TaskData(self.configuration.abort) | ||
| 326 | |||
| 327 | runlist = [] | ||
| 328 | for k in pkgs_to_build: | ||
| 329 | taskdata.add_provider(localdata, self.status, k) | ||
| 330 | runlist.append([k, "do_%s" % task]) | ||
| 331 | taskdata.add_unresolved(localdata, self.status) | ||
| 332 | |||
| 333 | rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) | ||
| 334 | rq.prepare_runqueue() | ||
| 335 | |||
| 336 | seen_fnids = [] | ||
| 337 | depend_tree = {} | ||
| 338 | depend_tree["depends"] = {} | ||
| 339 | depend_tree["tdepends"] = {} | ||
| 340 | depend_tree["pn"] = {} | ||
| 341 | depend_tree["rdepends-pn"] = {} | ||
| 342 | depend_tree["packages"] = {} | ||
| 343 | depend_tree["rdepends-pkg"] = {} | ||
| 344 | depend_tree["rrecs-pkg"] = {} | ||
| 345 | |||
| 346 | for task in range(len(rq.runq_fnid)): | ||
| 347 | taskname = rq.runq_task[task] | ||
| 348 | fnid = rq.runq_fnid[task] | ||
| 349 | fn = taskdata.fn_index[fnid] | ||
| 350 | pn = self.status.pkg_fn[fn] | ||
| 351 | version = "%s:%s-%s" % self.status.pkg_pepvpr[fn] | ||
| 352 | if pn not in depend_tree["pn"]: | ||
| 353 | depend_tree["pn"][pn] = {} | ||
| 354 | depend_tree["pn"][pn]["filename"] = fn | ||
| 355 | depend_tree["pn"][pn]["version"] = version | ||
| 356 | for dep in rq.runq_depends[task]: | ||
| 357 | depfn = taskdata.fn_index[rq.runq_fnid[dep]] | ||
| 358 | deppn = self.status.pkg_fn[depfn] | ||
| 359 | dotname = "%s.%s" % (pn, rq.runq_task[task]) | ||
| 360 | if not dotname in depend_tree["tdepends"]: | ||
| 361 | depend_tree["tdepends"][dotname] = [] | ||
| 362 | depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.runq_task[dep])) | ||
| 363 | if fnid not in seen_fnids: | ||
| 364 | seen_fnids.append(fnid) | ||
| 365 | packages = [] | ||
| 366 | |||
| 367 | depend_tree["depends"][pn] = [] | ||
| 368 | for dep in taskdata.depids[fnid]: | ||
| 369 | depend_tree["depends"][pn].append(taskdata.build_names_index[dep]) | ||
| 370 | |||
| 371 | depend_tree["rdepends-pn"][pn] = [] | ||
| 372 | for rdep in taskdata.rdepids[fnid]: | ||
| 373 | depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep]) | ||
| 374 | |||
| 375 | rdepends = self.status.rundeps[fn] | ||
| 376 | for package in rdepends: | ||
| 377 | depend_tree["rdepends-pkg"][package] = [] | ||
| 378 | for rdepend in rdepends[package]: | ||
| 379 | depend_tree["rdepends-pkg"][package].append(rdepend) | ||
| 380 | packages.append(package) | ||
| 381 | |||
| 382 | rrecs = self.status.runrecs[fn] | ||
| 383 | for package in rrecs: | ||
| 384 | depend_tree["rrecs-pkg"][package] = [] | ||
| 385 | for rdepend in rrecs[package]: | ||
| 386 | depend_tree["rrecs-pkg"][package].append(rdepend) | ||
| 387 | if not package in packages: | ||
| 388 | packages.append(package) | ||
| 389 | |||
| 390 | for package in packages: | ||
| 391 | if package not in depend_tree["packages"]: | ||
| 392 | depend_tree["packages"][package] = {} | ||
| 393 | depend_tree["packages"][package]["pn"] = pn | ||
| 394 | depend_tree["packages"][package]["filename"] = fn | ||
| 395 | depend_tree["packages"][package]["version"] = version | ||
| 396 | |||
| 397 | return depend_tree | ||
| 398 | |||
| 399 | |||
| 400 | def generateDepTreeEvent(self, pkgs_to_build, task): | ||
| 401 | """ | ||
| 402 | Create a task dependency graph of pkgs_to_build. | ||
| 403 | Generate an event with the result | ||
| 404 | """ | ||
| 405 | depgraph = self.generateDepTreeData(pkgs_to_build, task) | ||
| 406 | bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.configuration.data) | ||
| 407 | |||
| 408 | def generateDotGraphFiles(self, pkgs_to_build, task): | ||
| 409 | """ | ||
| 410 | Create a task dependency graph of pkgs_to_build. | ||
| 411 | Save the result to a set of .dot files. | ||
| 412 | """ | ||
| 413 | |||
| 414 | depgraph = self.generateDepTreeData(pkgs_to_build, task) | ||
| 415 | |||
| 416 | # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn | ||
| 417 | depends_file = file('pn-depends.dot', 'w' ) | ||
| 418 | print >> depends_file, "digraph depends {" | ||
| 419 | for pn in depgraph["pn"]: | ||
| 420 | fn = depgraph["pn"][pn]["filename"] | ||
| 421 | version = depgraph["pn"][pn]["version"] | ||
| 422 | print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn) | ||
| 423 | for pn in depgraph["depends"]: | ||
| 424 | for depend in depgraph["depends"][pn]: | ||
| 425 | print >> depends_file, '"%s" -> "%s"' % (pn, depend) | ||
| 426 | for pn in depgraph["rdepends-pn"]: | ||
| 427 | for rdepend in depgraph["rdepends-pn"][pn]: | ||
| 428 | print >> depends_file, '"%s" -> "%s" [style=dashed]' % (pn, rdepend) | ||
| 429 | print >> depends_file, "}" | ||
| 430 | bb.msg.plain("PN dependencies saved to 'pn-depends.dot'") | ||
| 431 | |||
| 432 | depends_file = file('package-depends.dot', 'w' ) | ||
| 433 | print >> depends_file, "digraph depends {" | ||
| 434 | for package in depgraph["packages"]: | ||
| 435 | pn = depgraph["packages"][package]["pn"] | ||
| 436 | fn = depgraph["packages"][package]["filename"] | ||
| 437 | version = depgraph["packages"][package]["version"] | ||
| 438 | if package == pn: | ||
| 439 | print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn) | ||
| 440 | else: | ||
| 441 | print >> depends_file, '"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn) | ||
| 442 | for depend in depgraph["depends"][pn]: | ||
| 443 | print >> depends_file, '"%s" -> "%s"' % (package, depend) | ||
| 444 | for package in depgraph["rdepends-pkg"]: | ||
| 445 | for rdepend in depgraph["rdepends-pkg"][package]: | ||
| 446 | print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend) | ||
| 447 | for package in depgraph["rrecs-pkg"]: | ||
| 448 | for rdepend in depgraph["rrecs-pkg"][package]: | ||
| 449 | print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend) | ||
| 450 | print >> depends_file, "}" | ||
| 451 | bb.msg.plain("Package dependencies saved to 'package-depends.dot'") | ||
| 452 | |||
| 453 | tdepends_file = file('task-depends.dot', 'w' ) | ||
| 454 | print >> tdepends_file, "digraph depends {" | ||
| 455 | for task in depgraph["tdepends"]: | ||
| 456 | (pn, taskname) = task.rsplit(".", 1) | ||
| 457 | fn = depgraph["pn"][pn]["filename"] | ||
| 458 | version = depgraph["pn"][pn]["version"] | ||
| 459 | print >> tdepends_file, '"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn) | ||
| 460 | for dep in depgraph["tdepends"][task]: | ||
| 461 | print >> tdepends_file, '"%s" -> "%s"' % (task, dep) | ||
| 462 | print >> tdepends_file, "}" | ||
| 463 | bb.msg.plain("Task dependencies saved to 'task-depends.dot'") | ||
| 464 | |||
| 465 | def buildDepgraph( self ): | ||
| 466 | all_depends = self.status.all_depends | ||
| 467 | pn_provides = self.status.pn_provides | ||
| 468 | |||
| 469 | localdata = data.createCopy(self.configuration.data) | ||
| 470 | bb.data.update_data(localdata) | ||
| 471 | bb.data.expandKeys(localdata) | ||
| 472 | |||
| 473 | def calc_bbfile_priority(filename): | ||
| 474 | for (regex, pri) in self.status.bbfile_config_priorities: | ||
| 475 | if regex.match(filename): | ||
| 476 | return pri | ||
| 477 | return 0 | ||
| 478 | |||
| 479 | # Handle PREFERRED_PROVIDERS | ||
| 480 | for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split(): | ||
| 481 | try: | ||
| 482 | (providee, provider) = p.split(':') | ||
| 483 | except: | ||
| 484 | bb.msg.fatal(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p) | ||
| 485 | continue | ||
| 486 | if providee in self.status.preferred and self.status.preferred[providee] != provider: | ||
| 487 | bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee])) | ||
| 488 | self.status.preferred[providee] = provider | ||
| 489 | |||
| 490 | # Calculate priorities for each file | ||
| 491 | for p in self.status.pkg_fn.keys(): | ||
| 492 | self.status.bbfile_priority[p] = calc_bbfile_priority(p) | ||
| 493 | |||
| 494 | def buildWorldTargetList(self): | ||
| 495 | """ | ||
| 496 | Build package list for "bitbake world" | ||
| 497 | """ | ||
| 498 | all_depends = self.status.all_depends | ||
| 499 | pn_provides = self.status.pn_provides | ||
| 500 | bb.msg.debug(1, bb.msg.domain.Parsing, "collating packages for \"world\"") | ||
| 501 | for f in self.status.possible_world: | ||
| 502 | terminal = True | ||
| 503 | pn = self.status.pkg_fn[f] | ||
| 504 | |||
| 505 | for p in pn_provides[pn]: | ||
| 506 | if p.startswith('virtual/'): | ||
| 507 | bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to %s provider starting with virtual/" % (f, p)) | ||
| 508 | terminal = False | ||
| 509 | break | ||
| 510 | for pf in self.status.providers[p]: | ||
| 511 | if self.status.pkg_fn[pf] != pn: | ||
| 512 | bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to both us and %s providing %s" % (f, pf, p)) | ||
| 513 | terminal = False | ||
| 514 | break | ||
| 515 | if terminal: | ||
| 516 | self.status.world_target.add(pn) | ||
| 517 | |||
| 518 | # drop reference count now | ||
| 519 | self.status.possible_world = None | ||
| 520 | self.status.all_depends = None | ||
| 521 | |||
| 522 | def interactiveMode( self ): | ||
| 523 | """Drop off into a shell""" | ||
| 524 | try: | ||
| 525 | from bb import shell | ||
| 526 | except ImportError, details: | ||
| 527 | bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details ) | ||
| 528 | else: | ||
| 529 | shell.start( self ) | ||
| 530 | |||
| 531 | def parseConfigurationFile( self, afile ): | ||
| 532 | try: | ||
| 533 | self.configuration.data = bb.parse.handle( afile, self.configuration.data ) | ||
| 534 | |||
| 535 | # Handle any INHERITs and inherit the base class | ||
| 536 | inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split() | ||
| 537 | for inherit in inherits: | ||
| 538 | self.configuration.data = bb.parse.handle(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True ) | ||
| 539 | |||
| 540 | # Normally we only register event handlers at the end of parsing .bb files | ||
| 541 | # We register any handlers we've found so far here... | ||
| 542 | for var in data.getVar('__BBHANDLERS', self.configuration.data) or []: | ||
| 543 | bb.event.register(var,bb.data.getVar(var, self.configuration.data)) | ||
| 544 | |||
| 545 | bb.fetch.fetcher_init(self.configuration.data) | ||
| 546 | |||
| 547 | bb.event.fire(bb.event.ConfigParsed(), self.configuration.data) | ||
| 548 | |||
| 549 | except IOError, e: | ||
| 550 | bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (afile, str(e))) | ||
| 551 | except bb.parse.ParseError, details: | ||
| 552 | bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (afile, details) ) | ||
| 553 | |||
| 554 | def handleCollections( self, collections ): | ||
| 555 | """Handle collections""" | ||
| 556 | if collections: | ||
| 557 | collection_list = collections.split() | ||
| 558 | for c in collection_list: | ||
| 559 | regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1) | ||
| 560 | if regex == None: | ||
| 561 | bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s not defined" % c) | ||
| 562 | continue | ||
| 563 | priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1) | ||
| 564 | if priority == None: | ||
| 565 | bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PRIORITY_%s not defined" % c) | ||
| 566 | continue | ||
| 567 | try: | ||
| 568 | cre = re.compile(regex) | ||
| 569 | except re.error: | ||
| 570 | bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex)) | ||
| 571 | continue | ||
| 572 | try: | ||
| 573 | pri = int(priority) | ||
| 574 | self.status.bbfile_config_priorities.append((cre, pri)) | ||
| 575 | except ValueError: | ||
| 576 | bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority)) | ||
| 577 | |||
| 578 | def buildSetVars(self): | ||
| 579 | """ | ||
| 580 | Setup any variables needed before starting a build | ||
| 581 | """ | ||
| 582 | if not bb.data.getVar("BUILDNAME", self.configuration.data): | ||
| 583 | bb.data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), self.configuration.data) | ||
| 584 | bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S',time.gmtime()), self.configuration.data) | ||
| 585 | |||
| 586 | def matchFiles(self, buildfile): | ||
| 587 | """ | ||
| 588 | Find the .bb files which match the expression in 'buildfile'. | ||
| 589 | """ | ||
| 590 | |||
| 591 | bf = os.path.abspath(buildfile) | ||
| 592 | try: | ||
| 593 | os.stat(bf) | ||
| 594 | return [bf] | ||
| 595 | except OSError: | ||
| 596 | (filelist, masked) = self.collect_bbfiles() | ||
| 597 | regexp = re.compile(buildfile) | ||
| 598 | matches = [] | ||
| 599 | for f in filelist: | ||
| 600 | if regexp.search(f) and os.path.isfile(f): | ||
| 601 | bf = f | ||
| 602 | matches.append(f) | ||
| 603 | return matches | ||
| 604 | |||
| 605 | def matchFile(self, buildfile): | ||
| 606 | """ | ||
| 607 | Find the .bb file which matches the expression in 'buildfile'. | ||
| 608 | Raise an error if multiple files | ||
| 609 | """ | ||
| 610 | matches = self.matchFiles(buildfile) | ||
| 611 | if len(matches) != 1: | ||
| 612 | bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches))) | ||
| 613 | for f in matches: | ||
| 614 | bb.msg.error(bb.msg.domain.Parsing, " %s" % f) | ||
| 615 | raise MultipleMatches | ||
| 616 | return matches[0] | ||
| 617 | |||
| 618 | def buildFile(self, buildfile, task): | ||
| 619 | """ | ||
| 620 | Build the file matching regexp buildfile | ||
| 621 | """ | ||
| 622 | |||
| 623 | # Parse the configuration here. We need to do it explicitly here since | ||
| 624 | # buildFile() doesn't use the cache | ||
| 625 | self.parseConfiguration() | ||
| 626 | |||
| 627 | # If we are told to do the None task then query the default task | ||
| 628 | if (task == None): | ||
| 629 | task = self.configuration.cmd | ||
| 630 | |||
| 631 | fn = self.matchFile(buildfile) | ||
| 632 | self.buildSetVars() | ||
| 633 | |||
| 634 | # Load data into the cache for fn and parse the loaded cache data | ||
| 635 | self.bb_cache = bb.cache.init(self) | ||
| 636 | self.status = bb.cache.CacheData() | ||
| 637 | self.bb_cache.loadData(fn, self.configuration.data, self.status) | ||
| 638 | |||
| 639 | # Tweak some variables | ||
| 640 | item = self.bb_cache.getVar('PN', fn, True) | ||
| 641 | self.status.ignored_dependencies = set() | ||
| 642 | self.status.bbfile_priority[fn] = 1 | ||
| 643 | |||
| 644 | # Remove external dependencies | ||
| 645 | self.status.task_deps[fn]['depends'] = {} | ||
| 646 | self.status.deps[fn] = [] | ||
| 647 | self.status.rundeps[fn] = [] | ||
| 648 | self.status.runrecs[fn] = [] | ||
| 649 | |||
| 650 | # Remove stamp for target if force mode active | ||
| 651 | if self.configuration.force: | ||
| 652 | bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (task, fn)) | ||
| 653 | bb.build.del_stamp('do_%s' % task, self.status, fn) | ||
| 654 | |||
| 655 | # Setup taskdata structure | ||
| 656 | taskdata = bb.taskdata.TaskData(self.configuration.abort) | ||
| 657 | taskdata.add_provider(self.configuration.data, self.status, item) | ||
| 658 | |||
| 659 | buildname = bb.data.getVar("BUILDNAME", self.configuration.data) | ||
| 660 | bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data) | ||
| 661 | |||
| 662 | # Execute the runqueue | ||
| 663 | runlist = [[item, "do_%s" % task]] | ||
| 664 | |||
| 665 | rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) | ||
| 666 | |||
| 667 | def buildFileIdle(server, rq, abort): | ||
| 668 | |||
| 669 | if abort or self.cookerAction == cookerStop: | ||
| 670 | rq.finish_runqueue(True) | ||
| 671 | elif self.cookerAction == cookerShutdown: | ||
| 672 | rq.finish_runqueue(False) | ||
| 673 | failures = 0 | ||
| 674 | try: | ||
| 675 | retval = rq.execute_runqueue() | ||
| 676 | except runqueue.TaskFailure, fnids: | ||
| 677 | for fnid in fnids: | ||
| 678 | bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) | ||
| 679 | failures = failures + 1 | ||
| 680 | retval = False | ||
| 681 | if not retval: | ||
| 682 | self.command.finishAsyncCommand() | ||
| 683 | bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data) | ||
| 684 | return False | ||
| 685 | return 0.5 | ||
| 686 | |||
| 687 | self.server.register_idle_function(buildFileIdle, rq) | ||
| 688 | |||
| 689 | def buildTargets(self, targets, task): | ||
| 690 | """ | ||
| 691 | Attempt to build the targets specified | ||
| 692 | """ | ||
| 693 | |||
| 694 | # Need files parsed | ||
| 695 | self.updateCache() | ||
| 696 | |||
| 697 | # If we are told to do the NULL task then query the default task | ||
| 698 | if (task == None): | ||
| 699 | task = self.configuration.cmd | ||
| 700 | |||
| 701 | targets = self.checkPackages(targets) | ||
| 702 | |||
| 703 | def buildTargetsIdle(server, rq, abort): | ||
| 704 | |||
| 705 | if abort or self.cookerAction == cookerStop: | ||
| 706 | rq.finish_runqueue(True) | ||
| 707 | elif self.cookerAction == cookerShutdown: | ||
| 708 | rq.finish_runqueue(False) | ||
| 709 | failures = 0 | ||
| 710 | try: | ||
| 711 | retval = rq.execute_runqueue() | ||
| 712 | except runqueue.TaskFailure, fnids: | ||
| 713 | for fnid in fnids: | ||
| 714 | bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) | ||
| 715 | failures = failures + 1 | ||
| 716 | retval = False | ||
| 717 | if not retval: | ||
| 718 | self.command.finishAsyncCommand() | ||
| 719 | bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data) | ||
| 720 | return None | ||
| 721 | return 0.5 | ||
| 722 | |||
| 723 | self.buildSetVars() | ||
| 724 | |||
| 725 | buildname = bb.data.getVar("BUILDNAME", self.configuration.data) | ||
| 726 | bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data) | ||
| 727 | |||
| 728 | localdata = data.createCopy(self.configuration.data) | ||
| 729 | bb.data.update_data(localdata) | ||
| 730 | bb.data.expandKeys(localdata) | ||
| 731 | |||
| 732 | taskdata = bb.taskdata.TaskData(self.configuration.abort) | ||
| 733 | |||
| 734 | runlist = [] | ||
| 735 | for k in targets: | ||
| 736 | taskdata.add_provider(localdata, self.status, k) | ||
| 737 | runlist.append([k, "do_%s" % task]) | ||
| 738 | taskdata.add_unresolved(localdata, self.status) | ||
| 739 | |||
| 740 | rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) | ||
| 741 | |||
| 742 | self.server.register_idle_function(buildTargetsIdle, rq) | ||
| 743 | |||
| 744 | def updateCache(self): | ||
| 745 | |||
| 746 | if self.cookerState == cookerParsed: | ||
| 747 | return | ||
| 748 | |||
| 749 | if self.cookerState != cookerParsing: | ||
| 750 | |||
| 751 | self.parseConfiguration () | ||
| 752 | |||
| 753 | # Import Psyco if available and not disabled | ||
| 754 | import platform | ||
| 755 | if platform.machine() in ['i386', 'i486', 'i586', 'i686']: | ||
| 756 | if not self.configuration.disable_psyco: | ||
| 757 | try: | ||
| 758 | import psyco | ||
| 759 | except ImportError: | ||
| 760 | bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.") | ||
| 761 | else: | ||
| 762 | psyco.bind( CookerParser.parse_next ) | ||
| 763 | else: | ||
| 764 | bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.") | ||
| 765 | |||
| 766 | self.status = bb.cache.CacheData() | ||
| 767 | |||
| 768 | ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or "" | ||
| 769 | self.status.ignored_dependencies = set(ignore.split()) | ||
| 770 | |||
| 771 | for dep in self.configuration.extra_assume_provided: | ||
| 772 | self.status.ignored_dependencies.add(dep) | ||
| 773 | |||
| 774 | self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) ) | ||
| 775 | |||
| 776 | bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files") | ||
| 777 | (filelist, masked) = self.collect_bbfiles() | ||
| 778 | bb.data.renameVar("__depends", "__base_depends", self.configuration.data) | ||
| 779 | |||
| 780 | self.parser = CookerParser(self, filelist, masked) | ||
| 781 | self.cookerState = cookerParsing | ||
| 782 | |||
| 783 | if not self.parser.parse_next(): | ||
| 784 | bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete") | ||
| 785 | self.buildDepgraph() | ||
| 786 | self.cookerState = cookerParsed | ||
| 787 | return None | ||
| 788 | |||
| 789 | return True | ||
| 790 | |||
| 791 | def checkPackages(self, pkgs_to_build): | ||
| 792 | |||
| 793 | if len(pkgs_to_build) == 0: | ||
| 794 | raise NothingToBuild | ||
| 795 | |||
| 796 | if 'world' in pkgs_to_build: | ||
| 797 | self.buildWorldTargetList() | ||
| 798 | pkgs_to_build.remove('world') | ||
| 799 | for t in self.status.world_target: | ||
| 800 | pkgs_to_build.append(t) | ||
| 801 | |||
| 802 | return pkgs_to_build | ||
| 803 | |||
| 804 | def get_bbfiles( self, path = os.getcwd() ): | ||
| 805 | """Get list of default .bb files by reading out the current directory""" | ||
| 806 | contents = os.listdir(path) | ||
| 807 | bbfiles = [] | ||
| 808 | for f in contents: | ||
| 809 | (root, ext) = os.path.splitext(f) | ||
| 810 | if ext == ".bb": | ||
| 811 | bbfiles.append(os.path.abspath(os.path.join(os.getcwd(),f))) | ||
| 812 | return bbfiles | ||
| 813 | |||
| 814 | def find_bbfiles( self, path ): | ||
| 815 | """Find all the .bb files in a directory""" | ||
| 816 | from os.path import join | ||
| 817 | |||
| 818 | found = [] | ||
| 819 | for dir, dirs, files in os.walk(path): | ||
| 820 | for ignored in ('SCCS', 'CVS', '.svn'): | ||
| 821 | if ignored in dirs: | ||
| 822 | dirs.remove(ignored) | ||
| 823 | found += [join(dir,f) for f in files if f.endswith('.bb')] | ||
| 824 | |||
| 825 | return found | ||
| 826 | |||
| 827 | def collect_bbfiles( self ): | ||
| 828 | """Collect all available .bb build files""" | ||
| 829 | parsed, cached, skipped, masked = 0, 0, 0, 0 | ||
| 830 | self.bb_cache = bb.cache.init(self) | ||
| 831 | |||
| 832 | files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split() | ||
| 833 | data.setVar("BBFILES", " ".join(files), self.configuration.data) | ||
| 834 | |||
| 835 | if not len(files): | ||
| 836 | files = self.get_bbfiles() | ||
| 837 | |||
| 838 | if not len(files): | ||
| 839 | bb.msg.error(bb.msg.domain.Collection, "no files to build.") | ||
| 840 | |||
| 841 | newfiles = [] | ||
| 842 | for f in files: | ||
| 843 | if os.path.isdir(f): | ||
| 844 | dirfiles = self.find_bbfiles(f) | ||
| 845 | if dirfiles: | ||
| 846 | newfiles += dirfiles | ||
| 847 | continue | ||
| 848 | else: | ||
| 849 | globbed = glob.glob(f) | ||
| 850 | if not globbed and os.path.exists(f): | ||
| 851 | globbed = [f] | ||
| 852 | newfiles += globbed | ||
| 853 | |||
| 854 | bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1) | ||
| 855 | |||
| 856 | if not bbmask: | ||
| 857 | return (newfiles, 0) | ||
| 858 | |||
| 859 | try: | ||
| 860 | bbmask_compiled = re.compile(bbmask) | ||
| 861 | except sre_constants.error: | ||
| 862 | bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.") | ||
| 863 | |||
| 864 | finalfiles = [] | ||
| 865 | for f in newfiles: | ||
| 866 | if bbmask_compiled.search(f): | ||
| 867 | bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f) | ||
| 868 | masked += 1 | ||
| 869 | continue | ||
| 870 | finalfiles.append(f) | ||
| 871 | |||
| 872 | return (finalfiles, masked) | ||
| 873 | |||
| 874 | def serve(self): | ||
| 875 | |||
| 876 | # Empty the environment. The environment will be populated as | ||
| 877 | # necessary from the data store. | ||
| 878 | bb.utils.empty_environment() | ||
| 879 | |||
| 880 | if self.configuration.profile: | ||
| 881 | try: | ||
| 882 | import cProfile as profile | ||
| 883 | except: | ||
| 884 | import profile | ||
| 885 | |||
| 886 | profile.runctx("self.server.serve_forever()", globals(), locals(), "profile.log") | ||
| 887 | |||
| 888 | # Redirect stdout to capture profile information | ||
| 889 | pout = open('profile.log.processed', 'w') | ||
| 890 | so = sys.stdout.fileno() | ||
| 891 | os.dup2(pout.fileno(), so) | ||
| 892 | |||
| 893 | import pstats | ||
| 894 | p = pstats.Stats('profile.log') | ||
| 895 | p.sort_stats('time') | ||
| 896 | p.print_stats() | ||
| 897 | p.print_callers() | ||
| 898 | p.sort_stats('cumulative') | ||
| 899 | p.print_stats() | ||
| 900 | |||
| 901 | os.dup2(so, pout.fileno()) | ||
| 902 | pout.flush() | ||
| 903 | pout.close() | ||
| 904 | else: | ||
| 905 | self.server.serve_forever() | ||
| 906 | |||
| 907 | bb.event.fire(CookerExit(), self.configuration.event_data) | ||
| 908 | |||
| 909 | class CookerExit(bb.event.Event): | ||
| 910 | """ | ||
| 911 | Notify clients of the Cooker shutdown | ||
| 912 | """ | ||
| 913 | |||
| 914 | def __init__(self): | ||
| 915 | bb.event.Event.__init__(self) | ||
| 916 | |||
| 917 | class CookerParser: | ||
| 918 | def __init__(self, cooker, filelist, masked): | ||
| 919 | # Internal data | ||
| 920 | self.filelist = filelist | ||
| 921 | self.cooker = cooker | ||
| 922 | |||
| 923 | # Accounting statistics | ||
| 924 | self.parsed = 0 | ||
| 925 | self.cached = 0 | ||
| 926 | self.error = 0 | ||
| 927 | self.masked = masked | ||
| 928 | self.total = len(filelist) | ||
| 929 | |||
| 930 | self.skipped = 0 | ||
| 931 | self.virtuals = 0 | ||
| 932 | |||
| 933 | # Pointer to the next file to parse | ||
| 934 | self.pointer = 0 | ||
| 935 | |||
| 936 | def parse_next(self): | ||
| 937 | if self.pointer < len(self.filelist): | ||
| 938 | f = self.filelist[self.pointer] | ||
| 939 | cooker = self.cooker | ||
| 940 | |||
| 941 | try: | ||
| 942 | fromCache, skipped, virtuals = cooker.bb_cache.loadData(f, cooker.configuration.data, cooker.status) | ||
| 943 | if fromCache: | ||
| 944 | self.cached += 1 | ||
| 945 | else: | ||
| 946 | self.parsed += 1 | ||
| 947 | |||
| 948 | self.skipped += skipped | ||
| 949 | self.virtuals += virtuals | ||
| 950 | |||
| 951 | except IOError, e: | ||
| 952 | self.error += 1 | ||
| 953 | cooker.bb_cache.remove(f) | ||
| 954 | bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e)) | ||
| 955 | pass | ||
| 956 | except KeyboardInterrupt: | ||
| 957 | cooker.bb_cache.remove(f) | ||
| 958 | cooker.bb_cache.sync() | ||
| 959 | raise | ||
| 960 | except Exception, e: | ||
| 961 | self.error += 1 | ||
| 962 | cooker.bb_cache.remove(f) | ||
| 963 | bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f)) | ||
| 964 | except: | ||
| 965 | cooker.bb_cache.remove(f) | ||
| 966 | raise | ||
| 967 | finally: | ||
| 968 | bb.event.fire(bb.event.ParseProgress(self.cached, self.parsed, self.skipped, self.masked, self.virtuals, self.error, self.total), cooker.configuration.event_data) | ||
| 969 | |||
| 970 | self.pointer += 1 | ||
| 971 | |||
| 972 | if self.pointer >= self.total: | ||
| 973 | cooker.bb_cache.sync() | ||
| 974 | if self.error > 0: | ||
| 975 | raise ParsingErrorsFound | ||
| 976 | return False | ||
| 977 | return True | ||
| 978 | |||
