summaryrefslogtreecommitdiffstats
path: root/bitbake/bin
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/bin')
-rwxr-xr-xbitbake/bin/bitbake45
l---------bitbake/bin/bitbake-config-build1
-rwxr-xr-xbitbake/bin/bitbake-diffsigs208
l---------bitbake/bin/bitbake-dumpsig1
-rwxr-xr-xbitbake/bin/bitbake-getvar71
-rwxr-xr-xbitbake/bin/bitbake-hashclient462
-rwxr-xr-xbitbake/bin/bitbake-hashserv179
-rwxr-xr-xbitbake/bin/bitbake-layers106
-rwxr-xr-xbitbake/bin/bitbake-prserv113
-rwxr-xr-xbitbake/bin/bitbake-selftest80
-rwxr-xr-xbitbake/bin/bitbake-server56
-rwxr-xr-xbitbake/bin/bitbake-setup860
-rwxr-xr-xbitbake/bin/bitbake-worker590
-rwxr-xr-xbitbake/bin/git-make-shallow175
-rwxr-xr-xbitbake/bin/toaster332
-rwxr-xr-xbitbake/bin/toaster-eventreplay59
16 files changed, 0 insertions, 3338 deletions
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
deleted file mode 100755
index 3acf53229b..0000000000
--- a/bitbake/bin/bitbake
+++ /dev/null
@@ -1,45 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright (C) 2003, 2004 Chris Larson
4# Copyright (C) 2003, 2004 Phil Blundell
5# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
6# Copyright (C) 2005 Holger Hans Peter Freyther
7# Copyright (C) 2005 ROAD GmbH
8# Copyright (C) 2006 Richard Purdie
9#
10# SPDX-License-Identifier: GPL-2.0-only
11#
12
13import os
14import sys
15import warnings
16warnings.simplefilter("default")
17
18sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
19 'lib'))
20try:
21 import bb
22except RuntimeError as exc:
23 sys.exit(str(exc))
24
25from bb import cookerdata
26from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
27
28bb.utils.check_system_locale()
29
30__version__ = "2.15.2"
31
32if __name__ == "__main__":
33 if __version__ != bb.__version__:
34 sys.exit("Bitbake core version and program version mismatch!")
35 try:
36 sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
37 cookerdata.CookerConfiguration()))
38 except BBMainException as err:
39 sys.exit(err)
40 except bb.BBHandledException:
41 sys.exit(1)
42 except Exception:
43 import traceback
44 traceback.print_exc()
45 sys.exit(1)
diff --git a/bitbake/bin/bitbake-config-build b/bitbake/bin/bitbake-config-build
deleted file mode 120000
index 11e6df80c4..0000000000
--- a/bitbake/bin/bitbake-config-build
+++ /dev/null
@@ -1 +0,0 @@
1bitbake-layers \ No newline at end of file
diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs
deleted file mode 100755
index 9d6cb8c944..0000000000
--- a/bitbake/bin/bitbake-diffsigs
+++ /dev/null
@@ -1,208 +0,0 @@
1#!/usr/bin/env python3
2
3# bitbake-diffsigs / bitbake-dumpsig
4# BitBake task signature data dump and comparison utility
5#
6# Copyright (C) 2012-2013, 2017 Intel Corporation
7#
8# SPDX-License-Identifier: GPL-2.0-only
9#
10
11import os
12import sys
13import warnings
14
15warnings.simplefilter("default")
16import argparse
17import logging
18import pickle
19
20sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
21
22import bb.tinfoil
23import bb.siggen
24import bb.msg
25
26myname = os.path.basename(sys.argv[0])
27logger = bb.msg.logger_create(myname)
28
29is_dump = myname == 'bitbake-dumpsig'
30
31
32def find_siginfo(tinfoil, pn, taskname, sigs=None):
33 result = None
34 tinfoil.set_event_mask(['bb.event.FindSigInfoResult',
35 'logging.LogRecord',
36 'bb.command.CommandCompleted',
37 'bb.command.CommandFailed'])
38 ret = tinfoil.run_command('findSigInfo', pn, taskname, sigs)
39 if ret:
40 while True:
41 event = tinfoil.wait_event(1)
42 if event:
43 if isinstance(event, bb.command.CommandCompleted):
44 break
45 elif isinstance(event, bb.command.CommandFailed):
46 logger.error(str(event))
47 sys.exit(2)
48 elif isinstance(event, bb.event.FindSigInfoResult):
49 result = event.result
50 elif isinstance(event, logging.LogRecord):
51 logger.handle(event)
52 else:
53 logger.error('No result returned from findSigInfo command')
54 sys.exit(2)
55 return result
56
57
58def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
59 """ Find the most recent signature files for the specified PN/task """
60
61 if not taskname.startswith('do_'):
62 taskname = 'do_%s' % taskname
63
64 if sig1 and sig2:
65 sigfiles = find_siginfo(bbhandler, pn, taskname, [sig1, sig2])
66 if not sigfiles:
67 logger.error('No sigdata files found matching %s %s matching either %s or %s' % (pn, taskname, sig1, sig2))
68 sys.exit(1)
69 elif sig1 not in sigfiles:
70 logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig1))
71 sys.exit(1)
72 elif sig2 not in sigfiles:
73 logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig2))
74 sys.exit(1)
75
76 latestfiles = [sigfiles[sig1]['path'], sigfiles[sig2]['path']]
77 else:
78 sigfiles = find_siginfo(bbhandler, pn, taskname)
79 latestsigs = sorted(sigfiles.keys(), key=lambda h: sigfiles[h]['time'])[-2:]
80 if not latestsigs:
81 logger.error('No sigdata files found matching %s %s' % (pn, taskname))
82 sys.exit(1)
83 latestfiles = [sigfiles[latestsigs[0]]['path']]
84 if len(latestsigs) > 1:
85 latestfiles.append(sigfiles[latestsigs[1]]['path'])
86
87 return latestfiles
88
89
90# Define recursion callback
91def recursecb(key, hash1, hash2):
92 hashes = [hash1, hash2]
93 hashfiles = find_siginfo(tinfoil, key, None, hashes)
94
95 recout = []
96 if not hashfiles:
97 recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
98 elif hash1 not in hashfiles:
99 recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
100 elif hash2 not in hashfiles:
101 recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
102 else:
103 out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, color=color)
104 for change in out2:
105 for line in change.splitlines():
106 recout.append(' ' + line)
107
108 return recout
109
110
111parser = argparse.ArgumentParser(
112 description=("Dumps" if is_dump else "Compares") + " siginfo/sigdata files written out by BitBake")
113
114parser.add_argument('-D', '--debug',
115 help='Enable debug output',
116 action='store_true')
117
118if is_dump:
119 parser.add_argument("-t", "--task",
120 help="find the signature data file for the last run of the specified task",
121 action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
122
123 parser.add_argument("sigdatafile1",
124 help="Signature file to dump. Not used when using -t/--task.",
125 action="store", nargs='?', metavar="sigdatafile")
126else:
127 parser.add_argument('-c', '--color',
128 help='Colorize the output (where %(metavar)s is %(choices)s)',
129 choices=['auto', 'always', 'never'], default='auto', metavar='color')
130
131 parser.add_argument('-d', '--dump',
132 help='Dump the last signature data instead of comparing (equivalent to using bitbake-dumpsig)',
133 action='store_true')
134
135 parser.add_argument("-t", "--task",
136 help="find the signature data files for the last two runs of the specified task and compare them",
137 action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
138
139 parser.add_argument("-s", "--signature",
140 help="With -t/--task, specify the signatures to look for instead of taking the last two",
141 action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
142
143 parser.add_argument("sigdatafile1",
144 help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
145 action="store", nargs='?')
146
147 parser.add_argument("sigdatafile2",
148 help="Second signature file to compare",
149 action="store", nargs='?')
150
151options = parser.parse_args()
152if is_dump:
153 options.color = 'never'
154 options.dump = True
155 options.sigdatafile2 = None
156 options.sigargs = None
157
158if options.debug:
159 logger.setLevel(logging.DEBUG)
160
161color = (options.color == 'always' or (options.color == 'auto' and sys.stdout.isatty()))
162
163if options.taskargs:
164 with bb.tinfoil.Tinfoil() as tinfoil:
165 tinfoil.prepare(config_only=True)
166 if not options.dump and options.sigargs:
167 files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0],
168 options.sigargs[1])
169 else:
170 files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1])
171
172 if options.dump:
173 logger.debug("Signature file: %s" % files[-1])
174 output = bb.siggen.dump_sigfile(files[-1])
175 else:
176 if len(files) < 2:
177 logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (
178 options.taskargs[0], options.taskargs[1]))
179 sys.exit(1)
180
181 # Recurse into signature comparison
182 logger.debug("Signature file (previous): %s" % files[-2])
183 logger.debug("Signature file (latest): %s" % files[-1])
184 output = bb.siggen.compare_sigfiles(files[-2], files[-1], recursecb, color=color)
185else:
186 if options.sigargs:
187 logger.error('-s/--signature can only be used together with -t/--task')
188 sys.exit(1)
189 try:
190 if not options.dump and options.sigdatafile1 and options.sigdatafile2:
191 with bb.tinfoil.Tinfoil() as tinfoil:
192 tinfoil.prepare(config_only=True)
193 output = bb.siggen.compare_sigfiles(options.sigdatafile1, options.sigdatafile2, recursecb, color=color)
194 elif options.sigdatafile1:
195 output = bb.siggen.dump_sigfile(options.sigdatafile1)
196 else:
197 logger.error('Must specify signature file(s) or -t/--task')
198 parser.print_help()
199 sys.exit(1)
200 except IOError as e:
201 logger.error(str(e))
202 sys.exit(1)
203 except (pickle.UnpicklingError, EOFError):
204 logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files')
205 sys.exit(1)
206
207if output:
208 print('\n'.join(output))
diff --git a/bitbake/bin/bitbake-dumpsig b/bitbake/bin/bitbake-dumpsig
deleted file mode 120000
index b1e8489b45..0000000000
--- a/bitbake/bin/bitbake-dumpsig
+++ /dev/null
@@ -1 +0,0 @@
1bitbake-diffsigs \ No newline at end of file
diff --git a/bitbake/bin/bitbake-getvar b/bitbake/bin/bitbake-getvar
deleted file mode 100755
index 378fb13572..0000000000
--- a/bitbake/bin/bitbake-getvar
+++ /dev/null
@@ -1,71 +0,0 @@
1#! /usr/bin/env python3
2#
3# Copyright (C) 2021 Richard Purdie
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import argparse
9import io
10import os
11import sys
12import warnings
13import logging
14warnings.simplefilter("default")
15
16bindir = os.path.dirname(__file__)
17topdir = os.path.dirname(bindir)
18sys.path[0:0] = [os.path.join(topdir, 'lib')]
19
20import bb.providers
21import bb.tinfoil
22
23if __name__ == "__main__":
24 parser = argparse.ArgumentParser(description="Bitbake Query Variable")
25 parser.add_argument("variable", help="variable name to query")
26 parser.add_argument("-r", "--recipe", help="Recipe name to query", default=None, required=False)
27 parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
28 parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
29 parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
30 parser.add_argument('-q', '--quiet', help='Silence bitbake server logging', action="store_true")
31 parser.add_argument('--ignore-undefined', help='Suppress any errors related to undefined variables', action="store_true")
32 args = parser.parse_args()
33
34 if not args.value:
35 if args.unexpand:
36 sys.exit("--unexpand only makes sense with --value")
37
38 if args.flag:
39 sys.exit("--flag only makes sense with --value")
40
41 quiet = args.quiet or args.value
42 if quiet:
43 logger = logging.getLogger("BitBake")
44 logger.setLevel(logging.WARNING)
45
46 with bb.tinfoil.Tinfoil(tracking=True, setup_logging=not quiet) as tinfoil:
47 if args.recipe:
48 tinfoil.prepare(quiet=3 if quiet else 2)
49 try:
50 d = tinfoil.parse_recipe(args.recipe)
51 except bb.providers.NoProvider as e:
52 sys.exit(str(e))
53 else:
54 tinfoil.prepare(quiet=2, config_only=True)
55 # Expand keys and run anonymous functions to get identical result to
56 # "bitbake -e"
57 d = tinfoil.finalizeData()
58
59 value = None
60 if args.flag:
61 value = d.getVarFlag(args.variable, args.flag, expand=not args.unexpand)
62 if value is None and not args.ignore_undefined:
63 sys.exit(f"The flag '{args.flag}' is not defined for variable '{args.variable}'")
64 else:
65 value = d.getVar(args.variable, expand=not args.unexpand)
66 if value is None and not args.ignore_undefined:
67 sys.exit(f"The variable '{args.variable}' is not defined")
68 if args.value:
69 print(str(value if value is not None else ""))
70 else:
71 bb.data.emit_var(args.variable, d=d, all=True)
diff --git a/bitbake/bin/bitbake-hashclient b/bitbake/bin/bitbake-hashclient
deleted file mode 100755
index b8755c5797..0000000000
--- a/bitbake/bin/bitbake-hashclient
+++ /dev/null
@@ -1,462 +0,0 @@
1#! /usr/bin/env python3
2#
3# Copyright (C) 2019 Garmin Ltd.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import argparse
9import hashlib
10import logging
11import os
12import pprint
13import sys
14import threading
15import time
16import warnings
17import netrc
18import json
19import statistics
20import textwrap
21warnings.simplefilter("default")
22
23try:
24 import tqdm
25 ProgressBar = tqdm.tqdm
26except ImportError:
27 class ProgressBar(object):
28 def __init__(self, *args, **kwargs):
29 pass
30
31 def __enter__(self):
32 return self
33
34 def __exit__(self, *args, **kwargs):
35 pass
36
37 def update(self):
38 pass
39
40sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
41
42import hashserv
43import bb.asyncrpc
44
45DEFAULT_ADDRESS = 'unix://./hashserve.sock'
46METHOD = 'stress.test.method'
47
48def print_user(u):
49 print(f"Username: {u['username']}")
50 if "permissions" in u:
51 print("Permissions: " + " ".join(u["permissions"]))
52 if "token" in u:
53 print(f"Token: {u['token']}")
54
55
56def main():
57 def handle_get(args, client):
58 result = client.get_taskhash(args.method, args.taskhash, all_properties=True)
59 if not result:
60 return 0
61
62 print(json.dumps(result, sort_keys=True, indent=4))
63 return 0
64
65 def handle_get_outhash(args, client):
66 result = client.get_outhash(args.method, args.outhash, args.taskhash)
67 if not result:
68 return 0
69
70 print(json.dumps(result, sort_keys=True, indent=4))
71 return 0
72
73 def handle_stats(args, client):
74 if args.reset:
75 s = client.reset_stats()
76 else:
77 s = client.get_stats()
78 print(json.dumps(s, sort_keys=True, indent=4))
79 return 0
80
81 def handle_stress(args, client):
82 def thread_main(pbar, lock):
83 nonlocal found_hashes
84 nonlocal missed_hashes
85 nonlocal max_time
86 nonlocal times
87
88 with hashserv.create_client(args.address) as client:
89 for i in range(args.requests):
90 taskhash = hashlib.sha256()
91 taskhash.update(args.taskhash_seed.encode('utf-8'))
92 taskhash.update(str(i).encode('utf-8'))
93
94 start_time = time.perf_counter()
95 l = client.get_unihash(METHOD, taskhash.hexdigest())
96 elapsed = time.perf_counter() - start_time
97
98 with lock:
99 if l:
100 found_hashes += 1
101 else:
102 missed_hashes += 1
103
104 times.append(elapsed)
105 pbar.update()
106
107 max_time = 0
108 found_hashes = 0
109 missed_hashes = 0
110 lock = threading.Lock()
111 times = []
112 start_time = time.perf_counter()
113 with ProgressBar(total=args.clients * args.requests) as pbar:
114 threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)]
115 for t in threads:
116 t.start()
117
118 for t in threads:
119 t.join()
120 total_elapsed = time.perf_counter() - start_time
121
122 with lock:
123 mean = statistics.mean(times)
124 median = statistics.median(times)
125 stddev = statistics.pstdev(times)
126
127 print(f"Number of clients: {args.clients}")
128 print(f"Requests per client: {args.requests}")
129 print(f"Number of requests: {len(times)}")
130 print(f"Total elapsed time: {total_elapsed:.3f}s")
131 print(f"Total request rate: {len(times)/total_elapsed:.3f} req/s")
132 print(f"Average request time: {mean:.3f}s")
133 print(f"Median request time: {median:.3f}s")
134 print(f"Request time std dev: {stddev:.3f}s")
135 print(f"Maximum request time: {max(times):.3f}s")
136 print(f"Minimum request time: {min(times):.3f}s")
137 print(f"Hashes found: {found_hashes}")
138 print(f"Hashes missed: {missed_hashes}")
139
140 if args.report:
141 with ProgressBar(total=args.requests) as pbar:
142 for i in range(args.requests):
143 taskhash = hashlib.sha256()
144 taskhash.update(args.taskhash_seed.encode('utf-8'))
145 taskhash.update(str(i).encode('utf-8'))
146
147 outhash = hashlib.sha256()
148 outhash.update(args.outhash_seed.encode('utf-8'))
149 outhash.update(str(i).encode('utf-8'))
150
151 client.report_unihash(taskhash.hexdigest(), METHOD, outhash.hexdigest(), taskhash.hexdigest())
152
153 with lock:
154 pbar.update()
155
156 def handle_remove(args, client):
157 where = {k: v for k, v in args.where}
158 if where:
159 result = client.remove(where)
160 print("Removed %d row(s)" % (result["count"]))
161 else:
162 print("No query specified")
163
164 def handle_clean_unused(args, client):
165 result = client.clean_unused(args.max_age)
166 print("Removed %d rows" % (result["count"]))
167 return 0
168
169 def handle_refresh_token(args, client):
170 r = client.refresh_token(args.username)
171 print_user(r)
172
173 def handle_set_user_permissions(args, client):
174 r = client.set_user_perms(args.username, args.permissions)
175 print_user(r)
176
177 def handle_get_user(args, client):
178 r = client.get_user(args.username)
179 print_user(r)
180
181 def handle_get_all_users(args, client):
182 users = client.get_all_users()
183 print("{username:20}| {permissions}".format(username="Username", permissions="Permissions"))
184 print(("-" * 20) + "+" + ("-" * 20))
185 for u in users:
186 print("{username:20}| {permissions}".format(username=u["username"], permissions=" ".join(u["permissions"])))
187
188 def handle_new_user(args, client):
189 r = client.new_user(args.username, args.permissions)
190 print_user(r)
191
192 def handle_delete_user(args, client):
193 r = client.delete_user(args.username)
194 print_user(r)
195
196 def handle_get_db_usage(args, client):
197 usage = client.get_db_usage()
198 print(usage)
199 tables = sorted(usage.keys())
200 print("{name:20}| {rows:20}".format(name="Table name", rows="Rows"))
201 print(("-" * 20) + "+" + ("-" * 20))
202 for t in tables:
203 print("{name:20}| {rows:<20}".format(name=t, rows=usage[t]["rows"]))
204 print()
205
206 total_rows = sum(t["rows"] for t in usage.values())
207 print(f"Total rows: {total_rows}")
208
209 def handle_get_db_query_columns(args, client):
210 columns = client.get_db_query_columns()
211 print("\n".join(sorted(columns)))
212
213 def handle_gc_status(args, client):
214 result = client.gc_status()
215 if not result["mark"]:
216 print("No Garbage collection in progress")
217 return 0
218
219 print("Current Mark: %s" % result["mark"])
220 print("Total hashes to keep: %d" % result["keep"])
221 print("Total hashes to remove: %s" % result["remove"])
222 return 0
223
224 def handle_gc_mark(args, client):
225 where = {k: v for k, v in args.where}
226 result = client.gc_mark(args.mark, where)
227 print("New hashes marked: %d" % result["count"])
228 return 0
229
230 def handle_gc_mark_stream(args, client):
231 stdin = (l.strip() for l in sys.stdin)
232 marked_hashes = 0
233
234 try:
235 result = client.gc_mark_stream(args.mark, stdin)
236 marked_hashes = result["count"]
237 except ConnectionError:
238 logger.warning(
239 "Server doesn't seem to support `gc-mark-stream`. Sending "
240 "hashes sequentially using `gc-mark` API."
241 )
242 for line in stdin:
243 pairs = line.split()
244 condition = dict(zip(pairs[::2], pairs[1::2]))
245 result = client.gc_mark(args.mark, condition)
246 marked_hashes += result["count"]
247
248 print("New hashes marked: %d" % marked_hashes)
249 return 0
250
251 def handle_gc_sweep(args, client):
252 result = client.gc_sweep(args.mark)
253 print("Removed %d rows" % result["count"])
254 return 0
255
256 def handle_unihash_exists(args, client):
257 result = client.unihash_exists(args.unihash)
258 if args.quiet:
259 return 0 if result else 1
260
261 print("true" if result else "false")
262 return 0
263
264 def handle_ping(args, client):
265 times = []
266 for i in range(1, args.count + 1):
267 if not args.quiet:
268 print(f"Ping {i} of {args.count}... ", end="")
269 start_time = time.perf_counter()
270 client.ping()
271 elapsed = time.perf_counter() - start_time
272 times.append(elapsed)
273 if not args.quiet:
274 print(f"{elapsed:.3f}s")
275
276 mean = statistics.mean(times)
277 median = statistics.median(times)
278 std_dev = statistics.pstdev(times)
279
280 if not args.quiet:
281 print("------------------------")
282 print(f"Number of pings: {len(times)}")
283 print(f"Average round trip time: {mean:.3f}s")
284 print(f"Median round trip time: {median:.3f}s")
285 print(f"Round trip time std dev: {std_dev:.3f}s")
286 print(f"Min time is: {min(times):.3f}s")
287 print(f"Max time is: {max(times):.3f}s")
288 return 0
289
290 parser = argparse.ArgumentParser(
291 formatter_class=argparse.RawDescriptionHelpFormatter,
292 description='Hash Equivalence Client',
293 epilog=textwrap.dedent(
294 """
295 Possible ADDRESS options are:
296 unix://PATH Connect to UNIX domain socket at PATH
297 ws://HOST[:PORT] Connect to websocket at HOST:PORT (default port is 80)
298 wss://HOST[:PORT] Connect to secure websocket at HOST:PORT (default port is 443)
299 HOST:PORT Connect to TCP server at HOST:PORT
300 """
301 ),
302 )
303 parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
304 parser.add_argument('--log', default='WARNING', help='Set logging level')
305 parser.add_argument('--login', '-l', metavar="USERNAME", help="Authenticate as USERNAME")
306 parser.add_argument('--password', '-p', metavar="TOKEN", help="Authenticate using token TOKEN")
307 parser.add_argument('--become', '-b', metavar="USERNAME", help="Impersonate user USERNAME (if allowed) when performing actions")
308 parser.add_argument('--no-netrc', '-n', action="store_false", dest="netrc", help="Do not use .netrc")
309
310 subparsers = parser.add_subparsers()
311
312 get_parser = subparsers.add_parser('get', help="Get the unihash for a taskhash")
313 get_parser.add_argument("method", help="Method to query")
314 get_parser.add_argument("taskhash", help="Task hash to query")
315 get_parser.set_defaults(func=handle_get)
316
317 get_outhash_parser = subparsers.add_parser('get-outhash', help="Get output hash information")
318 get_outhash_parser.add_argument("method", help="Method to query")
319 get_outhash_parser.add_argument("outhash", help="Output hash to query")
320 get_outhash_parser.add_argument("taskhash", help="Task hash to query")
321 get_outhash_parser.set_defaults(func=handle_get_outhash)
322
323 stats_parser = subparsers.add_parser('stats', help='Show server stats')
324 stats_parser.add_argument('--reset', action='store_true',
325 help='Reset server stats')
326 stats_parser.set_defaults(func=handle_stats)
327
328 stress_parser = subparsers.add_parser('stress', help='Run stress test')
329 stress_parser.add_argument('--clients', type=int, default=10,
330 help='Number of simultaneous clients')
331 stress_parser.add_argument('--requests', type=int, default=1000,
332 help='Number of requests each client will perform')
333 stress_parser.add_argument('--report', action='store_true',
334 help='Report new hashes')
335 stress_parser.add_argument('--taskhash-seed', default='',
336 help='Include string in taskhash')
337 stress_parser.add_argument('--outhash-seed', default='',
338 help='Include string in outhash')
339 stress_parser.set_defaults(func=handle_stress)
340
341 remove_parser = subparsers.add_parser('remove', help="Remove hash entries")
342 remove_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
343 help="Remove entries from table where KEY == VALUE")
344 remove_parser.set_defaults(func=handle_remove)
345
346 clean_unused_parser = subparsers.add_parser('clean-unused', help="Remove unused database entries")
347 clean_unused_parser.add_argument("max_age", metavar="SECONDS", type=int, help="Remove unused entries older than SECONDS old")
348 clean_unused_parser.set_defaults(func=handle_clean_unused)
349
350 refresh_token_parser = subparsers.add_parser('refresh-token', help="Refresh auth token")
351 refresh_token_parser.add_argument("--username", "-u", help="Refresh the token for another user (if authorized)")
352 refresh_token_parser.set_defaults(func=handle_refresh_token)
353
354 set_user_perms_parser = subparsers.add_parser('set-user-perms', help="Set new permissions for user")
355 set_user_perms_parser.add_argument("--username", "-u", help="Username", required=True)
356 set_user_perms_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
357 set_user_perms_parser.set_defaults(func=handle_set_user_permissions)
358
359 get_user_parser = subparsers.add_parser('get-user', help="Get user")
360 get_user_parser.add_argument("--username", "-u", help="Username")
361 get_user_parser.set_defaults(func=handle_get_user)
362
363 get_all_users_parser = subparsers.add_parser('get-all-users', help="List all users")
364 get_all_users_parser.set_defaults(func=handle_get_all_users)
365
366 new_user_parser = subparsers.add_parser('new-user', help="Create new user")
367 new_user_parser.add_argument("--username", "-u", help="Username", required=True)
368 new_user_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
369 new_user_parser.set_defaults(func=handle_new_user)
370
371 delete_user_parser = subparsers.add_parser('delete-user', help="Delete user")
372 delete_user_parser.add_argument("--username", "-u", help="Username", required=True)
373 delete_user_parser.set_defaults(func=handle_delete_user)
374
375 db_usage_parser = subparsers.add_parser('get-db-usage', help="Database Usage")
376 db_usage_parser.set_defaults(func=handle_get_db_usage)
377
378 db_query_columns_parser = subparsers.add_parser('get-db-query-columns', help="Show columns that can be used in database queries")
379 db_query_columns_parser.set_defaults(func=handle_get_db_query_columns)
380
381 gc_status_parser = subparsers.add_parser("gc-status", help="Show garbage collection status")
382 gc_status_parser.set_defaults(func=handle_gc_status)
383
384 gc_mark_parser = subparsers.add_parser('gc-mark', help="Mark hashes to be kept for garbage collection")
385 gc_mark_parser.add_argument("mark", help="Mark for this garbage collection operation")
386 gc_mark_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
387 help="Keep entries in table where KEY == VALUE")
388 gc_mark_parser.set_defaults(func=handle_gc_mark)
389
390 gc_mark_parser_stream = subparsers.add_parser(
391 'gc-mark-stream',
392 help=(
393 "Mark multiple hashes to be retained for garbage collection. Input should be provided via stdin, "
394 "with each line formatted as key-value pairs separated by spaces, for example 'column1 foo column2 bar'."
395 )
396 )
397 gc_mark_parser_stream.add_argument("mark", help="Mark for this garbage collection operation")
398 gc_mark_parser_stream.set_defaults(func=handle_gc_mark_stream)
399
400 gc_sweep_parser = subparsers.add_parser('gc-sweep', help="Perform garbage collection and delete any entries that are not marked")
401 gc_sweep_parser.add_argument("mark", help="Mark for this garbage collection operation")
402 gc_sweep_parser.set_defaults(func=handle_gc_sweep)
403
404 unihash_exists_parser = subparsers.add_parser('unihash-exists', help="Check if a unihash is known to the server")
405 unihash_exists_parser.add_argument("--quiet", action="store_true", help="Don't print status. Instead, exit with 0 if unihash exists and 1 if it does not")
406 unihash_exists_parser.add_argument("unihash", help="Unihash to check")
407 unihash_exists_parser.set_defaults(func=handle_unihash_exists)
408
409 ping_parser = subparsers.add_parser('ping', help="Ping server")
410 ping_parser.add_argument("-n", "--count", type=int, help="Number of pings. Default is %(default)s", default=10)
411 ping_parser.add_argument("-q", "--quiet", action="store_true", help="Don't print each ping; only print results")
412 ping_parser.set_defaults(func=handle_ping)
413
414 args = parser.parse_args()
415
416 logger = logging.getLogger('hashserv')
417
418 level = getattr(logging, args.log.upper(), None)
419 if not isinstance(level, int):
420 raise ValueError('Invalid log level: %s' % args.log)
421
422 logger.setLevel(level)
423 console = logging.StreamHandler()
424 console.setLevel(level)
425 logger.addHandler(console)
426
427 login = args.login
428 password = args.password
429
430 if login is None and args.netrc:
431 try:
432 n = netrc.netrc()
433 auth = n.authenticators(args.address)
434 if auth is not None:
435 login, _, password = auth
436 except FileNotFoundError:
437 pass
438 except netrc.NetrcParseError as e:
439 sys.stderr.write(f"Error parsing {e.filename}:{e.lineno}: {e.msg}\n")
440
441 func = getattr(args, 'func', None)
442 if func:
443 try:
444 with hashserv.create_client(args.address, login, password) as client:
445 if args.become:
446 client.become_user(args.become)
447 return func(args, client)
448 except bb.asyncrpc.InvokeError as e:
449 print(f"ERROR: {e}")
450 return 1
451
452 return 0
453
454
455if __name__ == '__main__':
456 try:
457 ret = main()
458 except Exception:
459 ret = 1
460 import traceback
461 traceback.print_exc()
462 sys.exit(ret)
diff --git a/bitbake/bin/bitbake-hashserv b/bitbake/bin/bitbake-hashserv
deleted file mode 100755
index 01503736b9..0000000000
--- a/bitbake/bin/bitbake-hashserv
+++ /dev/null
@@ -1,179 +0,0 @@
1#! /usr/bin/env python3
2#
3# Copyright (C) 2018 Garmin Ltd.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import sys
10import logging
11import argparse
12import sqlite3
13import warnings
14
15warnings.simplefilter("default")
16
17sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
18
19import hashserv
20from hashserv.server import DEFAULT_ANON_PERMS
21
22VERSION = "1.0.0"
23
24DEFAULT_BIND = "unix://./hashserve.sock"
25
26
def main():
    """Parse command-line options and run the hash equivalence server.

    Configuration may come from options or from HASHSERVER_* environment
    variables. Blocks in serve_forever(); returns 0 only if the server
    loop exits. Raises ValueError for an unrecognized --log level.
    """
    parser = argparse.ArgumentParser(
        description="Hash Equivalence Reference Server. Version=%s" % VERSION,
        formatter_class=argparse.RawTextHelpFormatter,
        epilog="""
The bind address may take one of the following formats:
  unix://PATH - Bind to unix domain socket at PATH
  ws://ADDRESS:PORT - Bind to websocket on ADDRESS:PORT
  ADDRESS:PORT - Bind to raw TCP socket on ADDRESS:PORT

To bind to all addresses, leave the ADDRESS empty, e.g. "--bind :8686" or
"--bind ws://:8686". To bind to a specific IPv6 address, enclose the address in
"[]", e.g. "--bind [::1]:8686" or "--bind ws://[::1]:8686"

Note that the default Anonymous permissions are designed to not break existing
server instances when upgrading, but are not particularly secure defaults. If
you want to use authentication, it is recommended that you use "--anon-perms
@read" to only give anonymous users read access, or "--anon-perms @none" to
give un-authenticated users no access at all.

Setting "--anon-perms @all" or "--anon-perms @user-admin" is not allowed, since
this would allow anonymous users to manage all users accounts, which is a bad
idea.

If you are using user authentication, you should run your server in websockets
mode with an SSL terminating load balancer in front of it (as this server does
not implement SSL). Otherwise all usernames and passwords will be transmitted
in the clear. When configured this way, clients can connect using a secure
websocket, as in "wss://SERVER:PORT"

The following permissions are supported by the server:

  @none - No permissions
  @read - The ability to read equivalent hashes from the server
  @report - The ability to report equivalent hashes to the server
  @db-admin - Manage the hash database(s). This includes cleaning the
    database, removing hashes, etc.
  @user-admin - The ability to manage user accounts. This includes, creating
    users, deleting users, resetting login tokens, and assigning
    permissions.
  @all - All possible permissions, including any that may be added
    in the future
    """,
    )

    parser.add_argument(
        "-b",
        "--bind",
        default=os.environ.get("HASHSERVER_BIND", DEFAULT_BIND),
        help='Bind address (default $HASHSERVER_BIND, "%(default)s")',
    )
    parser.add_argument(
        "-d",
        "--database",
        default=os.environ.get("HASHSERVER_DB", "./hashserv.db"),
        help='Database file (default $HASHSERVER_DB, "%(default)s")',
    )
    parser.add_argument(
        "-l",
        "--log",
        default=os.environ.get("HASHSERVER_LOG_LEVEL", "WARNING"),
        help='Set logging level (default $HASHSERVER_LOG_LEVEL, "%(default)s")',
    )
    parser.add_argument(
        "-u",
        "--upstream",
        default=os.environ.get("HASHSERVER_UPSTREAM", None),
        help="Upstream hashserv to pull hashes from ($HASHSERVER_UPSTREAM)",
    )
    parser.add_argument(
        "-r",
        "--read-only",
        action="store_true",
        help="Disallow write operations from clients ($HASHSERVER_READ_ONLY)",
    )
    parser.add_argument(
        "--db-username",
        default=os.environ.get("HASHSERVER_DB_USERNAME", None),
        help="Database username ($HASHSERVER_DB_USERNAME)",
    )
    parser.add_argument(
        "--db-password",
        default=os.environ.get("HASHSERVER_DB_PASSWORD", None),
        help="Database password ($HASHSERVER_DB_PASSWORD)",
    )
    parser.add_argument(
        "--anon-perms",
        metavar="PERM[,PERM[,...]]",
        default=os.environ.get("HASHSERVER_ANON_PERMS", ",".join(DEFAULT_ANON_PERMS)),
        help='Permissions to give anonymous users (default $HASHSERVER_ANON_PERMS, "%(default)s")',
    )
    parser.add_argument(
        "--admin-user",
        default=os.environ.get("HASHSERVER_ADMIN_USER", None),
        help="Create default admin user with name ADMIN_USER ($HASHSERVER_ADMIN_USER)",
    )
    parser.add_argument(
        "--admin-password",
        default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None),
        help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)",
    )
    parser.add_argument(
        "--reuseport",
        action="store_true",
        help="Enable SO_REUSEPORT, allowing multiple servers to bind to the same port for load balancing",
    )

    args = parser.parse_args()

    logger = logging.getLogger("hashserv")

    # Translate the textual level (e.g. "WARNING") into the numeric constant.
    level = getattr(logging, args.log.upper(), None)
    if not isinstance(level, int):
        raise ValueError(
            "Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log
        )

    logger.setLevel(level)
    console = logging.StreamHandler()
    console.setLevel(level)
    logger.addHandler(console)

    # The env var can force read-only even when -r is not passed.
    read_only = (os.environ.get("HASHSERVER_READ_ONLY", "0") == "1") or args.read_only
    # Anonymous permissions may be comma- or whitespace-separated.
    if "," in args.anon_perms:
        anon_perms = args.anon_perms.split(",")
    else:
        anon_perms = args.anon_perms.split()

    server = hashserv.create_server(
        args.bind,
        args.database,
        upstream=args.upstream,
        read_only=read_only,
        db_username=args.db_username,
        db_password=args.db_password,
        anon_perms=anon_perms,
        admin_username=args.admin_user,
        admin_password=args.admin_password,
        reuseport=args.reuseport,
    )
    # Blocks until the server is shut down.
    server.serve_forever()
    return 0
169
170
if __name__ == "__main__":
    # Run the server; turn any uncaught exception into a non-zero exit
    # after printing its traceback.
    status = 1
    try:
        status = main()
    except Exception:
        import traceback

        traceback.print_exc()
    sys.exit(status)
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
deleted file mode 100755
index 341ecbcd97..0000000000
--- a/bitbake/bin/bitbake-layers
+++ /dev/null
@@ -1,106 +0,0 @@
1#!/usr/bin/env python3
2
3# This script has subcommands which operate against your bitbake layers, either
4# displaying useful information, or acting against them.
5# See the help output for details on available commands.
6
7# Copyright (C) 2011 Mentor Graphics Corporation
8# Copyright (C) 2011-2015 Intel Corporation
9#
10# SPDX-License-Identifier: GPL-2.0-only
11#
12
13import logging
14import os
15import sys
16import argparse
17import warnings
18warnings.simplefilter("default")
19
20bindir = os.path.dirname(__file__)
21toolname = os.path.basename(__file__).split(".")[0]
22topdir = os.path.dirname(bindir)
23sys.path[0:0] = [os.path.join(topdir, 'lib')]
24
25import bb.tinfoil
26import bb.msg
27
28logger = bb.msg.logger_create(toolname, sys.stdout)
29
def main():
    """Entry point for the bitbake-layers / bitbake-config-build utility.

    Parses the global options first, loads the plugins that provide the
    actual subcommands (from topdir and, unless -FF is given, from every
    BBPATH entry), then dispatches to the selected subcommand handler.
    Returns the handler's return value, used as the process exit status.
    """
    parser = argparse.ArgumentParser(
        description="BitBake layers utility",
        epilog="Use %(prog)s <subcommand> --help to get help on a specific command",
        add_help=False)
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    # Fix: user-facing help typo "withput" -> "without".
    parser.add_argument('-F', '--force', help='Forced execution: can be specified multiple times. -F will force add without recipe parse verification and -FF will additionally force the run without layer parsing.', action='count', default=0)
    parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')

    global_args, unparsed_args = parser.parse_known_args()

    # Help is added here rather than via add_help=True, as we don't want it to
    # be handled by parse_known_args()
    parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
                        help='show this help message and exit')
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    if global_args.debug:
        logger.setLevel(logging.DEBUG)
    elif global_args.quiet:
        logger.setLevel(logging.ERROR)

    # Need to re-run logger_create with color argument
    # (will be the same logger since it has the same name)
    bb.msg.logger_create('bitbake-layers', output=sys.stdout,
                         color=global_args.color,
                         level=logger.getEffectiveLevel())

    # Which plugin subdirectory to load depends only on the name this script
    # was invoked as; compute it once instead of per BBPATH entry.
    pluginbasepath = {"bitbake-layers": 'bblayers', 'bitbake-config-build': 'bbconfigbuild'}[toolname]

    plugins = []
    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())

        if global_args.force > 1:
            # -FF: skip layer parsing entirely; plugins are loaded from
            # topdir only.
            bbpaths = []
        else:
            tinfoil.prepare(True)
            bbpaths = tinfoil.config_data.getVar('BBPATH').split(':')

        for path in ([topdir] + bbpaths):
            pluginpath = os.path.join(path, 'lib', pluginbasepath)
            bb.utils.load_plugins(logger, plugins, pluginpath)

        registered = False
        for plugin in plugins:
            if hasattr(plugin, 'tinfoil_init') and global_args.force <= 1:
                plugin.tinfoil_init(tinfoil)
            if hasattr(plugin, 'register_commands'):
                registered = True
                plugin.register_commands(subparsers)

        if not registered:
            logger.error("No commands registered - missing plugins?")
            sys.exit(1)

        args = parser.parse_args(unparsed_args, namespace=global_args)

        if getattr(args, 'parserecipes', False):
            # Subcommands that need parsed recipes: disable variable history
            # tracking for the (expensive) parse, re-enable afterwards.
            tinfoil.config_data.disableTracking()
            tinfoil.parse_recipes()
            tinfoil.config_data.enableTracking()

        return args.func(args)
95
96
if __name__ == "__main__":
    exit_status = 1
    try:
        exit_status = main()
    except bb.BBHandledException:
        # Already reported through the logger; just exit non-zero.
        pass
    except Exception:
        import traceback
        traceback.print_exc()
    sys.exit(exit_status)
diff --git a/bitbake/bin/bitbake-prserv b/bitbake/bin/bitbake-prserv
deleted file mode 100755
index 3992e84eab..0000000000
--- a/bitbake/bin/bitbake-prserv
+++ /dev/null
@@ -1,113 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright BitBake Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import sys,logging
10import argparse
11import warnings
12warnings.simplefilter("default")
13
14sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
15
16import prserv
17import prserv.serv
18
19VERSION = "2.0.0"
20
21PRHOST_DEFAULT="0.0.0.0"
22PRPORT_DEFAULT=8585
23
def init_logger(logfile, loglevel):
    """Configure root logging to write to *logfile* at the named *loglevel*.

    Raises ValueError when *loglevel* is not a recognized logging level name.
    """
    level_value = getattr(logging, loglevel.upper(), None)
    if not isinstance(level_value, int):
        raise ValueError("Invalid log level: %s" % loglevel)
    logging.basicConfig(
        level=level_value,
        filename=logfile,
        format="%(asctime)-15s %(message)s",
    )
30
def main():
    """Parse command-line arguments and start/stop the BitBake PR server.

    Returns the start/stop status code, or None (exit status 0) after
    printing help when neither --start nor --stop was requested.
    """
    parser = argparse.ArgumentParser(
        description="BitBake PR Server. Version=%s" % VERSION,
        formatter_class=argparse.RawTextHelpFormatter)

    parser.add_argument(
        "-f",
        "--file",
        default="prserv.sqlite3",
        help="database filename (default: prserv.sqlite3)",
    )
    parser.add_argument(
        "-l",
        "--log",
        # Fix: missing space in help text ("filename(default" -> "filename (default").
        default="prserv.log",
        help="log filename (default: prserv.log)",
    )
    parser.add_argument(
        "--loglevel",
        default="INFO",
        help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
    )
    parser.add_argument(
        "--start",
        action="store_true",
        help="start daemon",
    )
    parser.add_argument(
        "--stop",
        action="store_true",
        help="stop daemon",
    )
    parser.add_argument(
        "--host",
        help="ip address to bind",
        default=PRHOST_DEFAULT,
    )
    parser.add_argument(
        "--port",
        type=int,
        default=PRPORT_DEFAULT,
        help="port number (default: 8585)",
    )
    parser.add_argument(
        "-r",
        "--read-only",
        action="store_true",
        help="open database in read-only mode",
    )
    parser.add_argument(
        "-u",
        "--upstream",
        default=os.environ.get("PRSERV_UPSTREAM", None),
        help="Upstream PR service (host:port)",
    )

    args = parser.parse_args()
    # Use an absolute log path so a daemon chdir cannot relocate the log file.
    init_logger(os.path.abspath(args.log), args.loglevel)

    if args.start:
        ret = prserv.serv.start_daemon(
            args.file,
            args.host,
            args.port,
            os.path.abspath(args.log),
            args.read_only,
            args.upstream
        )
    elif args.stop:
        ret = prserv.serv.stop_daemon(args.host, args.port)
    else:
        # No action requested: show usage. print_help() returns None, which
        # sys.exit() in the __main__ guard treats as success.
        ret = parser.print_help()
    return ret
104
if __name__ == "__main__":
    # Catch-all boundary: print the traceback and exit with status 1 on
    # any unexpected error.
    rc = 1
    try:
        rc = main()
    except Exception:
        import traceback
        traceback.print_exc()
    sys.exit(rc)
113
diff --git a/bitbake/bin/bitbake-selftest b/bitbake/bin/bitbake-selftest
deleted file mode 100755
index fb7c57dd83..0000000000
--- a/bitbake/bin/bitbake-selftest
+++ /dev/null
@@ -1,80 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright (C) 2012 Richard Purdie
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import sys, logging
10import warnings
11warnings.simplefilter("default")
12sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
13
14import unittest
15try:
16 import bb
17 import hashserv
18 import prserv
19 import layerindexlib
20except RuntimeError as exc:
21 sys.exit(str(exc))
22
# The complete set of self-test suites; also used as unittest's defaultTest.
tests = [
    "bb.tests.codeparser",
    "bb.tests.color",
    "bb.tests.cooker",
    "bb.tests.cow",
    "bb.tests.data",
    "bb.tests.event",
    "bb.tests.fetch",
    "bb.tests.parse",
    "bb.tests.runqueue",
    "bb.tests.setup",
    "bb.tests.siggen",
    "bb.tests.utils",
    "bb.tests.compression",
    "bb.tests.filter",
    "hashserv.tests",
    "prserv.tests",
    "layerindexlib.tests.layerindexobj",
    "layerindexlib.tests.restapi",
    "layerindexlib.tests.cooker",
]

# Import each suite's hosting module up front so failures surface early
# (only the first three dotted components name a module).
for suite in tests:
    __import__(".".join(suite.split(".")[:3]))
46
47
48# Set-up logging
class StdoutStreamHandler(logging.StreamHandler):
    """StreamHandler that always writes to the *current* sys.stdout.

    unittest's buffering swaps sys.stdout per test; resolving the stream
    lazily lets captured log output land in the active buffer.
    """

    def __init__(self):
        # Deliberately bypass StreamHandler.__init__ so no stream object is
        # bound at construction time.
        logging.Handler.__init__(self)

    @property
    def stream(self):
        # Re-resolved on every access rather than cached.
        return sys.stdout
59
60
# Route BitBake's logger through the stdout-redirecting handler (so unittest
# output buffering captures it) and let every level through.
handler = StdoutStreamHandler()
bb.logger.addHandler(handler)
bb.logger.setLevel(logging.DEBUG)
64
65
66ENV_HELP = """\
67Environment variables:
68 BB_SKIP_NETTESTS set to 'yes' in order to skip tests using network
69 connection
70 BB_TMPDIR_NOCLEAN set to 'yes' to preserve test tmp directories
71"""
72
class main(unittest.main):
    """unittest.main variant that appends BitBake env-var notes to --help."""

    def _print_help(self, *args, **kwargs):
        # Standard unittest help first, then our environment-variable notes.
        super()._print_help(*args, **kwargs)
        print(ENV_HELP)
77
78
if __name__ == '__main__':
    # buffer=True makes unittest capture stdout/stderr per test (paired with
    # StdoutStreamHandler above so log output is captured too).
    main(defaultTest=tests, buffer=True)
diff --git a/bitbake/bin/bitbake-server b/bitbake/bin/bitbake-server
deleted file mode 100755
index 01f83d982f..0000000000
--- a/bitbake/bin/bitbake-server
+++ /dev/null
@@ -1,56 +0,0 @@
1#!/usr/bin/env python3
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5# Copyright (C) 2020 Richard Purdie
6#
7
import os
import sys
import warnings
warnings.simplefilter("default")
# The server is forked deliberately by bitbake; suppress only the
# fork-related DeprecationWarning noise.
warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*use.of.fork.*may.lead.to.deadlocks.in.the.child.*")
import logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))

import bb

bb.utils.check_system_locale()

# Users shouldn't be running this code directly
if len(sys.argv) != 11 or not sys.argv[1].startswith("decafbad"):
    print("bitbake-server is meant for internal execution by bitbake itself, please don't use it standalone.")
    sys.exit(1)

import bb.server.process

# Positional arguments handed over by the spawning bitbake process:
# argv[1] magic token, argv[2] lock fd, argv[3] ready-pipe fd, argv[4] logfile,
# argv[5] lock name, argv[6] socket name, argv[7] timeout, argv[8] profile,
# argv[9:11] xmlrpc interface (host, port).
lockfd = int(sys.argv[2])
readypipeinfd = int(sys.argv[3])
logfile = sys.argv[4]
lockname = sys.argv[5]
sockname = sys.argv[6]
timeout = float(sys.argv[7])
profile = sys.argv[8]
xmlrpcinterface = (sys.argv[9], int(sys.argv[10]))
if xmlrpcinterface[0] == "None":
    # The literal string "None" means "no specific interface".
    xmlrpcinterface = (None, xmlrpcinterface[1])

# Replace standard fds with our own
with open('/dev/null', 'r') as si:
    os.dup2(si.fileno(), sys.stdin.fileno())

with open(logfile, 'a+') as so:
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(so.fileno(), sys.stderr.fileno())

# Have stdout and stderr be the same so log output matches chronologically
# and there aren't two separate buffers
sys.stderr = sys.stdout

logger = logging.getLogger("BitBake")
# Ensure logging messages get sent to the UI as events
handler = bb.event.LogHandler()
logger.addHandler(handler)

# Runs the server main loop; does not return until shutdown.
bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface, profile)
56
diff --git a/bitbake/bin/bitbake-setup b/bitbake/bin/bitbake-setup
deleted file mode 100755
index a0426005ff..0000000000
--- a/bitbake/bin/bitbake-setup
+++ /dev/null
@@ -1,860 +0,0 @@
1#!/usr/bin/env python3
2
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import logging
8import os
9import sys
10import argparse
11import warnings
12import json
13import shutil
14import time
15import stat
16import tempfile
17import configparser
18import datetime
19import glob
20import subprocess
21
22default_registry = os.path.normpath(os.path.dirname(__file__) + "/../default-registry")
23
24bindir = os.path.abspath(os.path.dirname(__file__))
25sys.path[0:0] = [os.path.join(os.path.dirname(bindir), 'lib')]
26
27import bb.msg
28import bb.process
29
30logger = bb.msg.logger_create('bitbake-setup', sys.stdout)
31
def cache_dir(top_dir):
    """Return the bitbake-setup cache directory path under *top_dir*."""
    cache_path = os.path.join(top_dir, '.bitbake-setup-cache')
    return cache_path
34
def init_bb_cache(top_dir, settings, args):
    """Build a bb datastore configured for fetching with caching enabled."""
    datastore = bb.data.init()
    datastore.setVar("DL_DIR", settings["default"]["dl-dir"])
    datastore.setVar("BB_CACHEDIR", os.path.join(cache_dir(top_dir), 'bitbake-cache'))
    datastore.setVar("__BBSRCREV_SEEN", "1")
    if args.no_network:
        # Offline mode: resolve source revisions from the cache only.
        datastore.setVar("BB_SRCREV_POLICY", "cache")
    bb.fetch.fetcher_init(datastore)
    return datastore
47
def save_bb_cache():
    # Persist the fetcher's parse cache to disk, then release it.
    bb.fetch2.fetcher_parse_save()
    bb.fetch2.fetcher_parse_done()
51
def get_config_name(config):
    """Derive a configuration name from a '.conf.json' file path.

    Raises Exception when the filename does not carry the expected suffix.
    """
    suffix = '.conf.json'
    basename = os.path.basename(config)
    if not basename.endswith(suffix):
        raise Exception("Config file {} does not end with {}, please rename the file.".format(config, suffix))
    return basename[:-len(suffix)]
59
def write_config(config, config_dir):
    """Serialize *config* as pretty-printed JSON into config-upstream.json."""
    target = os.path.join(config_dir, "config-upstream.json")
    with open(target, 'w') as out:
        json.dump(config, out, sort_keys=True, indent=4)
63
def commit_config(config_dir):
    # Snapshot the configuration directory into its git history so later
    # runs can show what changed between setups.
    bb.process.run("git -C {} add .".format(config_dir))
    bb.process.run("git -C {} commit --no-verify -a -m 'Configuration at {}'".format(config_dir, time.asctime()))
67
68def _write_layer_list(dest, repodirs):
69 layers = []
70 for r in repodirs:
71 for root, dirs, files in os.walk(os.path.join(dest,r)):
72 if os.path.basename(root) == 'conf' and 'layer.conf' in files:
73 layers.append(os.path.relpath(os.path.dirname(root), dest))
74 layers_f = os.path.join(dest, ".oe-layers.json")
75 with open(layers_f, 'w') as f:
76 json.dump({"version":"1.0","layers":layers}, f, sort_keys=True, indent=4)
77
def checkout_layers(layers, layerdir, d):
    """Fetch every configured layer/tool repository into *layerdir*.

    *layers* maps a source name to {"path": ..., "git-remote": {...}};
    *d* is a datastore prepared by init_bb_cache(). After fetching, writes
    the .oe-layers.json index and, if a repo provides scripts/oe-setup-build,
    creates convenience symlinks to it in *layerdir*.
    """
    repodirs = []
    oesetupbuild = None
    print("Fetching layer/tool repositories into {}".format(layerdir))
    for r_name in layers:
        r_data = layers[r_name]
        repodir = r_data["path"]
        repodirs.append(repodir)

        r_remote = r_data['git-remote']
        rev = r_remote['rev']
        branch = r_remote.get('branch', None)
        remotes = r_remote['remotes']

        for remote in remotes:
            # Re-encode the remote URI as a git fetch URL, preserving the
            # original scheme as the 'protocol' parameter.
            type,host,path,user,pswd,params = bb.fetch.decodeurl(remotes[remote]["uri"])
            fetchuri = bb.fetch.encodeurl(('git',host,path,user,pswd,params))
            print(" {}".format(r_name))
            # nobranch=1 is needed when no branch is known for the revision.
            if branch:
                fetcher = bb.fetch.Fetch(["{};protocol={};rev={};branch={};destsuffix={}".format(fetchuri,type,rev,branch,repodir)], d)
            else:
                fetcher = bb.fetch.Fetch(["{};protocol={};rev={};nobranch=1;destsuffix={}".format(fetchuri,type,rev,repodir)], d)
            # do_fetch is defined elsewhere in this script.
            do_fetch(fetcher, layerdir)

        # Remember the last repo that ships oe-setup-build; its directory is
        # also where oe-init-build-env is expected to live.
        if os.path.exists(os.path.join(layerdir, repodir, 'scripts/oe-setup-build')):
            oesetupbuild = os.path.join(layerdir, repodir, 'scripts/oe-setup-build')
            oeinitbuildenvdir = os.path.join(layerdir, repodir)

    print(" ")
    _write_layer_list(layerdir, repodirs)

    if oesetupbuild:
        # Refresh the convenience symlinks (remove stale ones first).
        links = {'setup-build': oesetupbuild, 'oe-scripts': os.path.dirname(oesetupbuild), 'oe-init-build-env-dir': oeinitbuildenvdir}
        for l,t in links.items():
            symlink = os.path.join(layerdir, l)
            if os.path.lexists(symlink):
                os.remove(symlink)
            os.symlink(os.path.relpath(t,layerdir),symlink)
116
def setup_bitbake_build(bitbake_config, layerdir, builddir, thisdir):
    """Create or refresh the bitbake build directory for *bitbake_config*.

    Writes conf files from a 'bb-layers' list and/or runs the OE template
    setup ('oe-template'), generates an init-build-env wrapper when needed,
    links ../site.conf into the conf directory, enables any configured
    fragments, and reports what changed relative to a previous setup.

    Fix: replacing an existing site.conf symlink previously called
    os.remove(symlink) with an undefined name, raising NameError on every
    re-setup; it now removes siteconf_symlink.
    """
    def _setup_build_conf(layers, build_conf_dir):
        # Write bblayers.conf/local.conf/etc for an explicit layer list.
        os.makedirs(build_conf_dir)
        layers_s = []
        for l in layers:
            if l.startswith("{THISDIR}/"):
                if thisdir:
                    l = l.format(THISDIR=thisdir)
                else:
                    raise Exception("Configuration is using {THISDIR} to specify " \
                        "a layer path relative to itself. This can be done only " \
                        "when the configuration is specified by its path on local " \
                        "disk, not when it's in a registry or is fetched over http.")
            if not os.path.isabs(l):
                l = os.path.join(layerdir, l)
            layers_s.append(" {} \\".format(l))
        layers_s = "\n".join(layers_s)
        bblayers_conf = """BBLAYERS ?= " \\
{}
 "
""".format(layers_s)
        with open(os.path.join(build_conf_dir, "bblayers.conf"), 'w') as f:
            f.write(bblayers_conf)

        local_conf = """#
# This file is intended for local configuration tweaks.
#
# If you would like to publish and share changes made to this file,
# it is recommended to put them into a distro config, or to create
# layer fragments from changes made here.
#
"""
        with open(os.path.join(build_conf_dir, "local.conf"), 'w') as f:
            f.write(local_conf)

        with open(os.path.join(build_conf_dir, "templateconf.cfg"), 'w') as f:
            f.write("")

        with open(os.path.join(build_conf_dir, "conf-summary.txt"), 'w') as f:
            f.write(bitbake_config["description"] + "\n")

        with open(os.path.join(build_conf_dir, "conf-notes.txt"), 'w') as f:
            f.write("")

    def _make_init_build_env(builddir, oeinitbuildenvdir):
        # Generate a wrapper that sources oe-init-build-env for this build dir.
        builddir = os.path.realpath(builddir)
        cmd = "cd {}\nset {}\n. ./oe-init-build-env\n".format(oeinitbuildenvdir, builddir)
        initbuild_in_builddir = os.path.join(builddir, 'init-build-env')

        with open(initbuild_in_builddir, 'w') as f:
            f.write("# init-build-env wrapper created by bitbake-setup\n")
            f.write(cmd + '\n')

    def _prepend_passthrough_to_init_build_env(builddir):
        # Prepend extra BB_ENV_PASSTHROUGH_ADDITIONS exports, if configured.
        env = bitbake_config.get("bb-env-passthrough-additions")
        if not env:
            return

        initbuild_in_builddir = os.path.join(builddir, 'init-build-env')
        with open(initbuild_in_builddir) as f:
            content = f.read()

        joined = " \\\n".join(env)
        env = "export BB_ENV_PASSTHROUGH_ADDITIONS=\" \\\n"
        env += "${BB_ENV_PASSTHROUGH_ADDITIONS} \\\n"
        env += joined
        env += '"'

        with open(initbuild_in_builddir, 'w') as f:
            f.write("# environment passthrough added by bitbake-setup\n")
            f.write(env + '\n')
            f.write('\n')
            f.write(content)

    bitbake_builddir = os.path.join(builddir, "build")
    print("Setting up bitbake configuration in\n {}\n".format(bitbake_builddir))

    template = bitbake_config.get("oe-template")
    layers = bitbake_config.get("bb-layers")
    if not template and not layers:
        print("Bitbake configuration does not contain a reference to an OpenEmbedded build template via 'oe-template' or a list of layers via 'bb-layers'; please use oe-setup-build, oe-init-build-env or another mechanism manually to complete the setup.")
        return
    oesetupbuild = os.path.join(layerdir, 'setup-build')
    if template and not os.path.exists(oesetupbuild):
        raise Exception("Cannot complete setting up a bitbake build directory from OpenEmbedded template '{}' as oe-setup-build was not found in any layers; please use oe-init-build-env manually.".format(template))

    # Keep the previous conf dir around so we can diff/report changes below.
    bitbake_confdir = os.path.join(bitbake_builddir, 'conf')
    backup_bitbake_confdir = bitbake_confdir + "-backup.{}".format(time.strftime("%Y%m%d%H%M%S"))
    if os.path.exists(bitbake_confdir):
        os.rename(bitbake_confdir, backup_bitbake_confdir)

    if layers:
        _setup_build_conf(layers, bitbake_confdir)

    if template:
        bb.process.run("{} setup -c {} -b {} --no-shell".format(oesetupbuild, template, bitbake_builddir))
    else:
        oeinitbuildenvdir = os.path.join(layerdir, 'oe-init-build-env-dir')
        if not os.path.exists(os.path.join(oeinitbuildenvdir, "oe-init-build-env")):
            print("Could not find oe-init-build-env in any of the layers; please use another mechanism to initialize the bitbake environment")
            return
        _make_init_build_env(bitbake_builddir, os.path.realpath(oeinitbuildenvdir))

    _prepend_passthrough_to_init_build_env(bitbake_builddir)

    # (Re)create the site.conf symlink pointing one level above builddir.
    siteconf_symlink = os.path.join(bitbake_confdir, "site.conf")
    siteconf = os.path.normpath(os.path.join(builddir, '..', "site.conf"))
    if os.path.lexists(siteconf_symlink):
        # Bug fix: was os.remove(symlink) — an undefined name here.
        os.remove(siteconf_symlink)
    os.symlink(os.path.relpath(siteconf, bitbake_confdir), siteconf_symlink)

    init_script = os.path.join(bitbake_builddir, "init-build-env")
    shell = "bash"
    fragments = bitbake_config.get("oe-fragments", []) + sorted(bitbake_config.get("oe-fragment-choices", {}).values())
    if fragments:
        bb.process.run("{} -c '. {} && bitbake-config-build enable-fragment {}'".format(shell, init_script, " ".join(fragments)))

    if os.path.exists(backup_bitbake_confdir):
        # get_diff is defined elsewhere in this script.
        bitbake_config_diff = get_diff(backup_bitbake_confdir, bitbake_confdir)
        if bitbake_config_diff:
            print("Existing bitbake configuration directory renamed to {}".format(backup_bitbake_confdir))
            print("The bitbake configuration has changed:")
            print(bitbake_config_diff)
        else:
            shutil.rmtree(backup_bitbake_confdir)

    print("This bitbake configuration provides:\n {}\n".format(bitbake_config["description"]))

    readme = """{}\n\nAdditional information is in {} and {}\n
Source the environment using '. {}' to run builds from the command line.
The bitbake configuration files (local.conf, bblayers.conf and more) can be found in {}/conf
""".format(
        bitbake_config["description"],
        os.path.join(bitbake_builddir,'conf/conf-summary.txt'),
        os.path.join(bitbake_builddir,'conf/conf-notes.txt'),
        init_script,
        bitbake_builddir
    )
    readme_file = os.path.join(bitbake_builddir, "README")
    with open(readme_file, 'w') as f:
        f.write(readme)
    print("Usage instructions and additional information are in\n {}\n".format(readme_file))
    print("The bitbake configuration files (local.conf, bblayers.conf and more) can be found in\n {}/conf\n".format(bitbake_builddir))
    print("To run builds, source the environment using\n . {}".format(init_script))
262
def get_registry_config(registry_path, id):
    """Locate the .conf.json file for configuration *id* under *registry_path*."""
    for root, _dirs, files in os.walk(registry_path):
        for candidate in files:
            if candidate.endswith('.conf.json') and get_config_name(candidate) == id:
                return os.path.join(root, candidate)
    raise Exception("Unable to find {} in available configurations; use 'list' sub-command to see what is available".format(id))
269
def update_build(config, confdir, builddir, layerdir, d):
    """Fetch the configured layers and (re)generate the bitbake build dir."""
    sources = config["data"]["sources"]
    overrides = config["source-overrides"]["sources"]
    # Apply per-source git-remote overrides before fetching.
    for name, override in overrides.items():
        if name in sources:
            sources[name]["git-remote"] = override["git-remote"]
    checkout_layers(sources, layerdir, d)
    # {THISDIR} expansion is only meaningful for configs read from disk.
    if config["type"] == 'local':
        thisdir = os.path.dirname(config["path"])
    else:
        thisdir = None
    setup_bitbake_build(config["bitbake-config"], layerdir, builddir, thisdir)
280
def int_input(allowed_values):
    """Prompt repeatedly until the user enters an integer in *allowed_values*."""
    while True:
        try:
            choice = int(input())
        except ValueError:
            print('Not a valid number, please try again:')
            continue
        if choice in allowed_values:
            return choice
        print('Number {} not one of {}, please try again:'.format(choice, allowed_values))
293
def flatten_bitbake_configs(configs):
    """Expand nested 'configurations' entries into a flat list of configs.

    A parent's fields combine with each (recursively flattened) child's:
    keys present on only one side are copied, keys present on both are
    concatenated with the parent value first. The 'configurations' key
    itself is dropped from the result.
    """
    def _combine(parent, child):
        combined = {}
        for key, value in child.items():
            if key not in parent:
                combined[key] = value
        for key, value in parent.items():
            combined[key] = value if key not in child else value + child[key]
        del combined['configurations']
        return combined

    flat = []
    for entry in configs:
        if 'configurations' in entry:
            for sub in flatten_bitbake_configs(entry['configurations']):
                flat.append(_combine(entry, sub))
        else:
            flat.append(entry)
    return flat
316
def choose_bitbake_config(configs, parameters, non_interactive):
    """Select one flattened bitbake configuration.

    Selection order: explicit name in parameters[0], else the only available
    config, else an interactive numbered prompt. Raises when the named
    config is unknown or a choice is needed in non-interactive mode.
    """
    flattened_configs = flatten_bitbake_configs(configs)
    configs_dict = {i["name"]:i for i in flattened_configs}

    if parameters:
        # First parameter names the configuration directly.
        config_id = parameters[0]
        if config_id not in configs_dict:
            raise Exception("Bitbake configuration {} not found; replace with one of {}".format(config_id, configs_dict))
        return configs_dict[config_id]

    enumerated_configs = list(enumerate(flattened_configs))
    if len(enumerated_configs) == 1:
        only_config = flattened_configs[0]
        print("\nSelecting the only available bitbake configuration {}".format(only_config["name"]))
        return only_config

    if non_interactive:
        raise Exception("Unable to choose from bitbake configurations in non-interactive mode: {}".format(configs_dict))

    print("\nAvailable bitbake configurations:")
    for n, config_data in enumerated_configs:
        print("{}. {}\t{}".format(n, config_data["name"], config_data["description"]))
    print("\nPlease select one of the above bitbake configurations by its number:")
    config_n = int_input([i[0] for i in enumerated_configs])
    return flattened_configs[config_n]
342
def choose_config(configs, non_interactive):
    """Pick one top-level configuration name from *configs*.

    Expired entries (per has_expired, defined elsewhere in this script) are
    excluded. Returns the only remaining name automatically, otherwise
    prompts interactively; raises in non-interactive mode when a choice
    would be required.
    """
    not_expired_configs = [k for k in configs.keys() if not has_expired(configs[k].get("expires", None))]
    config_list = list(enumerate(not_expired_configs))
    if len(config_list) == 1:
        only_config = config_list[0][1]
        print("\nSelecting the only available configuration {}\n".format(only_config))
        return only_config

    if non_interactive:
        raise Exception("Unable to choose from configurations in non-interactive mode: {}".format(not_expired_configs))

    print("\nAvailable configurations:")
    for n, config_name in config_list:
        config_data = configs[config_name]
        expiry_date = config_data.get("expires", None)
        config_desc = config_data["description"]
        # Show the support window when the registry supplies one.
        if expiry_date:
            print("{}. {}\t{} (supported until {})".format(n, config_name, config_desc, expiry_date))
        else:
            print("{}. {}\t{}".format(n, config_name, config_desc))
    print("\nPlease select one of the above configurations by its number:")
    config_n = int_input([i[0] for i in config_list])
    return config_list[config_n][1]
366
def choose_fragments(possibilities, parameters, non_interactive, skip_selection):
    """Resolve each fragment-choice entry to a single selected option.

    *possibilities* maps a choice key to {"description": ..., "options": [...]};
    *parameters* are options pre-selected on the command line; keys listed in
    *skip_selection* are left unset. Returns {key: chosen_option}. Raises
    when the command line selects more than one option for a key, or when
    a choice would be required in non-interactive mode.
    """
    choices = {}
    for k,v in possibilities.items():
        if skip_selection and k in skip_selection:
            print("Skipping a selection of {}, as requested on command line. The resulting bitbake configuration may require further manual adjustments.".format(k))
            continue
        # Options for this key that the command line already selects.
        choice = [o for o in v["options"] if o in parameters]
        if len(choice) > 1:
            raise Exception("Options specified on command line do not allow a single selection from possibilities {}, please remove one or more from {}".format(v["options"], parameters))
        if len(choice) == 1:
            choices[k] = choice[0]
            continue

        if non_interactive:
            raise Exception("Unable to choose from options in non-interactive mode: {}".format(v["options"]))

        print("\n" + v["description"] + ":")
        options_enumerated = list(enumerate(v["options"]))
        for n,o in options_enumerated:
            print("{}. {}".format(n, o))
        print("\nPlease select one of the above options by its number:")
        option_n = int_input([i[0] for i in options_enumerated])
        choices[k] = options_enumerated[option_n][1]
    return choices
391
def obtain_config(top_dir, settings, args, source_overrides, d):
    """Acquire the upstream configuration selected by args.config.

    The configuration id may be a local file path, an http(s) URI, or a
    registry identifier; with no id at all, the user is asked to pick one
    from the registry. The returned dict carries the raw config data plus
    the resolved bitbake-config, fragment choices, and the exact command
    line options that would reproduce this selection non-interactively.

    d is a bb datastore used by the registry fetcher.
    """
    if args.config:
        config_id = args.config[0]
        # Remaining positional args narrow down bitbake-config/fragment choices.
        config_parameters = args.config[1:]
        if os.path.exists(config_id):
            print("Reading configuration from local file\n {}".format(config_id))
            # NOTE(review): open() without a context manager leaks the handle
            # until GC; harmless for a short-lived tool but worth tidying.
            upstream_config = {'type':'local',
                               'path':os.path.abspath(config_id),
                               'name':get_config_name(config_id),
                               'data':json.load(open(config_id))
                               }
        elif config_id.startswith("http://") or config_id.startswith("https://"):
            print("Reading configuration from network URI\n {}".format(config_id))
            import urllib.request
            with urllib.request.urlopen(config_id) as f:
                upstream_config = {'type':'network','uri':config_id,'name':get_config_name(config_id),'data':json.load(f)}
        else:
            print("Looking up config {} in configuration registry".format(config_id))
            registry_path = update_registry(settings["default"]["registry"], cache_dir(top_dir), d)
            # Include expired configs so an explicit id still resolves; the
            # expiry warning below tells the user it is unsupported.
            registry_configs = list_registry(registry_path, with_expired=True)
            if config_id not in registry_configs:
                raise Exception("Config {} not found in configuration registry, re-run 'init' without parameters to choose from available configurations.".format(config_id))
            upstream_config = {'type':'registry','registry':settings["default"]["registry"],'name':config_id,'data':json.load(open(get_registry_config(registry_path,config_id)))}
            expiry_date = upstream_config['data'].get("expires", None)
            if has_expired(expiry_date):
                print("This configuration is no longer supported after {}. Please consider changing to a supported configuration.".format(expiry_date))
    else:
        # No config on the command line: let the user choose interactively.
        registry_path = update_registry(settings["default"]["registry"], cache_dir(top_dir), d)
        registry_configs = list_registry(registry_path, with_expired=True)
        config_id = choose_config(registry_configs, args.non_interactive)
        config_parameters = []
        upstream_config = {'type':'registry','registry':settings["default"]["registry"],'name':config_id,'data':json.load(open(get_registry_config(registry_path,config_id)))}

    # Resolve the concrete bitbake configuration and its one-of fragments.
    upstream_config['bitbake-config'] = choose_bitbake_config(upstream_config['data']['bitbake-setup']['configurations'], config_parameters, args.non_interactive)
    upstream_config['bitbake-config']['oe-fragment-choices'] = choose_fragments(upstream_config['bitbake-config'].get('oe-fragments-one-of',{}), config_parameters[1:], args.non_interactive, args.skip_selection)
    # Record everything needed to replay this exact selection from the command line.
    upstream_config['non-interactive-cmdline-options'] = [config_id, upstream_config['bitbake-config']['name']] + sorted(upstream_config['bitbake-config']['oe-fragment-choices'].values())
    upstream_config['source-overrides'] = source_overrides
    upstream_config['skip-selection'] = args.skip_selection
    return upstream_config
431
def init_config(top_dir, settings, args, d):
    """Implement the 'init' subcommand: pick a configuration and create a build.

    Creates the top-level site.conf, resolves the configuration (possibly
    interactively), then sets up <top_dir>/<build-name>/{config,layers},
    puts the config directory under git, and fetches/updates the layers.
    d is a bb datastore passed through to the fetcher machinery.
    """
    stdout = sys.stdout
    def handle_task_progress(event, d):
        # Render fetch progress events on a single console line.
        rate = event.rate if event.rate else ''
        progress = event.progress if event.progress > 0 else 0
        print("{}% {} ".format(progress, rate), file=stdout, end='\r')

    create_siteconf(top_dir, args.non_interactive)
    source_overrides = json.load(open(args.source_overrides)) if args.source_overrides else {'sources':{}}
    upstream_config = obtain_config(top_dir, settings, args, source_overrides, d)
    print("\nRun 'bitbake-setup init --non-interactive {}' to select this configuration non-interactively.\n".format(" ".join(upstream_config['non-interactive-cmdline-options'])))

    # Default build dir name encodes the config id plus all selections so
    # distinct selections land in distinct directories.
    builddir = os.path.join(os.path.abspath(top_dir), args.build_dir_name or "{}-{}".format(upstream_config['name']," ".join(upstream_config['non-interactive-cmdline-options'][1:]).replace(" ","-").replace("/","_")))
    if os.path.exists(os.path.join(builddir, "layers")):
        print(f"Build already initialized in:\n {builddir}\nUse 'bitbake-setup status' to check if it needs to be updated, or 'bitbake-setup update' to perform the update.\nIf you would like to start over and re-initialize a build in this directory, remove it, and run 'bitbake-setup init' again.")
        return

    print("Initializing a build in\n {}".format(builddir))
    if not args.non_interactive:
        y_or_n = input('Continue? (y/N): ')
        if y_or_n != 'y':
            exit()
        print()

    os.makedirs(builddir, exist_ok=True)

    confdir = os.path.join(builddir, "config")
    layerdir = os.path.join(builddir, "layers")

    # Not exist_ok: a pre-existing config/layers dir means a broken previous init.
    os.makedirs(confdir)
    os.makedirs(layerdir)

    # Track configuration changes in a local git repo so 'status'/'update'
    # can diff against the recorded state.
    bb.process.run("git -C {} init -b main".format(confdir))
    # Make sure committing doesn't fail if no default git user is configured on the machine
    bb.process.run("git -C {} config user.name bitbake-setup".format(confdir))
    bb.process.run("git -C {} config user.email bitbake-setup@not.set".format(confdir))
    bb.process.run("git -C {} commit --no-verify --allow-empty -m 'Initial commit'".format(confdir))

    bb.event.register("bb.build.TaskProgress", handle_task_progress, data=d)

    write_config(upstream_config, confdir)
    commit_config(confdir)
    update_build(upstream_config, confdir, builddir, layerdir, d)

    bb.event.remove("bb.build.TaskProgress", None)
477
def get_diff(file1, file2):
    """Return the recursive unified diff between two paths, or None if identical.

    Relies on diff(1) exit codes: 0 means no differences (return None),
    1 means differences were found (return the captured stdout), anything
    else is a genuine failure and is re-raised.
    """
    try:
        bb.process.run('diff -uNr {} {}'.format(file1, file2))
    except bb.process.ExecutionError as err:
        # Exit code 1 is diff's "files differ", not an error.
        if err.exitcode != 1:
            raise err
        return err.stdout
    return None
487
def are_layers_changed(layers, layerdir, d):
    """Check whether any layer repository has moved upstream.

    layers: mapping of layer name -> source description containing a "path"
    (checkout subdirectory under layerdir) and "git-remote" data
    (rev, optional branch, and remotes with their URIs).
    Returns True if any remote's latest revision differs from the local
    checkout's HEAD; prints a line per changed repository.
    d is a bb datastore used by the fetcher.
    """
    changed = False
    for r_name in layers:
        r_data = layers[r_name]
        repodir = r_data["path"]

        r_remote = r_data['git-remote']
        rev = r_remote['rev']
        branch = r_remote.get('branch', None)
        remotes = r_remote['remotes']

        for remote in remotes:
            # Rebuild the URI with an explicit git scheme, preserving the
            # original scheme as the protocol= parameter.
            type,host,path,user,pswd,params = bb.fetch.decodeurl(remotes[remote]["uri"])
            fetchuri = bb.fetch.encodeurl(('git',host,path,user,pswd,params))
            if branch:
                fetcher = bb.fetch.FetchData("{};protocol={};rev={};branch={};destsuffix={}".format(fetchuri,type,rev,branch,repodir), d)
            else:
                fetcher = bb.fetch.FetchData("{};protocol={};rev={};nobranch=1;destsuffix={}".format(fetchuri,type,rev,repodir), d)
            upstream_revision = fetcher.method.latest_revision(fetcher, d, 'default')
            rev_parse_result = bb.process.run('git -C {} rev-parse HEAD'.format(os.path.join(layerdir, repodir)))
            local_revision = rev_parse_result[0].strip()
            if upstream_revision != local_revision:
                changed = True
                print('Layer repository {} checked out into {} updated revision {} from {} to {}'.format(remotes[remote]["uri"], os.path.join(layerdir, repodir), rev, local_revision, upstream_revision))

    return changed
514
def build_status(top_dir, settings, args, d, update=False):
    """Implement 'status' (and, with update=True, the work of 'update').

    Re-resolves the build's recorded configuration non-interactively,
    diffs it against the committed config-upstream.json, and reports (or
    applies) configuration and layer-revision changes.
    """
    builddir = args.build_dir

    confdir = os.path.join(builddir, "config")
    layerdir = os.path.join(builddir, "layers")

    current_upstream_config = json.load(open(os.path.join(confdir, "config-upstream.json")))

    # Replay the original selection exactly, without prompting.
    args.config = current_upstream_config['non-interactive-cmdline-options']
    args.non_interactive = True
    args.skip_selection = current_upstream_config['skip-selection']
    source_overrides = current_upstream_config["source-overrides"]
    new_upstream_config = obtain_config(top_dir, settings, args, source_overrides, d)

    # Write the freshly-resolved config and let git tell us what changed.
    write_config(new_upstream_config, confdir)
    config_diff = bb.process.run('git -C {} diff'.format(confdir))[0]

    if config_diff:
        print('\nConfiguration in {} has changed:\n{}'.format(builddir, config_diff))
        if update:
            commit_config(confdir)
            update_build(new_upstream_config, confdir, builddir, layerdir, d)
        else:
            # Status only: roll the working tree back to the committed config.
            bb.process.run('git -C {} restore config-upstream.json'.format(confdir))
        return

    # Config unchanged; layer repositories may still have moved upstream.
    if are_layers_changed(current_upstream_config["data"]["sources"], layerdir, d):
        if update:
            update_build(current_upstream_config, confdir, builddir, layerdir, d)
        return

    print("\nConfiguration in {} has not changed.".format(builddir))
547
def build_update(top_dir, settings, args, d):
    """Implement the 'update' subcommand: build_status() with changes applied."""
    build_status(top_dir, settings, args, d, update=True)
550
def do_fetch(fetcher, dir):
    """Download and unpack via the given fetcher, logging output to a file.

    The git fetcher simply dumps git output to stdout; in a bitbake build
    that is redirected to temp/log.do_fetch, so emulate that here by
    pointing sys.stdout at <dir>/logs/fetch_log.<timestamp> for the
    duration of the fetch. (The parameter name 'dir' shadows the builtin
    but is kept for interface compatibility.)
    """
    fetchlogdir = os.path.join(dir, 'logs')
    os.makedirs(fetchlogdir, exist_ok=True)
    fetchlog = os.path.join(fetchlogdir, 'fetch_log.{}'.format(datetime.datetime.now().strftime("%Y%m%d%H%M%S")))
    with open(fetchlog, 'a') as f:
        oldstdout = sys.stdout
        sys.stdout = f
        try:
            fetcher.download()
            fetcher.unpack(dir)
        finally:
            # Bug fix: previously a failing download/unpack left sys.stdout
            # pointing at the (then closed) log file, breaking all later
            # prints. Always restore it.
            sys.stdout = oldstdout
563
def update_registry(registry, cachedir, d):
    """Make the configuration registry available locally and return its path.

    Registries given as relative ("./...") or absolute ("/...") paths are
    used in place without fetching; anything else is treated as a fetcher
    URI and downloaded into <cachedir>/configurations.
    """
    registrydir = 'configurations'
    if registry.startswith("."):
        return os.path.join(os.getcwd(), registry, registrydir)
    if registry.startswith("/"):
        return os.path.join(registry, registrydir)
    # Remote registry: fetch it into the cache.
    full_registrydir = os.path.join(cachedir, registrydir)
    print("Fetching configuration registry\n {}\ninto\n {}".format(registry, full_registrydir))
    fetcher = bb.fetch.Fetch(["{};destsuffix={}".format(registry, registrydir)], d)
    do_fetch(fetcher, cachedir)
    return full_registrydir
576
def has_expired(expiry_date):
    """Return True when expiry_date (an ISO-format string) lies in the past.

    A None or empty expiry_date means "never expires" and yields False.
    """
    if not expiry_date:
        return False
    return datetime.datetime.now() > datetime.datetime.fromisoformat(expiry_date)
581
def list_registry(registry_path, with_expired):
    """Collect all *.conf.json configurations under registry_path.

    Returns {config_name: {"description": ..., "expires": ...}}, where
    "expires" is present only for configurations that declare it. Expired
    configurations are omitted unless with_expired is True.
    """
    catalogue = {}

    for root, dirs, files in os.walk(registry_path):
        for fname in files:
            if not fname.endswith('.conf.json'):
                continue
            config_name = get_config_name(fname)
            with open(os.path.join(root, fname)) as f:
                config_data = json.load(f)
            description = config_data["description"]
            expiry_date = config_data.get("expires", None)
            if not expiry_date:
                catalogue[config_name] = {"description": description}
            elif with_expired or not has_expired(expiry_date):
                catalogue[config_name] = {"description": description, "expires": expiry_date}
    return catalogue
598
def list_configs(top_dir, settings, args, d):
    """Implement the 'list' subcommand: print available registry configurations.

    Optionally also writes the collected data to a JSON file (--write-json).
    d is a bb datastore used by the registry fetcher.
    """
    registry_path = update_registry(settings["default"]["registry"], cache_dir(top_dir), d)
    json_data = list_registry(registry_path, args.with_expired)
    print("\nAvailable configurations:")
    for config_name, config_data in json_data.items():
        expiry_date = config_data.get("expires", None)
        config_desc = config_data["description"]
        if expiry_date:
            # NOTE(review): list_registry() has already filtered out expired
            # entries when --with-expired is unset, so this inner check is
            # redundant but harmless.
            if args.with_expired or not has_expired(expiry_date):
                print("{}\t{} (supported until {})".format(config_name, config_desc, expiry_date))
        else:
            print("{}\t{}".format(config_name, config_desc))
    print("\nRun 'init' with one of the above configuration identifiers to set up a build.")

    if args.write_json:
        with open(args.write_json, 'w') as f:
            json.dump(json_data, f, sort_keys=True, indent=4)
        print("Available configurations written into {}".format(args.write_json))
617
def install_buildtools(top_dir, settings, args, d):
    """Implement 'install-buildtools': install the buildtools SDK into the build.

    Installs into <build-dir>/buildtools using the install-buildtools script
    from the checked-out oe-scripts layer. If buildtools are already present,
    prints usage hints and returns unless --force was given.
    top_dir, settings and d are unused but kept for the uniform subcommand
    signature dispatched from main().
    """
    buildtools_install_dir = os.path.join(args.build_dir, 'buildtools')
    if os.path.exists(buildtools_install_dir):
        if not args.force:
            print("Buildtools are already installed in {}.".format(buildtools_install_dir))
            env_scripts = glob.glob(os.path.join(buildtools_install_dir, 'environment-setup-*'))
            if env_scripts:
                print("If you wish to use them, you need to source the environment setup script e.g.")
                for s in env_scripts:
                    print("$ . {}".format(s))
            print("You can also re-run bitbake-setup install-buildtools with --force option to force a reinstallation.")
            return
        shutil.rmtree(buildtools_install_dir)

    # Renamed from 'install_buildtools', which shadowed this function's own name.
    install_buildtools_script = os.path.join(args.build_dir, 'layers/oe-scripts/install-buildtools')
    buildtools_download_dir = os.path.join(args.build_dir, 'buildtools-downloads/{}'.format(time.strftime("%Y%m%d%H%M%S")))
    print("Buildtools archive is downloaded into {} and its content installed into {}".format(buildtools_download_dir, buildtools_install_dir))
    # NOTE(review): shell=True with interpolated paths; the paths come from
    # local configuration so injection risk is low, but a list argv would be
    # safer if this ever takes untrusted input.
    subprocess.check_call("{} -d {} --downloads-directory {}".format(install_buildtools_script, buildtools_install_dir, buildtools_download_dir), shell=True)
636
def default_settings_path(top_dir):
    """Return the per-top-directory settings file path (<top_dir>/settings.conf)."""
    return os.path.join(top_dir, 'settings.conf')
639
def create_siteconf(top_dir, non_interactive=True):
    """Create (or re-create) <top_dir>/site.conf for host-specific settings.

    Asks for confirmation unless non_interactive is True; an existing
    site.conf is preserved as a timestamped backup before a fresh stub
    is written.
    """
    siteconfpath = os.path.join(top_dir, 'site.conf')
    print('A common site.conf file will be created, please edit or replace before running builds\n {}\n'.format(siteconfpath))
    if not non_interactive:
        y_or_n = input('Proceed? (y/N): ')
        if y_or_n != 'y':
            exit()

    # Bug fix: previously this created only os.path.dirname(top_dir) (the
    # *parent* of the top directory), so the open() below failed on a fresh
    # top directory. Create the directory that actually holds site.conf.
    os.makedirs(top_dir, exist_ok=True)
    if os.path.exists(siteconfpath):
        backup_siteconf = siteconfpath + "-backup.{}".format(time.strftime("%Y%m%d%H%M%S"))
        os.rename(siteconfpath, backup_siteconf)
        print("Previous settings are in {}".format(backup_siteconf))
    with open(siteconfpath, 'w') as siteconffile:
        siteconffile.write('# This file is intended for build host-specific bitbake settings\n')
655
def global_settings_path(args):
    """Return the global settings file location.

    Honours --global-settings when given; otherwise defaults to
    ~/.config/bitbake-setup/settings.conf.
    """
    if args.global_settings:
        return os.path.abspath(args.global_settings)
    return os.path.join(os.path.expanduser('~'), '.config', 'bitbake-setup', 'settings.conf')
658
def load_settings(settings_path):
    """Parse an ini-style settings file into a ConfigParser.

    Returns an empty parser when the file does not exist, so callers can
    treat missing settings files uniformly.
    """
    settings = configparser.ConfigParser()
    if os.path.exists(settings_path):
        print('Loading settings from\n {}\n'.format(settings_path))
        # Bug fix: the original passed a bare open() to read_file(), leaking
        # the file handle until garbage collection.
        with open(settings_path) as f:
            settings.read_file(f)
    return settings
665
def change_setting(top_dir, args):
    """Apply a 'set' or 'unset' subcommand to a settings file and save it.

    Targets the global settings file when --global was passed, otherwise
    the per-top-directory one. Unknown sections are created on 'set';
    'unset' of a missing setting is a silent no-op.
    """
    # 'global' is a Python keyword, so the attribute is reached via vars().
    if vars(args)['global']:
        settings_path = global_settings_path(args)
    else:
        settings_path = default_settings_path(top_dir)
    settings = load_settings(settings_path)

    if args.subcommand == 'set':
        if args.section not in settings:
            settings[args.section] = {}
        settings[args.section][args.setting] = args.value
        print(f"From section '{args.section}' the setting '{args.setting}' was changed to '{args.value}'")
    elif args.subcommand == 'unset':
        if args.section in settings and args.setting in settings[args.section]:
            del settings[args.section][args.setting]
            print(f"From section '{args.section}' the setting '{args.setting}' has been removed")

    os.makedirs(os.path.dirname(settings_path), exist_ok=True)
    with open(settings_path, 'w') as settingsfile:
        settings.write(settingsfile)
    print(f"Settings written to {settings_path}")
687
def list_settings(all_settings):
    """Print every setting as 'section key value', one per line."""
    for section_name, entries in all_settings.items():
        for setting_key, setting_value in entries.items():
            print("{} {} {}".format(section_name, setting_key, setting_value))
692
def settings_func(top_dir, all_settings, args):
    """Dispatch the 'settings' subcommand to the list or set/unset handlers."""
    if args.subcommand == 'list':
        list_settings(all_settings)
    elif args.subcommand in ('set', 'unset'):
        change_setting(top_dir, args)
698
def get_build_dir_via_bbpath():
    """Derive the build directory from $BBPATH, or None if not determinable.

    The first BBPATH component is taken as the bitbake config directory; it
    qualifies only if it contains an init-build-env script, in which case
    its parent is the build directory.
    """
    bbpath = os.environ.get('BBPATH')
    if not bbpath:
        return None
    bitbake_dir = os.path.normpath(bbpath.split(':')[0])
    if not os.path.exists(os.path.join(bitbake_dir, 'init-build-env')):
        return None
    return os.path.dirname(bitbake_dir)
707
def get_top_dir(args, settings):
    """Determine the top directory holding builds and shared settings.

    Resolution order: the parent of a BBPATH-derived build dir (when it has
    a settings.conf), then the parent of --build-dir if given, then the
    configured top-dir-prefix/top-dir-name default.
    """
    bbpath_build_dir = get_build_dir_via_bbpath()
    if bbpath_build_dir:
        candidate = os.path.dirname(bbpath_build_dir)
        # Only trust the BBPATH-derived location if it looks like a real
        # top directory (i.e. it carries a settings file).
        if os.path.exists(default_settings_path(candidate)):
            return candidate

    if hasattr(args, 'build_dir'):
        return os.path.dirname(os.path.normpath(args.build_dir))

    defaults = settings['default']
    return os.path.join(defaults['top-dir-prefix'], defaults['top-dir-name'])
722
def merge_settings(builtin_settings, global_settings, local_settings, cmdline_settings):
    """Overlay settings in increasing priority and return the merged mapping.

    Priority (lowest to highest): builtin defaults, global settings file,
    top-directory settings file, --setting command-line triples. Note that
    builtin_settings is mutated in place and returned, matching the
    original behaviour relied on by main().
    """
    all_settings = builtin_settings

    for overlay in (global_settings, local_settings):
        for section, section_settings in overlay.items():
            # Bug fix: previously a section unknown to builtin_settings
            # raised KeyError; create missing sections on demand.
            target = all_settings.setdefault(section, {})
            for setting, value in section_settings.items():
                target[setting] = value

    for (section, setting, value) in cmdline_settings:
        all_settings.setdefault(section, {})[setting] = value

    return all_settings
735
def main():
    """Command-line entry point: build the argparse tree and dispatch.

    Subcommands (list/init/status/update/install-buildtools/settings) each
    set args.func; after parsing, settings are merged (builtin < global
    file < top-dir file < --setting) and the chosen handler is invoked
    with (top_dir, all_settings, args, datastore).
    """
    def add_build_dir_arg(parser):
        # --build-dir defaults from BBPATH when the environment provides one.
        build_dir = get_build_dir_via_bbpath()
        if build_dir:
            parser.add_argument('--build-dir', default=build_dir, help="Path to the build, default is %(default)s via BBPATH")
        else:
            parser.add_argument('--build-dir', required=True, help="Path to the build")

    parser = argparse.ArgumentParser(
        description="BitBake setup utility. Run with 'init' argument to get started.",
        epilog="Use %(prog)s <subcommand> --help to get help on a specific command"
    )
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
    parser.add_argument('--no-network', action='store_true', help='Do not check whether configuration repositories and layer repositories have been updated; use only the local cache.')
    parser.add_argument('--global-settings', action='store', metavar='PATH', help='Path to the global settings file.')
    parser.add_argument('--setting', default=[], action='append', dest='cmdline_settings',
                        nargs=3, metavar=('SECTION', 'SETTING', 'VALUE'),
                        help='Modify a setting (for this bitbake-setup invocation only), for example "--setting default top-dir-prefix /path/to/top/dir".')

    subparsers = parser.add_subparsers()

    parser_list = subparsers.add_parser('list', help='List available configurations')
    parser_list.add_argument('--with-expired', action='store_true', help='List also configurations that are no longer supported due to reaching their end-of-life dates.')
    parser_list.add_argument('--write-json', action='store', help='Write available configurations into a json file so they can be programmatically processed.')
    parser_list.set_defaults(func=list_configs)

    parser_init = subparsers.add_parser('init', help='Select a configuration and initialize a build from it')
    parser_init.add_argument('config', nargs='*', help="path/URL/id to a configuration file (use 'list' command to get available ids), followed by configuration options. Bitbake-setup will ask to choose from available choices if command line doesn't completely specify them.")
    parser_init.add_argument('--non-interactive', action='store_true', help='Do not ask to interactively choose from available options; if bitbake-setup cannot make a decision it will stop with a failure.')
    parser_init.add_argument('--source-overrides', action='store', help='Override sources information (repositories/revisions) with values from a local json file.')
    parser_init.add_argument('--build-dir-name', action='store', help='A custom build directory name under the top directory.')
    parser_init.add_argument('--skip-selection', action='append', help='Do not select and set an option/fragment from available choices; the resulting bitbake configuration may be incomplete.')
    parser_init.set_defaults(func=init_config)

    parser_status = subparsers.add_parser('status', help='Check if the build needs to be synchronized with configuration')
    add_build_dir_arg(parser_status)
    parser_status.set_defaults(func=build_status)

    parser_update = subparsers.add_parser('update', help='Update a build to be in sync with configuration')
    add_build_dir_arg(parser_update)
    parser_update.set_defaults(func=build_update)

    parser_install_buildtools = subparsers.add_parser('install-buildtools', help='Install buildtools which can help fulfil missing or incorrect dependencies on the host machine')
    add_build_dir_arg(parser_install_buildtools)
    parser_install_buildtools.add_argument('--force', action='store_true', help='Force a reinstall of buildtools over the previous installation.')
    parser_install_buildtools.set_defaults(func=install_buildtools)

    # Shared --global flag for the 'settings set/unset' subcommands.
    parser_settings_arg_global = argparse.ArgumentParser(add_help=False)
    parser_settings_arg_global.add_argument('--global', action='store_true', help="Modify the setting in a global settings file, rather than one specific to a top directory")

    parser_settings = subparsers.add_parser('settings',
        help='List current settings, or set or unset a setting in a settings file (e.g. the default prefix and name of the top directory, the location of build configuration registry, downloads directory and other settings specific to a top directory)')
    parser_settings.set_defaults(func=settings_func)

    subparser_settings = parser_settings.add_subparsers(dest="subcommand", required=True, help="The action to perform on the settings file")

    parser_settings_list = subparser_settings.add_parser('list',
        help="List all settings with their values")

    parser_settings_set = subparser_settings.add_parser('set', parents=[parser_settings_arg_global],
        help="In a Section, set a setting to a certain value")
    parser_settings_set.add_argument("section", metavar="<section>", help="Section in a settings file, typically 'default'")
    parser_settings_set.add_argument("setting", metavar="<setting>", help="Name of a setting")
    parser_settings_set.add_argument("value", metavar="<value>", help="The setting value")

    parser_settings_unset = subparser_settings.add_parser('unset', parents=[parser_settings_arg_global],
        help="Unset a setting, e.g. 'bitbake-setup settings unset default registry' would revert to the registry setting in a global settings file")
    parser_settings_unset.add_argument("section", metavar="<section>", help="Section in a settings file, typically 'default'")
    parser_settings_unset.add_argument("setting", metavar="<setting>", help="The setting to remove")

    args = parser.parse_args()

    logging.basicConfig(stream=sys.stdout)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.quiet:
        logger.setLevel(logging.ERROR)

    # Need to re-run logger_create with color argument
    # (will be the same logger since it has the same name)
    bb.msg.logger_create('bitbake-setup', output=sys.stdout,
                         color=args.color,
                         level=logger.getEffectiveLevel())

    if 'func' in args:
        if hasattr(args, 'build_dir'):
            if not os.path.exists(os.path.join(args.build_dir,'build', 'init-build-env')):
                print("Not a valid build directory: build/init-build-env does not exist in {}".format(args.build_dir))
                return

        # Subcommands other than 'init' have no --non-interactive flag but
        # the shared code paths expect the attribute to exist.
        if not hasattr(args, 'non_interactive'):
            args.non_interactive = True

        builtin_settings = {}
        builtin_settings['default'] = {
            'top-dir-prefix':os.path.expanduser('~'),
            'top-dir-name':'bitbake-builds',
            'registry':default_registry,
        }

        global_settings = load_settings(global_settings_path(args))
        top_dir = get_top_dir(args, merge_settings(builtin_settings, global_settings, {}, args.cmdline_settings))

        # This cannot be set with the rest of the builtin settings as top_dir needs to be determined first
        builtin_settings['default']['dl-dir'] = os.path.join(top_dir, '.bitbake-setup-downloads')

        topdir_settings = load_settings(default_settings_path(top_dir))
        all_settings = merge_settings(builtin_settings, global_settings, topdir_settings, args.cmdline_settings)

        # The settings subcommand does not need a bitbake datastore.
        if args.func == settings_func:
            settings_func(top_dir, all_settings, args)
            return

        # NOTE(review): this format string has one placeholder but two
        # arguments; the global_settings_path() value is silently ignored.
        print('Bitbake-setup is using {} as top directory ("bitbake-setup settings --help" shows how to change it).\n'.format(top_dir, global_settings_path(args)))

        d = init_bb_cache(top_dir, all_settings, args)
        args.func(top_dir, all_settings, args, d)
        save_bb_cache()
    else:
        # NOTE(review): this Namespace import is unused.
        from argparse import Namespace
        parser.print_help()
860main()
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
deleted file mode 100755
index d2b146a6a9..0000000000
--- a/bitbake/bin/bitbake-worker
+++ /dev/null
@@ -1,590 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright BitBake Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import sys
10import warnings
11warnings.simplefilter("default")
12warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*use.of.fork.*may.lead.to.deadlocks.in.the.child.*")
13sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
14from bb import fetch2
15import logging
16import bb
17import select
18import errno
19import signal
20import pickle
21import traceback
22import queue
23import shlex
24import subprocess
25import fcntl
26from multiprocessing import Lock
27from threading import Thread
28
# Remove when we have a minimum of python 3.10
if not hasattr(fcntl, 'F_SETPIPE_SZ'):
    fcntl.F_SETPIPE_SZ = 1031

bb.utils.check_system_locale()

# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
    print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
    sys.exit(1)

# The magic argv[1] cookie doubles as a mode switch: the longer
# "decafbadbad" variant enables task profiling.
profiling = False
if sys.argv[1].startswith("decafbadbad"):
    profiling = True
    try:
        import cProfile as profile
    except:
        import profile

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
        fl |= os.O_SYNC
        fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
        #sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass
59
logger = logging.getLogger("BitBake")

# stdout doubles as the status/event channel back to the cooker process.
worker_pipe = sys.stdout.fileno()
bb.utils.nonblockingfd(worker_pipe)
# Try to make the pipe buffers larger as it is much more efficient. If we can't
# e.g. out of buffer space (/proc/sys/fs/pipe-user-pages-soft) then just pass over.
try:
    fcntl.fcntl(worker_pipe, fcntl.F_SETPIPE_SZ, 512 * 1024)
except:
    pass
# Need to guard against multiprocessing being used in child processes
# and multiple processes trying to write to the parent at the same time
worker_pipe_lock = None

# Route log records through the bb event system so the cooker sees them.
handler = bb.event.LogHandler()
logger.addHandler(handler)

if 0:
    # Code to write out a log file of all events passing through the worker
    # (debug-only; flip the 0 to 1 to enable)
    logfilename = "/tmp/workerlogfile"
    format_str = "%(levelname)s: %(message)s"
    conlogformat = bb.msg.BBLogFormatter(format_str)
    consolelog = logging.FileHandler(logfilename)
    consolelog.setFormatter(conlogformat)
    logger.addHandler(consolelog)

# Events queued here are drained by the writer thread (worker_flush).
worker_queue = queue.Queue()
87
def worker_fire(event, d):
    """Serialize an event and queue it for delivery to the cooker (d is unused)."""
    payload = b"<event>" + pickle.dumps(event) + b"</event>"
    worker_fire_prepickled(payload)
91
def worker_fire_prepickled(event):
    """Hand an already-serialized event to the writer thread's queue.

    The original declared 'global worker_queue', but that is unnecessary for
    calling a method on the module-level queue; Queue.put() is thread-safe.
    """
    worker_queue.put(event)
96
#
# We can end up with write contention with the cooker, it can be trying to send commands
# and we can be trying to send event data back. Therefore use a separate thread for writing
# back data to cooker.
#
# Shutdown flag polled by worker_flush(); once True, the writer thread drains
# the queue and exits.
worker_thread_exit = False
103
def worker_flush(worker_queue):
    """Writer-thread loop: drain queued event bytes into the cooker pipe.

    Accumulates queue items into a local byte buffer and writes them to the
    nonblocking worker_pipe as it becomes writable, retrying on EAGAIN/EPIPE.
    Exits once worker_thread_exit is set and all pending data is flushed.
    """
    worker_queue_int = bytearray()
    global worker_pipe, worker_thread_exit

    while True:
        try:
            # Block up to 1s so the exit flag is re-checked regularly.
            worker_queue_int.extend(worker_queue.get(True, 1))
        except queue.Empty:
            pass
        while (worker_queue_int or not worker_queue.empty()):
            try:
                # Wait (up to 1s) for the pipe to become writable.
                (_, ready, _) = select.select([], [worker_pipe], [], 1)
                if not worker_queue.empty():
                    worker_queue_int.extend(worker_queue.get())
                # Partial writes are expected on a nonblocking fd; drop only
                # the bytes actually written.
                written = os.write(worker_pipe, worker_queue_int)
                del worker_queue_int[0:written]
            except (IOError, OSError) as e:
                # EAGAIN: pipe full, retry. EPIPE: cooker went away; keep
                # looping so the exit condition below can terminate us.
                if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
                    raise
        if worker_thread_exit and worker_queue.empty() and not worker_queue_int:
            return
125
# Start the background writer thread that drains worker_queue into worker_pipe.
worker_thread = Thread(target=worker_flush, args=(worker_queue,))
worker_thread.start()
128
def worker_child_fire(event, d):
    """Event sink used inside forked task children (d is unused).

    Unlike the parent's queue-based worker_fire, children write serialized
    events directly to their end of the status pipe, under a lock so
    multiple processes in the child's group don't interleave writes.
    """
    global worker_pipe
    global worker_pipe_lock

    data = b"<event>" + pickle.dumps(event) + b"</event>"
    try:
        with bb.utils.lock_timeout(worker_pipe_lock):
            # Loop because a single write() may be partial.
            while(len(data)):
                written = worker_pipe.write(data)
                data = data[written:]
    except IOError:
        # Pipe is gone (parent died): tear down our process group, then
        # propagate so the caller sees the failure too.
        sigterm_handler(None, None)
        raise
142
# Route all bb events fired in this process through the writer-thread queue.
bb.event.worker_fire = worker_fire

# Optional debug log of raw cooker commands; uncomment the open() to enable.
lf = None
#lf = open("/tmp/workercommandlog", "w+")
def workerlog_write(msg):
    # No-op unless the debug log file above has been enabled.
    if lf:
        lf.write(msg)
        lf.flush()
151
def sigterm_handler(signum, frame):
    """SIGTERM/SIGHUP handler: terminate the whole process group, then exit.

    The handler is reset to the default first so the killpg() below does not
    re-enter this function when SIGTERM is delivered to ourselves.
    """
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    # killpg(0, ...) signals every process in our process group, including us.
    os.killpg(0, signal.SIGTERM)
    sys.exit()
156
157def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
158
159 fn = runtask['fn']
160 task = runtask['task']
161 taskname = runtask['taskname']
162 taskhash = runtask['taskhash']
163 unihash = runtask['unihash']
164 appends = runtask['appends']
165 layername = runtask['layername']
166 taskdepdata = runtask['taskdepdata']
167 quieterrors = runtask['quieterrors']
168 # We need to setup the environment BEFORE the fork, since
169 # a fork() or exec*() activates PSEUDO...
170
171 envbackup = {}
172 fakeroot = False
173 fakeenv = {}
174 umask = None
175
176 uid = os.getuid()
177 gid = os.getgid()
178
179 taskdep = runtask['taskdep']
180 if 'umask' in taskdep and taskname in taskdep['umask']:
181 umask = taskdep['umask'][taskname]
182 elif workerdata["umask"]:
183 umask = workerdata["umask"]
184 if umask:
185 # Convert to a python numeric value as it could be a string
186 umask = bb.utils.to_filemode(umask)
187
188 dry_run = cfg.dry_run or runtask['dry_run']
189
190 # We can't use the fakeroot environment in a dry run as it possibly hasn't been built
191 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
192 fakeroot = True
193 envvars = (runtask['fakerootenv'] or "").split()
194 for key, value in (var.split('=',1) for var in envvars):
195 envbackup[key] = os.environ.get(key)
196 os.environ[key] = value
197 fakeenv[key] = value
198
199 fakedirs = (runtask['fakerootdirs'] or "").split()
200 for p in fakedirs:
201 bb.utils.mkdirhier(p)
202 logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
203 (fn, taskname, ', '.join(fakedirs)))
204 else:
205 envvars = (runtask['fakerootnoenv'] or "").split()
206 for key, value in (var.split('=',1) for var in envvars):
207 envbackup[key] = os.environ.get(key)
208 os.environ[key] = value
209 fakeenv[key] = value
210
211 sys.stdout.flush()
212 sys.stderr.flush()
213
214 try:
215 pipein, pipeout = os.pipe()
216 pipein = os.fdopen(pipein, 'rb', 4096)
217 pipeout = os.fdopen(pipeout, 'wb', 0)
218 pid = os.fork()
219 except OSError as e:
220 logger.critical("fork failed: %d (%s)" % (e.errno, e.strerror))
221 sys.exit(1)
222
223 if pid == 0:
224 def child():
225 global worker_pipe
226 global worker_pipe_lock
227 pipein.close()
228
229 bb.utils.signal_on_parent_exit("SIGTERM")
230
231 # Save out the PID so that the event can include it the
232 # events
233 bb.event.worker_pid = os.getpid()
234 bb.event.worker_fire = worker_child_fire
235 worker_pipe = pipeout
236 worker_pipe_lock = Lock()
237
238 # Make the child the process group leader and ensure no
239 # child process will be controlled by the current terminal
240 # This ensures signals sent to the controlling terminal like Ctrl+C
241 # don't stop the child processes.
242 os.setsid()
243
244 signal.signal(signal.SIGTERM, sigterm_handler)
245 # Let SIGHUP exit as SIGTERM
246 signal.signal(signal.SIGHUP, sigterm_handler)
247
248 # No stdin & stdout
249 # stdout is used as a status report channel and must not be used by child processes.
250 dumbio = os.open(os.devnull, os.O_RDWR)
251 os.dup2(dumbio, sys.stdin.fileno())
252 os.dup2(dumbio, sys.stdout.fileno())
253
254 if umask is not None:
255 os.umask(umask)
256
257 try:
258 (realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
259 the_data = databuilder.mcdata[mc]
260 the_data.setVar("BB_WORKERCONTEXT", "1")
261 the_data.setVar("BB_TASKDEPDATA", taskdepdata)
262 the_data.setVar('BB_CURRENTTASK', taskname.replace("do_", ""))
263 if cfg.limited_deps:
264 the_data.setVar("BB_LIMITEDDEPS", "1")
265 the_data.setVar("BUILDNAME", workerdata["buildname"])
266 the_data.setVar("DATE", workerdata["date"])
267 the_data.setVar("TIME", workerdata["time"])
268 for varname, value in extraconfigdata.items():
269 the_data.setVar(varname, value)
270
271 bb.parse.siggen.set_taskdata(workerdata["sigdata"])
272 if "newhashes" in workerdata:
273 bb.parse.siggen.set_taskhashes(workerdata["newhashes"])
274 ret = 0
275
276 the_data = databuilder.parseRecipe(fn, appends, layername)
277 the_data.setVar('BB_TASKHASH', taskhash)
278 the_data.setVar('BB_UNIHASH', unihash)
279 bb.parse.siggen.setup_datacache_from_datastore(fn, the_data)
280
281 bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
282
283 if not bb.utils.to_boolean(the_data.getVarFlag(taskname, 'network')):
284 if bb.utils.is_local_uid(uid):
285 logger.debug("Attempting to disable network for %s" % taskname)
286 bb.utils.disable_network(uid, gid)
287 else:
288 logger.debug("Skipping disable network for %s since %s is not a local uid." % (taskname, uid))
289
290 # exported_vars() returns a generator which *cannot* be passed to os.environ.update()
291 # successfully. We also need to unset anything from the environment which shouldn't be there
292 exports = bb.data.exported_vars(the_data)
293
294 bb.utils.empty_environment()
295 for e, v in exports:
296 os.environ[e] = v
297
298 for e in fakeenv:
299 os.environ[e] = fakeenv[e]
300 the_data.setVar(e, fakeenv[e])
301 the_data.setVarFlag(e, 'export', "1")
302
303 task_exports = the_data.getVarFlag(taskname, 'exports')
304 if task_exports:
305 for e in task_exports.split():
306 the_data.setVarFlag(e, 'export', '1')
307 v = the_data.getVar(e)
308 if v is not None:
309 os.environ[e] = v
310
311 if quieterrors:
312 the_data.setVarFlag(taskname, "quieterrors", "1")
313
314 except Exception:
315 if not quieterrors:
316 logger.critical(traceback.format_exc())
317 os._exit(1)
318
319 sys.stdout.flush()
320 sys.stderr.flush()
321
322 try:
323 if dry_run:
324 return 0
325 try:
326 ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
327 finally:
328 if fakeroot:
329 fakerootcmd = shlex.split(the_data.getVar("FAKEROOTCMD"))
330 subprocess.run(fakerootcmd + ['-S'], check=True, stdout=subprocess.PIPE)
331 return ret
332 except:
333 os._exit(1)
334 if not profiling:
335 os._exit(child())
336 else:
337 profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
338 prof = profile.Profile()
339 try:
340 ret = profile.Profile.runcall(prof, child)
341 finally:
342 prof.dump_stats(profname)
343 bb.utils.process_profilelog(profname)
344 os._exit(ret)
345 else:
346 for key, value in iter(envbackup.items()):
347 if value is None:
348 del os.environ[key]
349 else:
350 os.environ[key] = value
351
352 return pid, pipein, pipeout
353
class runQueueWorkerPipe():
    """
    Abstraction for a pipe between a worker thread and the worker server.

    Buffers bytes from a forked task child and forwards each complete
    <event>...</event> framed message upstream via worker_fire_prepickled().
    """
    def __init__(self, pipein, pipeout):
        # pipein: read end of the child's event pipe (kept).
        # pipeout: write end; it belongs to the child, so close our copy
        # (if given) to get EOF once the child exits.
        self.input = pipein
        if pipeout:
            pipeout.close()
        bb.utils.nonblockingfd(self.input)
        # Receive buffer; a message may arrive split across reads.
        self.queue = bytearray()

    def read(self):
        """Pull available data and forward complete events.

        Returns True if any new bytes arrived on this call."""
        start = len(self.queue)
        try:
            # Non-blocking read; EAGAIN simply means no data right now.
            self.queue.extend(self.input.read(512*1024) or b"")
        except (OSError, IOError) as e:
            if e.errno != errno.EAGAIN:
                raise

        end = len(self.queue)
        index = self.queue.find(b"</event>")
        while index != -1:
            # 8 == len(b"</event>"); each message must be exactly one event.
            msg = self.queue[:index+8]
            assert msg.startswith(b"<event>") and msg.count(b"<event>") == 1
            worker_fire_prepickled(msg)
            self.queue = self.queue[index+8:]
            index = self.queue.find(b"</event>")
        return (end > start)

    def close(self):
        # Drain and forward any remaining complete events before closing.
        while self.read():
            continue
        if len(self.queue) > 0:
            print("Warning, worker child left partial message: %s" % self.queue)
        self.input.close()
389
390normalexit = False
391
class BitbakeWorker(object):
    """
    Worker-side command loop.

    Reads framed <tag>...</tag> commands from the cooker server on the
    input stream, dispatches them to handle_* methods, forks task child
    processes and relays their events and exit codes back to the server.
    """
    def __init__(self, din):
        # din: binary readable stream from the server (made non-blocking).
        self.input = din
        bb.utils.nonblockingfd(self.input)
        # Raw receive buffer; commands may arrive split across reads.
        self.queue = bytearray()
        self.cookercfg = None
        self.databuilder = None
        self.data = None
        self.extraconfigdata = None
        # pid -> task id for forked task processes
        self.build_pids = {}
        # pid -> runQueueWorkerPipe carrying that child's events
        self.build_pipes = {}

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)
        # sys.argv[1] is the magic cookie the server starts us with;
        # presumably the fakeroot variant contains "beef" -- TODO confirm
        if "beef" in sys.argv[1]:
            bb.utils.set_process_name("Worker (Fakeroot)")
        else:
            bb.utils.set_process_name("Worker")

    def sigterm_exception(self, signum, stackframe):
        """Tear down all running tasks, then re-deliver SIGTERM with the
        default handler so the worker dies with the conventional status."""
        if signum == signal.SIGTERM:
            bb.warn("Worker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Worker received SIGHUP, shutting down...")
        self.handle_finishnow(None)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        os.kill(os.getpid(), signal.SIGTERM)

    def serve(self):
        """Main loop: multiplex the server command stream and all task
        pipes. Only exits via quit (sys.exit) or signal handling."""
        while True:
            (ready, _, _) = select.select([self.input] + [i.input for i in self.build_pipes.values()], [] , [], 1)
            if self.input in ready:
                try:
                    r = self.input.read()
                    if len(r) == 0:
                        # EOF on pipe, server must have terminated
                        self.sigterm_exception(signal.SIGTERM, None)
                    self.queue.extend(r)
                except (OSError, IOError):
                    pass
            if len(self.queue):
                # Try each known command tag; handle_item is a no-op unless
                # the buffer starts with that exact opening tag.
                self.handle_item(b"cookerconfig", self.handle_cookercfg)
                self.handle_item(b"extraconfigdata", self.handle_extraconfigdata)
                self.handle_item(b"workerdata", self.handle_workerdata)
                self.handle_item(b"newtaskhashes", self.handle_newtaskhashes)
                self.handle_item(b"runtask", self.handle_runtask)
                self.handle_item(b"finishnow", self.handle_finishnow)
                self.handle_item(b"ping", self.handle_ping)
                self.handle_item(b"quit", self.handle_quit)

            # Forward pending output from running task children.
            for pipe in self.build_pipes:
                if self.build_pipes[pipe].input in ready:
                    self.build_pipes[pipe].read()
            if len(self.build_pids):
                while self.process_waitpid():
                    continue

    def handle_item(self, item, func):
        """If the receive buffer holds a complete <item>...</item> message,
        strip it off and pass the payload to func.

        Wire format: <item> + 4-byte big-endian payload length + payload
        + </item>.
        """
        opening_tag = b"<" + item + b">"
        if not self.queue.startswith(opening_tag):
            return

        tag_len = len(opening_tag)
        if len(self.queue) < tag_len + 4:
            # we need to receive more data
            return
        header = self.queue[tag_len:tag_len + 4]
        payload_len = int.from_bytes(header, 'big')
        # closing tag has length (tag_len + 1)
        # NOTE(review): this size check does not count the 4 length-header
        # bytes, so it can pass slightly early; the find() below still
        # requires the closing tag to be present -- confirm intended.
        if len(self.queue) < tag_len * 2 + 1 + payload_len:
            # we need to receive more data
            return

        index = self.queue.find(b"</" + item + b">")
        if index != -1:
            try:
                func(self.queue[(tag_len + 4):index])
            except pickle.UnpicklingError:
                workerlog_write("Unable to unpickle data: %s\n" % ":".join("{:02x}".format(c) for c in self.queue))
                raise
            self.queue = self.queue[(index + len(b"</") + len(item) + len(b">")):]

    def handle_cookercfg(self, data):
        # Unpickle the cooker configuration and build the base datastore.
        self.cookercfg = pickle.loads(data)
        self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True)
        self.databuilder.parseBaseConfiguration(worker=True)
        self.data = self.databuilder.data

    def handle_extraconfigdata(self, data):
        self.extraconfigdata = pickle.loads(data)

    def handle_workerdata(self, data):
        # Per-build settings: logging verbosity/levels, plus PR service and
        # hash equivalence server addresses for every multiconfig datastore.
        self.workerdata = pickle.loads(data)
        bb.build.verboseShellLogging = self.workerdata["build_verbose_shell"]
        bb.build.verboseStdoutLogging = self.workerdata["build_verbose_stdout"]
        bb.msg.loggerDefaultLogLevel = self.workerdata["logdefaultlevel"]
        bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"]
        for mc in self.databuilder.mcdata:
            self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
            self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"])
            self.databuilder.mcdata[mc].setVar("__bbclasstype", "recipe")

    def handle_newtaskhashes(self, data):
        # Updated task hashes pushed mid-build (hash equivalence).
        self.workerdata["newhashes"] = pickle.loads(data)

    def handle_ping(self, _):
        workerlog_write("Handling ping\n")

        logger.warning("Pong from bitbake-worker!")

    def handle_quit(self, data):
        workerlog_write("Handling quit\n")

        # Mark this as a clean, requested shutdown for the module-level
        # exception/exit handling, then leave via SystemExit.
        global normalexit
        normalexit = True
        sys.exit(0)

    def handle_runtask(self, data):
        # Fork a child to execute one task; track its pid and event pipe.
        runtask = pickle.loads(data)

        fn = runtask['fn']
        task = runtask['task']
        taskname = runtask['taskname']

        workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))

        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, self.extraconfigdata, runtask)
        self.build_pids[pid] = task
        self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)

    def process_waitpid(self):
        """
        Reap one finished task child, if any.

        Returns False when there is nothing to collect; otherwise closes
        the child's pipe, reports (task, exitcode) upstream, returns True.
        """
        try:
            pid, status = os.waitpid(-1, os.WNOHANG)
            if pid == 0 or os.WIFSTOPPED(status):
                return False
        except OSError:
            return False

        workerlog_write("Exit code of %s for pid %s\n" % (status, pid))

        if os.WIFEXITED(status):
            status = os.WEXITSTATUS(status)
        elif os.WIFSIGNALED(status):
            # Per shell conventions for $?, when a process exits due to
            # a signal, we return an exit code of 128 + SIGNUM
            status = 128 + os.WTERMSIG(status)

        task = self.build_pids[pid]
        del self.build_pids[pid]

        self.build_pipes[pid].close()
        del self.build_pipes[pid]

        worker_fire_prepickled(b"<exitcode>" + pickle.dumps((task, status)) + b"</exitcode>")

        return True

    def handle_finishnow(self, _):
        """SIGTERM every outstanding task process group, reap them and
        drain any remaining events from their pipes."""
        if self.build_pids:
            logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids))
            for k, v in iter(self.build_pids.items()):
                try:
                    # Negative pid: signal the child's whole process group.
                    os.kill(-k, signal.SIGTERM)
                    os.waitpid(-1, 0)
                except:
                    pass
        for pipe in self.build_pipes:
            self.build_pipes[pipe].read()
565
# Top-level entry: build the worker on the raw stdin byte stream and serve
# commands until told to quit.  `profiling`, `worker_thread`,
# `worker_thread_exit` and `workerlog_write` are defined earlier in this
# file (outside this excerpt) -- behavior assumed from usage here.
try:
    worker = BitbakeWorker(os.fdopen(sys.stdin.fileno(), 'rb'))
    if not profiling:
        worker.serve()
    else:
        # Profile the serve loop and post-process the stats on exit.
        profname = "profile-worker.log"
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, worker.serve)
        finally:
            prof.dump_stats(profname)
            bb.utils.process_profilelog(profname)
except BaseException as e:
    # A requested quit arrives here as SystemExit; only report genuinely
    # unexpected failures.
    if not normalexit:
        import traceback
        sys.stderr.write(traceback.format_exc())
        sys.stderr.write(str(e))
finally:
    # Stop the background worker thread before exiting.
    worker_thread_exit = True
    worker_thread.join()

workerlog_write("exiting")
if not normalexit:
    sys.exit(1)
sys.exit(0)
diff --git a/bitbake/bin/git-make-shallow b/bitbake/bin/git-make-shallow
deleted file mode 100755
index e6c180b4d6..0000000000
--- a/bitbake/bin/git-make-shallow
+++ /dev/null
@@ -1,175 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright BitBake Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""git-make-shallow: make the current git repository shallow
9
10Remove the history of the specified revisions, then optionally filter the
11available refs to those specified.
12"""
13
14import argparse
15import collections
16import errno
17import itertools
18import os
19import subprocess
20import sys
21import warnings
22warnings.simplefilter("default")
23
24version = 1.0
25
26
27git_cmd = ['git', '-c', 'safe.bareRepository=all']
28
def main():
    """Make the current git repository shallow.

    Unshallows first if needed, records graft points in $GIT_DIR/shallow,
    verifies the cut succeeded, filters refs, and optionally repacks.
    """
    # Historical guard kept from the original script.
    if sys.version_info < (3, 4, 0):
        sys.exit('Python 3.4 or greater is required')

    git_dir = check_output(git_cmd + ['rev-parse', '--git-dir']).rstrip()
    shallow_file = os.path.join(git_dir, 'shallow')
    if os.path.exists(shallow_file):
        # Start from full history: try to unshallow via fetch; failing
        # that, just remove the stale shallow marker file.
        try:
            check_output(git_cmd + ['fetch', '--unshallow'])
        except subprocess.CalledProcessError:
            try:
                os.unlink(shallow_file)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise

    args = process_args()
    revs = check_output(git_cmd + ['rev-list'] + args.revisions).splitlines()

    make_shallow(shallow_file, args.revisions, args.refs)

    # Any requested revision still reachable with parents (rev^@ non-empty)
    # means the history cut failed.
    ref_revs = check_output(git_cmd + ['rev-list'] + args.refs).splitlines()
    remaining_history = set(revs) & set(ref_revs)
    for rev in remaining_history:
        if check_output(git_cmd + ['rev-parse', '{}^@'.format(rev)]):
            sys.exit('Error: %s was not made shallow' % rev)

    filter_refs(args.refs)

    if args.shrink:
        shrink_repo(git_dir)
        subprocess.check_call(git_cmd + ['fsck', '--unreachable'])
61
62
def process_args():
    """Parse the command line and normalise refs/revisions to full names."""
    # TODO: add argument to automatically keep local-only refs, since they
    # can't be easily restored with a git fetch.
    parser = argparse.ArgumentParser(description='Remove the history of the specified revisions, then optionally filter the available refs to those specified.')
    parser.add_argument('--ref', '-r', metavar='REF', action='append', dest='refs', help='remove all but the specified refs (cumulative)')
    parser.add_argument('--shrink', '-s', action='store_true', help='shrink the git repository by repacking and pruning')
    parser.add_argument('revisions', metavar='REVISION', nargs='+', help='a git revision/commit')
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(2)

    args = parser.parse_args()

    if args.refs:
        # Resolve user-supplied refs to their symbolic full names.
        resolved = check_output(git_cmd + ['rev-parse', '--symbolic-full-name'] + args.refs)
        args.refs = resolved.splitlines()
    else:
        # Default to every commit-ish ref in the repository.
        args.refs = get_all_refs(lambda r, t, tt: t == 'commit' or tt == 'commit')

    # Drop symbolic HEAD refs; peel each revision to its commit object.
    args.refs = [ref for ref in args.refs if not ref.endswith('/HEAD')]
    peeled = ['%s^{}' % rev for rev in args.revisions]
    args.revisions = check_output(git_cmd + ['rev-parse'] + peeled).splitlines()
    return args
84
85
def check_output(cmd, input=None):
    """Run *cmd* and return its stdout as text.

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    output = subprocess.check_output(cmd, input=input, universal_newlines=True)
    return output
88
89
def make_shallow(shallow_file, revisions, refs):
    """Truncate history by appending each intersection commit to the
    repository's shallow (graft point) file."""
    for commit in follow_history_intersections(revisions, refs):
        print("Processing %s" % commit)
        with open(shallow_file, 'a') as shallow:
            shallow.write(commit + '\n')
96
97
def get_all_refs(ref_filter=None):
    """Return every ref in this repository, optionally keeping only those
    accepted by ref_filter(refname, objecttype, peeled_objecttype)."""
    fmt = '--format=%(refname)\t%(objecttype)\t%(*objecttype)'
    output = check_output(git_cmd + ['for-each-ref', fmt])
    # Pad each record to exactly three fields (peeled type may be absent).
    entries = [tuple(iter_extend(line.rsplit('\t'), 3)) for line in output.splitlines()]
    if ref_filter:
        entries = (entry for entry in entries if ref_filter(*entry))
    return [entry[0] for entry in entries]
106
107
def iter_extend(iterable, length, obj=None):
    """Yield exactly *length* items: the contents of *iterable*, padded
    with *obj* if too short (and truncated if too long)."""
    padded = itertools.chain(iterable, itertools.repeat(obj))
    return itertools.islice(padded, length)
111
112
def filter_refs(refs):
    """Delete every ref in the repository that is not listed in *refs*."""
    keep = set(refs)
    doomed = set(get_all_refs()) - keep
    if doomed:
        # Batch all deletions through one NUL-delimited update-ref call.
        stdin_data = ''.join('delete ' + ref + '\0\0' for ref in doomed)
        check_output(git_cmd + ['update-ref', '--no-deref', '--stdin', '-z'],
                     input=stdin_data)
120
121
def follow_history_intersections(revisions, refs):
    """Yield each commit where the history of *revisions* intersects the
    history of *refs* -- these become the shallow graft points.

    Breadth-first walk seeded with *revisions*: for each yielded commit,
    merge-bases of its parents against the independent heads of *refs*
    are queued for further processing.
    """
    queue = collections.deque(revisions)
    seen = set()

    # Pop until the queue is exhausted (popleft raises IndexError).
    for rev in iter_except(queue.popleft, IndexError):
        if rev in seen:
            continue

        # rev^@ expands to all parents of rev (empty for a root commit).
        parents = check_output(git_cmd + ['rev-parse', '%s^@' % rev]).splitlines()

        yield rev
        seen.add(rev)

        if not parents:
            continue

        # Independent heads among the refs (none an ancestor of another).
        check_refs = check_output(git_cmd + ['merge-base', '--independent'] + sorted(refs)).splitlines()
        for parent in parents:
            for ref in check_refs:
                print("Checking %s vs %s" % (parent, ref))
                try:
                    merge_base = check_output(git_cmd + ['merge-base', parent, ref]).rstrip()
                except subprocess.CalledProcessError:
                    # No common ancestor; nothing to queue for this pair.
                    continue
                else:
                    queue.append(merge_base)
149
150
def iter_except(func, exception, start=None):
    """Call *func* repeatedly, yielding each result, until *exception*
    is raised; if *start* is given, yield start() first."""
    if start is not None:
        try:
            yield start()
        except exception:
            return
    while True:
        try:
            yield func()
        except exception:
            return
160
161
def shrink_repo(git_dir):
    """Repack the now-shallow repository and prune unreachable objects."""
    subprocess.check_call(git_cmd + ['reflog', 'expire', '--expire-unreachable=now', '--all'])
    subprocess.check_call(git_cmd + ['repack', '-ad'])
    # Drop any alternates file; ignore it being absent already.
    alternates = os.path.join(git_dir, 'objects', 'info', 'alternates')
    try:
        os.unlink(alternates)
    except OSError as exc:
        if exc.errno != errno.ENOENT:
            raise
    subprocess.check_call(git_cmd + ['prune', '--expire', 'now'])
172
173
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/bitbake/bin/toaster b/bitbake/bin/toaster
deleted file mode 100755
index f002c8c159..0000000000
--- a/bitbake/bin/toaster
+++ /dev/null
@@ -1,332 +0,0 @@
1#!/bin/echo ERROR: This script needs to be sourced. Please run as .
2
3# toaster - shell script to start Toaster
4
5# Copyright (C) 2013-2015 Intel Corp.
6#
7# SPDX-License-Identifier: GPL-2.0-or-later
8#
9
10HELP="
11Usage 1: source toaster start|stop [webport=<address:port>] [noweb] [nobuild] [toasterdir]
12 Optional arguments:
13 [nobuild] Setup the environment for capturing builds with toaster but disable managed builds
14 [noweb] Setup the environment for capturing builds with toaster but don't start the web server
15 [webport] Set the development server (default: localhost:8000)
16 [toasterdir] Set absolute path to be used as TOASTER_DIR (default: BUILDDIR/../)
17Usage 2: source toaster manage [createsuperuser|lsupdates|migrate|makemigrations|checksettings|collectstatic|...]
18"
19
# Hook point: if a site-specific append script exists, run it with the
# given arguments (e.g. "web_start_postpend <addr:port>").
# NOTE: the name keeps its historical misspelling; callers throughout
# this script use it, so it cannot be renamed compatibly.
custom_extention()
{
    custom_extension=$BBBASEDIR/lib/toaster/orm/fixtures/custom_toaster_append.sh
    # Quote the path (BBBASEDIR may contain spaces) and pass arguments
    # with "$@" so word boundaries are preserved (unquoted $* re-splits).
    if [ -f "$custom_extension" ] ; then
        "$custom_extension" "$@"
    fi
}
27
# Run pending Django migrations and validate Toaster settings.
# Returns non-zero (printing a diagnostic) if either step fails.
databaseCheck()
{
    retval=0
    # you can always add a superuser later via
    # ../bitbake/lib/toaster/manage.py createsuperuser --username=<ME>
    $MANAGE migrate --noinput || retval=1

    if [ $retval -eq 1 ]; then
        echo "Failed migrations, halting system start" 1>&2
        return $retval
    fi
    # Make sure that checksettings can pick up any value for TEMPLATECONF
    export TEMPLATECONF
    $MANAGE checksettings --traceback || retval=1

    if [ $retval -eq 1 ]; then
        printf "\nError while checking settings; exiting\n"
        return $retval
    fi

    return $retval
}
50
# Stop the Toaster webserver and runbuilds poller: for each recorded pid
# file, send SIGTERM until the process is gone, then remove the file.
webserverKillAll()
{
    local pidfile
    # Choose the matching stop hook based on whether the webserver ran.
    if [ -f ${BUILDDIR}/.toastermain.pid ] ; then
        custom_extention web_stop_postpend
    else
        custom_extention noweb_stop_postpend
    fi
    for pidfile in ${BUILDDIR}/.toastermain.pid ${BUILDDIR}/.runbuilds.pid; do
        if [ -f ${pidfile} ]; then
            pid=`cat ${pidfile}`
            # kill -0 only probes liveness; loop until the pid is gone.
            while kill -0 $pid 2>/dev/null; do
                kill -SIGTERM $pid 2>/dev/null
                sleep 1
            done
            rm ${pidfile}
        fi
    done
}
70
# Migrate/check the database and start the Django development webserver,
# recording its pid in ${BUILDDIR}/.toastermain.pid.  Returns non-zero if
# the server failed to come up.
webserverStartAll()
{
    # do not start if toastermain points to a valid process
    if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
        retval=1
        rm "${BUILDDIR}/.toastermain.pid"
    fi

    # NOTE(review): retval is unconditionally reset to 0 here, so the
    # retval=1 above only has the effect of removing a stale pid file --
    # confirm whether an early return was intended.
    retval=0

    # check the database
    databaseCheck || return 1

    echo "Starting webserver..."

    $MANAGE runserver --noreload "$ADDR_PORT" \
           </dev/null >>${TOASTER_LOGS_DIR}/web.log 2>&1 \
           & echo $! >${BUILDDIR}/.toastermain.pid

    sleep 1

    # Verify the recorded pid is actually alive after startup.
    if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
        retval=1
        rm "${BUILDDIR}/.toastermain.pid"
    else
        echo "Toaster development webserver started at http://$ADDR_PORT"
        echo -e "\nYou can now run 'bitbake <target>' on the command line and monitor your build in Toaster.\nYou can also use a Toaster project to configure and run a build.\n"
        custom_extention web_start_postpend $ADDR_PORT
    fi

    return $retval
}
103
# Reentrancy guard for stop_system (it can fire from the SIGHUP trap).
INSTOPSYSTEM=0

# define the stop command
stop_system()
{
    # prevent reentry
    if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
    INSTOPSYSTEM=1
    webserverKillAll
    # unset exported variables
    unset TOASTER_DIR
    unset BITBAKE_UI
    unset BBBASEDIR
    trap - SIGHUP
    #trap - SIGCHLD
    INSTOPSYSTEM=0
}
121
# Check the installed Django against the range pinned in
# toaster-requirements.txt.  Builds a small Python program via sed from the
# "Django>=X,<Y" requirement line and runs it; returns 2 with installation
# advice if the version check fails.
verify_prereq() {
    # Verify Django version
    reqfile=$(python3 -c "import os; print(os.path.realpath('$BBBASEDIR/toaster-requirements.txt'))")
    exp='s/Django\([><=]\+\)\([^,]\+\),\([><=]\+\)\(.\+\)/'
    # expand version parts to 2 digits to support 1.10.x > 1.8
    # (note:helper functions hard to insert in-line)
    exp=$exp'import sys,django;'
    exp=$exp'version=["%02d" % int(n) for n in django.get_version().split(".")];'
    exp=$exp'vmin=["%02d" % int(n) for n in "\2".split(".")];'
    exp=$exp'vmax=["%02d" % int(n) for n in "\4".split(".")];'
    exp=$exp'sys.exit(not (version \1 vmin and version \3 vmax))'
    exp=$exp'/p'
    if ! sed -n "$exp" $reqfile | python3 - ; then
        req=`grep ^Django $reqfile`
        echo "This program needs $req"
        echo "Please install with pip3 install -r $reqfile"
        return 2
    fi

    return 0
}
143
# read command line parameters
# Determine the path of this script, whether sourced from bash, zsh,
# or (erroneously) executed directly.
if [ -n "$BASH_SOURCE" ] ; then
    TOASTER=${BASH_SOURCE}
elif [ -n "$ZSH_NAME" ] ; then
    TOASTER=${(%):-%x}
else
    TOASTER=$0
fi

export BBBASEDIR=`dirname $TOASTER`/..
MANAGE="python3 $BBBASEDIR/lib/toaster/manage.py"
if [ -z "$OE_ROOT" ]; then
    OE_ROOT=`dirname $TOASTER`/../..
fi

# this is the configuration file we are using for toaster
# we are using the same logic that oe-setup-builddir uses
# (based on TEMPLATECONF and .templateconf) to determine
# which toasterconf.json to use.
# note: There are a number of relative path assumptions
# in the local layers that currently make using an arbitrary
# toasterconf.json difficult.

. $OE_ROOT/.templateconf
if [ -n "$TEMPLATECONF" ]; then
    if [ ! -d "$TEMPLATECONF" ]; then
        # Allow TEMPLATECONF=meta-xyz/conf as a shortcut
        if [ -d "$OE_ROOT/$TEMPLATECONF" ]; then
            TEMPLATECONF="$OE_ROOT/$TEMPLATECONF"
        fi
    fi
fi

unset OE_ROOT


# Defaults: run both the webserver and the build server on localhost:8000,
# with TOASTER_DIR alongside the build directory.
WEBSERVER=1
export TOASTER_BUILDSERVER=1
ADDR_PORT="localhost:8000"
TOASTERDIR=`dirname $BUILDDIR`
# ${BUILDDIR}/toaster_logs/ became the default location for toaster logs
# This is needed for implemented django-log-viewer: https://pypi.org/project/django-log-viewer/
# If the directory does not exist, create it.
TOASTER_LOGS_DIR="${BUILDDIR}/toaster_logs/"
if [ ! -d $TOASTER_LOGS_DIR ]
then
    mkdir $TOASTER_LOGS_DIR
fi
# Parse the parameters into CMD and the option variables; any other word
# is only legal after "manage" (it is appended to the manage sub-command).
unset CMD
for param in $*; do
    case $param in
    noweb )
            WEBSERVER=0
            ;;
    nobuild )
            TOASTER_BUILDSERVER=0
            ;;
    start )
            CMD=$param
            ;;
    stop )
            CMD=$param
            ;;
    webport=*)
            ADDR_PORT="${param#*=}"
            # Split the addr:port string
            ADDR=`echo $ADDR_PORT | cut -f 1 -d ':'`
            PORT=`echo $ADDR_PORT | cut -f 2 -d ':'`
            # If only a port has been specified then set address to localhost.
            if [ $ADDR = $PORT ] ; then
                ADDR_PORT="localhost:$PORT"
            fi
            ;;
    toasterdir=*)
            TOASTERDIR="${param#*=}"
            ;;
    manage )
            CMD=$param
            manage_cmd=""
            ;;
    --help)
            echo "$HELP"
            return 0
            ;;
    *)
            if [ "manage" == "$CMD" ] ; then
                manage_cmd="$manage_cmd $param"
            else
                echo "$HELP"
                exit 1
            fi
            ;;

    esac
done
239
# Refuse to run unless sourced: the exports below must land in the
# caller's shell environment.
if [ `basename \"$0\"` = `basename \"${TOASTER}\"` ]; then
    echo "Error: This script needs to be sourced. Please run as . $TOASTER"
    return 1
fi

verify_prereq || return 1

# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] ||  ! which bitbake >/dev/null 2>&1 ; then
    echo "Error: Build environment is not setup or bitbake is not in path." 1>&2
    return 2
fi

# this defines the dir toaster will use for
# 1) clones of layers (in _toaster_clones )
# 2) the build dir (in build)
# 3) the sqlite db if that is being used.
# 4) pid's we need to clean up on exit/shutdown
export TOASTER_DIR=$TOASTERDIR
export BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS TOASTER_DIR"

# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "$CMD" = "start" ] ; then
    if [ -n "$BBSERVER" ]; then
        echo " Toaster is already running. Exiting..."
        return 1
fi
elif [ "$CMD" = "" ]; then
    echo "No command specified"
    echo "$HELP"
    return 1
fi

echo "The system will $CMD."

# Execute the commands
custom_extention toaster_prepend $CMD $ADDR_PORT

case $CMD in
    start )
        # check if addr:port is not in use
        if [ "$CMD" == 'start' ]; then
            if [ $WEBSERVER -gt 0 ]; then
                $MANAGE checksocket "$ADDR_PORT" || return 1
            fi
        fi

        # Create configuration file
        conf=${BUILDDIR}/conf/local.conf
        line='INHERIT+="toaster buildhistory"'
        grep -q "$line" $conf || echo $line >> $conf

        if [ $WEBSERVER -eq 0 ] ; then
            # Do not update the database for "noweb" unless
            # it does not yet exist
            if [ ! -f "$TOASTER_DIR/toaster.sqlite" ] ; then
                if ! databaseCheck; then
                    echo "Failed ${CMD}."
                    return 4
                fi
            fi
            custom_extention noweb_start_postpend $ADDR_PORT
        fi
        if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
            echo "Failed ${CMD}."
            return 4
        fi
        export BITBAKE_UI='toasterui'
        # Optionally start the managed-build poller in the background,
        # recording its pid for later cleanup.
        if [ $TOASTER_BUILDSERVER -eq 1 ] ; then
            $MANAGE runbuilds \
                </dev/null >>${TOASTER_LOGS_DIR}/toaster_runbuilds.log 2>&1 \
                & echo $! >${BUILDDIR}/.runbuilds.pid
        else
            echo "Toaster build server not started."
        fi

        # set fail safe stop system on terminal exit
        trap stop_system SIGHUP
        echo "Successful ${CMD}."
        custom_extention toaster_postpend $CMD $ADDR_PORT
        return 0
        ;;
    stop )
        stop_system
        echo "Successful ${CMD}."
        ;;
    manage )
        cd $BBBASEDIR/lib/toaster
        $MANAGE $manage_cmd
        ;;
esac
custom_extention toaster_postpend $CMD $ADDR_PORT
diff --git a/bitbake/bin/toaster-eventreplay b/bitbake/bin/toaster-eventreplay
deleted file mode 100755
index 74a319320e..0000000000
--- a/bitbake/bin/toaster-eventreplay
+++ /dev/null
@@ -1,59 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright (C) 2014 Alex Damian
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7# This file re-uses code spread throughout other Bitbake source files.
8# As such, all other copyrights belong to their own right holders.
9#
10
11"""
12This command takes a filename as a single parameter. The filename is read
13as a build eventlog, and the ToasterUI is used to process events in the file
14and log data in the database
15"""
16
17import os
18import sys
19import json
20import pickle
21import codecs
22import warnings
23warnings.simplefilter("default")
24
25from collections import namedtuple
26
27# mangle syspath to allow easy import of modules
28from os.path import join, dirname, abspath
29sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
30
31import bb.cooker
32from bb.ui import toasterui
33from bb.ui import eventreplay
34
def main(argv):
    """Replay the event log named by the last CLI argument through ToasterUI."""
    logfile = argv[-1]
    with open(logfile) as eventfile:
        # Scan leading non-blank lines for the JSON record carrying the
        # build's variable dump; stop at the first blank line or EOF.
        variables = None
        line = eventfile.readline().strip()
        while line:
            try:
                variables = json.loads(line)['allvariables']
                break
            except (KeyError, json.JSONDecodeError):
                pass
            line = eventfile.readline().strip()
        if not variables:
            sys.exit("Cannot find allvariables entry in event log file %s" % logfile)
        # Rewind so the player replays the log from the very beginning.
        eventfile.seek(0)
        params = namedtuple('ConfigParams', ['observe_only'])(True)
        player = eventreplay.EventPlayer(eventfile, variables)

        return toasterui.main(player, player, params)
52
# run toaster ui on our mock bitbake class
if __name__ == "__main__":
    # Exactly one argument is expected: the event log file to replay.
    if len(sys.argv) != 2:
        print("Usage: %s <event file>" % os.path.basename(sys.argv[0]))
        sys.exit(1)

    sys.exit(main(sys.argv))