Diffstat (limited to 'meta/classes')

-rw-r--r--  meta/classes/icecc.bbclass    | 443
-rw-r--r--  meta/classes/multilib.bbclass |   1
2 files changed, 1 insertion, 443 deletions

diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
deleted file mode 100644
index 8a48f2ad63..0000000000
--- a/meta/classes/icecc.bbclass
+++ /dev/null
@@ -1,443 +0,0 @@
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

# Icecream distributed compiling support
#
# Stages directories with symlinks from gcc/g++ to icecc, for both
# native and cross compilers. Depending on each configure or compile,
# the directories are added at the head of the PATH list and ICECC_CXX
# and ICECC_CC are set.
#
# For the cross compiler, creates a tar.gz of our toolchain and sets
# ICECC_VERSION accordingly.
#
# The class handles all three compile 'stages' (i.e. native, cross-kernel and target), creating the
# necessary environment tar.gz file to be used by the remote machines.
# It also supports meta-toolchain generation.
#
# If ICECC_PATH is not set in local.conf, the class will try to locate icecc using 'bb.utils.which',
# but this is not guaranteed to succeed.
#
# If ICECC_ENV_EXEC is set in local.conf, it should point to the icecc-create-env script provided by the user;
# otherwise the default one provided by icecc-create-env_0.1.bb is used.
# (NOTE that this is a modified version of the script and *not the one that comes with icecream*).
#
# Users can specify that specific recipes, or recipes inheriting specific classes, should not use icecc to
# distribute compile jobs to remote machines but handle them locally, by defining ICECC_RECIPE_DISABLE and
# ICECC_CLASS_DISABLE with the appropriate values in local.conf. In addition, users can force icecc to be
# enabled for recipes which set an empty PARALLEL_MAKE variable by defining ICECC_RECIPE_ENABLE.
#
#########################################################################################
# Error checking is kept to a minimum, so double-check any parameters you pass to the class
#########################################################################################
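#
# For illustration, a typical opt-in from local.conf might look like the
# following; the path, script location and job count are placeholders, not
# recommendations:
#   INHERIT += "icecc"
#   ICECC_PARALLEL_MAKE = "-j 32"
#   ICECC_PATH = "/usr/bin/icecc"
#   ICECC_ENV_EXEC = "/path/to/custom/icecc-create-env"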

BB_BASEHASH_IGNORE_VARS += "ICECC_PARALLEL_MAKE ICECC_DISABLED ICECC_RECIPE_DISABLE \
    ICECC_CLASS_DISABLE ICECC_RECIPE_ENABLE ICECC_PATH ICECC_ENV_EXEC \
    ICECC_CARET_WORKAROUND ICECC_CFLAGS ICECC_ENV_VERSION \
    ICECC_DEBUG ICECC_LOGFILE ICECC_REPEAT_RATE ICECC_PREFERRED_HOST \
    ICECC_CLANG_REMOTE_CPP ICECC_IGNORE_UNVERIFIED ICECC_TEST_SOCKET \
    ICECC_ENV_DEBUG ICECC_REMOTE_CPP \
    "

ICECC_ENV_EXEC ?= "${STAGING_BINDIR_NATIVE}/icecc-create-env"

HOSTTOOLS_NONFATAL += "icecc patchelf"

# This version can be incremented when changes are made to the environment that
# invalidate the version on the compile nodes. Changing it will cause a new
# environment to be created.
#
# A useful thing to do for testing icecream changes locally is to add a
# version suffix in local.conf:
#   ICECC_ENV_VERSION:append = "-my-ver-1"
ICECC_ENV_VERSION = "2"

# Default to disabling the caret workaround. If set to "1" in local.conf, icecc
# will locally recompile any files that have warnings, which can adversely
# affect performance.
#
# See: https://github.com/icecc/icecream/issues/190
export ICECC_CARET_WORKAROUND ??= "0"
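# For example, to re-enable the workaround despite the performance cost, set
# the following in local.conf:
#   ICECC_CARET_WORKAROUND = "1"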

export ICECC_REMOTE_CPP ??= "0"

ICECC_CFLAGS = ""
CFLAGS += "${ICECC_CFLAGS}"
CXXFLAGS += "${ICECC_CFLAGS}"

# Debug flags when generating environments
ICECC_ENV_DEBUG ??= ""

# The recipe disable list contains recipes that cannot distribute compile
# tasks for one reason or another. When adding a new entry, please document
# why (how it failed) so that we can re-evaluate it later, e.g. when there
# is a new version.
#
# libgcc-initial - fails with CPP sanity check error if host sysroot contains
#                  cross gcc built for another target tune/variant.
# pixman - prng_state: TLS reference mismatches non-TLS reference, possibly due to
#          pragma omp threadprivate(prng_state).
# systemtap - _HelperSDT.c undefs macros and uses the identifiers in macros emitting
#             inline assembly.
# target-sdk-provides-dummy - ${HOST_PREFIX} is empty which triggers the "NULL
#                             prefix" error.
ICECC_RECIPE_DISABLE += "\
    libgcc-initial \
    pixman \
    systemtap \
    target-sdk-provides-dummy \
"

# Classes that should not use icecc. When adding a new entry, please
# document why (how it failed) so that we can re-evaluate it later.
#
# image - image recipes don't compile anything themselves, but the testing
#         framework for images captures PARALLEL_MAKE as part of the test
#         environment. Many tests won't use icecream, but leaving the high
#         level of parallelism in place can cause them to consume an
#         unnecessary amount of resources.
ICECC_CLASS_DISABLE += "\
    image \
"
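
# For illustration, these lists can be extended from local.conf; the recipe and
# class names below are placeholders:
#   ICECC_RECIPE_DISABLE:append = " some-fragile-recipe"
#   ICECC_CLASS_DISABLE:append = " some-bbclass"
#   # Force icecc even for a recipe that sets an empty PARALLEL_MAKE
#   ICECC_RECIPE_ENABLE:append = " some-serial-recipe"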

def get_icecc_dep(d):
    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not d.getVar('INHIBIT_DEFAULT_DEPS'):
        return "icecc-create-env-native"
    return ""

DEPENDS:prepend = "${@get_icecc_dep(d)} "

get_cross_kernel_cc[vardepsexclude] += "KERNEL_CC"
def get_cross_kernel_cc(bb,d):
    if not icecc_is_kernel(bb, d):
        return None

    # evaluate the expression by the shell if necessary
    kernel_cc = d.getVar('KERNEL_CC')
    if '`' in kernel_cc or '$(' in kernel_cc:
        import subprocess
        kernel_cc = subprocess.check_output("echo %s" % kernel_cc, shell=True).decode("utf-8")[:-1]

    kernel_cc = kernel_cc.replace('ccache', '').strip()
    kernel_cc = kernel_cc.split(' ')[0]
    kernel_cc = kernel_cc.strip()
    return kernel_cc

def get_icecc(d):
    return d.getVar('ICECC_PATH') or bb.utils.which(os.getenv("PATH"), "icecc")

def use_icecc(bb,d):
    if d.getVar('ICECC_DISABLED') == "1":
        # don't even try it, when explicitly disabled
        return "no"

    # allarch recipes don't use compiler
    if icecc_is_allarch(bb, d):
        return "no"

    if icecc_is_cross_canadian(bb, d):
        return "no"

    pn = d.getVar('PN')
    bpn = d.getVar('BPN')

    # Enable/disable checks are made against BPN, because there is a good
    # chance that if icecc should be skipped for a recipe, it should be skipped
    # for all the variants of that recipe. PN is still checked in case a user
    # specified a more specific recipe.
    check_pn = set([pn, bpn])

    class_disable = (d.getVar('ICECC_CLASS_DISABLE') or "").split()

    for bbclass in class_disable:
        if bb.data.inherits_class(bbclass, d):
            bb.debug(1, "%s: bbclass %s found in disable, disable icecc" % (pn, bbclass))
            return "no"

    disabled_recipes = (d.getVar('ICECC_RECIPE_DISABLE') or "").split()
    enabled_recipes = (d.getVar('ICECC_RECIPE_ENABLE') or "").split()

    if check_pn & set(disabled_recipes):
        bb.debug(1, "%s: found in disable list, disable icecc" % pn)
        return "no"

    if check_pn & set(enabled_recipes):
        bb.debug(1, "%s: found in enabled recipes list, enable icecc" % pn)
        return "yes"

    if d.getVar('PARALLEL_MAKE') == "":
        bb.debug(1, "%s: has empty PARALLEL_MAKE, disable icecc" % pn)
        return "no"

    return "yes"

def icecc_is_allarch(bb, d):
    return d.getVar("PACKAGE_ARCH") == "all"

def icecc_is_kernel(bb, d):
    return \
        bb.data.inherits_class("kernel", d)

def icecc_is_native(bb, d):
    return \
        bb.data.inherits_class("cross", d) or \
        bb.data.inherits_class("native", d)

def icecc_is_cross_canadian(bb, d):
    return bb.data.inherits_class("cross-canadian", d)

def icecc_dir(bb, d):
    return d.expand('${TMPDIR}/work-shared/ice')

# Don't pollute allarch signatures with TARGET_FPU
icecc_version[vardepsexclude] += "TARGET_FPU"
def icecc_version(bb, d):
    if use_icecc(bb, d) == "no":
        return ""

    parallel = d.getVar('ICECC_PARALLEL_MAKE') or ""
    if not d.getVar('PARALLEL_MAKE') == "" and parallel:
        d.setVar("PARALLEL_MAKE", parallel)

    # Disable showing the caret in the GCC compiler output if the workaround is
    # disabled
    if d.getVar('ICECC_CARET_WORKAROUND') == '0':
        d.setVar('ICECC_CFLAGS', '-fno-diagnostics-show-caret')

    if icecc_is_native(bb, d):
        archive_name = "local-host-env"
    elif d.expand('${HOST_PREFIX}') == "":
        bb.fatal(d.expand("${PN}"), " NULL prefix")
    else:
        prefix = d.expand('${HOST_PREFIX}')
        distro = d.expand('${DISTRO}')
        target_sys = d.expand('${TARGET_SYS}')
        float = d.getVar('TARGET_FPU') or "hard"
        archive_name = prefix + distro + "-" + target_sys + "-" + float
        if icecc_is_kernel(bb, d):
            archive_name += "-kernel"

    import socket
    ice_dir = icecc_dir(bb, d)
    tar_file = os.path.join(ice_dir, "{archive}-{version}-@VERSION@-{hostname}.tar.gz".format(
        archive=archive_name,
        version=d.getVar('ICECC_ENV_VERSION'),
        hostname=socket.gethostname()
    ))
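    # As an illustration (arch, tune and hostname are placeholders), for an arm
    # poky target this typically expands to something like:
    #   ${TMPDIR}/work-shared/ice/arm-poky-linux-gnueabi-poky-arm-poky-linux-gnueabi-hard-2-@VERSION@-<hostname>.tar.gz
    # "@VERSION@" is substituted with the compiler version later, in set_icecc_env().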

    return tar_file

def icecc_path(bb,d):
    if use_icecc(bb, d) == "no":
        # don't create unnecessary directories when icecc is disabled
        return

    staging = os.path.join(d.expand('${STAGING_BINDIR}'), "ice")
    if icecc_is_kernel(bb, d):
        staging += "-kernel"

    return staging

def icecc_get_external_tool(bb, d, tool):
    external_toolchain_bindir = d.expand('${EXTERNAL_TOOLCHAIN}${bindir_cross}')
    target_prefix = d.expand('${TARGET_PREFIX}')
    return os.path.join(external_toolchain_bindir, '%s%s' % (target_prefix, tool))

def icecc_get_tool_link(tool, d):
    import subprocess
    try:
        return subprocess.check_output("readlink -f %s" % tool, shell=True).decode("utf-8")[:-1]
    except subprocess.CalledProcessError as e:
        bb.note("icecc: one of the tools probably disappeared during recipe parsing, cmd readlink -f %s returned %d:\n%s" % (tool, e.returncode, e.output.decode("utf-8")))
        return tool

def icecc_get_path_tool(tool, d):
    # This is a little ugly, but we want to make sure we add an actual
    # compiler to the toolchain, not ccache. Some distros (e.g. Fedora)
    # have ccache enabled by default using symlinks in PATH, meaning ccache
    # would be found first when looking for the compiler.
    paths = os.getenv("PATH").split(':')
    while True:
        p, hist = bb.utils.which(':'.join(paths), tool, history=True)
        if not p or os.path.basename(icecc_get_tool_link(p, d)) != 'ccache':
            return p
        paths = paths[len(hist):]

    return ""

# Don't pollute native signatures with target TUNE_PKGARCH through STAGING_BINDIR_TOOLCHAIN
icecc_get_tool[vardepsexclude] += "STAGING_BINDIR_TOOLCHAIN"
def icecc_get_tool(bb, d, tool):
    if icecc_is_native(bb, d):
        return icecc_get_path_tool(tool, d)
    elif icecc_is_kernel(bb, d):
        return icecc_get_path_tool(get_cross_kernel_cc(bb, d), d)
    else:
        ice_dir = d.expand('${STAGING_BINDIR_TOOLCHAIN}')
        target_sys = d.expand('${TARGET_SYS}')
        for p in ice_dir.split(':'):
            tool_bin = os.path.join(p, "%s-%s" % (target_sys, tool))
            if os.path.isfile(tool_bin):
                return tool_bin
        external_tool_bin = icecc_get_external_tool(bb, d, tool)
        if os.path.isfile(external_tool_bin):
            return external_tool_bin
        return ""

def icecc_get_and_check_tool(bb, d, tool):
    # Check that g++ or gcc is not a symbolic link to the icecc binary in
    # PATH; otherwise the icecc-create-env script will silently create an
    # invalid compiler environment package.
    t = icecc_get_tool(bb, d, tool)
    if t:
        link_path = icecc_get_tool_link(t, d)
        if link_path == get_icecc(d):
            bb.error("%s is a symlink to %s in PATH and this prevents icecc from working" % (t, link_path))
            return ""
        else:
            return t
    else:
        return t

set_icecc_env[vardepsexclude] += "KERNEL_CC"
python set_icecc_env() {
    import os
    import subprocess

    if use_icecc(bb, d) == "no":
        return
    ICECC_VERSION = icecc_version(bb, d)
    if not ICECC_VERSION:
        bb.warn("Cannot use icecc: could not get ICECC_VERSION")
        return

    ICE_PATH = icecc_path(bb, d)
    if not ICE_PATH:
        bb.warn("Cannot use icecc: could not get ICE_PATH")
        return

    ICECC_BIN = get_icecc(d)
    if not ICECC_BIN:
        bb.warn("Cannot use icecc: icecc binary not found")
        return

    if (not bb.utils.which(os.getenv("PATH"), "patchelf") and
            not bb.utils.which(os.getenv("PATH"), "patchelf-uninative")):
        bb.warn("Cannot use icecc: patchelf not found")
        return

    ICECC_CC = icecc_get_and_check_tool(bb, d, "gcc")
    ICECC_CXX = icecc_get_and_check_tool(bb, d, "g++")
    # cannot use icecc_get_and_check_tool here because we want 'as' without the target_sys prefix
    ICECC_WHICH_AS = bb.utils.which(os.getenv('PATH'), 'as')
    if (not os.access(ICECC_CC, os.X_OK) or
            not os.access(ICECC_CXX, os.X_OK)):
        bb.note("Cannot use icecc: could not get ICECC_CC or ICECC_CXX")
        return

    cmd = []
    try:
        cmd = [ICECC_CC, '-dumpversion']
        ICE_VERSION = subprocess.check_output(cmd).decode("utf-8").strip()
    except subprocess.CalledProcessError as e:
        bb.warn("icecc: '{}' returned {}:\n{}".format(cmd, e.returncode, e.output.decode("utf-8")))
        return

    ICECC_VERSION = ICECC_VERSION.replace("@VERSION@", ICE_VERSION)

    if not os.access(d.getVar('ICECC_ENV_EXEC'), os.X_OK):
        bb.warn("Cannot use icecc: invalid ICECC_ENV_EXEC")
        return

    # Create symlinks to icecc and wrapper scripts in the recipe-sysroot directory
    symlink_path = os.path.join(ICE_PATH, "symlinks")
    bb.utils.mkdirhier(symlink_path)
    compilers = []
    if icecc_is_kernel(bb, d):
        compilers.append(get_cross_kernel_cc(bb,d))
    else:
        host_prefix = d.getVar('HOST_PREFIX')
        compilers.extend([host_prefix + 'gcc', host_prefix + 'g++'])

    for compiler in compilers:
        try:
            os.symlink(ICECC_BIN, symlink_path + '/' + compiler)
        except FileExistsError:
            pass
        wrapper_script = os.path.join(ICE_PATH, compiler)
        with open(wrapper_script, 'w') as fd:
            fd.write("#!/bin/sh -e\n")
            fd.write("export ICECC_VERSION={}\n".format(ICECC_VERSION))
            fd.write("export ICECC_CC={}\n".format(ICECC_CC))
            fd.write("export ICECC_CXX={}\n".format(ICECC_CXX))
            fd.write("{} \"$@\"\n".format(os.path.join(ICE_PATH, "symlinks", compiler)))
        os.chmod(wrapper_script, 0o755)
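
    # For reference, each generated wrapper is a small shell script of the form
    # (values are illustrative placeholders):
    #   #!/bin/sh -e
    #   export ICECC_VERSION=<tarball path computed by icecc_version()>
    #   export ICECC_CC=<path to the real cross gcc>
    #   export ICECC_CXX=<path to the real cross g++>
    #   <ICE_PATH>/symlinks/<compiler> "$@"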

    try:
        cmd = [ICECC_CC, '-print-prog-name=as']
        ICECC_AS = subprocess.check_output(cmd).decode("utf-8").strip()
    except subprocess.CalledProcessError as e:
        bb.warn("icecc: '{}' returned {}:\n{}".format(cmd, e.returncode, e.output.decode("utf-8")))
        return
    # For target recipes this should return something like:
    # /OE/tmp-eglibc/sysroots/x86_64-linux/usr/libexec/arm920tt-oe-linux-gnueabi/gcc/arm-oe-linux-gnueabi/4.8.2/as
    # and just "as" for native recipes. If it returns "as" relative to the current
    # directory (for whatever reason), use "as" from PATH instead.
    if not os.path.dirname(ICECC_AS):
        ICECC_AS = ICECC_WHICH_AS

    if not os.path.isfile(ICECC_VERSION + ".done"):
        bb.utils.mkdirhier(os.path.dirname(ICECC_VERSION))

        # the ICECC_VERSION generation step must be locked by a mutex
        # in order to prevent race conditions
        lock = bb.utils.lockfile(ICECC_VERSION + '.lock')
        try:
            cmd = [d.getVar('ICECC_ENV_EXEC')]
            if d.getVar('ICECC_ENV_DEBUG'):
                cmd.append(d.getVar('ICECC_ENV_DEBUG'))
            cmd.extend([ICECC_CC, ICECC_CXX, ICECC_AS, ICECC_VERSION])
            subprocess.check_output(cmd)
            cmd = ['touch', ICECC_VERSION + '.done']
            subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            bb.warn("icecc: '{}' returned {}:\n{}".format(cmd, e.returncode, e.output.decode("utf-8")))
            bb.utils.unlockfile(lock)
            return
        bb.utils.unlockfile(lock)

    # Don't let ccache find the icecream compiler links that have been created, otherwise
    # it can end up invoking icecream recursively.
    d.setVar('CCACHE_PATH', d.getVar('PATH'))
    d.setVar('CCACHE_DISABLE', '1')

    d.prependVar('PATH', ICE_PATH + ':')

    bb.note("Using icecc path: {}".format(ICE_PATH))
    bb.note("Using icecc tarball: {}".format(ICECC_VERSION))
}

do_configure[prefuncs] += "set_icecc_env"
do_compile[prefuncs] += "set_icecc_env"
do_compile_kernelmodules[prefuncs] += "set_icecc_env"
do_install[prefuncs] += "set_icecc_env"

# Icecream is not (currently) supported in the extensible SDK
ICECC_SDK_HOST_TASK = "nativesdk-icecc-toolchain"
ICECC_SDK_HOST_TASK:task-populate-sdk-ext = ""

# Don't include icecream in uninative tarball
ICECC_SDK_HOST_TASK:pn-uninative-tarball = ""

# Add the toolchain scripts to the SDK
TOOLCHAIN_HOST_TASK:append = " ${ICECC_SDK_HOST_TASK}"

python () {
    if d.getVar('ICECC_DISABLED') != "1":
        for task in ['do_configure', 'do_compile', 'do_compile_kernelmodules', 'do_install']:
            d.setVarFlag(task, 'network', '1')
}
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index a4151658a6..b959bbd93c 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -21,6 +21,7 @@ python multilib_virtclass_handler () {
     bpn = d.getVar("BPN")
     if ("virtual/kernel" in provides
             or bb.data.inherits_class('module-base', d)
+            or bb.data.inherits_class('kernel-fit-image', d)
             or bpn in non_ml_recipes):
         raise bb.parse.SkipRecipe("We shouldn't have multilib variants for %s" % bpn)
 