Diffstat (limited to 'meta/classes/create-spdx-3.0.bbclass')
-rw-r--r--  meta/classes/create-spdx-3.0.bbclass | 874
1 file changed, 9 insertions(+), 865 deletions(-)
diff --git a/meta/classes/create-spdx-3.0.bbclass b/meta/classes/create-spdx-3.0.bbclass
index a930ea8115..41840d9d1a 100644
--- a/meta/classes/create-spdx-3.0.bbclass
+++ b/meta/classes/create-spdx-3.0.bbclass
@@ -116,698 +116,15 @@ SPDX_PACKAGE_SUPPLIER[doc] = "The base variable name to describe the Agent who \
116 116
117 117 IMAGE_CLASSES:append = " create-spdx-image-3.0"
118 118
119 def set_timestamp_now(d, o, prop):
120     from datetime import datetime, timezone
+ 119 oe.spdx30_tasks.set_timestamp_now[vardepsexclude] = "SPDX_INCLUDE_TIMESTAMPS"
+ 120 oe.spdx30_tasks.get_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR"
+ 121 oe.spdx30_tasks.collect_dep_objsets[vardepsexclude] = "SSTATE_ARCHS"
121 122
122 if d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1":
123 setattr(o, prop, datetime.now(timezone.utc))
124 else:
125 # Doing this helps to validated that the property actually exists, and
126 # also that it is not mandatory
127 delattr(o, prop)
128
129set_timestamp_now[vardepsexclude] = "SPDX_INCLUDE_TIMESTAMPS"
130
131def add_license_expression(d, objset, license_expression):
132 from pathlib import Path
133 import oe.spdx30
134 import oe.sbom30
135
136 license_data = d.getVar("SPDX_LICENSE_DATA")
137 simple_license_text = {}
138 license_text_map = {}
139 license_ref_idx = 0
140
141 def add_license_text(name):
142 nonlocal objset
143 nonlocal simple_license_text
144
145 if name in simple_license_text:
146 return simple_license_text[name]
147
148 lic = objset.find_filter(
149 oe.spdx30.simplelicensing_SimpleLicensingText,
150 name=name,
151 )
152
153 if lic is not None:
154 simple_license_text[name] = lic
155 return lic
156
157 lic = objset.add(oe.spdx30.simplelicensing_SimpleLicensingText(
158 _id=objset.new_spdxid("license-text", name),
159 creationInfo=objset.doc.creationInfo,
160 name=name,
161 ))
162 simple_license_text[name] = lic
163
164 if name == "PD":
165 lic.simplelicensing_licenseText = "Software released to the public domain"
166 return lic
167
168 # Seach for the license in COMMON_LICENSE_DIR and LICENSE_PATH
169 for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
170 try:
171 with (Path(directory) / name).open(errors="replace") as f:
172 lic.simplelicensing_licenseText = f.read()
173 return lic
174
175 except FileNotFoundError:
176 pass
177
178 # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
179 filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
180 if filename:
181 filename = d.expand("${S}/" + filename)
182 with open(filename, errors="replace") as f:
183 lic.simplelicensing_licenseText = f.read()
184 return lic
185 else:
186 bb.fatal("Cannot find any text for license %s" % name)
187
188 def convert(l):
189 nonlocal license_text_map
190 nonlocal license_ref_idx
191
192 if l == "(" or l == ")":
193 return l
194
195 if l == "&":
196 return "AND"
197
198 if l == "|":
199 return "OR"
200
201 if l == "CLOSED":
202 return "NONE"
203
204 spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
205 if spdx_license in license_data["licenses"]:
206 return spdx_license
207
208 spdx_license = "LicenseRef-" + l
209 if spdx_license not in license_text_map:
210 license_text_map[spdx_license] = add_license_text(l)._id
211
212 return spdx_license
213
214 lic_split = license_expression.replace("(", " ( ").replace(")", " ) ").replace("|", " | ").replace("&", " & ").split()
215 spdx_license_expression = ' '.join(convert(l) for l in lic_split)
216
217 return objset.new_license_expression(spdx_license_expression, license_text_map)
218
219
220def add_package_files(d, objset, topdir, get_spdxid, get_purposes, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
221 from pathlib import Path
222 import oe.spdx30
223 import oe.sbom30
224
225 source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
226 if source_date_epoch:
227 source_date_epoch = int(source_date_epoch)
228
229 spdx_files = set()
230
231 file_counter = 1
232 for subdir, dirs, files in os.walk(topdir):
233 dirs[:] = [d for d in dirs if d not in ignore_dirs]
234 if subdir == str(topdir):
235 dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
236
237 for file in files:
238 filepath = Path(subdir) / file
239 if filepath.is_symlink() or not filepath.is_file():
240 continue
241
242 bb.debug(1, "Adding file %s to %s" % (filepath, objset.doc._id))
243
244 filename = str(filepath.relative_to(topdir))
245 file_purposes = get_purposes(filepath)
246
247 spdx_file = objset.new_file(
248 get_spdxid(file_counter),
249 filename,
250 filepath,
251 purposes=file_purposes,
252 )
253 spdx_files.add(spdx_file)
254
255 if oe.spdx30.software_SoftwarePurpose.source in file_purposes:
256 objset.scan_declared_licenses(spdx_file, filepath)
257
258 if archive is not None:
259 with filepath.open("rb") as f:
260 info = archive.gettarinfo(fileobj=f)
261 info.name = filename
262 info.uid = 0
263 info.gid = 0
264 info.uname = "root"
265 info.gname = "root"
266
267 if source_date_epoch is not None and info.mtime > source_date_epoch:
268 info.mtime = source_date_epoch
269
270 archive.addfile(info, f)
271
272 file_counter += 1
273
274 return spdx_files
275
276
277def get_package_sources_from_debug(d, package, package_files, sources, source_hash_cache):
278 from pathlib import Path
279 import oe.packagedata
280
281 def file_path_match(file_path, pkg_file):
282 if file_path.lstrip("/") == pkg_file.name.lstrip("/"):
283 return True
284
285 for e in pkg_file.extension:
286 if isinstance(e, oe.sbom30.OEFileNameAliasExtension):
287 for a in e.aliases:
288 if file_path.lstrip("/") == a.lstrip("/"):
289 return True
290
291 return False
292
293 debug_search_paths = [
294 Path(d.getVar('PKGD')),
295 Path(d.getVar('STAGING_DIR_TARGET')),
296 Path(d.getVar('STAGING_DIR_NATIVE')),
297 Path(d.getVar('STAGING_KERNEL_DIR')),
298 ]
299
300 pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
301
302 if pkg_data is None:
303 return
304
305 dep_source_files = set()
306
307 for file_path, file_data in pkg_data["files_info"].items():
308 if not "debugsrc" in file_data:
309 continue
310
311 if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files):
312 bb.fatal("No package file found for %s in %s; SPDX found: %s" % (str(file_path), package,
313 " ".join(p.name for p in package_files)))
314 continue
315
316 for debugsrc in file_data["debugsrc"]:
317 for search in debug_search_paths:
318 if debugsrc.startswith("/usr/src/kernel"):
319 debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
320 else:
321 debugsrc_path = search / debugsrc.lstrip("/")
322
323 if debugsrc_path in source_hash_cache:
324 file_sha256 = source_hash_cache[debugsrc_path]
325 if file_sha256 is None:
326 continue
327 else:
328 if not debugsrc_path.exists():
329 source_hash_cache[debugsrc_path] = None
330 continue
331
332 file_sha256 = bb.utils.sha256_file(debugsrc_path)
333 source_hash_cache[debugsrc_path] = file_sha256
334
335 if file_sha256 in sources:
336 source_file = sources[file_sha256]
337 dep_source_files.add(source_file)
338 else:
339 bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
340 break
341 else:
342 bb.debug(1, "Debug source %s not found" % debugsrc)
343
344 return dep_source_files
345
346get_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR"
347
348def collect_dep_objsets(d, build):
349 import json
350 from pathlib import Path
351 import oe.sbom30
352 import oe.spdx30
353 import oe.spdx_common
354
355 deps = oe.spdx_common.get_spdx_deps(d)
356
357 dep_objsets = []
358 dep_builds = set()
359
360 dep_build_spdxids = set()
361 for dep in deps:
362 bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn))
363 dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld(d, "recipes", dep.pn, oe.spdx30.build_Build)
364 # If the dependency is part of the taskhash, return it to be linked
365 # against. Otherwise, it cannot be linked against because this recipe
366 # will not rebuilt if dependency changes
367 if dep.in_taskhash:
368 dep_objsets.append(dep_objset)
369
370 # The build _can_ be linked against (by alias)
371 dep_builds.add(dep_build)
372
373 return dep_objsets, dep_builds
374
375collect_dep_objsets[vardepsexclude] = "SSTATE_ARCHS"
376
377def collect_dep_sources(dep_objsets):
378 import oe.spdx30
379 import oe.sbom30
380
381 sources = {}
382 for objset in dep_objsets:
383 # Don't collect sources from native recipes as they
384 # match non-native sources also.
385 if objset.is_native():
386 continue
387
388 bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name))
389
390 dep_build = objset.find_root(oe.spdx30.build_Build)
391 if not dep_build:
392 bb.fatal("Unable to find a build")
393
394 for e in objset.foreach_type(oe.spdx30.Relationship):
395 if dep_build is not e.from_:
396 continue
397
398 if e.relationshipType != oe.spdx30.RelationshipType.hasInputs:
399 continue
400
401 for to in e.to:
402 if not isinstance(to, oe.spdx30.software_File):
403 continue
404
405 if to.software_primaryPurpose != oe.spdx30.software_SoftwarePurpose.source:
406 continue
407
408 for v in to.verifiedUsing:
409 if v.algorithm == oe.spdx30.HashAlgorithm.sha256:
410 sources[v.hashValue] = to
411 break
412 else:
413 bb.fatal("No SHA256 found for %s in %s" % (to.name, objset.doc.name))
414
415 return sources
416
417def add_download_files(d, objset):
418 import oe.patch
419 import oe.spdx30
420 import os
421
422 inputs = set()
423
424 urls = d.getVar("SRC_URI").split()
425 fetch = bb.fetch2.Fetch(urls, d)
426
427 for download_idx, src_uri in enumerate(urls):
428 fd = fetch.ud[src_uri]
429
430 for name in fd.names:
431 file_name = os.path.basename(fetch.localpath(src_uri))
432 if oe.patch.patch_path(src_uri, fetch, '', expand=False):
433 primary_purpose = oe.spdx30.software_SoftwarePurpose.patch
434 else:
435 primary_purpose = oe.spdx30.software_SoftwarePurpose.source
436
437 if fd.type == "file":
438 if os.path.isdir(fd.localpath):
439 walk_idx = 1
440 for root, dirs, files in os.walk(fd.localpath):
441 for f in files:
442 f_path = os.path.join(root, f)
443 if os.path.islink(f_path):
444 # TODO: SPDX doesn't support symlinks yet
445 continue
446
447 file = objset.new_file(
448 objset.new_spdxid("source", str(download_idx + 1), str(walk_idx)),
449 os.path.join(file_name, os.path.relpath(f_path, fd.localpath)),
450 f_path,
451 purposes=[primary_purpose],
452 )
453
454 inputs.add(file)
455 walk_idx += 1
456
457 else:
458 file = objset.new_file(
459 objset.new_spdxid("source", str(download_idx + 1)),
460 file_name,
461 fd.localpath,
462 purposes=[primary_purpose],
463 )
464 inputs.add(file)
465
466 else:
467 uri = fd.type
468 proto = getattr(fd, "proto", None)
469 if proto is not None:
470 uri = uri + "+" + proto
471 uri = uri + "://" + fd.host + fd.path
472
473 if fd.method.supports_srcrev():
474 uri = uri + "@" + fd.revisions[name]
475
476 dl = objset.add(oe.spdx30.software_Package(
477 _id=objset.new_spdxid("source", str(download_idx + 1)),
478 creationInfo=objset.doc.creationInfo,
479 name=file_name,
480 software_primaryPurpose=primary_purpose,
481 software_downloadLocation=uri,
482 ))
483
484 if fd.method.supports_checksum(fd):
485 # TODO Need something better than hard coding this
486 for checksum_id in ["sha256", "sha1"]:
487 expected_checksum = getattr(fd, "%s_expected" % checksum_id, None)
488 if expected_checksum is None:
489 continue
490
491 dl.verifiedUsing.append(
492 oe.spdx30.Hash(
493 algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id),
494 hashValue=expected_checksum,
495 )
496 )
497
498 inputs.add(dl)
499
500 return inputs
501
502
503def set_purposes(d, element, *var_names, force_purposes=[]):
504 purposes = force_purposes[:]
505
506 for var_name in var_names:
507 val = d.getVar(var_name)
508 if val:
509 purposes.extend(val.split())
510 break
511
512 if not purposes:
513 bb.warn("No SPDX purposes found in %s" % " ".join(var_names))
514 return
515
516 element.software_primaryPurpose = getattr(oe.spdx30.software_SoftwarePurpose, purposes[0])
517 element.software_additionalPurpose = [getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:]]
518     element.software_additionalPurpose = [getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:]]
519 124
520 125 python do_create_spdx() {
521     import oe.sbom30
522     import oe.spdx30
+ 126     import oe.spdx30_tasks
+ 127     oe.spdx30_tasks.create_spdx(d)
523 import oe.spdx_common
524 from pathlib import Path
525 from contextlib import contextmanager
526 import oe.cve_check
527 from datetime import datetime
528
529 def set_var_field(var, obj, name, package=None):
530 val = None
531 if package:
532 val = d.getVar("%s:%s" % (var, package))
533
534 if not val:
535 val = d.getVar(var)
536
537 if val:
538 setattr(obj, name, val)
539
540 deploydir = Path(d.getVar("SPDXDEPLOY"))
541 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
542 spdx_workdir = Path(d.getVar("SPDXWORK"))
543 include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
544 pkg_arch = d.getVar("SSTATE_PKGARCH")
545 is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)
546 include_vex = d.getVar("SPDX_INCLUDE_VEX")
547 if not include_vex in ("none", "current", "all"):
548 bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'")
549
550 build_objset = oe.sbom30.ObjectSet.new_objset(d, d.getVar("PN"))
551
552 build = build_objset.new_task_build("recipe", "recipe")
553 build_objset.doc.rootElement.append(build)
554
555 build_objset.set_is_native(is_native)
556
557 for var in (d.getVar('SPDX_CUSTOM_ANNOTATION_VARS') or "").split():
558 new_annotation(
559 d,
560 build_objset,
561 build,
562 "%s=%s" % (var, d.getVar(var)),
563 oe.spdx30.AnnotationType.other
564 )
565
566 build_inputs = set()
567
568 # Add CVEs
569 cve_by_status = {}
570 if include_vex != "none":
571 for cve in (d.getVarFlags("CVE_STATUS") or {}):
572 status, detail, description = oe.cve_check.decode_cve_status(d, cve)
573
574 # If this CVE is fixed upstream, skip it unless all CVEs are
575 # specified.
576 if include_vex != "all" and detail in ("fixed-version", "cpe-stable-backport"):
577 bb.debug(1, "Skipping %s since it is already fixed upstream" % cve)
578 continue
579
580 cve_by_status.setdefault(status, {})[cve] = (
581 build_objset.new_cve_vuln(cve),
582 detail,
583 description,
584 )
585
586 cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
587
588 source_files = add_download_files(d, build_objset)
589 build_inputs |= source_files
590
591 recipe_spdx_license = add_license_expression(d, build_objset, d.getVar("LICENSE"))
592 build_objset.new_relationship(
593 source_files,
594 oe.spdx30.RelationshipType.hasConcludedLicense,
595 [recipe_spdx_license],
596 )
597
598 if oe.spdx_common.process_sources(d) and include_sources:
599 bb.debug(1, "Adding source files to SPDX")
600 oe.spdx_common.get_patched_src(d)
601
602 build_inputs |= add_package_files(
603 d,
604 build_objset,
605 spdx_workdir,
606 lambda file_counter: build_objset.new_spdxid("sourcefile", str(file_counter)),
607 lambda filepath: [oe.spdx30.software_SoftwarePurpose.source],
608 ignore_dirs=[".git"],
609 ignore_top_level_dirs=["temp"],
610 archive=None,
611 )
612
613
614 dep_objsets, dep_builds = collect_dep_objsets(d, build)
615 if dep_builds:
616 build_objset.new_scoped_relationship(
617 [build],
618 oe.spdx30.RelationshipType.dependsOn,
619 oe.spdx30.LifecycleScopeType.build,
620 sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds),
621 )
622
623 debug_source_ids = set()
624 source_hash_cache = {}
625
626 # Write out the package SPDX data now. It is not complete as we cannot
627 # write the runtime data, so write it to a staging area and a later task
628 # will write out the final collection
629
630 # TODO: Handle native recipe output
631 if not is_native:
632 bb.debug(1, "Collecting Dependency sources files")
633 sources = collect_dep_sources(dep_objsets)
634
635 bb.build.exec_func("read_subpackage_metadata", d)
636
637 pkgdest = Path(d.getVar("PKGDEST"))
638 for package in d.getVar("PACKAGES").split():
639 if not oe.packagedata.packaged(package, d):
640 continue
641
642 pkg_name = d.getVar("PKG:%s" % package) or package
643
644 bb.debug(1, "Creating SPDX for package %s" % pkg_name)
645
646 pkg_objset = oe.sbom30.ObjectSet.new_objset(d, pkg_name)
647
648 spdx_package = pkg_objset.add_root(oe.spdx30.software_Package(
649 _id=pkg_objset.new_spdxid("package", pkg_name),
650 creationInfo=pkg_objset.doc.creationInfo,
651 name=pkg_name,
652 software_packageVersion=d.getVar("PV"),
653 ))
654 set_timestamp_now(d, spdx_package, "builtTime")
655
656 set_purposes(
657 d,
658 spdx_package,
659 "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package,
660 "SPDX_PACKAGE_ADDITIONAL_PURPOSE",
661 force_purposes=["install"],
662 )
663
664
665 supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER")
666 if supplier is not None:
667 spdx_package.supplier = supplier if isinstance(supplier, str) else supplier._id
668
669 set_var_field("HOMEPAGE", spdx_package, "software_homePage", package=package)
670 set_var_field("SUMMARY", spdx_package, "summary", package=package)
671 set_var_field("DESCRIPTION", spdx_package, "description", package=package)
672
673 pkg_objset.new_scoped_relationship(
674 [build._id],
675 oe.spdx30.RelationshipType.hasOutputs,
676 oe.spdx30.LifecycleScopeType.build,
677 [spdx_package],
678 )
679
680 for cpe_id in cpe_ids:
681 spdx_package.externalIdentifier.append(
682 oe.spdx30.ExternalIdentifier(
683 externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23,
684 identifier=cpe_id,
685 ))
686
687 # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file
688 # generated and link it to the package
689 #spdx_package_file = pkg_objset.add(oe.spdx30.software_File(
690 # _id=pkg_objset.new_spdxid("distribution", pkg_name),
691 # creationInfo=pkg_objset.doc.creationInfo,
692 # name=pkg_name,
693 # software_primaryPurpose=spdx_package.software_primaryPurpose,
694 # software_additionalPurpose=spdx_package.software_additionalPurpose,
695 #))
696 #set_timestamp_now(d, spdx_package_file, "builtTime")
697
698 ## TODO add hashes
699 #pkg_objset.new_relationship(
700 # [spdx_package],
701 # oe.spdx30.RelationshipType.hasDistributionArtifact,
702 # [spdx_package_file],
703 #)
704
705 # NOTE: licenses live in the recipe collection and are referenced
706 # by ID in the package collection(s). This helps reduce duplication
707 # (since a lot of packages will have the same license), and also
708 # prevents duplicate license SPDX IDs in the packages
709 package_license = d.getVar("LICENSE:%s" % package)
710 if package_license and package_license != d.getVar("LICENSE"):
711 package_spdx_license = add_license_expression(d, build_objset, package_license)
712 else:
713 package_spdx_license = recipe_spdx_license
714
715 pkg_objset.new_relationship(
716 [spdx_package],
717 oe.spdx30.RelationshipType.hasConcludedLicense,
718 [package_spdx_license._id],
719 )
720
721 # NOTE: CVE Elements live in the recipe collection
722 all_cves = set()
723 for status, cves in cve_by_status.items():
724 for cve, items in cves.items():
725 spdx_cve, detail, description = items
726
727 all_cves.add(spdx_cve._id)
728
729 if status == "Patched":
730 pkg_objset.new_vex_patched_relationship([spdx_cve._id], [spdx_package])
731 elif status == "Unpatched":
732 pkg_objset.new_vex_unpatched_relationship([spdx_cve._id], [spdx_package])
733 elif status == "Ignored":
734 spdx_vex = pkg_objset.new_vex_ignored_relationship(
735 [spdx_cve._id],
736 [spdx_package],
737 impact_statement=description,
738 )
739
740 if detail in ("ignored", "cpe-incorrect", "disputed", "upstream-wontfix"):
741 # VEX doesn't have justifications for this
742 pass
743 elif detail in ("not-applicable-config", "not-applicable-platform"):
744 for v in spdx_vex:
745 v.security_justificationType = oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent
746 else:
747 bb.fatal(f"Unknown detail '{detail}' for ignored {cve}")
748 else:
749 bb.fatal(f"Unknown CVE status {status}")
750
751 if all_cves:
752 pkg_objset.new_relationship(
753 [spdx_package],
754 oe.spdx30.RelationshipType.hasAssociatedVulnerability,
755 sorted(list(all_cves)),
756 )
757
758 bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name)
759 package_files = add_package_files(
760 d,
761 pkg_objset,
762 pkgdest / package,
763 lambda file_counter: pkg_objset.new_spdxid("package", pkg_name, "file", str(file_counter)),
764 # TODO: Can we know the purpose here?
765 lambda filepath: [],
766 ignore_top_level_dirs=['CONTROL', 'DEBIAN'],
767 archive=None,
768 )
769
770 if package_files:
771 pkg_objset.new_relationship(
772 [spdx_package],
773 oe.spdx30.RelationshipType.contains,
774 sorted(list(package_files)),
775 )
776
777 if include_sources:
778 debug_sources = get_package_sources_from_debug(d, package, package_files, sources, source_hash_cache)
779 debug_source_ids |= set(oe.sbom30.get_element_link_id(d) for d in debug_sources)
780
781 oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False)
782
783 if include_sources:
784 bb.debug(1, "Adding sysroot files to SPDX")
785 sysroot_files = add_package_files(
786 d,
787 build_objset,
788 d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"),
789 lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)),
790 lambda filepath: [],
791 archive=None,
792 )
793
794 if sysroot_files:
795 build_objset.new_scoped_relationship(
796 [build],
797 oe.spdx30.RelationshipType.hasOutputs,
798 oe.spdx30.LifecycleScopeType.build,
799 sorted(list(sysroot_files)),
800 )
801
802 if build_inputs or debug_source_ids:
803 build_objset.new_scoped_relationship(
804 [build],
805 oe.spdx30.RelationshipType.hasInputs,
806 oe.spdx30.LifecycleScopeType.build,
807 sorted(list(build_inputs)) + sorted(list(debug_source_ids)),
808 )
809
810 oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir)
811 128 }
812 129 do_create_spdx[vardepsexclude] += "BB_NUMBER_THREADS"
813 130 addtask do_create_spdx after \
@@ -844,101 +161,9 @@ do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
844 161 do_create_spdx[depends] += "${PATCHDEPENDENCY}"
845 162
846 163 python do_create_package_spdx() {
847     import oe.sbom30
848     import oe.spdx30
+ 164     import oe.spdx30_tasks
+ 165     oe.spdx30_tasks.create_package_spdx(d)
849 import oe.spdx_common
850 import oe.packagedata
851 from pathlib import Path
852
853 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
854 deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
855 is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)
856
857 providers = oe.spdx_common.collect_package_providers(d)
858 pkg_arch = d.getVar("SSTATE_PKGARCH")
859
860 if not is_native:
861 bb.build.exec_func("read_subpackage_metadata", d)
862
863 dep_package_cache = {}
864
865 # Any element common to all packages that need to be referenced by ID
866 # should be written into this objset set
867 common_objset = oe.sbom30.ObjectSet.new_objset(d, "%s-package-common" % d.getVar("PN"))
868
869 pkgdest = Path(d.getVar("PKGDEST"))
870 for package in d.getVar("PACKAGES").split():
871 localdata = bb.data.createCopy(d)
872 pkg_name = d.getVar("PKG:%s" % package) or package
873 localdata.setVar("PKG", pkg_name)
874 localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)
875
876 if not oe.packagedata.packaged(package, localdata):
877 continue
878
879 spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld(
880 d,
881 pkg_arch,
882 "packages-staging",
883 pkg_name,
884 oe.spdx30.software_Package,
885 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
886 )
887
888 # We will write out a new collection, so link it to the new
889 # creation info in the common package data. The old creation info
890 # should still exist and be referenced by all the existing elements
891 # in the package
892 pkg_objset.creationInfo = pkg_objset.copy_creation_info(common_objset.doc.creationInfo)
893
894 runtime_spdx_deps = set()
895
896 deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
897 seen_deps = set()
898 for dep, _ in deps.items():
899 if dep in seen_deps:
900 continue
901
902 if dep not in providers:
903 continue
904
905 (dep, _) = providers[dep]
906
907 if not oe.packagedata.packaged(dep, localdata):
908 continue
909
910 dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
911 dep_pkg = dep_pkg_data["PKG"]
912
913 if dep in dep_package_cache:
914 dep_spdx_package = dep_package_cache[dep]
915 else:
916 bb.debug(1, "Searching for %s" % dep_pkg)
917 dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld(
918 d,
919 "packages-staging",
920 dep_pkg,
921 oe.spdx30.software_Package,
922 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
923 )
924 dep_package_cache[dep] = dep_spdx_package
925
926 runtime_spdx_deps.add(dep_spdx_package)
927 seen_deps.add(dep)
928
929 if runtime_spdx_deps:
930 pkg_objset.new_scoped_relationship(
931 [spdx_package],
932 oe.spdx30.RelationshipType.dependsOn,
933 oe.spdx30.LifecycleScopeType.runtime,
934 [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps],
935 )
936
937 oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir)
938
939 oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir)
940 166 }
941
942 167 do_create_package_spdx[vardepsexclude] += "OVERRIDES SSTATE_ARCHS"
943 168
944 169 addtask do_create_package_spdx after do_create_spdx before do_build do_rm_work
@@ -955,91 +180,10 @@ do_create_package_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
955 180 do_create_package_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
956 181 do_create_package_spdx[rdeptask] = "do_create_spdx"
957 182
958
959
960 183 python spdx30_build_started_handler () {
961     import oe.spdx30
+ 184     import oe.spdx30_tasks
962 import oe.sbom30
963 import oe.spdx_common
964 import os
965 from pathlib import Path
966 from datetime import datetime, timezone
967
968 # Create a copy of the datastore. Set PN to "bitbake" so that SPDX IDs can
969 # be generated
970 185     d = e.data.createCopy()
971     d.setVar("PN", "bitbake")
+ 186     oe.spdx30_tasks.write_bitbake_spdx(d)
972 d.setVar("BB_TASKHASH", "bitbake")
973 oe.spdx_common.load_spdx_license_data(d)
974
975 deploy_dir_spdx = Path(e.data.getVar("DEPLOY_DIR_SPDX"))
976
977 objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False)
978
979 host_import_key = d.getVar("SPDX_BUILD_HOST")
980 invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False)
981 on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False)
982
983 if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
984 # Since the Build objects are unique, we may as well set the creation
985 # time to the current time instead of the fallback SDE
986 objset.doc.creationInfo.created = datetime.now(timezone.utc)
987
988 # Each invocation of bitbake should have a unique ID since it is a
989 # unique build
990 nonce = os.urandom(16).hex()
991
992 build = objset.add_root(oe.spdx30.build_Build(
993 _id=objset.new_spdxid(nonce, include_unihash=False),
994 creationInfo=objset.doc.creationInfo,
995 build_buildType=oe.sbom30.SPDX_BUILD_TYPE,
996 ))
997 set_timestamp_now(d, build, "build_buildStartTime")
998
999 if host_import_key:
1000 objset.new_scoped_relationship(
1001 [build],
1002 oe.spdx30.RelationshipType.hasHost,
1003 oe.spdx30.LifecycleScopeType.build,
1004 [objset.new_import("SPDX_BUILD_HOST")],
1005 )
1006
1007 if invoked_by:
1008 objset.add(invoked_by)
1009 invoked_by_spdx = objset.new_scoped_relationship(
1010 [build],
1011 oe.spdx30.RelationshipType.invokedBy,
1012 oe.spdx30.LifecycleScopeType.build,
1013 [invoked_by],
1014 )
1015
1016 if on_behalf_of:
1017 objset.add(on_behalf_of)
1018 objset.new_scoped_relationship(
1019 [on_behalf_of],
1020 oe.spdx30.RelationshipType.delegatedTo,
1021 oe.spdx30.LifecycleScopeType.build,
1022 invoked_by_spdx,
1023 )
1024
1025 elif on_behalf_of:
1026 bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set")
1027
1028 else:
1029 if host_import_key:
1030 bb.warn("SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set")
1031
1032 if invoked_by:
1033 bb.warn("SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set")
1034
1035 if on_behalf_of:
1036 bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set")
1037
1038 for obj in objset.foreach_type(oe.spdx30.Element):
1039 obj.extension.append(oe.sbom30.OELinkExtension(link_spdx_id=False))
1040 obj.extension.append(oe.sbom30.OEIdAliasExtension())
1041
1042 oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json")
1043 187 }
1044 188
1045 189 addhandler spdx30_build_started_handler