--- a/meta/classes/create-spdx-2.2.bbclass
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -401,6 +401,8 @@ python do_create_spdx() {
from contextlib import contextmanager
import oe.cve_check
+ oe.spdx_common.load_spdx_license_data(d)
+
@contextmanager
def optional_tarfile(name, guard, mode="w"):
import tarfile
--- a/meta/classes/spdx-common.bbclass
+++ b/meta/classes/spdx-common.bbclass
@@ -39,12 +39,6 @@ SPDX_CUSTOM_ANNOTATION_VARS ??= ""
SPDX_MULTILIB_SSTATE_ARCHS ??= "${SSTATE_ARCHS}"
-python() {
- import oe.spdx_common
- oe.spdx_common.load_spdx_license_data(d)
-}
-
-
python do_collect_spdx_deps() {
# This task calculates the build time dependencies of the recipe, and is
# required because while a task can deptask on itself, those dependencies
--- a/meta/lib/oe/spdx30_tasks.py
+++ b/meta/lib/oe/spdx30_tasks.py
@@ -452,6 +452,9 @@ def create_spdx(d):
if val:
setattr(obj, name, val)
+ import oe.spdx_common
+ oe.spdx_common.load_spdx_license_data(d)
+
deploydir = Path(d.getVar("SPDXDEPLOY"))
deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
spdx_workdir = Path(d.getVar("SPDXWORK"))
Loading a large number of JSON files into a memory structure and stashing it in a bitbake variable is relatively anti-social, making bitbake -e output hard to read, for example, as well as causing other potential performance issues. Defer loading of that data until it is actually needed/used in a function. We should probably skip putting it into the datastore at all in future.

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
---
 meta/classes/create-spdx-2.2.bbclass | 2 ++
 meta/classes/spdx-common.bbclass     | 6 ------
 meta/lib/oe/spdx30_tasks.py          | 3 +++
 3 files changed, 5 insertions(+), 6 deletions(-)
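
As background, here is a minimal sketch of the deferred-loading pattern the hunks above switch to. It assumes the helper simply parses a license JSON file and caches the result in the datastore; the variable names SPDX_LICENSES and SPDX_LICENSE_DATA and the body below are illustrative assumptions, not the actual oe.spdx_common implementation:

    # Hypothetical sketch only; the real oe.spdx_common.load_spdx_license_data()
    # may differ. SPDX_LICENSES and SPDX_LICENSE_DATA are assumed variable names.
    import json

    def load_spdx_license_data(d):
        # Called from inside the task bodies (do_create_spdx()/create_spdx()),
        # so the JSON is parsed only when a task actually runs, rather than on
        # every recipe parse via an anonymous python() block.
        if d.getVar("SPDX_LICENSE_DATA"):
            return
        with open(d.getVar("SPDX_LICENSES")) as f:
            d.setVar("SPDX_LICENSE_DATA", json.load(f))

The substance of the change is when this runs: invoking it from the task functions instead of an anonymous python() block keeps the parsed data off the parse-time hot path and out of bitbake -e output.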