
[meta-darwin,02/11] use classes from poky directly

Message ID 20240610143214.2376414-2-ecordonnier@snap.com
State: New
Series [meta-darwin,01/11] layer.conf/README: update to scarthgap

Commit Message

Etienne Cordonnier June 10, 2024, 2:32 p.m. UTC
From: Etienne Cordonnier <ecordonnier@snap.com>

The changes needed for Darwin support have been upstreamed to poky in
the scarthgap release, so these files are no longer needed.

poky commits:
https://git.yoctoproject.org/poky/commit/?id=02dbf1c7d26b24e6a9f25171522352cb2ff66d5c
https://git.yoctoproject.org/poky/commit/?id=90ea98a24c3df4f36f51b6411ba8963af98a0e19
https://git.yoctoproject.org/poky/commit/?id=3db106c41b22f9152b7253a1310467589eacf9d3
https://git.yoctoproject.org/poky/commit/?id=5031cf42ffb221b52e527d8afc54138c1fb161a1
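
For recipes nothing changes: the same inherit statements keep working, and
BitBake now resolves the classes from poky via BBPATH instead of from
meta-darwin/classes/. A minimal sketch (illustrative recipe fragment, not
part of this series):

    # unchanged recipe usage; the class is now poky's copy
    inherit cmake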

Signed-off-by: Etienne Cordonnier <ecordonnier@snap.com>
---
 classes/chrpath.bbclass  |  134 --
 classes/cmake.bbclass    |  219 ----
 classes/package.bbclass  | 2552 --------------------------------------
 classes/siteinfo.bbclass |  226 ----
 4 files changed, 3131 deletions(-)
 delete mode 100644 classes/chrpath.bbclass
 delete mode 100644 classes/cmake.bbclass
 delete mode 100644 classes/package.bbclass
 delete mode 100644 classes/siteinfo.bbclass
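
A quick way to confirm that scarthgap ships these classes (a sketch assuming
poky's current class layout, with global classes under meta/classes-global/
and recipe classes under meta/classes-recipe/):

    $ git -C poky ls-tree --name-only scarthgap \
        meta/classes-global/package.bbclass meta/classes-recipe/cmake.bbclass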

Patch

diff --git a/classes/chrpath.bbclass b/classes/chrpath.bbclass
deleted file mode 100644
index 873a5b5..0000000
--- a/classes/chrpath.bbclass
+++ /dev/null
@@ -1,134 +0,0 @@ 
-CHRPATH_BIN ?= "chrpath"
-PREPROCESS_RELOCATE_DIRS ?= ""
-
-def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = False):
-    import subprocess, oe.qa
-
-    with oe.qa.ELFFile(fpath) as elf:
-        try:
-            elf.open()
-        except oe.qa.NotELFFileError:
-            return
-
-    try:
-        out = subprocess.check_output([cmd, "-l", fpath], universal_newlines=True)
-    except subprocess.CalledProcessError:
-        return
-
-    # Handle RUNPATH as well as RPATH
-    out = out.replace("RUNPATH=","RPATH=")
-    # Throw away everything other than the rpath list
-    curr_rpath = out.partition("RPATH=")[2]
-    #bb.note("Current rpath for %s is %s" % (fpath, curr_rpath.strip()))
-    rpaths = curr_rpath.strip().split(":")
-    new_rpaths = []
-    modified = False
-    for rpath in rpaths:
-        # If rpath is already dynamic copy it to new_rpath and continue
-        if rpath.find("$ORIGIN") != -1:
-            new_rpaths.append(rpath)
-            continue
-        rpath =  os.path.normpath(rpath)
-        if baseprefix not in rpath and tmpdir not in rpath:
-            # Skip standard search paths
-            if rpath in ['/lib', '/usr/lib', '/lib64/', '/usr/lib64']:
-                bb.warn("Skipping RPATH %s as it is a standard search path for %s" % (rpath, fpath))
-                modified = True
-                continue
-            new_rpaths.append(rpath)
-            continue
-        new_rpaths.append("$ORIGIN/" + os.path.relpath(rpath, os.path.dirname(fpath.replace(rootdir, "/"))))
-        modified = True
-
-    # if we have modified some rpaths call chrpath to update the binary
-    if modified:
-        if break_hardlinks:
-            bb.utils.break_hardlinks(fpath)
-
-        args = ":".join(new_rpaths)
-        #bb.note("Setting rpath for %s to %s" %(fpath, args))
-        try:
-            subprocess.check_output([cmd, "-r", args, fpath],
-            stderr=subprocess.PIPE, universal_newlines=True)
-        except subprocess.CalledProcessError as e:
-            bb.fatal("chrpath command failed with exit code %d:\n%s\n%s" % (e.returncode, e.stdout, e.stderr))
-
-def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = False):
-    import subprocess as sub
-
-    p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', fpath],stdout=sub.PIPE,stderr=sub.PIPE, universal_newlines=True)
-    out, err = p.communicate()
-    # If returned successfully, process stdout for results
-    if p.returncode != 0:
-        return
-    for l in out.split("\n"):
-        if "(compatibility" not in l:
-            continue
-        rpath = l.partition("(compatibility")[0].strip()
-        if baseprefix not in rpath:
-            continue
-
-        if break_hardlinks:
-            bb.utils.break_hardlinks(fpath)
-
-        newpath = "@loader_path/" + os.path.relpath(rpath, os.path.dirname(fpath.replace(rootdir, "/")))
-        p = sub.Popen([d.expand("${HOST_PREFIX}install_name_tool"), '-change', rpath, newpath, fpath],stdout=sub.PIPE,stderr=sub.PIPE)
-        out, err = p.communicate()
-
-def process_dir(rootdir, directory, d, break_hardlinks = False):
-    bb.debug(2, "Checking %s for binaries to process" % directory)
-    if not os.path.exists(directory):
-        return
-
-    import stat
-
-    rootdir = os.path.normpath(rootdir)
-    cmd = d.expand('${CHRPATH_BIN}')
-    tmpdir = os.path.normpath(d.getVar('TMPDIR', False))
-    baseprefix = os.path.normpath(d.expand('${base_prefix}'))
-    hostos = d.getVar("HOST_OS")
-
-    if "linux" in hostos:
-        process_file = process_file_linux
-    elif "darwin" in hostos:
-        process_file = process_file_darwin
-    else:
-        # Relocations not supported
-        return
-
-    dirs = os.listdir(directory)
-    for file in dirs:
-        fpath = directory + "/" + file
-        fpath = os.path.normpath(fpath)
-        if os.path.islink(fpath):
-            # Skip symlinks
-            continue
-
-        if os.path.isdir(fpath):
-            process_dir(rootdir, fpath, d, break_hardlinks = break_hardlinks)
-        else:
-            #bb.note("Testing %s for relocatability" % fpath)
-
-            # We need read and write permissions for chrpath; if we don't have
-            # them, set them temporarily. Take a copy of the file's
-            # permissions so that we can restore them afterwards.
-            perms = os.stat(fpath)[stat.ST_MODE]
-            if os.access(fpath, os.W_OK|os.R_OK):
-                perms = None
-            else:
-                # Temporarily make the file writeable so we can chrpath it
-                os.chmod(fpath, perms|stat.S_IRWXU)
-
-            process_file(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = break_hardlinks)
-
-            if perms:
-                os.chmod(fpath, perms)
-
-def rpath_replace (path, d):
-    bindirs = d.expand("${bindir} ${sbindir} ${base_sbindir} ${base_bindir} ${libdir} ${base_libdir} ${libexecdir} ${PREPROCESS_RELOCATE_DIRS}").split()
-
-    for bindir in bindirs:
-        #bb.note ("Processing directory " + bindir)
-        directory = path + "/" + bindir
-        process_dir (path, directory, d)
-
diff --git a/classes/cmake.bbclass b/classes/cmake.bbclass
deleted file mode 100644
index 735d68d..0000000
--- a/classes/cmake.bbclass
+++ /dev/null
@@ -1,219 +0,0 @@ 
-# Path to the CMake file to process.
-OECMAKE_SOURCEPATH ??= "${S}"
-
-DEPENDS:prepend = "cmake-native "
-B = "${WORKDIR}/build"
-
-# What CMake generator to use.
-# The supported options are "Unix Makefiles" or "Ninja".
-OECMAKE_GENERATOR ?= "Ninja"
-
-python() {
-    generator = d.getVar("OECMAKE_GENERATOR")
-    if "Unix Makefiles" in generator:
-        args = "-G '" + generator +  "' -DCMAKE_MAKE_PROGRAM=" + d.getVar("MAKE")
-        d.setVar("OECMAKE_GENERATOR_ARGS", args)
-        d.setVarFlag("do_compile", "progress", "percent")
-    elif "Ninja" in generator:
-        args = "-G '" + generator + "' -DCMAKE_MAKE_PROGRAM=ninja"
-        d.appendVar("DEPENDS", " ninja-native")
-        d.setVar("OECMAKE_GENERATOR_ARGS", args)
-        d.setVarFlag("do_compile", "progress", r"outof:^\[(\d+)/(\d+)\]\s+")
-    else:
-        bb.fatal("Unknown CMake Generator %s" % generator)
-}
-OECMAKE_AR ?= "${AR}"
-
-# Compiler flags
-OECMAKE_C_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CFLAGS}"
-OECMAKE_CXX_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CXXFLAGS}"
-OECMAKE_C_FLAGS_RELEASE ?= "-DNDEBUG"
-OECMAKE_CXX_FLAGS_RELEASE ?= "-DNDEBUG"
-OECMAKE_C_LINK_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CPPFLAGS} ${LDFLAGS}"
-OECMAKE_CXX_LINK_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CXXFLAGS} ${LDFLAGS}"
-
-def oecmake_map_compiler(compiler, d):
-    args = d.getVar(compiler).split()
-    if args[0] == "ccache":
-        return args[1], args[0]
-    return args[0], ""
-
-# C/C++ Compiler (without cpu arch/tune arguments)
-OECMAKE_C_COMPILER ?= "${@oecmake_map_compiler('CC', d)[0]}"
-OECMAKE_C_COMPILER_LAUNCHER ?= "${@oecmake_map_compiler('CC', d)[1]}"
-OECMAKE_CXX_COMPILER ?= "${@oecmake_map_compiler('CXX', d)[0]}"
-OECMAKE_CXX_COMPILER_LAUNCHER ?= "${@oecmake_map_compiler('CXX', d)[1]}"
-
-# clear compiler vars for allarch to avoid sig hash difference
-OECMAKE_C_COMPILER_allarch = ""
-OECMAKE_C_COMPILER_LAUNCHER_allarch = ""
-OECMAKE_CXX_COMPILER_allarch = ""
-OECMAKE_CXX_COMPILER_LAUNCHER_allarch = ""
-
-OECMAKE_RPATH ?= ""
-OECMAKE_PERLNATIVE_DIR ??= ""
-OECMAKE_EXTRA_ROOT_PATH ?= ""
-
-OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM = "ONLY"
-OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM:class-native = "BOTH"
-
-EXTRA_OECMAKE:append = " ${PACKAGECONFIG_CONFARGS}"
-
-export CMAKE_BUILD_PARALLEL_LEVEL
-CMAKE_BUILD_PARALLEL_LEVEL:task-compile = "${@oe.utils.parallel_make(d, False)}"
-CMAKE_BUILD_PARALLEL_LEVEL:task-install = "${@oe.utils.parallel_make(d, True)}"
-
-OECMAKE_TARGET_COMPILE ?= "all"
-OECMAKE_TARGET_INSTALL ?= "install"
-
-def map_host_os_to_system_name(host_os):
-    if host_os.startswith('darwin'):
-        return 'Darwin'
-    if host_os.startswith('mingw'):
-        return 'Windows'
-    if host_os.startswith('linux'):
-        return 'Linux'
-    return host_os
-
-# CMake expects target architectures in the format of uname(2),
-# which do not always match TARGET_ARCH, so all the necessary
-# conversions should happen here.
-def map_host_arch_to_uname_arch(host_arch):
-    if host_arch == "powerpc":
-        return "ppc"
-    if host_arch == "powerpc64le":
-        return "ppc64le"
-    if host_arch == "powerpc64":
-        return "ppc64"
-    return host_arch
-
-cmake_do_generate_toolchain_file() {
-	if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
-		cmake_crosscompiling="set( CMAKE_CROSSCOMPILING FALSE )"
-	fi
-	cat > ${WORKDIR}/toolchain.cmake <<EOF
-# CMake system name must be something like "Linux".
-# This is important for cross-compiling.
-$cmake_crosscompiling
-set( CMAKE_SYSTEM_NAME ${@map_host_os_to_system_name(d.getVar('HOST_OS'))} )
-set( CMAKE_SYSTEM_PROCESSOR ${@map_host_arch_to_uname_arch(d.getVar('HOST_ARCH'))} )
-set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} )
-set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} )
-set( CMAKE_C_COMPILER_LAUNCHER ${OECMAKE_C_COMPILER_LAUNCHER} )
-set( CMAKE_CXX_COMPILER_LAUNCHER ${OECMAKE_CXX_COMPILER_LAUNCHER} )
-set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} )
-find_program( CMAKE_AR ${OECMAKE_AR} DOC "Archiver" REQUIRED )
-
-set( CMAKE_C_FLAGS "${OECMAKE_C_FLAGS}" CACHE STRING "CFLAGS" )
-set( CMAKE_CXX_FLAGS "${OECMAKE_CXX_FLAGS}" CACHE STRING "CXXFLAGS" )
-set( CMAKE_ASM_FLAGS "${OECMAKE_C_FLAGS}" CACHE STRING "ASM FLAGS" )
-set( CMAKE_C_FLAGS_RELEASE "${OECMAKE_C_FLAGS_RELEASE}" CACHE STRING "Additional CFLAGS for release" )
-set( CMAKE_CXX_FLAGS_RELEASE "${OECMAKE_CXX_FLAGS_RELEASE}" CACHE STRING "Additional CXXFLAGS for release" )
-set( CMAKE_ASM_FLAGS_RELEASE "${OECMAKE_C_FLAGS_RELEASE}" CACHE STRING "Additional ASM FLAGS for release" )
-set( CMAKE_C_LINK_FLAGS "${OECMAKE_C_LINK_FLAGS}" CACHE STRING "LDFLAGS" )
-set( CMAKE_CXX_LINK_FLAGS "${OECMAKE_CXX_LINK_FLAGS}" CACHE STRING "LDFLAGS" )
-
-# only search in the paths provided so cmake doesn't pick
-# up libraries and tools from the native build machine
-set( CMAKE_FIND_ROOT_PATH ${STAGING_DIR_HOST} ${STAGING_DIR_NATIVE} ${CROSS_DIR} ${OECMAKE_PERLNATIVE_DIR} ${OECMAKE_EXTRA_ROOT_PATH} ${EXTERNAL_TOOLCHAIN} ${HOSTTOOLS_DIR})
-set( CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY )
-set( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ${OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM} )
-set( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY )
-set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY )
-set( CMAKE_PROGRAM_PATH "/" )
-
-# Use qt.conf settings
-set( ENV{QT_CONF_PATH} ${WORKDIR}/qt.conf )
-
-# We need to set the rpath to the correct directory as cmake does not provide any
-# directory as rpath by default
-set( CMAKE_INSTALL_RPATH ${OECMAKE_RPATH} )
-
-# Use RPATHs relative to build directory for reproducibility
-set( CMAKE_BUILD_RPATH_USE_ORIGIN ON )
-
-# Use our cmake modules
-list(APPEND CMAKE_MODULE_PATH "${STAGING_DATADIR}/cmake/Modules/")
-
-# add for non /usr/lib libdir, e.g. /usr/lib64
-set( CMAKE_LIBRARY_PATH ${libdir} ${base_libdir})
-
-# add include dir to implicit includes in case it differs from /usr/include
-list(APPEND CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES ${includedir})
-list(APPEND CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES ${includedir})
-
-EOF
-}
-
-addtask generate_toolchain_file after do_patch before do_configure
-
-CONFIGURE_FILES = "CMakeLists.txt"
-
-do_configure[cleandirs] = "${@d.getVar('B') if d.getVar('S') != d.getVar('B') else ''}"
-
-cmake_do_configure() {
-	if [ "${OECMAKE_BUILDPATH}" ]; then
-		bbnote "cmake.bbclass no longer uses OECMAKE_BUILDPATH.  The default behaviour is now out-of-tree builds with B=WORKDIR/build."
-	fi
-
-	if [ "${S}" = "${B}" ]; then
-		find ${B} -name CMakeFiles -or -name Makefile -or -name cmake_install.cmake -or -name CMakeCache.txt -delete
-	fi
-
-	# Just like autotools, cmake can use a site file to cache results that need generated binaries to run
-	if [ -e ${WORKDIR}/site-file.cmake ] ; then
-		oecmake_sitefile="-C ${WORKDIR}/site-file.cmake"
-	else
-		oecmake_sitefile=
-	fi
-
-	cmake \
-	  ${OECMAKE_GENERATOR_ARGS} \
-	  $oecmake_sitefile \
-	  ${OECMAKE_SOURCEPATH} \
-	  -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \
-	  -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix') + '/')} \
-	  -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir'), d.getVar('prefix') + '/')} \
-	  -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir'), d.getVar('prefix') + '/')} \
-	  -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \
-	  -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir'), d.getVar('prefix') + '/')} \
-	  -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \
-	  -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix') + '/')} \
-	  -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix') + '/')} \
-	  -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix') + '/')} \
-	  -DPYTHON_EXECUTABLE:PATH=${PYTHON} \
-	  -DPython_EXECUTABLE:PATH=${PYTHON} \
-	  -DPython3_EXECUTABLE:PATH=${PYTHON} \
-	  -DLIB_SUFFIX=${@d.getVar('baselib').replace('lib', '')} \
-	  -DCMAKE_INSTALL_SO_NO_EXE=0 \
-	  -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \
-	  -DCMAKE_NO_SYSTEM_FROM_IMPORTED=1 \
-	  -DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=ON \
-	  -DFETCHCONTENT_FULLY_DISCONNECTED=ON \
-	  ${EXTRA_OECMAKE} \
-	  -Wno-dev
-}
-
-# To disable verbose cmake logs for a given recipe, or globally via config
-# metadata such as local.conf, add the following:
-#
-# CMAKE_VERBOSE = ""
-#
-
-CMAKE_VERBOSE ??= "VERBOSE=1"
-
-# Then run do_compile again
-cmake_runcmake_build() {
-	bbnote ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
-	eval ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
-}
-
-cmake_do_compile()  {
-	cmake_runcmake_build --target ${OECMAKE_TARGET_COMPILE}
-}
-
-cmake_do_install() {
-	DESTDIR='${D}' cmake_runcmake_build --target ${OECMAKE_TARGET_INSTALL}
-}
-
-EXPORT_FUNCTIONS do_configure do_compile do_install do_generate_toolchain_file
diff --git a/classes/package.bbclass b/classes/package.bbclass
deleted file mode 100644
index 8f5825e..0000000
--- a/classes/package.bbclass
+++ /dev/null
@@ -1,2552 +0,0 @@ 
-#
-# Packaging process
-#
-# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS,
-# taking D and splitting it up into the packages listed in PACKAGES, placing the
-# resulting output in PKGDEST.
-#
-# There are the following default steps but PACKAGEFUNCS can be extended:
-#
-# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
-#
-# b) perform_packagecopy - Copy D into PKGD
-#
-# c) package_do_split_locales - Split out the locale files, updating FILES and PACKAGES
-#
-# d) split_and_strip_files - split the files into runtime and debug and strip them.
-#    Debug files include debug info split, and associated sources that end up in -dbg packages
-#
-# e) fixup_perms - Fix up permissions in the package before we split it.
-#
-# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
-#    Also triggers the binary stripping code to put files in -dbg packages.
-#
-# g) package_do_filedeps - Collect perfile run-time dependency metadata
-#    The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
-#    a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
-#
-# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
-#    dependencies found. Also stores the package name so anyone else using this library
-#    knows which package to depend on.
-#
-# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
-#
-# j) read_shlibdeps - Reads the stored shlibs information into the metadata
-#
-# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
-#
-# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
-#    packaging steps
-
-inherit packagedata
-inherit chrpath
-inherit package_pkgdata
-inherit insane
-
-PKGD    = "${WORKDIR}/package"
-PKGDEST = "${WORKDIR}/packages-split"
-
-LOCALE_SECTION ?= ''
-
-ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
-
-# rpm is used for the per-file dependency identification
-# dwarfsrcfiles is used to determine the list of debug source files
-PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"
-
-
-# If your postinstall can execute at rootfs creation time rather than on
-# target but depends on a native/cross tool in order to execute, you need to
-# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
-# in the package dependencies as normal, this is just for native/cross support
-# tools at rootfs build time.
-PACKAGE_WRITE_DEPS ??= ""
-
-def legitimize_package_name(s):
-    """
-    Make sure package names are legitimate strings
-    """
-    import re
-
-    def fixutf(m):
-        cp = m.group(1)
-        if cp:
-            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
-
-    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
-    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
-
-    # Remaining package name validity fixes
-    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
-
-def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
-    """
-    Used in .bb files to split up dynamically generated subpackages of a
-    given package, usually plugins or modules.
-
-    Arguments:
-    root           -- the path in which to search
-    file_regex     -- regular expression to match searched files. Use
-                      parentheses () to mark the part of this expression
-                      that should be used to derive the module name (to be
-                      substituted where %s is used in other function
-                      arguments as noted below)
-    output_pattern -- pattern to use for the package names. Must include %s.
-    description    -- description to set for each package. Must include %s.
-    postinst       -- postinstall script to use for all packages (as a
-                      string)
-    recursive      -- True to perform a recursive search - default False
-    hook           -- a hook function to be called for every match. The
-                      function will be called with the following arguments
-                      (in the order listed):
-                        f: full path to the file/directory match
-                        pkg: the package name
-                        file_regex: as above
-                        output_pattern: as above
-                        modulename: the module name derived using file_regex
-    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
-                      all packages. The default value of None causes a
-                      dependency on the main package (${PN}) - if you do
-                      not want this, pass '' for this parameter.
-    aux_files_pattern -- extra item(s) to be added to FILES for each
-                      package. Can be a single string item or a list of
-                      strings for multiple items.  Must include %s.
-    postrm         -- postrm script to use for all packages (as a string)
-    allow_dirs     -- True allow directories to be matched - default False
-    prepend        -- if True, prepend created packages to PACKAGES instead
-                      of the default False which appends them
-    match_path     -- match file_regex on the whole relative path to the
-                      root rather than just the file name
-    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
-                      each package, using the actual derived module name
-                      rather than converting it to something legal for a
-                      package name. Can be a single string item or a list
-                      of strings for multiple items. Must include %s.
-    allow_links    -- True to allow symlinks to be matched - default False
-    summary        -- Summary to set for each package. Must include %s;
-                      defaults to description if not set.
-
-    """
-
-    dvar = d.getVar('PKGD')
-    root = d.expand(root)
-    output_pattern = d.expand(output_pattern)
-    extra_depends = d.expand(extra_depends)
-
-    # If the root directory doesn't exist, don't error out later but silently do
-    # no splitting.
-    if not os.path.exists(dvar + root):
-        return []
-
-    ml = d.getVar("MLPREFIX")
-    if ml:
-        if not output_pattern.startswith(ml):
-            output_pattern = ml + output_pattern
-
-        newdeps = []
-        for dep in (extra_depends or "").split():
-            if dep.startswith(ml):
-                newdeps.append(dep)
-            else:
-                newdeps.append(ml + dep)
-        if newdeps:
-            extra_depends = " ".join(newdeps)
-
-
-    packages = d.getVar('PACKAGES').split()
-    split_packages = set()
-
-    if postinst:
-        postinst = '#!/bin/sh\n' + postinst + '\n'
-    if postrm:
-        postrm = '#!/bin/sh\n' + postrm + '\n'
-    if not recursive:
-        objs = os.listdir(dvar + root)
-    else:
-        objs = []
-        for walkroot, dirs, files in os.walk(dvar + root):
-            for file in files:
-                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
-                if relpath:
-                    objs.append(relpath)
-
-    if extra_depends == None:
-        extra_depends = d.getVar("PN")
-
-    if not summary:
-        summary = description
-
-    for o in sorted(objs):
-        import re, stat
-        if match_path:
-            m = re.match(file_regex, o)
-        else:
-            m = re.match(file_regex, os.path.basename(o))
-
-        if not m:
-            continue
-        f = os.path.join(dvar + root, o)
-        mode = os.lstat(f).st_mode
-        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
-            continue
-        on = legitimize_package_name(m.group(1))
-        pkg = output_pattern % on
-        split_packages.add(pkg)
-        if not pkg in packages:
-            if prepend:
-                packages = [pkg] + packages
-            else:
-                packages.append(pkg)
-        oldfiles = d.getVar('FILES:' + pkg)
-        newfile = os.path.join(root, o)
-        # These names will be passed through glob() so if the filename actually
-        # contains * or ? (rare, but possible) we need to handle that specially
-        newfile = newfile.replace('*', '[*]')
-        newfile = newfile.replace('?', '[?]')
-        if not oldfiles:
-            the_files = [newfile]
-            if aux_files_pattern:
-                if type(aux_files_pattern) is list:
-                    for fp in aux_files_pattern:
-                        the_files.append(fp % on)
-                else:
-                    the_files.append(aux_files_pattern % on)
-            if aux_files_pattern_verbatim:
-                if type(aux_files_pattern_verbatim) is list:
-                    for fp in aux_files_pattern_verbatim:
-                        the_files.append(fp % m.group(1))
-                else:
-                    the_files.append(aux_files_pattern_verbatim % m.group(1))
-            d.setVar('FILES:' + pkg, " ".join(the_files))
-        else:
-            d.setVar('FILES:' + pkg, oldfiles + " " + newfile)
-        if extra_depends != '':
-            d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends)
-        if not d.getVar('DESCRIPTION:' + pkg):
-            d.setVar('DESCRIPTION:' + pkg, description % on)
-        if not d.getVar('SUMMARY:' + pkg):
-            d.setVar('SUMMARY:' + pkg, summary % on)
-        if postinst:
-            d.setVar('pkg_postinst:' + pkg, postinst)
-        if postrm:
-            d.setVar('pkg_postrm:' + pkg, postrm)
-        if callable(hook):
-            hook(f, pkg, file_regex, output_pattern, m.group(1))
-
-    d.setVar('PACKAGES', ' '.join(packages))
-    return list(split_packages)
-
-PACKAGE_DEPENDS += "file-native"
-
-python () {
-    if d.getVar('PACKAGES') != '':
-        deps = ""
-        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
-            deps += " %s:do_populate_sysroot" % dep
-        if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
-            deps += ' xz-native:do_populate_sysroot'
-        d.appendVarFlag('do_package', 'depends', deps)
-
-        # shlibs requires any DEPENDS to have already packaged for the *.list files
-        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
-}
-
-# Get a list of files from file vars by searching files under current working directory
-# The list contains symlinks, directories and normal files.
-def files_from_filevars(filevars):
-    import os,glob
-    cpath = oe.cachedpath.CachedPath()
-    files = []
-    for f in filevars:
-        if os.path.isabs(f):
-            f = '.' + f
-        if not f.startswith("./"):
-            f = './' + f
-        globbed = glob.glob(f)
-        if globbed:
-            if [ f ] != globbed:
-                files += globbed
-                continue
-        files.append(f)
-
-    symlink_paths = []
-    for ind, f in enumerate(files):
-        # Handle directory symlinks. Truncate path to the lowest level symlink
-        parent = ''
-        for dirname in f.split('/')[:-1]:
-            parent = os.path.join(parent, dirname)
-            if dirname == '.':
-                continue
-            if cpath.islink(parent):
-                bb.warn("FILES contains file '%s' which resides under a "
-                        "directory symlink. Please fix the recipe and use the "
-                        "real path for the file." % f[1:])
-                symlink_paths.append(f)
-                files[ind] = parent
-                f = parent
-                break
-
-        if not cpath.islink(f):
-            if cpath.isdir(f):
-                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
-                if newfiles:
-                    files += newfiles
-
-    return files, symlink_paths
-
-# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
-def get_conffiles(pkg, d):
-    pkgdest = d.getVar('PKGDEST')
-    root = os.path.join(pkgdest, pkg)
-    cwd = os.getcwd()
-    os.chdir(root)
-
-    conffiles = d.getVar('CONFFILES:%s' % pkg);
-    if conffiles == None:
-        conffiles = d.getVar('CONFFILES')
-    if conffiles == None:
-        conffiles = ""
-    conffiles = conffiles.split()
-    conf_orig_list = files_from_filevars(conffiles)[0]
-
-    # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
-    conf_list = []
-    for f in conf_orig_list:
-        if os.path.isdir(f):
-            continue
-        if os.path.islink(f):
-            continue
-        if not os.path.exists(f):
-            continue
-        conf_list.append(f)
-
-    # Remove the leading './'
-    for i in range(0, len(conf_list)):
-        conf_list[i] = conf_list[i][1:]
-
-    os.chdir(cwd)
-    return conf_list
-
-def checkbuildpath(file, d):
-    tmpdir = d.getVar('TMPDIR')
-    with open(file) as f:
-        file_content = f.read()
-        if tmpdir in file_content:
-            return True
-
-    return False
-
-def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
-    debugfiles = {}
-
-    for line in dwarfsrcfiles_output.splitlines():
-        if line.startswith("\t"):
-            debugfiles[os.path.normpath(line.split()[0])] = ""
-
-    return debugfiles.keys()
-
-def source_info(file, d, fatal=True):
-    import subprocess
-
-    cmd = ["dwarfsrcfiles", file]
-    try:
-        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
-        retval = 0
-    except subprocess.CalledProcessError as exc:
-        output = exc.output
-        retval = exc.returncode
-
-    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
-    if retval != 0 and retval != 255:
-        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
-        if fatal:
-            bb.fatal(msg)
-        bb.note(msg)
-
-    debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
-
-    return list(debugsources)
-
-def splitdebuginfo(file, dvar, dv, d):
-    # Function to split a single file into two components, one is the stripped
-    # target system binary, the other contains any debugging information. The
-    # two files are linked to reference each other.
-    #
-    # return a mapping of files:debugsources
-
-    import stat
-    import subprocess
-
-    src = file[len(dvar):]
-    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
-    debugfile = dvar + dest
-    sources = []
-
-    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
-        if oe.package.is_kernel_module_signed(file):
-            bb.debug(1, "Skip strip on signed module %s" % file)
-            return (file, sources)
-
-    # Split the file...
-    bb.utils.mkdirhier(os.path.dirname(debugfile))
-    #bb.note("Split %s -> %s" % (file, debugfile))
-    # Only store off the hard link reference if we successfully split!
-
-    dvar = d.getVar('PKGD')
-    objcopy = d.getVar("OBJCOPY")
-
-    newmode = None
-    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
-        origmode = os.stat(file)[stat.ST_MODE]
-        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
-        os.chmod(file, newmode)
-
-    # We need to extract the debug src information here...
-    if dv["srcdir"]:
-        sources = source_info(file, d)
-
-    bb.utils.mkdirhier(os.path.dirname(debugfile))
-
-    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
-
-    # Set the debuglink to have the view of the file path on the target
-    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
-
-    if newmode:
-        os.chmod(file, origmode)
-
-    return (file, sources)
-
-def splitstaticdebuginfo(file, dvar, dv, d):
-    # Unlike the function above, there is no way to split a static library
-    # into two components.  So to get similar results we will copy the unmodified
-    # static library (containing the debug symbols) into a new directory.
-    # We will then strip (preserving symbols) the static library in the
-    # typical location.
-    #
-    # return a mapping of files:debugsources
-
-    import stat
-    import shutil
-
-    src = file[len(dvar):]
-    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
-    debugfile = dvar + dest
-    sources = []
-
-    # Copy the file...
-    bb.utils.mkdirhier(os.path.dirname(debugfile))
-    #bb.note("Copy %s -> %s" % (file, debugfile))
-
-    dvar = d.getVar('PKGD')
-
-    newmode = None
-    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
-        origmode = os.stat(file)[stat.ST_MODE]
-        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
-        os.chmod(file, newmode)
-
-    # We need to extract the debug src information here...
-    if dv["srcdir"]:
-        sources = source_info(file, d)
-
-    bb.utils.mkdirhier(os.path.dirname(debugfile))
-
-    # Copy the unmodified item to the debug directory
-    shutil.copy2(file, debugfile)
-
-    if newmode:
-        os.chmod(file, origmode)
-
-    return (file, sources)
-
-def inject_minidebuginfo(file, dvar, dv, d):
-    # Extract just the symbols from debuginfo into minidebuginfo,
-    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
-    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
-
-    import subprocess
-
-    readelf = d.getVar('READELF')
-    nm = d.getVar('NM')
-    objcopy = d.getVar('OBJCOPY')
-
-    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
-
-    src = file[len(dvar):]
-    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
-    debugfile = dvar + dest
-    minidebugfile = minidebuginfodir + src + '.minidebug'
-    bb.utils.mkdirhier(os.path.dirname(minidebugfile))
-
-    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
-    # so skip it.
-    if not os.path.exists(debugfile):
-        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
-        return
-
-    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
-    # We will exclude all of these from minidebuginfo to save space.
-    remove_section_names = []
-    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
-        fields = line.split()
-        if len(fields) < 8:
-            continue
-        name = fields[0]
-        type = fields[1]
-        flags = fields[7]
-        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
-        if name.startswith('.debug_'):
-            continue
-        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
-            remove_section_names.append(name)
-
-    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
-    # because they are always present in the binary.
-    dynsyms = set()
-    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
-        dynsyms.add(line.split()[0])
-
-    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
-    # These are the ones we want to keep in minidebuginfo.
-    keep_symbols_file = minidebugfile + '.symlist'
-    found_any_symbols = False
-    with open(keep_symbols_file, 'w') as f:
-        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
-            fields = line.split('|')
-            if len(fields) < 7:
-                continue
-            name = fields[0].strip()
-            type = fields[3].strip()
-            if type == 'FUNC' and name not in dynsyms:
-                f.write('{}\n'.format(name))
-                found_any_symbols = True
-
-    if not found_any_symbols:
-        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
-        return
-
-    bb.utils.remove(minidebugfile)
-    bb.utils.remove(minidebugfile + '.xz')
-
-    subprocess.check_call([objcopy, '-S'] +
-                          ['--remove-section={}'.format(s) for s in remove_section_names] +
-                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
-
-    subprocess.check_call(['xz', '--keep', minidebugfile])
-
-    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
-
-def copydebugsources(debugsrcdir, sources, d):
-    # The debug src information written out to sourcefile is further processed
-    # and copied to the destination here.
-
-    import stat
-    import subprocess
-
-    if debugsrcdir and sources:
-        sourcefile = d.expand("${WORKDIR}/debugsources.list")
-        bb.utils.remove(sourcefile)
-
-        # filenames are null-separated - this is an artefact of the previous use
-        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
-        # is still assuming that.
-        debuglistoutput = '\0'.join(sources) + '\0'
-        with open(sourcefile, 'a') as sf:
-           sf.write(debuglistoutput)
-
-        dvar = d.getVar('PKGD')
-        strip = d.getVar("STRIP")
-        objcopy = d.getVar("OBJCOPY")
-        workdir = d.getVar("WORKDIR")
-        sdir = d.getVar("S")
-        sparentdir = os.path.dirname(os.path.dirname(sdir))
-        sbasedir = os.path.basename(os.path.dirname(sdir)) + "/" + os.path.basename(sdir)
-        workparentdir = os.path.dirname(os.path.dirname(workdir))
-        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)
-
-        # If S isn't based on WORKDIR we can infer our sources are located elsewhere,
-        # e.g. using externalsrc; use S as base for our dirs
-        if workdir in sdir or 'work-shared' in sdir:
-            basedir = workbasedir
-            parentdir = workparentdir
-        else:
-            basedir = sbasedir
-            parentdir = sparentdir
-
-        # If build path exists in sourcefile, it means toolchain did not use
-        # -fdebug-prefix-map to compile
-        if checkbuildpath(sourcefile, d):
-            localsrc_prefix = parentdir + "/"
-        else:
-            localsrc_prefix = "/usr/src/debug/"
-
-        nosuchdir = []
-        basepath = dvar
-        for p in debugsrcdir.split("/"):
-            basepath = basepath + "/" + p
-            if not cpath.exists(basepath):
-                nosuchdir.append(basepath)
-        bb.utils.mkdirhier(basepath)
-        cpath.updatecache(basepath)
-
-        # Ignore files from the recipe sysroots (target and native)
-        processdebugsrc =  "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | "
-        # We need to ignore files that are not actually ours
-        # we do this by only paying attention to items from this package
-        processdebugsrc += "fgrep -zw '%s' | "
-        # Remove prefix in the source paths
-        processdebugsrc += "sed 's#%s##g' | "
-        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"
-
-        cmd = processdebugsrc % (sourcefile, basedir, localsrc_prefix, parentdir, dvar, debugsrcdir)
-        try:
-            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
-        except subprocess.CalledProcessError:
-            # Can "fail" if internal headers/transient sources are attempted
-            pass
-
-        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
-        # Work around this by manually finding and copying any symbolic links that made it through.
-        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
-                (dvar, debugsrcdir, dvar, debugsrcdir, parentdir, dvar, debugsrcdir)
-        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
-
-
-        # debugsources.list may be polluted from the host if we used externalsrc,
-        # cpio uses copy-pass and may have just created a directory structure
-        # matching the one from the host; if that's the case move those files to
-        # debugsrcdir to avoid host contamination.
-        # Empty dir structure will be deleted in the next step.
-
-        # Same check as above for externalsrc
-        if workdir not in sdir:
-            if os.path.exists(dvar + debugsrcdir + sdir):
-                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
-                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
-
-        # The copy by cpio may have resulted in some empty directories!  Remove these
-        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
-        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
-
-        # Also remove debugsrcdir if it's empty
-        for p in nosuchdir[::-1]:
-            if os.path.exists(p) and not os.listdir(p):
-                os.rmdir(p)
-
-#
-# Package data handling routines
-#
-
-def get_package_mapping (pkg, basepkg, d, depversions=None):
-    import oe.packagedata
-
-    data = oe.packagedata.read_subpkgdata(pkg, d)
-    key = "PKG:%s" % pkg
-
-    if key in data:
-        if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
-            bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
-        # Have to avoid undoing the write_extra_pkgs(global_variants...)
-        if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
-            and data[key] == basepkg:
-            return pkg
-        if depversions == []:
-            # Avoid returning a mapping if the renamed package rprovides its original name
-            rprovkey = "RPROVIDES:%s" % pkg
-            if rprovkey in data:
-                if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
-                    bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
-                    return pkg
-        # Do map to rewritten package name
-        return data[key]
-
-    return pkg
-
-def get_package_additional_metadata (pkg_type, d):
-    base_key = "PACKAGE_ADD_METADATA"
-    for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
-        if d.getVar(key, False) is None:
-            continue
-        d.setVarFlag(key, "type", "list")
-        if d.getVarFlag(key, "separator") is None:
-            d.setVarFlag(key, "separator", "\\n")
-        metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
-        return "\n".join(metadata_fields).strip()
-
-def runtime_mapping_rename (varname, pkg, d):
-    #bb.note("%s before: %s" % (varname, d.getVar(varname)))
-
-    new_depends = {}
-    deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
-    for depend, depversions in deps.items():
-        new_depend = get_package_mapping(depend, pkg, d, depversions)
-        if depend != new_depend:
-            bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
-        new_depends[new_depend] = deps[depend]
-
-    d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
-
-    #bb.note("%s after: %s" % (varname, d.getVar(varname)))
-
-#
-# Used by do_packagedata (and possibly other routines post do_package)
-#
-
-PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}"
-PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}"
-package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
-package_get_auto_pr[vardeps] += "PRSERV_ACTIVE"
-python package_get_auto_pr() {
-    import oe.prservice
-
-    def get_do_package_hash(pn):
-        if d.getVar("BB_RUNTASK") != "do_package":
-            taskdepdata = d.getVar("BB_TASKDEPDATA", False)
-            for dep in taskdepdata:
-                if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
-                    return taskdepdata[dep][6]
-        return None
-
-    # Support per recipe PRSERV_HOST
-    pn = d.getVar('PN')
-    host = d.getVar("PRSERV_HOST_" + pn)
-    if not (host is None):
-        d.setVar("PRSERV_HOST", host)
-
-    pkgv = d.getVar("PKGV")
-
-    # PR Server not active, handle AUTOINC
-    if not d.getVar('PRSERV_HOST'):
-        d.setVar("PRSERV_PV_AUTOINC", "0")
-        return
-
-    auto_pr = None
-    pv = d.getVar("PV")
-    version = d.getVar("PRAUTOINX")
-    pkgarch = d.getVar("PACKAGE_ARCH")
-    checksum = get_do_package_hash(pn)
-
-    # If do_package isn't in the dependencies, we can't get the checksum...
-    if not checksum:
-        bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
-        #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
-        #for dep in taskdepdata:
-        #    bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
-        return
-
-    if d.getVar('PRSERV_LOCKDOWN'):
-        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
-        if auto_pr is None:
-            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
-        d.setVar('PRAUTO',str(auto_pr))
-        return
-
-    try:
-        conn = oe.prservice.prserv_make_conn(d)
-        if conn is not None:
-            if "AUTOINC" in pkgv:
-                srcpv = bb.fetch2.get_srcrev(d)
-                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
-                value = conn.getPR(base_ver, pkgarch, srcpv)
-                d.setVar("PRSERV_PV_AUTOINC", str(value))
-
-            auto_pr = conn.getPR(version, pkgarch, checksum)
-            conn.close()
-    except Exception as e:
-        bb.fatal("Can NOT get PRAUTO, exception %s" %  str(e))
-    if auto_pr is None:
-        bb.fatal("Can NOT get PRAUTO from remote PR service")
-    d.setVar('PRAUTO',str(auto_pr))
-}
-
-#
-# Package functions suitable for inclusion in PACKAGEFUNCS
-#
-
-python package_convert_pr_autoinc() {
-    pkgv = d.getVar("PKGV")
-
-    # Adjust pkgv as necessary...
-    if 'AUTOINC' in pkgv:
-        d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
-
-    # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
-    d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
-    d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
-}
-
-LOCALEBASEPN ??= "${PN}"
-
-python package_do_split_locales() {
-    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
-        bb.debug(1, "package requested not splitting locales")
-        return
-
-    packages = (d.getVar('PACKAGES') or "").split()
-
-    datadir = d.getVar('datadir')
-    if not datadir:
-        bb.note("datadir not defined")
-        return
-
-    dvar = d.getVar('PKGD')
-    pn = d.getVar('LOCALEBASEPN')
-
-    if pn + '-locale' in packages:
-        packages.remove(pn + '-locale')
-
-    localedir = os.path.join(dvar + datadir, 'locale')
-
-    if not cpath.isdir(localedir):
-        bb.debug(1, "No locale files in this package")
-        return
-
-    locales = os.listdir(localedir)
-
-    summary = d.getVar('SUMMARY') or pn
-    description = d.getVar('DESCRIPTION') or ""
-    locale_section = d.getVar('LOCALE_SECTION')
-    mlprefix = d.getVar('MLPREFIX') or ""
-    for l in sorted(locales):
-        ln = legitimize_package_name(l)
-        pkg = pn + '-locale-' + ln
-        packages.append(pkg)
-        d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
-        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
-        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
-        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
-        d.setVar('DESCRIPTION:' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l))
-        if locale_section:
-            d.setVar('SECTION:' + pkg, locale_section)
-
-    d.setVar('PACKAGES', ' '.join(packages))
-
-    # Disabled by RP 18/06/07
-    # Wildcards aren't supported in debian
-    # They break with ipkg since glibc-locale* will mean that
-    # glibc-localedata-translit* won't install as a dependency
-    # for some other package which breaks meta-toolchain
-    # Probably breaks since virtual-locale- isn't provided anywhere
-    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
-    #rdep.append('%s-locale*' % pn)
-    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
-}
-
-python perform_packagecopy () {
-    import subprocess
-    import shutil
-
-    dest = d.getVar('D')
-    dvar = d.getVar('PKGD')
-
-    # Start package population by taking a copy of the installed
-    # files to operate on
-    # Preserve sparse files and hard links
-    cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
-    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
-
-    # replace RPATHs for the nativesdk binaries, to make them relocatable
-    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
-        rpath_replace (dvar, d)
-}
-perform_packagecopy[cleandirs] = "${PKGD}"
-perform_packagecopy[dirs] = "${PKGD}"
-
-# We generate a master list of directories to process, we start by
-# seeding this list with reasonable defaults, then load from
-# the fs-perms.txt files
-python fixup_perms () {
-    import pwd, grp
-
-    # init using a string with the same format as a line as documented in
-    # the fs-perms.txt file
-    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
-    # <path> link <link target>
-    #
-    # __str__ can be used to print out an entry in the input format
-    #
-    # if fs_perms_entry.path is None:
-    #    an error occurred
-    # if fs_perms_entry.link, you can retrieve:
-    #    fs_perms_entry.path = path
-    #    fs_perms_entry.link = target of link
-    # if not fs_perms_entry.link, you can retrieve:
-    #    fs_perms_entry.path = path
-    #    fs_perms_entry.mode = expected dir mode or None
-    #    fs_perms_entry.uid = expected uid or -1
-    #    fs_perms_entry.gid = expected gid or -1
-    #    fs_perms_entry.walk = 'true' or something else
-    #    fs_perms_entry.fmode = expected file mode or None
-    #    fs_perms_entry.fuid = expected file uid or -1
-    #    fs_perms_entry.fgid = expected file gid or -1
-    class fs_perms_entry():
-        def __init__(self, line):
-            lsplit = line.split()
-            if len(lsplit) == 3 and lsplit[1].lower() == "link":
-                self._setlink(lsplit[0], lsplit[2])
-            elif len(lsplit) == 8:
-                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
-            else:
-                msg = "Fixup Perms: invalid config line %s" % line
-                oe.qa.handle_error("perm-config", msg, d)
-                self.path = None
-                self.link = None
-
-        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
-            self.path = os.path.normpath(path)
-            self.link = None
-            self.mode = self._procmode(mode)
-            self.uid  = self._procuid(uid)
-            self.gid  = self._procgid(gid)
-            self.walk = walk.lower()
-            self.fmode = self._procmode(fmode)
-            self.fuid = self._procuid(fuid)
-            self.fgid = self._procgid(fgid)
-
-        def _setlink(self, path, link):
-            self.path = os.path.normpath(path)
-            self.link = link
-
-        def _procmode(self, mode):
-            if not mode or (mode and mode == "-"):
-                return None
-            else:
-                return int(mode,8)
-
-        # Note uid/gid -1 has special significance in os.lchown
-        def _procuid(self, uid):
-            if uid is None or uid == "-":
-                return -1
-            elif uid.isdigit():
-                return int(uid)
-            else:
-                return pwd.getpwnam(uid).pw_uid
-
-        def _procgid(self, gid):
-            if gid is None or gid == "-":
-                return -1
-            elif gid.isdigit():
-                return int(gid)
-            else:
-                return grp.getgrnam(gid).gr_gid
-
-        # Use for debugging the entries
-        def __str__(self):
-            if self.link:
-                return "%s link %s" % (self.path, self.link)
-            else:
-                mode = "-"
-                if self.mode:
-                    mode = "0%o" % self.mode
-                fmode = "-"
-                if self.fmode:
-                    fmode = "0%o" % self.fmode
-                uid = self._mapugid(self.uid)
-                gid = self._mapugid(self.gid)
-                fuid = self._mapugid(self.fuid)
-                fgid = self._mapugid(self.fgid)
-                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
-
-        def _mapugid(self, id):
-            if id is None or id == -1:
-                return "-"
-            else:
-                return "%d" % id
-
-    # Fix the permission, owner and group of path
-    def fix_perms(path, mode, uid, gid, dir):
-        if mode and not os.path.islink(path):
-            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
-            os.chmod(path, mode)
-        # -1 is a special value that means don't change the uid/gid
-        # if they are BOTH -1, don't bother to lchown
-        if not (uid == -1 and gid == -1):
-            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
-            os.lchown(path, uid, gid)
-
-    # Return a list of configuration files based on either the default
-    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
-    # paths are resolved via BBPATH
-    def get_fs_perms_list(d):
-        str = ""
-        bbpath = d.getVar('BBPATH')
-        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
-        for conf_file in fs_perms_tables.split():
-            confpath = bb.utils.which(bbpath, conf_file)
-            if confpath:
-                str += " %s" % bb.utils.which(bbpath, conf_file)
-            else:
-                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
-        return str
-
-
-
-    dvar = d.getVar('PKGD')
-
-    fs_perms_table = {}
-    fs_link_table = {}
-
-    # By default all of the standard directories specified in
-    # bitbake.conf will get 0755 root:root.
-    target_path_vars = [    'base_prefix',
-                'prefix',
-                'exec_prefix',
-                'base_bindir',
-                'base_sbindir',
-                'base_libdir',
-                'datadir',
-                'sysconfdir',
-                'servicedir',
-                'sharedstatedir',
-                'localstatedir',
-                'infodir',
-                'mandir',
-                'docdir',
-                'bindir',
-                'sbindir',
-                'libexecdir',
-                'libdir',
-                'includedir',
-                'oldincludedir' ]
-
-    for path in target_path_vars:
-        dir = d.getVar(path) or ""
-        if dir == "":
-            continue
-        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))
-
-    # Now we actually load from the configuration files
-    for conf in get_fs_perms_list(d).split():
-        if not os.path.exists(conf):
-            continue
-        with open(conf) as f:
-            for line in f:
-                if line.startswith('#'):
-                    continue
-                lsplit = line.split()
-                if len(lsplit) == 0:
-                    continue
-                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
-                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
-                    oe.qa.handle_error("perm-line", msg, d)
-                    continue
-                entry = fs_perms_entry(d.expand(line))
-                if entry and entry.path:
-                    if entry.link:
-                        fs_link_table[entry.path] = entry
-                        if entry.path in fs_perms_table:
-                            fs_perms_table.pop(entry.path)
-                    else:
-                        fs_perms_table[entry.path] = entry
-                        if entry.path in fs_link_table:
-                            fs_link_table.pop(entry.path)
-
-    # Debug -- list out in-memory table
-    #for dir in fs_perms_table:
-    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
-    #for link in fs_link_table:
-    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
-
-    # We process links first, so we can go back and fixup directory ownership
-    # for any newly created directories
-    # Process in sorted order so /run gets created before /run/lock, etc.
-    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
-        link = entry.link
-        dir = entry.path
-        origin = dvar + dir
-        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
-            continue
-
-        if link[0] == "/":
-            target = dvar + link
-            ptarget = link
-        else:
-            target = os.path.join(os.path.dirname(origin), link)
-            ptarget = os.path.join(os.path.dirname(dir), link)
-        if os.path.exists(target):
-            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
-            oe.qa.handle_error("perm-link", msg, d)
-            continue
-
-        # Create path to move directory to, move it, and then setup the symlink
-        bb.utils.mkdirhier(os.path.dirname(target))
-        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
-        bb.utils.rename(origin, target)
-        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
-        os.symlink(link, origin)
-
-    for dir in fs_perms_table:
-        origin = dvar + dir
-        if not (cpath.exists(origin) and cpath.isdir(origin)):
-            continue
-
-        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
-
-        if fs_perms_table[dir].walk == 'true':
-            for root, dirs, files in os.walk(origin):
-                for dr in dirs:
-                    each_dir = os.path.join(root, dr)
-                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
-                for f in files:
-                    each_file = os.path.join(root, f)
-                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
-}
-
-def package_debug_vars(d):
-    # We default to '.debug' style
-    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
-        # Single debug-file-directory style debug info
-        debug_vars = {
-            "append": ".debug",
-            "staticappend": "",
-            "dir": "",
-            "staticdir": "",
-            "libdir": "/usr/lib/debug",
-            "staticlibdir": "/usr/lib/debug-static",
-            "srcdir": "/usr/src/debug",
-        }
-    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
-        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
-        debug_vars = {
-            "append": "",
-            "staticappend": "",
-            "dir": "/.debug",
-            "staticdir": "/.debug-static",
-            "libdir": "",
-            "staticlibdir": "",
-            "srcdir": "",
-        }
-    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
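-        # Same on-disk layout as the default ".debug" style; the split of
-        # /usr/src/debug into a separate ${PN}-src package happens later in
-        # populate_packages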
-        debug_vars = {
-            "append": "",
-            "staticappend": "",
-            "dir": "/.debug",
-            "staticdir": "/.debug-static",
-            "libdir": "",
-            "staticlibdir": "",
-            "srcdir": "/usr/src/debug",
-        }
-    else:
-        # Original OE-core, a.k.a. ".debug", style debug info
-        debug_vars = {
-            "append": "",
-            "staticappend": "",
-            "dir": "/.debug",
-            "staticdir": "/.debug-static",
-            "libdir": "",
-            "staticlibdir": "",
-            "srcdir": "/usr/src/debug",
-        }
-
-    return debug_vars
-
-python split_and_strip_files () {
-    import stat, errno
-    import subprocess
-
-    dvar = d.getVar('PKGD')
-    pn = d.getVar('PN')
-    hostos = d.getVar('HOST_OS')
-
-    oldcwd = os.getcwd()
-    os.chdir(dvar)
-
-    dv = package_debug_vars(d)
-
-    #
-    # First let's figure out all of the files we may have to process ... do this only once!
-    #
-    elffiles = {}
-    symlinks = {}
-    staticlibs = []
-    inodes = {}
-    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
-    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
-    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
-    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
-            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
-        checkelf = {}
-        checkelflinks = {}
-        for root, dirs, files in cpath.walk(dvar):
-            for f in files:
-                file = os.path.join(root, f)
-
-                # Skip debug files
-                if dv["append"] and file.endswith(dv["append"]):
-                    continue
-                if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
-                    continue
-
-                if file in skipfiles:
-                    continue
-
-                if oe.package.is_static_lib(file):
-                    staticlibs.append(file)
-                    continue
-
-                try:
-                    ltarget = cpath.realpath(file, dvar, False)
-                    s = cpath.lstat(ltarget)
-                except OSError as e:
-                    (err, strerror) = e.args
-                    if err != errno.ENOENT:
-                        raise
-                    # Skip broken symlinks
-                    continue
-                if not s:
-                    continue
-                # Check it's an executable
-                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
-                        or (s[stat.ST_MODE] & stat.S_IXOTH) \
-                        or ((file.startswith(libdir) or file.startswith(baselibdir)) \
-                        and (".so" in f or ".node" in f)) \
-                        or (f.startswith('vmlinux') or ".ko" in f):
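-                    # i.e. anything executable, shared objects (.so/.node)
-                    # under libdir/base_libdir, kernel modules (.ko) and
-                    # vmlinux images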
-
-                    if cpath.islink(file):
-                        checkelflinks[file] = ltarget
-                        continue
-                    # Use a reference of device ID and inode number to identify files
-                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
-                    checkelf[file] = (file, file_reference)
-
-        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
-        results_map = {}
-        for (ltarget, elf_file) in results:
-            results_map[ltarget] = elf_file
-        for file in checkelflinks:
-            ltarget = checkelflinks[file]
-            # If it's a symlink, and points to an ELF file, we capture the readlink target
-            if results_map[ltarget]:
-                target = os.readlink(file)
-                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
-                symlinks[file] = target
-
-        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
-
-        # Sort results by file path. This ensures that the files are always
-        # processed in the same order, which is important to make sure builds
-        # are reproducible when dealing with hardlinks
-        results.sort(key=lambda x: x[0])
-
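-        # The value returned for each file is a bitmask: bit 0x1 means the
-        # file is ELF, bit 0x2 means it is already stripped (16 is used
-        # further below to tag static libraries for stripping)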
-        for (file, elf_file) in results:
-            # It's a file (or hardlink), not a link
-            # ...but is it ELF, and is it already stripped?
-            if elf_file & 1:
-                if elf_file & 2:
-                    if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
-                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
-                    else:
-                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
-                        oe.qa.handle_error("already-stripped", msg, d)
-                    continue
-
-                # At this point we have an unstripped elf file. We need to:
-                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
-                #  b) Only strip any hardlinked file once (no races)
-                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
-
-                # Use a reference of device ID and inode number to identify files
-                file_reference = checkelf[file][1]
-                if file_reference in inodes:
-                    os.unlink(file)
-                    os.link(inodes[file_reference][0], file)
-                    inodes[file_reference].append(file)
-                else:
-                    inodes[file_reference] = [file]
-                    # break hardlink
-                    bb.utils.break_hardlinks(file)
-                    elffiles[file] = elf_file
-                # Modified the file so clear the cache
-                cpath.updatecache(file)
-
-    def strip_pkgd_prefix(f):
-        nonlocal dvar
-
-        if f.startswith(dvar):
-            return f[len(dvar):]
-
-        return f
-
-    #
-    # First let's process debug splitting
-    #
-    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
-        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
-
-        if dv["srcdir"] and not hostos.startswith("mingw"):
-            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
-                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
-            else:
-                for file in staticlibs:
-                    results.append((file, source_info(file, d)))
-
-        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
-
-        sources = set()
-        for r in results:
-            sources.update(r[1])
-
-        # Hardlink our debug symbols to the other hardlink copies
-        for ref in inodes:
-            if len(inodes[ref]) == 1:
-                continue
-
-            target = inodes[ref][0][len(dvar):]
-            for file in inodes[ref][1:]:
-                src = file[len(dvar):]
-                dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
-                fpath = dvar + dest
-                ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
-                bb.utils.mkdirhier(os.path.dirname(fpath))
-                # Only one hardlink of separated debug info file in each directory
-                if not os.access(fpath, os.R_OK):
-                    #bb.note("Link %s -> %s" % (fpath, ftarget))
-                    os.link(ftarget, fpath)
-
-        # Create symlinks for all cases we were able to split symbols
-        for file in symlinks:
-            src = file[len(dvar):]
-            dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
-            fpath = dvar + dest
-            # Skip it if the target doesn't exist
-            try:
-                s = os.stat(fpath)
-            except OSError as e:
-                (err, strerror) = e.args
-                if err != errno.ENOENT:
-                    raise
-                continue
-
-            ltarget = symlinks[file]
-            lpath = os.path.dirname(ltarget)
-            lbase = os.path.basename(ltarget)
-            ftarget = ""
-            if lpath and lpath != ".":
-                ftarget += lpath + dv["dir"] + "/"
-            ftarget += lbase + dv["append"]
-            if lpath.startswith(".."):
-                ftarget = os.path.join("..", ftarget)
-            bb.utils.mkdirhier(os.path.dirname(fpath))
-            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
-            os.symlink(ftarget, fpath)
-
-        # Process the dv["srcdir"] if requested...
-        # This copies and places the referenced sources for later debugging...
-        copydebugsources(dv["srcdir"], sources, d)
-    #
-    # End of debug splitting
-    #
-
-    #
-    # Now let's go back over things and strip them
-    #
-    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
-        strip = d.getVar("STRIP")
-        sfiles = []
-        for file in elffiles:
-            elf_file = int(elffiles[file])
-            #bb.note("Strip %s" % file)
-            sfiles.append((file, elf_file, strip))
-        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
-            for f in staticlibs:
-                sfiles.append((f, 16, strip))
-
-        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
-
-    # Build "minidebuginfo" and reinject it back into the stripped binaries
-    if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
-        oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
-                                     extraargs=(dvar, dv, d))
-
-    #
-    # End of strip
-    #
-    os.chdir(oldcwd)
-}
-
-python populate_packages () {
-    import glob, re
-
-    workdir = d.getVar('WORKDIR')
-    outdir = d.getVar('DEPLOY_DIR')
-    dvar = d.getVar('PKGD')
-    packages = d.getVar('PACKAGES').split()
-    pn = d.getVar('PN')
-
-    bb.utils.mkdirhier(outdir)
-    os.chdir(dvar)
-
-    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
-
-    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
-
-    # If debug-with-srcpkg mode is enabled then add the source package if it
-    # doesn't exist and add the source file contents to the source package.
-    if split_source_package:
-        src_package_name = ('%s-src' % d.getVar('PN'))
-        if src_package_name not in packages:
-            packages.append(src_package_name)
-        d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
-
-    # Sanity check PACKAGES for duplicates
-    # Sanity should be moved to sanity.bbclass once we have the infrastructure
-    package_dict = {}
-
-    for i, pkg in enumerate(packages):
-        if pkg in package_dict:
-            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
-            oe.qa.handle_error("packages-list", msg, d)
-        # Ensure the source package gets the chance to pick up the source files
-        # before the debug package by ordering it first in PACKAGES. Whether it
-        # actually picks up any source files is controlled by
-        # PACKAGE_DEBUG_SPLIT_STYLE.
-        elif pkg.endswith("-src"):
-            package_dict[pkg] = (10, i)
-        elif autodebug and pkg.endswith("-dbg"):
-            package_dict[pkg] = (30, i)
-        else:
-            package_dict[pkg] = (50, i)
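-    # Stable sort by (weight, original index): -src packages first (10), then
-    # -dbg (30) when autodebug is enabled, then everything else (50)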
-    packages = sorted(package_dict.keys(), key=package_dict.get)
-    d.setVar('PACKAGES', ' '.join(packages))
-    pkgdest = d.getVar('PKGDEST')
-
-    seen = []
-
-    # os.mkdir masks the permissions with umask so we have to unset it first
-    oldumask = os.umask(0)
-
-    debug = []
-    for root, dirs, files in cpath.walk(dvar):
-        dir = root[len(dvar):]
-        if not dir:
-            dir = os.sep
-        for f in (files + dirs):
-            path = "." + os.path.join(dir, f)
-            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
-                debug.append(path)
-
-    for pkg in packages:
-        root = os.path.join(pkgdest, pkg)
-        bb.utils.mkdirhier(root)
-
-        filesvar = d.getVar('FILES:%s' % pkg) or ""
-        if "//" in filesvar:
-            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
-            oe.qa.handle_error("files-invalid", msg, d)
-            filesvar = filesvar.replace("//", "/")
-
-        origfiles = filesvar.split()
-        files, symlink_paths = files_from_filevars(origfiles)
-
-        if autodebug and pkg.endswith("-dbg"):
-            files.extend(debug)
-
-        for file in files:
-            if (not cpath.islink(file)) and (not cpath.exists(file)):
-                continue
-            if file in seen:
-                continue
-            seen.append(file)
-
-            def mkdir(src, dest, p):
-                src = os.path.join(src, p)
-                dest = os.path.join(dest, p)
-                fstat = cpath.stat(src)
-                os.mkdir(dest)
-                os.chmod(dest, fstat.st_mode)
-                os.chown(dest, fstat.st_uid, fstat.st_gid)
-                if p not in seen:
-                    seen.append(p)
-                cpath.updatecache(dest)
-
-            def mkdir_recurse(src, dest, paths):
-                if cpath.exists(dest + '/' + paths):
-                    return
-                while paths.startswith("./"):
-                    paths = paths[2:]
-                p = "."
-                for c in paths.split("/"):
-                    p = os.path.join(p, c)
-                    if not cpath.exists(os.path.join(dest, p)):
-                        mkdir(src, dest, p)
-
-            if cpath.isdir(file) and not cpath.islink(file):
-                mkdir_recurse(dvar, root, file)
-                continue
-
-            mkdir_recurse(dvar, root, os.path.dirname(file))
-            fpath = os.path.join(root, file)
-            if not cpath.islink(file):
-                os.link(file, fpath)
-                continue
-            ret = bb.utils.copyfile(file, fpath)
-            if ret is False or ret == 0:
-                bb.fatal("File population failed")
-
-        # Check if symlink paths exist
-        for file in symlink_paths:
-            if not os.path.exists(os.path.join(root, file)):
-                bb.fatal("File '%s' cannot be packaged into '%s' because its "
-                         "parent directory structure does not exist. One of "
-                         "its parent directories is a symlink whose target "
-                         "directory is not included in the package." %
-                         (file, pkg))
-
-    os.umask(oldumask)
-    os.chdir(workdir)
-
-    # Handle excluding packages with incompatible licenses
-    package_list = []
-    for pkg in packages:
-        licenses = d.getVar('_exclude_incompatible-' + pkg)
-        if licenses:
-            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
-            oe.qa.handle_error("incompatible-license", msg, d)
-        else:
-            package_list.append(pkg)
-    d.setVar('PACKAGES', ' '.join(package_list))
-
-    unshipped = []
-    for root, dirs, files in cpath.walk(dvar):
-        dir = root[len(dvar):]
-        if not dir:
-            dir = os.sep
-        for f in (files + dirs):
-            path = os.path.join(dir, f)
-            if ('.' + path) not in seen:
-                unshipped.append(path)
-
-    if unshipped:
-        msg = pn + ": Files/directories were installed but not shipped in any package:"
-        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
-            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
-        else:
-            for f in unshipped:
-                msg = msg + "\n  " + f
-            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
-            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
-            oe.qa.handle_error("installed-vs-shipped", msg, d)
-}
-populate_packages[dirs] = "${D}"
-
-python package_fixsymlinks () {
-    import errno
-    pkgdest = d.getVar('PKGDEST')
-    packages = d.getVar("PACKAGES", False).split()
-
-    dangling_links = {}
-    pkg_files = {}
-    for pkg in packages:
-        dangling_links[pkg] = []
-        pkg_files[pkg] = []
-        inst_root = os.path.join(pkgdest, pkg)
-        for path in pkgfiles[pkg]:
-            rpath = path[len(inst_root):]
-            pkg_files[pkg].append(rpath)
-            rtarget = cpath.realpath(path, inst_root, True, assume_dir=True)
-            if not cpath.lexists(rtarget):
-                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
-
-    newrdepends = {}
-    for pkg in dangling_links:
-        for l in dangling_links[pkg]:
-            found = False
-            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
-            for p in packages:
-                if l in pkg_files[p]:
-                    found = True
-                    bb.debug(1, "target found in %s" % p)
-                    if p == pkg:
-                        break
-                    if pkg not in newrdepends:
-                        newrdepends[pkg] = []
-                    newrdepends[pkg].append(p)
-                    break
-            if not found:
-                bb.note("%s contains dangling symlink to %s" % (pkg, l))
-
-    for pkg in newrdepends:
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
-        for p in newrdepends[pkg]:
-            if p not in rdepends:
-                rdepends[p] = []
-        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
-}
-
-
-python package_package_name_hook() {
-    """
-    A package_name_hook function can be used to rewrite the package names by
-    changing PKG.  For an example, see debian.bbclass.
-    """
-    pass
-}
-
-EXPORT_FUNCTIONS package_name_hook
-
-
-PKGDESTWORK = "${WORKDIR}/pkgdata"
-
-PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm"
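-# Each variable listed here is written per package into the pkgdata runtime
-# files by emit_pkgdata() below (via write_if_exists)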
-
-python emit_pkgdata() {
-    from glob import glob
-    import json
-    import bb.compress.zstd
-
-    def process_postinst_on_target(pkg, mlprefix):
-        pkgval = d.getVar('PKG:%s' % pkg)
-        if pkgval is None:
-            pkgval = pkg
-
-        defer_fragment = """
-if [ -n "$D" ]; then
-    $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
-    exit 0
-fi
-""" % (pkgval, mlprefix)
-
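-        # When the scriptlet runs at rootfs-creation time ($D is set), this
-        # fragment defers it via the postinst_intercept mechanism so that it
-        # runs on first boot instead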
-        postinst = d.getVar('pkg_postinst:%s' % pkg)
-        postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)
-
-        if postinst_ontarget:
-            bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
-            if not postinst:
-                postinst = '#!/bin/sh\n'
-            postinst += defer_fragment
-            postinst += postinst_ontarget
-            d.setVar('pkg_postinst:%s' % pkg, postinst)
-
-    def add_set_e_to_scriptlets(pkg):
-        for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
-            scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
-            if scriptlet:
-                scriptlet_split = scriptlet.split('\n')
-                if scriptlet_split[0].startswith("#!"):
-                    scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
-                else:
-                    scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
-            d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)
-
-    def write_if_exists(f, pkg, var):
-        def encode(s):
-            import codecs
-            c = codecs.getencoder("unicode_escape")
-            return c(s)[0].decode("latin1")
-
-        val = d.getVar('%s:%s' % (var, pkg))
-        if val:
-            f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
-            return val
-        val = d.getVar('%s' % (var))
-        if val:
-            f.write('%s: %s\n' % (var, encode(val)))
-        return val
-
-    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
-        for variant in variants:
-            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
-                fd.write("PACKAGES: %s\n" % ' '.join(
-                            map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
-
-    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
-        for variant in variants:
-            for pkg in packages.split():
-                ml_pkg = "%s-%s" % (variant, pkg)
-                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
-                with open(subdata_file, 'w') as fd:
-                    fd.write("PKG:%s: %s" % (ml_pkg, pkg))
-
-    packages = d.getVar('PACKAGES')
-    pkgdest = d.getVar('PKGDEST')
-    pkgdatadir = d.getVar('PKGDESTWORK')
-
-    data_file = pkgdatadir + d.expand("/${PN}")
-    with open(data_file, 'w') as fd:
-        fd.write("PACKAGES: %s\n" % packages)
-
-    pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
-
-    pn = d.getVar('PN')
-    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
-    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
-
-    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
-        write_extra_pkgs(variants, pn, packages, pkgdatadir)
-
-    if bb.data.inherits_class('allarch', d) and not variants \
-        and not bb.data.inherits_class('packagegroup', d):
-        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
-
-    workdir = d.getVar('WORKDIR')
-
-    for pkg in packages.split():
-        pkgval = d.getVar('PKG:%s' % pkg)
-        if pkgval is None:
-            pkgval = pkg
-            d.setVar('PKG:%s' % pkg, pkg)
-
-        extended_data = {
-            "files_info": {}
-        }
-
-        pkgdestpkg = os.path.join(pkgdest, pkg)
-        files = {}
-        files_extra = {}
-        total_size = 0
-        seen = set()
-        for f in pkgfiles[pkg]:
-            fpath = os.sep + os.path.relpath(f, pkgdestpkg)
-
-            fstat = os.lstat(f)
-            files[fpath] = fstat.st_size
-
-            extended_data["files_info"].setdefault(fpath, {})
-            extended_data["files_info"][fpath]['size'] = fstat.st_size
-
-            if fstat.st_ino not in seen:
-                seen.add(fstat.st_ino)
-                total_size += fstat.st_size
-
-            if fpath in pkgdebugsource:
-                extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
-                del pkgdebugsource[fpath]
-
-        d.setVar('FILES_INFO:' + pkg, json.dumps(files, sort_keys=True))
-
-        process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
-        add_set_e_to_scriptlets(pkg)
-
-        subdata_file = pkgdatadir + "/runtime/%s" % pkg
-        with open(subdata_file, 'w') as sf:
-            for var in (d.getVar('PKGDATA_VARS') or "").split():
-                val = write_if_exists(sf, pkg, var)
-
-            write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
-            for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
-                write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
-
-            write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
-            for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
-                write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
-
-            sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
-
-        subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
-        num_threads = int(d.getVar("BB_NUMBER_THREADS"))
-        with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
-            json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
-
-        # Symlinks needed for rprovides lookup
-        rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
-        if rprov:
-            for p in bb.utils.explode_deps(rprov):
-                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
-                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
-                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
-
-        allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
-        if not allow_empty:
-            allow_empty = d.getVar('ALLOW_EMPTY')
-        root = "%s/%s" % (pkgdest, pkg)
-        os.chdir(root)
-        g = glob('*')
-        if g or allow_empty == "1":
-            # Symlinks needed for reverse lookups (from the final package name)
-            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
-            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)
-
-            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
-            open(packagedfile, 'w').close()
-
-    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
-        write_extra_runtime_pkgs(variants, packages, pkgdatadir)
-
-    if bb.data.inherits_class('allarch', d) and not variants \
-        and not bb.data.inherits_class('packagegroup', d):
-        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
-
-}
-emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
-emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
-
-ldconfig_postinst_fragment() {
-if [ -z "$D" ]; then
-	if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
-fi
-}
-
-RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
-
-# Collect perfile run-time dependency metadata
-# Output:
-#  FILERPROVIDESFLIST:pkg - list of all files with provides
-#  FILERPROVIDES:filepath:pkg - per-file provides
-#
-#  FILERDEPENDSFLIST:pkg - list of all files with dependencies
-#  FILERDEPENDS:filepath:pkg - per-file dependencies
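-# (Illustrative, hypothetical example: a package "foo" shipping /usr/bin/foo
-# might end up with FILERDEPENDS:/usr/bin/foo:foo containing "libc.so.6")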
-
-python package_do_filedeps() {
-    if d.getVar('SKIP_FILEDEPS') == '1':
-        return
-
-    pkgdest = d.getVar('PKGDEST')
-    packages = d.getVar('PACKAGES')
-    rpmdeps = d.getVar('RPMDEPS')
-
-    def chunks(files, n):
-        return [files[i:i+n] for i in range(0, len(files), n)]
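-    # Files are fed to rpmdeps in chunks of 100 per work item, presumably to
-    # keep each invocation's argument list a manageable size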
-
-    pkglist = []
-    for pkg in packages.split():
-        if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
-            continue
-        if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
-            continue
-        for files in chunks(pkgfiles[pkg], 100):
-            pkglist.append((pkg, files, rpmdeps, pkgdest))
-
-    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
-
-    provides_files = {}
-    requires_files = {}
-
-    for result in processed:
-        (pkg, provides, requires) = result
-
-        if pkg not in provides_files:
-            provides_files[pkg] = []
-        if pkg not in requires_files:
-            requires_files[pkg] = []
-
-        for file in sorted(provides):
-            provides_files[pkg].append(file)
-            key = "FILERPROVIDES:" + file + ":" + pkg
-            d.appendVar(key, " " + " ".join(provides[file]))
-
-        for file in sorted(requires):
-            requires_files[pkg].append(file)
-            key = "FILERDEPENDS:" + file + ":" + pkg
-            d.appendVar(key, " " + " ".join(requires[file]))
-
-    for pkg in requires_files:
-        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
-    for pkg in provides_files:
-        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
-}
-
-SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
-SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
-
-python package_do_shlibs() {
-    import itertools
-    import re, shlex
-    import subprocess
-
-    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
-    if exclude_shlibs:
-        bb.note("not generating shlibs")
-        return
-
-    lib_re = re.compile(r"^.*\.so")
-    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
-
-    packages = d.getVar('PACKAGES')
-
-    shlib_pkgs = []
-    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
-    if exclusion_list:
-        for pkg in packages.split():
-            if pkg not in exclusion_list.split():
-                shlib_pkgs.append(pkg)
-            else:
-                bb.note("not generating shlibs for %s" % pkg)
-    else:
-        shlib_pkgs = packages.split()
-
-    hostos = d.getVar('HOST_OS')
-
-    workdir = d.getVar('WORKDIR')
-
-    ver = d.getVar('PKGV')
-    if not ver:
-        msg = "PKGV not defined"
-        oe.qa.handle_error("pkgv-undefined", msg, d)
-        return
-
-    pkgdest = d.getVar('PKGDEST')
-
-    shlibswork_dir = d.getVar('SHLIBSWORKDIR')
-
-    def linux_so(file, pkg, pkgver, d):
-        needs_ldconfig = False
-        needed = set()
-        sonames = set()
-        renames = []
-        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
-        cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
-        fd = os.popen(cmd)
-        lines = fd.readlines()
-        fd.close()
-        rpath = tuple()
-        for l in lines:
-            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
-            if m:
-                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
-                rpath = tuple(map(os.path.normpath, rpaths))
-        for l in lines:
-            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
-            if m:
-                # needed is a set of tuples, so duplicate entries are filtered
-                # out automatically on add
-                needed.add((m.group(1), file, rpath))
-            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
-            if m:
-                this_soname = m.group(1)
-                prov = (this_soname, ldir, pkgver)
-                if prov not in sonames:
-                    # if the library is private (only used by this package)
-                    # then do not register it as a shlib provider
-                    import fnmatch
-                    if not any(fnmatch.fnmatch(this_soname, i) for i in private_libs):
-                        sonames.add(prov)
-                if libdir_re.match(os.path.dirname(file)):
-                    needs_ldconfig = True
-                if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
-                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
-        return (needs_ldconfig, needed, sonames, renames)
-
-    def darwin_so(file, needed, sonames, renames, pkgver):
-        if not os.path.exists(file):
-            return
-        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
-
-        def get_combinations(base):
-            #
-            # Given a base library name, find all combinations of this split by "." and "-"
-            #
-            combos = []
-            options = base.split(".")
-            for i in range(1, len(options) + 1):
-                combos.append(".".join(options[0:i]))
-            options = base.split("-")
-            for i in range(1, len(options) + 1):
-                combos.append("-".join(options[0:i]))
-            return combos
-
-        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
-            # Drop suffix
-            name = os.path.basename(file).rsplit(".",1)[0]
-            # Find all combinations
-            combos = get_combinations(name)
-            for combo in combos:
-                if combo not in sonames:
-                    prov = (combo, ldir, pkgver)
-                    sonames.add(prov)
-        if file.endswith('.dylib') or file.endswith('.so'):
-            rpath = []
-            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
-            out, err = p.communicate()
-            # If returned successfully, process stdout for results
-            if p.returncode == 0:
-                for l in out.split("\n"):
-                    l = l.strip()
-                    if l.startswith('path '):
-                        rpath.append(l.split()[1])
-
-        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
-        out, err = p.communicate()
-        # If returned successfully, process stdout for results
-        if p.returncode == 0:
-            for l in out.split("\n"):
-                l = l.strip()
-                if not l or l.endswith(":"):
-                    continue
-                if "is not an object file" in l:
-                    continue
-                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
-                # needed[pkg] is a set of tuples, so duplicates are filtered
-                # out automatically on add
-                if name:
-                    needed[pkg].add((name, file, tuple()))
-
-    def mingw_dll(file, needed, sonames, renames, pkgver):
-        if not os.path.exists(file):
-            return
-
-        if file.endswith(".dll"):
-            # assume all dlls are shared objects provided by the package
-            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
-
-        if (file.endswith(".dll") or file.endswith(".exe")):
-            # use objdump to search for "DLL Name: .*\.dll"
-            p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            out, err = p.communicate()
-            # process the output, grabbing all .dll names
-            if p.returncode == 0:
-                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
-                    dllname = m.group(1)
-                    if dllname:
-                        needed[pkg].add((dllname, file, tuple()))
-
-    snap_symlinks = d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1"
-
-    needed = {}
-
-    shlib_provider = oe.package.read_shlib_providers(d)
-
-    for pkg in shlib_pkgs:
-        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
-        private_libs = private_libs.split()
-        needs_ldconfig = False
-        bb.debug(2, "calculating shlib provides for %s" % pkg)
-
-        pkgver = d.getVar('PKGV:' + pkg)
-        if not pkgver:
-            pkgver = d.getVar('PV:' + pkg)
-        if not pkgver:
-            pkgver = ver
-
-        needed[pkg] = set()
-        sonames = set()
-        renames = []
-        linuxlist = []
-        for file in pkgfiles[pkg]:
-            if cpath.islink(file):
-                continue
-            if hostos == "darwin" or hostos == "darwin21":
-                darwin_so(file, needed, sonames, renames, pkgver)
-            elif hostos.startswith("mingw"):
-                mingw_dll(file, needed, sonames, renames, pkgver)
-            elif os.access(file, os.X_OK) or lib_re.match(file):
-                linuxlist.append(file)
-
-        if linuxlist:
-            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
-            for r in results:
-                ldconfig = r[0]
-                needed[pkg] |= r[1]
-                sonames |= r[2]
-                renames.extend(r[3])
-                needs_ldconfig = needs_ldconfig or ldconfig
-
-        for (old, new) in renames:
-            bb.note("Renaming %s to %s" % (old, new))
-            bb.utils.rename(old, new)
-            pkgfiles[pkg].remove(old)
-
-        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
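-        # Each line of the .list file has the form "<soname>:<dir>:<version>"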
-        if sonames:
-            with open(shlibs_file, 'w') as fd:
-                for s in sorted(sonames):
-                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
-                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
-                        if old_pkg != pkg:
-                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
-                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
-                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
-                    if s[0] not in shlib_provider:
-                        shlib_provider[s[0]] = {}
-                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
-        if needs_ldconfig:
-            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
-            postinst = d.getVar('pkg_postinst:%s' % pkg)
-            if not postinst:
-                postinst = '#!/bin/sh\n'
-            postinst += d.getVar('ldconfig_postinst_fragment')
-            d.setVar('pkg_postinst:%s' % pkg, postinst)
-        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
-
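-    # ASSUME_SHLIBS entries have the form <library>:<dep-package>[_<version>],
-    # e.g. (hypothetical) "libfoo.so.1:libfoo_1.2"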
-    assumed_libs = d.getVar('ASSUME_SHLIBS')
-    if assumed_libs:
-        libdir = d.getVar("libdir")
-        for e in assumed_libs.split():
-            l, dep_pkg = e.split(":")
-            lib_ver = None
-            dep_pkg = dep_pkg.rsplit("_", 1)
-            if len(dep_pkg) == 2:
-                lib_ver = dep_pkg[1]
-            dep_pkg = dep_pkg[0]
-            if l not in shlib_provider:
-                shlib_provider[l] = {}
-            shlib_provider[l][libdir] = (dep_pkg, lib_ver)
-
-    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
-
-    for pkg in shlib_pkgs:
-        bb.debug(2, "calculating shlib requirements for %s" % pkg)
-
-        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
-        private_libs = private_libs.split()
-
-        deps = list()
-        for n in needed[pkg]:
-            # if n is in private libraries, don't try to search provider for it
-            # this could cause problem in case some abc.bb provides private
-            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
-            # but skipping it is still better alternative than providing own
-            # version and then adding runtime dependency for the same system library
-            import fnmatch
-            if any(fnmatch.fnmatch(n[0], i) for i in private_libs):
-                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
-                continue
-            if n[0] in shlib_provider:
-                shlib_provider_map = shlib_provider[n[0]]
-                matches = set()
-                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
-                    if p in shlib_provider_map:
-                        matches.add(p)
-                if len(matches) > 1:
-                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
-                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
-                elif len(matches) == 1:
-                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
-
-                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
-
-                    if dep_pkg == pkg:
-                        continue
-
-                    if ver_needed:
-                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
-                    else:
-                        dep = dep_pkg
-                    if dep not in deps:
-                        deps.append(dep)
-                    continue
-            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
-
-        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
-        if os.path.exists(deps_file):
-            os.remove(deps_file)
-        if deps:
-            with open(deps_file, 'w') as fd:
-                for dep in sorted(deps):
-                    fd.write(dep + '\n')
-}
-
-python package_do_pkgconfig () {
-    import re
-
-    packages = d.getVar('PACKAGES')
-    workdir = d.getVar('WORKDIR')
-    pkgdest = d.getVar('PKGDEST')
-
-    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
-    shlibswork_dir = d.getVar('SHLIBSWORKDIR')
-
-    pc_re = re.compile(r'(.*)\.pc$')
-    var_re = re.compile(r'(.*)=(.*)')
-    field_re = re.compile(r'(.*): (.*)')
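-    # pc_re matches pkg-config (.pc) file paths, var_re matches "name=value"
-    # variable definitions, and field_re matches "Field: value" keyword lines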
-
-    pkgconfig_provided = {}
-    pkgconfig_needed = {}
-    for pkg in packages.split():
-        pkgconfig_provided[pkg] = []
-        pkgconfig_needed[pkg] = []
-        for file in sorted(pkgfiles[pkg]):
-            m = pc_re.match(file)
-            if m:
-                pd = bb.data.init()
-                name = m.group(1)
-                pkgconfig_provided[pkg].append(os.path.basename(name))
-                if not os.access(file, os.R_OK):
-                    continue
-                with open(file, 'r') as f:
-                    lines = f.readlines()
-                for l in lines:
-                    m = var_re.match(l)
-                    if m:
-                        name = m.group(1)
-                        val = m.group(2)
-                        pd.setVar(name, pd.expand(val))
-                        continue
-                    m = field_re.match(l)
-                    if m:
-                        hdr = m.group(1)
-                        exp = pd.expand(m.group(2))
-                        if hdr == 'Requires':
-                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
-
-    for pkg in packages.split():
-        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
-        if pkgconfig_provided[pkg]:
-            with open(pkgs_file, 'w') as f:
-                for p in sorted(pkgconfig_provided[pkg]):
-                    f.write('%s\n' % p)
-
-    # Go from least to most specific since the last one found wins
-    for dir in reversed(shlibs_dirs):
-        if not os.path.exists(dir):
-            continue
-        for file in sorted(os.listdir(dir)):
-            m = re.match(r'^(.*)\.pclist$', file)
-            if m:
-                pkg = m.group(1)
-                with open(os.path.join(dir, file)) as fd:
-                    lines = fd.readlines()
-                pkgconfig_provided[pkg] = []
-                for l in lines:
-                    pkgconfig_provided[pkg].append(l.rstrip())
-
-    for pkg in packages.split():
-        deps = []
-        for n in pkgconfig_needed[pkg]:
-            found = False
-            for k in pkgconfig_provided:
-                if n in pkgconfig_provided[k]:
-                    if k != pkg and k not in deps:
-                        deps.append(k)
-                    found = True
-            if not found:
-                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
-        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
-        if deps:
-            with open(deps_file, 'w') as fd:
-                for dep in deps:
-                    fd.write(dep + '\n')
-}
-
-def read_libdep_files(d):
-    pkglibdeps = {}
-    packages = d.getVar('PACKAGES').split()
-    for pkg in packages:
-        pkglibdeps[pkg] = {}
-        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
-            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
-            if os.access(depsfile, os.R_OK):
-                with open(depsfile) as fd:
-                    lines = fd.readlines()
-                for l in lines:
-                    l = l.rstrip()
-                    deps = bb.utils.explode_dep_versions2(l)
-                    for dep in deps:
-                        if dep not in pkglibdeps[pkg]:
-                            pkglibdeps[pkg][dep] = deps[dep]
-    return pkglibdeps
-
-python read_shlibdeps () {
-    pkglibdeps = read_libdep_files(d)
-
-    packages = d.getVar('PACKAGES').split()
-    for pkg in packages:
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
-        for dep in sorted(pkglibdeps[pkg]):
-            # Add the dep if it's not already there, or if no comparison is set
-            if dep not in rdepends:
-                rdepends[dep] = []
-            for v in pkglibdeps[pkg][dep]:
-                if v not in rdepends[dep]:
-                    rdepends[dep].append(v)
-        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
-}
-
-python package_depchains() {
-    """
-    For a given set of prefix and postfix modifiers, make those packages
-    RRECOMMENDS on the corresponding packages for its RDEPENDS.
-
-    Example:  If package A depends upon package B, and A's .bb emits an
-    A-dev package, this would make A-dev Recommends: B-dev.
-
-    If only one of a given suffix is specified, it will take the RRECOMMENDS
-    based on the RDEPENDS of *all* other packages. If more than one of a given
-    suffix is specified, it will only use the RDEPENDS of the single parent
-    package.
-    """
-
-    packages  = d.getVar('PACKAGES')
-    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
-    prefixes  = (d.getVar('DEPCHAIN_PRE') or '').split()
-
-    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
-
-        #bb.note('depends for %s is %s' % (base, depends))
-        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
-
-        for depend in sorted(depends):
-            if '-native' in depend or '-cross' in depend or depend.startswith('virtual/'):
-                #bb.note("Skipping %s" % depend)
-                continue
-            if depend.endswith('-dev'):
-                depend = depend[:-4]
-            if depend.endswith('-dbg'):
-                depend = depend[:-4]
-            pkgname = getname(depend, suffix)
-            #bb.note("Adding %s for %s" % (pkgname, depend))
-            if pkgname not in rreclist and pkgname != pkg:
-                rreclist[pkgname] = []
-
-        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
-        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
-
-    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
-
-        #bb.note('rdepends for %s is %s' % (base, rdepends))
-        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
-
-        for depend in sorted(rdepends):
-            if 'virtual-locale-' in depend:
-                #bb.note("Skipping %s" % depend)
-                continue
-            if depend.endswith('-dev'):
-                depend = depend[:-4]
-            if depend.endswith('-dbg'):
-                depend = depend[:-4]
-            pkgname = getname(depend, suffix)
-            #bb.note("Adding %s for %s" % (pkgname, depend))
-            if pkgname not in rreclist and pkgname != pkg:
-                rreclist[pkgname] = []
-
-        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
-        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
-
-    def add_dep(deplist, dep):
-        if dep not in deplist:
-            deplist.append(dep)
-
-    depends = []
-    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
-        add_dep(depends, dep)
-
-    rdepends = []
-    for pkg in packages.split():
-        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
-            add_dep(rdepends, dep)
-
-    #bb.note('rdepends is %s' % rdepends)
-
-    def post_getname(name, suffix):
-        return '%s%s' % (name, suffix)
-    def pre_getname(name, suffix):
-        return '%s%s' % (suffix, name)
-
-    pkgs = {}
-    for pkg in packages.split():
-        for postfix in postfixes:
-            if pkg.endswith(postfix):
-                if postfix not in pkgs:
-                    pkgs[postfix] = {}
-                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
-
-        for prefix in prefixes:
-            if pkg.startswith(prefix):
-                if prefix not in pkgs:
-                    pkgs[prefix] = {}
-                # strip the prefix from the front of the name to get the base package
-                pkgs[prefix][pkg] = (pkg[len(prefix):], pre_getname)
-
-    if "-dbg" in pkgs:
-        pkglibdeps = read_libdep_files(d)
-        pkglibdeplist = []
-        for pkg in pkglibdeps:
-            for k in pkglibdeps[pkg]:
-                add_dep(pkglibdeplist, k)
-        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
-
-    for suffix in pkgs:
-        for pkg in pkgs[suffix]:
-            if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
-                continue
-            (base, func) = pkgs[suffix][pkg]
-            if suffix == "-dev":
-                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
-            elif suffix == "-dbg":
-                if not dbgdefaultdeps:
-                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
-                    continue
-            if len(pkgs[suffix]) == 1:
-                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
-            else:
-                rdeps = []
-                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
-                    add_dep(rdeps, dep)
-                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
-}
-
-# Since bitbake can't determine which variables are accessed during package
-# iteration, we need to list them here:
-PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA"
-
-def gen_packagevar(d, pkgvars="PACKAGEVARS"):
-    ret = []
-    pkgs = (d.getVar("PACKAGES") or "").split()
-    vars = (d.getVar(pkgvars) or "").split()
-    for v in vars:
-        ret.append(v)
-    for p in pkgs:
-        for v in vars:
-            ret.append(v + ":" + p)
-
-        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
-        # affected recipes.
-        ret.append('_exclude_incompatible-%s' % p)
-    return " ".join(ret)
-
-PACKAGE_PREPROCESS_FUNCS ?= ""
-# Functions for setting up PKGD
-PACKAGEBUILDPKGD ?= " \
-                package_prepare_pkgdata \
-                perform_packagecopy \
-                ${PACKAGE_PREPROCESS_FUNCS} \
-                split_and_strip_files \
-                fixup_perms \
-                "
-# Functions which split PKGD up into separate packages
-PACKAGESPLITFUNCS ?= " \
-                package_do_split_locales \
-                populate_packages"
-# Functions which process metadata based on split packages
-PACKAGEFUNCS += " \
-                package_fixsymlinks \
-                package_name_hook \
-                package_do_filedeps \
-                package_do_shlibs \
-                package_do_pkgconfig \
-                read_shlibdeps \
-                package_depchains \
-                emit_pkgdata"
-
-python do_package () {
-    # Change the following version to cause sstate to invalidate the package
-    # cache.  This is useful if an item this class depends on changes in a
-    # way that the output of this class changes.  rpmdeps is a good example
-    # as any change to rpmdeps requires this to be rerun.
-    # PACKAGE_BBCLASS_VERSION = "4"
-
-    # Init cachedpath
-    global cpath
-    cpath = oe.cachedpath.CachedPath()
-
-    ###########################################################################
-    # Sanity test the setup
-    ###########################################################################
-
-    packages = (d.getVar('PACKAGES') or "").split()
-    if len(packages) < 1:
-        bb.debug(1, "No packages to build, skipping do_package")
-        return
-
-    workdir = d.getVar('WORKDIR')
-    outdir = d.getVar('DEPLOY_DIR')
-    dest = d.getVar('D')
-    dvar = d.getVar('PKGD')
-    pn = d.getVar('PN')
-
-    if not workdir or not outdir or not dest or not dvar or not pn:
-        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
-        oe.qa.handle_error("var-undefined", msg, d)
-        return
-
-    bb.build.exec_func("package_convert_pr_autoinc", d)
-
-    ###########################################################################
-    # Optimisations
-    ###########################################################################
-
-    # Continually expanding complex expressions is inefficient, particularly
-    # when we write to the datastore and invalidate the expansion cache. This
-    # code pre-expands some frequently used variables.
-
-    def expandVar(x, d):
-        d.setVar(x, d.getVar(x))
-
-    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
-        expandVar(x, d)
-
-    ###########################################################################
-    # Setup PKGD (from D)
-    ###########################################################################
-
-    for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
-        bb.build.exec_func(f, d)
-
-    ###########################################################################
-    # Split up PKGD into PKGDEST
-    ###########################################################################
-
-    cpath = oe.cachedpath.CachedPath()
-
-    for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
-        bb.build.exec_func(f, d)
-
-    ###########################################################################
-    # Process PKGDEST
-    ###########################################################################
-
-    # Build global list of files in each split package
-    global pkgfiles
-    pkgfiles = {}
-    packages = d.getVar('PACKAGES').split()
-    pkgdest = d.getVar('PKGDEST')
-    for pkg in packages:
-        pkgfiles[pkg] = []
-        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
-            for file in files:
-                pkgfiles[pkg].append(walkroot + os.sep + file)
-
-    for f in (d.getVar('PACKAGEFUNCS') or '').split():
-        bb.build.exec_func(f, d)
-
-    oe.qa.exit_if_errors(d)
-}
-
-do_package[dirs] = "${SHLIBSWORKDIR} ${D}"
-do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
-addtask package after do_install
-
-SSTATETASKS += "do_package"
-do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
-do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
-do_package_setscene[dirs] = "${STAGING_DIR}"
-
-python do_package_setscene () {
-    sstate_setscene(d)
-}
-addtask do_package_setscene
-
-# Copy from PKGDESTWORK to a temporary directory, as that directory can be cleaned
-# by both do_package_setscene and do_packagedata_setscene, leading to races
-python do_packagedata () {
-    bb.build.exec_func("package_get_auto_pr", d)
-
-    src = d.expand("${PKGDESTWORK}")
-    dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
-    oe.path.copyhardlinktree(src, dest)
-
-    bb.build.exec_func("packagedata_translate_pr_autoinc", d)
-}
-do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
-
-# Translate the EXTENDPRAUTO and AUTOINC to the final values
-packagedata_translate_pr_autoinc() {
-    find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
-        sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
-            -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
-}
-
-addtask packagedata before do_build after do_package
-
-SSTATETASKS += "do_packagedata"
-do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input"
-do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
-do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
-
-python do_packagedata_setscene () {
-    sstate_setscene(d)
-}
-addtask do_packagedata_setscene
-
-#
-# Helper functions for the package writing classes
-#
-
-def mapping_rename_hook(d):
-    """
-    Rewrite variables to account for package renaming in things
-    like debian.bbclass or manual PKG variable name changes
-    """
-    pkg = d.getVar("PKG")
-    runtime_mapping_rename("RDEPENDS", pkg, d)
-    runtime_mapping_rename("RRECOMMENDS", pkg, d)
-    runtime_mapping_rename("RSUGGESTS", pkg, d)
diff --git a/classes/siteinfo.bbclass b/classes/siteinfo.bbclass
deleted file mode 100644
index 34ca9bb..0000000
--- a/classes/siteinfo.bbclass
+++ /dev/null
@@ -1,226 +0,0 @@ 
-# This class exists to provide information about targets which
-# other classes and/or recipes may need. If you add a new
-# target, this will probably need to be updated.
-
-#
-# Returns information about 'what' for the named target 'target'
-# where 'target' == "<arch>-<os>"
-#
-# 'what' can be one of
-# * target: Returns the target name ("<arch>-<os>")
-# * endianness: Return "be" for big endian targets, "le" for little endian
-# * bits: Returns the bit size of the target, either "32" or "64"
-# * libc: Returns the name of the c library used by the target
-#
-# It is an error for the target not to exist.
-# If 'what' doesn't exist then an empty value is returned
-#
-def siteinfo_data_for_machine(arch, os, d):
-    archinfo = {
-        "allarch": "endian-little bit-32", # bogus, but better than special-casing the checks below for allarch
-        "aarch64": "endian-little bit-64 arm-common arm-64",
-        "aarch64_be": "endian-big bit-64 arm-common arm-64",
-        "arc": "endian-little bit-32 arc-common",
-        "arceb": "endian-big bit-32 arc-common",
-        "arm": "endian-little bit-32 arm-common arm-32",
-        "armeb": "endian-big bit-32 arm-common arm-32",
-        "avr32": "endian-big bit-32 avr32-common",
-        "bfin": "endian-little bit-32 bfin-common",
-        "epiphany": "endian-little bit-32",
-        "i386": "endian-little bit-32 ix86-common",
-        "i486": "endian-little bit-32 ix86-common",
-        "i586": "endian-little bit-32 ix86-common",
-        "i686": "endian-little bit-32 ix86-common",
-        "ia64": "endian-little bit-64",
-        "lm32": "endian-big bit-32",
-        "m68k": "endian-big bit-32",
-        "microblaze": "endian-big bit-32 microblaze-common",
-        "microblazeel": "endian-little bit-32 microblaze-common",
-        "mips": "endian-big bit-32 mips-common",
-        "mips64": "endian-big bit-64 mips-common",
-        "mips64el": "endian-little bit-64 mips-common",
-        "mipsisa64r6": "endian-big bit-64 mips-common",
-        "mipsisa64r6el": "endian-little bit-64 mips-common",
-        "mipsel": "endian-little bit-32 mips-common",
-        "mipsisa32r6": "endian-big bit-32 mips-common",
-        "mipsisa32r6el": "endian-little bit-32 mips-common",
-        "powerpc": "endian-big bit-32 powerpc-common",
-        "powerpcle": "endian-little bit-32 powerpc-common",
-        "nios2": "endian-little bit-32 nios2-common",
-        "powerpc64": "endian-big bit-64 powerpc-common",
-        "powerpc64le": "endian-little bit-64 powerpc-common",
-        "ppc": "endian-big bit-32 powerpc-common",
-        "ppc64": "endian-big bit-64 powerpc-common",
-        "ppc64le" : "endian-little bit-64 powerpc-common",
-        "riscv32": "endian-little bit-32 riscv-common",
-        "riscv64": "endian-little bit-64 riscv-common",
-        "sh3": "endian-little bit-32 sh-common",
-        "sh3eb": "endian-big bit-32 sh-common",
-        "sh4": "endian-little bit-32 sh-common",
-        "sh4eb": "endian-big bit-32 sh-common",
-        "sparc": "endian-big bit-32",
-        "viac3": "endian-little bit-32 ix86-common",
-        "x86_64": "endian-little", # bitinfo specified in targetinfo
-    }
-    osinfo = {
-        "darwin": "common-darwin",
-        "darwin21": "common-darwin",
-        "linux": "common-linux common-glibc",
-        "linux-gnu": "common-linux common-glibc",
-        "linux-gnu_ilp32": "common-linux common-glibc",
-        "linux-gnux32": "common-linux common-glibc",
-        "linux-gnun32": "common-linux common-glibc",
-        "linux-gnueabi": "common-linux common-glibc",
-        "linux-gnuspe": "common-linux common-glibc",
-        "linux-musl": "common-linux common-musl",
-        "linux-muslx32": "common-linux common-musl",
-        "linux-musleabi": "common-linux common-musl",
-        "linux-muslspe": "common-linux common-musl",
-        "uclinux-uclibc": "common-uclibc",
-        "cygwin": "common-cygwin",
-        "mingw32": "common-mingw",
-    }
-    targetinfo = {
-        "aarch64-linux-gnu": "aarch64-linux",
-        "aarch64_be-linux-gnu": "aarch64_be-linux",
-        "aarch64-linux-gnu_ilp32": "bit-32 aarch64_be-linux arm-32",
-        "aarch64_be-linux-gnu_ilp32": "bit-32 aarch64_be-linux arm-32",
-        "aarch64-linux-musl": "aarch64-linux",
-        "aarch64_be-linux-musl": "aarch64_be-linux",
-        "arm-linux-gnueabi": "arm-linux",
-        "arm-linux-musleabi": "arm-linux",
-        "armeb-linux-gnueabi": "armeb-linux",
-        "armeb-linux-musleabi": "armeb-linux",
-        "microblazeel-linux" : "microblaze-linux",
-        "microblazeel-linux-musl" : "microblaze-linux",
-        "mips-linux-musl": "mips-linux",
-        "mipsel-linux-musl": "mipsel-linux",
-        "mips64-linux-musl": "mips64-linux",
-        "mips64el-linux-musl": "mips64el-linux",
-        "mips64-linux-gnun32": "mips-linux bit-32",
-        "mips64el-linux-gnun32": "mipsel-linux bit-32",
-        "mipsisa64r6-linux-gnun32": "mipsisa32r6-linux bit-32",
-        "mipsisa64r6el-linux-gnun32": "mipsisa32r6el-linux bit-32",
-        "powerpc-linux": "powerpc32-linux powerpc32-linux-glibc",
-        "powerpc-linux-musl": "powerpc-linux powerpc32-linux powerpc32-linux-musl",
-        "powerpcle-linux": "powerpc32-linux powerpc32-linux-glibc",
-        "powerpcle-linux-musl": "powerpc-linux powerpc32-linux powerpc32-linux-musl",
-        "powerpc-linux-gnuspe": "powerpc-linux powerpc32-linux powerpc32-linux-glibc",
-        "powerpc-linux-muslspe": "powerpc-linux powerpc32-linux powerpc32-linux-musl",
-        "powerpc64-linux-gnuspe": "powerpc-linux powerpc64-linux powerpc64-linux-glibc",
-        "powerpc64-linux-muslspe": "powerpc-linux powerpc64-linux powerpc64-linux-musl",
-        "powerpc64-linux": "powerpc-linux powerpc64-linux powerpc64-linux-glibc",
-        "powerpc64-linux-musl": "powerpc-linux powerpc64-linux powerpc64-linux-musl",
-        "powerpc64le-linux": "powerpc-linux powerpc64-linux powerpc64-linux-glibc",
-        "powerpc64le-linux-musl": "powerpc-linux powerpc64-linux powerpc64-linux-musl",
-        "riscv32-linux": "riscv32-linux",
-        "riscv32-linux-musl": "riscv32-linux",
-        "riscv64-linux": "riscv64-linux",
-        "riscv64-linux-musl": "riscv64-linux",
-        "x86_64-cygwin": "bit-64",
-        "x86_64-darwin": "bit-64",
-        "x86_64-darwin21": "bit-64",
-        "x86_64-linux": "bit-64",
-        "x86_64-linux-musl": "x86_64-linux bit-64",
-        "x86_64-linux-muslx32": "bit-32 ix86-common x32-linux",
-        "x86_64-elf": "bit-64",
-        "x86_64-linux-gnu": "bit-64 x86_64-linux",
-        "x86_64-linux-gnux32": "bit-32 ix86-common x32-linux",
-        "x86_64-mingw32": "bit-64",
-    }
-
-    # Add in any extra user-supplied data which may come from a BSP layer, removing
-    # the need to always change this class directly
-    extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS") or "").split()
-    for m in extra_siteinfo:
-        call = m + "(archinfo, osinfo, targetinfo, d)"
-        locs = {"archinfo": archinfo, "osinfo": osinfo, "targetinfo": targetinfo, "d": d}
-        archinfo, osinfo, targetinfo = bb.utils.better_eval(call, locs)
-
-    target = "%s-%s" % (arch, os)
-
-    sitedata = []
-    if arch in archinfo:
-        sitedata.extend(archinfo[arch].split())
-    if os in osinfo:
-        sitedata.extend(osinfo[os].split())
-    if target in targetinfo:
-        sitedata.extend(targetinfo[target].split())
-    sitedata.append(target)
-    sitedata.append("common")
-
-    bb.debug(1, "SITE files %s" % sitedata)
-    return sitedata
-
-def siteinfo_data(d):
-    return siteinfo_data_for_machine(d.getVar("HOST_ARCH"), d.getVar("HOST_OS"), d)
-
-python () {
-    sitedata = set(siteinfo_data(d))
-    if "endian-little" in sitedata:
-        d.setVar("SITEINFO_ENDIANNESS", "le")
-    elif "endian-big" in sitedata:
-        d.setVar("SITEINFO_ENDIANNESS", "be")
-    else:
-        bb.error("Unable to determine endianness for architecture '%s'" %
-                 d.getVar("HOST_ARCH"))
-        bb.fatal("Please add your architecture to siteinfo.bbclass")
-
-    if "bit-32" in sitedata:
-        d.setVar("SITEINFO_BITS", "32")
-    elif "bit-64" in sitedata:
-        d.setVar("SITEINFO_BITS", "64")
-    else:
-        bb.error("Unable to determine bit size for architecture '%s'" %
-                 d.getVar("HOST_ARCH"))
-        bb.fatal("Please add your architecture to siteinfo.bbclass")
-}
-
-# Layers with siteconfig need to add a replacement path to this variable so the
-# sstate isn't path-specific
-SITEINFO_PATHVARS = "COREBASE"
-
-def siteinfo_get_files(d, sysrootcache=False):
-    sitedata = siteinfo_data(d)
-    sitefiles = []
-    searched = []
-    for path in d.getVar("BBPATH").split(":"):
-        for element in sitedata:
-            filename = os.path.join(path, "site", element)
-            if os.path.exists(filename):
-                searched.append(filename + ":True")
-                sitefiles.append(filename)
-            else:
-                searched.append(filename + ":False")
-
-    # Have to parameterise out hardcoded paths such as COREBASE for the main site files
-    for var in d.getVar("SITEINFO_PATHVARS").split():
-        searched2 = []
-        replace = os.path.normpath(d.getVar(var))
-        for s in searched:
-            searched2.append(s.replace(replace, "${" + var + "}"))
-        searched = searched2
-
-    if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d):
-        # We need the sstate signatures for native/cross not to vary by arch, so we
-        # can't depend on the site files. In future we may want to depend upon all
-        # site files? That would show up as breaking
-        # sstatetests.SStateTests.test_sstate_32_64_same_hash, for example.
-        searched = []
-
-    if not sysrootcache:
-        return sitefiles, searched
-
-    # Now check for siteconfig cache files in sysroots
-    path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE')
-    if path_siteconfig and os.path.isdir(path_siteconfig):
-        for i in os.listdir(path_siteconfig):
-            if not i.endswith("_config"):
-                continue
-            filename = os.path.join(path_siteconfig, i)
-            sitefiles.append(filename)
-    return sitefiles, searched
-
-#
-# Make some information available via variables
-#
-SITECONFIG_SYSROOTCACHE = "${STAGING_DATADIR}/${TARGET_SYS}_config_site.d"
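
To see how the deleted table entries combine for the Darwin targets this layer
cares about, here is a trimmed, runnable sketch of siteinfo_data_for_machine()
keeping only the entries an x86_64 Darwin lookup actually touches (the full
tables are the ones shown in the diff above):

    # Trimmed sketch of the siteinfo lookup for x86_64-darwin21.
    def siteinfo_data_for_machine(arch, target_os):
        archinfo = {"x86_64": "endian-little"}   # bit size comes from targetinfo
        osinfo = {"darwin21": "common-darwin"}
        targetinfo = {"x86_64-darwin21": "bit-64"}

        target = "%s-%s" % (arch, target_os)
        sitedata = []
        sitedata += archinfo.get(arch, "").split()
        sitedata += osinfo.get(target_os, "").split()
        sitedata += targetinfo.get(target, "").split()
        sitedata += [target, "common"]
        return sitedata

    print(siteinfo_data_for_machine("x86_64", "darwin21"))
    # ['endian-little', 'common-darwin', 'bit-64', 'x86_64-darwin21', 'common']

The anonymous python handler then derives SITEINFO_ENDIANNESS = "le" from the
endian-little marker and SITEINFO_BITS = "64" from the bit-64 marker, so the
endianness and bit size for this Darwin target follow directly from the table
entries rather than from anything Darwin-specific elsewhere in the class.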