@@ -139,9 +139,24 @@ do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
do_fetch[network] = "1"
-python base_do_fetch() {
- src_uri = (d.getVar('SRC_URI') or "").split()
+def get_src_uris(d, early=False):
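+    """Return the recipe's source URIs as a list: SRC_URI plus any URIs
+    loaded (via oe.vendor) from the files listed in SRC_URI_FILES. With
+    early=True only the statically set SRC_URI is returned and
+    SRC_URI_FILES is not consulted."""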
+ import oe.vendor
+
+ src_uris = (d.getVar("SRC_URI") or "").split()
+ if early:
+ return src_uris
+
+ src_uri_files = (d.getVar("SRC_URI_FILES") or "").split()
+ for fn in src_uri_files:
+ with open(fn, "r") as f:
+ u = oe.vendor.load(f)
+ src_uris.extend(u)
+
+ return src_uris
+
+def fetch_src_uris(d, early=False):
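+    """Download every URI reported by get_src_uris(), aborting the build
+    on any fetcher error."""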
+ src_uri = get_src_uris(d, early)
if not src_uri:
return
@@ -150,11 +165,25 @@ python base_do_fetch() {
fetcher.download()
except bb.fetch2.BBFetchException as e:
bb.fatal("Bitbake Fetcher Error: " + repr(e))
+
+python base_do_fetch() {
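+    # Fetch SRC_URI along with any additional URIs recorded in SRC_URI_FILES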
+ fetch_src_uris(d)
}
addtask unpack after do_fetch
do_unpack[cleandirs] = "${UNPACKDIR}"
+def unpack_src_uris(d, unpackdir, early=False):
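+    """Unpack every URI reported by get_src_uris() into unpackdir and
+    return the list of URIs that were unpacked."""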
+ src_uri = get_src_uris(d, early)
+ if not src_uri:
+ return []
+
+ try:
+ fetcher = bb.fetch2.Fetch(src_uri, d)
+ fetcher.unpack(unpackdir)
+ except bb.fetch2.BBFetchException as e:
+        bb.fatal("Bitbake Fetcher Error: " + repr(e))
+
+    return src_uri
+
python base_do_unpack() {
import shutil
@@ -162,12 +191,8 @@ python base_do_unpack() {
# Intentionally keep SOURCE_BASEDIR internal to the task just for SDE
d.setVar("SOURCE_BASEDIR", sourcedir)
- src_uri = (d.getVar('SRC_URI') or "").split()
- if not src_uri:
- return
-
+ unpackdir = d.getVar("UNPACKDIR")
basedir = None
- unpackdir = d.getVar('UNPACKDIR')
workdir = d.getVar('WORKDIR')
if sourcedir.startswith(workdir) and not sourcedir.startswith(unpackdir):
basedir = sourcedir.replace(workdir, '').strip("/").split('/')[0]
@@ -175,11 +200,7 @@ python base_do_unpack() {
bb.utils.remove(workdir + '/' + basedir, True)
d.setVar("SOURCE_BASEDIR", workdir + '/' + basedir)
- try:
- fetcher = bb.fetch2.Fetch(src_uri, d)
- fetcher.unpack(d.getVar('UNPACKDIR'))
- except bb.fetch2.BBFetchException as e:
- bb.fatal("Bitbake Fetcher Error: " + repr(e))
+ unpack_src_uris(d, unpackdir)
if basedir and os.path.exists(unpackdir + '/' + basedir):
# Compatibility magic to ensure ${WORKDIR}/git and ${WORKDIR}/${BP}
@@ -704,7 +725,7 @@ addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"
python do_cleanall() {
- src_uri = (d.getVar('SRC_URI') or "").split()
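+    # early=True: only the URIs listed directly in SRC_URI are considered here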
+ src_uri = get_src_uris(d, True)
if not src_uri:
return
@@ -190,7 +190,7 @@ python do_ar_original() {
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
bb.note('Archiving the original source...')
- urls = d.getVar("SRC_URI").split()
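+    # Archive vendored URIs from SRC_URI_FILES in addition to SRC_URI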
+ urls = get_src_uris(d)
# destsuffix (git fetcher) and subdir (everything else) are allowed to be
# absolute paths (for example, destsuffix=${S}/foobar).
# That messes with unpacking inside our tmpdir below, because the fetchers
@@ -332,7 +332,7 @@ python do_ar_configured() {
python do_ar_mirror() {
import subprocess
- src_uri = (d.getVar('SRC_URI') or '').split()
+ src_uri = get_src_uris(d)
if len(src_uri) == 0:
return
@@ -289,7 +289,7 @@ python buildhistory_emit_pkghistory() {
rcpinfo.layer = layer
rcpinfo.license = license
rcpinfo.config = sortlist(oe.utils.squashspaces(d.getVar('PACKAGECONFIG') or ""))
- rcpinfo.src_uri = oe.utils.squashspaces(d.getVar('SRC_URI') or "")
+ rcpinfo.src_uri = " ".join(get_src_uris(d))
write_recipehistory(rcpinfo, d)
bb.build.exec_func("read_subpackage_metadata", d)
@@ -933,7 +933,7 @@ def _get_srcrev_values(d):
"""
scms = []
- fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d)
+ fetcher = bb.fetch.Fetch(get_src_uris(d), d)
urldata = fetcher.ud
for u in urldata:
if urldata[u].method.supports_srcrev():
@@ -29,7 +29,7 @@ python do_prepare_copyleft_sources () {
sources_dir = d.getVar('COPYLEFT_SOURCES_DIR')
dl_dir = d.getVar('DL_DIR')
- src_uri = d.getVar('SRC_URI').split()
+ src_uri = get_src_uris(d)
fetch = bb.fetch2.Fetch(src_uri, d)
ud = fetch.ud
@@ -349,7 +349,7 @@ def add_download_packages(d, doc, recipe):
import oe.spdx
import oe.sbom
- urls = d.getVar("SRC_URI").split()
+ urls = get_src_uris(d)
fetcher = bb.fetch2.Fetch(urls, d)
for download_idx, f in enumerate(fetcher.expanded_urldata()):
@@ -65,7 +65,7 @@ python () {
bb.fetch.get_hashvalue(d)
local_srcuri = []
- fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
+ fetch = bb.fetch2.Fetch(get_src_uris(d), d)
for url in fetch.urls:
url_data = fetch.ud[url]
parm = url_data.parm