@@ -2134,9 +2134,9 @@ methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
-methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())
methods.append(crate.Crate())
methods.append(gcp.GCP())
methods.append(gomod.GoMod())
methods.append(gomod.GoModGit())
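+# npmsw registers one fetch method per scheme: npmsw, npmsw+https and npmsw+git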
+methods.extend(npmsw.methods)
@@ -46,7 +46,6 @@ class DependencyMixin:
return returns
def verify_donestamp(self, ud, d):
- """Verify the donestamp file"""
if not super().verify_donestamp(ud, d):
return False
@@ -56,7 +55,6 @@ class DependencyMixin:
return all(self._foreach_proxy_method(ud, handle, d))
def update_donestamp(self, ud, d):
- """Update the donestamp file"""
super().update_donestamp(ud, d)
self._init_proxy(ud, d)
@@ -65,7 +63,6 @@ class DependencyMixin:
self._foreach_proxy_method(ud, handle, d)
def need_update(self, ud, d):
- """Force a fetch, even if localpath exists ?"""
if super().need_update(ud, d):
return True
@@ -75,7 +72,6 @@ class DependencyMixin:
return any(self._foreach_proxy_method(ud, handle, d))
def try_mirrors(self, fetch, ud, d, mirrors):
- """Try to use a mirror"""
if not super().try_mirrors(fetch, ud, d, mirrors):
return False
@@ -85,25 +81,21 @@ class DependencyMixin:
return all(self._foreach_proxy_method(ud, handle, d))
def download(self, ud, d):
- """Fetch url"""
super().download(ud, d)
self._init_proxy(ud, d)
ud.proxy.download()
def unpack(self, ud, rootdir, d):
- """Unpack the downloaded dependencies"""
super().unpack(ud, rootdir, d)
self._init_proxy(ud, d)
ud.proxy.unpack(ud.destdir)
def clean(self, ud, d):
- """Clean any existing full or partial download"""
self._init_proxy(ud, d)
ud.proxy.clean()
super().clean(ud, d)
def done(self, ud, d):
- """Is the download done ?"""
if not super().done(ud, d):
return False
@@ -1,37 +1,35 @@
# Copyright (C) 2020 Savoir-Faire Linux
+# Copyright (C) 2024-2025 Weidmueller Interface GmbH & Co. KG
+# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
-BitBake 'Fetch' npm shrinkwrap implementation
+BitBake 'Fetch' implementation for npm-shrinkwrap.json and package-lock.json
-npm fetcher support the SRC_URI with format of:
-SRC_URI = "npmsw://some.registry.url;OptionA=xxx;OptionB=xxx;..."
+The npmsw, npmsw+https and npmsw+git fetchers are used to download npm package
+dependencies via an npm-shrinkwrap.json or package-lock.json file.
-Supported SRC_URI options are:
+The fetchers support SRC_URI entries of the following formats:
+SRC_URI = "npmsw://npm-shrinkwrap.json"
+SRC_URI = "npmsw+https://example.com/name-1.2.3.tar.gz"
+SRC_URI = "npmsw+git://example.com/repo.git"
+
+Additional supported SRC_URI options are:
- dev
Set to 1 to also install devDependencies.
-
-- destsuffix
- Specifies the directory to use to unpack the dependencies (default: ${S}).
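+
+For example, a recipe may reference a local shrinkwrap file with:
+SRC_URI = "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json;dev=1"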
"""
+import base64
import json
import os
import re
import bb
-from bb.fetch2 import Fetch
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import ParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import URI
-from bb.fetch2.npm import npm_integrity
-from bb.fetch2.npm import npm_localfile
-from bb.fetch2.npm import npm_unpack
+from bb.fetch2 import FetchError, ParameterError, URI
+from bb.fetch2.dependency import create_methods
+from bb.fetch2.npm import construct_url_path
from bb.utils import is_semver
-from bb.utils import lockfile
-from bb.utils import unlockfile
def foreach_dependencies(shrinkwrap, callback=None, dev=False):
"""
@@ -58,40 +56,31 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
name = location.split('node_modules/')[-1]
callback(name, data, location)
-class NpmShrinkWrap(FetchMethod):
- """Class to fetch all package from a shrinkwrap file"""
-
- def supports(self, ud, d):
- """Check if a given url can be fetched with npmsw"""
- return ud.type in ["npmsw"]
-
+class NpmShrinkWrapMixin:
def urldata_init(self, ud, d):
"""Init npmsw specific variables within url data"""
-
- # Get the 'shrinkwrap' parameter
- ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])
-
- # Get the 'dev' parameter
+ super().urldata_init(ud, d)
ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)
- # Resolve the dependencies
- ud.deps = []
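+    # Resolve the dependency URLs described by the shrinkwrap file found
+    # at localpath and return them for the proxy fetcher.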
+ def resolve_dependencies(self, ud, localpath, d):
+ urls = []
- def _resolve_dependency(name, params, destsuffix):
+ def resolve_dependency(name, data, location):
url = None
- localpath = None
- extrapaths = []
- unpack = True
- integrity = params.get("integrity")
- resolved = params.get("resolved")
- version = params.get("version")
- link = params.get("link", False)
+ integrity = data.get("integrity")
+ resolved = data.get("resolved")
+ version = data.get("version")
+ link = data.get("link", False)
+
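+            # npm integrity values use the SRI format "<algorithm>-<base64>";
+            # convert them to the hex digests used by the fetcher checksum
+            # parameters (e.g. sha512sum).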
+ if integrity:
+ algorithm, value = integrity.split("-", maxsplit=1)
+ checksum_name = f"{algorithm}sum"
+ checksum_value = base64.b64decode(value).hex()
- # Handle link sources
+ # Skip link sources
if link:
- localpath = resolved
- unpack = False
+ return
# Handle registry sources
elif version and is_semver(version) and integrity:
@@ -99,193 +88,62 @@ class NpmShrinkWrap(FetchMethod):
if not resolved:
return
- localfile = npm_localfile(name, version)
-
uri = URI(resolved)
- uri.params["downloadfilename"] = localfile
-
- checksum_name, checksum_expected = npm_integrity(integrity)
- uri.params[checksum_name] = checksum_expected
-
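+                # If the URL matches the standard registry download path for
+                # this name and version, switch to the npm scheme and pass
+                # the package name and version on to the npm fetcher via the
+                # dn/dv parameters.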
+ package_path = construct_url_path(name, version)
+ if uri.scheme == "https" and uri.path.endswith(package_path):
+ uri.scheme = "npm"
+ uri.path = uri.path[:-len(package_path)]
+ uri.params["dn"] = name
+ uri.params["dv"] = version
+ uri.params["destsuffix"] = location
+ else:
+                bb.warn(f"Unsupported registry URL, please add support for it to the npm fetcher: {resolved}")
+ uri.params[checksum_name] = checksum_value
url = str(uri)
- localpath = os.path.join(d.getVar("DL_DIR"), localfile)
-
- # Create a resolve file to mimic the npm fetcher and allow
- # re-usability of the downloaded file.
- resolvefile = localpath + ".resolved"
-
- bb.utils.mkdirhier(os.path.dirname(resolvefile))
- with open(resolvefile, "w") as f:
- f.write(url)
-
- extrapaths.append(resolvefile)
-
# Handle http tarball sources
elif resolved.startswith("http") and integrity:
- localfile = npm_localfile(os.path.basename(resolved))
-
uri = URI(resolved)
- uri.params["downloadfilename"] = localfile
-
- checksum_name, checksum_expected = npm_integrity(integrity)
- uri.params[checksum_name] = checksum_expected
-
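+                # npm tarballs place their content in a top-level "package"
+                # directory; strip it and unpack straight into the
+                # dependency's node_modules location.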
+ uri.params["subdir"] = location
+ uri.params["striplevel"] = 1
+ uri.params[checksum_name] = checksum_value
url = str(uri)
- localpath = os.path.join(d.getVar("DL_DIR"), localfile)
-
- # Handle local tarball sources
+ # Skip local tarball
elif resolved.startswith("file"):
- localpath = resolved[5:]
+ return
# Handle git sources
elif resolved.startswith("git"):
- regex = re.compile(r"""
- ^
- git\+
- (?P<protocol>[a-z]+)
- ://
- (?P<url>[^#]+)
- \#
- (?P<rev>[0-9a-f]+)
- $
- """, re.VERBOSE)
-
- match = regex.match(resolved)
- if not match:
- raise ParameterError("Invalid git url: %s" % resolved, ud.url)
-
- groups = match.groupdict()
-
- uri = URI("git://" + str(groups["url"]))
- uri.params["protocol"] = str(groups["protocol"])
- uri.params["rev"] = str(groups["rev"])
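+                # Resolved git URLs have the form
+                # "git+<protocol>://<host>/<path>#<rev>"; split off the
+                # revision and the optional protocol prefix.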
+ url, _, rev = resolved.partition("#")
+ uri = URI(url)
+ scheme, _, protocol = uri.scheme.partition("+")
+ if protocol:
+ uri.params["protocol"] = protocol
+ uri.scheme = scheme
+ uri.params["rev"] = rev
uri.params["nobranch"] = "1"
- uri.params["destsuffix"] = destsuffix
-
+ uri.params["destsuffix"] = location
url = str(uri)
else:
- raise ParameterError("Unsupported dependency: %s" % name, ud.url)
+ raise ParameterError(f"Unsupported dependency: {name}", ud.url)
- # name is needed by unpack tracer for module mapping
- ud.deps.append({
- "name": name,
- "url": url,
- "localpath": localpath,
- "extrapaths": extrapaths,
- "destsuffix": destsuffix,
- "unpack": unpack,
- })
+ urls.append(url)
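+
+        # When the fetched source is a directory (e.g. a git checkout), look
+        # for the lock file inside it, preferring npm-shrinkwrap.json over
+        # package-lock.json.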
+ if os.path.isdir(localpath):
+ localdir = localpath
+ localpath = os.path.join(localdir, "npm-shrinkwrap.json")
+ if not os.path.isfile(localpath):
+ localpath = os.path.join(localdir, "package-lock.json")
try:
- with open(ud.shrinkwrap_file, "r") as f:
+ with open(localpath, "r") as f:
shrinkwrap = json.load(f)
except Exception as e:
raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)
- foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)
-
- # Avoid conflicts between the environment data and:
- # - the proxy url revision
- # - the proxy url checksum
- data = bb.data.createCopy(d)
- data.delVar("SRCREV")
- data.delVarFlags("SRC_URI")
-
- # This fetcher resolves multiple URIs from a shrinkwrap file and then
- # forwards it to a proxy fetcher. The management of the donestamp file,
- # the lockfile and the checksums are forwarded to the proxy fetcher.
- shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]]
- if shrinkwrap_urls:
- ud.proxy = Fetch(shrinkwrap_urls, data)
- ud.needdonestamp = False
-
- @staticmethod
- def _foreach_proxy_method(ud, handle):
- returns = []
- #Check if there are dependencies before try to fetch them
- if len(ud.deps) > 0:
- for proxy_url in ud.proxy.urls:
- proxy_ud = ud.proxy.ud[proxy_url]
- proxy_d = ud.proxy.d
- proxy_ud.setup_localpath(proxy_d)
- lf = lockfile(proxy_ud.lockfile)
- returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
- unlockfile(lf)
- return returns
-
- def verify_donestamp(self, ud, d):
- """Verify the donestamp file"""
- def _handle(m, ud, d):
- return m.verify_donestamp(ud, d)
- return all(self._foreach_proxy_method(ud, _handle))
-
- def update_donestamp(self, ud, d):
- """Update the donestamp file"""
- def _handle(m, ud, d):
- m.update_donestamp(ud, d)
- self._foreach_proxy_method(ud, _handle)
-
- def need_update(self, ud, d):
- """Force a fetch, even if localpath exists ?"""
- def _handle(m, ud, d):
- return m.need_update(ud, d)
- return all(self._foreach_proxy_method(ud, _handle))
-
- def try_mirrors(self, fetch, ud, d, mirrors):
- """Try to use a mirror"""
- def _handle(m, ud, d):
- return m.try_mirrors(fetch, ud, d, mirrors)
- return all(self._foreach_proxy_method(ud, _handle))
-
- def download(self, ud, d):
- """Fetch url"""
- ud.proxy.download()
-
- def unpack(self, ud, rootdir, d):
- """Unpack the downloaded dependencies"""
- destdir = rootdir
- destsuffix = ud.parm.get("destsuffix")
- if destsuffix:
- destdir = os.path.join(rootdir, destsuffix)
- ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)
-
- bb.utils.mkdirhier(destdir)
- bb.utils.copyfile(ud.shrinkwrap_file,
- os.path.join(destdir, "npm-shrinkwrap.json"))
-
- auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
- manual = [dep for dep in ud.deps if dep["localpath"]]
-
- if auto:
- ud.proxy.unpack(destdir, auto)
-
- for dep in manual:
- depdestdir = os.path.join(destdir, dep["destsuffix"])
- if dep["url"]:
- npm_unpack(dep["localpath"], depdestdir, d)
- else:
- depsrcdir= os.path.join(destdir, dep["localpath"])
- if dep["unpack"]:
- npm_unpack(depsrcdir, depdestdir, d)
- else:
- bb.utils.mkdirhier(depdestdir)
- cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
- runfetchcmd(cmd, d, workdir=depdestdir)
-
- def clean(self, ud, d):
- """Clean any existing full or partial download"""
- ud.proxy.clean()
+ foreach_dependencies(shrinkwrap, resolve_dependency, ud.dev)
- # Clean extra files
- for dep in ud.deps:
- for path in dep["extrapaths"]:
- bb.utils.remove(path)
+ return urls
- def done(self, ud, d):
- """Is the download done ?"""
- def _handle(m, ud, d):
- return m.done(ud, d)
- return all(self._foreach_proxy_method(ud, _handle))
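+# Create the npmsw, npmsw+https and npmsw+git fetch methods based on the mixin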
+methods = create_methods("npmsw", NpmShrinkWrapMixin)