diff mbox series

[AUH,v2,5/9] upgrade-helper.py: Add stable option for patch-only upgrades

Message ID 20260424114603.2444938-6-daniel.turull@ericsson.com
State New
Headers show
Series upgrade_helper: scarthgap compatibility, stable updates and changelog extraction | expand

Commit Message

Daniel Turull April 24, 2026, 11:45 a.m. UTC
From: Daniel Turull <daniel.turull@ericsson.com>

Add --stable flag to restrict upgrades to the next patch version within
the current stable branch (e.g. 1.2.3 -> 1.2.4). When the latest
upstream version is a major/minor bump, AUH queries all available
versions and picks the best patch-level update.

Signed-off-by: Daniel Turull <daniel.turull@ericsson.com>
Assisted-by: Claude, Anthropic
---
 modules/utils/version.py | 160 +++++++++++++++++++++++++++++++++++++++
 upgrade-helper.py        |  51 +++++++++++++
 2 files changed, 211 insertions(+)
 create mode 100644 modules/utils/version.py

Comments

Alexander Kanavin April 27, 2026, 10:15 a.m. UTC | #1
This functionality really needs to go to oe-core, and it should be
based on recipe metadata, e.g. for things like openssl we could
indicate in the recipe that stable maintenance is done with point
version releases, or systemd is maintained via minor versions (259.3
-> 259.4) or glibc has commits on a maintenance branch. You can't
assume that point releases are equivalent to bugfix-only maintenance.

So that you could run e.g. 'devtool check-upgrade-status --stable' and
it would print a list of possible maintenance updates.

Alex

On Fri, 24 Apr 2026 at 13:46, <daniel.turull@ericsson.com> wrote:
>
> From: Daniel Turull <daniel.turull@ericsson.com>
>
> Add --stable flag to restrict upgrades to the next patch version within
> the current stable branch (e.g. 1.2.3 -> 1.2.4). When the latest
> upstream version is a major/minor bump, AUH queries all available
> versions and picks the best patch-level update.
>
> Signed-off-by: Daniel Turull <daniel.turull@ericsson.com>
> Assisted-by: Claude, Anthropic
> ---
>  modules/utils/version.py | 160 +++++++++++++++++++++++++++++++++++++++
>  upgrade-helper.py        |  51 +++++++++++++
>  2 files changed, 211 insertions(+)
>  create mode 100644 modules/utils/version.py
>
> diff --git a/modules/utils/version.py b/modules/utils/version.py
> new file mode 100644
> index 0000000..b40eb1c
> --- /dev/null
> +++ b/modules/utils/version.py
> @@ -0,0 +1,160 @@
> +# SPDX-License-Identifier: GPL-2.0-or-later
> +#
> +# Version utilities for --stable patch-only upgrades.
> +#
> +# get_all_upstream_versions() collects *all* available upstream versions so
> +# that we can pick the highest patch-level release within the current stable
> +# branch.  The existing bitbake/oe-core APIs (latest_versionstring,
> +# get_recipe_upstream_version) only return the single highest version, so we
> +# must re-implement the inner loops here.
> +#
> +# The HTTP path mirrors bb.fetch2.wget.Wget._check_latest_version() and the
> +# git path mirrors bb.fetch2.git.Git.latest_versionstring(), both from
> +# bitbake scarthgap.  We cannot modify oe-core on a stable release, hence
> +# the duplication.
> +
> +import functools
> +import re
> +from logging import warning as W
> +
> +import bb.utils
> +import bb.fetch2
> +from bs4 import BeautifulSoup, SoupStrainer
> +
> +
> +def _split_version(ver):
> +    # Split on '.', '-', '_' and letter-digit boundaries (e.g. 10.0p2 -> [10, 0, p, 2])
> +    parts = re.split(r'[\.\-_]', ver)
> +    result = []
> +    for p in parts:
> +        result.extend(re.split(r'(?<=[a-zA-Z])(?=\d)|(?<=\d)(?=[a-zA-Z])', p))
> +    return result
> +
> +
> +def is_patch_update(current_ver, candidate_ver):
> +    cur = _split_version(current_ver)
> +    cand = _split_version(candidate_ver)
> +    if len(cur) != len(cand) or len(cur) < 3:
> +        return False
> +    if cur[:-1] != cand[:-1]:
> +        return False
> +    try:
> +        return int(cand[-1]) > int(cur[-1])
> +    except ValueError:
> +        return bb.utils.vercmp_string(candidate_ver, current_ver) > 0
> +
> +
> +def _find_best_version(current_ver, all_versions, filter_fn):
> +    candidates = [v for v in all_versions if filter_fn(current_ver, v)]
> +    if not candidates:
> +        return None
> +    candidates.sort(
> +        key=functools.cmp_to_key(bb.utils.vercmp_string), reverse=True
> +    )
> +    return candidates[0]
> +
> +
> +def find_patch_version(current_ver, all_versions):
> +    return _find_best_version(current_ver, all_versions, is_patch_update)
> +
> +
> +def get_all_upstream_versions(rd):
> +    """Get all upstream versions using the fetcher infrastructure.
> +
> +    Unlike ud.method.latest_versionstring() which returns only the highest
> +    version, this collects every version so the caller can filter for
> +    patch-only updates.
> +    """
> +    src_uris = rd.getVar('SRC_URI')
> +    if not src_uris:
> +        return []
> +
> +    src_uri = src_uris.split()[0]
> +    ud = bb.fetch2.FetchData(src_uri, rd)
> +
> +    if ud.type == 'git':
> +        return _get_git_versions(ud, rd)
> +    return _get_http_versions(ud, rd)
> +
> +
> +def _get_http_versions(ud, rd):
> +    """Collect all upstream versions from an HTTP index page.
> +
> +    Adapted from bb.fetch2.wget.Wget._check_latest_version() and
> +    ._init_regexes().  The upstream code only keeps the highest version;
> +    we collect them all.  Uses BeautifulSoup to parse <a> tags, matching
> +    the upstream behaviour.
> +    """
> +    try:
> +        package = ud.path.split("/")[-1]
> +
> +        regex_uri = rd.getVar('UPSTREAM_CHECK_URI')
> +        if not regex_uri:
> +            path = ud.path.split(package)[0]
> +            regex_uri = bb.fetch.encodeurl([ud.type, ud.host, path,
> +                                            ud.user, ud.pswd, {}])
> +
> +        page = ud.method._fetch_index(regex_uri, ud, rd)
> +        if not page:
> +            return []
> +
> +        regex = rd.getVar('UPSTREAM_CHECK_REGEX')
> +        if regex:
> +            regex = re.compile(regex)
> +        else:
> +            regex = ud.method._init_regexes(package, ud, rd)
> +        if not regex:
> +            return []
> +
> +        # Parse HTML links, same as Wget._check_latest_version()
> +        soup = BeautifulSoup(page, "html.parser",
> +                             parse_only=SoupStrainer("a"))
> +        if not soup:
> +            return []
> +
> +        versions = set()
> +        for link in soup.find_all('a', href=True):
> +            for text in (link['href'], str(link)):
> +                m = regex.search(text)
> +                if m and 'pver' in m.groupdict() and m.group('pver'):
> +                    versions.add(re.sub('_', '.', m.group('pver')))
> +                    break
> +        return list(versions)
> +    except Exception as e:
> +        W(" Failed to get HTTP versions: %s" % str(e))
> +        return []
> +
> +
> +def _get_git_versions(ud, rd):
> +    """Collect all tagged versions from a git remote.
> +
> +    Adapted from bb.fetch2.git.Git.latest_versionstring().  The upstream
> +    code only keeps the highest version; we collect them all.
> +    """
> +    try:
> +        output = ud.method._lsremote(ud, rd, "refs/tags/*")
> +    except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess, OSError) as e:
> +        W(" Failed to list remote tags: %s" % str(e))
> +        return []
> +
> +    rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
> +    pver_re = re.compile(
> +        rd.getVar('UPSTREAM_CHECK_GITTAGREGEX')
> +        or r"(?P<pver>([0-9][\.|_]?)+)"
> +    )
> +    nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
> +
> +    versions = set()
> +    for line in output.split("\n"):
> +        if not line:
> +            break
> +        m = rev_tag_re.match(line)
> +        if not m:
> +            continue
> +        tag = m.group(2)
> +        if nonrel_re.search(tag):
> +            continue
> +        m = pver_re.search(tag)
> +        if m:
> +            versions.add(m.group('pver').replace("_", "."))
> +    return list(versions)
> diff --git a/upgrade-helper.py b/upgrade-helper.py
> index 327bb6d..b7b8ddf 100755
> --- a/upgrade-helper.py
> +++ b/upgrade-helper.py
> @@ -59,6 +59,7 @@ from utils.emailhandler import Email
>  from statistics import Statistics
>  from steps import upgrade_steps
>  from testimage import TestImage
> +from utils.version import is_patch_update, find_patch_version, get_all_upstream_versions
>
>  if not os.getenv('BUILDDIR', False):
>      E(" You must source oe-init-build-env before running this script!\n")
> @@ -74,6 +75,7 @@ scriptpath.add_bitbake_lib_path()
>  scriptpath.add_oe_lib_path()
>
>  import oe.recipeutils
> +import bb.tinfoil
>
>  help_text = """Usage examples:
>  * To upgrade xmodmap recipe to the latest available version:
> @@ -97,6 +99,8 @@ def parse_cmdline():
>                          help="version to upgrade the recipe to")
>      parser.add_argument("--changelog", action="store_true", default=False,
>                          help="extract changelog between old and new versions, highlighting CVEs")
> +    parser.add_argument("--stable", action="store_true", default=False,
> +                        help="only upgrade to the next patch version within the stable branch (e.g. 1.2.3 -> 1.2.4)")
>
>      parser.add_argument("-d", "--debug-level", type=int, default=4, choices=range(1, 6),
>                          help="set the debug level: CRITICAL=1, ERROR=2, WARNING=3, INFO=4, DEBUG=5")
> @@ -698,6 +702,30 @@ class UniverseUpdater(Updater):
>
>      def _get_packagegroups_to_upgrade(self, packages=None):
>
> +        def _resolve_stable_version(pn, cur_ver, next_ver, tinfoil):
> +            """Find the latest patch version within the current stable branch."""
> +            if is_patch_update(cur_ver, next_ver):
> +                return next_ver, None
> +            I(" %s: latest version %s is not a patch update from %s,"
> +              " searching for versions..." %
> +              (pn, next_ver, cur_ver))
> +            try:
> +                rd = tinfoil.parse_recipe(pn)
> +                if not rd:
> +                    I(" %s: could not parse recipe, skipping" % pn)
> +                    return None, None
> +                all_versions = get_all_upstream_versions(rd)
> +                ver = find_patch_version(cur_ver, all_versions)
> +                if ver:
> +                    I(" %s: found version %s" % (pn, ver))
> +                    return ver, "N/A"
> +                else:
> +                    I(" %s: no suitable version available, skipping" % pn)
> +                    return None, None
> +            except Exception as e:
> +                I(" %s: failed to search for versions: %s" % (pn, e))
> +                return None, None
> +
>          # Prepare a single pkg dict data (or None is not upgradable) from recipeutils.get_recipe_upgrade_status data.
>          def _get_pkg_to_upgrade(self, layer_name, pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason):
>              pkg_to_upgrade = None
> @@ -763,6 +791,29 @@ class UniverseUpdater(Updater):
>                          upgrade_group.append(pkg_to_upgrade)
>                  if upgrade_group:
>                      upgrade_pkggroups.append(upgrade_group)
> +
> +        if self.args.stable and upgrade_pkggroups:
> +            stable_tinfoil = bb.tinfoil.Tinfoil()
> +            stable_tinfoil.prepare(config_only=False)
> +            try:
> +                filtered = []
> +                for group in upgrade_pkggroups:
> +                    filtered_group = []
> +                    for pkg in group:
> +                        stable_ver, stable_rev = _resolve_stable_version(
> +                            pkg['pn'], pkg['cur_ver'], pkg['next_ver'],
> +                            stable_tinfoil)
> +                        if stable_ver is not None:
> +                            pkg['next_ver'] = stable_ver
> +                            if stable_rev is not None:
> +                                pkg['revision'] = stable_rev
> +                            filtered_group.append(pkg)
> +                    if filtered_group:
> +                        filtered.append(filtered_group)
> +                upgrade_pkggroups = filtered
> +            finally:
> +                stable_tinfoil.shutdown()
> +
>          return upgrade_pkggroups
>
>      def pkg_upgrade_handler(self, pkg_ctx):
> --
> 2.34.1
>
diff mbox series

Patch

diff --git a/modules/utils/version.py b/modules/utils/version.py
new file mode 100644
index 0000000..b40eb1c
--- /dev/null
+++ b/modules/utils/version.py
@@ -0,0 +1,160 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Version utilities for --stable patch-only upgrades.
+#
+# get_all_upstream_versions() collects *all* available upstream versions so
+# that we can pick the highest patch-level release within the current stable
+# branch.  The existing bitbake/oe-core APIs (latest_versionstring,
+# get_recipe_upstream_version) only return the single highest version, so we
+# must re-implement the inner loops here.
+#
+# The HTTP path mirrors bb.fetch2.wget.Wget._check_latest_version() and the
+# git path mirrors bb.fetch2.git.Git.latest_versionstring(), both from
+# bitbake scarthgap.  We cannot modify oe-core on a stable release, hence
+# the duplication.
+
+import functools
+import re
+from logging import warning as W
+
+import bb.utils
+import bb.fetch2
+from bs4 import BeautifulSoup, SoupStrainer
+
+
+def _split_version(ver):
+    # Split on '.', '-', '_' and letter-digit boundaries (e.g. 10.0p2 -> [10, 0, p, 2])
+    parts = re.split(r'[\.\-_]', ver)
+    result = []
+    for p in parts:
+        result.extend(re.split(r'(?<=[a-zA-Z])(?=\d)|(?<=\d)(?=[a-zA-Z])', p))
+    return result
+
+
+def is_patch_update(current_ver, candidate_ver):
+    cur = _split_version(current_ver)
+    cand = _split_version(candidate_ver)
+    if len(cur) != len(cand) or len(cur) < 3:
+        return False
+    if cur[:-1] != cand[:-1]:
+        return False
+    try:
+        return int(cand[-1]) > int(cur[-1])
+    except ValueError:
+        return bb.utils.vercmp_string(candidate_ver, current_ver) > 0
+
+
+def _find_best_version(current_ver, all_versions, filter_fn):
+    candidates = [v for v in all_versions if filter_fn(current_ver, v)]
+    if not candidates:
+        return None
+    candidates.sort(
+        key=functools.cmp_to_key(bb.utils.vercmp_string), reverse=True
+    )
+    return candidates[0]
+
+
+def find_patch_version(current_ver, all_versions):
+    return _find_best_version(current_ver, all_versions, is_patch_update)
+
+
+def get_all_upstream_versions(rd):
+    """Get all upstream versions using the fetcher infrastructure.
+
+    Unlike ud.method.latest_versionstring() which returns only the highest
+    version, this collects every version so the caller can filter for
+    patch-only updates.
+    """
+    src_uris = rd.getVar('SRC_URI')
+    if not src_uris:
+        return []
+
+    src_uri = src_uris.split()[0]
+    ud = bb.fetch2.FetchData(src_uri, rd)
+
+    if ud.type == 'git':
+        return _get_git_versions(ud, rd)
+    return _get_http_versions(ud, rd)
+
+
+def _get_http_versions(ud, rd):
+    """Collect all upstream versions from an HTTP index page.
+
+    Adapted from bb.fetch2.wget.Wget._check_latest_version() and
+    ._init_regexes().  The upstream code only keeps the highest version;
+    we collect them all.  Uses BeautifulSoup to parse <a> tags, matching
+    the upstream behaviour.
+    """
+    try:
+        package = ud.path.split("/")[-1]
+
+        regex_uri = rd.getVar('UPSTREAM_CHECK_URI')
+        if not regex_uri:
+            path = ud.path.split(package)[0]
+            regex_uri = bb.fetch.encodeurl([ud.type, ud.host, path,
+                                            ud.user, ud.pswd, {}])
+
+        page = ud.method._fetch_index(regex_uri, ud, rd)
+        if not page:
+            return []
+
+        regex = rd.getVar('UPSTREAM_CHECK_REGEX')
+        if regex:
+            regex = re.compile(regex)
+        else:
+            regex = ud.method._init_regexes(package, ud, rd)
+        if not regex:
+            return []
+
+        # Parse HTML links, same as Wget._check_latest_version()
+        soup = BeautifulSoup(page, "html.parser",
+                             parse_only=SoupStrainer("a"))
+        if not soup:
+            return []
+
+        versions = set()
+        for link in soup.find_all('a', href=True):
+            for text in (link['href'], str(link)):
+                m = regex.search(text)
+                if m and 'pver' in m.groupdict() and m.group('pver'):
+                    versions.add(re.sub('_', '.', m.group('pver')))
+                    break
+        return list(versions)
+    except Exception as e:
+        W(" Failed to get HTTP versions: %s" % str(e))
+        return []
+
+
+def _get_git_versions(ud, rd):
+    """Collect all tagged versions from a git remote.
+
+    Adapted from bb.fetch2.git.Git.latest_versionstring().  The upstream
+    code only keeps the highest version; we collect them all.
+    """
+    try:
+        output = ud.method._lsremote(ud, rd, "refs/tags/*")
+    except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess, OSError) as e:
+        W(" Failed to list remote tags: %s" % str(e))
+        return []
+
+    rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
+    pver_re = re.compile(
+        rd.getVar('UPSTREAM_CHECK_GITTAGREGEX')
+        or r"(?P<pver>([0-9][\.|_]?)+)"
+    )
+    nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
+
+    versions = set()
+    for line in output.split("\n"):
+        if not line:
+            break
+        m = rev_tag_re.match(line)
+        if not m:
+            continue
+        tag = m.group(2)
+        if nonrel_re.search(tag):
+            continue
+        m = pver_re.search(tag)
+        if m:
+            versions.add(m.group('pver').replace("_", "."))
+    return list(versions)
diff --git a/upgrade-helper.py b/upgrade-helper.py
index 327bb6d..b7b8ddf 100755
--- a/upgrade-helper.py
+++ b/upgrade-helper.py
@@ -59,6 +59,7 @@ from utils.emailhandler import Email
 from statistics import Statistics
 from steps import upgrade_steps
 from testimage import TestImage
+from utils.version import is_patch_update, find_patch_version, get_all_upstream_versions
 
 if not os.getenv('BUILDDIR', False):
     E(" You must source oe-init-build-env before running this script!\n")
@@ -74,6 +75,7 @@ scriptpath.add_bitbake_lib_path()
 scriptpath.add_oe_lib_path()
 
 import oe.recipeutils
+import bb.tinfoil
 
 help_text = """Usage examples:
 * To upgrade xmodmap recipe to the latest available version:
@@ -97,6 +99,8 @@ def parse_cmdline():
                         help="version to upgrade the recipe to")
     parser.add_argument("--changelog", action="store_true", default=False,
                         help="extract changelog between old and new versions, highlighting CVEs")
+    parser.add_argument("--stable", action="store_true", default=False,
+                        help="only upgrade to the next patch version within the stable branch (e.g. 1.2.3 -> 1.2.4)")
 
     parser.add_argument("-d", "--debug-level", type=int, default=4, choices=range(1, 6),
                         help="set the debug level: CRITICAL=1, ERROR=2, WARNING=3, INFO=4, DEBUG=5")
@@ -698,6 +702,30 @@ class UniverseUpdater(Updater):
 
     def _get_packagegroups_to_upgrade(self, packages=None):
 
+        def _resolve_stable_version(pn, cur_ver, next_ver, tinfoil):
+            """Find the latest patch version within the current stable branch."""
+            if is_patch_update(cur_ver, next_ver):
+                return next_ver, None
+            I(" %s: latest version %s is not a patch update from %s,"
+              " searching for versions..." %
+              (pn, next_ver, cur_ver))
+            try:
+                rd = tinfoil.parse_recipe(pn)
+                if not rd:
+                    I(" %s: could not parse recipe, skipping" % pn)
+                    return None, None
+                all_versions = get_all_upstream_versions(rd)
+                ver = find_patch_version(cur_ver, all_versions)
+                if ver:
+                    I(" %s: found version %s" % (pn, ver))
+                    return ver, "N/A"
+                else:
+                    I(" %s: no suitable version available, skipping" % pn)
+                    return None, None
+            except Exception as e:
+                I(" %s: failed to search for versions: %s" % (pn, e))
+                return None, None
+
         # Prepare a single pkg dict data (or None is not upgradable) from recipeutils.get_recipe_upgrade_status data.
         def _get_pkg_to_upgrade(self, layer_name, pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason):
             pkg_to_upgrade = None
@@ -763,6 +791,29 @@ class UniverseUpdater(Updater):
                         upgrade_group.append(pkg_to_upgrade)
                 if upgrade_group:
                     upgrade_pkggroups.append(upgrade_group)
+
+        if self.args.stable and upgrade_pkggroups:
+            stable_tinfoil = bb.tinfoil.Tinfoil()
+            stable_tinfoil.prepare(config_only=False)
+            try:
+                filtered = []
+                for group in upgrade_pkggroups:
+                    filtered_group = []
+                    for pkg in group:
+                        stable_ver, stable_rev = _resolve_stable_version(
+                            pkg['pn'], pkg['cur_ver'], pkg['next_ver'],
+                            stable_tinfoil)
+                        if stable_ver is not None:
+                            pkg['next_ver'] = stable_ver
+                            if stable_rev is not None:
+                                pkg['revision'] = stable_rev
+                            filtered_group.append(pkg)
+                    if filtered_group:
+                        filtered.append(filtered_group)
+                upgrade_pkggroups = filtered
+            finally:
+                stable_tinfoil.shutdown()
+
         return upgrade_pkggroups
 
     def pkg_upgrade_handler(self, pkg_ctx):