diff --git a/modules/utils/version.py b/modules/utils/version.py
new file mode 100644
index 0000000..4417bd5
--- /dev/null
+++ b/modules/utils/version.py
@@ -0,0 +1,185 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Version utilities for --stable patch-only upgrades.
+#
+# get_all_upstream_versions() collects *all* available upstream versions so
+# that we can pick the highest patch-level release within the current stable
+# branch.  The existing bitbake/oe-core APIs (latest_versionstring,
+# get_recipe_upstream_version) only return the single highest version, so we
+# must re-implement the inner loops here.
+#
+# The HTTP path mirrors bb.fetch2.wget.Wget._check_latest_version() and the
+# git path mirrors bb.fetch2.git.Git.latest_versionstring(), both from
+# bitbake scarthgap.  We cannot modify oe-core on a stable release, hence
+# the duplication.
+#
+# Requires: beautifulsoup4 (python3-beautifulsoup4 in oe-core)
+
+import functools
+import re
+from logging import warning as W
+
+import bb.utils
+import bb.fetch2
+from bs4 import BeautifulSoup, SoupStrainer
+
+
+def _split_version(ver):
+    # Split on '.', '-', '_' and letter-digit boundaries (e.g. 10.0p2 -> [10, 0, p, 2])
+    parts = re.split(r'[\.\-_]', ver)
+    result = []
+    for p in parts:
+        result.extend(re.split(r'(?<=[a-zA-Z])(?=\d)|(?<=\d)(?=[a-zA-Z])', p))
+    return result
+
+
+def is_patch_update(current_ver, candidate_ver):
+    """Check if candidate is a patch-level update.
+
+    A patch update changes only the last version component while keeping
+    all preceding components identical. Also accepts sub-patch extensions
+    (e.g. 1.2.3 -> 1.2.3.1).
+    """
+    cur = _split_version(current_ver)
+    cand = _split_version(candidate_ver)
+    if len(cur) < 2 or len(cand) < 2:
+        return False
+    if bb.utils.vercmp_string(candidate_ver, current_ver) <= 0:
+        return False
+    # All but the last component of the current version must match
+    # the corresponding prefix of the candidate
+    if len(cand) < len(cur):
+        return False
+    return cur[:-1] == cand[:len(cur) - 1]
+
+
+def is_minor_update(current_ver, candidate_ver):
+    """Check if candidate is a newer release within the same major version."""
+    cur = _split_version(current_ver)
+    cand = _split_version(candidate_ver)
+    if len(cur) < 2 or len(cand) < 2:
+        return False
+    if cur[0] != cand[0]:
+        return False
+    return bb.utils.vercmp_string(candidate_ver, current_ver) > 0
+
+
+def _find_best_version(current_ver, all_versions, filter_fn):
+    candidates = [v for v in all_versions if filter_fn(current_ver, v)]
+    if not candidates:
+        return None
+    candidates.sort(
+        key=functools.cmp_to_key(bb.utils.vercmp_string), reverse=True
+    )
+    return candidates[0]
+
+
+def find_patch_version(current_ver, all_versions):
+    """Return the highest patch-level update in all_versions, or None."""
+    return _find_best_version(current_ver, all_versions, is_patch_update)
+
+
+def find_minor_version(current_ver, all_versions):
+    """Return the highest same-major update in all_versions, or None."""
+    return _find_best_version(current_ver, all_versions, is_minor_update)
+
+
+def get_all_upstream_versions(rd):
+    """Get all upstream versions using the fetcher infrastructure.
+
+    Unlike ud.method.latest_versionstring() which returns only the highest
+    version, this collects every version so the caller can filter for
+    patch-only updates.
+
+    rd: a live recipe datastore (needed by bb.fetch2.Fetch).
+    Returns a (possibly empty) list of version strings.
+    """
+    src_uris = rd.getVar('SRC_URI')
+    if not src_uris:
+        return []
+
+    # Only the first SRC_URI entry identifies the upstream release source
+    src_uri = src_uris.split()[0]
+    fetch = bb.fetch2.Fetch([src_uri], rd)
+    ud = fetch.ud[fetch.urls[0]]
+
+    # git sources are scanned via ls-remote tags; everything else via the
+    # index page of the download directory
+    if ud.type == 'git':
+        return _get_git_versions(ud, rd)
+    return _get_http_versions(ud, rd)
+
+
+def _get_http_versions(ud, rd):
+    """Collect all upstream versions from an HTTP index page.
+
+    Adapted from bb.fetch2.wget.Wget._check_latest_version() and
+    ._init_regexes().  The upstream code only keeps the highest version;
+    we collect them all.  Uses BeautifulSoup to parse <a> tags, matching
+    the upstream behaviour.
+
+    ud: FetchData for the recipe's first SRC_URI entry
+    rd: recipe datastore
+    Returns a (possibly empty) list of unique version strings; any
+    failure is logged and reported as an empty list (best effort).
+    """
+    try:
+        package = ud.path.split("/")[-1]
+
+        # UPSTREAM_CHECK_URI overrides which index page to scan;
+        # otherwise scan the directory holding the source archive.
+        regex_uri = rd.getVar('UPSTREAM_CHECK_URI')
+        if not regex_uri:
+            path = ud.path.split(package)[0]
+            regex_uri = bb.fetch2.encodeurl([ud.type, ud.host, path,
+                                            ud.user, ud.pswd, {}])
+
+        page = ud.method._fetch_index(regex_uri, ud, rd)
+        if not page:
+            return []
+
+        # UPSTREAM_CHECK_REGEX overrides the version-matching pattern;
+        # otherwise derive one from the package filename, as upstream does.
+        regex = rd.getVar('UPSTREAM_CHECK_REGEX')
+        if regex:
+            regex = re.compile(regex)
+        else:
+            regex = ud.method._init_regexes(package, ud, rd)
+        if not regex:
+            return []
+
+        # Parse HTML links, same as Wget._check_latest_version()
+        soup = BeautifulSoup(page, "html.parser",
+                             parse_only=SoupStrainer("a"))
+        if not soup:
+            return []
+
+        versions = set()
+        for link in soup.find_all('a', href=True):
+            # Try the href first, then the rendered tag text
+            for text in (link['href'], str(link)):
+                m = regex.search(text)
+                # Only accept matches that captured a non-empty 'pver' group
+                if m and 'pver' in m.groupdict() and m.group('pver'):
+                    # Normalize '_' separators to '.' for version comparison
+                    versions.add(re.sub('_', '.', m.group('pver')))
+                    break
+        return list(versions)
+    except Exception as e:
+        # Best effort: an upstream-check failure must not abort the run
+        W(" Failed to get HTTP versions: %s" % e)
+        return []
+
+
+def _get_git_versions(ud, rd):
+    """Collect all tagged versions from a git remote.
+
+    Adapted from bb.fetch2.git.Git.latest_versionstring().  The upstream
+    code only keeps the highest version; we collect them all.
+
+    ud: FetchData for a git SRC_URI entry
+    rd: recipe datastore
+    Returns a (possibly empty) list of unique version strings.
+    """
+    try:
+        output = ud.method._lsremote(ud, rd, "refs/tags/*")
+    except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess, OSError) as e:
+        W(" Failed to list remote tags: %s" % e)
+        return []
+
+    # Matches "<sha1> refs/tags/<tag>" lines from git ls-remote
+    rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
+    # UPSTREAM_CHECK_GITTAGREGEX overrides how the version is extracted
+    # from a tag name; the fallback mirrors the upstream default.
+    pver_re = re.compile(
+        rd.getVar('UPSTREAM_CHECK_GITTAGREGEX')
+        or r"(?P<pver>([0-9][\.|_]?)+)"
+    )
+    # Skip pre-release/non-release tags
+    nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
+
+    versions = set()
+    for line in output.split("\n"):
+        if not line:
+            continue
+        m = rev_tag_re.match(line)
+        if not m:
+            continue
+        tag = m.group(2)
+        if nonrel_re.search(tag):
+            continue
+        m = pver_re.search(tag)
+        if m:
+            # Normalize '_' separators to '.' for version comparison
+            versions.add(m.group('pver').replace("_", "."))
+    return list(versions)
diff --git a/upgrade-helper.py b/upgrade-helper.py
index 9954a66..aecb207 100755
--- a/upgrade-helper.py
+++ b/upgrade-helper.py
@@ -59,6 +59,9 @@ from utils.emailhandler import Email
 from statistics import Statistics
 from steps import upgrade_steps
 from testimage import TestImage
+from utils.version import (is_patch_update, find_patch_version,
+                           is_minor_update, find_minor_version,
+                           get_all_upstream_versions)
 
 if not os.getenv('BUILDDIR', False):
     E(" You must source oe-init-build-env before running this script!\n")
@@ -74,6 +77,7 @@ scriptpath.add_bitbake_lib_path()
 scriptpath.add_oe_lib_path()
 
 import oe.recipeutils
+import bb.tinfoil
 
 help_text = """Usage examples:
 * To upgrade xmodmap recipe to the latest available version:
@@ -96,6 +100,9 @@ def parse_cmdline():
     parser.add_argument("-t", "--to_version",
                         help="version to upgrade the recipe to")
 
+    parser.add_argument("--stable", action="store_true", default=False,
+                        help="only upgrade to the next patch version within the stable branch (e.g. 1.2.3 -> 1.2.4)")
+
     parser.add_argument("-d", "--debug-level", type=int, default=4, choices=range(1, 6),
                         help="set the debug level: CRITICAL=1, ERROR=2, WARNING=3, INFO=4, DEBUG=5")
     parser.add_argument("-e", "--send-emails", action="store_true", default=False,
@@ -679,6 +686,67 @@ class UniverseUpdater(Updater):
 
     def _get_packagegroups_to_upgrade(self, packages=None):
 
+        def _resolve_stable_version(pn, cur_ver, next_ver, tinfoil):
+            """Find the best version within the policy allowed by AUH_UPGRADE_POLICY.
+
+            Only called when --stable is active. --stable enables version
+            filtering globally; AUH_UPGRADE_POLICY overrides the policy
+            per-recipe within that filtering.
+
+            AUH_UPGRADE_POLICY can be set per-recipe in local.conf or in the
+            recipe itself:
+              "patch" (default) - only patch-level updates (e.g. 1.2.3 -> 1.2.4)
+              "minor"           - same-major updates (e.g. 1.2.3 -> 1.3.0)
+              "none"            - skip this recipe entirely
+
+            Returns (version, revision): version is None to skip the recipe;
+            revision is "N/A" when the version came from a full upstream
+            search, else None (caller keeps its existing revision).
+            """
+            # Quick path: if next_ver is already a patch update, accept it
+            # without parsing the recipe (covers the common case).
+            # NOTE(review): this also accepts patch updates for recipes
+            # carrying AUH_UPGRADE_POLICY=none/minor — confirm intended.
+            if is_patch_update(cur_ver, next_ver):
+                return next_ver, None
+
+            # parse_recipe needs a live tinfoil server; treat any failure
+            # as "unparseable" rather than aborting the whole run
+            try:
+                rd = tinfoil.parse_recipe(pn)
+            except Exception:
+                rd = None
+            if not rd:
+                I(" %s: could not parse recipe, skipping" % pn)
+                return None, None
+
+            policy = (rd.getVar('AUH_UPGRADE_POLICY') or 'patch').strip().lower()
+            if policy not in ('patch', 'minor', 'none'):
+                W(" %s: unrecognized AUH_UPGRADE_POLICY '%s',"
+                  " defaulting to 'patch'" % (pn, policy))
+                policy = 'patch'
+            if policy == 'none':
+                I(" %s: AUH_UPGRADE_POLICY=none, skipping" % pn)
+                return None, None
+
+            # Pick the predicate/search pair matching the policy
+            if policy == 'minor':
+                check_fn = is_minor_update
+                find_fn = find_minor_version
+            else:
+                check_fn = is_patch_update
+                find_fn = find_patch_version
+
+            # The already-known latest version may satisfy the policy
+            if check_fn(cur_ver, next_ver):
+                return next_ver, None
+
+            I(" %s: latest version %s is not a %s update from %s,"
+              " searching for versions..." %
+              (pn, next_ver, policy, cur_ver))
+            # Full upstream scan; any fetcher failure just skips the recipe
+            try:
+                all_versions = get_all_upstream_versions(rd)
+                ver = find_fn(cur_ver, all_versions)
+                if ver:
+                    I(" %s: found version %s" % (pn, ver))
+                    # Revision is unknown for a version found by searching
+                    return ver, "N/A"
+                else:
+                    I(" %s: no suitable version available, skipping" % pn)
+                    return None, None
+            except Exception as e:
+                I(" %s: failed to search for versions: %s" % (pn, e))
+                return None, None
+
         # Prepare a single pkg dict data (or None is not upgradable) from recipeutils.get_recipe_upgrade_status data.
         def _get_pkg_to_upgrade(self, layer_name, pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason):
             pkg_to_upgrade = None
@@ -736,6 +804,42 @@ class UniverseUpdater(Updater):
                         upgrade_group.append(pkg_to_upgrade)
                 if upgrade_group:
                     upgrade_pkggroups.append(upgrade_group)
+
+        if self.args.stable and upgrade_pkggroups:
+            # Quick-path: accept packages where next_ver is already a patch
+            # update without opening tinfoil.  For the rest, tinfoil must
+            # remain open because bb.fetch2.Fetch needs a live rd object.
+            needs_tinfoil = []
+            for group in upgrade_pkggroups:
+                quick_group = []
+                slow_pkgs = []
+                for pkg in group:
+                    if is_patch_update(pkg['cur_ver'], pkg['next_ver']):
+                        quick_group.append(pkg)
+                    else:
+                        slow_pkgs.append(pkg)
+                if quick_group or slow_pkgs:
+                    needs_tinfoil.append((quick_group, slow_pkgs))
+
+            if any(slow for _, slow in needs_tinfoil):
+                stable_tinfoil = bb.tinfoil.Tinfoil()
+                stable_tinfoil.prepare(config_only=False)
+                try:
+                    for quick_group, slow_pkgs in needs_tinfoil:
+                        for pkg in slow_pkgs:
+                            ver, rev = _resolve_stable_version(
+                                pkg['pn'], pkg['cur_ver'],
+                                pkg['next_ver'], stable_tinfoil)
+                            if ver is not None:
+                                pkg['next_ver'] = ver
+                                if rev is not None:
+                                    pkg['revision'] = rev
+                                quick_group.append(pkg)
+                finally:
+                    stable_tinfoil.shutdown()
+
+            upgrade_pkggroups = [g for g, _ in needs_tinfoil if g]
+
         return upgrade_pkggroups
 
     def pkg_upgrade_handler(self, pkg_ctx):
