diff mbox series

fetch/git: Add an 'update' unpack mode to the fetchers (git only for now)

Message ID 20260319122832.2607446-1-richard.purdie@linuxfoundation.org
State Accepted, archived
Commit e7d5e156275782948d3346f299780389ab263ab6
Headers show
Series fetch/git: Add an 'update' unpack mode to the fetchers (git only for now) | expand

Commit Message

Richard Purdie March 19, 2026, 12:28 p.m. UTC
We need a way to allow the git fetcher to update data in place rather
than remove and replace. This change adds an unpack_update() function
which can be used in place of the usual unpack() call. This will attempt
to rebase changes on top of the upstream changes. It will raise an error
if any local uncommitted changes are present.

The implementation adds a "dldir" origin to the list of origins in the git
repo which we can then fetch from and update against. This origin may be
of use to users accessing the git repo outside the fetcher too.

unpack_update() should never delete existing data in the way unpack() does
but can error out in many more ways due to the number of possible
input states.

Currently only git is supported.

The intention is for this to be used by bitbake-setup instead of unpack.

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
---
 lib/bb/fetch2/__init__.py | 13 ++++++++--
 lib/bb/fetch2/git.py      | 51 +++++++++++++++++++++++++++++++++++----
 2 files changed, 57 insertions(+), 7 deletions(-)
diff mbox series

Patch

diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index aaefd860204..909ae98146e 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -1634,6 +1634,9 @@  class FetchMethod(object):
 
         return
 
+    def unpack_update(self, urldata, rootdir, data):
+        raise RuntimeError("No method available for this url type: %s" % urldata.type)
+
     def clean(self, urldata, d):
         """
         Clean any existing full or partial download
@@ -1990,7 +1993,7 @@  class Fetch(object):
             if not ret:
                 raise FetchError("URL doesn't work", u)
 
-    def unpack(self, root, urls=None):
+    def unpack(self, root, urls=None, update=False):
         """
         Unpack urls to root
         """
@@ -2009,7 +2012,10 @@  class Fetch(object):
                     lf = bb.utils.lockfile(ud.lockfile)
 
                 unpack_tracer.start_url(u)
-                ud.method.unpack(ud, root, self.d)
+                if update:
+                    ud.method.unpack_update(ud, root, self.d)
+                else:
+                    ud.method.unpack(ud, root, self.d)
                 unpack_tracer.finish_url(u)
 
             finally:
@@ -2018,6 +2024,9 @@  class Fetch(object):
 
         unpack_tracer.complete()
 
+    def unpack_update(self, root, urls=None):
+        self.unpack(root, urls, update=True)
+
     def clean(self, urls=None):
         """
         Clean files that the fetcher gets or places
diff --git a/lib/bb/fetch2/git.py b/lib/bb/fetch2/git.py
index 738174cd104..1de70c5757a 100644
--- a/lib/bb/fetch2/git.py
+++ b/lib/bb/fetch2/git.py
@@ -656,7 +656,10 @@  class Git(FetchMethod):
         # The url is local ud.clonedir, set it to upstream one
         runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest)
 
-    def unpack(self, ud, destdir, d):
+    def unpack_update(self, ud, destdir, d):
+        return self.unpack(ud, destdir, d, update=True)
+
+    def unpack(self, ud, destdir, d, update=False):
         """ unpack the downloaded src to destdir"""
 
         subdir = ud.parm.get("subdir")
@@ -680,7 +683,7 @@  class Git(FetchMethod):
 
         destsuffix = ud.parm.get("destsuffix", def_destsuffix)
         destdir = ud.destdir = os.path.join(destdir, destsuffix)
-        if os.path.exists(destdir):
+        if os.path.exists(destdir) and not update:
             bb.utils.prunedir(destdir)
         if not ud.bareclone:
             ud.unpack_tracer.unpack("git", destdir)
@@ -691,11 +694,15 @@  class Git(FetchMethod):
             ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd
 
         source_found = False
+        update_mode = False
         source_error = []
 
         clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
         if clonedir_is_up_to_date:
-            runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+            if update and os.path.exists(destdir):
+                update_mode = True
+            else:
+                runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
             source_found = True
         else:
             source_error.append("clone directory not available or not up to date: " + ud.clonedir)
@@ -703,8 +710,11 @@  class Git(FetchMethod):
         if not source_found:
             if ud.shallow:
                 if os.path.exists(ud.fullshallow):
-                    bb.utils.mkdirhier(destdir)
-                    runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
+                    if update and os.path.exists(destdir):
+                        update_mode = True
+                    else:
+                        bb.utils.mkdirhier(destdir)
+                        runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
                     source_found = True
                 else:
                     source_error.append("shallow clone not available: " + ud.fullshallow)
@@ -714,6 +724,32 @@  class Git(FetchMethod):
         if not source_found:
             raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
 
+        if update_mode:
+            if ud.shallow:
+                raise bb.fetch2.UnpackError("Can't update shallow clones checkouts without network access, not supported.", ud.url)
+
+            output = runfetchcmd("%s status --porcelain" % (ud.basecmd), d, workdir=destdir)
+            if output:
+                raise bb.fetch2.UnpackError("Repository at %s has uncommitted changes, unable to update:\n%s" % (destdir, output), ud.url)
+
+            # Set up remote for the download location if it doesn't exist
+            try:
+                runfetchcmd("%s remote get-url dldir" % (ud.basecmd), d, workdir=destdir)
+            except bb.fetch2.FetchError:
+                if ud.clonedir:
+                    runfetchcmd("%s remote add dldir file://%s" % (ud.basecmd, ud.clonedir), d, workdir=destdir)
+            try:
+                runfetchcmd("%s fetch dldir" % (ud.basecmd), d, workdir=destdir)
+                runfetchcmd("%s rebase --no-autosquash --no-autostash %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
+            except bb.fetch2.FetchError as e:
+                # If rebase failed, abort it
+                try:
+                    runfetchcmd("%s rebase --abort" % (ud.basecmd), d, workdir=destdir)
+                except Exception:
+                    pass
+                raise bb.fetch2.UnpackError("Failed to update checkout in place: %s" % str(e), ud.url)
+            return True
+
         # If there is a tag parameter in the url and we also have a fixed srcrev, check the tag
         # matches the revision
         if 'tag' in ud.parm and sha1_re.match(ud.revision):
@@ -729,6 +765,11 @@  class Git(FetchMethod):
 
         repourl = self._get_repo_url(ud)
         runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)
+        if ud.clonedir:
+            try:
+                runfetchcmd("%s remote get-url dldir" % (ud.basecmd), d, workdir=destdir)
+            except bb.fetch2.FetchError:
+                runfetchcmd("%s remote add dldir file://%s" % (ud.basecmd, ud.clonedir), d, workdir=destdir)
 
         if self._contains_lfs(ud, d, destdir):
             if not need_lfs: