diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index 52d5556d..35874e25 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -1075,6 +1075,14 @@ def rename_bad_checksum(ud, suffix):
     if ud.localpath is None:
         return
 
+    # For file:// URLs the localpath points at the user's source tree
+    # (e.g. recipe-shipped files under FILESPATH or an absolute path the
+    # user passed in). Renaming those out from under the user is
+    # destructive and surprising; surface the ChecksumError without
+    # mutating their files.
+    if ud.type == 'file':
+        return
+
     new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
     bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
     if not bb.utils.movefile(ud.localpath, new_localpath):
diff --git a/lib/bb/fetch2/local.py b/lib/bb/fetch2/local.py
index fda56a56..2abcc5eb 100644
--- a/lib/bb/fetch2/local.py
+++ b/lib/bb/fetch2/local.py
@@ -17,7 +17,7 @@ import os
 import urllib.request, urllib.parse, urllib.error
 import bb
 import bb.utils
-from   bb.fetch2 import FetchMethod, FetchError, ParameterError
+from   bb.fetch2 import CHECKSUM_LIST, FetchMethod, FetchError, ParameterError
 from   bb.fetch2 import logger
 
 class Local(FetchMethod):
@@ -31,7 +31,22 @@ class Local(FetchMethod):
         # We don't set localfile as for this fetcher the file is already local!
         ud.basename = os.path.basename(ud.path)
         ud.basepath = ud.path
-        ud.needdonestamp = False
+        # Without a donestamp the verify_checksum path is bypassed entirely
+        # for file:// urls, so an explicit checksum on the url (for instance
+        # UNINATIVE_CHECKSUM expanded into the uninative tarball file:// url)
+        # is never compared against the file's actual contents. Set
+        # needdonestamp to True whenever any of the known checksum
+        # parameters is present, so the standard donestamp +
+        # verify_checksum cycle runs and a mismatch raises
+        # ChecksumError. When no checksum is supplied (the common case for
+        # recipe patches under file://) needdonestamp stays False and
+        # behavior is unchanged.
+        name = ud.parm.get("name")
+        ud.needdonestamp = any(
+            (name and "%s.%ssum" % (name, a) in ud.parm)
+            or "%ssum" % a in ud.parm
+            for a in CHECKSUM_LIST
+        )
         if "*" in ud.path:
             raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url)
         return
diff --git a/lib/bb/tests/fetch.py b/lib/bb/tests/fetch.py
index 077f741e..3347c60d 100644
--- a/lib/bb/tests/fetch.py
+++ b/lib/bb/tests/fetch.py
@@ -813,6 +813,110 @@ class FetcherLocalTest(FetcherTest):
         tree = self.fetchUnpack(['file://archive.tar.bz2;subdir=bar;striplevel=1'])
         self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
 
+    def test_local_no_checksum_no_donestamp(self):
+        # The common case: file:// recipe patches without a checksum
+        # parameter must keep the historical "no donestamp tracking"
+        # behavior so this fix does not regress the hot path. After
+        # download(), no .done / .lock entries must appear in DL_DIR
+        # for the bare file:// url.
+        with open(os.path.join(self.localsrcdir, 'plain'), 'wb') as f:
+            f.write(b"plain\n")
+        fetcher = bb.fetch.Fetch(['file://plain'], self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+        self.assertFalse(ud.needdonestamp)
+        # With needdonestamp False, no donestamp tracking should be
+        # configured, so no .done / .lock entries should appear in
+        # DL_DIR for the bare file:// url after download.
+        fetcher.download()
+        for entry in os.listdir(self.dldir):
+            self.assertFalse(entry.startswith('plain.done'))
+            self.assertFalse(entry.startswith('plain.lock'))
+
+    def test_local_checksum_match(self):
+        # A correct sha256 must drive the verify_checksum cycle to
+        # completion: download() returns without raising AND the
+        # donestamp lands in DL_DIR. The donestamp is the on-disk
+        # evidence that update_stamp() ran after verify_checksum()
+        # passed - without the fix, needdonestamp=False short-circuits
+        # update_stamp() and no donestamp is written even on "match".
+        import hashlib
+        content = b"file:// checksum match test\n"
+        with open(os.path.join(self.localsrcdir, 'sumfile'), 'wb') as f:
+            f.write(content)
+        good = hashlib.sha256(content).hexdigest()
+        fetcher = bb.fetch.Fetch(['file://sumfile;sha256sum=' + good], self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+        self.assertTrue(ud.needdonestamp)
+        fetcher.download()
+        self.assertTrue(os.path.exists(ud.donestamp))
+
+    def test_local_checksum_mismatch(self):
+        content = b"file:// checksum mismatch test\n"
+        srcpath = os.path.join(self.localsrcdir, 'sumfile')
+        with open(srcpath, 'wb') as f:
+            f.write(content)
+        bad = "0" * 64
+        fetcher = bb.fetch.Fetch(['file://sumfile;sha256sum=' + bad], self.d)
+        with self.assertRaises(bb.fetch2.FetchError):
+            fetcher.download()
+        # The user's source tree must not be mutated. rename_bad_checksum
+        # would otherwise leave a <path>_bad-checksum_<sha> sibling and
+        # remove the original.
+        self.assertTrue(os.path.exists(srcpath))
+        with open(srcpath, 'rb') as f:
+            self.assertEqual(f.read(), content)
+        for entry in os.listdir(self.localsrcdir):
+            self.assertNotIn('_bad-checksum_', entry)
+
+    def test_local_checksum_mismatch_md5(self):
+        # Coverage for an algorithm other than sha256 so a regression
+        # that special-cased sha256 detection does not slip through.
+        # Mirror the sha256 mismatch assertions: source file present,
+        # contents unchanged, no _bad-checksum_ sibling left behind.
+        content = b"file:// md5 mismatch test\n"
+        srcpath = os.path.join(self.localsrcdir, 'sumfile_md5')
+        with open(srcpath, 'wb') as f:
+            f.write(content)
+        bad = "0" * 32
+        fetcher = bb.fetch.Fetch(['file://sumfile_md5;md5sum=' + bad], self.d)
+        with self.assertRaises(bb.fetch2.FetchError):
+            fetcher.download()
+        self.assertTrue(os.path.exists(srcpath))
+        with open(srcpath, 'rb') as f:
+            self.assertEqual(f.read(), content)
+        for entry in os.listdir(self.localsrcdir):
+            self.assertNotIn('_bad-checksum_', entry)
+
+    def test_local_checksum_named(self):
+        # UNINATIVE_CHECKSUM (the original reproducer for this bug) and
+        # several other recipe-driven file:// urls use the name= prefix
+        # form: foo.sha256sum= rather than bare sha256sum=. Confirm the
+        # named form flips needdonestamp AND that the verify cycle runs
+        # (donestamp on disk after download).
+        import hashlib
+        content = b"file:// named checksum test\n"
+        with open(os.path.join(self.localsrcdir, 'sumfile_named'), 'wb') as f:
+            f.write(content)
+        good = hashlib.sha256(content).hexdigest()
+        fetcher = bb.fetch.Fetch(
+            ['file://sumfile_named;name=blob;blob.sha256sum=' + good], self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+        self.assertTrue(ud.needdonestamp)
+        fetcher.download()
+        self.assertTrue(os.path.exists(ud.donestamp))
+
+    def test_local_checksum_directory_ignored(self):
+        # Local supports directory urls; supports_checksum returns False
+        # for directories. A checksum parm on a directory url should
+        # therefore be a no-op rather than a hard error: verify_checksum
+        # short-circuits on supports_checksum() and no ChecksumError is
+        # raised even when the checksum value is bogus.
+        bad = "0" * 64
+        fetcher = bb.fetch.Fetch(['file://dir;sha256sum=' + bad], self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+        self.assertFalse(ud.method.supports_checksum(ud))
+        fetcher.download()
+
     def dummyGitTest(self, suffix):
         # Create dummy local Git repo
         src_dir = tempfile.mkdtemp(dir=self.tempdir,
