
[v2] cache/cookerdata: Move recipe parsing functions from cache to databuilder

Message ID 20221116000622.2480508-1-richard.purdie@linuxfoundation.org
State Accepted, archived
Commit 783879319c6a4cf3639fcbf763b964e42f602eca
Series [v2] cache/cookerdata: Move recipe parsing functions from cache to databuilder

Commit Message

Richard Purdie Nov. 16, 2022, 12:06 a.m. UTC
When 'NoCache' was written, databuilder/cookerdata didn't exist. It does
now, and the recipe parsing functionality contained in NoCache clearly
belongs there; it isn't a cache function. Move those functions, renaming them
to match the style in databuilder but otherwise not changing functionality
for now. Fix up the callers to match (which makes it clear this is the
right move).

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
---
 bin/bitbake-worker   |  3 +-
 lib/bb/cache.py      | 71 ++++----------------------------------------
 lib/bb/command.py    |  3 +-
 lib/bb/cooker.py     |  3 +-
 lib/bb/cookerdata.py | 51 +++++++++++++++++++++++++++++++
 lib/bb/runqueue.py   |  3 +-
 6 files changed, 60 insertions(+), 74 deletions(-)

v2: Fix "bitbake -e <recipe>" reference to loadDataFull() in cooker.py
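
The caller-side change is mechanical. A minimal sketch of the old and new call
sites follows (assuming an initialised CookerDataBuilder instance `databuilder`,
a recipe filename `fn`, a list of bbappend paths `appends` and a multiconfig
name `mc`, as the callers touched by this patch already have in scope):

    # Old path: recipe parsing went through the NoCache wrapper, even though
    # no caching was involved.
    bb_cache = bb.cache.NoCache(databuilder)
    the_data = bb_cache.loadDataFull(fn, appends)

    # New path: ask the databuilder directly; behaviour is unchanged.
    the_data = databuilder.parseRecipe(fn, appends)

    # Variant-aware parsing used by lib/bb/cache.py moves the same way:
    # NoCache.load_bbfile(..., mc=mc) becomes parseRecipeVariants(..., mc=mc).
    datastores = databuilder.parseRecipeVariants(fn, appends, mc=mc)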

Patch

diff --git a/bin/bitbake-worker b/bin/bitbake-worker
index 7be39370b3..d54044f361 100755
--- a/bin/bitbake-worker
+++ b/bin/bitbake-worker
@@ -238,7 +238,6 @@  def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
                 os.umask(umask)
 
             try:
-                bb_cache = bb.cache.NoCache(databuilder)
                 (realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
                 the_data = databuilder.mcdata[mc]
                 the_data.setVar("BB_WORKERCONTEXT", "1")
@@ -257,7 +256,7 @@  def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
                     bb.parse.siggen.set_taskhashes(workerdata["newhashes"])
                 ret = 0
 
-                the_data = bb_cache.loadDataFull(fn, appends)
+                the_data = databuilder.parseRecipe(fn, appends)
                 the_data.setVar('BB_TASKHASH', taskhash)
                 the_data.setVar('BB_UNIHASH', unihash)
 
diff --git a/lib/bb/cache.py b/lib/bb/cache.py
index 988c596c39..4d715e911d 100644
--- a/lib/bb/cache.py
+++ b/lib/bb/cache.py
@@ -280,75 +280,14 @@  def variant2virtual(realfn, variant):
         return "mc:" + elems[1] + ":" + realfn
     return "virtual:" + variant + ":" + realfn
 
-def parse_recipe(bb_data, bbfile, appends, mc=''):
-    """
-    Parse a recipe
-    """
-
-    bb_data.setVar("__BBMULTICONFIG", mc)
-
-    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
-    bb.parse.cached_mtime_noerror(bbfile_loc)
-
-    if appends:
-        bb_data.setVar('__BBAPPEND', " ".join(appends))
-    bb_data = bb.parse.handle(bbfile, bb_data)
-    return bb_data
-
-
-class NoCache(object):
-
-    def __init__(self, databuilder):
-        self.databuilder = databuilder
-        self.data = databuilder.data
-
-    def loadDataFull(self, virtualfn, appends):
-        """
-        Return a complete set of data for fn.
-        To do this, we need to parse the file.
-        """
-        logger.debug("Parsing %s (full)" % virtualfn)
-        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
-        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
-        return bb_data[virtual]
-
-    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
-        """
-        Load and parse one .bb build file
-        Return the data and whether parsing resulted in the file being skipped
-        """
-
-        if virtonly:
-            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
-            bb_data = self.databuilder.mcdata[mc].createCopy()
-            bb_data.setVar("__ONLYFINALISE", virtual or "default")
-            datastores = parse_recipe(bb_data, bbfile, appends, mc)
-            return datastores
-
-        if mc is not None:
-            bb_data = self.databuilder.mcdata[mc].createCopy()
-            return parse_recipe(bb_data, bbfile, appends, mc)
 
-        bb_data = self.data.createCopy()
-        datastores = parse_recipe(bb_data, bbfile, appends)
-
-        for mc in self.databuilder.mcdata:
-            if not mc:
-                continue
-            bb_data = self.databuilder.mcdata[mc].createCopy()
-            newstores = parse_recipe(bb_data, bbfile, appends, mc)
-            for ns in newstores:
-                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
-
-        return datastores
-
-class Cache(NoCache):
+class Cache(object):
     """
     BitBake Cache implementation
     """
     def __init__(self, databuilder, mc, data_hash, caches_array):
-        super().__init__(databuilder)
-        data = databuilder.data
+        self.databuilder = databuilder
+        self.data = databuilder.data
 
         # Pass caches_array information into Cache Constructor
         # It will be used later for deciding whether we
@@ -356,7 +295,7 @@  class Cache(NoCache):
         self.mc = mc
         self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
         self.caches_array = caches_array
-        self.cachedir = data.getVar("CACHE")
+        self.cachedir = self.data.getVar("CACHE")
         self.clean = set()
         self.checked = set()
         self.depends_cache = {}
@@ -486,7 +425,7 @@  class Cache(NoCache):
         """Parse the specified filename, returning the recipe information"""
         self.logger.debug("Parsing %s", filename)
         infos = []
-        datastores = self.load_bbfile(filename, appends, mc=self.mc)
+        datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc)
         depends = []
         variants = []
         # Process the "real" fn last so we can store variants list
diff --git a/lib/bb/command.py b/lib/bb/command.py
index ec86885220..fa9fd054c2 100644
--- a/lib/bb/command.py
+++ b/lib/bb/command.py
@@ -567,8 +567,7 @@  class CommandsSync:
             envdata = bb.cache.parse_recipe(config_data, fn, appendfiles, mc)['']
         else:
             # Use the standard path
-            parser = bb.cache.NoCache(command.cooker.databuilder)
-            envdata = parser.loadDataFull(fn, appendfiles)
+            envdata = command.cooker.databuilder.parseRecipe(fn, appendfiles)
         idx = command.remotedatastores.store(envdata)
         return DataStoreConnectionHandle(idx)
     parseRecipeFile.readonly = True
diff --git a/lib/bb/cooker.py b/lib/bb/cooker.py
index 1da2f03197..1af29f217d 100644
--- a/lib/bb/cooker.py
+++ b/lib/bb/cooker.py
@@ -617,8 +617,7 @@  class BBCooker:
 
         if fn:
             try:
-                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
-                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
+                envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn))
             except Exception as e:
                 parselog.exception("Unable to read %s", fn)
                 raise
diff --git a/lib/bb/cookerdata.py b/lib/bb/cookerdata.py
index 8a354fed7c..c322ab2ffb 100644
--- a/lib/bb/cookerdata.py
+++ b/lib/bb/cookerdata.py
@@ -466,3 +466,54 @@  class CookerDataBuilder(object):
 
         return data
 
+    @staticmethod
+    def _parse_recipe(bb_data, bbfile, appends, mc=''):
+        bb_data.setVar("__BBMULTICONFIG", mc)
+
+        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
+        bb.parse.cached_mtime_noerror(bbfile_loc)
+
+        if appends:
+            bb_data.setVar('__BBAPPEND', " ".join(appends))
+        bb_data = bb.parse.handle(bbfile, bb_data)
+        return bb_data
+
+    def parseRecipeVariants(self, bbfile, appends, virtonly=False, mc=None):
+        """
+        Load and parse one .bb build file
+        Return the data and whether parsing resulted in the file being skipped
+        """
+
+        if virtonly:
+            (bbfile, virtual, mc) = bb.cache.virtualfn2realfn(bbfile)
+            bb_data = self.mcdata[mc].createCopy()
+            bb_data.setVar("__ONLYFINALISE", virtual or "default")
+            datastores = self._parse_recipe(bb_data, bbfile, appends, mc)
+            return datastores
+
+        if mc is not None:
+            bb_data = self.mcdata[mc].createCopy()
+            return self._parse_recipe(bb_data, bbfile, appends, mc)
+
+        bb_data = self.data.createCopy()
+        datastores = self._parse_recipe(bb_data, bbfile, appends)
+
+        for mc in self.mcdata:
+            if not mc:
+                continue
+            bb_data = self.mcdata[mc].createCopy()
+            newstores = self._parse_recipe(bb_data, bbfile, appends, mc)
+            for ns in newstores:
+                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
+
+        return datastores
+
+    def parseRecipe(self, virtualfn, appends):
+        """
+        Return a complete set of data for fn.
+        To do this, we need to parse the file.
+        """
+        logger.debug("Parsing %s (full)" % virtualfn)
+        (fn, virtual, mc) = bb.cache.virtualfn2realfn(virtualfn)
+        bb_data = self.parseRecipeVariants(virtualfn, appends, virtonly=True)
+        return bb_data[virtual]
diff --git a/lib/bb/runqueue.py b/lib/bb/runqueue.py
index 338d1fe36f..437f4a185c 100644
--- a/lib/bb/runqueue.py
+++ b/lib/bb/runqueue.py
@@ -1610,9 +1610,8 @@  class RunQueue:
             self.rqexe.finish()
 
     def rq_dump_sigfn(self, fn, options):
-        bb_cache = bb.cache.NoCache(self.cooker.databuilder)
         mc = bb.runqueue.mc_from_tid(fn)
-        the_data = bb_cache.loadDataFull(fn, self.cooker.collections[mc].get_file_appends(fn))
+        the_data = self.cooker.databuilder.parseRecipe(fn, self.cooker.collections[mc].get_file_appends(fn))
         siggen = bb.parse.siggen
         dataCaches = self.rqdata.dataCaches
         siggen.dump_sigfn(fn, dataCaches, options)
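
One detail carried over from loadDataFull(): parseRecipe() still accepts the
virtual filenames BitBake uses internally, since it resolves them through
bb.cache.virtualfn2realfn() before parsing. A hypothetical illustration (the
recipe path and multiconfig name below are made up, not from this patch):

    # A plain recipe, a class variant and a multiconfig variant all go
    # through the same entry point on the databuilder.
    d        = databuilder.parseRecipe("/path/to/foo.bb", appends)
    d_native = databuilder.parseRecipe("virtual:native:/path/to/foo.bb", appends)
    d_mc     = databuilder.parseRecipe("mc:someconfig:/path/to/foo.bb", appends)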