diff options
Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r-- | bitbake/lib/bb/cache.py | 44 |
1 file changed, 16 insertions, 28 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 936829b838..a1dde96425 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -43,7 +43,7 @@ except ImportError:
     logger.info("Importing cPickle failed. "
                 "Falling back to a very slow implementation.")
 
-__cache_version__ = "147"
+__cache_version__ = "148"
 
 def getCacheFile(path, filename, data_hash):
     return os.path.join(path, filename + "." + data_hash)
@@ -225,7 +225,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
         for package in self.packages_dynamic:
             cachedata.packages_dynamic[package].append(fn)
 
-        # Build hash of runtime depends and rececommends
+        # Build hash of runtime depends and recommends
         for package in self.packages + [self.pn]:
             cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
             cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]
@@ -261,7 +261,7 @@ class Cache(object):
 
     def __init__(self, data, data_hash, caches_array):
         # Pass caches_array information into Cache Constructor
-        # It will be used in later for deciding whether we
+        # It will be used later for deciding whether we
         # need extra cache file dump/load support
         self.caches_array = caches_array
         self.cachedir = data.getVar("CACHE", True)
@@ -529,8 +529,11 @@ class Cache(object):
         if hasattr(info_array[0], 'file_checksums'):
             for _, fl in info_array[0].file_checksums.items():
                 for f in fl.split():
-                    if not ('*' in f or os.path.exists(f)):
-                        logger.debug(2, "Cache: %s's file checksum list file %s was removed",
+                    if "*" in f:
+                        continue
+                    f, exist = f.split(":")
+                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
+                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                      fn, f)
                         self.remove(fn)
                         return False
@@ -620,10 +623,13 @@ class Cache(object):
     def mtime(cachefile):
         return bb.parse.cached_mtime_noerror(cachefile)
 
-    def add_info(self, filename, info_array, cacheData, parsed=None):
+    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
         if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
             cacheData.add_from_recipeinfo(filename, info_array)
 
+            if watcher:
+                watcher(info_array[0].file_depends)
+
         if not self.has_cache:
             return
 
@@ -764,16 +770,6 @@ class MultiProcessCache(object):
 
         self.cachedata = data
 
-    def internSet(self, items):
-        new = set()
-        for i in items:
-            new.add(intern(i))
-        return new
-
-    def compress_keys(self, data):
-        # Override in subclasses if desired
-        return
-
     def create_cachedata(self):
         data = [{}]
         return data
@@ -814,15 +810,7 @@ class MultiProcessCache(object):
 
         glf = bb.utils.lockfile(self.cachefile + ".lock")
 
-        try:
-            with open(self.cachefile, "rb") as f:
-                p = pickle.Unpickler(f)
-                data, version = p.load()
-        except (IOError, EOFError):
-            data, version = None, None
-
-        if version != self.__class__.CACHE_VERSION:
-            data = self.create_cachedata()
+        data = self.cachedata
 
         for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
             f = os.path.join(os.path.dirname(self.cachefile), f)
@@ -831,16 +819,16 @@ class MultiProcessCache(object):
                     p = pickle.Unpickler(fd)
                     extradata, version = p.load()
             except (IOError, EOFError):
-                extradata, version = self.create_cachedata(), None
+                os.unlink(f)
+                continue
 
             if version != self.__class__.CACHE_VERSION:
+                os.unlink(f)
                 continue
 
             self.merge_data(extradata, data)
             os.unlink(f)
 
-        self.compress_keys(data)
-
         with open(self.cachefile, "wb") as f:
             p = pickle.Pickler(f, -1)
             p.dump([data, self.__class__.CACHE_VERSION])