@@ -62,6 +62,11 @@ to all potential matches in the remote cache, identified by matching
architecture, recipe (`PN`), and task. This analysis has the same output
format as `bitbake-diffsigs`.
+### lint
+
+The `lint` command searches for common flaws that reduce the
+cacheability of a layer, such as absolute host paths that end up in
+task signatures.
+
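+A lint run might look like the following (the script path and cache
+target are placeholders; `--sources-dir` and `--build-dir` default to
+`/work/` and `/build/tmp/`):
+
+    <script> lint <target> --sources-dir /work/ --build-dir /build/tmp/
+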
## Backends
### Filesystem backend
@@ -119,6 +124,7 @@ import shutil
import sys
from tempfile import NamedTemporaryFile
import time
+import pickle
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'bitbake', 'lib'))
from bb.siggen import compare_sigfiles
@@ -556,8 +562,8 @@ def arguments():
parser = argparse.ArgumentParser()
parser.add_argument(
'command', type=str, metavar='command',
- choices='info upload clean analyze'.split(),
- help="command to execute (info, upload, clean, analyze)")
+ choices='info upload clean analyze lint'.split(),
+ help="command to execute (info, upload, clean, analyze, lint)")
parser.add_argument(
'source', type=str, nargs='?',
help="local sstate dir (for uploads or analysis)")
@@ -572,6 +578,15 @@ def arguments():
parser.add_argument(
'--max-sig-age', type=str, default=None,
help="clean: remove siginfo files older than MAX_SIG_AGE (defaults to MAX_AGE)")
+ parser.add_argument(
+ '--sources-dir', type=str, default='/work/',
+ help="lint: absolute path to sources folder (e.g. layerbase)")
+ parser.add_argument(
+ '--build-dir', type=str, default='/build/tmp/',
+ help="lint: absolute path to build folder")
+ parser.add_argument(
+ '--exit-code', type=int, default=None,
+ help="lint: return this instead of number of found issues")
args = parser.parse_args()
if args.command in 'upload analyze'.split() and args.source is None:
@@ -774,6 +789,60 @@ def sstate_analyze(source, target, **kwargs):
print('\n'.join(out))
+def sstate_lint(target, verbose, sources_dir, build_dir, exit_code, **kwargs):
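+    # variable names to skip in addition to the base/task whitelists recorded in the siginfo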
+ ADDITIONAL_IGNORED_VARNAMES = 'PP'.split()
+ if not target.exists():
+ print(f"ERROR: target {target} does not exist. Nothing to analyze.")
+ return -1
+
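+    # index all .siginfo files in the cache by their signature hash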
+ cache_sigs = {s.hash: s for s in target.list_all() if s.suffix.endswith('.siginfo')}
+
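+    # counters for absolute-path hits, split by where the path points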
+ hits_srcdir = 0
+ hits_builddir = 0
+ hits_other = 0
+ for sig in cache_sigs.values():
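+        # download each siginfo file and unpickle the signature data it contains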
+ sig_file = target.download(sig.path)
+ with open(sig_file, 'rb') as f:
+ sigdata_raw = pickle.Unpickler(f)
+ sigdata = sigdata_raw.load()
+
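+        # flag variable values that are absolute paths into the source or build tree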
+ pn_issues = []
+ for name, val in sigdata['varvals'].items():
+ if not name[0].isupper():
+ continue
+            if (sigdata['basewhitelist'] and name in sigdata['basewhitelist']) or \
+               (sigdata['taskwhitelist'] and name in sigdata['taskwhitelist']) or \
+               name in ADDITIONAL_IGNORED_VARNAMES:
+ continue
+            if not val or not val.startswith('/'):
+                continue
+ if val.startswith(build_dir):
+ pn_issues.append(f'\033[0;31m-> path in build-dir: {name} = "{val}"\033[0m')
+ hits_builddir += 1
+ elif val.startswith(sources_dir):
+ pn_issues.append(f'\033[0;31m-> path in sources-dir: {name} = "{val}"\033[0m')
+ hits_srcdir += 1
+ else:
+ hits_other += 1
+ if verbose:
+ pn_issues.append(f'\033[0;34m-> other absolute path: {name} = "{val}"\033[0m')
+        if pn_issues:
+ print(f"\033[1;33m==== issues found in {sig.arch}:{sig.pn}:{sig.task} ({sig.hash[:8]}) ====\033[0m")
+ print('\n'.join(pn_issues))
+ target.release(sig_file)
+
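+    # print a summary; unless --exit-code is given, the number of source/build dir hits is returned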
+ sum_hits = hits_srcdir + hits_builddir
+ if sum_hits == 0:
+        print(f'no cacheability issues found (scanned {len(cache_sigs)} signatures)')
+    else:
+        print(f'warning: found cacheability issues (scanned {len(cache_sigs)} signatures)')
+        print(f'-> absolute paths: sources-dir {hits_srcdir}, build-dir {hits_builddir}, other {hits_other}')
+    if exit_code is not None:
+        return exit_code
+    return sum_hits
+
+
def main():
args = arguments()