bitbake: Update to 1.50.5 release

Message ID 20220420143540.5159-1-henning.schild@siemens.com
State Superseded, archived
Series bitbake: Update to 1.50.5 release

Commit Message

Henning Schild April 20, 2022, 2:35 p.m. UTC
Update bitbake to the latest release in 1.50 branch. This release is
tagged to the commit ID aaa7f7af23d5f89fe4a5ed48c57ea3dfca07c79d
in the bitbake upstream.

Signed-off-by: Henning Schild <henning.schild@siemens.com>
---
 bitbake/lib/bb/cache.py                       |  3 +-
 bitbake/lib/bb/cooker.py                      | 30 ++++++++++++++--
 bitbake/lib/bb/data_smart.py                  |  4 +--
 bitbake/lib/bb/fetch2/__init__.py             |  4 +++
 bitbake/lib/bb/fetch2/perforce.py             |  2 +-
 bitbake/lib/bb/fetch2/wget.py                 |  2 +-
 bitbake/lib/bb/persist_data.py                |  5 +--
 bitbake/lib/bb/process.py                     |  2 +-
 bitbake/lib/bb/runqueue.py                    | 34 +++++++++---------
 bitbake/lib/bb/server/process.py              |  2 +-
 bitbake/lib/bb/tests/fetch.py                 | 35 ++++++++++---------
 bitbake/lib/bb/utils.py                       | 13 +++++--
 bitbake/lib/hashserv/server.py                |  4 +--
 bitbake/lib/toaster/tests/builds/buildtest.py |  2 +-
 14 files changed, 89 insertions(+), 53 deletions(-)

Comments

Henning Schild April 20, 2022, 3 p.m. UTC | #1
While that kind of works, the next trouble with python 3.10 will be in
wic.
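
The trouble is the same class of Python 3.10 breakage the patch fixes in bitbake's cache.py, data_smart.py and persist_data.py: the ABC aliases were removed from the plain collections module, so old imports stop working. A minimal, generic sketch of the failure mode and the portable spelling (the ReadOnlyConfig class is made up for illustration, it is not wic code):

    # On Python 3.10 this raises ImportError (the aliases were removed
    # after being deprecated since 3.3):
    #   from collections import Mapping
    # The portable import, which the bitbake hunks below switch to:
    from collections.abc import Mapping

    class ReadOnlyConfig(Mapping):
        """Illustration only: a read-only view over a plain dict."""
        def __init__(self, data):
            self._data = dict(data)
        def __getitem__(self, key):
            return self._data[key]
        def __iter__(self):
            return iter(self._data)
        def __len__(self):
            return len(self._data)

    cfg = ReadOnlyConfig({"DISTRO": "debian-bookworm"})
    print(cfg["DISTRO"])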

I propose to merge my 3.9 patch quickly and deal with the wic and
bitbake bump later. bookworm is important and should not be allowed to
fail in CI.

Henning

Am Wed, 20 Apr 2022 16:35:40 +0200
schrieb Henning Schild <henning.schild@siemens.com>:

> Update bitbake to the latest release in 1.50 branch. This release is
> tagged to the commit ID aaa7f7af23d5f89fe4a5ed48c57ea3dfca07c79d
> in the bitbake upstream.
> 
> Signed-off-by: Henning Schild <henning.schild@siemens.com>
> ---
>  bitbake/lib/bb/cache.py                       |  3 +-
>  bitbake/lib/bb/cooker.py                      | 30 ++++++++++++++--
>  bitbake/lib/bb/data_smart.py                  |  4 +--
>  bitbake/lib/bb/fetch2/__init__.py             |  4 +++
>  bitbake/lib/bb/fetch2/perforce.py             |  2 +-
>  bitbake/lib/bb/fetch2/wget.py                 |  2 +-
>  bitbake/lib/bb/persist_data.py                |  5 +--
>  bitbake/lib/bb/process.py                     |  2 +-
>  bitbake/lib/bb/runqueue.py                    | 34 +++++++++---------
>  bitbake/lib/bb/server/process.py              |  2 +-
>  bitbake/lib/bb/tests/fetch.py                 | 35 ++++++++++---------
>  bitbake/lib/bb/utils.py                       | 13 +++++--
>  bitbake/lib/hashserv/server.py                |  4 +--
>  bitbake/lib/toaster/tests/builds/buildtest.py |  2 +-
>  14 files changed, 89 insertions(+), 53 deletions(-)
> 
> diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
> index 27eb271798e8..5f9c0a779d75 100644
> --- a/bitbake/lib/bb/cache.py
> +++ b/bitbake/lib/bb/cache.py
> @@ -19,7 +19,8 @@
>  import os
>  import logging
>  import pickle
> -from collections import defaultdict, Mapping
> +from collections import defaultdict
> +from collections.abc import Mapping
>  import bb.utils
>  from bb import PrefixLoggerAdapter
>  import re
> diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
> index 89f1fad08310..c946800a8c62 100644
> --- a/bitbake/lib/bb/cooker.py
> +++ b/bitbake/lib/bb/cooker.py
> @@ -388,12 +388,22 @@ class BBCooker:
>              # Create a new hash server bound to a unix domain socket
>              if not self.hashserv:
>                  dbfile = (self.data.getVar("PERSISTENT_DIR") or
> self.data.getVar("CACHE")) + "/hashserv.db"
> +                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM")
> or None
> +                if upstream:
> +                    import socket
> +                    try:
> +                        sock =
> socket.create_connection(upstream.split(":"), 5)
> +                        sock.close()
> +                    except socket.error as e:
> +                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid,
> unable to connect hash equivalence server at '%s': %s" 
> +                                 % (upstream, repr(e)))
> +
>                  self.hashservaddr = "unix://%s/hashserve.sock" %
> self.data.getVar("TOPDIR") self.hashserv = hashserv.create_server(
>                      self.hashservaddr,
>                      dbfile,
>                      sync=False,
> -
> upstream=self.data.getVar("BB_HASHSERVE_UPSTREAM") or None,
> +                    upstream=upstream,
>                  )
>                  self.hashserv.process =
> multiprocessing.Process(target=self.hashserv.serve_forever)
> self.hashserv.process.start() @@ -805,7 +815,9 @@ class BBCooker:
>              for dep in rq.rqdata.runtaskentries[tid].depends:
>                  (depmc, depfn, _, deptaskfn) =
> bb.runqueue.split_tid_mcfn(dep) deppn =
> self.recipecaches[depmc].pkg_fn[deptaskfn]
> -                depend_tree["tdepends"][dotname].append("%s.%s" %
> (deppn, bb.runqueue.taskname_from_tid(dep)))
> +                if depmc:
> +                    depmc = "mc:" + depmc + ":"
> +                depend_tree["tdepends"][dotname].append("%s%s.%s" %
> (depmc, deppn, bb.runqueue.taskname_from_tid(dep))) if taskfn not in
> seen_fns: seen_fns.append(taskfn)
>                  packages = []
> @@ -2204,21 +2216,33 @@ class CookerParser(object):
>              yield not cached, mc, infos
>  
>      def parse_generator(self):
> -        while True:
> +        empty = False
> +        while self.processes or not empty:
> +            for process in self.processes.copy():
> +                if not process.is_alive():
> +                    process.join()
> +                    self.processes.remove(process)
> +
>              if self.parsed >= self.toparse:
>                  break
>  
>              try:
>                  result = self.result_queue.get(timeout=0.25)
>              except queue.Empty:
> +                empty = True
>                  pass
>              else:
> +                empty = False
>                  value = result[1]
>                  if isinstance(value, BaseException):
>                      raise value
>                  else:
>                      yield result
>  
> +        if not (self.parsed >= self.toparse):
> +            raise bb.parse.ParseError("Not all recipes parsed,
> parser thread killed/died? Exiting.", None) +
> +
>      def parse_next(self):
>          result = []
>          parsed = None
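
A note on the BB_HASHSERVE_UPSTREAM part of the cooker.py hunk above: the added code only probes whether the configured upstream hash equivalence server answers on its TCP port before handing the address to the local server, and warns if it does not. A rough, self-contained sketch of that probe (host and port are made-up values):

    import socket

    upstream = "hashserv.example.com:8686"   # made-up address for illustration
    host, port = upstream.split(":")
    try:
        # create_connection() takes a (host, port) pair and a timeout in seconds
        sock = socket.create_connection((host, int(port)), timeout=5)
        sock.close()
    except OSError as exc:                    # socket.error is an alias of OSError
        print("upstream %s not reachable: %r" % (upstream, exc))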
> diff --git a/bitbake/lib/bb/data_smart.py
> b/bitbake/lib/bb/data_smart.py index 8291ca65e309..65857a9c7941 100644
> --- a/bitbake/lib/bb/data_smart.py
> +++ b/bitbake/lib/bb/data_smart.py
> @@ -17,7 +17,7 @@ BitBake build tools.
>  # Based on functions from the base bb module, Copyright 2003 Holger
> Schurig 
>  import copy, re, sys, traceback
> -from collections import MutableMapping
> +from collections.abc import MutableMapping
>  import logging
>  import hashlib
>  import bb, bb.codeparser
> @@ -403,7 +403,7 @@ class DataSmart(MutableMapping):
>                      s =
> __expand_python_regexp__.sub(varparse.python_sub, s) except
> SyntaxError as e: # Likely unmatched brackets, just don't expand the
> expression
> -                    if e.msg != "EOL while scanning string literal":
> +                    if e.msg != "EOL while scanning string literal"
> and not e.msg.startswith("unterminated string literal"): raise
>                  if s == olds:
>                      break
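
The data_smart.py hunk is needed because Python 3.10 reworded this particular SyntaxError, so the old string comparison no longer matches. The difference is easy to see in isolation:

    try:
        compile("x = 'unterminated", "<test>", "exec")
    except SyntaxError as e:
        # Python <= 3.9:  "EOL while scanning string literal"
        # Python >= 3.10: "unterminated string literal (detected at line 1)"
        print(e.msg)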
> diff --git a/bitbake/lib/bb/fetch2/__init__.py
> b/bitbake/lib/bb/fetch2/__init__.py index dbf8b50e68a7..1005ec10c639
> 100644 --- a/bitbake/lib/bb/fetch2/__init__.py
> +++ b/bitbake/lib/bb/fetch2/__init__.py
> @@ -430,6 +430,7 @@ def uri_replace(ud, uri_find, uri_replace,
> replacements, d, mirrortarball=None): uri_replace_decoded =
> list(decodeurl(uri_replace)) logger.debug2("For url %s comparing %s
> to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
> result_decoded = ['', '', '', '', '', {}]
> +    # 0 - type, 1 - host, 2 - path, 3 - user,  4- pswd, 5 - params
>      for loc, i in enumerate(uri_find_decoded):
>          result_decoded[loc] = uri_decoded[loc]
>          regexp = i
> @@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace,
> replacements, d, mirrortarball=None): for l in replacements:
>                      uri_replace_decoded[loc][k] =
> uri_replace_decoded[loc][k].replace(l, replacements[l])
> result_decoded[loc][k] = uri_replace_decoded[loc][k]
> +        elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
> +            # User/password in the replacement is just a straight
> replacement
> +            result_decoded[loc] = uri_replace_decoded[loc]
>          elif (re.match(regexp, uri_decoded[loc])):
>              if not uri_replace_decoded[loc]:
>                  result_decoded[loc] = ""
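
To make the new user/password branch above easier to follow: a decoded URI in fetch2 is a six-element list (type, host, path, user, password, params), and for fields 3 and 4 the value from the mirror replacement simply wins. A plain-Python illustration of that rule, deliberately not using the real decodeurl()/uri_replace() helpers:

    # field layout, as per the comment added in the hunk:
    # 0 - type, 1 - host, 2 - path, 3 - user, 4 - password, 5 - params
    uri_decoded = ["git", "someserver.org", "/bitbake", "user1", "", {"branch": "master"}]
    uri_mirror  = ["git", "git.openembedded.org", "/bitbake", "user2", "", {"protocol": "http"}]

    result = list(uri_decoded)
    for loc in (3, 4):
        if uri_mirror[loc]:
            result[loc] = uri_mirror[loc]     # straight replacement, no regexp
    print(result[3])                          # -> user2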
> diff --git a/bitbake/lib/bb/fetch2/perforce.py
> b/bitbake/lib/bb/fetch2/perforce.py index e2a41a4a1287..3b6fa4b1ec9a
> 100644 --- a/bitbake/lib/bb/fetch2/perforce.py
> +++ b/bitbake/lib/bb/fetch2/perforce.py
> @@ -134,7 +134,7 @@ class Perforce(FetchMethod):
>  
>          ud.setup_revisions(d)
>  
> -        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost,
> cleanedpath, cleandedmodule, ud.revision))
> +        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost,
> cleanedpath, cleanedmodule, ud.revision)) 
>      def _buildp4command(self, ud, d, command, depot_filename=None):
>          """
> diff --git a/bitbake/lib/bb/fetch2/wget.py
> b/bitbake/lib/bb/fetch2/wget.py index 784df70c9f62..7fa2a87ffde5
> 100644 --- a/bitbake/lib/bb/fetch2/wget.py
> +++ b/bitbake/lib/bb/fetch2/wget.py
> @@ -322,7 +322,7 @@ class Wget(FetchMethod):
>              except (TypeError, ImportError, IOError,
> netrc.NetrcParseError): pass
>  
> -            with opener.open(r) as response:
> +            with opener.open(r, timeout=30) as response:
>                  pass
>          except urllib.error.URLError as e:
>              if try_again:
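
The wget.py change just puts an upper bound on the connectivity check so a stuck server cannot hang the fetcher; with plain urllib the same idea looks roughly like this (the URL is a placeholder):

    import urllib.request

    req = urllib.request.Request("https://example.com/", method="HEAD")
    # raises an OSError/timeout after 30 s instead of blocking indefinitely
    with urllib.request.urlopen(req, timeout=30) as response:
        print(response.status)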
> diff --git a/bitbake/lib/bb/persist_data.py
> b/bitbake/lib/bb/persist_data.py index c6a209fb3fc1..6f32d81afe80
> 100644 --- a/bitbake/lib/bb/persist_data.py
> +++ b/bitbake/lib/bb/persist_data.py
> @@ -12,6 +12,7 @@ currently, providing a key/value store accessed by
> 'domain'. #
>  
>  import collections
> +import collections.abc
>  import contextlib
>  import functools
>  import logging
> @@ -19,7 +20,7 @@ import os.path
>  import sqlite3
>  import sys
>  import warnings
> -from collections import Mapping
> +from collections.abc import Mapping
>  
>  sqlversion = sqlite3.sqlite_version_info
>  if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
> @@ -29,7 +30,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and
> sqlversion[1] < 3): logger = logging.getLogger("BitBake.PersistData")
>  
>  @functools.total_ordering
> -class SQLTable(collections.MutableMapping):
> +class SQLTable(collections.abc.MutableMapping):
>      class _Decorators(object):
>          @staticmethod
>          def retry(*, reconnect=True):
> diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
> index d5a1775fcec0..af5d804a1d59 100644
> --- a/bitbake/lib/bb/process.py
> +++ b/bitbake/lib/bb/process.py
> @@ -60,7 +60,7 @@ class Popen(subprocess.Popen):
>          "close_fds": True,
>          "preexec_fn": subprocess_setup,
>          "stdout": subprocess.PIPE,
> -        "stderr": subprocess.STDOUT,
> +        "stderr": subprocess.PIPE,
>          "stdin": subprocess.PIPE,
>          "shell": False,
>      }
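
The process.py change means callers now receive stderr on its own pipe instead of interleaved with stdout. With plain subprocess the difference looks like this (generic sketch, not bb.process code):

    import subprocess

    cmd = ["sh", "-c", "echo out; echo err >&2"]

    # old behaviour: stderr merged into stdout, merged.stderr is None
    merged = subprocess.run(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, text=True)
    print(repr(merged.stdout))

    # new behaviour: the two streams are captured separately
    split = subprocess.run(cmd, stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE, text=True)
    print(repr(split.stdout), repr(split.stderr))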
> diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
> index 10511a09dc1c..cd10da8b3a6f 100644
> --- a/bitbake/lib/bb/runqueue.py
> +++ b/bitbake/lib/bb/runqueue.py
> @@ -926,38 +926,36 @@ class RunQueueData:
>          #
>          # Once all active tasks are marked, prune the ones we don't
> need. 
> -        delcount = {}
> -        for tid in list(self.runtaskentries.keys()):
> -            if tid not in runq_build:
> -                delcount[tid] = self.runtaskentries[tid]
> -                del self.runtaskentries[tid]
> -
>          # Handle --runall
>          if self.cooker.configuration.runall:
>              # re-run the mark_active and then drop unused tasks from
> new list
> +            reduced_tasklist = set(self.runtaskentries.keys())
> +            for tid in list(self.runtaskentries.keys()):
> +                if tid not in runq_build:
> +                   reduced_tasklist.remove(tid)
>              runq_build = {}
>  
>              for task in self.cooker.configuration.runall:
>                  if not task.startswith("do_"):
>                      task = "do_{0}".format(task)
>                  runall_tids = set()
> -                for tid in list(self.runtaskentries):
> +                for tid in reduced_tasklist:
>                      wanttid = "{0}:{1}".format(fn_from_tid(tid),
> task)
> -                    if wanttid in delcount:
> -                        self.runtaskentries[wanttid] =
> delcount[wanttid] if wanttid in self.runtaskentries:
>                          runall_tids.add(wanttid)
>  
>                  for tid in list(runall_tids):
> -                    mark_active(tid,1)
> +                    mark_active(tid, 1)
>                      if self.cooker.configuration.force:
>                          invalidate_task(tid, False)
>  
> -            for tid in list(self.runtaskentries.keys()):
> -                if tid not in runq_build:
> -                    delcount[tid] = self.runtaskentries[tid]
> -                    del self.runtaskentries[tid]
> +        delcount = set()
> +        for tid in list(self.runtaskentries.keys()):
> +            if tid not in runq_build:
> +                delcount.add(tid)
> +                del self.runtaskentries[tid]
>  
> +        if self.cooker.configuration.runall:
>              if len(self.runtaskentries) == 0:
>                  bb.msg.fatal("RunQueue", "Could not find any tasks
> with the tasknames %s to run within the recipes of the taskgraphs of
> the targets %s" % (str(self.cooker.configuration.runall),
> str(self.targets))) @@ -971,16 +969,16 @@ class RunQueueData: for
> task in self.cooker.configuration.runonly: if not
> task.startswith("do_"): task = "do_{0}".format(task)
> -                runonly_tids = { k: v for k, v in
> self.runtaskentries.items() if taskname_from_tid(k) == task }
> +                runonly_tids = [k for k in
> self.runtaskentries.keys() if taskname_from_tid(k) == task] 
> -                for tid in list(runonly_tids):
> -                    mark_active(tid,1)
> +                for tid in runonly_tids:
> +                    mark_active(tid, 1)
>                      if self.cooker.configuration.force:
>                          invalidate_task(tid, False)
>  
>              for tid in list(self.runtaskentries.keys()):
>                  if tid not in runq_build:
> -                    delcount[tid] = self.runtaskentries[tid]
> +                    delcount.add(tid)
>                      del self.runtaskentries[tid]
>  
>              if len(self.runtaskentries) == 0:
> diff --git a/bitbake/lib/bb/server/process.py
> b/bitbake/lib/bb/server/process.py index 07bb785a1822..fcdce19717d2
> 100644 --- a/bitbake/lib/bb/server/process.py
> +++ b/bitbake/lib/bb/server/process.py
> @@ -659,7 +659,7 @@ class BBUIEventQueue:
>          self.reader = ConnectionReader(readfd)
>  
>          self.t = threading.Thread()
> -        self.t.setDaemon(True)
> +        self.t.daemon = True
>          self.t.run = self.startCallbackHandler
>          self.t.start()
>  
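
The server/process.py hunk is another Python 3.10 cleanup: Thread.setDaemon() is deprecated there in favour of assigning the daemon attribute. Minimal illustration:

    import threading

    def handler():
        print("callback handler running")

    t = threading.Thread(target=handler)
    t.daemon = True     # preferred; t.setDaemon(True) warns on Python >= 3.10
    t.start()
    t.join()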
> diff --git a/bitbake/lib/bb/tests/fetch.py
> b/bitbake/lib/bb/tests/fetch.py index f5d557e8c000..3b64584da0e9
> 100644 --- a/bitbake/lib/bb/tests/fetch.py
> +++ b/bitbake/lib/bb/tests/fetch.py
> @@ -431,6 +431,10 @@ class MirrorUriTest(FetcherTest):
>          ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
> "git://someserver.org/bitbake;branch=master",
> "git://git.openembedded.org/bitbake;protocol=http") :
> "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", 
> +
> ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
> "git://someserver.org/bitbake;branch=master",
> "git://user2@git.openembedded.org/bitbake;protocol=http")
> +            :
> "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
> + +
>          #Renaming files doesn't work
>          #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz",
> "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz",
> "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") :
> "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
> #("file://sstate-xyz.tgz", "file://.*/.*",
> "file:///somewhere/1234/sstate-cache") :
> "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", @@ -491,7
> +495,7 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
> super(GitDownloadDirectoryNamingTest, self).setUp() self.recipe_url =
> "git://git.openembedded.org/bitbake" self.recipe_dir =
> "git.openembedded.org.bitbake"
> -        self.mirror_url = "git://github.com/openembedded/bitbake.git"
> +        self.mirror_url =
> "git://github.com/openembedded/bitbake.git;protocol=https"
> self.mirror_dir = "github.com.openembedded.bitbake.git" 
>          self.d.setVar('SRCREV',
> '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') @@ -539,7 +543,7 @@ class
> TarballNamingTest(FetcherTest): super(TarballNamingTest, self).setUp()
>          self.recipe_url = "git://git.openembedded.org/bitbake"
>          self.recipe_tarball =
> "git2_git.openembedded.org.bitbake.tar.gz"
> -        self.mirror_url = "git://github.com/openembedded/bitbake.git"
> +        self.mirror_url =
> "git://github.com/openembedded/bitbake.git;protocol=https"
> self.mirror_tarball =
> "git2_github.com.openembedded.bitbake.git.tar.gz"
> self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') @@ -573,7 +577,7 @@
> class GitShallowTarballNamingTest(FetcherTest):
> super(GitShallowTarballNamingTest, self).setUp() self.recipe_url =
> "git://git.openembedded.org/bitbake" self.recipe_tarball =
> "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
> -        self.mirror_url = "git://github.com/openembedded/bitbake.git"
> +        self.mirror_url =
> "git://github.com/openembedded/bitbake.git;protocol=https"
> self.mirror_tarball =
> "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
> self.d.setVar('BB_GIT_SHALLOW', '1') @@ -985,7 +989,7 @@ class
> FetcherNetworkTest(FetcherTest): def
> test_git_submodule_dbus_broker(self): # The following external
> repositories have show failures in fetch and unpack operations # We
> want to avoid regressions!
> -        url =
> "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
> +        url =
> "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
> fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
>          # Previous cwd has been deleted
> @@ -1001,7 +1005,7 @@ class FetcherNetworkTest(FetcherTest):
>  
>      @skipIfNoNetwork()
>      def test_git_submodule_CLI11(self):
> -        url =
> "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
> +        url =
> "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
> fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
>          # Previous cwd has been deleted
> @@ -1016,12 +1020,12 @@ class FetcherNetworkTest(FetcherTest):
>      @skipIfNoNetwork()
>      def test_git_submodule_update_CLI11(self):
>          """ Prevent regression on update detection not finding
> missing submodule, or modules without needed commits """
> -        url =
> "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
> +        url =
> "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
> fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
>  
>          # CLI11 that pulls in a newer nlohmann-json
> -        url =
> "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
> +        url =
> "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
> fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
>          # Previous cwd has been deleted
> @@ -1035,7 +1039,7 @@ class FetcherNetworkTest(FetcherTest):
>  
>      @skipIfNoNetwork()
>      def test_git_submodule_aktualizr(self):
> -        url =
> "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
> +        url =
> "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
> fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
>          # Previous cwd has been deleted
> @@ -1055,7 +1059,7 @@ class FetcherNetworkTest(FetcherTest):
>          """ Prevent regression on deeply nested submodules not being
> checked out properly, even though they were fetched. """ 
>          # This repository also has submodules where the module
> (name), path and url do not align
> -        url =
> "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699"
> +        url =
> "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699"
> fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
>          # Previous cwd has been deleted
> @@ -1113,7 +1117,7 @@ class SVNTest(FetcherTest):
>  
>          bb.process.run("svn co %s svnfetch_co" % self.repo_url,
> cwd=self.tempdir) # Github will emulate SVN.  Use this to check if
> we're downloding...
> -        bb.process.run("svn propset svn:externals 'bitbake
> svn://vcs.pcre.org/pcre2/code' .",
> +        bb.process.run("svn propset svn:externals 'bitbake
> https://github.com/PhilipHazel/pcre2.git' .",
> cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
> bb.process.run("svn commit --non-interactive -m 'Add external'",
> cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) @@ -1231,7
> +1235,7 @@ class FetchLatestVersionTest(FetcherTest): 
>      test_git_uris = {
>          # version pattern "X.Y.Z"
> -        ("mx-1.0",
> "git://github.com/clutter-project/mx.git;branch=mx-1.4",
> "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
> +        ("mx-1.0",
> "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https",
> "9b1db6b8060bd00b121a692f942404a24ae2960f", "") : "1.99.4", # version
> pattern "vX.Y" # mirror of git.infradead.org since network issues
> interfered with testing @@ -1258,9 +1262,9 @@ class
> FetchLatestVersionTest(FetcherTest): : "0.4.3",
>          ("build-appliance-image", "git://git.yoctoproject.org/poky",
> "b37dd451a52622d5b570183a81583cc34c2ff555",
> "(?P<pver>(([0-9][\.|_]?)+[0-9]))") : "11.0.0",
> -        ("chkconfig-alternatives-native",
> "git://github.com/kergoth/chkconfig;branch=sysroot",
> "cd437ecbd8986c894442f8fce1e0061e20f04dee",
> "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
> +        ("chkconfig-alternatives-native",
> "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https",
> "cd437ecbd8986c894442f8fce1e0061e20f04dee",
> "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))") : "1.3.59",
> -        ("remake", "git://github.com/rocky/remake.git",
> "f05508e521987c8494c92d9c2871aec46307d51d",
> "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
> +        ("remake",
> "git://github.com/rocky/remake.git;protocol=https",
> "f05508e521987c8494c92d9c2871aec46307d51d",
> "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))") : "3.82+dbg0.9", } 
> @@ -1354,9 +1358,6 @@ class FetchCheckStatusTest(FetcherTest):
>                        "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
>                        "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
>                        "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
> -
> "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
> -
> "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
> -                      "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
>                        # GitHub releases are hosted on Amazon S3,
> which doesn't support HEAD
> "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
> ] @@ -2047,7 +2048,7 @@ class GitShallowTest(FetcherTest):
>  
>      @skipIfNoNetwork()
>      def test_bitbake(self):
> -        self.git('remote add --mirror=fetch origin
> git://github.com/openembedded/bitbake', cwd=self.srcdir)
> +        self.git('remote add --mirror=fetch origin
> https://github.com/openembedded/bitbake', cwd=self.srcdir)
> self.git('config core.bare true', cwd=self.srcdir) self.git('fetch',
> cwd=self.srcdir) 
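
For background on the protocol churn in these tests: GitHub switched off unauthenticated git:// protocol access in early 2022, so every git://github.com/... URL in the test data now carries ;protocol=https. The git:// prefix is kept only to select bitbake's git fetcher, while the actual transport becomes HTTPS.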
> diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
> index b282d09abfce..2a150fe9c73e 100644
> --- a/bitbake/lib/bb/utils.py
> +++ b/bitbake/lib/bb/utils.py
> @@ -16,7 +16,8 @@ import bb.msg
>  import multiprocessing
>  import fcntl
>  import importlib
> -from importlib import machinery
> +import importlib.machinery
> +import importlib.util
>  import itertools
>  import subprocess
>  import glob
> @@ -451,6 +452,10 @@ def lockfile(name, shared=False, retry=True,
> block=False): consider the possibility of sending a signal to the
> process to break out - at which point you want block=True rather than
> retry=True. """
> +    if len(name) > 255:
> +        root, ext = os.path.splitext(name)
> +        name = root[:255 - len(ext)] + ext
> +
>      dirname = os.path.dirname(name)
>      mkdirhier(dirname)
>  
> @@ -487,7 +492,7 @@ def lockfile(name, shared=False, retry=True,
> block=False): return lf
>              lf.close()
>          except OSError as e:
> -            if e.errno == errno.EACCES:
> +            if e.errno == errno.EACCES or e.errno ==
> errno.ENAMETOOLONG: logger.error("Unable to acquire lock '%s', %s",
>                               e.strerror, name)
>                  sys.exit(1)
> @@ -1616,7 +1621,9 @@ def load_plugins(logger, plugins, pluginpath):
>          logger.debug('Loading plugin %s' % name)
>          spec = importlib.machinery.PathFinder.find_spec(name,
> path=[pluginpath] ) if spec:
> -            return spec.loader.load_module()
> +            mod = importlib.util.module_from_spec(spec)
> +            spec.loader.exec_module(mod)
> +            return mod
>  
>      logger.debug('Loading plugins from %s...' % pluginpath)
>  
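
The utils.py changes fix two separate things: over-long lock file names get truncated to 255 characters (the usual filesystem NAME_MAX, with ENAMETOOLONG handled as a fatal error), and load_plugins() moves from the deprecated spec.loader.load_module() to the documented two-step pattern. A rough sketch of the latter (plugin name and path are placeholders):

    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        # find_spec() returns None when no module called 'name' exists in pluginpath
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec is None:
            return None
        mod = importlib.util.module_from_spec(spec)   # create the module object
        spec.loader.exec_module(mod)                  # execute its code in that namespace
        return mod

    # plugin = load_plugin("myplugin", "/path/to/plugins")   # hypothetical names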
> diff --git a/bitbake/lib/hashserv/server.py
> b/bitbake/lib/hashserv/server.py index a0dc0c170f2b..df0fa0a07937
> 100644 --- a/bitbake/lib/hashserv/server.py
> +++ b/bitbake/lib/hashserv/server.py
> @@ -521,7 +521,7 @@ class Server(object):
>  
>      def start_tcp_server(self, host, port):
>          self.server = self.loop.run_until_complete(
> -            asyncio.start_server(self.handle_client, host, port,
> loop=self.loop)
> +            asyncio.start_server(self.handle_client, host, port)
>          )
>  
>          for s in self.server.sockets:
> @@ -546,7 +546,7 @@ class Server(object):
>              # Work around path length limits in AF_UNIX
>              os.chdir(os.path.dirname(path))
>              self.server = self.loop.run_until_complete(
> -                asyncio.start_unix_server(self.handle_client,
> os.path.basename(path), loop=self.loop)
> +                asyncio.start_unix_server(self.handle_client,
> os.path.basename(path)) )
>          finally:
>              os.chdir(cwd)
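
Both hashserv hunks drop the loop= argument because asyncio.start_server() and start_unix_server() no longer accept it on Python 3.10; the coroutine simply uses the running event loop. A self-contained sketch of the new-style call (the port number is made up):

    import asyncio

    async def handle_client(reader, writer):
        writer.close()
        await writer.wait_closed()

    async def main():
        # no loop= argument: the running loop is picked up implicitly
        server = await asyncio.start_server(handle_client, "127.0.0.1", 8686)
        async with server:
            print("listening on", server.sockets[0].getsockname())
            # a real server would now await server.serve_forever()

    asyncio.run(main())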
> diff --git a/bitbake/lib/toaster/tests/builds/buildtest.py
> b/bitbake/lib/toaster/tests/builds/buildtest.py index
> 872bbd377510..13b51fb0d8e4 100644 ---
> a/bitbake/lib/toaster/tests/builds/buildtest.py +++
> b/bitbake/lib/toaster/tests/builds/buildtest.py @@ -119,7 +119,7 @@
> class BuildTest(unittest.TestCase): if
> os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
> ProjectVariable.objects.get_or_create( name="SSTATE_MIRRORS",
> -                value="file://.*
> http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH",
> +                value="file://.*
> http://sstate.yoctoproject.org/PATH;downloadfilename=PATH",
> project=project) 
>          ProjectTarget.objects.create(project=project,
Henning Schild April 20, 2022, 3:52 p.m. UTC | #2
Am Wed, 20 Apr 2022 17:00:59 +0200
schrieb Henning Schild <henning.schild@siemens.com>:

> While that kind of works, the next trouble with python 3.10 will be in
> wic.

I am already looking into the wic version bump, it seems to be rather
easy. There was some merging around our forked plugins with a new
feature "create-unified-kernel-image" which might not work. But that
might even be left like it is until someone uses that.

I will follow up with patches. If anyone wants early access and to join
the testing ...

https://github.com/henning-schild-work/isar/tree/henning/staging3

Henning

> I propose to merge my 3.9 patch quickly and deal with the wic and
> bitbake bump later. bookworm is important and should not be allowed to
> fail in CI.
> 
> Henning
> 
> Am Wed, 20 Apr 2022 16:35:40 +0200
> schrieb Henning Schild <henning.schild@siemens.com>:
> 
> > Update bitbake to the latest release in 1.50 branch. This release is
> > tagged to the commit ID aaa7f7af23d5f89fe4a5ed48c57ea3dfca07c79d
> > in the bitbake upstream.
> > 
> > Signed-off-by: Henning Schild <henning.schild@siemens.com>
> > ---
> >  bitbake/lib/bb/cache.py                       |  3 +-
> >  bitbake/lib/bb/cooker.py                      | 30 ++++++++++++++--
> >  bitbake/lib/bb/data_smart.py                  |  4 +--
> >  bitbake/lib/bb/fetch2/__init__.py             |  4 +++
> >  bitbake/lib/bb/fetch2/perforce.py             |  2 +-
> >  bitbake/lib/bb/fetch2/wget.py                 |  2 +-
> >  bitbake/lib/bb/persist_data.py                |  5 +--
> >  bitbake/lib/bb/process.py                     |  2 +-
> >  bitbake/lib/bb/runqueue.py                    | 34
> > +++++++++--------- bitbake/lib/bb/server/process.py              |
> > 2 +- bitbake/lib/bb/tests/fetch.py                 | 35
> > ++++++++++--------- bitbake/lib/bb/utils.py                       |
> > 13 +++++-- bitbake/lib/hashserv/server.py                |  4 +--
> >  bitbake/lib/toaster/tests/builds/buildtest.py |  2 +-
> >  14 files changed, 89 insertions(+), 53 deletions(-)
> > 
> > diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
> > index 27eb271798e8..5f9c0a779d75 100644
> > --- a/bitbake/lib/bb/cache.py
> > +++ b/bitbake/lib/bb/cache.py
> > @@ -19,7 +19,8 @@
> >  import os
> >  import logging
> >  import pickle
> > -from collections import defaultdict, Mapping
> > +from collections import defaultdict
> > +from collections.abc import Mapping
> >  import bb.utils
> >  from bb import PrefixLoggerAdapter
> >  import re
> > diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
> > index 89f1fad08310..c946800a8c62 100644
> > --- a/bitbake/lib/bb/cooker.py
> > +++ b/bitbake/lib/bb/cooker.py
> > @@ -388,12 +388,22 @@ class BBCooker:
> >              # Create a new hash server bound to a unix domain
> > socket if not self.hashserv:
> >                  dbfile = (self.data.getVar("PERSISTENT_DIR") or
> > self.data.getVar("CACHE")) + "/hashserv.db"
> > +                upstream =
> > self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
> > +                if upstream:
> > +                    import socket
> > +                    try:
> > +                        sock =
> > socket.create_connection(upstream.split(":"), 5)
> > +                        sock.close()
> > +                    except socket.error as e:
> > +                        bb.warn("BB_HASHSERVE_UPSTREAM is not
> > valid, unable to connect hash equivalence server at '%s': %s" 
> > +                                 % (upstream, repr(e)))
> > +
> >                  self.hashservaddr = "unix://%s/hashserve.sock" %
> > self.data.getVar("TOPDIR") self.hashserv = hashserv.create_server(
> >                      self.hashservaddr,
> >                      dbfile,
> >                      sync=False,
> > -
> > upstream=self.data.getVar("BB_HASHSERVE_UPSTREAM") or None,
> > +                    upstream=upstream,
> >                  )
> >                  self.hashserv.process =
> > multiprocessing.Process(target=self.hashserv.serve_forever)
> > self.hashserv.process.start() @@ -805,7 +815,9 @@ class BBCooker:
> >              for dep in rq.rqdata.runtaskentries[tid].depends:
> >                  (depmc, depfn, _, deptaskfn) =
> > bb.runqueue.split_tid_mcfn(dep) deppn =
> > self.recipecaches[depmc].pkg_fn[deptaskfn]
> > -                depend_tree["tdepends"][dotname].append("%s.%s" %
> > (deppn, bb.runqueue.taskname_from_tid(dep)))
> > +                if depmc:
> > +                    depmc = "mc:" + depmc + ":"
> > +                depend_tree["tdepends"][dotname].append("%s%s.%s" %
> > (depmc, deppn, bb.runqueue.taskname_from_tid(dep))) if taskfn not in
> > seen_fns: seen_fns.append(taskfn)
> >                  packages = []
> > @@ -2204,21 +2216,33 @@ class CookerParser(object):
> >              yield not cached, mc, infos
> >  
> >      def parse_generator(self):
> > -        while True:
> > +        empty = False
> > +        while self.processes or not empty:
> > +            for process in self.processes.copy():
> > +                if not process.is_alive():
> > +                    process.join()
> > +                    self.processes.remove(process)
> > +
> >              if self.parsed >= self.toparse:
> >                  break
> >  
> >              try:
> >                  result = self.result_queue.get(timeout=0.25)
> >              except queue.Empty:
> > +                empty = True
> >                  pass
> >              else:
> > +                empty = False
> >                  value = result[1]
> >                  if isinstance(value, BaseException):
> >                      raise value
> >                  else:
> >                      yield result
> >  
> > +        if not (self.parsed >= self.toparse):
> > +            raise bb.parse.ParseError("Not all recipes parsed,
> > parser thread killed/died? Exiting.", None) +
> > +
> >      def parse_next(self):
> >          result = []
> >          parsed = None
> > diff --git a/bitbake/lib/bb/data_smart.py
> > b/bitbake/lib/bb/data_smart.py index 8291ca65e309..65857a9c7941
> > 100644 --- a/bitbake/lib/bb/data_smart.py
> > +++ b/bitbake/lib/bb/data_smart.py
> > @@ -17,7 +17,7 @@ BitBake build tools.
> >  # Based on functions from the base bb module, Copyright 2003 Holger
> > Schurig 
> >  import copy, re, sys, traceback
> > -from collections import MutableMapping
> > +from collections.abc import MutableMapping
> >  import logging
> >  import hashlib
> >  import bb, bb.codeparser
> > @@ -403,7 +403,7 @@ class DataSmart(MutableMapping):
> >                      s =
> > __expand_python_regexp__.sub(varparse.python_sub, s) except
> > SyntaxError as e: # Likely unmatched brackets, just don't expand the
> > expression
> > -                    if e.msg != "EOL while scanning string
> > literal":
> > +                    if e.msg != "EOL while scanning string literal"
> > and not e.msg.startswith("unterminated string literal"): raise
> >                  if s == olds:
> >                      break
> > diff --git a/bitbake/lib/bb/fetch2/__init__.py
> > b/bitbake/lib/bb/fetch2/__init__.py index dbf8b50e68a7..1005ec10c639
> > 100644 --- a/bitbake/lib/bb/fetch2/__init__.py
> > +++ b/bitbake/lib/bb/fetch2/__init__.py
> > @@ -430,6 +430,7 @@ def uri_replace(ud, uri_find, uri_replace,
> > replacements, d, mirrortarball=None): uri_replace_decoded =
> > list(decodeurl(uri_replace)) logger.debug2("For url %s comparing %s
> > to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
> > result_decoded = ['', '', '', '', '', {}]
> > +    # 0 - type, 1 - host, 2 - path, 3 - user,  4- pswd, 5 - params
> >      for loc, i in enumerate(uri_find_decoded):
> >          result_decoded[loc] = uri_decoded[loc]
> >          regexp = i
> > @@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace,
> > replacements, d, mirrortarball=None): for l in replacements:
> >                      uri_replace_decoded[loc][k] =
> > uri_replace_decoded[loc][k].replace(l, replacements[l])
> > result_decoded[loc][k] = uri_replace_decoded[loc][k]
> > +        elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
> > +            # User/password in the replacement is just a straight
> > replacement
> > +            result_decoded[loc] = uri_replace_decoded[loc]
> >          elif (re.match(regexp, uri_decoded[loc])):
> >              if not uri_replace_decoded[loc]:
> >                  result_decoded[loc] = ""
> > diff --git a/bitbake/lib/bb/fetch2/perforce.py
> > b/bitbake/lib/bb/fetch2/perforce.py index e2a41a4a1287..3b6fa4b1ec9a
> > 100644 --- a/bitbake/lib/bb/fetch2/perforce.py
> > +++ b/bitbake/lib/bb/fetch2/perforce.py
> > @@ -134,7 +134,7 @@ class Perforce(FetchMethod):
> >  
> >          ud.setup_revisions(d)
> >  
> > -        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' %
> > (cleanedhost, cleanedpath, cleandedmodule, ud.revision))
> > +        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' %
> > (cleanedhost, cleanedpath, cleanedmodule, ud.revision)) 
> >      def _buildp4command(self, ud, d, command, depot_filename=None):
> >          """
> > diff --git a/bitbake/lib/bb/fetch2/wget.py
> > b/bitbake/lib/bb/fetch2/wget.py index 784df70c9f62..7fa2a87ffde5
> > 100644 --- a/bitbake/lib/bb/fetch2/wget.py
> > +++ b/bitbake/lib/bb/fetch2/wget.py
> > @@ -322,7 +322,7 @@ class Wget(FetchMethod):
> >              except (TypeError, ImportError, IOError,
> > netrc.NetrcParseError): pass
> >  
> > -            with opener.open(r) as response:
> > +            with opener.open(r, timeout=30) as response:
> >                  pass
> >          except urllib.error.URLError as e:
> >              if try_again:
> > diff --git a/bitbake/lib/bb/persist_data.py
> > b/bitbake/lib/bb/persist_data.py index c6a209fb3fc1..6f32d81afe80
> > 100644 --- a/bitbake/lib/bb/persist_data.py
> > +++ b/bitbake/lib/bb/persist_data.py
> > @@ -12,6 +12,7 @@ currently, providing a key/value store accessed by
> > 'domain'. #
> >  
> >  import collections
> > +import collections.abc
> >  import contextlib
> >  import functools
> >  import logging
> > @@ -19,7 +20,7 @@ import os.path
> >  import sqlite3
> >  import sys
> >  import warnings
> > -from collections import Mapping
> > +from collections.abc import Mapping
> >  
> >  sqlversion = sqlite3.sqlite_version_info
> >  if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
> > @@ -29,7 +30,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and
> > sqlversion[1] < 3): logger =
> > logging.getLogger("BitBake.PersistData") 
> >  @functools.total_ordering
> > -class SQLTable(collections.MutableMapping):
> > +class SQLTable(collections.abc.MutableMapping):
> >      class _Decorators(object):
> >          @staticmethod
> >          def retry(*, reconnect=True):
> > diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
> > index d5a1775fcec0..af5d804a1d59 100644
> > --- a/bitbake/lib/bb/process.py
> > +++ b/bitbake/lib/bb/process.py
> > @@ -60,7 +60,7 @@ class Popen(subprocess.Popen):
> >          "close_fds": True,
> >          "preexec_fn": subprocess_setup,
> >          "stdout": subprocess.PIPE,
> > -        "stderr": subprocess.STDOUT,
> > +        "stderr": subprocess.PIPE,
> >          "stdin": subprocess.PIPE,
> >          "shell": False,
> >      }
> > diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
> > index 10511a09dc1c..cd10da8b3a6f 100644
> > --- a/bitbake/lib/bb/runqueue.py
> > +++ b/bitbake/lib/bb/runqueue.py
> > @@ -926,38 +926,36 @@ class RunQueueData:
> >          #
> >          # Once all active tasks are marked, prune the ones we don't
> > need. 
> > -        delcount = {}
> > -        for tid in list(self.runtaskentries.keys()):
> > -            if tid not in runq_build:
> > -                delcount[tid] = self.runtaskentries[tid]
> > -                del self.runtaskentries[tid]
> > -
> >          # Handle --runall
> >          if self.cooker.configuration.runall:
> >              # re-run the mark_active and then drop unused tasks
> > from new list
> > +            reduced_tasklist = set(self.runtaskentries.keys())
> > +            for tid in list(self.runtaskentries.keys()):
> > +                if tid not in runq_build:
> > +                   reduced_tasklist.remove(tid)
> >              runq_build = {}
> >  
> >              for task in self.cooker.configuration.runall:
> >                  if not task.startswith("do_"):
> >                      task = "do_{0}".format(task)
> >                  runall_tids = set()
> > -                for tid in list(self.runtaskentries):
> > +                for tid in reduced_tasklist:
> >                      wanttid = "{0}:{1}".format(fn_from_tid(tid),
> > task)
> > -                    if wanttid in delcount:
> > -                        self.runtaskentries[wanttid] =
> > delcount[wanttid] if wanttid in self.runtaskentries:
> >                          runall_tids.add(wanttid)
> >  
> >                  for tid in list(runall_tids):
> > -                    mark_active(tid,1)
> > +                    mark_active(tid, 1)
> >                      if self.cooker.configuration.force:
> >                          invalidate_task(tid, False)
> >  
> > -            for tid in list(self.runtaskentries.keys()):
> > -                if tid not in runq_build:
> > -                    delcount[tid] = self.runtaskentries[tid]
> > -                    del self.runtaskentries[tid]
> > +        delcount = set()
> > +        for tid in list(self.runtaskentries.keys()):
> > +            if tid not in runq_build:
> > +                delcount.add(tid)
> > +                del self.runtaskentries[tid]
> >  
> > +        if self.cooker.configuration.runall:
> >              if len(self.runtaskentries) == 0:
> >                  bb.msg.fatal("RunQueue", "Could not find any tasks
> > with the tasknames %s to run within the recipes of the taskgraphs of
> > the targets %s" % (str(self.cooker.configuration.runall),
> > str(self.targets))) @@ -971,16 +969,16 @@ class RunQueueData: for
> > task in self.cooker.configuration.runonly: if not
> > task.startswith("do_"): task = "do_{0}".format(task)
> > -                runonly_tids = { k: v for k, v in
> > self.runtaskentries.items() if taskname_from_tid(k) == task }
> > +                runonly_tids = [k for k in
> > self.runtaskentries.keys() if taskname_from_tid(k) == task] 
> > -                for tid in list(runonly_tids):
> > -                    mark_active(tid,1)
> > +                for tid in runonly_tids:
> > +                    mark_active(tid, 1)
> >                      if self.cooker.configuration.force:
> >                          invalidate_task(tid, False)
> >  
> >              for tid in list(self.runtaskentries.keys()):
> >                  if tid not in runq_build:
> > -                    delcount[tid] = self.runtaskentries[tid]
> > +                    delcount.add(tid)
> >                      del self.runtaskentries[tid]
> >  
> >              if len(self.runtaskentries) == 0:
> > diff --git a/bitbake/lib/bb/server/process.py
> > b/bitbake/lib/bb/server/process.py index 07bb785a1822..fcdce19717d2
> > 100644 --- a/bitbake/lib/bb/server/process.py
> > +++ b/bitbake/lib/bb/server/process.py
> > @@ -659,7 +659,7 @@ class BBUIEventQueue:
> >          self.reader = ConnectionReader(readfd)
> >  
> >          self.t = threading.Thread()
> > -        self.t.setDaemon(True)
> > +        self.t.daemon = True
> >          self.t.run = self.startCallbackHandler
> >          self.t.start()
> >  
> > diff --git a/bitbake/lib/bb/tests/fetch.py
> > b/bitbake/lib/bb/tests/fetch.py index f5d557e8c000..3b64584da0e9
> > 100644 --- a/bitbake/lib/bb/tests/fetch.py
> > +++ b/bitbake/lib/bb/tests/fetch.py
> > @@ -431,6 +431,10 @@ class MirrorUriTest(FetcherTest):
> >          ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
> > "git://someserver.org/bitbake;branch=master",
> > "git://git.openembedded.org/bitbake;protocol=http") :
> > "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", 
> > +
> > ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
> > "git://someserver.org/bitbake;branch=master",
> > "git://user2@git.openembedded.org/bitbake;protocol=http")
> > +            :
> > "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
> > + +
> >          #Renaming files doesn't work
> >          #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz",
> > "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz",
> > "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") :
> > "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
> > #("file://sstate-xyz.tgz", "file://.*/.*",
> > "file:///somewhere/1234/sstate-cache") :
> > "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", @@ -491,7
> > +495,7 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
> > super(GitDownloadDirectoryNamingTest, self).setUp() self.recipe_url
> > = "git://git.openembedded.org/bitbake" self.recipe_dir =
> > "git.openembedded.org.bitbake"
> > -        self.mirror_url =
> > "git://github.com/openembedded/bitbake.git"
> > +        self.mirror_url =
> > "git://github.com/openembedded/bitbake.git;protocol=https"
> > self.mirror_dir = "github.com.openembedded.bitbake.git" 
> >          self.d.setVar('SRCREV',
> > '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') @@ -539,7 +543,7 @@
> > class TarballNamingTest(FetcherTest): super(TarballNamingTest,
> > self).setUp() self.recipe_url = "git://git.openembedded.org/bitbake"
> >          self.recipe_tarball =
> > "git2_git.openembedded.org.bitbake.tar.gz"
> > -        self.mirror_url =
> > "git://github.com/openembedded/bitbake.git"
> > +        self.mirror_url =
> > "git://github.com/openembedded/bitbake.git;protocol=https"
> > self.mirror_tarball =
> > "git2_github.com.openembedded.bitbake.git.tar.gz"
> > self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') @@ -573,7 +577,7
> > @@ class GitShallowTarballNamingTest(FetcherTest):
> > super(GitShallowTarballNamingTest, self).setUp() self.recipe_url =
> > "git://git.openembedded.org/bitbake" self.recipe_tarball =
> > "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
> > -        self.mirror_url =
> > "git://github.com/openembedded/bitbake.git"
> > +        self.mirror_url =
> > "git://github.com/openembedded/bitbake.git;protocol=https"
> > self.mirror_tarball =
> > "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
> > self.d.setVar('BB_GIT_SHALLOW', '1') @@ -985,7 +989,7 @@ class
> > FetcherNetworkTest(FetcherTest): def
> > test_git_submodule_dbus_broker(self): # The following external
> > repositories have show failures in fetch and unpack operations # We
> > want to avoid regressions!
> > -        url =
> > "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
> > +        url =
> > "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
> > fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
> >          # Previous cwd has been deleted
> > @@ -1001,7 +1005,7 @@ class FetcherNetworkTest(FetcherTest):
> >  
> >      @skipIfNoNetwork()
> >      def test_git_submodule_CLI11(self):
> > -        url =
> > "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
> > +        url =
> > "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
> > fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
> >          # Previous cwd has been deleted
> > @@ -1016,12 +1020,12 @@ class FetcherNetworkTest(FetcherTest):
> >      @skipIfNoNetwork()
> >      def test_git_submodule_update_CLI11(self):
> >          """ Prevent regression on update detection not finding
> > missing submodule, or modules without needed commits """
> > -        url =
> > "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
> > +        url =
> > "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
> > fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
> >  
> >          # CLI11 that pulls in a newer nlohmann-json
> > -        url =
> > "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
> > +        url =
> > "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
> > fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
> >          # Previous cwd has been deleted
> > @@ -1035,7 +1039,7 @@ class FetcherNetworkTest(FetcherTest):
> >  
> >      @skipIfNoNetwork()
> >      def test_git_submodule_aktualizr(self):
> > -        url =
> > "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
> > +        url =
> > "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
> > fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
> >          # Previous cwd has been deleted
> > @@ -1055,7 +1059,7 @@ class FetcherNetworkTest(FetcherTest):
> >          """ Prevent regression on deeply nested submodules not
> > being checked out properly, even though they were fetched. """ 
> >          # This repository also has submodules where the module
> > (name), path and url do not align
> > -        url =
> > "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699"
> > +        url =
> > "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699"
> > fetcher = bb.fetch.Fetch([url], self.d) fetcher.download()
> >          # Previous cwd has been deleted
> > @@ -1113,7 +1117,7 @@ class SVNTest(FetcherTest):
> >  
> >          bb.process.run("svn co %s svnfetch_co" % self.repo_url,
> > cwd=self.tempdir) # Github will emulate SVN.  Use this to check if
> > we're downloding...
> > -        bb.process.run("svn propset svn:externals 'bitbake
> > svn://vcs.pcre.org/pcre2/code' .",
> > +        bb.process.run("svn propset svn:externals 'bitbake
> > https://github.com/PhilipHazel/pcre2.git' .",
> > cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
> > bb.process.run("svn commit --non-interactive -m 'Add external'",
> > cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) @@ -1231,7
> > +1235,7 @@ class FetchLatestVersionTest(FetcherTest): 
> >      test_git_uris = {
> >          # version pattern "X.Y.Z"
> > -        ("mx-1.0",
> > "git://github.com/clutter-project/mx.git;branch=mx-1.4",
> > "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
> > +        ("mx-1.0",
> > "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https",
> > "9b1db6b8060bd00b121a692f942404a24ae2960f", "") : "1.99.4", #
> > version pattern "vX.Y" # mirror of git.infradead.org since network
> > issues interfered with testing @@ -1258,9 +1262,9 @@ class
> > FetchLatestVersionTest(FetcherTest): : "0.4.3",
> >          ("build-appliance-image",
> > "git://git.yoctoproject.org/poky",
> > "b37dd451a52622d5b570183a81583cc34c2ff555",
> > "(?P<pver>(([0-9][\.|_]?)+[0-9]))") : "11.0.0",
> > -        ("chkconfig-alternatives-native",
> > "git://github.com/kergoth/chkconfig;branch=sysroot",
> > "cd437ecbd8986c894442f8fce1e0061e20f04dee",
> > "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
> > +        ("chkconfig-alternatives-native",
> > "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https",
> > "cd437ecbd8986c894442f8fce1e0061e20f04dee",
> > "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))") : "1.3.59",
> > -        ("remake", "git://github.com/rocky/remake.git",
> > "f05508e521987c8494c92d9c2871aec46307d51d",
> > "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
> > +        ("remake",
> > "git://github.com/rocky/remake.git;protocol=https",
> > "f05508e521987c8494c92d9c2871aec46307d51d",
> > "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))") : "3.82+dbg0.9",
> > } @@ -1354,9 +1358,6 @@ class FetchCheckStatusTest(FetcherTest):
> >                        "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
> >                        "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
> >                        "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
> > -
> > "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
> > -
> > "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
> > -                      "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
> >                        # GitHub releases are hosted on Amazon S3,
> > which doesn't support HEAD
> > "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
> > ] @@ -2047,7 +2048,7 @@ class GitShallowTest(FetcherTest):
> >  
> >      @skipIfNoNetwork()
> >      def test_bitbake(self):
> > -        self.git('remote add --mirror=fetch origin
> > git://github.com/openembedded/bitbake', cwd=self.srcdir)
> > +        self.git('remote add --mirror=fetch origin
> > https://github.com/openembedded/bitbake', cwd=self.srcdir)
> > self.git('config core.bare true', cwd=self.srcdir) self.git('fetch',
> > cwd=self.srcdir) 
> > diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
> > index b282d09abfce..2a150fe9c73e 100644
> > --- a/bitbake/lib/bb/utils.py
> > +++ b/bitbake/lib/bb/utils.py
> > @@ -16,7 +16,8 @@ import bb.msg
> >  import multiprocessing
> >  import fcntl
> >  import importlib
> > -from importlib import machinery
> > +import importlib.machinery
> > +import importlib.util
> >  import itertools
> >  import subprocess
> >  import glob
> > @@ -451,6 +452,10 @@ def lockfile(name, shared=False, retry=True,
> > block=False): consider the possibility of sending a signal to the
> > process to break out - at which point you want block=True rather
> > than retry=True. """
> > +    if len(name) > 255:
> > +        root, ext = os.path.splitext(name)
> > +        name = root[:255 - len(ext)] + ext
> > +
> >      dirname = os.path.dirname(name)
> >      mkdirhier(dirname)
> >  
> > @@ -487,7 +492,7 @@ def lockfile(name, shared=False, retry=True,
> > block=False): return lf
> >              lf.close()
> >          except OSError as e:
> > -            if e.errno == errno.EACCES:
> > +            if e.errno == errno.EACCES or e.errno ==
> > errno.ENAMETOOLONG: logger.error("Unable to acquire lock '%s', %s",
> >                               e.strerror, name)
> >                  sys.exit(1)
> > @@ -1616,7 +1621,9 @@ def load_plugins(logger, plugins, pluginpath):
> >          logger.debug('Loading plugin %s' % name)
> >          spec = importlib.machinery.PathFinder.find_spec(name,
> > path=[pluginpath] ) if spec:
> > -            return spec.loader.load_module()
> > +            mod = importlib.util.module_from_spec(spec)
> > +            spec.loader.exec_module(mod)
> > +            return mod
> >  
> >      logger.debug('Loading plugins from %s...' % pluginpath)
> >  
> > diff --git a/bitbake/lib/hashserv/server.py
> > b/bitbake/lib/hashserv/server.py index a0dc0c170f2b..df0fa0a07937
> > 100644 --- a/bitbake/lib/hashserv/server.py
> > +++ b/bitbake/lib/hashserv/server.py
> > @@ -521,7 +521,7 @@ class Server(object):
> >  
> >      def start_tcp_server(self, host, port):
> >          self.server = self.loop.run_until_complete(
> > -            asyncio.start_server(self.handle_client, host, port,
> > loop=self.loop)
> > +            asyncio.start_server(self.handle_client, host, port)
> >          )
> >  
> >          for s in self.server.sockets:
> > @@ -546,7 +546,7 @@ class Server(object):
> >              # Work around path length limits in AF_UNIX
> >              os.chdir(os.path.dirname(path))
> >              self.server = self.loop.run_until_complete(
> > -                asyncio.start_unix_server(self.handle_client,
> > os.path.basename(path), loop=self.loop)
> > +                asyncio.start_unix_server(self.handle_client,
> > os.path.basename(path)) )
> >          finally:
> >              os.chdir(cwd)
> > diff --git a/bitbake/lib/toaster/tests/builds/buildtest.py
> > b/bitbake/lib/toaster/tests/builds/buildtest.py index
> > 872bbd377510..13b51fb0d8e4 100644 ---
> > a/bitbake/lib/toaster/tests/builds/buildtest.py +++
> > b/bitbake/lib/toaster/tests/builds/buildtest.py @@ -119,7 +119,7 @@
> > class BuildTest(unittest.TestCase): if
> > os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
> > ProjectVariable.objects.get_or_create( name="SSTATE_MIRRORS",
> > -                value="file://.*
> > http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH",
> > +                value="file://.*
> > http://sstate.yoctoproject.org/PATH;downloadfilename=PATH",
> > project=project) 
> >          ProjectTarget.objects.create(project=project,  
>
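
For context, the load_plugins() change quoted above replaces the
deprecated spec.loader.load_module() with the importlib pattern that
current Python expects. A minimal, self-contained sketch of that
pattern (the plugin name and search path below are hypothetical):

    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        # Locate the module on the given search path without importing it yet.
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec is None:
            return None
        # module_from_spec() + exec_module() is the supported replacement
        # for the deprecated spec.loader.load_module() call.
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod

    # Hypothetical usage: load "myplugin.py" from "/path/to/plugins".
    # plugin = load_plugin("myplugin", "/path/to/plugins")
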
Jan Kiszka April 20, 2022, 8:24 a.m. UTC | #3
On 20.04.22 17:52, Henning Schild wrote:
> Am Wed, 20 Apr 2022 17:00:59 +0200
> schrieb Henning Schild <henning.schild@siemens.com>:
> 
>> While that kind of works, the next trouble with python 3.10 will be in
>> wic.
> 
> I am already looking into the wic version bump, it seems to be rather
> easy. There was some merging around our forked plugins with a new
> feature "create-unified-kernel-image" which might not work. But that
> might even be left like it is until someone uses that.

Where we are currently consuming unified kernels, this tends to happen
in an EFI Boot Guard context with the related wic plugin that already
supports this. In addition, the unified kernel stub that systemd
provides showed some limitations on ARM platforms with DTB replacement
needs. That's why we are about to deploy our own stub (via EBG) with
our own integration process.

Long story short: I don't expect an urgent need for OE's/systemd's
unified-kernel-image.

Jan
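
As an aside, the "create-unified-kernel-image" feature mentioned above
is, in upstream OE's bootimg-efi wic plugin, switched on through a
sourceparams entry in the .wks file. The fragment below is only an
illustration and assumes the upstream parameter names; isar's forked
plugins may not accept the same syntax:

    part /boot --source bootimg-efi --sourceparams="loader=systemd-boot,create-unified-kernel-image=true,initrd=microcode.cpio" --label boot --active --align 1024 --use-uuid
    part / --source rootfs --fstype=ext4 --label platform --align 1024 --use-uuid
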
Henning Schild April 21, 2022, 12:02 a.m. UTC | #4
Am Wed, 20 Apr 2022 18:24:42 +0200
schrieb Jan Kiszka <jan.kiszka@siemens.com>:

> On 20.04.22 17:52, Henning Schild wrote:
> > Am Wed, 20 Apr 2022 17:00:59 +0200
> > schrieb Henning Schild <henning.schild@siemens.com>:
> >   
> >> While that kind of works, the next trouble with python 3.10 will
> >> be in wic.  
> > 
> > I am already looking into the wic version bump, it seems to be
> > rather easy. There was some merging around our forked plugins with
> > a new feature "create-unified-kernel-image" which might not work.
> > But that might even be left like it is until someone uses that.  
> 
> Where we are currently consuming unified kernels, this tends to happen
> in an EFI Boot Guard context with the related wic plugin that already
> supports this. In addition, the unified kernel stub that systemd
> provides showed some limitations on ARM platforms with DTB replacement
> needs. That's why we are about to deploy our own stub (via EBG) with
> our own integration process.
> 
> Long story short: I don't expect an urgent need for OE's/systemd's
> unified-kernel-image.

OK, I will include the code because we inherit it, but I will not test
that path, let alone enable it should it not work.

Henning

> Jan
>
Florian Bezdeka April 21, 2022, 12:40 a.m. UTC | #5
On Wed, 2022-04-20 at 17:52 +0200, Henning Schild wrote:
> Am Wed, 20 Apr 2022 17:00:59 +0200
> schrieb Henning Schild <henning.schild@siemens.com>:
> 
> > While that kind of works, the next trouble with python 3.10 will be in
> > wic.
> 
> I am already looking into the wic version bump, it seems to be rather
> easy. There was some merging around our forked plugins with a new
> feature "create-unified-kernel-image" which might not work. But that
> might even be left like it is until someone uses that.
> 
> I will follow up with patches. If anyone wants early access and join
> the testing ...
> 
> https://github.com/henning-schild-work/isar/tree/henning/staging3

I was able to test the wic + bitbake bump here. Looks good so far. At
least my (quite minimal) bookworm-based image builds fine now.

> 
> Henning
> 
> > I propose to merge my 3.9 patch quickly and deal with the wic and
> > bitbake bump later. bookworm is important and should not be allowed to
> > fail in CI.
> > 
> > Henning
> > 
Henning Schild April 21, 2022, 12:55 a.m. UTC | #6
Am Thu, 21 Apr 2022 10:40:05 +0200
schrieb "Bezdeka, Florian (T CED SES-DE)" <florian.bezdeka@siemens.com>:

> On Wed, 2022-04-20 at 17:52 +0200, Henning Schild wrote:
> > Am Wed, 20 Apr 2022 17:00:59 +0200
> > schrieb Henning Schild <henning.schild@siemens.com>:
> >   
> > > While that kind of works, the next trouble with python 3.10 will
> > > be in wic.  
> > 
> > I am already looking into the wic version bump, it seems to be
> > rather easy. There was some merging around our forked plugins with
> > a new feature "create-unified-kernel-image" which might not work.
> > But that might even be left like it is until someone uses that.
> > 
> > I will follow up with patches. If anyone wants early access and join
> > the testing ...
> > 
> > https://github.com/henning-schild-work/isar/tree/henning/staging3  
> 
> I was able to test the wic + bitbake bump here. Looks good so far. At
> least my (quite minimal) bookworm based image builds fine now.

I just sent that. My manual tests also look good, and our CI liked it
... but it will still KFAIL, so I would have to read the logs. The
ilbers CI is not helpful, but I had it running there and canceled
after 17 hours; maybe some bookworm wic images came out, I did not
read the logs.

Henning

> > 
> > Henning
> >   
> > > I propose to merge my 3.9 patch quickly and deal with the wic and
> > > bitbake bump later. bookworm is important and should not be
> > > allowed to fail in CI.
> > > 
> > > Henning
> > > 
> > > Am Wed, 20 Apr 2022 16:35:40 +0200
> > > schrieb Henning Schild <henning.schild@siemens.com>:
> > >   
> > > > Update bitbake to the latest release in 1.50 branch. This
> > > > release is tagged to the commit ID
> > > > aaa7f7af23d5f89fe4a5ed48c57ea3dfca07c79d in the bitbake
> > > > upstream.
> > > > 
> > > > Signed-off-by: Henning Schild <henning.schild@siemens.com>
> > > > ---
> > > >  bitbake/lib/bb/cache.py                       |  3 +-
> > > >  bitbake/lib/bb/cooker.py                      | 30
> > > > ++++++++++++++-- bitbake/lib/bb/data_smart.py
> > > > |  4 +-- bitbake/lib/bb/fetch2/__init__.py             |  4 +++
> > > >  bitbake/lib/bb/fetch2/perforce.py             |  2 +-
> > > >  bitbake/lib/bb/fetch2/wget.py                 |  2 +-
> > > >  bitbake/lib/bb/persist_data.py                |  5 +--
> > > >  bitbake/lib/bb/process.py                     |  2 +-
> > > >  bitbake/lib/bb/runqueue.py                    | 34
> > > > +++++++++--------- bitbake/lib/bb/server/process.py
> > > >  | 2 +- bitbake/lib/bb/tests/fetch.py                 | 35
> > > > ++++++++++--------- bitbake/lib/bb/utils.py
> > > >   | 13 +++++-- bitbake/lib/hashserv/server.py                |
> > > > 4 +-- bitbake/lib/toaster/tests/builds/buildtest.py |  2 +-
> > > >  14 files changed, 89 insertions(+), 53 deletions(-)
> > > > 
> > > > diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
> > > > index 27eb271798e8..5f9c0a779d75 100644
> > > > --- a/bitbake/lib/bb/cache.py
> > > > +++ b/bitbake/lib/bb/cache.py
> > > > @@ -19,7 +19,8 @@
> > > >  import os
> > > >  import logging
> > > >  import pickle
> > > > -from collections import defaultdict, Mapping
> > > > +from collections import defaultdict
> > > > +from collections.abc import Mapping
> > > >  import bb.utils
> > > >  from bb import PrefixLoggerAdapter
> > > >  import re
> > > > diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
> > > > index 89f1fad08310..c946800a8c62 100644
> > > > --- a/bitbake/lib/bb/cooker.py
> > > > +++ b/bitbake/lib/bb/cooker.py
> > > > @@ -388,12 +388,22 @@ class BBCooker:
> > > >              # Create a new hash server bound to a unix domain
> > > > socket if not self.hashserv:
> > > >                  dbfile = (self.data.getVar("PERSISTENT_DIR") or
> > > > self.data.getVar("CACHE")) + "/hashserv.db"
> > > > +                upstream =
> > > > self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
> > > > +                if upstream:
> > > > +                    import socket
> > > > +                    try:
> > > > +                        sock =
> > > > socket.create_connection(upstream.split(":"), 5)
> > > > +                        sock.close()
> > > > +                    except socket.error as e:
> > > > +                        bb.warn("BB_HASHSERVE_UPSTREAM is not
> > > > valid, unable to connect hash equivalence server at '%s': %s" 
> > > > +                                 % (upstream, repr(e)))
> > > > +
> > > >                  self.hashservaddr = "unix://%s/hashserve.sock"
> > > > % self.data.getVar("TOPDIR") self.hashserv =
> > > > hashserv.create_server( self.hashservaddr,
> > > >                      dbfile,
> > > >                      sync=False,
> > > > -
> > > > upstream=self.data.getVar("BB_HASHSERVE_UPSTREAM") or None,
> > > > +                    upstream=upstream,
> > > >                  )
> > > >                  self.hashserv.process =
> > > > multiprocessing.Process(target=self.hashserv.serve_forever)
> > > > self.hashserv.process.start() @@ -805,7 +815,9 @@ class
> > > > BBCooker: for dep in rq.rqdata.runtaskentries[tid].depends:
> > > >                  (depmc, depfn, _, deptaskfn) =
> > > > bb.runqueue.split_tid_mcfn(dep) deppn =
> > > > self.recipecaches[depmc].pkg_fn[deptaskfn]
> > > > -
> > > > depend_tree["tdepends"][dotname].append("%s.%s" % (deppn,
> > > > bb.runqueue.taskname_from_tid(dep)))
> > > > +                if depmc:
> > > > +                    depmc = "mc:" + depmc + ":"
> > > > +
> > > > depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc,
> > > > deppn, bb.runqueue.taskname_from_tid(dep))) if taskfn not in
> > > > seen_fns: seen_fns.append(taskfn) packages = []
> > > > @@ -2204,21 +2216,33 @@ class CookerParser(object):
> > > >              yield not cached, mc, infos
> > > >  
> > > >      def parse_generator(self):
> > > > -        while True:
> > > > +        empty = False
> > > > +        while self.processes or not empty:
> > > > +            for process in self.processes.copy():
> > > > +                if not process.is_alive():
> > > > +                    process.join()
> > > > +                    self.processes.remove(process)
> > > > +
> > > >              if self.parsed >= self.toparse:
> > > >                  break
> > > >  
> > > >              try:
> > > >                  result = self.result_queue.get(timeout=0.25)
> > > >              except queue.Empty:
> > > > +                empty = True
> > > >                  pass
> > > >              else:
> > > > +                empty = False
> > > >                  value = result[1]
> > > >                  if isinstance(value, BaseException):
> > > >                      raise value
> > > >                  else:
> > > >                      yield result
> > > >  
> > > > +        if not (self.parsed >= self.toparse):
> > > > +            raise bb.parse.ParseError("Not all recipes parsed,
> > > > parser thread killed/died? Exiting.", None) +
> > > > +
> > > >      def parse_next(self):
> > > >          result = []
> > > >          parsed = None
> > > > diff --git a/bitbake/lib/bb/data_smart.py
> > > > b/bitbake/lib/bb/data_smart.py index 8291ca65e309..65857a9c7941
> > > > 100644 --- a/bitbake/lib/bb/data_smart.py
> > > > +++ b/bitbake/lib/bb/data_smart.py
> > > > @@ -17,7 +17,7 @@ BitBake build tools.
> > > >  # Based on functions from the base bb module, Copyright 2003
> > > > Holger Schurig 
> > > >  import copy, re, sys, traceback
> > > > -from collections import MutableMapping
> > > > +from collections.abc import MutableMapping
> > > >  import logging
> > > >  import hashlib
> > > >  import bb, bb.codeparser
> > > > @@ -403,7 +403,7 @@ class DataSmart(MutableMapping):
> > > >                      s =
> > > > __expand_python_regexp__.sub(varparse.python_sub, s) except
> > > > SyntaxError as e: # Likely unmatched brackets, just don't
> > > > expand the expression
> > > > -                    if e.msg != "EOL while scanning string
> > > > literal":
> > > > +                    if e.msg != "EOL while scanning string
> > > > literal" and not e.msg.startswith("unterminated string
> > > > literal"): raise if s == olds:
> > > >                      break
> > > > diff --git a/bitbake/lib/bb/fetch2/__init__.py
> > > > b/bitbake/lib/bb/fetch2/__init__.py index
> > > > dbf8b50e68a7..1005ec10c639 100644 ---
> > > > a/bitbake/lib/bb/fetch2/__init__.py +++
> > > > b/bitbake/lib/bb/fetch2/__init__.py @@ -430,6 +430,7 @@ def
> > > > uri_replace(ud, uri_find, uri_replace, replacements, d,
> > > > mirrortarball=None): uri_replace_decoded =
> > > > list(decodeurl(uri_replace)) logger.debug2("For url %s
> > > > comparing %s to %s" % (uri_decoded, uri_find_decoded,
> > > > uri_replace_decoded)) result_decoded = ['', '', '', '', '', {}]
> > > > +    # 0 - type, 1 - host, 2 - path, 3 - user,  4- pswd, 5 -
> > > > params for loc, i in enumerate(uri_find_decoded):
> > > >          result_decoded[loc] = uri_decoded[loc]
> > > >          regexp = i
> > > > @@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace,
> > > > replacements, d, mirrortarball=None): for l in replacements:
> > > >                      uri_replace_decoded[loc][k] =
> > > > uri_replace_decoded[loc][k].replace(l, replacements[l])
> > > > result_decoded[loc][k] = uri_replace_decoded[loc][k]
> > > > +        elif (loc == 3 or loc == 4) and
> > > > uri_replace_decoded[loc]:
> > > > +            # User/password in the replacement is just a
> > > > straight replacement
> > > > +            result_decoded[loc] = uri_replace_decoded[loc]
> > > >          elif (re.match(regexp, uri_decoded[loc])):
> > > >              if not uri_replace_decoded[loc]:
> > > >                  result_decoded[loc] = ""
> > > > diff --git a/bitbake/lib/bb/fetch2/perforce.py
> > > > b/bitbake/lib/bb/fetch2/perforce.py index
> > > > e2a41a4a1287..3b6fa4b1ec9a 100644 ---
> > > > a/bitbake/lib/bb/fetch2/perforce.py +++
> > > > b/bitbake/lib/bb/fetch2/perforce.py @@ -134,7 +134,7 @@ class
> > > > Perforce(FetchMethod): 
> > > >          ud.setup_revisions(d)
> > > >  
> > > > -        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' %
> > > > (cleanedhost, cleanedpath, cleandedmodule, ud.revision))
> > > > +        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' %
> > > > (cleanedhost, cleanedpath, cleanedmodule, ud.revision)) 
> > > >      def _buildp4command(self, ud, d, command,
> > > > depot_filename=None): """
> > > > diff --git a/bitbake/lib/bb/fetch2/wget.py
> > > > b/bitbake/lib/bb/fetch2/wget.py index 784df70c9f62..7fa2a87ffde5
> > > > 100644 --- a/bitbake/lib/bb/fetch2/wget.py
> > > > +++ b/bitbake/lib/bb/fetch2/wget.py
> > > > @@ -322,7 +322,7 @@ class Wget(FetchMethod):
> > > >              except (TypeError, ImportError, IOError,
> > > > netrc.NetrcParseError): pass
> > > >  
> > > > -            with opener.open(r) as response:
> > > > +            with opener.open(r, timeout=30) as response:
> > > >                  pass
> > > >          except urllib.error.URLError as e:
> > > >              if try_again:
> > > > diff --git a/bitbake/lib/bb/persist_data.py
> > > > b/bitbake/lib/bb/persist_data.py index
> > > > c6a209fb3fc1..6f32d81afe80 100644 ---
> > > > a/bitbake/lib/bb/persist_data.py +++
> > > > b/bitbake/lib/bb/persist_data.py @@ -12,6 +12,7 @@ currently,
> > > > providing a key/value store accessed by 'domain'. #
> > > >  
> > > >  import collections
> > > > +import collections.abc
> > > >  import contextlib
> > > >  import functools
> > > >  import logging
> > > > @@ -19,7 +20,7 @@ import os.path
> > > >  import sqlite3
> > > >  import sys
> > > >  import warnings
> > > > -from collections import Mapping
> > > > +from collections.abc import Mapping
> > > >  
> > > >  sqlversion = sqlite3.sqlite_version_info
> > > >  if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1]
> > > > < 3): @@ -29,7 +30,7 @@ if sqlversion[0] < 3 or (sqlversion[0]
> > > > == 3 and sqlversion[1] < 3): logger =
> > > > logging.getLogger("BitBake.PersistData") 
> > > >  @functools.total_ordering
> > > > -class SQLTable(collections.MutableMapping):
> > > > +class SQLTable(collections.abc.MutableMapping):
> > > >      class _Decorators(object):
> > > >          @staticmethod
> > > >          def retry(*, reconnect=True):
> > > > diff --git a/bitbake/lib/bb/process.py
> > > > b/bitbake/lib/bb/process.py index d5a1775fcec0..af5d804a1d59
> > > > 100644 --- a/bitbake/lib/bb/process.py
> > > > +++ b/bitbake/lib/bb/process.py
> > > > @@ -60,7 +60,7 @@ class Popen(subprocess.Popen):
> > > >          "close_fds": True,
> > > >          "preexec_fn": subprocess_setup,
> > > >          "stdout": subprocess.PIPE,
> > > > -        "stderr": subprocess.STDOUT,
> > > > +        "stderr": subprocess.PIPE,
> > > >          "stdin": subprocess.PIPE,
> > > >          "shell": False,
> > > >      }
> > > > diff --git a/bitbake/lib/bb/runqueue.py
> > > > b/bitbake/lib/bb/runqueue.py index 10511a09dc1c..cd10da8b3a6f
> > > > 100644 --- a/bitbake/lib/bb/runqueue.py
> > > > +++ b/bitbake/lib/bb/runqueue.py
> > > > @@ -926,38 +926,36 @@ class RunQueueData:
> > > >          #
> > > >          # Once all active tasks are marked, prune the ones we
> > > > don't need. 
> > > > -        delcount = {}
> > > > -        for tid in list(self.runtaskentries.keys()):
> > > > -            if tid not in runq_build:
> > > > -                delcount[tid] = self.runtaskentries[tid]
> > > > -                del self.runtaskentries[tid]
> > > > -
> > > >          # Handle --runall
> > > >          if self.cooker.configuration.runall:
> > > >              # re-run the mark_active and then drop unused tasks
> > > > from new list
> > > > +            reduced_tasklist = set(self.runtaskentries.keys())
> > > > +            for tid in list(self.runtaskentries.keys()):
> > > > +                if tid not in runq_build:
> > > > +                   reduced_tasklist.remove(tid)
> > > >              runq_build = {}
> > > >  
> > > >              for task in self.cooker.configuration.runall:
> > > >                  if not task.startswith("do_"):
> > > >                      task = "do_{0}".format(task)
> > > >                  runall_tids = set()
> > > > -                for tid in list(self.runtaskentries):
> > > > +                for tid in reduced_tasklist:
> > > >                      wanttid =
> > > > "{0}:{1}".format(fn_from_tid(tid), task)
> > > > -                    if wanttid in delcount:
> > > > -                        self.runtaskentries[wanttid] =
> > > > delcount[wanttid] if wanttid in self.runtaskentries:
> > > >                          runall_tids.add(wanttid)
> > > >  
> > > >                  for tid in list(runall_tids):
> > > > -                    mark_active(tid,1)
> > > > +                    mark_active(tid, 1)
> > > >                      if self.cooker.configuration.force:
> > > >                          invalidate_task(tid, False)
> > > >  
> > > > -            for tid in list(self.runtaskentries.keys()):
> > > > -                if tid not in runq_build:
> > > > -                    delcount[tid] = self.runtaskentries[tid]
> > > > -                    del self.runtaskentries[tid]
> > > > +        delcount = set()
> > > > +        for tid in list(self.runtaskentries.keys()):
> > > > +            if tid not in runq_build:
> > > > +                delcount.add(tid)
> > > > +                del self.runtaskentries[tid]
> > > >  
> > > > +        if self.cooker.configuration.runall:
> > > >              if len(self.runtaskentries) == 0:
> > > >                  bb.msg.fatal("RunQueue", "Could not find any
> > > > tasks with the tasknames %s to run within the recipes of the
> > > > taskgraphs of the targets %s" %
> > > > (str(self.cooker.configuration.runall), str(self.targets))) @@
> > > > -971,16 +969,16 @@ class RunQueueData: for task in
> > > > self.cooker.configuration.runonly: if not
> > > > task.startswith("do_"): task = "do_{0}".format(task)
> > > > -                runonly_tids = { k: v for k, v in
> > > > self.runtaskentries.items() if taskname_from_tid(k) == task }
> > > > +                runonly_tids = [k for k in
> > > > self.runtaskentries.keys() if taskname_from_tid(k) == task] 
> > > > -                for tid in list(runonly_tids):
> > > > -                    mark_active(tid,1)
> > > > +                for tid in runonly_tids:
> > > > +                    mark_active(tid, 1)
> > > >                      if self.cooker.configuration.force:
> > > >                          invalidate_task(tid, False)
> > > >  
> > > >              for tid in list(self.runtaskentries.keys()):
> > > >                  if tid not in runq_build:
> > > > -                    delcount[tid] = self.runtaskentries[tid]
> > > > +                    delcount.add(tid)
> > > >                      del self.runtaskentries[tid]
> > > >  
> > > >              if len(self.runtaskentries) == 0:
> > > > diff --git a/bitbake/lib/bb/server/process.py
> > > > b/bitbake/lib/bb/server/process.py index
> > > > 07bb785a1822..fcdce19717d2 100644 ---
> > > > a/bitbake/lib/bb/server/process.py +++
> > > > b/bitbake/lib/bb/server/process.py @@ -659,7 +659,7 @@ class
> > > > BBUIEventQueue: self.reader = ConnectionReader(readfd)
> > > >  
> > > >          self.t = threading.Thread()
> > > > -        self.t.setDaemon(True)
> > > > +        self.t.daemon = True
> > > >          self.t.run = self.startCallbackHandler
> > > >          self.t.start()
> > > >  
> > > > diff --git a/bitbake/lib/bb/tests/fetch.py
> > > > b/bitbake/lib/bb/tests/fetch.py index f5d557e8c000..3b64584da0e9
> > > > 100644 --- a/bitbake/lib/bb/tests/fetch.py
> > > > +++ b/bitbake/lib/bb/tests/fetch.py
> > > > @@ -431,6 +431,10 @@ class MirrorUriTest(FetcherTest):
> > > >          ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
> > > > "git://someserver.org/bitbake;branch=master",
> > > > "git://git.openembedded.org/bitbake;protocol=http") :
> > > > "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", 
> > > > +
> > > > ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
> > > > "git://someserver.org/bitbake;branch=master",
> > > > "git://user2@git.openembedded.org/bitbake;protocol=http")
> > > > +            :
> > > > "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
> > > > + +
> > > >          #Renaming files doesn't work
> > > >          #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz",
> > > > "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz",
> > > > "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") :
> > > > "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
> > > > #("file://sstate-xyz.tgz", "file://.*/.*",
> > > > "file:///somewhere/1234/sstate-cache") :
> > > > "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", @@ -491,7
> > > > +495,7 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
> > > > super(GitDownloadDirectoryNamingTest, self).setUp()
> > > > self.recipe_url = "git://git.openembedded.org/bitbake"
> > > > self.recipe_dir = "git.openembedded.org.bitbake"
> > > > -        self.mirror_url =
> > > > "git://github.com/openembedded/bitbake.git"
> > > > +        self.mirror_url =
> > > > "git://github.com/openembedded/bitbake.git;protocol=https"
> > > > self.mirror_dir = "github.com.openembedded.bitbake.git" 
> > > >          self.d.setVar('SRCREV',
> > > > '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') @@ -539,7 +543,7 @@
> > > > class TarballNamingTest(FetcherTest): super(TarballNamingTest,
> > > > self).setUp() self.recipe_url =
> > > > "git://git.openembedded.org/bitbake" self.recipe_tarball =
> > > > "git2_git.openembedded.org.bitbake.tar.gz"
> > > > -        self.mirror_url =

Patch

diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 27eb271798e8..5f9c0a779d75 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -19,7 +19,8 @@ 
 import os
 import logging
 import pickle
-from collections import defaultdict, Mapping
+from collections import defaultdict
+from collections.abc import Mapping
 import bb.utils
 from bb import PrefixLoggerAdapter
 import re
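
For reference, Python 3.10 removed the ABC aliases that used to live directly in the collections module, which is why Mapping now has to come from collections.abc while concrete containers such as defaultdict stay where they are. A minimal standalone sketch of the new import path (illustrative only, not bitbake code):

    # Python 3.10 removed the ABC aliases from 'collections'; the ABCs
    # are importable from collections.abc only.
    from collections import defaultdict      # concrete container: unchanged
    from collections.abc import Mapping      # ABC: new import path

    class ReadOnlyConfig(Mapping):
        """Minimal read-only mapping built on the collections.abc ABC."""
        def __init__(self, data):
            self._data = dict(data)
        def __getitem__(self, key):
            return self._data[key]
        def __iter__(self):
            return iter(self._data)
        def __len__(self):
            return len(self._data)

    cfg = ReadOnlyConfig({"BB_NUMBER_THREADS": "4"})
    assert isinstance(cfg, Mapping) and cfg["BB_NUMBER_THREADS"] == "4"
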
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 89f1fad08310..c946800a8c62 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -388,12 +388,22 @@  class BBCooker:
             # Create a new hash server bound to a unix domain socket
             if not self.hashserv:
                 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
+                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
+                if upstream:
+                    import socket
+                    try:
+                        sock = socket.create_connection(upstream.split(":"), 5)
+                        sock.close()
+                    except socket.error as e:
+                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s" 
+                                 % (upstream, repr(e)))
+
                 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                 self.hashserv = hashserv.create_server(
                     self.hashservaddr,
                     dbfile,
                     sync=False,
-                    upstream=self.data.getVar("BB_HASHSERVE_UPSTREAM") or None,
+                    upstream=upstream,
                 )
                 self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                 self.hashserv.process.start()
@@ -805,7 +815,9 @@  class BBCooker:
             for dep in rq.rqdata.runtaskentries[tid].depends:
                 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
-                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
+                if depmc:
+                    depmc = "mc:" + depmc + ":"
+                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
             if taskfn not in seen_fns:
                 seen_fns.append(taskfn)
                 packages = []
@@ -2204,21 +2216,33 @@  class CookerParser(object):
             yield not cached, mc, infos
 
     def parse_generator(self):
-        while True:
+        empty = False
+        while self.processes or not empty:
+            for process in self.processes.copy():
+                if not process.is_alive():
+                    process.join()
+                    self.processes.remove(process)
+
             if self.parsed >= self.toparse:
                 break
 
             try:
                 result = self.result_queue.get(timeout=0.25)
             except queue.Empty:
+                empty = True
                 pass
             else:
+                empty = False
                 value = result[1]
                 if isinstance(value, BaseException):
                     raise value
                 else:
                     yield result
 
+        if not (self.parsed >= self.toparse):
+            raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
+
+
     def parse_next(self):
         result = []
         parsed = None
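
The BB_HASHSERVE_UPSTREAM handling above probes the configured upstream with a short TCP connect before wiring it into the local hash server, so a bad address surfaces as a warning instead of a late failure. A rough standalone sketch of that probe, assuming the upstream address is a plain "host:port" string:

    # Sketch of a reachability probe for a "host:port" upstream address;
    # the address below is only an example.
    import socket

    def upstream_reachable(upstream, timeout=5):
        host, port = upstream.split(":")
        try:
            sock = socket.create_connection((host, int(port)), timeout)
            sock.close()
            return True
        except OSError as exc:
            print("cannot reach hash equivalence server at %r: %r" % (upstream, exc))
            return False

    upstream_reachable("localhost:8686")
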
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 8291ca65e309..65857a9c7941 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -17,7 +17,7 @@  BitBake build tools.
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
 import copy, re, sys, traceback
-from collections import MutableMapping
+from collections.abc import MutableMapping
 import logging
 import hashlib
 import bb, bb.codeparser
@@ -403,7 +403,7 @@  class DataSmart(MutableMapping):
                     s = __expand_python_regexp__.sub(varparse.python_sub, s)
                 except SyntaxError as e:
                     # Likely unmatched brackets, just don't expand the expression
-                    if e.msg != "EOL while scanning string literal":
+                    if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"):
                         raise
                 if s == olds:
                     break
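
Python 3.10 reworded the SyntaxError raised for an unterminated string literal, so the expansion code has to accept both the old and the new message. A standalone illustration:

    # The SyntaxError text for an unterminated string changed in 3.10,
    # so both spellings need to be tolerated.
    try:
        compile('"unclosed', "<expansion>", "eval")
    except SyntaxError as e:
        assert (e.msg == "EOL while scanning string literal"           # <= 3.9
                or e.msg.startswith("unterminated string literal"))    # >= 3.10
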
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index dbf8b50e68a7..1005ec10c639 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -430,6 +430,7 @@  def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     uri_replace_decoded = list(decodeurl(uri_replace))
     logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
     result_decoded = ['', '', '', '', '', {}]
+    # 0 - type, 1 - host, 2 - path, 3 - user,  4- pswd, 5 - params
     for loc, i in enumerate(uri_find_decoded):
         result_decoded[loc] = uri_decoded[loc]
         regexp = i
@@ -449,6 +450,9 @@  def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
                 for l in replacements:
                     uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                 result_decoded[loc][k] = uri_replace_decoded[loc][k]
+        elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
+            # User/password in the replacement is just a straight replacement
+            result_decoded[loc] = uri_replace_decoded[loc]
         elif (re.match(regexp, uri_decoded[loc])):
             if not uri_replace_decoded[loc]:
                 result_decoded[loc] = ""
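
The new branch copies user and password verbatim from the mirror entry instead of pattern-matching them against the original URI. A simplified sketch of that rule using plain lists in the decoded layout noted in the comment above (illustrative only, not the bb.fetch2 implementation):

    # Decoded layout assumed here: [type, host, path, user, pswd, params].
    def apply_credentials(uri_decoded, uri_replace_decoded):
        result = list(uri_decoded)
        for loc in (3, 4):                  # 3 = user, 4 = password
            if uri_replace_decoded[loc]:    # straight replacement when set
                result[loc] = uri_replace_decoded[loc]
        return result

    original = ["git", "someserver.org", "/bitbake", "user1", "", {}]
    mirror   = ["git", "git.openembedded.org", "/bitbake", "user2", "", {}]
    assert apply_credentials(original, mirror)[3] == "user2"
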
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index e2a41a4a1287..3b6fa4b1ec9a 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -134,7 +134,7 @@  class Perforce(FetchMethod):
 
         ud.setup_revisions(d)
 
-        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleandedmodule, ud.revision))
+        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
 
     def _buildp4command(self, ud, d, command, depot_filename=None):
         """
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 784df70c9f62..7fa2a87ffde5 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -322,7 +322,7 @@  class Wget(FetchMethod):
             except (TypeError, ImportError, IOError, netrc.NetrcParseError):
                 pass
 
-            with opener.open(r) as response:
+            with opener.open(r, timeout=30) as response:
                 pass
         except urllib.error.URLError as e:
             if try_again:
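
The added timeout bounds the status check so a stalled server cannot hang the fetcher indefinitely. A rough standalone sketch of a bounded HEAD-style probe (the URL is only an example):

    # Bounded status probe: urlopen() gets an explicit timeout so the
    # call cannot block forever on an unresponsive server.
    import urllib.request

    def check_status(url, timeout=30):
        req = urllib.request.Request(url, method="HEAD")
        try:
            with urllib.request.urlopen(req, timeout=timeout) as response:
                return response.status < 400
        except OSError:
            return False

    print(check_status("https://downloads.yoctoproject.org/releases/"))
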
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index c6a209fb3fc1..6f32d81afe80 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -12,6 +12,7 @@  currently, providing a key/value store accessed by 'domain'.
 #
 
 import collections
+import collections.abc
 import contextlib
 import functools
 import logging
@@ -19,7 +20,7 @@  import os.path
 import sqlite3
 import sys
 import warnings
-from collections import Mapping
+from collections.abc import Mapping
 
 sqlversion = sqlite3.sqlite_version_info
 if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -29,7 +30,7 @@  if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
 logger = logging.getLogger("BitBake.PersistData")
 
 @functools.total_ordering
-class SQLTable(collections.MutableMapping):
+class SQLTable(collections.abc.MutableMapping):
     class _Decorators(object):
         @staticmethod
         def retry(*, reconnect=True):
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
index d5a1775fcec0..af5d804a1d59 100644
--- a/bitbake/lib/bb/process.py
+++ b/bitbake/lib/bb/process.py
@@ -60,7 +60,7 @@  class Popen(subprocess.Popen):
         "close_fds": True,
         "preexec_fn": subprocess_setup,
         "stdout": subprocess.PIPE,
-        "stderr": subprocess.STDOUT,
+        "stderr": subprocess.PIPE,
         "stdin": subprocess.PIPE,
         "shell": False,
     }
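
With stderr switched to its own pipe, error output is captured separately instead of being folded into stdout. A standalone illustration of the difference:

    # stderr=subprocess.PIPE keeps the two streams separate.
    import subprocess

    proc = subprocess.Popen(
        ["sh", "-c", "echo out; echo err >&2"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out, err = proc.communicate()
    assert out.strip() == b"out" and err.strip() == b"err"
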
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 10511a09dc1c..cd10da8b3a6f 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -926,38 +926,36 @@  class RunQueueData:
         #
         # Once all active tasks are marked, prune the ones we don't need.
 
-        delcount = {}
-        for tid in list(self.runtaskentries.keys()):
-            if tid not in runq_build:
-                delcount[tid] = self.runtaskentries[tid]
-                del self.runtaskentries[tid]
-
         # Handle --runall
         if self.cooker.configuration.runall:
             # re-run the mark_active and then drop unused tasks from new list
+            reduced_tasklist = set(self.runtaskentries.keys())
+            for tid in list(self.runtaskentries.keys()):
+                if tid not in runq_build:
+                   reduced_tasklist.remove(tid)
             runq_build = {}
 
             for task in self.cooker.configuration.runall:
                 if not task.startswith("do_"):
                     task = "do_{0}".format(task)
                 runall_tids = set()
-                for tid in list(self.runtaskentries):
+                for tid in reduced_tasklist:
                     wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
-                    if wanttid in delcount:
-                        self.runtaskentries[wanttid] = delcount[wanttid]
                     if wanttid in self.runtaskentries:
                         runall_tids.add(wanttid)
 
                 for tid in list(runall_tids):
-                    mark_active(tid,1)
+                    mark_active(tid, 1)
                     if self.cooker.configuration.force:
                         invalidate_task(tid, False)
 
-            for tid in list(self.runtaskentries.keys()):
-                if tid not in runq_build:
-                    delcount[tid] = self.runtaskentries[tid]
-                    del self.runtaskentries[tid]
+        delcount = set()
+        for tid in list(self.runtaskentries.keys()):
+            if tid not in runq_build:
+                delcount.add(tid)
+                del self.runtaskentries[tid]
 
+        if self.cooker.configuration.runall:
             if len(self.runtaskentries) == 0:
                 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
 
@@ -971,16 +969,16 @@  class RunQueueData:
             for task in self.cooker.configuration.runonly:
                 if not task.startswith("do_"):
                     task = "do_{0}".format(task)
-                runonly_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == task }
+                runonly_tids = [k for k in self.runtaskentries.keys() if taskname_from_tid(k) == task]
 
-                for tid in list(runonly_tids):
-                    mark_active(tid,1)
+                for tid in runonly_tids:
+                    mark_active(tid, 1)
                     if self.cooker.configuration.force:
                         invalidate_task(tid, False)
 
             for tid in list(self.runtaskentries.keys()):
                 if tid not in runq_build:
-                    delcount[tid] = self.runtaskentries[tid]
+                    delcount.add(tid)
                     del self.runtaskentries[tid]
 
             if len(self.runtaskentries) == 0:
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index 07bb785a1822..fcdce19717d2 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -659,7 +659,7 @@  class BBUIEventQueue:
         self.reader = ConnectionReader(readfd)
 
         self.t = threading.Thread()
-        self.t.setDaemon(True)
+        self.t.daemon = True
         self.t.run = self.startCallbackHandler
         self.t.start()
 
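
Thread.setDaemon() is deprecated as of Python 3.10 in favour of the daemon attribute. A minimal standalone example:

    # Set the daemon attribute (or pass daemon=True) instead of calling
    # the deprecated setDaemon().
    import threading

    def handler():
        pass

    t = threading.Thread(target=handler)
    t.daemon = True          # preferred over t.setDaemon(True)
    t.start()
    t.join()
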
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index f5d557e8c000..3b64584da0e9 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -431,6 +431,10 @@  class MirrorUriTest(FetcherTest):
         ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http")
             : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
 
+        ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://user2@git.openembedded.org/bitbake;protocol=http")
+            : "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+
+
         #Renaming files doesn't work
         #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
         #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
@@ -491,7 +495,7 @@  class GitDownloadDirectoryNamingTest(FetcherTest):
         super(GitDownloadDirectoryNamingTest, self).setUp()
         self.recipe_url = "git://git.openembedded.org/bitbake"
         self.recipe_dir = "git.openembedded.org.bitbake"
-        self.mirror_url = "git://github.com/openembedded/bitbake.git"
+        self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
         self.mirror_dir = "github.com.openembedded.bitbake.git"
 
         self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
@@ -539,7 +543,7 @@  class TarballNamingTest(FetcherTest):
         super(TarballNamingTest, self).setUp()
         self.recipe_url = "git://git.openembedded.org/bitbake"
         self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
-        self.mirror_url = "git://github.com/openembedded/bitbake.git"
+        self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
         self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
 
         self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
@@ -573,7 +577,7 @@  class GitShallowTarballNamingTest(FetcherTest):
         super(GitShallowTarballNamingTest, self).setUp()
         self.recipe_url = "git://git.openembedded.org/bitbake"
         self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
-        self.mirror_url = "git://github.com/openembedded/bitbake.git"
+        self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
         self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
 
         self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -985,7 +989,7 @@  class FetcherNetworkTest(FetcherTest):
     def test_git_submodule_dbus_broker(self):
         # The following external repositories have show failures in fetch and unpack operations
         # We want to avoid regressions!
-        url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
+        url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -1001,7 +1005,7 @@  class FetcherNetworkTest(FetcherTest):
 
     @skipIfNoNetwork()
     def test_git_submodule_CLI11(self):
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -1016,12 +1020,12 @@  class FetcherNetworkTest(FetcherTest):
     @skipIfNoNetwork()
     def test_git_submodule_update_CLI11(self):
         """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
 
         # CLI11 that pulls in a newer nlohmann-json
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -1035,7 +1039,7 @@  class FetcherNetworkTest(FetcherTest):
 
     @skipIfNoNetwork()
     def test_git_submodule_aktualizr(self):
-        url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
+        url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -1055,7 +1059,7 @@  class FetcherNetworkTest(FetcherTest):
         """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
 
         # This repository also has submodules where the module (name), path and url do not align
-        url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699"
+        url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -1113,7 +1117,7 @@  class SVNTest(FetcherTest):
 
         bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
         # Github will emulate SVN.  Use this to check if we're downloding...
-        bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .",
+        bb.process.run("svn propset svn:externals 'bitbake https://github.com/PhilipHazel/pcre2.git' .",
                        cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
         bb.process.run("svn commit --non-interactive -m 'Add external'",
                        cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1231,7 +1235,7 @@  class FetchLatestVersionTest(FetcherTest):
 
     test_git_uris = {
         # version pattern "X.Y.Z"
-        ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+        ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
             : "1.99.4",
         # version pattern "vX.Y"
         # mirror of git.infradead.org since network issues interfered with testing
@@ -1258,9 +1262,9 @@  class FetchLatestVersionTest(FetcherTest):
             : "0.4.3",
         ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
             : "11.0.0",
-        ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+        ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
             : "1.3.59",
-        ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+        ("remake", "git://github.com/rocky/remake.git;protocol=https", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
             : "3.82+dbg0.9",
     }
 
@@ -1354,9 +1358,6 @@  class FetchCheckStatusTest(FetcherTest):
                       "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
                       "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
                       "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
-                      "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
-                      "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
-                      "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
                       # GitHub releases are hosted on Amazon S3, which doesn't support HEAD
                       "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
                       ]
@@ -2047,7 +2048,7 @@  class GitShallowTest(FetcherTest):
 
     @skipIfNoNetwork()
     def test_bitbake(self):
-        self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir)
+        self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
         self.git('config core.bare true', cwd=self.srcdir)
         self.git('fetch', cwd=self.srcdir)
 
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index b282d09abfce..2a150fe9c73e 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -16,7 +16,8 @@  import bb.msg
 import multiprocessing
 import fcntl
 import importlib
-from importlib import machinery
+import importlib.machinery
+import importlib.util
 import itertools
 import subprocess
 import glob
@@ -451,6 +452,10 @@  def lockfile(name, shared=False, retry=True, block=False):
     consider the possibility of sending a signal to the process to break
     out - at which point you want block=True rather than retry=True.
     """
+    if len(name) > 255:
+        root, ext = os.path.splitext(name)
+        name = root[:255 - len(ext)] + ext
+
     dirname = os.path.dirname(name)
     mkdirhier(dirname)
 
@@ -487,7 +492,7 @@  def lockfile(name, shared=False, retry=True, block=False):
                     return lf
             lf.close()
         except OSError as e:
-            if e.errno == errno.EACCES:
+            if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
                 logger.error("Unable to acquire lock '%s', %s",
                              e.strerror, name)
                 sys.exit(1)
@@ -1616,7 +1621,9 @@  def load_plugins(logger, plugins, pluginpath):
         logger.debug('Loading plugin %s' % name)
         spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
         if spec:
-            return spec.loader.load_module()
+            mod = importlib.util.module_from_spec(spec)
+            spec.loader.exec_module(mod)
+            return mod
 
     logger.debug('Loading plugins from %s...' % pluginpath)
 
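
loader.load_module() is deprecated, so plugins are now constructed with module_from_spec() and executed via exec_module(). A standalone sketch of that idiom (the plugin name and path below are placeholders):

    # Replacement idiom for the deprecated loader.load_module().
    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec is None:
            return None
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod

    # plugin = load_plugin("myplugin", "/path/to/plugins")
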
diff --git a/bitbake/lib/hashserv/server.py b/bitbake/lib/hashserv/server.py
index a0dc0c170f2b..df0fa0a07937 100644
--- a/bitbake/lib/hashserv/server.py
+++ b/bitbake/lib/hashserv/server.py
@@ -521,7 +521,7 @@  class Server(object):
 
     def start_tcp_server(self, host, port):
         self.server = self.loop.run_until_complete(
-            asyncio.start_server(self.handle_client, host, port, loop=self.loop)
+            asyncio.start_server(self.handle_client, host, port)
         )
 
         for s in self.server.sockets:
@@ -546,7 +546,7 @@  class Server(object):
             # Work around path length limits in AF_UNIX
             os.chdir(os.path.dirname(path))
             self.server = self.loop.run_until_complete(
-                asyncio.start_unix_server(self.handle_client, os.path.basename(path), loop=self.loop)
+                asyncio.start_unix_server(self.handle_client, os.path.basename(path))
             )
         finally:
             os.chdir(cwd)
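
Python 3.10 removed the loop= argument from the asyncio server helpers; the running event loop is now picked up implicitly. A minimal standalone sketch:

    # start_server() without loop=: the running loop is used implicitly.
    import asyncio

    async def handle_client(reader, writer):
        writer.close()
        await writer.wait_closed()

    async def main():
        server = await asyncio.start_server(handle_client, "127.0.0.1", 0)
        print("listening on", server.sockets[0].getsockname())
        server.close()
        await server.wait_closed()

    asyncio.run(main())
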
diff --git a/bitbake/lib/toaster/tests/builds/buildtest.py b/bitbake/lib/toaster/tests/builds/buildtest.py
index 872bbd377510..13b51fb0d8e4 100644
--- a/bitbake/lib/toaster/tests/builds/buildtest.py
+++ b/bitbake/lib/toaster/tests/builds/buildtest.py
@@ -119,7 +119,7 @@  class BuildTest(unittest.TestCase):
         if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
             ProjectVariable.objects.get_or_create(
                 name="SSTATE_MIRRORS",
-                value="file://.* http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH",
+                value="file://.* http://sstate.yoctoproject.org/PATH;downloadfilename=PATH",
                 project=project)
 
         ProjectTarget.objects.create(project=project,