Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r-- bitbake/lib/bb/fetch2/__init__.py | 64 ++++++++++++++++++++++++++++++++--------------------------------
1 file changed, 32 insertions(+), 32 deletions(-)
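
Every hunk below applies the same mechanical change: the explicit expand argument `True` is dropped from `d.getVar()` (and `origenv.getVar()`) calls, after BitBake's datastore made expansion the default, leaving the old two-argument form redundant. A minimal sketch of the semantics the callers rely on; FakeData and its naive ${VAR} expansion are invented stand-ins for bb.data_smart.DataSmart, not BitBake's real implementation:

import re

class FakeData:
    """Invented stand-in: models only the getVar expand default."""
    def __init__(self):
        self._vars = {}

    def setVar(self, name, value):
        self._vars[name] = value

    def getVar(self, name, expand=True):
        # expand defaults to True, so d.getVar('X') behaves like the old
        # d.getVar('X', True); pass False explicitly to get the raw value.
        value = self._vars.get(name)
        if value is not None and expand:
            # Naive ${VAR} substitution, enough to illustrate the default.
            value = re.sub(r"\$\{(\w+)\}",
                           lambda m: self.getVar(m.group(1)) or "", value)
        return value

d = FakeData()
d.setVar("TOPDIR", "/home/user/build")
d.setVar("DL_DIR", "${TOPDIR}/downloads")
assert d.getVar("DL_DIR") == d.getVar("DL_DIR", True) == "/home/user/build/downloads"
assert d.getVar("DL_DIR", False) == "${TOPDIR}/downloads"
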
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5c76b22529..ced43630ea 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -491,7 +491,7 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -572,7 +572,7 @@ def verify_checksum(ud, d, precomputed={}):
 
     if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
         # If strict checking enabled and neither sum defined, raise error
-        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
         if strict == "1":
             logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                          'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
@@ -718,7 +718,7 @@ def subprocess_setup():
 
 def get_autorev(d):
     # only not cache src rev in autorev case
-    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+    if d.getVar('BB_SRCREV_POLICY') != "cache":
         d.setVar('BB_DONT_CACHE', '1')
     return "AUTOINC"
 
@@ -737,7 +737,7 @@ def get_srcrev(d, method_name='sortable_revision'):
"""
scms = []
- fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
+ fetcher = Fetch(d.getVar('SRC_URI').split(), d)
urldata = fetcher.ud
for u in urldata:
if urldata[u].method.supports_srcrev():
@@ -757,7 +757,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = d.getVar('SRCREV_FORMAT', True)
+    format = d.getVar('SRCREV_FORMAT')
     if not format:
         raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
 
@@ -821,7 +821,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
 
     origenv = d.getVar("BB_ORIGENV", False)
     for var in exportvars:
-        val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
+        val = d.getVar(var) or (origenv and origenv.getVar(var))
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
 
@@ -860,7 +860,7 @@ def check_network_access(d, info = "", url = None):
"""
log remote network access, and error if BB_NO_NETWORK is set
"""
- if d.getVar("BB_NO_NETWORK", True) == "1":
+ if d.getVar("BB_NO_NETWORK") == "1":
raise NetworkAccess(url, info)
else:
logger.debug(1, "Fetcher accessed the network with the command %s" % info)
@@ -958,7 +958,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
 
         # We may be obtaining a mirror tarball which needs further processing by the real fetcher
         # If that tarball is a local file:// we need to provide a symlink to it
-        dldir = ld.getVar("DL_DIR", True)
+        dldir = ld.getVar("DL_DIR")
         if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
             # Create donestamp in old format to avoid triggering a re-download
@@ -1032,14 +1032,14 @@ def trusted_network(d, url):
     BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
     Note: modifies SRC_URI & mirrors.
     """
-    if d.getVar('BB_NO_NETWORK', True) == "1":
+    if d.getVar('BB_NO_NETWORK') == "1":
         return True
 
     pkgname = d.expand(d.getVar('PN', False))
     trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
 
     if not trusted_hosts:
-        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
+        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
 
     # Not enabled.
     if not trusted_hosts:
@@ -1071,7 +1071,7 @@ def srcrev_internal_helper(ud, d, name):
"""
srcrev = None
- pn = d.getVar("PN", True)
+ pn = d.getVar("PN")
attempts = []
if name != '' and pn:
attempts.append("SRCREV_%s_pn-%s" % (name, pn))
@@ -1082,7 +1082,7 @@ def srcrev_internal_helper(ud, d, name):
     attempts.append("SRCREV")
 
     for a in attempts:
-        srcrev = d.getVar(a, True)
+        srcrev = d.getVar(a)
         if srcrev and srcrev != "INVALID":
             break
 
@@ -1115,7 +1115,7 @@ def get_checksum_file_list(d):
"""
fetch = Fetch([], d, cache = False, localonly = True)
- dl_dir = d.getVar('DL_DIR', True)
+ dl_dir = d.getVar('DL_DIR')
filelist = []
for u in fetch.urls:
ud = fetch.ud[u]
@@ -1129,9 +1129,9 @@ def get_checksum_file_list(d):
                 if f.startswith(dl_dir):
                     # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                     if os.path.exists(f):
-                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                     else:
-                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                 filelist.append(f + ":" + str(os.path.exists(f)))
 
     return " ".join(filelist)
@@ -1204,7 +1204,7 @@ class FetchData(object):
             raise NonLocalMethod()
 
         if self.parm.get("proto", None) and "protocol" not in self.parm:
-            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
             self.parm["protocol"] = self.parm.get("proto", None)
 
         if hasattr(self.method, "urldata_init"):
@@ -1217,7 +1217,7 @@ class FetchData(object):
         elif self.localfile:
             self.localpath = self.method.localpath(self, d)
 
-        dldir = d.getVar("DL_DIR", True)
+        dldir = d.getVar("DL_DIR")
 
         if not self.needdonestamp:
             return
@@ -1257,12 +1257,12 @@ class FetchData(object):
if "srcdate" in self.parm:
return self.parm['srcdate']
- pn = d.getVar("PN", True)
+ pn = d.getVar("PN")
if pn:
- return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+ return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
- return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+ return d.getVar("SRCDATE") or d.getVar("DATE")
class FetchMethod(object):
"""Base class for 'fetch'ing data"""
@@ -1282,7 +1282,7 @@ class FetchMethod(object):
         Can also setup variables in urldata for use in go (saving code duplication
         and duplicate code execution)
         """
-        return os.path.join(d.getVar("DL_DIR", True), urldata.localfile)
+        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
 
     def supports_checksum(self, urldata):
         """
@@ -1450,7 +1450,7 @@ class FetchMethod(object):
         if not cmd:
             return
 
-        path = data.getVar('PATH', True)
+        path = data.getVar('PATH')
         if path:
             cmd = "PATH=\"%s\" %s" % (path, cmd)
         bb.note("Unpacking %s to %s/" % (file, unpackdir))
@@ -1507,7 +1507,7 @@ class FetchMethod(object):
 
     def generate_revision_key(self, ud, d, name):
         key = self._revision_key(ud, d, name)
-        return "%s-%s" % (key, d.getVar("PN", True) or "")
+        return "%s-%s" % (key, d.getVar("PN") or "")
 
 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
@@ -1515,14 +1515,14 @@ class Fetch(object):
             raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
 
         if len(urls) == 0:
-            urls = d.getVar("SRC_URI", True).split()
+            urls = d.getVar("SRC_URI").split()
         self.urls = urls
         self.d = d
         self.ud = {}
         self.connection_cache = connection_cache
 
-        fn = d.getVar('FILE', True)
-        mc = d.getVar('__BBMULTICONFIG', True) or ""
+        fn = d.getVar('FILE')
+        mc = d.getVar('__BBMULTICONFIG') or ""
         if cache and fn and mc + fn in urldata_cache:
             self.ud = urldata_cache[mc + fn]
 
@@ -1565,8 +1565,8 @@ class Fetch(object):
         if not urls:
             urls = self.urls
 
-        network = self.d.getVar("BB_NO_NETWORK", True)
-        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
+        network = self.d.getVar("BB_NO_NETWORK")
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
 
         for u in urls:
             ud = self.ud[u]
@@ -1584,7 +1584,7 @@ class Fetch(object):
                     localpath = ud.localpath
                 elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors, False)
 
                 if premirroronly:
@@ -1624,7 +1624,7 @@ class Fetch(object):
                     if not verified_stamp:
                         m.clean(ud, self.d)
                     logger.debug(1, "Trying MIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors)
 
                 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
@@ -1657,7 +1657,7 @@ class Fetch(object):
             m = ud.method
             logger.debug(1, "Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = try_mirrors(self, self.d, ud, mirrors, True)
             if not ret:
                 # Next try checking from the original uri, u
@@ -1665,7 +1665,7 @@ class Fetch(object):
                     ret = m.checkstatus(self, ud, self.d)
                 except:
                     # Finally, try checking uri, u, from MIRRORS
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                     ret = try_mirrors(self, self.d, ud, mirrors, True)
 
             if not ret:
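
Because every hunk is the same substitution, the insertion and deletion counts balance at 32 each. Trees still carrying the old idiom can be migrated mechanically; the sketch below is an illustrative regex pass (not necessarily how this commit was produced, and GETVAR_TRUE/strip_expand_arg are invented names) that deliberately skips getVar(..., False) and calls split across lines, which need manual review:

import re

# Matches simple single-line calls like d.getVar('X', True) or
# data.getVar(var, True) and drops the redundant ", True" argument.
# Illustrative only: leaves getVar(..., False) and multi-line calls alone.
GETVAR_TRUE = re.compile(r"(\.getVar\(\s*[^,()]+?)\s*,\s*True\s*\)")

def strip_expand_arg(source):
    return GETVAR_TRUE.sub(r"\1)", source)

before = "srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or \"clear\""
assert strip_expand_arg(before) == \
    "srcrev_policy = d.getVar('BB_SRCREV_POLICY') or \"clear\""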