author     Patrick Ohly <patrick.ohly@intel.com>   2016-11-03 09:42:34 +0100
committer  Patrick Ohly <patrick.ohly@intel.com>   2016-12-08 14:12:55 +0100
commit     2ae0c47b2158a162ff8c310a9d25495831a96cc6
tree       c489ac044f55e3ea254adadbd850161dd6a3859f
parent     9e3968d23d060144254bb915a21820e7110847c5
meta-swupd: create update based on previous build
Creating updates based on the Manifest.full of the previous build allows
reusing unchanged files, i.e. the work of compressing these files and
storing the result again under "files" gets avoided. This works by
referencing the previous version in the new Manifest files.

The implication is that versions can no longer be published separately:
the content produced by all previous builds must also be available to
the client.

This is independent of computing deltas. Nothing besides the previous
"www" content needs to be available. It gets downloaded automatically
when starting a build without a previous swupd deploy directory, so no
extra work is needed to enable this mode besides publishing the
previous build results.

Fixes [YOCTO #9189]

Signed-off-by: Patrick Ohly <patrick.ohly@intel.com>
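As a rough illustration of the setup this expects (the server name below is hypothetical, not part of this patch), a follow-up build in an existing update stream points both URLs at the published results of the earlier builds, for example in local.conf:

    # Hypothetical example values; any server that publishes the "www"
    # content of all previous builds works the same way.
    SWUPD_VERSION_URL = "http://updates.example.com/my-distro/milestone/${MACHINE}/${SWUPD_IMAGE_PN}"
    SWUPD_CONTENT_URL = "http://updates.example.com/my-distro/builds/${MACHINE}/${SWUPD_IMAGE_PN}"

For the very first build of a stream both variables are set to empty, because there is nothing to download yet.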
-rw-r--r--  classes/swupd-image.bbclass |  45
-rw-r--r--  lib/swupd/bundles.py        | 135
2 files changed, 173 insertions(+), 7 deletions(-)
diff --git a/classes/swupd-image.bbclass b/classes/swupd-image.bbclass
index 94daafb..466aa46 100644
--- a/classes/swupd-image.bbclass
+++ b/classes/swupd-image.bbclass
@@ -46,6 +46,11 @@ IMAGE_INSTALL_append = " swupd-client-format${SWUPD_FORMAT}"
# The version URL determines what the client picks as the version that it updates to.
# The content URL must have all builds ever produced and is expected to also
# have the corresponding version information.
+#
+# To build the very first version of an image, set these to empty.
+# Errors while accessing the server (such as the non-existent download.example.com)
+# or not having any previous build on that server are fatal. Treating the latter
+# as fatal is necessary to detect misconfiguration.
SWUPD_VERSION_URL ??= "http://download.example.com/updates/my-distro/milestone/${MACHINE}/${SWUPD_IMAGE_PN}"
SWUPD_CONTENT_URL ??= "http://download.example.com/updates/my-distro/builds/${MACHINE}/${SWUPD_IMAGE_PN}"
@@ -192,6 +197,7 @@ python () {
ctime = os.fstat(f.fileno()).st_ctime
bb.parse.mark_dependency(d, stampfile)
d.setVar('REDO_SWUPD', ctime)
+ d.appendVarFlag('do_fetch_swupd_inputs', 'vardeps', ' REDO_SWUPD')
d.appendVarFlag('do_stage_swupd_inputs', 'vardeps', ' REDO_SWUPD')
d.appendVarFlag('do_swupd_update', 'vardeps', ' REDO_SWUPD')
}
@@ -254,12 +260,28 @@ fakeroot python do_stage_swupd_inputs () {
swupd.bundles.copy_core_contents(d)
swupd.bundles.copy_bundle_contents(d)
- swupd.bundles.copy_old_versions(d)
}
addtask stage_swupd_inputs after do_image before do_swupd_update
do_stage_swupd_inputs[dirs] = "${SWUPDIMAGEDIR} ${SWUPDMANIFESTDIR} ${DEPLOY_DIR_SWUPD}/maps/"
do_stage_swupd_inputs[depends] += "virtual/fakeroot-native:do_populate_sysroot"
+python do_fetch_swupd_inputs () {
+ import swupd.bundles
+
+ if d.getVar('PN_BASE', True):
+ bb.debug(2, 'Skipping swupd input fetching for non-base image %s' % d.getVar('PN', True))
+ return
+
+ # Get information from remote update repo.
+ swupd.bundles.download_old_versions(d)
+ # Stage locally cached information about previous builds
+ # (corresponds to the "archive the files of the current build"
+ # step in do_swupd_update).
+ swupd.bundles.copy_old_versions(d)
+}
+do_fetch_swupd_inputs[dirs] = "${SWUPDIMAGEDIR}"
+addtask do_fetch_swupd_inputs before do_swupd_update
+
# do_swupd_update uses its own pseudo database, for several reasons:
# - Performance is better when the pseudo instance is not shared
# with the do_image tasks of other virtual swupd image recipes (those
@@ -328,15 +350,12 @@ outputdir=${DEPLOY_DIR_SWUPD}/www/
emptydir=${DEPLOY_DIR_SWUPD}/empty/
END
+ # do_fetch_swupd_inputs() creates this file when a content
+ # URL was set, so creating an empty file shouldn't be necessary
+ # in most cases.
if [ -e ${DEPLOY_DIR_SWUPD}/image/latest.version ]; then
PREVREL=`cat ${DEPLOY_DIR_SWUPD}/image/latest.version`
else
- # TODO: locate information about latest version from online www update repo
- # and download the relevant files. That makes swupd_create_fullfiles
- # a lot faster because it allows reusing existing, unmodified files.
- # Saves a lot of space, too, because the new Manifest files then merely
- # point to the older version (no entry in ${DEPLOY_DIR_SWUPD}/www/${OS_VERSION}/files,
- # not even a link).
bbdebug 2 "Stubbing out empty latest.version file"
touch ${DEPLOY_DIR_SWUPD}/image/latest.version
PREVREL="0"
@@ -383,11 +402,23 @@ END
# env $PSEUDO bsdtar -acf ${DEPLOY_DIR}/swupd-before-make-fullfiles.tar.gz -C ${DEPLOY_DIR} swupd
invoke_swupd ${STAGING_BINDIR_NATIVE}/swupd_make_fullfiles --log-stdout -S ${DEPLOY_DIR_SWUPD} ${OS_VERSION}
+ if [ "${SWUPD_CONTENT_URL}" ]; then
+ content_url_parameter="--content-url ${SWUPD_CONTENT_URL}"
+ else
+ content_url_parameter=""
+ fi
+
${SWUPD_LOG_FN} "Generating zero packs, this can take some time."
# env $PSEUDO bsdtar -acf ${DEPLOY_DIR}/swupd-before-make-zero-pack.tar.gz -C ${DEPLOY_DIR} swupd
for bndl in ${ALL_BUNDLES}; do
${SWUPD_LOG_FN} "Generating zero pack for $bndl"
- invoke_swupd ${STAGING_BINDIR_NATIVE}/swupd_make_pack --log-stdout -S ${DEPLOY_DIR_SWUPD} 0 ${OS_VERSION} $bndl
+ # The zero packs are used by the swupd client when adding bundles.
+ # The zero pack for os-core is not needed by the swupd client itself;
+ # in Clear Linux OS it is used by the installer. We could save some
+ # space by skipping the os-core zero pack, but for now it gets
+ # generated, just in case it has some future use.
+ invoke_swupd ${STAGING_BINDIR_NATIVE}/swupd_make_pack --log-stdout $content_url_parameter -S ${DEPLOY_DIR_SWUPD} 0 ${OS_VERSION} $bndl | sed -u -e "s/^/$bndl: /"
done
# Generate delta-packs against previous versions chosen by our caller.
diff --git a/lib/swupd/bundles.py b/lib/swupd/bundles.py
index 611f59d..78ffaa5 100644
--- a/lib/swupd/bundles.py
+++ b/lib/swupd/bundles.py
@@ -1,6 +1,10 @@
import glob
+import re
import subprocess
import shutil
+import urllib.request
+import urllib.error
+from bb.utils import export_proxies
from oe.package_manager import RpmPM
from oe.package_manager import OpkgPM
from oe.package_manager import DpkgPM
@@ -159,3 +163,134 @@ def copy_old_versions(d):
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
if output:
bb.fatal('Unexpected output from the following command:\n%s\n%s' % (cmd, output))
+
+def download_manifests(content_url, version, component, to_dir):
+ """
+ Download one manifest file and recursively all manifests referenced by it.
+ Does not overwrite existing files. Unpacks on-the-fly using bsdtar
+ and thus is independent of the compression format, as long as bsdtar
+ recognizes it.
+ """
+ source = '%s/%d/Manifest.%s.tar' % (content_url, version, component)
+ target = os.path.join(to_dir, 'Manifest.%s' % component)
+ base_versions = set()
+ if not os.path.exists(target):
+ bb.debug(1, 'Downloading %s -> %s' % (source, target))
+ response = urllib.request.urlopen(source)
+ archive = response.read()
+ bb.utils.mkdirhier(to_dir)
+ with open(target + '.tar', 'wb') as tarfile:
+ tarfile.write(archive)
+ bsdtar = subprocess.Popen(['bsdtar', '-xf', '-', '-C', to_dir],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ output, _ = bsdtar.communicate(archive)
+ if output or bsdtar.returncode:
+ bb.fatal('Unpacking %s with bsdtar failed:\n%s' % (source, output.decode('utf-8')))
+ with open(target) as f:
+ # Matches the header. We might be parsing Manifest.os-core from build
+ # 1000, but the actual Manifest could be from "version: 900", so get
+ # that as base version, too.
+ version_re = re.compile(r'^(?:previous|version):\s+(\d+)\n$')
+ # Matches the individual entries.
+ manifest_re = re.compile(r'^M.*\s(\d+)\s+(\S+)\n$')
+ for line in f.readlines():
+ m = manifest_re.match(line)
+ if m:
+ subversion = int(m.group(1))
+ submanifest = m.group(2)
+ download_manifests(content_url, subversion, submanifest, to_dir)
+ base_versions.add(subversion)
+ else:
+ m = version_re.match(line)
+ if m:
+ base_versions.add(int(m.group(1)))
+ return base_versions
+
+def download_old_versions(d):
+ """
+ Download the information from the update repo that is needed
+ to build updates in that update stream. This can run in parallel to
+ a normal build and thus is not on the critical path.
+ """
+
+ content_url = d.getVar('SWUPD_CONTENT_URL', True)
+ version_url = d.getVar('SWUPD_VERSION_URL', True)
+ current_format = int(d.getVar('SWUPD_FORMAT', True))
+ deploy_dir = d.getVar('DEPLOY_DIR_SWUPD', True)
+ www_dir = os.path.join(deploy_dir, 'www')
+
+ if not content_url or not version_url:
+ bb.warn('SWUPD_CONTENT_URL and/or SWUPD_VERSION_URL not set, skipping download of old versions for the initial build of a swupd update stream.')
+ return
+
+ # Avoid double // in path. At least twisted is sensitive to that.
+ content_url = content_url.rstrip('/')
+
+ # Set up env variables with proxy information for use in urllib.
+ export_proxies(d)
+
+ # Find latest version for each of the older formats.
+ # For now we ignore the released milestones and go
+ # directly to the URL with all builds. The information
+ # about milestones may be relevant for determining
+ # how format changes need to be handled.
+ latest_versions = {}
+ for format in range(3, current_format + 1):
+ try:
+ url = '%s/version/format%d/latest' % (content_url, format)
+ response = urllib.request.urlopen(url)
+ version = int(response.read())
+ latest_versions[format] = version
+ formatdir = os.path.join(www_dir, 'version', 'format%d' % format)
+ bb.utils.mkdirhier(formatdir)
+ with open(os.path.join(formatdir, 'latest'), 'w') as latest:
+ latest.write(str(version))
+ except urllib.error.HTTPError as http_error:
+ if http_error.code == 404:
+ bb.debug(1, '%s does not exist, skipping that format' % url)
+ else:
+ raise
+
+ # Now get the Manifests of the latest versions and the
+ # versions we are supposed to provide a delta for, as a starting point.
+ # In addition, we also need the Manifests that provide files reused by
+ # this initial set of Manifests or that are referenced by it.
+ #
+ # There's no integrity checking for the files. bsdtar is
+ # expected to detect corrupted archives and https is expected
+ # to protect against man-in-the-middle attacks.
+ pending_versions = set(latest_versions.values())
+ pending_versions.update([int(x) for x in d.getVar('SWUPD_DELTAPACK_VERSIONS', True).split()])
+ fetched_versions = set([0])
+ while pending_versions:
+ version = pending_versions.pop()
+ sub_versions = set()
+ sub_versions.update(download_manifests(content_url, version,
+ 'MoM',
+ os.path.join(www_dir, str(version))))
+ sub_versions.update(download_manifests(content_url, version,
+ 'full',
+ os.path.join(www_dir, str(version))))
+ fetched_versions.add(version)
+ pending_versions.update(sub_versions.difference(fetched_versions))
+
+ latest_version_file = os.path.join(deploy_dir, 'image', 'latest.version')
+ if not os.path.exists(latest_version_file):
+ # We located information about the latest version in the online www update repo.
+ # Now use that to determine what we are updating from. Doing this here
+ # instead of in swupd-image.bbclass has the advantage that we can do some
+ # sanity checking very early in a build.
+ #
+ # Building a proper update makes swupd_create_fullfiles
+ # a lot faster because it allows reusing existing, unmodified files.
+ # Saves a lot of space, too, because the new Manifest files then merely
+ # point to the older version (no entry in ${DEPLOY_DIR_SWUPD}/www/${OS_VERSION}/files,
+ # not even a link).
+ if not latest_versions:
+ bb.fatal("%s does not exist and no information was found under SWUPD_CONTENT_URL %s, cannot proceed without information about the previous build. When building the initial version, unset SWUPD_VERSION_URL and SWUPD_CONTENT_URL to proceed." % (latest_version_file, content_url))
+ latest = sorted(latest_versions.values())[-1]
+ bb.debug(2, "Setting %d in latest.version file" % latest)
+ with open(latest_version_file, 'w') as f:
+ f.write(str(latest))
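To sketch how the version discovery in download_old_versions() gets seeded (the numbers are made up for illustration): with a setting like

    # Hypothetical: additionally fetch the Manifests of versions 100 and 110
    # so that delta packs against them can be generated later.
    SWUPD_DELTAPACK_VERSIONS = "100 110"

and a latest published build of, say, 120 found under version/format%d/latest, the pending set starts as {100, 110, 120}. The loop then follows the "version:"/"previous:" headers and the sub-Manifest entries of each downloaded Manifest and keeps fetching until no unvisited versions remain.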