import glob
import os
import re
import subprocess
import shutil
import urllib.request
import urllib.error
from bb.utils import export_proxies
from oe.package_manager import RpmPM
from oe.package_manager import OpkgPM
from oe.package_manager import DpkgPM
from oe.utils import format_pkg_list
from oe.rootfs import image_list_installed_packages
import oe.path
import swupd.path
import swupd.utils


def create_bundle_manifest(d, bundlename, dest=None):
    """
    create a bundle subscription receipt

    swupd-client expects a bundle subscription to exist for each
    installed bundle. This is simply an empty file named for the
    bundle in /usr/share/clear/bundles

    d -- the bitbake datastore
    bundlename -- the name of the bundle [and the receipt file name]
    dest -- the effective root location in which to create the receipt
        (default IMAGE_ROOTFS)
    """
    tgtpath = '/usr/share/clear/bundles'
    if dest:
        bundledir = dest + tgtpath
    else:
        bundledir = d.expand('${IMAGE_ROOTFS}%s' % tgtpath)
    bb.utils.mkdirhier(bundledir)
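    # swupd-client only needs the receipt file to exist, so an empty file
    # named after the bundle is all that gets created here.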
    open(os.path.join(bundledir, bundlename), 'w+b').close()


def get_bundle_packages(d, bundle):
    """
    Return a list of packages included in a bundle

    d -- the bitbake datastore
    bundle -- the name of the bundle for which we return a package list
    """
    pkgs = (d.getVarFlag('BUNDLE_CONTENTS', bundle, True) or '').split()
    return pkgs


def copy_core_contents(d):
    """
    Determine the os-core contents and copy the mega image to swupd's image directory.

    d -- the bitbake datastore
    """
    imagedir = d.expand('${SWUPDIMAGEDIR}/${OS_VERSION}')
    corefile = d.expand('${SWUPDIMAGEDIR}/${OS_VERSION}/os-core')
    contentsuffix = d.getVar('SWUPD_ROOTFS_MANIFEST_SUFFIX', True)
    imagesuffix = d.getVar('SWUPD_IMAGE_MANIFEST_SUFFIX', True)
    fullfile = d.expand('${SWUPDIMAGEDIR}/${OS_VERSION}/full')
    bundle = d.expand('${SWUPDIMAGEDIR}/${OS_VERSION}/full.tar')
    rootfs = d.getVar('IMAGE_ROOTFS', True)

    # Generate a manifest of the bundle content.
    bb.utils.mkdirhier(imagedir)
    unwanted_files = (d.getVar('SWUPD_FILE_BLACKLIST', True) or '').split()
    swupd.utils.create_content_manifests(rootfs,
                                         corefile + contentsuffix,
                                         corefile + imagesuffix,
                                         unwanted_files)

    havebundles = (d.getVar('SWUPD_BUNDLES', True) or '') != ''
    imgrootfs = d.getVar('MEGA_IMAGE_ROOTFS', True)
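    # Without additional bundles the plain image rootfs already is the full
    # content, so the os-core manifests double as the "full" manifests.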
    if not havebundles:
        imgrootfs = rootfs
        for suffix in (contentsuffix, imagesuffix):
            shutil.copy2(corefile + suffix, fullfile + suffix)
    else:
        swupd.utils.create_content_manifests(imgrootfs,
                                             fullfile + contentsuffix,
                                             fullfile + imagesuffix,
                                             unwanted_files)
    # The "full" manifests exist in both cases now (either copied from os-core
    # or generated from the mega rootfs), so derive the file list from them
    # unconditionally.
    manifest_files = swupd.utils.manifest_to_file_list(fullfile + contentsuffix) + \
                     swupd.utils.manifest_to_file_list(fullfile + imagesuffix)

    bb.debug(1, "Copying from image (%s) to full bundle (%s)" % (imgrootfs, bundle))
    # Create a tar archive instead of a directory tree - speeds up
    # do_stage_swupd_input from ~11min in the Ostro CI to 6min.
    swupd.path.copyxattrfiles(d, manifest_files, imgrootfs, bundle, True)


def stage_image_bundle_contents(d, bundle):
    """
    Determine bundle contents which aren't part of os-core from the mega-image rootfs

    For an image-based bundle, generate a list of files which exist in the
    bundle but not os-core and stage those files from the mega image rootfs to
    the swupd inputs directory

    d -- the bitbake datastore
    bundle -- the name of the bundle to be staged
    """

    # Construct paths to manifest files and directories
    pn = d.getVar('PN', True)
    corefile = d.expand('${SWUPDIMAGEDIR}/${OS_VERSION}/os-core')
    bundlefile = d.expand('${SWUPDIMAGEDIR}/${OS_VERSION}/') + bundle
    contentsuffix = d.getVar('SWUPD_ROOTFS_MANIFEST_SUFFIX', True)
    imagesuffix = d.getVar('SWUPD_IMAGE_MANIFEST_SUFFIX', True)
    megarootfs = d.getVar('MEGA_IMAGE_ROOTFS', True)
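    # The bundle was built as its own image; its rootfs path is derived from
    # MEGA_IMAGE_ROOTFS by substituting the bundle name for 'mega'.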
    imagesrc = megarootfs.replace('mega', bundle)

    # Generate the manifest of the bundle image's file contents,
    # excluding blacklisted files and the content of the os-core.
    bb.debug(3, 'Writing bundle image file manifests %s' % bundlefile)
    unwanted_files = set((d.getVar('SWUPD_FILE_BLACKLIST', True) or '').split())
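    # os-core content is listed without the leading '/' in its manifest; add
    # it back before merging those entries with the blacklisted paths.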
    unwanted_files.update(['/' + x for x in swupd.utils.manifest_to_file_list(corefile + contentsuffix)])
    swupd.utils.create_content_manifests(imagesrc,
                                         bundlefile + contentsuffix,
                                         bundlefile + imagesuffix,
                                         unwanted_files)


def stage_empty_bundle(d, bundle):
    """
    stage an empty bundle

    d -- the bitbake datastore
    bundle -- the name of the bundle to be staged
    """
    bundledir = d.expand('${SWUPDIMAGEDIR}/${OS_VERSION}/%s' % bundle)
    bb.utils.mkdirhier(bundledir)
    create_bundle_manifest(d, bundle, bundledir)


def copy_bundle_contents(d):
    """
    Stage bundle contents

    Copy the contents of all bundles from the mega image rootfs to the swupd
    inputs directory to ensure that any image postprocessing which modifies
    files is reflected in the os-core bundle

    d -- the bitbake datastore
    """
    bb.debug(1, 'Copying contents of bundles for %s from mega image rootfs' % d.getVar('PN', True))
    bundles = (d.getVar('SWUPD_BUNDLES', True) or '').split()
    for bndl in bundles:
        stage_image_bundle_contents(d, bndl)
    bundles = (d.getVar('SWUPD_EMPTY_BUNDLES', True) or '').split()
    for bndl in bundles:
        stage_empty_bundle(d, bndl)


def copy_old_versions(d):
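    """
    Make the swupd input data of older versions available

    For each version listed in SWUPD_DELTAPACK_VERSIONS, unpack the matching
    ${IMAGE_BASENAME}*-<version>-swupd.tar from DEPLOY_DIR_IMAGE into
    DEPLOY_DIR_SWUPD unless that version is already present there.

    d -- the bitbake datastore
    """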
    for prevver in (d.getVar('SWUPD_DELTAPACK_VERSIONS', True) or '').split():
        if not os.path.exists(os.path.join(d.expand('${DEPLOY_DIR_SWUPD}/image'), prevver)):
            pattern = d.expand('${DEPLOY_DIR_IMAGE}/${IMAGE_BASENAME}*-%s-swupd.tar' % prevver)
            prevver_tar = glob.glob(pattern)
            if len(prevver_tar) != 1 or not os.path.exists(prevver_tar[0]):
                bb.fatal("Creating swupd delta packs against %s is not possible because exactly one archive matching %s must be available." %
                         (prevver, pattern))
            cmd = ['tar', '-C', d.getVar('DEPLOY_DIR_SWUPD', True), '-xf', prevver_tar[0]]
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
            if output:
                bb.fatal('Unexpected output from the following command:\n%s\n%s' % (cmd, output))


def download_manifests(content_url, version, component, to_dir):
    """
    Download one manifest file and recursively all manifests referenced by it.
    Does not overwrite existing files. Unpacks on-the-fly using bsdtar
    and thus is independent of the compression format, as long as bsdtar
    recognizes it. Returns the set of versions referenced by this manifest
    (its header versions and the versions of its sub-manifests).
    """
    source = '%s/%d/Manifest.%s.tar' % (content_url, version, component)
    target = os.path.join(to_dir, 'Manifest.%s' % component)
    base_versions = set()
    if not os.path.exists(target):
        bb.debug(1, 'Downloading %s -> %s' % (source, target))
        response = urllib.request.urlopen(source)
        archive = response.read()
        bb.utils.mkdirhier(to_dir)
        with open(target + '.tar', 'wb') as tarfile:
            tarfile.write(archive)
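        # Keep the downloaded archive on disk next to the unpacked manifest
        # and feed the in-memory copy to bsdtar for extraction.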
        bsdtar = subprocess.Popen(['bsdtar', '-xf', '-', '-C', to_dir],
                                  stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        output, _ = bsdtar.communicate(archive)
        if output or bsdtar.returncode:
            bb.fatal('Unpacking %s with bsdtar failed:\n%s' % (source, output.decode('utf-8')))
    with open(target) as f:
        # Matches the header. We might be parsing Manifest.os-core from build
        # 1000, but the actual Manifest could be from "version: 900", so get
        # that as base version, too.
        version_re = re.compile(r'^(?:previous|version):\s+(\d+)\n$')
        # Matches the individual entries.
        manifest_re = re.compile(r'^M.*\s(\d+)\s+(\S+)\n$')
        for line in f.readlines():
            m = manifest_re.match(line)
            if m:
                subversion = int(m.group(1))
                submanifest = m.group(2)
                download_manifests(content_url, subversion, submanifest, to_dir)
                base_versions.add(subversion)
            else:
                m = version_re.match(line)
                if m:
                    base_versions.add(int(m.group(1)))
    return base_versions


def download_old_versions(d):
    """
    Download the information from the update repo that is needed to build
    updates in that update stream. This can run in parallel to a normal
    build and thus is not on the critical path.

    d -- the bitbake datastore
    """

    content_url = d.getVar('SWUPD_CONTENT_URL', True)
    version_url = d.getVar('SWUPD_VERSION_URL', True)
    current_format = int(d.getVar('SWUPD_FORMAT', True))
    deploy_dir = d.getVar('DEPLOY_DIR_SWUPD', True)
    www_dir = os.path.join(deploy_dir, 'www')

    if not content_url or not version_url:
        bb.warn('SWUPD_CONTENT_URL and/or SWUPD_VERSION_URL not set, skipping download of old versions for the initial build of a swupd update stream.')
        return

    # Avoid double // in path. At least twisted is sensitive to that.
    content_url = content_url.rstrip('/')

    # Set up env variables with proxy information for use in urllib.
    export_proxies(d)

    # Find latest version for each of the older formats.
    # For now we ignore the released milestones and go
    # directly to the URL with all builds. The information
    # about milestones may be relevant for determining
    # how format changes need to be handled.
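    # Maps swupd format number -> newest build number published in that format.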
    latest_versions = {}
    for format in range(3, current_format + 1):
        try:
            url = '%s/version/format%d/latest' % (content_url, format)
            response = urllib.request.urlopen(url)
            version = int(response.read())
            latest_versions[format] = version
            formatdir = os.path.join(www_dir, 'version', 'format%d' % format)
            bb.utils.mkdirhier(formatdir)
            with open(os.path.join(formatdir, 'latest'), 'w') as latest:
                latest.write(str(version))
        except urllib.error.HTTPError as http_error:
            if http_error.code == 404:
                bb.debug(1, '%s does not exist, skipping that format' % url)
            else:
                raise

    # Now get the Manifests of the latest versions and the
    # versions we are supposed to provide a delta for, as a starting point.
    # In addition, we also need Manifests that provide files reused by
    # this initial set of Manifests or that are referenced by them.
    #
    # There's no integrity checking for the files. bsdtar is
    # expected to detect corrupted archives and https is expected
    # to protect against man-in-the-middle attacks.
    pending_versions = set(latest_versions.values())
    pending_versions.update([int(x) for x in (d.getVar('SWUPD_DELTAPACK_VERSIONS', True) or '').split()])
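    # Seed with 0 so that it is never fetched; version 0 presumably only
    # occurs as "previous: 0" in the manifests of an initial build.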
    fetched_versions = set([0])
    while pending_versions:
        version = pending_versions.pop()
        sub_versions = set()
        sub_versions.update(download_manifests(content_url, version,
                                               'MoM',
                                               os.path.join(www_dir, str(version))))
        sub_versions.update(download_manifests(content_url, version,
                                               'full',
                                               os.path.join(www_dir, str(version))))
        fetched_versions.add(version)
        pending_versions.update(sub_versions.difference(fetched_versions))

    latest_version_file = os.path.join(deploy_dir, 'image', 'latest.version')
    if not os.path.exists(latest_version_file):
        # We located information about the latest version in the online update repo.
        # Now use that to determine what we are updating from. Doing this here
        # instead of swupd-image.bbclass has the advantage that we can do some
        # sanity checking very early in a build.
        #
        # Building a proper update makes swupd_create_fullfiles
        # a lot faster because it allows reusing existing, unmodified files.
        # Saves a lot of space, too, because the new Manifest files then merely
        # point to the older version (no entry in ${DEPLOY_DIR_SWUPD}/www/${OS_VERSION}/files,
        # not even a link).
        if not latest_versions:
            bb.fatal("%s does not exist and no information was found under SWUPD_CONTENT_URL %s, cannot proceed without information about the previous build. When building the initial version, unset SWUPD_VERSION_URL and SWUPD_CONTENT_URL to proceed." % (latest_version_file, content_url))
        latest = sorted(latest_versions.values())[-1]
        bb.debug(2, "Setting %d in latest.version file" % latest)
        with open(latest_version_file, 'w') as f:
            f.write(str(latest))