path: root/meta-mentor-common/classes/archive-release-downloads.bbclass
# ---------------------------------------------------------------------------------------------------------------------
# SPDX-License-Identifier: MIT
# ---------------------------------------------------------------------------------------------------------------------

# Deploy the downloads needed for offline builds with the layers distributed
# by archive-release, using license filtering to honor the redistribution
# rights of those sources.
#
# Originally copied from copyleft_compliance. It is kept separate because the
# two serve slightly different purposes, and so that we can change how it
# operates without affecting users of copyleft_compliance.
#
# vi:sts=4:sw=4:et

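# Typical usage (illustrative, not defined by this class): enable the class
# and run the aggregate task against an image or other top-level target, e.g.
# in local.conf:
#   INHERIT += "archive-release-downloads"
# and then:
#   bitbake -c archive_release_downloads_all <image>
# Redistributable downloads land in ARCHIVE_RELEASE_DL_DIR; anything filtered
# out by license lands in ARCHIVE_RELEASE_EXCLUDED_DL_DIR.
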
ARCHIVE_RELEASE_DL_DIR ?= '${DEPLOY_DIR}/release-downloads'
ARCHIVE_RELEASE_EXCLUDED_DL_DIR ?= '${DEPLOY_DIR}/release-downloads-excluded'

DL_LICENSE_INCLUDE ?= "${@' '.join(sorted(set(d.getVarFlags('SPDXLICENSEMAP').values())))}"
DL_LICENSE_INCLUDE[type] = 'list'
DL_LICENSE_INCLUDE[doc] = 'Space separated list of included licenses (supports wildcards)'

DL_LICENSE_EXCLUDE ?= 'CLOSED Proprietary* Mentor Freescale EULA INTEL NetLogic'
DL_LICENSE_EXCLUDE[type] = 'list'
DL_LICENSE_EXCLUDE[doc] = 'Space separated list of excluded licenses (supports wildcards)'
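
# Example overrides (illustrative values only), e.g. in local.conf or a distro
# config, to tighten or relax what is treated as redistributable:
#   DL_LICENSE_INCLUDE = "GPL* LGPL* MIT BSD*"
#   DL_LICENSE_EXCLUDE = "CLOSED Proprietary*"
# oe.license.is_included() evaluates the recipe's full LICENSE expression, so
# a dual-licensed recipe such as LICENSE = "GPLv2 | Proprietary" can still be
# included via its GPL option, while a purely proprietary recipe is routed to
# the excluded downloads directory.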

python do_archive_release_downloads () {
    """Populate a tree of the recipe sources and emit patch series files"""
    import os.path
    import shutil
    import oe.license

    include = oe.data.typed_value('DL_LICENSE_INCLUDE', d)
    exclude = oe.data.typed_value('DL_LICENSE_EXCLUDE', d)

    try:
        included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude)
    except oe.license.LicenseError as exc:
        bb.fatal('%s: %s' % (d.getVar('PF', True), exc))

    p = d.getVar('P', True)
    if not included:
        bb.debug(1, 'archive-release-downloads: %s is excluded: %s' % (p, reason))
        sources_dir = d.getVar('ARCHIVE_RELEASE_EXCLUDED_DL_DIR', True)
    else:
        bb.debug(1, 'archive-release-downloads: %s is included: %s' % (p, reason))
        sources_dir = d.getVar('ARCHIVE_RELEASE_DL_DIR', True)

    dl_dir = d.getVar('DL_DIR', True)
    src_uri = d.getVar('SRC_URI', True).split()
    fetch = bb.fetch2.Fetch(src_uri, d)
    ud = fetch.ud

    bb.utils.mkdirhier(sources_dir)

    for u in ud.values():
        archive_download(u, dl_dir, sources_dir, d)
}

def archive_download(u, dl_dir, sources_dir, d):
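    # For git URLs that may carry submodules, recurse so that each submodule's
    # download is archived alongside the parent's.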
    if hasattr(u.method, 'process_submodules'):
        def archive_submodule(ud, url, module, modpath, workdir, d):
            url += ";bareclone=1;nobranch=1"
            newfetch = bb.fetch2.Fetch([url], d)
            for subud in newfetch.ud.values():
                return archive_download(subud, dl_dir, sources_dir, d)

        # If we're using a shallow mirror tarball it needs to be unpacked
        # temporarily so that we can examine the .gitmodules file
        if u.shallow and os.path.exists(u.fullshallow) and u.method.need_update(u, d):
            import tempfile
            with tempfile.TemporaryDirectory(dir=sources_dir) as tmpdir:
                bb.fetch2.runfetchcmd("tar -xzf %s" % u.fullshallow, d, workdir=tmpdir)
                u.method.process_submodules(u, tmpdir, archive_submodule, d)
        else:
            u.method.process_submodules(u, u.clonedir, archive_submodule, d)

    u.setup_localpath(d)
    local = u.localpath
    if local.endswith('.bb'):
        return
    elif not local.startswith(dl_dir + '/'):
        # For our purposes, we only want downloads, not what's in the layers
        return
    elif local.endswith('/'):
        local = local[:-1]

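    # Prefer a mirror tarball over a bare clone or other directory download:
    # newer fetchers expose a list via mirrortarballs, older ones a single
    # mirrortarball. Fall back to the raw local path if none exists on disk.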
    if hasattr(u, 'mirrortarballs'):
        tarballs = u.mirrortarballs
    elif u.mirrortarball:
        tarballs = [u.mirrortarball]
    else:
        tarballs = None

    if tarballs:
        for tarball in tarballs:
            if tarball:
                tarball_path = os.path.join(dl_dir, tarball)
                if os.path.exists(tarball_path):
                    local = tarball_path
                    break
        else:
            bb.warn('No mirror tarball found for %s, using %s' % (u.url, local))

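    # Symlink the chosen download, and its .done stamp if present, into the
    # release downloads tree rather than copying it.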
    oe.path.symlink(local, os.path.join(sources_dir, os.path.basename(local)), force=True)
    donestamp = local + '.done'
    if os.path.exists(donestamp):
        oe.path.symlink(donestamp, os.path.join(sources_dir, os.path.basename(donestamp)), force=True)

do_archive_release_downloads[dirs] = "${WORKDIR}"
addtask archive_release_downloads after do_fetch

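# The aggregate task does no work itself; its recrdeptask flag pulls in
# do_archive_release_downloads for every recipe in the target's recursive
# dependency tree, and nostamp ensures it reruns whenever requested.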
python do_archive_release_downloads_all() {
    pass
}
do_archive_release_downloads_all[recrdeptask] = "do_archive_release_downloads_all do_archive_release_downloads"
do_archive_release_downloads_all[recideptask] = "do_${BB_DEFAULT_TASK}"
do_archive_release_downloads_all[nostamp] = "1"
addtask archive_release_downloads_all after do_archive_release_downloads