#!/usr/bin/env python3
"""Fetch Yocto Project compatible layer information from the OpenEmbedded layer
index and the list of Yocto Project releases, then write a summary table of
layers, branches, descriptions and maintainers to parsed-layers.json."""

import json
import os
import time

import requests

LAYER_API_URL = "https://layers.openembedded.org/layerindex/api/layers/?filter=yp_compatible_version__isnull:false&format=json"
LAYERDATA = "layerdata.json"
RELEASE_URL = "https://docs.yoctoproject.org/releases.json"
RELEASEDATA = "releasedata.json"


def fetch_data(url, file_path, cache):
    """Return the JSON data from url, re-using file_path as a local cache for
    up to 'cache' seconds before downloading again."""
    if os.path.isfile(file_path):
        modification_time = os.path.getmtime(file_path)
        time_difference = time.time() - modification_time
    else:
        # No cached copy yet, force a download
        time_difference = cache + 1

    # Re-download if the cached copy is older than the cache value
    if time_difference > cache:
        # Modest timeout so a hung server does not block the script
        response = requests.get(url, timeout=30)
        if response.status_code == 200:
            with open(file_path, "w") as file:
                file.write(response.text)
            return json.loads(response.text)
        # Download failed: raise unless a stale cached copy can be used instead
        if not os.path.isfile(file_path):
            response.raise_for_status()

    with open(file_path, "r") as file:
        return json.load(file)


layers = fetch_data(LAYER_API_URL, LAYERDATA, 600)
releases = fetch_data(RELEASE_URL, RELEASEDATA, 600)
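
# Illustrative sketch (an assumption based on the fields accessed below, not
# fetched data) of what each releases.json entry is expected to carry: a
# "series" field (entries whose series is "full" are skipped) and a
# "release_codename" field, e.g.
#   {"series": "...", "release_codename": "...", ...}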

# Grab the recent release codenames and add master/main, so old branches can be ignored
active_releases = [
    e["release_codename"].lower() for e in releases if e["series"] != "full"
]
active_releases.append("master")
active_releases.append("main")

# Header row for the generated table
header = {
    "layer": "Layer",
    "branches": "Branches",
    "desc": "Description",
    "maintainers": "Maintainer(s)",
    "url": "Source Code",
}
parsed_layers = {}
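
# Illustrative sketch (an assumption based on the fields accessed in the loop
# below) of the shape of each layer index entry:
#   {
#       "layer": {"name": ..., "summary": ..., "description": ..., "vcs_web_url": ...},
#       "branch": {"name": ...},
#       "maintainers": [{"name": ..., "email": ...}, ...],
#   }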

for layer in layers:
    name = layer["layer"]["name"]
    if layer["branch"]["name"] not in active_releases:
        continue
    # Fill in any missing maintainer name/email fields so the lookups below cannot fail
    for maintainer in layer["maintainers"]:
        if "name" not in maintainer:
            maintainer["name"] = maintainer["email"]
        if "email" not in maintainer:
            maintainer["email"] = "NA"
    if name in parsed_layers:
        # Layer already seen on another branch: record the extra branch and
        # merge in any additional maintainers
        parsed_layers[name]["branches"] += ", " + layer["branch"]["name"]
        parsed_layers[name]["maintainers"].update(
            e["name"] for e in layer["maintainers"]
        )
    else:
        output = dict()
        output["layer"] = name
        output["branches"] = layer["branch"]["name"]
        # Use the shorter summary when the description is over 80 characters
        if len(layer["layer"]["description"]) > 80:
            output["desc"] = layer["layer"]["summary"]
        else:
            output["desc"] = layer["layer"]["description"]
        output["maintainers"] = {e["name"] for e in layer["maintainers"]}
        output["url"] = '<a href="{u}">{u}</a>'.format(u=layer["layer"]["vcs_web_url"])
        parsed_layers[name] = output

# Convert the sets of maintainers to a pretty string
for layer in parsed_layers:
    maintainers = list(parsed_layers[layer]["maintainers"])
    if len(maintainers) == 1:
        parsed_layers[layer]["maintainers"] = maintainers.pop()
    elif len(maintainers) == 0:
        print("No maintainers for ", layer)
        parsed_layers[layer]["maintainers"] = "Awaiting Maintainers"
    else:
        print(maintainers)
        parsed_layers[layer]["maintainers"] = "{} and {}".format(
            ", ".join(maintainers[:-1]), maintainers[-1]
        )

# Emit the header row first, then the layers sorted by name
sorted_parsed_layers = {"header": header}
for layername in sorted(parsed_layers.keys()):
    sorted_parsed_layers[layername] = parsed_layers[layername]
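
# For reference, the written file maps "header" plus each layer name to a row
# dict with "layer", "branches", "desc", "maintainers" and "url" keys
# (illustrative shape, key names taken from the code above):
#   {"header": {...}, "<layer-name>": {"layer": "<layer-name>", "branches": "...", ...}, ...}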
with open("parsed-layers.json", "w") as file:
    json.dump(sorted_parsed_layers, file)