aboutsummaryrefslogtreecommitdiffstats
path: root/lib/python2.7/site-packages/autobuilder/lib/wiki.py
blob: 7bdd4e3e9f45536bc9553bc5e3227f8b77b9fd16 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
'''
Created on Dec 13, 2016

__author__ = "Joshua Lock"
__copyright__ = "Copyright 2016, Intel Corp."
__credits__ = ["Joshua Lock"]
__license__ = "GPL"
__version__ = "2.0"
__maintainer__ = "Joshua Lock"
__email__ = "joshua.g.lock@intel.com"
'''

import codecs
import hashlib
import time
import requests
from twisted.python import log


class YPWiki(object):
    """Minimal MediaWiki API client used by the Yocto Project autobuilder
    to prepend build-log entries to wiki pages.

    Only the small API subset the autobuilder needs is implemented:
    logging in, fetching a page's current wikitext and posting an
    updated revision.
    """

    # Maximum number of attempts retry_request() makes before giving up.
    MAX_TRIES = 5
    # Per-request timeout, in seconds, passed to requests.
    TIMEOUT = 60

    def __init__(self, wiki_uri, wiki_un, wiki_pass):
        """
        @type   wiki_uri:   string -- URI of the wiki's api.php endpoint
        @type   wiki_un:    string -- wiki account user name
        @type   wiki_pass:  string -- wiki account password
        """
        self.wiki_uri = wiki_uri
        self.wiki_un = wiki_un
        self.wiki_pass = wiki_pass

    @staticmethod
    def retry_request(requesturl, **kwargs):
        """
        Rather than failing when a request to a 'requesturl' throws an
        exception retry again a minute later. Perform this retry no more than
        MAX_TRIES times.

        @type   requesturl:  string

        Returns the requests.Response, or None if every attempt raised.
        Note a Response for an HTTP error status is falsy in requests, so
        error statuses are retried too and the last such Response is
        returned after the final attempt.
        """
        kwargs['timeout'] = YPWiki.TIMEOUT

        def try_request():
            # Timeout inherits from RequestException, but keep both for
            # compatibility with older requests releases.
            try:
                return requests.get(requesturl, **kwargs)
            except (requests.exceptions.RequestException,
                    requests.exceptions.Timeout):
                return None

        req = None
        for attempt in range(YPWiki.MAX_TRIES):
            if attempt > 0:
                time.sleep(60)
            req = try_request()
            if req:
                break

        return req

    @staticmethod
    def parse_json(response):
        """
        This method handles stripping UTF-8 BOM from the beginning of responses
        from the Yocto Project wiki.

        http://en.wikipedia.org/wiki/Byte_Order_Mark
        http://bugs.python.org/issue18958

        @type   response:   requests.Response
        """
        bom = codecs.BOM_UTF8

        # Response.content is a byte string on modern requests (e.g.
        # 2.1.10 on Fedora 23) but was a unicode string on very old
        # releases (0.8.2 on Ubuntu 12.04). codecs.BOM_UTF8 is a byte
        # string, so make the BOM marker match the content's type.
        if not isinstance(response.content, bytes):
            bom = bom.decode('utf-8')

        # If we discover a BOM set the encoding appropriately so that the
        # built in decoding routines in requests work correctly.
        if response.content.startswith(bom):
            response.encoding = 'utf-8-sig'

        return response.json()

    def login(self):
        """
        Login to the wiki and return cookies for the logged in session.

        Returns None on network failure or when the wiki's reply does not
        contain a login token (e.g. bad credentials / unexpected response).
        """
        payload = {
            'action': 'login',
            'lgname': self.wiki_un,
            'lgpassword': self.wiki_pass,
            'utf8': '',
            'format': 'json'
        }

        try:
            req1 = requests.post(self.wiki_uri, data=payload,
                                 timeout=self.TIMEOUT)
        except (requests.exceptions.RequestException,
                requests.exceptions.Timeout):
            return None

        parsed = self.parse_json(req1)
        # MediaWiki only includes a token when the first login step
        # succeeded -- guard instead of raising KeyError, consistent with
        # the other failure paths in this method.
        login_token = parsed.get('login', {}).get('token')
        if not login_token:
            return None
        payload['lgtoken'] = login_token.encode('utf-8')

        try:
            req2 = requests.post(self.wiki_uri, data=payload,
                                 cookies=req1.cookies, timeout=self.TIMEOUT)
        except (requests.exceptions.RequestException,
                requests.exceptions.Timeout):
            return None

        return req2.cookies.copy()

    def get_content(self, wiki_page):
        """
        Get the current content of the 'wiki_page' -- to make the wiki page
        as useful as possible the most recent log entry should be at the top,
        to that end we need to edit the whole page so that we can insert the
        new entry after the log but before the other entries.

        This method fetches the current page content, splits out the blurb and
        returns a pair:
        1) the blurb
        2) the current entries

        Returns (None, None) when the page could not be fetched.

        @type   wiki_page:  string
        """

        pm = '?format=json&action=query&prop=revisions&rvprop=content&titles='

        req = self.retry_request(self.wiki_uri + pm + wiki_page)
        if not req:
            return None, None

        parsed = self.parse_json(req)
        # The result is keyed by pageid; a single-title query has exactly
        # one entry, but sort for a deterministic pick regardless.
        pageid = sorted(parsed['query']['pages'].keys())[-1].encode('utf-8')
        content = parsed['query']['pages'][pageid]['revisions'][0]['*']
        content = content.encode('utf-8')
        # partition() keeps working -- unlike the two-element unpack of
        # split() it replaces -- when the page has no '==' heading at all;
        # in that case everything is blurb and entries is empty.
        blurb, marker, entries = content.partition('==')
        # ensure we keep only a single newline after the blurb
        blurb = blurb.strip() + "\n"
        entries = marker + entries

        return blurb, entries

    def post_entry(self, wiki_page, content, summary, cookies):
        """
        Post the new page contents 'content' to  the page title 'wiki_page'
        with a 'summary' using the login credentials from 'cookies'

        Returns True when the wiki reports a successful edit, else False.

        @type   wiki_page:  string
        @type   content:    string
        @type   summary:    string
        @type   cookies:    CookieJar
        """

        # Fetch an edit token (and the cookies that go with it) first.
        params = ("?format=json&action=query&prop=info|revisions"
                  "&intoken=edit&rvprop=timestamp&titles=")
        req = self.retry_request(self.wiki_uri + params + wiki_page,
                                 cookies=cookies)
        if not req:
            return False

        parsed = self.parse_json(req)
        pageid = sorted(parsed['query']['pages'].keys())[-1].encode('utf-8')
        edit_token = parsed['query']['pages'][pageid]['edittoken']
        edit_token = edit_token.encode('utf-8')

        # The edit must carry both the session cookies and any cookies
        # set by the token request.
        edit_cookie = cookies.copy()
        edit_cookie.update(req.cookies)

        # md5 lets the wiki detect a corrupted/truncated upload.
        content_hash = hashlib.md5(content).hexdigest()

        payload = {
            'action': 'edit',
            'assert': 'user',
            'title': wiki_page,
            'summary': summary,
            'text': content,
            'md5': content_hash,
            'token': edit_token,
            'utf8': '',
            'format': 'json'
        }

        try:
            req = requests.post(self.wiki_uri, data=payload,
                                cookies=edit_cookie, timeout=self.TIMEOUT)
        except (requests.exceptions.RequestException,
                requests.exceptions.Timeout):
            return False

        if req.status_code != requests.codes.ok:
            log.err("Unexpected status code %s received when trying to post"
                    " an entry to the wiki." % req.status_code)
            return False

        result = self.parse_json(req)
        status = result.get('edit', {}).get('result', '').encode('utf-8')
        return status == 'Success'