Diffstat (limited to 'doc/sphinx/_exts/httpdomain/httpdomain.py')
-rw-r--r--  doc/sphinx/_exts/httpdomain/httpdomain.py  |  772
1 file changed, 772 insertions, 0 deletions
diff --git a/doc/sphinx/_exts/httpdomain/httpdomain.py b/doc/sphinx/_exts/httpdomain/httpdomain.py
new file mode 100644
index 0000000..59665a0
--- /dev/null
+++ b/doc/sphinx/_exts/httpdomain/httpdomain.py
@@ -0,0 +1,772 @@
1 | """ | ||
2 | sphinxcontrib.httpdomain | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The HTTP domain for documenting RESTful HTTP APIs. | ||
6 | |||
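A minimal usage sketch (the directives and field names used here are the ones
registered by this module; the path and descriptions are illustrative)::

    .. http:get:: /users/(int:user_id)/posts/(tag)

       Return the posts of a user, filtered by ``tag``.

       :query sort: optional sorting order
       :resheader Content-Type: ``application/json``
       :statuscode 200: no error
       :statuscode 404: user or tag not found
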
7 | :copyright: Copyright 2011 by Hong Minhee | ||
8 | :license: BSD, see LICENSE for details. | ||
9 | |||
10 | """ | ||
11 | |||
12 | import re | ||
13 | |||
14 | from docutils import nodes | ||
15 | |||
16 | from pygments.lexer import RegexLexer, bygroups | ||
17 | from pygments.lexers import get_lexer_by_name | ||
18 | from pygments.token import Literal, Text, Operator, Keyword, Name, Number | ||
19 | from pygments.util import ClassNotFound | ||
20 | |||
21 | from sphinx import addnodes | ||
22 | from sphinx.roles import XRefRole | ||
23 | from sphinx.domains import Domain, ObjType, Index | ||
24 | from sphinx.directives import ObjectDescription, directives | ||
25 | from sphinx.util.nodes import make_refnode | ||
26 | from sphinx.util.docfields import GroupedField, TypedField | ||
27 | |||
28 | # The env.get_doctree() lookup results in a pickle.load() call, which is | ||
29 | # expensive enough to dominate the runtime entirely when the number of | ||
30 | # endpoints and references is large. The doctrees are generated during the | ||
31 | # read phase, so caching their lookup during the write phase significantly | ||
32 | # improves performance. | ||
33 | # Currently sphinxcontrib-httpdomain does not declare parallel read support | ||
34 | # (parallel_read_safe defaults to False), so we can simply use a | ||
35 | # module-level global to hold the cache. | ||
36 | _doctree_cache = {} | ||
37 | |||
38 | |||
39 | class DocRef(object): | ||
40 | """Represents a reference to an abstract specification.""" | ||
41 | |||
42 | def __init__(self, base_url, anchor, section): | ||
43 | self.base_url = base_url | ||
44 | self.anchor = anchor | ||
45 | self.section = section | ||
46 | |||
47 | def __repr__(self): | ||
48 | """Returns the URL onto related specification section for the related | ||
49 | object.""" | ||
50 | return '{0}#{1}{2}'.format(self.base_url, self.anchor, self.section) | ||
51 | |||
52 | |||
53 | class RFC2616Ref(DocRef): | ||
54 | """Represents a reference to RFC2616. | ||
55 | In 2014, RFC2616 was replaced by multiple RFCs (7230-7237).""" | ||
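# For example, str(RFC2616Ref(14.15)) yields
# 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.15'
# (the section number is truncated to an integer for the page URL).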
56 | |||
57 | def __init__(self, section): | ||
58 | url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec{0:d}.html' | ||
59 | url = url.format(int(section)) | ||
60 | super(RFC2616Ref, self).__init__(url, 'sec', section) | ||
61 | |||
62 | |||
63 | class IETFRef(DocRef): | ||
64 | """Represents a reference to the specific IETF RFC.""" | ||
65 | |||
66 | def __init__(self, rfc, section): | ||
67 | url = 'https://tools.ietf.org/html/rfc{0:d}'.format(rfc) | ||
68 | super(IETFRef, self).__init__(url, 'section-', section) | ||
69 | |||
70 | |||
71 | class EventSourceRef(DocRef): | ||
72 | |||
73 | def __init__(self, section): | ||
74 | url = 'http://www.w3.org/TR/eventsource/' | ||
75 | super(EventSourceRef, self).__init__(url, section, '') | ||
76 | |||
77 | |||
78 | class CORSRef(DocRef): | ||
79 | """Represents a reference to W3 Cross-Origin Resource Sharing recommendation.""" | ||
80 | |||
81 | def __init__(self, name, type): | ||
82 | url = 'http://www.w3.org/TR/cors/' | ||
83 | super(CORSRef, self).__init__(url, name, '-' + type) | ||
84 | |||
85 | |||
86 | #: Mapping from lowercase HTTP method name to a :class:`DocRef` object that | ||
87 | #: holds the URL pointing to the section of the RFC which defines that | ||
88 | #: HTTP method. | ||
89 | METHOD_REFS = { | ||
90 | 'patch': IETFRef(5789, 2), | ||
91 | 'options': IETFRef(7231, '4.3.7'), | ||
92 | 'get': IETFRef(7231, '4.3.1'), | ||
93 | 'head': IETFRef(7231, '4.3.2'), | ||
94 | 'post': IETFRef(7231, '4.3.3'), | ||
95 | 'put': IETFRef(7231, '4.3.4'), | ||
96 | 'delete': IETFRef(7231, '4.3.5'), | ||
97 | 'trace': IETFRef(7231, '4.3.8'), | ||
98 | 'connect': IETFRef(7231, '4.3.6'), | ||
99 | 'copy': IETFRef(2518, 8.8), | ||
100 | 'any': '' | ||
101 | } | ||
102 | |||
103 | |||
104 | #: Mapping from HTTP header name to a :class:`DocRef` object that | ||
105 | #: holds the URL pointing to the related section of the RFC. | ||
106 | HEADER_REFS = { | ||
107 | 'Accept': IETFRef(7231, '5.3.2'), | ||
108 | 'Accept-Charset': IETFRef(7231, '5.3.3'), | ||
109 | 'Accept-Encoding': IETFRef(7231, '5.3.4'), | ||
110 | 'Accept-Language': IETFRef(7231, '5.3.5'), | ||
111 | 'Accept-Ranges': IETFRef(7233, 2.3), | ||
112 | 'Age': IETFRef(7234, 5.1), | ||
113 | 'Allow': IETFRef(7231, '7.4.1'), | ||
114 | 'Authorization': IETFRef(7235, 4.2), | ||
115 | 'Cache-Control': IETFRef(7234, 5.2), | ||
116 | 'Connection': IETFRef(7230, 6.1), | ||
117 | 'Content-Encoding': IETFRef(7231, '3.1.2.2'), | ||
118 | 'Content-Language': IETFRef(7231, '3.1.3.2'), | ||
119 | 'Content-Length': IETFRef(7230, '3.3.2'), | ||
120 | 'Content-Location': IETFRef(7231, '3.1.4.2'), | ||
121 | 'Content-MD5': RFC2616Ref(14.15), # removed | ||
122 | 'Content-Range': IETFRef(7233, 4.2), | ||
123 | 'Content-Type': IETFRef(7231, '3.1.1.5'), | ||
124 | 'Cookie': IETFRef(2109, '4.3.4'), # also RFC6265 section 5.4 | ||
125 | 'Date': IETFRef(7231, '7.1.1.2'), | ||
126 | 'Destination': IETFRef(2518, 9.3), | ||
127 | 'ETag': IETFRef(7232, 2.3), | ||
128 | 'Expect': IETFRef(7231, '5.1.1'), | ||
129 | 'Expires': IETFRef(7234, 5.3), | ||
130 | 'From': IETFRef(7231, '5.5.2'), | ||
131 | 'Host': IETFRef(7230, 5.4), | ||
132 | 'If-Match': IETFRef(7232, 3.1), | ||
133 | 'If-Modified-Since': IETFRef(7232, 3.3), | ||
134 | 'If-None-Match': IETFRef(7232, 3.2), | ||
135 | 'If-Range': IETFRef(7233, 3.2), | ||
136 | 'If-Unmodified-Since': IETFRef(7232, 3.4), | ||
137 | 'Last-Event-ID': EventSourceRef('last-event-id'), | ||
138 | 'Last-Modified': IETFRef(7232, 2.2), | ||
139 | 'Link': IETFRef(5988, '5'), | ||
140 | 'Location': IETFRef(7231, '7.1.2'), | ||
141 | 'Max-Forwards': IETFRef(7231, '5.1.2'), | ||
142 | 'Pragma': IETFRef(7234, 5.4), | ||
143 | 'Proxy-Authenticate': IETFRef(7235, 4.3), | ||
144 | 'Proxy-Authorization': IETFRef(7235, 4.4), | ||
145 | 'Range': IETFRef(7233, 3.1), | ||
146 | 'Referer': IETFRef(7231, '5.5.2'), | ||
147 | 'Retry-After': IETFRef(7231, '7.1.3'), | ||
148 | 'Server': IETFRef(7231, '7.4.2'), | ||
149 | 'Set-Cookie': IETFRef(2109, '4.2.2'), | ||
150 | 'TE': IETFRef(7230, 4.3), | ||
151 | 'Trailer': IETFRef(7230, 4.4), | ||
152 | 'Transfer-Encoding': IETFRef(7230, '3.3.1'), | ||
153 | 'Upgrade': IETFRef(7230, 6.7), | ||
154 | 'User-Agent': IETFRef(7231, '5.5.3'), | ||
155 | 'Vary': IETFRef(7231, '7.1.4'), | ||
156 | 'Via': IETFRef(7230, '5.7.1'), | ||
157 | 'Warning': IETFRef(7234, 5.5), | ||
158 | 'WWW-Authenticate': IETFRef(7235, 4.1), | ||
159 | 'Access-Control-Allow-Origin': CORSRef('access-control-allow-origin', | ||
160 | 'response-header'), | ||
161 | 'Access-Control-Allow-Credentials': CORSRef('access-control-allow-credentials', | ||
162 | 'response-header'), | ||
163 | 'Access-Control-Expose-Headers': CORSRef('access-control-expose-headers', | ||
164 | 'response-header'), | ||
165 | 'Access-Control-Max-Age': CORSRef('access-control-max-age', | ||
166 | 'response-header'), | ||
167 | 'Access-Control-Allow-Methods': CORSRef('access-control-allow-methods', | ||
168 | 'response-header'), | ||
169 | 'Access-Control-Allow-Headers': CORSRef('access-control-allow-headers', | ||
170 | 'response-header'), | ||
171 | 'Origin': CORSRef('origin', 'request-header'), | ||
172 | 'Access-Control-Request-Method': CORSRef('access-control-request-method', | ||
173 | 'request-header'), | ||
174 | 'Access-Control-Request-Headers': CORSRef('access-control-request-headers', | ||
175 | 'request-header'), | ||
176 | } | ||
177 | |||
178 | |||
179 | HTTP_STATUS_CODES = { | ||
180 | 100: 'Continue', | ||
181 | 101: 'Switching Protocols', | ||
182 | 102: 'Processing', | ||
183 | 200: 'OK', | ||
184 | 201: 'Created', | ||
185 | 202: 'Accepted', | ||
186 | 203: 'Non Authoritative Information', | ||
187 | 204: 'No Content', | ||
188 | 205: 'Reset Content', | ||
189 | 206: 'Partial Content', | ||
190 | 207: 'Multi Status', | ||
191 | 226: 'IM Used', # see RFC 3229 | ||
192 | 300: 'Multiple Choices', | ||
193 | 301: 'Moved Permanently', | ||
194 | 302: 'Found', | ||
195 | 303: 'See Other', | ||
196 | 304: 'Not Modified', | ||
197 | 305: 'Use Proxy', | ||
198 | 307: 'Temporary Redirect', | ||
199 | 308: 'Permanent Redirect', | ||
200 | 400: 'Bad Request', | ||
201 | 401: 'Unauthorized', | ||
202 | 402: 'Payment Required', # unused | ||
203 | 403: 'Forbidden', | ||
204 | 404: 'Not Found', | ||
205 | 405: 'Method Not Allowed', | ||
206 | 406: 'Not Acceptable', | ||
207 | 407: 'Proxy Authentication Required', | ||
208 | 408: 'Request Timeout', | ||
209 | 409: 'Conflict', | ||
210 | 410: 'Gone', | ||
211 | 411: 'Length Required', | ||
212 | 412: 'Precondition Failed', | ||
213 | 413: 'Request Entity Too Large', | ||
214 | 414: 'Request URI Too Long', | ||
215 | 415: 'Unsupported Media Type', | ||
216 | 416: 'Requested Range Not Satisfiable', | ||
217 | 417: 'Expectation Failed', | ||
218 | 418: "I'm a teapot", # see RFC 2324 | ||
219 | 422: 'Unprocessable Entity', | ||
220 | 423: 'Locked', | ||
221 | 424: 'Failed Dependency', | ||
222 | 426: 'Upgrade Required', | ||
223 | 429: 'Too Many Requests', | ||
224 | 449: 'Retry With', # proprietary MS extension | ||
225 | 451: 'Unavailable For Legal Reasons', | ||
226 | 500: 'Internal Server Error', | ||
227 | 501: 'Not Implemented', | ||
228 | 502: 'Bad Gateway', | ||
229 | 503: 'Service Unavailable', | ||
230 | 504: 'Gateway Timeout', | ||
231 | 505: 'HTTP Version Not Supported', | ||
232 | 507: 'Insufficient Storage', | ||
233 | 510: 'Not Extended' | ||
234 | } | ||
235 | |||
236 | WEBDAV_STATUS_CODES = [207, 422, 423, 424, 507] | ||
237 | |||
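# Matches typed path parameters in a resource signature, e.g. "(int:user_id)"
# or "(name)"; the optional converter goes to the "type" group and the
# parameter name to the "name" group.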
238 | http_sig_param_re = re.compile(r'\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)', | ||
239 | re.VERBOSE) | ||
240 | |||
241 | |||
242 | def sort_by_method(entries): | ||
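# Sort index entries by their HTTP method, following the order below;
# unknown methods sort after all known ones.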
243 | def cmp(item): | ||
244 | order = ['HEAD', 'GET', 'POST', 'PUT', 'DELETE', 'PATCH', | ||
245 | 'OPTIONS', 'TRACE', 'CONNECT', 'COPY', 'ANY'] | ||
246 | method = item[0].split(' ', 1)[0] | ||
247 | if method in order: | ||
248 | return order.index(method) | ||
249 | return 100 | ||
250 | return sorted(entries, key=cmp) | ||
251 | |||
252 | |||
253 | def http_resource_anchor(method, path): | ||
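# e.g. http_resource_anchor('get', '/users/(int:user_id)')
#      -> 'get--users-(int-user_id)'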
254 | path = re.sub(r'[{}]', '', re.sub(r'[<>:/]', '-', path)) | ||
255 | return method.lower() + '-' + path | ||
256 | |||
257 | |||
258 | class HTTPResource(ObjectDescription): | ||
259 | |||
260 | doc_field_types = [ | ||
261 | TypedField('parameter', label='Parameters', | ||
262 | names=('param', 'parameter', 'arg', 'argument'), | ||
263 | typerolename='obj', typenames=('paramtype', 'type')), | ||
264 | TypedField('jsonparameter', label='JSON Parameters', | ||
265 | names=('jsonparameter', 'jsonparam', 'json'), | ||
266 | typerolename='obj', typenames=('jsonparamtype', 'jsontype')), | ||
267 | TypedField('requestjsonobject', label='Request JSON Object', | ||
268 | names=('reqjsonobj', 'reqjson', '<jsonobj', '<json'), | ||
269 | typerolename='obj', typenames=('reqjsonobj', '<jsonobj')), | ||
270 | TypedField('requestjsonarray', label='Request JSON Array of Objects', | ||
271 | names=('reqjsonarr', '<jsonarr'), | ||
272 | typerolename='obj', | ||
273 | typenames=('reqjsonarrtype', '<jsonarrtype')), | ||
274 | TypedField('responsejsonobject', label='Response JSON Object', | ||
275 | names=('resjsonobj', 'resjson', '>jsonobj', '>json'), | ||
276 | typerolename='obj', typenames=('resjsonobj', '>jsonobj')), | ||
277 | TypedField('responsejsonarray', label='Response JSON Array of Objects', | ||
278 | names=('resjsonarr', '>jsonarr'), | ||
279 | typerolename='obj', | ||
280 | typenames=('resjsonarrtype', '>jsonarrtype')), | ||
281 | TypedField('queryparameter', label='Query Parameters', | ||
282 | names=('queryparameter', 'queryparam', 'qparam', 'query'), | ||
283 | typerolename='obj', | ||
284 | typenames=('queryparamtype', 'querytype', 'qtype')), | ||
285 | GroupedField('formparameter', label='Form Parameters', | ||
286 | names=('formparameter', 'formparam', 'fparam', 'form')), | ||
287 | GroupedField('requestheader', label='Request Headers', | ||
288 | rolename='header', | ||
289 | names=('<header', 'reqheader', 'requestheader')), | ||
290 | GroupedField('responseheader', label='Response Headers', | ||
291 | rolename='header', | ||
292 | names=('>header', 'resheader', 'responseheader')), | ||
293 | GroupedField('statuscode', label='Status Codes', | ||
294 | rolename='statuscode', | ||
295 | names=('statuscode', 'status', 'code')) | ||
296 | ] | ||
297 | |||
298 | option_spec = { | ||
299 | 'deprecated': directives.flag, | ||
300 | 'noindex': directives.flag, | ||
301 | 'synopsis': lambda x: x, | ||
302 | } | ||
303 | |||
304 | method = NotImplemented | ||
305 | |||
306 | def handle_signature(self, sig, signode): | ||
307 | method = self.method.upper() + ' ' | ||
308 | signode += addnodes.desc_name(method, method) | ||
309 | offset = 0 | ||
310 | path = None | ||
311 | for match in http_sig_param_re.finditer(sig): | ||
312 | path = sig[offset:match.start()] | ||
313 | signode += addnodes.desc_name(path, path) | ||
314 | params = addnodes.desc_parameterlist() | ||
315 | typ = match.group('type') | ||
316 | if typ: | ||
317 | typ += ': ' | ||
318 | params += addnodes.desc_annotation(typ, typ) | ||
319 | name = match.group('name') | ||
320 | params += addnodes.desc_parameter(name, name) | ||
321 | signode += params | ||
322 | offset = match.end() | ||
323 | if offset < len(sig): | ||
324 | path = sig[offset:len(sig)] | ||
325 | signode += addnodes.desc_name(path, path) | ||
326 | assert path is not None, 'no matches for sig: %s' % sig | ||
327 | fullname = self.method.upper() + ' ' + path | ||
328 | signode['method'] = self.method | ||
329 | signode['path'] = sig | ||
330 | signode['fullname'] = fullname | ||
331 | return (fullname, self.method, sig) | ||
332 | |||
333 | def needs_arglist(self): | ||
334 | return False | ||
335 | |||
336 | def add_target_and_index(self, name_cls, sig, signode): | ||
337 | signode['ids'].append(http_resource_anchor(*name_cls[1:])) | ||
338 | if 'noindex' not in self.options: | ||
339 | self.env.domaindata['http'][self.method][sig] = ( | ||
340 | self.env.docname, | ||
341 | self.options.get('synopsis', ''), | ||
342 | 'deprecated' in self.options) | ||
343 | |||
344 | def get_index_text(self, modname, name): | ||
345 | return '' | ||
346 | |||
347 | |||
348 | class HTTPOptions(HTTPResource): | ||
349 | |||
350 | method = 'options' | ||
351 | |||
352 | |||
353 | class HTTPHead(HTTPResource): | ||
354 | |||
355 | method = 'head' | ||
356 | |||
357 | |||
358 | class HTTPPatch(HTTPResource): | ||
359 | |||
360 | method = 'patch' | ||
361 | |||
362 | |||
363 | class HTTPPost(HTTPResource): | ||
364 | |||
365 | method = 'post' | ||
366 | |||
367 | |||
368 | class HTTPGet(HTTPResource): | ||
369 | |||
370 | method = 'get' | ||
371 | |||
372 | |||
373 | class HTTPPut(HTTPResource): | ||
374 | |||
375 | method = 'put' | ||
376 | |||
377 | |||
378 | class HTTPDelete(HTTPResource): | ||
379 | |||
380 | method = 'delete' | ||
381 | |||
382 | |||
383 | class HTTPTrace(HTTPResource): | ||
384 | |||
385 | method = 'trace' | ||
386 | |||
387 | |||
388 | class HTTPConnect(HTTPResource): | ||
389 | |||
390 | method = 'connect' | ||
391 | |||
392 | |||
393 | class HTTPCopy(HTTPResource): | ||
394 | |||
395 | method = 'copy' | ||
396 | |||
397 | |||
398 | class HTTPAny(HTTPResource): | ||
399 | |||
400 | method = 'any' | ||
401 | |||
402 | |||
403 | class HTTPXRefRole(XRefRole): | ||
404 | |||
405 | def __init__(self, method, **kwargs): | ||
406 | XRefRole.__init__(self, **kwargs) | ||
407 | self.method = method | ||
408 | |||
409 | def process_link(self, env, refnode, has_explicit_title, title, target): | ||
410 | if not has_explicit_title: | ||
411 | title = self.method.upper() + ' ' + title | ||
412 | return title, target | ||
413 | |||
414 | |||
415 | class HTTPXRefMethodRole(XRefRole): | ||
416 | |||
417 | def result_nodes(self, document, env, node, is_ref): | ||
418 | method = node[0][0].lower() | ||
419 | rawsource = node[0].rawsource | ||
420 | config = env.domains['http'].env.config | ||
421 | if method not in METHOD_REFS: | ||
422 | if not config['http_strict_mode']: | ||
423 | return [nodes.emphasis(method, method)], [] | ||
424 | reporter = document.reporter | ||
425 | msg = reporter.error('%s is not a valid HTTP method' % method, | ||
426 | line=node.line) | ||
427 | prb = nodes.problematic(method, method) | ||
428 | return [prb], [msg] | ||
429 | url = str(METHOD_REFS[method]) | ||
430 | if not url: | ||
431 | return [nodes.emphasis(method, method)], [] | ||
432 | node = nodes.reference(rawsource, method.upper(), refuri=url) | ||
433 | return [node], [] | ||
434 | |||
435 | |||
436 | class HTTPXRefStatusRole(XRefRole): | ||
437 | |||
438 | def result_nodes(self, document, env, node, is_ref): | ||
439 | def get_code_status(text): | ||
440 | if text.isdigit(): | ||
441 | code = int(text) | ||
442 | return code, HTTP_STATUS_CODES.get(code) | ||
443 | else: | ||
444 | try: | ||
445 | code, status = re.split(r'\s', text.strip(), 1) | ||
446 | code = int(code) | ||
447 | except ValueError: | ||
448 | return None, None | ||
449 | known_status = HTTP_STATUS_CODES.get(code) | ||
450 | if known_status is None: | ||
451 | return code, None | ||
452 | elif known_status.lower() != status.lower(): | ||
453 | return code, None | ||
454 | else: | ||
455 | return code, status | ||
456 | |||
457 | def report_unknown_code(): | ||
458 | if not config['http_strict_mode']: | ||
459 | return [nodes.emphasis(text, text)], [] | ||
460 | reporter = document.reporter | ||
461 | msg = reporter.error('%d is an unknown HTTP status code' % code, | ||
462 | line=node.line) | ||
463 | prb = nodes.problematic(text, text) | ||
464 | return [prb], [msg] | ||
465 | |||
466 | def report_invalid_code(): | ||
467 | if not config['http_strict_mode']: | ||
468 | return [nodes.emphasis(text, text)], [] | ||
469 | reporter = document.reporter | ||
470 | msg = reporter.error( | ||
471 | 'HTTP status code must be an integer (e.g. `200`) or ' | ||
472 | 'start with an integer (e.g. `200 OK`); %r is invalid' % | ||
473 | text, | ||
474 | line=node.line | ||
475 | ) | ||
476 | prb = nodes.problematic(text, text) | ||
477 | return [prb], [msg] | ||
478 | |||
479 | text = node[0][0] | ||
480 | rawsource = node[0].rawsource | ||
481 | config = env.domains['http'].env.config | ||
482 | |||
483 | code, status = get_code_status(text) | ||
484 | if code is None: | ||
485 | return report_invalid_code() | ||
486 | elif status is None: | ||
487 | return report_unknown_code() | ||
488 | elif code == 226: | ||
489 | url = 'http://www.ietf.org/rfc/rfc3229.txt' | ||
490 | elif code == 418: | ||
491 | url = 'http://www.ietf.org/rfc/rfc2324.txt' | ||
492 | elif code == 429: | ||
493 | url = 'http://tools.ietf.org/html/rfc6585#section-4' | ||
494 | elif code == 449: | ||
495 | url = 'http://msdn.microsoft.com/en-us/library/dd891478(v=prot.10).aspx' | ||
496 | elif code == 451: | ||
497 | url = 'http://www.ietf.org/rfc/rfc7725.txt' | ||
498 | elif code in WEBDAV_STATUS_CODES: | ||
499 | url = 'http://tools.ietf.org/html/rfc4918#section-11.%d' % (WEBDAV_STATUS_CODES.index(code) + 1) | ||
500 | elif code in HTTP_STATUS_CODES: | ||
501 | url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html' \ | ||
502 | '#sec10.' + ('%d.%d' % (code // 100, 1 + code % 100)) | ||
503 | else: | ||
504 | url = '' | ||
505 | node = nodes.reference(rawsource, '%d %s' % (code, status), refuri=url) | ||
506 | return [node], [] | ||
507 | |||
508 | |||
509 | class HTTPXRefHeaderRole(XRefRole): | ||
510 | |||
511 | def result_nodes(self, document, env, node, is_ref): | ||
512 | header = node[0][0] | ||
513 | rawsource = node[0].rawsource | ||
514 | if header not in HEADER_REFS: | ||
515 | header = '-'.join(i.title() for i in header.split('-')) | ||
516 | if header not in HEADER_REFS: | ||
517 | return [nodes.emphasis(header, header)], [] | ||
518 | url = str(HEADER_REFS[header]) | ||
519 | node = nodes.reference(rawsource, header, refuri=url) | ||
520 | return [node], [] | ||
521 | |||
522 | |||
523 | class HTTPIndex(Index): | ||
524 | |||
525 | name = 'routingtable' | ||
526 | localname = 'HTTP Routing Table' | ||
527 | shortname = 'routing table' | ||
528 | |||
529 | def __init__(self, *args, **kwargs): | ||
530 | super(HTTPIndex, self).__init__(*args, **kwargs) | ||
531 | |||
532 | self.ignore = [ | ||
533 | [l for l in x.split('/') if l] | ||
534 | for x in self.domain.env.config['http_index_ignore_prefixes']] | ||
535 | self.ignore.sort(reverse=True) | ||
536 | |||
537 | # During HTML generation these values are picked up from the class, | ||
538 | # not from the instance, so we have to hack the system a little. | ||
539 | cls = self.__class__ | ||
540 | cls.shortname = self.domain.env.config['http_index_shortname'] | ||
541 | cls.localname = self.domain.env.config['http_index_localname'] | ||
542 | |||
543 | def grouping_prefix(self, path): | ||
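# For example, with http_index_ignore_prefixes = ['/api'], the path
# '/api/users/posts' is grouped under '/api/users' rather than '/api'.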
544 | letters = [x for x in path.split('/') if x] | ||
545 | for prefix in self.ignore: | ||
546 | if letters[:len(prefix)] == prefix: | ||
547 | return '/' + '/'.join(letters[:len(prefix) + 1]) | ||
548 | return '/%s' % (letters[0] if letters else '',) | ||
549 | |||
550 | def generate(self, docnames=None): | ||
551 | content = {} | ||
552 | items = ((method, path, info) | ||
553 | for method, routes in self.domain.routes.items() | ||
554 | for path, info in routes.items()) | ||
555 | items = sorted(items, key=lambda item: item[1]) | ||
556 | for method, path, info in items: | ||
557 | entries = content.setdefault(self.grouping_prefix(path), []) | ||
558 | entries.append([ | ||
559 | method.upper() + ' ' + path, 0, info[0], | ||
560 | http_resource_anchor(method, path), | ||
561 | '', 'Deprecated' if info[2] else '', info[1] | ||
562 | ]) | ||
563 | items = sorted( | ||
564 | (path, sort_by_method(entries)) | ||
565 | for path, entries in content.items() | ||
566 | ) | ||
567 | return (items, True) | ||
568 | |||
569 | |||
570 | class HTTPDomain(Domain): | ||
571 | """HTTP domain.""" | ||
572 | |||
573 | name = 'http' | ||
574 | label = 'HTTP' | ||
575 | |||
576 | object_types = { | ||
577 | 'options': ObjType('options', 'options', 'obj'), | ||
578 | 'head': ObjType('head', 'head', 'obj'), | ||
579 | 'post': ObjType('post', 'post', 'obj'), | ||
580 | 'get': ObjType('get', 'get', 'obj'), | ||
581 | 'put': ObjType('put', 'put', 'obj'), | ||
582 | 'patch': ObjType('patch', 'patch', 'obj'), | ||
583 | 'delete': ObjType('delete', 'delete', 'obj'), | ||
584 | 'trace': ObjType('trace', 'trace', 'obj'), | ||
585 | 'connect': ObjType('connect', 'connect', 'obj'), | ||
586 | 'copy': ObjType('copy', 'copy', 'obj'), | ||
587 | 'any': ObjType('any', 'any', 'obj') | ||
588 | } | ||
589 | |||
590 | directives = { | ||
591 | 'options': HTTPOptions, | ||
592 | 'head': HTTPHead, | ||
593 | 'post': HTTPPost, | ||
594 | 'get': HTTPGet, | ||
595 | 'put': HTTPPut, | ||
596 | 'patch': HTTPPatch, | ||
597 | 'delete': HTTPDelete, | ||
598 | 'trace': HTTPTrace, | ||
599 | 'connect': HTTPConnect, | ||
600 | 'copy': HTTPCopy, | ||
601 | 'any': HTTPAny | ||
602 | } | ||
603 | |||
604 | roles = { | ||
605 | 'options': HTTPXRefRole('options'), | ||
606 | 'head': HTTPXRefRole('head'), | ||
607 | 'post': HTTPXRefRole('post'), | ||
608 | 'get': HTTPXRefRole('get'), | ||
609 | 'put': HTTPXRefRole('put'), | ||
610 | 'patch': HTTPXRefRole('patch'), | ||
611 | 'delete': HTTPXRefRole('delete'), | ||
612 | 'trace': HTTPXRefRole('trace'), | ||
613 | 'connect': HTTPXRefRole('connect'), | ||
614 | 'copy': HTTPXRefRole('copy'), | ||
615 | 'any': HTTPXRefRole('any'), | ||
616 | 'statuscode': HTTPXRefStatusRole(), | ||
617 | 'method': HTTPXRefMethodRole(), | ||
618 | 'header': HTTPXRefHeaderRole() | ||
619 | } | ||
620 | |||
621 | initial_data = { | ||
622 | 'options': {}, # path: (docname, synopsis, deprecated) | ||
623 | 'head': {}, | ||
624 | 'post': {}, | ||
625 | 'get': {}, | ||
626 | 'put': {}, | ||
627 | 'patch': {}, | ||
628 | 'delete': {}, | ||
629 | 'trace': {}, | ||
630 | 'connect': {}, | ||
631 | 'copy': {}, | ||
632 | 'any': {} | ||
633 | } | ||
634 | |||
635 | indices = [] | ||
636 | |||
637 | @property | ||
638 | def routes(self): | ||
639 | return dict((key, self.data[key]) for key in self.object_types) | ||
640 | |||
641 | def clear_doc(self, docname): | ||
642 | for typ, routes in self.routes.items(): | ||
643 | for path, info in list(routes.items()): | ||
644 | if info[0] == docname: | ||
645 | del routes[path] | ||
646 | |||
647 | def resolve_xref(self, env, fromdocname, builder, typ, target, | ||
648 | node, contnode): | ||
649 | try: | ||
650 | info = self.data[str(typ)][target] | ||
651 | except KeyError: | ||
652 | text = contnode.rawsource | ||
653 | role = self.roles.get(typ) | ||
654 | if role is None: | ||
655 | return None | ||
656 | |||
657 | if fromdocname not in _doctree_cache: | ||
658 | _doctree_cache[fromdocname] = env.get_doctree(fromdocname) | ||
659 | doctree = _doctree_cache[fromdocname] | ||
660 | |||
661 | resnode = role.result_nodes(doctree, env, node, None)[0][0] | ||
662 | if isinstance(resnode, addnodes.pending_xref): | ||
663 | text = node[0][0] | ||
664 | reporter = doctree.reporter | ||
665 | reporter.warning('Cannot resolve reference to %r' % text, | ||
666 | line=node.line) | ||
667 | return None | ||
668 | return resnode | ||
669 | else: | ||
670 | anchor = http_resource_anchor(typ, target) | ||
671 | title = typ.upper() + ' ' + target | ||
672 | return make_refnode(builder, fromdocname, info[0], anchor, | ||
673 | contnode, title) | ||
674 | |||
675 | def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode): | ||
676 | """Resolve the pending_xref *node* with the given *target*. | ||
677 | |||
678 | The reference comes from an "any" or similar role, which means that Sphinx | ||
679 | doesn't know the type. | ||
680 | |||
681 | For now sphinxcontrib-httpdomain doesn't resolve any xref nodes. | ||
682 | |||
683 | :return: | ||
684 | list of tuples ``('domain:role', newnode)``, where ``'domain:role'`` | ||
685 | is the name of a role that could have created the same reference. | ||
686 | """ | ||
687 | return [] | ||
688 | |||
689 | def get_objects(self): | ||
690 | for method, routes in self.routes.items(): | ||
691 | for path, info in routes.items(): | ||
692 | anchor = http_resource_anchor(method, path) | ||
693 | yield (path, path, method, info[0], anchor, 1) | ||
694 | |||
695 | |||
696 | class HTTPLexer(RegexLexer): | ||
697 | """Lexer for HTTP sessions.""" | ||
698 | |||
699 | name = 'HTTP' | ||
700 | aliases = ['http'] | ||
701 | |||
702 | flags = re.DOTALL | ||
703 | |||
704 | def header_callback(self, match): | ||
705 | if match.group(1).lower() == 'content-type': | ||
706 | content_type = match.group(5).strip() | ||
707 | if ';' in content_type: | ||
708 | content_type = content_type[:content_type.find(';')].strip() | ||
709 | self.content_type = content_type | ||
710 | yield match.start(1), Name.Attribute, match.group(1) | ||
711 | yield match.start(2), Text, match.group(2) | ||
712 | yield match.start(3), Operator, match.group(3) | ||
713 | yield match.start(4), Text, match.group(4) | ||
714 | yield match.start(5), Literal, match.group(5) | ||
715 | yield match.start(6), Text, match.group(6) | ||
716 | |||
717 | def continuous_header_callback(self, match): | ||
718 | yield match.start(1), Text, match.group(1) | ||
719 | yield match.start(2), Literal, match.group(2) | ||
720 | yield match.start(3), Text, match.group(3) | ||
721 | |||
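# Highlight the message body with a Pygments lexer matching the Content-Type
# captured by header_callback(); fall back to plain text if none is found.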
722 | def content_callback(self, match): | ||
723 | content_type = getattr(self, 'content_type', None) | ||
724 | content = match.group() | ||
725 | offset = match.start() | ||
726 | if content_type: | ||
727 | from pygments.lexers import get_lexer_for_mimetype | ||
728 | try: | ||
729 | lexer = get_lexer_for_mimetype(content_type) | ||
730 | except ClassNotFound: | ||
731 | pass | ||
732 | else: | ||
733 | for idx, token, value in lexer.get_tokens_unprocessed(content): | ||
734 | yield offset + idx, token, value | ||
735 | return | ||
736 | yield offset, Text, content | ||
737 | |||
738 | tokens = { | ||
739 | 'root': [ | ||
740 | (r'(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE|COPY)( +)([^ ]+)( +)' | ||
741 | r'(HTTPS?)(/)(1\.[01])(\r?\n|$)', | ||
742 | bygroups(Name.Function, Text, Name.Namespace, Text, | ||
743 | Keyword.Reserved, Operator, Number, Text), | ||
744 | 'headers'), | ||
745 | (r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)', | ||
746 | bygroups(Keyword.Reserved, Operator, Number, Text, Number, | ||
747 | Text, Name.Exception, Text), | ||
748 | 'headers'), | ||
749 | ], | ||
750 | 'headers': [ | ||
751 | (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback), | ||
752 | (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback), | ||
753 | (r'\r?\n', Text, 'content') | ||
754 | ], | ||
755 | 'content': [ | ||
756 | (r'.+', content_callback) | ||
757 | ] | ||
758 | } | ||
759 | |||
760 | |||
761 | def setup(app): | ||
762 | app.add_domain(HTTPDomain) | ||
763 | |||
764 | try: | ||
765 | get_lexer_by_name('http') | ||
766 | except ClassNotFound: | ||
767 | app.add_lexer('http', HTTPLexer()) | ||
768 | app.add_config_value('http_index_ignore_prefixes', [], None) | ||
769 | app.add_config_value('http_index_shortname', 'routing table', True) | ||
770 | app.add_config_value('http_index_localname', 'HTTP Routing Table', True) | ||
771 | app.add_config_value('http_strict_mode', True, None) | ||
772 | app.add_config_value('http_headers_ignore_prefixes', ['X-'], None) | ||
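# Typical conf.py usage (illustrative; the exact module path depends on how
# doc/sphinx/_exts is put on sys.path):
#
#     extensions = ['httpdomain.httpdomain']
#     http_index_ignore_prefixes = ['/_api']
#     http_strict_mode = True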