httpdomain.py
1 """ 2 sphinxcontrib.httpdomain 3 ~~~~~~~~~~~~~~~~~~~~~~~~ 4 5 The HTTP domain for documenting RESTful HTTP APIs. 6 7 :copyright: Copyright 2011 by Hong Minhee 8 :license: BSD, see LICENSE for details. 9 10 """ 11 12 import re 13 14 from docutils import nodes 15 16 from pygments.lexer import RegexLexer, bygroups 17 from pygments.lexers import get_lexer_by_name 18 from pygments.token import Literal, Text, Operator, Keyword, Name, Number 19 from pygments.util import ClassNotFound 20 21 from sphinx import addnodes 22 from sphinx.roles import XRefRole 23 from sphinx.domains import Domain, ObjType, Index 24 from sphinx.directives import ObjectDescription, directives 25 from sphinx.util.nodes import make_refnode 26 from sphinx.util.docfields import GroupedField, TypedField 27 28 # The env.get_doctree() lookup results in a pickle.load() call which is 29 # expensive enough to dominate the runtime entirely when the number of endpoints 30 # and references is large enough. The doctrees are generated during the read- 31 # phase and we can cache their lookup during the write-phase significantly 32 # improving performance. 33 # Currently sphinxcontrib-httpdomain does not declare to support parallel read 34 # support (parallel_read_safe is the default False) so we can simply use a 35 # module global to hold the cache. 36 _doctree_cache = {} 37 38 39 class DocRef(object): 40 """Represents a reference to an abstract specification.""" 41 42 def __init__(self, base_url, anchor, section): 43 self.base_url = base_url 44 self.anchor = anchor 45 self.section = section 46 47 def __repr__(self): 48 """Returns the URL onto related specification section for the related 49 object.""" 50 return '{0}#{1}{2}'.format(self.base_url, self.anchor, self.section) 51 52 53 class RFC2616Ref(DocRef): 54 """Represents a reference to RFC2616. 55 In 2014, RFC2616 was replaced by multiple RFCs (7230-7237).""" 56 57 def __init__(self, section): 58 url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec{0:d}.html' 59 url = url.format(int(section)) 60 super(RFC2616Ref, self).__init__(url, 'sec', section) 61 62 63 class IETFRef(DocRef): 64 """Represents a reference to the specific IETF RFC.""" 65 66 def __init__(self, rfc, section): 67 url = 'https://tools.ietf.org/html/rfc{0:d}'.format(rfc) 68 super(IETFRef, self).__init__(url, 'section-', section) 69 70 71 class EventSourceRef(DocRef): 72 73 def __init__(self, section): 74 url = 'http://www.w3.org/TR/eventsource/' 75 super(EventSourceRef, self).__init__(url, section, '') 76 77 78 class CORSRef(DocRef): 79 """Represents a reference to W3 Cross-Origin Resource Sharing recommendation.""" 80 81 def __init__(self, name, type): 82 url = 'http://www.w3.org/TR/cors/' 83 super(CORSRef, self).__init__(url, name, '-' + type) 84 85 86 #: Mapping from lowercase HTTP method name to :class:`DocRef` object which 87 #: maintains the URL which points to the section of the RFC which defines that 88 #: HTTP method. 89 METHOD_REFS = { 90 'patch': IETFRef(5789, 2), 91 'options': IETFRef(7231, '4.3.7'), 92 'get': IETFRef(7231, '4.3.1'), 93 'head': IETFRef(7231, '4.3.2'), 94 'post': IETFRef(7231, '4.3.3'), 95 'put': IETFRef(7231, '4.3.4'), 96 'delete': IETFRef(7231, '4.3.5'), 97 'trace': IETFRef(7231, '4.3.8'), 98 'connect': IETFRef(7231, '4.3.6'), 99 'copy': IETFRef(2518, 8.8), 100 'any': '' 101 } 102 103 104 #: Mapping from HTTP header name to :class:`DocRef` object which 105 #: maintains the URL which points to the related section of the RFC. 

#: Mapping from HTTP header name to the :class:`DocRef` object that holds
#: the URL of the RFC (or W3C) section defining that header.
HEADER_REFS = {
    'Accept': IETFRef(7231, '5.3.2'),
    'Accept-Charset': IETFRef(7231, '5.3.3'),
    'Accept-Encoding': IETFRef(7231, '5.3.4'),
    'Accept-Language': IETFRef(7231, '5.3.5'),
    'Accept-Ranges': IETFRef(7233, 2.3),
    'Age': IETFRef(7234, 5.1),
    'Allow': IETFRef(7231, '7.4.1'),
    'Authorization': IETFRef(7235, 4.2),
    'Cache-Control': IETFRef(7234, 5.2),
    'Connection': IETFRef(7230, 6.1),
    'Content-Encoding': IETFRef(7231, '3.1.2.2'),
    'Content-Language': IETFRef(7231, '3.1.3.2'),
    'Content-Length': IETFRef(7230, '3.3.2'),
    'Content-Location': IETFRef(7231, '3.1.4.2'),
    'Content-MD5': RFC2616Ref(14.15),  # removed
    'Content-Range': IETFRef(7233, 4.2),
    'Content-Type': IETFRef(7231, '3.1.1.5'),
    'Cookie': IETFRef(2109, '4.3.4'),  # also RFC6265 section 5.4
    'Date': IETFRef(7231, '7.1.1.2'),
    'Destination': IETFRef(2518, 9.3),
    'ETag': IETFRef(7232, 2.3),
    'Expect': IETFRef(7231, '5.1.1'),
    'Expires': IETFRef(7234, 5.3),
    'From': IETFRef(7231, '5.5.2'),
    'Host': IETFRef(7230, 5.4),
    'If-Match': IETFRef(7232, 3.1),
    'If-Modified-Since': IETFRef(7232, 3.3),
    'If-None-Match': IETFRef(7232, 3.2),
    'If-Range': IETFRef(7233, 3.2),
    'If-Unmodified-Since': IETFRef(7232, 3.4),
    'Last-Event-ID': EventSourceRef('last-event-id'),
    'Last-Modified': IETFRef(7232, 2.2),
    'Link': IETFRef(5988, '5'),
    'Location': IETFRef(7231, '7.1.2'),
    'Max-Forwards': IETFRef(7231, '5.1.2'),
    'Pragma': IETFRef(7234, 5.4),
    'Proxy-Authenticate': IETFRef(7235, 4.3),
    'Proxy-Authorization': IETFRef(7235, 4.4),
    'Range': IETFRef(7233, 3.1),
    'Referer': IETFRef(7231, '5.5.2'),
    'Retry-After': IETFRef(7231, '7.1.3'),
    'Server': IETFRef(7231, '7.4.2'),
    'Set-Cookie': IETFRef(2109, '4.2.2'),
    'TE': IETFRef(7230, 4.3),
    'Trailer': IETFRef(7230, 4.4),
    'Transfer-Encoding': IETFRef(7230, '3.3.1'),
    'Upgrade': IETFRef(7230, 6.7),
    'User-Agent': IETFRef(7231, '5.5.3'),
    'Vary': IETFRef(7231, '7.1.4'),
    'Via': IETFRef(7230, '5.7.1'),
    'Warning': IETFRef(7234, 5.5),
    'WWW-Authenticate': IETFRef(7235, 4.1),
    'Access-Control-Allow-Origin': CORSRef('access-control-allow-origin',
                                           'response-header'),
    'Access-Control-Allow-Credentials': CORSRef('access-control-allow-credentials',
                                                'response-header'),
    'Access-Control-Expose-Headers': CORSRef('access-control-expose-headers',
                                             'response-header'),
    'Access-Control-Max-Age': CORSRef('access-control-max-age',
                                      'response-header'),
    'Access-Control-Allow-Methods': CORSRef('access-control-allow-methods',
                                            'response-header'),
    'Access-Control-Allow-Headers': CORSRef('access-control-allow-headers',
                                            'response-header'),
    'Origin': CORSRef('origin', 'request-header'),
    'Access-Control-Request-Method': CORSRef('access-control-request-method',
                                             'request-header'),
    'Access-Control-Request-Headers': CORSRef('access-control-request-headers',
                                              'request-header'),
}


HTTP_STATUS_CODES = {
    100: 'Continue',
    101: 'Switching Protocols',
    102: 'Processing',
    200: 'OK',
    201: 'Created',
    202: 'Accepted',
    203: 'Non Authoritative Information',
    204: 'No Content',
    205: 'Reset Content',
    206: 'Partial Content',
    207: 'Multi Status',
    226: 'IM Used',  # see RFC 3229
    300: 'Multiple Choices',
    301: 'Moved Permanently',
    302: 'Found',
    303: 'See Other',
    304: 'Not Modified',
    305: 'Use Proxy',
    307: 'Temporary Redirect',
    308: 'Permanent Redirect',
    400: 'Bad Request',
    401: 'Unauthorized',
    402: 'Payment Required',  # unused
    403: 'Forbidden',
    404: 'Not Found',
    405: 'Method Not Allowed',
    406: 'Not Acceptable',
    407: 'Proxy Authentication Required',
    408: 'Request Timeout',
    409: 'Conflict',
    410: 'Gone',
    411: 'Length Required',
    412: 'Precondition Failed',
    413: 'Request Entity Too Large',
    414: 'Request URI Too Long',
    415: 'Unsupported Media Type',
    416: 'Requested Range Not Satisfiable',
    417: 'Expectation Failed',
    418: "I'm a teapot",  # see RFC 2324
    422: 'Unprocessable Entity',
    423: 'Locked',
    424: 'Failed Dependency',
    425: 'Too Early',  # RFC 8470
    426: 'Upgrade Required',
    429: 'Too Many Requests',
    449: 'Retry With',  # proprietary MS extension
    451: 'Unavailable For Legal Reasons',
    500: 'Internal Server Error',
    501: 'Not Implemented',
    502: 'Bad Gateway',
    503: 'Service Unavailable',
    504: 'Gateway Timeout',
    505: 'HTTP Version Not Supported',
    507: 'Insufficient Storage',
    510: 'Not Extended'
}

WEBDAV_STATUS_CODES = [207, 422, 423, 424, 507]

http_sig_param_re = re.compile(r'\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)',
                               re.VERBOSE)


def sort_by_method(entries):
    def cmp(item):
        order = ['HEAD', 'GET', 'POST', 'PUT', 'DELETE', 'PATCH',
                 'OPTIONS', 'TRACE', 'CONNECT', 'COPY', 'ANY']
        method = item[0].split(' ', 1)[0]
        if method in order:
            return order.index(method)
        return 100
    return sorted(entries, key=cmp)


def http_resource_anchor(method, path):
    path = re.sub(r'[{}]', '', re.sub(r'[<>:/]', '-', path))
    return method.lower() + '-' + path
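
# Illustrative note (not part of the original module): http_resource_anchor()
# builds the HTML anchor for a documented endpoint by replacing the characters
# ``<>:/`` with ``-`` and dropping ``{}``.  For example::
#
#     >>> http_resource_anchor('GET', '/users/(int:user_id)')
#     'get--users-(int-user_id)'
#     >>> http_resource_anchor('post', '/items/{item_id}')
#     'post--items-item_id'
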

class HTTPResource(ObjectDescription):

    doc_field_types = [
        TypedField('parameter', label='Parameters',
                   names=('param', 'parameter', 'arg', 'argument'),
                   typerolename='obj', typenames=('paramtype', 'type')),
        TypedField('jsonparameter', label='JSON Parameters',
                   names=('jsonparameter', 'jsonparam', 'json'),
                   typerolename='obj', typenames=('jsonparamtype', 'jsontype')),
        TypedField('requestjsonobject', label='Request JSON Object',
                   names=('reqjsonobj', 'reqjson', '<jsonobj', '<json'),
                   typerolename='obj', typenames=('reqjsonobj', '<jsonobj')),
        TypedField('requestjsonarray', label='Request JSON Array of Objects',
                   names=('reqjsonarr', '<jsonarr'),
                   typerolename='obj',
                   typenames=('reqjsonarrtype', '<jsonarrtype')),
        TypedField('responsejsonobject', label='Response JSON Object',
                   names=('resjsonobj', 'resjson', '>jsonobj', '>json'),
                   typerolename='obj', typenames=('resjsonobj', '>jsonobj')),
        TypedField('responsejsonarray', label='Response JSON Array of Objects',
                   names=('resjsonarr', '>jsonarr'),
                   typerolename='obj',
                   typenames=('resjsonarrtype', '>jsonarrtype')),
        TypedField('queryparameter', label='Query Parameters',
                   names=('queryparameter', 'queryparam', 'qparam', 'query'),
                   typerolename='obj',
                   typenames=('queryparamtype', 'querytype', 'qtype')),
        GroupedField('formparameter', label='Form Parameters',
                     names=('formparameter', 'formparam', 'fparam', 'form')),
        GroupedField('requestheader', label='Request Headers',
                     rolename='header',
                     names=('<header', 'reqheader', 'requestheader')),
        GroupedField('responseheader', label='Response Headers',
                     rolename='header',
                     names=('>header', 'resheader', 'responseheader')),
        GroupedField('statuscode', label='Status Codes',
                     rolename='statuscode',
                     names=('statuscode', 'status', 'code'))
    ]

    option_spec = {
        'deprecated': directives.flag,
        'noindex': directives.flag,
        'synopsis': lambda x: x,
    }

    method = NotImplemented

    def handle_signature(self, sig, signode):
        method = self.method.upper() + ' '
        signode += addnodes.desc_name(method, method)
        offset = 0
        path = None
        for match in http_sig_param_re.finditer(sig):
            path = sig[offset:match.start()]
            signode += addnodes.desc_name(path, path)
            params = addnodes.desc_parameterlist()
            typ = match.group('type')
            if typ:
                typ += ': '
                params += addnodes.desc_annotation(typ, typ)
            name = match.group('name')
            params += addnodes.desc_parameter(name, name)
            signode += params
            offset = match.end()
        if offset < len(sig):
            path = sig[offset:len(sig)]
            signode += addnodes.desc_name(path, path)
        assert path is not None, 'no matches for sig: %s' % sig
        fullname = self.method.upper() + ' ' + path
        signode['method'] = self.method
        signode['path'] = sig
        signode['fullname'] = fullname
        return (fullname, self.method, sig)

    def needs_arglist(self):
        return False

    def add_target_and_index(self, name_cls, sig, signode):
        signode['ids'].append(http_resource_anchor(*name_cls[1:]))
        if 'noindex' not in self.options:
            self.env.domaindata['http'][self.method][sig] = (
                self.env.docname,
                self.options.get('synopsis', ''),
                'deprecated' in self.options)

    def get_index_text(self, modname, name):
        return ''


class HTTPOptions(HTTPResource):

    method = 'options'


class HTTPHead(HTTPResource):

    method = 'head'


class HTTPPatch(HTTPResource):

    method = 'patch'


class HTTPPost(HTTPResource):

    method = 'post'


class HTTPGet(HTTPResource):

    method = 'get'


class HTTPPut(HTTPResource):

    method = 'put'


class HTTPDelete(HTTPResource):

    method = 'delete'


class HTTPTrace(HTTPResource):

    method = 'trace'


class HTTPConnect(HTTPResource):

    method = 'connect'


class HTTPCopy(HTTPResource):

    method = 'copy'


class HTTPAny(HTTPResource):

    method = 'any'
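
# Illustrative usage sketch (not part of the original module): the directives
# above are used from reST with the field names declared in
# ``HTTPResource.doc_field_types``.  The endpoint path and field values below
# are made up for the example::
#
#     .. http:get:: /users/(int:user_id)/posts/(tag)
#
#        The posts tagged with `tag` written by the user `user_id`.
#
#        :query sort: sorting order, e.g. ``asc`` or ``desc``
#        :reqheader Authorization: optional OAuth token
#        :resheader Content-Type: ``application/json``
#        :status 200: posts found
#        :status 404: there is no such user
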

class HTTPXRefRole(XRefRole):

    def __init__(self, method, **kwargs):
        XRefRole.__init__(self, **kwargs)
        self.method = method

    def process_link(self, env, refnode, has_explicit_title, title, target):
        if not has_explicit_title:
            title = self.method.upper() + ' ' + title
        return title, target


class HTTPXRefMethodRole(XRefRole):

    def result_nodes(self, document, env, node, is_ref):
        method = node[0][0].lower()
        rawsource = node[0].rawsource
        config = env.domains['http'].env.config
        if method not in METHOD_REFS:
            if not config['http_strict_mode']:
                return [nodes.emphasis(method, method)], []
            reporter = document.reporter
            msg = reporter.error('%s is not a valid HTTP method' % method,
                                 line=node.line)
            prb = nodes.problematic(method, method)
            return [prb], [msg]
        url = str(METHOD_REFS[method])
        if not url:
            return [nodes.emphasis(method, method)], []
        node = nodes.reference(rawsource, method.upper(), refuri=url)
        return [node], []


class HTTPXRefStatusRole(XRefRole):

    def result_nodes(self, document, env, node, is_ref):
        def get_code_status(text):
            if text.isdigit():
                code = int(text)
                return code, HTTP_STATUS_CODES.get(code)
            else:
                try:
                    code, status = re.split(r'\s', text.strip(), 1)
                    code = int(code)
                except ValueError:
                    return None, None
                known_status = HTTP_STATUS_CODES.get(code)
                if known_status is None:
                    return code, None
                elif known_status.lower() != status.lower():
                    return code, None
                else:
                    return code, status

        def report_unknown_code():
            if not config['http_strict_mode']:
                return [nodes.emphasis(text, text)], []
            reporter = document.reporter
            msg = reporter.error('%d is an unknown HTTP status code' % code,
                                 line=node.line)
            prb = nodes.problematic(text, text)
            return [prb], [msg]

        def report_invalid_code():
            if not config['http_strict_mode']:
                return [nodes.emphasis(text, text)], []
            reporter = document.reporter
            msg = reporter.error(
                'HTTP status code must be an integer (e.g. `200`) or '
                'start with an integer (e.g. `200 OK`); %r is invalid' %
                text,
                line=node.line
            )
            prb = nodes.problematic(text, text)
            return [prb], [msg]

        text = node[0][0]
        rawsource = node[0].rawsource
        config = env.domains['http'].env.config

        code, status = get_code_status(text)
        if code is None:
            return report_invalid_code()
        elif status is None:
            return report_unknown_code()
        elif code == 226:
            url = 'http://www.ietf.org/rfc/rfc3229.txt'
        elif code == 418:
            url = 'http://www.ietf.org/rfc/rfc2324.txt'
        elif code == 429:
            url = 'http://tools.ietf.org/html/rfc6585#section-4'
        elif code == 449:
            url = 'http://msdn.microsoft.com/en-us/library/dd891478(v=prot.10).aspx'
        elif code == 451:
            url = 'http://www.ietf.org/rfc/rfc7725.txt'
        elif code in WEBDAV_STATUS_CODES:
            url = 'http://tools.ietf.org/html/rfc4918#section-11.%d' % (WEBDAV_STATUS_CODES.index(code) + 1)
        elif code in HTTP_STATUS_CODES:
            url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html' \
                  '#sec10.' + ('%d.%d' % (code // 100, 1 + code % 100))
        else:
            url = ''
        node = nodes.reference(rawsource, '%d %s' % (code, status), refuri=url)
        return [node], []


class HTTPXRefHeaderRole(XRefRole):

    def result_nodes(self, document, env, node, is_ref):
        header = node[0][0]
        rawsource = node[0].rawsource
        if header not in HEADER_REFS:
            _header = '-'.join(map(lambda i: i.title(), header.split('-')))
            if _header not in HEADER_REFS:
                return [nodes.emphasis(header, header)], []
            # fall back to the normalized header name for the URL lookup
            header = _header
        url = str(HEADER_REFS[header])
        node = nodes.reference(rawsource, header, refuri=url)
        return [node], []
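
# Illustrative usage sketch (not part of the original module): the XRef roles
# above are exposed further below as :http:method:, :http:statuscode: and
# :http:header: and render as links to the relevant specification sections,
# e.g.::
#
#     :http:method:`get`                    links to RFC 7231, section 4.3.1
#     :http:statuscode:`404`                links to RFC 2616, section 10.4.5
#     :http:statuscode:`418 I'm a teapot`   links to RFC 2324
#     :http:header:`Content-Type`           links to RFC 7231, section 3.1.1.5
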

class HTTPIndex(Index):

    name = 'routingtable'
    localname = 'HTTP Routing Table'
    shortname = 'routing table'

    def __init__(self, *args, **kwargs):
        super(HTTPIndex, self).__init__(*args, **kwargs)

        self.ignore = [
            [l for l in x.split('/') if l]
            for x in self.domain.env.config['http_index_ignore_prefixes']]
        self.ignore.sort(reverse=True)

        # During HTML generation these values are picked up from the class,
        # not from the instance, so we patch the class attributes here as a
        # small hack.
        cls = self.__class__
        cls.shortname = self.domain.env.config['http_index_shortname']
        cls.localname = self.domain.env.config['http_index_localname']

    def grouping_prefix(self, path):
        letters = [x for x in path.split('/') if x]
        for prefix in self.ignore:
            if letters[:len(prefix)] == prefix:
                return '/' + '/'.join(letters[:len(prefix) + 1])
        return '/%s' % (letters[0] if letters else '',)
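
    # Illustrative note (not part of the original module): grouping_prefix()
    # decides under which prefix an endpoint is listed in the routing table.
    # Assuming ``http_index_ignore_prefixes = ['/api']`` in conf.py and an
    # HTTPIndex instance ``index``::
    #
    #     >>> index.grouping_prefix('/api/users/(int:user_id)')
    #     '/api/users'
    #     >>> index.grouping_prefix('/health')
    #     '/health'
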
    def generate(self, docnames=None):
        content = {}
        items = ((method, path, info)
                 for method, routes in self.domain.routes.items()
                 for path, info in routes.items())
        items = sorted(items, key=lambda item: item[1])
        for method, path, info in items:
            entries = content.setdefault(self.grouping_prefix(path), [])
            entries.append([
                method.upper() + ' ' + path, 0, info[0],
                http_resource_anchor(method, path),
                '', 'Deprecated' if info[2] else '', info[1]
            ])
        items = sorted(
            (path, sort_by_method(entries))
            for path, entries in content.items()
        )
        return (items, True)


class HTTPDomain(Domain):
    """HTTP domain."""

    name = 'http'
    label = 'HTTP'

    object_types = {
        'options': ObjType('options', 'options', 'obj'),
        'head': ObjType('head', 'head', 'obj'),
        'post': ObjType('post', 'post', 'obj'),
        'get': ObjType('get', 'get', 'obj'),
        'put': ObjType('put', 'put', 'obj'),
        'patch': ObjType('patch', 'patch', 'obj'),
        'delete': ObjType('delete', 'delete', 'obj'),
        'trace': ObjType('trace', 'trace', 'obj'),
        'connect': ObjType('connect', 'connect', 'obj'),
        'copy': ObjType('copy', 'copy', 'obj'),
        'any': ObjType('any', 'any', 'obj')
    }

    directives = {
        'options': HTTPOptions,
        'head': HTTPHead,
        'post': HTTPPost,
        'get': HTTPGet,
        'put': HTTPPut,
        'patch': HTTPPatch,
        'delete': HTTPDelete,
        'trace': HTTPTrace,
        'connect': HTTPConnect,
        'copy': HTTPCopy,
        'any': HTTPAny
    }

    roles = {
        'options': HTTPXRefRole('options'),
        'head': HTTPXRefRole('head'),
        'post': HTTPXRefRole('post'),
        'get': HTTPXRefRole('get'),
        'put': HTTPXRefRole('put'),
        'patch': HTTPXRefRole('patch'),
        'delete': HTTPXRefRole('delete'),
        'trace': HTTPXRefRole('trace'),
        'connect': HTTPXRefRole('connect'),
        'copy': HTTPXRefRole('copy'),
        'any': HTTPXRefRole('any'),
        'statuscode': HTTPXRefStatusRole(),
        'method': HTTPXRefMethodRole(),
        'header': HTTPXRefHeaderRole()
    }

    initial_data = {
        'options': {},  # path: (docname, synopsis, deprecated)
        'head': {},
        'post': {},
        'get': {},
        'put': {},
        'patch': {},
        'delete': {},
        'trace': {},
        'connect': {},
        'copy': {},
        'any': {}
    }

    indices = []

    @property
    def routes(self):
        return dict((key, self.data[key]) for key in self.object_types)

    def clear_doc(self, docname):
        for typ, routes in self.routes.items():
            for path, info in list(routes.items()):
                if info[0] == docname:
                    del routes[path]

    def resolve_xref(self, env, fromdocname, builder, typ, target,
                     node, contnode):
        try:
            info = self.data[str(typ)][target]
        except KeyError:
            text = contnode.rawsource
            role = self.roles.get(typ)
            if role is None:
                return None

            if fromdocname not in _doctree_cache:
                _doctree_cache[fromdocname] = env.get_doctree(fromdocname)
            doctree = _doctree_cache[fromdocname]

            resnode = role.result_nodes(doctree, env, node, None)[0][0]
            if isinstance(resnode, addnodes.pending_xref):
                text = node[0][0]
                reporter = doctree.reporter
                reporter.warning('Cannot resolve reference to %r' % text,
                                 line=node.line)
                return None
            return resnode
        else:
            anchor = http_resource_anchor(typ, target)
            title = typ.upper() + ' ' + target
            return make_refnode(builder, fromdocname, info[0], anchor,
                                contnode, title)

    def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
        """Resolve the pending_xref *node* with the given *target*.

        The reference comes from an "any" or similar role, which means that
        Sphinx doesn't know the type.

        For now sphinxcontrib-httpdomain doesn't resolve any xref nodes.

        :return:
            list of tuples ``('domain:role', newnode)``, where ``'domain:role'``
            is the name of a role that could have created the same reference.
        """
        return []

    def get_objects(self):
        for method, routes in self.routes.items():
            for path, info in routes.items():
                anchor = http_resource_anchor(method, path)
                yield (path, path, method, info[0], anchor, 1)

class HTTPLexer(RegexLexer):
    """Lexer for HTTP sessions."""

    name = 'HTTP'
    aliases = ['http']

    flags = re.DOTALL

    def header_callback(self, match):
        if match.group(1).lower() == 'content-type':
            content_type = match.group(5).strip()
            if ';' in content_type:
                content_type = content_type[:content_type.find(';')].strip()
            self.content_type = content_type
        yield match.start(1), Name.Attribute, match.group(1)
        yield match.start(2), Text, match.group(2)
        yield match.start(3), Operator, match.group(3)
        yield match.start(4), Text, match.group(4)
        yield match.start(5), Literal, match.group(5)
        yield match.start(6), Text, match.group(6)

    def continuous_header_callback(self, match):
        yield match.start(1), Text, match.group(1)
        yield match.start(2), Literal, match.group(2)
        yield match.start(3), Text, match.group(3)

    def content_callback(self, match):
        content_type = getattr(self, 'content_type', None)
        content = match.group()
        offset = match.start()
        if content_type:
            from pygments.lexers import get_lexer_for_mimetype
            try:
                lexer = get_lexer_for_mimetype(content_type)
            except ClassNotFound:
                pass
            else:
                for idx, token, value in lexer.get_tokens_unprocessed(content):
                    yield offset + idx, token, value
                return
        yield offset, Text, content

    tokens = {
        'root': [
            (r'(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE|COPY)( +)([^ ]+)( +)'
             r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
             bygroups(Name.Function, Text, Name.Namespace, Text,
                      Keyword.Reserved, Operator, Number, Text),
             'headers'),
            (r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
             bygroups(Keyword.Reserved, Operator, Number, Text, Number,
                      Text, Name.Exception, Text),
             'headers'),
        ],
        'headers': [
            (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
            (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
            (r'\r?\n', Text, 'content')
        ],
        'content': [
            (r'.+', content_callback)
        ]
    }


def setup(app):
    app.add_domain(HTTPDomain)

    try:
        get_lexer_by_name('http')
    except ClassNotFound:
        app.add_lexer('http', HTTPLexer())
    app.add_config_value('http_index_ignore_prefixes', [], None)
    app.add_config_value('http_index_shortname', 'routing table', True)
    app.add_config_value('http_index_localname', 'HTTP Routing Table', True)
    app.add_config_value('http_strict_mode', True, None)
    app.add_config_value('http_headers_ignore_prefixes', ['X-'], None)
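
# Illustrative configuration sketch (not part of the original module): in a
# project's conf.py the extension and the config values declared in setup()
# above might be set roughly like this (all values below are made-up
# examples)::
#
#     extensions = ['sphinxcontrib.httpdomain']
#
#     http_index_ignore_prefixes = ['/api']
#     http_index_shortname = 'API routes'
#     http_index_localname = 'My Service API Routing Table'
#     http_strict_mode = False   # tolerate unknown methods/status codes
#
# The ``http`` Pygments lexer registered above is what highlights literal
# request/response sessions written as ``.. sourcecode:: http`` blocks.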