"""Output streaming, processing and formatting.

"""
import json

from functools import partial
from itertools import chain

import pygments
from pygments import token, lexer
from pygments.styles import get_style_by_name, STYLE_MAP
from pygments.lexers import get_lexer_for_mimetype, get_lexer_by_name
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.util import ClassNotFound
from requests.compat import is_windows

from .solarized import Solarized256Style
from .models import HTTPRequest, HTTPResponse, Environment
from .input import (OUT_REQ_BODY, OUT_REQ_HEAD,
                    OUT_RESP_HEAD, OUT_RESP_BODY)


# Colors on Windows via colorama don't look that
# great and fruity seems to give the best result there.
AVAILABLE_STYLES = set(STYLE_MAP.keys())
AVAILABLE_STYLES.add('solarized')
DEFAULT_STYLE = 'solarized' if not is_windows else 'fruity'


BINARY_SUPPRESSED_NOTICE = (
    b'\n'
    b'+-----------------------------------------+\n'
    b'| NOTE: binary data not shown in terminal |\n'
    b'+-----------------------------------------+'
)


class BinarySuppressedError(Exception):
    """An error indicating that the body is binary and won't be written,
    e.g., for terminal output."""

    message = BINARY_SUPPRESSED_NOTICE


###############################################################################
# Output Streams
###############################################################################


def write(stream, outfile, flush):
    """Write the output stream."""
    try:
        # Writing bytes so we use the buffer interface (Python 3).
        buf = outfile.buffer
    except AttributeError:
        buf = outfile

    for chunk in stream:
        buf.write(chunk)
        if flush:
            outfile.flush()
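

# A minimal usage sketch of `write()` (illustrative only, not part of the
# original module): any binary file-like object works as `outfile`; here an
# in-memory buffer stands in for stdout.
def _example_write_to_buffer():
    import io
    buf = io.BytesIO()
    # BytesIO has no `.buffer` attribute, so `write()` falls back to the
    # object itself and writes the chunks directly.
    write(stream=iter([b'hello, ', b'world\n']), outfile=buf, flush=False)
    return buf.getvalue()  # b'hello, world\n'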


def write_with_colors_win_p3k(stream, outfile, flush):
    """Like `write`, but colorized chunks are written as text
    directly to `outfile` to ensure it gets processed by colorama.
    Applies only to Windows with Python 3 and colorized terminal output.

    """
    color = b'\x1b['
    encoding = outfile.encoding
    for chunk in stream:
        if color in chunk:
            outfile.write(chunk.decode(encoding))
        else:
            outfile.buffer.write(chunk)
        if flush:
            outfile.flush()


def build_output_stream(args, env, request, response):
    """Build and return a chain of iterators over the `request`-`response`
    exchange, each of which yields `bytes` chunks.

    """
    req_h = OUT_REQ_HEAD in args.output_options
    req_b = OUT_REQ_BODY in args.output_options
    resp_h = OUT_RESP_HEAD in args.output_options
    resp_b = OUT_RESP_BODY in args.output_options
    req = req_h or req_b
    resp = resp_h or resp_b

    output = []
    Stream = get_stream_type(env, args)

    if req:
        output.append(Stream(
            msg=HTTPRequest(request),
            with_headers=req_h,
            with_body=req_b))

    if req_b and resp:
        # Request/Response separator.
        output.append([b'\n\n'])

    if resp:
        output.append(Stream(
            msg=HTTPResponse(response),
            with_headers=resp_h,
            with_body=resp_b))

    if env.stdout_isatty and resp_b:
        # Ensure a blank line after the response body.
        # For terminal output only.
        output.append([b'\n\n'])

    return chain(*output)
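

# A minimal usage sketch (illustrative only, not part of the original API):
# the chain built by `build_output_stream()` is typically consumed by
# `write()`. The `args`, `env`, `request` and `response` objects are assumed
# to come from the CLI layer.
def _example_write_exchange(args, env, request, response):
    output = build_output_stream(args, env, request, response)
    write(stream=output, outfile=env.stdout, flush=args.stream)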


def get_stream_type(env, args):
    """Pick the right stream type based on `env` and `args`.

    Wrap it in a partial with the type-specific args so that the caller
    doesn't need to care which stream type it is dealing with.

    """
    if not env.stdout_isatty and not args.prettify:
        Stream = partial(
            RawStream,
            chunk_size=RawStream.CHUNK_SIZE_BY_LINE
            if args.stream
            else RawStream.CHUNK_SIZE
        )
    elif args.prettify:
        Stream = partial(
            PrettyStream if args.stream else BufferedPrettyStream,
            env=env,
            processor=OutputProcessor(
                env=env, groups=args.prettify, pygments_style=args.style),
        )
    else:
        Stream = partial(EncodedStream, env=env)

    return Stream


class BaseStream(object):
    """Base HTTP message output stream class."""

    def __init__(self, msg, with_headers=True, with_body=True):
        """
        :param msg: a :class:`models.HTTPMessage` subclass
        :param with_headers: if `True`, headers will be included
        :param with_body: if `True`, body will be included

        """
        assert with_headers or with_body
        self.msg = msg
        self.with_headers = with_headers
        self.with_body = with_body

    def _get_headers(self):
        """Return the headers' bytes."""
        return self.msg.headers.encode('ascii')

    def _iter_body(self):
        """Return an iterator over the message body."""
        raise NotImplementedError()

    def __iter__(self):
        """Return an iterator over `self.msg`."""
        if self.with_headers:
            yield self._get_headers()
            yield b'\r\n\r\n'

        if self.with_body:
            try:
                for chunk in self._iter_body():
                    yield chunk
            except BinarySuppressedError as e:
                if self.with_headers:
                    yield b'\n'
                yield e.message


class RawStream(BaseStream):
    """The message is streamed in chunks with no processing."""

    CHUNK_SIZE = 1024 * 100
    CHUNK_SIZE_BY_LINE = 1024 * 5

    def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
        super(RawStream, self).__init__(**kwargs)
        self.chunk_size = chunk_size

    def _iter_body(self):
        return self.msg.iter_body(self.chunk_size)


class EncodedStream(BaseStream):
    """Encoded HTTP message stream.

    The message bytes are converted to an encoding suitable for
    `self.env.stdout`. Unicode errors are replaced and binary data
    is suppressed. The body is always streamed by line.

    """
    CHUNK_SIZE = 1024 * 5

    def __init__(self, env=Environment(), **kwargs):

        super(EncodedStream, self).__init__(**kwargs)

        if env.stdout_isatty:
            # Use the encoding supported by the terminal.
            output_encoding = getattr(env.stdout, 'encoding', None)
        else:
            # Preserve the message encoding.
            output_encoding = self.msg.encoding

        # Default to utf8 when unsure.
        self.output_encoding = output_encoding or 'utf8'

    def _iter_body(self):

        for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):

            if b'\0' in line:
                raise BinarySuppressedError()

            yield line.decode(self.msg.encoding) \
                .encode(self.output_encoding, 'replace') + lf
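

# A minimal sketch of the re-encoding step above (illustrative only),
# assuming a latin-1 encoded body rendered on an ASCII-only terminal:
# characters the output encoding cannot represent are replaced instead of
# raising an error.
def _example_reencode_line():
    line = b'temperature: 23\xb0C'  # latin-1 body line; 0xb0 is the degree sign
    return line.decode('latin-1').encode('ascii', 'replace')
    # b'temperature: 23?C'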


class PrettyStream(EncodedStream):
    """In addition to :class:`EncodedStream` behaviour, this stream applies
    content processing.

    Useful for long-lived HTTP responses that stream by line,
    such as the Twitter streaming API.

    """
    CHUNK_SIZE = 1024 * 5

    def __init__(self, processor, **kwargs):
        super(PrettyStream, self).__init__(**kwargs)
        self.processor = processor

    def _get_headers(self):
        return self.processor.process_headers(
            self.msg.headers).encode(self.output_encoding)

    def _iter_body(self):
        for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
            if b'\0' in line:
                raise BinarySuppressedError()
            yield self._process_body(line) + lf

    def _process_body(self, chunk):
        return (self.processor
                .process_body(
                    chunk.decode(self.msg.encoding, 'replace'),
                    self.msg.content_type)
                .encode(self.output_encoding, 'replace'))


class BufferedPrettyStream(PrettyStream):
    """The same as :class:`PrettyStream` except that the body is fully
    fetched before it's processed.

    Suitable for regular HTTP responses.

    """
    CHUNK_SIZE = 1024 * 10

    def _iter_body(self):

        # Read the whole body before prettifying it,
        # but bail out immediately if the body is binary.
        body = bytearray()
        for chunk in self.msg.iter_body(self.CHUNK_SIZE):
            if b'\0' in chunk:
                raise BinarySuppressedError()
            body.extend(chunk)

        yield self._process_body(body)


###############################################################################
# Processing
###############################################################################


class HTTPLexer(lexer.RegexLexer):
    """Simplified HTTP lexer for Pygments.

    It only operates on headers and provides a stronger contrast between
    their names and values than the original one bundled with Pygments
    (:class:`pygments.lexers.text.HttpLexer`), especially when
    the Solarized color scheme is used.

    """
    name = 'HTTP'
    aliases = ['http']
    filenames = ['*.http']
    tokens = {
        'root': [
            # Request-Line
            (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
             lexer.bygroups(
                 token.Name.Function,
                 token.Text,
                 token.Name.Namespace,
                 token.Text,
                 token.Keyword.Reserved,
                 token.Operator,
                 token.Number
             )),
            # Response Status-Line
            (r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
             lexer.bygroups(
                 token.Keyword.Reserved,  # 'HTTP'
                 token.Operator,  # '/'
                 token.Number,  # Version
                 token.Text,
                 token.Number,  # Status code
                 token.Text,
                 token.Name.Exception,  # Reason
             )),
            # Header
            (r'(.*?)( *)(:)( *)(.+)', lexer.bygroups(
                token.Name.Attribute,  # Name
                token.Text,
                token.Operator,  # Colon
                token.Text,
                token.String  # Value
            ))
        ]
    }
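

# A minimal sketch (illustrative only): highlighting a status line with the
# simplified lexer above and a plain terminal formatter.
def _example_highlight_status_line():
    return pygments.highlight(
        'HTTP/1.1 200 OK',
        HTTPLexer(),
        TerminalFormatter()
    ).strip()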


class BaseProcessor(object):
    """Base, noop output processor class."""

    enabled = True

    def __init__(self, env=Environment(), **kwargs):
        """
        :param env: an :class:`Environment` instance
        :param kwargs: additional keyword arguments that some
                       processors might require.

        """
        self.env = env
        self.kwargs = kwargs

    def process_headers(self, headers):
        """Return processed `headers`.

        :param headers: The headers as text.

        """
        return headers

    def process_body(self, content, content_type, subtype):
        """Return processed `content`.

        :param content: The body content as text
        :param content_type: Full content type, e.g., 'application/atom+xml'.
        :param subtype: E.g. 'xml'.

        """
        return content


class JSONProcessor(BaseProcessor):
    """JSON body processor."""

    def process_body(self, content, content_type, subtype):
        if subtype == 'json':
            try:
                # Indent the JSON data, sort keys by name, and
                # avoid unicode escapes to improve readability.
                content = json.dumps(json.loads(content),
                                     sort_keys=True,
                                     ensure_ascii=False,
                                     indent=4)
            except ValueError:
                # Invalid JSON but we don't care.
                pass
        return content
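

# A minimal sketch of the re-formatting above (illustrative only): compact
# JSON comes out indented, with keys sorted and non-ASCII characters kept.
def _example_pretty_json():
    ugly = '{"b":1,"a":"\\u00e9"}'
    return json.dumps(json.loads(ugly), sort_keys=True,
                      ensure_ascii=False, indent=4)
    # '{\n    "a": "é",\n    "b": 1\n}'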


class PygmentsProcessor(BaseProcessor):
    """A processor that applies syntax-highlighting using Pygments
    to the headers, and to the body as well if its content type is recognized.

    """
    def __init__(self, *args, **kwargs):
        super(PygmentsProcessor, self).__init__(*args, **kwargs)

        # Cache the lexers to speed up processing of bodies streamed by line.
        self.lexers_by_type = {}

        if not self.env.colors:
            self.enabled = False
            return

        try:
            style = get_style_by_name(
                self.kwargs.get('pygments_style', DEFAULT_STYLE))
        except ClassNotFound:
            style = Solarized256Style

        if self.env.is_windows or self.env.colors == 256:
            fmt_class = Terminal256Formatter
        else:
            fmt_class = TerminalFormatter
        self.formatter = fmt_class(style=style)

    def process_headers(self, headers):
        return pygments.highlight(
            headers, HTTPLexer(), self.formatter).strip()

    def process_body(self, content, content_type, subtype):
        try:
            lexer = self.lexers_by_type.get(content_type)
            if not lexer:
                try:
                    lexer = get_lexer_for_mimetype(content_type)
                except ClassNotFound:
                    lexer = get_lexer_by_name(subtype)
                self.lexers_by_type[content_type] = lexer
        except ClassNotFound:
            pass
        else:
            content = pygments.highlight(content, lexer, self.formatter)
        return content.strip()


class HeadersProcessor(BaseProcessor):
    """Sorts headers by name, retaining the relative order of multiple
    headers with the same name.

    """
    def process_headers(self, headers):
        lines = headers.splitlines()
        headers = sorted(lines[1:], key=lambda h: h.split(':')[0])
        return '\r\n'.join(lines[:1] + headers)
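

# A minimal sketch (illustrative only): `sorted()` is stable, so repeated
# headers such as Set-Cookie keep their original relative order.
def _example_sort_headers():
    raw = ('HTTP/1.1 200 OK\r\n'
           'Set-Cookie: b=2\r\n'
           'Content-Type: application/json\r\n'
           'Set-Cookie: a=1')
    return HeadersProcessor().process_headers(raw)
    # 'HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n'
    # 'Set-Cookie: b=2\r\nSet-Cookie: a=1'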


class OutputProcessor(object):
    """A delegate class that invokes the actual processors."""

    installed_processors = {
        'format': [
            HeadersProcessor,
            JSONProcessor
        ],
        'colors': [
            PygmentsProcessor
        ]
    }

    def __init__(self, groups, env=Environment(), **kwargs):
        """
        :param env: a :class:`models.Environment` instance
        :param groups: the groups of processors to be applied
        :param kwargs: additional keyword arguments for processors

        """
        self.processors = []
        for group in groups:
            for cls in self.installed_processors[group]:
                processor = cls(env, **kwargs)
                if processor.enabled:
                    self.processors.append(processor)

    def process_headers(self, headers):
        for processor in self.processors:
            headers = processor.process_headers(headers)
        return headers

    def process_body(self, content, content_type):
        # e.g., 'application/atom+xml'
        content_type = content_type.split(';')[0]
        # e.g., 'xml'
        subtype = content_type.split('/')[-1].split('+')[-1]

        for processor in self.processors:
            content = processor.process_body(content, content_type, subtype)

        return content
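

# A minimal usage sketch (illustrative only): formatting-only processing of a
# JSON body via the 'format' group, without colorizing.
def _example_process_json_body():
    processor = OutputProcessor(groups=['format'])
    return processor.process_body(
        content='{"b": 1, "a": 2}',
        content_type='application/json; charset=utf-8'
    )
    # '{\n    "a": 2,\n    "b": 1\n}'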