2012-07-20 22:54:41 +03:00
|
|
|
"""
|
|
|
|
Colorizing of HTTP messages and content processing.
|
|
|
|
|
|
|
|
"""
|
2012-03-02 03:39:22 +03:00
|
|
|
import os
|
2012-07-14 17:27:11 +03:00
|
|
|
import re
|
2012-02-25 15:39:38 +03:00
|
|
|
import json
|
|
|
|
import pygments
|
2012-07-20 22:54:41 +03:00
|
|
|
from pygments import token, lexer
|
2012-04-28 15:18:59 +03:00
|
|
|
from pygments.styles import get_style_by_name, STYLE_MAP
|
2012-07-20 22:54:41 +03:00
|
|
|
from pygments.lexers import get_lexer_for_mimetype
|
2012-04-28 15:13:40 +03:00
|
|
|
from pygments.formatters.terminal import TerminalFormatter
|
2012-07-20 22:54:41 +03:00
|
|
|
from pygments.formatters.terminal256 import Terminal256Formatter
|
|
|
|
from pygments.util import ClassNotFound
|
2012-07-17 04:48:10 +03:00
|
|
|
from requests.compat import is_windows
|
2012-03-14 02:05:44 +03:00
|
|
|
from . import solarized
|
2012-02-25 15:39:38 +03:00
|
|
|
|
2012-07-20 22:54:41 +03:00
|
|
|
|
|
|
|
#: Name of the color scheme used when the requested one is unknown.
DEFAULT_STYLE = 'solarized'

#: Every selectable style: our bundled Solarized plus all Pygments styles.
AVAILABLE_STYLES = [DEFAULT_STYLE] + list(STYLE_MAP.keys())

if is_windows:
    # Windows consoles need colorama to interpret ANSI escapes.
    import colorama
    colorama.init()
    # 256 looks better on Windows
    formatter_class = Terminal256Formatter
else:
    # Elsewhere, pick the 256-color formatter only when the terminal
    # advertises support for it via $TERM.
    if '256color' in os.environ.get('TERM', ''):
        formatter_class = Terminal256Formatter
    else:
        formatter_class = TerminalFormatter
|
|
|
|
|
2012-02-25 15:39:38 +03:00
|
|
|
|
2012-07-20 22:54:41 +03:00
|
|
|
class HTTPLexer(lexer.RegexLexer):
    """Pygments lexer for raw HTTP messages (headers only).

    Compared with the ``HttpLexer`` bundled with Pygments
    (`pygments.lexers.text import HttpLexer`), this one gives header
    names and values a stronger mutual contrast, which reads better
    under the Solarized color scheme.

    """
    name = 'HTTP'
    aliases = ['http']
    filenames = ['*.http']

    # Rule order matters: the request/status line patterns must come
    # before the generic header pattern, which would otherwise match them.
    tokens = {
        'root': [

            # Request-Line, e.g. "GET /index.html HTTP/1.1"
            (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
             lexer.bygroups(
                 token.Name.Function,     # Method
                 token.Text,
                 token.Name.Namespace,    # Request-URI
                 token.Text,
                 token.Keyword.Reserved,  # 'HTTP'
                 token.Operator,          # '/'
                 token.Number,            # Version
             )),

            # Response Status-Line, e.g. "HTTP/1.1 200 OK"
            (r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
             lexer.bygroups(
                 token.Keyword.Reserved,  # 'HTTP'
                 token.Operator,          # '/'
                 token.Number,            # Version
                 token.Text,
                 token.Number,            # Status code
                 token.Text,
                 token.Name.Exception,    # Reason
             )),

            # Header line, "Name: value"
            (r'(.*?)( *)(:)( *)(.+)',
             lexer.bygroups(
                 token.Name.Attribute,    # Name
                 token.Text,
                 token.Operator,          # Colon
                 token.Text,
                 token.String,            # Value
             )),
        ],
    }
|
2012-07-14 17:27:11 +03:00
|
|
|
|
2012-04-26 14:05:59 +03:00
|
|
|
|
2012-04-28 15:13:40 +03:00
|
|
|
class PrettyHttp(object):
    """HTTP headers & body prettyfier."""

    def __init__(self, style_name):
        """Set up a Pygments formatter for the style named `style_name`.

        Unknown style names silently fall back to the bundled
        Solarized style.

        """
        try:
            style = get_style_by_name(style_name)
        except ClassNotFound:
            style = solarized.SolarizedStyle
        # `formatter_class` is selected at import time based on the
        # platform and the terminal's color capabilities.
        self.formatter = formatter_class(style=style)

    def headers(self, content):
        """Pygmentize HTTP headers."""
        return pygments.highlight(content, HTTPLexer(), self.formatter)

    def body(self, content, content_type):
        """Pygmentize `content` based on `content_type`.

        JSON bodies are additionally re-serialized with sorted keys and
        indentation. `content` is returned unchanged when no lexer is
        known for the (normalized) content type.

        """
        # Discard parameters such as "; charset=utf-8".
        content_type = content_type.split(';')[0]

        # Normalize vendor types for lexer lookup, e.g.
        # "application/vnd.github+json" -> "application/json".
        application_match = re.match(
            r'application/(.+\+)(json|xml)$',
            content_type
        )
        if application_match:
            # Strip vendor and extensions from Content-Type
            vendor = application_match.group(1)
            content_type = content_type.replace(vendor, '')

        try:
            # NOTE: named `mime_lexer` (not `lexer`) so it doesn't
            # shadow the `pygments.lexer` module imported at file top.
            mime_lexer = get_lexer_for_mimetype(content_type)
        except ClassNotFound:
            return content

        if content_type == 'application/json':
            try:
                # Indent and sort the JSON data.
                content = json.dumps(json.loads(content),
                                     sort_keys=True, indent=4,
                                     ensure_ascii=False)
            except ValueError:
                # Invalid JSON - we don't care.
                pass

        return pygments.highlight(content, mime_lexer, self.formatter)
|