Back to index

moin  1.9.0~rc2
other.py
Go to the documentation of this file.
00001 # -*- coding: utf-8 -*-
00002 """
00003     pygments.formatters.other
00004     ~~~~~~~~~~~~~~~~~~~~~~~~~
00005 
00006     Other formatters: NullFormatter, RawTokenFormatter.
00007 
00008     :copyright: Copyright 2006-2009 by the Pygments team, see AUTHORS.
00009     :license: BSD, see LICENSE for details.
00010 """
00011 
00012 from pygments.formatter import Formatter
00013 from pygments.util import OptionError, get_choice_opt, b
00014 from pygments.token import Token
00015 from pygments.console import colorize
00016 
00017 __all__ = ['NullFormatter', 'RawTokenFormatter']
00018 
00019 
class NullFormatter(Formatter):
    """
    Output the text unchanged without any formatting.
    """
    name = 'Text only'
    aliases = ['text', 'null']
    filenames = ['*.txt']

    def format(self, tokensource, outfile):
        # Pass every token's text straight through.  The encoding check is
        # hoisted out of the loop: either every value is encoded, or none is.
        encoding = self.encoding
        if encoding:
            for _ttype, text in tokensource:
                outfile.write(text.encode(encoding))
        else:
            for _ttype, text in tokensource:
                outfile.write(text)
00035 
00036 
class RawTokenFormatter(Formatter):
    r"""
    Format tokens as a raw representation for storing token streams.

    The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
    be converted to a token stream with the `RawTokenLexer`, described in the
    `lexer list <lexers.txt>`_.

    Only two options are accepted:

    `compress`
        If set to ``'gz'`` or ``'bz2'``, compress the output with the given
        compression algorithm after encoding (default: ``''``).
    `error_color`
        If set to a color name, highlight error tokens using that color.  If
        set but with no value, defaults to ``'red'``.
        *New in Pygments 0.11.*

    """
    name = 'Raw tokens'
    aliases = ['raw', 'tokens']
    filenames = ['*.raw']

    unicodeoutput = False

    def __init__(self, **options):
        """Validate options; raises OptionError/ValueError on bad input."""
        Formatter.__init__(self, **options)
        # The raw token format defines its own byte encoding, so a
        # user-supplied encoding would be ambiguous -- reject it outright.
        if self.encoding:
            raise OptionError('the raw formatter does not support the '
                              'encoding option')
        self.encoding = 'ascii'  # let pygments.format() do the right thing
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        self.error_color = options.get('error_color', None)
        if self.error_color is True:
            # Boolean option given with no explicit value: use the default.
            self.error_color = 'red'
        if self.error_color is not None:
            # Validate the color name eagerly so a bad value fails at
            # construction time instead of mid-stream during format().
            try:
                colorize(self.error_color, '')
            except KeyError:
                raise ValueError("Invalid color %r specified" %
                                 self.error_color)

    def format(self, tokensource, outfile):
        """Write ``tokentype<TAB>repr(value)`` lines (optionally compressed)
        to *outfile*, which must be opened in binary mode."""
        # Probe the output file: the raw format is bytes, so a text-mode
        # file must be rejected with a helpful message.
        try:
            outfile.write(b(''))
        except TypeError:
            raise TypeError('The raw tokens formatter needs a binary '
                            'output file')
        if self.compress == 'gz':
            import gzip
            realfile = outfile
            outfile = gzip.GzipFile('', 'wb', 9, realfile)
            def write(text):
                outfile.write(text.encode())
            def flush():
                # close() is required: it writes the gzip trailer (CRC32 and
                # uncompressed size); flushing alone leaves the stream
                # truncated.  Closing the GzipFile wrapper does not close
                # the underlying file object, which we flush separately.
                outfile.close()
                realfile.flush()
        elif self.compress == 'bz2':
            import bz2
            compressor = bz2.BZ2Compressor(9)
            def write(text):
                outfile.write(compressor.compress(text.encode()))
            def flush():
                # Emit whatever the compressor is still buffering.
                outfile.write(compressor.flush())
                outfile.flush()
        else:
            def write(text):
                outfile.write(text.encode())
            flush = outfile.flush

        if self.error_color:
            for ttype, value in tokensource:
                line = "%s\t%r\n" % (ttype, value)
                if ttype is Token.Error:
                    # Wrap error tokens in the configured terminal color.
                    write(colorize(self.error_color, line))
                else:
                    write(line)
        else:
            for ttype, value in tokensource:
                write("%s\t%r\n" % (ttype, value))
        flush()