# -*- coding: utf-8 -*-
"""
pygments.lexers.web
~~~~~~~~~~~~~~~~~~~
Lexers for web-related languages and markup.
:copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import copy
from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
include, this
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Other, Punctuation, Literal
from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
html_doctype_matches
from pygments.lexers.agile import RubyLexer
from pygments.lexers.compiled import ScalaLexer
__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JSONLexer', 'CssLexer',
'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
'ObjectiveJLexer', 'CoffeeScriptLexer', 'DuelLexer', 'ScamlLexer',
'JadeLexer', 'XQueryLexer', 'DtdLexer', 'DartLexer']
class JavascriptLexer(RegexLexer):
    """
    For JavaScript source code.

    NOTE(review): the token table below does not match the class name.  The
    'tag', 'script-content', 'style-content' and 'attr' states, the use of
    ``html_doctype_matches`` in ``analyse_text`` and the self-reference
    ``using(JavascriptLexer)`` all belong to an HTML lexer, not a JavaScript
    one.  This block looks like markup-stripped / fused source (an HtmlLexer
    body under a JavaScript header); several comment rules also lost their
    patterns (the empty ``r''`` and bare ``'-'`` rules below).  Confirm
    against the original file before relying on it.
    """
    name = 'JavaScript'
    aliases = ['js', 'javascript']
    filenames = ['*.js', ]
    mimetypes = ['application/javascript', 'application/x-javascript',
                 'text/x-javascript', 'text/javascript', ]

    flags = re.DOTALL
    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            # NOTE(review): empty pattern -- most likely a comment-closing
            # rule (e.g. ``-->``) whose angle-bracket text was stripped.
            (r'', Comment, '#pop'),
            ('-', Comment),
        ],
        'tag': [
            (r'\s+', Text),
            # attribute with a value pushes 'attr'; bare attribute stays here
            (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
            (r'[a-zA-Z0-9_:-]+', Name.Attribute),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'script-content': [
            # a closing </script> pops back to the surrounding markup
            (r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
            # NOTE(review): self-reference -- evaluated while this class body
            # is still being built, this raises NameError at import time;
            # further evidence that this token table was fused in from
            # another class.
            (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
        ],
        'style-content': [
            (r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
            (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
        ],
        'attr': [
            # attribute values: quoted with either quote kind, or bare up to
            # whitespace / '>'
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }

    def analyse_text(text):
        # doctype sniffing is an HTML heuristic, not a JavaScript one -- see
        # the class-level NOTE about fused source
        if html_doctype_matches(text):
            return 0.5
class PhpLexer(RegexLexer):
    """
    For PHP source code.

    For PHP embedded in HTML, use the `HtmlPhpLexer`.

    Additional options accepted:

    `startinline`
        If given and ``True`` the lexer starts highlighting with php code
        (i.e. no opening ``<?php`` tag is required).  Default: ``False``.
    `funcnamehighlighting`
        If given and ``True`` (the default), highlight builtin function
        names from ``pygments.lexers._phpbuiltins`` as ``Name.Builtin``.
    `disabledmodules`
        List of builtin-module names whose functions should *not* be
        highlighted (default: ``['unknown']``).  The names match the module
        names used in the PHP documentation, e.g.::

            >>> from pygments.lexers._phpbuiltins import MODULES
            >>> MODULES.keys()
            ['PHP Options/Info', 'Zip', 'dba', ...]
    """
    name = 'PHP'
    aliases = ['php', 'php3', 'php4', 'php5']
    filenames = ['*.php', '*.php[345]']
    mimetypes = ['text/x-php']

    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
    tokens = {
        'root': [
            # everything outside <?php ... ?> is passed through as Other
            (r'<\?(php)?', Comment.Preproc, 'php'),
            (r'[^<]+', Other),
            (r'<', Other)
        ],
        'php': [
            (r'\?>', Comment.Preproc, '#pop'),
            # heredoc/nowdoc: <<<LABEL ... LABEL;  (\1 handles the optional
            # quote around the label, \2 requires the matching terminator)
            (r'<<<(\'?)([a-zA-Z_][a-zA-Z0-9_]*)\1\n.*?\n\2\;?\n', String),
            (r'\s+', Text),
            (r'#.*?\n', Comment.Single),
            (r'//.*?\n', Comment.Single),
            # put the empty comment here, it is otherwise seen as
            # the start of a docstring
            (r'/\*\*/', Comment.Multiline),
            (r'/\*\*.*?\*/', String.Doc),
            (r'/\*.*?\*/', Comment.Multiline),
            # member/static access: ->foo or ::foo
            (r'(->|::)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Operator, Text, Name.Attribute)),
            (r'[~!%^&*+=|:.<>/?@-]+', Operator),
            (r'[\[\]{}();,]+', Punctuation),
            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
            # anonymous function: "function (" with no name
            (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
            # named function, optionally returning by reference (&)
            (r'(function)(\s+)(&?)(\s*)',
             bygroups(Keyword, Text, Operator, Text), 'functionname'),
            (r'(const)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Constant)),
            (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
             r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
             r'FALSE|print|for|require|continue|foreach|require_once|'
             r'declare|return|default|static|do|switch|die|stdClass|'
             r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
             r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
             r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
             r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
             r'implements|public|private|protected|abstract|clone|try|'
             r'catch|throw|this|use|namespace)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
            # variable variables: ${$foo}
            (r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
            (r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
            # bare identifiers (possibly namespaced with backslashes);
            # promoted to Name.Builtin in get_tokens_unprocessed if known
            (r'[\\a-zA-Z_][\\a-zA-Z0-9_]*', Name.Other),
            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
            (r'\d+[eE][+-]?[0-9]+', Number.Float),
            (r'0[0-7]+', Number.Oct),
            (r'0[xX][a-fA-F0-9]+', Number.Hex),
            (r'\d+', Number.Integer),
            (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
            (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
            (r'"', String.Double, 'string'),
        ],
        'classname': [
            (r'[a-zA-Z_][\\a-zA-Z0-9_]*', Name.Class, '#pop')
        ],
        'functionname': [
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
        ],
        # double-quoted strings get their own state so that the various
        # interpolation forms can be highlighted
        'string': [
            (r'"', String.Double, '#pop'),
            (r'[^{$"\\]+', String.Double),
            (r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
            # simple interpolation: $var, $var[idx], $var->prop
            (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\[\S+\]|->[a-zA-Z_][a-zA-Z0-9_]*)?',
             String.Interpol),
            # complex interpolation: {${expr}} and {$expr} are re-lexed as
            # PHP via a recursive `using(this)` with startinline forced on
            (r'(\{\$\{)(.*?)(\}\})',
             bygroups(String.Interpol, using(this, _startinline=True),
                      String.Interpol)),
            (r'(\{)(\$.*?)(\})',
             bygroups(String.Interpol, using(this, _startinline=True),
                      String.Interpol)),
            (r'(\$\{)(\S+)(\})',
             bygroups(String.Interpol, Name.Variable, String.Interpol)),
            (r'[${\\]+', String.Double)
        ],
    }

    def __init__(self, **options):
        self.funcnamehighlighting = get_bool_opt(
            options, 'funcnamehighlighting', True)
        self.disabledmodules = get_list_opt(
            options, 'disabledmodules', ['unknown'])
        self.startinline = get_bool_opt(options, 'startinline', False)

        # private option argument for the lexer itself (used by the
        # `using(this, _startinline=True)` interpolation rules above)
        if '_startinline' in options:
            self.startinline = options.pop('_startinline')

        # collect activated functions in a set
        self._functions = set()
        if self.funcnamehighlighting:
            from pygments.lexers._phpbuiltins import MODULES
            for key, value in MODULES.iteritems():
                if key not in self.disabledmodules:
                    self._functions.update(value)
        RegexLexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        # honour `startinline` by starting inside the 'php' state
        stack = ['root']
        if self.startinline:
            stack.append('php')
        for index, token, value in \
                RegexLexer.get_tokens_unprocessed(self, text, stack):
            if token is Name.Other:
                # promote known builtin function names
                if value in self._functions:
                    yield index, Name.Builtin, value
                    continue
            yield index, token, value

    def analyse_text(text):
        rv = 0.0
        # "<?" not followed by "xml" is most likely a PHP open tag
        if re.search(r'<\?(?!xml)', text):
            rv += 0.3
        if '?>' in text:
            rv += 0.1
        return rv
class DtdLexer(RegexLexer):
    """
    A lexer for DTDs (Document Type Definitions).

    *New in Pygments 1.5.*

    NOTE(review): the tail of this class is corrupted.  ``analyse_text``
    below is cut off mid-expression and runs straight into a foreign token
    table ('tag'/'attr' states plus a second ``analyse_text``) that looks
    like it belongs to an XML lexer whose ``class`` line was eaten by
    markup stripping.  Several rules below also lost their ``<``-containing
    patterns.  Everything is left byte-for-byte as found; confirm against
    the original file.
    """
    flags = re.MULTILINE | re.DOTALL

    name = 'DTD'
    aliases = ['dtd']
    filenames = ['*.dtd']
    mimetypes = ['application/xml-dtd']

    tokens = {
        'root': [
            include('common'),
            # NOTE(review): truncated pattern -- one capture group but three
            # bygroups targets; the ``<!ELEMENT|<!ATTLIST|...`` alternation
            # that opened it was stripped.
            (r'(\s]+)',
             bygroups(Keyword, Text, Name.Tag)),
            (r'PUBLIC|SYSTEM', Keyword.Constant),
            (r'[\[\]>]', Keyword),
        ],

        'common': [
            (r'\s+', Text),
            # parameter (%) and general (&) entity references
            (r'(%|&)[^;]*;', Name.Entity),
            # NOTE(review): empty / bare '-' patterns -- almost certainly
            # comment rules whose ``<!--``-style text was stripped.
            ('', Comment, '#pop'),
            ('-', Comment),
        ],

        'element': [
            include('common'),
            (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
            (r'[^>\s\|()?+*,]+', Name.Tag),
            (r'>', Keyword, '#pop'),
        ],

        'attlist': [
            include('common'),
            (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION', Keyword.Constant),
            (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
            (r'xml:space|xml:lang', Keyword.Reserved),
            (r'[^>\s\|()?+*,]+', Name.Attribute),
            (r'>', Keyword, '#pop'),
        ],

        'entity': [
            include('common'),
            (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
            (r'[^>\s\|()?+*,]+', Name.Entity),
            (r'>', Keyword, '#pop'),
        ],

        'notation': [
            include('common'),
            (r'SYSTEM|PUBLIC', Keyword.Constant),
            (r'[^>\s\|()?+*,]+', Name.Attribute),
            (r'>', Keyword, '#pop'),
        ],
    }

    def analyse_text(text):
        # NOTE(review): the rest of this condition (and its return value)
        # was truncated; from here on the text is the fused XML-lexer
        # fragment described in the class docstring.  Not valid Python.
        if not looks_like_xml(text) and \
            ('', Comment.Preproc),
            ('', Comment, '#pop'),
            ('-', Comment),
        ],
        'tag': [
            (r'\s+', Text),
            (r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [
            ('\s+', Text),
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
    def analyse_text(text):
        if looks_like_xml(text):
            return 0.5
class XsltLexer(XmlLexer):
    '''
    A lexer for XSLT.

    *New in Pygments 0.10.*
    '''

    name = 'XSLT'
    aliases = ['xslt']
    filenames = ['*.xsl', '*.xslt']
    mimetypes = ['application/xsl+xml', 'application/xslt+xml']

    # local names of the XSLT instruction elements (xsl:*) that get promoted
    # from Name.Tag to Keyword
    EXTRA_KEYWORDS = set([
        'apply-imports', 'apply-templates', 'attribute',
        'attribute-set', 'call-template', 'choose', 'comment',
        'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
        'for-each', 'if', 'import', 'include', 'key', 'message',
        'namespace-alias', 'number', 'otherwise', 'output', 'param',
        'preserve-space', 'processing-instruction', 'sort',
        'strip-space', 'stylesheet', 'template', 'text', 'transform',
        'value-of', 'variable', 'when', 'with-param'
    ])

    def get_tokens_unprocessed(self, text):
        """Re-tag ``<xsl:...>`` tags whose local name is an XSLT keyword."""
        for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
            # FIX: the pattern had lost its leading '</?' (stripped as
            # markup), so no opening or closing tag could ever match and no
            # XSLT keyword was ever highlighted.
            m = re.match('</?xsl:([^>]*)/?>?', value)

            if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
                yield index, Keyword, value
            else:
                yield index, token, value

    def analyse_text(text):
        # FIX: condition and return value were truncated mid-line in the
        # corrupted source (the string "'<xsl' in text: return 0.8" was cut
        # at the '<').  An xsl tag inside XML-looking text is a strong signal.
        if looks_like_xml(text) and '<xsl' in text:
            return 0.8


class MxmlLexer(RegexLexer):
    """
    For MXML markup.
    Nested AS3 in <script> tags is highlighted by the appropriate lexer.

    NOTE(review): the ``class`` line above was reconstructed -- the original
    was eaten by markup stripping (``__all__`` lists ``MxmlLexer`` and the
    ``name``/``aliases`` attributes below identify the class unambiguously).
    """
    flags = re.MULTILINE | re.DOTALL
    name = 'MXML'
    aliases = ['mxml']
    filenames = ['*.mxml']
    mimetimes = ['text/xml', 'application/xml']

    tokens = {
        'root': [
            ('[^<&]+', Text),
            (r'&\S*?;', Name.Entity),
            # CDATA sections contain ActionScript 3 and are delegated to
            # that lexer
            (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
             bygroups(String, using(ActionScript3Lexer), String)),
            # NOTE(review): the next two rules lost their ``<!--``-style
            # patterns to markup stripping; left as found -- confirm the
            # original comment-handling rules before relying on them.
            ('', Comment, '#pop'),
            ('-', Comment),
        ],
        'tag': [
            (r'\s+', Text),
            (r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [
            ('\s+', Text),
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
class HaxeLexer(RegexLexer):
    """
    For haXe source code (http://haxe.org/).
    """
    name = 'haXe'
    aliases = ['hx', 'haXe']
    filenames = ['*.hx']
    mimetypes = ['text/haxe']

    # building blocks reused throughout the token table
    ident = r'(?:[a-zA-Z_][a-zA-Z0-9_]*)'          # plain identifier
    typeid = r'(?:(?:[a-z0-9_\.])*[A-Z_][A-Za-z0-9_]*)'  # (dotted) type name
    key_prop = r'(?:default|null|never)'           # property access keywords
    key_decl_mod = r'(?:public|private|override|static|inline|extern|dynamic)'

    flags = re.DOTALL | re.MULTILINE

    tokens = {
        'root': [
            include('whitespace'),
            include('comments'),
            (key_decl_mod, Keyword.Declaration),
            include('enumdef'),
            include('typedef'),
            include('classdef'),
            include('imports'),
        ],

        # General constructs
        'comments': [
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'#[^\n]*', Comment.Preproc),
        ],
        'whitespace': [
            include('comments'),
            (r'\s+', Text),
        ],
        'codekeywords': [
            (r'\b(if|else|while|do|for|in|break|continue|'
             r'return|switch|case|try|catch|throw|null|trace|'
             r'new|this|super|untyped|cast|callback|here)\b',
             Keyword.Reserved),
        ],
        'literals': [
            (r'0[xX][0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            # ~/regex/flags literals
            (r'~/([^\n])*?/[gisx]*', String.Regex),
            (r'\b(true|false|null)\b', Keyword.Constant),
        ],
        'codeblock': [
            include('whitespace'),
            include('new'),
            include('case'),
            include('anonfundef'),
            include('literals'),
            include('vardef'),
            include('codekeywords'),
            (r'[();,\[\]]', Punctuation),
            (r'(?:=|\+=|-=|\*=|/=|%=|&=|\|=|\^=|<<=|>>=|>>>=|\|\||&&|'
             r'\.\.\.|==|!=|>|<|>=|<=|\||&|\^|<<|>>>|>>|\+|\-|\*|/|%|'
             r'!|\+\+|\-\-|~|\.|\?|\:)',
             Operator),
            (ident, Name),
            # braces track nesting via the state stack
            (r'}', Punctuation,'#pop'),
            (r'{', Punctuation,'#push'),
        ],

        # Instance/Block level constructs
        'propertydef': [
            # (get, set) access modifiers of a property declaration
            (r'(\()(' + key_prop + ')(,)(' + key_prop + ')(\))',
             bygroups(Punctuation, Keyword.Reserved, Punctuation,
                      Keyword.Reserved, Punctuation)),
        ],
        'new': [
            (r'\bnew\b', Keyword, 'typedecl'),
        ],
        'case': [
            # enum constructor pattern in a switch case: case Name(args)
            (r'\b(case)(\s+)(' + ident + ')(\s*)(\()',
             bygroups(Keyword.Reserved, Text, Name, Text, Punctuation),
             'funargdecl'),
        ],
        'vardef': [
            (r'\b(var)(\s+)(' + ident + ')',
             bygroups(Keyword.Declaration, Text, Name.Variable), 'vardecl'),
        ],
        'vardecl': [
            include('whitespace'),
            include('typelabel'),
            (r'=', Operator,'#pop'),
            (r';', Punctuation,'#pop'),
        ],
        'instancevardef': [
            (key_decl_mod,Keyword.Declaration),
            (r'\b(var)(\s+)(' + ident + ')',
             bygroups(Keyword.Declaration, Text, Name.Variable.Instance),
             'instancevardecl'),
        ],
        'instancevardecl': [
            include('vardecl'),
            include('propertydef'),
        ],

        'anonfundef': [
            (r'\bfunction\b', Keyword.Declaration, 'fundecl'),
        ],
        'instancefundef': [
            (key_decl_mod, Keyword.Declaration),
            (r'\b(function)(\s+)(' + ident + ')',
             bygroups(Keyword.Declaration, Text, Name.Function), 'fundecl'),
        ],
        'fundecl': [
            include('whitespace'),
            include('typelabel'),
            include('generictypedecl'),
            (r'\(',Punctuation,'funargdecl'),
            # lookahead only: an identifier here means the declaration ended
            (r'(?=[a-zA-Z0-9_])',Text,'#pop'),
            (r'{',Punctuation,('#pop','codeblock')),
            (r';',Punctuation,'#pop'),
        ],
        'funargdecl': [
            include('whitespace'),
            (ident, Name.Variable),
            include('typelabel'),
            include('literals'),
            (r'=', Operator),
            (r',', Punctuation),
            (r'\?', Punctuation),
            (r'\)', Punctuation, '#pop'),
        ],

        'typelabel': [
            # ":Type" annotation after a name
            (r':', Punctuation, 'type'),
        ],
        'typedecl': [
            include('whitespace'),
            (typeid, Name.Class),
            (r'<', Punctuation, 'generictypedecl'),
            (r'(?=[{}()=,a-z])', Text,'#pop'),
        ],
        'type': [
            include('whitespace'),
            (typeid, Name.Class),
            (r'<', Punctuation, 'generictypedecl'),
            # function type arrow
            (r'->', Keyword.Type),
            (r'(?=[{}(),;=])', Text, '#pop'),
        ],
        'generictypedecl': [
            include('whitespace'),
            (typeid, Name.Class),
            # nested type parameters push/pop this same state
            (r'<', Punctuation, '#push'),
            (r'>', Punctuation, '#pop'),
            (r',', Punctuation),
        ],

        # Top level constructs
        'imports': [
            (r'(package|import|using)(\s+)([^;]+)(;)',
             bygroups(Keyword.Namespace, Text, Name.Namespace,Punctuation)),
        ],
        'typedef': [
            (r'typedef', Keyword.Declaration, ('typedefprebody', 'typedecl')),
        ],
        'typedefprebody': [
            include('whitespace'),
            (r'(=)(\s*)({)', bygroups(Punctuation, Text, Punctuation),
             ('#pop', 'typedefbody')),
        ],
        'enumdef': [
            (r'enum', Keyword.Declaration, ('enumdefprebody', 'typedecl')),
        ],
        'enumdefprebody': [
            include('whitespace'),
            (r'{', Punctuation, ('#pop','enumdefbody')),
        ],
        'classdef': [
            (r'class', Keyword.Declaration, ('classdefprebody', 'typedecl')),
        ],
        'classdefprebody': [
            include('whitespace'),
            (r'(extends|implements)', Keyword.Declaration,'typedecl'),
            (r'{', Punctuation, ('#pop', 'classdefbody')),
        ],
        'interfacedef': [
            (r'interface', Keyword.Declaration,
             ('interfacedefprebody', 'typedecl')),
        ],
        'interfacedefprebody': [
            include('whitespace'),
            (r'(extends)', Keyword.Declaration, 'typedecl'),
            (r'{', Punctuation, ('#pop', 'classdefbody')),
        ],

        'typedefbody': [
            include('whitespace'),
            include('instancevardef'),
            include('instancefundef'),
            (r'>', Punctuation, 'typedecl'),
            (r',', Punctuation),
            (r'}', Punctuation, '#pop'),
        ],
        'enumdefbody': [
            include('whitespace'),
            (ident, Name.Variable.Instance),
            (r'\(', Punctuation, 'funargdecl'),
            (r';', Punctuation),
            (r'}', Punctuation, '#pop'),
        ],
        'classdefbody': [
            include('whitespace'),
            include('instancevardef'),
            include('instancefundef'),
            (r'}', Punctuation, '#pop'),
            include('codeblock'),
        ],
    }

    def analyse_text(text):
        # "name : Type"-style annotations are a (weak) haXe signal
        if re.match(r'\w+\s*:\s*\w', text): return 0.3
def _indentation(lexer, match, ctx):
    """ExtendedRegexLexer callback that dispatches on leading whitespace.

    Emits the matched indentation as Text, remembers it on the context, and
    then either re-enters the pending indented block state (when the current
    line is indented deeper than the line that opened the block) or clears
    any pending block and proceeds to 'content'.
    """
    ws = match.group(0)
    yield match.start(), Text, ws

    ctx.last_indentation = ws
    ctx.pos = match.end()

    pending = getattr(ctx, 'block_state', None)
    deeper = (pending and
              ws.startswith(ctx.block_indentation) and
              ws != ctx.block_indentation)

    if deeper:
        # still inside the filter/comment block opened earlier
        ctx.stack.append(pending)
    else:
        ctx.block_state = None
        ctx.block_indentation = None
        ctx.stack.append('content')
def _starts_block(token, state):
    """Build an ExtendedRegexLexer callback that opens an indented block.

    The returned callback yields the matched text as *token* and records on
    the context which *state* the following deeper-indented lines belong to
    (consumed by ``_indentation`` above), along with the indentation of the
    opening line.
    """
    def callback(lexer, match, ctx):
        yield match.start(), token, match.group(0)

        # remember the indentation of the line that opened the block; ''
        # when no indentation has been seen yet
        ctx.block_indentation = getattr(ctx, 'last_indentation', '')
        ctx.block_state = state
        ctx.pos = match.end()

    return callback
class HamlLexer(ExtendedRegexLexer):
    """
    For Haml markup.

    *New in Pygments 1.3.*
    """

    name = 'Haml'
    aliases = ['haml', 'HAML']
    filenames = ['*.haml']
    mimetypes = ['text/x-haml']

    flags = re.IGNORECASE
    # Haml can include " |\n" anywhere,
    # which is ignored and used to wrap long lines.
    # To accomodate this, use this custom faux dot instead.
    _dot = r'(?: \|\n(?=.* \|)|.)'

    # In certain places, a comma at the end of the line
    # allows line wrapping as well.
    _comma_dot = r'(?:,\s*\n|' + _dot + ')'
    tokens = {
        'root': [
            (r'[ \t]*\n', Text),
            # indentation dispatch -- see the module-level _indentation helper
            (r'[ \t]*', _indentation),
        ],

        'css': [
            # .class and #id shortcuts open a tag
            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
        ],

        'eval-or-plain': [
            # "==" (and "&==" / "!==") forces plain text
            (r'[&!]?==', Punctuation, 'plain'),
            # "=" / "~" evaluate the rest of the line as Ruby
            (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
             bygroups(Punctuation, using(RubyLexer)),
             'root'),
            (r'', Text, 'plain'),
        ],

        'content': [
            include('css'),
            (r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
            (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
            # conditional HTML comment: /[if ...]
            (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
             bygroups(Comment, Comment.Special, Comment),
             '#pop'),
            # "/" opens an HTML comment block, "-#" a Haml-only comment block
            (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
             '#pop'),
            (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
                                                 'haml-comment-block'), '#pop'),
            # "-" runs Ruby without output
            (r'(-)(' + _comma_dot + r'*\n)',
             bygroups(Punctuation, using(RubyLexer)),
             '#pop'),
            # ":filter" opens a filter block
            (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
             '#pop'),
            include('eval-or-plain'),
        ],

        'tag': [
            include('css'),
            # {...} and [...] attribute hashes are Ruby
            (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)),
            (r'\[' + _dot + '*?\]', using(RubyLexer)),
            (r'\(', Text, 'html-attributes'),
            (r'/[ \t]*\n', Punctuation, '#pop:2'),
            # whitespace-removal markers < and >
            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
            include('eval-or-plain'),
        ],

        'plain': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
            # #{...} Ruby interpolation inside plain text
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],

        'html-attributes': [
            (r'\s+', Text),
            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
            (r'[a-z0-9_:-]+', Name.Attribute),
            (r'\)', Text, '#pop'),
        ],

        'html-attribute-value': [
            (r'[ \t]+', Text),
            (r'[a-z0-9_]+', Name.Variable, '#pop'),
            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
        ],

        'html-comment-block': [
            (_dot + '+', Comment),
            (r'\n', Text, 'root'),
        ],

        'haml-comment-block': [
            (_dot + '+', Comment.Preproc),
            (r'\n', Text, 'root'),
        ],

        'filter-block': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],
    }
# Token states shared by the Sass and SCSS lexers below.  Each lexer copies
# these states (copy.copy per state list) and then appends its own
# end-of-declaration rules for 'value' and 'selector'.
common_sass_tokens = {
    'value': [
        (r'[ \t]+', Text),
        # !var (old Sass) and $var variables
        (r'[!$][\w-]+', Name.Variable),
        (r'url\(', String.Other, 'string-url'),
        # a name directly followed by '(' is a function call
        (r'[a-z_-][\w-]*(?=\()', Name.Function),
        # known CSS property names and value keywords
        (r'(azimuth|background-attachment|background-color|'
         r'background-image|background-position|background-repeat|'
         r'background|border-bottom-color|border-bottom-style|'
         r'border-bottom-width|border-left-color|border-left-style|'
         r'border-left-width|border-right|border-right-color|'
         r'border-right-style|border-right-width|border-top-color|'
         r'border-top-style|border-top-width|border-bottom|'
         r'border-collapse|border-left|border-width|border-color|'
         r'border-spacing|border-style|border-top|border|caption-side|'
         r'clear|clip|color|content|counter-increment|counter-reset|'
         r'cue-after|cue-before|cue|cursor|direction|display|'
         r'elevation|empty-cells|float|font-family|font-size|'
         r'font-size-adjust|font-stretch|font-style|font-variant|'
         r'font-weight|font|height|letter-spacing|line-height|'
         r'list-style-type|list-style-image|list-style-position|'
         r'list-style|margin-bottom|margin-left|margin-right|'
         r'margin-top|margin|marker-offset|marks|max-height|max-width|'
         r'min-height|min-width|opacity|orphans|outline|outline-color|'
         r'outline-style|outline-width|overflow|padding-bottom|'
         r'padding-left|padding-right|padding-top|padding|page|'
         r'page-break-after|page-break-before|page-break-inside|'
         r'pause-after|pause-before|pause|pitch|pitch-range|'
         r'play-during|position|quotes|richness|right|size|'
         r'speak-header|speak-numeral|speak-punctuation|speak|'
         r'speech-rate|stress|table-layout|text-align|text-decoration|'
         r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
         r'vertical-align|visibility|voice-family|volume|white-space|'
         r'widows|width|word-spacing|z-index|bottom|left|'
         r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
         r'behind|below|bidi-override|blink|block|bold|bolder|both|'
         r'capitalize|center-left|center-right|center|circle|'
         r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
         r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
         r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
         r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
         r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
         r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
         r'inherit|inline-table|inline|inset|inside|invert|italic|'
         r'justify|katakana-iroha|katakana|landscape|larger|large|'
         r'left-side|leftwards|level|lighter|line-through|list-item|'
         r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
         r'lower|low|medium|message-box|middle|mix|monospace|'
         r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
         r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
         r'open-quote|outset|outside|overline|pointer|portrait|px|'
         r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
         r'rightwards|s-resize|sans-serif|scroll|se-resize|'
         r'semi-condensed|semi-expanded|separate|serif|show|silent|'
         r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
         r'spell-out|square|static|status-bar|super|sw-resize|'
         r'table-caption|table-cell|table-column|table-column-group|'
         r'table-footer-group|table-header-group|table-row|'
         r'table-row-group|text|text-bottom|text-top|thick|thin|'
         r'transparent|ultra-condensed|ultra-expanded|underline|'
         r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
         r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
         r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
        # extended CSS color keywords
        (r'(indigo|gold|firebrick|indianred|darkolivegreen|'
         r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
         r'mediumslateblue|springgreen|crimson|lightsalmon|brown|'
         r'turquoise|olivedrab|cyan|skyblue|darkturquoise|'
         r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|'
         r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
         r'violet|orchid|ghostwhite|honeydew|cornflowerblue|'
         r'darkblue|darkkhaki|mediumpurple|cornsilk|bisque|slategray|'
         r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
         r'gainsboro|mediumturquoise|floralwhite|coral|lightgrey|'
         r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
         r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
         r'lightcoral|orangered|navajowhite|palegreen|burlywood|'
         r'seashell|mediumspringgreen|papayawhip|blanchedalmond|'
         r'peru|aquamarine|darkslategray|ivory|dodgerblue|'
         r'lemonchiffon|chocolate|orange|forestgreen|slateblue|'
         r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
         r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
         r'plum|darkgoldenrod|sandybrown|magenta|tan|'
         r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
         r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
         r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
         r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
         r'lightyellow|lavenderblush|linen|mediumaquamarine|'
         r'blueviolet|peachpuff)\b', Name.Entity),
        # the 16 basic CSS color keywords
        (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
         r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
        (r'\!(important|default)', Name.Exception),
        (r'(true|false)', Name.Pseudo),
        (r'(and|or|not)', Operator.Word),
        (r'/\*', Comment.Multiline, 'inline-comment'),
        (r'//[^\n]*', Comment.Single),
        # hex color literal
        (r'\#[a-z0-9]{1,6}', Number.Hex),
        # numbers with an optional unit (%, px, em, ...)
        (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
        (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
        (r'#{', String.Interpol, 'interpolation'),
        (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
        (r'[\[\]()]+', Punctuation),
        (r'"', String.Double, 'string-double'),
        (r"'", String.Single, 'string-single'),
        (r'[a-z_-][\w-]*', Name),
    ],

    'interpolation': [
        (r'\}', String.Interpol, '#pop'),
        include('value'),
    ],

    'selector': [
        (r'[ \t]+', Text),
        (r'\:', Name.Decorator, 'pseudo-class'),
        (r'\.', Name.Class, 'class'),
        (r'\#', Name.Namespace, 'id'),
        (r'[a-zA-Z0-9_-]+', Name.Tag),
        (r'#\{', String.Interpol, 'interpolation'),
        # parent-selector reference
        (r'&', Keyword),
        (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
        (r'"', String.Double, 'string-double'),
        (r"'", String.Single, 'string-single'),
    ],

    'string-double': [
        (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'"', String.Double, '#pop'),
    ],

    'string-single': [
        (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
        (r'#\{', String.Interpol, 'interpolation'),
        (r"'", String.Double, '#pop'),
    ],

    'string-url': [
        (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'\)', String.Other, '#pop'),
    ],

    'pseudo-class': [
        (r'[\w-]+', Name.Decorator),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],

    'class': [
        (r'[\w-]+', Name.Class),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],

    'id': [
        (r'[\w-]+', Name.Namespace),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],

    'for': [
        (r'(from|to|through)', Operator.Word),
        include('value'),
    ],
}
class SassLexer(ExtendedRegexLexer):
    """
    For Sass stylesheets.

    *New in Pygments 1.3.*
    """

    name = 'Sass'
    aliases = ['sass', 'SASS']
    filenames = ['*.sass']
    mimetypes = ['text/x-sass']

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'[ \t]*\n', Text),
            # indentation dispatch -- see the module-level _indentation helper
            (r'[ \t]*', _indentation),
        ],

        'content': [
            # // and /* comments open indented blocks via _starts_block
            (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
             'root'),
            (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
             'root'),
            (r'@import', Keyword, 'import'),
            (r'@for', Keyword, 'for'),
            (r'@(debug|warn|if|while)', Keyword, 'value'),
            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
            (r'@extend', Keyword, 'selector'),
            (r'@[a-z0-9_-]+', Keyword, 'selector'),
            # "=name" defines a mixin, "+name" includes one (old-style syntax)
            (r'=[\w-]+', Name.Function, 'value'),
            (r'\+[\w-]+', Name.Decorator, 'value'),
            # variable assignment: !var= / $var:
            (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
             bygroups(Name.Variable, Operator), 'value'),
            (r':', Name.Attribute, 'old-style-attr'),
            (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
            (r'', Text, 'selector'),
        ],

        'single-comment': [
            (r'.+', Comment.Single),
            (r'\n', Text, 'root'),
        ],

        'multi-comment': [
            (r'.+', Comment.Multiline),
            (r'\n', Text, 'root'),
        ],

        'import': [
            (r'[ \t]+', Text),
            (r'\S+', String),
            (r'\n', Text, 'root'),
        ],

        'old-style-attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*=', Operator, 'value'),
            (r'', Text, 'value'),
        ],

        'new-style-attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*[=:]', Operator, 'value'),
        ],

        'inline-comment': [
            (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
            (r'#\{', String.Interpol, 'interpolation'),
            (r"\*/", Comment, '#pop'),
        ],
    }

    # graft the shared Sass/SCSS states onto this lexer and add the newline
    # handling specific to the indentation-based Sass syntax
    for group, common in common_sass_tokens.iteritems():
        tokens[group] = copy.copy(common)
    tokens['value'].append((r'\n', Text, 'root'))
    tokens['selector'].append((r'\n', Text, 'root'))
class ScssLexer(RegexLexer):
    """
    For SCSS stylesheets.
    """

    name = 'SCSS'
    aliases = ['scss']
    filenames = ['*.scss']
    mimetypes = ['text/x-scss']

    flags = re.IGNORECASE | re.DOTALL
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@import', Keyword, 'value'),
            (r'@for', Keyword, 'for'),
            (r'@(debug|warn|if|while)', Keyword, 'value'),
            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
            (r'@extend', Keyword, 'selector'),
            (r'@[a-z0-9_-]+', Keyword, 'selector'),
            # variable assignment: $var:
            (r'(\$[\w-]\w*)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
            # lookaheads deciding between attribute and selector
            (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
            (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
            (r'', Text, 'selector'),
        ],

        'attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*:', Operator, 'value'),
        ],

        'inline-comment': [
            (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
            (r'#\{', String.Interpol, 'interpolation'),
            (r"\*/", Comment, '#pop'),
        ],
    }

    # graft the shared Sass/SCSS states onto this lexer and add the
    # brace/semicolon handling specific to the SCSS syntax
    for group, common in common_sass_tokens.iteritems():
        tokens[group] = copy.copy(common)
    tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
    tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
class CoffeeScriptLexer(RegexLexer):
"""
For `CoffeeScript`_ source code.
.. _CoffeeScript: http://coffeescript.org
*New in Pygments 1.3.*
"""
name = 'CoffeeScript'
aliases = ['coffee-script', 'coffeescript']
filenames = ['*.coffee']
mimetypes = ['text/coffeescript']
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'###.*?###', Comment.Multiline),
(r'#.*?\n', Comment.Single),
],
'multilineregex': [
include('commentsandwhitespace'),
(r'///([gim]+\b|\B)', String.Regex, '#pop'),
(r'/', String.Regex),
(r'[^/#]+', String.Regex)
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'///', String.Regex, ('#pop', 'multilineregex')),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'', Text, '#pop'),
],
'root': [
# this next expr leads to infinite loops root -> slashstartsregex
#(r'^(?=\s|/|)', popstate_xmlcomment_callback),
(r'[^-]{1,2}', Literal),
(ur'\t|\r|\n|[\u0020-\U0000D7FF]|[\U0000E000-\U0000FFFD]|'
ur'[\U00010000-\U0010FFFF]', Literal),
],
'processing_instruction': [
(r'\s+', Text, 'processing_instruction_content'),
(r'\?>', String.Doc, '#pop'),
(pitarget, Name),
],
'processing_instruction_content': [
(r'\?>', String.Doc, '#pop'),
(ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|'
ur'[\U00010000-\U0010FFFF]', Literal),
],
'cdata_section': [
(r']]>', String.Doc, '#pop'),
(ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|'
ur'[\U00010000-\U0010FFFF]', Literal),
],
'start_tag': [
include('whitespace'),
(r'(/>)', popstate_tag_callback),
(r'>', Name.Tag, 'element_content'),
(r'"', Punctuation, 'quot_attribute_content'),
(r"'", Punctuation, 'apos_attribute_content'),
(r'=', Operator),
(qname, Name.Tag),
],
'quot_attribute_content': [
(r'"', Punctuation, 'start_tag'),
(r'(\{)', pushstate_root_callback),
(r'""', Name.Attribute),
(quotattrcontentchar, Name.Attribute),
(entityref, Name.Attribute),
(charref, Name.Attribute),
(r'\{\{|\}\}', Name.Attribute),
],
'apos_attribute_content': [
(r"'", Punctuation, 'start_tag'),
(r'\{', Punctuation, 'root'),
(r"''", Name.Attribute),
(aposattrcontentchar, Name.Attribute),
(entityref, Name.Attribute),
(charref, Name.Attribute),
(r'\{\{|\}\}', Name.Attribute),
],
'element_content': [
(r'', Name.Tag, 'end_tag'),
(r'(\{)', pushstate_root_callback),
(r'(