Pagination, readable error messages for the user, syntax highlighting started.

Pagination now works. The page size is temporarily hardcoded to 10 pastes per
page; it will be moved into the configuration later. Maybe.
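
A minimal sketch of the idea, not the actual code in this commit (the Paste
type, the pastesPerPage constant and the pastePage function are illustrative
names only):

    // Hypothetical pagination sketch: hardcoded page size, 1-based page numbers.
    package main

    import "fmt"

    const pastesPerPage = 10 // temporary; should come from configuration later

    type Paste struct {
        ID    int64
        Title string
    }

    // pastePage returns the slice of pastes shown on the given page.
    func pastePage(all []Paste, page int) []Paste {
        if page < 1 {
            page = 1
        }
        start := (page - 1) * pastesPerPage
        if start >= len(all) {
            return nil
        }
        end := start + pastesPerPage
        if end > len(all) {
            end = len(all)
        }
        return all[start:end]
    }

    func main() {
        all := make([]Paste, 25)
        for i := range all {
            all[i] = Paste{ID: int64(i + 1), Title: fmt.Sprintf("paste %d", i+1)}
        }
        fmt.Println(len(pastePage(all, 3))) // prints 5: the last, partial page
    }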

From now on, the user will receive a readable error message when an error occurs.
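
The idea, roughly; errorPage and the message text below are illustrative, not
the actual handler code added here:

    // Hypothetical sketch: log the technical details, show the user a short
    // readable message instead of a bare status code or stack trace.
    package main

    import (
        "errors"
        "log"
        "net/http"
    )

    func errorPage(w http.ResponseWriter, err error, userMsg string, code int) {
        log.Printf("internal error: %v", err) // full details go to the log
        http.Error(w, userMsg, code)          // the user only sees readable text
    }

    func pasteHandler(w http.ResponseWriter, r *http.Request) {
        if err := loadPaste(r.URL.Path); err != nil {
            errorPage(w, err, "Could not load this paste. Please try again later.",
                http.StatusInternalServerError)
            return
        }
        w.Write([]byte("paste contents"))
    }

    // loadPaste is a stand-in for the real storage lookup.
    func loadPaste(path string) error { return errors.New("not implemented") }

    func main() {
        http.HandleFunc("/", pasteHandler)
        log.Fatal(http.ListenAndServe(":8080", nil))
    }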

Started working on syntax highlighting; tried to make lexer detection work,
but apparently to no avail.
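
The detection being attempted looks roughly like this with the vendored chroma
package (a sketch under the assumption of "explicit name, then content
analysis, then plain-text fallback"; detectLexer is an illustrative name):

    // Hypothetical sketch of lexer detection with chroma.
    package main

    import (
        "fmt"

        "github.com/alecthomas/chroma"
        "github.com/alecthomas/chroma/lexers"
    )

    // detectLexer tries an explicit language name first, then content
    // analysis, and finally falls back to the plain-text lexer.
    func detectLexer(name, source string) chroma.Lexer {
        var l chroma.Lexer
        if name != "" {
            l = lexers.Get(name)
        }
        if l == nil {
            l = lexers.Analyse(source)
        }
        if l == nil {
            l = lexers.Fallback
        }
        return l
    }

    func main() {
        l := detectLexer("go", "package main\n")
        fmt.Println(l.Config().Name)
    }
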
2018-05-01 02:37:51 +05:00
parent 79c7d39759
commit 48d43ca097
221 changed files with 30321 additions and 29 deletions

54
vendor/github.com/alecthomas/chroma/lexers/m/make.go generated vendored Normal file

@@ -0,0 +1,54 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
. "github.com/alecthomas/chroma/lexers/b" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Makefile lexer.
var Makefile = internal.Register(MustNewLexer(
&Config{
Name: "Base Makefile",
Aliases: []string{"make", "makefile", "mf", "bsdmake"},
Filenames: []string{"*.mak", "*.mk", "Makefile", "makefile", "Makefile.*", "GNUmakefile"},
MimeTypes: []string{"text/x-makefile"},
EnsureNL: true,
},
Rules{
"root": {
{`^(?:[\t ]+.*\n|\n)+`, Using(Bash), nil},
{`\$[<@$+%?|*]`, Keyword, nil},
{`\s+`, Text, nil},
{`#.*?\n`, Comment, nil},
{`(export)(\s+)(?=[\w${}\t -]+\n)`, ByGroups(Keyword, Text), Push("export")},
{`export\s+`, Keyword, nil},
{`([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)`, ByGroups(NameVariable, Text, Operator, Text, Using(Bash)), nil},
{`(?s)"(\\\\|\\.|[^"\\])*"`, LiteralStringDouble, nil},
{`(?s)'(\\\\|\\.|[^'\\])*'`, LiteralStringSingle, nil},
{`([^\n:]+)(:+)([ \t]*)`, ByGroups(NameFunction, Operator, Text), Push("block-header")},
{`\$\(`, Keyword, Push("expansion")},
},
"expansion": {
{`[^$a-zA-Z_()]+`, Text, nil},
{`[a-zA-Z_]+`, NameVariable, nil},
{`\$`, Keyword, nil},
{`\(`, Keyword, Push()},
{`\)`, Keyword, Pop(1)},
},
"export": {
{`[\w${}-]+`, NameVariable, nil},
{`\n`, Text, Pop(1)},
{`\s+`, Text, nil},
},
"block-header": {
{`[,|]`, Punctuation, nil},
{`#.*?\n`, Comment, Pop(1)},
{`\\\n`, Text, nil},
{`\$\(`, Keyword, Push("expansion")},
{`[a-zA-Z_]+`, Name, nil},
{`\n`, Text, Pop(1)},
{`.`, Text, nil},
},
},
))

60
vendor/github.com/alecthomas/chroma/lexers/m/mako.go generated vendored Normal file

@@ -0,0 +1,60 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
. "github.com/alecthomas/chroma/lexers/p" // nolint
)
// Mako lexer.
var Mako = internal.Register(MustNewLexer(
&Config{
Name: "Mako",
Aliases: []string{"mako"},
Filenames: []string{"*.mao"},
MimeTypes: []string{"application/x-mako"},
},
Rules{
"root": {
{`(\s*)(%)(\s*end(?:\w+))(\n|\Z)`, ByGroups(Text, CommentPreproc, Keyword, Other), nil},
{`(\s*)(%)([^\n]*)(\n|\Z)`, ByGroups(Text, CommentPreproc, Using(Python), Other), nil},
{`(\s*)(##[^\n]*)(\n|\Z)`, ByGroups(Text, CommentPreproc, Other), nil},
{`(?s)<%doc>.*?</%doc>`, CommentPreproc, nil},
{`(<%)([\w.:]+)`, ByGroups(CommentPreproc, NameBuiltin), Push("tag")},
{`(</%)([\w.:]+)(>)`, ByGroups(CommentPreproc, NameBuiltin, CommentPreproc), nil},
{`<%(?=([\w.:]+))`, CommentPreproc, Push("ondeftags")},
{`(<%(?:!?))(.*?)(%>)(?s)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil},
{`(\$\{)(.*?)(\})`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil},
{`(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=%|\#\#) | # an eval or comment line
(?=\#\*) | # multiline comment
(?=</?%) | # a python block
# call start or end
(?=\$\{) | # a substitution
(?<=\n)(?=\s*%) |
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)
`, ByGroups(Other, Operator), nil},
{`\s+`, Text, nil},
},
"ondeftags": {
{`<%`, CommentPreproc, nil},
{`(?<=<%)(include|inherit|namespace|page)`, NameBuiltin, nil},
Include("tag"),
},
"tag": {
{`((?:\w+)\s*=)(\s*)(".*?")`, ByGroups(NameAttribute, Text, LiteralString), nil},
{`/?\s*>`, CommentPreproc, Pop(1)},
{`\s+`, Text, nil},
},
"attr": {
{`".*?"`, LiteralString, Pop(1)},
{`'.*?'`, LiteralString, Pop(1)},
{`[^\s>]+`, LiteralString, Pop(1)},
},
},
))

63
vendor/github.com/alecthomas/chroma/lexers/m/markdown.go generated vendored Normal file

@@ -0,0 +1,63 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Markdown lexer.
var Markdown = internal.Register(MustNewLexer(
&Config{
Name: "markdown",
Aliases: []string{"md", "mkd"},
Filenames: []string{"*.md", "*.mkd", "*.markdown"},
MimeTypes: []string{"text/x-markdown"},
},
Rules{
"root": {
{`^(#)([^#].+\n)`, ByGroups(GenericHeading, Text), nil},
{`^(#{2,6})(.+\n)`, ByGroups(GenericSubheading, Text), nil},
{`^(\s*)([*-] )(\[[ xX]\])( .+\n)`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil},
{`^(\s*)([*-])(\s)(.+\n)`, ByGroups(Text, Keyword, Text, UsingSelf("inline")), nil},
{`^(\s*)([0-9]+\.)( .+\n)`, ByGroups(Text, Keyword, UsingSelf("inline")), nil},
{`^(\s*>\s)(.+\n)`, ByGroups(Keyword, GenericEmph), nil},
{"^(```\\n)([\\w\\W]*?)(^```$)", ByGroups(LiteralString, Text, LiteralString), nil},
{"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)", EmitterFunc(markdownCodeBlock), nil},
Include("inline"),
},
"inline": {
{`\\.`, Text, nil},
{`(\s)([*_][^*_]+[*_])(\W|\n)`, ByGroups(Text, GenericEmph, Text), nil},
{`(\s)((\*\*|__).*\3)((?=\W|\n))`, ByGroups(Text, GenericStrong, None, Text), nil},
{`(\s)(~~[^~]+~~)((?=\W|\n))`, ByGroups(Text, GenericDeleted, Text), nil},
{"`[^`]+`", LiteralStringBacktick, nil},
{`[@#][\w/:]+`, NameEntity, nil},
{`(!?\[)([^]]+)(\])(\()([^)]+)(\))`, ByGroups(Text, NameTag, Text, Text, NameAttribute, Text), nil},
{`[^\\\s]+`, Text, nil},
{`.`, Text, nil},
},
},
))
func markdownCodeBlock(groups []string, lexer Lexer) Iterator {
iterators := []Iterator{}
tokens := []*Token{
{String, groups[1]},
{String, groups[2]},
{Text, groups[3]},
}
code := groups[4]
lexer = internal.Get(groups[2])
if lexer == nil {
tokens = append(tokens, &Token{String, code})
iterators = append(iterators, Literator(tokens...))
} else {
sub, err := lexer.Tokenise(nil, code)
if err != nil {
panic(err)
}
iterators = append(iterators, Literator(tokens...), sub)
}
iterators = append(iterators, Literator(&Token{String, groups[5]}))
return Concaterator(iterators...)
}

43
vendor/github.com/alecthomas/chroma/lexers/m/mason.go generated vendored Normal file

@@ -0,0 +1,43 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
. "github.com/alecthomas/chroma/lexers/h" // nolint
"github.com/alecthomas/chroma/lexers/internal"
. "github.com/alecthomas/chroma/lexers/p" // nolint
)
// Mason lexer.
var Mason = internal.Register(MustNewLexer(
&Config{
Name: "Mason",
Aliases: []string{"mason"},
Filenames: []string{"*.m", "*.mhtml", "*.mc", "*.mi", "autohandler", "dhandler"},
MimeTypes: []string{"application/x-mason"},
Priority: 0.1,
},
Rules{
"root": {
{`\s+`, Text, nil},
{`(<%doc>)(.*?)(</%doc>)(?s)`, ByGroups(NameTag, CommentMultiline, NameTag), nil},
{`(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil},
{`(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Perl), NameTag), nil},
{`(<&[^|])(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Perl), NameTag), nil},
{`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Perl), NameTag), nil},
{`</&>`, NameTag, nil},
{`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Perl), NameTag), nil},
{`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil},
{`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Perl), Other), nil},
{`(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=[%#]) | # an eval or comment line
(?=</?[%&]) | # a substitution or block or
# call start or end
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)`, ByGroups(Using(HTML), Operator), nil},
},
},
))

32
vendor/github.com/alecthomas/chroma/lexers/m/mathematica.go generated vendored Normal file

@@ -0,0 +1,32 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Mathematica lexer.
var Mathematica = internal.Register(MustNewLexer(
&Config{
Name: "Mathematica",
Aliases: []string{"mathematica", "mma", "nb"},
Filenames: []string{"*.nb", "*.cdf", "*.nbp", "*.ma"},
MimeTypes: []string{"application/mathematica", "application/vnd.wolfram.mathematica", "application/vnd.wolfram.mathematica.package", "application/vnd.wolfram.cdf"},
},
Rules{
"root": {
{`(?s)\(\*.*?\*\)`, Comment, nil},
{"([a-zA-Z]+[A-Za-z0-9]*`)", NameNamespace, nil},
{`([A-Za-z0-9]*_+[A-Za-z0-9]*)`, NameVariable, nil},
{`#\d*`, NameVariable, nil},
{`([a-zA-Z]+[a-zA-Z0-9]*)`, Name, nil},
{`-?\d+\.\d*`, LiteralNumberFloat, nil},
{`-?\d*\.\d+`, LiteralNumberFloat, nil},
{`-?\d+`, LiteralNumberInteger, nil},
{Words(``, ``, `;;`, `=`, `=.`, `!===`, `:=`, `->`, `:>`, `/.`, `+`, `-`, `*`, `/`, `^`, `&&`, `||`, `!`, `<>`, `|`, `/;`, `?`, `@`, `//`, `/@`, `@@`, `@@@`, `~~`, `===`, `&`, `<`, `>`, `<=`, `>=`), Operator, nil},
{Words(``, ``, `,`, `;`, `(`, `)`, `[`, `]`, `{`, `}`), Punctuation, nil},
{`".*?"`, LiteralString, nil},
{`\s+`, TextWhitespace, nil},
},
},
))

41
vendor/github.com/alecthomas/chroma/lexers/m/minizinc.go generated vendored Normal file

@@ -0,0 +1,41 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// MiniZinc lexer.
var MZN = internal.Register(MustNewLexer(
&Config{
Name: "MiniZinc",
Aliases: []string{"minizinc", "MZN", "mzn"},
Filenames: []string{"*.mzn", "*.dzn", "*.fzn"},
MimeTypes: []string{"text/minizinc"},
},
Rules{
"root": {
{`\n`, Text, nil},
{`\s+`, Text, nil},
{`\\\n`, Text, nil},
{`\%(.*?)\n`, CommentSingle, nil},
{`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{Words(`\b`, `\b`, `ann`, `annotation`, `any`, `constraint`, `function`, `include`, `list`, `of`, `op`, `output`, `minimize`, `maximize`, `par`, `predicate`, `record`, `satisfy`, `solve`, `test`, `type`, `var`), Keyword, nil},
{Words(`\b`, `\b`, `array`, `set`, `bool`, `enum`, `float`, `int`, `string`, `tuple`), KeywordType, nil},
{Words(`\b`, `\b`, `for`, `forall`, `if`, `then`, `else`, `endif`, `where`), Keyword, nil},
{Words(`\b`, `\b`, `abort`, `abs`, `acosh`, `array_intersect`, `array_union`, `array1d`, `array2d`, `array3d`, `array4d`, `array5d`, `array6d`, `asin`, `assert`, `atan`, `bool2int`, `card`, `ceil`, `concat`, `cos`, `cosh`, `dom`, `dom_array`, `dom_size`, `fix`, `exp`, `floor`, `index_set`, `index_set_1of2`, `index_set_2of2`, `index_set_1of3`, `index_set_2of3`, `index_set_3of3`, `int2float`, `is_fixed`, `join`, `lb`, `lb_array`, `length`, `ln`, `log`, `log2`, `log10`, `min`, `max`, `pow`, `product`, `round`, `set2array`, `show`, `show_int`, `show_float`, `sin`, `sinh`, `sqrt`, `sum`, `tan`, `tanh`, `trace`, `ub`, `ub_array`), NameBuiltin, nil},
{`(not|<->|->|<-|\\/|xor|/\\)`, Operator, nil},
{`(<|>|<=|>=|==|=|!=)`, Operator, nil},
{`(\+|-|\*|/|div|mod)`, Operator, nil},
{Words(`\b`, `\b`, `in`, `subset`, `superset`, `union`, `diff`, `symdiff`, `intersect`), Operator, nil},
{`(\\|\.\.|\+\+)`, Operator, nil},
{`[|()\[\]{},:;]`, Punctuation, nil},
{`(true|false)\b`, KeywordConstant, nil},
{`([+-]?)\d+(\.(?!\.)\d*)?([eE][-+]?\d+)?`, LiteralNumber, nil},
{`::\s*([^\W\d]\w*)(\s*\([^\)]*\))?`, NameDecorator, nil},
{`\b([^\W\d]\w*)\b(\()`, ByGroups(NameFunction, Punctuation), nil},
{`[^\W\d]\w*`, NameOther, nil},
},
},
))

115
vendor/github.com/alecthomas/chroma/lexers/m/modula2.go generated vendored Normal file

@@ -0,0 +1,115 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Modula-2 lexer.
var Modula2 = internal.Register(MustNewLexer(
&Config{
Name: "Modula-2",
Aliases: []string{"modula2", "m2"},
Filenames: []string{"*.def", "*.mod"},
MimeTypes: []string{"text/x-modula2"},
DotAll: true,
},
Rules{
"whitespace": {
{`\n+`, Text, nil},
{`\s+`, Text, nil},
},
"dialecttags": {
{`\(\*!m2pim\*\)`, CommentSpecial, nil},
{`\(\*!m2iso\*\)`, CommentSpecial, nil},
{`\(\*!m2r10\*\)`, CommentSpecial, nil},
{`\(\*!objm2\*\)`, CommentSpecial, nil},
{`\(\*!m2iso\+aglet\*\)`, CommentSpecial, nil},
{`\(\*!m2pim\+gm2\*\)`, CommentSpecial, nil},
{`\(\*!m2iso\+p1\*\)`, CommentSpecial, nil},
{`\(\*!m2iso\+xds\*\)`, CommentSpecial, nil},
},
"identifiers": {
{`([a-zA-Z_$][\w$]*)`, Name, nil},
},
"prefixed_number_literals": {
{`0b[01]+(\'[01]+)*`, LiteralNumberBin, nil},
{`0[ux][0-9A-F]+(\'[0-9A-F]+)*`, LiteralNumberHex, nil},
},
"plain_number_literals": {
{`[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*[eE][+-]?[0-9]+(\'[0-9]+)*`, LiteralNumberFloat, nil},
{`[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*`, LiteralNumberFloat, nil},
{`[0-9]+(\'[0-9]+)*`, LiteralNumberInteger, nil},
},
"suffixed_number_literals": {
{`[0-7]+B`, LiteralNumberOct, nil},
{`[0-7]+C`, LiteralNumberOct, nil},
{`[0-9A-F]+H`, LiteralNumberHex, nil},
},
"string_literals": {
{`'(\\\\|\\'|[^'])*'`, LiteralString, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
},
"digraph_operators": {
{`\*\.`, Operator, nil},
{`\+>`, Operator, nil},
{`<>`, Operator, nil},
{`<=`, Operator, nil},
{`>=`, Operator, nil},
{`==`, Operator, nil},
{`::`, Operator, nil},
{`:=`, Operator, nil},
{`\+\+`, Operator, nil},
{`--`, Operator, nil},
},
"unigraph_operators": {
{`[+-]`, Operator, nil},
{`[*/]`, Operator, nil},
{`\\`, Operator, nil},
{`[=#<>]`, Operator, nil},
{`\^`, Operator, nil},
{`@`, Operator, nil},
{`&`, Operator, nil},
{`~`, Operator, nil},
{"`", Operator, nil},
},
"digraph_punctuation": {
{`\.\.`, Punctuation, nil},
{`<<`, Punctuation, nil},
{`>>`, Punctuation, nil},
{`->`, Punctuation, nil},
{`\|#`, Punctuation, nil},
{`##`, Punctuation, nil},
{`\|\*`, Punctuation, nil},
},
"unigraph_punctuation": {
{`[()\[\]{},.:;|]`, Punctuation, nil},
{`!`, Punctuation, nil},
{`\?`, Punctuation, nil},
},
"comments": {
{`^//.*?\n`, CommentSingle, nil},
{`\(\*([^$].*?)\*\)`, CommentMultiline, nil},
{`/\*(.*?)\*/`, CommentMultiline, nil},
},
"pragmas": {
{`<\*.*?\*>`, CommentPreproc, nil},
{`\(\*\$.*?\*\)`, CommentPreproc, nil},
},
"root": {
Include("whitespace"),
Include("dialecttags"),
Include("pragmas"),
Include("comments"),
Include("identifiers"),
Include("suffixed_number_literals"),
Include("prefixed_number_literals"),
Include("plain_number_literals"),
Include("string_literals"),
Include("digraph_punctuation"),
Include("digraph_operators"),
Include("unigraph_punctuation"),
Include("unigraph_operators"),
},
},
))

File diff suppressed because one or more lines are too long

40
vendor/github.com/alecthomas/chroma/lexers/m/myghty.go generated vendored Normal file

@@ -0,0 +1,40 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
. "github.com/alecthomas/chroma/lexers/p" // nolint
)
// Myghty lexer.
var Myghty = internal.Register(MustNewLexer(
&Config{
Name: "Myghty",
Aliases: []string{"myghty"},
Filenames: []string{"*.myt", "autodelegate"},
MimeTypes: []string{"application/x-myghty"},
},
Rules{
"root": {
{`\s+`, Text, nil},
{`(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil},
{`(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Python), NameTag), nil},
{`(<&[^|])(.*?)(,.*?)?(&>)`, ByGroups(NameTag, NameFunction, Using(Python), NameTag), nil},
{`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Python), NameTag), nil},
{`</&>`, NameTag, nil},
{`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Python), NameTag), nil},
{`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil},
{`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Python), Other), nil},
{`(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=[%#]) | # an eval or comment line
(?=</?[%&]) | # a substitution or block or
# call start or end
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)`, ByGroups(Other, Operator), nil},
},
},
))

54
vendor/github.com/alecthomas/chroma/lexers/m/mysql.go generated vendored Normal file

@@ -0,0 +1,54 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// MySQL lexer.
var MySQL = internal.Register(MustNewLexer(
&Config{
Name: "MySQL",
Aliases: []string{"mysql"},
Filenames: []string{"*.sql"},
MimeTypes: []string{"text/x-mysql"},
NotMultiline: true,
CaseInsensitive: true,
},
Rules{
"root": {
{`\s+`, Text, nil},
{`(#|--\s+).*\n?`, CommentSingle, nil},
{`/\*`, CommentMultiline, Push("multiline-comments")},
{`[0-9]+`, LiteralNumberInteger, nil},
{`[0-9]*\.[0-9]+(e[+-][0-9]+)`, LiteralNumberFloat, nil},
{`((?:_[a-z0-9]+)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")},
{`((?:_[a-z0-9]+)?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("double-string")},
{"[+*/<>=~!@#%^&|`?-]", Operator, nil},
{`\b(tinyint|smallint|mediumint|int|integer|bigint|date|datetime|time|bit|bool|tinytext|mediumtext|longtext|text|tinyblob|mediumblob|longblob|blob|float|double|double\s+precision|real|numeric|dec|decimal|timestamp|year|char|varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?`, ByGroups(KeywordType, Text, Punctuation), nil},
{`\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|bigint|binary|blob|both|by|call|cascade|case|change|char|character|check|collate|column|condition|constraint|continue|convert|create|cross|current_date|current_time|current_timestamp|current_user|cursor|database|databases|day_hour|day_microsecond|day_minute|day_second|dec|decimal|declare|default|delayed|delete|desc|describe|deterministic|distinct|distinctrow|div|double|drop|dual|each|else|elseif|enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|float8|for|force|foreign|from|fulltext|grant|group|having|high_priority|hour_microsecond|hour_minute|hour_second|if|ignore|in|index|infile|inner|inout|insensitive|insert|int|int1|int2|int3|int4|int8|integer|interval|into|is|iterate|join|key|keys|kill|leading|leave|left|like|limit|lines|load|localtime|localtimestamp|lock|long|loop|low_priority|match|minute_microsecond|minute_second|mod|modifies|natural|no_write_to_binlog|not|numeric|on|optimize|option|optionally|or|order|out|outer|outfile|precision|primary|procedure|purge|raid0|read|reads|real|references|regexp|release|rename|repeat|replace|require|restrict|return|revoke|right|rlike|schema|schemas|second_microsecond|select|sensitive|separator|set|show|smallint|soname|spatial|specific|sql|sql_big_result|sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|sqlwarning|ssl|starting|straight_join|table|terminated|then|to|trailing|trigger|undo|union|unique|unlock|unsigned|update|usage|use|using|utc_date|utc_time|utc_timestamp|values|varying|when|where|while|with|write|x509|xor|year_month|zerofill)\b`, Keyword, nil},
{`\b(auto_increment|engine|charset|tables)\b`, KeywordPseudo, nil},
{`(true|false|null)`, NameConstant, nil},
{`([a-z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil},
{`[a-z_]\w*`, Name, nil},
{`@[a-z0-9]*[._]*[a-z0-9]*`, NameVariable, nil},
{`[;:()\[\],.]`, Punctuation, nil},
},
"multiline-comments": {
{`/\*`, CommentMultiline, Push("multiline-comments")},
{`\*/`, CommentMultiline, Pop(1)},
{`[^/*]+`, CommentMultiline, nil},
{`[/*]`, CommentMultiline, nil},
},
"string": {
{`[^']+`, LiteralStringSingle, nil},
{`''`, LiteralStringSingle, nil},
{`'`, LiteralStringSingle, Pop(1)},
},
"double-string": {
{`[^"]+`, LiteralStringDouble, nil},
{`""`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
},
))