Move to Go modules (#10)

vendor/github.com/alecthomas/chroma/lexers/h/haxe.go (generated, vendored): 7 changes

@@ -619,13 +619,14 @@ func haxePreProcMutator(state *LexerState) error {
     }

     proc := state.Groups[2]
-    if proc == "if" {
+    switch proc {
+    case "if":
         stack = append(stack, state.Stack)
-    } else if proc == "else" || proc == "elseif" {
+    case "else", "elseif":
         if len(stack) > 0 {
             state.Stack = stack[len(stack)-1]
         }
-    } else if proc == "end" {
+    case "end":
         stack = stack[:len(stack)-1]
     }


vendor/github.com/alecthomas/chroma/lexers/h/hcl.go (generated, vendored, new file): 69 additions

@@ -0,0 +1,69 @@
+package h
+
+import (
+    . "github.com/alecthomas/chroma" // nolint
+    "github.com/alecthomas/chroma/lexers/internal"
+)
+
+// HCL lexer.
+var HCL = internal.Register(MustNewLexer(
+    &Config{
+        Name:      "HCL",
+        Aliases:   []string{"hcl"},
+        Filenames: []string{"*.hcl"},
+        MimeTypes: []string{"application/x-hcl"},
+    },
+    Rules{
+        "root": {
+            Include("string"),
+            Include("punctuation"),
+            Include("curly"),
+            Include("basic"),
+            Include("whitespace"),
+            {`[0-9]+`, LiteralNumber, nil},
+        },
+        "basic": {
+            {Words(`\b`, `\b`, `true`, `false`), KeywordType, nil},
+            {`\s*/\*`, CommentMultiline, Push("comment")},
+            {`\s*#.*\n`, CommentSingle, nil},
+            {`(.*?)(\s*)(=)`, ByGroups(Name, Text, Operator), nil},
+            {`\d+`, Number, nil},
+            {`\b\w+\b`, Keyword, nil},
+            {`\$\{`, LiteralStringInterpol, Push("var_builtin")},
+        },
+        "function": {
+            {`(\s+)(".*")(\s+)`, ByGroups(Text, LiteralString, Text), nil},
+            Include("punctuation"),
+            Include("curly"),
+        },
+        "var_builtin": {
+            {`\$\{`, LiteralStringInterpol, Push()},
+            {Words(`\b`, `\b`, `concat`, `file`, `join`, `lookup`, `element`), NameBuiltin, nil},
+            Include("string"),
+            Include("punctuation"),
+            {`\s+`, Text, nil},
+            {`\}`, LiteralStringInterpol, Pop(1)},
+        },
+        "string": {
+            {`(".*")`, ByGroups(LiteralStringDouble), nil},
+        },
+        "punctuation": {
+            {`[\[\](),.]`, Punctuation, nil},
+        },
+        "curly": {
+            {`\{`, TextPunctuation, nil},
+            {`\}`, TextPunctuation, nil},
+        },
+        "comment": {
+            {`[^*/]`, CommentMultiline, nil},
+            {`/\*`, CommentMultiline, Push()},
+            {`\*/`, CommentMultiline, Pop(1)},
+            {`[*/]`, CommentMultiline, nil},
+        },
+        "whitespace": {
+            {`\n`, Text, nil},
+            {`\s+`, Text, nil},
+            {`\\\n`, Text, nil},
+        },
+    },
+))
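
The new file registers an HCL lexer under the "hcl" alias. As a quick orientation note (not part of this commit), the snippet below is a minimal sketch of driving the registered lexer through chroma's quick package; the sample HCL input and the formatter/style choices are arbitrary.

package main

import (
    "log"
    "os"

    "github.com/alecthomas/chroma/quick"
)

func main() {
    // Made-up HCL input; "hcl" resolves to the lexer registered in hcl.go above.
    src := `provider "aws" {
  region  = "us-east-1"
  retries = 3
}`

    // Highlight to the terminal using the built-in monokai style.
    if err := quick.Highlight(os.Stdout, src, "hcl", "terminal", "monokai"); err != nil {
        log.Fatal(err)
    }
}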

vendor/github.com/alecthomas/chroma/lexers/h/http.go (generated, vendored): 87 changes

@@ -77,56 +77,55 @@ func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (
     }

     return func() Token {
-        for token := it(); token != EOF; token = it() {
-            switch {
-            case token.Type == Name && strings.ToLower(token.Value) == "content-type":
-                {
-                    isContentType = true
-                }
-            case token.Type == Literal && isContentType:
-                {
-                    isContentType = false
-                    contentType = strings.TrimSpace(token.Value)
-                    pos := strings.Index(contentType, ";")
-                    if pos > 0 {
-                        contentType = strings.TrimSpace(contentType[:pos])
-                    }
-                }
-            case token.Type == Generic && contentType != "":
-                {
-                    lexer := internal.MatchMimeType(contentType)
-
-                    // application/calendar+xml can be treated as application/xml
-                    // if there's not a better match.
-                    if lexer == nil && strings.Contains(contentType, "+") {
-                        slashPos := strings.Index(contentType, "/")
-                        plusPos := strings.LastIndex(contentType, "+")
-                        contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
-                        lexer = internal.MatchMimeType(contentType)
-                    }
-
-                    if lexer == nil {
-                        token.Type = Text
-                    } else {
-                        subIterator, err = lexer.Tokenise(nil, token.Value)
-                        if err != nil {
-                            panic(err)
-                        }
-                        return EOF
-                    }
-                }
+        token := it()
+
+        if token == EOF {
+            if subIterator != nil {
+                return subIterator()
             }
-
-            return token
+            return EOF
         }

-        if subIterator != nil {
-            for token := subIterator(); token != EOF; token = subIterator() {
-                return token
+        switch {
+        case token.Type == Name && strings.ToLower(token.Value) == "content-type":
+            {
+                isContentType = true
             }
+        case token.Type == Literal && isContentType:
+            {
+                isContentType = false
+                contentType = strings.TrimSpace(token.Value)
+                pos := strings.Index(contentType, ";")
+                if pos > 0 {
+                    contentType = strings.TrimSpace(contentType[:pos])
+                }
+            }
+        case token.Type == Generic && contentType != "":
+            {
+                lexer := internal.MatchMimeType(contentType)
+
+                // application/calendar+xml can be treated as application/xml
+                // if there's not a better match.
+                if lexer == nil && strings.Contains(contentType, "+") {
+                    slashPos := strings.Index(contentType, "/")
+                    plusPos := strings.LastIndex(contentType, "+")
+                    contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
+                    lexer = internal.MatchMimeType(contentType)
+                }
+
+                if lexer == nil {
+                    token.Type = Text
+                } else {
+                    subIterator, err = lexer.Tokenise(nil, token.Value)
+                    if err != nil {
+                        panic(err)
+                    }
+                    return EOF
+                }
+            }

         }
-        return EOF
+        return token

     }, nil
 }
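
For orientation (not part of the diff), the rewritten closure still follows chroma's Iterator contract: each call returns the next Token, EOF ends the stream, and once the HTTP lexer itself is exhausted the remaining tokens come from the sub-lexer picked via the Content-Type header. Below is a minimal sketch of consuming that iterator, with a made-up HTTP response as input.

package main

import (
    "fmt"
    "log"

    "github.com/alecthomas/chroma"
    "github.com/alecthomas/chroma/lexers"
)

func main() {
    // Made-up HTTP response; the JSON body should be re-lexed by the
    // sub-lexer chosen from its Content-Type header.
    src := "HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n{\"ok\": true}\r\n"

    it, err := lexers.Get("http").Tokenise(nil, src)
    if err != nil {
        log.Fatal(err)
    }
    for tok := it(); tok != chroma.EOF; tok = it() {
        fmt.Printf("%-22s %q\n", tok.Type, tok.Value)
    }
}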