mirror of https://github.com/go-gitea/gitea
update chroma to v0.8.0 (#12337)
parent
4315e313d1
commit
bfb25e4be1
@ -0,0 +1,206 @@ |
||||
package c |
||||
|
||||
import ( |
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal" |
||||
) |
||||
|
||||
// caddyfileCommon are the rules common to both of the lexer variants
// (the full Caddyfile lexer and the directives-only lexer). The two
// variants Merge() this map into their own Rules.
//
// NOTE(review): rule order within each state is significant — the first
// matching rule wins, so the specific entries must stay above the
// catch-all `[^\s#]+` rules.
var caddyfileCommon = Rules{
	// Rules shared by a site block's body and the directives-only root.
	"site_block_common": {
		// Import keyword
		{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
		// Matcher definition
		{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
		// Matcher token stub for docs
		{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
		// These cannot have matchers but may have things that look like
		// matchers in their arguments, so we just parse as a subdirective.
		{`try_files`, Keyword, Push("subdirective")},
		// These are special, they can nest more directives
		{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
		// Any other directive
		{`[^\s#]+`, Keyword, Push("directive")},
		Include("base"),
	},
	// Body of an @matcher definition; a `{` opens a matcher block.
	"matcher": {
		{`\{`, Punctuation, Push("block")},
		// Not can be one-liner
		{`not`, Keyword, Push("deep_not_matcher")},
		// Any other same-line matcher
		{`[^\s#]+`, Keyword, Push("arguments")},
		// Terminators
		{`\n`, Text, Pop(1)},
		{`\}`, Punctuation, Pop(1)},
		Include("base"),
	},
	// A `{ ... }` block; Pop(2) exits both the block and the state that
	// pushed it (e.g. "directive").
	"block": {
		{`\}`, Punctuation, Pop(2)},
		// Not can be one-liner
		{`not`, Keyword, Push("not_matcher")},
		// Any other subdirective
		{`[^\s#]+`, Keyword, Push("subdirective")},
		Include("base"),
	},
	// Block for directives that may nest further directives
	// (handle, route, ...).
	"nested_block": {
		{`\}`, Punctuation, Pop(2)},
		// Matcher definition
		{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
		// Something that starts with literally < is probably a docs stub
		{`\<[^#]+\>`, Keyword, Push("nested_directive")},
		// Any other directive
		{`[^\s#]+`, Keyword, Push("nested_directive")},
		Include("base"),
	},
	// `not` used inside a matcher block.
	"not_matcher": {
		{`\}`, Punctuation, Pop(2)},
		{`\{(?=\s)`, Punctuation, Push("block")},
		{`[^\s#]+`, Keyword, Push("arguments")},
		{`\s+`, Text, nil},
	},
	// `not` used on a matcher one-liner (nested one level deeper, so its
	// terminators must pop an extra state — see "deep_subdirective").
	"deep_not_matcher": {
		{`\}`, Punctuation, Pop(2)},
		{`\{(?=\s)`, Punctuation, Push("block")},
		{`[^\s#]+`, Keyword, Push("deep_subdirective")},
		{`\s+`, Text, nil},
	},
	// A directive and its same-line tokens; ends at newline or block open.
	"directive": {
		{`\{(?=\s)`, Punctuation, Push("block")},
		Include("matcher_token"),
		Include("comments_pop_1"),
		{`\n`, Text, Pop(1)},
		Include("base"),
	},
	// Like "directive" but its block may contain more directives.
	"nested_directive": {
		{`\{(?=\s)`, Punctuation, Push("nested_block")},
		Include("matcher_token"),
		Include("comments_pop_1"),
		{`\n`, Text, Pop(1)},
		Include("base"),
	},
	// A subdirective inside a block; no matcher tokens allowed.
	"subdirective": {
		{`\{(?=\s)`, Punctuation, Push("block")},
		Include("comments_pop_1"),
		{`\n`, Text, Pop(1)},
		Include("base"),
	},
	// Arguments following a matcher token; newline pops back out of both
	// "arguments" and the state that pushed it.
	"arguments": {
		{`\{(?=\s)`, Punctuation, Push("block")},
		Include("comments_pop_2"),
		{`\\\n`, Text, nil}, // Skip escaped newlines
		{`\n`, Text, Pop(2)},
		Include("base"),
	},
	// Subdirective reached through deep_not_matcher; three states deep.
	"deep_subdirective": {
		{`\{(?=\s)`, Punctuation, Push("block")},
		Include("comments_pop_3"),
		{`\n`, Text, Pop(3)},
		Include("base"),
	},
	// The matcher token that may follow a directive name.
	"matcher_token": {
		{`@[^\s]+`, NameDecorator, Push("arguments")},          // Named matcher
		{`/[^\s]+`, NameDecorator, Push("arguments")},          // Path matcher
		{`\*`, NameDecorator, Push("arguments")},               // Wildcard path matcher
		{`\[\<matcher\>\]`, NameDecorator, Push("arguments")},  // Matcher token stub for docs
	},
	// Plain comments that leave the state stack untouched.
	"comments": {
		{`^#.*\n`, CommentSingle, nil},   // Comment at start of line
		{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
	},
	// Comment variants that also pop 1/2/3 states, because a comment ends
	// the line and the line terminates the enclosing state(s).
	"comments_pop_1": {
		{`^#.*\n`, CommentSingle, Pop(1)},   // Comment at start of line
		{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
	},
	"comments_pop_2": {
		{`^#.*\n`, CommentSingle, Pop(2)},   // Comment at start of line
		{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
	},
	"comments_pop_3": {
		{`^#.*\n`, CommentSingle, Pop(3)},   // Comment at start of line
		{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
	},
	// Fallback tokens usable in (almost) every state.
	"base": {
		Include("comments"),
		{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
		{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
		{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
		{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
		{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
		{`\[(?=[^#{}$]+\])`, Punctuation, nil},
		{`\]|\|`, Punctuation, nil},
		{`[^\s#{}$\]]+`, LiteralString, nil},
		{`/[^\s#]*`, Name, nil},
		{`\s+`, Text, nil},
	},
}
||||
|
||||
// Caddyfile lexer. Handles a full Caddyfile: global options block,
// snippets, site labels and their site blocks. The shared states come
// from caddyfileCommon via Merge.
var Caddyfile = internal.Register(MustNewLexer(
	&Config{
		Name:      "Caddyfile",
		Aliases:   []string{"caddyfile", "caddy"},
		Filenames: []string{"Caddyfile*"},
		MimeTypes: []string{},
	},
	Rules{
		"root": {
			Include("comments"),
			// Global options block
			{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
			// Snippets
			{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
			// Site label
			{`[^#{(\s,]+`, GenericHeading, Push("label")},
			// Site label with placeholder
			{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
			{`\s+`, Text, nil},
		},
		// Inside the global options `{ ... }` block.
		"globals": {
			{`\}`, Punctuation, Pop(1)},
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		// Inside a named snippet's `{ ... }` block.
		"snippet": {
			{`\}`, Punctuation, Pop(1)},
			// Matcher definition
			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
			// Any directive
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		"label": {
			// Allow multiple labels, comma separated, newlines after
			// a comma means another label is coming
			{`,\s*\n?`, Text, nil},
			{` `, Text, nil},
			// Site label with placeholder
			{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
			// Site label
			{`[^#{(\s,]+`, GenericHeading, nil},
			// Comment after non-block label (hack because comments end in \n)
			{`#.*\n`, CommentSingle, Push("site_block")},
			// Note: if \n, we'll never pop out of the site_block, it's valid
			{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
		},
		// The body of a site block; shared rules do the real work.
		"site_block": {
			{`\}`, Punctuation, Pop(2)},
			Include("site_block_common"),
		},
	}.Merge(caddyfileCommon),
))
||||
|
||||
// Caddyfile directive-only lexer. Used for fragments that contain only
// directives (e.g. documentation examples), without site labels or a
// global options block.
var CaddyfileDirectives = internal.Register(MustNewLexer(
	&Config{
		Name:      "Caddyfile Directives",
		Aliases:   []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
		Filenames: []string{},
		MimeTypes: []string{},
	},
	Rules{
		// Same as "site_block" in Caddyfile
		"root": {
			Include("site_block_common"),
		},
	}.Merge(caddyfileCommon),
))
@ -0,0 +1,34 @@ |
||||
package circular |
||||
|
||||
import ( |
||||
"strings" |
||||
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/h" |
||||
"github.com/alecthomas/chroma/lexers/internal" |
||||
) |
||||
|
||||
// PHTML lexer is PHP in HTML. Everything outside `<?php ... ?>` is
// delegated to the HTML lexer; the inline rules here only recognise the
// PHP open tag and hand off to the "php" state supplied by
// phpCommonRules (defined elsewhere in this package).
var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
	&Config{
		Name:            "PHTML",
		Aliases:         []string{"phtml"},
		Filenames:       []string{"*.phtml"},
		MimeTypes:       []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5"},
		DotAll:          true,
		CaseInsensitive: true,
		EnsureNL:        true,
	},
	Rules{
		"root": {
			// PHP open tag; the optional "php" suffix keeps `<?` short tags working.
			{`<\?(php)?`, CommentPreproc, Push("php")},
			// Runs of non-`<` text belong to the delegated HTML lexer.
			{`[^<]+`, Other, nil},
			{`<`, Other, nil},
		},
	}.Merge(phpCommonRules),
).SetAnalyser(func(text string) float32 {
	// Content sniffing: an explicit "<?php" is a decent but not certain signal.
	if strings.Contains(text, "<?php") {
		return 0.5
	}
	return 0.0
})))
@ -0,0 +1,59 @@ |
||||
package p |
||||
|
||||
import ( |
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal" |
||||
) |
||||
|
||||
// Pony lexer.
//
// NOTE(review): rule order in "root" is significant — keyword/type Words
// rules must run before the generic identifier rules near the bottom.
var Pony = internal.Register(MustNewLexer(
	&Config{
		Name:      "Pony",
		Aliases:   []string{"pony"},
		Filenames: []string{"*.pony"},
		MimeTypes: []string{},
	},
	Rules{
		"root": {
			{`\n`, Text, nil},
			{`[^\S\n]+`, Text, nil},
			{`//.*\n`, CommentSingle, nil},
			// Block comments may nest; handled by a dedicated state.
			{`/\*`, CommentMultiline, Push("nested_comment")},
			{`"""(?:.|\n)*?"""`, LiteralStringDoc, nil},
			{`"`, LiteralString, Push("string")},
			{`\'.*\'`, LiteralStringChar, nil},
			{`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil},
			{Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil},
			{`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil},
			{Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil},
			// Type and method introducers push states that colour the name.
			{`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")},
			{`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")},
			{Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, `InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil},
			{`_?[A-Z]\w*`, NameClass, nil},
			{`string\(\)`, NameOther, nil},
			{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil},
			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
			{`\d+`, LiteralNumberInteger, nil},
			{`(true|false)\b`, Keyword, nil},
			{`_\d*`, Name, nil},
			{`_?[a-z][\w\'_]*`, Name, nil},
		},
		// Name after actor/class/struct/... with an optional capability.
		"typename": {
			{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)},
		},
		// Name after new/fun/be with an optional capability.
		"methodname": {
			{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)},
		},
		"nested_comment": {
			{`[^*/]+`, CommentMultiline, nil},
			// Push() with no args re-enters this same state for nesting.
			{`/\*`, CommentMultiline, Push()},
			{`\*/`, CommentMultiline, Pop(1)},
			{`[*/]`, CommentMultiline, nil},
		},
		"string": {
			{`"`, LiteralString, Pop(1)},
			{`\\"`, LiteralString, nil},
			{`[^\\"]+`, LiteralString, nil},
		},
	},
))
@ -0,0 +1,54 @@ |
||||
package z |
||||
|
||||
import ( |
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal" |
||||
) |
||||
|
||||
// Zig lexer.
//
// NOTE(review): the float rules must precede the plain integer rules and
// the hex-float rules must precede the plain hex rule, since the first
// matching rule wins.
var Zig = internal.Register(MustNewLexer(
	&Config{
		Name:      "Zig",
		Aliases:   []string{"zig"},
		Filenames: []string{"*.zig"},
		MimeTypes: []string{"text/zig"},
	},
	Rules{
		"root": {
			{`\n`, TextWhitespace, nil},
			{`\s+`, TextWhitespace, nil},
			{`//.*?\n`, CommentSingle, nil},
			// Keyword groups, split by category.
			{Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil},
			{Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil},
			{Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil},
			{Words(``, `\b`, `while`, `for`), Keyword, nil},
			{Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil},
			{Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil},
			{Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil},
			{Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil},
			// Hex floats, then decimal floats, then plain integers.
			{`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil},
			{`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil},
			{`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil},
			{`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil},
			{`0b[01]+`, LiteralNumberBin, nil},
			{`0o[0-7]+`, LiteralNumberOct, nil},
			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			// Builtins like @import, then plain identifiers.
			{`@[a-zA-Z_]\w*`, NameBuiltin, nil},
			{`[a-zA-Z_]\w*`, Name, nil},
			// Char literals: escaped quote, escape sequences, single char.
			{`\'\\\'\'`, LiteralStringEscape, nil},
			{`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil},
			{`\'[^\\\']\'`, LiteralString, nil},
			// Multiline string lines (\\...), optionally C-prefixed.
			{`\\\\[^\n]*`, LiteralStringHeredoc, nil},
			{`c\\\\[^\n]*`, LiteralStringHeredoc, nil},
			{`c?"`, LiteralString, Push("string")},
			{`[+%=><|^!?/\-*&~:]`, Operator, nil},
			{`[{}()\[\],.;]`, Punctuation, nil},
		},
		"string": {
			{`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil},
			{`[^\\"\n]+`, LiteralString, nil},
			{`"`, LiteralString, Pop(1)},
		},
	},
))
Loading…
Reference in new issue