Use vendored modules

Signed-off-by: Izuru Yakumo <yakumo.izuru@chaotic.ninja>
このコミットが含まれているのは:
Izuru Yakumo 2023-07-23 10:18:53 -03:00
コミット 1570d02d52
739個のファイルの変更277774行の追加15行の削除

ファイルの表示

@@ -1,17 +1,17 @@
destdir ?=
goflags ?= -v -ldflags "-w -X `go list`.Version=$(version) -X `go list`.Commit=$(commit)" -tags "static_build"
prefix ?= /usr/local
version ?= `git describe --abbrev=0 --tags || echo "$version"`
commit ?= `git rev-parse --short HEAD || echo "$commit"`
DESTDIR ?=
GOFLAGS ?= -v -ldflags "-w -X `go list`.Version=$(VERSION) -X `go list`.Commit=$(COMMIT)" -tags "static_build" -mod=vendor
PREFIX ?= /usr/local
VERSION ?= `git describe --abbrev=0 --tags || echo "$VERSION"`
COMMIT ?= `git rev-parse --short HEAD || echo "$COMMIT"`
build:
go build ${goflags} ./cmd/aya
go build ${GOFLAGS} ./cmd/aya
clean:
rm -f aya
install:
install -Dm0755 aya ${destdir}${prefix}/bin/aya
install -Dm0644 aya.1 ${destdir}${prefix}/share/man/man1/aya.1
install -Dm0755 aya ${DESTDIR}${PREFIX}/bin/aya
install -Dm0644 aya.1 ${DESTDIR}${PREFIX}/share/man/man1/aya.1
uninstall:
rm -f ${prefix}/bin/aya
rm -f ${prefix}/share/man/man1/aya.1
rm -f ${PREFIX}/bin/aya
rm -f ${PREFIX}/share/man/man1/aya.1

2
go.mod
ファイルの表示

@@ -12,7 +12,7 @@ require (
)
require (
github.com/alecthomas/chroma/v2 v2.2.0 // indirect
github.com/alecthomas/chroma/v2 v2.3.0 // indirect
github.com/dlclark/regexp2 v1.4.0 // indirect
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 // indirect
)

14
go.sum
ファイルの表示

@@ -1,9 +1,11 @@
github.com/Depado/bfchroma/v2 v2.0.0 h1:IRpN9BPkNwEpR6w1ectIcNWOuhDSLx+8f1pn83fzxx8=
github.com/Depado/bfchroma/v2 v2.0.0/go.mod h1:wFwW/Pw8Tnd0irzgO9Zxtxgzp3aPS8qBWlyadxujxmw=
github.com/alecthomas/chroma/v2 v2.2.0 h1:Aten8jfQwUqEdadVFFjNyjx7HTexhKP0XuqBG67mRDY=
github.com/alecthomas/chroma/v2 v2.2.0/go.mod h1:vf4zrexSH54oEjJ7EdB65tGNHmH3pGZmVkgTP5RHvAs=
github.com/alecthomas/repr v0.0.0-20220113201626-b1b626ac65ae h1:zzGwJfFlFGD94CyyYwCJeSuD32Gj9GTaSi5y9hoVzdY=
github.com/alecthomas/chroma/v2 v2.3.0 h1:83xfxrnjv8eK+Cf8qZDzNo3PPF9IbTWHs7z28GY6D0U=
github.com/alecthomas/chroma/v2 v2.3.0/go.mod h1:mZxeWZlxP2Dy+/8cBob2PYd8O2DwNAzave5AY7A2eQw=
github.com/alecthomas/repr v0.0.0-20220113201626-b1b626ac65ae/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
github.com/alecthomas/repr v0.1.0 h1:ENn2e1+J3k09gyj2shc0dHr/yjaWSHRlrJ4DPMevDqE=
github.com/alecthomas/repr v0.1.0/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -18,8 +20,11 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD
github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/yosssi/gcss v0.1.0 h1:jRuino7qq7kqntBIhT+0xSUI5/sBgCA/zCQ1Tuzd6Gg=
github.com/yosssi/gcss v0.1.0/go.mod h1:M3mTPOWZWjVROkXKZ2AiDzOBOXu2MqQeDXF/nKO44sI=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 h1:0A+M6Uqn+Eje4kHMK80dtF3JCXC4ykBgQG4Fe06QRhQ=
@@ -28,5 +33,6 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

52
vendor/github.com/Depado/bfchroma/v2/.drone.yml generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,52 @@
---
kind: pipeline
name: default
type: docker
steps:
- name: test
image: golang:latest
volumes:
- name: deps
path: /go
commands:
- go test -race -coverprofile=coverage.txt -covermode=atomic
- name: linter
image: golang:latest
volumes:
- name: deps
path: /go
commands:
- curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s
- ./bin/golangci-lint run --timeout 5m
- rm -r ./bin/
- name: coverage
image: plugins/codecov
settings:
token:
from_secret: codecov_token
files:
- coverage.txt
- name: telegram
image: appleboy/drone-telegram
settings:
to: 790376882
format: markdown
token:
from_secret: telegram_token
message: >
*{{repo.name}}*
[Build {{build.number}}]({{build.link}}) by {{commit.author}} {{#success build.status}}succeeded{{else}}failed{{/success}} in {{buildtime build.started}}
`{{truncate commit.sha 8}}`: "{{commit.message}}"
when:
status:
- success
- failure
volumes:
- name: deps
host:
path: /var/lib/cache/godeps/

17
vendor/github.com/Depado/bfchroma/v2/.gitignore generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,17 @@
# Binaries for programs and plugins
*.exe
*.dll
*.so
*.dylib
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
.glide/
vendor/
coverage.txt

21
vendor/github.com/Depado/bfchroma/v2/LICENSE generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

206
vendor/github.com/Depado/bfchroma/v2/README.md generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,206 @@
# bfchroma
[![forthebadge](https://forthebadge.com/images/badges/made-with-go.svg)](https://forthebadge.com)[![forthebadge](https://forthebadge.com/images/badges/built-with-love.svg)](https://forthebadge.com)
[![Go Report Card](https://goreportcard.com/badge/github.com/Depado/bfchroma)](https://goreportcard.com/report/github.com/Depado/bfchroma)
[![Build Status](https://drone.depa.do/api/badges/Depado/bfchroma/status.svg)](https://drone.depa.do/Depado/bfchroma)
[![codecov](https://codecov.io/gh/Depado/bfchroma/branch/master/graph/badge.svg)](https://codecov.io/gh/Depado/bfchroma)
[![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/Depado/bfchroma/blob/master/LICENSE)
[![Godoc](https://godoc.org/github.com/Depado/bfchroma?status.svg)](https://godoc.org/github.com/Depado/bfchroma)
[![Sourcegraph](https://sourcegraph.com/github.com/Depado/bfchroma/-/badge.svg)](https://sourcegraph.com/github.com/Depado/bfchroma?badge)
[![Say Thanks!](https://img.shields.io/badge/Say%20Thanks-!-1EAEDB.svg)](https://saythanks.io/to/Depado)
Integrating [Chroma](https://github.com/alecthomas/chroma) syntax highlighter as
a [Blackfriday](https://github.com/russross/blackfriday) renderer.
## Install and prerequisites
This project requires and uses the `v2` version of
[Blackfriday](https://github.com/russross/blackfriday/tree/v2).
```
$ go get github.com/Depado/bfchroma
```
## Features
This renderer integrates chroma to highlight code with triple backtick notation.
It will try to use the given language when available otherwise it will try to
detect the language. If neither of these two methods works it will fall back to sane
defaults.
## Usage
bfchroma uses the functional options approach so you can customize the behavior
of the renderer. It uses sane defaults when no option is passed so you can use
the renderer simply by doing so :
```go
html := bf.Run([]byte(md), bf.WithRenderer(bfchroma.NewRenderer()))
```
### Options
- `Style(s string)`
Define the style used by chroma for the rendering. The full list can be found [here](https://github.com/alecthomas/chroma/tree/master/styles)
- `ChromaStyle(*chroma.Style)`
This option can be used to pass a `*chroma.Style` directly instead of the
string representing the style as with the `Style(string)` option.
- `WithoutAutodetect()`
By default when no language information is written in the code block, this
renderer will try to auto-detect the used language. This option disables
this behavior and will fallback to a sane default when no language
information is available.
- `EmbedCSS()`
This option will embed CSS needed for chroma's `html.WithClasses()` at the beginning of blackfriday document.
CSS can also be extracted separately by calling the `Renderer`'s `ChromaCSS(w)` method, which will return the stylesheet for the currently set style
- `Extend(bf.Renderer)`
This option allows you to define the base blackfriday renderer that will be extended.
- `ChromaOptions(...html.Option)`
This option allows you to pass Chroma's html options in the renderer. Such
options can be found [here](https://github.com/alecthomas/chroma#the-html-formatter).
### Option examples
Disabling language auto-detection and displaying line numbers
```go
r := bfchroma.NewRenderer(
bfchroma.WithoutAutodetect(),
bfchroma.ChromaOptions(html.WithLineNumbers()),
)
```
Extend a blackfriday renderer
```go
b := bf.NewHTMLRenderer(bf.HTMLRendererParameters{
Flags: bf.CommonHTMLFlags,
})
r := bfchroma.NewRenderer(bfchroma.Extend(b))
```
Use a different style
```go
r := bfchroma.NewRenderer(bfchroma.Style("dracula"))
// Or
r = bfchroma.NewRenderer(bfchroma.ChromaStyle(styles.Dracula))
```
## Examples
```go
package main
import (
"fmt"
"github.com/Depado/bfchroma"
bf "github.com/russross/blackfriday/v2"
)
var md = "This is some sample code.\n\n```go\n" +
`func main() {
fmt.Println("Hi")
}
` + "```"
func main() {
html := bf.Run([]byte(md), bf.WithRenderer(bfchroma.NewRenderer()))
fmt.Println(string(html))
}
```
Will output :
```html
<p>This is some sample code.</p>
<pre style="color:#f8f8f2;background-color:#272822"><span style="color:#66d9ef">func</span> <span style="color:#a6e22e">main</span>() {
<span style="color:#a6e22e">fmt</span>.<span style="color:#a6e22e">Println</span>(<span style="color:#e6db74">&#34;Hi&#34;</span>)
}
</pre>
```
## Real-life example
In [smallblog](https://github.com/Depado/smallblog) I'm using bfchroma to render
my articles. It's using a combination of both bfchroma's options and blackfriday
extensions and flags.
```go
package main
import (
"github.com/Depado/bfchroma"
"github.com/alecthomas/chroma/formatters/html"
bf "github.com/russross/blackfriday/v2"
)
// Defines the extensions that are used
var exts = bf.NoIntraEmphasis | bf.Tables | bf.FencedCode | bf.Autolink |
bf.Strikethrough | bf.SpaceHeadings | bf.BackslashLineBreak |
bf.DefinitionLists | bf.Footnotes
// Defines the HTML rendering flags that are used
var flags = bf.UseXHTML | bf.Smartypants | bf.SmartypantsFractions |
bf.SmartypantsDashes | bf.SmartypantsLatexDashes | bf.TOC
// render will take a []byte input and will render it using a new renderer each
// time because reusing the same can mess with TOC and header IDs
func render(input []byte) []byte {
return bf.Run(
input,
bf.WithRenderer(
bfchroma.NewRenderer(
bfchroma.WithoutAutodetect(),
bfchroma.ChromaOptions(
html.WithLineNumbers(),
),
bfchroma.Extend(
bf.NewHTMLRenderer(bf.HTMLRendererParameters{
Flags: flags,
}),
),
),
),
bf.WithExtensions(exts),
)
}
```
## Classes
If you have loads of code in your markdown, you might want to consider using
`html.WithClasses()` in your `bfchroma.ChromaOptions()`. The CSS of the style
you chose can then be accessed like this :
```go
r := bfchroma.NewRenderer(
bfchroma.WithoutAutodetect(),
bfchroma.Extend(
bf.NewHTMLRenderer(bf.HTMLRendererParameters{Flags: flags}),
),
bfchroma.Style("monokai"),
bfchroma.ChromaOptions(html.WithClasses()),
)
var css template.CSS
b := new(bytes.Buffer)
if err := r.ChromaCSS(b); err != nil {
logrus.WithError(err).Warning("Couldn't write CSS")
}
css = template.CSS(b.String())
bf.Run(input, bf.WithRenderer(r), bf.WithExtensions(exts))
```
This way, you can pass your `css` var to any template and render it along the
rendered markdown.

147
vendor/github.com/Depado/bfchroma/v2/renderer.go generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,147 @@
// Package bfchroma provides an easy and extensible blackfriday renderer that
// uses the chroma syntax highlighter to render code blocks.
package bfchroma
import (
"io"
"github.com/alecthomas/chroma/v2"
"github.com/alecthomas/chroma/v2/formatters/html"
"github.com/alecthomas/chroma/v2/lexers"
"github.com/alecthomas/chroma/v2/styles"
bf "github.com/russross/blackfriday/v2"
)
// Option defines the functional option type used to configure a Renderer.
type Option func(r *Renderer)

// Style is a function option allowing to set the style used by chroma
// by name (resolved through styles.Get).
// Default : "monokai"
func Style(s string) Option {
	return func(r *Renderer) {
		r.Style = styles.Get(s)
	}
}
// ChromaStyle is an option to directly set the style of the renderer using a
// chroma style (*chroma.Style) instead of a string style name.
func ChromaStyle(s *chroma.Style) Option {
	return func(r *Renderer) {
		r.Style = s
	}
}
// WithoutAutodetect disables chroma's language detection when no codeblock
// extra information is given. It will fallback to a sane default instead of
// trying to detect the language from the code block's content.
func WithoutAutodetect() Option {
	return func(r *Renderer) {
		r.Autodetect = false
	}
}
// EmbedCSS will embed the CSS needed for html.WithClasses() at the beginning
// of the rendered document (inside a <style> element; see RenderNode).
func EmbedCSS() Option {
	return func(r *Renderer) {
		r.embedCSS = true
	}
}
// ChromaOptions allows to pass Chroma html.Option such as Standalone(),
// WithClasses(), ClassPrefix(prefix)... These are handed to the html
// formatter built in NewRenderer.
func ChromaOptions(options ...html.Option) Option {
	return func(r *Renderer) {
		r.ChromaOptions = options
	}
}
// Extend allows to specify the base blackfriday renderer which is extended
// (all non-code-block nodes are delegated to it).
func Extend(br bf.Renderer) Option {
	return func(r *Renderer) {
		r.Base = br
	}
}
// NewRenderer will return a new bfchroma renderer with sane defaults:
// a blackfriday HTML renderer with common flags as base, the Monokai
// style, and language autodetection enabled.
func NewRenderer(options ...Option) *Renderer {
	r := &Renderer{
		Base: bf.NewHTMLRenderer(bf.HTMLRendererParameters{
			Flags: bf.CommonHTMLFlags,
		}),
		Style:      styles.Monokai,
		Autodetect: true,
	}
	for _, option := range options {
		option(r)
	}
	// The formatter is built after all options have been applied so that
	// any ChromaOptions(...) passed by the caller take effect.
	r.Formatter = html.New(r.ChromaOptions...)
	return r
}
// RenderWithChroma will render the given text to the w io.Writer.
//
// The lexer is chosen in order of preference: the code block's info string
// (the language tag after the triple backticks), then content analysis when
// Autodetect is enabled, then lexers.Fallback.
func (r *Renderer) RenderWithChroma(w io.Writer, text []byte, data bf.CodeBlockData) error {
	var lexer chroma.Lexer
	// Determining the lexer to use
	if len(data.Info) > 0 {
		lexer = lexers.Get(string(data.Info))
	} else if r.Autodetect {
		lexer = lexers.Analyse(string(text))
	}
	// Both lexers.Get and lexers.Analyse may return nil; fall back then.
	if lexer == nil {
		lexer = lexers.Fallback
	}
	// Tokenize the code
	iterator, err := lexer.Tokenise(nil, string(text))
	if err != nil {
		return err
	}
	return r.Formatter.Format(w, r.Style, iterator)
}
// Renderer is a custom Blackfriday renderer that uses the capabilities of
// chroma to highlight code with triple backtick notation
type Renderer struct {
	Base          bf.Renderer     // base renderer all non-code-block nodes delegate to
	Autodetect    bool            // detect language from content when no info string is given
	ChromaOptions []html.Option   // options handed to the chroma html formatter
	Style         *chroma.Style   // chroma style used for highlighting
	Formatter     *html.Formatter // formatter built in NewRenderer from ChromaOptions
	embedCSS      bool            // emit the style's CSS at the start of the document
}
// RenderNode satisfies the Renderer interface. Code blocks are highlighted
// with chroma; everything else is delegated to the base renderer.
func (r *Renderer) RenderNode(w io.Writer, node *bf.Node, entering bool) bf.WalkStatus {
	switch node.Type {
	case bf.Document:
		// Optionally emit the chroma stylesheet inline at the very start
		// of the document, before the base renderer's document output.
		if entering && r.embedCSS {
			w.Write([]byte("<style>"))       // nolint: errcheck
			r.Formatter.WriteCSS(w, r.Style) // nolint: errcheck
			w.Write([]byte("</style>"))      // nolint: errcheck
		}
		return r.Base.RenderNode(w, node, entering)
	case bf.CodeBlock:
		// If chroma fails for any reason, fall back to the base renderer's
		// plain code-block output instead of dropping the content.
		if err := r.RenderWithChroma(w, node.Literal, node.CodeBlockData); err != nil {
			return r.Base.RenderNode(w, node, entering)
		}
		return bf.SkipChildren
	default:
		return r.Base.RenderNode(w, node, entering)
	}
}
// RenderHeader satisfies the Renderer interface by delegating to the base
// renderer.
func (r *Renderer) RenderHeader(w io.Writer, ast *bf.Node) {
	r.Base.RenderHeader(w, ast)
}
// RenderFooter satisfies the Renderer interface by delegating to the base
// renderer.
func (r *Renderer) RenderFooter(w io.Writer, ast *bf.Node) {
	r.Base.RenderFooter(w, ast)
}
// ChromaCSS writes the CSS for the renderer's current style to w; it is
// meant to be used together with chroma's html.WithClasses() option.
func (r *Renderer) ChromaCSS(w io.Writer) error {
	return r.Formatter.WriteCSS(w, r.Style)
}

19
vendor/github.com/alecthomas/chroma/v2/.gitignore generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,19 @@
# Binaries for programs and plugins
*.exe
*.dll
*.so
*.dylib
/cmd/chroma/chroma
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
.glide/
_models/
_examples/

78
vendor/github.com/alecthomas/chroma/v2/.golangci.yml generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,78 @@
run:
tests: true
skip-dirs:
- _examples
output:
print-issued-lines: false
linters:
enable-all: true
disable:
- maligned
- megacheck
- lll
- gocyclo
- dupl
- gochecknoglobals
- funlen
- godox
- wsl
- gomnd
- gocognit
- goerr113
- nolintlint
- testpackage
- godot
- nestif
- paralleltest
- nlreturn
- cyclop
- exhaustivestruct
- gci
- gofumpt
- errorlint
- exhaustive
- ifshort
- wrapcheck
- stylecheck
- thelper
linters-settings:
govet:
check-shadowing: true
gocyclo:
min-complexity: 10
dupl:
threshold: 100
goconst:
min-len: 8
min-occurrences: 3
forbidigo:
#forbid:
# - (Must)?NewLexer$
exclude_godoc_examples: false
issues:
max-per-linter: 0
max-same: 0
exclude-use-default: false
exclude:
# Captured by errcheck.
- '^(G104|G204):'
# Very commonly not checked.
- 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked'
- 'exported method (.*\.MarshalJSON|.*\.UnmarshalJSON|.*\.EntityURN|.*\.GoString|.*\.Pos) should have comment or be unexported'
- 'composite literal uses unkeyed fields'
- 'declaration of "err" shadows declaration'
- 'should not use dot imports'
- 'Potential file inclusion via variable'
- 'should have comment or be unexported'
- 'comment on exported var .* should be of the form'
- 'at least one file in a package should have a package comment'
- 'string literal contains the Unicode'
- 'methods on the same type should have the same receiver name'
- '_TokenType_name should be _TokenTypeName'
- '`_TokenType_map` should be `_TokenTypeMap`'
- 'rewrite if-else to switch statement'

37
vendor/github.com/alecthomas/chroma/v2/.goreleaser.yml generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,37 @@
project_name: chroma
release:
github:
owner: alecthomas
name: chroma
brews:
-
install: bin.install "chroma"
env:
- CGO_ENABLED=0
builds:
- goos:
- linux
- darwin
- windows
goarch:
- arm64
- amd64
- "386"
goarm:
- "6"
dir: ./cmd/chroma
main: .
ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
binary: chroma
archives:
-
format: tar.gz
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
.Arm }}{{ end }}'
files:
- COPYING
- README*
snapshot:
name_template: SNAPSHOT-{{ .Commit }}
checksum:
name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt'

19
vendor/github.com/alecthomas/chroma/v2/COPYING generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,19 @@
Copyright (C) 2017 Alec Thomas
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

19
vendor/github.com/alecthomas/chroma/v2/Makefile generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,19 @@
.PHONY: chromad upload all
VERSION ?= $(shell git describe --tags --dirty --always)
all: README.md tokentype_string.go
README.md: lexers/*/*.go
./table.py
tokentype_string.go: types.go
go generate
chromad:
rm -f chromad
(export CGOENABLED=0 GOOS=linux GOARCH=amd64; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .)
upload: chromad
scp chromad root@swapoff.org: && \
ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart'

285
vendor/github.com/alecthomas/chroma/v2/README.md generated vendored ノーマルファイル
ファイルの表示

@@ -0,0 +1,285 @@
# Chroma — A general purpose syntax highlighter in pure Go
[![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CI](https://github.com/alecthomas/chroma/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/)
> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
Chroma takes source code and other structured text and converts it into syntax
highlighted HTML, ANSI-coloured text, etc.
Chroma is based heavily on [Pygments](http://pygments.org/), and includes
translators for Pygments lexers and styles.
<a id="markdown-table-of-contents" name="table-of-contents"></a>
## Table of Contents
<!-- TOC -->
1. [Table of Contents](#table-of-contents)
2. [Supported languages](#supported-languages)
3. [Try it](#try-it)
4. [Using the library](#using-the-library)
1. [Quick start](#quick-start)
2. [Identifying the language](#identifying-the-language)
3. [Formatting the output](#formatting-the-output)
4. [The HTML formatter](#the-html-formatter)
5. [More detail](#more-detail)
1. [Lexers](#lexers)
2. [Formatters](#formatters)
3. [Styles](#styles)
6. [Command-line interface](#command-line-interface)
7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
<!-- /TOC -->
<a id="markdown-supported-languages" name="supported-languages"></a>
## Supported languages
Prefix | Language
:----: | --------
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, Brainfuck
C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan
E | EBNF, Elixir, Elm, EmacsLisp, Erlang
F | Factor, Fish, Forth, Fortran, FSharp
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy
I | Idris, Igor, INI, Io
J | J, Java, JavaScript, JSON, Julia, Jungle
K | Kotlin
L | Lighttpd configuration file, LLVM, Lua
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
N | NASM, Newspeak, Nginx configuration file, Nim, Nix
O | Objective-C, OCaml, Octave, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode
P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Properties, Protocol Buffer, Puppet, Python 2, Python
Q | QBasic
R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust
S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Svelte, Swift, SYSTEMD, systemverilog
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
V | VB.net, verilog, VHDL, VimL, vue
W | WDTE
X | XML, Xorg
Y | YAML, YANG
Z | Zig
_I will attempt to keep this section up to date, but an authoritative list can be
displayed with `chroma --list`._
<a id="markdown-try-it" name="try-it"></a>
## Try it
Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/).
<a id="markdown-using-the-library" name="using-the-library"></a>
## Using the library
Chroma, like Pygments, has the concepts of
[lexers](https://github.com/alecthomas/chroma/tree/master/lexers),
[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and
[styles](https://github.com/alecthomas/chroma/tree/master/styles).
Lexers convert source text into a stream of tokens, styles specify how token
types are mapped to colours, and formatters convert tokens and styles into
formatted output.
A package exists for each of these, containing a global `Registry` variable
with all of the registered implementations. There are also helper functions
for using the registry in each package, such as looking up lexers by name or
matching filenames, etc.
In all cases, if a lexer, formatter or style can not be determined, `nil` will
be returned. In this situation you may want to default to the `Fallback`
value in each respective package, which provides sane defaults.
<a id="markdown-quick-start" name="quick-start"></a>
### Quick start
A convenience function exists that can be used to simply format some source
text, without any effort:
```go
err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai")
```
<a id="markdown-identifying-the-language" name="identifying-the-language"></a>
### Identifying the language
To highlight code, you'll first have to identify what language the code is
written in. There are three primary ways to do that:
1. Detect the language from its filename.
```go
lexer := lexers.Match("foo.go")
```
2. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`).
```go
lexer := lexers.Get("go")
```
3. Detect the language from its content.
```go
lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n")
```
In all cases, `nil` will be returned if the language can not be identified.
```go
if lexer == nil {
lexer = lexers.Fallback
}
```
At this point, it should be noted that some lexers can be extremely chatty. To
mitigate this, you can use the coalescing lexer to coalesce runs of identical
token types into a single token:
```go
lexer = chroma.Coalesce(lexer)
```
<a id="markdown-formatting-the-output" name="formatting-the-output"></a>
### Formatting the output
Once a language is identified you will need to pick a formatter and a style (theme).
```go
style := styles.Get("swapoff")
if style == nil {
style = styles.Fallback
}
formatter := formatters.Get("html")
if formatter == nil {
formatter = formatters.Fallback
}
```
Then obtain an iterator over the tokens:
```go
contents, err := ioutil.ReadAll(r)
iterator, err := lexer.Tokenise(nil, string(contents))
```
And finally, format the tokens from the iterator:
```go
err := formatter.Format(w, style, iterator)
```
<a id="markdown-the-html-formatter" name="the-html-formatter"></a>
### The HTML formatter
By default the `html` registered formatter generates standalone HTML with
embedded CSS. More flexibility is available through the `formatters/html` package.
Firstly, the output generated by the formatter can be customised with the
following constructor options:
- `Standalone()` - generate standalone HTML with embedded CSS.
- `WithClasses()` - use classes rather than inlined style attributes.
- `ClassPrefix(prefix)` - prefix each generated CSS class.
- `TabWidth(width)` - Set the rendered tab width, in characters.
- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
- `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves.
- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.
If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with:
```go
formatter := html.New(html.WithClasses(true))
err := formatter.WriteCSS(w, style)
```
<a id="markdown-more-detail" name="more-detail"></a>
## More detail
<a id="markdown-lexers" name="lexers"></a>
### Lexers
See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/)
for details on implementing lexers. Most concepts apply directly to Chroma,
but see existing lexer implementations for real examples.
In many cases lexers can be automatically converted directly from Pygments by
using the included Python 3 script `pygments2chroma.py`. I use something like
the following:
```sh
python3 _tools/pygments2chroma.py \
pygments.lexers.jvm.KotlinLexer \
> lexers/k/kotlin.go \
&& gofmt -s -w lexers/k/kotlin.go
```
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
for a list of lexers, and notes on some of the issues importing them.
<a id="markdown-formatters" name="formatters"></a>
### Formatters
Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour.
A `noop` formatter is included that outputs the token text only, and a `tokens`
formatter outputs raw tokens. The latter is useful for debugging lexers.
<a id="markdown-styles" name="styles"></a>
### Styles
Chroma styles use the [same syntax](http://pygments.org/docs/styles/) as Pygments.
All Pygments styles have been converted to Chroma using the `_tools/style.py` script.
When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), know that the `chroma.Background` token type provides the default style for tokens. It does so by defining a foreground color and background color.
For example, this gives each token name not defined in the style a default color of `#f8f8f8` and uses `#000000` for the highlighted code block's background:
~~~go
chroma.Background: "#f8f8f2 bg:#000000",
~~~
Also, token types in a style file are hierarchical. For instance, when `CommentSpecial` is not defined, Chroma uses the token style from `Comment`. So when several comment tokens use the same color, you'll only need to define `Comment` and override the one that has a different color.
For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/).
<a id="markdown-command-line-interface" name="command-line-interface"></a>
## Command-line interface
A command-line interface to Chroma is included.
Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases).
The CLI can be used as a preprocessor to colorise output of `less(1)`,
see documentation for the `LESSOPEN` environment variable.
The `--fail` flag can be used to suppress output and return with exit status
1 to facilitate falling back to some other preprocessor in case chroma
does not resolve a specific lexer to use for the given file. For example:
```shell
export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"'
```
Replace `cat` with your favourite fallback preprocessor.
When invoked as `.lessfilter`, the `--fail` flag is automatically turned
on under the hood for easy integration with [lesspipe shipping with
Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS);
for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`.
<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a>
## What's missing compared to Pygments?
- Quite a few lexers, for various reasons (pull-requests welcome):
- Pygments lexers for complex languages often include custom code to
handle certain aspects, such as Raku's ability to nest code inside
regular expressions. These require time and effort to convert.
- I mostly only converted languages I had heard of, to reduce the porting cost.
- Some more esoteric features of Pygments are omitted for simplicity.
- Though the Chroma API supports content detection, very few languages support it.
I have plans to implement a statistical analyser at some point, but not enough time.

35
vendor/github.com/alecthomas/chroma/v2/coalesce.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,35 @@
package chroma
// Coalesce is a Lexer interceptor that collapses runs of common types into a single token.
func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} }

// coalescer wraps a Lexer; the merging happens inside its Tokenise iterator.
type coalescer struct{ Lexer }
// Tokenise wraps the underlying lexer's iterator, merging consecutive
// tokens of the same type into a single token.
func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
    var prev Token
    it, err := d.Lexer.Tokenise(options, text)
    if err != nil {
        return nil, err
    }
    return func() Token {
        // Accumulate same-typed tokens in prev; emit when the type changes.
        for token := it(); token != (EOF); token = it() {
            if len(token.Value) == 0 {
                // Drop empty tokens entirely.
                continue
            }
            if prev == EOF {
                // First token of a new run.
                prev = token
            } else {
                if prev.Type == token.Type && len(prev.Value) < 8192 {
                    // Same type: extend the pending run. The 8192 cap bounds
                    // the size of any single merged token.
                    prev.Value += token.Value
                } else {
                    // Type changed (or run too large): emit the pending run
                    // and start a new one with the current token.
                    out := prev
                    prev = token
                    return out
                }
            }
        }
        // Underlying iterator exhausted: flush the pending run, then EOF.
        out := prev
        prev = EOF
        return out
    }, nil
}

192
vendor/github.com/alecthomas/chroma/v2/colour.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,192 @@
package chroma
import (
"fmt"
"math"
"strconv"
"strings"
)
// ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values.
var ANSI2RGB = map[string]string{
    "#ansiblack":    "000000",
    "#ansidarkred":  "7f0000",
    "#ansidarkgreen": "007f00",
    // NOTE(review): "7f7fe0" has an unexpectedly high blue component for
    // "brown" (the other dark entries are pure 7f primaries). Left untouched
    // as vendored data — verify against upstream chroma before changing.
    "#ansibrown":     "7f7fe0",
    "#ansidarkblue":  "00007f",
    "#ansipurple":    "7f007f",
    "#ansiteal":      "007f7f",
    "#ansilightgray": "e5e5e5",
    // Normal
    "#ansidarkgray":  "555555",
    "#ansired":       "ff0000",
    "#ansigreen":     "00ff00",
    "#ansiyellow":    "ffff00",
    "#ansiblue":      "0000ff",
    "#ansifuchsia":   "ff00ff",
    "#ansiturquoise": "00ffff",
    "#ansiwhite":     "ffffff",
    // Aliases without the "ansi" prefix, because...why?
    "#black":     "000000",
    "#darkred":   "7f0000",
    "#darkgreen": "007f00",
    "#brown":     "7f7fe0",
    "#darkblue":  "00007f",
    "#purple":    "7f007f",
    "#teal":      "007f7f",
    "#lightgray": "e5e5e5",
    // Normal
    "#darkgray":  "555555",
    "#red":       "ff0000",
    "#green":     "00ff00",
    "#yellow":    "ffff00",
    "#blue":      "0000ff",
    "#fuchsia":   "ff00ff",
    "#turquoise": "00ffff",
    "#white":     "ffffff",
}
// Colour represents an RGB colour.
//
// Stored internally as the packed RGB value plus one, so that the zero
// value means "unset" (see ParseColour / IsSet).
type Colour int32

// NewColour creates a Colour directly from RGB values.
func NewColour(r, g, b uint8) Colour {
    return ParseColour(fmt.Sprintf("%02x%02x%02x", r, g, b))
}
// Distance between this colour and another.
//
// This uses the approach described here (https://www.compuphase.com/cmetric.htm).
// This is not as accurate as LAB, et. al. but is *vastly* simpler and sufficient for our needs.
func (c Colour) Distance(e2 Colour) float64 {
    ar, ag, ab := int64(c.Red()), int64(c.Green()), int64(c.Blue())
    br, bg, bb := int64(e2.Red()), int64(e2.Green()), int64(e2.Blue())
    // "Redmean" weighting: red and blue deltas are scaled by the mean red
    // level of the two colours; green always counts at full (4x) weight.
    rmean := (ar + br) / 2
    r := ar - br
    g := ag - bg
    b := ab - bb
    return math.Sqrt(float64((((512 + rmean) * r * r) >> 8) + 4*g*g + (((767 - rmean) * b * b) >> 8)))
}
// Brighten returns a copy of this colour with its brightness adjusted.
//
// A negative factor darkens the colour instead of lightening it.
//
// Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html).
func (c Colour) Brighten(factor float64) Colour {
    red := float64(c.Red())
    green := float64(c.Green())
    blue := float64(c.Blue())
    if factor < 0 {
        // Darken: scale every channel towards zero.
        factor++
        red *= factor
        green *= factor
        blue *= factor
    } else {
        // Lighten: move every channel a fraction of the way towards 255.
        red = (255-red)*factor + red
        green = (255-green)*factor + green
        blue = (255-blue)*factor + blue
    }
    return NewColour(uint8(red), uint8(green), uint8(blue))
}
// BrightenOrDarken brightens a colour if it is < 0.5 brightness or darkens if > 0.5 brightness.
func (c Colour) BrightenOrDarken(factor float64) Colour {
    if c.Brightness() >= 0.5 {
        return c.Brighten(-factor)
    }
    return c.Brighten(factor)
}
// ClampBrightness returns a copy of this colour with its brightness adjusted such that
// it falls within the range [min, max] (or very close to it due to rounding errors).
// The supplied values use the same [0.0, 1.0] range as Brightness.
func (c Colour) ClampBrightness(min, max float64) Colour {
    if !c.IsSet() {
        // Unset colours pass through unchanged.
        return c
    }
    // Clamp the requested bounds themselves to [0, 1].
    min = math.Max(min, 0)
    max = math.Min(max, 1)
    current := c.Brightness()
    target := math.Min(math.Max(current, min), max)
    if current == target {
        // Already within bounds; no adjustment needed.
        return c
    }
    r := float64(c.Red())
    g := float64(c.Green())
    b := float64(c.Blue())
    rgb := r + g + b
    if target > current {
        // Solve for x: target == ((255-r)*x + r + (255-g)*x + g + (255-b)*x + b) / 255 / 3
        return c.Brighten((target*255*3 - rgb) / (255*3 - rgb))
    }
    // Solve for x: target == (r*(x+1) + g*(x+1) + b*(x+1)) / 255 / 3
    return c.Brighten((target*255*3)/rgb - 1)
}
// Brightness of the colour (roughly) in the range 0.0 to 1.0, computed as
// the mean of the three channels.
func (c Colour) Brightness() float64 {
    sum := float64(c.Red()) + float64(c.Green()) + float64(c.Blue())
    return sum / 255.0 / 3.0
}
// ParseColour in the forms #rgb, #rrggbb, #ansi<colour>, or #<colour>.
// Will return an "unset" colour if invalid.
func ParseColour(colour string) Colour {
    hex := normaliseColour(colour)
    value, err := strconv.ParseUint(hex, 16, 32)
    if err != nil {
        return 0
    }
    // Stored as RGB+1 so that the zero value means "unset".
    return Colour(value + 1)
}
// MustParseColour is like ParseColour except it panics if the colour is invalid.
//
// Will panic if colour is in an invalid format.
func MustParseColour(colour string) Colour {
    c := ParseColour(colour)
    if c.IsSet() {
        return c
    }
    panic(fmt.Errorf("invalid colour %q", colour))
}
// IsSet returns true if the colour is set.
func (c Colour) IsSet() bool { return c != 0 }

// String renders the colour as "#rrggbb". The stored value is RGB+1
// (so zero means "unset"), hence the -1 here.
func (c Colour) String() string   { return fmt.Sprintf("#%06x", int(c-1)) }

// GoString renders the colour as a Go-style debug literal.
func (c Colour) GoString() string { return fmt.Sprintf("Colour(0x%06x)", int(c-1)) }

// Red component of colour.
func (c Colour) Red() uint8 { return uint8(((c - 1) >> 16) & 0xff) }

// Green component of colour.
func (c Colour) Green() uint8 { return uint8(((c - 1) >> 8) & 0xff) }

// Blue component of colour.
func (c Colour) Blue() uint8 { return uint8((c - 1) & 0xff) }

// Colours is an orderable set of colours.
type Colours []Colour

// sort.Interface implementation ordering colours by their packed value.
func (c Colours) Len() int           { return len(c) }
func (c Colours) Swap(i, j int)      { c[i], c[j] = c[j], c[i] }
func (c Colours) Less(i, j int) bool { return c[i] < c[j] }
// normaliseColour converts any supported colour form (#rgb, #rrggbb,
// #ansi<name>, #<name>) to a bare rrggbb hex string.
func normaliseColour(colour string) string {
    if rgb, ok := ANSI2RGB[colour]; ok {
        return rgb
    }
    if !strings.HasPrefix(colour, "#") {
        return colour
    }
    hex := colour[1:]
    if len(hex) != 3 {
        return hex
    }
    // Expand the #rgb shorthand by doubling each nibble.
    return strings.Repeat(hex[0:1], 2) + strings.Repeat(hex[1:2], 2) + strings.Repeat(hex[2:3], 2)
}

152
vendor/github.com/alecthomas/chroma/v2/delegate.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,152 @@
package chroma
import (
"bytes"
)
// delegatingLexer pairs a root lexer with an embedded-language lexer.
type delegatingLexer struct {
    root     Lexer
    language Lexer
}

// DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP
// inside HTML or PHP inside plain text.
//
// It takes two lexer as arguments: a root lexer and a language lexer. First everything is scanned using the language
// lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer.
// Finally, these two sets of tokens are merged.
//
// The lexers from the template lexer package use this base lexer.
func DelegatingLexer(root Lexer, language Lexer) Lexer {
    return &delegatingLexer{
        root:     root,
        language: language,
    }
}
// AnalyseText delegates content detection to the root lexer.
func (d *delegatingLexer) AnalyseText(text string) float32 {
    return d.root.AnalyseText(text)
}

// SetAnalyser installs a custom analyser on the root lexer.
func (d *delegatingLexer) SetAnalyser(analyser func(text string) float32) Lexer {
    d.root.SetAnalyser(analyser)
    return d
}

// SetRegistry propagates the lexer registry to both underlying lexers.
func (d *delegatingLexer) SetRegistry(r *LexerRegistry) Lexer {
    d.root.SetRegistry(r)
    d.language.SetRegistry(r)
    return d
}

// Config reports the embedded language lexer's configuration.
func (d *delegatingLexer) Config() *Config {
    return d.language.Config()
}
// An insertion is the character range where language tokens should be inserted.
type insertion struct {
    start, end int
    tokens     []Token
}

// Tokenise lexes text with the language lexer first, re-lexes every run of
// "Other" tokens with the root lexer, then interleaves the two token
// streams back into document order.
func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
    tokens, err := Tokenise(Coalesce(d.language), options, text)
    if err != nil {
        return nil, err
    }
    // Compute insertions and gather "Other" tokens.
    others := &bytes.Buffer{}
    insertions := []*insertion{}
    var insert *insertion
    offset := 0
    var last Token
    for _, t := range tokens {
        if t.Type == Other {
            if last != EOF && insert != nil && last.Type != Other {
                // Transitioned back to "Other": close the open insertion.
                insert.end = offset
            }
            others.WriteString(t.Value)
        } else {
            if last == EOF || last.Type == Other {
                // First recognised token after "Other": open a new insertion
                // at the current document offset.
                insert = &insertion{start: offset}
                insertions = append(insertions, insert)
            }
            insert.tokens = append(insert.tokens, t)
        }
        last = t
        offset += len(t.Value)
    }
    if len(insertions) == 0 {
        // The language lexer recognised nothing; delegate entirely to root.
        return d.root.Tokenise(options, text)
    }
    // Lex the other tokens.
    rootTokens, err := Tokenise(Coalesce(d.root), options, others.String())
    if err != nil {
        return nil, err
    }
    // Interleave the two sets of tokens.
    var out []Token
    offset = 0 // Offset into text.
    tokenIndex := 0
    nextToken := func() Token {
        if tokenIndex >= len(rootTokens) {
            return EOF
        }
        t := rootTokens[tokenIndex]
        tokenIndex++
        return t
    }
    insertionIndex := 0
    nextInsertion := func() *insertion {
        if insertionIndex >= len(insertions) {
            return nil
        }
        i := insertions[insertionIndex]
        insertionIndex++
        return i
    }
    t := nextToken()
    i := nextInsertion()
    for t != EOF || i != nil {
        // fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...))
        if t == EOF || (i != nil && i.start < offset+len(t.Value)) {
            // The next insertion lands inside (or before) the current root
            // token: split the token around the insertion point.
            var l Token
            l, t = splitToken(t, i.start-offset)
            if l != EOF {
                out = append(out, l)
                offset += len(l.Value)
            }
            out = append(out, i.tokens...)
            offset += i.end - i.start
            if t == EOF {
                t = nextToken()
            }
            i = nextInsertion()
        } else {
            out = append(out, t)
            offset += len(t.Value)
            t = nextToken()
        }
    }
    return Literator(out...), nil
}
// splitToken splits t at offset into left and right parts, returning EOF
// for an empty side. Used to cut a root token around an insertion point.
func splitToken(t Token, offset int) (l Token, r Token) {
    if t == EOF {
        return EOF, EOF
    }
    if offset == 0 {
        // Split at the very start: everything goes right.
        return EOF, t
    }
    if offset == len(t.Value) {
        // Split at the very end: everything goes left.
        return t, EOF
    }
    l = t.Clone()
    r = t.Clone()
    l.Value = l.Value[:offset]
    r.Value = r.Value[offset:]
    return
}

7
vendor/github.com/alecthomas/chroma/v2/doc.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,7 @@
// Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI-
// coloured text, etc.
//
// Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles.
//
// For more information, go here: https://github.com/alecthomas/chroma
package chroma

221
vendor/github.com/alecthomas/chroma/v2/emitters.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,221 @@
package chroma
import (
"fmt"
)
// An Emitter takes group matches and returns tokens.
type Emitter interface {
    // Emit tokens for the given regex groups.
    Emit(groups []string, state *LexerState) Iterator
}

// SerialisableEmitter is an Emitter that can be serialised and deserialised to/from JSON.
type SerialisableEmitter interface {
    Emitter
    // EmitterKind returns the serialisation discriminator for this emitter.
    EmitterKind() string
}

// EmitterFunc is a function that is an Emitter.
type EmitterFunc func(groups []string, state *LexerState) Iterator

// Emit tokens for groups.
func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator {
    return e(groups, state)
}
// Emitters is a list of Emitter.
type Emitters []Emitter

// byGroupsEmitter applies one emitter per regex capture group.
type byGroupsEmitter struct {
    Emitters
}

// ByGroups emits a token for each matching group in the rule's regex.
func ByGroups(emitters ...Emitter) Emitter {
    return &byGroupsEmitter{Emitters: emitters}
}

func (b *byGroupsEmitter) EmitterKind() string { return "bygroups" }

func (b *byGroupsEmitter) Emit(groups []string, state *LexerState) Iterator {
    iterators := make([]Iterator, 0, len(groups)-1)
    // An emitter/group-count mismatch yields an Error token rather than a panic.
    if len(b.Emitters) != len(groups)-1 {
        iterators = append(iterators, Error.Emit(groups, state))
        // panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters))
    } else {
        for i, group := range groups[1:] {
            // A nil emitter drops its group's text from the output.
            if b.Emitters[i] != nil {
                iterators = append(iterators, b.Emitters[i].Emit([]string{group}, state))
            }
        }
    }
    return Concaterator(iterators...)
}
// ByGroupNames emits a token for each named matching group in the rule's regex.
func ByGroupNames(emitters map[string]Emitter) Emitter {
    return EmitterFunc(func(groups []string, state *LexerState) Iterator {
        iterators := make([]Iterator, 0, len(state.NamedGroups)-1)
        if len(state.NamedGroups)-1 == 0 {
            // No named groups at all: use the special "0" (whole-match)
            // emitter, or emit an Error token when none was supplied.
            if emitter, ok := emitters[`0`]; ok {
                iterators = append(iterators, emitter.Emit(groups, state))
            } else {
                iterators = append(iterators, Error.Emit(groups, state))
            }
        } else {
            // Map each numbered group back to its name via the rule's regex.
            ruleRegex := state.Rules[state.State][state.Rule].Regexp
            for i := 1; i < len(state.NamedGroups); i++ {
                groupName := ruleRegex.GroupNameFromNumber(i)
                group := state.NamedGroups[groupName]
                if emitter, ok := emitters[groupName]; ok {
                    // A nil emitter drops the group's text.
                    if emitter != nil {
                        iterators = append(iterators, emitter.Emit([]string{group}, state))
                    }
                } else {
                    // Group names with no configured emitter surface as Error tokens.
                    iterators = append(iterators, Error.Emit([]string{group}, state))
                }
            }
        }
        return Concaterator(iterators...)
    })
}
// UsingByGroup emits tokens for the matched groups in the regex using a
// "sublexer". Used when lexing code blocks where the name of a sublexer is
// contained within the block, for example on a Markdown text block or SQL
// language block.
//
// The sublexer will be retrieved using sublexerGetFunc (typically
// internal.Get), using the captured value from the matched sublexerNameGroup.
//
// If sublexerGetFunc returns a non-nil lexer for the captured sublexerNameGroup,
// then tokens for the matched codeGroup will be emitted using the retrieved
// lexer. Otherwise, if the sublexer is nil, then tokens will be emitted from
// the passed emitter.
//
// Example:
//
//	var Markdown = internal.Register(MustNewLexer(
//		&Config{
//			Name:      "markdown",
//			Aliases:   []string{"md", "mkd"},
//			Filenames: []string{"*.md", "*.mkd", "*.markdown"},
//			MimeTypes: []string{"text/x-markdown"},
//		},
//		Rules{
//			"root": {
//				{"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)",
//					UsingByGroup(
//						internal.Get,
//						2, 4,
//						String, String, String, Text, String,
//					),
//					nil,
//				},
//			},
//		},
//	))
//
// See the lexers/m/markdown.go for the complete example.
//
// Note: panics if the number of emitters does not equal the number of matched
// groups in the regex.
func UsingByGroup(sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter {
    return &usingByGroup{
        SublexerNameGroup: sublexerNameGroup,
        CodeGroup:         codeGroup,
        Emitters:          emitters,
    }
}
// usingByGroup is the serialisable state behind UsingByGroup.
type usingByGroup struct {
    SublexerNameGroup int      `xml:"sublexer_name_group"`
    CodeGroup         int      `xml:"code_group"`
    Emitters          Emitters `xml:"emitters"`
}

func (u *usingByGroup) EmitterKind() string { return "usingbygroup" }

// Emit tokenises the code group with the sublexer named by the sublexer
// name group; all other groups use their configured emitters.
func (u *usingByGroup) Emit(groups []string, state *LexerState) Iterator {
    // bounds check
    if len(u.Emitters) != len(groups)-1 {
        panic("UsingByGroup expects number of emitters to be the same as len(groups)-1")
    }
    // grab sublexer
    sublexer := state.Registry.Get(groups[u.SublexerNameGroup])
    // build iterators
    iterators := make([]Iterator, len(groups)-1)
    for i, group := range groups[1:] {
        if i == u.CodeGroup-1 && sublexer != nil {
            // The code group is lexed by the resolved sublexer.
            var err error
            iterators[i], err = sublexer.Tokenise(nil, groups[u.CodeGroup])
            if err != nil {
                panic(err)
            }
        } else if u.Emitters[i] != nil {
            // A nil emitter drops its group's text.
            iterators[i] = u.Emitters[i].Emit([]string{group}, state)
        }
    }
    return Concaterator(iterators...)
}
// UsingLexer returns an Emitter that uses a given Lexer for parsing and emitting.
//
// This Emitter is not serialisable.
func UsingLexer(lexer Lexer) Emitter {
    return EmitterFunc(func(groups []string, _ *LexerState) Iterator {
        // Lex the whole match with the given lexer as a nested root state.
        it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0])
        if err != nil {
            panic(err)
        }
        return it
    })
}
// usingEmitter is the serialisable form of Using: it resolves the lexer by
// name from the state's registry at emit time.
type usingEmitter struct {
    Lexer string `xml:"lexer,attr"`
}

func (u *usingEmitter) EmitterKind() string { return "using" }

// Emit lexes the whole match with the named lexer resolved from the registry.
// Panics when no registry is attached or the lexer name is unknown.
func (u *usingEmitter) Emit(groups []string, state *LexerState) Iterator {
    if state.Registry == nil {
        panic(fmt.Sprintf("no LexerRegistry available for Using(%q)", u.Lexer))
    }
    lexer := state.Registry.Get(u.Lexer)
    if lexer == nil {
        panic(fmt.Sprintf("no such lexer %q", u.Lexer))
    }
    it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0])
    if err != nil {
        panic(err)
    }
    return it
}

// Using returns an Emitter that uses a given Lexer reference for parsing and emitting.
//
// The referenced lexer must be stored in the same LexerRegistry.
func Using(lexer string) Emitter {
    return &usingEmitter{Lexer: lexer}
}
// usingSelfEmitter re-enters the current lexer at a named state.
type usingSelfEmitter struct {
    State string `xml:"state,attr"`
}

func (u *usingSelfEmitter) EmitterKind() string { return "usingself" }

// Emit lexes the whole match with the current lexer, starting at State.
func (u *usingSelfEmitter) Emit(groups []string, state *LexerState) Iterator {
    it, err := state.Lexer.Tokenise(&TokeniseOptions{State: u.State, Nested: true}, groups[0])
    if err != nil {
        panic(err)
    }
    return it
}

// UsingSelf is like Using, but uses the current Lexer.
func UsingSelf(stateName string) Emitter {
    return &usingSelfEmitter{stateName}
}

43
vendor/github.com/alecthomas/chroma/v2/formatter.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,43 @@
package chroma
import (
"io"
)
// A Formatter for Chroma lexers.
type Formatter interface {
    // Format returns a formatting function for tokens.
    //
    // If the iterator panics, the Formatter should recover.
    Format(w io.Writer, style *Style, iterator Iterator) error
}

// A FormatterFunc is a Formatter implemented as a function.
//
// Guards against iterator panics.
type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error

func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) { // nolint
    defer func() {
        if perr := recover(); perr != nil {
            // NOTE(review): assumes all panic values implement error; a
            // non-error panic value would make this assertion panic again —
            // matches the vendored upstream behaviour.
            err = perr.(error)
        }
    }()
    return f(w, s, it)
}
// recoveringFormatter wraps another Formatter, converting panics raised
// during formatting into returned errors.
type recoveringFormatter struct {
    Formatter
}

func (r recoveringFormatter) Format(w io.Writer, s *Style, it Iterator) (err error) {
    defer func() {
        if perr := recover(); perr != nil {
            // NOTE(review): assumes all panic values implement error; a
            // non-error panic value would re-panic here — confirm upstream.
            err = perr.(error)
        }
    }()
    return r.Formatter.Format(w, s, it)
}

// RecoveringFormatter wraps a formatter with panic recovery.
func RecoveringFormatter(formatter Formatter) Formatter { return recoveringFormatter{formatter} }

563
vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,563 @@
package html
import (
"fmt"
"html"
"io"
"sort"
"strings"
"github.com/alecthomas/chroma/v2"
)
// Option sets an option of the HTML formatter.
type Option func(f *Formatter)
// Standalone configures the HTML formatter for generating a standalone HTML document.
func Standalone(b bool) Option { return func(f *Formatter) { f.standalone = b } }
// ClassPrefix sets the CSS class prefix.
func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix = prefix } }
// WithClasses emits HTML using CSS classes, rather than inline styles.
func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } }
// WithAllClasses disables an optimisation that omits redundant CSS classes.
func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } }
// WithCustomCSS sets user's custom CSS styles.
func WithCustomCSS(css map[chroma.TokenType]string) Option {
return func(f *Formatter) {
f.customCSS = css
}
}
// TabWidth sets the number of characters for a tab. Defaults to 8.
func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
// PreventSurroundingPre prevents the surrounding pre tags around the generated code.
func PreventSurroundingPre(b bool) Option {
return func(f *Formatter) {
f.preventSurroundingPre = b
if b {
f.preWrapper = nopPreWrapper
} else {
f.preWrapper = defaultPreWrapper
}
}
}
// InlineCode creates inline code wrapped in a code tag.
func InlineCode(b bool) Option {
return func(f *Formatter) {
f.inlineCode = b
f.preWrapper = preWrapper{
start: func(code bool, styleAttr string) string {
if code {
return fmt.Sprintf(`<code%s>`, styleAttr)
}
return ``
},
end: func(code bool) string {
if code {
return `</code>`
}
return ``
},
}
}
}
// WithPreWrapper allows control of the surrounding pre tags.
func WithPreWrapper(wrapper PreWrapper) Option {
return func(f *Formatter) {
f.preWrapper = wrapper
}
}
// WrapLongLines wraps long lines.
func WrapLongLines(b bool) Option {
return func(f *Formatter) {
f.wrapLongLines = b
}
}
// WithLineNumbers formats output with line numbers.
func WithLineNumbers(b bool) Option {
return func(f *Formatter) {
f.lineNumbers = b
}
}
// LineNumbersInTable will, when combined with WithLineNumbers, separate the line numbers
// and code in table td's, which make them copy-and-paste friendly.
func LineNumbersInTable(b bool) Option {
return func(f *Formatter) {
f.lineNumbersInTable = b
}
}
// LinkableLineNumbers decorates the line numbers HTML elements with an "id"
// attribute so they can be linked.
func LinkableLineNumbers(b bool, prefix string) Option {
return func(f *Formatter) {
f.linkableLineNumbers = b
f.lineNumbersIDPrefix = prefix
}
}
// HighlightLines higlights the given line ranges with the Highlight style.
//
// A range is the beginning and ending of a range as 1-based line numbers, inclusive.
func HighlightLines(ranges [][2]int) Option {
return func(f *Formatter) {
f.highlightRanges = ranges
sort.Sort(f.highlightRanges)
}
}
// BaseLineNumber sets the initial number to start line numbering at. Defaults to 1.
func BaseLineNumber(n int) Option {
return func(f *Formatter) {
f.baseLineNumber = n
}
}
// New HTML formatter.
//
// The zero configuration uses 1-based line numbering and the default
// <pre>/<code> wrapper; options are applied in order and may override both.
func New(options ...Option) *Formatter {
    f := &Formatter{
        baseLineNumber: 1,
        preWrapper:     defaultPreWrapper,
    }
    for _, option := range options {
        option(f)
    }
    return f
}
// PreWrapper defines the operations supported in WithPreWrapper.
type PreWrapper interface {
// Start is called to write a start <pre> element.
// The code flag tells whether this block surrounds
// highlighted code. This will be false when surrounding
// line numbers.
Start(code bool, styleAttr string) string
// End is called to write the end </pre> element.
End(code bool) string
}
type preWrapper struct {
start func(code bool, styleAttr string) string
end func(code bool) string
}
func (p preWrapper) Start(code bool, styleAttr string) string {
return p.start(code, styleAttr)
}
func (p preWrapper) End(code bool) string {
return p.end(code)
}
var (
nopPreWrapper = preWrapper{
start: func(code bool, styleAttr string) string { return "" },
end: func(code bool) string { return "" },
}
defaultPreWrapper = preWrapper{
start: func(code bool, styleAttr string) string {
if code {
return fmt.Sprintf(`<pre tabindex="0"%s><code>`, styleAttr)
}
return fmt.Sprintf(`<pre tabindex="0"%s>`, styleAttr)
},
end: func(code bool) string {
if code {
return `</code></pre>`
}
return `</pre>`
},
}
)
// Formatter that generates HTML.
type Formatter struct {
standalone bool
prefix string
Classes bool // Exported field to detect when classes are being used
allClasses bool
customCSS map[chroma.TokenType]string
preWrapper PreWrapper
inlineCode bool
preventSurroundingPre bool
tabWidth int
wrapLongLines bool
lineNumbers bool
lineNumbersInTable bool
linkableLineNumbers bool
lineNumbersIDPrefix string
highlightRanges highlightRanges
baseLineNumber int
}
type highlightRanges [][2]int
func (h highlightRanges) Len() int { return len(h) }
func (h highlightRanges) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] }
// Format renders the iterator's tokens to w as HTML using the given style.
func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) {
    return f.writeHTML(w, style, iterator.Tokens())
}
// We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked).
//
// OTOH we need to be super careful about correct escaping...
func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo
    css := f.styleToCSS(style)
    if !f.Classes {
        // Inline-style mode: compress each declaration before embedding it.
        for t, style := range css {
            css[t] = compressStyle(style)
        }
    }
    if f.standalone {
        // Emit a complete HTML document wrapper around the highlighted code.
        fmt.Fprint(w, "<html>\n")
        if f.Classes {
            fmt.Fprint(w, "<style type=\"text/css\">\n")
            err = f.WriteCSS(w, style)
            if err != nil {
                return err
            }
            fmt.Fprintf(w, "body { %s; }\n", css[chroma.Background])
            fmt.Fprint(w, "</style>")
        }
        fmt.Fprintf(w, "<body%s>\n", f.styleAttr(css, chroma.Background))
    }
    wrapInTable := f.lineNumbers && f.lineNumbersInTable
    lines := chroma.SplitTokensIntoLines(tokens)
    // Width needed to right-align the largest line number.
    lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
    highlightIndex := 0
    if wrapInTable {
        // List line numbers in its own <td>
        fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper))
        fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
        fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
        // NOTE(review): the preWrapper string is passed to Fprintf as the
        // *format* argument; a literal '%' in a style attribute would be
        // misinterpreted (go vet flags non-constant formats). Left as the
        // vendored upstream wrote it — confirm against upstream before changing.
        fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
        for index := range lines {
            line := f.baseLineNumber + index
            highlight, next := f.shouldHighlight(highlightIndex, line)
            if next {
                highlightIndex++
            }
            if highlight {
                fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
            }
            fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
            if highlight {
                fmt.Fprintf(w, "</span>")
            }
        }
        fmt.Fprint(w, f.preWrapper.End(false))
        fmt.Fprint(w, "</td>\n")
        fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
    }
    // See NOTE(review) above regarding the non-constant Fprintf format.
    fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
    highlightIndex = 0
    for index, tokens := range lines {
        // 1-based line number.
        line := f.baseLineNumber + index
        highlight, next := f.shouldHighlight(highlightIndex, line)
        if next {
            highlightIndex++
        }
        if !(f.preventSurroundingPre || f.inlineCode) {
            // Start of Line
            fmt.Fprint(w, `<span`)
            if highlight {
                // Line + LineHighlight
                if f.Classes {
                    fmt.Fprintf(w, ` class="%s %s"`, f.class(chroma.Line), f.class(chroma.LineHighlight))
                } else {
                    fmt.Fprintf(w, ` style="%s %s"`, css[chroma.Line], css[chroma.LineHighlight])
                }
                fmt.Fprint(w, `>`)
            } else {
                fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line))
            }
            // Line number
            if f.lineNumbers && !wrapInTable {
                fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
            }
            fmt.Fprintf(w, `<span%s>`, f.styleAttr(css, chroma.CodeLine))
        }
        for _, token := range tokens {
            // Escape token text; wrap it in a styled span only when a style applies.
            html := html.EscapeString(token.String())
            attr := f.styleAttr(css, token.Type)
            if attr != "" {
                html = fmt.Sprintf("<span%s>%s</span>", attr, html)
            }
            fmt.Fprint(w, html)
        }
        if !(f.preventSurroundingPre || f.inlineCode) {
            fmt.Fprint(w, `</span>`) // End of CodeLine
            fmt.Fprint(w, `</span>`) // End of Line
        }
    }
    // See NOTE(review) above regarding the non-constant Fprintf format.
    fmt.Fprintf(w, f.preWrapper.End(true))
    if wrapInTable {
        fmt.Fprint(w, "</td></tr></table>\n")
        fmt.Fprint(w, "</div>\n")
    }
    if f.standalone {
        fmt.Fprint(w, "\n</body>\n")
        fmt.Fprint(w, "</html>\n")
    }
    return nil
}
// lineIDAttribute returns an id="..." attribute for a line number, or ""
// when linkable line numbers are disabled.
func (f *Formatter) lineIDAttribute(line int) string {
    if !f.linkableLineNumbers {
        return ""
    }
    return fmt.Sprintf(" id=\"%s\"", f.lineID(line))
}

// lineTitleWithLinkIfNeeded renders the padded line number, wrapped in a
// self-link when linkable line numbers are enabled.
func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string {
    title := fmt.Sprintf("%*d", lineDigits, line)
    if !f.linkableLineNumbers {
        return title
    }
    return fmt.Sprintf("<a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#%s\">%s</a>", f.lineID(line), title)
}

// lineID builds the element id for a line from the configured prefix.
func (f *Formatter) lineID(line int) string {
    return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line)
}

// shouldHighlight reports whether line falls inside a highlight range. The
// second result reports whether the caller's highlightIndex should advance
// (ranges are sorted, so fully-passed ranges are skipped permanently).
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {
    next := false
    // Skip ranges that end before this line.
    for highlightIndex < len(f.highlightRanges) && line > f.highlightRanges[highlightIndex][1] {
        highlightIndex++
        next = true
    }
    if highlightIndex < len(f.highlightRanges) {
        hrange := f.highlightRanges[highlightIndex]
        if line >= hrange[0] && line <= hrange[1] {
            return true, next
        }
    }
    return false, next
}
// class returns the prefixed CSS class for a token type, walking up the
// token-type hierarchy until a standard class is found. An explicitly empty
// standard class short-circuits to "".
func (f *Formatter) class(t chroma.TokenType) string {
    for t != 0 {
        if cls, ok := chroma.StandardTypes[t]; ok {
            if cls != "" {
                return f.prefix + cls
            }
            return ""
        }
        t = t.Parent()
    }
    // Fall back to the class registered for token type 0, if any.
    if cls := chroma.StandardTypes[t]; cls != "" {
        return f.prefix + cls
    }
    return ""
}
// styleAttr returns either a class="..." attribute (class mode) or an inline
// style="..." attribute for the token type, falling back from the exact type
// to its sub-category and then its category. Returns "" when nothing applies.
func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType, extraCSS ...string) string {
    if f.Classes {
        cls := f.class(tt)
        if cls == "" {
            return ""
        }
        return fmt.Sprintf(` class="%s"`, cls)
    }
    // Inline mode: resolve the most specific style available.
    if _, ok := styles[tt]; !ok {
        tt = tt.SubCategory()
        if _, ok := styles[tt]; !ok {
            tt = tt.Category()
            if _, ok := styles[tt]; !ok {
                return ""
            }
        }
    }
    css := []string{styles[tt]}
    css = append(css, extraCSS...)
    return fmt.Sprintf(` style="%s"`, strings.Join(css, ";"))
}
// tabWidthStyle returns a CSS tab-size declaration when a non-default tab
// width is configured; the empty string for 0 (unset) or the default of 8.
func (f *Formatter) tabWidthStyle() string {
    if f.tabWidth == 0 || f.tabWidth == 8 {
        return ""
    }
    return fmt.Sprintf("-moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d;", f.tabWidth)
}
// WriteCSS writes CSS style definitions (without any surrounding HTML).
func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
css := f.styleToCSS(style)
// Special-case background as it is mapped to the outer ".chroma" class.
if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
return err
}
// Special-case PreWrapper as it is the ".chroma" class.
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.PreWrapper, f.prefix, css[chroma.PreWrapper]); err != nil {
return err
}
// Special-case code column of table to expand width.
if f.lineNumbers && f.lineNumbersInTable {
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s:last-child { width: 100%%; }",
chroma.LineTableTD, f.prefix, f.class(chroma.LineTableTD)); err != nil {
return err
}
}
// Special-case line number highlighting when targeted.
if f.lineNumbers || f.lineNumbersInTable {
targetedLineCSS := StyleEntryToCSS(style.Get(chroma.LineHighlight))
for _, tt := range []chroma.TokenType{chroma.LineNumbers, chroma.LineNumbersTable} {
fmt.Fprintf(w, "/* %s targeted by URL anchor */ .%schroma .%s:target { %s }\n", tt, f.prefix, f.class(tt), targetedLineCSS)
}
}
tts := []int{}
for tt := range css {
tts = append(tts, int(tt))
}
sort.Ints(tts)
for _, ti := range tts {
tt := chroma.TokenType(ti)
switch tt {
case chroma.Background, chroma.PreWrapper:
continue
}
class := f.class(tt)
if class == "" {
continue
}
styles := css[tt]
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil {
return err
}
}
return nil
}
// styleToCSS converts a chroma.Style into a map of CSS declaration strings,
// keyed by token type. Background and PreWrapper receive extra layout rules
// (tab width, optional grid for highlights, optional long-line wrapping), and
// the line/line-number types are prefixed with fixed structural defaults so
// that user-provided CSS (f.customCSS) can override them.
func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string {
	classes := map[chroma.TokenType]string{}
	bg := style.Get(chroma.Background)
	// Convert the style.
	for t := range chroma.StandardTypes {
		entry := style.Get(t)
		// Subtract the background entry so per-token rules only carry what
		// differs from the shared background class.
		if t != chroma.Background {
			entry = entry.Sub(bg)
		}
		// Inherit from custom CSS provided by user.
		// Order is category, then sub-category, then the exact type, so the
		// most specific custom CSS is appended last.
		tokenCategory := t.Category()
		tokenSubCategory := t.SubCategory()
		if t != tokenCategory {
			if css, ok := f.customCSS[tokenCategory]; ok {
				classes[t] = css
			}
		}
		if tokenCategory != tokenSubCategory {
			if css, ok := f.customCSS[tokenSubCategory]; ok {
				classes[t] += css
			}
		}
		// Add custom CSS provided by user
		if css, ok := f.customCSS[t]; ok {
			classes[t] += css
		}
		// Skip empty rules unless the formatter was asked to emit all classes.
		if !f.allClasses && entry.IsZero() && classes[t] == `` {
			continue
		}
		styleEntryCSS := StyleEntryToCSS(entry)
		if styleEntryCSS != `` && classes[t] != `` {
			styleEntryCSS += `;`
		}
		// Style-derived CSS first, custom CSS after, so custom CSS wins.
		classes[t] = styleEntryCSS + classes[t]
	}
	classes[chroma.Background] += `;` + f.tabWidthStyle()
	// PreWrapper inherits the background's declarations.
	classes[chroma.PreWrapper] += classes[chroma.Background]
	// Make PreWrapper a grid to show highlight style with full width.
	if len(f.highlightRanges) > 0 && f.customCSS[chroma.PreWrapper] == `` {
		classes[chroma.PreWrapper] += `display: grid;`
	}
	// Make PreWrapper wrap long lines.
	if f.wrapLongLines {
		classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;`
	}
	lineNumbersStyle := `white-space: pre; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;`
	// All rules begin with default rules followed by user provided rules
	classes[chroma.Line] = `display: flex;` + classes[chroma.Line]
	classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers]
	classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable]
	classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable]
	classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD]
	return classes
}
// StyleEntryToCSS converts a chroma.StyleEntry to CSS attributes.
//
// Only attributes explicitly set on the entry are emitted; the result joins
// them with "; " and carries no trailing separator.
func StyleEntryToCSS(e chroma.StyleEntry) string {
	var attrs []string
	add := func(decl string) { attrs = append(attrs, decl) }
	if e.Colour.IsSet() {
		add("color: " + e.Colour.String())
	}
	if e.Background.IsSet() {
		add("background-color: " + e.Background.String())
	}
	if e.Bold == chroma.Yes {
		add("font-weight: bold")
	}
	if e.Italic == chroma.Yes {
		add("font-style: italic")
	}
	if e.Underline == chroma.Yes {
		add("text-decoration: underline")
	}
	return strings.Join(attrs, "; ")
}
// compressStyle compresses CSS attributes - removes spaces after ":" and
// collapses 6-digit hex colours to the 3-digit shorthand (#ffffff -> #fff)
// when each channel's digit pair is identical.
//
// Fix over the original: colour compression only runs when the segment ends
// in "#" followed by exactly six characters. The original sliced
// p[len(p)-6:] whenever the segment merely contained "#", which panics on a
// segment shorter than six bytes and could mangle non-colour text.
func compressStyle(s string) string {
	parts := strings.Split(s, ";")
	out := []string{}
	for _, p := range parts {
		// Collapse runs of whitespace, then drop the single space after ":".
		p = strings.Join(strings.Fields(p), " ")
		p = strings.Replace(p, ": ", ":", 1)
		// Compress only a trailing "#xxxxxx" colour value.
		if i := strings.LastIndex(p, "#"); i >= 0 && len(p)-i == 7 {
			c := p[len(p)-6:]
			if c[0] == c[1] && c[2] == c[3] && c[4] == c[5] {
				p = p[:len(p)-6] + c[0:1] + c[2:3] + c[4:5]
			}
		}
		out = append(out, p)
	}
	return strings.Join(out, ";")
}

76
vendor/github.com/alecthomas/chroma/v2/iterator.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,76 @@
package chroma
import "strings"
// An Iterator across tokens.
//
// EOF will be returned at the end of the Token stream.
//
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
//
// Each call advances the stream by one token; iterators are therefore
// single-use and not safe for concurrent callers.
type Iterator func() Token
// Tokens consumes all tokens from the iterator and returns them as a slice.
func (i Iterator) Tokens() []Token {
	var collected []Token
	for {
		tok := i()
		if tok == EOF {
			return collected
		}
		collected = append(collected, tok)
	}
}
// Concaterator concatenates tokens from a series of iterators.
//
// The returned iterator drains each source in order, dropping a source only
// once it yields EOF, and yields EOF itself when all sources are exhausted.
func Concaterator(iterators ...Iterator) Iterator {
	return func() Token {
		for ; len(iterators) > 0; iterators = iterators[1:] {
			if tok := iterators[0](); tok != EOF {
				return tok
			}
		}
		return EOF
	}
}
// Literator converts a sequence of literal Tokens into an Iterator.
//
// Tokens are replayed in order; once exhausted the iterator returns EOF
// forever.
func Literator(tokens ...Token) Iterator {
	return func() Token {
		if len(tokens) > 0 {
			head := tokens[0]
			tokens = tokens[1:]
			return head
		}
		return EOF
	}
}
// SplitTokensIntoLines splits tokens containing newlines in two.
//
// The result is one []Token per line; a token whose Value spans newlines is
// cloned so each line receives only its own portion, with the "\n" kept at
// the end of the line it terminates (SplitAfterN semantics).
func SplitTokensIntoLines(tokens []Token) (out [][]Token) {
	var line []Token // nolint: prealloc
	for _, token := range tokens {
		// A single token may contain several newlines; peel off one line
		// per inner iteration.
		for strings.Contains(token.Value, "\n") {
			parts := strings.SplitAfterN(token.Value, "\n", 2)
			// Token becomes the tail.
			token.Value = parts[1]
			// Append the head to the line and flush the line.
			clone := token.Clone()
			clone.Value = parts[0]
			line = append(line, clone)
			out = append(out, line)
			line = nil
		}
		// Whatever remains (possibly "") carries over onto the current line.
		line = append(line, token)
	}
	if len(line) > 0 {
		out = append(out, line)
	}
	// Strip empty trailing token line.
	if len(out) > 0 {
		last := out[len(out)-1]
		if len(last) == 1 && last[0].Value == "" {
			out = out[:len(out)-1]
		}
	}
	return
}

142
vendor/github.com/alecthomas/chroma/v2/lexer.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,142 @@
package chroma
import (
"fmt"
"strings"
)
var (
	// defaultOptions: start tokenising in the "root" state and normalise
	// line endings to LF. Presumably applied when a caller passes nil
	// options — TODO(review): confirm at Tokenise call sites (not visible
	// in this file).
	defaultOptions = &TokeniseOptions{
		State:    "root",
		EnsureLF: true,
	}
)
// Config for a lexer.
//
// The xml struct tags allow a Config to be (de)serialised as XML; see the
// XML-defined lexers loaded via MustNewXMLLexer elsewhere in this package.
type Config struct {
	// Name of the lexer.
	Name string `xml:"name,omitempty"`

	// Shortcuts for the lexer
	Aliases []string `xml:"alias,omitempty"`

	// File name globs
	Filenames []string `xml:"filename,omitempty"`

	// Secondary file name globs
	AliasFilenames []string `xml:"alias_filename,omitempty"`

	// MIME types
	MimeTypes []string `xml:"mime_type,omitempty"`

	// Regex matching is case-insensitive.
	CaseInsensitive bool `xml:"case_insensitive,omitempty"`

	// Regex matches all characters.
	DotAll bool `xml:"dot_all,omitempty"`

	// Regex does not match across lines ($ matches EOL).
	//
	// Defaults to multiline.
	NotMultiline bool `xml:"not_multiline,omitempty"`

	// Don't strip leading and trailing newlines from the input.
	// DontStripNL bool

	// Strip all leading and trailing whitespace from the input
	// StripAll bool

	// Make sure that the input ends with a newline. This
	// is required for some lexers that consume input linewise.
	EnsureNL bool `xml:"ensure_nl,omitempty"`

	// If given and greater than 0, expand tabs in the input.
	// TabSize int

	// Priority of lexer.
	//
	// If this is 0 it will be treated as a default of 1.
	Priority float32 `xml:"priority,omitempty"`
}
// Token output to formatter.
type Token struct {
	Type  TokenType `json:"type"`
	Value string    `json:"value"`
}

// String returns the raw text of the token.
func (t *Token) String() string { return t.Value }

// GoString returns a Go-syntax representation of the token.
func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) }

// Clone returns a clone of the Token.
// Token has no reference fields, so a shallow copy is a full copy.
func (t *Token) Clone() Token {
	return *t
}

// EOF is returned by lexers at the end of input.
// It is the zero Token, so comparisons like `t != EOF` detect end of stream.
var EOF Token
// TokeniseOptions contains options for tokenisers.
type TokeniseOptions struct {
	// State to start tokenisation in. Defaults to "root".
	State string
	// Nested tokenisation.
	Nested bool

	// If true, all EOLs are converted into LF
	// by replacing CRLF and CR
	EnsureLF bool
}
// A Lexer for tokenising source code.
type Lexer interface {
	// Config describing the features of the Lexer.
	Config() *Config
	// Tokenise returns an Iterator over tokens in text.
	Tokenise(options *TokeniseOptions, text string) (Iterator, error)
	// SetRegistry sets the registry this Lexer is associated with.
	//
	// The registry should be used by the Lexer if it needs to look up other
	// lexers.
	SetRegistry(registry *LexerRegistry) Lexer
	// SetAnalyser sets a function the Lexer should use for scoring how
	// likely a fragment of text is to match this lexer, between 0.0 and 1.0.
	// A value of 1 indicates high confidence.
	//
	// Lexers may ignore this if they implement their own analysers.
	SetAnalyser(analyser func(text string) float32) Lexer
	// AnalyseText scores how likely a fragment of text is to match
	// this lexer, between 0.0 and 1.0. A value of 1 indicates high confidence.
	AnalyseText(text string) float32
}
// Lexers is a slice of lexers sortable by name.
type Lexers []Lexer

func (l Lexers) Len() int      { return len(l) }
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }

// Less orders lexers by case-insensitive name comparison.
func (l Lexers) Less(i, j int) bool {
	left := strings.ToLower(l[i].Config().Name)
	right := strings.ToLower(l[j].Config().Name)
	return left < right
}
// PrioritisedLexers is a slice of lexers sortable by priority.
type PrioritisedLexers []Lexer

func (l PrioritisedLexers) Len() int      { return len(l) }
func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }

// Less orders lexers by descending priority.
func (l PrioritisedLexers) Less(i, j int) bool {
	return lexerPriority(l[i]) > lexerPriority(l[j])
}

// lexerPriority returns the lexer's configured priority, treating the zero
// value as the documented default of 1.
func lexerPriority(lex Lexer) float32 {
	if p := lex.Config().Priority; p != 0 {
		return p
	}
	return 1
}
// Analyser determines how appropriate this lexer is for the given text.
type Analyser interface {
	// AnalyseText returns a score between 0.0 and 1.0; higher means a
	// better match for this lexer.
	AnalyseText(text string) float32
}

40
vendor/github.com/alecthomas/chroma/v2/lexers/README.md generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,40 @@
# Lexer tests
The tests in this directory feed a known input `testdata/<name>.actual` into the parser for `<name>` and check
that its output matches `<name>.expected`.
It is also possible to perform several tests on the same parser `<name>`, by placing known inputs `*.actual` into a
directory `testdata/<name>/`.
## Running the tests
Run the tests as normal:
```go
go test ./lexers
```
## Update existing tests
When you add a new test data file (`*.actual`), you need to regenerate all tests. That's how Chroma creates the `*.expected` test file based on the corresponding lexer.
To regenerate all tests, type in your terminal:
```go
RECORD=true go test ./lexers
```
This first sets the `RECORD` environment variable to `true`. Then it runs `go test` on the `./lexers` directory of the Chroma project.
(That environment variable tells Chroma it needs to output test data. After running `go test ./lexers` you can remove or reset that variable.)
### Windows users
Windows users will find that the `RECORD=true go test ./lexers` command fails in both the standard command prompt terminal and in PowerShell.
Instead we have to perform both steps separately:
- Set the `RECORD` environment variable to `true`.
+ In the regular command prompt window, the `set` command sets an environment variable for the current session: `set RECORD=true`. See [this page](https://superuser.com/questions/212150/how-to-set-env-variable-in-windows-cmd-line) for more.
+ In PowerShell, you can use the `$env:RECORD = 'true'` command for that. See [this article](https://mcpmag.com/articles/2019/03/28/environment-variables-in-powershell.aspx) for more.
+ You can also make a persistent environment variable by hand in the Windows computer settings. See [this article](https://www.computerhope.com/issues/ch000549.htm) for how.
- When the environment variable is set, run `go test ./lexers`.
Chroma will now regenerate the test files and print its results to the console window.

17
vendor/github.com/alecthomas/chroma/v2/lexers/bash.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,17 @@
package lexers
import (
"regexp"
)
// TODO(moorereason): can this be factored away?
// bashAnalyserRe recognises a shebang line invoking bash, zsh, sh or ksh
// (optionally via env).
var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`)

func init() { // nolint: gochecknoinits
	// Score 1.0 when a shell shebang is present anywhere in the text.
	Get("bash").SetAnalyser(func(text string) float32 {
		if bashAnalyserRe.MatchString(text) {
			return 1.0
		}
		return 0.0
	})
}

26
vendor/github.com/alecthomas/chroma/v2/lexers/bashsession.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,26 @@
package lexers
import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// BashSession lexer.
//
// Highlights interactive shell transcripts: prompt lines are handed to the
// Bash lexer, everything else is treated as command output.
var BashSession = Register(MustNewLexer(
	&Config{
		Name:      "BashSession",
		Aliases:   []string{"bash-session", "console", "shell-session"},
		Filenames: []string{".sh-session"},
		MimeTypes: []string{"text/x-sh"},
		// The rules below are line-oriented, so a trailing newline is required.
		EnsureNL: true,
	},
	bashsessionRules,
))
// bashsessionRules returns the rules for BashSession: a prompt prefix
// (optionally "[user@host]", then one of # $ % >) delegates the rest of the
// line to the Bash lexer; any other line is generic output.
func bashsessionRules() Rules {
	return Rules{
		"root": {
			{`^((?:\[[^]]+@[^]]+\]\s?)?[#$%>])(\s*)(.*\n?)`, ByGroups(GenericPrompt, Text, Using("Bash")), nil},
			{`^.+\n?`, GenericOutput, nil},
		},
	}
}

215
vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,215 @@
package lexers
import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// caddyfileCommon are the rules common to both of the lexer variants
// (the full Caddyfile lexer and the directive-only one). The states model
// Caddy's brace-nested directive/matcher structure; note the Pop counts:
// Pop(2)/Pop(3) unwind both the block state and the state(s) that pushed it.
func caddyfileCommonRules() Rules {
	return Rules{
		"site_block_common": {
			// Import keyword
			{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
			// Matcher definition
			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
			// Matcher token stub for docs
			{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
			// These cannot have matchers but may have things that look like
			// matchers in their arguments, so we just parse as a subdirective.
			{`try_files`, Keyword, Push("subdirective")},
			// These are special, they can nest more directives
			{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
			// Any other directive
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		"matcher": {
			{`\{`, Punctuation, Push("block")},
			// Not can be one-liner
			{`not`, Keyword, Push("deep_not_matcher")},
			// Any other same-line matcher
			{`[^\s#]+`, Keyword, Push("arguments")},
			// Terminators
			{`\n`, Text, Pop(1)},
			{`\}`, Punctuation, Pop(1)},
			Include("base"),
		},
		"block": {
			{`\}`, Punctuation, Pop(2)},
			// Not can be one-liner
			{`not`, Keyword, Push("not_matcher")},
			// Any other subdirective
			{`[^\s#]+`, Keyword, Push("subdirective")},
			Include("base"),
		},
		"nested_block": {
			{`\}`, Punctuation, Pop(2)},
			// Matcher definition
			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
			// Something that starts with literally < is probably a docs stub
			{`\<[^#]+\>`, Keyword, Push("nested_directive")},
			// Any other directive
			{`[^\s#]+`, Keyword, Push("nested_directive")},
			Include("base"),
		},
		"not_matcher": {
			{`\}`, Punctuation, Pop(2)},
			{`\{(?=\s)`, Punctuation, Push("block")},
			{`[^\s#]+`, Keyword, Push("arguments")},
			{`\s+`, Text, nil},
		},
		"deep_not_matcher": {
			{`\}`, Punctuation, Pop(2)},
			{`\{(?=\s)`, Punctuation, Push("block")},
			{`[^\s#]+`, Keyword, Push("deep_subdirective")},
			{`\s+`, Text, nil},
		},
		"directive": {
			{`\{(?=\s)`, Punctuation, Push("block")},
			Include("matcher_token"),
			Include("comments_pop_1"),
			{`\n`, Text, Pop(1)},
			Include("base"),
		},
		"nested_directive": {
			{`\{(?=\s)`, Punctuation, Push("nested_block")},
			Include("matcher_token"),
			Include("comments_pop_1"),
			{`\n`, Text, Pop(1)},
			Include("base"),
		},
		"subdirective": {
			{`\{(?=\s)`, Punctuation, Push("block")},
			Include("comments_pop_1"),
			{`\n`, Text, Pop(1)},
			Include("base"),
		},
		"arguments": {
			{`\{(?=\s)`, Punctuation, Push("block")},
			Include("comments_pop_2"),
			{`\\\n`, Text, nil}, // Skip escaped newlines
			{`\n`, Text, Pop(2)},
			Include("base"),
		},
		"deep_subdirective": {
			{`\{(?=\s)`, Punctuation, Push("block")},
			Include("comments_pop_3"),
			{`\n`, Text, Pop(3)},
			Include("base"),
		},
		"matcher_token": {
			{`@[^\s]+`, NameDecorator, Push("arguments")},         // Named matcher
			{`/[^\s]+`, NameDecorator, Push("arguments")},         // Path matcher
			{`\*`, NameDecorator, Push("arguments")},              // Wildcard path matcher
			{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
		},
		"comments": {
			{`^#.*\n`, CommentSingle, nil},   // Comment at start of line
			{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
		},
		"comments_pop_1": {
			{`^#.*\n`, CommentSingle, Pop(1)},   // Comment at start of line
			{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
		},
		"comments_pop_2": {
			{`^#.*\n`, CommentSingle, Pop(2)},   // Comment at start of line
			{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
		},
		"comments_pop_3": {
			{`^#.*\n`, CommentSingle, Pop(3)},   // Comment at start of line
			{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
		},
		"base": {
			Include("comments"),
			{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
			{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
			{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
			{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
			{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
			{`\[(?=[^#{}$]+\])`, Punctuation, nil},
			{`\]|\|`, Punctuation, nil},
			{`[^\s#{}$\]]+`, LiteralString, nil},
			{`/[^\s#]*`, Name, nil},
			{`\s+`, Text, nil},
		},
	}
}
// Caddyfile lexer.
//
// Full-file variant: handles global options, snippets and site labels before
// delegating site blocks to the common rules.
var Caddyfile = Register(MustNewLexer(
	&Config{
		Name:      "Caddyfile",
		Aliases:   []string{"caddyfile", "caddy"},
		Filenames: []string{"Caddyfile*"},
		MimeTypes: []string{},
	},
	caddyfileRules,
))
// caddyfileRules returns the rules for the full Caddyfile lexer: a global
// options block, snippet definitions, one or more comma-separated site
// labels, then site blocks that reuse the common rules (merged in below).
func caddyfileRules() Rules {
	return Rules{
		"root": {
			Include("comments"),
			// Global options block
			{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
			// Snippets
			{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
			// Site label
			{`[^#{(\s,]+`, GenericHeading, Push("label")},
			// Site label with placeholder
			{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
			{`\s+`, Text, nil},
		},
		"globals": {
			{`\}`, Punctuation, Pop(1)},
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		"snippet": {
			{`\}`, Punctuation, Pop(1)},
			// Matcher definition
			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
			// Any directive
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		"label": {
			// Allow multiple labels, comma separated, newlines after
			// a comma means another label is coming
			{`,\s*\n?`, Text, nil},
			{` `, Text, nil},
			// Site label with placeholder
			{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
			// Site label
			{`[^#{(\s,]+`, GenericHeading, nil},
			// Comment after non-block label (hack because comments end in \n)
			{`#.*\n`, CommentSingle, Push("site_block")},
			// Note: if \n, we'll never pop out of the site_block, it's valid
			{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
		},
		"site_block": {
			{`\}`, Punctuation, Pop(2)},
			Include("site_block_common"),
		},
	}.Merge(caddyfileCommonRules())
}
// Caddyfile directive-only lexer.
//
// Variant for highlighting directive fragments (e.g. in documentation)
// without the surrounding site labels.
var CaddyfileDirectives = Register(MustNewLexer(
	&Config{
		Name:      "Caddyfile Directives",
		Aliases:   []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
		Filenames: []string{},
		MimeTypes: []string{},
	},
	caddyfileDirectivesRules,
))
// caddyfileDirectivesRules returns rules for the directive-only variant:
// the root state is equivalent to "site_block" in the full Caddyfile lexer.
func caddyfileDirectivesRules() Rules {
	return Rules{
		// Same as "site_block" in Caddyfile
		"root": {
			Include("site_block_common"),
		},
	}.Merge(caddyfileCommonRules())
}

62
vendor/github.com/alecthomas/chroma/v2/lexers/chapel.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,62 @@
package lexers
import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// Chapel lexer.
//
// Rules cover comments, keyword classes, numeric literal forms (including
// imaginary "i" suffixes), strings, operators, and dedicated states for
// class/proc names and proc receiver types.
var Chapel = Register(MustNewLexer(
	&Config{
		Name:      "Chapel",
		Aliases:   []string{"chapel", "chpl"},
		Filenames: []string{"*.chpl"},
		MimeTypes: []string{},
	},
	func() Rules {
		return Rules{
			"root": {
				{`\n`, TextWhitespace, nil},
				{`\s+`, TextWhitespace, nil},
				{`\\\n`, Text, nil},
				{`//(.*?)\n`, CommentSingle, nil},
				{`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil},
				{Words(``, `\b`, `config`, `const`, `in`, `inout`, `out`, `param`, `ref`, `type`, `var`), KeywordDeclaration, nil},
				{Words(``, `\b`, `false`, `nil`, `none`, `true`), KeywordConstant, nil},
				{Words(``, `\b`, `bool`, `bytes`, `complex`, `imag`, `int`, `locale`, `nothing`, `opaque`, `range`, `real`, `string`, `uint`, `void`), KeywordType, nil},
				{Words(``, `\b`, `atomic`, `single`, `sync`, `borrowed`, `owned`, `shared`, `unmanaged`, `align`, `as`, `begin`, `break`, `by`, `catch`, `cobegin`, `coforall`, `continue`, `defer`, `delete`, `dmapped`, `do`, `domain`, `else`, `enum`, `except`, `export`, `extern`, `for`, `forall`, `foreach`, `forwarding`, `if`, `implements`, `import`, `index`, `init`, `inline`, `label`, `lambda`, `let`, `lifetime`, `local`, `new`, `noinit`, `on`, `only`, `otherwise`, `override`, `pragma`, `primitive`, `private`, `prototype`, `public`, `reduce`, `require`, `return`, `scan`, `select`, `serial`, `sparse`, `subdomain`, `then`, `this`, `throw`, `throws`, `try`, `use`, `when`, `where`, `while`, `with`, `yield`, `zip`), Keyword, nil},
				// iter/proc/operator introduce a name parsed in "procname".
				{`(iter)(\s+)`, ByGroups(Keyword, TextWhitespace), Push("procname")},
				{`(proc)(\s+)`, ByGroups(Keyword, TextWhitespace), Push("procname")},
				{`(operator)(\s+)`, ByGroups(Keyword, TextWhitespace), Push("procname")},
				{`(class|interface|module|record|union)(\s+)`, ByGroups(Keyword, TextWhitespace), Push("classname")},
				// Imaginary and floating literals before plain integers.
				{`\d+i`, LiteralNumber, nil},
				{`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
				{`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil},
				{`\d+[Ee][-+]\d+i`, LiteralNumber, nil},
				{`(\d*\.\d+)([eE][+-]?[0-9]+)?i?`, LiteralNumberFloat, nil},
				{`\d+[eE][+-]?[0-9]+i?`, LiteralNumberFloat, nil},
				{`0[bB][01]+`, LiteralNumberBin, nil},
				{`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
				{`0[oO][0-7]+`, LiteralNumberOct, nil},
				{`[0-9]+`, LiteralNumberInteger, nil},
				{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
				{`'(\\\\|\\'|[^'])*'`, LiteralString, nil},
				{`(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|<=>|<~>|\.\.|by|#|\.\.\.|&&|\|\||!|&|\||\^|~|<<|>>|==|!=|<=|>=|<|>|[+\-*/%]|\*\*)`, Operator, nil},
				{`[:;,.?()\[\]{}]`, Punctuation, nil},
				{`[a-zA-Z_][\w$]*`, NameOther, nil},
			},
			"classname": {
				{`[a-zA-Z_][\w$]*`, NameClass, Pop(1)},
			},
			"procname": {
				{`([a-zA-Z_][.\w$]*|\~[a-zA-Z_][.\w$]*|[+*/!~%<>=&^|\-:]{1,2})`, NameFunction, Pop(1)},
				{`\(`, Punctuation, Push("receivertype")},
				{`\)+\.`, Punctuation, nil},
			},
			"receivertype": {
				{Words(``, `\b`, `atomic`, `single`, `sync`, `borrowed`, `owned`, `shared`, `unmanaged`), Keyword, nil},
				{Words(``, `\b`, `bool`, `bytes`, `complex`, `imag`, `int`, `locale`, `nothing`, `opaque`, `range`, `real`, `string`, `uint`, `void`), KeywordType, nil},
				{`[^()]*`, NameOther, Pop(1)},
			},
		}
	},
))

39
vendor/github.com/alecthomas/chroma/v2/lexers/cheetah.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,39 @@
package lexers
import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// Cheetah lexer.
//
// Covers the Cheetah (a.k.a. Spitfire) Python templating language.
var Cheetah = Register(MustNewLexer(
	&Config{
		Name:      "Cheetah",
		Aliases:   []string{"cheetah", "spitfire"},
		Filenames: []string{"*.tmpl", "*.spt"},
		MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"},
	},
	cheetahRules,
))
// cheetahRules returns the rules for Cheetah templates: "##" line comments,
// "#* ... *#" block comments, "#directive" lines and "$placeholder"
// substitutions; embedded expression text is delegated to the Python lexer
// via Using("Python"). Everything else falls through as Other/Text.
func cheetahRules() Rules {
	return Rules{
		"root": {
			{`(##[^\n]*)$`, ByGroups(Comment), nil},
			{`#[*](.|\n)*?[*]#`, Comment, nil},
			{`#end[^#\n]*(?:#|$)`, CommentPreproc, nil},
			{`#slurp$`, CommentPreproc, nil},
			{`(#[a-zA-Z]+)([^#\n]*)(#|$)`, ByGroups(CommentPreproc, Using("Python"), CommentPreproc), nil},
			{`(\$)([a-zA-Z_][\w.]*\w)`, ByGroups(CommentPreproc, Using("Python")), nil},
			{`(\$\{!?)(.*?)(\})(?s)`, ByGroups(CommentPreproc, Using("Python"), CommentPreproc), nil},
			// (?x) extended mode: whitespace and "#" comments inside this
			// pattern are part of the regex syntax, not the match.
			{`(?sx)
				(.+?) # anything, followed by:
				(?:
					(?=\#[#a-zA-Z]*) | # an eval comment
					(?=\$[a-zA-Z_{]) | # a substitution
					\Z # end of string
				)
			`, Other, nil},
			{`\s+`, Text, nil},
		},
	}
}

243
vendor/github.com/alecthomas/chroma/v2/lexers/cl.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,243 @@
package lexers
import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// Name tables used to reclassify bare Common Lisp symbols by the type
// remapping below. The lists are pure data; membership determines which
// token type a NameVariable is remapped to.
var (
	// clBuiltinFunctions: standard function names.
	clBuiltinFunctions = []string{
		"<", "<=", "=", ">", ">=", "-", "/", "/=", "*", "+", "1-", "1+",
		"abort", "abs", "acons", "acos", "acosh", "add-method", "adjoin",
		"adjustable-array-p", "adjust-array", "allocate-instance",
		"alpha-char-p", "alphanumericp", "append", "apply", "apropos",
		"apropos-list", "aref", "arithmetic-error-operands",
		"arithmetic-error-operation", "array-dimension", "array-dimensions",
		"array-displacement", "array-element-type", "array-has-fill-pointer-p",
		"array-in-bounds-p", "arrayp", "array-rank", "array-row-major-index",
		"array-total-size", "ash", "asin", "asinh", "assoc", "assoc-if",
		"assoc-if-not", "atan", "atanh", "atom", "bit", "bit-and", "bit-andc1",
		"bit-andc2", "bit-eqv", "bit-ior", "bit-nand", "bit-nor", "bit-not",
		"bit-orc1", "bit-orc2", "bit-vector-p", "bit-xor", "boole",
		"both-case-p", "boundp", "break", "broadcast-stream-streams",
		"butlast", "byte", "byte-position", "byte-size", "caaaar", "caaadr",
		"caaar", "caadar", "caaddr", "caadr", "caar", "cadaar", "cadadr",
		"cadar", "caddar", "cadddr", "caddr", "cadr", "call-next-method", "car",
		"cdaaar", "cdaadr", "cdaar", "cdadar", "cdaddr", "cdadr", "cdar",
		"cddaar", "cddadr", "cddar", "cdddar", "cddddr", "cdddr", "cddr", "cdr",
		"ceiling", "cell-error-name", "cerror", "change-class", "char", "char<",
		"char<=", "char=", "char>", "char>=", "char/=", "character",
		"characterp", "char-code", "char-downcase", "char-equal",
		"char-greaterp", "char-int", "char-lessp", "char-name",
		"char-not-equal", "char-not-greaterp", "char-not-lessp", "char-upcase",
		"cis", "class-name", "class-of", "clear-input", "clear-output",
		"close", "clrhash", "code-char", "coerce", "compile",
		"compiled-function-p", "compile-file", "compile-file-pathname",
		"compiler-macro-function", "complement", "complex", "complexp",
		"compute-applicable-methods", "compute-restarts", "concatenate",
		"concatenated-stream-streams", "conjugate", "cons", "consp",
		"constantly", "constantp", "continue", "copy-alist", "copy-list",
		"copy-pprint-dispatch", "copy-readtable", "copy-seq", "copy-structure",
		"copy-symbol", "copy-tree", "cos", "cosh", "count", "count-if",
		"count-if-not", "decode-float", "decode-universal-time", "delete",
		"delete-duplicates", "delete-file", "delete-if", "delete-if-not",
		"delete-package", "denominator", "deposit-field", "describe",
		"describe-object", "digit-char", "digit-char-p", "directory",
		"directory-namestring", "disassemble", "documentation", "dpb",
		"dribble", "echo-stream-input-stream", "echo-stream-output-stream",
		"ed", "eighth", "elt", "encode-universal-time", "endp",
		"enough-namestring", "ensure-directories-exist",
		"ensure-generic-function", "eq", "eql", "equal", "equalp", "error",
		"eval", "evenp", "every", "exp", "export", "expt", "fboundp",
		"fceiling", "fdefinition", "ffloor", "fifth", "file-author",
		"file-error-pathname", "file-length", "file-namestring",
		"file-position", "file-string-length", "file-write-date",
		"fill", "fill-pointer", "find", "find-all-symbols", "find-class",
		"find-if", "find-if-not", "find-method", "find-package", "find-restart",
		"find-symbol", "finish-output", "first", "float", "float-digits",
		"floatp", "float-precision", "float-radix", "float-sign", "floor",
		"fmakunbound", "force-output", "format", "fourth", "fresh-line",
		"fround", "ftruncate", "funcall", "function-keywords",
		"function-lambda-expression", "functionp", "gcd", "gensym", "gentemp",
		"get", "get-decoded-time", "get-dispatch-macro-character", "getf",
		"gethash", "get-internal-real-time", "get-internal-run-time",
		"get-macro-character", "get-output-stream-string", "get-properties",
		"get-setf-expansion", "get-universal-time", "graphic-char-p",
		"hash-table-count", "hash-table-p", "hash-table-rehash-size",
		"hash-table-rehash-threshold", "hash-table-size", "hash-table-test",
		"host-namestring", "identity", "imagpart", "import",
		"initialize-instance", "input-stream-p", "inspect",
		"integer-decode-float", "integer-length", "integerp",
		"interactive-stream-p", "intern", "intersection",
		"invalid-method-error", "invoke-debugger", "invoke-restart",
		"invoke-restart-interactively", "isqrt", "keywordp", "last", "lcm",
		"ldb", "ldb-test", "ldiff", "length", "lisp-implementation-type",
		"lisp-implementation-version", "list", "list*", "list-all-packages",
		"listen", "list-length", "listp", "load",
		"load-logical-pathname-translations", "log", "logand", "logandc1",
		"logandc2", "logbitp", "logcount", "logeqv", "logical-pathname",
		"logical-pathname-translations", "logior", "lognand", "lognor",
		"lognot", "logorc1", "logorc2", "logtest", "logxor", "long-site-name",
		"lower-case-p", "machine-instance", "machine-type", "machine-version",
		"macroexpand", "macroexpand-1", "macro-function", "make-array",
		"make-broadcast-stream", "make-concatenated-stream", "make-condition",
		"make-dispatch-macro-character", "make-echo-stream", "make-hash-table",
		"make-instance", "make-instances-obsolete", "make-list",
		"make-load-form", "make-load-form-saving-slots", "make-package",
		"make-pathname", "make-random-state", "make-sequence", "make-string",
		"make-string-input-stream", "make-string-output-stream", "make-symbol",
		"make-synonym-stream", "make-two-way-stream", "makunbound", "map",
		"mapc", "mapcan", "mapcar", "mapcon", "maphash", "map-into", "mapl",
		"maplist", "mask-field", "max", "member", "member-if", "member-if-not",
		"merge", "merge-pathnames", "method-combination-error",
		"method-qualifiers", "min", "minusp", "mismatch", "mod",
		"muffle-warning", "name-char", "namestring", "nbutlast", "nconc",
		"next-method-p", "nintersection", "ninth", "no-applicable-method",
		"no-next-method", "not", "notany", "notevery", "nreconc", "nreverse",
		"nset-difference", "nset-exclusive-or", "nstring-capitalize",
		"nstring-downcase", "nstring-upcase", "nsublis", "nsubst", "nsubst-if",
		"nsubst-if-not", "nsubstitute", "nsubstitute-if", "nsubstitute-if-not",
		"nth", "nthcdr", "null", "numberp", "numerator", "nunion", "oddp",
		"open", "open-stream-p", "output-stream-p", "package-error-package",
		"package-name", "package-nicknames", "packagep",
		"package-shadowing-symbols", "package-used-by-list", "package-use-list",
		"pairlis", "parse-integer", "parse-namestring", "pathname",
		"pathname-device", "pathname-directory", "pathname-host",
		"pathname-match-p", "pathname-name", "pathnamep", "pathname-type",
		"pathname-version", "peek-char", "phase", "plusp", "position",
		"position-if", "position-if-not", "pprint", "pprint-dispatch",
		"pprint-fill", "pprint-indent", "pprint-linear", "pprint-newline",
		"pprint-tab", "pprint-tabular", "prin1", "prin1-to-string", "princ",
		"princ-to-string", "print", "print-object", "probe-file", "proclaim",
		"provide", "random", "random-state-p", "rassoc", "rassoc-if",
		"rassoc-if-not", "rational", "rationalize", "rationalp", "read",
		"read-byte", "read-char", "read-char-no-hang", "read-delimited-list",
		"read-from-string", "read-line", "read-preserving-whitespace",
		"read-sequence", "readtable-case", "readtablep", "realp", "realpart",
		"reduce", "reinitialize-instance", "rem", "remhash", "remove",
		"remove-duplicates", "remove-if", "remove-if-not", "remove-method",
		"remprop", "rename-file", "rename-package", "replace", "require",
		"rest", "restart-name", "revappend", "reverse", "room", "round",
		"row-major-aref", "rplaca", "rplacd", "sbit", "scale-float", "schar",
		"search", "second", "set", "set-difference",
		"set-dispatch-macro-character", "set-exclusive-or",
		"set-macro-character", "set-pprint-dispatch", "set-syntax-from-char",
		"seventh", "shadow", "shadowing-import", "shared-initialize",
		"short-site-name", "signal", "signum", "simple-bit-vector-p",
		"simple-condition-format-arguments", "simple-condition-format-control",
		"simple-string-p", "simple-vector-p", "sin", "sinh", "sixth", "sleep",
		"slot-boundp", "slot-exists-p", "slot-makunbound", "slot-missing",
		"slot-unbound", "slot-value", "software-type", "software-version",
		"some", "sort", "special-operator-p", "sqrt", "stable-sort",
		"standard-char-p", "store-value", "stream-element-type",
		"stream-error-stream", "stream-external-format", "streamp", "string",
		"string<", "string<=", "string=", "string>", "string>=", "string/=",
		"string-capitalize", "string-downcase", "string-equal",
		"string-greaterp", "string-left-trim", "string-lessp",
		"string-not-equal", "string-not-greaterp", "string-not-lessp",
		"stringp", "string-right-trim", "string-trim", "string-upcase",
		"sublis", "subseq", "subsetp", "subst", "subst-if", "subst-if-not",
		"substitute", "substitute-if", "substitute-if-not", "subtypep", "svref",
		"sxhash", "symbol-function", "symbol-name", "symbolp", "symbol-package",
		"symbol-plist", "symbol-value", "synonym-stream-symbol", "syntax:",
		"tailp", "tan", "tanh", "tenth", "terpri", "third",
		"translate-logical-pathname", "translate-pathname", "tree-equal",
		"truename", "truncate", "two-way-stream-input-stream",
		"two-way-stream-output-stream", "type-error-datum",
		"type-error-expected-type", "type-of", "typep", "unbound-slot-instance",
		"unexport", "unintern", "union", "unread-char", "unuse-package",
		"update-instance-for-different-class",
		"update-instance-for-redefined-class", "upgraded-array-element-type",
		"upgraded-complex-part-type", "upper-case-p", "use-package",
		"user-homedir-pathname", "use-value", "values", "values-list", "vector",
		"vectorp", "vector-pop", "vector-push", "vector-push-extend", "warn",
		"wild-pathname-p", "write", "write-byte", "write-char", "write-line",
		"write-sequence", "write-string", "write-to-string", "yes-or-no-p",
		"y-or-n-p", "zerop",
	}
	// clSpecialForms: special operators.
	clSpecialForms = []string{
		"block", "catch", "declare", "eval-when", "flet", "function", "go", "if",
		"labels", "lambda", "let", "let*", "load-time-value", "locally", "macrolet",
		"multiple-value-call", "multiple-value-prog1", "progn", "progv", "quote",
		"return-from", "setq", "symbol-macrolet", "tagbody", "the", "throw",
		"unwind-protect",
	}
	// clMacros: standard macro names.
	clMacros = []string{
		"and", "assert", "call-method", "case", "ccase", "check-type", "cond",
		"ctypecase", "decf", "declaim", "defclass", "defconstant", "defgeneric",
		"define-compiler-macro", "define-condition", "define-method-combination",
		"define-modify-macro", "define-setf-expander", "define-symbol-macro",
		"defmacro", "defmethod", "defpackage", "defparameter", "defsetf",
		"defstruct", "deftype", "defun", "defvar", "destructuring-bind", "do",
		"do*", "do-all-symbols", "do-external-symbols", "dolist", "do-symbols",
		"dotimes", "ecase", "etypecase", "formatter", "handler-bind",
		"handler-case", "ignore-errors", "incf", "in-package", "lambda", "loop",
		"loop-finish", "make-method", "multiple-value-bind", "multiple-value-list",
		"multiple-value-setq", "nth-value", "or", "pop",
		"pprint-exit-if-list-exhausted", "pprint-logical-block", "pprint-pop",
		"print-unreadable-object", "prog", "prog*", "prog1", "prog2", "psetf",
		"psetq", "push", "pushnew", "remf", "restart-bind", "restart-case",
		"return", "rotatef", "setf", "shiftf", "step", "time", "trace", "typecase",
		"unless", "untrace", "when", "with-accessors", "with-compilation-unit",
		"with-condition-restarts", "with-hash-table-iterator",
		"with-input-from-string", "with-open-file", "with-open-stream",
		"with-output-to-string", "with-package-iterator", "with-simple-restart",
		"with-slots", "with-standard-io-syntax",
	}
	// clLambdaListKeywords: lambda-list keywords (&-prefixed).
	clLambdaListKeywords = []string{
		"&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional",
		"&rest", "&whole",
	}
	// clDeclarations: declaration identifiers.
	clDeclarations = []string{
		"dynamic-extent", "ignore", "optimize", "ftype", "inline", "special",
		"ignorable", "notinline", "type",
	}
	// clBuiltinTypes: built-in type specifiers and condition types.
	clBuiltinTypes = []string{
		"atom", "boolean", "base-char", "base-string", "bignum", "bit",
		"compiled-function", "extended-char", "fixnum", "keyword", "nil",
		"signed-byte", "short-float", "single-float", "double-float", "long-float",
		"simple-array", "simple-base-string", "simple-bit-vector", "simple-string",
		"simple-vector", "standard-char", "unsigned-byte",

		// Condition Types
		"arithmetic-error", "cell-error", "condition", "control-error",
		"division-by-zero", "end-of-file", "error", "file-error",
		"floating-point-inexact", "floating-point-overflow",
		"floating-point-underflow", "floating-point-invalid-operation",
		"parse-error", "package-error", "print-not-readable", "program-error",
		"reader-error", "serious-condition", "simple-condition", "simple-error",
		"simple-type-error", "simple-warning", "stream-error", "storage-condition",
		"style-warning", "type-error", "unbound-variable", "unbound-slot",
		"undefined-function", "warning",
	}
	// clBuiltinClasses: built-in system class names.
	clBuiltinClasses = []string{
		"array", "broadcast-stream", "bit-vector", "built-in-class", "character",
		"class", "complex", "concatenated-stream", "cons", "echo-stream",
		"file-stream", "float", "function", "generic-function", "hash-table",
		"integer", "list", "logical-pathname", "method-combination", "method",
		"null", "number", "package", "pathname", "ratio", "rational", "readtable",
		"real", "random-state", "restart", "sequence", "standard-class",
		"standard-generic-function", "standard-method", "standard-object",
		"string-stream", "stream", "string", "structure-class", "structure-object",
		"symbol", "synonym-stream", "t", "two-way-stream", "vector",
	}
)
// CommonLisp is the Common Lisp lexer.
//
// The embedded XML grammar tokenises bare symbols generically as
// NameVariable; the TypeRemappingLexer wrapper then reclassifies symbols
// that appear in the word lists declared above (clBuiltinFunctions,
// clSpecialForms, clMacros, ...) into more specific token types.
var CommonLisp = Register(TypeRemappingLexer(MustNewXMLLexer(
	embedded,
	"embedded/common_lisp.xml",
), TypeMapping{
	{NameVariable, NameFunction, clBuiltinFunctions},
	{NameVariable, Keyword, clSpecialForms},
	{NameVariable, NameBuiltin, clMacros},
	{NameVariable, Keyword, clLambdaListKeywords},
	{NameVariable, Keyword, clDeclarations},
	{NameVariable, KeywordType, clBuiltinTypes},
	{NameVariable, NameClass, clBuiltinClasses},
}))

70
vendor/github.com/alecthomas/chroma/v2/lexers/cql.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,70 @@
package lexers
import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// CassandraCQL is the lexer for the Cassandra Query Language.
//
// It is registered globally and selected via the "cassandra"/"cql"
// aliases, "*.cql" filenames, or the "text/x-cql" MIME type. Keyword
// matching is case-insensitive, per CQL.
var CassandraCQL = Register(MustNewLexer(
	&Config{
		Name:            "Cassandra CQL",
		Aliases:         []string{"cassandra", "cql"},
		Filenames:       []string{"*.cql"},
		MimeTypes:       []string{"text/x-cql"},
		NotMultiline:    true,
		CaseInsensitive: true,
	},
	cassandraCQLRules, // rules are built lazily by the constructor
))
// cassandraCQLRules builds the tokenisation rules for Cassandra CQL.
//
// Rules within a state are tried in order, so earlier entries take
// precedence over later, more general ones.
func cassandraCQLRules() Rules {
	return Rules{
		"root": {
			{`\s+`, TextWhitespace, nil},
			// Line comments: SQL-style (--) and C++-style (//).
			{`(--|\/\/).*\n?`, CommentSingle, nil},
			// Block comments may nest; handled in a dedicated state.
			{`/\*`, CommentMultiline, Push("multiline-comments")},
			// Built-in CQL data types.
			{`(ascii|bigint|blob|boolean|counter|date|decimal|double|float|frozen|inet|int|list|map|set|smallint|text|time|timestamp|timeuuid|tinyint|tuple|uuid|varchar|varint)\b`, NameBuiltin, nil},
			// Reserved and non-reserved CQL keywords.
			{Words(``, `\b`, `ADD`, `AGGREGATE`, `ALL`, `ALLOW`, `ALTER`, `AND`, `ANY`, `APPLY`, `AS`, `ASC`, `AUTHORIZE`, `BATCH`, `BEGIN`, `BY`, `CLUSTERING`, `COLUMNFAMILY`, `COMPACT`, `CONSISTENCY`, `COUNT`, `CREATE`, `CUSTOM`, `DELETE`, `DESC`, `DISTINCT`, `DROP`, `EACH_QUORUM`, `ENTRIES`, `EXISTS`, `FILTERING`, `FROM`, `FULL`, `GRANT`, `IF`, `IN`, `INDEX`, `INFINITY`, `INSERT`, `INTO`, `KEY`, `KEYS`, `KEYSPACE`, `KEYSPACES`, `LEVEL`, `LIMIT`, `LOCAL_ONE`, `LOCAL_QUORUM`, `MATERIALIZED`, `MODIFY`, `NAN`, `NORECURSIVE`, `NOSUPERUSER`, `NOT`, `OF`, `ON`, `ONE`, `ORDER`, `PARTITION`, `PASSWORD`, `PER`, `PERMISSION`, `PERMISSIONS`, `PRIMARY`, `QUORUM`, `RENAME`, `REVOKE`, `SCHEMA`, `SELECT`, `STATIC`, `STORAGE`, `SUPERUSER`, `TABLE`, `THREE`, `TO`, `TOKEN`, `TRUNCATE`, `TTL`, `TWO`, `TYPE`, `UNLOGGED`, `UPDATE`, `USE`, `USER`, `USERS`, `USING`, `VALUES`, `VIEW`, `WHERE`, `WITH`, `WRITETIME`, `REPLICATION`, `OR`, `REPLACE`, `FUNCTION`, `CALLED`, `INPUT`, `RETURNS`, `LANGUAGE`, `ROLE`, `ROLES`, `TRIGGER`, `DURABLE_WRITES`, `LOGIN`, `OPTIONS`, `LOGGED`, `SFUNC`, `STYPE`, `FINALFUNC`, `INITCOND`, `IS`, `CONTAINS`, `JSON`, `PAGING`, `OFF`), Keyword, nil},
			{"[+*/<>=~!@#%^&|`?-]+", Operator, nil},
			// Inline user-defined function bodies ("... AS '...'" or
			// "... AS $$...$$"): the body (group 6) is re-lexed with the
			// language named by group 1 (java or javascript).
			{
				`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`,
				UsingByGroup(1, 6,
					NameBuiltin, TextWhitespace, Keyword, TextWhitespace,
					LiteralStringHeredoc, LiteralStringHeredoc, LiteralStringHeredoc),
				nil,
			},
			{`(true|false|null)\b`, KeywordConstant, nil},
			{`0x[0-9a-f]+`, LiteralNumberHex, nil},
			// UUID literals.
			{`[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`, LiteralNumberHex, nil},
			// A float literal starting with a bare dot is emitted as an
			// Error token (mirrors the upstream Pygments CQL lexer).
			{`\.[0-9]+(e[+-]?[0-9]+)?`, Error, nil},
			{`-?[0-9]+(\.[0-9])?(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			{`'`, LiteralStringSingle, Push("string")},
			// Double-quoted, case-sensitive identifiers.
			{`"`, LiteralStringName, Push("quoted-ident")},
			{`\$\$`, LiteralStringHeredoc, Push("dollar-string")},
			{`[a-z_]\w*`, Name, nil},
			// Named bind markers, optionally quoted (e.g. :name, :'name').
			{`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil},
			{`[;:()\[\]\{\},.]`, Punctuation, nil},
		},
		// Nested /* ... */ comments: each inner /* pushes another level.
		"multiline-comments": {
			{`/\*`, CommentMultiline, Push("multiline-comments")},
			{`\*/`, CommentMultiline, Pop(1)},
			{`[^/*]+`, CommentMultiline, nil},
			{`[/*]`, CommentMultiline, nil},
		},
		// Single-quoted strings; '' is an escaped quote.
		"string": {
			{`[^']+`, LiteralStringSingle, nil},
			{`''`, LiteralStringSingle, nil},
			{`'`, LiteralStringSingle, Pop(1)},
		},
		// Double-quoted identifiers; "" is an escaped quote.
		"quoted-ident": {
			{`[^"]+`, LiteralStringName, nil},
			{`""`, LiteralStringName, nil},
			{`"`, LiteralStringName, Pop(1)},
		},
		// $$ ... $$ dollar-quoted strings.
		"dollar-string": {
			{`[^\$]+`, LiteralStringHeredoc, nil},
			{`\$\$`, LiteralStringHeredoc, Pop(1)},
		},
	}
}

17
vendor/github.com/alecthomas/chroma/v2/lexers/dns.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,17 @@
package lexers
import (
"regexp"
)
// TODO(moorereason): can this be factored away?
// zoneAnalyserRe matches a DNS zone-file SOA record header
// ("@ IN SOA ...") at the start of any line; used by the analyser
// registered in init below to recognise zone-file content.
var zoneAnalyserRe = regexp.MustCompile(`(?m)^@\s+IN\s+SOA\s+`)
func init() { // nolint: gochecknoinits
Get("dns").SetAnalyser(func(text string) float32 {
if zoneAnalyserRe.FindString(text) != "" {
return 1.0
}
return 0.0
})
}

32
vendor/github.com/alecthomas/chroma/v2/lexers/docker.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,32 @@
package lexers
import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// Docker is the Dockerfile lexer.
//
// Registered globally; selected via the "docker"/"dockerfile" aliases,
// "Dockerfile"/"Dockerfile.*"/"*.docker" filenames, or the
// "text/x-dockerfile-config" MIME type. Instruction keywords are
// matched case-insensitively.
var Docker = Register(MustNewLexer(
	&Config{
		Name:            "Docker",
		Aliases:         []string{"docker", "dockerfile"},
		Filenames:       []string{"Dockerfile", "Dockerfile.*", "*.docker"},
		MimeTypes:       []string{"text/x-dockerfile-config"},
		CaseInsensitive: true,
	},
	dockerRules, // rules are built lazily by the constructor
))
// dockerRules builds the tokenisation rules for Dockerfiles.
//
// Rules are tried in order; instruction arguments are mostly delegated
// to the Bash lexer (and to JSON for exec-form arrays).
func dockerRules() Rules {
	return Rules{
		"root": {
			{`#.*`, Comment, nil},
			// ONBUILD: the wrapped instruction is re-lexed as Bash.
			{`(ONBUILD)((?:\s*\\?\s*))`, ByGroups(Keyword, Using("Bash")), nil},
			// HEALTHCHECK with its --flag=value options.
			{`(HEALTHCHECK)(((?:\s*\\?\s*)--\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using("Bash")), nil},
			// Exec (JSON-array) form: the [...] argument is lexed as JSON.
			{`(VOLUME|ENTRYPOINT|CMD|SHELL)((?:\s*\\?\s*))(\[.*?\])`, ByGroups(Keyword, Using("Bash"), Using("JSON")), nil},
			// key=value style instructions.
			{`(LABEL|ENV|ARG)((?:(?:\s*\\?\s*)\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using("Bash")), nil},
			// Instructions whose remainder is a plain string argument.
			{`((?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)|VOLUME)\b(.*)`, ByGroups(Keyword, LiteralString), nil},
			// Bare instruction keywords (shell-form arguments follow).
			{`((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))`, Keyword, nil},
			// Everything else (including \-continued lines) is Bash.
			{`(.*\\\n)*.+`, Using("Bash"), nil},
		},
	}
}

533
vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,533 @@
package lexers
import (
. "github.com/alecthomas/chroma/v2" // nolint
)
var (
emacsMacros = []string{
"atomic-change-group", "case", "block", "cl-block", "cl-callf", "cl-callf2",
"cl-case", "cl-decf", "cl-declaim", "cl-declare",
"cl-define-compiler-macro", "cl-defmacro", "cl-defstruct",
"cl-defsubst", "cl-deftype", "cl-defun", "cl-destructuring-bind",
"cl-do", "cl-do*", "cl-do-all-symbols", "cl-do-symbols", "cl-dolist",
"cl-dotimes", "cl-ecase", "cl-etypecase", "eval-when", "cl-eval-when", "cl-flet",
"cl-flet*", "cl-function", "cl-incf", "cl-labels", "cl-letf",
"cl-letf*", "cl-load-time-value", "cl-locally", "cl-loop",
"cl-macrolet", "cl-multiple-value-bind", "cl-multiple-value-setq",
"cl-progv", "cl-psetf", "cl-psetq", "cl-pushnew", "cl-remf",
"cl-return", "cl-return-from", "cl-rotatef", "cl-shiftf",
"cl-symbol-macrolet", "cl-tagbody", "cl-the", "cl-typecase",
"combine-after-change-calls", "condition-case-unless-debug", "decf",
"declaim", "declare", "declare-function", "def-edebug-spec",
"defadvice", "defclass", "defcustom", "defface", "defgeneric",
"defgroup", "define-advice", "define-alternatives",
"define-compiler-macro", "define-derived-mode", "define-generic-mode",
"define-global-minor-mode", "define-globalized-minor-mode",
"define-minor-mode", "define-modify-macro",
"define-obsolete-face-alias", "define-obsolete-function-alias",
"define-obsolete-variable-alias", "define-setf-expander",
"define-skeleton", "defmacro", "defmethod", "defsetf", "defstruct",
"defsubst", "deftheme", "deftype", "defun", "defvar-local",
"delay-mode-hooks", "destructuring-bind", "do", "do*",
"do-all-symbols", "do-symbols", "dolist", "dont-compile", "dotimes",
"dotimes-with-progress-reporter", "ecase", "ert-deftest", "etypecase",
"eval-and-compile", "eval-when-compile", "flet", "ignore-errors",
"incf", "labels", "lambda", "letrec", "lexical-let", "lexical-let*",
"loop", "multiple-value-bind", "multiple-value-setq", "noreturn",
"oref", "oref-default", "oset", "oset-default", "pcase",
"pcase-defmacro", "pcase-dolist", "pcase-exhaustive", "pcase-let",
"pcase-let*", "pop", "psetf", "psetq", "push", "pushnew", "remf",
"return", "rotatef", "rx", "save-match-data", "save-selected-window",
"save-window-excursion", "setf", "setq-local", "shiftf",
"track-mouse", "typecase", "unless", "use-package", "when",
"while-no-input", "with-case-table", "with-category-table",
"with-coding-priority", "with-current-buffer", "with-demoted-errors",
"with-eval-after-load", "with-file-modes", "with-local-quit",
"with-output-to-string", "with-output-to-temp-buffer",
"with-parsed-tramp-file-name", "with-selected-frame",
"with-selected-window", "with-silent-modifications", "with-slots",
"with-syntax-table", "with-temp-buffer", "with-temp-file",
"with-temp-message", "with-timeout", "with-tramp-connection-property",
"with-tramp-file-property", "with-tramp-progress-reporter",
"with-wrapper-hook", "load-time-value", "locally", "macrolet", "progv",
"return-from",
}
emacsSpecialForms = []string{
"and", "catch", "cond", "condition-case", "defconst", "defvar",
"function", "if", "interactive", "let", "let*", "or", "prog1",
"prog2", "progn", "quote", "save-current-buffer", "save-excursion",
"save-restriction", "setq", "setq-default", "subr-arity",
"unwind-protect", "while",
}
emacsBuiltinFunction = []string{
"%", "*", "+", "-", "/", "/=", "1+", "1-", "<", "<=", "=", ">", ">=",
"Snarf-documentation", "abort-recursive-edit", "abs",
"accept-process-output", "access-file", "accessible-keymaps", "acos",
"active-minibuffer-window", "add-face-text-property",
"add-name-to-file", "add-text-properties", "all-completions",
"append", "apply", "apropos-internal", "aref", "arrayp", "aset",
"ash", "asin", "assoc", "assoc-string", "assq", "atan", "atom",
"autoload", "autoload-do-load", "backtrace", "backtrace--locals",
"backtrace-debug", "backtrace-eval", "backtrace-frame",
"backward-char", "backward-prefix-chars", "barf-if-buffer-read-only",
"base64-decode-region", "base64-decode-string",
"base64-encode-region", "base64-encode-string", "beginning-of-line",
"bidi-find-overridden-directionality", "bidi-resolved-levels",
"bitmap-spec-p", "bobp", "bolp", "bool-vector",
"bool-vector-count-consecutive", "bool-vector-count-population",
"bool-vector-exclusive-or", "bool-vector-intersection",
"bool-vector-not", "bool-vector-p", "bool-vector-set-difference",
"bool-vector-subsetp", "bool-vector-union", "boundp",
"buffer-base-buffer", "buffer-chars-modified-tick",
"buffer-enable-undo", "buffer-file-name", "buffer-has-markers-at",
"buffer-list", "buffer-live-p", "buffer-local-value",
"buffer-local-variables", "buffer-modified-p", "buffer-modified-tick",
"buffer-name", "buffer-size", "buffer-string", "buffer-substring",
"buffer-substring-no-properties", "buffer-swap-text", "bufferp",
"bury-buffer-internal", "byte-code", "byte-code-function-p",
"byte-to-position", "byte-to-string", "byteorder",
"call-interactively", "call-last-kbd-macro", "call-process",
"call-process-region", "cancel-kbd-macro-events", "capitalize",
"capitalize-region", "capitalize-word", "car", "car-less-than-car",
"car-safe", "case-table-p", "category-docstring",
"category-set-mnemonics", "category-table", "category-table-p",
"ccl-execute", "ccl-execute-on-string", "ccl-program-p", "cdr",
"cdr-safe", "ceiling", "char-after", "char-before",
"char-category-set", "char-charset", "char-equal", "char-or-string-p",
"char-resolve-modifiers", "char-syntax", "char-table-extra-slot",
"char-table-p", "char-table-parent", "char-table-range",
"char-table-subtype", "char-to-string", "char-width", "characterp",
"charset-after", "charset-id-internal", "charset-plist",
"charset-priority-list", "charsetp", "check-coding-system",
"check-coding-systems-region", "clear-buffer-auto-save-failure",
"clear-charset-maps", "clear-face-cache", "clear-font-cache",
"clear-image-cache", "clear-string", "clear-this-command-keys",
"close-font", "clrhash", "coding-system-aliases",
"coding-system-base", "coding-system-eol-type", "coding-system-p",
"coding-system-plist", "coding-system-priority-list",
"coding-system-put", "color-distance", "color-gray-p",
"color-supported-p", "combine-after-change-execute",
"command-error-default-function", "command-remapping", "commandp",
"compare-buffer-substrings", "compare-strings",
"compare-window-configurations", "completing-read",
"compose-region-internal", "compose-string-internal",
"composition-get-gstring", "compute-motion", "concat", "cons",
"consp", "constrain-to-field", "continue-process",
"controlling-tty-p", "coordinates-in-window-p", "copy-alist",
"copy-category-table", "copy-file", "copy-hash-table", "copy-keymap",
"copy-marker", "copy-sequence", "copy-syntax-table", "copysign",
"cos", "current-active-maps", "current-bidi-paragraph-direction",
"current-buffer", "current-case-table", "current-column",
"current-global-map", "current-idle-time", "current-indentation",
"current-input-mode", "current-local-map", "current-message",
"current-minor-mode-maps", "current-time", "current-time-string",
"current-time-zone", "current-window-configuration",
"cygwin-convert-file-name-from-windows",
"cygwin-convert-file-name-to-windows", "daemon-initialized",
"daemonp", "dbus--init-bus", "dbus-get-unique-name",
"dbus-message-internal", "debug-timer-check", "declare-equiv-charset",
"decode-big5-char", "decode-char", "decode-coding-region",
"decode-coding-string", "decode-sjis-char", "decode-time",
"default-boundp", "default-file-modes", "default-printer-name",
"default-toplevel-value", "default-value", "define-category",
"define-charset-alias", "define-charset-internal",
"define-coding-system-alias", "define-coding-system-internal",
"define-fringe-bitmap", "define-hash-table-test", "define-key",
"define-prefix-command", "delete",
"delete-all-overlays", "delete-and-extract-region", "delete-char",
"delete-directory-internal", "delete-field", "delete-file",
"delete-frame", "delete-other-windows-internal", "delete-overlay",
"delete-process", "delete-region", "delete-terminal",
"delete-window-internal", "delq", "describe-buffer-bindings",
"describe-vector", "destroy-fringe-bitmap", "detect-coding-region",
"detect-coding-string", "ding", "directory-file-name",
"directory-files", "directory-files-and-attributes", "discard-input",
"display-supports-face-attributes-p", "do-auto-save", "documentation",
"documentation-property", "downcase", "downcase-region",
"downcase-word", "draw-string", "dump-colors", "dump-emacs",
"dump-face", "dump-frame-glyph-matrix", "dump-glyph-matrix",
"dump-glyph-row", "dump-redisplay-history", "dump-tool-bar-row",
"elt", "emacs-pid", "encode-big5-char", "encode-char",
"encode-coding-region", "encode-coding-string", "encode-sjis-char",
"encode-time", "end-kbd-macro", "end-of-line", "eobp", "eolp", "eq",
"eql", "equal", "equal-including-properties", "erase-buffer",
"error-message-string", "eval", "eval-buffer", "eval-region",
"event-convert-list", "execute-kbd-macro", "exit-recursive-edit",
"exp", "expand-file-name", "expt", "external-debugging-output",
"face-attribute-relative-p", "face-attributes-as-vector", "face-font",
"fboundp", "fceiling", "fetch-bytecode", "ffloor",
"field-beginning", "field-end", "field-string",
"field-string-no-properties", "file-accessible-directory-p",
"file-acl", "file-attributes", "file-attributes-lessp",
"file-directory-p", "file-executable-p", "file-exists-p",
"file-locked-p", "file-modes", "file-name-absolute-p",
"file-name-all-completions", "file-name-as-directory",
"file-name-completion", "file-name-directory",
"file-name-nondirectory", "file-newer-than-file-p", "file-readable-p",
"file-regular-p", "file-selinux-context", "file-symlink-p",
"file-system-info", "file-system-info", "file-writable-p",
"fillarray", "find-charset-region", "find-charset-string",
"find-coding-systems-region-internal", "find-composition-internal",
"find-file-name-handler", "find-font", "find-operation-coding-system",
"float", "float-time", "floatp", "floor", "fmakunbound",
"following-char", "font-at", "font-drive-otf", "font-face-attributes",
"font-family-list", "font-get", "font-get-glyphs",
"font-get-system-font", "font-get-system-normal-font", "font-info",
"font-match-p", "font-otf-alternates", "font-put",
"font-shape-gstring", "font-spec", "font-variation-glyphs",
"font-xlfd-name", "fontp", "fontset-font", "fontset-info",
"fontset-list", "fontset-list-all", "force-mode-line-update",
"force-window-update", "format", "format-mode-line",
"format-network-address", "format-time-string", "forward-char",
"forward-comment", "forward-line", "forward-word",
"frame-border-width", "frame-bottom-divider-width",
"frame-can-run-window-configuration-change-hook", "frame-char-height",
"frame-char-width", "frame-face-alist", "frame-first-window",
"frame-focus", "frame-font-cache", "frame-fringe-width", "frame-list",
"frame-live-p", "frame-or-buffer-changed-p", "frame-parameter",
"frame-parameters", "frame-pixel-height", "frame-pixel-width",
"frame-pointer-visible-p", "frame-right-divider-width",
"frame-root-window", "frame-scroll-bar-height",
"frame-scroll-bar-width", "frame-selected-window", "frame-terminal",
"frame-text-cols", "frame-text-height", "frame-text-lines",
"frame-text-width", "frame-total-cols", "frame-total-lines",
"frame-visible-p", "framep", "frexp", "fringe-bitmaps-at-pos",
"fround", "fset", "ftruncate", "funcall", "funcall-interactively",
"function-equal", "functionp", "gap-position", "gap-size",
"garbage-collect", "gc-status", "generate-new-buffer-name", "get",
"get-buffer", "get-buffer-create", "get-buffer-process",
"get-buffer-window", "get-byte", "get-char-property",
"get-char-property-and-overlay", "get-file-buffer", "get-file-char",
"get-internal-run-time", "get-load-suffixes", "get-pos-property",
"get-process", "get-screen-color", "get-text-property",
"get-unicode-property-internal", "get-unused-category",
"get-unused-iso-final-char", "getenv-internal", "gethash",
"gfile-add-watch", "gfile-rm-watch", "global-key-binding",
"gnutls-available-p", "gnutls-boot", "gnutls-bye", "gnutls-deinit",
"gnutls-error-fatalp", "gnutls-error-string", "gnutls-errorp",
"gnutls-get-initstage", "gnutls-peer-status",
"gnutls-peer-status-warning-describe", "goto-char", "gpm-mouse-start",
"gpm-mouse-stop", "group-gid", "group-real-gid",
"handle-save-session", "handle-switch-frame", "hash-table-count",
"hash-table-p", "hash-table-rehash-size",
"hash-table-rehash-threshold", "hash-table-size", "hash-table-test",
"hash-table-weakness", "iconify-frame", "identity", "image-flush",
"image-mask-p", "image-metadata", "image-size", "imagemagick-types",
"imagep", "indent-to", "indirect-function", "indirect-variable",
"init-image-library", "inotify-add-watch", "inotify-rm-watch",
"input-pending-p", "insert", "insert-and-inherit",
"insert-before-markers", "insert-before-markers-and-inherit",
"insert-buffer-substring", "insert-byte", "insert-char",
"insert-file-contents", "insert-startup-screen", "int86",
"integer-or-marker-p", "integerp", "interactive-form", "intern",
"intern-soft", "internal--track-mouse", "internal-char-font",
"internal-complete-buffer", "internal-copy-lisp-face",
"internal-default-process-filter",
"internal-default-process-sentinel", "internal-describe-syntax-value",
"internal-event-symbol-parse-modifiers",
"internal-face-x-get-resource", "internal-get-lisp-face-attribute",
"internal-lisp-face-attribute-values", "internal-lisp-face-empty-p",
"internal-lisp-face-equal-p", "internal-lisp-face-p",
"internal-make-lisp-face", "internal-make-var-non-special",
"internal-merge-in-global-face",
"internal-set-alternative-font-family-alist",
"internal-set-alternative-font-registry-alist",
"internal-set-font-selection-order",
"internal-set-lisp-face-attribute",
"internal-set-lisp-face-attribute-from-resource",
"internal-show-cursor", "internal-show-cursor-p", "interrupt-process",
"invisible-p", "invocation-directory", "invocation-name", "isnan",
"iso-charset", "key-binding", "key-description",
"keyboard-coding-system", "keymap-parent", "keymap-prompt", "keymapp",
"keywordp", "kill-all-local-variables", "kill-buffer", "kill-emacs",
"kill-local-variable", "kill-process", "last-nonminibuffer-frame",
"lax-plist-get", "lax-plist-put", "ldexp", "length",
"libxml-parse-html-region", "libxml-parse-xml-region",
"line-beginning-position", "line-end-position", "line-pixel-height",
"list", "list-fonts", "list-system-processes", "listp", "load",
"load-average", "local-key-binding", "local-variable-if-set-p",
"local-variable-p", "locale-info", "locate-file-internal",
"lock-buffer", "log", "logand", "logb", "logior", "lognot", "logxor",
"looking-at", "lookup-image", "lookup-image-map", "lookup-key",
"lower-frame", "lsh", "macroexpand", "make-bool-vector",
"make-byte-code", "make-category-set", "make-category-table",
"make-char", "make-char-table", "make-directory-internal",
"make-frame-invisible", "make-frame-visible", "make-hash-table",
"make-indirect-buffer", "make-keymap", "make-list",
"make-local-variable", "make-marker", "make-network-process",
"make-overlay", "make-serial-process", "make-sparse-keymap",
"make-string", "make-symbol", "make-symbolic-link", "make-temp-name",
"make-terminal-frame", "make-variable-buffer-local",
"make-variable-frame-local", "make-vector", "makunbound",
"map-char-table", "map-charset-chars", "map-keymap",
"map-keymap-internal", "mapatoms", "mapc", "mapcar", "mapconcat",
"maphash", "mark-marker", "marker-buffer", "marker-insertion-type",
"marker-position", "markerp", "match-beginning", "match-data",
"match-end", "matching-paren", "max", "max-char", "md5", "member",
"memory-info", "memory-limit", "memory-use-counts", "memq", "memql",
"menu-bar-menu-at-x-y", "menu-or-popup-active-p",
"menu-or-popup-active-p", "merge-face-attribute", "message",
"message-box", "message-or-box", "min",
"minibuffer-completion-contents", "minibuffer-contents",
"minibuffer-contents-no-properties", "minibuffer-depth",
"minibuffer-prompt", "minibuffer-prompt-end",
"minibuffer-selected-window", "minibuffer-window", "minibufferp",
"minor-mode-key-binding", "mod", "modify-category-entry",
"modify-frame-parameters", "modify-syntax-entry",
"mouse-pixel-position", "mouse-position", "move-overlay",
"move-point-visually", "move-to-column", "move-to-window-line",
"msdos-downcase-filename", "msdos-long-file-names", "msdos-memget",
"msdos-memput", "msdos-mouse-disable", "msdos-mouse-enable",
"msdos-mouse-init", "msdos-mouse-p", "msdos-remember-default-colors",
"msdos-set-keyboard", "msdos-set-mouse-buttons",
"multibyte-char-to-unibyte", "multibyte-string-p", "narrow-to-region",
"natnump", "nconc", "network-interface-info",
"network-interface-list", "new-fontset", "newline-cache-check",
"next-char-property-change", "next-frame", "next-overlay-change",
"next-property-change", "next-read-file-uses-dialog-p",
"next-single-char-property-change", "next-single-property-change",
"next-window", "nlistp", "nreverse", "nth", "nthcdr", "null",
"number-or-marker-p", "number-to-string", "numberp",
"open-dribble-file", "open-font", "open-termscript",
"optimize-char-table", "other-buffer", "other-window-for-scrolling",
"overlay-buffer", "overlay-end", "overlay-get", "overlay-lists",
"overlay-properties", "overlay-put", "overlay-recenter",
"overlay-start", "overlayp", "overlays-at", "overlays-in",
"parse-partial-sexp", "play-sound-internal", "plist-get",
"plist-member", "plist-put", "point", "point-marker", "point-max",
"point-max-marker", "point-min", "point-min-marker",
"pos-visible-in-window-p", "position-bytes", "posix-looking-at",
"posix-search-backward", "posix-search-forward", "posix-string-match",
"posn-at-point", "posn-at-x-y", "preceding-char",
"prefix-numeric-value", "previous-char-property-change",
"previous-frame", "previous-overlay-change",
"previous-property-change", "previous-single-char-property-change",
"previous-single-property-change", "previous-window", "prin1",
"prin1-to-string", "princ", "print", "process-attributes",
"process-buffer", "process-coding-system", "process-command",
"process-connection", "process-contact", "process-datagram-address",
"process-exit-status", "process-filter", "process-filter-multibyte-p",
"process-id", "process-inherit-coding-system-flag", "process-list",
"process-mark", "process-name", "process-plist",
"process-query-on-exit-flag", "process-running-child-p",
"process-send-eof", "process-send-region", "process-send-string",
"process-sentinel", "process-status", "process-tty-name",
"process-type", "processp", "profiler-cpu-log",
"profiler-cpu-running-p", "profiler-cpu-start", "profiler-cpu-stop",
"profiler-memory-log", "profiler-memory-running-p",
"profiler-memory-start", "profiler-memory-stop", "propertize",
"purecopy", "put", "put-text-property",
"put-unicode-property-internal", "puthash", "query-font",
"query-fontset", "quit-process", "raise-frame", "random", "rassoc",
"rassq", "re-search-backward", "re-search-forward", "read",
"read-buffer", "read-char", "read-char-exclusive",
"read-coding-system", "read-command", "read-event",
"read-from-minibuffer", "read-from-string", "read-function",
"read-key-sequence", "read-key-sequence-vector",
"read-no-blanks-input", "read-non-nil-coding-system", "read-string",
"read-variable", "recent-auto-save-p", "recent-doskeys",
"recent-keys", "recenter", "recursion-depth", "recursive-edit",
"redirect-debugging-output", "redirect-frame-focus", "redisplay",
"redraw-display", "redraw-frame", "regexp-quote", "region-beginning",
"region-end", "register-ccl-program", "register-code-conversion-map",
"remhash", "remove-list-of-text-properties", "remove-text-properties",
"rename-buffer", "rename-file", "replace-match",
"reset-this-command-lengths", "resize-mini-window-internal",
"restore-buffer-modified-p", "resume-tty", "reverse", "round",
"run-hook-with-args", "run-hook-with-args-until-failure",
"run-hook-with-args-until-success", "run-hook-wrapped", "run-hooks",
"run-window-configuration-change-hook", "run-window-scroll-functions",
"safe-length", "scan-lists", "scan-sexps", "scroll-down",
"scroll-left", "scroll-other-window", "scroll-right", "scroll-up",
"search-backward", "search-forward", "secure-hash", "select-frame",
"select-window", "selected-frame", "selected-window",
"self-insert-command", "send-string-to-terminal", "sequencep",
"serial-process-configure", "set", "set-buffer",
"set-buffer-auto-saved", "set-buffer-major-mode",
"set-buffer-modified-p", "set-buffer-multibyte", "set-case-table",
"set-category-table", "set-char-table-extra-slot",
"set-char-table-parent", "set-char-table-range", "set-charset-plist",
"set-charset-priority", "set-coding-system-priority",
"set-cursor-size", "set-default", "set-default-file-modes",
"set-default-toplevel-value", "set-file-acl", "set-file-modes",
"set-file-selinux-context", "set-file-times", "set-fontset-font",
"set-frame-height", "set-frame-position", "set-frame-selected-window",
"set-frame-size", "set-frame-width", "set-fringe-bitmap-face",
"set-input-interrupt-mode", "set-input-meta-mode", "set-input-mode",
"set-keyboard-coding-system-internal", "set-keymap-parent",
"set-marker", "set-marker-insertion-type", "set-match-data",
"set-message-beep", "set-minibuffer-window",
"set-mouse-pixel-position", "set-mouse-position",
"set-network-process-option", "set-output-flow-control",
"set-process-buffer", "set-process-coding-system",
"set-process-datagram-address", "set-process-filter",
"set-process-filter-multibyte",
"set-process-inherit-coding-system-flag", "set-process-plist",
"set-process-query-on-exit-flag", "set-process-sentinel",
"set-process-window-size", "set-quit-char",
"set-safe-terminal-coding-system-internal", "set-screen-color",
"set-standard-case-table", "set-syntax-table",
"set-terminal-coding-system-internal", "set-terminal-local-value",
"set-terminal-parameter", "set-text-properties", "set-time-zone-rule",
"set-visited-file-modtime", "set-window-buffer",
"set-window-combination-limit", "set-window-configuration",
"set-window-dedicated-p", "set-window-display-table",
"set-window-fringes", "set-window-hscroll", "set-window-margins",
"set-window-new-normal", "set-window-new-pixel",
"set-window-new-total", "set-window-next-buffers",
"set-window-parameter", "set-window-point", "set-window-prev-buffers",
"set-window-redisplay-end-trigger", "set-window-scroll-bars",
"set-window-start", "set-window-vscroll", "setcar", "setcdr",
"setplist", "show-face-resources", "signal", "signal-process", "sin",
"single-key-description", "skip-chars-backward", "skip-chars-forward",
"skip-syntax-backward", "skip-syntax-forward", "sleep-for", "sort",
"sort-charsets", "special-variable-p", "split-char",
"split-window-internal", "sqrt", "standard-case-table",
"standard-category-table", "standard-syntax-table", "start-kbd-macro",
"start-process", "stop-process", "store-kbd-macro-event", "string",
"string-as-multibyte", "string-as-unibyte", "string-bytes",
"string-collate-equalp", "string-collate-lessp", "string-equal",
"string-lessp", "string-make-multibyte", "string-make-unibyte",
"string-match", "string-to-char", "string-to-multibyte",
"string-to-number", "string-to-syntax", "string-to-unibyte",
"string-width", "stringp", "subr-name", "subrp",
"subst-char-in-region", "substitute-command-keys",
"substitute-in-file-name", "substring", "substring-no-properties",
"suspend-emacs", "suspend-tty", "suspicious-object", "sxhash",
"symbol-function", "symbol-name", "symbol-plist", "symbol-value",
"symbolp", "syntax-table", "syntax-table-p", "system-groups",
"system-move-file-to-trash", "system-name", "system-users", "tan",
"terminal-coding-system", "terminal-list", "terminal-live-p",
"terminal-local-value", "terminal-name", "terminal-parameter",
"terminal-parameters", "terpri", "test-completion",
"text-char-description", "text-properties-at", "text-property-any",
"text-property-not-all", "this-command-keys",
"this-command-keys-vector", "this-single-command-keys",
"this-single-command-raw-keys", "time-add", "time-less-p",
"time-subtract", "tool-bar-get-system-style", "tool-bar-height",
"tool-bar-pixel-width", "top-level", "trace-redisplay",
"trace-to-stderr", "translate-region-internal", "transpose-regions",
"truncate", "try-completion", "tty-display-color-cells",
"tty-display-color-p", "tty-no-underline",
"tty-suppress-bold-inverse-default-colors", "tty-top-frame",
"tty-type", "type-of", "undo-boundary", "unencodable-char-position",
"unhandled-file-name-directory", "unibyte-char-to-multibyte",
"unibyte-string", "unicode-property-table-internal", "unify-charset",
"unintern", "unix-sync", "unlock-buffer", "upcase", "upcase-initials",
"upcase-initials-region", "upcase-region", "upcase-word",
"use-global-map", "use-local-map", "user-full-name",
"user-login-name", "user-real-login-name", "user-real-uid",
"user-uid", "variable-binding-locus", "vconcat", "vector",
"vector-or-char-table-p", "vectorp", "verify-visited-file-modtime",
"vertical-motion", "visible-frame-list", "visited-file-modtime",
"w16-get-clipboard-data", "w16-selection-exists-p",
"w16-set-clipboard-data", "w32-battery-status",
"w32-default-color-map", "w32-define-rgb-color",
"w32-display-monitor-attributes-list", "w32-frame-menu-bar-size",
"w32-frame-rect", "w32-get-clipboard-data",
"w32-get-codepage-charset", "w32-get-console-codepage",
"w32-get-console-output-codepage", "w32-get-current-locale-id",
"w32-get-default-locale-id", "w32-get-keyboard-layout",
"w32-get-locale-info", "w32-get-valid-codepages",
"w32-get-valid-keyboard-layouts", "w32-get-valid-locale-ids",
"w32-has-winsock", "w32-long-file-name", "w32-reconstruct-hot-key",
"w32-register-hot-key", "w32-registered-hot-keys",
"w32-selection-exists-p", "w32-send-sys-command",
"w32-set-clipboard-data", "w32-set-console-codepage",
"w32-set-console-output-codepage", "w32-set-current-locale",
"w32-set-keyboard-layout", "w32-set-process-priority",
"w32-shell-execute", "w32-short-file-name", "w32-toggle-lock-key",
"w32-unload-winsock", "w32-unregister-hot-key", "w32-window-exists-p",
"w32notify-add-watch", "w32notify-rm-watch",
"waiting-for-user-input-p", "where-is-internal", "widen",
"widget-apply", "widget-get", "widget-put",
"window-absolute-pixel-edges", "window-at", "window-body-height",
"window-body-width", "window-bottom-divider-width", "window-buffer",
"window-combination-limit", "window-configuration-frame",
"window-configuration-p", "window-dedicated-p",
"window-display-table", "window-edges", "window-end", "window-frame",
"window-fringes", "window-header-line-height", "window-hscroll",
"window-inside-absolute-pixel-edges", "window-inside-edges",
"window-inside-pixel-edges", "window-left-child",
"window-left-column", "window-line-height", "window-list",
"window-list-1", "window-live-p", "window-margins",
"window-minibuffer-p", "window-mode-line-height", "window-new-normal",
"window-new-pixel", "window-new-total", "window-next-buffers",
"window-next-sibling", "window-normal-size", "window-old-point",
"window-parameter", "window-parameters", "window-parent",
"window-pixel-edges", "window-pixel-height", "window-pixel-left",
"window-pixel-top", "window-pixel-width", "window-point",
"window-prev-buffers", "window-prev-sibling",
"window-redisplay-end-trigger", "window-resize-apply",
"window-resize-apply-total", "window-right-divider-width",
"window-scroll-bar-height", "window-scroll-bar-width",
"window-scroll-bars", "window-start", "window-system",
"window-text-height", "window-text-pixel-size", "window-text-width",
"window-top-child", "window-top-line", "window-total-height",
"window-total-width", "window-use-time", "window-valid-p",
"window-vscroll", "windowp", "write-char", "write-region",
"x-backspace-delete-keys-p", "x-change-window-property",
"x-change-window-property", "x-close-connection",
"x-close-connection", "x-create-frame", "x-create-frame",
"x-delete-window-property", "x-delete-window-property",
"x-disown-selection-internal", "x-display-backing-store",
"x-display-backing-store", "x-display-color-cells",
"x-display-color-cells", "x-display-grayscale-p",
"x-display-grayscale-p", "x-display-list", "x-display-list",
"x-display-mm-height", "x-display-mm-height", "x-display-mm-width",
"x-display-mm-width", "x-display-monitor-attributes-list",
"x-display-pixel-height", "x-display-pixel-height",
"x-display-pixel-width", "x-display-pixel-width", "x-display-planes",
"x-display-planes", "x-display-save-under", "x-display-save-under",
"x-display-screens", "x-display-screens", "x-display-visual-class",
"x-display-visual-class", "x-family-fonts", "x-file-dialog",
"x-file-dialog", "x-file-dialog", "x-focus-frame", "x-frame-geometry",
"x-frame-geometry", "x-get-atom-name", "x-get-resource",
"x-get-selection-internal", "x-hide-tip", "x-hide-tip",
"x-list-fonts", "x-load-color-file", "x-menu-bar-open-internal",
"x-menu-bar-open-internal", "x-open-connection", "x-open-connection",
"x-own-selection-internal", "x-parse-geometry", "x-popup-dialog",
"x-popup-menu", "x-register-dnd-atom", "x-select-font",
"x-select-font", "x-selection-exists-p", "x-selection-owner-p",
"x-send-client-message", "x-server-max-request-size",
"x-server-max-request-size", "x-server-vendor", "x-server-vendor",
"x-server-version", "x-server-version", "x-show-tip", "x-show-tip",
"x-synchronize", "x-synchronize", "x-uses-old-gtk-dialog",
"x-window-property", "x-window-property", "x-wm-set-size-hint",
"xw-color-defined-p", "xw-color-defined-p", "xw-color-values",
"xw-color-values", "xw-display-color-p", "xw-display-color-p",
"yes-or-no-p", "zlib-available-p", "zlib-decompress-region",
"forward-point",
}
	// emacsBuiltinFunctionHighlighted lists built-in functions that get
	// Name.Builtin highlighting; it is merged with emacsMacros in the
	// EmacsLisp TypeMapping below.
	emacsBuiltinFunctionHighlighted = []string{
		"defvaralias", "provide", "require",
		"with-no-warnings", "define-widget", "with-electric-help",
		"throw", "defalias", "featurep",
	}
	// emacsLambdaListKeywords lists lambda-list markers (&optional, &rest, …);
	// the EmacsLisp TypeMapping below remaps them to Keyword.Pseudo.
	emacsLambdaListKeywords = []string{
		"&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional",
		"&rest", "&whole",
	}
	// emacsErrorKeywords lists error-signalling forms; the EmacsLisp
	// TypeMapping below remaps them to Name.Exception.
	emacsErrorKeywords = []string{
		"cl-assert", "cl-check-type", "error", "signal",
		"user-error", "warn",
	}
)
// EmacsLisp lexer.
//
// Loads the embedded emacslisp.xml lexer definition and wraps it in a
// TypeRemappingLexer that re-tags generic NameVariable tokens to more
// specific token types using the word lists declared above (built-in
// functions, special forms, error keywords, highlighted builtins merged
// with macros, and lambda-list keywords), then registers the result.
var EmacsLisp = Register(TypeRemappingLexer(MustNewXMLLexer(
	embedded,
	"embedded/emacslisp.xml",
), TypeMapping{
	{NameVariable, NameFunction, emacsBuiltinFunction},
	{NameVariable, NameBuiltin, emacsSpecialForms},
	{NameVariable, NameException, emacsErrorKeywords},
	{NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)},
	{NameVariable, KeywordPseudo, emacsLambdaListKeywords},
}))

154
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,154 @@
<lexer>
<config>
<name>ABAP</name>
<alias>abap</alias>
<filename>*.abap</filename>
<filename>*.ABAP</filename>
<mime_type>text/x-abap</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="common">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="^\*.*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="\&#34;.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="##\w+">
<token type="CommentSpecial"/>
</rule>
</state>
<state name="variable-names">
<rule pattern="&lt;\S+&gt;">
<token type="NameVariable"/>
</rule>
<rule pattern="\w[\w~]*(?:(\[\])|-&gt;\*)?">
<token type="NameVariable"/>
</rule>
</state>
<state name="root">
<rule>
<include state="common"/>
</rule>
<rule pattern="CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)">
<token type="Keyword"/>
</rule>
<rule pattern="(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|TRANSACTION|TRANSFORMATION))\b">
<token type="Keyword"/>
</rule>
<rule pattern="(FORM|PERFORM)(\s+)(\w+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(PERFORM)(\s+)(\()(\w+)(\))">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="NameVariable"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Keyword"/>
</bygroups>
</rule>
<rule pattern="(METHOD)(\s+)([\w~]+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(\s+)([\w\-]+)([=\-]&gt;)([\w\-~]+)">
<bygroups>
<token type="Text"/>
<token type="NameVariable"/>
<token type="Operator"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(?&lt;=(=|-)&gt;)([\w\-~]+)(?=\()">
<token type="NameFunction"/>
</rule>
<rule pattern="(TEXT)(-)(\d{3})">
<bygroups>
<token type="Keyword"/>
<token type="Punctuation"/>
<token type="LiteralNumberInteger"/>
</bygroups>
</rule>
<rule pattern="(TEXT)(-)(\w{3})">
<bygroups>
<token type="Keyword"/>
<token type="Punctuation"/>
<token type="NameVariable"/>
</bygroups>
</rule>
<rule pattern="(ADD-CORRESPONDING|AUTHORITY-CHECK|CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|DELETE-ADJACENT|DIVIDE-CORRESPONDING|EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|INTERFACE-POOL|INVERTED-DATE|LOAD-OF-PROGRAM|LOG-POINT|MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|OUTPUT-LENGTH|PRINT-CONTROL|SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|SYNTAX-CHECK|SYSTEM-EXCEPTIONS|TYPE-POOL|TYPE-POOLS|NO-DISPLAY)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(?&lt;![-\&gt;])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|(GROUP|ORDER) BY|HAVING|SEPARATED BY|GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|PF-STATUS|(PROPERTY|REFERENCE)\s+OF|RUN\s+TIME|TIME\s+(STAMP)?)?|SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|(CLOSE|OPEN)\s+(DATASET|CURSOR)|(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|DATABASE|SHARED\s+(MEMORY|BUFFER))|DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|FREE\s(MEMORY|OBJECT)?|PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|ON\s+(VALUE-REQUEST|HELP-REQUEST))|AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|SCREEN)|COMMENT|FUNCTION\s+KEY|INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|SKIP|ULINE)|LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|TO LIST-PROCESSING|TO 
TRANSACTION)(ENDING|STARTING)\s+AT|FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|(BEGIN|END)\s+OF|DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|COMPARING(\s+ALL\s+FIELDS)?|(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|END-OF-(DEFINITION|PAGE|SELECTION)|WITH\s+FRAME(\s+TITLE)|(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|(RESPECTING|IGNORING)\s+CASE|IN\s+UPDATE\s+TASK|(SOURCE|RESULT)\s+(XML)?|REFERENCE\s+INTO|AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(^|(?&lt;=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|BACK|BLOCK|BREAK-POINT|CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|HIDE|ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|JOIN|KEY|NEXT|MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|NODES|NUMBER|OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|ULINE|UNDER|UNPACK|UPDATE|USING|VALUE|VALUES|VIA|VARYING|VARY|WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(abs|acos|asin|atan|boolc|boolx|bit_set|char_off|charlen|ceil|cmax|cmin|condense|contains|contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|count|count_any_of|count_any_not_of|dbmaxlen|distance|escape|exp|find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|insert|lines|log|log10|match|matches|nmax|nmin|numofchar|repeat|replace|rescale|reverse|round|segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|substring|substring_after|substring_from|substring_before|substring_to|tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|xstrlen)(\()\b">
<bygroups>
<token type="NameBuiltin"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="&amp;[0-9]">
<token type="Name"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="(?&lt;=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b">
<token type="OperatorWord"/>
</rule>
<rule>
<include state="variable-names"/>
</rule>
<rule pattern="[?*&lt;&gt;=\-+&amp;]">
<token type="Operator"/>
</rule>
<rule pattern="&#39;(&#39;&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="`([^`])*`">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="([|}])([^{}|]*?)([|{])">
<bygroups>
<token type="Punctuation"/>
<token type="LiteralStringSingle"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="[/;:()\[\],.]">
<token type="Punctuation"/>
</rule>
<rule pattern="(!)(\w+)">
<bygroups>
<token type="Operator"/>
<token type="Name"/>
</bygroups>
</rule>
</state>
</rules>
</lexer>

66
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,66 @@
<lexer>
<config>
<name>ABNF</name>
<alias>abnf</alias>
<filename>*.abnf</filename>
<mime_type>text/x-abnf</mime_type>
</config>
<rules>
<state name="root">
<rule pattern=";.*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="(%[si])?&#34;[^&#34;]*&#34;">
<token type="Literal"/>
</rule>
<rule pattern="%b[01]+\-[01]+\b">
<token type="Literal"/>
</rule>
<rule pattern="%b[01]+(\.[01]+)*\b">
<token type="Literal"/>
</rule>
<rule pattern="%d[0-9]+\-[0-9]+\b">
<token type="Literal"/>
</rule>
<rule pattern="%d[0-9]+(\.[0-9]+)*\b">
<token type="Literal"/>
</rule>
<rule pattern="%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b">
<token type="Literal"/>
</rule>
<rule pattern="%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b">
<token type="Literal"/>
</rule>
<rule pattern="\b[0-9]+\*[0-9]+">
<token type="Operator"/>
</rule>
<rule pattern="\b[0-9]+\*">
<token type="Operator"/>
</rule>
<rule pattern="\b[0-9]+">
<token type="Operator"/>
</rule>
<rule pattern="\*">
<token type="Operator"/>
</rule>
<rule pattern="(HEXDIG|DQUOTE|DIGIT|VCHAR|OCTET|ALPHA|CHAR|CRLF|HTAB|LWSP|BIT|CTL|WSP|LF|SP|CR)\b">
<token type="Keyword"/>
</rule>
<rule pattern="[a-zA-Z][a-zA-Z0-9-]+\b">
<token type="NameClass"/>
</rule>
<rule pattern="(=/|=|/)">
<token type="Operator"/>
</rule>
<rule pattern="[\[\]()]">
<token type="Punctuation"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern=".">
<token type="Text"/>
</rule>
</state>
</rules>
</lexer>

68
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,68 @@
<lexer>
<config>
<name>ActionScript</name>
<alias>as</alias>
<alias>actionscript</alias>
<filename>*.as</filename>
<mime_type>application/x-actionscript</mime_type>
<mime_type>text/x-actionscript</mime_type>
<mime_type>text/actionscript</mime_type>
<dot_all>true</dot_all>
<not_multiline>true</not_multiline>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/(\\\\|\\/|[^/\n])*/[gim]*">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="[~^*!%&amp;&lt;&gt;|+=:;,/?\\-]+">
<token type="Operator"/>
</rule>
<rule pattern="[{}\[\]();.]+">
<token type="Punctuation"/>
</rule>
<rule pattern="(instanceof|arguments|continue|default|typeof|switch|return|catch|break|while|throw|each|this|with|else|case|var|new|for|try|if|do|in)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(implements|protected|namespace|interface|intrinsic|override|function|internal|private|package|extends|dynamic|import|native|return|public|static|class|const|super|final|get|set)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(IDynamicPropertyOutputIDynamicPropertyWriter|DisplacmentMapFilterMode|AccessibilityProperties|ContextMenuBuiltInItems|SharedObjectFlushStatus|DisplayObjectContainer|IllegalOperationError|DisplacmentMapFilter|InterpolationMethod|URLLoaderDataFormat|PrintJobOrientation|ActionScriptVersion|BitmapFilterQuality|GradientBevelFilter|GradientGlowFilter|DeleteObjectSample|StackOverflowError|SoundLoaderContext|ScriptTimeoutError|SecurityErrorEvent|InteractiveObject|StageDisplayState|FileReferenceList|TextFieldAutoSize|ApplicationDomain|BitmapDataChannel|ColorMatrixFilter|ExternalInterface|IMEConversionMode|DropShadowFilter|URLRequestHeader|ContextMenuEvent|ConvultionFilter|URLRequestMethod|BitmapFilterType|IEventDispatcher|ContextMenuItem|LocalConnection|InvalidSWFError|AsyncErrorEvent|MovieClipLoader|IBitmapDrawable|PrintJobOptions|EventDispatcher|NewObjectSample|HTTPStatusEvent|TextFormatAlign|IExternalizable|FullScreenEvent|DefinitionError|TextLineMetrics|NetStatusEvent|ColorTransform|ObjectEncoding|SecurityDomain|StageScaleMode|FocusDirection|ReferenceError|SoundTransform|KeyboardEvent|DisplayObject|PixelSnapping|LoaderContext|NetConnection|SecurityPanel|SecurityError|FileReference|AsBroadcaster|LineScaleMode|AntiAliasType|Accessibility|TextFieldType|URLVariabeles|ActivityEvent|ProgressEvent|TextColorType|StageQuality|TextSnapshot|Capabilities|BitmapFilter|SpreadMethod|GradientType|TextRenderer|SoundChannel|SharedObject|IOErrorEvent|SimpleButton|ContextMenu|InvokeEvent|CSMSettings|SyntaxError|StatusEvent|KeyLocation|IDataOutput|VerifyError|XMLDocument|XMLNodeType|MemoryError|GridFitType|BevelFilter|ErrorEvent|FrameLabel|GlowFilter|LoaderInfo|Microphone|MorphShape|BlurFilter|MouseEvent|FocusEvent|SoundMixer|FileFilter|TimerEvent|JointStyle|EventPhase|StageAlign|Dictionary|URLRequest|StyleSheet|SWFVersion|IDataInput|StaticText|RangeError|BitmapData|TextFormat|StackFrame|Namespace|SyncEvent|Rectangle|URLLoader|TypeError|Responder|NetStream|BlendMode|CapsStyle
|DataEvent|ByteArray|MovieClip|Transform|TextField|Selection|AVM1Movie|XMLSocket|URLStream|FontStyle|EvalError|FontType|LoadVars|Graphics|Security|IMEEvent|URIError|Keyboard|Function|EOFError|PrintJob|IOError|XMLList|Boolean|ID3Info|XMLNode|Bitmap|String|RegExp|Sample|Object|Sprite|System|Endian|Matrix|Camera|Locale|Number|Loader|Socket|QName|Class|Timer|Sound|Shape|XMLUI|Mouse|Scene|Stage|Color|Point|Video|Error|Event|Proxy|Array|Date|uint|Math|Font|int|Key|IME|XML)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(decodeURIComponent|updateAfterEvent|clearInterval|setInterval|getVersion|parseFloat|fscommand|isXMLName|encodeURI|decodeURI|getTimer|unescape|isFinite|parseInt|getURL|escape|trace|isNaN|eval)\b">
<token type="NameFunction"/>
</rule>
<rule pattern="[$a-zA-Z_]\w*">
<token type="NameOther"/>
</rule>
<rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0x[0-9a-f]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
</state>
</rules>
</lexer>

163
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,163 @@
<lexer>
<config>
<name>ActionScript 3</name>
<alias>as3</alias>
<alias>actionscript3</alias>
<filename>*.as</filename>
<mime_type>application/x-actionscript3</mime_type>
<mime_type>text/x-actionscript3</mime_type>
<mime_type>text/actionscript3</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="funcparams">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="(\s*)(\.\.\.)?([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.&lt;\w+&gt;)?|\*)(\s*)">
<bygroups>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Name"/>
<token type="Text"/>
<token type="Operator"/>
<token type="Text"/>
<token type="KeywordType"/>
<token type="Text"/>
</bygroups>
<push state="defval"/>
</rule>
<rule pattern="\)">
<token type="Operator"/>
<push state="type"/>
</rule>
</state>
<state name="type">
<rule pattern="(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.&lt;\w+&gt;)?|\*)">
<bygroups>
<token type="Text"/>
<token type="Operator"/>
<token type="Text"/>
<token type="KeywordType"/>
</bygroups>
<pop depth="2"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
<pop depth="2"/>
</rule>
<rule>
<pop depth="2"/>
</rule>
</state>
<state name="defval">
<rule pattern="(=)(\s*)([^(),]+)(\s*)(,?)">
<bygroups>
<token type="Operator"/>
<token type="Text"/>
<usingself state="root"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern=",">
<token type="Operator"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="(function\s+)([$a-zA-Z_]\w*)(\s*)(\()">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
<push state="funcparams"/>
</rule>
<rule pattern="(var|const)(\s+)([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.&lt;\w+&gt;)?)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
<token type="Name"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="KeywordType"/>
</bygroups>
</rule>
<rule pattern="(import|package)(\s+)((?:[$a-zA-Z_]\w*|\.)+)(\s*)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameNamespace"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="(new)(\s+)([$a-zA-Z_]\w*(?:\.&lt;\w+&gt;)?)(\s*)(\()">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="KeywordType"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/(\\\\|\\/|[^\n])*/[gisx]*">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="(\.)([$a-zA-Z_]\w*)">
<bygroups>
<token type="Operator"/>
<token type="NameAttribute"/>
</bygroups>
</rule>
<rule pattern="(case|default|for|each|in|while|do|break|return|continue|if|else|throw|try|catch|with|new|typeof|arguments|instanceof|this|switch|import|include|as|is)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(class|public|final|internal|native|override|private|protected|static|import|extends|implements|interface|intrinsic|return|super|dynamic|function|const|get|namespace|package|set)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|unescape)\b">
<token type="NameFunction"/>
</rule>
<rule pattern="[$a-zA-Z_]\w*">
<token type="Name"/>
</rule>
<rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0x[0-9a-f]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="[~^*!%&amp;&lt;&gt;|+=:;,/?\\{}\[\]().-]+">
<token type="Operator"/>
</rule>
</state>
</rules>
</lexer>

321
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,321 @@
<lexer>
<config>
<name>Ada</name>
<alias>ada</alias>
<alias>ada95</alias>
<alias>ada2005</alias>
<filename>*.adb</filename>
<filename>*.ads</filename>
<filename>*.ada</filename>
<mime_type>text/x-ada</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="end">
<rule pattern="(if|case|record|loop|select)">
<token type="KeywordReserved"/>
</rule>
<rule pattern="&#34;[^&#34;]+&#34;|[\w.]+">
<token type="NameFunction"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="array_def">
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="(\w+)(\s+)(range)">
<bygroups>
<token type="KeywordType"/>
<token type="Text"/>
<token type="KeywordReserved"/>
</bygroups>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="package_instantiation">
<rule pattern="(&#34;[^&#34;]+&#34;|\w+)(\s+)(=&gt;)">
<bygroups>
<token type="NameVariable"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="[\w.\&#39;&#34;]">
<token type="Text"/>
</rule>
<rule pattern="\)">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="subprogram">
<rule pattern="\(">
<token type="Punctuation"/>
<push state="#pop" state="formal_part"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="is\b">
<token type="KeywordReserved"/>
<pop depth="1"/>
</rule>
<rule pattern="&#34;[^&#34;]+&#34;|\w+">
<token type="NameFunction"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="type_def">
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="\(">
<token type="Punctuation"/>
<push state="formal_part"/>
</rule>
<rule pattern="with|and|use">
<token type="KeywordReserved"/>
</rule>
<rule pattern="array\b">
<token type="KeywordReserved"/>
<push state="#pop" state="array_def"/>
</rule>
<rule pattern="record\b">
<token type="KeywordReserved"/>
<push state="record_def"/>
</rule>
<rule pattern="(null record)(;)">
<bygroups>
<token type="KeywordReserved"/>
<token type="Punctuation"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="import">
<rule pattern="[\w.]+">
<token type="NameNamespace"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="formal_part">
<rule pattern="\)">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="\w+">
<token type="NameVariable"/>
</rule>
<rule pattern=",|:[^=]">
<token type="Punctuation"/>
</rule>
<rule pattern="(in|not|null|out|access)\b">
<token type="KeywordReserved"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="package">
<rule pattern="body">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="is\s+new|renames">
<token type="KeywordReserved"/>
</rule>
<rule pattern="is">
<token type="KeywordReserved"/>
<pop depth="1"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="\(">
<token type="Punctuation"/>
<push state="package_instantiation"/>
</rule>
<rule pattern="([\w.]+)">
<token type="NameClass"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="attribute">
<rule pattern="(&#39;)(\w+)">
<bygroups>
<token type="Punctuation"/>
<token type="NameAttribute"/>
</bygroups>
</rule>
</state>
<state name="record_def">
<rule pattern="end record">
<token type="KeywordReserved"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="root">
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="--.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="function|procedure|entry">
<token type="KeywordDeclaration"/>
<push state="subprogram"/>
</rule>
<rule pattern="(subtype|type)(\s+)(\w+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
<token type="KeywordType"/>
</bygroups>
<push state="type_def"/>
</rule>
<rule pattern="task|protected">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(subtype)(\s+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="(end)(\s+)">
<bygroups>
<token type="KeywordReserved"/>
<token type="Text"/>
</bygroups>
<push state="end"/>
</rule>
<rule pattern="(pragma)(\s+)(\w+)">
<bygroups>
<token type="KeywordReserved"/>
<token type="Text"/>
<token type="CommentPreproc"/>
</bygroups>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(Short_Short_Integer|Short_Short_Float|Long_Long_Integer|Long_Long_Float|Wide_Character|Reference_Type|Short_Integer|Long_Integer|Wide_String|Short_Float|Controlled|Long_Float|Character|Generator|File_Type|File_Mode|Positive|Duration|Boolean|Natural|Integer|Address|Cursor|String|Count|Float|Byte)\b">
<token type="KeywordType"/>
</rule>
      <rule pattern="(and(\s+then)?|in|mod|not|or(\s+else)?|rem)\b">
<token type="OperatorWord"/>
</rule>
<rule pattern="generic|private">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="package">
<token type="KeywordDeclaration"/>
<push state="package"/>
</rule>
<rule pattern="array\b">
<token type="KeywordReserved"/>
<push state="array_def"/>
</rule>
<rule pattern="(with|use)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="import"/>
</rule>
<rule pattern="(\w+)(\s*)(:)(\s*)(constant)">
<bygroups>
<token type="NameConstant"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="KeywordReserved"/>
</bygroups>
</rule>
<rule pattern="&lt;&lt;\w+&gt;&gt;">
<token type="NameLabel"/>
</rule>
<rule pattern="(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)">
<bygroups>
<token type="NameLabel"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="KeywordReserved"/>
</bygroups>
</rule>
<rule pattern="\b(synchronized|overriding|terminate|interface|exception|protected|separate|constant|abstract|renames|reverse|subtype|aliased|declare|requeue|limited|return|tagged|access|record|select|accept|digits|others|pragma|entry|elsif|delta|delay|array|until|range|raise|while|begin|abort|else|loop|when|type|null|then|body|task|goto|case|exit|end|for|abs|xor|all|new|out|is|of|if|or|do|at)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="&#34;[^&#34;]*&#34;">
<token type="LiteralString"/>
</rule>
<rule>
<include state="attribute"/>
</rule>
<rule>
<include state="numbers"/>
</rule>
<rule pattern="&#39;[^&#39;]&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="(\w+)(\s*|[(,])">
<bygroups>
<token type="Name"/>
<usingself state="root"/>
</bygroups>
</rule>
<rule pattern="(&lt;&gt;|=&gt;|:=|[()|:;,.&#39;])">
<token type="Punctuation"/>
</rule>
<rule pattern="[*&lt;&gt;+=/&amp;-]">
<token type="Operator"/>
</rule>
<rule pattern="\n+">
<token type="Text"/>
</rule>
</state>
<state name="numbers">
<rule pattern="[0-9_]+#[0-9a-f]+#">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-9_]+\.[0-9_]*">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9_]+">
<token type="LiteralNumberInteger"/>
</rule>
</state>
</rules>
</lexer>

75
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,75 @@
<lexer>
<config>
<name>AL</name>
<alias>al</alias>
<filename>*.al</filename>
<filename>*.dal</filename>
<mime_type>text/x-al</mime_type>
<case_insensitive>true</case_insensitive>
<dot_all>true</dot_all>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="(?s)\/\*.*?\\*\*\/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="(?s)//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="\&#34;([^\&#34;])*\&#34;">
<token type="Text"/>
</rule>
<rule pattern="&#39;([^&#39;])*&#39;">
<token type="LiteralString"/>
</rule>
<rule pattern="\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b">
<token type="OperatorWord"/>
</rule>
<rule pattern="\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|INTERFACE|PERMISSIONSET|PERMISSIONSETEXTENSION|ENTITLEMENT))\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b([&lt;&gt;]=|&lt;&gt;|&lt;|&gt;)\b?">
<token type="Operator"/>
</rule>
<rule pattern="\b(\-|\+|\/|\*)\b">
<token type="Operator"/>
</rule>
<rule pattern="\s*(\:=|\+=|-=|\/=|\*=)\s*?">
<token type="Operator"/>
</rule>
<rule pattern="\b(?i:(ADD|ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b">
<token type="Keyword"/>
</rule>
<rule pattern="\s*[(\.\.)&amp;\|]\s*">
<token type="Operator"/>
</rule>
<rule pattern="\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b">
<token type="LiteralNumber"/>
</rule>
<rule pattern="[;:,]">
<token type="Punctuation"/>
</rule>
<rule pattern="#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n">
<token type="CommentPreproc"/>
</rule>
<rule pattern="\w+">
<token type="Text"/>
</rule>
<rule pattern=".">
<token type="Text"/>
</rule>
</state>
</rules>
</lexer>

108
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,108 @@
<lexer>
<config>
<name>Angular2</name>
<alias>ng2</alias>
</config>
<rules>
<state name="attr">
<rule pattern="&#34;.*?&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="&#39;.*?&#39;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="[^\s&gt;]+">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule pattern="[^{([*#]+">
<token type="Other"/>
</rule>
<rule pattern="(\{\{)(\s*)">
<bygroups>
<token type="CommentPreproc"/>
<token type="Text"/>
</bygroups>
<push state="ngExpression"/>
</rule>
<rule pattern="([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)">
<bygroups>
<token type="Punctuation"/>
<token type="NameAttribute"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="Operator"/>
<token type="Text"/>
</bygroups>
<push state="attr"/>
</rule>
<rule pattern="([([]+)([\w:.-]+)([\])]+)(\s*)">
<bygroups>
<token type="Punctuation"/>
<token type="NameAttribute"/>
<token type="Punctuation"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="([*#])([\w:.-]+)(\s*)(=)(\s*)">
<bygroups>
<token type="Punctuation"/>
<token type="NameAttribute"/>
<token type="Punctuation"/>
<token type="Operator"/>
</bygroups>
<push state="attr"/>
</rule>
<rule pattern="([*#])([\w:.-]+)(\s*)">
<bygroups>
<token type="Punctuation"/>
<token type="NameAttribute"/>
<token type="Punctuation"/>
</bygroups>
</rule>
</state>
<state name="ngExpression">
<rule pattern="\s+(\|\s+)?">
<token type="Text"/>
</rule>
<rule pattern="\}\}">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern=":?(true|false)">
<token type="LiteralStringBoolean"/>
</rule>
<rule pattern=":?&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern=":?&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?">
<token type="LiteralNumber"/>
</rule>
<rule pattern="[a-zA-Z][\w-]*(\(.*\))?">
<token type="NameVariable"/>
</rule>
<rule pattern="\.[\w-]+(\(.*\))?">
<token type="NameVariable"/>
</rule>
<rule pattern="(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)">
<bygroups>
<token type="Operator"/>
<token type="Text"/>
<token type="LiteralString"/>
<token type="Text"/>
<token type="Operator"/>
<token type="Text"/>
<token type="LiteralString"/>
<token type="Text"/>
</bygroups>
</rule>
</state>
</rules>
</lexer>

317
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,317 @@
<lexer>
<config>
<name>ANTLR</name>
<alias>antlr</alias>
</config>
<rules>
<state name="nested-arg-action">
<rule pattern="([^$\[\]\&#39;&#34;/]+|&#34;(\\\\|\\&#34;|[^&#34;])*&#34;|&#39;(\\\\|\\&#39;|[^&#39;])*&#39;|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|/)+">
<token type="Other"/>
</rule>
<rule pattern="\[">
<token type="Punctuation"/>
<push/>
</rule>
<rule pattern="\]">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="(\$[a-zA-Z]+)(\.?)(text|value)?">
<bygroups>
<token type="NameVariable"/>
<token type="Punctuation"/>
<token type="NameProperty"/>
</bygroups>
</rule>
<rule pattern="(\\\\|\\\]|\\\[|[^\[\]])+">
<token type="Other"/>
</rule>
</state>
<state name="exception">
<rule pattern="\n">
<token type="TextWhitespace"/>
<pop depth="1"/>
</rule>
<rule pattern="\s">
<token type="TextWhitespace"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="\[">
<token type="Punctuation"/>
<push state="nested-arg-action"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
<push state="action"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="\s+">
<token type="TextWhitespace"/>
</rule>
</state>
<state name="root">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="(lexer|parser|tree)?(\s*)(grammar\b)(\s*)([A-Za-z]\w*)(;)">
<bygroups>
<token type="Keyword"/>
<token type="TextWhitespace"/>
<token type="Keyword"/>
<token type="TextWhitespace"/>
<token type="NameClass"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="options\b">
<token type="Keyword"/>
<push state="options"/>
</rule>
<rule pattern="tokens\b">
<token type="Keyword"/>
<push state="tokens"/>
</rule>
<rule pattern="(scope)(\s*)([A-Za-z]\w*)(\s*)(\{)">
<bygroups>
<token type="Keyword"/>
<token type="TextWhitespace"/>
<token type="NameVariable"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
</bygroups>
<push state="action"/>
</rule>
<rule pattern="(catch|finally)\b">
<token type="Keyword"/>
<push state="exception"/>
</rule>
<rule pattern="(@[A-Za-z]\w*)(\s*)(::)?(\s*)([A-Za-z]\w*)(\s*)(\{)">
<bygroups>
<token type="NameLabel"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
<token type="TextWhitespace"/>
<token type="NameLabel"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
</bygroups>
<push state="action"/>
</rule>
<rule pattern="((?:protected|private|public|fragment)\b)?(\s*)([A-Za-z]\w*)(!)?">
<bygroups>
<token type="Keyword"/>
<token type="TextWhitespace"/>
<token type="NameLabel"/>
<token type="Punctuation"/>
</bygroups>
<push state="rule-alts" state="rule-prelims"/>
</rule>
</state>
<state name="tokens">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
</rule>
<rule pattern="([A-Z]\w*)(\s*)(=)?(\s*)(\&#39;(?:\\\\|\\\&#39;|[^\&#39;]*)\&#39;)?(\s*)(;)">
<bygroups>
<token type="NameLabel"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
<token type="TextWhitespace"/>
<token type="LiteralString"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="options">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
</rule>
<rule pattern="([A-Za-z]\w*)(\s*)(=)(\s*)([A-Za-z]\w*|\&#39;(?:\\\\|\\\&#39;|[^\&#39;]*)\&#39;|[0-9]+|\*)(\s*)(;)">
<bygroups>
<token type="NameVariable"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
<token type="TextWhitespace"/>
<token type="Text"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="rule-alts">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="options\b">
<token type="Keyword"/>
<push state="options"/>
</rule>
<rule pattern=":">
<token type="Punctuation"/>
</rule>
<rule pattern="&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&lt;&lt;([^&gt;]|&gt;[^&gt;])&gt;&gt;">
<token type="LiteralString"/>
</rule>
<rule pattern="\$?[A-Z_]\w*">
<token type="NameConstant"/>
</rule>
<rule pattern="\$?[a-z_]\w*">
<token type="NameVariable"/>
</rule>
<rule pattern="(\+|\||-&gt;|=&gt;|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)">
<token type="Operator"/>
</rule>
<rule pattern=",">
<token type="Punctuation"/>
</rule>
<rule pattern="\[">
<token type="Punctuation"/>
<push state="nested-arg-action"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
<push state="action"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="rule-prelims">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="returns\b">
<token type="Keyword"/>
</rule>
<rule pattern="\[">
<token type="Punctuation"/>
<push state="nested-arg-action"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
<push state="action"/>
</rule>
<rule pattern="(throws)(\s+)([A-Za-z]\w*)">
<bygroups>
<token type="Keyword"/>
<token type="TextWhitespace"/>
<token type="NameLabel"/>
</bygroups>
</rule>
<rule pattern="(,)(\s*)([A-Za-z]\w*)">
<bygroups>
<token type="Punctuation"/>
<token type="TextWhitespace"/>
<token type="NameLabel"/>
</bygroups>
</rule>
<rule pattern="options\b">
<token type="Keyword"/>
<push state="options"/>
</rule>
<rule pattern="(scope)(\s+)(\{)">
<bygroups>
<token type="Keyword"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
</bygroups>
<push state="action"/>
</rule>
<rule pattern="(scope)(\s+)([A-Za-z]\w*)(\s*)(;)">
<bygroups>
<token type="Keyword"/>
<token type="TextWhitespace"/>
<token type="NameLabel"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(@[A-Za-z]\w*)(\s*)(\{)">
<bygroups>
<token type="NameLabel"/>
<token type="TextWhitespace"/>
<token type="Punctuation"/>
</bygroups>
<push state="action"/>
</rule>
<rule pattern=":">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="action">
<rule pattern="([^${}\&#39;&#34;/\\]+|&#34;(\\\\|\\&#34;|[^&#34;])*&#34;|&#39;(\\\\|\\&#39;|[^&#39;])*&#39;|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|\\(?!%)|/)+">
<token type="Other"/>
</rule>
<rule pattern="(\\)(%)">
<bygroups>
<token type="Punctuation"/>
<token type="Other"/>
</bygroups>
</rule>
<rule pattern="(\$[a-zA-Z]+)(\.?)(text|value)?">
<bygroups>
<token type="NameVariable"/>
<token type="Punctuation"/>
<token type="NameProperty"/>
</bygroups>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
<push/>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="comments">
<rule pattern="//.*$">
<token type="Comment"/>
</rule>
<rule pattern="/\*(.|\n)*?\*/">
<token type="Comment"/>
</rule>
</state>
</rules>
</lexer>

74
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,74 @@
<lexer>
<config>
<name>ApacheConf</name>
<alias>apacheconf</alias>
<alias>aconf</alias>
<alias>apache</alias>
<filename>.htaccess</filename>
<filename>apache.conf</filename>
<filename>apache2.conf</filename>
<mime_type>text/x-apacheconf</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="(#.*?)$">
<token type="Comment"/>
</rule>
<rule pattern="(&lt;[^\s&gt;]+)(?:(\s+)(.*?))?(&gt;)">
<bygroups>
<token type="NameTag"/>
<token type="Text"/>
<token type="LiteralString"/>
<token type="NameTag"/>
</bygroups>
</rule>
<rule pattern="([a-z]\w*)(\s+)">
<bygroups>
<token type="NameBuiltin"/>
<token type="Text"/>
</bygroups>
<push state="value"/>
</rule>
<rule pattern="\.+">
<token type="Text"/>
</rule>
</state>
<state name="value">
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="$">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="\\">
<token type="Text"/>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="\d+\.\d+\.\d+\.\d+(?:/\d+)?">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumber"/>
</rule>
<rule pattern="/([a-z0-9][\w./-]+)">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="(on|off|none|any|all|double|email|dns|min|minimal|os|productonly|full|emerg|alert|crit|error|warn|notice|info|debug|registry|script|inetd|standalone|user|group)\b">
<token type="Keyword"/>
</rule>
<rule pattern="&#34;([^&#34;\\]*(?:\\.[^&#34;\\]*)*)&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="[^\s&#34;\\]+">
<token type="Text"/>
</rule>
</state>
</rules>
</lexer>

59
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,59 @@
<lexer>
<config>
<name>APL</name>
<alias>apl</alias>
<filename>*.apl</filename>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="[⍝#].*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="\&#39;((\&#39;\&#39;)|[^\&#39;])*\&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#34;((&#34;&#34;)|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="[⋄◇()]">
<token type="Punctuation"/>
</rule>
<rule pattern="[\[\];]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*">
<token type="NameFunction"/>
</rule>
<rule pattern="[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*">
<token type="NameVariable"/>
</rule>
<rule pattern="¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?">
<token type="LiteralNumber"/>
</rule>
<rule pattern="[\.\\/⌿⍀¨⍣⍨⍠⍤∘⍥@⌺⌶⍢]">
<token type="NameAttribute"/>
</rule>
<rule pattern="[+\-×÷⌈⌊∣|?*⍟○!⌹&lt;≤=&gt;≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⍸]">
<token type="Operator"/>
</rule>
<rule pattern="⍬">
<token type="NameConstant"/>
</rule>
<rule pattern="[⎕⍞]">
<token type="NameVariableGlobal"/>
</rule>
<rule pattern="[←→]">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="[⍺⍵⍶⍹∇:]">
<token type="NameBuiltinPseudo"/>
</rule>
<rule pattern="[{}]">
<token type="KeywordType"/>
</rule>
</state>
</rules>
</lexer>

130
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,130 @@
<lexer>
<config>
<name>AppleScript</name>
<alias>applescript</alias>
<filename>*.applescript</filename>
<dot_all>true</dot_all>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="¬\n">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="&#39;s\s+">
<token type="Text"/>
</rule>
<rule pattern="(--|#).*?$">
<token type="Comment"/>
</rule>
<rule pattern="\(\*">
<token type="CommentMultiline"/>
<push state="comment"/>
</rule>
<rule pattern="[(){}!,.:]">
<token type="Punctuation"/>
</rule>
<rule pattern="(«)([^»]+)(»)">
<bygroups>
<token type="Text"/>
<token type="NameBuiltin"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="\b((?:considering|ignoring)\s*)(application responses|case|diacriticals|hyphens|numeric strings|punctuation|white space)">
<bygroups>
<token type="Keyword"/>
<token type="NameBuiltin"/>
</bygroups>
</rule>
<rule pattern="(-|\*|\+|&amp;|≠|&gt;=?|&lt;=?|=|≥|≤|/|÷|\^)">
<token type="Operator"/>
</rule>
<rule pattern="\b(and|or|is equal|equals|(is )?equal to|is not|isn&#39;t|isn&#39;t equal( to)?|is not equal( to)?|doesn&#39;t equal|does not equal|(is )?greater than|comes after|is not less than or equal( to)?|isn&#39;t less than or equal( to)?|(is )?less than|comes before|is not greater than or equal( to)?|isn&#39;t greater than or equal( to)?|(is )?greater than or equal( to)?|is not less than|isn&#39;t less than|does not come before|doesn&#39;t come before|(is )?less than or equal( to)?|is not greater than|isn&#39;t greater than|does not come after|doesn&#39;t come after|starts? with|begins? with|ends? with|contains?|does not contain|doesn&#39;t contain|is in|is contained by|is not in|is not contained by|isn&#39;t contained by|div|mod|not|(a )?(ref( to)?|reference to)|is|does)\b">
<token type="OperatorWord"/>
</rule>
<rule pattern="^(\s*(?:on|end)\s+)(zoomed|write to file|will zoom|will show|will select tab view item|will resize( sub views)?|will resign active|will quit|will pop up|will open|will move|will miniaturize|will hide|will finish launching|will display outline cell|will display item cell|will display cell|will display browser cell|will dismiss|will close|will become active|was miniaturized|was hidden|update toolbar item|update parameters|update menu item|shown|should zoom|should selection change|should select tab view item|should select row|should select item|should select column|should quit( after last window closed)?|should open( untitled)?|should expand item|should end editing|should collapse item|should close|should begin editing|selection changing|selection changed|selected tab view item|scroll wheel|rows changed|right mouse up|right mouse dragged|right mouse down|resized( sub views)?|resigned main|resigned key|resigned active|read from file|prepare table drop|prepare table drag|prepare outline drop|prepare outline drag|prepare drop|plugin loaded|parameters updated|panel ended|opened|open untitled|number of rows|number of items|number of browser rows|moved|mouse up|mouse moved|mouse exited|mouse entered|mouse dragged|mouse down|miniaturized|load data representation|launched|keyboard up|keyboard down|items changed|item value changed|item value|item expandable|idle|exposed|end editing|drop|drag( (entered|exited|updated))?|double clicked|document nib name|dialog ended|deminiaturized|data representation|conclude drop|column resized|column moved|column clicked|closed|clicked toolbar item|clicked|choose menu item|child of item|changed|change item value|change cell value|cell value changed|cell value|bounds changed|begin editing|became main|became key|awake from nib|alert ended|activated|action|accept table drop|accept outline drop)">
<bygroups>
<token type="Keyword"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="^(\s*)(in|on|script|to)(\s+)">
<bygroups>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="\b(as )(alias |application |boolean |class |constant |date |file |integer |list |number |POSIX file |real |record |reference |RGB color |script |text |unit types|(?:Unicode )?text|string)\b">
<bygroups>
<token type="Keyword"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="\b(AppleScript|current application|false|linefeed|missing value|pi|quote|result|return|space|tab|text item delimiters|true|version)\b">
<token type="NameConstant"/>
</rule>
<rule pattern="\b(ASCII (character|number)|activate|beep|choose URL|choose application|choose color|choose file( name)?|choose folder|choose from list|choose remote application|clipboard info|close( access)?|copy|count|current date|delay|delete|display (alert|dialog)|do shell script|duplicate|exists|get eof|get volume settings|info for|launch|list (disks|folder)|load script|log|make|mount volume|new|offset|open( (for access|location))?|path to|print|quit|random number|read|round|run( script)?|say|scripting components|set (eof|the clipboard to|volume)|store script|summarize|system attribute|system info|the clipboard|time to GMT|write|quoted form)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\b(considering|else|error|exit|from|if|ignoring|in|repeat|tell|then|times|to|try|until|using terms from|while|with|with timeout( of)?|with transaction|by|continue|end|its?|me|my|return|of|as)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(global|local|prop(erty)?|set|get)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(but|put|returning|the)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\b(attachment|attribute run|character|day|month|paragraph|word|year)s?\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\b(about|above|against|apart from|around|aside from|at|below|beneath|beside|between|for|given|instead of|on|onto|out of|over|since)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\b(accepts arrow key|action method|active|alignment|allowed identifiers|allows branch selection|allows column reordering|allows column resizing|allows column selection|allows customization|allows editing text attributes|allows empty selection|allows mixed state|allows multiple selection|allows reordering|allows undo|alpha( value)?|alternate image|alternate increment value|alternate title|animation delay|associated file name|associated object|auto completes|auto display|auto enables items|auto repeat|auto resizes( outline column)?|auto save expanded items|auto save name|auto save table columns|auto saves configuration|auto scroll|auto sizes all columns to fit|auto sizes cells|background color|bezel state|bezel style|bezeled|border rect|border type|bordered|bounds( rotation)?|box type|button returned|button type|can choose directories|can choose files|can draw|can hide|cell( (background color|size|type))?|characters|class|click count|clicked( data)? column|clicked data item|clicked( data)? row|closeable|collating|color( (mode|panel))|command key down|configuration|content(s| (size|view( margins)?))?|context|continuous|control key down|control size|control tint|control view|controller visible|coordinate system|copies( on scroll)?|corner view|current cell|current column|current( field)? editor|current( menu)? item|current row|current tab view item|data source|default identifiers|delta (x|y|z)|destination window|directory|display mode|displayed cell|document( (edited|rect|view))?|double value|dragged column|dragged distance|dragged items|draws( cell)? background|draws grid|dynamically scrolls|echos bullets|edge|editable|edited( data)? column|edited data item|edited( data)? 
row|enabled|enclosing scroll view|ending page|error handling|event number|event type|excluded from windows menu|executable path|expanded|fax number|field editor|file kind|file name|file type|first responder|first visible column|flipped|floating|font( panel)?|formatter|frameworks path|frontmost|gave up|grid color|has data items|has horizontal ruler|has horizontal scroller|has parent data item|has resize indicator|has shadow|has sub menu|has vertical ruler|has vertical scroller|header cell|header view|hidden|hides when deactivated|highlights by|horizontal line scroll|horizontal page scroll|horizontal ruler view|horizontally resizable|icon image|id|identifier|ignores multiple clicks|image( (alignment|dims when disabled|frame style|scaling))?|imports graphics|increment value|indentation per level|indeterminate|index|integer value|intercell spacing|item height|key( (code|equivalent( modifier)?|window))?|knob thickness|label|last( visible)? column|leading offset|leaf|level|line scroll|loaded|localized sort|location|loop mode|main( (bunde|menu|window))?|marker follows cell|matrix mode|maximum( content)? size|maximum visible columns|menu( form representation)?|miniaturizable|miniaturized|minimized image|minimized title|minimum column width|minimum( content)? 
size|modal|modified|mouse down state|movie( (controller|file|rect))?|muted|name|needs display|next state|next text|number of tick marks|only tick mark values|opaque|open panel|option key down|outline table column|page scroll|pages across|pages down|palette label|pane splitter|parent data item|parent window|pasteboard|path( (names|separator))?|playing|plays every frame|plays selection only|position|preferred edge|preferred type|pressure|previous text|prompt|properties|prototype cell|pulls down|rate|released when closed|repeated|requested print time|required file type|resizable|resized column|resource path|returns records|reuses columns|rich text|roll over|row height|rulers visible|save panel|scripts path|scrollable|selectable( identifiers)?|selected cell|selected( data)? columns?|selected data items?|selected( data)? rows?|selected item identifier|selection by rect|send action on arrow key|sends action when done editing|separates columns|separator item|sequence number|services menu|shared frameworks path|shared support path|sheet|shift key down|shows alpha|shows state by|size( mode)?|smart insert delete enabled|sort case sensitivity|sort column|sort order|sort type|sorted( data rows)?|sound|source( mask)?|spell checking enabled|starting page|state|string value|sub menu|super menu|super view|tab key traverses cells|tab state|tab type|tab view|table view|tag|target( printer)?|text color|text container insert|text container origin|text returned|tick mark position|time stamp|title(d| (cell|font|height|position|rect))?|tool tip|toolbar|trailing offset|transparent|treat packages as directories|truncated labels|types|unmodified characters|update views|use sort indicator|user defaults|uses data source|uses ruler|uses threaded animation|uses title from previous column|value wraps|version|vertical( (line scroll|page scroll|ruler view))?|vertically resizable|view|visible( document rect)?|volume|width|window|windows menu|wraps|zoomable|zoomed)\b">
<token type="NameAttribute"/>
</rule>
<rule pattern="\b(action cell|alert reply|application|box|browser( cell)?|bundle|button( cell)?|cell|clip view|color well|color-panel|combo box( item)?|control|data( (cell|column|item|row|source))?|default entry|dialog reply|document|drag info|drawer|event|font(-panel)?|formatter|image( (cell|view))?|matrix|menu( item)?|item|movie( view)?|open-panel|outline view|panel|pasteboard|plugin|popup button|progress indicator|responder|save-panel|scroll view|secure text field( cell)?|slider|sound|split view|stepper|tab view( item)?|table( (column|header cell|header view|view))|text( (field( cell)?|view))?|toolbar( item)?|user-defaults|view|window)s?\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\b(animate|append|call method|center|close drawer|close panel|display|display alert|display dialog|display panel|go|hide|highlight|increment|item for|load image|load movie|load nib|load panel|load sound|localized string|lock focus|log|open drawer|path for|pause|perform action|play|register|resume|scroll|select( all)?|show|size to fit|start|step back|step forward|stop|synchronize|unlock focus|update)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\b((in )?back of|(in )?front of|[0-9]+(st|nd|rd|th)|first|second|third|fourth|fifth|sixth|seventh|eighth|ninth|tenth|after|back|before|behind|every|front|index|last|middle|some|that|through|thru|where|whose)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\b([a-zA-Z]\w*)\b">
<token type="NameVariable"/>
</rule>
<rule pattern="[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[-+]?\d+">
<token type="LiteralNumberInteger"/>
</rule>
</state>
<state name="comment">
<rule pattern="\(\*">
<token type="CommentMultiline"/>
<push/>
</rule>
<rule pattern="\*\)">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule pattern="[^*(]+">
<token type="CommentMultiline"/>
</rule>
<rule pattern="[*(]">
<token type="CommentMultiline"/>
</rule>
</state>
</rules>
</lexer>

309
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,309 @@
<lexer>
<config>
<name>Arduino</name>
<alias>arduino</alias>
<filename>*.ino</filename>
<mime_type>text/x-arduino</mime_type>
<ensure_nl>true</ensure_nl>
</config>
<rules>
<state name="whitespace">
<rule pattern="^#if\s+0">
<token type="CommentPreproc"/>
<push state="if0"/>
</rule>
<rule pattern="^#">
<token type="CommentPreproc"/>
<push state="macro"/>
</rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
<bygroups>
<usingself state="root"/>
<token type="CommentPreproc"/>
</bygroups>
<push state="if0"/>
</rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
<bygroups>
<usingself state="root"/>
<token type="CommentPreproc"/>
</bygroups>
<push state="macro"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="//(\n|[\w\W]*?[^\\]\n)">
<token type="CommentSingle"/>
</rule>
<rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/(\\\n)?[*][\w\W]*">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="string">
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\\([\\abfnrtv&#34;\&#39;]|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="[^\\&#34;\n]+">
<token type="LiteralString"/>
</rule>
<rule pattern="\\\n">
<token type="LiteralString"/>
</rule>
<rule pattern="\\">
<token type="LiteralString"/>
</rule>
</state>
<state name="macro">
<rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)">
<bygroups>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="CommentPreprocFile"/>
</bygroups>
</rule>
<rule pattern="[^/\n]+">
<token type="CommentPreproc"/>
</rule>
<rule pattern="/[*](.|\n)*?[*]/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="/">
<token type="CommentPreproc"/>
</rule>
<rule pattern="(?&lt;=\\)\n">
<token type="CommentPreproc"/>
</rule>
<rule pattern="\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
</state>
<state name="statements">
<rule pattern="(reinterpret_cast|static_assert|dynamic_cast|thread_local|static_cast|const_cast|protected|constexpr|namespace|restrict|noexcept|override|operator|typename|template|explicit|decltype|nullptr|private|alignof|virtual|mutable|alignas|typeid|friend|throws|export|public|delete|final|using|throw|catch|this|try|new)\b">
<token type="Keyword"/>
</rule>
<rule pattern="char(16_t|32_t)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(class)\b">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="classname"/>
</rule>
<rule pattern="(R)(&#34;)([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralString"/>
<token type="LiteralStringDelimiter"/>
<token type="LiteralStringDelimiter"/>
<token type="LiteralString"/>
<token type="LiteralStringDelimiter"/>
<token type="LiteralString"/>
</bygroups>
</rule>
<rule pattern="(u8|u|U)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralString"/>
</bygroups>
<push state="string"/>
</rule>
<rule pattern="(L?)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralString"/>
</bygroups>
<push state="string"/>
</rule>
<rule pattern="(L?)(&#39;)(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\&#39;\n])(&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringChar"/>
<token type="LiteralStringChar"/>
<token type="LiteralStringChar"/>
</bygroups>
</rule>
<rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0x[0-9a-fA-F]+[LlUu]*">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="0[0-7]+[LlUu]*">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="\d+[LlUu]*">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\*/">
<token type="Error"/>
</rule>
<rule pattern="[~!%^&amp;*+=|?:&lt;&gt;/-]">
<token type="Operator"/>
</rule>
<rule pattern="[()\[\],.]">
<token type="Punctuation"/>
</rule>
<rule pattern="(restricted|volatile|continue|register|default|typedef|struct|extern|switch|sizeof|static|return|union|while|const|break|goto|enum|else|case|auto|for|asm|if|do)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(_Bool|_Complex|_Imaginary|array|atomic_bool|atomic_char|atomic_int|atomic_llong|atomic_long|atomic_schar|atomic_short|atomic_uchar|atomic_uint|atomic_ullong|atomic_ulong|atomic_ushort|auto|bool|boolean|BooleanVariables|Byte|byte|Char|char|char16_t|char32_t|class|complex|Const|const|const_cast|delete|double|dynamic_cast|enum|explicit|extern|Float|float|friend|inline|Int|int|int16_t|int32_t|int64_t|int8_t|Long|long|new|NULL|null|operator|private|PROGMEM|protected|public|register|reinterpret_cast|short|signed|sizeof|Static|static|static_cast|String|struct|typedef|uint16_t|uint32_t|uint64_t|uint8_t|union|unsigned|virtual|Void|void|Volatile|volatile|word)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(and|final|If|Loop|loop|not|or|override|setup|Setup|throw|try|xor)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(ANALOG_MESSAGE|BIN|CHANGE|DEC|DEFAULT|DIGITAL_MESSAGE|EXTERNAL|FALLING|FIRMATA_STRING|HALF_PI|HEX|HIGH|INPUT|INPUT_PULLUP|INTERNAL|INTERNAL1V1|INTERNAL1V1|INTERNAL2V56|INTERNAL2V56|LED_BUILTIN|LED_BUILTIN_RX|LED_BUILTIN_TX|LOW|LSBFIRST|MSBFIRST|OCT|OUTPUT|PI|REPORT_ANALOG|REPORT_DIGITAL|RISING|SET_PIN_MODE|SYSEX_START|SYSTEM_RESET|TWO_PI)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(boolean|const|byte|word|string|String|array)\b">
<token type="NameVariable"/>
</rule>
<rule pattern="(Keyboard|KeyboardController|MouseController|SoftwareSerial|EthernetServer|EthernetClient|LiquidCrystal|RobotControl|GSMVoiceCall|EthernetUDP|EsploraTFT|HttpClient|RobotMotor|WiFiClient|GSMScanner|FileSystem|Scheduler|GSMServer|YunClient|YunServer|IPAddress|GSMClient|GSMModem|Keyboard|Ethernet|Console|GSMBand|Esplora|Stepper|Process|WiFiUDP|GSM_SMS|Mailbox|USBHost|Firmata|PImage|Client|Server|GSMPIN|FileIO|Bridge|Serial|EEPROM|Stream|Mouse|Audio|Servo|File|Task|GPRS|WiFi|Wire|TFT|GSM|SPI|SD)\b">
<token type="NameClass"/>
</rule>
<rule pattern="(abs|Abs|accept|ACos|acos|acosf|addParameter|analogRead|AnalogRead|analogReadResolution|AnalogReadResolution|analogReference|AnalogReference|analogWrite|AnalogWrite|analogWriteResolution|AnalogWriteResolution|answerCall|asin|ASin|asinf|atan|ATan|atan2|ATan2|atan2f|atanf|attach|attached|attachGPRS|attachInterrupt|AttachInterrupt|autoscroll|available|availableForWrite|background|beep|begin|beginPacket|beginSD|beginSMS|beginSpeaker|beginTFT|beginTransmission|beginWrite|bit|Bit|BitClear|bitClear|bitRead|BitRead|bitSet|BitSet|BitWrite|bitWrite|blink|blinkVersion|BSSID|buffer|byte|cbrt|cbrtf|Ceil|ceil|ceilf|changePIN|char|charAt|checkPIN|checkPUK|checkReg|circle|cityNameRead|cityNameWrite|clear|clearScreen|click|close|compareTo|compassRead|concat|config|connect|connected|constrain|Constrain|copysign|copysignf|cos|Cos|cosf|cosh|coshf|countryNameRead|countryNameWrite|createChar|cursor|debugPrint|degrees|Delay|delay|DelayMicroseconds|delayMicroseconds|detach|DetachInterrupt|detachInterrupt|DigitalPinToInterrupt|digitalPinToInterrupt|DigitalRead|digitalRead|DigitalWrite|digitalWrite|disconnect|display|displayLogos|drawBMP|drawCompass|encryptionType|end|endPacket|endSMS|endsWith|endTransmission|endWrite|equals|equalsIgnoreCase|exists|exitValue|Exp|exp|expf|fabs|fabsf|fdim|fdimf|fill|find|findUntil|float|floor|Floor|floorf|flush|fma|fmaf|fmax|fmaxf|fmin|fminf|fmod|fmodf|gatewayIP|get|getAsynchronously|getBand|getButton|getBytes|getCurrentCarrier|getIMEI|getKey|getModifiers|getOemKey|getPINUsed|getResult|getSignalStrength|getSocket|getVoiceCallStatus|getXChange|getYChange|hangCall|height|highByte|HighByte|home|hypot|hypotf|image|indexOf|int|interrupts|IPAddress|IRread|isActionDone|isAlpha|isAlphaNumeric|isAscii|isControl|isDigit|isDirectory|isfinite|isGraph|isHexadecimalDigit|isinf|isListening|isLowerCase|isnan|isPIN|isPressed|isPrintable|isPunct|isSpace|isUpperCase|isValid|isWhitespace|keyboardRead|keyPressed|keyReleased|knobRead|lastIndexOf|ldexp|ldexpf|leftToRi
ght|length|line|lineFollowConfig|listen|listenOnLocalhost|loadImage|localIP|log|Log|log10|log10f|logf|long|lowByte|LowByte|lrint|lrintf|lround|lroundf|macAddress|maintain|map|Map|Max|max|messageAvailable|Micros|micros|millis|Millis|Min|min|mkdir|motorsStop|motorsWrite|mouseDragged|mouseMoved|mousePressed|mouseReleased|move|noAutoscroll|noBlink|noBuffer|noCursor|noDisplay|noFill|noInterrupts|NoInterrupts|noListenOnLocalhost|noStroke|noTone|NoTone|onReceive|onRequest|open|openNextFile|overflow|parseCommand|parseFloat|parseInt|parsePacket|pauseMode|peek|PinMode|pinMode|playFile|playMelody|point|pointTo|position|Pow|pow|powf|prepare|press|print|printFirmwareVersion|println|printVersion|process|processInput|PulseIn|pulseIn|pulseInLong|PulseInLong|put|radians|random|Random|randomSeed|RandomSeed|read|readAccelerometer|readBlue|readButton|readBytes|readBytesUntil|readGreen|readJoystickButton|readJoystickSwitch|readJoystickX|readJoystickY|readLightSensor|readMessage|readMicrophone|readNetworks|readRed|readSlider|readString|readStringUntil|readTemperature|ready|rect|release|releaseAll|remoteIP|remoteNumber|remotePort|remove|replace|requestFrom|retrieveCallingNumber|rewindDirectory|rightToLeft|rmdir|robotNameRead|robotNameWrite|round|roundf|RSSI|run|runAsynchronously|running|runShellCommand|runShellCommandAsynchronously|scanNetworks|scrollDisplayLeft|scrollDisplayRight|seek|sendAnalog|sendDigitalPortPair|sendDigitalPorts|sendString|sendSysex|Serial_Available|Serial_Begin|Serial_End|Serial_Flush|Serial_Peek|Serial_Print|Serial_Println|Serial_Read|serialEvent|setBand|setBitOrder|setCharAt|setClockDivider|setCursor|setDataMode|setDNS|setFirmwareVersion|setMode|setPINUsed|setSpeed|setTextSize|setTimeout|ShiftIn|shiftIn|ShiftOut|shiftOut|shutdown|signbit|sin|Sin|sinf|sinh|sinhf|size|sizeof|Sq|sq|Sqrt|sqrt|sqrtf|SSID|startLoop|startsWith|step|stop|stroke|subnetMask|substring|switchPIN|tan|Tan|tanf|tanh|tanhf|tempoWrite|text|toCharArray|toInt|toLowerCase|tone|Tone|toUpperCase|transfe
r|trim|trunc|truncf|tuneWrite|turn|updateIR|userNameRead|userNameWrite|voiceCall|waitContinue|width|WiFiServer|word|write|writeBlue|writeGreen|writeJSON|writeMessage|writeMicroseconds|writeRed|writeRGB|yield|Yield)\b">
<token type="NameFunction"/>
</rule>
<rule pattern="(typename|__inline|restrict|_inline|thread|inline|naked)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(__m(128i|128d|128|64))\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="__(forceinline|identifier|unaligned|declspec|fastcall|finally|stdcall|wchar_t|assume|except|int32|cdecl|int16|leave|based|raise|int64|noop|int8|w64|try|asm)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(true|false|NULL)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="([a-zA-Z_]\w*)(\s*)(:)(?!:)">
<bygroups>
<token type="NameLabel"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="[a-zA-Z_]\w*">
<token type="Name"/>
</rule>
</state>
<state name="function">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="statements"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
<push/>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="if0">
<rule pattern="^\s*#if.*?(?&lt;!\\)\n">
<token type="CommentPreproc"/>
<push/>
</rule>
<rule pattern="^\s*#el(?:se|if).*\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern="^\s*#endif.*?(?&lt;!\\)\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern=".*?\n">
<token type="Comment"/>
</rule>
</state>
<state name="classname">
<rule pattern="[a-zA-Z_]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
<rule pattern="\s*(?=&gt;)">
<token type="Text"/>
<pop depth="1"/>
</rule>
</state>
<state name="statement">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="statements"/>
</rule>
<rule pattern="[{}]">
<token type="Punctuation"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<usingself state="root"/>
<usingself state="root"/>
<token type="Punctuation"/>
</bygroups>
<push state="function"/>
</rule>
<rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<usingself state="root"/>
<usingself state="root"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule>
<push state="statement"/>
</rule>
<rule pattern="__(multiple_inheritance|virtual_inheritance|single_inheritance|interface|uuidof|super|event)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="__(offload|blockingoffload|outer)\b">
<token type="KeywordPseudo"/>
</rule>
</state>
</rules>
</lexer>

126
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,126 @@
<lexer>
<config>
<name>ArmAsm</name>
<alias>armasm</alias>
<filename>*.s</filename>
<filename>*.S</filename>
<mime_type>text/x-armasm</mime_type>
<mime_type>text/x-asm</mime_type>
<ensure_nl>true</ensure_nl>
</config>
<rules>
<state name="root">
<rule>
<include state="commentsandwhitespace"/>
</rule>
<rule pattern="(\.\w+)([ \t]+\w+\s+?)?">
<bygroups>
<token type="KeywordNamespace"/>
<token type="NameLabel"/>
</bygroups>
</rule>
<rule pattern="(\w+)(:)(\s+\.\w+\s+)">
<bygroups>
<token type="NameLabel"/>
<token type="Punctuation"/>
<token type="KeywordNamespace"/>
</bygroups>
<push state="literal"/>
</rule>
<rule pattern="(\w+)(:)">
<bygroups>
<token type="NameLabel"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="svc\s+\w+">
<token type="NameNamespace"/>
</rule>
<rule pattern="[a-zA-Z]+">
<token type="Text"/>
<push state="opcode"/>
</rule>
</state>
<state name="commentsandwhitespace">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="[@;].*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="literal">
<rule pattern="0b[01]+">
<token type="LiteralNumberBin"/>
<pop depth="1"/>
</rule>
<rule pattern="0x\w{1,8}">
<token type="LiteralNumberHex"/>
<pop depth="1"/>
</rule>
<rule pattern="0\d+">
<token type="LiteralNumberOct"/>
<pop depth="1"/>
</rule>
<rule pattern="\d+?\.\d+?">
<token type="LiteralNumberFloat"/>
<pop depth="1"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumberInteger"/>
<pop depth="1"/>
</rule>
<rule pattern="(&#34;)(.+)(&#34;)">
<bygroups>
<token type="Punctuation"/>
<token type="LiteralStringDouble"/>
<token type="Punctuation"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="(&#39;)(.{1}|\\.{1})(&#39;)">
<bygroups>
<token type="Punctuation"/>
<token type="LiteralStringChar"/>
<token type="Punctuation"/>
</bygroups>
<pop depth="1"/>
</rule>
</state>
<state name="opcode">
<rule pattern="\n">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="(@|;).*\n">
<token type="CommentSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="(\s+|,)">
<token type="Text"/>
</rule>
<rule pattern="[rapcfxwbhsdqv]\d{1,2}">
<token type="NameClass"/>
</rule>
<rule pattern="=0x\w+">
<bygroups>
<token type="Text"/>
<token type="NameLabel"/>
</bygroups>
</rule>
<rule pattern="(=)(\w+)">
<bygroups>
<token type="Text"/>
<token type="NameLabel"/>
</bygroups>
</rule>
<rule pattern="#">
<token type="Text"/>
<push state="literal"/>
</rule>
</state>
</rules>
</lexer>

95
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,95 @@
<lexer>
<config>
<name>Awk</name>
<alias>awk</alias>
<alias>gawk</alias>
<alias>mawk</alias>
<alias>nawk</alias>
<filename>*.awk</filename>
<mime_type>application/x-awk</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="^(?=\s|/)">
<token type="Text"/>
<push state="slashstartsregex"/>
</rule>
<rule>
<include state="commentsandwhitespace"/>
</rule>
<rule pattern="\+\+|--|\|\||&amp;&amp;|in\b|\$|!?~|\|&amp;|(\*\*|[-&lt;&gt;+*%\^/!=|])=?">
<token type="Operator"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="[{(\[;,]">
<token type="Punctuation"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="[})\].]">
<token type="Punctuation"/>
</rule>
<rule pattern="(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b">
<token type="Keyword"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="function\b">
<token type="KeywordDeclaration"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="[@$a-zA-Z_]\w*">
<token type="NameOther"/>
</rule>
<rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0x[0-9a-fA-F]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
</state>
<state name="commentsandwhitespace">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="#.*$">
<token type="CommentSingle"/>
</rule>
</state>
<state name="slashstartsregex">
<rule>
<include state="commentsandwhitespace"/>
</rule>
<rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/\B">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
<rule pattern="(?=/)">
<token type="Text"/>
<push state="#pop" state="badregex"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="badregex">
<rule pattern="\n">
<token type="Text"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

97
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,97 @@
<lexer>
<config>
<name>Ballerina</name>
<alias>ballerina</alias>
<filename>*.bal</filename>
<mime_type>text/x-ballerina</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="root">
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="(break|catch|continue|done|else|finally|foreach|forever|fork|if|lock|match|return|throw|transaction|try|while)\b">
<token type="Keyword"/>
</rule>
<rule pattern="((?:(?:[^\W\d]|\$)[\w.\[\]$&lt;&gt;]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="@[^\W\d][\w.]*">
<token type="NameDecorator"/>
</rule>
<rule pattern="(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(import)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="import"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#39;\\.&#39;|&#39;[^\\]&#39;|&#39;\\u[0-9a-fA-F]{4}&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="(\.)((?:[^\W\d]|\$)[\w$]*)">
<bygroups>
<token type="Operator"/>
<token type="NameAttribute"/>
</bygroups>
</rule>
<rule pattern="^\s*([^\W\d]|\$)[\w$]*:">
<token type="NameLabel"/>
</rule>
<rule pattern="([^\W\d]|\$)[\w$]*">
<token type="Name"/>
</rule>
<rule pattern="([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="0[bB][01][01_]*[lL]?">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="0[0-7_]+[lL]?">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0|[1-9][0-9_]*[lL]?">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[~^*!%&amp;\[\](){}&lt;&gt;|+=:;,./?-]">
<token type="Operator"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
</state>
<state name="import">
<rule pattern="[\w.]+">
<token type="NameNamespace"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

217
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,217 @@
<lexer>
<config>
<name>Bash</name>
<alias>bash</alias>
<alias>sh</alias>
<alias>ksh</alias>
<alias>zsh</alias>
<alias>shell</alias>
<filename>*.sh</filename>
<filename>*.ksh</filename>
<filename>*.bash</filename>
<filename>*.ebuild</filename>
<filename>*.eclass</filename>
<filename>.env</filename>
<filename>*.env</filename>
<filename>*.exheres-0</filename>
<filename>*.exlib</filename>
<filename>*.zsh</filename>
<filename>*.zshrc</filename>
<filename>.bashrc</filename>
<filename>bashrc</filename>
<filename>.bash_*</filename>
<filename>bash_*</filename>
<filename>zshrc</filename>
<filename>.zshrc</filename>
<filename>PKGBUILD</filename>
<mime_type>application/x-sh</mime_type>
<mime_type>application/x-shellscript</mime_type>
</config>
<rules>
<state name="data">
<rule pattern="(?s)\$?&#34;(\\\\|\\[0-7]+|\\.|[^&#34;\\$])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string"/>
</rule>
<rule pattern="(?s)\$&#39;(\\\\|\\[0-7]+|\\.|[^&#39;\\])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="(?s)&#39;.*?&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
</rule>
<rule pattern="&amp;">
<token type="Punctuation"/>
</rule>
<rule pattern="\|">
<token type="Punctuation"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\d+(?= |$)">
<token type="LiteralNumber"/>
</rule>
<rule pattern="[^=\s\[\]{}()$&#34;\&#39;`\\&lt;&amp;|;]+">
<token type="Text"/>
</rule>
<rule pattern="&lt;">
<token type="Text"/>
</rule>
</state>
<state name="string">
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
<rule pattern="(?s)(\\\\|\\[0-7]+|\\.|[^&#34;\\$])+">
<token type="LiteralStringDouble"/>
</rule>
<rule>
<include state="interp"/>
</rule>
</state>
<state name="interp">
<rule pattern="\$\(\(">
<token type="Keyword"/>
<push state="math"/>
</rule>
<rule pattern="\$\(">
<token type="Keyword"/>
<push state="paren"/>
</rule>
<rule pattern="\$\{#?">
<token type="LiteralStringInterpol"/>
<push state="curly"/>
</rule>
<rule pattern="\$[a-zA-Z_]\w*">
<token type="NameVariable"/>
</rule>
<rule pattern="\$(?:\d+|[#$?!_*@-])">
<token type="NameVariable"/>
</rule>
<rule pattern="\$">
<token type="Text"/>
</rule>
</state>
<state name="paren">
<rule pattern="\)">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="math">
<rule pattern="\)\)">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule pattern="[-+*/%^|&amp;]|\*\*|\|\|">
<token type="Operator"/>
</rule>
<rule pattern="\d+#\d+">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+#(?! )">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumber"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="backticks">
<rule pattern="`">
<token type="LiteralStringBacktick"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="root">
<rule>
<include state="basic"/>
</rule>
<rule pattern="`">
<token type="LiteralStringBacktick"/>
<push state="backticks"/>
</rule>
<rule>
<include state="data"/>
</rule>
<rule>
<include state="interp"/>
</rule>
</state>
<state name="basic">
<rule pattern="\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\s)`])">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\A#!.+\n">
<token type="CommentPreproc"/>
</rule>
<rule pattern="#.*(\S|$)">
<token type="CommentSingle"/>
</rule>
<rule pattern="\\[\w\W]">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="(\b\w+)(\s*)(\+?=)">
<bygroups>
<token type="NameVariable"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="[\[\]{}()=]">
<token type="Operator"/>
</rule>
<rule pattern="&lt;&lt;&lt;">
<token type="Operator"/>
</rule>
<rule pattern="&lt;&lt;-?\s*(\&#39;?)\\?(\w+)[\w\W]+?\2">
<token type="LiteralString"/>
</rule>
<rule pattern="&amp;&amp;|\|\|">
<token type="Operator"/>
</rule>
</state>
<state name="curly">
<rule pattern="\}">
<token type="LiteralStringInterpol"/>
<pop depth="1"/>
</rule>
<rule pattern=":-">
<token type="Keyword"/>
</rule>
<rule pattern="\w+">
<token type="NameVariable"/>
</rule>
<rule pattern="[^}:&#34;\&#39;`$\\]+">
<token type="Punctuation"/>
</rule>
<rule pattern=":">
<token type="Punctuation"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
</rules>
</lexer>

660
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,660 @@
<lexer>
<config>
<name>Batchfile</name>
<alias>bat</alias>
<alias>batch</alias>
<alias>dosbatch</alias>
<alias>winbatch</alias>
<filename>*.bat</filename>
<filename>*.cmd</filename>
<mime_type>application/x-dos-batch</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="arithmetic">
<rule pattern="0[0-7]+">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0x[\da-f]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[(),]+">
<token type="Punctuation"/>
</rule>
<rule pattern="([=+\-*/!~]|%|\^\^)+">
<token type="Operator"/>
</rule>
<rule pattern="((?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[\w\W])+">
<usingself state="variable"/>
</rule>
<rule pattern="(?=[\x00|&amp;])">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule>
<include state="follow"/>
</rule>
</state>
<state name="else?">
<rule pattern="(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)">
<usingself state="text"/>
</rule>
<rule pattern="else(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="sqstring">
<rule>
<include state="variable-or-escape"/>
</rule>
<rule pattern="[^%]+|%">
<token type="LiteralStringSingle"/>
</rule>
</state>
<state name="root">
<rule pattern="\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)">
<token type="CommentSingle"/>
</rule>
<rule pattern="(?=((?:(?&lt;=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))">
<token type="Text"/>
<push state="follow"/>
</rule>
<rule pattern="(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)">
<usingself state="text"/>
</rule>
<rule>
<include state="redirect"/>
</rule>
<rule pattern="[\n\x1a]+">
<token type="Text"/>
</rule>
<rule pattern="\(">
<token type="Punctuation"/>
<push state="root/compound"/>
</rule>
<rule pattern="@+">
<token type="Punctuation"/>
</rule>
<rule pattern="((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?&lt;=m))(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
</bygroups>
<push state="follow"/>
</rule>
<rule pattern="(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(]))((?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^&#34;%\n\x1a&amp;&lt;&gt;|])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^&#34;%\n\x1a&amp;&lt;&gt;|])*)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
</bygroups>
<push state="follow"/>
</rule>
<rule pattern="(setlocal|endlocal|prompt|verify|rename|mklink|rmdir|shift|start|color|dpath|title|chdir|erase|pushd|ftype|break|pause|mkdir|assoc|date|path|time|popd|keys|exit|type|copy|echo|move|dir|del|ren|ver|cls|vol|rd|md|cd)(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(])">
<token type="Keyword"/>
<push state="follow"/>
</rule>
<rule pattern="(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Punctuation"/>
</bygroups>
<push state="call"/>
</rule>
<rule pattern="call(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(])">
<token type="Keyword"/>
</rule>
<rule pattern="(for(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
</bygroups>
<push state="for/f" state="for"/>
</rule>
<rule pattern="(for(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
</bygroups>
<push state="for/l" state="for"/>
</rule>
<rule pattern="for(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])(?!\^)">
<token type="Keyword"/>
<push state="for2" state="for"/>
</rule>
<rule pattern="(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Punctuation"/>
</bygroups>
<push state="label"/>
</rule>
<rule pattern="(if(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
<usingself state="text"/>
</bygroups>
<push state="(?" state="if"/>
</rule>
<rule pattern="rem(((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+))+)?.*|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(])(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*))">
<token type="CommentSingle"/>
<push state="follow"/>
</rule>
<rule pattern="(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
</bygroups>
<push state="arithmetic"/>
</rule>
<rule pattern="(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|^=]|\^[\n\x1a]?[^&#34;=])+)?)((?:(?:\^[\n\x1a]?)?=)?)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
<usingself state="text"/>
<usingself state="variable"/>
<token type="Punctuation"/>
</bygroups>
<push state="follow"/>
</rule>
<rule>
<push state="follow"/>
</rule>
</state>
<state name="follow">
<rule pattern="((?:(?&lt;=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))(.*)">
<bygroups>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="NameLabel"/>
<token type="CommentSingle"/>
</bygroups>
</rule>
<rule>
<include state="redirect"/>
</rule>
<rule pattern="(?=[\n\x1a])">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="\|\|?|&amp;&amp;?">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule>
<include state="text"/>
</rule>
</state>
<state name="bqstring">
<rule>
<include state="variable-or-escape"/>
</rule>
<rule pattern="[^%]+|%">
<token type="LiteralStringBacktick"/>
</rule>
</state>
<state name="for2">
<rule pattern="\)">
<token type="Punctuation"/>
</rule>
<rule pattern="((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(do(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))">
<bygroups>
<usingself state="text"/>
<token type="Keyword"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="[\n\x1a]+">
<token type="Text"/>
</rule>
<rule>
<include state="follow"/>
</rule>
</state>
<state name="label/compound">
<rule pattern="(?=\))">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="((?:(?:[^\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*)?)((?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[^)]|[^&#34;%^\n\x1a&amp;&lt;&gt;|)])*)">
<bygroups>
<token type="NameLabel"/>
<token type="CommentSingle"/>
</bygroups>
<pop depth="1"/>
</rule>
</state>
<state name="for">
<rule pattern="((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(in)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\()">
<bygroups>
<usingself state="text"/>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Punctuation"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<include state="follow"/>
</rule>
</state>
<state name="redirect/compound">
<rule pattern="((?:(?&lt;=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(&gt;&gt;?&amp;|&lt;&amp;)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)">
<bygroups>
<token type="LiteralNumberInteger"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="LiteralNumberInteger"/>
</bygroups>
</rule>
<rule pattern="((?:(?&lt;=[\n\x1a\t\v\f\r ,;=\xa0])(?&lt;!\^[\n\x1a])\d)?)(&gt;&gt;?|&lt;)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0)])+))+))">
<bygroups>
<token type="LiteralNumberInteger"/>
<token type="Punctuation"/>
<usingself state="text"/>
</bygroups>
</rule>
</state>
<state name="if">
<rule pattern="((?:cmdextversion|errorlevel)(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\d+)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="LiteralNumberInteger"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="(defined(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+))+))">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<usingself state="variable"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="(exist(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+))+))">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="((?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])))">
<bygroups>
<usingself state="arithmetic"/>
<token type="OperatorWord"/>
<usingself state="arithmetic"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="(?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+))+)">
<usingself state="text"/>
<push state="#pop" state="if2"/>
</rule>
</state>
<state name="root/compound">
<rule pattern="\)">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="(?=((?:(?&lt;=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))">
<token type="Text"/>
<push state="follow/compound"/>
</rule>
<rule pattern="(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)">
<usingself state="text"/>
</rule>
<rule>
<include state="redirect/compound"/>
</rule>
<rule pattern="[\n\x1a]+">
<token type="Text"/>
</rule>
<rule pattern="\(">
<token type="Punctuation"/>
<push state="root/compound"/>
</rule>
<rule pattern="@+">
<token type="Punctuation"/>
</rule>
<rule pattern="((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?&lt;=m))(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0)])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
</bygroups>
<push state="follow/compound"/>
</rule>
<rule pattern="(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(])))((?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^&#34;%\n\x1a&amp;&lt;&gt;|)])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^&#34;%\n\x1a&amp;&lt;&gt;|)])*)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
</bygroups>
<push state="follow/compound"/>
</rule>
<rule pattern="(setlocal|endlocal|prompt|verify|rename|mklink|rmdir|shift|start|color|dpath|title|chdir|erase|pushd|ftype|break|pause|mkdir|assoc|date|path|time|popd|keys|exit|type|copy|echo|move|dir|del|ren|ver|cls|vol|rd|md|cd)(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(]))">
<token type="Keyword"/>
<push state="follow/compound"/>
</rule>
<rule pattern="(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Punctuation"/>
</bygroups>
<push state="call/compound"/>
</rule>
<rule pattern="call(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(]))">
<token type="Keyword"/>
</rule>
<rule pattern="(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])))">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
</bygroups>
<push state="for/f" state="for"/>
</rule>
<rule pattern="(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])))">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
</bygroups>
<push state="for/l" state="for"/>
</rule>
<rule pattern="for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a]))(?!\^)">
<token type="Keyword"/>
<push state="for2" state="for"/>
</rule>
<rule pattern="(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(])))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Punctuation"/>
</bygroups>
<push state="label/compound"/>
</rule>
<rule pattern="(if(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
<usingself state="text"/>
</bygroups>
<push state="(?" state="if"/>
</rule>
<rule pattern="rem(((?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&amp;&lt;&gt;|\n\x1a])))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+))+)?.*|(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(]))(?:(?:[^\n\x1a^)]|\^[\n\x1a]?[^)])*))">
<token type="CommentSingle"/>
<push state="follow/compound"/>
</rule>
<rule pattern="(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
</bygroups>
<push state="arithmetic/compound"/>
</rule>
<rule pattern="(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&amp;&lt;&gt;|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|^=)]|\^[\n\x1a]?[^&#34;=])+)?)((?:(?:\^[\n\x1a]?)?=)?)">
<bygroups>
<token type="Keyword"/>
<usingself state="text"/>
<token type="Keyword"/>
<usingself state="text"/>
<usingself state="variable"/>
<token type="Punctuation"/>
</bygroups>
<push state="follow/compound"/>
</rule>
<rule>
<push state="follow/compound"/>
</rule>
</state>
<state name="follow/compound">
<rule pattern="(?=\))">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="((?:(?&lt;=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))(.*)">
<bygroups>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="NameLabel"/>
<token type="CommentSingle"/>
</bygroups>
</rule>
<rule>
<include state="redirect/compound"/>
</rule>
<rule pattern="(?=[\n\x1a])">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="\|\|?|&amp;&amp;?">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule>
<include state="text"/>
</rule>
</state>
<state name="text">
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string"/>
</rule>
<rule>
<include state="variable-or-escape"/>
</rule>
<rule pattern="[^&#34;%^\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0\d)]+|.">
<token type="Text"/>
</rule>
</state>
<state name="redirect">
<rule pattern="((?:(?&lt;=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(&gt;&gt;?&amp;|&lt;&amp;)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)">
<bygroups>
<token type="LiteralNumberInteger"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="LiteralNumberInteger"/>
</bygroups>
</rule>
<rule pattern="((?:(?&lt;=[\n\x1a\t\v\f\r ,;=\xa0])(?&lt;!\^[\n\x1a])\d)?)(&gt;&gt;?|&lt;)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+))+))">
<bygroups>
<token type="LiteralNumberInteger"/>
<token type="Punctuation"/>
<usingself state="text"/>
</bygroups>
</rule>
</state>
<state name="label">
<rule pattern="((?:(?:[^\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*)?)((?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[\w\W]|[^&#34;%^\n\x1a&amp;&lt;&gt;|])*)">
<bygroups>
<token type="NameLabel"/>
<token type="CommentSingle"/>
</bygroups>
<pop depth="1"/>
</rule>
</state>
<state name="arithmetic/compound">
<rule pattern="(?=\))">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="0[0-7]+">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0x[\da-f]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[(),]+">
<token type="Punctuation"/>
</rule>
<rule pattern="([=+\-*/!~]|%|\^\^)+">
<token type="Operator"/>
</rule>
<rule pattern="((?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[^)])+">
<usingself state="variable"/>
</rule>
<rule pattern="(?=[\x00|&amp;])">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule>
<include state="follow"/>
</rule>
</state>
<state name="string">
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
<rule pattern="(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))">
<token type="NameVariable"/>
</rule>
<rule pattern="\^!|%%">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="[^&#34;%^\n\x1a]+|[%^]">
<token type="LiteralStringDouble"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="variable">
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string"/>
</rule>
<rule>
<include state="variable-or-escape"/>
</rule>
<rule pattern="[^&#34;%^\n\x1a]+|.">
<token type="NameVariable"/>
</rule>
</state>
<state name="call/compound">
<rule pattern="(?=\))">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="(:?)((?:(?:[^\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))">
<bygroups>
<token type="Punctuation"/>
<token type="NameLabel"/>
</bygroups>
<pop depth="1"/>
</rule>
</state>
<state name="for/f">
<rule pattern="(&#34;)((?:(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^&#34;])*?&#34;)([\n\x1a\t\v\f\r ,;=\xa0]*)(\))">
<bygroups>
<token type="LiteralStringDouble"/>
<usingself state="string"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="#pop" state="for2" state="string"/>
</rule>
<rule pattern="(&#39;(?:%%|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[\w\W])*?&#39;)([\n\x1a\t\v\f\r ,;=\xa0]*)(\))">
<bygroups>
<usingself state="sqstring"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(`(?:%%|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[\w\W])*?`)([\n\x1a\t\v\f\r ,;=\xa0]*)(\))">
<bygroups>
<usingself state="bqstring"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule>
<include state="for2"/>
</rule>
</state>
<state name="for/l">
<rule pattern="-?\d+">
<token type="LiteralNumberInteger"/>
</rule>
<rule>
<include state="for2"/>
</rule>
</state>
<state name="if2">
<rule pattern="((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(==)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+))+))">
<bygroups>
<usingself state="text"/>
<token type="Operator"/>
<usingself state="text"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&amp;&lt;&gt;|]+|(?:(?:&#34;[^\n\x1a&#34;]*(?:&#34;|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^&#34;\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0])+))+))">
<bygroups>
<usingself state="text"/>
<token type="OperatorWord"/>
<usingself state="text"/>
</bygroups>
<pop depth="1"/>
</rule>
</state>
<state name="(?">
<rule pattern="(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)">
<usingself state="text"/>
</rule>
<rule pattern="\(">
<token type="Punctuation"/>
<push state="#pop" state="else?" state="root/compound"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="call">
<rule pattern="(:?)((?:(?:[^\n\x1a&amp;&lt;&gt;|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))">
<bygroups>
<token type="Punctuation"/>
<token type="NameLabel"/>
</bygroups>
<pop depth="1"/>
</rule>
</state>
<state name="variable-or-escape">
<rule pattern="(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))">
<token type="NameVariable"/>
</rule>
<rule pattern="%%|\^[\n\x1a]?(\^!|[\w\W])">
<token type="LiteralStringEscape"/>
</rule>
</state>
</rules>
</lexer>

152
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,152 @@
<lexer>
<config>
<name>BibTeX</name>
<alias>bib</alias>
<alias>bibtex</alias>
<filename>*.bib</filename>
<mime_type>text/x-bibtex</mime_type>
<case_insensitive>true</case_insensitive>
<not_multiline>true</not_multiline>
</config>
<rules>
<state name="closing-brace">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="[})]">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="braced-string">
<rule pattern="\{">
<token type="LiteralString"/>
<push/>
</rule>
<rule pattern="\}">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="[^\{\}]+">
<token type="LiteralString"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="\s+">
<token type="Text"/>
</rule>
</state>
<state name="value">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="[a-z_@!$&amp;*+\-./:;&lt;&gt;?\[\\\]^`|~][\w@!$&amp;*+\-./:;&lt;&gt;?\[\\\]^`|~]*">
<token type="NameVariable"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="quoted-string"/>
</rule>
<rule pattern="\{">
<token type="LiteralString"/>
<push state="braced-string"/>
</rule>
<rule pattern="[\d]+">
<token type="LiteralNumber"/>
</rule>
<rule pattern="#">
<token type="Punctuation"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="quoted-string">
<rule pattern="\{">
<token type="LiteralString"/>
<push state="braced-string"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="[^\{\&#34;]+">
<token type="LiteralString"/>
</rule>
</state>
<state name="root">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="@comment">
<token type="Comment"/>
</rule>
<rule pattern="@preamble">
<token type="NameClass"/>
<push state="closing-brace" state="value" state="opening-brace"/>
</rule>
<rule pattern="@string">
<token type="NameClass"/>
<push state="closing-brace" state="field" state="opening-brace"/>
</rule>
<rule pattern="@[a-z_@!$&amp;*+\-./:;&lt;&gt;?\[\\\]^`|~][\w@!$&amp;*+\-./:;&lt;&gt;?\[\\\]^`|~]*">
<token type="NameClass"/>
<push state="closing-brace" state="command-body" state="opening-brace"/>
</rule>
<rule pattern=".+">
<token type="Comment"/>
</rule>
</state>
<state name="command-body">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="[^\s\,\}]+">
<token type="NameLabel"/>
<push state="#pop" state="fields"/>
</rule>
</state>
<state name="fields">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern=",">
<token type="Punctuation"/>
<push state="field"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="=">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="=">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="field">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="[a-z_@!$&amp;*+\-./:;&lt;&gt;?\[\\\]^`|~][\w@!$&amp;*+\-./:;&lt;&gt;?\[\\\]^`|~]*">
<token type="NameAttribute"/>
<push state="value" state="="/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="opening-brace">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="[{(]">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

68
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,68 @@
<lexer>
<config>
<name>Bicep</name>
<alias>bicep</alias>
<filename>*.bicep</filename>
</config>
<rules>
<state name="root">
<rule pattern="//[^\n\r]+">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="([&#39;]?\w+[&#39;]?)(:)">
<bygroups>
<token type="NameProperty"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="\b(&#39;(resourceGroup|subscription|managementGroup|tenant)&#39;)\b">
<token type="KeywordNamespace"/>
</rule>
<rule pattern="&#39;[\w\$\{\(\)\}\.]{1,}?&#39;">
<token type="LiteralStringInterpol"/>
</rule>
<rule pattern="(&#39;&#39;&#39;|&#39;).*?(&#39;&#39;&#39;|&#39;)">
<token type="LiteralString"/>
</rule>
<rule pattern="\b(allowed|batchSize|description|maxLength|maxValue|metadata|minLength|minValue|secure)\b">
<token type="NameDecorator"/>
</rule>
<rule pattern="\b(az|sys)\.">
<token type="NameNamespace"/>
</rule>
<rule pattern="\b(any|array|concat|contains|empty|first|intersection|items|last|length|min|max|range|skip|take|union|dateTimeAdd|utcNow|deployment|environment|loadFileAsBase64|loadTextContent|int|json|extensionResourceId|getSecret|list|listKeys|listKeyValue|listAccountSas|listSecrets|pickZones|reference|resourceId|subscriptionResourceId|tenantResourceId|managementGroup|resourceGroup|subscription|tenant|base64|base64ToJson|base64ToString|dataUri|dataUriToString|endsWith|format|guid|indexOf|lastIndexOf|length|newGuid|padLeft|replace|split|startsWith|string|substring|toLower|toUpper|trim|uniqueString|uri|uriComponent|uriComponentToString)\b">
<token type="NameFunction"/>
</rule>
<rule pattern="\b(bool)(\()">
<bygroups>
<token type="NameFunction"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="\b(for|if|in)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(module|output|param|resource|var)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="\b(array|bool|int|object|string)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(&gt;=|&gt;|&lt;=|&lt;|==|!=|=~|!~|::|&amp;&amp;|\?\?|!|-|%|\*|\/|\+)">
<token type="Operator"/>
</rule>
<rule pattern="[\(\)\[\]\.:\?{}@=]">
<token type="Punctuation"/>
</rule>
<rule pattern="[\w_-]+">
<token type="Text"/>
</rule>
<rule pattern="\s+">
<token type="TextWhitespace"/>
</rule>
</state>
</rules>
</lexer>

141
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,141 @@
<lexer>
<config>
<name>BlitzBasic</name>
<alias>blitzbasic</alias>
<alias>b3d</alias>
<alias>bplus</alias>
<filename>*.bb</filename>
<filename>*.decls</filename>
<mime_type>text/x-bb</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="string">
<rule pattern="&#34;&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;C?">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
<rule pattern="[^&#34;]+">
<token type="LiteralStringDouble"/>
</rule>
</state>
<state name="root">
<rule pattern="[ \t]+">
<token type="Text"/>
</rule>
<rule pattern=";.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string"/>
</rule>
<rule pattern="[0-9]+\.[0-9]*(?!\.)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\.[0-9]+(?!\.)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\$[0-9a-f]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="\%[10]+">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="\b(Before|Handle|After|First|Float|Last|Sgn|Abs|Not|And|Int|Mod|Str|Sar|Shr|Shl|Or)\b">
<token type="Operator"/>
</rule>
<rule pattern="([+\-*/~=&lt;&gt;^])">
<token type="Operator"/>
</rule>
<rule pattern="[(),:\[\]\\]">
<token type="Punctuation"/>
</rule>
<rule pattern="\.([ \t]*)([a-z]\w*)">
<token type="NameLabel"/>
</rule>
<rule pattern="\b(New)\b([ \t]+)([a-z]\w*)">
<bygroups>
<token type="KeywordReserved"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="\b(Gosub|Goto)\b([ \t]+)([a-z]\w*)">
<bygroups>
<token type="KeywordReserved"/>
<token type="Text"/>
<token type="NameLabel"/>
</bygroups>
</rule>
<rule pattern="\b(Object)\b([ \t]*)([.])([ \t]*)([a-z]\w*)\b">
<bygroups>
<token type="Operator"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="\b([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?\b([ \t]*)(\()">
<bygroups>
<token type="NameFunction"/>
<token type="Text"/>
<token type="KeywordType"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="NameClass"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="\b(Function)\b([ \t]+)([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?">
<bygroups>
<token type="KeywordReserved"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="KeywordType"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="\b(Type)([ \t]+)([a-z]\w*)">
<bygroups>
<token type="KeywordReserved"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="\b(Pi|True|False|Null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="\b(Local|Global|Const|Field|Dim)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="\b(Function|Restore|Default|Forever|Include|Return|Repeat|ElseIf|Delete|Insert|Select|EndIf|Until|While|Gosub|Type|Goto|Else|Data|Next|Step|Each|Case|Wend|Exit|Read|Then|For|New|Asc|Len|Chr|End|To|If)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?">
<bygroups>
<token type="NameVariable"/>
<token type="Text"/>
<token type="KeywordType"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
</state>
</rules>
</lexer>

28
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,28 @@
<lexer>
<config>
<name>BNF</name>
<alias>bnf</alias>
<filename>*.bnf</filename>
<mime_type>text/x-bnf</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="(&lt;)([ -;=?-~]+)(&gt;)">
<bygroups>
<token type="Punctuation"/>
<token type="NameClass"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="::=">
<token type="Operator"/>
</rule>
<rule pattern="[^&lt;&gt;:]+">
<token type="Text"/>
</rule>
<rule pattern=".">
<token type="Text"/>
</rule>
</state>
</rules>
</lexer>

51
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,51 @@
<lexer>
<config>
<name>Brainfuck</name>
<alias>brainfuck</alias>
<alias>bf</alias>
<filename>*.bf</filename>
<filename>*.b</filename>
<mime_type>application/x-brainfuck</mime_type>
</config>
<rules>
<state name="common">
<rule pattern="[.,]+">
<token type="NameTag"/>
</rule>
<rule pattern="[+-]+">
<token type="NameBuiltin"/>
</rule>
<rule pattern="[&lt;&gt;]+">
<token type="NameVariable"/>
</rule>
<rule pattern="[^.,+\-&lt;&gt;\[\]]+">
<token type="Comment"/>
</rule>
</state>
<state name="root">
<rule pattern="\[">
<token type="Keyword"/>
<push state="loop"/>
</rule>
<rule pattern="\]">
<token type="Error"/>
</rule>
<rule>
<include state="common"/>
</rule>
</state>
<state name="loop">
<rule pattern="\[">
<token type="Keyword"/>
<push/>
</rule>
<rule pattern="\]">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule>
<include state="common"/>
</rule>
</state>
</rules>
</lexer>

326
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,326 @@
<lexer>
<config>
<name>C++</name>
<alias>cpp</alias>
<alias>c++</alias>
<filename>*.cpp</filename>
<filename>*.hpp</filename>
<filename>*.c++</filename>
<filename>*.h++</filename>
<filename>*.cc</filename>
<filename>*.hh</filename>
<filename>*.cxx</filename>
<filename>*.hxx</filename>
<filename>*.C</filename>
<filename>*.H</filename>
<filename>*.cp</filename>
<filename>*.CPP</filename>
<mime_type>text/x-c++hdr</mime_type>
<mime_type>text/x-c++src</mime_type>
<ensure_nl>true</ensure_nl>
</config>
<rules>
<state name="classname">
<rule pattern="(\[\[.+\]\])(\s*)">
<bygroups>
<token type="NameAttribute"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="[a-zA-Z_]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
<rule pattern="\s*(?=[&gt;{])">
<token type="Text"/>
<pop depth="1"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="^#if\s+0">
<token type="CommentPreproc"/>
<push state="if0"/>
</rule>
<rule pattern="^#">
<token type="CommentPreproc"/>
<push state="macro"/>
</rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
<bygroups>
<usingself state="root"/>
<token type="CommentPreproc"/>
</bygroups>
<push state="if0"/>
</rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
<bygroups>
<usingself state="root"/>
<token type="CommentPreproc"/>
</bygroups>
<push state="macro"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="//(\n|[\w\W]*?[^\\]\n)">
<token type="CommentSingle"/>
</rule>
<rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/(\\\n)?[*][\w\W]*">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="macro">
<rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)">
<bygroups>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="CommentPreprocFile"/>
</bygroups>
</rule>
<rule pattern="[^/\n]+">
<token type="CommentPreproc"/>
</rule>
<rule pattern="/[*](.|\n)*?[*]/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="/">
<token type="CommentPreproc"/>
</rule>
<rule pattern="(?&lt;=\\)\n">
<token type="CommentPreproc"/>
</rule>
<rule pattern="\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
</state>
<state name="statements">
<rule pattern="(reinterpret_cast|static_assert|thread_local|dynamic_cast|static_cast|const_cast|co_return|protected|namespace|consteval|constexpr|typename|co_await|co_yield|operator|restrict|explicit|template|override|noexcept|requires|decltype|alignof|private|alignas|virtual|mutable|nullptr|concept|export|friend|typeid|throws|public|delete|final|throw|catch|using|this|new|try)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(enum)\b(\s+)(class)\b(\s*)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="classname"/>
</rule>
<rule pattern="(class|struct|enum|union)\b(\s*)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="classname"/>
</rule>
<rule pattern="\[\[.+\]\]">
<token type="NameAttribute"/>
</rule>
<rule pattern="(R)(&#34;)([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralString"/>
<token type="LiteralStringDelimiter"/>
<token type="LiteralStringDelimiter"/>
<token type="LiteralString"/>
<token type="LiteralStringDelimiter"/>
<token type="LiteralString"/>
</bygroups>
</rule>
<rule pattern="(u8|u|U)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralString"/>
</bygroups>
<push state="string"/>
</rule>
<rule pattern="(L?)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralString"/>
</bygroups>
<push state="string"/>
</rule>
<rule pattern="(L?)(&#39;)(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\&#39;\n])(&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringChar"/>
<token type="LiteralStringChar"/>
<token type="LiteralStringChar"/>
</bygroups>
</rule>
<rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0[xX]([0-9A-Fa-f](&#39;?[0-9A-Fa-f]+)*)[LlUu]*">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="0(&#39;?[0-7]+)+[LlUu]*">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0[Bb][01](&#39;?[01]+)*[LlUu]*">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="[0-9](&#39;?[0-9]+)*[LlUu]*">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\*/">
<token type="Error"/>
</rule>
<rule pattern="[~!%^&amp;*+=|?:&lt;&gt;/-]">
<token type="Operator"/>
</rule>
<rule pattern="[()\[\],.]">
<token type="Punctuation"/>
</rule>
<rule pattern="(restricted|volatile|continue|register|default|typedef|struct|extern|switch|sizeof|static|return|union|while|const|break|goto|enum|else|case|auto|for|asm|if|do)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(bool|int|long|float|short|double|char((8|16|32)_t)?|wchar_t|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(typename|__inline|restrict|_inline|thread|inline|naked)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(__m(128i|128d|128|64))\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="__(forceinline|identifier|unaligned|declspec|fastcall|stdcall|finally|except|assume|int32|cdecl|int64|based|leave|int16|raise|noop|int8|w64|try|asm)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(true|false|NULL)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="([a-zA-Z_]\w*)(\s*)(:)(?!:)">
<bygroups>
<token type="NameLabel"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="[a-zA-Z_]\w*">
<token type="Name"/>
</rule>
</state>
<state name="function">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="statements"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
<push/>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="string">
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\\([\\abfnrtv&#34;\&#39;]|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="[^\\&#34;\n]+">
<token type="LiteralString"/>
</rule>
<rule pattern="\\\n">
<token type="LiteralString"/>
</rule>
<rule pattern="\\">
<token type="LiteralString"/>
</rule>
</state>
<state name="if0">
<rule pattern="^\s*#if.*?(?&lt;!\\)\n">
<token type="CommentPreproc"/>
<push/>
</rule>
<rule pattern="^\s*#el(?:se|if).*\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern="^\s*#endif.*?(?&lt;!\\)\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern=".*?\n">
<token type="Comment"/>
</rule>
</state>
<state name="root">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<usingself state="root"/>
<usingself state="root"/>
<token type="Punctuation"/>
</bygroups>
<push state="function"/>
</rule>
<rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<usingself state="root"/>
<usingself state="root"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule>
<push state="statement"/>
</rule>
<rule pattern="__(multiple_inheritance|virtual_inheritance|single_inheritance|interface|uuidof|super|event)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="__(offload|blockingoffload|outer)\b">
<token type="KeywordPseudo"/>
</rule>
</state>
<state name="statement">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="statements"/>
</rule>
<rule pattern="[{]">
<token type="Punctuation"/>
<push state="root"/>
</rule>
<rule pattern="[;}]">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

253
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,253 @@
<lexer>
<config>
<name>C</name>
<alias>c</alias>
<filename>*.c</filename>
<filename>*.h</filename>
<filename>*.idc</filename>
<filename>*.x[bp]m</filename>
<mime_type>text/x-chdr</mime_type>
<mime_type>text/x-csrc</mime_type>
<mime_type>image/x-xbitmap</mime_type>
<mime_type>image/x-xpixmap</mime_type>
<ensure_nl>true</ensure_nl>
</config>
<rules>
<state name="statement">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="statements"/>
</rule>
<rule pattern="[{}]">
<token type="Punctuation"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="function">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="statements"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
<push/>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="string">
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\\([\\abfnrtv&#34;\&#39;]|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="[^\\&#34;\n]+">
<token type="LiteralString"/>
</rule>
<rule pattern="\\\n">
<token type="LiteralString"/>
</rule>
<rule pattern="\\">
<token type="LiteralString"/>
</rule>
</state>
<state name="macro">
<rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)">
<bygroups>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="CommentPreprocFile"/>
</bygroups>
</rule>
<rule pattern="[^/\n]+">
<token type="CommentPreproc"/>
</rule>
<rule pattern="/[*](.|\n)*?[*]/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="/">
<token type="CommentPreproc"/>
</rule>
<rule pattern="(?&lt;=\\)\n">
<token type="CommentPreproc"/>
</rule>
<rule pattern="\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
</state>
<state name="if0">
<rule pattern="^\s*#if.*?(?&lt;!\\)\n">
<token type="CommentPreproc"/>
<push/>
</rule>
<rule pattern="^\s*#el(?:se|if).*\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern="^\s*#endif.*?(?&lt;!\\)\n">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern=".*?\n">
<token type="Comment"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="^#if\s+0">
<token type="CommentPreproc"/>
<push state="if0"/>
</rule>
<rule pattern="^#">
<token type="CommentPreproc"/>
<push state="macro"/>
</rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
<bygroups>
<usingself state="root"/>
<token type="CommentPreproc"/>
</bygroups>
<push state="if0"/>
</rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
<bygroups>
<usingself state="root"/>
<token type="CommentPreproc"/>
</bygroups>
<push state="macro"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="//(\n|[\w\W]*?[^\\]\n)">
<token type="CommentSingle"/>
</rule>
<rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/(\\\n)?[*][\w\W]*">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="statements">
<rule pattern="(L?)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralString"/>
</bygroups>
<push state="string"/>
</rule>
<rule pattern="(L?)(&#39;)(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\&#39;\n])(&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringChar"/>
<token type="LiteralStringChar"/>
<token type="LiteralStringChar"/>
</bygroups>
</rule>
<rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0x[0-9a-fA-F]+[LlUu]*">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="0[0-7]+[LlUu]*">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="\d+[LlUu]*">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\*/">
<token type="Error"/>
</rule>
<rule pattern="[~!%^&amp;*+=|?:&lt;&gt;/-]">
<token type="Operator"/>
</rule>
<rule pattern="[()\[\],.]">
<token type="Punctuation"/>
</rule>
<rule pattern="(restricted|volatile|continue|register|default|typedef|struct|extern|switch|sizeof|static|return|union|while|const|break|goto|enum|else|case|auto|for|asm|if|do)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(bool|int|long|float|short|double|char((8|16|32)_t)?|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(typename|__inline|restrict|_inline|thread|inline|naked)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(__m(128i|128d|128|64))\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="__(forceinline|identifier|unaligned|declspec|fastcall|finally|stdcall|wchar_t|assume|except|int32|cdecl|int16|leave|based|raise|int64|noop|int8|w64|try|asm)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(true|false|NULL)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="([a-zA-Z_]\w*)(\s*)(:)(?!:)">
<bygroups>
<token type="NameLabel"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="[a-zA-Z_]\w*">
<token type="Name"/>
</rule>
</state>
<state name="root">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<usingself state="root"/>
<usingself state="root"/>
<token type="Punctuation"/>
</bygroups>
<push state="function"/>
</rule>
<rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<usingself state="root"/>
<usingself state="root"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule>
<push state="statement"/>
</rule>
</state>
</rules>
</lexer>

122
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,122 @@
<lexer>
<config>
<name>Cap&#39;n Proto</name>
<alias>capnp</alias>
<filename>*.capnp</filename>
</config>
<rules>
<state name="root">
<rule pattern="#.*?$">
<token type="CommentSingle"/>
</rule>
<rule pattern="@[0-9a-zA-Z]*">
<token type="NameDecorator"/>
</rule>
<rule pattern="=">
<token type="Literal"/>
<push state="expression"/>
</rule>
<rule pattern=":">
<token type="NameClass"/>
<push state="type"/>
</rule>
<rule pattern="\$">
<token type="NameAttribute"/>
<push state="annotation"/>
</rule>
<rule pattern="(struct|enum|interface|union|import|using|const|annotation|extends|in|of|on|as|with|from|fixed)\b">
<token type="Keyword"/>
</rule>
<rule pattern="[\w.]+">
<token type="Name"/>
</rule>
<rule pattern="[^#@=:$\w]+">
<token type="Text"/>
</rule>
</state>
<state name="type">
<rule pattern="[^][=;,(){}$]+">
<token type="NameClass"/>
</rule>
<rule pattern="[[(]">
<token type="NameClass"/>
<push state="parentype"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="parentype">
<rule pattern="[^][;()]+">
<token type="NameClass"/>
</rule>
<rule pattern="[[(]">
<token type="NameClass"/>
<push/>
</rule>
<rule pattern="[])]">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="expression">
<rule pattern="[^][;,(){}$]+">
<token type="Literal"/>
</rule>
<rule pattern="[[(]">
<token type="Literal"/>
<push state="parenexp"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="parenexp">
<rule pattern="[^][;()]+">
<token type="Literal"/>
</rule>
<rule pattern="[[(]">
<token type="Literal"/>
<push/>
</rule>
<rule pattern="[])]">
<token type="Literal"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="annotation">
<rule pattern="[^][;,(){}=:]+">
<token type="NameAttribute"/>
</rule>
<rule pattern="[[(]">
<token type="NameAttribute"/>
<push state="annexp"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="annexp">
<rule pattern="[^][;()]+">
<token type="NameAttribute"/>
</rule>
<rule pattern="[[(]">
<token type="NameAttribute"/>
<push/>
</rule>
<rule pattern="[])]">
<token type="NameAttribute"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

151
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,151 @@
<lexer>
<config>
<name>Ceylon</name>
<alias>ceylon</alias>
<filename>*.ceylon</filename>
<mime_type>text/x-ceylon</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="class">
<rule pattern="[A-Za-z_]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
</state>
<state name="import">
<rule pattern="[a-z][\w.]*">
<token type="NameNamespace"/>
<pop depth="1"/>
</rule>
</state>
<state name="comment">
<rule pattern="[^*/]">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push/>
</rule>
<rule pattern="\*/">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule pattern="[*/]">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="root">
<rule pattern="^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push state="comment"/>
</rule>
<rule pattern="(shared|abstract|formal|default|actual|variable|deprecated|small|late|literal|doc|by|see|throws|optional|license|tagged|final|native|annotation|sealed)\b">
<token type="NameDecorator"/>
</rule>
<rule pattern="(break|case|catch|continue|else|finally|for|in|if|return|switch|this|throw|try|while|is|exists|dynamic|nonempty|then|outer|assert|let)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(abstracts|extends|satisfies|super|given|of|out|assign)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(function|value|void|new)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(assembly|module|package)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(class|interface|object|alias)(\s+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
</bygroups>
<push state="class"/>
</rule>
<rule pattern="(import)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="import"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#39;\\.&#39;|&#39;[^\\]&#39;|&#39;\\\{#[0-9a-fA-F]{4}\}&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="&#34;.*``.*``.*&#34;">
<token type="LiteralStringInterpol"/>
</rule>
<rule pattern="(\.)([a-z_]\w*)">
<bygroups>
<token type="Operator"/>
<token type="NameAttribute"/>
</bygroups>
</rule>
<rule pattern="[a-zA-Z_]\w*:">
<token type="NameLabel"/>
</rule>
<rule pattern="[a-zA-Z_]\w*">
<token type="Name"/>
</rule>
<rule pattern="[~^*!%&amp;\[\](){}&lt;&gt;|+=:;,./?-]">
<token type="Operator"/>
</rule>
<rule pattern="\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="#[0-9a-fA-F]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="\$([01]{4})(_[01]{4})+">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="\$[01]+">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="\d{1,3}(_\d{3})+[kMGTP]?">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[0-9]+[kMGTP]?">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
</state>
</rules>
</lexer>

194
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,194 @@
<lexer>
<config>
<name>CFEngine3</name>
<alias>cfengine3</alias>
<alias>cf3</alias>
<filename>*.cf</filename>
</config>
<rules>
<state name="interpol">
<rule pattern="\$[{(]">
<token type="LiteralStringInterpol"/>
<push/>
</rule>
<rule pattern="[})]">
<token type="LiteralStringInterpol"/>
<pop depth="1"/>
</rule>
<rule pattern="[^${()}]+">
<token type="LiteralStringInterpol"/>
</rule>
</state>
<state name="arglist">
<rule pattern="\)">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern=",">
<token type="Punctuation"/>
</rule>
<rule pattern="\w+">
<token type="NameVariable"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
</state>
<state name="root">
<rule pattern="#.*?\n">
<token type="Comment"/>
</rule>
<rule pattern="@.*?\n">
<token type="CommentPreproc"/>
</rule>
<rule pattern="(body)(\s+)(\S+)(\s+)(control)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
</bygroups>
</rule>
<rule pattern="(body|bundle|promise)(\s+)(\S+)(\s+)(\w+)(\()">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Punctuation"/>
</bygroups>
<push state="arglist"/>
</rule>
<rule pattern="(body|bundle|promise)(\s+)(\S+)(\s+)(\w+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(\S+)(\s*)(=&gt;)(\s*)">
<bygroups>
<token type="KeywordReserved"/>
<token type="Text"/>
<token type="Operator"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="doublequotestring"/>
</rule>
<rule pattern="&#39;">
<token type="LiteralString"/>
<push state="singlequotestring"/>
</rule>
<rule pattern="&#96;">
<token type="LiteralString"/>
<push state="backtickstring"/>
</rule>
<rule pattern="(\w+)(\()">
<bygroups>
<token type="NameFunction"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="([\w.!&amp;|()]+)(::)">
<bygroups>
<token type="NameClass"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(\w+)(:)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="@[{(][^)}]+[})]">
<token type="NameVariable"/>
</rule>
<rule pattern="[(){},;]">
<token type="Punctuation"/>
</rule>
<rule pattern="=&gt;">
<token type="Operator"/>
</rule>
<rule pattern="-&gt;">
<token type="Operator"/>
</rule>
<rule pattern="\d+\.\d+">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\w+">
<token type="NameFunction"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
</state>
<state name="doublequotestring">
<rule pattern="\$[{(]">
<token type="LiteralStringInterpol"/>
<push state="interpol"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\n">
<token type="LiteralString"/>
</rule>
<rule pattern=".">
<token type="LiteralString"/>
</rule>
</state>
<state name="singlequotestring">
<rule pattern="\$[{(]">
<token type="LiteralStringInterpol"/>
<push state="interpol"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="&#39;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\n">
<token type="LiteralString"/>
</rule>
<rule pattern=".">
<token type="LiteralString"/>
</rule>
</state>
<state name="backtickstring">
<rule pattern="\$[{(]">
<token type="LiteralStringInterpol"/>
<push state="interpol"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="&#96;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\n">
<token type="LiteralString"/>
</rule>
<rule pattern=".">
<token type="LiteralString"/>
</rule>
</state>
</rules>
</lexer>

92
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,92 @@
<lexer>
<config>
<name>cfstatement</name>
<alias>cfs</alias>
<case_insensitive>true</case_insensitive>
<not_multiline>true</not_multiline>
</config>
<rules>
<state name="root">
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*(?:.|\n)*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="\+\+|--">
<token type="Operator"/>
</rule>
<rule pattern="[-+*/^&amp;=!]">
<token type="Operator"/>
</rule>
<rule pattern="&lt;=|&gt;=|&lt;|&gt;|==">
<token type="Operator"/>
</rule>
<rule pattern="mod\b">
<token type="Operator"/>
</rule>
<rule pattern="(eq|lt|gt|lte|gte|not|is|and|or)\b">
<token type="Operator"/>
</rule>
<rule pattern="\|\||&amp;&amp;">
<token type="Operator"/>
</rule>
<rule pattern="\?">
<token type="Operator"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string"/>
</rule>
<rule pattern="&#39;.*?&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumber"/>
</rule>
<rule pattern="(if|else|len|var|xml|default|break|switch|component|property|function|do|try|catch|in|continue|for|return|while|required|any|array|binary|boolean|component|date|guid|numeric|query|string|struct|uuid|case)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(application|session|client|cookie|super|this|variables|arguments)\b">
<token type="NameConstant"/>
</rule>
<rule pattern="([a-z_$][\w.]*)(\s*)(\()">
<bygroups>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="[a-z_$][\w.]*">
<token type="NameVariable"/>
</rule>
<rule pattern="[()\[\]{};:,.\\]">
<token type="Punctuation"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
</state>
<state name="string">
<rule pattern="&#34;&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="#.+?#">
<token type="LiteralStringInterpol"/>
</rule>
<rule pattern="[^&#34;#]+">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="#">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

134
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,134 @@
<lexer>
<config>
<name>ChaiScript</name>
<alias>chai</alias>
<alias>chaiscript</alias>
<filename>*.chai</filename>
<mime_type>text/x-chaiscript</mime_type>
<mime_type>application/x-chaiscript</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="dqstring">
<rule pattern="\$\{[^&#34;}]+?\}">
<token type="LiteralStringInterpol"/>
</rule>
<rule pattern="\$">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\\\\">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\\&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="[^\\&#34;$]+">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
</state>
<state name="commentsandwhitespace">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="^\#.*?\n">
<token type="CommentSingle"/>
</rule>
</state>
<state name="slashstartsregex">
<rule>
<include state="commentsandwhitespace"/>
</rule>
<rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
<rule pattern="(?=/)">
<token type="Text"/>
<push state="#pop" state="badregex"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="badregex">
<rule pattern="\n">
<token type="Text"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule>
<include state="commentsandwhitespace"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="\+\+|--|~|&amp;&amp;|\?|:|\|\||\\(?=\n)|\.\.(&lt;&lt;|&gt;&gt;&gt;?|==?|!=?|[-&lt;&gt;+*%&amp;|^/])=?">
<token type="Operator"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="[{(\[;,]">
<token type="Punctuation"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="[})\].]">
<token type="Punctuation"/>
</rule>
<rule pattern="[=+\-*/]">
<token type="Operator"/>
</rule>
<rule pattern="(for|in|while|do|break|return|continue|if|else|throw|try|catch)\b">
<token type="Keyword"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="(var)\b">
<token type="KeywordDeclaration"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="(attr|def|fun)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(true|false)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(eval|throw)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="`\S+`">
<token type="NameBuiltin"/>
</rule>
<rule pattern="[$a-zA-Z_]\w*">
<token type="NameOther"/>
</rule>
<rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0x[0-9a-fA-F]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="dqstring"/>
</rule>
<rule pattern="&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
</state>
</rules>
</lexer>

68
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,68 @@
<lexer>
<config>
<name>Clojure</name>
<alias>clojure</alias>
<alias>clj</alias>
<filename>*.clj</filename>
<mime_type>text/x-clojure</mime_type>
<mime_type>application/x-clojure</mime_type>
</config>
<rules>
<state name="root">
<rule pattern=";.*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="[,\s]+">
<token type="Text"/>
</rule>
<rule pattern="-?\d+\.\d+">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="-?\d+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="0x-?[abcdef\d]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#39;(?!#)[\w!$%*+&lt;=&gt;?/.#-]+">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="\\(.|[a-z]+)">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="::?#?(?!#)[\w!$%*+&lt;=&gt;?/.#-]+">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="~@|[`\&#39;#^~&amp;@]">
<token type="Operator"/>
</rule>
<rule pattern="(quote|loop|new|var|let|def|if|do|fn|\.) ">
<token type="Keyword"/>
</rule>
<rule pattern="(definterface|defprotocol|defproject|defstruct|definline|defmethod|defrecord|defmulti|defmacro|defonce|declare|deftype|defn-|def-|defn|ns) ">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(clear-agent-errors|construct-proxy|bit-shift-right|get-proxy-class|special-symbol\?|with-local-vars|proxy-mappings|bit-shift-left|sorted-map-by|macroexpand-1|remove-method|create-struct|resultset-seq|inspect-table|inspect-tree|update-proxy|aset-boolean|agent-errors|with-out-str|insert-child|append-child|intersection|insert-right|to-array-2d|rename-keys|println-str|macroexpand|aset-double|select-keys|insert-left|aset-float|aset-short|interleave|re-pattern|make-array|identical\?|take-while|into-array|re-matches|re-matcher|complement|vector-zip|drop-while|when-first|map-invert|sorted-map|ns-resolve|difference|sorted-set|merge-with|ns-publics|split-with|ns-interns|ns-imports|constantly|struct-map|comparator|not-every\?|aset-long|print-str|re-groups|lazy-cons|remove-ns|namespace|await-for|contains\?|array-map|create-ns|make-node|with-meta|with-open|instance\?|ns-refers|aset-byte|aset-char|load-file|read-line|replicate|send-off|aset-int|distinct|not-any\?|take-nth|tree-seq|split-at|to-array|ns-unmap|identity|find-doc|find-var|hash-set|when-not|children|when-let|lazy-cat|hash-map|line-seq|rand-int|keyword\?|file-seq|accessor|replace|bit-not|find-ns|resolve|bit-and|println|binding|locking|vector\?|partial|nthrest|max-key|bit-xor|dotimes|ref-set|xml-seq|boolean|var-get|seq-zip|sort-by|branch\?|butlast|symbol\?|project|min-key|ns-name|comment|string\?|iterate|commute|alength|xml-zip|keyword|newline|re-find|reverse|var-set|prn-str|bit-or|import|re-seq|rights|assert|reduce|remove|gensym|rename|filter|ffirst|if-let|false\?|pr-str|every\?|vector|mapcat|ensure|rfirst|concat|second|double|select|dosync|symbol|subvec|if-not|ns-map|struct|zipper|zipmap|all-ns|dissoc|repeat|assoc|cycle|class|deref|zero\?|slurp|short|dorun|doseq|merge|memfn|agent|rrest|count|parse|right|float|flush|alter|fnseq|frest|doall|print|refer|in-ns|apply|union|await|list\*|proxy|lefts|true\?|index|first|range|left|keys|aset|join|into|last|read|rand|list|load|long|loop|conj|test|vals|pos\?|bean|p
eek|subs|path|time|find|rest|eval|end\?|edit|map\?|drop|root|aget|rseq|down|doto|meta|send|when|byte|take|seq\?|sync|name|neg\?|some|sort|cast|char|disj|next|not=|nil\?|node|comp|cond|cons|quot|var\?|max|new|rem|set|doc|seq|for|get|ref|inc|int|key|not|prn|min|map|val|nth|dec|pop|and|str|pr|\.\.|up|&gt;=|-&gt;|==|&lt;=|or|=|/|&gt;|-|\*|\+|&lt;) ">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(?&lt;=\()(?!#)[\w!$%*+&lt;=&gt;?/.#-]+">
<token type="NameFunction"/>
</rule>
<rule pattern="(?!#)[\w!$%*+&lt;=&gt;?/.#-]+">
<token type="NameVariable"/>
</rule>
<rule pattern="(\[|\])">
<token type="Punctuation"/>
</rule>
<rule pattern="(\{|\})">
<token type="Punctuation"/>
</rule>
<rule pattern="(\(|\))">
<token type="Punctuation"/>
</rule>
</state>
</rules>
</lexer>

90
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,90 @@
<lexer>
<config>
<name>CMake</name>
<alias>cmake</alias>
<filename>*.cmake</filename>
<filename>CMakeLists.txt</filename>
<mime_type>text/x-cmake</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="\b(\w+)([ \t]*)(\()">
<bygroups>
<token type="NameBuiltin"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
<push state="args"/>
</rule>
<rule>
<include state="keywords"/>
</rule>
<rule>
<include state="ws"/>
</rule>
</state>
<state name="args">
<rule pattern="\(">
<token type="Punctuation"/>
<push/>
</rule>
<rule pattern="\)">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="(\$\{)(.+?)(\})">
<bygroups>
<token type="Operator"/>
<token type="NameVariable"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="(\$ENV\{)(.+?)(\})">
<bygroups>
<token type="Operator"/>
<token type="NameVariable"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="(\$&lt;)(.+?)(&gt;)">
<bygroups>
<token type="Operator"/>
<token type="NameVariable"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="(?s)&#34;.*?&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\\\S+">
<token type="LiteralString"/>
</rule>
<rule pattern="[^)$&#34;# \t\n]+">
<token type="LiteralString"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule>
<include state="keywords"/>
</rule>
<rule>
<include state="ws"/>
</rule>
</state>
<state name="string"/>
<state name="keywords">
<rule pattern="\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|MSVC70|MSVC71|MSVC80|MSVC90)\b">
<token type="Keyword"/>
</rule>
</state>
<state name="ws">
<rule pattern="[ \t]+">
<token type="Text"/>
</rule>
<rule pattern="#.*\n">
<token type="Comment"/>
</rule>
</state>
</rules>
</lexer>

90
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,90 @@
<lexer>
<config>
<name>COBOL</name>
<alias>cobol</alias>
<filename>*.cob</filename>
<filename>*.COB</filename>
<filename>*.cpy</filename>
<filename>*.CPY</filename>
<mime_type>text/x-cobol</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="strings">
<rule pattern="&#34;[^&#34;\n]*(&#34;|\n)">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#39;[^&#39;\n]*(&#39;|\n)">
<token type="LiteralStringSingle"/>
</rule>
</state>
<state name="nums">
<rule pattern="\d+(\s*|\.$|$)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[+-]?\d*\.\d+(E[-+]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[+-]?\d+\.\d*(E[-+]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
</state>
<state name="root">
<rule>
<include state="comment"/>
</rule>
<rule>
<include state="strings"/>
</rule>
<rule>
<include state="core"/>
</rule>
<rule>
<include state="nums"/>
</rule>
<rule pattern="[a-z0-9]([\w\-]*[a-z0-9]+)?">
<token type="NameVariable"/>
</rule>
<rule pattern="[ \t]+">
<token type="Text"/>
</rule>
</state>
<state name="comment">
<rule pattern="(^.{6}[*/].*\n|^.{6}|\*&gt;.*\n)">
<token type="Comment"/>
</rule>
</state>
<state name="core">
<rule pattern="(^|(?&lt;=[^\w\-]))(ALL\s+)?((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)\s*($|(?=[^\w\-]))">
<token type="NameConstant"/>
</rule>
<rule pattern="(^|(?&lt;=[^\w\-]))(WORKING-STORAGE|IDENTIFICATION|LOCAL-STORAGE|CONFIGURATION|END-EVALUATE|FILE-CONTROL|END-UNSTRING|END-SUBTRACT|END-MULTIPLY|INPUT-OUTPUT|END-PERFORM|END-DISPLAY|END-OF-PAGE|END-COMPUTE|ENVIRONMENT|I-O-CONTROL|END-REWRITE|END-RETURN|INITIALIZE|END-ACCEPT|END-DIVIDE|PROGRAM-ID|END-STRING|END-DELETE|END-SEARCH|END-WRITE|PROCEDURE|END-START|TERMINATE|END-READ|MULTIPLY|CONTINUE|SUPPRESS|SUBTRACT|INITIATE|UNSTRING|DIVISION|VALIDATE|END-CALL|ALLOCATE|GENERATE|EVALUATE|PERFORM|FOREVER|LINKAGE|END-ADD|REWRITE|INSPECT|SECTION|RELEASE|COMPUTE|DISPLAY|END-IF|GOBACK|INVOKE|CANCEL|UNLOCK|SCREEN|SEARCH|DELETE|STRING|DIVIDE|ACCEPT|RETURN|RESUME|START|RAISE|MERGE|CLOSE|WRITE|FILE|STOP|FREE|READ|ELSE|THEN|SORT|EXIT|OPEN|CALL|MOVE|DATA|END|SET|ADD|USE|GO|FD|SD|IF)\s*($|(?=[^\w\-]))">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(^|(?&lt;=[^\w\-]))(ALPHANUMERIC-EDITED|PROCEDURE-POINTER|ENVIRONMENT-VALUE|ENVIRONMENT-NAME|ALPHABETIC-UPPER|FOREGROUND-COLOR|ALPHABETIC-LOWER|BACKGROUND-COLOR|OBJECT-COMPUTER|SOURCE-COMPUTER|PROGRAM-POINTER|ARGUMENT-NUMBER|NATIONAL-EDITED|NUMERIC-EDITED|LINAGE-COUNTER|UNSIGNED-SHORT|ARGUMENT-VALUE|PACKED-DECIMAL|SPECIAL-NAMES|AUTOTERMINATE|DECIMAL-POINT|CORRESPONDING|UNSIGNED-LONG|SEGMENT-LIMIT|REVERSE-VIDEO|SIGNED-SHORT|SYNCHRONIZED|ALPHANUMERIC|ALTERNATEANY|UNSIGNED-INT|ORGANIZATION|DECLARATIVES|COMMAND-LINE|FUNCTION-ID|SIGNED-LONG|INITIALIZED|FLOAT-SHORT|BYTE-LENGTH|DAY-OF-WEEK|DESCENDING|CHARACTERS|SEQUENTIAL|REPOSITORY|SIGNED-INT|SORT-MERGE|DUPLICATES|STANDARD-1|STANDARD-2|CONVERTING|FLOAT-LONG|ALPHABETIC|PROCEDURES|RECORDING|HIGHLIGHT|ADVANCING|RETURNING|REPLACING|UNDERLINE|TRANSFORM|REMAINDER|INTRINSIC|JUSTIFIED|REFERENCE|REDEFINES|EXCLUSIVE|RECURSIVE|EXCEPTION|COLLATING|ASCENDING|REPORTING|AUTO-SKIP|DEBUGGING|AUTOMATIC|CHARACTER|PARAGRAPH|DELIMITED|DELIMITER|DEPENDING|PREVIOUS|POSITIVE|POSITION|CHAINING|STANDARD|FUNCTION|OVERFLOW|PRINTING|CODE-SET|SEPARATE|OPTIONAL|SYMBOLIC|SEQUENCE|NEGATIVE|TALLYING|NATIONAL|REQUIRED|CURRENCY|MULTIPLE|TRAILING|LOWLIGHT|OVERLINE|SENTENCE|RELATIVE|ROLLBACK|CONTROLS|UNSIGNED|INDICATE|IGNORING|ALPHABET|CONTAINS|CONSTANT|EXTERNAL|YYYYMMDD|FILE-ID|CONTROL|RENAMES|REMOVAL|HEADING|YYYYDDD|RESERVE|VARYING|RECORDS|REPORTS|COLUMNS|PROGRAM|INDEXED|FOOTING|INITIAL|PROCEED|ROUNDED|DEFAULT|PRESENT|INVALID|POINTER|SHARING|PADDING|DYNAMIC|OMITTED|ADDRESS|LEADING|CONTENT|NUMBERS|THROUGH|NUMERIC|PRINTER|SELECT|COMMON|REPORT|LOCALE|ACCESS|LINAGE|MANUAL|MEMORY|LIMITS|FILLER|EXTEND|ESCAPE|GIVING|NATIVE|GLOBAL|COMMIT|ASSIGN|STATUS|OUTPUT|NUMBER|LENGTH|REWIND|CURSOR|BEFORE|OCCURS|SOURCE|IGNORE|VALUES|RECORD|BOTTOM|SIGNED|RANDOM|QUOTES|PROMPT|SCROLL|EBCDIC|FORMAT|SECURE|DETAIL|UPDATE|COLUMN|TIMES|USAGE|CLASS|FINAL|FIRST|USING|CYCLE|UNTIL|MINUS|INPUT|FIXED|INDEX|VALUE|OTHER|QUOTE|ENTRY|ORDER|RIGHT|BLOCK|LABEL|BLINK|BASED|AFTER|NULLS
|COUNT|AREAS|WORDS|GROUP|ERASE|LIMIT|LINES|ALTER|COMMA|ERROR|BLANK|THRU|CORR|REEL|FROM|WITH|DATE|WHEN|CODE|SAME|WAIT|COPY|DISK|PLUS|COLS|INTO|UPON|DOWN|PAGE|SIGN|JUST|ONLY|LAST|ALSO|SIZE|UNIT|LINE|BELL|TYPE|BEEP|LOCK|AUTO|NULL|AREA|MODE|SYNC|TIME|NEXT|TAPE|TEST|FULL|LEFT|EOL|CRT|I-O|TOP|DAY|EOS|EOP|FOR|ARE|OFF|ALL|COL|SUM|KEY|RUN|UP|ON|IS|IN|BY|OF|ID|DE|AT|RD|TO|NO|AS)\s*($|(?=[^\w\-]))">
<token type="KeywordPseudo"/>
</rule>
<rule pattern="(^|(?&lt;=[^\w\-]))(REPRESENTS-NOT-A-NUMBER|FUNCTION-POINTER|FLOAT-DECIMAL-34|FLOAT-DECIMAL-16|ENTRY-CONVENTION|EXCEPTION-OBJECT|OBJECT-REFERENCE|FLOAT-BINARY-34|FLOAT-BINARY-16|VALIDATE-STATUS|FLOAT-EXTENDED|CLASSIFICATION|FLOAT-BINARY-7|SYSTEM-DEFAULT|COMMUNICATION|PAGE-COUNTER|USER-DEFAULT|LINE-COUNTER|ACTIVE-CLASS|DATA-POINTER|INTERFACE-ID|DESTINATION|GROUP-USAGE|LC_MESSAGES|SUB-QUEUE-3|SUB-QUEUE-2|SUB-QUEUE-1|END-RECEIVE|LC_MONETARY|VAL-STATUS|LC_NUMERIC|IMPLEMENTS|LC_COLLATE|ARITHMETIC|STATEMENT|METHOD-ID|INTERFACE|ATTRIBUTE|PROTOTYPE|CONDITION|UNIVERSAL|LC_CTYPE|VALIDATE|PROPERTY|TERMINAL|INFINITY|INHERITS|CLASS-ID|RELATION|OVERRIDE|ANYCASE|DISABLE|ALIGNED|REPLACE|RAISING|EXPANDS|FACTORY|TYPEDEF|MESSAGE|RECEIVE|BOOLEAN|SECONDS|SOURCES|SEGMENT|LC_TIME|OPTIONS|METHOD|UTF-16|OBJECT|NORMAL|NESTED|CENTER|LC_ALL|RESUME|INVOKE|ENABLE|STRONG|FORMAT|SYMBOL|B-XOR|QUEUE|RESET|PURGE|RETRY|B-AND|VALID|B-NOT|UTF-8|SUPER|RAISE|UCS-4|TABLE|CHAIN|NONE|B-OR|STEP|SEND|SELF|TEXT|BIT|EMI|ESI|EGI|GET|CD|CF|EC|EO|PF|PH|RH|RF|CH)\s*($|(?=[^\w\-]))">
<token type="Error"/>
</rule>
<rule pattern="(^|(?&lt;=[^\w\-]))(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|BINARY-C-LONG|BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|BINARY)\s*($|(?=[^\w\-]))">
<token type="KeywordType"/>
</rule>
<rule pattern="(\*\*|\*|\+|-|/|&lt;=|&gt;=|&lt;|&gt;|==|/=|=)">
<token type="Operator"/>
</rule>
<rule pattern="([(),;:&amp;%.])">
<token type="Punctuation"/>
</rule>
<rule pattern="(^|(?&lt;=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*($|(?=[^\w\-]))">
<token type="NameFunction"/>
</rule>
<rule pattern="(^|(?&lt;=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(^|(?&lt;=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|greater|less|than|not|and|or)\s*($|(?=[^\w\-]))">
<token type="OperatorWord"/>
</rule>
</state>
</rules>
</lexer>

210
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,210 @@
<lexer>
<config>
<name>CoffeeScript</name>
<alias>coffee-script</alias>
<alias>coffeescript</alias>
<alias>coffee</alias>
<filename>*.coffee</filename>
<mime_type>text/coffeescript</mime_type>
<dot_all>true</dot_all>
<not_multiline>true</not_multiline>
</config>
<rules>
<state name="commentsandwhitespace">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="###[^#].*?###">
<token type="CommentMultiline"/>
</rule>
<rule pattern="#(?!##[^#]).*?\n">
<token type="CommentSingle"/>
</rule>
</state>
<state name="multilineregex">
<rule pattern="[^/#]+">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="///([gim]+\b|\B)">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
<rule pattern="#\{">
<token type="LiteralStringInterpol"/>
<push state="interpoling_string"/>
</rule>
<rule pattern="[/#]">
<token type="LiteralStringRegex"/>
</rule>
</state>
<state name="slashstartsregex">
<rule>
<include state="commentsandwhitespace"/>
</rule>
<rule pattern="///">
<token type="LiteralStringRegex"/>
<push state="#pop" state="multilineregex"/>
</rule>
<rule pattern="/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
<rule pattern="/">
<token type="Operator"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="tsqs">
<rule pattern="&#39;&#39;&#39;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="#|\\.|\&#39;|&#34;">
<token type="LiteralString"/>
</rule>
<rule>
<include state="strings"/>
</rule>
</state>
<state name="dqs">
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\\.|\&#39;">
<token type="LiteralString"/>
</rule>
<rule pattern="#\{">
<token type="LiteralStringInterpol"/>
<push state="interpoling_string"/>
</rule>
<rule pattern="#">
<token type="LiteralString"/>
</rule>
<rule>
<include state="strings"/>
</rule>
</state>
<state name="sqs">
<rule pattern="&#39;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="#|\\.|&#34;">
<token type="LiteralString"/>
</rule>
<rule>
<include state="strings"/>
</rule>
</state>
<state name="tdqs">
<rule pattern="&#34;&#34;&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\\.|\&#39;|&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="#\{">
<token type="LiteralStringInterpol"/>
<push state="interpoling_string"/>
</rule>
<rule pattern="#">
<token type="LiteralString"/>
</rule>
<rule>
<include state="strings"/>
</rule>
</state>
<state name="root">
<rule>
<include state="commentsandwhitespace"/>
</rule>
<rule pattern="^(?=\s|/)">
<token type="Text"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="\+\+|~|&amp;&amp;|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|\|\||\\(?=\n)|(&lt;&lt;|&gt;&gt;&gt;?|==?(?!&gt;)|!=?|=(?!&gt;)|-(?!&gt;)|[&lt;&gt;+*`%&amp;\|\^/])=?">
<token type="Operator"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="(?:\([^()]*\))?\s*[=-]&gt;">
<token type="NameFunction"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="[{(\[;,]">
<token type="Punctuation"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="[})\].]">
<token type="Punctuation"/>
</rule>
<rule pattern="(?&lt;![.$])(for|own|in|of|while|until|loop|break|return|continue|switch|when|then|if|unless|else|throw|try|catch|finally|new|delete|typeof|instanceof|super|extends|this|class|by)\b">
<token type="Keyword"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="(?&lt;![.$])(true|false|yes|no|on|off|null|NaN|Infinity|undefined)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="[$a-zA-Z_][\w.:$]*\s*[:=]\s">
<token type="NameVariable"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="@[$a-zA-Z_][\w.:$]*\s*[:=]\s">
<token type="NameVariableInstance"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="@">
<token type="NameOther"/>
<push state="slashstartsregex"/>
</rule>
<rule pattern="@?[$a-zA-Z_][\w$]*">
<token type="NameOther"/>
</rule>
<rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0x[0-9a-fA-F]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;&#34;&#34;">
<token type="LiteralString"/>
<push state="tdqs"/>
</rule>
<rule pattern="&#39;&#39;&#39;">
<token type="LiteralString"/>
<push state="tsqs"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="dqs"/>
</rule>
<rule pattern="&#39;">
<token type="LiteralString"/>
<push state="sqs"/>
</rule>
</state>
<state name="interpoling_string">
<rule pattern="\}">
<token type="LiteralStringInterpol"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="strings">
<rule pattern="[^#\\\&#39;&#34;]+">
<token type="LiteralString"/>
</rule>
</state>
</rules>
</lexer>

184
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,184 @@
<lexer>
<config>
<name>Common Lisp</name>
<alias>common-lisp</alias>
<alias>cl</alias>
<alias>lisp</alias>
<filename>*.cl</filename>
<filename>*.lisp</filename>
<mime_type>text/x-common-lisp</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="body">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern=";.*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="#\|">
<token type="CommentMultiline"/>
<push state="multiline-comment"/>
</rule>
<rule pattern="#\d*Y.*$">
<token type="CommentSpecial"/>
</rule>
<rule pattern="&#34;(\\.|\\\n|[^&#34;\\])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern=":(\|[^|]+\||(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~]|[#.:])*)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="::(\|[^|]+\||(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~]|[#.:])*)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern=":#(\|[^|]+\||(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~]|[#.:])*)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="&#39;(\|[^|]+\||(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~]|[#.:])*)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="&#39;">
<token type="Operator"/>
</rule>
<rule pattern="`">
<token type="Operator"/>
</rule>
<rule pattern="[-+]?\d+\.?(?=[ &#34;()\&#39;\n,;`])">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[-+]?\d+/\d+(?=[ &#34;()\&#39;\n,;`])">
<token type="LiteralNumber"/>
</rule>
<rule pattern="[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)(?=[ &#34;()\&#39;\n,;`])">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="#\\.(?=[ &#34;()\&#39;\n,;`])">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="#\\(\|[^|]+\||(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~]|[#.:])*)">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="#\(">
<token type="Operator"/>
<push state="body"/>
</rule>
<rule pattern="#\d*\*[01]*">
<token type="LiteralOther"/>
</rule>
<rule pattern="#:(\|[^|]+\||(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~]|[#.:])*)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="#[.,]">
<token type="Operator"/>
</rule>
<rule pattern="#\&#39;">
<token type="NameFunction"/>
</rule>
<rule pattern="#b[+-]?[01]+(/[01]+)?">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="#o[+-]?[0-7]+(/[0-7]+)?">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="#x[+-]?[0-9a-f]+(/[0-9a-f]+)?">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?">
<token type="LiteralNumber"/>
</rule>
<rule pattern="(#c)(\()">
<bygroups>
<token type="LiteralNumber"/>
<token type="Punctuation"/>
</bygroups>
<push state="body"/>
</rule>
<rule pattern="(#\d+a)(\()">
<bygroups>
<token type="LiteralOther"/>
<token type="Punctuation"/>
</bygroups>
<push state="body"/>
</rule>
<rule pattern="(#s)(\()">
<bygroups>
<token type="LiteralOther"/>
<token type="Punctuation"/>
</bygroups>
<push state="body"/>
</rule>
<rule pattern="#p?&#34;(\\.|[^&#34;])*&#34;">
<token type="LiteralOther"/>
</rule>
<rule pattern="#\d+=">
<token type="Operator"/>
</rule>
<rule pattern="#\d+#">
<token type="Operator"/>
</rule>
<rule pattern="#+nil(?=[ &#34;()\&#39;\n,;`])\s*\(">
<token type="CommentPreproc"/>
<push state="commented-form"/>
</rule>
<rule pattern="#[+-]">
<token type="Operator"/>
</rule>
<rule pattern="(,@|,|\.)">
<token type="Operator"/>
</rule>
<rule pattern="(t|nil)(?=[ &#34;()\&#39;\n,;`])">
<token type="NameConstant"/>
</rule>
<rule pattern="\*(\|[^|]+\||(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~]|[#.:])*)\*">
<token type="NameVariableGlobal"/>
</rule>
<rule pattern="(\|[^|]+\||(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@\[\]^{}~]|[#.:])*)">
<token type="NameVariable"/>
</rule>
<rule pattern="\(">
<token type="Punctuation"/>
<push state="body"/>
</rule>
<rule pattern="\)">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule>
<push state="body"/>
</rule>
</state>
<state name="multiline-comment">
<rule pattern="#\|">
<token type="CommentMultiline"/>
<push/>
</rule>
<rule pattern="\|#">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule pattern="[^|#]+">
<token type="CommentMultiline"/>
</rule>
<rule pattern="[|#]">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="commented-form">
<rule pattern="\(">
<token type="CommentPreproc"/>
<push/>
</rule>
<rule pattern="\)">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern="[^()]+">
<token type="CommentPreproc"/>
</rule>
</state>
</rules>
</lexer>

136
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,136 @@
<lexer>
<config>
<name>Coq</name>
<alias>coq</alias>
<filename>*.v</filename>
<mime_type>text/x-coq</mime_type>
</config>
<rules>
<state name="string">
<rule pattern="[^&#34;]+">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
</state>
<state name="dotted">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\.">
<token type="Punctuation"/>
</rule>
<rule pattern="[A-Z][\w\&#39;]*(?=\s*\.)">
<token type="NameNamespace"/>
</rule>
<rule pattern="[A-Z][\w\&#39;]*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
<rule pattern="[a-z][a-z0-9_\&#39;]*">
<token type="Name"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="false|true|\(\)|\[\]">
<token type="NameBuiltinPseudo"/>
</rule>
<rule pattern="\(\*">
<token type="Comment"/>
<push state="comment"/>
</rule>
<rule pattern="\b(Projections|Monomorphic|Polymorphic|Proposition|CoInductive|Hypothesis|CoFixpoint|Contextual|Definition|Parameters|Hypotheses|Structure|Inductive|Corollary|Implicits|Parameter|Variables|Arguments|Canonical|Printing|Coercion|Reserved|Universe|Notation|Instance|Fixpoint|Variable|Morphism|Relation|Existing|Implicit|Example|Theorem|Delimit|Defined|Rewrite|outside|Require|Resolve|Section|Context|Prenex|Strict|Module|Import|Export|Global|inside|Remark|Tactic|Search|Record|Scope|Unset|Check|Local|Close|Class|Graph|Proof|Lemma|Print|Axiom|Show|Goal|Open|Fact|Hint|Bind|Ltac|Save|View|Let|Set|All|End|Qed)\b">
<token type="KeywordNamespace"/>
</rule>
<rule pattern="\b(exists2|nosimpl|struct|exists|return|forall|match|cofix|then|with|else|for|fix|let|fun|end|is|of|if|in|as)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(Type|Prop)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="\b(native_compute|setoid_rewrite|etransitivity|econstructor|transitivity|autorewrite|constructor|cutrewrite|vm_compute|bool_congr|generalize|inversion|induction|injection|nat_congr|intuition|destruct|suffices|erewrite|symmetry|nat_norm|replace|rewrite|compute|pattern|trivial|without|assert|unfold|change|eapply|intros|unlock|revert|rename|refine|eauto|tauto|after|right|congr|split|field|simpl|intro|clear|apply|using|subst|case|left|suff|loss|wlog|have|fold|ring|move|lazy|elim|pose|auto|red|cbv|hnf|cut|set)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(contradiction|discriminate|reflexivity|assumption|congruence|romega|omega|exact|solve|tauto|done|by)\b">
<token type="KeywordPseudo"/>
</rule>
<rule pattern="\b(repeat|first|idtac|last|try|do)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="\b([A-Z][\w\&#39;]*)">
<token type="Name"/>
</rule>
<rule pattern="(λ|Π|\|\}|\{\||\\/|/\\|=&gt;|~|\}|\|]|\||\{&lt;|\{|`|_|]|\[\||\[&gt;|\[&lt;|\[|\?\?|\?|&gt;\}|&gt;]|&gt;|=|&lt;-&gt;|&lt;-|&lt;|;;|;|:&gt;|:=|::|:|\.\.|\.|-&gt;|-\.|-|,|\+|\*|\)|\(|&amp;&amp;|&amp;|#|!=)">
<token type="Operator"/>
</rule>
<rule pattern="([=&lt;&gt;@^|&amp;+\*/$%-]|[!?~])?[!$%&amp;*+\./:&lt;=&gt;?@^|~-]">
<token type="Operator"/>
</rule>
<rule pattern="\b(unit|nat|bool|string|ascii|list)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="[^\W\d][\w&#39;]*">
<token type="Name"/>
</rule>
<rule pattern="\d[\d_]*">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="0[xX][\da-fA-F][\da-fA-F_]*">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="0[oO][0-7][0-7_]*">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0[bB][01][01_]*">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="&#39;(?:(\\[\\\&#34;&#39;ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="&#39;.&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="&#39;">
<token type="Keyword"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string"/>
</rule>
<rule pattern="[~?][a-z][\w\&#39;]*:">
<token type="Name"/>
</rule>
</state>
<state name="comment">
<rule pattern="[^(*)]+">
<token type="Comment"/>
</rule>
<rule pattern="\(\*">
<token type="Comment"/>
<push/>
</rule>
<rule pattern="\*\)">
<token type="Comment"/>
<pop depth="1"/>
</rule>
<rule pattern="[(*)]">
<token type="Comment"/>
</rule>
</state>
</rules>
</lexer>

762
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,762 @@
<lexer>
<config>
<name>Crystal</name>
<alias>cr</alias>
<alias>crystal</alias>
<filename>*.cr</filename>
<mime_type>text/x-crystal</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="pa-intp-string">
<rule pattern="\\[\(]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\(">
<token type="LiteralStringOther"/>
<push/>
</rule>
<rule pattern="\)">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string-intp-escaped"/>
</rule>
<rule pattern="[\\#()]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#()]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="ab-regex">
<rule pattern="\\[\\&lt;&gt;]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="&lt;">
<token type="LiteralStringRegex"/>
<push/>
</rule>
<rule pattern="&gt;[imsx]*">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string-intp"/>
</rule>
<rule pattern="[\\#&lt;&gt;]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="[^\\#&lt;&gt;]+">
<token type="LiteralStringRegex"/>
</rule>
</state>
<state name="cb-regex">
<rule pattern="\\[\\{}]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="\{">
<token type="LiteralStringRegex"/>
<push/>
</rule>
<rule pattern="\}[imsx]*">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string-intp"/>
</rule>
<rule pattern="[\\#{}]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="[^\\#{}]+">
<token type="LiteralStringRegex"/>
</rule>
</state>
<state name="simple-backtick">
<rule>
<include state="string-intp-escaped"/>
</rule>
<rule pattern="[^\\`#]+">
<token type="LiteralStringBacktick"/>
</rule>
<rule pattern="[\\#]">
<token type="LiteralStringBacktick"/>
</rule>
<rule pattern="`">
<token type="LiteralStringBacktick"/>
<pop depth="1"/>
</rule>
</state>
<state name="string-intp">
<rule pattern="#\{">
<token type="LiteralStringInterpol"/>
<push state="in-intp"/>
</rule>
</state>
<state name="interpolated-regex">
<rule>
<include state="string-intp"/>
</rule>
<rule pattern="[\\#]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="[^\\#]+">
<token type="LiteralStringRegex"/>
</rule>
</state>
<state name="cb-string">
<rule pattern="\\[\\{}]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\{">
<token type="LiteralStringOther"/>
<push/>
</rule>
<rule pattern="\}">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule pattern="[\\#{}]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#{}]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="in-macro-control">
<rule pattern="\{%">
<token type="LiteralStringInterpol"/>
<push/>
</rule>
<rule pattern="%\}">
<token type="LiteralStringInterpol"/>
<pop depth="1"/>
</rule>
<rule pattern="for\b|in\b">
<token type="Keyword"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="interpolated-string">
<rule>
<include state="string-intp"/>
</rule>
<rule pattern="[\\#]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="in-macro-expr">
<rule pattern="\{\{">
<token type="LiteralStringInterpol"/>
<push/>
</rule>
<rule pattern="\}\}">
<token type="LiteralStringInterpol"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="simple-string">
<rule>
<include state="string-intp-escaped"/>
</rule>
<rule pattern="[^\\&#34;#]+">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="[\\#]">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
</state>
<state name="cb-intp-string">
<rule pattern="\\[\{]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\{">
<token type="LiteralStringOther"/>
<push/>
</rule>
<rule pattern="\}">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string-intp-escaped"/>
</rule>
<rule pattern="[\\#{}]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#{}]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="string-intp-escaped">
<rule>
<include state="string-intp"/>
</rule>
<rule>
<include state="string-escaped"/>
</rule>
</state>
<state name="sb-regex">
<rule pattern="\\[\\\[\]]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="\[">
<token type="LiteralStringRegex"/>
<push/>
</rule>
<rule pattern="\][imsx]*">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string-intp"/>
</rule>
<rule pattern="[\\#\[\]]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="[^\\#\[\]]+">
<token type="LiteralStringRegex"/>
</rule>
</state>
<state name="classname">
<rule pattern="[A-Z_]\w*">
<token type="NameClass"/>
</rule>
<rule pattern="(\()(\s*)([A-Z_]\w*)(\s*)(\))">
<bygroups>
<token type="Punctuation"/>
<token type="Text"/>
<token type="NameClass"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="string-escaped">
<rule pattern="\\([\\befnstv#&#34;\&#39;]|x[a-fA-F0-9]{1,2}|[0-7]{1,3})">
<token type="LiteralStringEscape"/>
</rule>
</state>
<state name="sb-intp-string">
<rule pattern="\\[\[]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\[">
<token type="LiteralStringOther"/>
<push/>
</rule>
<rule pattern="\]">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string-intp-escaped"/>
</rule>
<rule pattern="[\\#\[\]]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#\[\]]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="pa-regex">
<rule pattern="\\[\\()]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="\(">
<token type="LiteralStringRegex"/>
<push/>
</rule>
<rule pattern="\)[imsx]*">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string-intp"/>
</rule>
<rule pattern="[\\#()]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="[^\\#()]+">
<token type="LiteralStringRegex"/>
</rule>
</state>
<state name="in-attr">
<rule pattern="\[">
<token type="Operator"/>
<push/>
</rule>
<rule pattern="\]">
<token type="Operator"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="ab-intp-string">
<rule pattern="\\[&lt;]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="&lt;">
<token type="LiteralStringOther"/>
<push/>
</rule>
<rule pattern="&gt;">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string-intp-escaped"/>
</rule>
<rule pattern="[\\#&lt;&gt;]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#&lt;&gt;]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="in-intp">
<rule pattern="\{">
<token type="LiteralStringInterpol"/>
<push/>
</rule>
<rule pattern="\}">
<token type="LiteralStringInterpol"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="end-part">
<rule pattern=".+">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule pattern="#.*?$">
<token type="CommentSingle"/>
</rule>
<rule pattern="(instance_sizeof|pointerof|protected|abstract|require|private|include|unless|typeof|sizeof|return|extend|ensure|rescue|ifdef|super|break|begin|until|while|elsif|yield|next|when|else|then|case|with|end|asm|if|do|as|of)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(false|true|nil)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameNamespace"/>
</bygroups>
</rule>
<rule pattern="(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameNamespace"/>
</bygroups>
<push state="funcname"/>
</rule>
<rule pattern="def(?=[*%&amp;^`~+-/\[&lt;&gt;=])">
<token type="Keyword"/>
<push state="funcname"/>
</rule>
<rule pattern="(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameNamespace"/>
</bygroups>
<push state="classname"/>
</rule>
<rule pattern="(self|out|uninitialized)\b|(is_a|responds_to)\?">
<token type="KeywordPseudo"/>
</rule>
<rule pattern="(def_equals_and_hash|assert_responds_to|forward_missing_to|def_equals|property|def_hash|parallel|delegate|debugger|getter|record|setter|spawn|pp)\b">
<token type="NameBuiltinPseudo"/>
</rule>
<rule pattern="getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b">
<token type="NameBuiltinPseudo"/>
</rule>
<rule pattern="(?&lt;!\.)(get_stack_top|StaticArray|Concurrent|with_color|Reference|Scheduler|read_line|Exception|at_exit|Pointer|Channel|Float64|sprintf|Float32|Process|Object|Struct|caller|UInt16|UInt32|UInt64|system|future|Number|printf|String|Symbol|Int32|Range|Slice|Regex|Mutex|sleep|Array|Class|raise|Tuple|Deque|delay|Float|Int16|print|abort|Value|UInt8|Int64|puts|Proc|File|Void|exit|fork|Bool|Char|gets|lazy|loop|main|rand|Enum|Int8|Time|Hash|Set|Box|Nil|Dir|Int|p)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(?&lt;!\w)(&lt;&lt;-?)([&#34;`\&#39;]?)([a-zA-Z_]\w*)(\2)(.*?\n)">
<token type="LiteralStringHeredoc"/>
</rule>
<rule pattern="(&lt;&lt;-?)(&#34;|\&#39;)()(\2)(.*?\n)">
<token type="LiteralStringHeredoc"/>
</rule>
<rule pattern="__END__">
<token type="CommentPreproc"/>
<push state="end-part"/>
</rule>
<rule pattern="(?:^|(?&lt;=[=&lt;&gt;~!:])|(?&lt;=(?:\s|;)when\s)|(?&lt;=(?:\s|;)or\s)|(?&lt;=(?:\s|;)and\s)|(?&lt;=\.index\s)|(?&lt;=\.scan\s)|(?&lt;=\.sub\s)|(?&lt;=\.sub!\s)|(?&lt;=\.gsub\s)|(?&lt;=\.gsub!\s)|(?&lt;=\.match\s)|(?&lt;=(?:\s|;)if\s)|(?&lt;=(?:\s|;)elsif\s)|(?&lt;=^when\s)|(?&lt;=^index\s)|(?&lt;=^scan\s)|(?&lt;=^sub\s)|(?&lt;=^gsub\s)|(?&lt;=^sub!\s)|(?&lt;=^gsub!\s)|(?&lt;=^match\s)|(?&lt;=^if\s)|(?&lt;=^elsif\s))(\s*)(/)">
<bygroups>
<token type="Text"/>
<token type="LiteralStringRegex"/>
</bygroups>
<push state="multiline-regex"/>
</rule>
<rule pattern="(?&lt;=\(|,|\[)/">
<token type="LiteralStringRegex"/>
<push state="multiline-regex"/>
</rule>
<rule pattern="(\s+)(/)(?![\s=])">
<bygroups>
<token type="Text"/>
<token type="LiteralStringRegex"/>
</bygroups>
<push state="multiline-regex"/>
</rule>
<rule pattern="(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?">
<bygroups>
<token type="LiteralNumberOct"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?">
<bygroups>
<token type="LiteralNumberHex"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?">
<bygroups>
<token type="LiteralNumberBin"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?f[0-9]+)?)(\s*)([/?])?">
<bygroups>
<token type="LiteralNumberFloat"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?f[0-9]+)?)(\s*)([/?])?">
<bygroups>
<token type="LiteralNumberFloat"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?f[0-9]+))(\s*)([/?])?">
<bygroups>
<token type="LiteralNumberFloat"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?">
<bygroups>
<token type="LiteralNumberInteger"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="@@[a-zA-Z_]\w*">
<token type="NameVariableClass"/>
</rule>
<rule pattern="@[a-zA-Z_]\w*">
<token type="NameVariableInstance"/>
</rule>
<rule pattern="\$\w+">
<token type="NameVariableGlobal"/>
</rule>
<rule pattern="\$[!@&amp;`\&#39;+~=/\\,;.&lt;&gt;_*$?:&#34;^-]">
<token type="NameVariableGlobal"/>
</rule>
<rule pattern="\$-[0adFiIlpvw]">
<token type="NameVariableGlobal"/>
</rule>
<rule pattern="::">
<token type="Operator"/>
</rule>
<rule>
<include state="strings"/>
</rule>
<rule pattern="\?(\\[MC]-)*(\\([\\befnrtv#&#34;\&#39;]|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="[A-Z][A-Z_]+\b">
<token type="NameConstant"/>
</rule>
<rule pattern="\{%">
<token type="LiteralStringInterpol"/>
<push state="in-macro-control"/>
</rule>
<rule pattern="\{\{">
<token type="LiteralStringInterpol"/>
<push state="in-macro-expr"/>
</rule>
<rule pattern="(@\[)(\s*)([A-Z]\w*)">
<bygroups>
<token type="Operator"/>
<token type="Text"/>
<token type="NameDecorator"/>
</bygroups>
<push state="in-attr"/>
</rule>
<rule pattern="(\.|::)(\[\]\?|&lt;=&gt;|===|\[\]=|&gt;&gt;|&amp;&amp;|\*\*|\[\]|\|\||&gt;=|=~|!~|&lt;&lt;|&lt;=|!=|==|&lt;|/|=|-|\+|&gt;|\*|&amp;|%|\^|!|\||~)">
<bygroups>
<token type="Operator"/>
<token type="NameOperator"/>
</bygroups>
</rule>
<rule pattern="(\.|::)([a-zA-Z_]\w*[!?]?|[*%&amp;^`~+\-/\[&lt;&gt;=])">
<bygroups>
<token type="Operator"/>
<token type="Name"/>
</bygroups>
</rule>
<rule pattern="[a-zA-Z_]\w*(?:[!?](?!=))?">
<token type="Name"/>
</rule>
<rule pattern="(\[|\]\??|\*\*|&lt;=&gt;?|&gt;=|&lt;&lt;?|&gt;&gt;?|=~|===|!~|&amp;&amp;?|\|\||\.{1,3})">
<token type="Operator"/>
</rule>
<rule pattern="[-+/*%=&lt;&gt;&amp;!^|~]=?">
<token type="Operator"/>
</rule>
<rule pattern="[(){};,/?:\\]">
<token type="Punctuation"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
</state>
<state name="multiline-regex">
<rule>
<include state="string-intp"/>
</rule>
<rule pattern="\\\\">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="\\/">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="[\\#]">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="[^\\/#]+">
<token type="LiteralStringRegex"/>
</rule>
<rule pattern="/[imsx]*">
<token type="LiteralStringRegex"/>
<pop depth="1"/>
</rule>
</state>
<state name="ab-string">
<rule pattern="\\[\\&lt;&gt;]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="&lt;">
<token type="LiteralStringOther"/>
<push/>
</rule>
<rule pattern="&gt;">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule pattern="[\\#&lt;&gt;]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#&lt;&gt;]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="pa-string">
<rule pattern="\\[\\()]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\(">
<token type="LiteralStringOther"/>
<push/>
</rule>
<rule pattern="\)">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule pattern="[\\#()]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#()]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="strings">
<rule pattern="\:@{0,2}[a-zA-Z_]\w*[!?]?">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="\:@{0,2}(\[\]\?|&lt;=&gt;|===|\[\]=|&gt;&gt;|&amp;&amp;|\*\*|\[\]|\|\||&gt;=|=~|!~|&lt;&lt;|&lt;=|!=|==|&lt;|/|=|-|\+|&gt;|\*|&amp;|%|\^|!|\||~)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern=":&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="&#39;(\\\\|\\&#39;|[^&#39;]|\\[^&#39;\\]+)&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern=":&#34;">
<token type="LiteralStringSymbol"/>
<push state="simple-sym"/>
</rule>
<rule pattern="([a-zA-Z_]\w*)(:)(?!:)">
<bygroups>
<token type="LiteralStringSymbol"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="simple-string"/>
</rule>
<rule pattern="(?&lt;!\.)`">
<token type="LiteralStringBacktick"/>
<push state="simple-backtick"/>
</rule>
<rule pattern="%\{">
<token type="LiteralStringOther"/>
<push state="cb-intp-string"/>
</rule>
<rule pattern="%[wi]\{">
<token type="LiteralStringOther"/>
<push state="cb-string"/>
</rule>
<rule pattern="%r\{">
<token type="LiteralStringRegex"/>
<push state="cb-regex"/>
</rule>
<rule pattern="%\[">
<token type="LiteralStringOther"/>
<push state="sb-intp-string"/>
</rule>
<rule pattern="%[wi]\[">
<token type="LiteralStringOther"/>
<push state="sb-string"/>
</rule>
<rule pattern="%r\[">
<token type="LiteralStringRegex"/>
<push state="sb-regex"/>
</rule>
<rule pattern="%\(">
<token type="LiteralStringOther"/>
<push state="pa-intp-string"/>
</rule>
<rule pattern="%[wi]\(">
<token type="LiteralStringOther"/>
<push state="pa-string"/>
</rule>
<rule pattern="%r\(">
<token type="LiteralStringRegex"/>
<push state="pa-regex"/>
</rule>
<rule pattern="%&lt;">
<token type="LiteralStringOther"/>
<push state="ab-intp-string"/>
</rule>
<rule pattern="%[wi]&lt;">
<token type="LiteralStringOther"/>
<push state="ab-string"/>
</rule>
<rule pattern="%r&lt;">
<token type="LiteralStringRegex"/>
<push state="ab-regex"/>
</rule>
<rule pattern="(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[imsx]*)">
<token type="LiteralString"/>
</rule>
<rule pattern="(%[wi]([\W_]))((?:\\\2|(?!\2).)*)(\2)">
<token type="LiteralString"/>
</rule>
<rule pattern="(?&lt;=[-+/*%=&lt;&gt;&amp;!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)">
<bygroups>
<token type="Text"/>
<token type="LiteralStringOther"/>
<token type="None"/>
</bygroups>
</rule>
<rule pattern="^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)">
<bygroups>
<token type="Text"/>
<token type="LiteralStringOther"/>
<token type="None"/>
</bygroups>
</rule>
<rule pattern="(%([\[{(&lt;]))((?:\\\2|(?!\2).)*)(\2)">
<token type="LiteralString"/>
</rule>
</state>
<state name="sb-string">
<rule pattern="\\[\\\[\]]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\[">
<token type="LiteralStringOther"/>
<push/>
</rule>
<rule pattern="\]">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule pattern="[\\#\[\]]">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="[^\\#\[\]]+">
<token type="LiteralStringOther"/>
</rule>
</state>
<state name="funcname">
<rule pattern="(?:([a-zA-Z_]\w*)(\.))?([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|[/%&amp;|^`~]|\[\]=?|&lt;&lt;|&gt;&gt;|&lt;=?&gt;|&gt;=?|===?)">
<bygroups>
<token type="NameClass"/>
<token type="Operator"/>
<token type="NameFunction"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="simple-sym">
<rule>
<include state="string-escaped"/>
</rule>
<rule pattern="[^\\&#34;#]+">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="[\\#]">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringSymbol"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

115
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/csharp.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,115 @@
<lexer>
<config>
<name>C#</name>
<alias>csharp</alias>
<alias>c#</alias>
<filename>*.cs</filename>
<mime_type>text/x-csharp</mime_type>
<dot_all>true</dot_all>
<ensure_nl>true</ensure_nl>
</config>
<rules>
<state name="root">
<rule pattern="^\s*\[.*?\]">
<token type="NameAttribute"/>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="///[^\n\r]+">
<token type="CommentSpecial"/>
</rule>
<rule pattern="//[^\n\r]+">
<token type="CommentSingle"/>
</rule>
<rule pattern="/[*].*?[*]/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule pattern="[~!%^&amp;*()+=|\[\]:;,.&lt;&gt;/?-]">
<token type="Punctuation"/>
</rule>
<rule pattern="[{}]">
<token type="Punctuation"/>
</rule>
<rule pattern="@&#34;(&#34;&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="\$@?&#34;(&#34;&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;\n])*[&#34;\n]">
<token type="LiteralString"/>
</rule>
<rule pattern="&#39;\\.&#39;|&#39;[^\\]&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="0[xX][0-9a-fA-F]+[Ll]?|\d[_\d]*(\.\d*)?([eE][+-]?\d+)?[flFLdD]?">
<token type="LiteralNumber"/>
</rule>
<rule pattern="#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma|nullable)\b[^\n\r]+">
<token type="CommentPreproc"/>
</rule>
<rule pattern="\b(extern)(\s+)(alias)\b">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
</bygroups>
</rule>
<rule pattern="(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|init|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(global)(::)">
<bygroups>
<token type="Keyword"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(bool|byte|char|decimal|double|dynamic|float|int|long|object|sbyte|short|string|uint|ulong|ushort|var)\b\??">
<token type="KeywordType"/>
</rule>
<rule pattern="(class|struct|record|interface)(\s+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="class"/>
</rule>
<rule pattern="(namespace|using)(\s+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="namespace"/>
</rule>
<rule pattern="@?[_a-zA-Z]\w*">
<token type="Name"/>
</rule>
</state>
<state name="class">
<rule pattern="@?[_a-zA-Z]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="namespace">
<rule pattern="(?=\()">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="(@?[_a-zA-Z]\w*|\.)+">
<token type="NameNamespace"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

323
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml generated vendored ノーマルファイル

長すぎる行があるためファイル差分は表示されません

372
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,372 @@
<lexer>
<config>
<name>Cython</name>
<alias>cython</alias>
<alias>pyx</alias>
<alias>pyrex</alias>
<filename>*.pyx</filename>
<filename>*.pxd</filename>
<filename>*.pxi</filename>
<mime_type>text/x-cython</mime_type>
<mime_type>application/x-cython</mime_type>
</config>
<rules>
<state name="funcname">
<rule pattern="[a-zA-Z_]\w*">
<token type="NameFunction"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule pattern="^(\s*)(&#34;&#34;&#34;(?:.|\n)*?&#34;&#34;&#34;)">
<bygroups>
<token type="Text"/>
<token type="LiteralStringDoc"/>
</bygroups>
</rule>
<rule pattern="^(\s*)(&#39;&#39;&#39;(?:.|\n)*?&#39;&#39;&#39;)">
<bygroups>
<token type="Text"/>
<token type="LiteralStringDoc"/>
</bygroups>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="#.*$">
<token type="Comment"/>
</rule>
<rule pattern="[]{}:(),;[]">
<token type="Punctuation"/>
</rule>
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="\\">
<token type="Text"/>
</rule>
<rule pattern="(in|is|and|or|not)\b">
<token type="OperatorWord"/>
</rule>
<rule pattern="(&lt;)([a-zA-Z0-9.?]+)(&gt;)">
<bygroups>
<token type="Punctuation"/>
<token type="KeywordType"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="!=|==|&lt;&lt;|&gt;&gt;|[-~+/*%=&lt;&gt;&amp;^|.?]">
<token type="Operator"/>
</rule>
<rule pattern="(from)(\d+)(&lt;=)(\s+)(&lt;)(\d+)(:)">
<bygroups>
<token type="Keyword"/>
<token type="LiteralNumberInteger"/>
<token type="Operator"/>
<token type="Name"/>
<token type="Operator"/>
<token type="Name"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule>
<include state="keywords"/>
</rule>
<rule pattern="(def|property)(\s+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="funcname"/>
</rule>
<rule pattern="(cp?def)(\s+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="cdef"/>
</rule>
<rule pattern="(cdef)(:)">
<bygroups>
<token type="Keyword"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(class|struct)(\s+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="classname"/>
</rule>
<rule pattern="(from)(\s+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="fromimport"/>
</rule>
<rule pattern="(c?import)(\s+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="import"/>
</rule>
<rule>
<include state="builtins"/>
</rule>
<rule>
<include state="backtick"/>
</rule>
<rule pattern="(?:[rR]|[uU][rR]|[rR][uU])&#34;&#34;&#34;">
<token type="LiteralString"/>
<push state="tdqs"/>
</rule>
<rule pattern="(?:[rR]|[uU][rR]|[rR][uU])&#39;&#39;&#39;">
<token type="LiteralString"/>
<push state="tsqs"/>
</rule>
<rule pattern="(?:[rR]|[uU][rR]|[rR][uU])&#34;">
<token type="LiteralString"/>
<push state="dqs"/>
</rule>
<rule pattern="(?:[rR]|[uU][rR]|[rR][uU])&#39;">
<token type="LiteralString"/>
<push state="sqs"/>
</rule>
<rule pattern="[uU]?&#34;&#34;&#34;">
<token type="LiteralString"/>
<combined state="stringescape" state="tdqs"/>
</rule>
<rule pattern="[uU]?&#39;&#39;&#39;">
<token type="LiteralString"/>
<combined state="stringescape" state="tsqs"/>
</rule>
<rule pattern="[uU]?&#34;">
<token type="LiteralString"/>
<combined state="stringescape" state="dqs"/>
</rule>
<rule pattern="[uU]?&#39;">
<token type="LiteralString"/>
<combined state="stringescape" state="sqs"/>
</rule>
<rule>
<include state="name"/>
</rule>
<rule>
<include state="numbers"/>
</rule>
</state>
<state name="stringescape">
<rule pattern="\\([\\abfnrtv&#34;\&#39;]|\n|N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})">
<token type="LiteralStringEscape"/>
</rule>
</state>
<state name="strings">
<rule pattern="%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]">
<token type="LiteralStringInterpol"/>
</rule>
<rule pattern="[^\\\&#39;&#34;%\n]+">
<token type="LiteralString"/>
</rule>
<rule pattern="[\&#39;&#34;\\]">
<token type="LiteralString"/>
</rule>
<rule pattern="%">
<token type="LiteralString"/>
</rule>
</state>
<state name="backtick">
<rule pattern="`.*?`">
<token type="LiteralStringBacktick"/>
</rule>
</state>
<state name="numbers">
<rule pattern="(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0\d+">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0[xX][a-fA-F0-9]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="\d+L">
<token type="LiteralNumberIntegerLong"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumberInteger"/>
</rule>
</state>
<state name="keywords">
<rule pattern="(continue|ctypedef|except\?|include|finally|global|return|lambda|assert|except|print|nogil|while|fused|yield|break|raise|exec|else|elif|pass|with|gil|for|try|del|by|as|if)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(DEF|IF|ELIF|ELSE)\b">
<token type="CommentPreproc"/>
</rule>
</state>
<state name="fromimport">
<rule pattern="(\s+)(c?import)\b">
<bygroups>
<token type="Text"/>
<token type="Keyword"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="[a-zA-Z_.][\w.]*">
<token type="NameNamespace"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="nl">
<rule pattern="\n">
<token type="LiteralString"/>
</rule>
</state>
<state name="dqs">
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\\\\|\\&#34;|\\\n">
<token type="LiteralStringEscape"/>
</rule>
<rule>
<include state="strings"/>
</rule>
</state>
<state name="tsqs">
<rule pattern="&#39;&#39;&#39;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule>
<include state="strings"/>
</rule>
<rule>
<include state="nl"/>
</rule>
</state>
<state name="import">
<rule pattern="(\s+)(as)(\s+)">
<bygroups>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="[a-zA-Z_][\w.]*">
<token type="NameNamespace"/>
</rule>
<rule pattern="(\s*)(,)(\s*)">
<bygroups>
<token type="Text"/>
<token type="Operator"/>
<token type="Text"/>
</bygroups>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="name">
<rule pattern="@\w+">
<token type="NameDecorator"/>
</rule>
<rule pattern="[a-zA-Z_]\w*">
<token type="Name"/>
</rule>
</state>
<state name="cdef">
<rule pattern="(public|readonly|extern|api|inline)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="(struct|enum|union|class)\b">
<token type="Keyword"/>
</rule>
<rule pattern="([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)">
<bygroups>
<token type="NameFunction"/>
<token type="Text"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="([a-zA-Z_]\w*)(\s*)(,)">
<bygroups>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="from\b">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule pattern="as\b">
<token type="Keyword"/>
</rule>
<rule pattern=":">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="(?=[&#34;\&#39;])">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="[a-zA-Z_]\w*">
<token type="KeywordType"/>
</rule>
<rule pattern=".">
<token type="Text"/>
</rule>
</state>
<state name="classname">
<rule pattern="[a-zA-Z_]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
</state>
<state name="sqs">
<rule pattern="&#39;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\\\\|\\&#39;|\\\n">
<token type="LiteralStringEscape"/>
</rule>
<rule>
<include state="strings"/>
</rule>
</state>
<state name="tdqs">
<rule pattern="&#34;&#34;&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule>
<include state="strings"/>
</rule>
<rule>
<include state="nl"/>
</rule>
</state>
<state name="builtins">
<rule pattern="(?&lt;!\.)(staticmethod|classmethod|__import__|issubclass|isinstance|basestring|bytearray|raw_input|frozenset|enumerate|property|unsigned|reversed|callable|execfile|hasattr|compile|complex|delattr|setattr|unicode|globals|getattr|reload|divmod|xrange|unichr|filter|reduce|buffer|intern|coerce|sorted|locals|object|round|input|range|super|tuple|bytes|float|slice|apply|bool|long|exit|vars|file|next|type|iter|open|dict|repr|hash|list|eval|oct|map|zip|int|hex|set|sum|chr|cmp|any|str|pow|ord|dir|len|min|all|abs|max|bin|id)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(?&lt;!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL)\b">
<token type="NameBuiltinPseudo"/>
</rule>
<rule pattern="(?&lt;!\.)(PendingDeprecationWarning|UnicodeTranslateError|NotImplementedError|FloatingPointError|DeprecationWarning|UnicodeDecodeError|UnicodeEncodeError|UnboundLocalError|KeyboardInterrupt|ZeroDivisionError|IndentationError|EnvironmentError|OverflowWarning|ArithmeticError|RuntimeWarning|UnicodeWarning|AttributeError|AssertionError|NotImplemented|ReferenceError|StopIteration|SyntaxWarning|OverflowError|GeneratorExit|FutureWarning|BaseException|ImportWarning|StandardError|RuntimeError|UnicodeError|LookupError|ImportError|SyntaxError|MemoryError|SystemError|UserWarning|SystemExit|ValueError|IndexError|NameError|TypeError|Exception|KeyError|EOFError|TabError|OSError|Warning|IOError)\b">
<token type="NameException"/>
</rule>
</state>
</rules>
</lexer>

130
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,130 @@
<lexer>
<config>
<name>D</name>
<alias>d</alias>
<filename>*.d</filename>
<filename>*.di</filename>
<mime_type>text/x-d</mime_type>
<ensure_nl>true</ensure_nl>
</config>
<rules>
<state name="root">
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/\+.*?\+/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="(asm|assert|body|break|case|cast|catch|continue|default|debug|delete|deprecated|do|else|finally|for|foreach|foreach_reverse|goto|if|in|invariant|is|macro|mixin|new|out|pragma|return|super|switch|this|throw|try|version|while|with)\b">
<token type="Keyword"/>
</rule>
<rule pattern="__(FILE|FILE_FULL_PATH|MODULE|LINE|FUNCTION|PRETTY_FUNCTION|DATE|EOF|TIME|TIMESTAMP|VENDOR|VERSION)__\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="__(traits|vector|parameters)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="((?:(?:[^\W\d]|\$)[\w.\[\]$&lt;&gt;]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="@[\w.]*">
<token type="NameDecorator"/>
</rule>
<rule pattern="(abstract|auto|alias|align|const|delegate|enum|export|final|function|inout|lazy|nothrow|override|package|private|protected|public|pure|static|synchronized|template|volatile|__gshared)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(void|bool|byte|ubyte|short|ushort|int|uint|long|ulong|cent|ucent|float|double|real|ifloat|idouble|ireal|cfloat|cdouble|creal|char|wchar|dchar|string|wstring|dstring)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(module)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="import"/>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(class|interface|struct|template|union)(\s+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
</bygroups>
<push state="class"/>
</rule>
<rule pattern="(import)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="import"/>
</rule>
<rule pattern="[qr]?&#34;(\\\\|\\&#34;|[^&#34;])*&#34;[cwd]?">
<token type="LiteralString"/>
</rule>
<rule pattern="(`)([^`]*)(`)[cwd]?">
<token type="LiteralString"/>
</rule>
<rule pattern="&#39;\\.&#39;|&#39;[^\\]&#39;|&#39;\\u[0-9a-fA-F]{4}&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="(\.)((?:[^\W\d]|\$)[\w$]*)">
<bygroups>
<token type="Operator"/>
<token type="NameAttribute"/>
</bygroups>
</rule>
<rule pattern="^\s*([^\W\d]|\$)[\w$]*:">
<token type="NameLabel"/>
</rule>
<rule pattern="([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFL]?i?|[0-9][eE][+\-]?[0-9][0-9_]*[fFL]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFL]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFL]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="0[bB][01][01_]*[lL]?">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="0[0-7_]+[lL]?">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0|[1-9][0-9_]*[lL]?">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="([~^*!%&amp;\[\](){}&lt;&gt;|+=:;,./?-]|q{)">
<token type="Operator"/>
</rule>
<rule pattern="([^\W\d]|\$)[\w$]*">
<token type="Name"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
</state>
<state name="class">
<rule pattern="([^\W\d]|\$)[\w$]*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
</state>
<state name="import">
<rule pattern="[\w.]+\*?">
<token type="NameNamespace"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

213
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,213 @@
<lexer>
<config>
<name>Dart</name>
<alias>dart</alias>
<filename>*.dart</filename>
<mime_type>text/x-dart</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="string_double_multiline">
<rule pattern="&#34;&#34;&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
<rule pattern="[^&#34;$\\]+">
<token type="LiteralStringDouble"/>
</rule>
<rule>
<include state="string_common"/>
</rule>
<rule pattern="(\$|\&#34;)+">
<token type="LiteralStringDouble"/>
</rule>
</state>
<state name="class">
<rule pattern="[a-zA-Z_$]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
</state>
<state name="import_decl">
<rule>
<include state="string_literal"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\b(as|show|hide)\b">
<token type="Keyword"/>
</rule>
<rule pattern="[a-zA-Z_$]\w*">
<token type="Name"/>
</rule>
<rule pattern="\,">
<token type="Punctuation"/>
</rule>
<rule pattern="\;">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="string_single_multiline">
<rule pattern="&#39;&#39;&#39;">
<token type="LiteralStringSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="[^\&#39;$\\]+">
<token type="LiteralStringSingle"/>
</rule>
<rule>
<include state="string_common"/>
</rule>
<rule pattern="(\$|\&#39;)+">
<token type="LiteralStringSingle"/>
</rule>
</state>
<state name="root">
<rule>
<include state="string_literal"/>
</rule>
<rule pattern="#!(.*?)$">
<token type="CommentPreproc"/>
</rule>
<rule pattern="\b(import|export)\b">
<token type="Keyword"/>
<push state="import_decl"/>
</rule>
<rule pattern="\b(library|source|part of|part)\b">
<token type="Keyword"/>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="\b(class)\b(\s+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
</bygroups>
<push state="class"/>
</rule>
<rule pattern="\b(assert|break|case|catch|continue|default|do|else|finally|for|if|in|is|new|return|super|switch|this|throw|try|while)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(abstract|async|await|const|extends|factory|final|get|implements|native|operator|required|set|static|sync|typedef|var|with|yield)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="\b(bool|double|dynamic|int|num|Object|String|void)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="\b(false|null|true)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="[~!%^&amp;*+=|?:&lt;&gt;/-]|as\b">
<token type="Operator"/>
</rule>
<rule pattern="[a-zA-Z_$]\w*:">
<token type="NameLabel"/>
</rule>
<rule pattern="[a-zA-Z_$]\w*">
<token type="Name"/>
</rule>
<rule pattern="[(){}\[\],.;]">
<token type="Punctuation"/>
</rule>
<rule pattern="0[xX][0-9a-fA-F]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="\d+(\.\d*)?([eE][+-]?\d+)?">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\.\d+([eE][+-]?\d+)?">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
</state>
<state name="string_literal">
<rule pattern="r&#34;&#34;&#34;([\w\W]*?)&#34;&#34;&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="r&#39;&#39;&#39;([\w\W]*?)&#39;&#39;&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="r&#34;(.*?)&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="r&#39;(.*?)&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#34;&#34;&#34;">
<token type="LiteralStringDouble"/>
<push state="string_double_multiline"/>
</rule>
<rule pattern="&#39;&#39;&#39;">
<token type="LiteralStringSingle"/>
<push state="string_single_multiline"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string_double"/>
</rule>
<rule pattern="&#39;">
<token type="LiteralStringSingle"/>
<push state="string_single"/>
</rule>
</state>
<state name="string_common">
<rule pattern="\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z&#39;\&#34;$\\])">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="(\$)([a-zA-Z_]\w*)">
<bygroups>
<token type="LiteralStringInterpol"/>
<token type="Name"/>
</bygroups>
</rule>
<rule pattern="(\$\{)(.*?)(\})">
<bygroups>
<token type="LiteralStringInterpol"/>
<usingself state="root"/>
<token type="LiteralStringInterpol"/>
</bygroups>
</rule>
</state>
<state name="string_double">
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
<rule pattern="[^&#34;$\\\n]+">
<token type="LiteralStringDouble"/>
</rule>
<rule>
<include state="string_common"/>
</rule>
<rule pattern="\$+">
<token type="LiteralStringDouble"/>
</rule>
</state>
<state name="string_single">
<rule pattern="&#39;">
<token type="LiteralStringSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="[^&#39;$\\\n]+">
<token type="LiteralStringSingle"/>
</rule>
<rule>
<include state="string_common"/>
</rule>
<rule pattern="\$+">
<token type="LiteralStringSingle"/>
</rule>
</state>
</rules>
</lexer>

40
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,40 @@
<lexer>
<config>
<name>Diff</name>
<alias>diff</alias>
<alias>udiff</alias>
<filename>*.diff</filename>
<filename>*.patch</filename>
<mime_type>text/x-diff</mime_type>
<mime_type>text/x-patch</mime_type>
<ensure_nl>true</ensure_nl>
</config>
<rules>
<state name="root">
<rule pattern=" .*\n">
<token type="Text"/>
</rule>
<rule pattern="\+.*\n">
<token type="GenericInserted"/>
</rule>
<rule pattern="-.*\n">
<token type="GenericDeleted"/>
</rule>
<rule pattern="!.*\n">
<token type="GenericStrong"/>
</rule>
<rule pattern="@.*\n">
<token type="GenericSubheading"/>
</rule>
<rule pattern="([Ii]ndex|diff).*\n">
<token type="GenericHeading"/>
</rule>
<rule pattern="=.*\n">
<token type="GenericHeading"/>
</rule>
<rule pattern=".*\n">
<token type="Text"/>
</rule>
</state>
</rules>
</lexer>

153
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,153 @@
<lexer>
<config>
<name>Django/Jinja</name>
<alias>django</alias>
<alias>jinja</alias>
<mime_type>application/x-django-templating</mime_type>
<mime_type>application/x-jinja</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="var">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="(-?)(\}\})">
<bygroups>
<token type="Text"/>
<token type="CommentPreproc"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<include state="varnames"/>
</rule>
</state>
<state name="block">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="(-?)(%\})">
<bygroups>
<token type="Text"/>
<token type="CommentPreproc"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<include state="varnames"/>
</rule>
<rule pattern=".">
<token type="Punctuation"/>
</rule>
</state>
<state name="root">
<rule pattern="[^{]+">
<token type="Other"/>
</rule>
<rule pattern="\{\{">
<token type="CommentPreproc"/>
<push state="var"/>
</rule>
<rule pattern="\{[*#].*?[*#]\}">
<token type="Comment"/>
</rule>
<rule pattern="(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endcomment)(\s*-?)(%\})">
<bygroups>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="CommentPreproc"/>
<token type="Comment"/>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="CommentPreproc"/>
</bygroups>
</rule>
<rule pattern="(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})">
<bygroups>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="CommentPreproc"/>
</bygroups>
</rule>
<rule pattern="(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)">
<bygroups>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
<push state="block"/>
</rule>
<rule pattern="(\{%)(-?\s*)([a-zA-Z_]\w*)">
<bygroups>
<token type="CommentPreproc"/>
<token type="Text"/>
<token type="Keyword"/>
</bygroups>
<push state="block"/>
</rule>
<rule pattern="\{">
<token type="Other"/>
</rule>
</state>
<state name="varnames">
<rule pattern="(\|)(\s*)([a-zA-Z_]\w*)">
<bygroups>
<token type="Operator"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(_|true|false|none|True|False|None)\b">
<token type="KeywordPseudo"/>
</rule>
<rule pattern="(in|as|reversed|recursive|not|and|or|is|if|else|import|with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(loop|block|super|forloop)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="[a-zA-Z_][\w-]*">
<token type="NameVariable"/>
</rule>
<rule pattern="\.\w+">
<token type="NameVariable"/>
</rule>
<rule pattern=":?&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern=":?&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="([{}()\[\]+\-*/,:~]|[&gt;&lt;=]=?)">
<token type="Operator"/>
</rule>
<rule pattern="[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?">
<token type="LiteralNumber"/>
</rule>
</state>
</rules>
</lexer>

42
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,42 @@
<?xml version="1.0"?>
<lexer>
<config>
<name>dns</name>
<alias>zone</alias>
<alias>bind</alias>
</config>
<rules>
<state name="root">
<rule pattern="\b(IN|A|AAAA|AFSDB|APL|CAA|CDNSKEY|CDS|CERT|CNAME|DHCID|DLV|DNAME|DNSKEY|DS|HIP|IPSECKEY|KEY|KX|LOC|MX|NAPTR|NS|NSEC|NSEC3|NSEC3PARAM|PTR|RRSIG|RP|SIG|SOA|SRV|SSHFP|TA|TKEY|TLSA|TSIG|TXT)\b">
<token type="Keyword"/>
</rule>
<rule pattern=";.*(\S|$)">
<token type="Comment"/>
</rule>
<rule pattern="\b((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)){3}))|:)))\b">
<token type="LiteralNumberIntegerLong"/>
</rule>
<rule pattern="\b((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\b">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\b\d+[dhwm]?">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="\b([[:alnum:].-])+\.($|\s)">
<token type="NameProperty"/>
</rule>
<rule pattern="^(@|[[:alnum:]-]+)">
<token type="NameClass"/>
</rule>
<rule pattern="^\$(TTL|GENERATE|INCLUDE|ORIGIN)">
<token type="NameAttribute"/>
</rule>
<rule pattern="\(|\)">
<token type="Punctuation"/>
</rule>
<rule pattern="[\r\n\s\t]+">
<token type="TextWhitespace"/>
</rule>
</state>
</rules>
</lexer>

168
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,168 @@
<lexer>
<config>
<name>DTD</name>
<alias>dtd</alias>
<filename>*.dtd</filename>
<mime_type>application/xml-dtd</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="common">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="(%|&amp;)[^;]*;">
<token type="NameEntity"/>
</rule>
<rule pattern="&lt;!--">
<token type="Comment"/>
<push state="comment"/>
</rule>
<rule pattern="[(|)*,?+]">
<token type="Operator"/>
</rule>
<rule pattern="&#34;[^&#34;]*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\&#39;[^\&#39;]*\&#39;">
<token type="LiteralStringSingle"/>
</rule>
</state>
<state name="comment">
<rule pattern="[^-]+">
<token type="Comment"/>
</rule>
<rule pattern="--&gt;">
<token type="Comment"/>
<pop depth="1"/>
</rule>
<rule pattern="-">
<token type="Comment"/>
</rule>
</state>
<state name="element">
<rule>
<include state="common"/>
</rule>
<rule pattern="EMPTY|ANY|#PCDATA">
<token type="KeywordConstant"/>
</rule>
<rule pattern="[^&gt;\s|()?+*,]+">
<token type="NameTag"/>
</rule>
<rule pattern="&gt;">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
</state>
<state name="attlist">
<rule>
<include state="common"/>
</rule>
<rule pattern="CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION">
<token type="KeywordConstant"/>
</rule>
<rule pattern="#REQUIRED|#IMPLIED|#FIXED">
<token type="KeywordConstant"/>
</rule>
<rule pattern="xml:space|xml:lang">
<token type="KeywordReserved"/>
</rule>
<rule pattern="[^&gt;\s|()?+*,]+">
<token type="NameAttribute"/>
</rule>
<rule pattern="&gt;">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
</state>
<state name="entity">
<rule>
<include state="common"/>
</rule>
<rule pattern="SYSTEM|PUBLIC|NDATA">
<token type="KeywordConstant"/>
</rule>
<rule pattern="[^&gt;\s|()?+*,]+">
<token type="NameEntity"/>
</rule>
<rule pattern="&gt;">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
</state>
<state name="notation">
<rule>
<include state="common"/>
</rule>
<rule pattern="SYSTEM|PUBLIC">
<token type="KeywordConstant"/>
</rule>
<rule pattern="[^&gt;\s|()?+*,]+">
<token type="NameAttribute"/>
</rule>
<rule pattern="&gt;">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule>
<include state="common"/>
</rule>
<rule pattern="(&lt;!ELEMENT)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameTag"/>
</bygroups>
<push state="element"/>
</rule>
<rule pattern="(&lt;!ATTLIST)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameTag"/>
</bygroups>
<push state="attlist"/>
</rule>
<rule pattern="(&lt;!ENTITY)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameEntity"/>
</bygroups>
<push state="entity"/>
</rule>
<rule pattern="(&lt;!NOTATION)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameTag"/>
</bygroups>
<push state="notation"/>
</rule>
<rule pattern="(&lt;!\[)([^\[\s]+)(\s*)(\[)">
<bygroups>
<token type="Keyword"/>
<token type="NameEntity"/>
<token type="Text"/>
<token type="Keyword"/>
</bygroups>
</rule>
<rule pattern="(&lt;!DOCTYPE)(\s+)([^&gt;\s]+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameTag"/>
</bygroups>
</rule>
<rule pattern="PUBLIC|SYSTEM">
<token type="KeywordConstant"/>
</rule>
<rule pattern="[\[\]&gt;]">
<token type="Keyword"/>
</rule>
</state>
</rules>
</lexer>

176
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,176 @@
<lexer>
<config>
<name>Dylan</name>
<alias>dylan</alias>
<filename>*.dylan</filename>
<filename>*.dyl</filename>
<filename>*.intr</filename>
<mime_type>text/x-dylan</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="string">
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="\\([\\abfnrtv&#34;\&#39;]|x[a-f0-9]{2,4}|[0-7]{1,3})">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="[^\\&#34;\n]+">
<token type="LiteralString"/>
</rule>
<rule pattern="\\\n">
<token type="LiteralString"/>
</rule>
<rule pattern="\\">
<token type="LiteralString"/>
</rule>
</state>
<state name="root">
<rule pattern="\s+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)">
<bygroups>
<token type="NameAttribute"/>
<token type="TextWhitespace"/>
<token type="LiteralString"/>
</bygroups>
</rule>
<rule>
<push state="code"/>
</rule>
</state>
<state name="code">
<rule pattern="\s+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push state="comment"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="string"/>
</rule>
<rule pattern="&#39;(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\&#39;\n])&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="#b[01]+">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="#o[0-7]+">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[-+]?\d+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="#x[0-9a-f]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="(\?\\?)([\w!&amp;*&lt;&gt;|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)">
<bygroups>
<token type="Operator"/>
<token type="NameVariable"/>
<token type="Operator"/>
<token type="NameBuiltin"/>
</bygroups>
</rule>
<rule pattern="(\?)(:)(token|name|variable|expression|body|case-body|\*)">
<bygroups>
<token type="Operator"/>
<token type="Operator"/>
<token type="NameVariable"/>
</bygroups>
</rule>
<rule pattern="(\?\\?)([\w!&amp;*&lt;&gt;|^$%@+~?/=-]+)">
<bygroups>
<token type="Operator"/>
<token type="NameVariable"/>
</bygroups>
</rule>
<rule pattern="(=&gt;|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])">
<token type="Punctuation"/>
</rule>
<rule pattern=":=">
<token type="Operator"/>
</rule>
<rule pattern="#[tf]">
<token type="Literal"/>
</rule>
<rule pattern="#&#34;">
<token type="LiteralStringSymbol"/>
<push state="symbol"/>
</rule>
<rule pattern="#[a-z0-9-]+">
<token type="Keyword"/>
</rule>
<rule pattern="#(all-keys|include|key|next|rest)">
<token type="Keyword"/>
</rule>
<rule pattern="[\w!&amp;*&lt;&gt;|^$%@+~?/=-]+:">
<token type="KeywordConstant"/>
</rule>
<rule pattern="&lt;[\w!&amp;*&lt;&gt;|^$%@+~?/=-]+&gt;">
<token type="NameClass"/>
</rule>
<rule pattern="\*[\w!&amp;*&lt;&gt;|^$%@+~?/=-]+\*">
<token type="NameVariableGlobal"/>
</rule>
<rule pattern="\$[\w!&amp;*&lt;&gt;|^$%@+~?/=-]+">
<token type="NameConstant"/>
</rule>
<rule pattern="(let|method|function)([ \t]+)([\w!&amp;*&lt;&gt;|^$%@+~?/=-]+)">
<bygroups>
<token type="NameBuiltin"/>
<token type="TextWhitespace"/>
<token type="NameVariable"/>
</bygroups>
</rule>
<rule pattern="(error|signal|return|break)">
<token type="NameException"/>
</rule>
<rule pattern="(\\?)([\w!&amp;*&lt;&gt;|^$%@+~?/=-]+)">
<bygroups>
<token type="Operator"/>
<token type="Name"/>
</bygroups>
</rule>
</state>
<state name="comment">
<rule pattern="[^*/]">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push/>
</rule>
<rule pattern="\*/">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule pattern="[*/]">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="symbol">
<rule pattern="&#34;">
<token type="LiteralStringSymbol"/>
<pop depth="1"/>
</rule>
<rule pattern="[^\\&#34;]+">
<token type="LiteralStringSymbol"/>
</rule>
</state>
</rules>
</lexer>

90
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,90 @@
<lexer>
<config>
<name>EBNF</name>
<alias>ebnf</alias>
<filename>*.ebnf</filename>
<mime_type>text/x-ebnf</mime_type>
</config>
<rules>
<state name="comment">
<rule pattern="[^*)]">
<token type="CommentMultiline"/>
</rule>
<rule>
<include state="comment_start"/>
</rule>
<rule pattern="\*\)">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule pattern="[*)]">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="identifier">
<rule pattern="([a-zA-Z][\w \-]*)">
<token type="Keyword"/>
</rule>
</state>
<state name="root">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="comment_start"/>
</rule>
<rule>
<include state="identifier"/>
</rule>
<rule pattern="=">
<token type="Operator"/>
<push state="production"/>
</rule>
</state>
<state name="production">
<rule>
<include state="whitespace"/>
</rule>
<rule>
<include state="comment_start"/>
</rule>
<rule>
<include state="identifier"/>
</rule>
<rule pattern="&#34;[^&#34;]*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#39;[^&#39;]*&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="(\?[^?]*\?)">
<token type="NameEntity"/>
</rule>
<rule pattern="[\[\]{}(),|]">
<token type="Punctuation"/>
</rule>
<rule pattern="-">
<token type="Operator"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="\.">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="\s+">
<token type="Text"/>
</rule>
</state>
<state name="comment_start">
<rule pattern="\(\*">
<token type="CommentMultiline"/>
<push state="comment"/>
</rule>
</state>
</rules>
</lexer>

743
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,743 @@
<lexer>
<config>
<name>Elixir</name>
<alias>elixir</alias>
<alias>ex</alias>
<alias>exs</alias>
<filename>*.ex</filename>
<filename>*.exs</filename>
<mime_type>text/x-elixir</mime_type>
</config>
<rules>
<state name="cb-intp">
<rule pattern="[^#\}\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\}[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="triquot-end">
<rule pattern="[a-zA-Z]+">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="apos-no-intp">
<rule pattern="[^&#39;\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="&#39;[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
</state>
<state name="slas-no-intp">
<rule pattern="[^/\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="/[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
</state>
<state name="pipe-no-intp">
<rule pattern="[^\|\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\|[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
</state>
<state name="apos-intp">
<rule pattern="[^#&#39;\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="&#39;[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="cb-no-intp">
<rule pattern="[^\}\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\}[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
</state>
<state name="heredoc_double">
<rule pattern="^\s*&#34;&#34;&#34;">
<token type="LiteralStringHeredoc"/>
<pop depth="1"/>
</rule>
<rule>
<include state="heredoc_interpol"/>
</rule>
</state>
<state name="triapos-end">
<rule pattern="[a-zA-Z]+">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="interpol_string">
<rule pattern="\}">
<token type="LiteralStringInterpol"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="triquot-intp">
<rule pattern="^\s*&#34;&#34;&#34;">
<token type="LiteralStringHeredoc"/>
<pop depth="1"/>
</rule>
<rule>
<include state="heredoc_interpol"/>
</rule>
</state>
<state name="interpol">
<rule pattern="#\{">
<token type="LiteralStringInterpol"/>
<push state="interpol_string"/>
</rule>
</state>
<state name="pa-no-intp">
<rule pattern="[^\)\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\)[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
</state>
<state name="map_key">
<rule>
<include state="root"/>
</rule>
<rule pattern=":">
<token type="Punctuation"/>
<push state="map_val"/>
</rule>
<rule pattern="=&gt;">
<token type="Punctuation"/>
<push state="map_val"/>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="pa-intp">
<rule pattern="[^#\)\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\)[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="tuple">
<rule>
<include state="root"/>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="#.*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="(\?)(\\x\{)([\da-fA-F]+)(\})">
<bygroups>
<token type="LiteralStringChar"/>
<token type="LiteralStringEscape"/>
<token type="LiteralNumberHex"/>
<token type="LiteralStringEscape"/>
</bygroups>
</rule>
<rule pattern="(\?)(\\x[\da-fA-F]{1,2})">
<bygroups>
<token type="LiteralStringChar"/>
<token type="LiteralStringEscape"/>
</bygroups>
</rule>
<rule pattern="(\?)(\\[abdefnrstv])">
<bygroups>
<token type="LiteralStringChar"/>
<token type="LiteralStringEscape"/>
</bygroups>
</rule>
<rule pattern="\?\\?.">
<token type="LiteralStringChar"/>
</rule>
<rule pattern=":::">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="::">
<token type="Operator"/>
</rule>
<rule pattern=":(?:\.\.\.|&lt;&lt;&gt;&gt;|%\{\}|%|\{\})">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern=":(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\&lt;\&lt;\&lt;|\&gt;\&gt;\&gt;|\|\|\||\&amp;\&amp;\&amp;|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\&gt;\&gt;|\&lt;\~\&gt;|\|\~\&gt;|\&lt;\|\&gt;|\=\=|\!\=|\&lt;\=|\&gt;\=|\&amp;\&amp;|\|\||\&lt;\&gt;|\+\+|\-\-|\|\&gt;|\=\~|\-\&gt;|\&lt;\-|\||\.|\=|\~\&gt;|\&lt;\~|\&lt;|\&gt;|\+|\-|\*|\/|\!|\^|\&amp;))">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern=":&#34;">
<token type="LiteralStringSymbol"/>
<push state="string_double_atom"/>
</rule>
<rule pattern=":&#39;">
<token type="LiteralStringSymbol"/>
<push state="string_single_atom"/>
</rule>
<rule pattern="((?:\.\.\.|&lt;&lt;&gt;&gt;|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\&lt;\&lt;\&lt;|\&gt;\&gt;\&gt;|\|\|\||\&amp;\&amp;\&amp;|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\&gt;\&gt;|\&lt;\~\&gt;|\|\~\&gt;|\&lt;\|\&gt;|\=\=|\!\=|\&lt;\=|\&gt;\=|\&amp;\&amp;|\|\||\&lt;\&gt;|\+\+|\-\-|\|\&gt;|\=\~|\-\&gt;|\&lt;\-|\||\.|\=|\~\&gt;|\&lt;\~|\&lt;|\&gt;|\+|\-|\*|\/|\!|\^|\&amp;)))(:)(?=\s|\n)">
<bygroups>
<token type="LiteralStringSymbol"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(fn|do|end|after|else|rescue|catch)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(not|and|or|when|in)\b">
<token type="OperatorWord"/>
</rule>
<rule pattern="(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(import|require|use|alias)\b">
<token type="KeywordNamespace"/>
</rule>
<rule pattern="(nil|true|false)\b">
<token type="NameConstant"/>
</rule>
<rule pattern="(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b">
<token type="NamePseudo"/>
</rule>
<rule pattern="@(?:\.\.\.|[a-z_]\w*[!?]?)">
<token type="NameAttribute"/>
</rule>
<rule pattern="(?:\.\.\.|[a-z_]\w*[!?]?)">
<token type="Name"/>
</rule>
<rule pattern="(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)">
<bygroups>
<token type="Punctuation"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="\&lt;\&lt;\&lt;|\&gt;\&gt;\&gt;|\|\|\||\&amp;\&amp;\&amp;|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\&gt;\&gt;|\&lt;\~\&gt;|\|\~\&gt;|\&lt;\|\&gt;">
<token type="Operator"/>
</rule>
<rule pattern="\=\=|\!\=|\&lt;\=|\&gt;\=|\&amp;\&amp;|\|\||\&lt;\&gt;|\+\+|\-\-|\|\&gt;|\=\~|\-\&gt;|\&lt;\-|\||\.|\=|\~\&gt;|\&lt;\~">
<token type="Operator"/>
</rule>
<rule pattern="\\\\|\&lt;\&lt;|\&gt;\&gt;|\=\&gt;|\(|\)|\:|\;|\,|\[|\]">
<token type="Punctuation"/>
</rule>
<rule pattern="&amp;\d">
<token type="NameEntity"/>
</rule>
<rule pattern="\&lt;|\&gt;|\+|\-|\*|\/|\!|\^|\&amp;">
<token type="Operator"/>
</rule>
<rule pattern="0b[01](_?[01])*">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="0o[0-7](_?[0-7])*">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0x[\da-fA-F](_?[\dA-Fa-f])*">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\d(_?\d)*">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;&#34;&#34;\s*">
<token type="LiteralStringHeredoc"/>
<push state="heredoc_double"/>
</rule>
<rule pattern="&#39;&#39;&#39;\s*$">
<token type="LiteralStringHeredoc"/>
<push state="heredoc_single"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string_double"/>
</rule>
<rule pattern="&#39;">
<token type="LiteralStringSingle"/>
<push state="string_single"/>
</rule>
<rule>
<include state="sigils"/>
</rule>
<rule pattern="%\{">
<token type="Punctuation"/>
<push state="map_key"/>
</rule>
<rule pattern="\{">
<token type="Punctuation"/>
<push state="tuple"/>
</rule>
</state>
<state name="sigils">
<rule pattern="(~[a-z])(&#34;&#34;&#34;)">
<bygroups>
<token type="LiteralStringOther"/>
<token type="LiteralStringHeredoc"/>
</bygroups>
<push state="triquot-end" state="triquot-intp"/>
</rule>
<rule pattern="(~[A-Z])(&#34;&#34;&#34;)">
<bygroups>
<token type="LiteralStringOther"/>
<token type="LiteralStringHeredoc"/>
</bygroups>
<push state="triquot-end" state="triquot-no-intp"/>
</rule>
<rule pattern="(~[a-z])(&#39;&#39;&#39;)">
<bygroups>
<token type="LiteralStringOther"/>
<token type="LiteralStringHeredoc"/>
</bygroups>
<push state="triapos-end" state="triapos-intp"/>
</rule>
<rule pattern="(~[A-Z])(&#39;&#39;&#39;)">
<bygroups>
<token type="LiteralStringOther"/>
<token type="LiteralStringHeredoc"/>
</bygroups>
<push state="triapos-end" state="triapos-no-intp"/>
</rule>
<rule pattern="~[a-z]\{">
<token type="LiteralStringOther"/>
<push state="cb-intp"/>
</rule>
<rule pattern="~[A-Z]\{">
<token type="LiteralStringOther"/>
<push state="cb-no-intp"/>
</rule>
<rule pattern="~[a-z]\[">
<token type="LiteralStringOther"/>
<push state="sb-intp"/>
</rule>
<rule pattern="~[A-Z]\[">
<token type="LiteralStringOther"/>
<push state="sb-no-intp"/>
</rule>
<rule pattern="~[a-z]\(">
<token type="LiteralStringOther"/>
<push state="pa-intp"/>
</rule>
<rule pattern="~[A-Z]\(">
<token type="LiteralStringOther"/>
<push state="pa-no-intp"/>
</rule>
<rule pattern="~[a-z]&lt;">
<token type="LiteralStringOther"/>
<push state="ab-intp"/>
</rule>
<rule pattern="~[A-Z]&lt;">
<token type="LiteralStringOther"/>
<push state="ab-no-intp"/>
</rule>
<rule pattern="~[a-z]/">
<token type="LiteralStringOther"/>
<push state="slas-intp"/>
</rule>
<rule pattern="~[A-Z]/">
<token type="LiteralStringOther"/>
<push state="slas-no-intp"/>
</rule>
<rule pattern="~[a-z]\|">
<token type="LiteralStringOther"/>
<push state="pipe-intp"/>
</rule>
<rule pattern="~[A-Z]\|">
<token type="LiteralStringOther"/>
<push state="pipe-no-intp"/>
</rule>
<rule pattern="~[a-z]&#34;">
<token type="LiteralStringOther"/>
<push state="quot-intp"/>
</rule>
<rule pattern="~[A-Z]&#34;">
<token type="LiteralStringOther"/>
<push state="quot-no-intp"/>
</rule>
<rule pattern="~[a-z]&#39;">
<token type="LiteralStringOther"/>
<push state="apos-intp"/>
</rule>
<rule pattern="~[A-Z]&#39;">
<token type="LiteralStringOther"/>
<push state="apos-no-intp"/>
</rule>
</state>
<state name="triapos-intp">
<rule pattern="^\s*&#39;&#39;&#39;">
<token type="LiteralStringHeredoc"/>
<pop depth="1"/>
</rule>
<rule>
<include state="heredoc_interpol"/>
</rule>
</state>
<state name="string_single_atom">
<rule pattern="[^#&#39;\\]+">
<token type="LiteralStringSymbol"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="(&#39;)">
<bygroups>
<token type="LiteralStringSymbol"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="quot-intp">
<rule pattern="[^#&#34;\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="&#34;[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="sb-no-intp">
<rule pattern="[^\]\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\][a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
</state>
<state name="slas-intp">
<rule pattern="[^#/\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="/[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="sb-intp">
<rule pattern="[^#\]\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\][a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="heredoc_no_interpol">
<rule pattern="[^\\\n]+">
<token type="LiteralStringHeredoc"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringHeredoc"/>
</rule>
<rule pattern="\n+">
<token type="LiteralStringHeredoc"/>
</rule>
</state>
<state name="pipe-intp">
<rule pattern="[^#\|\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\|[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="map_val">
<rule>
<include state="root"/>
</rule>
<rule pattern=",">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="(?=\})">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="heredoc_single">
<rule pattern="^\s*&#39;&#39;&#39;">
<token type="LiteralStringHeredoc"/>
<pop depth="1"/>
</rule>
<rule>
<include state="heredoc_interpol"/>
</rule>
</state>
<state name="heredoc_interpol">
<rule pattern="[^#\\\n]+">
<token type="LiteralStringHeredoc"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringHeredoc"/>
</rule>
<rule pattern="\n+">
<token type="LiteralStringHeredoc"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="string_single">
<rule pattern="[^#&#39;\\]+">
<token type="LiteralStringSingle"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="(&#39;)">
<bygroups>
<token type="LiteralStringSingle"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="string_double_atom">
<rule pattern="[^#&#34;\\]+">
<token type="LiteralStringSymbol"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="(&#34;)">
<bygroups>
<token type="LiteralStringSymbol"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="ab-no-intp">
<rule pattern="[^&gt;\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="&gt;[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
</state>
<state name="ab-intp">
<rule pattern="[^#&gt;\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="&gt;[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="quot-no-intp">
<rule pattern="[^&#34;\\]+">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringOther"/>
</rule>
<rule pattern="&#34;[a-zA-Z]*">
<token type="LiteralStringOther"/>
<pop depth="1"/>
</rule>
</state>
<state name="triapos-no-intp">
<rule pattern="^\s*&#39;&#39;&#39;">
<token type="LiteralStringHeredoc"/>
<pop depth="1"/>
</rule>
<rule>
<include state="heredoc_no_interpol"/>
</rule>
</state>
<state name="string_double">
<rule pattern="[^#&#34;\\]+">
<token type="LiteralStringDouble"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="(&#34;)">
<bygroups>
<token type="LiteralStringDouble"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule>
<include state="interpol"/>
</rule>
</state>
<state name="escapes">
<rule pattern="(\\x\{)([\da-fA-F]+)(\})">
<bygroups>
<token type="LiteralStringEscape"/>
<token type="LiteralNumberHex"/>
<token type="LiteralStringEscape"/>
</bygroups>
</rule>
<rule pattern="(\\x[\da-fA-F]{1,2})">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="(\\[abdefnrstv])">
<token type="LiteralStringEscape"/>
</rule>
</state>
<state name="triquot-no-intp">
<rule pattern="^\s*&#34;&#34;&#34;">
<token type="LiteralStringHeredoc"/>
<pop depth="1"/>
</rule>
<rule>
<include state="heredoc_no_interpol"/>
</rule>
</state>
</rules>
</lexer>

119
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,119 @@
<lexer>
<config>
<name>Elm</name>
<alias>elm</alias>
<filename>*.elm</filename>
<mime_type>text/x-elm</mime_type>
</config>
<rules>
<state name="shader">
<rule pattern="\|(?!\])">
<token type="NameEntity"/>
</rule>
<rule pattern="\|\]">
<token type="NameEntity"/>
<pop depth="1"/>
</rule>
<rule pattern=".*\n">
<token type="NameEntity"/>
</rule>
</state>
<state name="root">
<rule pattern="\{-">
<token type="CommentMultiline"/>
<push state="comment"/>
</rule>
<rule pattern="--.*">
<token type="CommentSingle"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="doublequote"/>
</rule>
<rule pattern="^\s*module\s*">
<token type="KeywordNamespace"/>
<push state="imports"/>
</rule>
<rule pattern="^\s*import\s*">
<token type="KeywordNamespace"/>
<push state="imports"/>
</rule>
<rule pattern="\[glsl\|.*">
<token type="NameEntity"/>
<push state="shader"/>
</rule>
<rule pattern="(import|module|alias|where|port|else|type|case|then|let|as|of|if|in)\b">
<token type="KeywordReserved"/>
</rule>
<rule pattern="[A-Z]\w*">
<token type="KeywordType"/>
</rule>
<rule pattern="^main ">
<token type="KeywordReserved"/>
</rule>
<rule pattern="\((&lt;-|\|\||\|&gt;|&amp;&amp;|\+\+|-&gt;|\.\.|//|&gt;&gt;|&gt;=|/=|==|::|&lt;~|&lt;\||&lt;=|&lt;&lt;|~|&lt;|=|:|&gt;|&#39;|/|\\|\.|\^|-|`|\+|\*|\||%)\)">
<token type="NameFunction"/>
</rule>
<rule pattern="(&lt;-|\|\||\|&gt;|&amp;&amp;|\+\+|-&gt;|\.\.|//|&gt;&gt;|&gt;=|/=|==|::|&lt;~|&lt;\||&lt;=|&lt;&lt;|~|&lt;|=|:|&gt;|&#39;|/|\\|\.|\^|-|`|\+|\*|\||%)">
<token type="NameFunction"/>
</rule>
<rule>
<include state="numbers"/>
</rule>
<rule pattern="[a-z_][a-zA-Z_\&#39;]*">
<token type="NameVariable"/>
</rule>
<rule pattern="[,()\[\]{}]">
<token type="Punctuation"/>
</rule>
</state>
<state name="comment">
<rule pattern="-(?!\})">
<token type="CommentMultiline"/>
</rule>
<rule pattern="\{-">
<token type="CommentMultiline"/>
<push state="comment"/>
</rule>
<rule pattern="[^-}]">
<token type="CommentMultiline"/>
</rule>
<rule pattern="-\}">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
</state>
<state name="doublequote">
<rule pattern="\\u[0-9a-fA-F]{4}">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="\\[nrfvb\\&#34;]">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="[^&#34;]">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
</state>
<state name="imports">
<rule pattern="\w+(\.\w+)*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
</state>
<state name="numbers">
<rule pattern="_?\d+\.(?=\d+)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="_?\d+">
<token type="LiteralNumberInteger"/>
</rule>
</state>
</rules>
</lexer>

132
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,132 @@
<lexer>
<config>
<name>EmacsLisp</name>
<alias>emacs</alias>
<alias>elisp</alias>
<alias>emacs-lisp</alias>
<filename>*.el</filename>
<mime_type>text/x-elisp</mime_type>
<mime_type>application/x-elisp</mime_type>
</config>
<rules>
<state name="string">
<rule pattern="[^&#34;\\`]+">
<token type="LiteralString"/>
</rule>
<rule pattern="`((?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|]|[#.:])*)\&#39;">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="`">
<token type="LiteralString"/>
</rule>
<rule pattern="\\.">
<token type="LiteralString"/>
</rule>
<rule pattern="\\\n">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
</state>
<state name="root">
<rule>
<push state="body"/>
</rule>
</state>
<state name="body">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern=";.*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="string"/>
</rule>
<rule pattern="\?([^\\]|\\.)">
<token type="LiteralStringChar"/>
</rule>
<rule pattern=":((?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|]|[#.:])*)">
<token type="NameBuiltin"/>
</rule>
<rule pattern="::((?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|]|[#.:])*)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="&#39;((?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|]|[#.:])*)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="&#39;">
<token type="Operator"/>
</rule>
<rule pattern="`">
<token type="Operator"/>
</rule>
<rule pattern="[-+]?\d+\.?(?=[ &#34;()\]\&#39;\n,;`])">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[-+]?\d+/\d+(?=[ &#34;()\]\&#39;\n,;`])">
<token type="LiteralNumber"/>
</rule>
<rule pattern="[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)(?=[ &#34;()\]\&#39;\n,;`])">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\[|\]">
<token type="Punctuation"/>
</rule>
<rule pattern="#:((?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|]|[#.:])*)">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="#\^\^?">
<token type="Operator"/>
</rule>
<rule pattern="#\&#39;">
<token type="NameFunction"/>
</rule>
<rule pattern="#[bB][+-]?[01]+(/[01]+)?">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="#[oO][+-]?[0-7]+(/[0-7]+)?">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?">
<token type="LiteralNumber"/>
</rule>
<rule pattern="#\d+=">
<token type="Operator"/>
</rule>
<rule pattern="#\d+#">
<token type="Operator"/>
</rule>
<rule pattern="(,@|,|\.|:)">
<token type="Operator"/>
</rule>
<rule pattern="(t|nil)(?=[ &#34;()\]\&#39;\n,;`])">
<token type="NameConstant"/>
</rule>
<rule pattern="\*((?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|]|[#.:])*)\*">
<token type="NameVariableGlobal"/>
</rule>
<rule pattern="((?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|])(?:\\.|[\w!$%&amp;*+-/&lt;=&gt;?@^{}~|]|[#.:])*)">
<token type="NameVariable"/>
</rule>
<rule pattern="#\(">
<token type="Operator"/>
<push state="body"/>
</rule>
<rule pattern="\(">
<token type="Punctuation"/>
<push state="body"/>
</rule>
<rule pattern="\)">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

166
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,166 @@
<lexer>
<config>
<name>Erlang</name>
<alias>erlang</alias>
<filename>*.erl</filename>
<filename>*.hrl</filename>
<filename>*.es</filename>
<filename>*.escript</filename>
<mime_type>text/x-erlang</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="%.*\n">
<token type="Comment"/>
</rule>
<rule pattern="(receive|after|begin|catch|query|case|cond|when|let|fun|end|try|of|if)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(localtime_to_universaltime|universaltime_to_localtime|list_to_existing_atom|check_process_code|bitstring_to_list|list_to_bitstring|function_exported|is_process_alive|iolist_to_binary|bump_reductions|garbage_collect|process_display|suspend_process|list_to_integer|disconnect_node|integer_to_list|trace_delivered|send_nosuspend|list_to_binary|system_profile|binary_to_term|binary_to_list|resume_process|append_element|term_to_binary|system_monitor|list_to_tuple|spawn_monitor|delete_module|trace_pattern|tuple_to_list|list_to_float|float_to_list|module_loaded|port_connect|is_bitstring|port_to_list|monitor_node|process_info|port_control|split_binary|cancel_timer|purge_module|group_leader|list_to_atom|atom_to_list|port_command|is_reference|process_flag|pid_to_list|system_info|start_timer|iolist_size|fun_to_list|load_module|is_function|ref_to_list|list_to_pid|system_flag|make_tuple|is_builtin|unregister|is_boolean|set_cookie|md5_update|spawn_link|setelement|trace_info|read_timer|statistics|send_after|port_close|is_integer|tuple_size|spawn_opt|open_port|is_record|is_binary|md5_final|port_call|port_info|is_number|byte_size|demonitor|register|is_float|bit_size|fun_info|get_keys|is_tuple|is_atom|element|is_list|is_port|monitor|display|whereis|is_pid|memory|unlink|phash2|length|spawn|nodes|trace|round|apply|erase|phash|trunc|float|size|link|node|exit|hash|send|get|md5|put|abs|hd|tl)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(andalso|orelse|bxor|band|bnot|and|bsr|bsl|div|not|rem|bor|xor|or)\b">
<token type="OperatorWord"/>
</rule>
<rule pattern="^-">
<token type="Punctuation"/>
<push state="directive"/>
</rule>
<rule pattern="(\+\+?|--?|\*|/|&lt;|&gt;|/=|=:=|=/=|=&lt;|&gt;=|==?|&lt;-|!|\?)">
<token type="Operator"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="string"/>
</rule>
<rule pattern="&lt;&lt;">
<token type="NameLabel"/>
</rule>
<rule pattern="&gt;&gt;">
<token type="NameLabel"/>
</rule>
<rule pattern="((?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;))(:)">
<bygroups>
<token type="NameNamespace"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(?:^|(?&lt;=:))((?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;))(\s*)(\()">
<bygroups>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="[+-]?(?:[2-9]|[12][0-9]|3[0-6])#[0-9a-zA-Z]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[+-]?\d+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[+-]?\d+.\d+">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[]\[:_@\&#34;.{}()|;,]">
<token type="Punctuation"/>
</rule>
<rule pattern="(?:[A-Z_]\w*)">
<token type="NameVariable"/>
</rule>
<rule pattern="(?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;)">
<token type="Name"/>
</rule>
<rule pattern="\?(?:(?:[A-Z_]\w*)|(?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;))">
<token type="NameConstant"/>
</rule>
<rule pattern="\$(?:(?:\\(?:[bdefnrstv\&#39;&#34;\\]|[0-7][0-7]?[0-7]?|(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})|\^[a-zA-Z]))|\\[ %]|[^\\])">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="#(?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;)(:?\.(?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;))?">
<token type="NameLabel"/>
</rule>
<rule pattern="\A#!.+\n">
<token type="CommentHashbang"/>
</rule>
<rule pattern="#\{">
<token type="Punctuation"/>
<push state="map_key"/>
</rule>
</state>
<state name="string">
<rule pattern="(?:\\(?:[bdefnrstv\&#39;&#34;\\]|[0-7][0-7]?[0-7]?|(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})|\^[a-zA-Z]))">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="~[0-9.*]*[~#+BPWXb-ginpswx]">
<token type="LiteralStringInterpol"/>
</rule>
<rule pattern="[^&#34;\\~]+">
<token type="LiteralString"/>
</rule>
<rule pattern="~">
<token type="LiteralString"/>
</rule>
</state>
<state name="directive">
<rule pattern="(define)(\s*)(\()((?:(?:[A-Z_]\w*)|(?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;)))">
<bygroups>
<token type="NameEntity"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="NameConstant"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="(record)(\s*)(\()((?:(?:[A-Z_]\w*)|(?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;)))">
<bygroups>
<token type="NameEntity"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="NameLabel"/>
</bygroups>
<pop depth="1"/>
</rule>
<rule pattern="(?:[a-z]\w*|&#39;[^\n&#39;]*[^\\]&#39;)">
<token type="NameEntity"/>
<pop depth="1"/>
</rule>
</state>
<state name="map_key">
<rule>
<include state="root"/>
</rule>
<rule pattern="=&gt;">
<token type="Punctuation"/>
<push state="map_val"/>
</rule>
<rule pattern=":=">
<token type="Punctuation"/>
<push state="map_val"/>
</rule>
<rule pattern="\}">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
<state name="map_val">
<rule>
<include state="root"/>
</rule>
<rule pattern=",">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
<rule pattern="(?=\})">
<token type="Punctuation"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

412
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,412 @@
<lexer>
<config>
<name>Factor</name>
<alias>factor</alias>
<filename>*.factor</filename>
<mime_type>text/x-factor</mime_type>
</config>
<rules>
<state name="base">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(M:[:]?)(\s+)(\S+)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(C:)(\s+)(\S+)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="(GENERIC:)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="\(\s">
<token type="NameFunction"/>
<push state="stackeffect"/>
</rule>
<rule pattern=";\s">
<token type="Keyword"/>
</rule>
<rule pattern="(USING:)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="vocabs"/>
</rule>
<rule pattern="(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
<token type="NameNamespace"/>
</bygroups>
</rule>
<rule pattern="(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
<token type="NameNamespace"/>
<token type="Text"/>
<token type="NameNamespace"/>
</bygroups>
</rule>
<rule pattern="(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=&gt;\s)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
<token type="NameNamespace"/>
<token type="Text"/>
</bygroups>
<push state="words"/>
</rule>
<rule pattern="(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=&gt;\s+)(\S+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="NameNamespace"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(TUPLE:|ERROR:)(\s+)(\S+)(\s+&lt;\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
<push state="slots"/>
</rule>
<rule pattern="(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
<push state="slots"/>
</rule>
<rule pattern="(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="(PREDICATE:)(\s+)(\S+)(\s+&lt;\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="(C:)(\s+)(\S+)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="(INSTANCE:)(\s+)(\S+)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="(SLOT:)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="(SINGLETON:)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="SINGLETONS:">
<token type="Keyword"/>
<push state="classes"/>
</rule>
<rule pattern="(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="SYMBOLS:\s">
<token type="Keyword"/>
<push state="words"/>
</rule>
<rule pattern="SYNTAX:\s">
<token type="Keyword"/>
</rule>
<rule pattern="ALIEN:\s">
<token type="Keyword"/>
</rule>
<rule pattern="(STRUCT:)(\s+)(\S+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="NameFunction"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="(?:&lt;PRIVATE|PRIVATE&gt;)\s">
<token type="KeywordNamespace"/>
</rule>
<rule pattern="&#34;&#34;&#34;\s+(?:.|\n)*?\s+&#34;&#34;&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;(?:\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="\S+&#34;\s+(?:\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="!\s+.*$">
<token type="Comment"/>
</rule>
<rule pattern="#!\s+.*$">
<token type="Comment"/>
</rule>
<rule pattern="/\*\s+(?:.|\n)*?\s\*/\s">
<token type="Comment"/>
</rule>
<rule pattern="[tf]\s">
<token type="NameConstant"/>
</rule>
<rule pattern="[\\$]\s+\S+">
<token type="NameConstant"/>
</rule>
<rule pattern="M\\\s+\S+\s+\S+">
<token type="NameConstant"/>
</rule>
<rule pattern="[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s">
<token type="LiteralNumber"/>
</rule>
<rule pattern="[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s">
<token type="LiteralNumber"/>
</rule>
<rule pattern="0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s">
<token type="LiteralNumber"/>
</rule>
<rule pattern="NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s">
<token type="LiteralNumber"/>
</rule>
<rule pattern="0b[01]+\s">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="0o[0-7]+\s">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s">
<token type="LiteralNumber"/>
</rule>
<rule pattern="(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s">
<token type="LiteralNumber"/>
</rule>
<rule pattern="(?:deprecated|final|foldable|flushable|inline|recursive)\s">
<token type="Keyword"/>
</rule>
<rule pattern="(identity-hashcode|callstack&gt;array|identity-tuple\?|identity-tuple|retainstack|callstack\?|tri-curry\*|tri-curry@|tri-curry|&lt;wrapper&gt;|datastack|bi-curry@|bi-curry\*|hashcode\*|callstack|\?execute|hashcode|boolean\?|compose\?|&gt;boolean|wrapper\?|bi-curry|unless\*|boolean|assert\?|\(clone\)|either\?|prepose|assert=|execute|wrapper|compose|3curry|assert|2curry|curry\?|object|equal\?|tuple\?|unless|build|3drop|same\?|2tri\*|2tri@|both\?|3keep|4drop|throw|2over|swapd|clear|2keep|2drop|until|curry|4keep|clone|while|tuple|when\*|-rot|tri@|dupd|drop|tri\*|call|when|with|4dup|4dip|3tri|3dup|3dip|2tri|keep|loop|most|2nip|swap|2dup|null|2dip|2bi\*|2bi@|pick|over|and|rot|not|nip|new|if\*|tri|2bi|boa|eq\?|dup|3bi|dip|die|bi\*|bi@|\?if|xor|bi|do|if|or|\?|=)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(assoc-clone-like|assoc-filter-as|assoc-partition|assoc-intersect|assoc-hashcode|assoc-combine|assoc-filter!|assoc-subset\?|assoc-union!|maybe-set-at|extract-keys|assoc-map-as|assoc-differ|assoc-refine|assoc-empty\?|assoc-filter|assoc-diff!|sift-values|assoc-union|assoc-stack|clear-assoc|assoc-all\?|delete-at\*|assoc-find|substitute|assoc-each|assoc-size|assoc-diff|assoc-any\?|assoc-like|rename-at|sift-keys|new-assoc|map&gt;assoc|value-at\*|assoc-map|delete-at|change-at|assoc&gt;map|value-at|push-at|assoc=|values|set-at|&lt;enum&gt;|inc-at|2cache|value\?|assoc\?|&gt;alist|cache|enum\?|assoc|unzip|key\?|enum|keys|\?at|\?of|zip|at\+|at\*|at|of)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(shallow-spread&gt;quot|recursive-hashcode|linear-case-quot|deep-spread&gt;quot|to-fixed-point|execute-effect|wrong-values\?|4cleave&gt;quot|2cleave&gt;quot|wrong-values|3cleave&gt;quot|cleave&gt;quot|call-effect|alist&gt;quot|case&gt;quot|case-find|cond&gt;quot|no-case\?|no-cond\?|no-case|no-cond|4cleave|3cleave|2cleave|cleave|spread|cond|case)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(log2-expects-positive\?|integer&gt;fixnum-strict|log2-expects-positive|out-of-fixnum-range\?|out-of-fixnum-range|find-last-integer|next-power-of-2|\(all-integers\?\)|integer&gt;fixnum|\(find-integer\)|\(each-integer\)|imaginary-part|fp-nan-payload|all-integers\?|find-integer|each-integer|fp-infinity\?|fp-special\?|fp-bitwise=|bits&gt;double|double&gt;bits|power-of-2\?|unless-zero|denominator|next-float|bits&gt;float|float&gt;bits|prev-float|unordered\?|real-part|when-zero|numerator|rational\?|&gt;integer|rational|complex\?|&lt;fp-nan&gt;|fp-qnan\?|fp-snan\?|integer\?|number=|bignum\?|integer|&gt;fixnum|fp-sign|fp-nan\?|fixnum\?|number\?|complex|if-zero|&gt;bignum|bignum|number|fixnum|float\?|bitxor|ratio\?|bitnot|bitand|&gt;float|real\?|bitor|zero\?|even\?|times|shift|float|recip|align|ratio|neg\?|real|log2|bit\?|odd\?|/mod|\?1\+|mod|rem|neg|sgn|u&lt;=|u&gt;=|abs|u&gt;|2/|2\^|/i|/f|sq|&lt;=|u&lt;|&gt;=|-|\+|&lt;|\*|/|&gt;)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(non-negative-integer-expected\?|non-negative-integer-expected|immutable-sequence\?|immutable-sequence|virtual-sequence\?|sequence-hashcode|cartesian-product|check-slice-error|unclip-last-slice|assert-sequence=|assert-sequence\?|virtual-exemplar|virtual-sequence|assert-sequence|trim-head-slice|last-index-from|find-index-from|trim-tail-slice|find-last-from|cartesian-each|collapse-slice|but-last-slice|map-find-last|cartesian-map|collector-for|bounds-error\?|accumulate-as|replace-slice|bounds-check\?|binary-reduce|new-resizable|unless-empty|delete-slice|replicate-as|map-integers|selector-for|bounds-check|reduce-index|bounds-error|unclip-slice|new-sequence|&lt;repetition&gt;|slice-error\?|slice-error|unclip-last|drop-prefix|supremum-by|push-either|2map-reduce|accumulate!|tail-slice\*|repetition\?|check-slice|iota-tuple\?|remove-nth!|sum-lengths|head-slice\*|find-index|clone-like|delete-all|change-nth|prepend-as|member-eq\?|max-length|each-index|map-reduce|iota-tuple|produce-as|snip-slice|accumulate|remove-eq!|last-index|min-length|remove-nth|&lt;reversed&gt;|repetition|tail-slice|3append-as|when-empty|interleave|insert-nth|infimum-by|index-from|set-second|immutable\?|rest-slice|set-fourth|head-slice|trim-slice|set-length|set-third|concat-as|immutable|trim-tail|cut-slice|collector|set-first|sequence\?|sequence=|midpoint@|trim-head|each-from|reversed\?|map-index|partition|find-last|2selector|2sequence|replicate|find-from|filter-as|3sequence|append-as|4sequence|remove-eq|1sequence|virtual@|push-all|lengthen|shorter\?|map-find|reverse!|reversed|exchange|pad-tail|pad-head|surround|selector|shortest|sequence|set-last|mismatch|supremum|new-like|if-empty|but-last|\?set-nth|filter!|harvest|member\?|map-sum|indices|padding|set-nth|2map-as|shorter|shorten|prepend|infimum|2reduce|append!|product|subseq\?|longest|longer\?|push-if|suffix!|reverse|join-as|remove!|3append|\?second|3map-as|&lt;slice&gt;|produce|length|\?first|start\*|longer|remove|subseq|unclip|first2|fir
st3|reduce|second|follow|filter|slice\?|map-as|empty\?|fourth|suffix|halves|concat|first4|prefix|append|index|short|2all\?|count|2each|third|tail\*|slice|first|tail\?|head\*|3each|head\?|start|\?last|join|iota|last|like|snip|map!|head|glue|move|tail|2map|find|sift|flip|nths|trim|each|cut\*|3map|pop\*|copy|any\?|all\?|\?nth|push|rest|sum|nth|pop|map|cut)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(init-namespaces|with-variables|with-variable|set-namestack|change-global|with-global|initialize|get-global|set-global|with-scope|make-assoc|is-global|namespace|namestack|counter|change|toggle|global|set|get|dec|off|inc|on|\+@)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(resize-array|&lt;array&gt;|1array|2array|3array|4array|&gt;array|array\?|array|pair\?|pair)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(\(stream-contents-by-length-or-block\)|with-input-output\+error-streams\*|with-input-output\+error-streams|\(stream-contents-by-element\)|\(stream-contents-by-length\)|stream-read-partial-unsafe|\(stream-contents-by-block\)|with-output\+error-stream\*|\(each-stream-block-slice\)|stream-read-partial-into|with-output\+error-stream|each-stream-block-slice|invalid-read-buffer\?|stream-read-partial|stream-element-type|\(each-stream-block\)|with-output-stream\*|invalid-read-buffer|with-output-stream|with-input-stream\*|stream-read-unsafe|with-error-stream\*|with-error-stream|stream-read-until|each-stream-block|with-output&gt;error|with-input-stream|with-error&gt;output|read-partial-into|stream-contents\*|each-stream-line|stream-seekable\?|stream-read-into|each-block-slice|each-block-size|stream-contents|bad-seek-type\?|seek-absolute\?|output-stream\?|seek-relative\?|stream-write1|with-streams\*|output-stream|stream-length|bad-seek-type|seek-absolute|input-stream\?|stream-readln|seek-relative|with-streams|read-partial|stream-copy\*|stream-flush|stream-read1|stream-lines|stream-write|stream-print|error-stream|input-stream|stream-tell|\+character\+|stream-copy|each-morsel|seek-output|stream-read|tell-output|stream-seek|read-until|seek-input|each-block|tell-input|each-line|seek-end\?|read-into|stream-nl|stream-bl|contents|seek-end|write1|\+byte\+|readln|write|read1|print|flush|lines|read|nl|bl)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(resize-string|&lt;string&gt;|1string|&gt;string|string\?|string)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(&lt;vector&gt;|1vector|&gt;vector|vector\?|vector|\?push)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(current-continuation|return-continuation|callback-error-hook|error-continuation|attempt-all-error\?|thread-error-hook|attempt-all-error|rethrow-restarts|continue-restart|compute-restarts|error-in-thread|throw-continue|throw-restarts|with-datastack|&lt;continuation&gt;|original-error|ignore-errors|continue-with|continuation\?|in-callback\?|continuation|error-thread|attempt-all|&lt;condition&gt;|with-return|condition\?|&lt;restart&gt;|condition|continue|restart\?|restarts|rethrow|callcc0|recover|restart|cleanup|callcc1|return|error|ifcc)\s">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\S+">
<token type="Text"/>
</rule>
</state>
<state name="stackeffect">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\(\s+">
<token type="NameFunction"/>
<push state="stackeffect"/>
</rule>
<rule pattern="\)\s">
<token type="NameFunction"/>
<pop depth="1"/>
</rule>
<rule pattern="--\s">
<token type="NameFunction"/>
</rule>
<rule pattern="\S+">
<token type="NameVariable"/>
</rule>
</state>
<state name="slots">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern=";\s">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule pattern="(\{\s+)(\S+)(\s+[^}]+\s+\}\s)">
<bygroups>
<token type="Text"/>
<token type="NameVariable"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="\S+">
<token type="NameVariable"/>
</rule>
</state>
<state name="vocabs">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern=";\s">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule pattern="\S+">
<token type="NameNamespace"/>
</rule>
</state>
<state name="classes">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern=";\s">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule pattern="\S+">
<token type="NameClass"/>
</rule>
</state>
<state name="words">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern=";\s">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule pattern="\S+">
<token type="NameFunction"/>
</rule>
</state>
<state name="root">
<rule pattern="#!.*$">
<token type="CommentPreproc"/>
</rule>
<rule>
<push state="base"/>
</rule>
</state>
</rules>
</lexer>

68
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,68 @@
<lexer>
<config>
<name>Fennel</name>
<alias>fennel</alias>
<alias>fnl</alias>
<filename>*.fennel</filename>
<mime_type>text/x-fennel</mime_type>
<mime_type>application/x-fennel</mime_type>
</config>
<rules>
<state name="root">
<rule pattern=";.*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="\s+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="-?\d+\.\d+">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="-?\d+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="0x-?[abcdef\d]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#39;(?!#)[\w!$%*+&lt;=&gt;?/.#-]+">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="\\(.|[a-z]+)">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="::?#?(?!#)[\w!$%*+&lt;=&gt;?/.#-]+">
<token type="LiteralStringSymbol"/>
</rule>
<rule pattern="~@|[`\&#39;#^~&amp;@]">
<token type="Operator"/>
</rule>
<rule pattern="(require-macros|set-forcibly!|import-macros|eval-compiler|pick-values|accumulate|macrodebug|pick-args|with-open|icollect|partial|comment|include|collect|hashfn|rshift|values|length|lshift|quote|match|while|doto|band|when|bnot|bxor|not=|tset|-\?&gt;&gt;|each|-&gt;&gt;|let|doc|for|and|set|not|-\?&gt;|bor|lua|\?\.|do|&gt;=|&lt;=|//|\.\.|-&gt;|or|if|~=|\^|&gt;|=|&lt;|:|/|\.|-|\+|\*|%|#) ">
<token type="Keyword"/>
</rule>
<rule pattern="(global|lambda|macros|local|macro|var|fn|λ) ">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(debug\.setuservalue|debug\.getmetatable|debug\.getuservalue|package\.searchpath|debug\.setmetatable|debug\.upvaluejoin|debug\.getregistry|coroutine\.running|coroutine\.create|debug\.setupvalue|debug\.getupvalue|coroutine\.status|coroutine\.resume|debug\.upvalueid|package\.loadlib|debug\.traceback|math\.randomseed|coroutine\.yield|collectgarbage|debug\.getlocal|package\.seeall|string\.reverse|coroutine\.wrap|debug\.setlocal|bit32\.replace|bit32\.lrotate|debug\.gethook|debug\.getinfo|bit32\.extract|string\.gmatch|string\.format|bit32\.arshift|bit32\.rrotate|debug\.sethook|table\.concat|os\.setlocale|table\.remove|string\.lower|bit32\.rshift|bit32\.lshift|string\.match|table\.unpack|setmetatable|getmetatable|table\.insert|string\.upper|string\.byte|debug\.debug|string\.gsub|bit32\.btest|math\.random|string\.find|string\.dump|os\.difftime|string\.char|table\.sort|loadstring|io\.tmpfile|bit32\.band|bit32\.bnot|string\.sub|os\.execute|os\.tmpname|table\.maxn|math\.log10|math\.atan2|table\.pack|math\.frexp|math\.ldexp|bit32\.bxor|string\.len|math\.floor|string\.rep|coroutine|math\.cosh|math\.ceil|math\.atan|math\.asin|math\.acos|math\.modf|os\.rename|os\.remove|io\.output|os\.getenv|bit32\.bor|math\.sinh|math\.fmod|math\.tanh|math\.sqrt|math\.cos|math\.tan|io\.lines|os\.clock|tostring|io\.input|math\.sin|tonumber|loadfile|math\.rad|math\.pow|io\.flush|math\.abs|math\.min|rawequal|math\.max|math\.log|io\.close|io\.popen|math\.exp|math\.deg|io\.write|os\.time|io\.read|io\.open|require|os\.exit|os\.date|package|io\.type|module|select|rawset|rawlen|rawget|unpack|assert|dofile|ipairs|string|xpcall|table|pcall|bit32|print|debug|error|pairs|math|type|next|load|arg|io|os|_G) ">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(?&lt;=\()(?!#)[\w!$%*+&lt;=&gt;?/.#-]+">
<token type="NameFunction"/>
</rule>
<rule pattern="(?!#)[\w!$%*+&lt;=&gt;?/.#-]+">
<token type="NameVariable"/>
</rule>
<rule pattern="(\[|\])">
<token type="Punctuation"/>
</rule>
<rule pattern="(\{|\})">
<token type="Punctuation"/>
</rule>
<rule pattern="(\(|\))">
<token type="Punctuation"/>
</rule>
</state>
</rules>
</lexer>

159
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,159 @@
<lexer>
<config>
<name>Fish</name>
<alias>fish</alias>
<alias>fishshell</alias>
<filename>*.fish</filename>
<filename>*.load</filename>
<mime_type>application/x-fish</mime_type>
</config>
<rules>
<state name="paren">
<rule pattern="\)">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="math">
<rule pattern="\)\)">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule pattern="[-+*/%^|&amp;]|\*\*|\|\|">
<token type="Operator"/>
</rule>
<rule pattern="\d+#\d+">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+#(?! )">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumber"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
<state name="root">
<rule>
<include state="basic"/>
</rule>
<rule>
<include state="interp"/>
</rule>
<rule>
<include state="data"/>
</rule>
</state>
<state name="interp">
<rule pattern="\$\(\(">
<token type="Keyword"/>
<push state="math"/>
</rule>
<rule pattern="\(">
<token type="Keyword"/>
<push state="paren"/>
</rule>
<rule pattern="\$#?(\w+|.)">
<token type="NameVariable"/>
</rule>
</state>
<state name="basic">
<rule pattern="(?&lt;=(?:^|\A|;|&amp;&amp;|\|\||\||\b(continue|function|return|switch|begin|while|break|count|false|block|echo|case|true|else|exit|test|set|cdh|and|pwd|for|end|not|if|cd|or)\b)\s*)(continue|function|return|switch|begin|while|break|count|false|block|test|case|true|echo|exit|else|set|cdh|and|pwd|for|end|not|if|cd|or)(?=;?\b)">
<token type="Keyword"/>
</rule>
<rule pattern="(?&lt;=for\s+\S+\s+)in\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(fish_update_completions|fish_command_not_found|fish_breakpoint_prompt|fish_status_to_signal|fish_right_prompt|fish_is_root_user|fish_mode_prompt|fish_vcs_prompt|fish_key_reader|fish_svn_prompt|fish_git_prompt|fish_hg_prompt|fish_greeting|fish_add_path|commandline|fish_prompt|fish_indent|fish_config|fish_pager|breakpoint|fish_title|prompt_pwd|functions|set_color|realpath|funcsave|contains|complete|argparse|fish_opt|history|builtin|getopts|suspend|command|mimedb|printf|ulimit|disown|string|source|funced|status|random|isatty|fishd|prevd|vared|umask|nextd|alias|pushd|emit|jobs|popd|help|psub|wait|fish|read|time|exec|eval|math|trap|type|dirs|dirh|abbr|kill|bind|hash|open|fc|bg|fg)\s*\b(?!\.)">
<token type="NameBuiltin"/>
</rule>
<rule pattern="#!.*\n">
<token type="CommentHashbang"/>
</rule>
<rule pattern="#.*\n">
<token type="Comment"/>
</rule>
<rule pattern="\\[\w\W]">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="(\b\w+)(\s*)(=)">
<bygroups>
<token type="NameVariable"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="[\[\]()={}]">
<token type="Operator"/>
</rule>
<rule pattern="(?&lt;=\[[^\]]+)\.\.|-(?=[^\[]+\])">
<token type="Operator"/>
</rule>
<rule pattern="&lt;&lt;-?\s*(\&#39;?)\\?(\w+)[\w\W]+?\2">
<token type="LiteralString"/>
</rule>
<rule pattern="(?&lt;=set\s+(?:--?[^\d\W][\w-]*\s+)?)\w+">
<token type="NameVariable"/>
</rule>
<rule pattern="(?&lt;=for\s+)\w[\w-]*(?=\s+in)">
<token type="NameVariable"/>
</rule>
<rule pattern="(?&lt;=function\s+)\w(?:[^\n])*?(?= *[-\n])">
<token type="NameFunction"/>
</rule>
<rule pattern="(?&lt;=(?:^|\b(?:and|or|sudo)\b|;|\|\||&amp;&amp;|\||\(|(?:\b\w+\s*=\S+\s)) *)\w[\w-]*">
<token type="NameFunction"/>
</rule>
</state>
<state name="data">
<rule pattern="(?s)\$?&#34;(\\\\|\\[0-7]+|\\.|[^&#34;\\$])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<push state="string"/>
</rule>
<rule pattern="(?s)\$&#39;(\\\\|\\[0-7]+|\\.|[^&#39;\\])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="(?s)&#39;.*?&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern=";">
<token type="Punctuation"/>
</rule>
<rule pattern="&amp;&amp;|\|\||&amp;|\||\^|&lt;|&gt;">
<token type="Operator"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\b\d+\b">
<token type="LiteralNumber"/>
</rule>
<rule pattern="(?&lt;=\s+)--?[^\d][\w-]*">
<token type="NameAttribute"/>
</rule>
<rule pattern=".+?">
<token type="Text"/>
</rule>
</state>
<state name="string">
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
<rule pattern="(?s)(\\\\|\\[0-7]+|\\.|[^&#34;\\$])+">
<token type="LiteralStringDouble"/>
</rule>
<rule>
<include state="interp"/>
</rule>
</state>
</rules>
</lexer>

78
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,78 @@
<lexer>
<config>
<name>Forth</name>
<alias>forth</alias>
<filename>*.frt</filename>
<filename>*.fth</filename>
<filename>*.fs</filename>
<mime_type>application/x-forth</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\\.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="\([\s].*?\)">
<token type="CommentSingle"/>
</rule>
<rule pattern="(:|variable|constant|value|buffer:)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="worddef"/>
</rule>
<rule pattern="([.sc]&#34;)(\s+?)">
<bygroups>
<token type="LiteralString"/>
<token type="Text"/>
</bygroups>
<push state="stringdef"/>
</rule>
<rule pattern="(blk|block|buffer|evaluate|flush|load|save-buffers|update|empty-buffers|list|refill|scr|thru|\#s|\*\/mod|\+loop|\/mod|0&lt;|0=|1\+|1-|2!|2\*|2\/|2@|2drop|2dup|2over|2swap|&gt;body|&gt;in|&gt;number|&gt;r|\?dup|abort|abort\&#34;|abs|accept|align|aligned|allot|and|base|begin|bl|c!|c,|c@|cell\+|cells|char|char\+|chars|constant|count|cr|create|decimal|depth|do|does&gt;|drop|dup|else|emit|environment\?|evaluate|execute|exit|fill|find|fm\/mod|here|hold|i|if|immediate|invert|j|key|leave|literal|loop|lshift|m\*|max|min|mod|move|negate|or|over|postpone|quit|r&gt;|r@|recurse|repeat|rot|rshift|s\&#34;|s&gt;d|sign|sm\/rem|source|space|spaces|state|swap|then|type|u\.|u\&lt;|um\*|um\/mod|unloop|until|variable|while|word|xor|\[char\]|\[\&#39;\]|@|!|\#|&lt;\#|\#&gt;|:|;|\+|-|\*|\/|,|&lt;|&gt;|\|1\+|1-|\.|\.r|0&lt;&gt;|0&gt;|2&gt;r|2r&gt;|2r@|:noname|\?do|again|c\&#34;|case|compile,|endcase|endof|erase|false|hex|marker|nip|of|pad|parse|pick|refill|restore-input|roll|save-input|source-id|to|true|tuck|u\.r|u&gt;|unused|value|within|\[compile\]|\#tib|convert|expect|query|span|tib|2constant|2literal|2variable|d\+|d-|d\.|d\.r|d0&lt;|d0=|d2\*|d2\/|d&lt;|d=|d&gt;s|dabs|dmax|dmin|dnegate|m\*\/|m\+|2rot|du&lt;|catch|throw|abort|abort\&#34;|at-xy|key\?|page|ekey|ekey&gt;char|ekey\?|emit\?|ms|time&amp;date|BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|S\&#34;|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|&gt;float|d&gt;f|f!|f\*|f\+|f-|f\/|f0&lt;|f0=|f&lt;|f&gt;d|f@|falign|faligned|fconstant|fdepth|fdrop|fdup|fliteral|float\+|floats|floor|fmax|fmin|fnegate|fover|frot|fround|fswap|fvariable|represent|df!|df@|dfalign|dfaligned|dfloat\+|dfloats|f\*\*|f\.|fabs|facos|facosh|falog|fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|fe\.|fexp|fexpm1|fln|flnp1|flog|fs\.|fsin|fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|set-precision|sf!|sf@|sfalign|sfaligned|s
float\+|sfloats|\(local\)|to|locals\||allocate|free|resize|definitions|find|forth-wordlist|get-current|get-order|search-wordlist|set-current|set-order|wordlist|also|forth|only|order|previous|-trailing|\/string|blank|cmove|cmove&gt;|compare|search|sliteral|.s|dump|see|words|;code|ahead|assembler|bye|code|cs-pick|cs-roll|editor|state|\[else\]|\[if\]|\[then\]|forget|defer|defer@|defer!|action-of|begin-structure|field:|buffer:|parse-name|buffer:|traverse-wordlist|n&gt;r|nr&gt;|2value|fvalue|name&gt;interpret|name&gt;compile|name&gt;string|cfield:|end-structure)\s">
<token type="Keyword"/>
</rule>
<rule pattern="(\$[0-9A-F]+)">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="(\#|%|&amp;|\-|\+)?[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="(\#|%|&amp;|\-|\+)?[0-9.]+">
<token type="KeywordType"/>
</rule>
<rule pattern="(@i|!i|@e|!e|pause|noop|turnkey|sleep|itype|icompare|sp@|sp!|rp@|rp!|up@|up!|&gt;a|a&gt;|a@|a!|a@+|a@-|&gt;b|b&gt;|b@|b!|b@+|b@-|find-name|1ms|sp0|rp0|\(evaluate\)|int-trap|int!)\s">
<token type="NameConstant"/>
</rule>
<rule pattern="(do-recognizer|r:fail|recognizer:|get-recognizers|set-recognizers|r:float|r&gt;comp|r&gt;int|r&gt;post|r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|rec:num|rec:float|rec:word)\s">
<token type="NameDecorator"/>
</rule>
<rule pattern="(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="worddef"/>
</rule>
<rule pattern="[^\s]+(?=[\s])">
<token type="NameFunction"/>
</rule>
</state>
<state name="worddef">
<rule pattern="\S+">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
</state>
<state name="stringdef">
<rule pattern="[^&#34;]+">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

94
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,94 @@
<lexer>
<config>
<name>Fortran</name>
<alias>fortran</alias>
<alias>f90</alias>
<filename>*.f03</filename>
<filename>*.f90</filename>
<filename>*.f95</filename>
<filename>*.F03</filename>
<filename>*.F90</filename>
<filename>*.F95</filename>
<mime_type>text/x-fortran</mime_type>
<case_insensitive>true</case_insensitive>
</config>
<rules>
<state name="core">
<rule pattern="\b(DO)(\s+)(CONCURRENT)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(GO)(\s*)(TO)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(ABSTRACT|ACCEPT|ALL|ALLSTOP|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|ASSOCIATE|ASYNCHRONOUS|BACKSPACE|BIND|BLOCK|BLOCKDATA|BYTE|CALL|CASE|CLASS|CLOSE|CODIMENSION|COMMON|CONTIGUOUS|CONTAINS|CONTINUE|CRITICAL|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|ELEMENTAL|ELSE|ENCODE|END|ENDASSOCIATE|ENDBLOCK|ENDDO|ENDENUM|ENDFORALL|ENDFUNCTION|ENDIF|ENDINTERFACE|ENDMODULE|ENDPROGRAM|ENDSELECT|ENDSUBMODULE|ENDSUBROUTINE|ENDTYPE|ENDWHERE|ENTRY|ENUM|ENUMERATOR|EQUIVALENCE|ERROR STOP|EXIT|EXTENDS|EXTERNAL|EXTRINSIC|FILE|FINAL|FORALL|FORMAT|FUNCTION|GENERIC|IF|IMAGES|IMPLICIT|IMPORT|IMPURE|INCLUDE|INQUIRE|INTENT|INTERFACE|INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|NULLIFY|NONE|NON_INTRINSIC|NON_OVERRIDABLE|NOPASS|ONLY|OPEN|OPTIONAL|OPTIONS|PARAMETER|PASS|PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|PROCEDURE|PROTECTED|PUBLIC|PURE|READ|RECURSIVE|RESULT|RETURN|REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBMODULE|SUBROUTINE|SYNC|SYNCALL|SYNCIMAGES|SYNCMEMORY|TARGET|THEN|TYPE|UNLOCK|USE|VALUE|VOLATILE|WHERE|WRITE|WHILE)\s*\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(CHARACTER|COMPLEX|DOUBLE PRECISION|DOUBLE COMPLEX|INTEGER|LOGICAL|REAL|C_INT|C_SHORT|C_LONG|C_LONG_LONG|C_SIGNED_CHAR|C_SIZE_T|C_INT8_T|C_INT16_T|C_INT32_T|C_INT64_T|C_INT_LEAST8_T|C_INT_LEAST16_T|C_INT_LEAST32_T|C_INT_LEAST64_T|C_INT_FAST8_T|C_INT_FAST16_T|C_INT_FAST32_T|C_INT_FAST64_T|C_INTMAX_T|C_INTPTR_T|C_FLOAT|C_DOUBLE|C_LONG_DOUBLE|C_FLOAT_COMPLEX|C_DOUBLE_COMPLEX|C_LONG_DOUBLE_COMPLEX|C_BOOL|C_CHAR|C_PTR|C_FUNPTR)\s*\b">
<token type="Keyword"/>
</rule>
<rule pattern="(\*\*|\*|\+|-|\/|&lt;|&gt;|&lt;=|&gt;=|==|\/=|=)">
<token type="Operator"/>
</rule>
<rule pattern="(::)">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="[()\[\],:&amp;%;.]">
<token type="Punctuation"/>
</rule>
<rule pattern="\b(Abort|Abs|Access|AChar|ACos|ACosH|AdjustL|AdjustR|AImag|AInt|Alarm|All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|ASin|ASinH|Associated|ATan|ATanH|Atomic_Define|Atomic_Ref|BesJ|BesJN|Bessel_J0|Bessel_J1|Bessel_JN|Bessel_Y0|Bessel_Y1|Bessel_YN|BesY|BesYN|BGE|BGT|BLE|BLT|Bit_Size|BTest|CAbs|CCos|Ceiling|CExp|Char|ChDir|ChMod|CLog|Cmplx|Command_Argument_Count|Complex|Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|CTime|C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer|C_F_ProcPointer|C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|C_FunLoc|C_Sizeof|C_New_Line|C_Carriage_Return|C_Horizontal_Tab|C_Vertical_Tab|DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|DbesJN|DbesY|DbesYN|Dble|DCos|DCosH|DDiM|DErF|DErFC|DExp|Digits|DiM|DInt|DLog|DMax|DMin|DMod|DNInt|Dot_Product|DProd|DSign|DSinH|DShiftL|DShiftR|DSin|DSqRt|DTanH|DTan|DTime|EOShift|Epsilon|ErF|ErFC|ErFC_Scaled|ETime|Execute_Command_Line|Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|FGetC|FindLoc|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|FTell|Gamma|GError|GetArg|Get_Command|Get_Command_Argument|Get_Environment_Variable|GetCWD|GetEnv|GetGId|GetLog|GetPId|GetUId|GMTime|HostNm|Huge|Hypot|IAbs|IAChar|IAll|IAnd|IAny|IArgC|IBClr|IBits|IBSet|IChar|IDate|IDiM|IDInt|IDNInt|IEOr|IErrNo|IFix|Imag|ImagPart|Image_Index|Index|Int|IOr|IParity|IRand|IsaTty|IShft|IShftC|ISign|Iso_C_Binding|Is_Contiguous|Is_Iostat_End|Is_Iostat_Eor|ITime|Kill|Kind|LBound|LCoBound|Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|Loc|Log|Log_Gamma|Logical|Long|LShift|LStat|LTime|MaskL|MaskR|MatMul|Max|MaxExponent|MaxLoc|MaxVal|MClock|Merge|Merge_Bits|Move_Alloc|Min|MinExponent|MinLoc|MinVal|Mod|Modulo|MvBits|Nearest|New_Line|NInt|Norm2|Not|Null|Num_Images|Or|Pack|Parity|PError|Precision|Present|Product|Radix|Rand|Random_Number|Random_Seed|Range|Real|RealPart|Rename|Repeat|Reshape|RRSpacing|RShift|Same_Type_As|Scale|Scan|Second|Selected_Char_Kind|Selected_Int_Kind|Selected_Real_Kind|Set_Exponent|Shape|ShiftA|ShiftL|ShiftR|Short|Sign|S
ignal|SinH|Sin|Sleep|Sngl|Spacing|Spread|SqRt|SRand|Stat|Storage_Size|Sum|SymLnk|System|System_Clock|Tan|TanH|Time|This_Image|Tiny|TrailZ|Transfer|Transpose|Trim|TtyNam|UBound|UCoBound|UMask|Unlink|Unpack|Verify|XOr|ZAbs|ZCos|ZExp|ZLog|ZSin|ZSqRt)\s*\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\.(true|false)\.">
<token type="NameBuiltin"/>
</rule>
<rule pattern="\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.">
<token type="OperatorWord"/>
</rule>
</state>
<state name="strings">
<rule pattern="(?s)&#34;(\\\\|\\[0-7]+|\\.|[^&#34;\\])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="(?s)&#39;(\\\\|\\[0-7]+|\\.|[^&#39;\\])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
</state>
<state name="nums">
<rule pattern="\d+(?![.e])(_([1-9]|[a-z]\w*))?">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="[+-]?\d*\.\d+([ed][-+]?\d+)?(_([1-9]|[a-z]\w+))?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[+-]?\d+\.\d*([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[+-]?\d+(\.\d*)?[ed][-+]?\d+(_([1-9]|[a-z]\w*))?">
<token type="LiteralNumberFloat"/>
</rule>
</state>
<state name="root">
<rule pattern="^#.*\n">
<token type="CommentPreproc"/>
</rule>
<rule pattern="!.*\n">
<token type="Comment"/>
</rule>
<rule>
<include state="strings"/>
</rule>
<rule>
<include state="core"/>
</rule>
<rule pattern="[a-z][\w$]*">
<token type="Name"/>
</rule>
<rule>
<include state="nums"/>
</rule>
<rule pattern="[\s]+">
<token type="TextWhitespace"/>
</rule>
</state>
</rules>
</lexer>

245
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,245 @@
<lexer>
<config>
<name>FSharp</name>
<alias>fsharp</alias>
<filename>*.fs</filename>
<filename>*.fsi</filename>
<mime_type>text/x-fsharp</mime_type>
</config>
<rules>
<state name="comment">
<rule pattern="[^(*)@&#34;]+">
<token type="Comment"/>
</rule>
<rule pattern="\(\*">
<token type="Comment"/>
<push/>
</rule>
<rule pattern="\*\)">
<token type="Comment"/>
<pop depth="1"/>
</rule>
<rule pattern="@&#34;">
<token type="LiteralString"/>
<push state="lstring"/>
</rule>
<rule pattern="&#34;&#34;&#34;">
<token type="LiteralString"/>
<push state="tqs"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="string"/>
</rule>
<rule pattern="[(*)@]">
<token type="Comment"/>
</rule>
</state>
<state name="string">
<rule pattern="[^\\&#34;]+">
<token type="LiteralString"/>
</rule>
<rule>
<include state="escape-sequence"/>
</rule>
<rule pattern="\\\n">
<token type="LiteralString"/>
</rule>
<rule pattern="\n">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;B?">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
</state>
<state name="lstring">
<rule pattern="[^&#34;]+">
<token type="LiteralString"/>
</rule>
<rule pattern="\n">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;B?">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
</state>
<state name="tqs">
<rule pattern="[^&#34;]+">
<token type="LiteralString"/>
</rule>
<rule pattern="\n">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;&#34;&#34;B?">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
</rule>
</state>
<state name="escape-sequence">
<rule pattern="\\[\\&#34;\&#39;ntbrafv]">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="\\[0-9]{3}">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="\\u[0-9a-fA-F]{4}">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="\\U[0-9a-fA-F]{8}">
<token type="LiteralStringEscape"/>
</rule>
</state>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\(\)|\[\]">
<token type="NameBuiltinPseudo"/>
</rule>
<rule pattern="\b(?&lt;!\.)([A-Z][\w\&#39;]*)(?=\s*\.)">
<token type="NameNamespace"/>
<push state="dotted"/>
</rule>
<rule pattern="\b([A-Z][\w\&#39;]*)">
<token type="Name"/>
</rule>
<rule pattern="///.*?\n">
<token type="LiteralStringDoc"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="\(\*(?!\))">
<token type="Comment"/>
<push state="comment"/>
</rule>
<rule pattern="@&#34;">
<token type="LiteralString"/>
<push state="lstring"/>
</rule>
<rule pattern="&#34;&#34;&#34;">
<token type="LiteralString"/>
<push state="tqs"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="string"/>
</rule>
<rule pattern="\b(open|module)(\s+)([\w.]+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameNamespace"/>
</bygroups>
</rule>
<rule pattern="\b(let!?)(\s+)(\w+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameVariable"/>
</bygroups>
</rule>
<rule pattern="\b(type)(\s+)(\w+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="NameClass"/>
</bygroups>
</rule>
<rule pattern="\b(member|override)(\s+)(\w+)(\.)(\w+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
<token type="Name"/>
<token type="Punctuation"/>
<token type="NameFunction"/>
</bygroups>
</rule>
<rule pattern="\b(abstract|as|assert|base|begin|class|default|delegate|do!|do|done|downcast|downto|elif|else|end|exception|extern|false|finally|for|function|fun|global|if|inherit|inline|interface|internal|in|lazy|let!|let|match|member|module|mutable|namespace|new|null|of|open|override|private|public|rec|return!|return|select|static|struct|then|to|true|try|type|upcast|use!|use|val|void|when|while|with|yield!|yield|atomic|break|checked|component|const|constraint|constructor|continue|eager|event|external|fixed|functor|include|method|mixin|object|parallel|process|protected|pure|sealed|tailcall|trait|virtual|volatile)\b">
<token type="Keyword"/>
</rule>
<rule pattern="``([^`\n\r\t]|`[^`\n\r\t])+``">
<token type="Name"/>
</rule>
<rule pattern="#[ \t]*(if|endif|else|line|nowarn|light|r|\d+)\b">
<token type="CommentPreproc"/>
</rule>
<rule pattern="(!=|#|&amp;&amp;|&amp;|\(|\)|\*|\+|,|-\.|-&gt;|-|\.\.|\.|::|:=|:&gt;|:|;;|;|&lt;-|&lt;\]|&lt;|&gt;\]|&gt;|\?\?|\?|\[&lt;|\[\||\[|\]|_|`|\{|\|\]|\||\}|~|&lt;@@|&lt;@|=|@&gt;|@@&gt;)">
<token type="Operator"/>
</rule>
<rule pattern="([=&lt;&gt;@^|&amp;+\*/$%-]|[!?~])?[!$%&amp;*+\./:&lt;=&gt;?@^|~-]">
<token type="Operator"/>
</rule>
<rule pattern="\b(and|or|not)\b">
<token type="OperatorWord"/>
</rule>
<rule pattern="\b(sbyte|byte|char|nativeint|unativeint|float32|single|float|double|int8|uint8|int16|uint16|int32|uint32|int64|uint64|decimal|unit|bool|string|list|exn|obj|enum)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="[^\W\d][\w&#39;]*">
<token type="Name"/>
</rule>
<rule pattern="\d[\d_]*[uU]?[yslLnQRZINGmM]?">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="0[oO][0-7][0-7_]*[uU]?[yslLn]?">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="0[bB][01][01_]*[uU]?[yslLn]?">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="&#39;(?:(\\[\\\&#34;&#39;ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))&#39;B?">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="&#39;.&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="&#39;">
<token type="Keyword"/>
</rule>
<rule pattern="@?&#34;">
<token type="LiteralStringDouble"/>
<push state="string"/>
</rule>
<rule pattern="[~?][a-z][\w\&#39;]*:">
<token type="NameVariable"/>
</rule>
</state>
<state name="dotted">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="\.">
<token type="Punctuation"/>
</rule>
<rule pattern="[A-Z][\w\&#39;]*(?=\s*\.)">
<token type="NameNamespace"/>
</rule>
<rule pattern="[A-Z][\w\&#39;]*">
<token type="Name"/>
<pop depth="1"/>
</rule>
<rule pattern="[a-z_][\w\&#39;]*">
<token type="Name"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

149
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,149 @@
<lexer>
<config>
<name>GAS</name>
<alias>gas</alias>
<alias>asm</alias>
<filename>*.s</filename>
<filename>*.S</filename>
<mime_type>text/x-gas</mime_type>
</config>
<rules>
<state name="punctuation">
<rule pattern="[-*,.()\[\]!:]+">
<token type="Punctuation"/>
</rule>
</state>
<state name="root">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+):">
<token type="NameLabel"/>
</rule>
<rule pattern="\.(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
<token type="NameAttribute"/>
<push state="directive-args"/>
</rule>
<rule pattern="lock|rep(n?z)?|data\d+">
<token type="NameAttribute"/>
</rule>
<rule pattern="(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
<token type="NameFunction"/>
<push state="instruction-args"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
</rule>
</state>
<state name="directive-args">
<rule pattern="(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
<token type="NameConstant"/>
</rule>
<rule pattern="&#34;(\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="@(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
<token type="NameAttribute"/>
</rule>
<rule pattern="(?:0[xX][a-zA-Z0-9]+|\d+)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="%(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
<token type="NameVariable"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="([;#]|//).*?\n">
<token type="CommentSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="/[*].*?[*]/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/[*].*?\n[\w\W]*?[*]/">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule>
<include state="punctuation"/>
</rule>
<rule>
<include state="whitespace"/>
</rule>
</state>
<state name="instruction-args">
<rule pattern="([a-z0-9]+)( )(&lt;)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))(&gt;)">
<bygroups>
<token type="LiteralNumberHex"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="NameConstant"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="([a-z0-9]+)( )(&lt;)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))([-+])((?:0[xX][a-zA-Z0-9]+|\d+))(&gt;)">
<bygroups>
<token type="LiteralNumberHex"/>
<token type="Text"/>
<token type="Punctuation"/>
<token type="NameConstant"/>
<token type="Punctuation"/>
<token type="LiteralNumberInteger"/>
<token type="Punctuation"/>
</bygroups>
</rule>
<rule pattern="(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
<token type="NameConstant"/>
</rule>
<rule pattern="(?:0[xX][a-zA-Z0-9]+|\d+)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="%(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
<token type="NameVariable"/>
</rule>
<rule pattern="$(?:0[xX][a-zA-Z0-9]+|\d+)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="$&#39;(.|\\&#39;)&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule pattern="([;#]|//).*?\n">
<token type="CommentSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="/[*].*?[*]/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="/[*].*?\n[\w\W]*?[*]/">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule>
<include state="punctuation"/>
</rule>
<rule>
<include state="whitespace"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="([;#]|//).*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/[*][\w\W]*?[*]/">
<token type="CommentMultiline"/>
</rule>
</state>
</rules>
</lexer>

265
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,265 @@
<lexer>
<config>
<name>GDScript</name>
<alias>gdscript</alias>
<alias>gd</alias>
<filename>*.gd</filename>
<mime_type>text/x-gdscript</mime_type>
<mime_type>application/x-gdscript</mime_type>
</config>
<rules>
<state name="builtins">
<rule pattern="(?&lt;!\.)(instance_from_id|nearest_po2|print_stack|type_exist|rand_range|linear2db|var2bytes|dict2inst|randomize|bytes2var|rand_seed|db2linear|inst2dict|printerr|printraw|decimals|preload|deg2rad|str2var|stepify|var2str|convert|weakref|fposmod|funcref|rad2deg|dectime|printt|is_inf|is_nan|assert|Color8|typeof|ColorN|prints|floor|atan2|yield|randf|print|range|clamp|round|randi|sqrt|tanh|cosh|ceil|ease|acos|load|fmod|lerp|seed|sign|atan|sinh|hash|asin|sin|str|cos|tan|pow|exp|min|abs|log|max)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(?&lt;!\.)(self|false|true|PI|NAN|INF)\b">
<token type="NameBuiltinPseudo"/>
</rule>
<rule pattern="(?&lt;!\.)(Physics2DShapeQueryParameters|PhysicsShapeQueryParameters|Physics2DDirectBodyStateSW|NavigationPolygonInstance|ResourceInteractiveLoader|Physics2DDirectSpaceState|Physics2DShapeQueryResult|Physics2DTestMotionResult|InputEventJoystickButton|InputEventJoystickMotion|Physics2DDirectBodyState|PhysicsDirectBodyStateSW|PhysicsShapeQueryResult|PhysicsDirectSpaceState|SpatialSound2DServerSW|PackedDataContainerRef|NavigationMeshInstance|ResourceImportMetadata|PhysicsDirectBodyState|ConcavePolygonShape2D|CanvasItemShaderGraph|EditorScenePostImport|InputEventScreenTouch|InputEventMouseButton|InputEventMouseMotion|SpatialSound2DServer|AudioStreamOGGVorbis|VisibilityNotifier2D|InputEventScreenDrag|ConvexPolygonShape2D|SpatialSoundServerSW|ParticleAttractor2D|PackedDataContainer|SpatialStreamPlayer|RenderTargetTexture|AnimationTreePlayer|ConcavePolygonShape|InstancePlaceholder|MaterialShaderGraph|AudioStreamPlayback|VisibilityEnabler2D|SpatialSamplePlayer|DampedSpringJoint2D|InterpolatedCamera|ConvexPolygonShape|ConfirmationDialog|SpatialSoundServer|BakedLightInstance|ParallaxBackground|CollisionPolygon2D|CanvasItemMaterial|VisibilityNotifier|EditorImportPlugin|VideoStreamTheora|TouchScreenButton|ResourcePreloader|OccluderPolygon2D|BakedLightSampler|CollisionObject2D|RemoteTransform2D|PolygonPathFinder|StyleBoxImageMask|NavigationPolygon|TranslationServer|MultiMeshInstance|ImmediateGeometry|Physics2DServerSW|ColorPickerButton|VisibilityEnabler|PHashTranslation|RectangleShape2D|DirectionalLight|AnimatedSprite3D|WorldEnvironment|CollisionShape2D|EventStreamChibi|InputEventAction|CollisionPolygon|AudioStreamSpeex|EditorFileDialog|GeometryInstance|Generic6DOFJoint|PacketPeerStream|CanvasItemShader|KinematicBody2D|StyleBoxTexture|PhysicsServerSW|VSplitContainer|CenterContainer|GDFunctionState|AudioStreamOpus|TextureProgress|MarginContainer|CollisionObject|LightOccluder2D|AnimationPlayer|HSplitContainer|ScrollContainer|SoundRoomParams|Physics2DServer|MaterialS
hader|ShaderMaterial|ViewportSprite|SplitContainer|AudioStreamMPC|VisualInstance|PanelContainer|BackBufferCopy|SamplePlayer2D|CanvasModulate|ResourceLoader|CapsuleShape2D|ReferenceFrame|NavigationMesh|CollisionShape|ConeTwistJoint|ProximityGroup|AnimatedSprite|SegmentShape2D|BoneAttachment|RichTextLabel|CircleShape2D|VBoxContainer|PacketPeerUDP|SpatialPlayer|TextureButton|KinematicBody|SoundPlayer2D|PhysicsServer|ParallaxLayer|InputEventKey|GrooveJoint2D|PhysicsBody2D|FixedMaterial|GridContainer|HBoxContainer|StreamPeerSSL|StyleBoxEmpty|StreamPeerTCP|SampleLibrary|GDNativeClass|AudioServerSW|ResourceSaver|SpriteBase3D|StreamPlayer|AtlasTexture|VisualServer|SamplePlayer|StyleBoxFlat|StaticBody2D|SpriteFrames|MeshDataTool|MeshInstance|Vector3Array|BoxContainer|TabContainer|HButtonArray|LargeTexture|Navigation2D|WindowDialog|EditorScript|EditorPlugin|TextureFrame|AcceptDialog|ImageTexture|CapsuleShape|VehicleWheel|VButtonArray|Vector2Array|InputDefault|OptionButton|PathFollow2D|VehicleBody|ColorPicker|PopupDialog|ProgressBar|CanvasLayer|Translation|Environment|EventPlayer|VideoPlayer|EventStream|VideoStream|ButtonGroup|Particles2D|Patch9Frame|ButtonArray|SurfaceTool|MeshLibrary|PackedScene|PhysicsBody|AudioStream|Performance|StringArray|AudioServer|RigidBody2D|LineShape2D|SliderJoint|SphereShape|ShaderGraph|CheckButton|StreamPeer|FileDialog|PathFollow|SceneState|RoomBounds|Dictionary|VSeparator|PacketPeer|VScrollBar|MenuButton|HTTPClient|PinJoint2D|BakedLight|PlaneShape|InputEvent|BaseButton|HSeparator|HScrollBar|Navigation|PopupPanel|StaticBody|Position2D|Position3D|ToolButton|HingeJoint|CanvasItem|RayShape2D|ColorArray|ConfigFile|TCP_Server|RayCast2D|ColorRamp|SpotLight|RealArray|GraphNode|Container|Reference|PopupMenu|Separator|Polygon2D|MultiMesh|Semaphore|Transform|OmniLight|GraphEdit|Particles|Animation|Marshalls|SceneTree|RigidBody|XMLParser|PathRemap|ScrollBar|Directory|PCKPacker|RawArray|TextEdit|MainLoop|TreeItem|StyleBox|Material|Geometry|Matrix32|Resource|U
ndoRedo|RayShape|TestCube|ItemList|CheckBox|Camera2D|Skeleton|Sprite3D|Viewport|NodePath|IntArray|BoxShape|PinJoint|InputMap|LineEdit|GDScript|Vector3|TileMap|HSlider|Spatial|SpinBox|World2D|IP_Unix|Curve2D|Curve3D|WeakRef|GridMap|Matrix3|VSlider|CubeMap|Joint2D|Globals|Shape2D|Texture|Control|TileSet|Light2D|FuncRef|Vector2|RayCast|Script|Node2D|Button|BitMap|Sample|Object|String|Shader|Area2D|Slider|Sprite|Thread|Path2D|Camera|Portal|float|Theme|World|YSort|Shape|Joint|Mutex|Tween|RegEx|Label|Rect2|Array|Plane|Light|Range|Color|Input|Popup|Panel|Timer|Image|Area|Quad|bool|AABB|Quat|File|Tabs|Path|Font|Tree|Room|Mesh|Node|RID|int|Nil|IP|OS)\b">
<token type="NameException"/>
</rule>
</state>
<state name="sqs">
<rule pattern="&#39;">
<token type="LiteralStringSingle"/>
<pop depth="1"/>
</rule>
<rule pattern="\\\\|\\&#39;|\\\n">
<token type="LiteralStringEscape"/>
</rule>
<rule>
<include state="strings-single"/>
</rule>
</state>
<state name="stringescape">
<rule pattern="\\([\\abfnrtv&#34;\&#39;]|\n|N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})">
<token type="LiteralStringEscape"/>
</rule>
</state>
<state name="classname">
<rule pattern="[a-zA-Z_]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
</state>
<state name="strings-single">
<rule pattern="%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]">
<token type="LiteralStringInterpol"/>
</rule>
<rule pattern="[^\\\&#39;&#34;%\n]+">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="[\&#39;&#34;\\]">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="%">
<token type="LiteralStringSingle"/>
</rule>
</state>
<state name="funcname">
<rule pattern="[a-zA-Z_]\w*">
<token type="NameFunction"/>
<pop depth="1"/>
</rule>
<rule>
<pop depth="1"/>
</rule>
</state>
<state name="numbers">
<rule pattern="(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\d+[eE][+-]?[0-9]+j?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0[xX][a-fA-F0-9]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="\d+j?">
<token type="LiteralNumberInteger"/>
</rule>
</state>
<state name="tdqs">
<rule pattern="&#34;&#34;&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
<rule>
<include state="strings-double"/>
</rule>
<rule pattern="\n">
<token type="LiteralStringDouble"/>
</rule>
</state>
<state name="name">
<rule pattern="[a-zA-Z_]\w*">
<token type="Name"/>
</rule>
</state>
<state name="root">
<rule pattern="\n">
<token type="Text"/>
</rule>
<rule pattern="^(\s*)([rRuUbB]{,2})(&#34;&#34;&#34;(?:.|\n)*?&#34;&#34;&#34;)">
<bygroups>
<token type="Text"/>
<token type="LiteralStringAffix"/>
<token type="LiteralStringDoc"/>
</bygroups>
</rule>
<rule pattern="^(\s*)([rRuUbB]{,2})(&#39;&#39;&#39;(?:.|\n)*?&#39;&#39;&#39;)">
<bygroups>
<token type="Text"/>
<token type="LiteralStringAffix"/>
<token type="LiteralStringDoc"/>
</bygroups>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="#.*$">
<token type="CommentSingle"/>
</rule>
<rule pattern="[]{}:(),;[]">
<token type="Punctuation"/>
</rule>
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="\\">
<token type="Text"/>
</rule>
<rule pattern="(in|and|or|not)\b">
<token type="OperatorWord"/>
</rule>
<rule pattern="!=|==|&lt;&lt;|&gt;&gt;|&amp;&amp;|\+=|-=|\*=|/=|%=|&amp;=|\|=|\|\||[-~+/*%=&lt;&gt;&amp;^.!|$]">
<token type="Operator"/>
</rule>
<rule>
<include state="keywords"/>
</rule>
<rule pattern="(def)((?:\s|\\\s)+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="funcname"/>
</rule>
<rule pattern="(class)((?:\s|\\\s)+)">
<bygroups>
<token type="Keyword"/>
<token type="Text"/>
</bygroups>
<push state="classname"/>
</rule>
<rule>
<include state="builtins"/>
</rule>
<rule pattern="([rR]|[uUbB][rR]|[rR][uUbB])(&#34;&#34;&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringDouble"/>
</bygroups>
<push state="tdqs"/>
</rule>
<rule pattern="([rR]|[uUbB][rR]|[rR][uUbB])(&#39;&#39;&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringSingle"/>
</bygroups>
<push state="tsqs"/>
</rule>
<rule pattern="([rR]|[uUbB][rR]|[rR][uUbB])(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringDouble"/>
</bygroups>
<push state="dqs"/>
</rule>
<rule pattern="([rR]|[uUbB][rR]|[rR][uUbB])(&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringSingle"/>
</bygroups>
<push state="sqs"/>
</rule>
<rule pattern="([uUbB]?)(&#34;&#34;&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringDouble"/>
</bygroups>
<combined state="stringescape" state="tdqs"/>
</rule>
<rule pattern="([uUbB]?)(&#39;&#39;&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringSingle"/>
</bygroups>
<combined state="stringescape" state="tsqs"/>
</rule>
<rule pattern="([uUbB]?)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringDouble"/>
</bygroups>
<combined state="stringescape" state="dqs"/>
</rule>
<rule pattern="([uUbB]?)(&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringSingle"/>
</bygroups>
<combined state="stringescape" state="sqs"/>
</rule>
<rule>
<include state="name"/>
</rule>
<rule>
<include state="numbers"/>
</rule>
</state>
<state name="keywords">
<rule pattern="(breakpoint|continue|onready|extends|signal|return|export|static|setget|switch|break|const|while|class|tool|pass|func|case|enum|else|elif|var|for|do|if)\b">
<token type="Keyword"/>
</rule>
</state>
<state name="dqs">
<rule pattern="&#34;">
<token type="LiteralStringDouble"/>
<pop depth="1"/>
</rule>
<rule pattern="\\\\|\\&#34;|\\\n">
<token type="LiteralStringEscape"/>
</rule>
<rule>
<include state="strings-double"/>
</rule>
</state>
<state name="tsqs">
<rule pattern="&#39;&#39;&#39;">
<token type="LiteralStringSingle"/>
<pop depth="1"/>
</rule>
<rule>
<include state="strings-single"/>
</rule>
<rule pattern="\n">
<token type="LiteralStringSingle"/>
</rule>
</state>
<state name="strings-double">
<rule pattern="%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]">
<token type="LiteralStringInterpol"/>
</rule>
<rule pattern="[^\\\&#39;&#34;%\n]+">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="[\&#39;&#34;\\]">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="%">
<token type="LiteralStringDouble"/>
</rule>
</state>
</rules>
</lexer>

263
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,263 @@
<lexer>
<config>
<name>Gherkin</name>
<alias>cucumber</alias>
<alias>Cucumber</alias>
<alias>gherkin</alias>
<alias>Gherkin</alias>
<filename>*.feature</filename>
<filename>*.FEATURE</filename>
<mime_type>text/x-gherkin</mime_type>
</config>
<rules>
<state name="comments">
<rule pattern="\s*#.*$">
<token type="Comment"/>
</rule>
</state>
<state name="featureElementsOnStack">
<rule pattern="^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y&#39;all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y&#39;all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu&#39;|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y&#39;all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y&#39;all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y&#39;all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )">
<token type="Keyword"/>
<pop depth="2"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="(\s|.)">
<token type="NameFunction"/>
</rule>
</state>
<state name="tableContent">
<rule pattern="\s+\|\s*$">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="\\\|">
<token type="LiteralString"/>
</rule>
<rule pattern="\s*\|">
<token type="Keyword"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralString"/>
<push state="doubleStringTable"/>
</rule>
<rule>
<include state="string"/>
</rule>
</state>
<state name="numbers">
<rule pattern="(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?">
<token type="LiteralString"/>
</rule>
</state>
<state name="tableVars">
<rule pattern="(&lt;[^&gt;]+&gt;)">
<token type="NameVariable"/>
</rule>
</state>
<state name="doubleString">
<rule pattern="&#34;">
<token type="NameFunction"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string"/>
</rule>
</state>
<state name="featureElements">
<rule pattern="^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y&#39;all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y&#39;all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu&#39;|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y&#39;all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y&#39;all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y&#39;all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )">
<token type="Keyword"/>
<push state="stepContentStack"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="(\s|.)">
<token type="NameFunction"/>
</rule>
</state>
<state name="examplesTableHeader">
<rule pattern="\s+\|\s*$">
<token type="Keyword"/>
<pop depth="2"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="\\\|">
<token type="NameVariable"/>
</rule>
<rule pattern="\s*\|">
<token type="Keyword"/>
</rule>
<rule pattern="[^|]">
<token type="NameVariable"/>
</rule>
</state>
<state name="stepContentStack">
<rule pattern="$">
<token type="Keyword"/>
<pop depth="2"/>
</rule>
<rule>
<include state="stepContent"/>
</rule>
</state>
<state name="pyString">
<rule pattern="&#34;&#34;&#34;">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string"/>
</rule>
</state>
<state name="examplesTable">
<rule pattern="\s+\|">
<token type="Keyword"/>
<push state="examplesTableHeader"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="(\s|.)">
<token type="NameFunction"/>
</rule>
</state>
<state name="stepContentRoot">
<rule pattern="$">
<token type="Keyword"/>
<pop depth="1"/>
</rule>
<rule>
<include state="stepContent"/>
</rule>
</state>
<state name="doubleStringTable">
<rule pattern="&#34;">
<token type="LiteralString"/>
<pop depth="1"/>
</rule>
<rule>
<include state="string"/>
</rule>
</state>
<state name="string">
<rule>
<include state="tableVars"/>
</rule>
<rule pattern="(\s|.)">
<token type="LiteralString"/>
</rule>
</state>
<state name="stepContent">
<rule pattern="&#34;">
<token type="NameFunction"/>
<push state="doubleString"/>
</rule>
<rule>
<include state="tableVars"/>
</rule>
<rule>
<include state="numbers"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="(\s|.)">
<token type="NameFunction"/>
</rule>
</state>
<state name="scenarioSectionsOnStack">
<rule pattern="^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l&#39;escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y&#39;all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$">
<bygroups>
<token type="NameFunction"/>
<token type="Keyword"/>
<token type="Keyword"/>
<token type="NameFunction"/>
</bygroups>
<push state="featureElementsOnStack"/>
</rule>
</state>
<state name="narrative">
<rule>
<include state="scenarioSectionsOnStack"/>
</rule>
<rule pattern="(\s|.)">
<token type="NameFunction"/>
</rule>
</state>
<state name="root">
<rule pattern="\n">
<token type="NameFunction"/>
</rule>
<rule>
<include state="comments"/>
</rule>
<rule pattern="&#34;&#34;&#34;">
<token type="Keyword"/>
<push state="pyString"/>
</rule>
<rule pattern="\s+\|">
<token type="Keyword"/>
<push state="tableContent"/>
</rule>
<rule pattern="&#34;">
<token type="NameFunction"/>
<push state="doubleString"/>
</rule>
<rule>
<include state="tableVars"/>
</rule>
<rule>
<include state="numbers"/>
</rule>
<rule pattern="(\s*)(@[^@\r\n\t ]+)">
<bygroups>
<token type="NameFunction"/>
<token type="NameTag"/>
</bygroups>
</rule>
<rule pattern="^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y&#39;all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y&#39;all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu&#39;|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y&#39;all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y&#39;all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y&#39;all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )">
<bygroups>
<token type="NameFunction"/>
<token type="Keyword"/>
</bygroups>
<push state="stepContentRoot"/>
</rule>
<rule pattern="^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$">
<bygroups>
<token type="Keyword"/>
<token type="Keyword"/>
<token type="NameFunction"/>
</bygroups>
<push state="narrative"/>
</rule>
<rule pattern="^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l&#39;escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y&#39;all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$">
<bygroups>
<token type="NameFunction"/>
<token type="Keyword"/>
<token type="Keyword"/>
<token type="NameFunction"/>
</bygroups>
<push state="featureElements"/>
</rule>
<rule pattern="^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$">
<bygroups>
<token type="NameFunction"/>
<token type="Keyword"/>
<token type="Keyword"/>
<token type="NameFunction"/>
</bygroups>
<push state="examplesTable"/>
</rule>
<rule pattern="(\s|.)">
<token type="NameFunction"/>
</rule>
</state>
</rules>
</lexer>

65
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,65 @@
<lexer>
<config>
<name>GLSL</name>
<alias>glsl</alias>
<filename>*.vert</filename>
<filename>*.frag</filename>
<filename>*.geo</filename>
<mime_type>text/x-glslsrc</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="^#.*">
<token type="CommentPreproc"/>
</rule>
<rule pattern="//.*">
<token type="CommentSingle"/>
</rule>
<rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="\+|-|~|!=?|\*|/|%|&lt;&lt;|&gt;&gt;|&lt;=?|&gt;=?|==?|&amp;&amp;?|\^|\|\|?">
<token type="Operator"/>
</rule>
<rule pattern="[?:]">
<token type="Operator"/>
</rule>
<rule pattern="\bdefined\b">
<token type="Operator"/>
</rule>
<rule pattern="[;{}(),\[\]]">
<token type="Punctuation"/>
</rule>
<rule pattern="[+-]?\d*\.\d+([eE][-+]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[+-]?\d+\.\d*([eE][-+]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0[xX][0-9a-fA-F]*">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="0[0-7]*">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="[1-9][0-9]*">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\b(sampler3DsamplerCube|sampler2DShadow|sampler1DShadow|invariant|sampler1D|sampler2D|attribute|mat3mat4|centroid|continue|varying|uniform|discard|mat4x4|mat3x3|mat2x3|mat4x2|mat3x2|mat2x2|mat2x4|mat3x4|struct|return|mat4x3|bvec4|false|ivec4|ivec3|const|float|inout|ivec2|break|while|bvec3|bvec2|vec3|else|true|void|bool|vec2|vec4|mat2|for|out|int|in|do|if)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\b(sampler2DRectShadow|sampler2DRect|sampler3DRect|namespace|precision|interface|volatile|template|unsigned|external|noinline|mediump|typedef|default|switch|static|extern|inline|sizeof|output|packed|double|public|fvec3|class|union|short|highp|fixed|input|fvec4|hvec2|hvec3|hvec4|dvec2|dvec3|dvec4|fvec2|using|long|this|enum|lowp|cast|goto|half|asm)\b">
<token type="Keyword"/>
</rule>
<rule pattern="[a-zA-Z_]\w*">
<token type="Name"/>
</rule>
<rule pattern="\.">
<token type="Punctuation"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
</state>
</rules>
</lexer>

289
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml generated vendored ノーマルファイル

長すぎる行があるためファイル差分は表示されません

112
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,112 @@
<lexer>
<config>
<name>Go HTML Template</name>
<alias>go-html-template</alias>
</config>
<rules>
<state name="template">
<rule pattern="[-]?}}">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern="(?=}})">
<token type="CommentPreproc"/>
<pop depth="1"/>
</rule>
<rule pattern="\(">
<token type="Operator"/>
<push state="subexpression"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule>
<include state="expression"/>
</rule>
</state>
<state name="subexpression">
<rule pattern="\)">
<token type="Operator"/>
<pop depth="1"/>
</rule>
<rule>
<include state="expression"/>
</rule>
</state>
<state name="expression">
<rule pattern="\s+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="\(">
<token type="Operator"/>
<push state="subexpression"/>
</rule>
<rule pattern="(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b">
<token type="Keyword"/>
</rule>
<rule pattern="\||:?=|,">
<token type="Operator"/>
</rule>
<rule pattern="[$]?[^\W\d]\w*">
<token type="NameOther"/>
</rule>
<rule pattern="\$|[$]?\.(?:[^\W\d]\w*)?">
<token type="NameAttribute"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="-?\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="-?\d+\.\d*([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\.\d+([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="-?\d+[Ee][-+]\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="-?\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="-?\.\d+([eE][+\-]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="-?0[0-7]+">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="-?0[xX][0-9a-fA-F]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="-?0b[01_]+">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="-?(0|[1-9][0-9]*)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#39;(\\[&#39;&#34;\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="`[^`]*`">
<token type="LiteralString"/>
</rule>
</state>
<state name="root">
<rule pattern="{{(- )?/\*(.|\n)*?\*/( -)?}}">
<token type="CommentMultiline"/>
</rule>
<rule pattern="{{[-]?">
<token type="CommentPreproc"/>
<push state="template"/>
</rule>
<rule pattern="[^{]+">
<token type="Other"/>
</rule>
<rule pattern="{">
<token type="Other"/>
</rule>
</state>
</rules>
</lexer>

88
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,88 @@
<lexer>
<config>
<name>GraphQL</name>
<alias>graphql</alias>
<alias>graphqls</alias>
<alias>gql</alias>
<filename>*.graphql</filename>
<filename>*.graphqls</filename>
</config>
<rules>
<state name="root">
<rule pattern="(query|mutation|subscription|fragment|scalar|implements|interface|union|enum|input|type)">
<token type="KeywordDeclaration"/>
<push state="type"/>
</rule>
<rule pattern="(on|extend|schema|directive|\.\.\.)">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(QUERY|MUTATION|SUBSCRIPTION|FIELD|FRAGMENT_DEFINITION|FRAGMENT_SPREAD|INLINE_FRAGMENT|SCHEMA|SCALAR|OBJECT|FIELD_DEFINITION|ARGUMENT_DEFINITION|INTERFACE|UNION|ENUM|ENUM_VALUE|INPUT_OBJECT|INPUT_FIELD_DEFINITION)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="[^\W\d]\w*">
<token type="NameProperty"/>
</rule>
<rule pattern="\@\w+">
<token type="NameDecorator"/>
</rule>
<rule pattern=":">
<token type="Punctuation"/>
<push state="type"/>
</rule>
<rule pattern="[\(\)\{\}\[\],!\|=]">
<token type="Punctuation"/>
</rule>
<rule pattern="\$\w+">
<token type="NameVariable"/>
</rule>
<rule pattern="\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\.\d+([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+[Ee][-+]\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\.\d+([eE][+\-]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="(0|[1-9][0-9]*)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;&#34;&#34;[\x00-\x7F]*?&#34;&#34;&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;(\\[&#34;\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])&#34;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralString"/>
</rule>
<rule pattern="&#34;(true|false|null)*&#34;">
<token type="Literal"/>
</rule>
<rule pattern="[\r\n\s]+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="#[^\r\n]*">
<token type="Comment"/>
</rule>
</state>
<state name="type">
<rule pattern="[^\W\d]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
<rule>
<include state="root"/>
</rule>
</state>
</rules>
</lexer>

90
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,90 @@
<lexer>
<config>
<name>Groff</name>
<alias>groff</alias>
<alias>nroff</alias>
<alias>man</alias>
<filename>*.[1-9]</filename>
<filename>*.1p</filename>
<filename>*.3pm</filename>
<filename>*.man</filename>
<mime_type>application/x-troff</mime_type>
<mime_type>text/troff</mime_type>
</config>
<rules>
<state name="request">
<rule pattern="\n">
<token type="Text"/>
<pop depth="1"/>
</rule>
<rule>
<include state="escapes"/>
</rule>
<rule pattern="&#34;[^\n&#34;]+&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\d+">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\S+">
<token type="LiteralString"/>
</rule>
<rule pattern="\s+">
<token type="Text"/>
</rule>
</state>
<state name="root">
<rule pattern="(\.)(\w+)">
<bygroups>
<token type="Text"/>
<token type="Keyword"/>
</bygroups>
<push state="request"/>
</rule>
<rule pattern="\.">
<token type="Punctuation"/>
<push state="request"/>
</rule>
<rule pattern="[^\\\n]+">
<token type="Text"/>
<push state="textline"/>
</rule>
<rule>
<push state="textline"/>
</rule>
</state>
<state name="textline">
<rule>
<include state="escapes"/>
</rule>
<rule pattern="[^\\\n]+">
<token type="Text"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
<pop depth="1"/>
</rule>
</state>
<state name="escapes">
<rule pattern="\\&#34;[^\n]*">
<token type="Comment"/>
</rule>
<rule pattern="\\[fn]\w">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="\\\(.{2}">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="\\.\[.*\]">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="\\.">
<token type="LiteralStringEscape"/>
</rule>
<rule pattern="\\\n">
<token type="Text"/>
<push state="request"/>
</rule>
</state>
</rules>
</lexer>

135
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml generated vendored ノーマルファイル
ファイルの表示

@ -0,0 +1,135 @@
<lexer>
<config>
<name>Groovy</name>
<alias>groovy</alias>
<filename>*.groovy</filename>
<filename>*.gradle</filename>
<mime_type>text/x-groovy</mime_type>
<dot_all>true</dot_all>
</config>
<rules>
<state name="root">
<rule pattern="#!(.*?)$">
<token type="CommentPreproc"/>
<push state="base"/>
</rule>
<rule>
<push state="base"/>
</rule>
</state>
<state name="base">
<rule pattern="^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()">
<bygroups>
<usingself state="root"/>
<token type="NameFunction"/>
<token type="Text"/>
<token type="Operator"/>
</bygroups>
</rule>
<rule pattern="[^\S\n]+">
<token type="Text"/>
</rule>
<rule pattern="//.*?\n">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*.*?\*/">
<token type="CommentMultiline"/>
</rule>
<rule pattern="@[a-zA-Z_][\w.]*">
<token type="NameDecorator"/>
</rule>
<rule pattern="(as|assert|break|case|catch|continue|default|do|else|finally|for|if|in|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b">
<token type="Keyword"/>
</rule>
<rule pattern="(abstract|const|enum|extends|final|implements|native|private|protected|public|static|strictfp|super|synchronized|throws|transient|volatile)\b">
<token type="KeywordDeclaration"/>
</rule>
<rule pattern="(def|boolean|byte|char|double|float|int|long|short|void)\b">
<token type="KeywordType"/>
</rule>
<rule pattern="(package)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="(class|interface)(\s+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
</bygroups>
<push state="class"/>
</rule>
<rule pattern="(import)(\s+)">
<bygroups>
<token type="KeywordNamespace"/>
<token type="Text"/>
</bygroups>
<push state="import"/>
</rule>
<rule pattern="&#34;&#34;&#34;.*?&#34;&#34;&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#39;&#39;&#39;.*?&#39;&#39;&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#39;(\\\\|\\&#39;|[^&#39;])*&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="\$/((?!/\$).)*/\$">
<token type="LiteralString"/>
</rule>
<rule pattern="/(\\\\|\\&#34;|[^/])*/">
<token type="LiteralString"/>
</rule>
<rule pattern="&#39;\\.&#39;|&#39;[^\\]&#39;|&#39;\\u[0-9a-fA-F]{4}&#39;">
<token type="LiteralStringChar"/>
</rule>
<rule pattern="(\.)([a-zA-Z_]\w*)">
<bygroups>
<token type="Operator"/>
<token type="NameAttribute"/>
</bygroups>
</rule>
<rule pattern="[a-zA-Z_]\w*:">
<token type="NameLabel"/>
</rule>
<rule pattern="[a-zA-Z_$]\w*">
<token type="Name"/>
</rule>
<rule pattern="[~^*!%&amp;\[\](){}&lt;&gt;|+=:;,./?-]">
<token type="Operator"/>
</rule>
<rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="0x[0-9a-fA-F]+">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-9]+L?">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="\n">
<token type="Text"/>
</rule>
</state>
<state name="class">
<rule pattern="[a-zA-Z_]\w*">
<token type="NameClass"/>
<pop depth="1"/>
</rule>
</state>
<state name="import">
<rule pattern="[\w.]+\*?">
<token type="NameNamespace"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

変更されたファイルが多すぎるため、一部のファイルは表示されません さらに表示