package chroma

// remappingLexer wraps a delegate Lexer and rewrites every token it emits
// through a user-supplied mapping function.
type remappingLexer struct {
	lexer  Lexer               // underlying lexer whose tokens are remapped
	mapper func(Token) []Token // maps one token to zero or more replacement tokens
}
8 | // RemappingLexer remaps a token to a set of, potentially empty, tokens.
|
---|
9 | func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer {
|
---|
10 | return &remappingLexer{lexer, mapper}
|
---|
11 | }
// AnalyseText delegates to the wrapped lexer; remapping does not change how
// well the lexer matches the input text.
func (r *remappingLexer) AnalyseText(text string) float32 {
	return r.lexer.AnalyseText(text)
}
// SetAnalyser installs a custom text-analysis function on the wrapped lexer
// and returns the remapping lexer itself for call chaining.
func (r *remappingLexer) SetAnalyser(analyser func(text string) float32) Lexer {
	r.lexer.SetAnalyser(analyser)
	return r
}
// SetRegistry forwards the lexer registry to the wrapped lexer and returns
// the remapping lexer itself for call chaining.
func (r *remappingLexer) SetRegistry(registry *LexerRegistry) Lexer {
	r.lexer.SetRegistry(registry)
	return r
}
// Config returns the wrapped lexer's configuration unchanged.
func (r *remappingLexer) Config() *Config {
	return r.lexer.Config()
}
31 | func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
|
---|
32 | it, err := r.lexer.Tokenise(options, text)
|
---|
33 | if err != nil {
|
---|
34 | return nil, err
|
---|
35 | }
|
---|
36 | var buffer []Token
|
---|
37 | return func() Token {
|
---|
38 | for {
|
---|
39 | if len(buffer) > 0 {
|
---|
40 | t := buffer[0]
|
---|
41 | buffer = buffer[1:]
|
---|
42 | return t
|
---|
43 | }
|
---|
44 | t := it()
|
---|
45 | if t == EOF {
|
---|
46 | return t
|
---|
47 | }
|
---|
48 | buffer = r.mapper(t)
|
---|
49 | }
|
---|
50 | }, nil
|
---|
51 | }
// TypeMapping defines type maps for the TypeRemappingLexer.
type TypeMapping []struct {
	From, To TokenType // remap tokens of type From to type To
	Words    []string  // if non-empty, only tokens whose value matches one of these words are remapped
}
59 | // TypeRemappingLexer remaps types of tokens coming from a parent Lexer.
|
---|
60 | //
|
---|
61 | // eg. Map "defvaralias" tokens of type NameVariable to NameFunction:
|
---|
62 | //
|
---|
63 | // mapping := TypeMapping{
|
---|
64 | // {NameVariable, NameFunction, []string{"defvaralias"},
|
---|
65 | // }
|
---|
66 | // lexer = TypeRemappingLexer(lexer, mapping)
|
---|
67 | func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer {
|
---|
68 | // Lookup table for fast remapping.
|
---|
69 | lut := map[TokenType]map[string]TokenType{}
|
---|
70 | for _, rt := range mapping {
|
---|
71 | km, ok := lut[rt.From]
|
---|
72 | if !ok {
|
---|
73 | km = map[string]TokenType{}
|
---|
74 | lut[rt.From] = km
|
---|
75 | }
|
---|
76 | if len(rt.Words) == 0 {
|
---|
77 | km[""] = rt.To
|
---|
78 | } else {
|
---|
79 | for _, k := range rt.Words {
|
---|
80 | km[k] = rt.To
|
---|
81 | }
|
---|
82 | }
|
---|
83 | }
|
---|
84 | return RemappingLexer(lexer, func(t Token) []Token {
|
---|
85 | if k, ok := lut[t.Type]; ok {
|
---|
86 | if tt, ok := k[t.Value]; ok {
|
---|
87 | t.Type = tt
|
---|
88 | } else if tt, ok := k[""]; ok {
|
---|
89 | t.Type = tt
|
---|
90 | }
|
---|
91 | }
|
---|
92 | return []Token{t}
|
---|
93 | })
|
---|
94 | }