go-tg-screenshot-bot
284 lines · 4.7 KB
1package dbus
2
3import (
4"fmt"
5"strings"
6"unicode"
7"unicode/utf8"
8)
9
// Heavily inspired by the lexer from text/template.

// varToken is a single lexical token produced by the variant lexer,
// pairing the token kind with the raw text it was scanned from.
type varToken struct {
	typ varTokenType // kind of token (tokNumber, tokString, ...)
	val string       // literal input text of the token (or the error message for tokError)
}
16
// varTokenType identifies the kind of a varToken.
type varTokenType byte

const (
	tokEOF        varTokenType = iota // end of input reached cleanly
	tokError                          // lexing failed; val holds the message
	tokNumber                         // integer or floating-point literal
	tokString                         // quoted string literal
	tokBool                           // "true" or "false"
	tokArrayStart                     // '['
	tokArrayEnd                       // ']'
	tokDictStart                      // '{'
	tokDictEnd                        // '}'
	tokVariantStart                   // '<'
	tokVariantEnd                     // '>'
	tokComma                          // ','
	tokColon                          // ':'
	tokType                           // type annotation: @signature or a named type like "int32"
	tokByteString                     // b'...' or b"..." literal
)
36
// varLexer holds the state of the scanner while tokenizing a variant
// text expression.
type varLexer struct {
	input  string     // the string being scanned
	start  int        // byte offset where the current token starts
	pos    int        // current byte offset in input
	width  int        // byte width of the last rune read; used by backup
	tokens []varToken // tokens emitted so far
}
44
// lexState is one state of the lexer's state machine; it returns the
// next state, or nil when lexing is finished.
type lexState func(*varLexer) lexState

// varLex tokenizes s and returns all tokens. On success the final token
// is tokEOF; on failure the final token is tokError with the message in
// its val field.
func varLex(s string) []varToken {
	l := &varLexer{input: s}
	l.run()
	return l.tokens
}
52
53func (l *varLexer) accept(valid string) bool {
54if strings.ContainsRune(valid, l.next()) {
55return true
56}
57l.backup()
58return false
59}
60
// backup steps back one rune. It may only be called once per call of
// next, since it relies on the width recorded by that call.
func (l *varLexer) backup() {
	l.pos -= l.width
}
64
65func (l *varLexer) emit(t varTokenType) {
66l.tokens = append(l.tokens, varToken{t, l.input[l.start:l.pos]})
67l.start = l.pos
68}
69
70func (l *varLexer) errorf(format string, v ...interface{}) lexState {
71l.tokens = append(l.tokens, varToken{
72tokError,
73fmt.Sprintf(format, v...),
74})
75return nil
76}
77
// ignore discards the input consumed since the previous emit/ignore
// without producing a token (used to skip whitespace).
func (l *varLexer) ignore() {
	l.start = l.pos
}
81
82func (l *varLexer) next() rune {
83var r rune
84
85if l.pos >= len(l.input) {
86l.width = 0
87return -1
88}
89r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
90l.pos += l.width
91return r
92}
93
94func (l *varLexer) run() {
95for state := varLexNormal; state != nil; {
96state = state(l)
97}
98}
99
100func (l *varLexer) peek() rune {
101r := l.next()
102l.backup()
103return r
104}
105
// varLexNormal is the top-level lexer state. It dispatches on the next
// rune: punctuation is emitted directly, quotes and '@' hand off to the
// string/type states, and anything else is tried as a bool, byte
// string, or named type. It loops until EOF or an error.
func varLexNormal(l *varLexer) lexState {
	for {
		r := l.next()
		switch {
		case r == -1:
			l.emit(tokEOF)
			return nil
		case r == '[':
			l.emit(tokArrayStart)
		case r == ']':
			l.emit(tokArrayEnd)
		case r == '{':
			l.emit(tokDictStart)
		case r == '}':
			l.emit(tokDictEnd)
		case r == '<':
			l.emit(tokVariantStart)
		case r == '>':
			l.emit(tokVariantEnd)
		case r == ':':
			l.emit(tokColon)
		case r == ',':
			l.emit(tokComma)
		case r == '\'' || r == '"':
			// hand the quote back so varLexString sees it as its opener.
			l.backup()
			return varLexString
		case r == '@':
			l.backup()
			return varLexType
		case unicode.IsSpace(r):
			l.ignore()
		case unicode.IsNumber(r) || r == '+' || r == '-':
			l.backup()
			return varLexNumber
		case r == 'b':
			// b'...' / b"..." is a byte string; a bare 'b' may instead
			// start a bool or a type name (e.g. "boolean", "byte").
			pos := l.start
			if n := l.peek(); n == '"' || n == '\'' {
				return varLexByteString
			}
			// not a byte string; try to parse it as a type or bool below
			// ('b' is ASCII, so resetting pos/width to 1 byte is safe).
			l.pos = pos + 1
			l.width = 1
			fallthrough
		default:
			// either a bool or a type. Try bools first.
			l.backup()
			if l.pos+4 <= len(l.input) {
				if l.input[l.pos:l.pos+4] == "true" {
					l.pos += 4
					l.emit(tokBool)
					continue
				}
			}
			if l.pos+5 <= len(l.input) {
				if l.input[l.pos:l.pos+5] == "false" {
					l.pos += 5
					l.emit(tokBool)
					continue
				}
			}
			// must be a type.
			return varLexType
		}
	}
}
171
// varTypeMap maps the spelled-out type names accepted in variant text
// to their single-character D-Bus signature codes.
var varTypeMap = map[string]string{
	"boolean": "b",
	"byte":    "y",
	"int16":   "n",
	"uint16":  "q",
	"int32":   "i",
	"uint32":  "u",
	"int64":   "x",
	"uint64":  "t",
	// Fixed: the D-Bus signature code for double is 'd'. The previous
	// value "f" is not a valid D-Bus type code (the D-Bus type system
	// has no single-precision float).
	"double":     "d",
	"string":     "s",
	"objectpath": "o",
	"signature":  "g",
}
186
187func varLexByteString(l *varLexer) lexState {
188q := l.next()
189Loop:
190for {
191switch l.next() {
192case '\\':
193if r := l.next(); r != -1 {
194break
195}
196fallthrough
197case -1:
198return l.errorf("unterminated bytestring")
199case q:
200break Loop
201}
202}
203l.emit(tokByteString)
204return varLexNormal
205}
206
// varLexNumber scans a numeric literal: an optional sign, then decimal
// digits, hex digits after "0x", or octal digits after a leading "0",
// optionally followed by a fraction and a decimal exponent. A letter
// directly after the number makes it malformed.
func varLexNumber(l *varLexer) lexState {
	l.accept("+-")
	digits := "0123456789"
	if l.accept("0") {
		if l.accept("x") {
			digits = "0123456789abcdefABCDEF"
		} else {
			// a leading 0 without 'x' switches to octal digits.
			digits = "01234567"
		}
	}
	for strings.ContainsRune(digits, l.next()) {
	}
	l.backup()
	if l.accept(".") {
		// NOTE(review): the fraction reuses the integer digit set, so
		// e.g. "0.9" stops scanning at the '9' — confirm whether this
		// octal-fraction behavior is intended.
		for strings.ContainsRune(digits, l.next()) {
		}
		l.backup()
	}
	if l.accept("eE") {
		l.accept("+-")
		// the exponent is always decimal, regardless of the base above.
		for strings.ContainsRune("0123456789", l.next()) {
		}
		l.backup()
	}
	if r := l.peek(); unicode.IsLetter(r) {
		// consume the offending letter so it appears in the error text.
		l.next()
		return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
	}
	l.emit(tokNumber)
	return varLexNormal
}
238
239func varLexString(l *varLexer) lexState {
240q := l.next()
241Loop:
242for {
243switch l.next() {
244case '\\':
245if r := l.next(); r != -1 {
246break
247}
248fallthrough
249case -1:
250return l.errorf("unterminated string")
251case q:
252break Loop
253}
254}
255l.emit(tokString)
256return varLexNormal
257}
258
259func varLexType(l *varLexer) lexState {
260at := l.accept("@")
261for {
262r := l.next()
263if r == -1 {
264break
265}
266if unicode.IsSpace(r) {
267l.backup()
268break
269}
270}
271if at {
272if _, err := ParseSignature(l.input[l.start+1 : l.pos]); err != nil {
273return l.errorf("%s", err)
274}
275} else {
276if _, ok := varTypeMap[l.input[l.start:l.pos]]; ok {
277l.emit(tokType)
278return varLexNormal
279}
280return l.errorf("unrecognized type %q", l.input[l.start:l.pos])
281}
282l.emit(tokType)
283return varLexNormal
284}
285