asciigoat's INI parser
https://asciigoat.org/ini
package basic

import (
	"fmt"

	"asciigoat.org/core/lexer"
	"asciigoat.org/ini/parser"
)

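// token is a single lexer token, queued by the decoder until it has
// collected enough of them to emit a section or a field.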
type token struct {
	pos   lexer.Position
	typ   parser.TokenType
	value string
}

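// String renders the token as "position type: value", mainly for
// error reporting and debugging.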
func (t token) String() string {
	return fmt.Sprintf("%s %s: %q", t.pos, t.typ, t.value)
}

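// executeFinal flushes the queue once the input ends, completing a
// section header or field that is still being assembled.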
func (dec *decoder) executeFinal() {
	if len(dec.queue) > 0 {
		// we have unfinished business
		switch dec.queue[0].typ {
		case parser.TokenSectionStart:
			dec.execute(parser.TokenSectionEnd)
		case parser.TokenFieldKey:
			dec.execute(parser.TokenFieldValue)
		}
	}
}

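// execute acts on the queued tokens once a terminating token type is
// seen: TokenSectionEnd emits a section and TokenFieldValue emits a
// field. Both paths clear the queue afterwards.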
func (dec *decoder) execute(typ parser.TokenType) {
	switch typ {
	case parser.TokenSectionEnd:
		name1, ok1 := dec.getValue(1, parser.TokenSectionName)

		if ok1 {
			name2, ok2 := dec.getValue(2, parser.TokenSectionSubname)

			dec.addSection(name1, name2, ok2)
		}

		dec.reset()
	case parser.TokenFieldValue:
		key, _ := dec.getValue(0, parser.TokenFieldKey)
		value, _ := dec.getValue(1, parser.TokenFieldValue)

		dec.addField(key, value)
		dec.reset()
	}
}

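// addSection appends a new Section to the output and makes it the
// target for subsequent fields.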
func (dec *decoder) addSection(key, id string, allowEmptyID bool) {
	emptyID := allowEmptyID && id == ""

	// index for dec.current
	n := len(dec.out.Sections)

	// new section
	dec.out.Sections = append(dec.out.Sections, Section{
		Key:     key,
		ID:      id,
		EmptyID: emptyID,
	})

	// pointer to the latest section
	dec.current = &dec.out.Sections[n]
}

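// addField appends a Field to the current section, or to the global
// list when no section has been started yet.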
func (dec *decoder) addField(key, value string) {
	field := Field{
		Key:   key,
		Value: value,
	}

	if p := dec.current; p != nil {
		// in section
		p.Fields = append(p.Fields, field)
	} else {
		// global
		dec.out.Global = append(dec.out.Global, field)
	}
}

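// getValue returns the value of the queued token at idx, but only if
// that position exists and holds a token of the given type.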
func (dec *decoder) getValue(idx int, typ parser.TokenType) (string, bool) {
	switch {
	case idx < 0 || idx >= len(dec.queue):
		// out of range
		return "", false
	case dec.queue[idx].typ != typ:
		// wrong type
		return "", false
	default:
		return dec.queue[idx].value, true
	}
}

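// reset discards all queued tokens, keeping the allocated slice.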
func (dec *decoder) reset() {
	dec.queue = dec.queue[:0]
}

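// depth reports whether the queue holds exactly depth tokens.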
func (dec *decoder) depth(depth int) bool {
	return len(dec.queue) == depth
}

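// depthAfter reports whether the queue holds exactly depth tokens and
// the token at depth-1 is of the given type.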
func (dec *decoder) depthAfter(depth int, typ parser.TokenType) bool {
	_, ok := dec.getValue(depth-1, typ)
	if ok {
		return len(dec.queue) == depth
	}
	return false
}

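// typeOK tells whether a token of the given type is acceptable in the
// current state of the queue. It encodes the accepted orderings:
//
//	TokenSectionStart, TokenSectionName, [TokenSectionSubname,] TokenSectionEnd
//	TokenFieldKey, TokenFieldValue
//
// with TokenComment handled separately by OnToken.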
func (dec *decoder) typeOK(typ parser.TokenType) bool {
	switch typ {
	case parser.TokenSectionStart, parser.TokenFieldKey:
		// first token only
		return dec.depth(0)
	case parser.TokenSectionName:
		// right after TokenSectionStart
		return dec.depthAfter(1, parser.TokenSectionStart)
	case parser.TokenSectionSubname:
		// right after TokenSectionName
		return dec.depthAfter(2, parser.TokenSectionName)
	case parser.TokenSectionEnd:
		// only on a section with a name
		_, ok := dec.getValue(1, parser.TokenSectionName)
		return ok
	case parser.TokenFieldValue:
		// right after a TokenFieldKey
		return dec.depthAfter(1, parser.TokenFieldKey)
	default:
		// never
		return false
	}
}

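// OnToken is the parser callback. Comments are ignored, tokens that fit
// the current state of the queue are stored and acted upon, and anything
// else is reported as an invalid token.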
func (dec *decoder) OnToken(pos lexer.Position, typ parser.TokenType, value string) error {
	t := &token{pos, typ, value}

	switch {
	case typ == parser.TokenComment:
		// ignore comments
		return nil
	case dec.typeOK(typ):
		// acceptable token
		dec.queue = append(dec.queue, t)
		dec.execute(typ)
		return nil
	default:
		// unacceptable
		return newErrInvalidToken(t)
	}
}
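
How the pieces fit together, as a rough sketch: the parser calls OnToken for every token it finds, the decoder queues tokens until a section header or a key/value pair is complete, and executeFinal closes whatever is still pending at the end of input. The test below, meant as a separate _test.go file in the same package, drives the decoder by hand; it assumes the zero value of decoder is usable and that lexer.Position can be left as its zero value, neither of which is shown in this file.

package basic

import (
	"testing"

	"asciigoat.org/core/lexer"
	"asciigoat.org/ini/parser"
)

// TestDecoderByHand feeds a hand-written token stream to the decoder,
// roughly what the parser might report for:
//
//	[user "alice"]
//	name = Alice
//
// The zero-value decoder and zero-value lexer.Position are assumptions
// made for this sketch only.
func TestDecoderByHand(t *testing.T) {
	var dec decoder
	var pos lexer.Position

	tokens := []struct {
		typ   parser.TokenType
		value string
	}{
		{parser.TokenSectionStart, "["},
		{parser.TokenSectionName, "user"},
		{parser.TokenSectionSubname, "alice"},
		{parser.TokenSectionEnd, "]"},
		{parser.TokenFieldKey, "name"},
		{parser.TokenFieldValue, "Alice"},
	}

	for _, tk := range tokens {
		if err := dec.OnToken(pos, tk.typ, tk.value); err != nil {
			t.Fatalf("OnToken(%v, %q): %v", tk.typ, tk.value, err)
		}
	}
	dec.executeFinal()

	if len(dec.out.Sections) != 1 || len(dec.out.Sections[0].Fields) != 1 {
		t.Fatalf("unexpected result: %+v", dec.out)
	}
}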