package basic

import (
	"fmt"

	"asciigoat.org/core/lexer"
	"asciigoat.org/ini/parser"
)

// token is a lexical token received from the parser, together with its
// position in the source.
type token struct {
	pos   lexer.Position
	typ   parser.TokenType
	value string
}

func (t token) String() string {
	return fmt.Sprintf("%s %s: %q", t.pos, t.typ, t.value)
}

// executeFinal flushes any partial section or field left on the queue
// when the token stream ends.
func (dec *decoder) executeFinal() {
	if len(dec.queue) > 0 {
		// we have unfinished business
		switch dec.queue[0].typ {
		case parser.TokenSectionStart:
			dec.execute(parser.TokenSectionEnd)
		case parser.TokenFieldKey:
			dec.execute(parser.TokenFieldValue)
		}
	}
}

// execute emits a section or field once the token that completes it
// has been queued.
func (dec *decoder) execute(typ parser.TokenType) {
	switch typ {
	case parser.TokenSectionEnd:
		name1, ok1 := dec.queueValue(1, parser.TokenSectionName)
		if ok1 {
			name2, ok2 := dec.queueValue(2, parser.TokenSectionSubname)
			dec.addSection(name1, name2, ok2)
		}
		dec.queueReset()
	case parser.TokenFieldValue:
		key, _ := dec.queueValue(0, parser.TokenFieldKey)
		value, _ := dec.queueValue(1, parser.TokenFieldValue)
		dec.addField(key, value)
		dec.queueReset()
	}
}

// addSection appends a new Section to the output and makes it current.
func (dec *decoder) addSection(key, id string, allowEmptyID bool) {
	emptyID := allowEmptyID && id == ""

	// index for dec.current
	n := len(dec.out.Sections)

	// new section
	dec.out.Sections = append(dec.out.Sections, Section{
		Key:     key,
		ID:      id,
		EmptyID: emptyID,
	})

	// pointer to the latest section
	dec.current = &dec.out.Sections[n]
}

// addField appends a Field to the current section, or to the global
// fields when no section has been started yet.
func (dec *decoder) addField(key, value string) {
	field := Field{
		Key:   key,
		Value: value,
	}

	if p := dec.current; p != nil {
		// in section
		p.Fields = append(p.Fields, field)
	} else {
		// global
		dec.out.Global = append(dec.out.Global, field)
	}
}

// queueValue extracts the value of an element on the queue if its type matches.
func (dec *decoder) queueValue(idx int, typ parser.TokenType) (string, bool) {
	switch {
	case idx < 0 || idx >= len(dec.queue):
		// out of range
		return "", false
	case dec.queue[idx].typ != typ:
		// wrong type
		return "", false
	default:
		return dec.queue[idx].value, true
	}
}

// queueReset removes all tokens from the queue.
func (dec *decoder) queueReset() {
	dec.queue = dec.queue[:0]
}

// queueDepth reports whether the queue currently holds exactly depth tokens.
func (dec *decoder) queueDepth(depth int) bool {
	return len(dec.queue) == depth
}

// queueDepthType reports whether the queue holds exactly depth tokens and the
// last element is of the given type.
func (dec *decoder) queueDepthType(depth int, typ parser.TokenType) bool {
	if dec.queueDepth(depth) {
		return dec.queueType(depth-1, typ)
	}
	return false
}

// queueType reports whether the specified element on the queue is of the
// required type.
func (dec *decoder) queueType(idx int, typ parser.TokenType) bool {
	_, ok := dec.queueValue(idx, typ)
	return ok
}

// typeOK reports whether a token of the given type is acceptable
// considering the tokens already on the queue.
func (dec *decoder) typeOK(typ parser.TokenType) bool {
	switch typ {
	case parser.TokenSectionStart, parser.TokenFieldKey:
		// first token only
		return dec.queueDepth(0)
	case parser.TokenSectionName:
		// right after TokenSectionStart
		return dec.queueDepthType(1, parser.TokenSectionStart)
	case parser.TokenSectionSubname:
		// right after TokenSectionName
		return dec.queueDepthType(2, parser.TokenSectionName)
	case parser.TokenSectionEnd:
		// only on a section with a name
		return dec.queueType(1, parser.TokenSectionName)
	case parser.TokenFieldValue:
		// right after a TokenFieldKey
		return dec.queueDepthType(1, parser.TokenFieldKey)
	default:
		// never
		return false
	}
}

// OnToken receives tokens from the parser, queues those that are acceptable
// in the current context, and assembles sections and fields as they are
// completed. Comments are ignored, and an invalid token aborts decoding
// after flushing whatever is pending on the queue.
func (dec *decoder) OnToken(pos lexer.Position, typ parser.TokenType, value string) error {
	t := &token{pos, typ, value}

	switch {
	case typ == parser.TokenComment:
		// ignore comments
		return nil
	case dec.typeOK(typ):
		// acceptable token
		dec.queue = append(dec.queue, t)
		dec.execute(typ)
		return nil
	default:
		// unacceptable
		err := newErrInvalidToken(t)
		dec.executeFinal()
		return err
	}
}
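
// The sketch below is illustrative only and not part of the original file.
// feedSection is a hypothetical helper showing the token order typeOK accepts
// for a section header such as [key "id"]. It assumes the caller provides an
// already initialised decoder and a position reported by the lexer; the
// values of TokenSectionStart and TokenSectionEnd are never inspected by the
// decoder, so empty strings are passed for them. A field would follow the
// same pattern with TokenFieldKey and then TokenFieldValue.
func feedSection(dec *decoder, pos lexer.Position, name, subname string) error {
	seq := []struct {
		typ   parser.TokenType
		value string
	}{
		{parser.TokenSectionStart, ""},        // opens the header, first token only
		{parser.TokenSectionName, name},       // becomes Section.Key
		{parser.TokenSectionSubname, subname}, // becomes Section.ID
		{parser.TokenSectionEnd, ""},          // triggers addSection via execute
	}

	for _, tk := range seq {
		if err := dec.OnToken(pos, tk.typ, tk.value); err != nil {
			return err
		}
	}
	return nil
}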