Compare commits

...

11 Commits

Author SHA1 Message Date
Alejandro Mery cbd3f705f5 basic: GoStringer [WIP] 1 year ago
Alejandro Mery c9f206c9aa Unmarshal: WIP 1 year ago
Alejandro Mery 9912146d21 Decoder: WIP 1 year ago
Alejandro Mery 71ab4a58c1 build-sys: use local darvaza.org/core [DO-NOT-MERGE] 1 year ago
Alejandro Mery 8e838c3566 build-sys: use local asciigoat.org/core [DO-NOT-MERGE] 1 year ago
Alejandro Mery ff0c7d1b9f vscode: add Subname to the dictionary 1 year ago
Alejandro Mery fabd192e3d vscode: add unescapes to the dictionary 1 year ago
Alejandro Mery 37f3efebfb vscode: add asciigoat to the dictionary 1 year ago
Alejandro Mery 506fff8725 Merge branch 'pr-amery-basic' into next-amery 1 year ago
Alejandro Mery d75b2dbc78 basic: rename and document queue related methods 1 year ago
Alejandro Mery 46ba96d6b4 basic: refactor error handling 1 year ago
  1. .vscode/settings.json (7 changes)
  2. basic/error.go (22 changes)
  3. basic/token.go (51 changes)
  4. basic/write.go (18 changes)
  5. decoder.go (49 changes)
  6. go.mod (5 changes)
  7. go.sum (2 changes)
  8. unmarshal.go (15 changes)

.vscode/settings.json (vendored, 7 changes)

@@ -0,0 +1,7 @@
{
  "cSpell.words": [
    "asciigoat",
    "Subname",
    "unescapes"
  ]
}

basic/error.go (22 changes)

@@ -10,24 +10,22 @@ var (
	errInvalidToken = errors.New("invalid token")
)
func newErrInvalidToken(t *token) *lexer.Error {
	err := &lexer.Error{
		Line:    t.pos.Line,
		Column:  t.pos.Column,
		Content: t.value,
		Err:     errInvalidToken,
	}
	return err
}
func (dec *decoder) OnError(pos lexer.Position, content string, err error) error {
	err = &lexer.Error{
func newError(pos lexer.Position, content, hint string, err error) *lexer.Error {
	return &lexer.Error{
		Line:    pos.Line,
		Column:  pos.Column,
		Content: content,
		Hint:    hint,
		Err:     err,
	}
}
func newErrInvalidToken(t *token) *lexer.Error {
	return newError(t.pos, t.value, "", errInvalidToken)
}
func (dec *decoder) OnError(pos lexer.Position, content string, err error) error {
	err = newError(pos, content, "", err)
	dec.executeFinal()
	return err
}
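Sketch (not part of this diff): how a caller might inspect the structured errors these helpers produce. It relies only on the lexer.Error fields visible in this hunk (Line, Column, Content, Err); the standalone program and its values are illustrative.

package main

import (
	"errors"
	"fmt"

	"asciigoat.org/core/lexer"
)

// errInvalidToken mirrors the sentinel declared in basic/error.go.
var errInvalidToken = errors.New("invalid token")

func main() {
	// Build a *lexer.Error the same way the new newError helper does,
	// using only the fields visible in this hunk. Values are made up.
	var err error = &lexer.Error{
		Line:    3,
		Column:  7,
		Content: "[broken",
		Err:     errInvalidToken,
	}

	// Callers can recover the structured details with errors.As.
	var lexErr *lexer.Error
	if errors.As(err, &lexErr) {
		fmt.Printf("parse error at %d:%d near %q: %v\n",
			lexErr.Line, lexErr.Column, lexErr.Content, lexErr.Err)
	}
}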

basic/token.go (51 changes)

@@ -32,21 +32,21 @@ func (dec *decoder) executeFinal() {
func (dec *decoder) execute(typ parser.TokenType) {
	switch typ {
	case parser.TokenSectionEnd:
		name1, ok1 := dec.getValue(1, parser.TokenSectionName)
		name1, ok1 := dec.queueValue(1, parser.TokenSectionName)
		if ok1 {
			name2, ok2 := dec.getValue(2, parser.TokenSectionSubname)
			name2, ok2 := dec.queueValue(2, parser.TokenSectionSubname)
			dec.addSection(name1, name2, ok2)
		}
		dec.reset()
		dec.queueReset()
	case parser.TokenFieldValue:
		key, _ := dec.getValue(0, parser.TokenFieldKey)
		value, _ := dec.getValue(1, parser.TokenFieldValue)
		key, _ := dec.queueValue(0, parser.TokenFieldKey)
		value, _ := dec.queueValue(1, parser.TokenFieldValue)
		dec.addField(key, value)
		dec.reset()
		dec.queueReset()
	}
}
@@ -82,7 +82,8 @@ func (dec *decoder) addField(key, value string) {
	}
}
func (dec *decoder) getValue(idx int, typ parser.TokenType) (string, bool) {
// queueValue extracts the value of element on the queue if the type matches.
func (dec *decoder) queueValue(idx int, typ parser.TokenType) (string, bool) {
	switch {
	case idx < 0 || idx >= len(dec.queue):
		// out of range
@@ -95,40 +96,48 @@ func (dec *decoder) getValue(idx int, typ parser.TokenType) (string, bool) {
	}
}
func (dec *decoder) reset() {
// queueReset removes all tokens from the queue
func (dec *decoder) queueReset() {
	dec.queue = dec.queue[:0]
}
func (dec *decoder) depth(depth int) bool {
// queueDepth confirms the current depth of the queue
func (dec *decoder) queueDepth(depth int) bool {
	return len(dec.queue) == depth
}
func (dec *decoder) depthAfter(depth int, typ parser.TokenType) bool {
	_, ok := dec.getValue(depth-1, typ)
	if ok {
		return len(dec.queue) == depth
// queueDepthType confirms the current depth of the queue and the type of the last
// element.
func (dec *decoder) queueDepthType(depth int, typ parser.TokenType) bool {
	if dec.queueDepth(depth) {
		return dec.queueType(depth-1, typ)
	}
	return false
}
// queueType tells if the specified element on the queue is of the required type.
func (dec *decoder) queueType(idx int, typ parser.TokenType) bool {
	_, ok := dec.queueValue(idx, typ)
	return ok
}
func (dec *decoder) typeOK(typ parser.TokenType) bool {
	switch typ {
	case parser.TokenSectionStart, parser.TokenFieldKey:
		// first token only
		return dec.depth(0)
		return dec.queueDepth(0)
	case parser.TokenSectionName:
		// right after TokenSectionStart
		return dec.depthAfter(1, parser.TokenSectionStart)
		return dec.queueDepthType(1, parser.TokenSectionStart)
	case parser.TokenSectionSubname:
		// right after TokenSectionName
		return dec.depthAfter(2, parser.TokenSectionName)
		return dec.queueDepthType(2, parser.TokenSectionName)
	case parser.TokenSectionEnd:
		// only on a section with name
		_, ok := dec.getValue(1, parser.TokenSectionName)
		return ok
		return dec.queueType(1, parser.TokenSectionName)
	case parser.TokenFieldValue:
		// right after a TokenFieldKey
		return dec.depthAfter(1, parser.TokenFieldKey)
		return dec.queueDepthType(1, parser.TokenFieldKey)
	default:
		// never
		return false
@@ -149,6 +158,8 @@ func (dec *decoder) OnToken(pos lexer.Position, typ parser.TokenType, value stri
		return nil
	default:
		// unacceptable
		return newErrInvalidToken(t)
		err := newErrInvalidToken(t)
		dec.executeFinal()
		return err
	}
}
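Sketch (not part of this diff): the renamed queue helpers implement a small look-behind check over pending tokens. The standalone program below uses hypothetical local types to mimic the ordering rules enforced by typeOK: a section name follows a section start, a subname follows a name, a section end needs a named section, and a field value follows a field key.

package main

import "fmt"

// tokenType is a hypothetical stand-in for parser.TokenType.
type tokenType int

const (
	sectionStart tokenType = iota
	sectionName
	sectionSubname
	sectionEnd
	fieldKey
	fieldValue
)

// queue mirrors the decoder's pending-token queue.
type queue []tokenType

func (q queue) depth(n int) bool               { return len(q) == n }
func (q queue) typeAt(i int, t tokenType) bool { return i >= 0 && i < len(q) && q[i] == t }
func (q queue) depthType(n int, t tokenType) bool {
	return q.depth(n) && q.typeAt(n-1, t)
}

// typeOK applies the same ordering rules as decoder.typeOK in basic/token.go.
func (q queue) typeOK(t tokenType) bool {
	switch t {
	case sectionStart, fieldKey:
		return q.depth(0) // first token only
	case sectionName:
		return q.depthType(1, sectionStart) // right after a section start
	case sectionSubname:
		return q.depthType(2, sectionName) // right after the section name
	case sectionEnd:
		return q.typeAt(1, sectionName) // only on a section with a name
	case fieldValue:
		return q.depthType(1, fieldKey) // right after a field key
	default:
		return false
	}
}

func main() {
	q := queue{sectionStart, sectionName}
	fmt.Println(q.typeOK(sectionSubname)) // true: a subname may follow the name
	fmt.Println(q.typeOK(fieldValue))     // false: no field key is pending
}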

basic/write.go (18 changes)

@@ -53,6 +53,12 @@ func (field Field) String() string {
	return buf.String()
}
// GoString generates a string output for "%#v"
func (*Field) GoString() string {
	var buf bytes.Buffer
	return buf.String()
}
func writeSectionToBuffer(w *bytes.Buffer, sec *Section, nl string) int {
	var written, n int
@@ -90,6 +96,12 @@ func (sec *Section) String() string {
	return buf.String()
}
// GoString generates a string output for "%#v"
func (*Section) GoString() string {
	var buf bytes.Buffer
	return buf.String()
}
// WriteTo writes a INI representation of the document
// onto the provided writer.
func (doc *Document) WriteTo(w io.Writer) (int64, error) {
@@ -102,3 +114,9 @@ func (doc *Document) String() string {
	buf := doc.AsBuffer(WriteNewLine)
	return buf.String()
}
// GoString generates a string output for "%#v"
func (Document) GoString() string {
	var buf bytes.Buffer
	return buf.String()
}
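Sketch (not part of this diff): the GoString methods added above are WIP stubs that still return an empty string. For context, fmt invokes GoString for the %#v verb whenever a value implements fmt.GoStringer; the hypothetical type below shows that mechanism.

package main

import "fmt"

// pair is a hypothetical type implementing fmt.GoStringer.
type pair struct {
	Key, Value string
}

// GoString is used by fmt for the "%#v" verb instead of the default
// Go-syntax representation.
func (p pair) GoString() string {
	return fmt.Sprintf("pair{Key: %q, Value: %q}", p.Key, p.Value)
}

func main() {
	p := pair{Key: "name", Value: "asciigoat"}
	fmt.Printf("%#v\n", p) // pair{Key: "name", Value: "asciigoat"}
}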

decoder.go (49 changes)

@@ -0,0 +1,49 @@
package ini
import (
	"bytes"
	"io"
	"strings"
	"asciigoat.org/core"
	"asciigoat.org/ini/parser"
)
// Decoder ...
type Decoder struct {
	io.Closer
	p *parser.Parser
}
// Decode ...
func (dec *Decoder) Decode() error {
	defer dec.Close()
	return dec.p.Run()
}
// NewDecoder creates a Decoder over the provided [io.Reader]
func NewDecoder(r io.Reader) *Decoder {
	rc := core.NewReadCloser(r)
	switch {
	case rc == nil:
		return nil
	default:
		dec := &Decoder{
			p:      parser.NewParser(rc),
			Closer: rc,
		}
		return dec
	}
}
// NewDecoderBytes creates a Decoder over a provided bytes array
func NewDecoderBytes(b []byte) *Decoder {
	return NewDecoder(bytes.NewReader(b))
}
// NewDecoderString creates a Decoder over a provided string of data
func NewDecoderString(s string) *Decoder {
	return NewDecoder(strings.NewReader(s))
}
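Sketch (not part of this diff): minimal use of the new constructors. NewDecoderString and Decode are defined in decoder.go above; the input string is made up, and since the commits are marked WIP, Decode currently only drives the parser without building a Document.

package main

import (
	"log"

	"asciigoat.org/ini"
)

func main() {
	// Hypothetical INI input; Decode only runs the parser for now.
	dec := ini.NewDecoderString("[section]\nkey = value\n")
	if dec == nil {
		log.Fatal("NewDecoderString returned nil")
	}
	if err := dec.Decode(); err != nil {
		log.Fatalf("decode failed: %v", err)
	}
	log.Println("input parsed without errors")
}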

go.mod (5 changes)

@@ -2,6 +2,11 @@ module asciigoat.org/ini
go 1.19
replace (
	asciigoat.org/core => ../core
	darvaza.org/core => ../../darvaza.org/core
)
require (
	asciigoat.org/core v0.3.9
	github.com/mgechev/revive v1.3.3

go.sum (2 changes)

@@ -1,5 +1,3 @@
asciigoat.org/core v0.3.9 h1:hgDDz4ecm3ZvehX++m8A/IzAt+B5oDPiRtxatzfUHPQ=
asciigoat.org/core v0.3.9/go.mod h1:CAaHwyw8MpAq4a1MYtN2dxJrsK+hmIdW50OndaQZYPI=
github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/chavacava/garif v0.1.0 h1:2JHa3hbYf5D9dsgseMKAmc/MZ109otzgNFk5s87H9Pc=

unmarshal.go (15 changes)

@@ -0,0 +1,15 @@
package ini
import "io"
// ReadInto ...
func ReadInto(v any, r io.Reader) error {
	dec := NewDecoder(r)
	return dec.Unmarshal(v)
}
// Unmarshal ...
func (dec *Decoder) Unmarshal(any) error {
	return dec.p.Run()
}
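Sketch (not part of this diff): how the ReadInto entry point might be called. The config type is hypothetical; the current Unmarshal stub ignores its argument and only runs the parser, so nothing is populated yet.

package main

import (
	"log"
	"strings"

	"asciigoat.org/ini"
)

// config is a hypothetical target type; the current Unmarshal stub
// ignores it entirely.
type config struct {
	Key string
}

func main() {
	var cfg config

	r := strings.NewReader("key = value\n")
	if err := ini.ReadInto(&cfg, r); err != nil {
		log.Fatalf("read: %v", err)
	}
	// cfg is still zero-valued: Unmarshal only runs the parser for now.
	log.Printf("%+v", cfg)
}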