#9: panic with last token in stream (#18)
bzick authored Apr 5, 2024
1 parent cef4da6 commit 25ea868
Showing 3 changed files with 22 additions and 2 deletions.
parser.go: 1 addition, 1 deletion
@@ -199,11 +199,11 @@ func (p *parsing) parse() {
 		p.token.key = TokenUnknown
 		p.token.value = p.str[p.pos : p.pos+1]
 		p.token.offset = p.offset + p.pos
-		p.next()
 		p.emmitToken()
 		if p.curr == 0 {
 			break
 		}
+		p.next()
 	}
 	if len(p.token.indent) > 0 {
 		p.tail = p.token.indent
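Why the order matters: in the old loop p.next() ran before p.emmitToken(), so on the final byte of a buffer the parser advanced past the end before emitting the token it had just built, and taking that last token from a stream panicked (issue #9). The fix emits first and only advances afterwards, breaking out once the buffer is exhausted. Below is a minimal, self-contained sketch of the same off-by-one; the scanner type and its methods are hypothetical stand-ins, not the library's actual internals.

package main

import "fmt"

// scanner is a hypothetical stand-in for the parse loop above,
// NOT the tokenizer's real code.
type scanner struct {
	str []byte
	pos int
}

// curr returns the byte under the cursor, or 0 once past the end,
// mirroring the p.curr == 0 end-of-buffer check in the diff.
func (s *scanner) curr() byte {
	if s.pos < len(s.str) {
		return s.str[s.pos]
	}
	return 0
}

func (s *scanner) next() { s.pos++ }

// emit reads the byte at the cursor; it is only safe while the
// cursor is still inside the buffer.
func (s *scanner) emit() {
	fmt.Printf("token %q at %d\n", s.str[s.pos:s.pos+1], s.pos)
}

func main() {
	s := &scanner{str: []byte("a]")}
	for {
		// The buggy order (next() before emit()) would skip the byte
		// under the cursor and, on the last byte, advance past the end
		// so that emit() indexes out of range and panics.
		s.emit() // emit the token under the cursor first,
		s.next() // then advance,
		if s.curr() == 0 {
			break // and stop once the buffer is exhausted.
		}
	}
}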
stream_test.go: 16 additions, 0 deletions
@@ -256,6 +256,22 @@ func TestIssue11(t *testing.T) {
 	require.Equal(t, "2", string(stream.CurrentToken().Value()))
 }
 
+func TestIssue9(t *testing.T) {
+	parser := New()
+	buf := bytes.NewBuffer([]byte("a]"))
+
+	stream := parser.ParseStream(buf, 4096)
+	defer stream.Close()
+	stream.CurrentToken()
+	for stream.IsValid() {
+		switch stream.CurrentToken().Key() {
+		default:
+			// println(stream.CurrentToken().ValueString())
+			stream.GoNext()
+		}
+	}
+}
+
 var pattern = []byte(`<item count=10 valid id="n9762"> Носки <![CDATA[ socks ]]></item>`)
 
 type dataGenerator struct {
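Regression note: before this change, the walk above panicked inside GoNext() when the final "]" token was taken from the stream (issue #9); with the reordered parse loop, iteration simply ends once IsValid() reports false.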
tokenizer_test.go: 5 additions, 1 deletion
@@ -116,7 +116,7 @@ func TestTokenizeEdgeCases(t *testing.T) {
 		{key: TokenUnknown, value: s2b("_"), offset: 3, line: 1, id: 1},
 		{key: TokenInteger, value: s2b("1"), offset: 4, line: 1, id: 2},
 	}},
-	{"1..2", []Token{
+	{"1..2", []Token{ // https://github.com/bzick/tokenizer/issues/11
 		{key: TokenInteger, value: s2b("1"), offset: 0, line: 1, id: 0},
 		{key: TokenUnknown, value: s2b("."), offset: 1, line: 1, id: 1},
 		{key: TokenFloat, value: s2b(".2"), offset: 2, line: 1, id: 2},
@@ -136,6 +136,10 @@ func TestTokenizeEdgeCases(t *testing.T) {
 		{key: TokenFloat, value: s2b(".1"), offset: 0, line: 1, id: 0},
 		{key: TokenFloat, value: s2b(".2"), offset: 2, line: 1, id: 1},
 	}},
+	{"a]", []Token{ // https://github.com/bzick/tokenizer/issues/9
+		{key: TokenKeyword, value: s2b("a"), offset: 0, line: 1, id: 0},
+		{key: TokenUnknown, value: s2b("]"), offset: 1, line: 1, id: 1},
+	}},
 	}
 	for _, v := range data1 {
 		stream := tokenizer.ParseString(v.str)
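For a quick in-memory check of the fixed behavior, here is a sketch along the lines of the tests above. It assumes only the public API the diffs already use (New, ParseString, IsValid, CurrentToken, GoNext); the printed format is illustrative.

package main

import (
	"fmt"

	"github.com/bzick/tokenizer"
)

func main() {
	parser := tokenizer.New()
	stream := parser.ParseString("a]") // in-memory counterpart of ParseStream
	defer stream.Close()
	for stream.IsValid() {
		tok := stream.CurrentToken()
		fmt.Printf("key=%d value=%q\n", tok.Key(), tok.Value())
		stream.GoNext()
	}
	// Expected per the table above: TokenKeyword ("a"), then TokenUnknown ("]").
}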
