I have searched for many libraries, but none of them support segmented JOSN parsing. I think this is a very necessary function, because segmented scenarios are inevitable during network transmission. If data is cached all the time, it will inevitably take up a lot of memory. Does anyone have a good solution?
package main
import (
"encoding/json"
"fmt"
"io"
"strings"
)
type Item struct {
ID int `json:"id"`
Name string `json:"name"`
}
func main() {
ch := make(chan string)
// A large json data stream
go func() {
ch <- `[
{"id": 1, "name": `
ch <- `"Item 1"},
{"id": 2, "name": "Item 2"},`
ch <- `
{"id": 3, "name": "Item 3"}
]`
close(ch)
}()
// Read data from the channel
for data := range ch {
fmt.Println("Received data:", data)
decoder := json.NewDecoder(io.MultiReader(strings.NewReader(data)))
_, err := decoder.Token()
if err != nil {
fmt.Println("Error reading array start token:", err)
return
}
for decoder.More() {
var item Item
err := decoder.Decode(&item)
if err != nil {
fmt.Println("Error decoding item:", err)
return
}
fmt.Printf("Decoded item: %+vn", item)
}
_, err = decoder.Token()
if err != nil && err != io.EOF {
fmt.Println("Error reading array end token:", err)
return
}
}
}