Unmarshal map in struct in golang - json

I have a Go proto file here. I have defined a struct that I would like to unmarshal JSON into using the golang json package.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"

	gw "github.com/kserve/rest-proxy/gen"
)

type REST struct {
	Id         string                                             `json:"id,omitempty"`
	Parameters map[string]*gw.InferParameter                      `json:"parameters,omitempty"`
	Inputs     []*gw.ModelInferRequest_InferInputTensor           `json:"inputs,omitempty"`
	Outputs    []*gw.ModelInferRequest_InferRequestedOutputTensor `json:"outputs,omitempty"`
}

func main() {
	d := REST{}
	buffer := &bytes.Buffer{}
	data := `
	{
		"id": "1",
		"inputs": [{
			"name": "input:0",
			"shape": [-1, -1, -1, -1],
			"datatype": "UINT8",
			"parameters": {
				"binary_data_size": {
					"int64Param": 100
				}
			}
		}]
	}
	`
	buffer.Write([]byte(data))
	if err := json.NewDecoder(buffer).Decode(&d); err != nil {
		fmt.Println(err)
	}
	fmt.Println(d)
}
{1 map[] [name:"input:0" datatype:"UINT8" shape:-1 shape:-1 shape:-1 shape:-1 parameters:{key:"binary_data_size" value:{}}] []}
May I know how to get the binary_data_size value, which should be 100? Is my JSON schema incorrect? Thank you.

The standard encoding/json package "does not operate correctly on protocol buffer messages" (source; also see this issue for more info). There are ways of working around this, but using google.golang.org/protobuf/encoding/protojson is the recommended approach, i.e.:
package main

import (
	"fmt"

	gw "github.com/kserve/rest-proxy/gen"
	"google.golang.org/protobuf/encoding/protojson"
)

func main() {
	d := gw.ModelInferRequest{}
	data := []byte(`
	{
		"id": "1",
		"inputs": [{
			"name": "input:0",
			"shape": [-1, -1, -1, -1],
			"datatype": "UINT8",
			"parameters": {
				"binary_data_size": {
					"int64Param": 100
				}
			}
		}]
	}`)
	if err := protojson.Unmarshal(data, &d); err != nil {
		panic(err)
	}
	fmt.Println(d)
}
Result:
{{{} [] [] 0xc00013ecd0} 0 [] 1 map[] [name:"input:0" datatype:"UINT8" shape:-1 shape:-1 shape:-1 shape:-1 parameters:{key:"binary_data_size" value:{int64_param:100}}] [] []}
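To read the value back out of the decoded message, here is a short fragment that could go right after the protojson.Unmarshal call above. The GetInt64Param getter name is an assumption based on the usual protoc-gen-go output for an int64_param oneof field, so double-check it against the generated code:

	// Assumes d was just populated by protojson.Unmarshal as shown above.
	// GetInt64Param is the assumed getter for the int64_param oneof choice.
	if p, ok := d.Inputs[0].Parameters["binary_data_size"]; ok {
		fmt.Println(p.GetInt64Param()) // prints 100
	}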

Related

Converting dynamic JSON to CSV in Golang

I tried to convert dynamic JSON to CSV. I investigated libraries and answers but couldn't find anything suitable.
This and this example could be helpful, but I can't add the JSON's struct to my code because the JSON is dynamic.
In Python and JS, I found these examples:
Python:
# Python program to convert
# JSON file to CSV
import json
import csv

# Opening JSON file and loading the data
# into the variable data
with open('data.json') as json_file:
    data = json.load(json_file)

employee_data = data['emp_details']

# now we will open a file for writing
data_file = open('data_file.csv', 'w')

# create the csv writer object
csv_writer = csv.writer(data_file)

# Counter variable used for writing
# headers to the CSV file
count = 0
for emp in employee_data:
    if count == 0:
        # Writing headers of CSV file
        header = emp.keys()
        csv_writer.writerow(header)
        count += 1
    # Writing data of CSV file
    csv_writer.writerow(emp.values())

data_file.close()
JS:
const items = json3.items
const replacer = (key, value) => value === null ? '' : value // specify how you want to handle null values here
const header = Object.keys(items[0])
const csv = [
    header.join(','), // header row first
    ...items.map(row => header.map(fieldName => JSON.stringify(row[fieldName], replacer)).join(','))
].join('\r\n')
console.log(csv)
These snippets convert dynamic JSON to CSV easily.
Example input and output:
JSON (input):
[
    {
        "name": "John",
        "age": "21"
    },
    {
        "name": "Noah",
        "age": "23"
    },
    {
        "name": "Justin",
        "age": "25"
    }
]
CSV (output):
"name","age"
"John","21"
"Noah","23"
"Justi","25"
So how can I convert dynamic JSON to CSV in Go?
PS: I discovered a Golang lib (json2csv) that helps with the conversion but only works from the command prompt.
A few online tools, for example:
https://csvjson.com/json2csv
https://data.page/json/csv
After some investigation, I handled it with the yukithm/json2csv package.
package main

import (
	"bytes"
	"encoding/json"
	"log"
	"os"

	"github.com/yukithm/json2csv"
)

func main() {
	b := &bytes.Buffer{}
	wr := json2csv.NewCSVWriter(b)
	j, _ := os.ReadFile("your-input-path\\input.json")

	var x []map[string]interface{}

	// unmarshal JSON
	err := json.Unmarshal(j, &x)
	if err != nil {
		log.Fatal(err)
	}

	// convert JSON to CSV
	csv, err := json2csv.JSON2CSV(x)
	if err != nil {
		log.Fatal(err)
	}

	// write the CSV rows
	err = wr.WriteCSV(csv)
	if err != nil {
		log.Fatal(err)
	}
	wr.Flush()
	got := b.String()

	// the following line prints the CSV
	println(got)

	// create the file and append if you want
	createFileAppendText("output.csv", got)
}

func createFileAppendText(filename string, text string) {
	f, err := os.OpenFile(filename, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0600)
	if err != nil {
		panic(err)
	}
	defer f.Close()

	if _, err = f.WriteString(text); err != nil {
		panic(err)
	}
}
input.json:
[
    {
        "Name": "Japan",
        "Capital": "Tokyo",
        "Continent": "Asia"
    },
    {
        "Name": "Germany",
        "Capital": "Berlin",
        "Continent": "Europe"
    },
    {
        "Name": "Turkey",
        "Capital": "Ankara",
        "Continent": "Europe"
    },
    {
        "Name": "Greece",
        "Capital": "Athens",
        "Continent": "Europe"
    },
    {
        "Name": "Israel",
        "Capital": "Jerusalem",
        "Continent": "Asia"
    }
]
output.csv:
/Capital,/Continent,/Name
Tokyo,Asia,Japan
Berlin,Europe,Germany
Ankara,Europe,Turkey
Athens,Europe,Greece
Jerusalem,Asia,Israel
Paste this into https://github.com/yukithm/json2csv/blob/master/cmd/json2csv/main.go and call the function from main.
func stackoverflow() {
	jsonStr := `
	[
		{
			"name": "John",
			"age": "21"
		},
		{
			"name": "Noah",
			"age": "23"
		},
		{
			"name": "Justin",
			"age": "25"
		}
	]`
	buff := bytes.NewBufferString(jsonStr)
	data, _ := readJSON(buff)
	results, _ := json2csv.JSON2CSV(data)
	headerStyle := headerStyleTable["jsonpointer"]
	err := printCSV(os.Stdout, results, headerStyle, false)
	if err != nil {
		log.Fatal(err)
	}
}
It works for me.
➜ json2csv git:(master) ✗ go run main.go
/age,/name
21,John
23,Noah
25,Justin
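For flat JSON arrays like the examples above, a dependency-free sketch is also possible with only encoding/json and encoding/csv. It mirrors the Python/JS approach of taking the header from the first object's keys (sorting the header is an assumption here, just to make the column order deterministic):

	package main

	import (
		"encoding/csv"
		"encoding/json"
		"fmt"
		"log"
		"os"
		"sort"
	)

	func main() {
		input := []byte(`[
			{"name": "John", "age": "21"},
			{"name": "Noah", "age": "23"},
			{"name": "Justin", "age": "25"}
		]`)

		var rows []map[string]interface{}
		if err := json.Unmarshal(input, &rows); err != nil {
			log.Fatal(err)
		}
		if len(rows) == 0 {
			return
		}

		// Build a stable header from the first object's keys.
		header := make([]string, 0, len(rows[0]))
		for k := range rows[0] {
			header = append(header, k)
		}
		sort.Strings(header)

		w := csv.NewWriter(os.Stdout)
		if err := w.Write(header); err != nil {
			log.Fatal(err)
		}
		for _, row := range rows {
			record := make([]string, len(header))
			for i, k := range header {
				record[i] = fmt.Sprint(row[k])
			}
			if err := w.Write(record); err != nil {
				log.Fatal(err)
			}
		}
		w.Flush()
		if err := w.Error(); err != nil {
			log.Fatal(err)
		}
	}

This only handles flat objects; for nested JSON, a library such as json2csv (used above) is the simpler choice.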

Golang unmarshaling JSON to protobuf generated structs

I would like to receive a JSON response within a client application and unmarshal this response into a struct. To ensure that the struct stays the same across all client apps using this package, I would like to define the JSON responses as protobuf messages. I am having difficulties unmarshaling the JSON to the protobuf-generated structs.
I have the following JSON data:
[
    {
        "name": "C1",
        "type": "docker"
    },
    {
        "name": "C2",
        "type": "docker"
    }
]
I have modeled my protobuf definitions like this:
syntax = "proto3";
package main;
message Container {
string name = 1;
string type = 2;
}
message Containers {
repeated Container containers = 1;
}
Using this pattern with plain structs normally works, but for some reason using these proto definitions causes issues. The code below demonstrates a working and a non-working example. Although one of the versions works, I am unable to use this solution, since []*Container does not satisfy the proto.Message interface.
package main

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/gogo/protobuf/jsonpb"
)

func working(data string) ([]*Container, error) {
	var cs []*Container
	return cs, json.Unmarshal([]byte(data), &cs)
}

func notWorking(data string) (*Containers, error) {
	c := &Containers{}
	jsm := jsonpb.Unmarshaler{}
	if err := jsm.Unmarshal(strings.NewReader(data), c); err != nil {
		return nil, err
	}
	return c, nil
}

func main() {
	data := `
	[
		{
			"name": "C1",
			"type": "docker"
		},
		{
			"name": "C2",
			"type": "docker"
		}
	]`
	w, err := working(data)
	if err != nil {
		panic(err)
	}
	fmt.Print(w)

	nw, err := notWorking(data)
	if err != nil {
		panic(err)
	}
	fmt.Print(nw.Containers)
}
Running this gives the following output:
[name:"C1" type:"docker" name:"C2" type:"docker" ]
panic: json: cannot unmarshal array into Go value of type map[string]json.RawMessage
goroutine 1 [running]:
main.main()
/Users/example/go/src/github.com/example/example/main.go:46 +0x1ee
Process finished with exit code 2
Is there a way to unmarshal this JSON to Containers? Or alternatively, make []*Container satisfy the proto.Message interface?
For the message Containers, i.e.
message Containers {
    repeated Container containers = 1;
}
The correct JSON should look like:
{
    "containers": [
        {
            "name": "C1",
            "type": "docker"
        },
        {
            "name": "C2",
            "type": "docker"
        }
    ]
}
If you cannot change the JSON, then you can utilize the func that you've created:
func working(data string) (*Containers, error) {
	var cs []*Container
	if err := json.Unmarshal([]byte(data), &cs); err != nil {
		return nil, err
	}
	return &Containers{
		Containers: cs,
	}, nil
}
You should use NewDecoder to wrap the data in a json.Decoder and then traverse the array. The code looks like this:
func main() {
	data := `
	[
		{
			"name": "C1",
			"type": "docker"
		},
		{
			"name": "C2",
			"type": "docker"
		}
	]`
	jsonDecoder := json.NewDecoder(strings.NewReader(data))

	// consume the opening bracket of the array
	_, err := jsonDecoder.Token()
	if err != nil {
		log.Fatal(err)
	}

	var protoMessages []*pb.Container
	for jsonDecoder.More() {
		protoMessage := pb.Container{}
		err := jsonpb.UnmarshalNext(jsonDecoder, &protoMessage)
		if err != nil {
			log.Fatal(err)
		}
		protoMessages = append(protoMessages, &protoMessage)
	}
	fmt.Printf("%s\n", protoMessages)
}
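A similar per-element loop also works with the google.golang.org/protobuf/encoding/protojson package recommended in the first answer on this page. A minimal sketch; the pb import path is hypothetical and stands in for wherever the generated Container message lives:

	package main

	import (
		"encoding/json"
		"fmt"
		"log"
		"strings"

		"google.golang.org/protobuf/encoding/protojson"

		pb "example.com/yourmodule/gen" // hypothetical import path for the generated Container message
	)

	func main() {
		data := `[{"name": "C1", "type": "docker"}, {"name": "C2", "type": "docker"}]`

		dec := json.NewDecoder(strings.NewReader(data))
		if _, err := dec.Token(); err != nil { // consume the opening '['
			log.Fatal(err)
		}

		var containers []*pb.Container
		for dec.More() {
			// Read one array element as raw JSON, then hand it to protojson.
			var raw json.RawMessage
			if err := dec.Decode(&raw); err != nil {
				log.Fatal(err)
			}
			c := &pb.Container{}
			if err := protojson.Unmarshal(raw, c); err != nil {
				log.Fatal(err)
			}
			containers = append(containers, c)
		}
		fmt.Println(containers)
	}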

How to empty an existing JSON array object without marshalling the entire document

I'm trying to import a large JSON document from a file, empty all arrays matching a specific key or pattern, then output it, without having to marshal the entire document.
It will be run as part of a periodic batch job, so performance/efficiency is not a priority.
Simplicity, and making sure the code is agnostic to the overall JSON structure, is more important.
Is there an easy way to solve this in Go?
Example input:
{
    "panels": [
        {
            "alert": {
                "executionErrorState": "alerting",
                "notifications": [
                    {
                        "uid": "fRLbH_6Zk"
                    },
                    {
                        "uid": "8gamKl6Waz"
                    }
                ]
            }
        },
        {
            "alert": {
                "executionErrorState": "alerting",
                "notifications": [
                    {
                        "uid": "DqjrD_6Zk"
                    }
                ]
            }
        }
    ]
}
Desired output (all entries in 'alert.notifications' in 'panels' removed):
{
    "panels": [
        {
            "alert": {
                "executionErrorState": "alerting",
                "notifications": []
            }
        },
        {
            "alert": {
                "executionErrorState": "alerting",
                "notifications": []
            }
        }
    ]
}
You can use a streaming decoder to read objects one by one. The code below unmarshals the first object and then fails on the next (deliberately malformed) one, which demonstrates that it does not read the whole file up front. Here is an example:
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"strings"
)

func main() {
	const jsonStream = `
	[
		{"Name": "Ed", "Text": "Knock knock."},
		asdasd sadasd,
	`
	type Message struct {
		Name, Text string
	}
	dec := json.NewDecoder(strings.NewReader(jsonStream))

	// read open bracket
	t, err := dec.Token()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%T: %v\n", t, t)

	// while the array contains values
	for dec.More() {
		var m Message
		// decode an array value (Message)
		err := dec.Decode(&m)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%v: %v\n", m.Name, m.Text)
	}

	// read closing bracket
	t, err = dec.Token()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%T: %v\n", t, t)
}
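For the original goal of clearing every notifications array, here is a minimal sketch that reads "without having to marshal the entire document" as "without having to model it with structs": it decodes into generic values, recursively empties any array stored under a notifications key (the key name comes from the example above), and re-encodes the result. It does round-trip the whole document through encoding/json, so treat it as one possible approach rather than the only one:

	package main

	import (
		"encoding/json"
		"fmt"
		"log"
	)

	// clearNotifications walks generic JSON values and replaces any array
	// found under a "notifications" key with an empty array.
	func clearNotifications(v interface{}) {
		switch t := v.(type) {
		case map[string]interface{}:
			for k, child := range t {
				if k == "notifications" {
					if _, ok := child.([]interface{}); ok {
						t[k] = []interface{}{}
						continue
					}
				}
				clearNotifications(child)
			}
		case []interface{}:
			for _, child := range t {
				clearNotifications(child)
			}
		}
	}

	func main() {
		input := []byte(`{"panels":[{"alert":{"executionErrorState":"alerting","notifications":[{"uid":"fRLbH_6Zk"}]}}]}`)

		var doc interface{}
		if err := json.Unmarshal(input, &doc); err != nil {
			log.Fatal(err)
		}
		clearNotifications(doc)

		out, err := json.MarshalIndent(doc, "", "  ")
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(string(out))
	}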

Reading nested json data with golang's encoding/json

I can't get the correct definition for my structs to capture the nested json data saved in a variable. My code snippet is as below:
package main

import (
	"encoding/json"
	"fmt"
)

type Data struct {
	P     string `json:"ports"`
	Ports struct {
		Portnums []int
	}
	Protocols []string `json:"protocols"`
}

func main() {
	y := `{
		"ports": {
			"udp": [
				1,
				30
			],
			"tcp": [
				100,
				1023
			]
		},
		"protocols": [
			"tcp",
			"udp"
		]
	}`

	var data Data
	e := json.Unmarshal([]byte(y), &data)
	if e == nil {
		fmt.Println(data)
	} else {
		fmt.Println("Failed:", e)
	}
}
$ go run foo.go
Failed: json: cannot unmarshal object into Go value of type string
This works for me (see the comment on your question above): GoPlay
type Data struct {
	Ports struct {
		Tcp []float64 `json:"tcp"`
		Udp []float64 `json:"udp"`
	} `json:"ports"`
	Protocols []string `json:"protocols"`
}

func main() {
	y := `{
		"ports": {
			"udp": [
				1,
				30
			],
			"tcp": [
				100,
				1023
			]
		},
		"protocols": [
			"tcp",
			"udp"
		]
	}`
	d := Data{}
	err := json.Unmarshal([]byte(y), &d)
	if err != nil {
		fmt.Println("Error:", err.Error())
	} else {
		fmt.Printf("%#+v", d)
	}
}
OUTPUT
main.Data{
    Ports:struct {
        Tcp []float64 "json:\"tcp\"";
        Udp []float64 "json:\"udp\""
    }{
        Tcp:[]float64{100, 1023},
        Udp:[]float64{1, 30}
    },
    Protocols:[]string{"tcp", "udp"}
}
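If the protocol names under ports are not fixed, a map also works and keeps the definition agnostic to the keys. A minimal sketch; the Data name mirrors the question, and treating the keys as dynamic is an assumption:

	package main

	import (
		"encoding/json"
		"fmt"
		"log"
	)

	// Data decodes "ports" into a map so the protocol names ("tcp", "udp", ...)
	// do not need to be known ahead of time.
	type Data struct {
		Ports     map[string][]int `json:"ports"`
		Protocols []string         `json:"protocols"`
	}

	func main() {
		y := `{"ports": {"udp": [1, 30], "tcp": [100, 1023]}, "protocols": ["tcp", "udp"]}`

		var d Data
		if err := json.Unmarshal([]byte(y), &d); err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%#v\n", d) // the "udp" and "tcp" keys come straight from the JSON
	}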

How to convert a node tree to JSON in golang?

I have a tree like the following and want to save it in JSON format.
package main

import "fmt"

type Node struct {
	Id    string
	Nodes []*Node
}

func main() {
	node1 := Node{Id: "1"}
	node2 := Node{Id: "2"}
	node3 := Node{Id: "3"}
	node4 := Node{Id: "4"}
	node1.Nodes = append(node1.Nodes, &node2)
	node2.Nodes = append(node2.Nodes, &node3)
	node3.Nodes = append(node3.Nodes, &node4)
	fmt.Printf("node1: %p %v \n", &node1, node1)
}
The output JSON I want looks like this; how can I produce it?
{
    Id: "1",
    Nodes: [
        Id: "2",
        Nodes: [
            Id: "3",
            Nodes: [Id: "4", Nodes: []]
        ],
    ]
}
The following code should do what you want:
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

type Node struct {
	Id    string
	Nodes []*Node
}

func main() {
	node1 := Node{Id: "1"}
	node2 := Node{Id: "2"}
	node3 := Node{Id: "3"}
	node4 := Node{Id: "4"}
	node1.Nodes = append(node1.Nodes, &node2)
	node2.Nodes = append(node2.Nodes, &node3)
	node3.Nodes = append(node3.Nodes, &node4)
	fmt.Printf("node1: %p %v \n", &node1, node1)

	bytes, err := json.Marshal(node1)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(bytes))
}
This code will output json like so:
{
    "Id": "1",
    "Nodes": [
        {
            "Id": "2",
            "Nodes": [
                {
                    "Id": "3",
                    "Nodes": [
                        {
                            "Id": "4",
                            "Nodes": null
                        }
                    ]
                }
            ]
        }
    ]
}
Note that if a Nodes field has no slice of Node objects, the field will be marshaled as a null value in the resulting JSON. If you want the Nodes slice to render as an empty array, make sure it is initialized to an empty slice.
Play with this code on the playground here!
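As a quick illustration of that last note, a minimal sketch: initializing the leaf's Nodes to an empty (non-nil) slice makes it marshal as [] instead of null.

	package main

	import (
		"encoding/json"
		"fmt"
		"log"
	)

	type Node struct {
		Id    string
		Nodes []*Node
	}

	func main() {
		// An empty, non-nil slice marshals as [] rather than null.
		node4 := Node{Id: "4", Nodes: []*Node{}}

		b, err := json.Marshal(node4)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(string(b)) // {"Id":"4","Nodes":[]}
	}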