Go - Copy all common fields between structs - json

I have a database that stores JSON, and a server that provides an external API whereby, through an HTTP POST, values in this database can be changed. The database is used by different processes internally, and as such has a common naming scheme.
The keys the customer sees are different, but map 1:1 with the keys in the database (though some database keys are not exposed at all). For example:
This is in the database:
{ "bit_size": 8, "secret_key": false }
And this is presented to the client:
{ "num_bits": 8 }
The API can change with respect to field names, but the database always has consistent keys.
I have named the fields the same in both structs, with different tags for the json encoder:
type DB struct {
NumBits int `json:"bit_size"`
Secret bool `json:"secret_key"`
}
type User struct {
NumBits int `json:"num_bits"`
}
I'm using encoding/json to do the Marshal/Unmarshal.
Is reflect the right tool for this? Is there an easier way since all of the keys are the same? I was thinking some kind of memcpy (if I kept the user fields in the same order).

Couldn't struct embedding be useful here?
package main
import (
"fmt"
)
type DB struct {
User
Secret bool `json:"secret_key"`
}
type User struct {
NumBits int `json:"num_bits"`
}
func main() {
db := DB{User{10}, true}
fmt.Printf("Hello, DB: %+v\n", db)
fmt.Printf("Hello, DB.NumBits: %+v\n", db.NumBits)
fmt.Printf("Hello, User: %+v\n", db.User)
}
http://play.golang.org/p/9s4bii3tQ2
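If you embed like this, marshaling just the embedded part gives you the client view directly. A minimal sketch (same types as above, plus encoding/json):
package main

import (
	"encoding/json"
	"fmt"
)

type User struct {
	NumBits int `json:"num_bits"`
}

type DB struct {
	User
	Secret bool `json:"secret_key"`
}

func main() {
	db := DB{User{10}, true}
	// Marshal only the embedded User to get the client-facing keys.
	out, err := json.Marshal(db.User)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"num_bits":10}
}
One caveat: marshaling the whole DB this way emits num_bits (the embedded field's tag), not bit_size, so embedding alone doesn't give you both key sets for the same field.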

Alternatively, gob can do the copy for you, since it matches struct fields by name (a complete example appears in a later answer):
buf := bytes.Buffer{}
err := gob.NewEncoder(&buf).Encode(&DbVar)
if err != nil {
return err
}
u := User{}
err = gob.NewDecoder(&buf).Decode(&u)
if err != nil {
return err
}

Here's a solution using reflection. You'd have to develop it further if you need more complex structures with embedded struct fields and such.
http://play.golang.org/p/iTaDgsdSaI
package main
import (
"encoding/json"
"fmt"
"reflect"
)
type M map[string]interface{} // shorthand for a generic JSON object
var Record = []byte(`{ "bit_size": 8, "secret_key": false }`)
type DB struct {
NumBits int `json:"bit_size"`
Secret bool `json:"secret_key"`
}
type User struct {
NumBits int `json:"num_bits"`
}
func main() {
d := new(DB)
e := json.Unmarshal(Record, d)
if e != nil {
panic(e)
}
m := mapFields(d)
fmt.Println("Mapped fields: ", m)
u := new(User)
o := applyMap(u, m)
fmt.Println("Applied map: ", o)
j, e := json.Marshal(o)
if e != nil {
panic(e)
}
fmt.Println("Output JSON: ", string(j))
}
func applyMap(u *User, m M) M {
t := reflect.TypeOf(u).Elem()
o := make(M)
for i := 0; i < t.NumField(); i++ {
f := t.FieldByIndex([]int{i})
// skip unexported fields
if f.PkgPath != "" {
continue
}
if x, ok := m[f.Name]; ok {
k := f.Tag.Get("json")
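// note: a tag with options (e.g. "bit_size,omitempty") would need everything after the first comma stripped here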
o[k] = x
}
}
return o
}
func mapFields(x *DB) M {
o := make(M)
v := reflect.ValueOf(x).Elem()
t := v.Type()
for i := 0; i < v.NumField(); i++ {
f := t.FieldByIndex([]int{i})
// skip unexported fields
if f.PkgPath != "" {
continue
}
o[f.Name] = v.FieldByIndex([]int{i}).Interface()
}
return o
}

Using struct tags, the following would sure be nice,
package main
import (
"fmt"
"log"
"hacked/json"
)
var dbj = `{ "bit_size": 8, "secret_key": false }`
type User struct {
NumBits int `json:"bit_size" api:"num_bits"`
}
func main() {
fmt.Println(dbj)
// unmarshal from full db record to User struct
var u User
if err := json.Unmarshal([]byte(dbj), &u); err != nil {
log.Fatal(err)
}
// remarshal User struct using api field names
api, err := json.MarshalTag(u, "api")
if err != nil {
log.Fatal(err)
}
fmt.Println(string(api))
}
Adding MarshalTag requires just a small patch to encode.go:
106c106,112
< e := &encodeState{}
---
> return MarshalTag(v, "json")
> }
>
> // MarshalTag is like Marshal but marshals fields with
> // the specified tag key instead of the default "json".
> func MarshalTag(v interface{}, tag string) ([]byte, error) {
> e := &encodeState{tagKey: tag}
201a208
> tagKey string
328c335
< for _, ef := range encodeFields(v.Type()) {
---
> for _, ef := range encodeFields(v.Type(), e.tagKey) {
509c516
< func encodeFields(t reflect.Type) []encodeField {
---
> func encodeFields(t reflect.Type, tagKey string) []encodeField {
540c547
< tv := f.Tag.Get("json")
---
> tv := f.Tag.Get(tagKey)

The following function uses reflection to copy fields between two structs. A src field is copied to a dest field if they have the same field name (if the types differ, Set will panic; a safer variant follows the usage example).
// CopyCommonFields copies src fields into dest fields. A src field is copied
// to a dest field if they have the same field name.
// Dest and src must be pointers to structs.
func CopyCommonFields(dest, src interface{}) {
srcType := reflect.TypeOf(src).Elem()
destType := reflect.TypeOf(dest).Elem()
destFieldsMap := map[string]int{}
for i := 0; i < destType.NumField(); i++ {
destFieldsMap[destType.Field(i).Name] = i
}
for i := 0; i < srcType.NumField(); i++ {
if j, ok := destFieldsMap[srcType.Field(i).Name]; ok {
reflect.ValueOf(dest).Elem().Field(j).Set(
reflect.ValueOf(src).Elem().Field(i),
)
}
}
}
Usage:
func main() {
type T struct {
A string
B int
}
type U struct {
A string
}
src := T{
A: "foo",
B: 5,
}
dest := U{}
CopyCommonFields(&dest, &src)
fmt.Printf("%+v\n", dest)
// output: {A:foo}
}
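As noted above, Set will panic if two same-named fields have different types. A hedged variant (the name CopyCommonFieldsSafe is mine) that simply skips such fields:
// CopyCommonFieldsSafe is like CopyCommonFields but skips same-named
// fields whose types are not assignable, instead of panicking.
func CopyCommonFieldsSafe(dest, src interface{}) {
	destVal := reflect.ValueOf(dest).Elem()
	srcVal := reflect.ValueOf(src).Elem()
	for i := 0; i < srcVal.NumField(); i++ {
		name := srcVal.Type().Field(i).Name
		df := destVal.FieldByName(name)
		sf := srcVal.Field(i)
		if df.IsValid() && df.CanSet() && sf.Type().AssignableTo(df.Type()) {
			df.Set(sf)
		}
	}
}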

You can convert between struct types if they have identical field names and types, effectively reassigning the field tags (since Go 1.8, tags are ignored when checking convertibility):
package main
import "encoding/json"
type DB struct {
dbNumBits
Secret bool `json:"secret_key"`
}
type dbNumBits struct {
NumBits int `json:"bit_size"`
}
type User struct {
NumBits int `json:"num_bits"`
}
var Record = []byte(`{ "bit_size": 8, "secret_key": false }`)
func main() {
d := new(DB)
e := json.Unmarshal(Record, d)
if e != nil {
panic(e)
}
var u User = User(d.dbNumBits)
println(u.NumBits)
}
https://play.golang.org/p/uX-IIgL-rjc

Here's a solution without reflection, unsafe, or a function per struct. The example is a little convoluted, and maybe you wouldn't need to do it just like this, but the key is using a map[string]interface{} to get away from a struct with field tags. You might be able to use the idea in a similar solution.
package main
import (
"encoding/json"
"fmt"
"log"
)
// example full database record
var dbj = `{ "bit_size": 8, "secret_key": false }`
// User type has only the fields going to the API
type User struct {
// tag still specifies internal name, not API name
NumBits int `json:"bit_size"`
}
// mapping from internal field names to API field names.
// (you could have more than one mapping, or even construct this
// at run time)
var ApiField = map[string]string{
// internal: API
"bit_size": "num_bits",
// ...
}
func main() {
fmt.Println(dbj)
// select user fields from full db record by unmarshalling
var u User
if err := json.Unmarshal([]byte(dbj), &u); err != nil {
log.Fatal(err)
}
// remarshal from User struct back to json
exportable, err := json.Marshal(u)
if err != nil {
log.Fatal(err)
}
// unmarshal into a map this time, to shed the field tags.
type jmap map[string]interface{}
mInternal := jmap{}
if err := json.Unmarshal(exportable, &mInternal); err != nil {
log.Fatal(err)
}
// translate field names
mExportable := jmap{}
for internalField, v := range mInternal {
mExportable[ApiField[internalField]] = v
}
// marshal final result with API field names
if exportable, err = json.Marshal(mExportable); err != nil {
log.Fatal(err)
}
fmt.Println(string(exportable))
}
Output:
{ "bit_size": 8, "secret_key": false }
{"num_bits":8}
Edit: More explanation. As Tom notes in a comment, there's reflection going on behind the code. The goal here is to keep the code simple by using the available capabilities of the library. Package json currently offers two ways to work with data, struct tags and maps of [string]interface{}. The struct tags let you select fields, but force you to statically pick a single json field name. The maps let you pick field names at run time, but not which fields to Marshal. It would be nice if the json package let you do both at once, but it doesn't. The answer here just shows the two techniques and how they can be composed in a solution to the example problem in the OP.

"Is reflect the right tool for this?" A better question might be, "Are struct tags the right tool for this?" and the answer might be no.
package main
import (
"encoding/json"
"fmt"
"log"
)
var dbj = `{ "bit_size": 8, "secret_key": false }`
// translation from internal field name to api field name
type apiTrans struct {
db, api string
}
var User = []apiTrans{
{db: "bit_size", api: "num_bits"},
}
func main() {
fmt.Println(dbj)
type jmap map[string]interface{}
// unmarshal full db record
mdb := jmap{}
if err := json.Unmarshal([]byte(dbj), &mdb); err != nil {
log.Fatal(err)
}
// build result
mres := jmap{}
for _, t := range User {
if v, ok := mdb[t.db]; ok {
mres[t.api] = v
}
}
// marshal result
exportable, err := json.Marshal(mres)
if err != nil {
log.Fatal(err)
}
fmt.Println(string(exportable))
}
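Since the question also involves changing values through an HTTP POST, the same translation table works in reverse. A sketch with a hypothetical apiToDB helper (unknown API fields are simply dropped):
// apiToDB translates incoming API JSON to internal db field names
// using the same apiTrans table.
func apiToDB(apiJSON []byte, trans []apiTrans) ([]byte, error) {
	var m map[string]interface{}
	if err := json.Unmarshal(apiJSON, &m); err != nil {
		return nil, err
	}
	out := map[string]interface{}{}
	for _, t := range trans {
		if v, ok := m[t.api]; ok {
			out[t.db] = v
		}
	}
	return json.Marshal(out)
}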

An efficient way to achieve your goal is to use the gob package: gob matches fields by name between the two structs, copies the exported fields they share, and ignores the rest.
Here's an example (runnable on the playground):
package main
import (
"bytes"
"encoding/gob"
"fmt"
)
type DB struct {
NumBits int
Secret bool
}
type User struct {
NumBits int
}
func main() {
db := DB{10, true}
user := User{}
buf := bytes.Buffer{}
err := gob.NewEncoder(&buf).Encode(&db)
if err != nil {
panic(err)
}
err = gob.NewDecoder(&buf).Decode(&user)
if err != nil {
panic(err)
}
fmt.Println(user)
}
Here's the official blog post: https://blog.golang.org/gob

Related

Validating custom types which should only accept defined constants

Here is the code that I have:
package main
import (
"fmt"
"github.com/go-playground/validator/v10"
)
type PriorityLevel string
const (
high PriorityLevel = "P1"
medium PriorityLevel = "P2"
)
type OrdersPriority struct {
OrderID string `json:"order_id" validate:"required"`
Priority PriorityLevel `json:"priority" validate:"required"`
}
var validate *validator.Validate
func main() {
order := OrdersPriority{
OrderID: "Order_1",
Priority: "random_priority",
}
validate = validator.New()
err := validate.Struct(order)
fmt.Printf("%+v", err)
}
As you can see, I have PriorityLevel, which should only allow the high or medium values. How can I enforce this using validator?
I know that in the validate tag I can provide the possible values, but that option is not scalable: if I end up adding more constants, I'd have to make that change there too. Is there a more Go-like way to do this with the validator library?
You can use eq and put in multiple values, but it will be difficult to maintain later on:
package main
import (
"fmt"
v "github.com/go-playground/validator/v10"
)
type Abc struct {
A string `validate:"required,eq=public|eq=private"`
}
func main() {
fmt.Println("Hello, playground")
a := Abc{
A: "public",
}
b := Abc{
A: "private",
}
c := Abc{
A: "panda",
}
validator := v.New()
err := validator.Struct(a)
fmt.Printf("a %+v\n", err)
err = validator.Struct(b)
fmt.Printf("b %+v\n", err)
err = validator.Struct(c)
fmt.Printf("c %+v\n", err)
}
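Worth noting: the library also has a oneof tag that expresses this more directly, though the allowed values are still a static list in the tag, so the maintenance concern remains:
type Abc struct {
	// oneof takes a space-separated list of allowed values
	A string `validate:"required,oneof=public private"`
}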
It would be better to use a custom validator function for your use case. Note that fl.Field() is the value under validation (fl.Param() would only return the tag's parameter):
validate.RegisterValidation(`mycustomvalidator`, func(fl validator.FieldLevel) bool {
// check the field's value against the allowed constants
switch PriorityLevel(fl.Field().String()) {
case high, medium:
return true
}
return false
})
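With that registered, the field's validate tag references the custom validator by name (reusing the types from the question):
type OrdersPriority struct {
	OrderID  string        `json:"order_id" validate:"required"`
	Priority PriorityLevel `json:"priority" validate:"required,mycustomvalidator"`
}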

Accessing Struct Data in Inner Scope from main and save to csv in Golang

The appendStruct function is designed to run in multiple goroutines in order to collect and append DataItem values into a DataContainer. So far I can print the result from inside appendStruct. Q1: How can I access and print container from main? Q2: How can I save that struct data to CSV from main?
package main
import "fmt"
type DataItem struct {
name string
}
type DataContainer struct {
Items []DataItem
}
func (box *DataContainer) AddItem(item DataItem) []DataItem {
box.Items = append(box.Items, item)
return box.Items
}
func appendStruct() {
items := []DataItem{}
container := DataContainer{items}
item1 := DataItem{name: fmt.Sprintf("Item1")}
item2 := DataItem{name: fmt.Sprintf("Item2")}
container.AddItem(item1)
container.AddItem(item2)
var ss = fmt.Sprintf("", container)
fmt.Println(ss)
}
func main() {
appendStruct()
}
OUTPUT from go run test.go is:
%!(EXTRA main.DataContainer={[{Item1} {Item2}]})
re Q1. "encoding/csv" has to implement string interface [][]string there is a hint how to approach it in Write struct to csv file
but lacks implementation example.
In appendStruct, container is a local variable, so it's not accessible outside that function call. You could return it, which would make it accessible from the caller (in this case, main):
func appendStruct() DataContainer {
//...
return container
}
func main() {
container := appendStruct()
}
The answer you linked is an excellent starting point. A code example shouldn't really be necessary - they're basically recommending that you create a helper method/function that takes all the fields of the struct and puts them into a slice in whatever order you want them to appear in the CSV, e.g.:
func (c DataItem) ToSlice() []string {
row := make([]string, 1, 1) // Since you only have 1 field in the struct
row[0] = c.name
return row
}
Then you can loop over these to write them to a CSV file.
The error output you're getting is because you're using Sprintf, which expects a format string as its first parameter, with a format verb for each additional argument. You're passing an empty format string, which would only work with no other arguments (and be pointless). Perhaps you meant Sprintf("%v", container), or just Sprint(container)?
Thank you @Adrian, your answer was very helpful. Below is the working code:
package main
import (
"fmt"
"os"
"encoding/csv"
"log"
)
type DataItem struct {
name string
}
type DataContainer struct {
Items []DataItem
}
func (box *DataContainer) AddItem(item DataItem) []DataItem {
box.Items = append(box.Items, item)
return box.Items
}
func appendStruct() DataContainer {
items := []DataItem{}
container := DataContainer{items}
item1 := DataItem{name: fmt.Sprintf("Item1")}
item2 := DataItem{name: fmt.Sprintf("Item2")}
container.AddItem(item1)
container.AddItem(item2)
return container
}
func (c DataItem) ToSlice() []string {
row := make([]string, 1, 1)
row[0] = c.name
return row
}
func checkError(message string, err error) {
if err != nil {
log.Fatal(message, err)
}
}
func main() {
container := appendStruct()
var ss = fmt.Sprint(container)
println(ss)
file, err := os.Create("result.csv")
checkError("Cannot create file", err)
defer file.Close()
w := csv.NewWriter(file)
for _, record := range container.Items {
values := record.ToSlice()
if err := w.Write(values); err != nil {
log.Fatalln("error writing record to csv:", err)
}
}
w.Flush()
if err := w.Error(); err != nil {
log.Fatal(err)
}
}

Don't read unneeded JSON key-values into memory

I have a JSON file with a single field that takes a huge amount of space when loaded into memory. The other fields are reasonable, but I'm trying to take care not to load that particular field unless I absolutely have to.
{
"Field1": "value1",
"Field2": "value2",
"Field3": "a very very long string that potentially takes a few GB of memory"
}
When reading that file into memory, I'd want to ignore Field3 (because loading it could crash my app). Here's some code that I would assume does that, because it uses an io stream rather than passing a []byte to json.Unmarshal.
package main
import (
"encoding/json"
"os"
)
func main() {
type MyStruct struct {
Field1 string
Field2 string
}
fi, err := os.Open("myJSONFile.json")
if err != nil {
os.Exit(2)
}
// create an instance and populate
var mystruct MyStruct
err = json.NewDecoder(fi).Decode(&mystruct)
if err != nil {
os.Exit(2)
}
// do some other stuff
}
The issue is that the built-in json.Decoder type reads the entire file into memory on Decode before throwing away key-values that don't match the struct's fields (as has been pointed out on StackOverflow before: link).
Are there any ways of decoding JSON in Go without keeping the entire JSON object in memory?
You could write a custom io.Reader that you feed to json.Decoder and that pre-reads your JSON file and skips the specific field.
The other option is to write your own decoder; that is more complicated and messy.
// edit: it seemed like a fun exercise, so here goes:
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"os"
"strings"
)
type IgnoreField struct {
io.Reader
Field string
buf bytes.Buffer
}
func NewIgnoreField(r io.Reader, field string) *IgnoreField {
return &IgnoreField{
Reader: r,
Field: field,
}
}
func (iF *IgnoreField) Read(p []byte) (n int, err error) {
if n, err = iF.Reader.Read(p); err != nil {
return
}
s := string(p)
fl := `"` + iF.Field + `"`
if i := strings.Index(s, fl); i != -1 {
l := strings.LastIndex(s[0:i], ",")
if l == -1 {
l = i
}
iF.buf.WriteString(s[0:l])
s = s[i+1+len(fl):]
i = strings.Index(s, `"`)
if i != -1 {
s = s[i+1:]
}
for {
i = strings.Index(s, `"`) //end quote
if i != -1 {
s = s[i+1:]
fmt.Println("Skipped")
break
} else {
if n, err = iF.Reader.Read(p); err != nil {
return
}
s = string(p)
}
}
iF.buf.WriteString(s)
}
ln := iF.buf.Len()
if ln >= len(p) {
tmp := iF.buf.Bytes()
iF.buf.Reset()
copy(p, tmp[0:len(p)])
iF.buf.Write(p[len(p):])
ln = len(p)
} else {
copy(p, iF.buf.Bytes())
iF.buf.Reset()
}
return ln, nil
}
func main() {
type MyStruct struct {
Field1 string
Field2 string
}
fi, err := os.Open("myJSONFile.json")
if err != nil {
os.Exit(2)
}
// create an instance and populate
var mystruct MyStruct
err = json.NewDecoder(NewIgnoreField(fi, "Field3")).Decode(&mystruct)
if err != nil {
fmt.Println(err)
}
fmt.Println(mystruct)
}
playground
(Note: this reader can miss the target field if its name straddles two Read calls, so treat it as a demonstration rather than production code.)

Getting json dynamic key name as string?

For example:
{"id":
{"12345678901234":
{"Account":"asdf",
"Password":"qwerty"
"LastSeen":"1397621470",
}
}
}
A program I've been trying to make needs to get the id as a string and then later use it to check the time in LastSeen.
I've tried using simplejson and jsonq, but still can't figure out how to do that.
You can use RawMessage and make it much simpler (play with it):
package main
import (
"encoding/json"
"fmt"
)
var data []byte = []byte(`{"id": {"12345678901234": {"Account":"asdf", "Password":"qwerty", "LastSeen":"1397621470"}}}`)
type Message struct {
Id string
Info struct {
Account string
Password string
LastSeen string
}
}
func main() {
var (
tmpmsg struct {
Data map[string]json.RawMessage `json:"id"`
}
msg Message
)
if err := json.Unmarshal(data, &tmpmsg); err != nil {
panic(err) // you probably want real error handling here instead
}
for id, raw := range tmpmsg.Data {
msg.Id = id
if err := json.Unmarshal(raw, &msg.Info); err != nil {
panic(err)
}
}
fmt.Printf("%+v\n", msg)
}
Looking at the Go blog post on JSON ("JSON and Go"), it can be done using the encoding/json package. I created a small program to do this, as follows:
package main
import (
"encoding/json"
"fmt"
)
var data []byte = []byte(`{"id": {"12345678901234": {"Account":"asdf", "Password":"qwerty", "LastSeen":"1397621470"}}}`)
type Message struct {
id string
LastSeen int64
}
var m Message
func main() {
var i interface {}
err := json.Unmarshal(data, &i)
if err != nil {
println("Error decoding data")
fmt.Printf("%s", err.Error())
return
}
m := i.(map[string]interface{})
for k, v := range m {
println(k)
im := v.(map[string]interface{})
for ik, iv := range im {
println("\t", ik)
jm := iv.(map[string]interface{})
for jk, jv := range jm {
println("\t\t", jk, ": ", jv.(string))
}
}
}
}
I apologise if this is poor in terms of Go best practices and such; I am new to the language. And I know that some elements of this aren't entirely necessary, like the Message type definition, but this works, at least on your data.

Unmarshal CSV record into struct in Go

The problem: how to automatically deserialize/unmarshal a record from a CSV file into a Go struct.
For example, I have
type Test struct {
Name string
Surname string
Age int
}
And CSV file contains records
John;Smith;42
Piter;Abel;50
Is there an easy way to unmarshal those records into a struct, other than using the "encoding/csv" package to read each record and then doing something like
record, _ := reader.Read()
test := Test{record[0],record[1],atoi(record[2])}
There is gocarina/gocsv, which handles custom structs the same way encoding/json does.
You can also write custom marshallers and unmarshallers for specific types.
Example:
type Client struct {
Id string `csv:"client_id"` // .csv column headers
Name string `csv:"client_name"`
Age string `csv:"client_age"`
}
func main() {
in, err := os.Open("clients.csv")
if err != nil {
panic(err)
}
defer in.Close()
clients := []*Client{}
if err := gocsv.UnmarshalFile(in, &clients); err != nil {
panic(err)
}
for _, client := range clients {
fmt.Println("Hello, ", client.Name)
}
}
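gocsv can also write structs back out. A short sketch using gocsv.MarshalFile with the same Client type (the output file name is made up):
package main

import (
	"os"

	"github.com/gocarina/gocsv"
)

type Client struct {
	Id   string `csv:"client_id"`
	Name string `csv:"client_name"`
	Age  string `csv:"client_age"`
}

func main() {
	clients := []*Client{{Id: "1", Name: "Jose", Age: "32"}}
	out, err := os.Create("clients_out.csv")
	if err != nil {
		panic(err)
	}
	defer out.Close()
	// writes a header row from the csv tags, then one row per client
	if err := gocsv.MarshalFile(&clients, out); err != nil {
		panic(err)
	}
}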
Seems I'm done with automatic unmarshaling of CSV records into structs (limited to string and int fields). Hope this is useful.
Here is a link to playground: http://play.golang.org/p/kwc32A5mJf
func Unmarshal(reader *csv.Reader, v interface{}) error {
record, err := reader.Read()
if err != nil {
return err
}
s := reflect.ValueOf(v).Elem()
if s.NumField() != len(record) {
return &FieldMismatch{s.NumField(), len(record)}
}
for i := 0; i < s.NumField(); i++ {
f := s.Field(i)
switch f.Type().String() {
case "string":
f.SetString(record[i])
case "int":
ival, err := strconv.ParseInt(record[i], 10, 0)
if err != nil {
return err
}
f.SetInt(ival)
default:
return &UnsupportedType{f.Type().String()}
}
}
return nil
}
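The FieldMismatch and UnsupportedType error types referenced above aren't shown in the snippet; a minimal sketch of what they could look like (assuming strconv is imported):
type FieldMismatch struct {
	expected, found int
}

func (e *FieldMismatch) Error() string {
	return "CSV line fields mismatch. Expected " + strconv.Itoa(e.expected) + " found " + strconv.Itoa(e.found)
}

type UnsupportedType struct {
	Type string
}

func (e *UnsupportedType) Error() string {
	return "Unsupported type: " + e.Type
}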
I'll try to create a GitHub package if someone needs this implementation.
You could bake your own. Perhaps something like this:
package main
import (
"fmt"
"strconv"
"strings"
)
type Test struct {
Name string
Surname string
Age int
}
func (t Test) String() string {
return fmt.Sprintf("%s;%s;%d", t.Name, t.Surname, t.Age)
}
func (t *Test) Parse(in string) {
tmp := strings.Split(in, ";")
t.Name = tmp[0]
t.Surname = tmp[1]
t.Age, _ = strconv.Atoi(tmp[2])
}
func main() {
john := Test{"John", "Smith", 42}
fmt.Printf("john:%v\n", john)
johnString := john.String()
fmt.Printf("johnString:%s\n", johnString)
var rebornJohn Test
rebornJohn.Parse(johnString)
fmt.Printf("rebornJohn:%v\n", rebornJohn)
}
Using csvutil it is possible to give column headers; see the example.
In your case, this could be:
package main
import (
"encoding/csv"
"fmt"
"io"
"os"
"github.com/jszwec/csvutil"
)
type Test struct {
Name string
Surname string
Age int
}
func main() {
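// note: error returns are ignored (_) throughout for brevity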
csv_file, _ := os.Open("test.csv")
reader := csv.NewReader(csv_file)
reader.Comma = ';'
userHeader, _ := csvutil.Header(Test{}, "csv")
dec, _ := csvutil.NewDecoder(reader, userHeader...)
var users []Test
for {
var u Test
if err := dec.Decode(&u); err == io.EOF {
break
}
users = append(users, u)
}
fmt.Println(users)
}