Combining data from multiple cells into one JSON object - json

I am trying to combine data from multiple cells of an Excel spreadsheet into one JSON-encoded string. I can't figure out how to do it; the code below creates a new JSON object per cell. How do I combine the cells into the same JSON string?
package main

import (
    "encoding/json"
    "fmt"

    "github.com/tealeg/xlsx"
)

func main() {
    excelFileName := "/Users/isaacmelton/Desktop/Test_Data.xlsx"
    xlFile, err := xlsx.OpenFile(excelFileName)
    if err != nil {
        fmt.Printf("Cannot parse data")
    }
    for _, sheet := range xlFile.Sheets {
        for _, row := range sheet.Rows {
            fmt.Printf("\n")
            for x, cell := range row.Cells {
                if x == 3 || x == 5 {
                    data := map[string]string{"d_name": cell.String(), "name": cell.String()}
                    json_data, _ := json.Marshal(data)
                    fmt.Println(string(json_data))
                }
            }
        }
    }
}
Running the above code results in the following:
{"d_name":"cell1","name":"cell1"}
{"d_name":"cell2","name":"cell2"}
I expect something like this:
{"d_name":"cell1", "name":"cell2"}

If I understand your request correctly, you just need to define a root element, add the cells to it, and marshal that element rather than the individual cells.
root := []map[string]string{}
for x, cell := range row.Cells {
    if x == 3 || x == 5 {
        root = append(root, map[string]string{"d_name": cell.String(), "name": cell.String()})
    }
}
json_data, _ := json.Marshal(root)
fmt.Println(string(json_data))
http://play.golang.org/p/SHnShHvW_0

You can read the two cells directly:
a, err := row.Cells[3].String()
b, err := row.Cells[5].String()
as in this working code:
package main

import (
    "encoding/json"
    "fmt"

    "github.com/tealeg/xlsx"
)

func main() {
    xlFile, err := xlsx.OpenFile(`Test_Data.xlsx`)
    if err != nil {
        panic(err)
    }
    for _, sheet := range xlFile.Sheets {
        for _, row := range sheet.Rows {
            //for x, cell := range row.Cells {
            //if x == 3 || x == 5 {
            a, err := row.Cells[3].String()
            if err != nil {
                panic(err)
            }
            b, err := row.Cells[5].String()
            if err != nil {
                panic(err)
            }
            data := map[string]string{"d_name": a, "name": b}
            json_data, err := json.Marshal(data)
            if err != nil {
                panic(err)
            }
            fmt.Println(string(json_data))
            //}
            //}
        }
    }
}
output:
{"d_name":"1000","name":"a"}
{"d_name":"2000","name":"b"}
{"d_name":"3000","name":"c"}
{"d_name":"4000","name":"d"}
{"d_name":"5000","name":"e"}
input file content:
1 10 100 1000 10000 a
2 20 200 2000 20000 b
3 30 300 3000 30000 c
4 40 400 4000 40000 d
5 50 500 5000 50000 e

Related

sql.Query truncated or incomplete results

I have the following code:
const qInstances = `
    SELECT
        i.uuid,
        i.host,
        i.hostname
    FROM
        db.instances AS i
    WHERE
        i.deleted_at IS NULL
    GROUP BY i.uuid;
`
...
instancesMap := make(map[string]*models.InstanceModel)
instances := []models.Instance{}
instancesCount := 0
instancesRow, err := db.Query(qInstances)
if err != nil {
    panic(err.Error())
}
defer instancesRow.Close()
for instancesRow.Next() {
    i := models.Instance{}
    err = instancesRow.Scan(&i.UUID, &i.Host, &i.Hostname)
    if err != nil {
        log.Printf("[Error] - While Scanning Instances Rows, error msg: %s\n", err)
        panic(err.Error())
    } else {
        if i.UUID.String != "" {
            instancesCount++
        }
        if _, ok := instancesMap[i.UUID.String]; !ok {
            instancesMap[i.UUID.String] = &models.InstanceModel{}
            inst := instancesMap[i.UUID.String]
            inst.UUID = i.UUID.String
            inst.Host = i.Host.String
            inst.Hostname = i.Hostname.String
        } else {
            inst := instancesMap[i.UUID.String]
            inst.UUID = i.UUID.String
            inst.Host = i.Host.String
            inst.Hostname = i.Hostname.String
        }
        instances = append(instances, i)
    }
}
log.Printf("[Instances] - Total Count: %d\n", instancesCount)
The problem I'm facing is that if I run the SQL query directly against the database (MariaDB) it returns 7150 records, but the total count inside the program is 5196 records. I also checked the SetConnMaxLifetime parameter for the DB connection and set it to 240 seconds, and it doesn't show any errors or broken connectivity between the DB and the program. I also tried paginating (LIMIT of 5000 records each) and issuing two different queries; the first one returns 5000 records, but the second one only 196. I'm using the "github.com/go-sql-driver/mysql" package. Any ideas?
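A common cause of silently truncated result sets with database/sql is skipping the rows.Err() check: Next() simply returns false as soon as the driver hits an error mid-iteration (for example a connection dropped partway through), so the loop ends early without any panic. A minimal check, reusing the names from the code above, might look like this:
for instancesRow.Next() {
    // ... scan and count as above ...
}
// Next() returns false both at end-of-rows and on error; only Err() distinguishes the two.
if err := instancesRow.Err(); err != nil {
    log.Printf("[Error] - row iteration stopped early: %s\n", err)
}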

Multiple rows.Next() at the same time with Go database/sql

I am working on a function that gets users. My problem is that I have to return the results of two queries, iterated with rows.Next() and rows1.Next(), as one result (user_id, subject, phone).
Here is what I have, but it doesn't work. Could you please help me out with some suggestions?
func GetUsers() (users []Users, err error) {
    users = make([]Users, 0)
    rows, err := db1.Query("SELECT user_id, subject, phone FROM users limit 11")
    rows1, err := db1.Query("Select body from users limit 11")
    defer rows.Close()
    if err != nil {
        // handle this error better than this
        log.Fatal(err)
        //return
    }
    var user Users
    for rows.Next() {
        rows.Scan(&user.ID, &user.Subject, &user.Phone)
        users = append(users, user)
    }
    for rows1.Next() {
        rows1.Scan(&user.Body)
        users = append(users, user)
    }
    return
}
I have a table called users which has no primary key; basically:

user_id | name  | body
--------+-------+-----
1       | model | 2
1       | brand | 1
1       | fuel  | 3
1       | date  | 1
1       | year  | 1

I need a result like:

user_id | model | brand | fuel | date | year
--------+-------+-------+------+------+-----
1       | 2     | 1     | 3    | 1    | 1

so the values in the name column become columns in the result. I have already defined my struct, which contains all the fields I need.
Looking at the example code, it's clear that you should do whatever you're doing in one query instead of two. Even if your real code is a little different (say you need to query more than one table), you should still probably do a single query using JOINs.
users := make([]*User, 0)
rows, err := db1.Query("SELECT user_id, subject, phone, body FROM users limit 11")
if err != nil {
    log.Fatal(err)
}
defer rows.Close()
for rows.Next() {
    user := new(User)
    if err := rows.Scan(&user.ID, &user.Subject, &user.Phone, &user.Body); err != nil {
        panic(err)
    }
    users = append(users, user)
}
if err := rows.Err(); err != nil {
    panic(err)
}
That said, if you want to do it the way you illustrated in your question, you can do it like this (though this is probably not what you should do):
users := make([]*User, 0)
rows, err := db1.Query("SELECT user_id, subject, phone FROM users limit 11")
if err != nil {
    log.Fatal(err)
}
defer rows.Close()
rows1, err := db1.Query("Select body from users limit 11")
if err != nil {
    panic(err)
}
defer rows1.Close()
for rows.Next() {
    user := new(User)
    if err := rows.Scan(&user.ID, &user.Subject, &user.Phone); err != nil {
        panic(err)
    }
    if !rows1.Next() {
        panic("no next body row")
    }
    if err := rows1.Scan(&user.Body); err != nil {
        panic(err)
    }
    users = append(users, user)
}
if err := rows.Err(); err != nil {
    panic(err)
}
if err := rows1.Err(); err != nil {
    panic(err)
}
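The following variation reuses a pool of *User values across calls via sync.Pool and scans each row through a small GetScans helper: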
const maxPerPage = 100

type Scanable interface {
    GetScans() []interface{}
}

type User struct {
    ID      int
    Subject string
    Phone   string
    Body    string
}

// GetScans returns pointers to the User's fields, in column order, for rows.Scan.
func (s *User) GetScans() []interface{} {
    return []interface{}{&s.ID, &s.Subject, &s.Phone, &s.Body}
}

func getNewUserList() []*User {
    users := make([]*User, maxPerPage)
    for i := 0; i < maxPerPage; i++ {
        users[i] = new(User)
    }
    return users
}

var usersPool = sync.Pool{
    New: func() interface{} {
        return getNewUserList()
    },
}

func getUsers() (b []*User) {
    ifc := usersPool.Get()
    if ifc != nil {
        b = ifc.([]*User)
    } else {
        b = getNewUserList()
    }
    return
}

func putUsers(b []*User) {
    // if cap(b) <= maxPerPage {
    //     b = b[:0] // reset
    //     usersPool.Put(b)
    // }
    usersPool.Put(b) // store the slice itself so getUsers' type assertion ([]*User) holds
}

func TestUsers(t *testing.T) {
    users := getUsers()
    rows, err := PostgresConnection.Query("SELECT user_id, subject, phone, body FROM users limit 11")
    if err != nil {
        log.Fatal(err)
    }
    defer rows.Close()
    length := 0
    for rows.Next() {
        // Scan into the next pre-allocated User from the pool.
        if err := rows.Scan(users[length].GetScans()...); err != nil {
            panic(err)
        }
        length++
    }
    if err := rows.Err(); err != nil {
        panic(err)
    }
    result := users[:length]
    fmt.Println(result)
    putUsers(users)
}
This is a sample function to get multiple documents from the database using the sqlx library:
func GetDocuments() ([]SupportedDocument, error) {
    var documents []SupportedDocument
    query := `SELECT * FROM documents limit 10`
    err := database.Select(&documents, query)
    return documents, err
}
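For the pivot the question actually asks about (turning name/body rows into per-user columns), none of the snippets above builds that shape directly. A rough sketch, assuming body is an integer and reusing the question's db1 handle, could collect one map per user_id:
// Pivot name/body rows into one map per user, e.g. pivot[1] == {"model": 2, "brand": 1, ...}.
pivot := make(map[int]map[string]int)
rows, err := db1.Query("SELECT user_id, name, body FROM users")
if err != nil {
    log.Fatal(err)
}
defer rows.Close()
for rows.Next() {
    var id, body int
    var name string
    if err := rows.Scan(&id, &name, &body); err != nil {
        panic(err)
    }
    if pivot[id] == nil {
        pivot[id] = make(map[string]int)
    }
    pivot[id][name] = body
}
if err := rows.Err(); err != nil {
    panic(err)
}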

Function in Go to execute select query on database and return json output

I am writing a function in Go to execute a select query on a database.
Input: a string, e.g. "Select id, name, age from sometable". This query changes every time.
Output: the output of the select query in JSON format.
Sample expected output: {"Data":[{"id":1,"name":"abc", "age":40},{"id":2,"name":"xyz", "age":45}]}
Sample actual output: {"Data":[[1,"abc",40],[2,"xyz",45]]}
Instead of column_name:value pairs, I get only the values. How do I get the expected output?
func executeSQL(queryStr string) []byte {
    connString := createConnectString()
    conn, err := sql.Open("mssql", connString)
    if err != nil {
        log.Fatal("Error while opening database connection:", err.Error())
    }
    defer conn.Close()
    rows, err := conn.Query(queryStr)
    if err != nil {
        log.Fatal("Query failed:", err.Error())
    }
    defer rows.Close()
    columns, _ := rows.Columns()
    count := len(columns)
    var v struct {
        Data []interface{} // `json:"data"`
    }
    for rows.Next() {
        values := make([]interface{}, count)
        valuePtrs := make([]interface{}, count)
        for i := range columns {
            valuePtrs[i] = &values[i]
        }
        if err := rows.Scan(valuePtrs...); err != nil {
            log.Fatal(err)
        }
        v.Data = append(v.Data, values)
    }
    jsonMsg, err := json.Marshal(v)
    return jsonMsg
}
Got the solution. Here is what I did.
func executeSQL(queryStr string) []byte {
    connString := createConnectString()
    conn, err := sql.Open("mssql", connString)
    if err != nil {
        log.Fatal("Error while opening database connection:", err.Error())
    }
    defer conn.Close()
    rows, err := conn.Query(queryStr)
    if err != nil {
        log.Fatal("Query failed:", err.Error())
    }
    defer rows.Close()
    columns, _ := rows.Columns()
    count := len(columns)
    var v struct {
        Data []interface{} // `json:"data"`
    }
    for rows.Next() {
        values := make([]interface{}, count)
        valuePtrs := make([]interface{}, count)
        for i := range columns {
            valuePtrs[i] = &values[i]
        }
        if err := rows.Scan(valuePtrs...); err != nil {
            log.Fatal(err)
        }
        // Created a map keyed by column name to handle the issue
        m := make(map[string]interface{})
        for i := range columns {
            m[columns[i]] = values[i]
        }
        v.Data = append(v.Data, m)
    }
    jsonMsg, err := json.Marshal(v)
    return jsonMsg
}
Let me know if there exists a better solution.
This code is directly from my "sandbox" for MSSQL (using denisenkom/go-mssqldb and jmoiron/sqlx). I think it helps to show different approaches, and QueryIntoMap is probably what you were looking for:
package main

import (
    "encoding/json"
    "fmt"
    "log"
    "time"

    _ "github.com/denisenkom/go-mssqldb"
    "github.com/jmoiron/sqlx"
)

type Customer struct {
    CustomerId string      `db:"customerID" json:"customer_id"`
    Company    interface{} `db:"companyName" json:"company_name"`
    Contact    interface{} `db:"contactName" json:"contact_name"`
}

func main() {
    connection := "server=192.168.55.3\\SqlExpress2012;database=Northwind;user id=me;Password=secret"
    //QueryIntoMap(connection)
    ScanIntoSlice(connection)
}

func QueryIntoMap(connection string) {
    fmt.Println("QueryIntoMap sample")
    fmt.Println("--------------------")
    sel := `select customerId, companyName, contactName
            from customers
            where customerId = :id`
    values := make(map[string]interface{})
    db, err := sqlx.Open("mssql", connection)
    //db.MapperFunc(strings.ToUpper)
    e(err)
    defer db.Close()
    tx := db.MustBegin()
    stmt, err := tx.PrepareNamed(sel)
    e(err)
    stmt.QueryRowx(map[string]interface{}{"id": "BONAP"}).MapScan(values)
    tx.Commit()
    for k, v := range values {
        fmt.Printf("%s %v\n", k, v)
    }
    js, err := json.Marshal(values)
    if err != nil {
        fmt.Println(err)
    }
    fmt.Println(string(js))
    fmt.Println("--------------------")
}

func ScanIntoStruct(connection string) {
    fmt.Println("Scan into struct sample")
    fmt.Println("--------------------")
    db, err := sqlx.Open("mssql", connection)
    e(err)
    defer db.Close()
    customer := Customer{}
    rows, err := db.Queryx(`select customerID, companyName, contactName
                            from Customers`)
    for rows.Next() {
        err = rows.StructScan(&customer)
        if err != nil {
            log.Fatalln(err)
        }
        //fmt.Printf("%#v\n", user)
        fmt.Printf("%-10s %-50v %-50v\n",
            customer.CustomerId,
            customer.Company,
            customer.Contact)
        js, err := json.Marshal(customer)
        e(err)
        fmt.Println(string(js))
    }
    fmt.Println("--------------------")
}

func ScanIntoSlice(connection string) {
    fmt.Println("Scan into slice sample")
    fmt.Println("--------------------")
    start := time.Now()
    db, err := sqlx.Open("mssql", connection)
    e(err)
    defer db.Close()
    customers := []Customer{}
    err = db.Select(&customers, `select customerID, companyName, contactName from customers`)
    e(err)
    for i, customer := range customers {
        fmt.Printf("%3d. %-10s %-50v %-50v\n",
            i,
            customer.CustomerId,
            customer.Company,
            customer.Contact)
    }
    js, err := json.Marshal(customers)
    e(err)
    fmt.Println(string(js))
    fmt.Printf("%s", time.Since(start))
    fmt.Println("--------------------")
}

func e(err error) {
    if err != nil {
        log.Fatal(err)
    }
}

Is there a better way to marshal sql rows?

I have the following struct:
type MyTable struct {
    DBColA []byte `db:"cola" json:"-"`
    ColA   string `json:"cola"`
    DBColB []byte `db:"colb" json:"-"`
    ColB   string `json:"colb"`
}
I map to []byte to better handle NULL values in my SQL (see https://github.com/go-sql-driver/mysql/wiki/Examples).
When I grab the rows I need to output them as JSON. In order to do that I convert []byte to string:
var rows []*MyTable
if _, err := Session.Select(&rows, sql, args...); err != nil {
    log.Println(err)
}
for _, row := range rows {
    row.ColA = string(row.DBColA)
    row.ColB = string(row.DBColB)
}
w.Header().Set("Content-Type", "application/json")
if err := json.NewEncoder(w).Encode(rows); err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
}
It seems very inefficient to have both DBColA and ColA in my struct and then convert DBColA to a string... I have a lot of columns. Is there a better way?
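One alternative, sketched here as an illustration rather than a drop-in fix: wrap sql.NullString in a type that implements json.Marshaler, so each nullable column needs only one struct field. The NullString name and the choice to render NULL as an empty string are assumptions for the example (it needs "database/sql" and "encoding/json"):
// NullString is a hypothetical wrapper: it scans like sql.NullString
// but marshals NULL as an empty JSON string.
type NullString struct {
    sql.NullString
}

func (ns NullString) MarshalJSON() ([]byte, error) {
    if !ns.Valid {
        return []byte(`""`), nil
    }
    return json.Marshal(ns.String)
}

type MyTable struct {
    ColA NullString `db:"cola" json:"cola"`
    ColB NullString `db:"colb" json:"colb"`
}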
Have you tried gosqljson (https://github.com/elgs/gosqljson)?
See example:
```go
package main

import (
    "database/sql"
    "fmt"

    "github.com/elgs/gosqljson"
    _ "github.com/go-sql-driver/mysql"
)

func main() {
    ds := "username:password@tcp(host:3306)/db"
    db, err := sql.Open("mysql", ds)
    defer db.Close()
    if err != nil {
        fmt.Println("sql.Open:", err)
    }
    theCase := "lower" // "lower" (default), "upper", "camel"
    a, _ := gosqljson.QueryDbToArrayJson(db, theCase, "SELECT ID,NAME FROM t LIMIT ?,?", 0, 3)
    fmt.Println(a)
    // [["id","name"],["0","Alicia"],["1","Brian"],["2","Chloe"]]
    m, _ := gosqljson.QueryDbToMapJson(db, theCase, "SELECT ID,NAME FROM t LIMIT ?,?", 0, 3)
    fmt.Println(m)
    // [{"id":"0","name":"Alicia"},{"id":"1","name":"Brian"},{"id":"2","name":"Chloe"}]
}
```

Dumping MySQL tables to JSON with Golang

I was putting together a quick MySQL-to-JSON dumper in Go. However, I find that everything I retrieve from the database is a []byte array, so instead of native JSON integers or booleans, everything is encoded as strings.
Subset of the code:
import (
    "database/sql"
    "encoding/json"
    "fmt"
    "io"
    "reflect"
    "time"

    _ "github.com/go-sql-driver/mysql"
)

func dumpTable(w io.Writer, table string) {
    // ...
    rows, err := Query(db, fmt.Sprintf("SELECT * FROM %s", table))
    checkError(err)
    columns, err := rows.Columns()
    checkError(err)
    scanArgs := make([]interface{}, len(columns))
    values := make([]interface{}, len(columns))
    for i := range values {
        scanArgs[i] = &values[i]
    }
    for rows.Next() {
        err = rows.Scan(scanArgs...)
        checkError(err)
        record := make(map[string]interface{})
        for i, col := range values {
            if col != nil {
                fmt.Printf("\n%s: type= %s\n", columns[i], reflect.TypeOf(col))
                switch t := col.(type) {
                default:
                    fmt.Printf("Unexpected type %T\n", t)
                case bool:
                    fmt.Printf("bool\n")
                    record[columns[i]] = col.(bool)
                case int:
                    fmt.Printf("int\n")
                    record[columns[i]] = col.(int)
                case int64:
                    fmt.Printf("int64\n")
                    record[columns[i]] = col.(int64)
                case float64:
                    fmt.Printf("float64\n")
                    record[columns[i]] = col.(float64)
                case string:
                    fmt.Printf("string\n")
                    record[columns[i]] = col.(string)
                case []byte: // -- all cases go HERE!
                    fmt.Printf("[]byte\n")
                    record[columns[i]] = string(col.([]byte))
                case time.Time:
                    // record[columns[i]] = col.(string)
                }
            }
        }
        s, _ := json.Marshal(record)
        w.Write(s)
        io.WriteString(w, "\n")
    }
}
I also needed to dump database tables to JSON, and here is how I achieved it (unlike another answer in this topic, not everything is a string; thanks to the answer at https://stackoverflow.com/a/17885636/4124416, I could get integer fields correctly):
func getJSON(sqlString string) (string, error) {
    rows, err := db.Query(sqlString)
    if err != nil {
        return "", err
    }
    defer rows.Close()
    columns, err := rows.Columns()
    if err != nil {
        return "", err
    }
    count := len(columns)
    tableData := make([]map[string]interface{}, 0)
    values := make([]interface{}, count)
    valuePtrs := make([]interface{}, count)
    for rows.Next() {
        for i := 0; i < count; i++ {
            valuePtrs[i] = &values[i]
        }
        rows.Scan(valuePtrs...)
        entry := make(map[string]interface{})
        for i, col := range columns {
            var v interface{}
            val := values[i]
            b, ok := val.([]byte)
            if ok {
                v = string(b)
            } else {
                v = val
            }
            entry[col] = v
        }
        tableData = append(tableData, entry)
    }
    jsonData, err := json.Marshal(tableData)
    if err != nil {
        return "", err
    }
    fmt.Println(string(jsonData))
    return string(jsonData), nil
}
Here is a sample output:
[{"ID":0,"Text":"Zero"},{"ID":1,"Text":"One"},{"ID":2,"Text":"Two"}]
You need to use prepared statements to get the native types. MySQL has two protocols: one transmits everything as text, the other as the "real" types, and that binary protocol is only used when you use prepared statements. See https://github.com/go-sql-driver/mysql/issues/407
The getJSON function below is correct:
func getJSON(sqlString string) (string, error) {
    stmt, err := db.Prepare(sqlString)
    if err != nil {
        return "", err
    }
    defer stmt.Close()
    rows, err := stmt.Query()
    if err != nil {
        return "", err
    }
    defer rows.Close()
    columns, err := rows.Columns()
    if err != nil {
        return "", err
    }
    tableData := make([]map[string]interface{}, 0)
    count := len(columns)
    values := make([]interface{}, count)
    scanArgs := make([]interface{}, count)
    for i := range values {
        scanArgs[i] = &values[i]
    }
    for rows.Next() {
        err := rows.Scan(scanArgs...)
        if err != nil {
            return "", err
        }
        entry := make(map[string]interface{})
        for i, col := range columns {
            v := values[i]
            b, ok := v.([]byte)
            if ok {
                entry[col] = string(b)
            } else {
                entry[col] = v
            }
        }
        tableData = append(tableData, entry)
    }
    jsonData, err := json.Marshal(tableData)
    if err != nil {
        return "", err
    }
    return string(jsonData), nil
}
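A usage sketch (the table and column names are made up; db is the package-level *sql.DB the function assumes):
jsonStr, err := getJSON("SELECT id, text FROM t LIMIT 3")
if err != nil {
    log.Fatal(err)
}
fmt.Println(jsonStr) // e.g. [{"ID":0,"Text":"Zero"},{"ID":1,"Text":"One"},{"ID":2,"Text":"Two"}]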
There's not much you can do, because the interaction between the driver and database/sql is pretty much a one-way street, and the driver can't help you with anything once the data is handed over to database/sql.
You could try your luck with http://godoc.org/github.com/arnehormann/sqlinternals/mysqlinternals:
1. Query the database.
2. Retrieve the Column slice with cols, err := mysqlinternals.Columns(rows).
3. Create a new var values := make([]interface{}, len(cols)) and iterate over cols.
4. Get the closest matching Go type per column with refType, err := cols[i].ReflectGoType().
5. Create type placeholders with values[i] = reflect.Zero(refType).Interface().
6. rows.Next() and err = rows.Scan(values...). Don't recreate values; copy and reuse it.
I guess this will still be pretty slow, but you should be able to get somewhere with it (a rough sketch follows below).
If you encounter problems, please file an issue; I'll get to it as soon as I can.
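A rough sketch of those steps, assuming the mysqlinternals API is as described above; it uses reflect.New rather than reflect.Zero so that Scan receives pointers it can write into:
rows, err := db.Query("SELECT * FROM sometable") // hypothetical query
if err != nil {
    log.Fatal(err)
}
defer rows.Close()
cols, err := mysqlinternals.Columns(rows)
if err != nil {
    log.Fatal(err)
}
// One typed, reusable scan target per column.
scanTargets := make([]interface{}, len(cols))
for i := range cols {
    refType, err := cols[i].ReflectGoType()
    if err != nil {
        log.Fatal(err)
    }
    scanTargets[i] = reflect.New(refType).Interface()
}
for rows.Next() {
    if err := rows.Scan(scanTargets...); err != nil {
        log.Fatal(err)
    }
    // Each scanTargets[i] now points to a typed value for this row;
    // dereference with reflect.ValueOf(scanTargets[i]).Elem().Interface().
}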
I have a table named users in the practice_db database. I have included the table structure and data in the following program, which converts the users table into JSON format.
You can also check the source code at https://gist.github.com/hygull/1725442b0f121a5fc17b28e04796714d.
/**
{
"created_on": "26 may 2017",
"todos": [
"go get github.com/go-sql-driver/mysql",
"postman(optional)",
"browser(optional)",
],
"aim": "Reading fname column into []string(slice of strings)"
}
*/
/*
mysql> select * from users;
+----+-----------+----------+----------+-------------------------------+--------------+-------------------------------------------------------------------------------------------------+
| id | fname | lname | uname | email | contact | profile_pic |
+----+-----------+----------+----------+-------------------------------+--------------+-------------------------------------------------------------------------------------------------+
| 1 | Rishikesh | Agrawani | hygull | rishikesh0014051992@gmail.com | 917353787704 | https://cdn4.iconfinder.com/data/icons/rcons-user/32/user_group_users_accounts_contacts-512.png |
| 2 | Sandeep | E | sandeep | sandeepeswar8@gmail.com | 919739040038 | https://cdn4.iconfinder.com/data/icons/eldorado-user/40/user-512.png |
| 3 | Darshan | Sidar | darshan | sidardarshan@gmail.com | 917996917565 | https://cdn4.iconfinder.com/data/icons/rcons-user/32/child_boy-512.png |
| 4 | Surendra | Prajapat | surendra | surendrakgadwal@gmail.com | 918385894407 | https://cdn4.iconfinder.com/data/icons/rcons-user/32/account_male-512.png |
| 5 | Mukesh | Jakhar | mukesh | mjakhar.kjakhar@gmail.com | 919772254140 | https://cdn2.iconfinder.com/data/icons/rcons-user/32/male-circle-512.png |
+----+-----------+----------+----------+-------------------------------+--------------+-------------------------------------------------------------------------------------------------+
5 rows in set (0.00 sec)
mysql>
*/
package main

import (
    "database/sql"
    "encoding/json"
    "log"
    "net/http"

    _ "github.com/go-sql-driver/mysql"
)

func users(w http.ResponseWriter, r *http.Request) {
    // db, err := sql.Open("mysql", "<username>:<password>@tcp(127.0.0.1:<port>)/<dbname>?charset=utf8")
    db, err := sql.Open("mysql", "hygull:admin#67@tcp(127.0.0.1:3306)/practice_db?charset=utf8")
    w.Header().Set("Content-Type", "application/json")
    if err != nil {
        log.Fatal(err)
    }
    rows, err := db.Query("select id, fname, lname, uname, email, contact, profile_pic from users")
    if err != nil {
        log.Fatal(err)
    }
    type User struct {
        Id         int    `json:"id"`
        Fname      string `json:"firstname"`
        Lname      string `json:"lastname"`
        Uname      string `json:"username"`
        Email      string `json:"email"`
        Contact    int    `json:"contact"`
        ProfilePic string `json:"profile_pic"`
    }
    var users []User
    for rows.Next() {
        var id, contact int
        var fname string
        var lname string
        var uname, email, profile_pic string
        rows.Scan(&id, &fname, &lname, &uname, &email, &contact, &profile_pic)
        users = append(users, User{id, fname, lname, uname, email, contact, profile_pic})
    }
    usersBytes, _ := json.Marshal(&users)
    w.Write(usersBytes)
    db.Close()
}

func main() {
    http.HandleFunc("/users/", users)
    http.ListenAndServe(":8080", nil)
}
/* REQUEST
http://127.0.0.1:8080/users/
*/
/* RESPONSE
[
    {
        "id": 1,
        "firstname": "Rishikesh",
        "lastname": "Agrawani",
        "username": "hygull",
        "email": "rishikesh0014051992@gmail.com",
        "contact": 917353787704,
        "profile_pic": "https://cdn4.iconfinder.com/data/icons/rcons-user/32/user_group_users_accounts_contacts-512.png"
    },
    {
        "id": 2,
        "firstname": "Sandeep",
        "lastname": "E",
        "username": "sandeep",
        "email": "sandeepeswar8@gmail.com",
        "contact": 919739040038,
        "profile_pic": "https://cdn4.iconfinder.com/data/icons/eldorado-user/40/user-512.png"
    },
    {
        "id": 3,
        "firstname": "Darshan",
        "lastname": "Sidar",
        "username": "darshan",
        "email": "sidardarshan@gmail.com",
        "contact": 917996917565,
        "profile_pic": "https://cdn4.iconfinder.com/data/icons/rcons-user/32/child_boy-512.png"
    },
    {
        "id": 4,
        "firstname": "Surendra",
        "lastname": "Prajapat",
        "username": "surendra",
        "email": "surendrakgadwal@gmail.com",
        "contact": 918385894407,
        "profile_pic": "https://cdn4.iconfinder.com/data/icons/rcons-user/32/account_male-512.png"
    },
    {
        "id": 5,
        "firstname": "Mukesh",
        "lastname": "Jakhar",
        "username": "mukesh",
        "email": "mjakhar.kjakhar@gmail.com",
        "contact": 919772254140,
        "profile_pic": "https://cdn2.iconfinder.com/data/icons/rcons-user/32/male-circle-512.png"
    }
]
*/
Based on the answers here, this is the most efficient code I could come up with. Note that it outputs each row as a separate JSON array to avoid repeating the key names.
// OutputJSONMysqlRowsStream outputs rows as a JSON array stream to save RAM and output size due to key name repetition
func OutputJSONMysqlRowsStream(writer http.ResponseWriter, rows *sql.Rows) {
    defer rows.Close()
    columns, err := rows.Columns()
    if err != nil {
        OutputJSONError(writer, "Failed to get column names")
        return
    }
    jsonColumns, err := json.Marshal(columns)
    if err != nil {
        OutputJSONError(writer, "Failed to encode json of column names")
        return
    }
    writer.Header().Set("Content-Type", "application/cal-json-stream; charset=utf-8")
    fmt.Fprintln(writer, "{\"status\": \"done\", \"data\":{ \"json_stream_fields\":"+string(jsonColumns)+"}}")
    columnCount := len(columns)
    rowDataHolder := make([]interface{}, columnCount)
    rowDataHolderPointers := make([]interface{}, columnCount)
    if err != nil {
        log.Println(err)
    }
    for rows.Next() {
        for i := 0; i < columnCount; i++ {
            rowDataHolderPointers[i] = &rowDataHolder[i]
        }
        err := rows.Scan(rowDataHolderPointers...)
        if err != nil {
            log.Println(err)
        } else {
            for i, value := range rowDataHolder {
                tempValue, ok := value.([]byte)
                if ok {
                    rowDataHolder[i] = string(tempValue)
                }
            }
            jsonEncoder := json.NewEncoder(writer)
            err = jsonEncoder.Encode(rowDataHolder)
            if err != nil {
                log.Println(err)
            }
        }
    }
}
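A hypothetical usage sketch (the handler name, query, and db variable are illustrative; OutputJSONError is assumed to be the same helper used above):
func usersHandler(w http.ResponseWriter, r *http.Request) {
    rows, err := db.Query("SELECT id, name FROM users") // db: a package-level *sql.DB, assumed
    if err != nil {
        OutputJSONError(w, "query failed")
        return
    }
    OutputJSONMysqlRowsStream(w, rows) // the stream function closes rows via its deferred rows.Close()
}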
You can dump the table into JSON just fine; however, everything will be a string :(
q := "select * from table"
debug("SQL: %s", q)
rows, err := db.Query(q)
checkError(err)
defer rows.Close()
columns, err := rows.Columns()
checkError(err)
scanArgs := make([]interface{}, len(columns))
values := make([]interface{}, len(columns))
for i := range values {
    scanArgs[i] = &values[i]
}
for rows.Next() {
    err = rows.Scan(scanArgs...)
    checkError(err)
    record := make(map[string]interface{})
    for i, col := range values {
        if col != nil {
            record[columns[i]] = fmt.Sprintf("%s", string(col.([]byte)))
        }
    }
    s, _ := json.Marshal(record)
    fmt.Printf("%s\n", s)
}