Merge two queries from two tables as one struct - MySQL

I have a struct called Ads
type Ads struct {
ID int `json:"id"`
Subject string `json:"subject"`
Phone string `json:"phone"`
}
func GetAdsPostgres() (ads []Ads, err error) {
ads = make([]Ads, 0)
rows, err := db1.Query("Select ad_id, subject FROM ads limit 200 ")
for rows.Next() {
var ad Ads
rows.Scan(&ad.ID, &ad.Subject)
test := reflect.ValueOf(ad.ID)
addd := test.Interface().(int)
rows1, _ := db1.Query("Select phone FROM ads where ad_id=$1", addd)
rows1.Scan(&ad.Phone)
ads = append(ads, ad)
rows1.Close()
}
if err = rows.Err(); err != nil {
return
}
return
}
I can't simply join the tables, because the phone column has multiple enumeration values which I have to reshape into fields afterwards.

Just get the phone number too in the first query; don't repeat yourself.
db1.Query("Select ad_id, subject, phone FROM ads limit 200 ")

Related

How can I check if my Db.Query returns no rows?

The following is my code to get multiple rows from the db, and it works.
defer db.Close()
for rows.Next() {
err = rows.Scan(&a)
if err != nil {
log(err)
}
}
How can I check if the result contains no rows at all?
I even tried the following:
if err == sql.ErrNoRows {
fmt.Print("No rows")
}
and I also checked while scanning:
err = rows.Scan(&a)
if err == sql.ErrNoRows {
fmt.Print("No rows")
}
I don't understand which one returns ErrNoRows: *Rows, err, or Scan?
QueryRow returns a *Row (not a *Rows) and you cannot iterate through the results (because it's only expecting a single row back). This means that rows.Scan in your example code will not compile.
If you expect your SQL query to return a single result (e.g. you are running a count() or selecting using an ID) then use QueryRow; for example (modified from here):
id := 43
var username string
err = db.QueryRow("SELECT username FROM users WHERE id = ?", id).Scan(&username)
switch {
case err == sql.ErrNoRows:
log.Fatalf("no user with id %d", id)
case err != nil:
log.Fatal(err)
default:
log.Printf("username is %s\n", username)
}
If you are expecting multiple rows then use Query() for example (modified from here):
age := 27
rows, err := db.Query("SELECT name FROM users WHERE age=?", age)
if err != nil {
log.Fatal(err)
}
defer rows.Close()
names := make([]string, 0)
for rows.Next() {
var name string
if err := rows.Scan(&name); err != nil {
log.Fatal(err)
}
names = append(names, name)
}
// Check for errors from iterating over rows.
if err := rows.Err(); err != nil {
log.Fatal(err)
}
// Check for no results
if len(names) == 0 {
log.Fatal("No Results")
}
log.Printf("%s are %d years old", strings.Join(names, ", "), age)
The above shows one way of checking if there are no results. If you are not putting the results into a slice/map then you can keep a counter or set a boolean within the loop. Note that no error will be returned if there are no results (because this is a perfectly valid outcome) and the SQL package provides no way to check the number of results other than iterate through them (if all you are interested in is the number of results then run select count(*)...).
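For instance, if you only need to know whether anything came back at all, a boolean flag inside the loop is enough (a small sketch along the lines of the code above):
```golang
found := false
for rows.Next() {
	var name string
	if err := rows.Scan(&name); err != nil {
		log.Fatal(err)
	}
	found = true
	// ... use name ...
}
if err := rows.Err(); err != nil {
	log.Fatal(err)
}
if !found {
	log.Print("No results")
}
```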

Multiple One to Many Relations in GORM

I have a struct definition in Go like this:
package models
//StoryStatus indicates the current state of the story
type StoryStatus string
const (
//Progress indicates a story is currently being written
Progress StoryStatus = "progress"
//Completed indicates a story was completed
Completed StoryStatus = "completed"
)
//Story holds the details of a story
type Story struct {
ID int
Title string `gorm:"type:varchar(100);unique_index"`
Status StoryStatus `sql:"type ENUM('progress', 'completed');default:'progress'"`
Paragraphs []Paragraph `gorm:"ForeignKey:StoryID"`
}
//Paragraph is linked to a story
//A story can have a configurable number of paragraphs
type Paragraph struct {
ID int
StoryID int
Sentences []Sentence `gorm:"ForeignKey:ParagraphID"`
}
//Sentences are linked to a paragraph
//A paragraph can have a configurable number of sentences
type Sentence struct {
ID uint
Value string
Status bool
ParagraphID uint
}
I am using GORM as the ORM in Go.
How do I fetch all the information for a story based on its ID, i.e. all the paragraphs and all the sentences of each paragraph?
The GORM examples only show how to use Preload with two models.
This is what you're looking for:
db, err := gorm.Open("mysql", "user:password@/dbname?charset=utf8&parseTime=True&loc=Local")
defer db.Close()
story := &Story{}
db.Preload("Paragraphs").Preload("Paragraphs.Sentences").First(story, 1)
It finds the story with the id = 1 and preloads its relationships
fmt.Printf("%+v\n", story)
This prints out the result nicely for you
Side note:
You can turn on log mode of Gorm so that you can see the underlying queries, to debug, or any other purposes:
db.LogMode(true)
Looking at this [example][1], this is a one-to-many relation.
package main
import (
"log"
"github.com/jinzhu/gorm"
_ "github.com/jinzhu/gorm/dialects/sqlite"
"github.com/kylelemons/godebug/pretty"
)
// Order
type Order struct {
gorm.Model
Status string
OrderItems []OrderItem
}
// Order line item
type OrderItem struct {
gorm.Model
OrderID uint
ItemID uint
Item Item
Quantity int
}
// Product
type Item struct {
gorm.Model
ItemName string
Amount float32
}
var (
items = []Item{
{ItemName: "Go Mug", Amount: 12.49},
{ItemName: "Go Keychain", Amount: 6.95},
{ItemName: "Go Tshirt", Amount: 17.99},
}
)
func main() {
db, err := gorm.Open("sqlite3", "/tmp/gorm.db")
db.LogMode(true)
if err != nil {
log.Panic(err)
}
defer db.Close()
// Migrate the schema
db.AutoMigrate(&OrderItem{}, &Order{}, &Item{})
// Create Items
for index := range items {
db.Create(&items[index])
}
order := Order{Status: "pending"}
db.Create(&order)
item1 := OrderItem{OrderID: order.ID, ItemID: items[0].ID, Quantity: 1}
item2 := OrderItem{OrderID: order.ID, ItemID: items[1].ID, Quantity: 4}
db.Create(&item1)
db.Create(&item2)
// Query with joins
rows, err := db.Table("orders").Where("orders.id = ? and status = ?", order.ID, "pending").
Joins("Join order_items on order_items.order_id = orders.id").
Joins("Join items on items.id = order_items.id").
Select("orders.id, orders.status, order_items.order_id, order_items.item_id, order_items.quantity" +
", items.item_name, items.amount").Rows()
if err != nil {
log.Panic(err)
}
defer rows.Close()
// Values to load into
newOrder := &Order{}
newOrder.OrderItems = make([]OrderItem, 0)
for rows.Next() {
orderItem := OrderItem{}
item := Item{}
err = rows.Scan(&newOrder.ID, &newOrder.Status, &orderItem.OrderID, &orderItem.ItemID, &orderItem.Quantity, &item.ItemName, &item.Amount)
if err != nil {
log.Panic(err)
}
orderItem.Item = item
newOrder.OrderItems = append(newOrder.OrderItems, orderItem)
}
log.Print(pretty.Sprint(newOrder))
}
[1]: https://stackoverflow.com/questions/35821810/golang-gorm-one-to-many-with-has-one
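For comparison, the same object graph can usually be loaded with Preload instead of a manual join, as in the first answer; a sketch using the models above:
```golang
loaded := &Order{}
if err := db.Preload("OrderItems").Preload("OrderItems.Item").First(loaded, order.ID).Error; err != nil {
	log.Panic(err)
}
log.Print(pretty.Sprint(loaded))
```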

Multiple rows.Next() at the same time - golang sql/database

I am working on a function which gets users. My problem is that I have to return the results of two queries (rows.Next() and rows1.Next()) as one result, like user_id, subject, phone.
Here is what I have, but it doesn't work. Could you please help me out with some suggestions:
func GetUsers() (users []Users, err error) {
users = make([]Users, 0)
rows, err := db1.Query("SELECT user_id, subject,phone FROM users limit 11")
rows1, err := db1.Query("Select body from users limit 11")
defer rows.Close()
if err != nil {
// handle this error better than this
log.Fatal(err)
//return
}
var user Users
for rows.Next() {
rows.Scan(&user.ID, &user.Subject, &user.Phone)
users = append(users, user)
}
for rows1.Next() {
rows1.Scan(&user.Body)
users = append(users, user)
}
return
}
I have a table called users which has no primary key, basically:
user_id | name  | body
--------+-------+-----
1       | model | 2
1       | brand | 1
1       | fuel  | 3
1       | date  | 1
1       | year  | 1
I need to end up with a result like:
user_id | model | brand | fuel | date | year
--------+-------+-------+------+------+-----
1       | 2     | 1     | 3    | 1    | 1
so the values in the name column become columns in the result. I have already defined my struct, which contains all the fields needed.
Looking at the example code it's clear that you should do whatever you're doing in one query instead of two. Even if your real code is a little different, say you need to query more than one table, you should still probably do only one query using JOINs.
users := make([]*User, 0)
rows, err := db1.Query("SELECT user_id, subject,phone, body FROM users limit 11")
if err != nil {
log.Fatal(err)
}
defer rows.Close()
for rows.Next() {
user := new(User)
if err := rows.Scan(&user.ID, &user.Subject, &user.Phone, &user.Body); err != nil {
panic(err)
}
users = append(users, user)
}
if err := rows.Err(); err != nil {
panic(err)
}
That said, if you want to do it the way you illustrated in your question, you can do that like this.
(This is probably not what you should do.)
users := make([]*User, 0)
rows, err := db1.Query("SELECT user_id, subject,phone FROM users limit 11")
if err != nil {
log.Fatal(err)
}
defer rows.Close()
rows1, err := db1.Query("Select body from users limit 11")
if err != nil {
panic(err)
}
defer rows1.Close()
for rows.Next() {
user := new(User)
if err := rows.Scan(&user.ID, &user.Subject, &user.Phone); err != nil {
panic(err)
}
if !rows1.Next() {
panic("no next body row")
}
if err := rows1.Scan(&user.Body); err != nil {
panic(err)
}
users = append(users, user)
}
if err := rows.Err(); err != nil {
panic(err)
}
if err := rows1.Err(); err != nil {
panic(err)
}
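Neither version above does the pivot described in the question (turning the name/body rows into per-user fields). A rough sketch of one way to do that, assuming a hypothetical pivoted struct and that the name values are exactly the labels shown in the sample table:
```golang
// UserAttrs is a hypothetical struct for the pivoted result.
type UserAttrs struct {
	UserID int
	Model  int
	Brand  int
	Fuel   int
	Date   int
	Year   int
}

rows, err := db1.Query("SELECT user_id, name, body FROM users")
if err != nil {
	log.Fatal(err)
}
defer rows.Close()

byUser := make(map[int]*UserAttrs)
for rows.Next() {
	var (
		id   int
		name string
		body int
	)
	if err := rows.Scan(&id, &name, &body); err != nil {
		panic(err)
	}
	u, ok := byUser[id]
	if !ok {
		u = &UserAttrs{UserID: id}
		byUser[id] = u
	}
	// Map each name row onto the matching struct field.
	switch name {
	case "model":
		u.Model = body
	case "brand":
		u.Brand = body
	case "fuel":
		u.Fuel = body
	case "date":
		u.Date = body
	case "year":
		u.Year = body
	}
}
if err := rows.Err(); err != nil {
	panic(err)
}
```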
const maxPerPage = 100
type Scanable interface {
GetScans() []interface{}
}
type User struct {
ID int
Subject string
Phone string
Body string
}
func (s *User) GetScans() []interface{} {
return []interface{}{&s.ID, &s.Subject, &s.Phone, &s.Body}
}
func getNewUserList() []*User {
users := make([]*User, maxPerPage)
for i := 0; i < maxPerPage; i++ {
users[i] = new(User)
}
return users
}
var usersPool = sync.Pool{
New: func() interface{} {
return getNewUserList()
},
}
func getUsers() (b []*User) {
ifc := usersPool.Get()
if ifc != nil {
b = ifc.([]*User)
} else {
b = getNewUserList()
}
return
}
func putUsers(b []*User) {
// if cap(b) <= maxPerPage {
// b = b[:0] // reset
// usersPool.Put(b)
// }
usersPool.Put(b) // store the slice itself so the type assertion in getUsers matches
}
func TestUsers(t *testing.T) {
users := getUsers()
rows, err := PostgresConnection.Query("SELECT user_id, subject,phone, body FROM users limit 11")
if err != nil {
log.Fatal(err)
}
defer rows.Close()
length := 0
for rows.Next() {
if err := rows.Scan(users[length].GetScans()...); err != nil {
panic(err)
}
if err := rows.Err(); err != nil {
panic(err)
}
length++
}
result := users[:length]
fmt.Println(result)
putUsers(users)
}
This is a sample function to get multiple documents from the db using the golang sqlx library.
func GetDocuments() ([]SupportedDocument, error) {
var documents []SupportedDocument
query := `SELECT * FROM documents limit 10`
err := database.Select(&documents, query)
return documents, err
}
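For context, a minimal (assumed) setup for that snippet; the connection string, struct fields, and db tags below are placeholders rather than the answerer's actual schema, and note that SELECT * requires every returned column to have a matching field:
```golang
package main

import (
	"log"

	_ "github.com/go-sql-driver/mysql"
	"github.com/jmoiron/sqlx"
)

// SupportedDocument is a guess at the shape of the documents table;
// sqlx maps columns to fields via the db tags.
type SupportedDocument struct {
	ID   int    `db:"id"`
	Name string `db:"name"`
}

var database *sqlx.DB

func main() {
	var err error
	database, err = sqlx.Connect("mysql", "user:password@tcp(127.0.0.1:3306)/dbname")
	if err != nil {
		log.Fatal(err)
	}
	docs, err := GetDocuments()
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("%+v", docs)
}
```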

Is there a better way to marshal sql rows?

I have the following struct:
type MyTable struct{
DBColA []byte `db:"cola" json:"-"`
ColA string `json:"cola"`
DBColB []byte `db:"colb" json:"-"`
ColB string `json:"colb"`
}
I map to []byte [to better handle null values in my sql][1]
When I grab the rows I need to output them as JSON. In order to do that I convert []byte to string:
var rows []*MyTable
if _, err := Session.Select(&rows, sql, args...); err != nil {
log.Println(err)
}
for _, row := range rows{
row.ColA = string(row.DBColA)
row.ColB = string(row.DBColB)
}
w.Header().Set("Content-Type", "application/json")
if err := json.NewEncoder(w).Encode(rows); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
It seems very inefficient to have both DBColA and ColA in my struct and then convert DBColA to a string... I have a lot of columns. Is there a better way?
[1]: https://github.com/go-sql-driver/mysql/wiki/Examples
Have you tried gosqljson in https://github.com/elgs/gosqljson ?
See example:
```golang
package main
import (
"database/sql"
"fmt"
"github.com/elgs/gosqljson"
_ "github.com/go-sql-driver/mysql"
)
func main() {
ds := "username:password#tcp(host:3306)/db"
db, err := sql.Open("mysql", ds)
if err != nil {
fmt.Println("sql.Open:", err)
}
defer db.Close()
theCase := "lower" // "lower" default, "upper", camel
a, _ := gosqljson.QueryDbToArrayJson(db, theCase, "SELECT ID,NAME FROM t LIMIT ?,?", 0, 3)
fmt.Println(a)
// [["id","name"],["0","Alicia"],["1","Brian"],["2","Chloe"]]
m, _ := gosqljson.QueryDbToMapJson(db, theCase, "SELECT ID,NAME FROM t LIMIT ?,?", 0, 3)
fmt.Println(m)
// [{"id":"0","name":"Alicia"},{"id":"1","name":"Brian"},{"id":"2","name":"Chloe"}]
}
```
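Another common option, if you prefer to stay with database/sql-style structs, is to scan nullable columns straight into sql.NullString and shape the JSON in a custom MarshalJSON; a sketch (not the original poster's code):
```golang
package main

import (
	"database/sql"
	"encoding/json"
)

// MyTable scans nullable columns directly into sql.NullString,
// avoiding the duplicated DBColX/ColX fields from the question.
type MyTable struct {
	ColA sql.NullString `db:"cola"`
	ColB sql.NullString `db:"colb"`
}

// MarshalJSON emits plain strings (empty when the column is NULL).
func (t MyTable) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]string{
		"cola": t.ColA.String,
		"colb": t.ColB.String,
	})
}
```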

Dumping MySQL tables to JSON with Golang

I was putting together a quick MySQL-to-JSON dumper in Go. However, I find that everything I retrieve from the database is a []byte array. Thus, instead of native JSON integers or booleans, I get everything encoded as strings.
Subset of the code:
import (
"encoding/json"
"database/sql"
_ "github.com/go-sql-driver/mysql"
)
func dumpTable(w io.Writer, table string) {
// ...
rows, err := Query(db, fmt.Sprintf("SELECT * FROM %s", table))
checkError(err)
columns, err := rows.Columns()
checkError(err)
scanArgs := make([]interface{}, len(columns))
values := make([]interface{}, len(columns))
for i := range values {
scanArgs[i] = &values[i]
}
for rows.Next() {
err = rows.Scan(scanArgs...)
checkError(err)
record := make(map[string]interface{})
for i, col := range values {
if col != nil {
fmt.Printf("\n%s: type= %s\n", columns[i], reflect.TypeOf(col))
switch t := col.(type) {
default:
fmt.Printf("Unexpected type %T\n", t)
case bool:
fmt.Printf("bool\n")
record[columns[i]] = col.(bool)
case int:
fmt.Printf("int\n")
record[columns[i]] = col.(int)
case int64:
fmt.Printf("int64\n")
record[columns[i]] = col.(int64)
case float64:
fmt.Printf("float64\n")
record[columns[i]] = col.(float64)
case string:
fmt.Printf("string\n")
record[columns[i]] = col.(string)
case []byte: // -- all cases go HERE!
fmt.Printf("[]byte\n")
record[columns[i]] = string(col.([]byte))
case time.Time:
// record[columns[i]] = col.(string)
}
}
}
s, _ := json.Marshal(record)
w.Write(s)
io.WriteString(w, "\n")
}
}
I also needed to dump database tables to JSON, and here is how I achieved it
(unlike another answer in this topic, not everything comes back as a string; thanks to this answer, https://stackoverflow.com/a/17885636/4124416, I could get integer fields correctly):
func getJSON(sqlString string) (string, error) {
rows, err := db.Query(sqlString)
if err != nil {
return "", err
}
defer rows.Close()
columns, err := rows.Columns()
if err != nil {
return "", err
}
count := len(columns)
tableData := make([]map[string]interface{}, 0)
values := make([]interface{}, count)
valuePtrs := make([]interface{}, count)
for rows.Next() {
for i := 0; i < count; i++ {
valuePtrs[i] = &values[i]
}
rows.Scan(valuePtrs...)
entry := make(map[string]interface{})
for i, col := range columns {
var v interface{}
val := values[i]
b, ok := val.([]byte)
if ok {
v = string(b)
} else {
v = val
}
entry[col] = v
}
tableData = append(tableData, entry)
}
jsonData, err := json.Marshal(tableData)
if err != nil {
return "", err
}
fmt.Println(string(jsonData))
return string(jsonData), nil
}
Here is a sample output:
[{"ID":0,"Text":"Zero"},{"ID":1,"Text":"One"},{"ID":2,"Text":"Two"}]
You need to use prepared statements to get the native types. MySQL has two protocols: one transmits everything as text, the other as the "real" types. That binary protocol is only used when you use prepared statements. See https://github.com/go-sql-driver/mysql/issues/407
The function getJSON below is correct:
func getJSON(sqlString string) (string, error) {
stmt, err := db.Prepare(sqlString)
if err != nil {
return "", err
}
defer stmt.Close()
rows, err := stmt.Query()
if err != nil {
return "", err
}
defer rows.Close()
columns, err := rows.Columns()
if err != nil {
return "", err
}
tableData := make([]map[string]interface{}, 0)
count := len(columns)
values := make([]interface{}, count)
scanArgs := make([]interface{}, count)
for i := range values {
scanArgs[i] = &values[i]
}
for rows.Next() {
err := rows.Scan(scanArgs...)
if err != nil {
return "", err
}
entry := make(map[string]interface{})
for i, col := range columns {
v := values[i]
b, ok := v.([]byte)
if ok {
entry[col] = string(b)
} else {
entry[col] = v
}
}
tableData = append(tableData, entry)
}
jsonData, err := json.Marshal(tableData)
if err != nil {
return "", err
}
return string(jsonData), nil
}
There's not much you can do, because the driver-to-database/sql interaction is pretty much a one-way street: the driver can't help you with anything once the data is handed over to database/sql.
You could try your luck with http://godoc.org/github.com/arnehormann/sqlinternals/mysqlinternals
1. Query the database.
2. Retrieve the Column slice with cols, err := mysqlinternals.Columns(rows).
3. Create a new var values := make([]interface{}, len(cols)) and iterate over cols.
4. Get the closest matching Go type per column with refType, err := cols[i].ReflectGoType().
5. Create type placeholders with values[i] = reflect.Zero(refType).Interface().
6. rows.Next() and err = rows.Scan(values...). Don't recreate values; copy and reuse it.
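Put together, those steps might look roughly like the sketch below. The mysqlinternals calls are taken from the list above and are not verified here, and reflect.New is used instead of reflect.Zero because rows.Scan needs pointers to write into:
```golang
rows, err := db.Query("SELECT * FROM some_table") // some_table is a placeholder
if err != nil {
	log.Fatal(err)
}
defer rows.Close()

cols, err := mysqlinternals.Columns(rows)
if err != nil {
	log.Fatal(err)
}
names, err := rows.Columns()
if err != nil {
	log.Fatal(err)
}

// One typed placeholder per column, reused for every row.
values := make([]interface{}, len(cols))
for i := range cols {
	refType, err := cols[i].ReflectGoType()
	if err != nil {
		log.Fatal(err)
	}
	values[i] = reflect.New(refType).Interface() // pointer to a zero value of the column's Go type
}

for rows.Next() {
	if err := rows.Scan(values...); err != nil {
		log.Fatal(err)
	}
	record := make(map[string]interface{}, len(cols))
	for i, name := range names {
		record[name] = reflect.ValueOf(values[i]).Elem().Interface()
	}
	// record now holds typed values instead of raw []byte
}
```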
I guess this will still be pretty slow, but you should be able to get somewhere with it.
If you encounter problems, please file an issue - I'll get to it as soon as I can.
I have a table named users inside the practice_db database. I have included the table structure and data in the following program, which converts the users table into JSON format.
You can also check the source code at https://gist.github.com/hygull/1725442b0f121a5fc17b28e04796714d.
/**
{
"created_on": "26 may 2017",
"todos": [
"go get github.com/go-sql-driver/mysql",
"postman(optional)",
"browser(optional)",
],
"aim": "Reading fname column into []string(slice of strings)"
}
*/
/*
mysql> select * from users;
+----+-----------+----------+----------+-------------------------------+--------------+-------------------------------------------------------------------------------------------------+
| id | fname | lname | uname | email | contact | profile_pic |
+----+-----------+----------+----------+-------------------------------+--------------+-------------------------------------------------------------------------------------------------+
| 1 | Rishikesh | Agrawani | hygull | rishikesh0014051992@gmail.com | 917353787704 | https://cdn4.iconfinder.com/data/icons/rcons-user/32/user_group_users_accounts_contacts-512.png |
| 2 | Sandeep | E | sandeep | sandeepeswar8@gmail.com | 919739040038 | https://cdn4.iconfinder.com/data/icons/eldorado-user/40/user-512.png |
| 3 | Darshan | Sidar | darshan | sidardarshan@gmail.com | 917996917565 | https://cdn4.iconfinder.com/data/icons/rcons-user/32/child_boy-512.png |
| 4 | Surendra | Prajapat | surendra | surendrakgadwal@gmail.com | 918385894407 | https://cdn4.iconfinder.com/data/icons/rcons-user/32/account_male-512.png |
| 5 | Mukesh | Jakhar | mukesh | mjakhar.kjakhar@gmail.com | 919772254140 | https://cdn2.iconfinder.com/data/icons/rcons-user/32/male-circle-512.png |
+----+-----------+----------+----------+-------------------------------+--------------+-------------------------------------------------------------------------------------------------+
5 rows in set (0.00 sec)
mysql>
*/
package main
import "log"
import "net/http"
import "encoding/json"
import (
_"github.com/go-sql-driver/mysql"
"database/sql"
)
func users(w http.ResponseWriter, r *http.Request) {
// db, err := sql.Open("mysql", "<username>:<password>@tcp(127.0.0.1:<port>)/<dbname>?charset=utf8" )
db, err := sql.Open("mysql", "hygull:admin#67@tcp(127.0.0.1:3306)/practice_db?charset=utf8")
w.Header().Set("Content-Type", "application/json")
if err != nil {
log.Fatal(err)
}
rows, err := db.Query("select id, fname, lname, uname, email, contact, profile_pic from users")
if err != nil {
log.Fatal(err)
}
type User struct {
Id int `json:"id"`
Fname string `json:"firstname"`
Lname string `json:"lastname"`
Uname string `json:"username"`
Email string `json:"email"`
Contact int `json:"contact"`
ProfilePic string `json:"profile_pic"`
}
var users []User
for rows.Next() {
var id, contact int
var fname string
var lname string
var uname, email, profile_pic string
rows.Scan(&id ,&fname, &lname, &uname, &email, &contact, &profile_pic)
users = append(users, User{id, fname, lname, uname, email, contact, profile_pic})
}
usersBytes, _ := json.Marshal(&users)
w.Write(usersBytes)
db.Close()
}
func main() {
http.HandleFunc("/users/", users)
http.ListenAndServe(":8080", nil)
}
/* REQUEST
http://127.0.0.1:8080/users/
*/
/* RESPONSE
[
{
"id": 1,
"firstname": "Rishikesh",
"lastname": "Agrawani",
"username": "hygull",
"email": "rishikesh0014051992#gmail.com",
"contact": 917353787704,
"profile_pic": "https://cdn4.iconfinder.com/data/icons/rcons-user/32/user_group_users_accounts_contacts-512.png"
},
{
"id": 2,
"firstname": "Sandeep",
"lastname": "E",
"username": "sandeep",
"email": "sandeepeswar8#gmail.com",
"contact": 919739040038,
"profile_pic": "https://cdn4.iconfinder.com/data/icons/eldorado-user/40/user-512.png"
},
{
"id": 3,
"firstname": "Darshan",
"lastname": "Sidar",
"username": "darshan",
"email": "sidardarshan#gmail.com",
"contact": 917996917565,
"profile_pic": "https://cdn4.iconfinder.com/data/icons/rcons-user/32/child_boy-512.png"
},
{
"id": 4,
"firstname": "Surendra",
"lastname": "Prajapat",
"username": "surendra",
"email": "surendrakgadwal#gmail.com",
"contact": 918385894407,
"profile_pic": "https://cdn4.iconfinder.com/data/icons/rcons-user/32/account_male-512.png"
},
{
"id": 5,
"firstname": "Mukesh",
"lastname": "Jakhar",
"username": "mukesh",
"email": "mjakhar.kjakhar#gmail.com",
"contact": 919772254140,
"profile_pic": "https://cdn2.iconfinder.com/data/icons/rcons-user/32/male-circle-512.png"
}
]
*/
Based on the answers here, this is the most efficient code I could come up with. Note that it outputs each row as a separate JSON array, to avoid repeating the key names.
// OutputJSONMysqlRowsStream outputs rows as a JSON array stream to save ram & output size due to key name repetition
func OutputJSONMysqlRowsStream(writer http.ResponseWriter, rows *sql.Rows) {
defer rows.Close()
columns, err := rows.Columns()
if err != nil {
OutputJSONError(writer, "Failed to get column names")
return
}
jsonColumns, err := json.Marshal(columns)
if err != nil {
OutputJSONError(writer, "Failed to encode json of column names")
return
}
writer.Header().Set("Content-Type", "application/cal-json-stream; charset=utf-8")
fmt.Fprintln(writer, "{\"status\": \"done\", \"data\":{ \"json_stream_fields\":"+string(jsonColumns)+"}}")
columnCount := len(columns)
rowDataHolder := make([]interface{}, columnCount)
rowDataHolderPointers := make([]interface{}, columnCount)
if err != nil {
log.Println(err)
}
for rows.Next() {
for i := 0; i < columnCount; i++ {
rowDataHolderPointers[i] = &rowDataHolder[i]
}
err := rows.Scan(rowDataHolderPointers...)
if err != nil {
log.Println(err)
} else {
for i, value := range rowDataHolder {
tempValue, ok := value.([]byte)
if ok {
rowDataHolder[i] = string(tempValue)
}
}
jsonEncoder := json.NewEncoder(writer)
err = jsonEncoder.Encode(rowDataHolder)
if err != nil {
log.Println(err)
}
}
}
}
You can dump the table into JSON just fine; however, everything will be a string :(
q := "select * from table"
debug("SQL: %s", q)
rows, err := db.Query(q)
checkError(err)
defer rows.Close()
columns, err := rows.Columns()
checkError(err)
scanArgs := make([]interface{}, len(columns))
values := make([]interface{}, len(columns))
for i := range values {
scanArgs[i] = &values[i]
}
for rows.Next() {
err = rows.Scan(scanArgs...)
checkError(err)
record := make(map[string]interface{})
for i, col := range values {
if col != nil {
record[columns[i]] = fmt.Sprintf("%s", string(col.([]byte)))
}
}
s, _ := json.Marshal(record)
fmt.Printf("%s\n", s)
}
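If you want to stay on the text protocol and still get numbers in the JSON, one option (a sketch, not from the original answer) is to consult rows.ColumnTypes() and convert the []byte values yourself. This reuses the setup (columns, values, scanArgs, checkError) from the block above, replaces its row loop, and needs a strconv import; the database type names are what the MySQL driver typically reports and may need adjusting:
```golang
colTypes, err := rows.ColumnTypes()
checkError(err)
for rows.Next() {
	err = rows.Scan(scanArgs...)
	checkError(err)
	record := make(map[string]interface{})
	for i, col := range values {
		b, ok := col.([]byte)
		if !ok {
			record[columns[i]] = col // nil stays nil and becomes JSON null
			continue
		}
		// Convert numeric columns back to numbers based on the reported column type.
		switch colTypes[i].DatabaseTypeName() {
		case "TINYINT", "SMALLINT", "MEDIUMINT", "INT", "BIGINT":
			if n, err := strconv.ParseInt(string(b), 10, 64); err == nil {
				record[columns[i]] = n
				continue
			}
		case "FLOAT", "DOUBLE", "DECIMAL":
			if f, err := strconv.ParseFloat(string(b), 64); err == nil {
				record[columns[i]] = f
				continue
			}
		}
		record[columns[i]] = string(b)
	}
	s, _ := json.Marshal(record)
	fmt.Printf("%s\n", s)
}
```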