I am trying to create two CSV files and write to both simultaneously. This code doesn't work that way; it's just meant to illustrate what I want to do. Since *csv.Writer is a pointer, I don't know how I can tie each writer to a different file. Is it possible to do that?
package test

import (
    "encoding/csv"
    "os"
    "sync"
)

var csvSuccess *os.File
var csvError *os.File
var csvErr error
var csvWriterSuccess *csv.Writer
var csvWriterError *csv.Writer
var mutex *sync.Mutex

func init() {
    csvSuccess, csvErr = os.Create("success-result.csv")
    if csvErr != nil {
        panic("error opening success file")
    }
    csvError, csvErr = os.Create("error-result.csv")
    if csvErr != nil {
        panic("error opening error file")
    }
    csvWriterSuccess = csv.NewWriter(csvSuccess)
    csvWriterError = csv.NewWriter(csvSuccess)
    mutex = &sync.Mutex{}
}

func WriteRecordSuccess(record []string) {
    mutex.Lock()
    defer mutex.Unlock()
    if err := csvWriterSuccess.Write(record); err != nil {
        panic("error writing success record")
    }
    csvWriterSuccess.Flush()
}

func WriteRecordError(record []string) {
    mutex.Lock()
    defer mutex.Unlock()
    if err := csvWriterError.Write(record); err != nil {
        panic("error writing error record")
    }
    csvWriterError.Flush()
}

func ClosecsvFile() {
    csvWriterSuccess.Flush()
    csvWriterError.Flush()
    csvSuccess.Sync()
    csvError.Sync()
    csvSuccess.Close()
    csvError.Close()
}
In your init function you have both writers writing to the success file.
csvWriterSuccess = csv.NewWriter(csvSuccess)
csvWriterError = csv.NewWriter(csvSuccess)
Write to the success and error files separately.
csvWriterSuccess = csv.NewWriter(csvSuccess)
csvWriterError = csv.NewWriter(csvError)
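For completeness, here is a minimal, self-contained sketch (file names carried over from the question) showing that each csv.Writer is bound to the file it was created with, so the two pointers already write to different destinations:

package main

import (
    "encoding/csv"
    "log"
    "os"
)

func main() {
    successFile, err := os.Create("success-result.csv")
    if err != nil {
        log.Fatal(err)
    }
    defer successFile.Close()

    errorFile, err := os.Create("error-result.csv")
    if err != nil {
        log.Fatal(err)
    }
    defer errorFile.Close()

    // Each writer remembers the io.Writer it was created with,
    // so the two *csv.Writer values are already distinct.
    successWriter := csv.NewWriter(successFile)
    errorWriter := csv.NewWriter(errorFile)

    if err := successWriter.Write([]string{"id", "status"}); err != nil {
        log.Fatal(err)
    }
    if err := errorWriter.Write([]string{"id", "reason"}); err != nil {
        log.Fatal(err)
    }

    // Flush buffered data to the underlying files.
    successWriter.Flush()
    errorWriter.Flush()
}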
I am working on a website scraper. Right now I can only write a single JSON object to the JSON file. I want to write JSON objects one after another, so that I can keep hundreds of records in a single JSON file, like this:
[
{
"id": 1321931,
"name": "Mike"
},
{
"id": 32139219,
"name": "Melissa"
},
{
"id": 8421921,
"name": "Jordan"
},
{
"id": 4291901,
"name": "David"
}
]
But the output looks like this: whenever I send new data, only the first JSON object is updated in place.
[
{
"id": 1,
"name": "Mike"
}
]
Here is the code:
package main

import (
    "encoding/json"
    "fmt"
    "html/template"
    "io/ioutil"
    "log"
    "math/rand"
    "net/http"
    "os"
    "strings"

    "github.com/gocolly/colly"
)

type Info struct {
    ID   int    `json:"id"`
    Name string `json:"name"`
}

var tpl *template.Template
var name string
var stonf Info
var allInfos []Info
var id int
var co = colly.NewCollector()

func main() {
    fmt.Println("Started...")
    allInfos = make([]Info, 1)
    id = rand.Intn((99999 - 10000) + 10000)
    // Reading data from JSON
    data, err := ioutil.ReadFile("stocky.json")
    if err != nil {
        fmt.Println("ERROR 1 JSON", err)
    }
    // Unmarshal JSON data
    var d []Info
    err = json.Unmarshal(data, &d)
    if err != nil {
        fmt.Println(err)
    }
    tpl, _ = tpl.ParseGlob("templates/*.html")
    http.HandleFunc("/mete", hellloHandleFunc)
    staticHandler := http.FileServer(http.Dir("./css/"))
    http.Handle("/css/", http.StripPrefix("/css", staticHandler))
    http.ListenAndServe("localhost:8080", nil)
}

func hellloHandleFunc(w http.ResponseWriter, r *http.Request) {
    err := r.ParseForm()
    if err != nil {
        log.Fatal(err)
    }
    allInfos[0].ID = id // JSON-PRO
    // Get the price
    co.OnHTML("div#dp", func(p *colly.HTMLElement) {
        name = p.ChildText("h1#title")
    })
    requestLink := strings.TrimSpace(r.FormValue("input-link"))
    co.Visit(requestLink)
    // FIRST DATA JSON
    enc := json.NewEncoder(os.Stdout)
    enc.SetIndent("", " ")
    enc.Encode(allInfos)
    stonf = Info{
        Name: name,
    }
    fmt.Println("Index Running")
    tpl.ExecuteTemplate(w, "form-copy.html", stonf)
}

func writeJson(data []Info) {
    dataFile, err := json.MarshalIndent(data, "", " ")
    if err != nil {
        log.Println("Could not create JSON", err)
    }
    ioutil.WriteFile("stocky.json", dataFile, 0666)
}
Here is a solution that appends each new Info to the list and stores the whole list in the file.
This will perform well only for relatively small lists. For large lists, the overhead of rewriting the entire file on every request may be too high. In that case I propose changing the format to ndjson, which allows appending only the current Info struct instead of rewriting the whole list.
I've also added a synchronization mechanism to avoid race conditions when multiple HTTP requests arrive at the same time.
I assumed that the identifier must be generated separately for each request and that collisions are not a problem.
package main

import (
    "encoding/json"
    "fmt"
    "html/template"
    "io/ioutil"
    "log"
    "math/rand"
    "net/http"
    "os"
    "strings"
    "sync"

    "github.com/gocolly/colly"
)

type (
    Info struct {
        ID   int    `json:"id"`
        Name string `json:"name"`
    }
    Infos struct {
        List []Info
        sync.Mutex
    }
)

var (
    infos *Infos
    tpl   *template.Template
    co    = colly.NewCollector()
)

func main() {
    fmt.Println("Started...")
    var err error
    infos, err = readInfos()
    if err != nil {
        log.Fatal(err)
    }
    tpl, _ = tpl.ParseGlob("templates/*.html")
    http.HandleFunc("/mete", hellloHandleFunc)
    staticHandler := http.FileServer(http.Dir("./css/"))
    http.Handle("/css/", http.StripPrefix("/css", staticHandler))
    if err := http.ListenAndServe("localhost:8080", nil); err != nil {
        log.Fatal(err)
    }
}

func hellloHandleFunc(w http.ResponseWriter, r *http.Request) {
    err := r.ParseForm()
    if err != nil {
        log.Fatal(err)
    }
    stonf := Info{
        ID: rand.Intn((99999 - 10000) + 10000),
    }
    // Get the price
    co.OnHTML("div#dp", func(p *colly.HTMLElement) {
        stonf.Name = p.ChildText("h1#title")
    })
    requestLink := strings.TrimSpace(r.FormValue("input-link"))
    if err := co.Visit(requestLink); err != nil {
        log.Fatal(err)
    }
    if err := infos.AppendAndWrite(stonf); err != nil {
        log.Fatal(err)
    }
    // FIRST DATA JSON
    enc := json.NewEncoder(os.Stdout)
    enc.SetIndent("", " ")
    enc.Encode(stonf)
    fmt.Println("Index Running")
    tpl.ExecuteTemplate(w, "form-copy.html", stonf)
}

func readInfos() (*Infos, error) {
    // Read the data from the JSON file.
    data, err := ioutil.ReadFile("stocky.json")
    if err != nil {
        return nil, err
    }
    var r []Info
    // Unmarshal the JSON data.
    err = json.Unmarshal(data, &r)
    if err != nil {
        return nil, err
    }
    return &Infos{List: r}, nil
}

func (i *Infos) AppendAndWrite(info Info) error {
    i.Lock()
    defer i.Unlock()
    i.List = append(i.List, info)
    if err := i.storeLocked(); err != nil {
        return fmt.Errorf("storing info list failed: %w", err)
    }
    return nil
}

func (i *Infos) storeLocked() error {
    dataFile, err := json.MarshalIndent(i.List, "", " ")
    if err != nil {
        return fmt.Errorf("could not marshal infos JSON: %w", err)
    }
    err = ioutil.WriteFile("stocky.json", dataFile, 0666)
    if err != nil {
        return fmt.Errorf("could not write 'stocky.json' file: %w", err)
    }
    return nil
}
There is a standard called JSON Lines (https://jsonlines.org/), which puts one JSON value per line instead of wrapping everything in a JSON array.
The JSON library from the Go standard library works well with JSON Lines in both cases, reading and writing.
Write multiple JSON (one per line):
e := json.NewEncoder(yourWriterFile)
e.Encode(object1)
e.Encode(object2)
//...
Read multiple JSON (one per line or concatenated):
d := json.NewDecoder(yourReaderFile)
d.Decode(&object1)
d.Decode(&object2)
//...
More info: https://pkg.go.dev/encoding/json
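As a rough, self-contained sketch of this approach (the Info type and the stocky.ndjson file name are assumptions carried over from the question), appending one record per line and reading them all back:

package main

import (
    "encoding/json"
    "fmt"
    "io"
    "log"
    "os"
)

type Info struct {
    ID   int    `json:"id"`
    Name string `json:"name"`
}

func main() {
    // Open in append mode so each run adds one line instead of
    // rewriting the whole file.
    f, err := os.OpenFile("stocky.ndjson", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666)
    if err != nil {
        log.Fatal(err)
    }
    enc := json.NewEncoder(f) // Encode writes a trailing newline
    if err := enc.Encode(Info{ID: 1321931, Name: "Mike"}); err != nil {
        log.Fatal(err)
    }
    f.Close()

    // Read every record back, one Decode call per value.
    in, err := os.Open("stocky.ndjson")
    if err != nil {
        log.Fatal(err)
    }
    defer in.Close()
    dec := json.NewDecoder(in)
    for {
        var rec Info
        if err := dec.Decode(&rec); err != nil {
            if err == io.EOF {
                break
            }
            log.Fatal(err)
        }
        fmt.Printf("%+v\n", rec)
    }
}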
I've been trying to keep a "working" file to which I save some basic state of my application instead of holding it in RAM, since it needs to be saved every day. I've decided on creating one file per day; that part works, but I've stripped it from the code for clarity.
I'm able to initialise the file with false values for the Informations struct and then unmarshal and read from it.
The problem arises when I try to update the data after it has been unmarshalled, before saving it back to the text file.
isImportStarted does work (when the erroneous line is removed, obviously), but I can't seem to update the data properly; I get this error:
./test.go:62:34: cannot assign to struct field TheList[symbol].ImportStarted in map
./test.go:71:3: cannot take the address of TheList[symbol].ImportStarted
./test.go:71:34: cannot assign to &TheList[symbol].ImportStarted
My code:
package main

import (
    "encoding/json"
    "fmt"
    "io/ioutil"
    "log"
    "os"
)

type Informations struct {
    ImportStarted bool
    ImportDone    bool
}

var MyList = map[string]*Informations{
    "test":  &Informations{ImportStarted: false, ImportDone: false},
    "test2": &Informations{ImportStarted: false, ImportDone: false},
}

func ReadFile(filename string) []byte {
    data, err := ioutil.ReadFile(filename)
    if err != nil {
        log.Panicf("failed reading data from file: %s", err)
    }
    return data
}

func writeFile(json string, filename string) {
    file, err := os.OpenFile(filename, os.O_APPEND|os.O_WRONLY, os.ModeAppend)
    defer file.Close()
    if err != nil {
        fmt.Println(err)
    }
    _, err2 := file.WriteString(json)
    fmt.Println(err2)
}

func main() {
    isImportStarted("test")
    ImportStart("test")
}

func ImportStart(symbol string) {
    filename := "test.txt"
    _, err := os.Stat(filename)
    if err != nil {
        if os.IsNotExist(err) {
            fmt.Println("File does not exist creating it...")
            file, err := os.Create(filename)
            jsonString, _ := json.Marshal(MyList)
            writeFile(string(jsonString), filename)
            if err != nil {
                fmt.Println(err)
            }
            fmt.Println("reading from file" + filename)
            x := ReadFile(filename)
            var TheList = map[string]Informations{}
            json.Unmarshal(x, &TheList)
            TheList[symbol].ImportStarted = true
            defer file.Close()
            //wanting to save afterwards...
        }
    } else {
        fmt.Println("reading from file " + filename)
        x := ReadFile(filename)
        var TheList = map[string]Informations{}
        json.Unmarshal(x, &TheList)
        &TheList[symbol].ImportStarted = true
    }
}

func isImportStarted(symbol string) bool {
    filename := "test.txt"
    x := ReadFile(filename)
    var TheList = map[string]Informations{}
    json.Unmarshal(x, &TheList)
    return TheList[symbol].ImportStarted
}
I've looked at the "Why do I get a 'cannot assign' error when setting a value to a struct as a value in a map?" question, but it doesn't fit my use case at all, as it would effectively initialize all my structs as nil instead of {false, false}.
Any ideas?
Try var TheList = map[string]*Informations{}. For why you cannot assign to a struct value in a map, please refer to why-do-i-get-a-cannot-assing-error or access-struct-in-map-without-copying.
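To illustrate the suggestion, here is a minimal sketch (with hypothetical JSON data) of why a map of pointers allows the assignment that a map of struct values rejects:

package main

import (
    "encoding/json"
    "fmt"
)

type Informations struct {
    ImportStarted bool
    ImportDone    bool
}

func main() {
    data := []byte(`{"test":{"ImportStarted":false,"ImportDone":false}}`)

    // Map of pointers: unmarshalling allocates an *Informations per key,
    // so fields can be assigned through the pointer.
    var TheList = map[string]*Informations{}
    if err := json.Unmarshal(data, &TheList); err != nil {
        fmt.Println(err)
        return
    }
    TheList["test"].ImportStarted = true // compiles and works

    out, _ := json.Marshal(TheList)
    fmt.Println(string(out)) // {"test":{"ImportStarted":true,"ImportDone":false}}
}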
I have a map that contains some flags as keys, and I have set the default value for each key to false. This is my map:
var flags = map[string]bool{
    "terminationFlag": false,
    "transferFlag":    false,
    "jrCancelledFlag": false,
    "jrFilledFlag":    false,
}
While performing an operation in a for loop, I have to update one field in the map above to true. During the next iteration, it has to update the second field to true. After all the fields in the map are set to true, I have to return the map.
The code I tried:
Keystrings := []string{"terminationReport - 2019-1", "transferReport - 2019-1", "jrCancelledReport - 2019-1", "jrFilledReport - 2019-1"}
fmt.Println("Keystrings ", Keystrings)
for i, value := range Keystrings {
    bytesread, err = stub.GetState(value)
    var result []string
    _ = json.Unmarshal(bytesread, &result)
    fmt.Println("result ", result)
    if result[0] == "yes" {
        fmt.Println("result in if ", result)
        flags[i] = true
    }
}
Since it's very hard to understand from the question what is being asked, here's a simple attempt at working with data similar to the question's, in the hope that you can take the right parts from this sample and adapt them to your issue. Follow the comments in the code to understand what's going on.
package main

import (
    "encoding/json"
    "fmt"
    "log"
)

var jsonBlob = []byte(`["jrCancelledFlag", "yes"]`)

var flags = map[string]bool{
    "terminationFlag": false,
    "transferFlag":    false,
    "jrCancelledFlag": false,
    "jrFilledFlag":    false,
}

func main() {
    // Parse jsonBlob into a slice of strings.
    var parsed []string
    if err := json.Unmarshal(jsonBlob, &parsed); err != nil {
        log.Fatalf("JSON unmarshal: %s", err)
    }
    // Expect the slice to be of length 2: first item the flag name,
    // second item yes/no.
    if len(parsed) != 2 {
        log.Fatalf("parsed len %d, expected 2", len(parsed))
    }
    // Assume parsed[0] actually appears in flags... otherwise more
    // error checking is needed.
    if parsed[1] == "yes" {
        flags[parsed[0]] = true
    }
    // Emit updated flags as JSON.
    out, err := json.Marshal(flags)
    if err != nil {
        log.Fatalf("JSON marshal: %s", err)
    }
    fmt.Println(string(out))
}
This can be achieved cleanly by using the JSON unmarshaller interface to define your own unmarshalling:
https://medium.com/@nate510/dynamic-json-umarshalling-in-go-88095561d6a0
package main

import (
    "encoding/json"
    "fmt"
    "log"
)

var jsonBlob = []byte(`["jrCancelledFlag", "yes"]`)

// Flags ...
type Flags struct {
    TerminationFlag bool `json:"terminationFlag,omitempty"`
    TransferFlag    bool `json:"transferFlag,omitempty"`
    JRCancelledFlag bool `json:"jrCancelledFlag,omitempty"`
    JRFilledFlag    bool `json:"jrFilledFlag,omitempty"`
}

// UnmarshalJSON satisfies the JSON unmarshaller interface.
func (f *Flags) UnmarshalJSON(data []byte) error {
    var parsed []string
    if err := json.Unmarshal(data, &parsed); err != nil {
        return err
    }
    if len(parsed)%2 != 0 {
        return fmt.Errorf("expected string to be evenly paired")
    }
    // Walk the slice as name/value pairs.
    for i := 0; i < len(parsed); i++ {
        j := i + 1
        if j < len(parsed) {
            switch parsed[i] {
            case "terminationFlag":
                f.TerminationFlag = toBool(parsed[j])
            case "transferFlag":
                f.TransferFlag = toBool(parsed[j])
            case "jrCancelledFlag":
                f.JRCancelledFlag = toBool(parsed[j])
            case "jrFilledFlag":
                f.JRFilledFlag = toBool(parsed[j])
            }
        }
    }
    return nil
}

func toBool(s string) bool {
    return s == "yes"
}

func main() {
    var flags Flags
    err := json.Unmarshal(jsonBlob, &flags)
    if err != nil {
        log.Fatal(err)
    }
    b, _ := json.Marshal(flags)
    fmt.Println(string(b))
}
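Note that because every field in Flags carries omitempty, marshalling drops the false flags, so this program should print {"jrCancelledFlag":true} rather than all four flags.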
I'm new to Go. I'm trying to share a MySQL database connection within my package, and later maybe across several packages. To avoid defining the database connection in every package, I've created a Database package, and now I'm trying to import that package, connect to the db, and use that object throughout the package.
I'm using this MySQL driver: github.com/go-sql-driver/mysql
here is my code:
main.go
package main

import (
    "database/sql"
    "log"

    "./packages/db" // this is my custom database package

    _ "github.com/go-sql-driver/mysql"
)

var dbType Database.DatabaseType
var db *sql.DB

func main() {
    log.Printf("-- entering main...")
    dbType := Database.New()
    db = dbType.GetDb()
    dbType.DbConnect()
    delete_test_data()
    dbType.DbClose()
}

func delete_test_data() {
    log.Printf("-- entering delete_test_data...")
    //db.Exec("DELETE FROM test;")
}
packages/db/db.go
package Database
import (
"log"
"database/sql"
_ "github.com/go-sql-driver/mysql"
)
type DatabaseType struct {
DatabaseObject *sql.DB
}
func New()(d *DatabaseType) {
d = new(DatabaseType)
//db.DatabaseObject = db.DbConnect()
return d
}
func (d *DatabaseType) DbConnect() *DatabaseType{
log.Printf("-- entering DbConnect...")
var err error
if d.DatabaseObject == nil {
log.Printf("--------- > Database IS NIL...")
d.DatabaseObject, err = sql.Open("mysql", "...")
if err != nil {
panic(err.Error())
}
err = d.DatabaseObject.Ping()
if err != nil {
panic(err.Error())
}
}
return d
}
func (d *DatabaseType) DbClose(){
log.Printf("-- entering DbClose...")
defer d.DatabaseObject.Close()
}
func (d *DatabaseType) GetDb() *sql.DB{
return d.DatabaseObject
}
Everything is ok and without error until I uncomment this line:
db.Exec("DELETE FROM test;")
Can someone tell me the correct way to share a db connection?
Your dbType.DbConnect() method returns a DatabaseType with an initialized connection, but you're ignoring the return value entirely. Note also that you call db = dbType.GetDb() before DbConnect() has opened the connection, so the package-level db is still nil when you use it.
Further - to simplify your code - look at having New(host string) *DB instead of three different functions (New/DbConnect/GetDb) that do the same thing.
e.g.

package datastore

import "database/sql"

type DB struct {
    // Directly embed this
    *sql.DB
}

func NewDB(host string) (*DB, error) {
    db, err := sql.Open(...) // driver name and DSN elided
    if err != nil {
        return nil, err
    }
    return &DB{db}, nil
}

package main

var db *datastore.DB

func main() {
    var err error
    db, err = datastore.NewDB(host)
    if err != nil {
        log.Fatal(err)
    }
    if err = someFunc(); err != nil {
        log.Fatal(err)
    }
}

func someFunc() error {
    res, err := db.Exec("DELETE FROM ...")
    // Handle the error, inspect the result, etc.
    _ = res
    return err
}
This reduces the juggling you have to do, and you can still call Close() on your DB type because it embeds *sql.DB - there's no need to implement your own Close() method.
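As a brief illustration (the import path and DSN below are placeholders, not real values), the promoted methods from the embedded *sql.DB can be used directly:

package main

import (
    "log"

    "your/module/datastore" // hypothetical import path for the package above
)

func main() {
    db, err := datastore.NewDB("user:pass@tcp(localhost:3306)/mydb") // placeholder DSN
    if err != nil {
        log.Fatal(err)
    }
    // Close is promoted from the embedded *sql.DB; no wrapper method needed.
    defer db.Close()

    // Query, Exec, Ping, etc. are promoted the same way.
    if err := db.Ping(); err != nil {
        log.Fatal(err)
    }
}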
I'm trying to read the contents of a bucket on Google Cloud Storage using Go.
I'm able to do that, but it is very slow.
The content of the bucket is like this:
bucket name
-> folders with alphanumeric characters
----> 5 files inside each of the folders
--------> each file has a JSON array inside
What I want to do is inspect the content of the JSON files across all the folders in the bucket and look for a specific value. The following code works, but it is very slow:
package backend

import (
    "encoding/json"
    "fmt"
    "io"
    "io/ioutil"
    "net/http"

    "golang.org/x/net/context"
    "golang.org/x/oauth2"
    "golang.org/x/oauth2/google"
    "google.golang.org/appengine"
    "google.golang.org/appengine/file"
    "google.golang.org/appengine/urlfetch"
    "google.golang.org/cloud"
    "google.golang.org/cloud/storage"
)

var bucket = "bucket_Name"

type jsonStruct struct {
    Gender string `json:"gender"`
    Age    string `json:"age"`
    ID     string `json:"id"`
    Done   int    `json:"done"`
}

type saveData struct {
    c       context.Context
    r       *http.Request       // http request
    w       http.ResponseWriter // http writer
    ctx     context.Context
    cleanUp []string // cleanUp is a list of filenames that need cleaning up at the end of the saving.
    failed  bool     // failed indicates that one or more of the saving steps failed.
}

func init() {
    http.HandleFunc("/", handleStatic)
    http.HandleFunc("/listBuckets", listBuckets)
}

func handleStatic(w http.ResponseWriter, r *http.Request) {
    w.Header().Set("Cache-Control", "no-cache")
    http.ServeFile(w, r, "static/"+r.URL.Path)
}

func listBuckets(w http.ResponseWriter, r *http.Request) {
    c := appengine.NewContext(r)
    if bucket == "" {
        var err error
        if bucket, err = file.DefaultBucketName(c); err != nil {
            // log.Errorf(c, "failed to get default GCS bucket name: %v", err)
            return
        }
    }
    hc := &http.Client{
        Transport: &oauth2.Transport{
            Source: google.AppEngineTokenSource(c, storage.ScopeFullControl),
            Base:   &urlfetch.Transport{Context: c},
        },
    }
    ctx := cloud.NewContext(appengine.AppID(c), hc)
    // Structure that holds information needed to run the various saving functions.
    d := &saveData{
        c:   c,
        r:   r,
        w:   w,
        ctx: ctx,
    }
    d.listBucket(bucket)
}

func (d *saveData) errorf(format string, args ...interface{}) {
    d.failed = true
    // log.Errorf(d.c, format, args...)
}

func (d *saveData) listBucket(bucket string) {
    io.WriteString(d.w, "\nListbucket result:\n")
    query := &storage.Query{}
    for query != nil {
        objs, err := storage.ListObjects(d.ctx, bucket, query)
        if err != nil {
            d.errorf("listBucket: unable to list bucket %q: %v", bucket, err)
            return
        }
        query = objs.Next
        for _, obj := range objs.Results {
            d.readFile(obj.Name)
        }
    }
}

func (d *saveData) readFile(fileName string) {
    rc, err := storage.NewReader(d.ctx, bucket, fileName)
    if err != nil {
        d.errorf("readFile: unable to open file from bucket %q, file %q: %v", bucket, fileName, err)
        return
    }
    defer rc.Close()
    slurp, err := ioutil.ReadAll(rc)
    if err != nil {
        d.errorf("readFile: unable to read data from bucket %q, file %q: %v", bucket, fileName, err)
        return
    }
    var userDetails jsonStruct
    err1 := json.Unmarshal(slurp, &userDetails)
    if err1 != nil {
        d.errorf("readFile: %v", err1)
        return
    }
    fmt.Fprintf(d.w, "done is: %v\n", userDetails.Done)
}
Basically, right now I read the folder names from the bucket and then read the content using each folder name. Would it be possible to cache the entire bucket content in a Go variable and then work on that variable instead of reading from the bucket for each folder?
I really need this to be faster because I need to present the result back in real time.
Thanks a lot
Below is a simple Go code sample that lists the bucket contents on Google Cloud Storage:
package main

import (
    "context"
    "fmt"
    "log"
    "os"

    "cloud.google.com/go/storage"
    "google.golang.org/api/iterator"
)

func main() {
    os.Setenv("GOOGLE_APPLICATION_CREDENTIALS",
        "C:\\Users\\Shubham Snehi\\Downloads\\awacs-dev-160bf0e57dc1.json")
    ctx := context.Background()
    client, err := storage.NewClient(ctx)
    if err != nil {
        log.Fatalf("Failed to create client: %v", err)
    }
    defer client.Close()
    // Sets the name of the bucket.
    bucketName := "balatestawacs"
    // Creates a Bucket instance.
    bucket := client.Bucket(bucketName)
    it := bucket.Objects(ctx, nil)
    for {
        attrs, err := it.Next()
        if err == iterator.Done {
            break
        }
        if err != nil {
            panic(err)
        }
        fmt.Println(attrs.Owner)
    }
}
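Regarding the speed concern in the original question: a common approach is to read the objects concurrently with a bounded pool of goroutines instead of one at a time. Below is a rough sketch under assumed names (bucket name, JSON shape, worker count), not a tuned implementation:

package main

import (
    "context"
    "encoding/json"
    "fmt"
    "io"
    "log"
    "sync"

    "cloud.google.com/go/storage"
    "google.golang.org/api/iterator"
)

type record struct {
    Done int `json:"done"`
}

func main() {
    ctx := context.Background()
    client, err := storage.NewClient(ctx)
    if err != nil {
        log.Fatal(err)
    }
    defer client.Close()

    bucket := client.Bucket("bucket_Name") // assumed bucket name
    names := make(chan string)

    var wg sync.WaitGroup
    // A small pool of workers fetches and decodes objects in parallel.
    for i := 0; i < 10; i++ {
        wg.Add(1)
        go func() {
            defer wg.Done()
            for name := range names {
                rc, err := bucket.Object(name).NewReader(ctx)
                if err != nil {
                    log.Printf("open %s: %v", name, err)
                    continue
                }
                data, err := io.ReadAll(rc)
                rc.Close()
                if err != nil {
                    log.Printf("read %s: %v", name, err)
                    continue
                }
                var r record
                if err := json.Unmarshal(data, &r); err != nil {
                    log.Printf("decode %s: %v", name, err)
                    continue
                }
                fmt.Printf("%s done is: %v\n", name, r.Done)
            }
        }()
    }

    // Feed every object name in the bucket to the workers.
    it := bucket.Objects(ctx, nil)
    for {
        attrs, err := it.Next()
        if err == iterator.Done {
            break
        }
        if err != nil {
            log.Fatal(err)
        }
        names <- attrs.Name
    }
    close(names)
    wg.Wait()
}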
}