I'm trying to use the JSON lib from "github.com/bitly/go-simplejson":
url = "http://api.stackoverflow.com/1.1/tags?pagesize=100&page=1"
res, err := http.Get(url)
body, err := ioutil.ReadAll(res.Body)
fmt.Printf("%s\n", string(body)) //WORKS
js, err := simplejson.NewJson(body)
total,_ := js.Get("total").String()
fmt.Printf("Total:%s"+total )
But it doesn't seem to work!?
I'm trying to access the total and tag fields.
You have a few mistakes:
If you check the JSON response, you'll notice that the total field is not a string; that's why you should use the MustInt() method, not String(), when accessing the field.
The Printf() invocation was wrong: you pass a format string (the "template") first, and then arguments matching its placeholders.
By the way, I strongly recommend checking err != nil everywhere; that'll help you a lot.
Here is the working example:
package main
import (
"fmt"
"github.com/bitly/go-simplejson"
"io/ioutil"
"log"
"net/http"
)
func main() {
url := "http://api.stackoverflow.com/1.1/tags?pagesize=100&page=1"
res, err := http.Get(url)
if err != nil {
log.Fatalln(err)
}
defer res.Body.Close()
body, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Fatalln(err)
}
// fmt.Printf("%s\n", string(body))
js, err := simplejson.NewJson(body)
if err != nil {
log.Fatalln(err)
}
total := js.Get("total").MustInt()
fmt.Printf("Total:%d\n", total)
}
Is there a way to insert a CSV file using the Go library https://github.com/ClickHouse/clickhouse-go in one command (without reading the CSV and iterating through its content)? If there is, can you provide an example?
If not, how can we convert this shell command to Go using the os/exec library?
cat /home/srijan/employee.csv | clickhouse-client --query="INSERT INTO test1 FORMAT CSV"
It's not possible with that Go library. You can use the HTTP API (https://clickhouse.com/docs/en/interfaces/http/) with any Go HTTP client.
For example:
package main
import (
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"os"
)
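
// compress streams data through gzip via io.Pipe, so the request body is
// compressed on the fly instead of being buffered fully in memory.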
func compress(data io.Reader) io.Reader {
pr, pw := io.Pipe()
gw, err := gzip.NewWriterLevel(pw, int(3))
if err != nil {
panic(err)
}
go func() {
_, _ = io.Copy(gw, data)
gw.Close()
pw.Close()
}()
return pr
}
func main() {
p, err := url.Parse("http://localhost:8123/")
if err != nil {
panic(err)
}
q := p.Query()
q.Set("query", "INSERT INTO test1 FORMAT CSV")
p.RawQuery = q.Encode()
queryUrl := p.String()
var req *http.Request
req, err = http.NewRequest("POST", queryUrl, compress(os.Stdin))
if err != nil {
panic(err)
}
req.Header.Add("Content-Encoding", "gzip")
client := &http.Client{
Transport: &http.Transport{DisableKeepAlives: true},
}
resp, err := client.Do(req)
if err != nil {
panic(err)
}
defer resp.Body.Close()
body, _ := ioutil.ReadAll(resp.Body)
if resp.StatusCode != 200 {
panic(fmt.Errorf("clickhouse response status %d: %s", resp.StatusCode, string(body)))
}
}
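Since the example reads from os.Stdin, you can run it just like your shell pipeline: cat /home/srijan/employee.csv | ./yourprogram.
If you'd still rather shell out to clickhouse-client as in your original command, here is a minimal os/exec sketch (assuming clickhouse-client is installed and on your PATH):
package main

import (
	"os"
	"os/exec"
)

func main() {
	f, err := os.Open("/home/srijan/employee.csv")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	// Equivalent of: cat employee.csv | clickhouse-client --query="INSERT INTO test1 FORMAT CSV"
	cmd := exec.Command("clickhouse-client", "--query=INSERT INTO test1 FORMAT CSV")
	cmd.Stdin = f
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		panic(err)
	}
}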
I have a JSON file in S3 that takes the format of the following struct:
type StockInfo []struct {
Ticker string `json:"ticker"`
BoughtPrice string `json:"boughtPrice"`
NumberOfShares string `json:"numberOfShares"`
}
and I want to read it into a struct value from S3. In Python the code would look something like this:
import boto3
import json
s3 = boto3.client('s3', 'us-east-1')
obj = s3.get_object(Bucket=os.environ["BucketName"], Key=os.environ["Key"])
fileContents = obj['Body'].read().decode('utf-8')
json_content = json.loads(fileContents)
However I'm kinda stuck on how to make this happen in Go. I've gotten this far:
package main
import (
"archive/tar"
"bytes"
"fmt"
"log"
"os"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/joho/godotenv"
)
type StockInfo []struct {
Ticker string `json:"ticker"`
BoughtPrice string `json:"boughtPrice"`
NumberOfShares string `json:"numberOfShares"`
}
func init() {
// loads values from .env into the system
if err := godotenv.Load(); err != nil {
log.Print("No .env file found")
}
return
}
func main() {
// Store the PATH environment variable in a variable
sess, err := session.NewSession(&aws.Config{
Region: aws.String("us-east-1")},
)
if err != nil {
panic(err)
}
s3Client := s3.New(sess)
bucket := "ian-test-bucket-go-python"
key := "StockInfo.json"
requestInput := &s3.GetObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(key),
}
result, err := s3Client.GetObject(requestInput)
if err != nil {
fmt.Println(err)
}
fmt.Println(result)
which returns the body/object buffer, but I'm not sure how to read that into a string so I can unmarshal it into my struct. I found this code in a similar question:
requestInput := &s3.GetObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(key),
}
buf := new(aws.WriteAtBuffer)
numBytes, _ := *s3manager.Downloader.Download(buf, requestInput)
tr := tar.NewReader(bytes.NewReader(buf.Bytes()))
but I get the following errors:
not enough arguments in call to method expression s3manager.Downloader.Download
have (*aws.WriteAtBuffer, *s3.GetObjectInput)
want (s3manager.Downloader, io.WriterAt, *s3.GetObjectInput, ...func(*s3manager.Downloader))
multiple-value s3manager.Downloader.Download() in single-value context
Can anyone point me in the right direction? It's kinda frustrating how hard this seems compared to Python.
I was able to do it with the following code:
requestInput := &s3.GetObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(key),
}
result, err := s3Client.GetObject(requestInput)
if err != nil {
fmt.Println(err)
}
defer result.Body.Close()
body1, err := ioutil.ReadAll(result.Body)
if err != nil {
fmt.Println(err)
}
bodyString1 := fmt.Sprintf("%s", body1)
var s3data StockInfo
decoder := json.NewDecoder(strings.NewReader(bodyString1))
err = decoder.Decode(&s3data)
if err != nil {
fmt.Println("twas an error")
}
fmt.Println(s3data)
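Note that the fmt.Sprintf and strings.NewReader round-trip isn't strictly needed: json.Unmarshal(body1, &s3data) on the raw bytes does the same job, as the next answer shows.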
Alternative solution using json.Unmarshal, based on aws-sdk-go-v2:
...
params := &s3.GetObjectInput{
Bucket: aws.String(s3Record.S3.Bucket.Name),
Key: aws.String(s3Record.S3.Object.Key),
}
result, err := client.GetObject(context.TODO(), params)
if err != nil {
panic(err)
}
defer result.Body.Close()
// capture all bytes from upload
b, err := ioutil.ReadAll(result.Body)
if err != nil {
panic(err)
}
var temp StockInfo
if err = json.Unmarshal(b, &temp); err != nil {
panic(err)
}
fmt.Println("res: ", temp)
I built a client and a server in Go; both use these functions to encrypt/decrypt:
func encrypt(text []byte) ([]byte, error) {
block, err := aes.NewCipher(key)
if err != nil {
return nil, err
}
b := base64.StdEncoding.EncodeToString(text)
ciphertext := make([]byte, aes.BlockSize+len(b))
iv := ciphertext[:aes.BlockSize]
if _, err := io.ReadFull(rand.Reader, iv); err != nil {
return nil, err
}
cfb := cipher.NewCFBEncrypter(block, iv)
cfb.XORKeyStream(ciphertext[aes.BlockSize:], []byte(b))
return ciphertext, nil
}
func decrypt(text []byte) ([]byte, error) {
block, err := aes.NewCipher(key)
if err != nil {
return nil, err
}
if len(text) < aes.BlockSize {
return nil, errors.New("ciphertext too short")
}
iv := text[:aes.BlockSize]
text = text[aes.BlockSize:]
cfb := cipher.NewCFBDecrypter(block, iv)
cfb.XORKeyStream(text, text)
data, err := base64.StdEncoding.DecodeString(string(text))
if err != nil {
return nil, err
}
return data, nil
}
So I make a normal POST request:
url := "https://"+configuration.Server+configuration.Port+"/get"
// TODO maybe bugs rest here
ciphertext, err := encrypt([]byte(*getUrl))
if err != nil {
fmt.Println("Error: " + err.Error())
}
fmt.Println(string(ciphertext))
values := map[string]interface{}{"url": *getUrl, "urlCrypted": ciphertext}
jsonValue, _ := json.Marshal(values)
jsonStr := bytes.NewBuffer(jsonValue)
req, err := http.NewRequest("POST", url, jsonStr)
and the server code is as follows:
requestContent := getRequestContentFromRequest(req)
url := requestContent["url"].(string)
undecryptedUrl := requestContent["urlCrypted"].(string)
decryptedurl, err := decrypt([]byte(undecryptedUrl))
if err != nil {
fmt.Println("Error: " + err.Error())
}
fmt.Println(decryptedurl)
where getRequestContentFromRequest is as follows:
func getRequestContentFromRequest(req *http.Request) map[string]interface{} {
buf := new(bytes.Buffer)
buf.ReadFrom(req.Body)
data := buf.Bytes()
var requestContent map[string]interface{}
err := json.Unmarshal(data, &requestContent)
if err != nil {
fmt.Println(err)
}
return requestContent
}
Now to the problem.
If I encrypt my string in the client and decrypt it directly after that, everything is fine.
But when I send the encrypted string to the server and try to decrypt it with literally the same function as in the client, the decrypt function throws an error:
Error: illegal base64 data at input byte 0
I think the problem is the unmarshalling of the JSON.
Thanks for the help.
P.S.
Repos are
github.com/BelphegorPrime/goSafeClient and github.com/BelphegorPrime/goSafe
UPDATE
Example JSON
{"url":"facebook2.com","urlCrypted":"/}\ufffd\ufffd\ufffdgP\ufffdN뼞\ufffd\u0016\ufffd)\ufffd\ufffd\ufffdy\u001c\u000f\ufffd\ufffd\ufffdep\ufffd\rY\ufffd\ufffd$\ufffd\ufffd"}
UPDATE2
I made a playground here
The problem is that you encode in base64 twice: the first time in the encrypt function, and the second time during JSON marshalling. Byte slices are automatically converted into base64 strings by the encoding/json marshaller.
The solution is to decode the base64 string before calling decrypt.
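A minimal sketch of the server-side fix, reusing the names from the question:
rawCiphertext, err := base64.StdEncoding.DecodeString(requestContent["urlCrypted"].(string))
if err != nil {
	fmt.Println("Error: " + err.Error())
}
decryptedurl, err := decrypt(rawCiphertext)
if err != nil {
	fmt.Println("Error: " + err.Error())
}
fmt.Println(string(decryptedurl))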
Example on the Go PlayGround
EDIT
Working solution here
I am trying to build an API, and to secure it properly I believe I need RSA encryption: a private key stored on my server and a public key for the client. I have stored the generated private key in a JSON file that I plan to keep on my server, but to write it to JSON I needed to convert it to []byte. Now when I try to retrieve the private key to generate a public key, it will not let me use type []byte for *PublicKey.
The only other way I can think of to accomplish this is to seed the random number generator, so I can keep the seed secret on my server and my private key will always generate to the same thing. Any help with this would be great.
package main
import (
"bytes"
"crypto/rand"
"crypto/rsa"
"encoding/json"
"fmt"
"io/ioutil"
"os"
)
func main() {
mimicPrivateKey, err := rsa.GenerateKey(rand.Reader, 2048)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
buf := new(bytes.Buffer)
json.NewEncoder(buf).Encode(mimicPrivateKey)
secrets, _ := os.OpenFile("secrets.json", os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666)
// Close the secrets file when writing is done
secrets.WriteString(buf.String())
secrets.Close()
secrets, _ = os.OpenFile("secrets.json", os.O_RDWR, 0666)
serverKey, _ := ioutil.ReadAll(secrets)
if serverKey != nil {
fmt.Println("can not open key")
}
serverKeyPublic := &serverKey.PublicKey
}
You need to Unmarshal it:
var data *rsa.PrivateKey
err = json.Unmarshal(serverKey, &data)
if err != nil {
panic(err)
}
And you may use
err = ioutil.WriteFile("secrets.json", buf.Bytes(), 0666)
and
serverKey, err := ioutil.ReadFile("secrets.json")
See:
package main
import (
"bytes"
"crypto/rand"
"crypto/rsa"
"encoding/json"
"fmt"
"io/ioutil"
)
func main() {
mimicPrivateKey, err := rsa.GenerateKey(rand.Reader, 2048)
if err != nil {
panic(err)
}
var buf bytes.Buffer
err = json.NewEncoder(&buf).Encode(mimicPrivateKey)
if err != nil {
panic(err)
}
err = ioutil.WriteFile("secrets.json", buf.Bytes(), 0666)
if err != nil {
panic(err)
}
serverKey, err := ioutil.ReadFile("secrets.json")
if err != nil {
panic(err)
}
var data *rsa.PrivateKey
err = json.Unmarshal(serverKey, &data)
if err != nil {
panic(err)
}
serverKeyPublic := data.PublicKey
fmt.Println(serverKeyPublic)
}
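As an aside, a more conventional way in Go to persist an RSA private key is PEM/PKCS#1 (crypto/x509 and encoding/pem) rather than JSON. A minimal sketch; the secrets.pem file name is just an example:
package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"io/ioutil"
)

func main() {
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	// Serialize the key in the standard PKCS#1/PEM format.
	pemBytes := pem.EncodeToMemory(&pem.Block{
		Type:  "RSA PRIVATE KEY",
		Bytes: x509.MarshalPKCS1PrivateKey(key),
	})
	if err := ioutil.WriteFile("secrets.pem", pemBytes, 0600); err != nil {
		panic(err)
	}
	raw, err := ioutil.ReadFile("secrets.pem")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(raw)
	if block == nil {
		panic("no PEM block found")
	}
	parsed, err := x509.ParsePKCS1PrivateKey(block.Bytes)
	if err != nil {
		panic(err)
	}
	fmt.Println(parsed.PublicKey)
}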
The Go code below reads in a 10,000-record CSV (of timestamps and float values), runs some operations on the data, and then writes the original values to another CSV along with an additional score column. However it is terribly slow (hours, though most of that is calculateStuff()), and I'm curious whether there are any inefficiencies in the CSV reading/writing I can take care of.
package main
import (
"encoding/csv"
"log"
"os"
"strconv"
)
func ReadCSV(filepath string) ([][]string, error) {
csvfile, err := os.Open(filepath)
if err != nil {
return nil, err
}
defer csvfile.Close()
reader := csv.NewReader(csvfile)
fields, err := reader.ReadAll()
return fields, err
}
func main() {
// load data csv
records, err := ReadCSV("./path/to/datafile.csv")
if err != nil {
log.Fatal(err)
}
// write results to a new csv
outfile, err := os.Create("./where/to/write/resultsfile.csv")
if err != nil {
log.Fatal("Unable to open output")
}
defer outfile.Close()
writer := csv.NewWriter(outfile)
for i, record := range records {
time := record[0]
value := record[1]
// skip header row
if i == 0 {
writer.Write([]string{time, value, "score"})
continue
}
// get float values
floatValue, err := strconv.ParseFloat(value, 64)
if err != nil {
log.Fatal("Record: %v, Error: %v", floatValue, err)
}
// calculate scores; THIS EXTERNAL METHOD CANNOT BE CHANGED
score := calculateStuff(floatValue)
valueString := strconv.FormatFloat(floatValue, 'f', 8, 64)
scoreString := strconv.FormatFloat(score, 'f', 8, 64)
//fmt.Printf("Result: %v\n", []string{time, valueString, scoreString})
writer.Write([]string{time, valueString, scoreString})
}
writer.Flush()
}
I'm looking for help making this CSV read/write template code as fast as possible. For the scope of this question we need not worry about the calculateStuff method.
You're loading the whole file into memory first and then processing it, which can be slow with a big file.
Instead, loop and call .Read, processing one record at a time:
func processCSV(rc io.Reader) (ch chan []string) {
ch = make(chan []string, 10)
go func() {
r := csv.NewReader(rc)
if _, err := r.Read(); err != nil { //read header
log.Fatal(err)
}
defer close(ch)
for {
rec, err := r.Read()
if err != nil {
if err == io.EOF {
break
}
log.Fatal(err)
}
ch <- rec
}
}()
return
}
playground
Note: it's roughly based on Dave C's comment.
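A minimal sketch of consuming that channel (the file path is a placeholder):
func main() {
	f, err := os.Open("./path/to/datafile.csv")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	for rec := range processCSV(f) {
		fmt.Println(rec) // one CSV record at a time; the header was already skipped
	}
}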
This is essentially Dave C's answer from the comments sections:
package main
import (
"encoding/csv"
"io"
"log"
"os"
"strconv"
)
func main() {
// setup reader
csvIn, err := os.Open("./path/to/datafile.csv")
if err != nil {
log.Fatal(err)
}
r := csv.NewReader(csvIn)
// setup writer
csvOut, err := os.Create("./where/to/write/resultsfile.csv")
if err != nil {
log.Fatal("Unable to open output")
}
w := csv.NewWriter(csvOut)
defer csvOut.Close()
// handle header
rec, err := r.Read()
if err != nil {
log.Fatal(err)
}
rec = append(rec, "score")
if err = w.Write(rec); err != nil {
log.Fatal(err)
}
for {
rec, err = r.Read()
if err != nil {
if err == io.EOF {
break
}
log.Fatal(err)
}
// get float value
value := rec[1]
floatValue, err := strconv.ParseFloat(value, 64)
if err != nil {
log.Fatal("Record, error: %v, %v", value, err)
}
// calculate scores; THIS EXTERNAL METHOD CANNOT BE CHANGED
score := calculateStuff(floatValue)
scoreString := strconv.FormatFloat(score, 'f', 8, 64)
rec = append(rec, scoreString)
if err = w.Write(rec); err != nil {
log.Fatal(err)
}
}
// flush once after the loop, not per record
w.Flush()
if err = w.Error(); err != nil {
log.Fatal(err)
}
}
Note of course the logic is all jammed into main(); it would be better to split it into several functions, but that's beyond the scope of this question.
encoding/csv is indeed very slow on big files, as it performs a lot of allocations. Since your format is so simple, I recommend using strings.Split instead, which is much faster.
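For illustration, a minimal sketch of that approach; note it assumes plain unquoted fields, since strings.Split does not handle CSV quoting:
package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"strings"
)

func main() {
	f, err := os.Open("./path/to/datafile.csv")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	sc := bufio.NewScanner(f)
	sc.Scan() // skip the header row
	for sc.Scan() {
		fields := strings.Split(sc.Text(), ",")
		fmt.Println(fields[0], fields[1]) // timestamp, value
	}
	if err := sc.Err(); err != nil {
		log.Fatal(err)
	}
}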
If even that is not fast enough, you can consider implementing the parsing yourself using strings.IndexByte, which is implemented in assembly: http://golang.org/src/strings/strings_decl.go?s=274:310#L1
Having said that, you should also reconsider using ReadAll if the file is larger than your memory.